app.js
var express = require('express');
var app = express();
const line = require('@line/bot-sdk');
const config = require('./config');

// Papago (Naver) REST APIs, called through the `request` module
var request = require('request');
// translation API URL
var translate_api_url = 'https://openapi.naver.com/v1/papago/n2mt';
// language-detection API URL
var languagedetect_api_url = 'https://openapi.naver.com/v1/papago/detectLangs';

// Naver API credentials for Papago.
// NOTE: assumed to come from environment variables here; adjust to wherever you keep them.
var client_id = process.env.NAVER_CLIENT_ID;
var client_secret = process.env.NAVER_CLIENT_SECRET;

// create LINE SDK client
// config.line_config must hold channelAccessToken and channelSecret;
// the same object is passed to the webhook middleware below.
const client = new line.Client(config.line_config);

// create Express app
// about Express itself: https://expressjs.com/

// register a webhook handler with middleware
// about the middleware, please refer to the LINE SDK doc
app.post('/webhook', line.middleware(config.line_config), (req, res) => {
  // res.status(200).end();
  Promise.all(req.body.events.map(handleEvent)).then(result =>
    res.json(result)
  );
});
// Handles every incoming LINE event (mapped over req.body.events in the webhook above)
function handleEvent(event) {
  switch (event.type) {
    case 'image': {
      // Text detection (OCR) with Google Cloud Vision.
      // NOTE: detectText is only defined here; it still has to be called with an image
      // downloaded from LINE (e.g. via client.getMessageContent) to do anything.
      async function detectText(fileName) {
        // [START vision_text_detection]
        const vision = require('@google-cloud/vision');

        // Creates a Vision client (named visionClient so it does not shadow the LINE client)
        const visionClient = new vision.ImageAnnotatorClient();

        /**
         * TODO(developer): Uncomment the following line before running the sample.
         */
        // const fileName = 'Local image file, e.g. /path/to/image.png';

        // Performs text detection on the local file
        const [result] = await visionClient.textDetection(fileName);
        const detections = result.textAnnotations;
        console.log('Text:');
        detections.forEach(text => console.log(text));
        // [END vision_text_detection]
      }
      break;
    }
    case 'message': {
      // Ask Papago which language the incoming message is written in.
      var detect_options = {
        url: languagedetect_api_url,
        form: {'query': event.message.text},
        headers: {'X-Naver-Client-Id': client_id, 'X-Naver-Client-Secret': client_secret}
      };
      request.post(detect_options, function (error, response, body) {
        console.log(response.statusCode);
        if (!error && response.statusCode == 200) {
          var detect_body = JSON.parse(response.body);
          var source = '';
          var target = '';
          var result = {type: 'text', text: ''};
          // check that language detection actually worked
          console.log(detect_body.langCode);
          // translation is only supported for Korean->English / English->Korean
          if (detect_body.langCode == 'ko' || detect_body.langCode == 'en') {
            source = detect_body.langCode == 'ko' ? 'ko' : 'en';
            target = source == 'ko' ? 'en' : 'ko';
            // Papago translation options
            var options = {
              url: translate_api_url,
              // source/target language codes and the message text received from the chat
              form: {'source': source, 'target': target, 'text': event.message.text},
              headers: {'X-Naver-Client-Id': client_id, 'X-Naver-Client-Secret': client_secret}
            };
            // Naver POST API
            request.post(options, function (error, response, body) {
              // translation API success
              if (!error && response.statusCode == 200) {
                // parse the JSON response
                var objBody = JSON.parse(response.body);
                // check that the translated message comes through
                result.text = objBody.message.result.translatedText;
                console.log(result.text);
                // send the translated sentence back to the user
                client.replyMessage(event.replyToken, result).catch(err => console.error(err));
              }
            });
          }
          // when the message is in neither English nor Korean
          else {
            // "Could not detect the language. Only Korean or English can be translated."
            result.text = '언어를 감지할 수 없습니다. \n 번역 언어는 한글 또는 영어만 가능합니다.';
            client.replyMessage(event.replyToken, result).catch(err => console.error(err));
          }
        }
      });
      break;
    }
  }
}
app.listen(3000, function () {
  console.log('Linebot listening on port 3000!');
});