은승우

ocr

Showing 1 changed file with 53 additions and 22 deletions
var express = require("express");
var app = express();
const line = require('@line/bot-sdk');
var request = require('request');
var https=require('https');
var http=require('http');
const lex = require('greenlock-express').create({
/*
var fs = require("fs");
var httpsOptions = {
key: fs.readFileSync('/etc/letsencrypt/live/oss.chatbot.bu.to/privkey.pem'),
cert: fs.readFileSync('/etc/letsencrypt/live/oss.chatbot.bu.to/cert.pem')
};
http.createServer(app).listen(80);
https.createServer(httpsOptions, app).listen(443);
*/
/* if ssl expired
var greenlock= require('greenlock-express');
const lex = greenlock .create({
version: 'draft-11', // version 2
store: require('greenlock-store-fs'),
configDir: '/etc/letsencrypt', // or ~/letsencrypt/etc
@@ -21,15 +34,11 @@ const lex = require('greenlock-express').create({
},
renewWithin: 81 * 24 * 60 * 60 * 1000,
renewBy: 80 * 24 * 60 * 60 * 1000,
});//papago api
});*/
//papago api
https.createServer(lex.httpsOptions, lex.middleware(app)).listen((process.env.SSL_PORT || 443),()=>{
console.log("server on 443");
});
http.createServer(lex.middleware(require('redirect-https')())).listen(process.env.PORT || 80,()=>{
console.log("server on 80");
});
// translation api_url
var translate_api_url = 'https://openapi.naver.com/v1/papago/n2mt';
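// A minimal sketch (not part of this commit) of how translate_api_url is typically
// called with the `request` module required above. The helper name translateText and
// the papago_client_id / papago_client_secret placeholders are hypothetical and would
// need the values registered for the Naver Papago API.
var papago_client_id = 'YOUR_CLIENT_ID';         // placeholder
var papago_client_secret = 'YOUR_CLIENT_SECRET'; // placeholder
function translateText(text, source, target, callback) {
    request.post({
        url: translate_api_url,
        headers: {
            'X-Naver-Client-Id': papago_client_id,
            'X-Naver-Client-Secret': papago_client_secret
        },
        form: { source: source, target: target, text: text }
    }, function (error, response, body) {
        if (error || response.statusCode !== 200) {
            return callback(error || new Error('papago request failed: ' + response.statusCode));
        }
        // the translated string sits at message.result.translatedText in the response body
        callback(null, JSON.parse(body).message.result.translatedText);
    });
}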
@@ -53,22 +62,31 @@ const client = new line.Client(config);
// register a webhook handler with middleware
// about the middleware, please refer to doc
const gcloudVision = require('@google-cloud/vision');
// Creates a Cloud Vision client (kept separate from the google-vision-api-client module below)
const visionclient = new gcloudVision.ImageAnnotatorClient();
var vision = require('google-vision-api-client');
var requtil = vision.requtil;
//Prepare your service account from trust preview certificated project
var jsonfile = '/home/ubuntu/a/LINEBOT/googlevisionapikey';
/**
* TODO(developer): Uncomment the following line before running the sample.
*/
// const fileName = 'Local image file, e.g. /path/to/image.png';
// Performs text detection on the local file
// (await must run inside an async function in CommonJS, hence the wrapper)
(async () => {
const [result] = await visionclient.textDetection('/home/ubuntu/a/LINEBOT/photo/Fancy-TWICE.jpg');
const detections = result.textAnnotations;
console.log('Text:');
detections.forEach(text => console.log(text));
})();
//Initialize the api
vision.init(jsonfile);
//Build the request payloads
var d = requtil.createRequests().addRequest(
requtil.createRequest('/home/ubuntu/a/LINEBOT/photo/Fancy=TWICE.jpg')
.withFeature('TEXT_DETECTION', 3) // TEXT_DETECTION is the Vision API feature type for OCR
.build());
//Do query to the api server
vision.query(d, function(e, r, d){
if(e) console.log('ERROR:', e);
@@ -77,6 +95,9 @@ if(e) console.log('ERROR:', e);
app.post('/webhook', line.middleware(config), (req, res) => {
Promise
.all(req.body.events.map(handleEvent))
@@ -87,10 +108,18 @@ app.post('/webhook', line.middleware(config), (req, res) => {
// event handler
function handleEvent(event) {
console.log(event.message);
if (event.type !== 'message' || event.message.type !== 'text') {
// ignore non-text-message event
if (event.type !== 'message' || (event.message.type !== 'text' && event.message.type !== 'image')) {
// ignore events that are not text or image messages
return Promise.resolve(null);
}
else if(event.type == 'message' && event.message.type == 'image')
{
// image messages: OCR handling is not implemented here yet; one possible approach is sketched after this function
}
else if(event.type == 'message' && event.message.type == 'text')
{
return new Promise(function(resolve, reject) {
// language detection options
var detect_options = {
@@ -155,4 +184,6 @@ function handleEvent(event) {
});
}
}
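// A minimal sketch (not part of this commit) of how the empty image branch above could
// be filled in: download the image through the LINE Messaging API and run Cloud Vision
// text detection on the buffer. The helper name handleImageMessage is hypothetical; it
// assumes the `client` (@line/bot-sdk) and `visionclient` (@google-cloud/vision) objects
// defined earlier in this file.
function handleImageMessage(event) {
    // getMessageContent() resolves to a readable stream of the uploaded image
    return client.getMessageContent(event.message.id)
        .then(stream => new Promise((resolve, reject) => {
            const chunks = [];
            stream.on('data', chunk => chunks.push(chunk));
            stream.on('error', reject);
            stream.on('end', () => resolve(Buffer.concat(chunks)));
        }))
        // textDetection() accepts a Buffer as well as a file path
        .then(buffer => visionclient.textDetection(buffer))
        .then(([result]) => {
            const annotations = result.textAnnotations || [];
            const text = annotations.length ? annotations[0].description : 'no text found';
            return client.replyMessage(event.replyToken, { type: 'text', text: text });
        });
}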