Toggle navigation
Toggle navigation
This project
Loading...
Sign in
이혜연
/
Make_your_own_ chatbot
Go to a project
Toggle navigation
Toggle navigation pinning
Projects
Groups
Snippets
Help
Project
Activity
Repository
Graphs
Network
Create a new issue
Commits
Issue Boards
Authored by
이혜연
2020-09-18 20:27:17 +0900
Browse Files
Options
Browse Files
Download
Email Patches
Plain Diff
Commit
aec7e811d5f3663fd6a383f034af22b61a8f2152
aec7e811
1 parent
cbd2cfc2
Delete server.py
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
0 additions
and
53 deletions
final/flask/server.py
final/flask/server.py
deleted
100644 → 0
View file @
cbd2cfc
import
numpy
as
np
import
pickle
import
tensorflow
as
tf
from
flask
import
Flask
,
jsonify
,
render_template
,
request
import
model
# Load in data structures: the vocabulary produced at training time.
with open("data/wordList.txt", "rb") as fp:
    # NOTE(review): pickle.load on a local data file — trusted input assumed.
    wordList = pickle.load(fp)
# Special tokens are appended last, so their ids are the two highest —
# presumably this matches the training-time ordering; TODO confirm.
wordList.append('<pad>')
wordList.append('<EOS>')
# Load in hyperparameters. These must match the values the restored
# checkpoint was trained with, or saver.restore below will fail.
vocabSize = len(wordList)
batchSize = 24        # unused in this file; kept for parity with training config
maxEncoderLength = 15  # encoder timesteps (one placeholder per step)
maxDecoderLength = 15  # decoder timesteps
lstmUnits = 112
numLayersLSTM = 3     # unused: the MultiRNNCell line below is commented out
# Create placeholders: TF 1.x legacy_seq2seq takes a Python list of
# per-timestep int32 tensors, not a single [batch, time] tensor.
encoderInputs = [tf.placeholder(tf.int32, shape=(None,)) for i in range(maxEncoderLength)]
decoderLabels = [tf.placeholder(tf.int32, shape=(None,)) for i in range(maxDecoderLength)]
decoderInputs = [tf.placeholder(tf.int32, shape=(None,)) for i in range(maxDecoderLength)]
# When fed True, the decoder consumes its own previous prediction at each
# step (inference mode) instead of the ground-truth decoderInputs.
feedPrevious = tf.placeholder(tf.bool)
encoderLSTM = tf.nn.rnn_cell.BasicLSTMCell(lstmUnits, state_is_tuple=True)
#encoderLSTM = tf.nn.rnn_cell.MultiRNNCell([singleCell]*numLayersLSTM, state_is_tuple=True)
# Build the embedding seq2seq graph; both input and output vocabularies
# share vocabSize, and the embedding dimension equals lstmUnits.
decoderOutputs, decoderFinalState = tf.contrib.legacy_seq2seq.embedding_rnn_seq2seq(encoderInputs, decoderInputs, encoderLSTM, vocabSize, vocabSize, lstmUnits, feed_previous=feedPrevious)
# argmax over axis 2 (the vocabulary logits) -> predicted word id per step.
decoderPrediction = tf.argmax(decoderOutputs, 2)
# Start session and get graph
sess = tf.Session()
#y, variables = model.getModel(encoderInputs, decoderLabels, decoderInputs, feedPrevious)
# Load in pretrained model weights from the newest checkpoint under ./models.
saver = tf.train.Saver()
saver.restore(sess, tf.train.latest_checkpoint('models'))
# Length-1 zero vector fed to the unused decoder placeholders at inference.
# NOTE(review): np.zeros((1), ...) — (1) is the int 1, not a tuple; the
# resulting shape is (1,) either way.
zeroVector = np.zeros((1), dtype='int32')
def pred(inputString):
    """Decode *inputString* through the restored seq2seq graph.

    Converts the raw text to per-timestep id vectors via
    model.getTestInput, runs the decoder with feed_previous=True
    (the decoder consumes its own predictions), and turns the
    resulting id sequence back into text.

    Args:
        inputString: raw user utterance.

    Returns:
        The decoded sentence as produced by model.idsToSentence.
    """
    # Per-timestep encoder id vectors for the utterance.
    encoded = model.getTestInput(inputString, wordList, maxEncoderLength)
    # encoderInputs has exactly maxEncoderLength placeholders, one per step.
    feed = {placeholder: encoded[step]
            for step, placeholder in enumerate(encoderInputs)}
    # Labels and teacher-forcing inputs are irrelevant at inference time;
    # feed the shared zero vector to every decoder-side placeholder.
    for step in range(maxDecoderLength):
        feed[decoderLabels[step]] = zeroVector
        feed[decoderInputs[step]] = zeroVector
    # Inference mode: decoder feeds back its own previous output.
    feed[feedPrevious] = True
    ids = sess.run(decoderPrediction, feed_dict=feed)
    return model.idsToSentence(ids, wordList)
# webapp
# Flask application object; templates are resolved from this directory
# (template_folder='./') instead of the default ./templates.
# NOTE(review): no routes or app.run() appear in this file — presumably the
# app is served elsewhere or the file is incomplete; verify against deployment.
app = Flask(__name__, template_folder='./')
\ No newline at end of file
Please
register
or
login
to post a comment