Skip to content

Commit

Permalink
notebooks and models
Browse files Browse the repository at this point in the history
  • Loading branch information
dvgodoy committed Mar 19, 2017
1 parent fbe44a5 commit 7109c16
Show file tree
Hide file tree
Showing 125 changed files with 3,895,321 additions and 2 deletions.
16 changes: 16 additions & 0 deletions DockerLSTM/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
FROM continuumio/miniconda

# Single RUN layer: five separate "RUN pip install" lines each create an
# image layer; combining them shrinks the image and speeds rebuilds.
# keras is pinned (the saved .model files were trained against 1.2.2);
# NOTE(review): tensorflow is unpinned — consider pinning for reproducibility.
RUN pip install tensorflow \
    keras==1.2.2 \
    h5py \
    flask \
    cherrypy

# Port the CherryPy/Flask server listens on (see server.py / app.py).
EXPOSE 8002

# COPY is preferred over ADD for plain local files (ADD's extra tar/URL
# semantics are not needed here).
RUN mkdir /webapp
COPY . /webapp

WORKDIR /webapp

# Exec form so python is PID 1 and receives SIGTERM directly on
# "docker stop" instead of being wrapped in /bin/sh.
CMD ["python", "/webapp/server.py"]
68 changes: 68 additions & 0 deletions DockerLSTM/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
import pickle
import os
import json
import random
import numpy as np
import tensorflow as tf
from flask import Flask, request
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
from keras.models import load_model

app = Flask(__name__)

# One independently trained model per target column of the review dataset.
features = ['cool', 'funny', 'stars', 'useful']

# Per-feature artifacts, all keyed by feature name and loaded once at import
# time so request handlers only do inference.
tokenizers = {}  # keras Tokenizer used to turn raw text into index sequences
models = {}      # trained keras models loaded from disk
graphs = {}      # TF graph active when each model was loaded

for feature in features:
    # Tokenizer was pickled at training time; paths follow the layout
    # /webapp/<feature>/<feature>.{tokenizer,model} (see ADD . /webapp).
    with open('/webapp/' + feature + '/' + feature + '.tokenizer', 'rb') as f:
        tokenizers.update({feature: pickle.load(f)})

    models.update({feature: load_model('/webapp/' + feature + '/' + feature + '.model')})
    # Capture the default graph *right after* load_model so predictions can
    # re-enter it from Flask's worker threads; without this, keras raises
    # "Tensor is not an element of this graph".
    # https://github.com/fchollet/keras/issues/2397
    graphs.update({feature: tf.get_default_graph()})

def make_pred(feature, sentence):
    """Run the model for *feature* on a raw review and return its
    predicted probability array."""
    indexed = tokenizers[feature].texts_to_sequences([sentence])
    padded = pad_sequences(indexed, maxlen=400)
    # Re-enter the graph captured at load time so inference works from
    # Flask's request threads (keras issue #2397 workaround).
    with graphs[feature].as_default():
        return models[feature].predict_proba(padded)

@app.route("/cool", methods=["POST"])
def cool():
    """POST /cool — binary prediction of whether a review is 'cool'.

    Expects form field 'review'; returns JSON {"cool": 0 or 1}.
    """
    sentence = request.form.get('review').encode('utf-8')
    predicted_proba = make_pred('cool', sentence)
    # Threshold at 0.5, then cast the numpy scalar to a plain int:
    # json.dumps raises TypeError on numpy integer types.
    prediction = int((predicted_proba > 0.5).astype(int)[0][0])
    response = {'cool': prediction}
    return json.dumps(response)

@app.route("/funny", methods=["POST"])
def funny():
    """POST /funny — binary prediction of whether a review is 'funny'.

    Expects form field 'review'; returns JSON {"funny": 0 or 1}.
    """
    sentence = request.form.get('review').encode('utf-8')
    predicted_proba = make_pred('funny', sentence)
    # Threshold at 0.5, then cast the numpy scalar to a plain int:
    # json.dumps raises TypeError on numpy integer types.
    prediction = int((predicted_proba > 0.5).astype(int)[0][0])
    response = {'funny': prediction}
    return json.dumps(response)

@app.route("/stars", methods=["POST"])
def stars():
    """POST /stars — multi-class star-rating prediction for a review.

    Expects form field 'review'; returns JSON {"stars": class index}.
    """
    sentence = request.form.get('review').encode('utf-8')
    predicted_proba = make_pred('stars', sentence)
    # np.argmax yields a numpy.int64; cast to plain int because
    # json.dumps raises TypeError on numpy integer types.
    prediction = int(np.argmax(predicted_proba, axis=1)[0])
    response = {'stars': prediction}
    return json.dumps(response)

@app.route("/useful", methods=["POST"])
def useful():
    """POST /useful — binary prediction of whether a review is 'useful'.

    Expects form field 'review'; returns JSON {"useful": 0 or 1}.
    """
    sentence = request.form.get('review').encode('utf-8')
    predicted_proba = make_pred('useful', sentence)
    # Threshold at 0.5, then cast the numpy scalar to a plain int:
    # json.dumps raises TypeError on numpy integer types.
    prediction = int((predicted_proba > 0.5).astype(int)[0][0])
    response = {'useful': prediction}
    return json.dumps(response)

if __name__ == "__main__":
    # Dev entry point: Flask's built-in server on port 8002 (matches the
    # Dockerfile's EXPOSE). NOTE(review): app.run binds 127.0.0.1 by default,
    # so this is unreachable from outside a container; the Docker image's CMD
    # runs server.py instead — confirm that is the production entry point.
    app.run(port=8002)
Binary file added DockerLSTM/cool/cool.model
Binary file not shown.
Loading

0 comments on commit 7109c16

Please sign in to comment.