-
Notifications
You must be signed in to change notification settings - Fork 0
/
server.py
171 lines (125 loc) · 4.64 KB
/
server.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
from flask import Flask
from flask_cors import CORS, cross_origin
from flask import flash, redirect, render_template, request, session, abort, make_response
from flask import jsonify
import os
from os import listdir
import pandas
from pandas import Series
import psycopg2
import pickle
import codecs
from patsy import dmatrices
import numpy as np
from sklearn.linear_model import LogisticRegression
# Legacy filesystem root of the backend checkout; current code persists models
# in Postgres, so this appears unused by the visible routes.
root = "/home/mighty/gmc/gmc-backend"
# Postgres connection settings. All are required: a missing variable raises
# KeyError at import time, failing fast instead of at first query.
DBNAME = os.environ["GMC_DBNAME"]
HOST = os.environ["GMC_HOST"]
PORT = os.environ["GMC_PORT"]  # NOTE(review): read but never passed to psycopg2.connect — verify intent
DBUSER = os.environ["GMC_USER"]
PASSWORD = os.environ["GMC_PASSWORD"]
app = Flask(__name__)
# Falls back to a weak default secret — fine for dev, set SECRET_KEY in prod.
app.config['SECRET_KEY'] = os.environ.get('SECRET_KEY', 'XYZ')
CORS(app)
def createModel(user, data):
    """Train a logistic-regression good/bad-track model for *user* and persist
    it (pickled, base64-encoded) as a row in the ``gmc`` table.

    Parameters:
        user: identifier stored as the row id in ``gmc``.
        data: pandas DataFrame containing a binary ``qual`` label column plus
              the audio-feature columns named in the formula below.
    """
    # Build design matrices from the patsy formula (adds an intercept column).
    y, X = dmatrices(
        'qual~danceability+energy+key+loudness+mode+speechiness+acousticness'
        '+instrumentalness+liveness+valence+tempo+duration_ms+time_signature',
        data, return_type="dataframe")
    y = np.ravel(y)  # flatten the (n, 1) response frame to the 1-D vector sklearn expects
    model = LogisticRegression()
    model.fit(X, y)
    # Base64-encode the pickle so it fits in a plain text column.
    model_bytes = codecs.encode(pickle.dumps(model), "base64").decode()
    conn = psycopg2.connect(host=HOST, database=DBNAME, user=DBUSER, password=PASSWORD)
    try:
        cur = conn.cursor()
        try:
            # NOTE(review): a second train for the same user will insert a
            # duplicate row (or violate a PK if one exists) — consider an
            # UPSERT; verify the gmc schema.
            cur.execute("""INSERT INTO gmc VALUES(%s, %s)""", (user, model_bytes))
            conn.commit()
        finally:
            cur.close()
    finally:
        # Fix: the original never closed the connection, leaking one per call.
        conn.close()
def prediction(user, data):
    """Load *user*'s persisted model from the ``gmc`` table and classify each
    row of *data*.

    Parameters:
        user: id whose model row is fetched.
        data: pandas DataFrame with a ``qual`` column (value ignored) plus the
              audio-feature columns named in the formula.
    Returns:
        list[str]: 'good' or 'bad' per input row.
    Raises:
        LookupError: if no model row exists for *user*.
    """
    conn = psycopg2.connect(host=HOST, database=DBNAME, user=DBUSER, password=PASSWORD)
    try:
        cur = conn.cursor()
        try:
            query = """SELECT model FROM gmc WHERE id=%s"""
            # BUG FIX: the original queried the hard-coded id 'nothing_faith',
            # so every user got the same model regardless of the argument.
            cur.execute(query, (user,))
            row = cur.fetchone()
        finally:
            cur.close()
    finally:
        # Fix: close the connection (was leaked); no commit needed for SELECT.
        conn.close()
    if row is None:
        # Explicit error instead of the original's TypeError on None[0].
        raise LookupError("no model stored for user %r" % user)
    # NOTE(review): unpickling DB content — safe only if the gmc table is
    # written exclusively by createModel; never load untrusted pickles.
    new_model = pickle.loads(codecs.decode(row[0].encode(), "base64"))
    y, X = dmatrices(
        'qual ~ danceability + energy + key + loudness + mode + speechiness'
        ' + acousticness + instrumentalness + liveness + valence + tempo'
        ' + duration_ms + time_signature',
        data, return_type="dataframe")
    preds = new_model.predict(X)
    # the sk-learn model is much different from the R version, figure out why
    # later, but the software engineering aspect is more important right now.
    # Fix: don't shadow the builtin `list`.
    return ['good' if p == 1 else 'bad' for p in preds]
@app.route("/train", methods=['POST'])
def train():
    """POST /train — train and persist a model for a user.

    Expects JSON: {"user": <id>, "good": <audio-feature rows>,
                   "bad": <audio-feature rows>}.
    Returns {"result": true} on success.
    """
    payload = request.get_json()  # parse the body once, not three times
    user = payload["user"]
    # SECURITY FIX: the original ran eval() on request-supplied data.
    # get_json() already yields parsed Python lists/dicts, so the frames can
    # be built directly.
    good = pandas.DataFrame(payload["good"])
    bad = pandas.DataFrame(payload["bad"])
    # Label the classes: 1 = good track, 0 = bad track.
    good['qual'] = 1
    bad['qual'] = 0
    data = pandas.concat([good, bad], sort=False)
    createModel(user, data)
    return jsonify({"result": True})
@app.route("/models/<string:user>", methods=['GET'])
def models(user):
    """GET /models/<user> — report whether a trained model exists for *user*.

    Returns {"result": true/false}.
    """
    print("Checking for model for ", user, "...")
    conn = psycopg2.connect(host=HOST, database=DBNAME, user=DBUSER, password=PASSWORD)
    try:
        cur = conn.cursor()
        try:
            cur.execute("""SELECT id FROM gmc WHERE id = %s""", (user,))
            model_exists = cur.fetchone() is not None
        finally:
            cur.close()
    finally:
        # Fix: the original closed only the cursor and leaked the connection.
        conn.close()
    if model_exists:
        print("A model exists for ", user, "! Welcome back!")
    else:
        print("No model for ", user, " exists. Create a new model.")
    return jsonify({"result": model_exists})
@app.route("/delete/<string:user>", methods=['GET'])
def delete(user):
    """GET /delete/<user> — remove *user*'s stored model row(s).

    Returns {"result": true} whether or not any row existed.
    """
    conn = psycopg2.connect(host=HOST, database=DBNAME, user=DBUSER, password=PASSWORD)
    try:
        cur = conn.cursor()
        try:
            cur.execute("""DELETE FROM gmc WHERE id = %s""", (user,))
            print("Deleted ", cur.rowcount, " rows in the DB.")
            conn.commit()
        finally:
            cur.close()
    finally:
        # Fix: close the connection (was leaked by the original).
        conn.close()
    return jsonify({"result": True})
@app.route("/all", methods=['GET'])
def all():
    """GET /all — debug endpoint: print the server's current working-directory
    listing and return a placeholder body."""
    entries = os.listdir()
    print(entries)
    return ('yo')
@app.route("/predict/<string:user>", methods=['POST'])
def predict(user):
    """POST /predict/<user> — classify posted tracks with *user*'s model.

    Expects JSON: {"test": <audio-feature rows>}.
    Returns {"results": ["good"|"bad", ...]}, one label per row.
    """
    # SECURITY FIX: the original ran eval() on request-supplied data;
    # get_json() already returns parsed structures.
    df = pandas.DataFrame(request.get_json()["test"])
    # The patsy formula requires a 'qual' column to exist; its value is not
    # used for prediction, so a placeholder label suffices.
    df['qual'] = 'bad'
    res = prediction(user, df)
    print(res)
    return jsonify({"results": res})
@app.route('/')
def index():
    """GET / — trivial landing route, usable as a liveness check."""
    return ("hello world")