-
Notifications
You must be signed in to change notification settings - Fork 4
/
app_test.py
351 lines (253 loc) · 9.13 KB
/
app_test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
import pytest
import os
from app import app
from flask_restful import Api
from middleware.quick_search_query import (
unaltered_search_query,
quick_search_query,
QUICK_SEARCH_COLUMNS,
)
from middleware.data_source_queries import (
data_sources_query,
data_sources_results,
data_source_by_id_query,
data_source_by_id_results,
DATA_SOURCES_APPROVED_COLUMNS,
)
from middleware.user_queries import (
user_post_results,
user_check_email,
)
from middleware.login_queries import (
login_results,
create_session_token,
token_results,
is_admin,
)
from middleware.archives_queries import (
archives_get_results,
archives_get_query,
archives_put_broken_as_of_results,
archives_put_last_cached_results,
ARCHIVES_GET_COLUMNS,
)
from middleware.reset_token_queries import (
check_reset_token,
add_reset_token,
delete_reset_token,
)
from app_test_data import (
DATA_SOURCES_ROWS,
DATA_SOURCE_QUERY_RESULTS,
QUICK_SEARCH_QUERY_RESULTS,
AGENCIES_ROWS,
DATA_SOURCES_ID_QUERY_RESULTS,
ARCHIVES_GET_QUERY_RESULTS,
)
import datetime
import sqlite3
# API key for authenticated endpoint tests; read from the environment so the
# key never lives in source control.
api_key = os.getenv("VUE_APP_PDAP_API_KEY")
# Bearer-token header used by the (currently commented-out) client tests.
HEADERS = {"Authorization": f"Bearer {api_key}"}
# Single timestamp captured at import time so every test in a run compares
# against the same value ("%Y-%m-%d %H:%M:%S" matches the DB column format).
current_datetime = datetime.datetime.now()
DATETIME_STRING = current_datetime.strftime("%Y-%m-%d %H:%M:%S")
@pytest.fixture()
def test_app():
    """Expose the Flask application object to dependent fixtures."""
    yield app
@pytest.fixture()
def client(test_app):
    """Build a Flask test client bound to the application fixture."""
    test_client = test_app.test_client()
    return test_client
@pytest.fixture()
def runner(test_app):
    """Build a Flask CLI test runner bound to the application fixture."""
    cli_runner = test_app.test_cli_runner()
    return cli_runner
@pytest.fixture
def session():
    """Yield a seeded, shared in-memory SQLite connection.

    Loads the schema from ``do_db_ddl_clean.sql``, then inserts the
    ``DATA_SOURCES_ROWS`` and ``AGENCIES_ROWS`` fixtures.  All values are
    inserted as their ``str()`` form (data-source rows keep the literal
    string 'None'; agency ``None`` values become ''), matching the
    original fixture behavior; string 'NULL' markers in
    ``broken_source_url_as_of`` are converted to real NULLs afterwards.

    Yields:
        sqlite3.Connection: open connection; closed after the test.
    """
    connection = sqlite3.connect("file::memory:?cache=shared", uri=True)
    db_session = connection.cursor()
    with open("do_db_ddl_clean.sql", "r") as f:
        sql_file = f.read()
    sql_queries = sql_file.split(";")
    for query in sql_queries:
        db_session.execute(query.replace("\n", ""))
    for row in DATA_SOURCES_ROWS:
        # Parameterized insert: values containing quotes no longer break
        # the statement (the old string-concatenated SQL did).
        values = [str(r) for r in row]
        placeholders = ", ".join("?" * len(values))
        db_session.execute(
            f"insert into data_sources values ({placeholders})", values
        )
    db_session.execute(
        "update data_sources set broken_source_url_as_of = null where broken_source_url_as_of = 'NULL'"
    )
    for row in AGENCIES_ROWS:
        values = [str(r) if r is not None else "" for r in row]
        placeholders = ", ".join("?" * len(values))
        db_session.execute(f"insert into agencies values ({placeholders})", values)
    yield connection
    connection.close()
# unit tests
def test_unaltered_search_query(session):
    """A plain search for 'calls' in 'chicago' returns a truthy result."""
    assert unaltered_search_query(session.cursor(), "calls", "chicago")
def test_data_sources(session):
    """The seeded database yields a non-empty data-sources result set."""
    assert data_sources_results(conn=session)
def test_data_sources_approved(session):
    """A known unapproved source URL must not appear in the results."""
    rows = data_sources_results(conn=session)
    unapproved = [
        row for row in rows
        if "https://joinstatepolice.ny.gov/15-mile-run" in row
    ]
    assert not unapproved
def test_data_source_by_id_results(session):
    """Looking up a known-good airtable id returns a truthy row."""
    row = data_source_by_id_results(
        data_source_id="rec00T2YLS2jU7Tbn", conn=session
    )
    assert row
def test_data_source_by_id_approved(session):
    """This id (presumably an unapproved source) yields no result."""
    row = data_source_by_id_results(
        data_source_id="rec013MFNfBnrTpZj", conn=session
    )
    assert not row
def test_user_post_query(session):
    """user_post_results persists a new user row retrievable by email."""
    curs = session.cursor()
    user_post_results(curs, "unit_test", "unit_test")
    # Plain literal: the original used an f-string with no placeholders (F541).
    email_check = curs.execute(
        "SELECT email FROM users WHERE email = 'unit_test'"
    ).fetchone()[0]
    assert email_check == "unit_test"
def test_login_query(session):
    """Login lookup for the seeded 'test' user includes a password digest."""
    record = login_results(session.cursor(), "test")
    assert record["password_digest"]
def test_create_session_token_results(session):
    """A freshly created session token resolves back to a user email."""
    token = create_session_token(session.cursor(), 1, "test")
    # A second cursor is used for the lookup, as in the original test.
    token_data = token_results(session.cursor(), token)
    assert token_data["email"]
def test_is_admin(session):
    """The seeded admin account is reported as an admin."""
    assert is_admin(session.cursor(), "mbodenator@gmail.com")
def test_not_admin(session):
    """A regular seeded user is not reported as an admin."""
    assert not is_admin(session.cursor(), "test")
def test_user_check_email(session):
    """user_check_email returns a record with an id for a known email."""
    curs = session.cursor()
    user_data = user_check_email(curs, "test")
    # Leftover debug print removed; the assertion is the signal.
    assert user_data["id"]
def test_check_reset_token(session):
    """check_reset_token returns a record with an id for a known token."""
    curs = session.cursor()
    reset_token = check_reset_token(curs, "test")
    # Leftover debug print removed; the assertion is the signal.
    assert reset_token["id"]
def test_add_reset_token(session):
    """add_reset_token persists a reset-token row retrievable by email."""
    curs = session.cursor()
    add_reset_token(curs, "unit_test", "unit_test")
    # Plain literal: the original used an f-string with no placeholders (F541).
    email_check = curs.execute(
        "SELECT email FROM reset_tokens WHERE email = 'unit_test'"
    ).fetchone()[0]
    assert email_check == "unit_test"
def test_delete_reset_token(session):
    """delete_reset_token removes the row, so the lookup finds nothing."""
    curs = session.cursor()
    delete_reset_token(curs, "test", "test")
    # Plain literal: the original used an f-string with no placeholders (F541).
    email_check = curs.execute(
        "SELECT email FROM reset_tokens WHERE email = 'test'"
    ).fetchone()
    assert not email_check
def test_archives_get_results(session):
    """The seeded database yields a non-empty archives result set."""
    assert archives_get_results(conn=session)
def test_archives_put_broken_as_of(session):
    """Updating broken_as_of and last_cached writes both columns."""
    archives_put_broken_as_of_results(
        id="rec00T2YLS2jU7Tbn",
        broken_as_of=DATETIME_STRING,
        last_cached=DATETIME_STRING,
        conn=session,
    )
    curs = session.cursor()
    # Plain literal: the original used an f-string with no placeholders (F541).
    broken_check, last_check = curs.execute(
        "SELECT broken_source_url_as_of, last_cached FROM data_sources WHERE airtable_uid = 'rec00T2YLS2jU7Tbn'"
    ).fetchone()
    assert broken_check == DATETIME_STRING
    assert last_check == DATETIME_STRING
def test_archives_put_last_cached(session):
    """Updating last_cached alone writes that column for the given id."""
    archives_put_last_cached_results(
        id="recUGIoPQbJ6laBmr", last_cached=DATETIME_STRING, conn=session
    )
    curs = session.cursor()
    # Plain literal: the original used an f-string with no placeholders (F541).
    last_check = curs.execute(
        "SELECT last_cached FROM data_sources WHERE airtable_uid = 'recUGIoPQbJ6laBmr'"
    ).fetchone()[0]
    assert last_check == DATETIME_STRING
# quick-search
def test_quicksearch_columns():
    """Quick-search rows expose every QUICK_SEARCH_COLUMNS key, and the
    record_format field is deserialized into a list."""
    response = quick_search_query(
        search="", location="", test_query_results=QUICK_SEARCH_QUERY_RESULTS
    )
    assert not set(QUICK_SEARCH_COLUMNS).difference(response["data"][0].keys())
    # isinstance is the idiomatic type check (and accepts list subclasses),
    # unlike the original `type(...) == list` comparison.
    assert isinstance(response["data"][1]["record_format"], list)
# data-sources
def test_data_sources_columns():
    """Every approved column appears in the first data-sources row."""
    rows = data_sources_query(conn={}, test_query_results=DATA_SOURCE_QUERY_RESULTS)
    missing = set(DATA_SOURCES_APPROVED_COLUMNS).difference(rows[0].keys())
    assert not missing
def test_data_source_by_id_columns():
    """Every approved column appears in the by-id result."""
    row = data_source_by_id_query("", DATA_SOURCES_ID_QUERY_RESULTS, {})
    missing = set(DATA_SOURCES_APPROVED_COLUMNS).difference(row.keys())
    assert not missing
# user
# def test_post_user(client):
# response = client.post(
# "/user", headers=HEADERS, json={"email": "test", "password": "test"}
# )
# # with initialize_psycopg2_connection() as psycopg2_connection:
# # cursor = psycopg2_connection.cursor()
# # cursor.execute(f"DELETE FROM users WHERE email = 'test'")
# # psycopg2_connection.commit()
# assert response.json["data"] == "Successfully added user"
# archives
def test_archives_get_columns():
    """Every archives column appears in the first archives row."""
    rows = archives_get_query(
        test_query_results=ARCHIVES_GET_QUERY_RESULTS, conn={}
    )
    missing = set(ARCHIVES_GET_COLUMNS).difference(rows[0].keys())
    assert not missing
# def test_put_archives(client):
# current_datetime = datetime.datetime.now()
# datetime_string = current_datetime.strftime("%Y-%m-%d %H:%M:%S")
# response = client.put(
# "/archives",
# headers=HEADERS,
# json=json.dumps(
# {
# "id": "test",
# "last_cached": datetime_string,
# "broken_source_url_as_of": "",
# }
# ),
# )
# assert response.json["status"] == "success"
# def test_put_archives_brokenasof(client):
# current_datetime = datetime.datetime.now()
# datetime_string = current_datetime.strftime("%Y-%m-%d")
# response = client.put(
# "/archives",
# headers=HEADERS,
# json=json.dumps(
# {
# "id": "test",
# "last_cached": datetime_string,
# "broken_source_url_as_of": datetime_string,
# }
# ),
# )
# assert response.json["status"] == "success"
# # agencies
# def test_agencies(client):
# response = client.get("/agencies/1", headers=HEADERS)
# assert len(response.json["data"]) > 0
# def test_agencies_pagination(client):
# response1 = client.get("/agencies/1", headers=HEADERS)
# response2 = client.get("/agencies/2", headers=HEADERS)
# assert response1 != response2