Commit: Site work
audaciouscode committed Dec 21, 2016
1 parent 651a54e commit 07dccdb
Showing 14 changed files with 332 additions and 57 deletions.
5 changes: 3 additions & 2 deletions forms.py
@@ -3,8 +3,9 @@
 from models import PurpleRobotReading

 class ExportJobForm(forms.Form):
-    start_date = forms.DateField()
-    end_date = forms.DateField()
+    start_date = forms.DateTimeField()
+    end_date = forms.DateTimeField()
+
     destination = forms.EmailField()

     def __init__(self, *args, **kwargs):
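Note on the forms.py change: DateField discards any time component, so the old form could only bound exports to whole days; DateTimeField parses a full timestamp. A minimal sketch of the difference, using a simplified, hypothetical form and illustrative values (not part of this commit), assuming Django's default DATETIME_INPUT_FORMATS:

    from django import forms

    class ExportWindowForm(forms.Form):  # hypothetical, simplified form
        start_date = forms.DateTimeField()
        end_date = forms.DateTimeField()

    # DateTimeField accepts '2015-07-03 05:00:00' and keeps the hour,
    # which is what makes sub-day export windows possible.
    form = ExportWindowForm(data={'start_date': '2015-07-03 05:00:00',
                                  'end_date': '2015-07-03 06:00:00'})
    assert form.is_valid()
    assert form.cleaned_data['start_date'].hour == 5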
10 changes: 8 additions & 2 deletions management/commands/compile_accelerometer_report.py
@@ -1,6 +1,7 @@
 import datetime
 import gzip
 import json
+import pytz
 import sys
 import tempfile

@@ -17,10 +18,15 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         hashes = REPORT_DEVICES

-        start = datetime.datetime.now() - datetime.timedelta(days=14)
-        # start = datetime.datetime.now() - datetime.timedelta(days=120)
+        start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
+        end_ts = start_ts + datetime.timedelta(hours=1)
+
+        # print(start_ts.isoformat())
+        # print(end_ts.isoformat())

         for user_hash in hashes:
-            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start).order_by('logged')
+            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

             count = payloads.count()
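One timezone caveat that applies to the start_ts lines above (a known pytz behavior, not something introduced by this commit): passing a pytz zone as the tzinfo argument of the datetime constructor attaches the zone's base LMT offset (-05:51 for US/Central) instead of the CST/CDT offset in force on that date; pytz's documented idiom is localize(). A standalone sketch:

    import datetime
    import pytz

    central = pytz.timezone('US/Central')

    # tzinfo= in the constructor picks the zone's first (LMT) entry: -05:51.
    constructed = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=central)

    # localize() resolves the offset actually in force on that date: CDT, -05:00.
    localized = central.localize(datetime.datetime(2015, 7, 3, 5, 0, 0, 0))

    print(constructed.isoformat())  # 2015-07-03T05:00:00-05:51
    print(localized.isoformat())    # 2015-07-03T05:00:00-05:00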
7 changes: 5 additions & 2 deletions management/commands/compile_barometer_report.py
@@ -1,6 +1,7 @@
 import datetime
 import gzip
 import json
+import pytz
 import sys
 import tempfile

@@ -17,12 +18,14 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         hashes = REPORT_DEVICES # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

-        start = datetime.datetime.now() - datetime.timedelta(days=21)
-        # start = datetime.datetime.now() - datetime.timedelta(days=120)
+        start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
+        end_ts = start_ts + datetime.timedelta(hours=1)

         for user_hash in hashes:
             # hash = hash['user_id']

-            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start).order_by('logged')
+            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

             count = payloads.count()
             if count > 0:
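The recurring query change in these report commands replaces an open-ended logged__gte=start filter with the half-open interval [start_ts, end_ts): logged__gte plus logged__lt. Half-open windows tile without overlap, so a reading logged exactly on a boundary lands in exactly one window. A sketch using the same model and query shape as the commands above (the probe name comes from this commit; the user hash is made up):

    import datetime
    import pytz

    from purple_robot_app.models import PurpleRobotReading

    # Hypothetical values for the sketch; the commands get these from settings.
    PROBE_NAME = 'edu.northwestern.cbits.purple_robot_manager.probes.studies.fallnet.FallNetProbe'
    user_hash = '0123abcd'

    start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
    end_ts = start_ts + datetime.timedelta(hours=1)

    # logged__lt (not logged__lte) keeps the upper bound exclusive, so
    # back-to-back hourly windows never count a boundary reading twice.
    payloads = PurpleRobotReading.objects.filter(
        user_id=user_hash,
        probe=PROBE_NAME,
        logged__gte=start_ts,
        logged__lt=end_ts,
    ).order_by('logged')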
100 changes: 100 additions & 0 deletions management/commands/compile_fallnet_report.py
@@ -0,0 +1,100 @@
+import datetime
+import gzip
+import json
+import pytz
+import sys
+import tempfile
+
+from django.core.files import File
+from django.core.management.base import BaseCommand
+from django.utils import timezone
+
+from purple_robot_app.models import PurpleRobotReading, PurpleRobotReport
+from purple_robot.settings import REPORT_DEVICES
+
+PROBE_NAME = 'edu.northwestern.cbits.purple_robot_manager.probes.studies.fallnet.FallNetProbe'
+
+class Command(BaseCommand):
+    def handle(self, *args, **options):
+        hashes = REPORT_DEVICES # PurpleRobotPayload.objects.order_by().values('user_id').distinct()
+
+        start_ts = timezone.now() - datetime.timedelta(days=120)
+        # start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
+        end_ts = timezone.now() # start_ts + datetime.timedelta(hours=1)
+
+        # print('HASHES: ' + str(hashes))
+
+        for user_hash in hashes:
+            # hash = hash['user_id']
+
+            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')
+
+            count = payloads.count()
+
+            # print(user_hash + ' -- ' + str(count))
+
+            if count > 0:
+                temp_file = tempfile.TemporaryFile()
+
+                gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)
+                gzf.write('User ID\tTimestamp\tACCELEROMETER_READING_COUNT\tGYROSCOPE_READING_COUNT\tBAROMETER_READING_COUNT\tIS_FALL\tNOT_FALL_ODDS\tNOT_FALL_SUM\tNOT_FALL_PROBABILITY\tEVALUATION_WINDOW_START\tEVALUATION_WINDOW_END\tEVALUATION_WINDOW_SIZE\n')
+
+                index = 0
+
+                while index < count:
+                    end = index + 100
+
+                    if end > count:
+                        end = count
+
+                    for payload in payloads[index:end]:
+                        reading_json = json.loads(payload.payload)
+
+                        is_fall = 0
+
+                        if reading_json['IS_FALL']:
+                            is_fall = 1
+
+                        accel_count = ''
+
+                        if 'ACCELEROMETER_READING_COUNT' in reading_json:
+                            accel_count = str(reading_json['ACCELEROMETER_READING_COUNT'])
+
+                        gyro_count = ''
+
+                        if 'GYROSCOPE_READING_COUNT' in reading_json:
+                            gyro_count = str(reading_json['GYROSCOPE_READING_COUNT'])
+
+                        baro_count = ''
+
+                        if 'BAROMETER_READING_COUNT' in reading_json:
+                            baro_count = str(reading_json['BAROMETER_READING_COUNT'])
+
+                        eval_start = ''
+
+                        if 'EVALUATION_WINDOW_START' in reading_json:
+                            eval_start = str(reading_json['EVALUATION_WINDOW_START'])
+
+                        eval_end = ''
+
+                        if 'EVALUATION_WINDOW_END' in reading_json:
+                            eval_end = str(reading_json['EVALUATION_WINDOW_END'])
+
+                        eval_size = ''
+
+                        if 'EVALUATION_WINDOW_SIZE' in reading_json:
+                            eval_size = str(reading_json['EVALUATION_WINDOW_SIZE'])
+
+                        gzf.write(user_hash + '\t' + str(reading_json['TIMESTAMP']) + '\t' + accel_count + '\t' + gyro_count + '\t' + baro_count + '\t' + str(is_fall) + '\t' + str(reading_json['NOT_FALL_ODDS']) + '\t' + str(reading_json['NOT_FALL_SUM']) + '\t' + str(reading_json['NOT_FALL_PROBABILITY']) + '\t' + eval_start + '\t' + eval_end + '\t' + eval_size + '\n')
+
+                    index += 100
+
+                gzf.flush()
+                gzf.close()
+
+                temp_file.seek(0)
+
+                report = PurpleRobotReport(generated=timezone.now(), mime_type='application/x-gzip', probe=PROBE_NAME, user_id=user_hash)
+                report.save()
+                report.report_file.save(user_hash + '-fallnet.txt.gz', File(temp_file))
+                report.save()
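Since compile_fallnet_report.py writes a gzipped tab-separated file, the report can be read back with the standard library alone. A hypothetical consumer sketch in Python 2, matching the rest of the codebase (the file name mirrors the report_file.save() call above; the hash is made up):

    import csv
    import gzip

    # Hypothetical local copy of a generated report file.
    with gzip.open('0123abcd-fallnet.txt.gz', 'rb') as report:
        reader = csv.reader(report, delimiter='\t')
        header = next(reader)

        for row in reader:
            record = dict(zip(header, row))

            if record['IS_FALL'] == '1':
                print('%s %s' % (record['Timestamp'], record['NOT_FALL_PROBABILITY']))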
7 changes: 5 additions & 2 deletions management/commands/compile_gyroscope_report.py
@@ -1,6 +1,7 @@
 import datetime
 import gzip
 import json
+import pytz
 import sys
 import tempfile

@@ -17,12 +18,14 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         hashes = REPORT_DEVICES # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

-        start = datetime.datetime.now() - datetime.timedelta(days=21)
-        # start = datetime.datetime.now() - datetime.timedelta(days=120)
+        start_ts = datetime.datetime(2015, 7, 3, 5, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
+        end_ts = start_ts + datetime.timedelta(hours=1)

         for user_hash in hashes:
             # hash = hash['user_id']

-            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start).order_by('logged')
+            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

             count = payloads.count()
             if count > 0:
7 changes: 5 additions & 2 deletions management/commands/compile_labels_report.py
@@ -1,6 +1,7 @@
 import datetime
 import gzip
 import json
+import pytz
 import tempfile

 from django.core.files import File

@@ -16,15 +17,17 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         hashes = REPORT_DEVICES # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

-        start = datetime.datetime.now() - datetime.timedelta(days=21)
-        # start = datetime.datetime.now() - datetime.timedelta(days=120)
+        start_ts = datetime.datetime(2015, 11, 10, 0, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
+        end_ts = start_ts + datetime.timedelta(days=1)

         labels = PurpleRobotReading.objects.exclude(probe__startswith='edu.northwestern').values('probe').distinct()

         for user_hash in hashes:
             for label in labels:
                 slug_label = slugify(label['probe'])

-                payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=label['probe'], logged__gte=start).order_by('logged')
+                payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=label['probe'], logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

                 count = payloads.count()
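Two details of the labels command worth spelling out: values('probe').distinct() yields one dict per distinct probe value (hence label['probe'] in the loop), and slugify() turns each free-form label into a filesystem-safe slug for the per-label report names. A small illustration (the django.utils.text import is an assumption; the actual import line for slugify falls outside this hunk):

    from django.utils.text import slugify

    # slugify lower-cases, drops characters that aren't alphanumerics,
    # hyphens, or whitespace, and collapses whitespace runs to hyphens.
    print(slugify('Ate Lunch @ Noon!'))  # ate-lunch-noon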
7 changes: 5 additions & 2 deletions management/commands/compile_significant_motion_report.py
@@ -1,6 +1,7 @@
 import datetime
 import gzip
 import json
+import pytz
 import tempfile

 from django.core.files import File

@@ -16,10 +17,12 @@ class Command(BaseCommand):
     def handle(self, *args, **options):
         hashes = REPORT_DEVICES # PurpleRobotPayload.objects.order_by().values('user_id').distinct()

-        start = datetime.datetime.now() - datetime.timedelta(days=21)
-        # start = datetime.datetime.now() - datetime.timedelta(days=120)
+        start_ts = datetime.datetime(2015, 11, 10, 0, 0, 0, 0, tzinfo=pytz.timezone('US/Central'))
+        end_ts = start_ts + datetime.timedelta(days=1)

         for user_hash in hashes:
-            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start).order_by('logged')
+            payloads = PurpleRobotReading.objects.filter(user_id=user_hash, probe=PROBE_NAME, logged__gte=start_ts, logged__lt=end_ts).order_by('logged')

             count = payloads.count()
             if count > 0:
84 changes: 49 additions & 35 deletions management/commands/run_export_jobs.py
@@ -1,5 +1,6 @@
 # pylint: disable=line-too-long, no-member

 import datetime
+import gc
 import gzip
 import json
 import tempfile

@@ -10,82 +11,95 @@
 from django.db.models import Q
 from django.template import Context
 from django.template.loader import render_to_string
+from django.utils import timezone
 from django.utils.crypto import get_random_string

-from purple_robot.settings import ADMINS, URL_PREFIX
-from purple_robot_app.models import PurpleRobotExportJob, PurpleRobotReading
+from django.conf import settings
+from ...models import PurpleRobotExportJob, PurpleRobotReading


 class Command(BaseCommand):
     def handle(self, *args, **options):
         processing = PurpleRobotExportJob.objects.filter(state='processing')
         pending = PurpleRobotExportJob.objects.filter(state='pending')

+        now = timezone.now()
+
         if processing.count() > 0:
-            pass # Do nothing - already compiling a job...
+            pass  # Do nothing - already compiling a job...
         elif pending.count() < 1:
-            pass # No work to do...
+            pass  # No work to do...
         else:
             job = pending.order_by('pk')[0]

             job.state = 'processing'
             job.save()
-            hashes = job.users.split()
-            probes = job.probes.split()
-            start = datetime.datetime(job.start_date.year, job.start_date.month, job.start_date.day, 0, 0, 0, 0)
-            end = datetime.datetime(job.end_date.year, job.end_date.month, job.end_date.day, 23, 59, 59, 999999)
+
+            hashes = job.users.strip().split()
+            probes = job.probes.strip().split()
+
+            start = job.start_date # datetime.datetime(job.start_date.year, job.start_date.month, job.start_date.day, 0, 0, 0, 0, tzinfo=now.tzinfo)
+            end = job.end_date # datetime.datetime(job.end_date.year, job.end_date.month, job.end_date.day, 23, 59, 59, 999999, tzinfo=now.tzinfo)

             q_probes = None

             temp_file = tempfile.TemporaryFile()

-            gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)
-            gzf.write('User ID\tProbe\tLogged\tPayload\n')
-
             for probe in probes:
                 if q_probes is None:
                     q_probes = Q(probe=probe)
                 else:
                     q_probes = (q_probes | Q(probe=probe))

+            gzf = gzip.GzipFile(mode='wb', fileobj=temp_file)
+            gzf.write('User ID\tProbe\tLogged\tStart\tEnd\tDuration\tPayload\n')
+
             for user_hash in hashes:
                 readings = None

                 if q_probes is not None:
                     readings = PurpleRobotReading.objects.filter(q_probes, user_id=user_hash, logged__gte=start, logged__lte=end).order_by('logged')
                 else:
                     readings = PurpleRobotReading.objects.filter(user_id=user_hash, logged__gte=start, logged__lte=end).order_by('logged')

                 count = readings.count()

                 for i in range(0, (count / 500) + 1):
                     page_start = i * 500
                     page_end = page_start + 499

                     for reading in readings[page_start:page_end]:
                         payload = json.loads(reading.payload)

-                        gzf.write(user_hash + '\t' + reading.probe + '\t' + str(reading.logged) + '\t' + json.dumps(payload) + '\n')
+                        if 'FEATURE_VALUE' in payload and 'start' in payload['FEATURE_VALUE'] and 'end' in payload['FEATURE_VALUE'] and 'duration' in payload['FEATURE_VALUE']:
+                            range_start = payload['FEATURE_VALUE']['start']
+                            range_end = payload['FEATURE_VALUE']['end']
+                            duration = payload['FEATURE_VALUE']['duration']
+
+                            gzf.write(user_hash + '\t' + reading.probe + '\t' + str(reading.logged) + '\t' + str(range_start) + '\t' + str(range_end) + '\t' + str(duration) + '\t' + json.dumps(payload) + '\n')
+                        else:
+                            gzf.write(user_hash + '\t' + reading.probe + '\t' + str(reading.logged) + '\t\t\t\t' + json.dumps(payload) + '\n')

             gzf.flush()
             gzf.close()

             temp_file.seek(0)

-            job.export_file.save('export_' + job.start_date.isoformat() + '_' + \
-                                 job.end_date.isoformat() + '_' + \
-                                 get_random_string(16).lower() + '.txt.gz', \
+            job.export_file.save('export_' + job.start_date.isoformat() + '_' +
+                                 job.end_date.isoformat() + '_' +
+                                 get_random_string(16).lower() + '.txt.gz',
                                  File(temp_file))

             job.state = 'finished'
             job.save()

             if job.destination is not None and job.destination != '':
-                c = Context()
-                c['job'] = job
-                c['prefix'] = URL_PREFIX
+                context = Context()
+                context['job'] = job
+                context['prefix'] = settings.URL_PREFIX

-                message = render_to_string('export_email.txt', c)
+                message = render_to_string('export_email.txt', context)

-                send_mail('Your Purple Robot data is available', message, 'Purple Robot <' + ADMINS[0][1] + '>', [job.destination], fail_silently=False)
+                send_mail('Your Purple Robot data is available', message, 'Purple Robot <' + settings.ADMINS[0][1] + '>', [job.destination], fail_silently=False)
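The q_probes loop above builds one OR-ed WHERE clause out of an arbitrary probe list, falling back to an unfiltered query when the job names no probes. The same construction in a compact, equivalent form (a sketch only; the probe name in the list is illustrative):

    import operator
    from functools import reduce  # reduce is a builtin on Python 2; this import works on both 2 and 3

    from django.db.models import Q

    from purple_robot_app.models import PurpleRobotReading  # absolute form of the relative import above

    # In the command, this list comes from job.probes.strip().split().
    probes = ['edu.northwestern.cbits.purple_robot_manager.probes.builtin.AccelerometerProbe']

    # OR the per-probe conditions into a single Q, like the q_probes loop.
    q_probes = reduce(operator.or_, [Q(probe=probe) for probe in probes])

    readings = PurpleRobotReading.objects.filter(q_probes).order_by('logged')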
