Skip to content

Commit

Permalink
Updated web interface to use cached performance data.
Browse files Browse the repository at this point in the history
* Renamed management commands to use pdk_ prefix.
  • Loading branch information
audaciouscode committed Jul 31, 2017
1 parent 0d7740d commit f6805be
Show file tree
Hide file tree
Showing 17 changed files with 316 additions and 45 deletions.
6 changes: 5 additions & 1 deletion admin.py
Expand Up @@ -14,7 +14,11 @@ class DataPointAdmin(admin.OSMGeoAdmin):

list_display = ('source', 'generator_identifier', 'secondary_identifier', 'created', \
'recorded',)
list_filter = ('created', 'recorded', 'generator_identifier', 'secondary_identifier',)
list_filter = (
'created',
'recorded',
'generator_identifier',
)

@admin.register(DataBundle)
class DataBundleAdmin(admin.OSMGeoAdmin):
Expand Down
2 changes: 1 addition & 1 deletion generators/pdk_sensor_accelerometer.py
Expand Up @@ -104,7 +104,7 @@ def data_table(source, generator): # pylint: disable=unused-argument
def compile_report(generator, sources): # pylint: disable=too-many-locals
filename = tempfile.gettempdir() + '/pdk_export_' + str(arrow.get().timestamp) + '.zip'

with ZipFile(filename, 'w') as export_file:
with ZipFile(filename, 'w', allowZip64=True) as export_file:
for source in sources:
identifier = slugify(generator + '__' + source)

Expand Down
Expand Up @@ -65,7 +65,7 @@ def handle(self, *args, **options): # pylint: disable=too-many-locals,too-many-b

filename = tempfile.gettempdir() + '/pdk_export_' + str(report.pk) + '.zip'

with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED) as export_file:
with zipfile.ZipFile(filename, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as export_file: # pylint: disable=line-too-long
for generator in generators: # pylint: disable=too-many-nested-blocks
if raw_json:
for source in sources:
Expand Down
Expand Up @@ -54,27 +54,28 @@ def handle(self, *args, **options): # pylint: disable=too-many-locals,too-many-b
for source in sources:
data_point = DataPoint.objects.filter(source=source, generator_identifier='pdk-withings-device').order_by('-created').first()

properties = data_point.fetch_properties()
if data_point is not None:
properties = data_point.fetch_properties()

if 'oauth_user_token' in properties and 'oauth_user_secret' in properties and 'oauth_user_id' in properties:
index_date = start_date
if 'oauth_user_token' in properties and 'oauth_user_secret' in properties and 'oauth_user_id' in properties:
index_date = start_date

while index_date < end_date:
next_day = index_date.replace(days=+1)
while index_date < end_date:
next_day = index_date.replace(days=+1)

# print('FETCHING INTRADAY FOR ' + source + ': ' + str(index_date) + ': ' + str(next_day))
# print('FETCHING INTRADAY FOR ' + source + ': ' + str(index_date) + ': ' + str(next_day))

fetch_intraday(source, properties, index_date, next_day)
fetch_intraday(source, properties, index_date, next_day)

time.sleep(1)
time.sleep(1)

# print('FETCHING SLEEP MEASURES FOR ' + source + ': ' + str(index_date) + ': ' + str(next_day))
# print('FETCHING SLEEP MEASURES FOR ' + source + ': ' + str(index_date) + ': ' + str(next_day))

fetch_sleep_measures(source, properties, index_date, next_day)
fetch_sleep_measures(source, properties, index_date, next_day)

time.sleep(1)
time.sleep(1)

index_date = next_day
index_date = next_day


def fetch_intraday(user_id, properties, start_date, end_date): # pylint: disable=too-many-locals, too-many-statements, too-many-branches
Expand Down
Expand Up @@ -23,7 +23,7 @@ def add_arguments(self, parser):
parser.add_argument('--count',
type=int,
dest='bundle_count',
default=100,
default=10,
help='Number of bundles to process in a single run')

@handle_lock
Expand Down
Expand Up @@ -27,9 +27,9 @@ def handle(self, *args, **options): # pylint: disable=too-many-branches, too-man
alert_details = {}
alert_level = 'info'

delta = now - last_upload.created

if last_upload is not None:
delta = now - last_upload.created

if delta.days >= WARNING_DAYS:
alert_name = 'Withings upload is overdue'
alert_details['message'] = 'Latest Withings upload was 1 day ago.'
Expand Down
18 changes: 18 additions & 0 deletions management/commands/pdk_update_performance_metadata.py
@@ -0,0 +1,18 @@
# pylint: disable=no-member

from django.core.management.base import BaseCommand

from ...decorators import handle_lock
from ...models import DataSource

class Command(BaseCommand):
    help = 'Updates each user performance metadata measurements on a round-robin basis'

    @handle_lock
    def handle(self, *args, **options):
        """Refresh performance metadata for exactly one DataSource per run.

        Sources never refreshed (performance_metadata_updated is None) are
        served first; once every source has a timestamp, the least-recently
        refreshed one is picked, giving round-robin coverage when the command
        is run repeatedly (e.g. from cron).
        """
        # Prefer sources that have never had their metadata computed.
        source = DataSource.objects.filter(performance_metadata_updated=None).first()

        if source is None:
            # Every source has been refreshed at least once; take the stalest.
            source = DataSource.objects.all().order_by('performance_metadata_updated').first()

        # Guard against an empty DataSource table: without this check the
        # command raises AttributeError on None when no sources exist yet.
        if source is not None:
            source.update_performance_metadata()
21 changes: 21 additions & 0 deletions migrations/0020_auto_20170731_1939.py
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-31 19:39
# pylint: skip-file

from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):
    """Adds composite DataPoint indexes for (source, generator) query patterns."""

    dependencies = [
        ('passive_data_kit', '0019_datasourcealert_alert_level'),
    ]

    operations = [
        migrations.AlterIndexTogether(
            name='datapoint',
            index_together={
                ('source', 'generator_identifier'),
                ('source', 'generator_identifier', 'created'),
            },
        ),
    ]
21 changes: 21 additions & 0 deletions migrations/0021_auto_20170731_2011.py
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-31 20:11
# pylint: skip-file

from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):
    """Expands DataPoint composite indexes to cover secondary_identifier lookups."""

    dependencies = [
        ('passive_data_kit', '0020_auto_20170731_1939'),
    ]

    operations = [
        migrations.AlterIndexTogether(
            name='datapoint',
            # One tuple per indexed column combination.
            index_together={
                ('source', 'generator_identifier'),
                ('source', 'generator_identifier', 'created'),
                ('source', 'generator_identifier', 'recorded'),
                ('source', 'generator_identifier', 'created', 'recorded'),
                ('source', 'generator_identifier', 'secondary_identifier'),
                ('source', 'generator_identifier', 'secondary_identifier', 'created'),
                ('source', 'generator_identifier', 'secondary_identifier', 'recorded'),
                ('source', 'generator_identifier', 'secondary_identifier', 'created', 'recorded'),
                ('generator_identifier', 'created'),
                ('generator_identifier', 'recorded'),
                ('generator_identifier', 'created', 'recorded'),
                ('generator_identifier', 'secondary_identifier'),
                ('generator_identifier', 'secondary_identifier', 'created'),
                ('generator_identifier', 'secondary_identifier', 'recorded'),
                ('generator_identifier', 'secondary_identifier', 'created', 'recorded'),
            },
        ),
    ]
37 changes: 37 additions & 0 deletions migrations/0022_auto_20170731_2133.py
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-31 21:33
# pylint: skip-file

from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Adds db_index to the DataSourceAlert status and timestamp columns."""

    dependencies = [
        ('passive_data_kit', '0021_auto_20170731_2011'),
    ]

    # All four operations alter a datasourcealert column; build them from
    # (column name, new field definition) pairs to keep the list uniform.
    operations = [
        migrations.AlterField(
            model_name='datasourcealert',
            name=column,
            field=field,
        )
        for column, field in (
            ('active', models.BooleanField(db_index=True, default=True)),
            ('alert_level', models.CharField(choices=[('info', 'Informative'), ('warning', 'Warning'), ('critical', 'Critical')], db_index=True, default='info', max_length=64)),
            ('created', models.DateTimeField(db_index=True)),
            ('updated', models.DateTimeField(blank=True, db_index=True, null=True)),
        )
    ]
21 changes: 21 additions & 0 deletions migrations/0023_auto_20170731_2137.py
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-31 21:37
# pylint: skip-file

from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):
    """Rebuilds the DataPoint composite index set, adding (source, created)."""

    dependencies = [
        ('passive_data_kit', '0022_auto_20170731_2133'),
    ]

    operations = [
        migrations.AlterIndexTogether(
            name='datapoint',
            # One tuple per indexed column combination.
            index_together={
                ('source', 'created'),
                ('source', 'generator_identifier'),
                ('source', 'generator_identifier', 'created'),
                ('source', 'generator_identifier', 'recorded'),
                ('source', 'generator_identifier', 'created', 'recorded'),
                ('source', 'generator_identifier', 'secondary_identifier'),
                ('source', 'generator_identifier', 'secondary_identifier', 'created'),
                ('source', 'generator_identifier', 'secondary_identifier', 'recorded'),
                ('source', 'generator_identifier', 'secondary_identifier', 'created', 'recorded'),
                ('generator_identifier', 'created'),
                ('generator_identifier', 'created', 'recorded'),
                ('generator_identifier', 'recorded'),
                ('generator_identifier', 'secondary_identifier'),
                ('generator_identifier', 'secondary_identifier', 'created'),
                ('generator_identifier', 'secondary_identifier', 'recorded'),
                ('generator_identifier', 'secondary_identifier', 'created', 'recorded'),
            },
        ),
    ]
33 changes: 33 additions & 0 deletions migrations/0024_datasource_performance_metadata.py
@@ -0,0 +1,33 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-31 21:50
# pylint: skip-file

from __future__ import unicode_literals

import django.contrib.postgres.fields.jsonb
from django.db import migrations, models

from ..models import install_supports_jsonfield

class Migration(migrations.Migration):
    """Adds the DataSource.performance_metadata column.

    The column type is chosen at migration time: a native Postgres JSONField
    when the installation supports it, otherwise a plain TextField fallback.
    """

    dependencies = [
        ('passive_data_kit', '0023_auto_20170731_2137'),
    ]

    # Evaluated once at class-definition time, when the migration is loaded.
    if install_supports_jsonfield():
        _metadata_field = django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)
    else:
        # NOTE(review): Django does not enforce max_length on TextField at the
        # database level, so the 32 GB figure is effectively advisory.
        _metadata_field = models.TextField(max_length=(32 * 1024 * 1024 * 1024), blank=True, null=True)

    operations = [
        migrations.AddField(
            model_name='datasource',
            name='performance_metadata',
            field=_metadata_field,
        ),
    ]
22 changes: 22 additions & 0 deletions migrations/0025_datasource_performance_metadata_updated.py
@@ -0,0 +1,22 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-31 22:13
# pylint: skip-file

from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Tracks when each DataSource's performance metadata was last refreshed."""

    dependencies = [
        ('passive_data_kit', '0024_datasource_performance_metadata'),
    ]

    operations = [
        migrations.AddField(
            model_name='datasource',
            name='performance_metadata_updated',
            # Nullable so existing rows need no backfill; indexed because the
            # round-robin updater orders by this column.
            field=models.DateTimeField(null=True, blank=True, db_index=True),
        ),
    ]

0 comments on commit f6805be

Please sign in to comment.