Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
Data export refinements for aggregate files.
* Added exporter for Withings device generator.
* Added server-side fetch for Withings data.
* Update Django requirement to 1.11.1.
  • Loading branch information
audaciouscode committed May 22, 2017
1 parent e98c0c4 commit 0980d77
Show file tree
Hide file tree
Showing 5 changed files with 256 additions and 2 deletions.
101 changes: 101 additions & 0 deletions generators/pdk_withings_device.py
@@ -1,6 +1,107 @@
import calendar
import csv
import os
import tempfile

from zipfile import ZipFile

import arrow

from ..models import DataPoint

# Maps each Withings datastream (the DataPoint secondary identifier) to the
# ordered list of property names exported as columns for that datastream by
# compile_report(). Missing properties are written as empty cells.
SECONDARY_FIELDS = {
    'intraday-activity': [
        'intraday_activity_history',
        'activity_start',
        'activity_duration',
        'calories',
        'distance',
        'elevation_climbed',
        'steps',
        'swim_strokes',
        'pool_laps',
    ],
    'body': [
        'measure_date',
        'measure_status',
        'measure_category',
        'measure_type',
        'measure_value',
    ],
    'activity-measures': [
        'date_start',
        'timezone',
        'steps',
        'distance',
        'active_calories',
        'total_calories',
        'elevation',
        'soft_activity_duration',
        'moderate_activity_duration',
        'intense_activity_duration',
    ],
    'sleep-measures': [
        'start_date',
        'end_date',
        'state',
        'measurement_device',
    ]
}

def extract_secondary_identifier(properties):
    """Return the 'datastream' value from *properties*, or None when absent."""
    return properties.get('datastream')


def compile_report(generator, sources): # pylint: disable=too-many-locals
    """Compile a ZIP report of Withings data points for the given sources.

    Writes one tab-separated staging file per known datastream (see
    SECONDARY_FIELDS), containing every matching DataPoint for each source,
    bundles the files into a single ZIP archive in the system temp
    directory, and removes the staging files once archived.

    :param generator: generator identifier used to select DataPoints.
    :param sources: iterable of source identifiers to export.
    :return: path of the ZIP file that was written.
    """
    page_size = 5000  # fetch points in batches to bound queryset memory usage

    filename = tempfile.gettempdir() + '/pdk_export_' + str(arrow.get().timestamp) + '.zip'

    with ZipFile(filename, 'w') as export_file:
        for secondary_identifier in SECONDARY_FIELDS:
            secondary_filename = tempfile.gettempdir() + '/' + generator + '-' + \
                                 secondary_identifier + '.txt'

            with open(secondary_filename, 'w') as outfile:
                writer = csv.writer(outfile, delimiter='\t')

                columns = [
                    'Source',
                    'Created Timestamp',
                    'Created Date',
                ]

                for column in SECONDARY_FIELDS[secondary_identifier]:
                    columns.append(column)

                writer.writerow(columns)

                for source in sources:
                    points = DataPoint.objects.filter(source=source, generator_identifier=generator, secondary_identifier=secondary_identifier).order_by('source', 'created') # pylint: disable=no-member,line-too-long

                    index = 0
                    count = points.count()

                    while index < count:
                        for point in points[index:(index + page_size)]:
                            row = []

                            row.append(point.source)
                            row.append(calendar.timegm(point.created.utctimetuple()))
                            row.append(point.created.isoformat())

                            properties = point.fetch_properties()

                            # Emit an empty cell for any property the point lacks,
                            # keeping every row aligned with the header columns.
                            for column in SECONDARY_FIELDS[secondary_identifier]:
                                if column in properties:
                                    row.append(properties[column])
                                else:
                                    row.append('')

                            writer.writerow(row)

                        index += page_size

            export_file.write(secondary_filename, secondary_filename.split('/')[-1])

            # Remove the staging file now that its contents live in the ZIP;
            # previously these accumulated in the temp directory on every export.
            os.remove(secondary_filename)

    return filename
8 changes: 8 additions & 0 deletions management/commands/compile_reports.py
Expand Up @@ -122,6 +122,14 @@ def handle(self, *args, **options): # pylint: disable=too-many-locals,too-many-b
pdk_api = importlib.import_module(app + '.pdk_api')

output_file = pdk_api.compile_report(generator, sources)

if output_file.lower().endswith('.zip'):
with ZipFile(output_file, 'r') as source_file:
for name in source_file.namelist():
export_file.writestr(name, source_file.open(name).read()) # pylint: disable=line-too-long

os.remove(output_file)
output_file = None
except ImportError:
# traceback.print_exc()
output_file = None
Expand Down
144 changes: 144 additions & 0 deletions management/commands/fetch_historical_withings_data.py
@@ -0,0 +1,144 @@
# -*- coding: utf-8 -*-
# pylint: disable=no-member, line-too-long

import datetime
import json
import os

import arrow

from requests_oauthlib import OAuth1Session

from django.conf import settings
from django.core.management.base import BaseCommand
from django.utils import timezone

from ...decorators import handle_lock
from ...models import DataPoint, install_supports_jsonfield

GENERATOR_NAME = 'pdk-withings-device: Passive Data Kit Server'

class Command(BaseCommand):
    # Previous help text was copy-pasted from the report-compilation command;
    # this command fetches Withings data server-side.
    help = 'Fetches historical Withings data for enrolled sources.'

    def add_arguments(self, parser):
        parser.add_argument('--start',
                            type=str,
                            dest='start',
                            help='Start of date range to retrieve Withings data in format YYYY-MM-DD')

        parser.add_argument('--end',
                            type=str,
                            dest='end',
                            help='End of date range to retrieve Withings data in format YYYY-MM-DD')

    @handle_lock
    def handle(self, *args, **options): # pylint: disable=too-many-locals,too-many-branches,too-many-statements
        """Fetch historical intraday Withings data for every known source.

        Defaults to a one-day window starting today when --start/--end are
        omitted. Only sources whose most recent Withings point carries the
        full OAuth credential set are fetched.
        """
        os.umask(000)

        start = options['start']

        if start is None:
            start = timezone.now().strftime('%Y-%m-%d')

        end = options['end']

        if end is None:
            end = (timezone.now() + datetime.timedelta(days=1)).strftime('%Y-%m-%d')

        start_date = arrow.get(start).replace(hour=0, minute=0, second=0).to(settings.TIME_ZONE)
        end_date = arrow.get(end).replace(hour=0, minute=0, second=0).to(settings.TIME_ZONE)

        sources = DataPoint.objects.order_by('source').values_list('source', flat=True).distinct()

        for source in sources:
            data_point = DataPoint.objects.filter(source=source, generator_identifier='pdk-withings-device').order_by('-created').first()

            # Sources with no Withings points yield None here; skip them
            # instead of raising AttributeError on fetch_properties().
            if data_point is None:
                continue

            properties = data_point.fetch_properties()

            if 'oauth_user_token' in properties and 'oauth_user_secret' in properties and 'oauth_user_id' in properties:
                fetch_intraday(source, properties, start_date, end_date)


def fetch_intraday(user_id, properties, start_date, end_date): # pylint: disable=too-many-locals, too-many-statements
    """Fetch intraday activity samples from the Withings API for one user.

    Queries the getintradayactivity endpoint over [start_date, end_date]
    using the OAuth1 credentials stored in *properties*, and saves one new
    'pdk-withings-device' DataPoint per sample that does not already exist
    (matched on source + created timestamp + 'intraday-activity' datastream).

    :param user_id: PDK source identifier the points are stored under.
    :param properties: dict holding oauth_user_token / oauth_user_secret /
        oauth_user_id for the Withings API.
    :param start_date: arrow timestamp marking the start of the window.
    :param end_date: arrow timestamp marking the end of the window.
    """
    api_url = 'https://wbsapi.withings.net/v2/measure?action=getintradayactivity'
    api_url += '&userid=' + properties['oauth_user_id']
    api_url += '&startdate=' + str(start_date.timestamp)
    api_url += '&enddate=' + str(end_date.timestamp)

    oauth = OAuth1Session(settings.PDK_WITHINGS_API_KEY, \
                          client_secret=settings.PDK_WITHINGS_API_SECRET, \
                          resource_owner_key=properties['oauth_user_token'], \
                          resource_owner_secret=properties['oauth_user_secret'],
                          signature_type='query')

    response = oauth.get(url=api_url)

    results = response.json()

    if 'body' in results and 'series' in results['body']:
        # The API returns an empty list (rather than a dict) when no samples exist.
        if results['body']['series'] == []:
            return

        for timestamp, values in results['body']['series'].iteritems():
            found = False

            created_date = arrow.get(timestamp).datetime

            matches = DataPoint.objects.filter(source=user_id, generator_identifier='pdk-withings-device', created=created_date)

            for match in matches:
                match_props = match.fetch_properties()

                if match_props['datastream'] == 'intraday-activity':
                    found = True

            if found is False:
                now = arrow.utcnow()

                new_point = DataPoint(source=user_id, generator=GENERATOR_NAME, generator_identifier='pdk-withings-device')

                new_point.created = created_date
                new_point.recorded = now.datetime

                new_properties = {}
                new_properties['datastream'] = 'intraday-activity'

                new_properties['activity_start'] = int(timestamp)
                new_properties['calories'] = values['calories']
                new_properties['activity_duration'] = values['duration']

                # Optional fields: only present for some samples.
                if 'distance' in values:
                    new_properties['distance'] = values['distance']

                if 'steps' in values:
                    new_properties['steps'] = values['steps']

                if 'elevation' in values:
                    new_properties['elevation_climbed'] = values['elevation']

                if 'sleep_state' in values:
                    new_properties['sleep_state'] = values['sleep_state']

                new_properties['observed'] = now.timestamp * 1000
                new_properties['server_fetched'] = True

                new_properties['oauth_user_token'] = properties['oauth_user_token']
                new_properties['oauth_user_secret'] = properties['oauth_user_secret']
                new_properties['oauth_user_id'] = properties['oauth_user_id']

                pdk_metadata = {}
                pdk_metadata['source'] = user_id
                pdk_metadata['generator-id'] = 'pdk-withings-device'
                pdk_metadata['generator'] = GENERATOR_NAME
                # Fix: this previously assigned user_id (copy-paste from 'source'
                # above); the metadata timestamp is the sample's epoch time,
                # matching created_date / activity_start.
                pdk_metadata['timestamp'] = int(timestamp)

                new_properties['passive-data-metadata'] = pdk_metadata

                if install_supports_jsonfield():
                    new_point.properties = new_properties
                else:
                    new_point.properties = json.dumps(new_properties, indent=2)

                new_point.fetch_secondary_identifier()

                new_point.save()
2 changes: 1 addition & 1 deletion pdk_api.py
Expand Up @@ -42,7 +42,7 @@ def compile_report(generator, sources):
except AttributeError:
pass

filename = tempfile.gettempdir() + '/pdk_' + generator + '.txt'
filename = tempfile.gettempdir() + '/' + generator + '.txt'

with open(filename, 'w') as outfile:
writer = csv.writer(outfile, delimiter='\t')
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
@@ -1,4 +1,4 @@
Django==1.11
Django==1.11.1
argparse==1.2.1
arrow==0.10.0
dj-database-url==0.4.2
Expand All @@ -12,3 +12,4 @@ whitenoise==3.3.0
wsgiref==0.1.2
pylint==1.7.1
bandit==1.4.0
requests==2.14.2

0 comments on commit 0980d77

Please sign in to comment.