diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..a0839b5
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,426 @@
+[MASTER]
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loaded into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-whitelist=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# List of plugins (as comma-separated values of python module names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# Specify a configuration file.
+#rcfile=
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W"
+disable=print-statement,parameter-unpacking,unpacking-in-except,old-raise-syntax,backtick,long-suffix,old-ne-operator,old-octal-literal,import-star-module-level,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,useless-suppression,deprecated-pragma,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,raising-string,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,invalid-str-codec,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,missing-docstring
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable=
+
+
+[REPORTS]
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors, warning, statement, which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Tells whether to display a full report or only the messages
+reports=yes
+
+# Activate the evaluation score.
+score=yes
+
+
+[REFACTORING]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,dict-separator
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[TYPECHECK]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=contextlib.contextmanager
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference=yes
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=optparse.Values,thread._local,_thread._local
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis).
+# It supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint=yes
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken into consideration
+# when showing a hint for a missing member.
+missing-member-max-choices=1
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it work,
+# install the python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[SIMILARITIES]
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take into consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[VARIABLES]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid defining new builtins when possible.
+additional-builtins=
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables=yes
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
+
+# Argument names that match this expression will be ignored. Defaults to names
+# with a leading underscore.
+ignored-argument-names=_.*|^ignored_|^unused_
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=six.moves,future.builtins
+
+
+[BASIC]
+
+# Naming hint for argument names
+argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct argument names
+argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Naming hint for attribute names
+attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct attribute names
+attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming hint for function names
+function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct function names
+function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_,urlpatterns,register,default_app_config
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for method names
+method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct method names
+method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=abc.abstractproperty
+
+# Naming hint for variable names
+variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+# Regular expression matching correct variable names
+variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in an if statement
+max-bool-expr=5
+
+# Maximum number of branches for function / method body
+max-branches=12
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+
+[IMPORTS]
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=enchant
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
diff --git a/.travis.yml b/.travis.yml
index 33d49f2..4ffc702 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -36,3 +36,6 @@ script:
   - cp passive_data_kit/travis_settings.py pdk/settings.py
   - python manage.py migrate
   - python manage.py test
+  - cp passive_data_kit/.pylintrc .
+  - pylint passive_data_kit
+  - bandit -r .
diff --git a/__init__.py b/__init__.py
index bd84cb9..c03d195 100644
--- a/__init__.py
+++ b/__init__.py
@@ -1 +1 @@
-default_app_config = 'passive_data_kit.apps.PassiveDataKitConfig'
\ No newline at end of file
+default_app_config = 'passive_data_kit.apps.PassiveDataKitConfig'
diff --git a/admin.py b/admin.py
index 2f219b4..458ad94 100644
--- a/admin.py
+++ b/admin.py
@@ -11,8 +11,9 @@ class DataPointVisualizationsAdmin(admin.OSMGeoAdmin):
 @admin.register(DataPoint)
 class DataPointAdmin(admin.OSMGeoAdmin):
     openlayers_url = 'https://openlayers.org/api/2.13.1/OpenLayers.js'
-
-    list_display = ('source', 'generator_identifier', 'secondary_identifier', 'created', 'recorded',)
+
+    list_display = ('source', 'generator_identifier', 'secondary_identifier', 'created', \
+                    'recorded',)
 
     list_filter = ('created', 'recorded', 'generator_identifier', 'secondary_identifier',)
 
 @admin.register(DataBundle)
diff --git a/aptible_settings.py b/aptible_settings.py
index 21fa5c3..496e1e6 100644
--- a/aptible_settings.py
+++ b/aptible_settings.py
@@ -1,3 +1,5 @@
+# pylint: skip-file
+
 """
 Settings.py for deploying standalone site on Aptible.
""" diff --git a/aptible_wsgi.py b/aptible_wsgi.py index a4b4cee..addf5f9 100644 --- a/aptible_wsgi.py +++ b/aptible_wsgi.py @@ -1,3 +1,5 @@ +# pylint: skip-file + """ WSGI config for Passive Data Kit project. diff --git a/decorators.py b/decorators.py index 4af96d3..26f5e95 100644 --- a/decorators.py +++ b/decorators.py @@ -1,3 +1,5 @@ +# pylint: disable=pointless-string-statement + import time import logging import tempfile @@ -23,7 +25,7 @@ def handle_lock(handle): Decorate the handle method with a file lock to ensure there is only ever one process running at any one time. """ - + def wrapper(self, *args, **options): start_time = time.time() verbosity = options.get('verbosity', 0) @@ -35,14 +37,15 @@ def wrapper(self, *args, **options): level = logging.INFO else: level = logging.DEBUG - + logging.basicConfig(level=level, format="%(message)s") logging.debug("-" * 72) - + lock_name = self.__module__.split('.').pop() - lock = FileLock(tempfile.gettempdir() + '/pdk_lock_' + lock_name) - - logging.debug("%s - acquiring lock..." % lock_name) + lock = FileLock('%s/pdk_lock_%s' % (tempfile.gettempdir(), lock_name)) + + logging.debug("%s - acquiring lock...", lock_name) + try: lock.acquire(LOCK_WAIT_TIMEOUT) except AlreadyLocked: @@ -51,22 +54,23 @@ def wrapper(self, *args, **options): except LockTimeout: logging.debug("waiting for the lock timed out. quitting.") return + logging.debug("acquired.") - + try: handle(self, *args, **options) - except: + except: # pylint: disable=bare-except import traceback logging.error("Command Failed") logging.error('==' * 72) logging.error(traceback.format_exc()) logging.error('==' * 72) - + logging.debug("releasing lock...") lock.release() logging.debug("released.") - - logging.info("done in %.2f seconds" % (time.time() - start_time)) + + logging.info("done in %.2f seconds", (time.time() - start_time)) return - + return wrapper diff --git a/management/commands/compile_reports.py b/management/commands/compile_reports.py index 29e7b80..02e573f 100644 --- a/management/commands/compile_reports.py +++ b/management/commands/compile_reports.py @@ -1,26 +1,23 @@ - # -*- coding: utf-8 -*- +# -*- coding: utf-8 -*- +# pylint: disable=no-member import datetime +import importlib import json import os -import pytz -import tempdir -import traceback - -import importlib +import tempfile from zipfile import ZipFile from django.conf import settings from django.core.files import File from django.core.mail import send_mail -from django.core.management.base import BaseCommand, CommandError -from django.db.models import Count +from django.core.management.base import BaseCommand from django.template.loader import render_to_string from django.utils import timezone from passive_data_kit.decorators import handle_lock -from passive_data_kit.models import DataPoint, DataBundle, DataPointVisualizations, ReportJob +from passive_data_kit.models import DataPoint, ReportJob class Command(BaseCommand): help = 'Compiles data reports requested by end users.' 
@@ -33,65 +30,84 @@ def add_arguments(self, parser):
 #                             default=False,
 #                             help='Delete data bundles after processing')
 #
-#         parser.add_argument('--count', 
-#                             type=int, 
+#         parser.add_argument('--count',
+#                             type=int,
 #                             dest='bundle_count',
 #                             default=100,
 #                             help='Number of bundles to process in a single run')
-
+
     @handle_lock
-    def handle(self, *args, **options):
+    def handle(self, *args, **options): # pylint: disable=too-many-locals,too-many-branches,too-many-statements
         os.umask(000)
-
-        report = ReportJob.objects.filter(started=None, completed=None).order_by('requested').first()
-
+
+        report = ReportJob.objects.filter(started=None, completed=None)\
+                     .order_by('requested')\
+                     .first()
+
         if report is not None:
             report.started = timezone.now()
             report.save()
-
+
             sources = report.parameters['sources']
             generators = report.parameters['generators']
-
+
             raw_json = False
-
+
             if ('raw_data' in report.parameters) and report.parameters['raw_data'] is True:
                 raw_json = True
-
+
             filename = tempfile.gettempdir() + '/pdk_export_' + str(report.pk) + '.zip'
 
             with ZipFile(filename, 'w') as export_file:
-                for generator in generators:
+                for generator in generators: # pylint: disable=too-many-nested-blocks
                     if raw_json:
                         for source in sources:
-                            first = DataPoint.objects.filter(source=source, generator_identifier=generator).first()
-                            last = DataPoint.objects.filter(source=source, generator_identifier=generator).last()
-
+                            first = DataPoint.objects.filter(source=source, generator_identifier=generator).first() # pylint: disable=line-too-long
+                            last = DataPoint.objects.filter(source=source, generator_identifier=generator).last() # pylint: disable=line-too-long
+
                             if first is not None:
                                 first_create = first.created
                                 last_create = last.created
-
-                                start = datetime.datetime(first_create.year, first_create.month, first_create.day, 0, 0, 0, 0, first_create.tzinfo)
-                                end = datetime.datetime(last_create.year, last_create.month, last_create.day, 0, 0, 0, 0, first_create.tzinfo) + datetime.timedelta(days=1)
-
+
+                                start = datetime.datetime(first_create.year, \
+                                                          first_create.month, \
+                                                          first_create.day, \
+                                                          0, \
+                                                          0, \
+                                                          0, \
+                                                          0, \
+                                                          first_create.tzinfo)
+
+                                end = datetime.datetime(last_create.year, \
+                                                        last_create.month, \
+                                                        last_create.day, \
+                                                        0, \
+                                                        0, \
+                                                        0, \
+                                                        0, \
+                                                        first_create.tzinfo) + \
+                                      datetime.timedelta(days=1)
+
                                 while start <= end:
                                     day_end = start + datetime.timedelta(days=1)
-
-                                    day_filename = source + '__' + generator + '__' + start.date().isoformat() + '.json'
-
-                                    points = DataPoint.objects.filter(source=source, generator_identifier=generator, created__gte=start, created__lt=day_end).order_by('created')
-
+
+                                    day_filename = source + '__' + generator + '__' + \
+                                                   start.date().isoformat() + '.json'
+
+                                    points = DataPoint.objects.filter(source=source, generator_identifier=generator, created__gte=start, created__lt=day_end).order_by('created') # pylint: disable=line-too-long
+
                                     out_points = []
-
+
                                     for point in points:
                                         out_points.append(point.properties)
-
-                                    if len(out_points) > 0:
-                                        export_file.writestr(day_filename, unicode(json.dumps(out_points, indent=2)).encode("utf-8"))
-
+
+                                    if out_points:
+                                        export_file.writestr(day_filename, unicode(json.dumps(out_points, indent=2)).encode("utf-8")) # pylint: disable=line-too-long
+
                                     start = day_end
                     else:
                         output_file = None
-
+
                         for app in settings.INSTALLED_APPS:
                             if output_file is None:
                                 try:
@@ -103,27 +119,33 @@ def handle(self, *args, **options):
                                     output_file = None
                                 except AttributeError:
                                     # traceback.print_exc()
-                                    output_file = None
-
+                                    output_file = None
+
                         if output_file is not None:
                             export_file.write(output_file, output_file.split('/')[-1])
-
+
                             os.remove(output_file)
-
+
             export_file.close()
-
+
             report.report.save(filename.split('/')[-1], File(open(filename, 'r')))
 
             report.completed = timezone.now()
             report.save()
-
-            subject = render_to_string('pdk_report_subject.txt', {'report': report, 'url': settings.SITE_URL})
-            message = render_to_string('pdk_report_message.txt', {'report': report, 'url': settings.SITE_URL})
-
-            host = settings.SITE_URL.split('/')[-2]
-            send_mail(subject, message, 'Petey Kay <noreply@' + host + '>', [report.requester.email], fail_silently=False)
-
+            subject = render_to_string('pdk_report_subject.txt', {
+                'report': report,
+                'url': settings.SITE_URL
+            })
+
+            message = render_to_string('pdk_report_message.txt', {
+                'report': report,
+                'url': settings.SITE_URL
+            })
+
+            host = settings.SITE_URL.split('/')[-2]
-
-
-
+
+            send_mail(subject, \
+                      message, \
+                      'Petey Kay <noreply@' + host + '>', \
+                      [report.requester.email], \
+                      fail_silently=False)
diff --git a/management/commands/compile_visualizations.py b/management/commands/compile_visualizations.py
index 26dcd47..2d98186 100644
--- a/management/commands/compile_visualizations.py
+++ b/management/commands/compile_visualizations.py
@@ -1,17 +1,17 @@
+# pylint: disable=no-member,line-too-long
+
 import datetime
-import json
+import importlib
 import os
-import pytz
-import importlib
+import pytz
 
 from django.conf import settings
-from django.core.management.base import BaseCommand, CommandError
-from django.db.models import Count
+from django.core.management.base import BaseCommand
 from django.utils import timezone
 
 from passive_data_kit.decorators import handle_lock
-from passive_data_kit.models import DataPoint, DataBundle, DataPointVisualizations
+from passive_data_kit.models import DataPoint, DataPointVisualizations
 
 class Command(BaseCommand):
     help = 'Compiles support files and other resources used for data inspection and visualization.'
@@ -24,40 +24,38 @@ def add_arguments(self, parser):
 #                             default=False,
 #                             help='Delete data bundles after processing')
 #
-#         parser.add_argument('--count', 
-#                             type=int, 
+#         parser.add_argument('--count',
+#                             type=int,
 #                             dest='bundle_count',
 #                             default=100,
 #                             help='Number of bundles to process in a single run')
-
+
     @handle_lock
     def handle(self, *args, **options):
         last_updated = None
-
+
         sources = DataPoint.objects.all().order_by('source').values_list('source', flat=True).distinct()
 
         for source in sources:
             identifiers = DataPoint.objects.filter(source=source).order_by('generator_identifier').values_list('generator_identifier', flat=True).distinct()
-
+
             for identifier in identifiers:
                 compiled = DataPointVisualizations.objects.filter(source=source, generator_identifier=identifier).order_by('last_updated').first()
-
+
                 if compiled is None:
                     compiled = DataPointVisualizations(source=source, generator_identifier=identifier)
-
-                    tz = pytz.timezone('UTC')
-
-                    compiled.last_updated = tz.localize(datetime.datetime.min)
+
+                    compiled.last_updated = pytz.timezone('UTC').localize(datetime.datetime.min)
 
                     compiled.save()
-
+
                 last_point = DataPoint.objects.filter(source=source, generator_identifier=identifier).order_by('-recorded').first()
-
+
                 if last_point is not None and last_point.recorded > compiled.last_updated:
                     if last_updated is None:
                         last_updated = compiled
                     elif last_updated.last_updated > compiled.last_updated:
                         last_updated = compiled
-
+
         if last_updated is not None:
             points = DataPoint.objects.filter(source=last_updated.source, generator_identifier=last_updated.generator_identifier)
 
@@ -65,24 +63,16 @@ def handle(self, *args, **options):
 
             if os.path.exists(folder) is False:
                 os.makedirs(folder)
-
-            output = {}
-
+
             for app in settings.INSTALLED_APPS:
                 try:
                     pdk_api = importlib.import_module(app + '.pdk_api')
 
-                    output = pdk_api.compile_visualization(last_updated.generator_identifier, points, folder)
+                    pdk_api.compile_visualization(last_updated.generator_identifier, points, folder)
                 except ImportError:
                     pass
                 except AttributeError:
                     pass
-
+
             last_updated.last_updated = timezone.now()
 
             last_updated.save()
-
-
-
-
-
-
diff --git a/management/commands/delete_redundant_readings.py b/management/commands/delete_redundant_readings.py
index 818a8c7..2b19f2a 100644
--- a/management/commands/delete_redundant_readings.py
+++ b/management/commands/delete_redundant_readings.py
@@ -1,12 +1,13 @@
+# pylint: disable=no-member,line-too-long
+
 import datetime
-import json
 
-from django.core.management.base import BaseCommand, CommandError
+from django.core.management.base import BaseCommand
 from django.db.models import Count
 from django.utils import timezone
 
 from passive_data_kit.decorators import handle_lock
-from passive_data_kit.models import DataPoint, DataBundle
+from passive_data_kit.models import DataPoint
 
 class Command(BaseCommand):
     help = 'Deletes identical DataPoint objects that may have been uploaded more than once.'
@@ -19,43 +20,42 @@ def add_arguments(self, parser):
 #                             default=False,
 #                             help='Delete data bundles after processing')
 #
-#         parser.add_argument('--count', 
-#                             type=int, 
+#         parser.add_argument('--count',
+#                             type=int,
 #                             dest='bundle_count',
 #                             default=100,
 #                             help='Number of bundles to process in a single run')
-
+
     @handle_lock
     def handle(self, *args, **options):
         to_delete = []
-
+
         start = timezone.now() - datetime.timedelta(hours=4)
-
+
         matches = DataPoint.objects.filter(recorded__gte=start).order_by('source', 'generator_identifier', 'created').values('source', 'generator_identifier', 'created').annotate(Count('pk'))
-
+
         dupes = []
-
+
         for match in matches:
             if match['pk__count'] > 1:
                 dupes.append(match)
 
         to_delete = []
-
+
         for dupe in dupes:
             dupe_objs = DataPoint.objects.filter(source=dupe['source'], generator=dupe['generator_identifier'], created=dupe['created']).order_by('pk')
-
+
             for dupe_obj in dupe_objs[1:]:
                 if dupe_objs[0].properties == dupe_obj.properties:
-                    print(str(dupe))
-
-                    to_delete.append(dupe_obj.pk)
-
+                    print str(dupe)
+
+                    to_delete.append(dupe_obj.pk)
+
                     if len(to_delete) % 500 == 0:
-                        print('TO DELETE: ' + str(len(to_delete)))
-
-        for pk in to_delete:
-            DataPoint.objects.get(pk=pk).delete()
-
-        if len(to_delete) > 0:
-            print('Deleted duplicates: ' + str(len(to_delete)))
+                        print 'TO DELETE: ' + str(len(to_delete))
+
+        for dp_id in to_delete:
+            DataPoint.objects.get(pk=dp_id).delete()
 
+        if to_delete:
+            print 'Deleted duplicates: ' + str(len(to_delete))
diff --git a/management/commands/process_bundles.py b/management/commands/process_bundles.py
index 2b8784a..b6d9cd1 100644
--- a/management/commands/process_bundles.py
+++ b/management/commands/process_bundles.py
@@ -1,8 +1,10 @@
+# pylint: disable=no-member,line-too-long
+
 import datetime
 import json
 
-from django.contrib.gis.geos import *
-from django.core.management.base import BaseCommand, CommandError
+from django.contrib.gis.geos import GEOSGeometry
+from django.core.management.base import BaseCommand
 from django.utils import timezone
 
 from ...decorators import handle_lock
@@ -13,54 +15,54 @@ class Command(BaseCommand):
 
     def add_arguments(self, parser):
         parser.add_argument('--delete',
-            action='store_true',
-            dest='delete',
-            default=False,
-            help='Delete data bundles after processing')
-
-        parser.add_argument('--count', 
-            type=int, 
-            dest='bundle_count',
-            default=100,
-            help='Number of bundles to process in a single run')
-
+                            action='store_true',
+                            dest='delete',
+                            default=False,
+                            help='Delete data bundles after processing')
+
+        parser.add_argument('--count',
+                            type=int,
+                            dest='bundle_count',
+                            default=100,
+                            help='Number of bundles to process in a single run')
+
     @handle_lock
     def handle(self, *args, **options):
         to_delete = []
-
+
         for bundle in DataBundle.objects.filter(processed=False)[:options['bundle_count']]:
             if install_supports_jsonfield() is False:
                 bundle.properties = json.loads(bundle.properties)
-
+
             for bundle_point in bundle.properties:
                 if 'passive-data-metadata' in bundle_point:
                     point = DataPoint(recorded=timezone.now())
                     point.source = bundle_point['passive-data-metadata']['source']
                     point.generator = bundle_point['passive-data-metadata']['generator']
-
+
                     if 'generator-id' in bundle_point['passive-data-metadata']:
                         point.generator_identifier = bundle_point['passive-data-metadata']['generator-id']
 
                     if 'latitude' in bundle_point['passive-data-metadata'] and 'longitude' in bundle_point['passive-data-metadata']:
                         point.generated_at = GEOSGeometry('POINT(' + str(bundle_point['passive-data-metadata']['longitude']) + ' ' + str(bundle_point['passive-data-metadata']['latitude']) + ')')
-
+
                     point.created = datetime.datetime.fromtimestamp(bundle_point['passive-data-metadata']['timestamp'], tz=timezone.get_default_timezone())
 
                     if install_supports_jsonfield():
                         point.properties = bundle_point
                     else:
                         point.properties = json.dumps(bundle_point, indent=2)
-
+
                     point.fetch_secondary_identifier()
-
+
                     point.save()
 
             if install_supports_jsonfield() is False:
                 bundle.properties = json.dumps(bundle.properties, indent=2)
-
+
             bundle.processed = True
             bundle.save()
-
+
             if options['delete']:
                 to_delete.append(bundle)
diff --git a/migrations/0001_initial.py b/migrations/0001_initial.py
index 0dfd6d1..dad0e82 100644
--- a/migrations/0001_initial.py
+++ b/migrations/0001_initial.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9 on 2015-12-20 03:48
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 import django.contrib.gis.db.models.fields
diff --git a/migrations/0002_databundle.py b/migrations/0002_databundle.py
index 64e13d0..8796cdb 100644
--- a/migrations/0002_databundle.py
+++ b/migrations/0002_databundle.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-02-11 01:30
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 import django.contrib.postgres.fields.jsonb
diff --git a/migrations/0003_auto_20160211_0223.py b/migrations/0003_auto_20160211_0223.py
index 506b5e5..74da1a2 100644
--- a/migrations/0003_auto_20160211_0223.py
+++ b/migrations/0003_auto_20160211_0223.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-02-11 02:23
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0004_auto_20160224_2218.py b/migrations/0004_auto_20160224_2218.py
index 69b1640..34b2e6c 100644
--- a/migrations/0004_auto_20160224_2218.py
+++ b/migrations/0004_auto_20160224_2218.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-02-24 22:18
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0005_auto_20160224_2239.py b/migrations/0005_auto_20160224_2239.py
index 1d0584c..6a2dfb8 100644
--- a/migrations/0005_auto_20160224_2239.py
+++ b/migrations/0005_auto_20160224_2239.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-02-24 22:39
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0006_auto_20160224_2240.py b/migrations/0006_auto_20160224_2240.py
index 0f347df..edf60a1 100644
--- a/migrations/0006_auto_20160224_2240.py
+++ b/migrations/0006_auto_20160224_2240.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-02-24 22:40
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0007_datapoint_generator_identifier.py b/migrations/0007_datapoint_generator_identifier.py
index 5024024..3cc2a77 100644
--- a/migrations/0007_datapoint_generator_identifier.py
+++ b/migrations/0007_datapoint_generator_identifier.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-17 20:40
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0008_datapointvisualizations.py b/migrations/0008_datapointvisualizations.py
index 53764fb..9888da3 100644
--- a/migrations/0008_datapointvisualizations.py
+++ b/migrations/0008_datapointvisualizations.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-17 22:59
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0009_reportjob.py b/migrations/0009_reportjob.py
index d8e681a..8824d84 100644
--- a/migrations/0009_reportjob.py
+++ b/migrations/0009_reportjob.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-19 21:45
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.conf import settings
diff --git a/migrations/0010_auto_20160319_2148.py b/migrations/0010_auto_20160319_2148.py
index fe377f2..77455ac 100644
--- a/migrations/0010_auto_20160319_2148.py
+++ b/migrations/0010_auto_20160319_2148.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-19 21:48
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations
diff --git a/migrations/0011_auto_20160319_2158.py b/migrations/0011_auto_20160319_2158.py
index a1b694b..511296a 100644
--- a/migrations/0011_auto_20160319_2158.py
+++ b/migrations/0011_auto_20160319_2158.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-19 21:58
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0012_auto_20160319_2225.py b/migrations/0012_auto_20160319_2225.py
index d3fba2c..3b9b8f9 100644
--- a/migrations/0012_auto_20160319_2225.py
+++ b/migrations/0012_auto_20160319_2225.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.2 on 2016-03-19 22:25
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0013_datafile.py b/migrations/0013_datafile.py
index 6234487..75802be 100644
--- a/migrations/0013_datafile.py
+++ b/migrations/0013_datafile.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.6 on 2016-08-12 02:25
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0014_auto_20160812_0243.py b/migrations/0014_auto_20160812_0243.py
index 25d1e0d..e700868 100644
--- a/migrations/0014_auto_20160812_0243.py
+++ b/migrations/0014_auto_20160812_0243.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.6 on 2016-08-12 02:43
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0015_datafile_identifier.py b/migrations/0015_datafile_identifier.py
index 3c64bdc..8ca0c68 100644
--- a/migrations/0015_datafile_identifier.py
+++ b/migrations/0015_datafile_identifier.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.9.6 on 2016-08-12 02:44
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/migrations/0016_datapoint_secondary_identifier.py b/migrations/0016_datapoint_secondary_identifier.py
index 152ff32..1ba3fae 100644
--- a/migrations/0016_datapoint_secondary_identifier.py
+++ b/migrations/0016_datapoint_secondary_identifier.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.10.6 on 2017-04-02 01:00
+# pylint: skip-file
+
 from __future__ import unicode_literals
 
 from django.db import migrations, models
diff --git a/models.py b/models.py
index bb20500..ee33fd0 100644
--- a/models.py
+++ b/models.py
@@ -1,9 +1,10 @@
+# pylint: disable=no-member,line-too-long
+
 from __future__ import unicode_literals
 
 import json
 import importlib
 
-import psycopg2
 
 from django.conf import settings
 from django.contrib.postgres.fields import JSONField
@@ -16,21 +17,21 @@ def generator_label(identifier):
     for app in settings.INSTALLED_APPS:
         try:
             pdk_api = importlib.import_module(app + '.pdk_api')
-
+
             name = pdk_api.name_for_generator(identifier)
-
+
             if name is not None:
                 return name
         except ImportError:
             pass
         except AttributeError:
             pass
-
+
     return identifier
 
 
 def install_supports_jsonfield():
-    return (connection.pg_version >= 90400)
+    return connection.pg_version >= 90400
 
 
 class DataPoint(models.Model):
@@ -38,57 +39,57 @@ class DataPoint(models.Model):
     generator = models.CharField(max_length=1024, db_index=True)
     generator_identifier = models.CharField(max_length=1024, db_index=True, default='unknown-generator')
     secondary_identifier = models.CharField(max_length=1024, db_index=True, null=True, blank=True)
-
+
     created = models.DateTimeField(db_index=True)
     generated_at = models.PointField(null=True)
-
+
     recorded = models.DateTimeField(db_index=True)
-
+
     if install_supports_jsonfield():
         properties = JSONField()
-    else: 
+    else:
         properties = models.TextField(max_length=(32 * 1024 * 1024 * 1024))
-
+
     def fetch_secondary_identifier(self):
         if self.secondary_identifier is not None:
             return self.secondary_identifier
         else:
             if self.generator_identifier == 'pdk-app-event':
                 props = self.fetch_properties()
-
+
                 self.secondary_identifier = props['event_name']
                 self.save()
-
+
                 return self.secondary_identifier
-
+
         return None
-
+
     def fetch_properties(self):
         if install_supports_jsonfield():
             return self.properties
-
+
         return json.loads(self.properties)
-
+
 
 class DataBundle(models.Model):
     recorded = models.DateTimeField(db_index=True)
 
     if install_supports_jsonfield():
         properties = JSONField()
-    else: 
+    else:
         properties = models.TextField(max_length=(32 * 1024 * 1024 * 1024))
-
+
     processed = models.BooleanField(default=False, db_index=True)
 
 
 class DataFile(models.Model):
     data_point = models.ForeignKey(DataPoint, related_name='data_files', null=True, blank=True)
     data_bundle = models.ForeignKey(DataBundle, related_name='data_files', null=True, blank=True)
-
+
     identifier = models.CharField(max_length=256, db_index=True)
     content_type = models.CharField(max_length=256, db_index=True)
     content_file = models.FileField(upload_to='data_files')
 
 
 class DataSourceGroup(models.Model):
-    name = models.CharField(max_length=1024, db_index=True) 
+    name = models.CharField(max_length=1024, db_index=True)
 
     def __unicode__(self):
         return self.name
@@ -96,60 +97,60 @@ def __unicode__(self):
 class DataSource(models.Model):
     identifier = models.CharField(max_length=1024, db_index=True)
     name = models.CharField(max_length=1024, db_index=True, unique=True)
-
+
     group = models.ForeignKey(DataSourceGroup, related_name='sources', null=True, on_delete=models.SET_NULL)
-
+
     def __unicode__(self):
         return self.name + ' (' + self.identifier + ')'
-
+
     def latest_point(self):
         return DataPoint.objects.filter(source=self.identifier).order_by('-created').first()
-
+
     def point_count(self):
         return DataPoint.objects.filter(source=self.identifier).count()
-
+
    def point_frequency(self):
        count = self.point_count()
-
+
        if count > 0:
            first = DataPoint.objects.filter(source=self.identifier).order_by('created').first()
            last = DataPoint.objects.filter(source=self.identifier).order_by('created').last()
-
+
            seconds = (last.created - first.created).total_seconds()
-
+
            return count / seconds
-
+
        return 0
-
+
    def generator_statistics(self):
        generators = []
-
+
        identifiers = DataPoint.objects.filter(source=self.identifier).order_by('generator_identifier').values_list('generator_identifier', flat=True).distinct()
-
+
        for identifier in identifiers:
            generator = {}
-
+
            generator['identifier'] = identifier
            generator['source'] = self.identifier
            generator['label'] = generator_label(identifier)
-
+
            generator['points_count'] = DataPoint.objects.filter(source=self.identifier, generator_identifier=identifier).count()
-
+
            first_point = DataPoint.objects.filter(source=self.identifier, generator_identifier=identifier).order_by('created').first()
 
            last_point = DataPoint.objects.filter(source=self.identifier, generator_identifier=identifier).order_by('-created').first()
 
            last_recorded = DataPoint.objects.filter(source=self.identifier, generator_identifier=identifier).order_by('-recorded').first()
-
+
            generator['last_recorded'] = last_recorded.recorded
            generator['first_created'] = first_point.created
            generator['last_created'] = last_point.created
            generator['frequency'] = float(generator['points_count']) / (last_point.created - first_point.created).total_seconds()
-
+
            generators.append(generator)
-
+
        return generators
-
+
 
 class DataPointVisualizations(models.Model):
     source = models.CharField(max_length=1024, db_index=True)
     generator_identifier = models.CharField(max_length=1024, db_index=True)
@@ -158,20 +159,20 @@ class DataPointVisualizations(models.Model):
 
 class ReportJob(models.Model):
     requester = models.ForeignKey(settings.AUTH_USER_MODEL)
-
+
     requested = models.DateTimeField(db_index=True)
     started = models.DateTimeField(db_index=True, null=True, blank=True)
     completed = models.DateTimeField(db_index=True, null=True, blank=True)
 
     if install_supports_jsonfield():
         parameters = JSONField()
-    else: 
+    else:
         parameters = models.TextField(max_length=(32 * 1024 * 1024 * 1024))
-
+
     report = models.FileField(upload_to='pdk_reports', null=True, blank=True)
 
 
 @receiver(post_delete, sender=ReportJob)
-def report_job_post_delete_handler(sender, **kwargs):
+def report_job_post_delete_handler(sender, **kwargs): # pylint: disable=unused-argument
     job = kwargs['instance']
 
     storage, path = job.report.storage, job.report.path
 
     storage.delete(path)
diff --git a/pdk_api.py b/pdk_api.py
index d508b61..df5787e 100644
--- a/pdk_api.py
+++ b/pdk_api.py
@@ -1,19 +1,16 @@
 import calendar
-import codecs
 import csv
 import json
-import tempdir
-
-from django.template.loader import render_to_string
+import tempfile
 
 from .models import DataPoint
 
 # def name_for_generator(identifier):
 #     if identifier == 'web-historian':
 #         return 'Web Historian Web Visits'
-#     
+#
 #     return None
-
+
 # def compile_visualization(identifier, points_query, folder):
 #
 #     if identifier == 'web-historian':
@@ -25,50 +22,61 @@
 #             'source': source,
 #             'identifier': identifier,
 #         }
-#     
+#
 #         return render_to_string('table_web_historian.html', context)
-#     
+#
 #     return None
 
 
 def compile_report(generator, sources):
     filename = tempfile.gettempdir() + '/pdk_' + generator + '.txt'
-
+
     with open(filename, 'w') as outfile:
         writer = csv.writer(outfile, delimiter='\t')
-
-        writer.writerow(['Source', 'Generator', 'Generator Identifier', 'Created Timestamp', 'Created Date', 'Latitude', 'Longitude', 'Recorded Timestamp', 'Recorded Date', 'Properties'])
-
+
+        writer.writerow([
+            'Source',
+            'Generator',
+            'Generator Identifier',
+            'Created Timestamp',
+            'Created Date',
+            'Latitude',
+            'Longitude',
+            'Recorded Timestamp',
+            'Recorded Date',
+            'Properties'
+        ])
+
         for source in sources:
-            points = DataPoint.objects.filter(source=source, generator_identifier=generator).order_by('created')
-
+            points = DataPoint.objects.filter(source=source, generator_identifier=generator).order_by('created') # pylint: disable=no-member,line-too-long
+
             index = 0
             count = points.count()
-
+
             while index < count:
 #                print(source + '/' + generator + ': ' + str(index) + ' / ' + str(count))
-#
+#
                 for point in points[index:(index + 5000)]:
                     row = []
-
+
                     row.append(point.source)
                     row.append(point.generator)
                     row.append(point.generator_identifier)
                     row.append(calendar.timegm(point.created.utctimetuple()))
                     row.append(point.created.isoformat())
-
+
                     if point.generated_at is not None:
                         row.append(point.generated_at.y)
                         row.append(point.generated_at.x)
                     else:
                         row.append('')
                         row.append('')
-
+
                     row.append(calendar.timegm(point.recorded.utctimetuple()))
                     row.append(point.recorded.isoformat())
                     row.append(json.dumps(point.properties))
-
+
                     writer.writerow(row)
-
+
                 index += 5000
-
+
     return filename
diff --git a/requirements.txt b/requirements.txt
index 3559ee9..3815e5b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -10,3 +10,5 @@ pytz==2017.2
 six==1.10.0
 whitenoise==3.3.0
 wsgiref==0.1.2
+pylint==1.7.1
+bandit==1.4.0
diff --git a/templatetags/passive_data_kit.py b/templatetags/passive_data_kit.py
index de9c360..ebd375f 100644
--- a/templatetags/passive_data_kit.py
+++ b/templatetags/passive_data_kit.py
@@ -1,18 +1,18 @@
 import arrow
 
 from django import template
-from django.conf import settings
 from django.template.loader import render_to_string
 from django.utils import timezone
 
 register = template.Library()
 
 @register.tag(name="sources_table")
-def sources_table(parser, token):
+def sources_table(parser, token): # pylint: disable=unused-argument
     try:
-        tag_name, query = token.split_contents()
+        tag_name, query = token.split_contents() # pylint: disable=unused-variable
     except ValueError:
-        raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0])
+        raise template.TemplateSyntaxError("%r tag requires a single argument" % \
+                                           token.contents.split()[0])
 
     return SourcesTableNode(query)
 
@@ -29,11 +29,12 @@ def render(self, context):
 
 
 @register.tag(name="latest_point")
-def latest_point(parser, token):
+def latest_point(parser, token): # pylint: disable=unused-argument
     try:
-        tag_name, source = token.split_contents()
+        tag_name, source = token.split_contents() # pylint: disable=unused-variable
     except ValueError:
-        raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0])
+        raise template.TemplateSyntaxError("%r tag requires a single argument" %
+                                           token.contents.split()[0])
 
     return LatestPointNode(source)
 
@@ -43,18 +44,19 @@ def __init__(self, source):
 
     def render(self, context):
         source = self.source.resolve(context)
-
+
         context['latest_point'] = source.latest_point()
-
+
         return render_to_string('tag_latest_point.html', context)
 
 
 @register.tag(name="point_count")
-def point_count(parser, token):
+def point_count(parser, token): # pylint: disable=unused-argument
     try:
-        tag_name, source = token.split_contents()
+        tag_name, source = token.split_contents() # pylint: disable=unused-variable
     except ValueError:
-        raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0])
+        raise template.TemplateSyntaxError("%r tag requires a single argument" % \
+                                           token.contents.split()[0])
 
     return PointCountNode(source)
 
@@ -64,16 +66,17 @@ def __init__(self, source):
 
     def render(self, context):
         source = self.source.resolve(context)
-
+
         return source.point_count()
@register.tag(name="point_hz") -def point_hz(parser, token): +def point_hz(parser, token): # pylint: disable=unused-argument try: - tag_name, source = token.split_contents() + tag_name, source = token.split_contents() # pylint: disable=unused-variable except ValueError: - raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0]) + raise template.TemplateSyntaxError("%r tag requires a single argument" % \ + token.contents.split()[0]) return PointHzNode(source) @@ -83,7 +86,7 @@ def __init__(self, source): def render(self, context): source = self.source.resolve(context) - + frequency = source.point_frequency() value = "{:10.3f}".format(frequency) + " Hz" @@ -119,11 +122,12 @@ def render(self, context): return render_to_string('tag_point_hz.html', context) @register.tag(name="to_hz") -def to_hz(parser, token): +def to_hz(parser, token): # pylint: disable=unused-argument try: - tag_name, frequency = token.split_contents() + tag_name, frequency = token.split_contents() # pylint: disable=unused-variable except ValueError: - raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0]) + raise template.TemplateSyntaxError("%r tag requires a single argument" % \ + token.contents.split()[0]) return ToHzNode(frequency) @@ -167,11 +171,12 @@ def render(self, context): return render_to_string('tag_point_hz.html', context) @register.tag(name="date_ago") -def date_ago(parser, token): +def date_ago(parser, token): # pylint: disable=unused-argument try: - tag_name, date_obj = token.split_contents() + tag_name, date_obj = token.split_contents() # pylint: disable=unused-variable except ValueError: - raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0]) + raise template.TemplateSyntaxError("%r tag requires a single argument" % \ + token.contents.split()[0]) return DateAgoNode(date_obj) @@ -208,11 +213,12 @@ def render(self, context): @register.tag(name="human_duration") -def tag_human_duration(parser, token): +def tag_human_duration(parser, token): # pylint: disable=unused-argument try: - tag_name, seconds_obj = token.split_contents() + tag_name, seconds_obj = token.split_contents() # pylint: disable=unused-variable except ValueError: - raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0]) + raise template.TemplateSyntaxError("%r tag requires a single argument" % \ + token.contents.split()[0]) return HumanDurationNode(seconds_obj) @@ -242,11 +248,12 @@ def render(self, context): @register.tag(name="generators_table") -def generators_table(parser, token): +def generators_table(parser, token): # pylint: disable=unused-argument try: - tag_name, source = token.split_contents() + tag_name, source = token.split_contents() # pylint: disable=unused-variable except ValueError: - raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0]) + raise template.TemplateSyntaxError("%r tag requires a single argument" % \ + token.contents.split()[0]) return GeneratorsTableNode(source) @@ -260,16 +267,17 @@ def render(self, context): context['source'] = source return render_to_string('tag_generators_table.html', context) - + @register.tag(name="generator_label") -def generator_label(parser, token): +def generator_label(parser, token): # pylint: disable=unused-argument try: - tag_name, generator_id = token.split_contents() + tag_name, generator_id = token.split_contents() # pylint: disable=unused-variable except 
ValueError: - raise template.TemplateSyntaxError("%r tag requires a single argument" % token.contents.split()[0]) + raise template.TemplateSyntaxError("%r tag requires a single argument" % \ + token.contents.split()[0]) - return GeneratorLabelNode(source) + return GeneratorLabelNode(generator_id) class GeneratorLabelNode(template.Node): def __init__(self, source): @@ -281,7 +289,3 @@ def render(self, context): context['source'] = source return render_to_string('tag_generators_table.html', context) - - - - diff --git a/travis_settings.py b/travis_settings.py index 380c7f3..dbbcc29 100644 --- a/travis_settings.py +++ b/travis_settings.py @@ -1,3 +1,5 @@ +# pylint: skip-file + """ Settings.py for testing on Travis CI. """ diff --git a/urls.py b/urls.py index 5e6be6b..fa0b34c 100644 --- a/urls.py +++ b/urls.py @@ -4,9 +4,11 @@ pdk_source_generator, pdk_visualization_data, pdk_export, pdk_download_report urlpatterns = [ - url(r'^visualization/(?P.+)/(?P.+)/(?P\d+).json$', pdk_visualization_data, name='pdk_visualization_data'), + url(r'^visualization/(?P.+)/(?P.+)/(?P\d+).json$', \ + pdk_visualization_data, name='pdk_visualization_data'), url(r'^report/(?P\d+)/download$', pdk_download_report, name='pdk_download_report'), - url(r'^source/(?P.+)/(?P.+)$', pdk_source_generator, name='pdk_source_generator'), + url(r'^source/(?P.+)/(?P.+)$', pdk_source_generator, \ + name='pdk_source_generator'), url(r'^source/(?P.+)$', pdk_source, name='pdk_source'), url(r'^export$', pdk_export, name='pdk_export'), url(r'^add-point.json$', add_data_point, name='pdk_add_data_point'), diff --git a/views.py b/views.py index 638108f..8638558 100644 --- a/views.py +++ b/views.py @@ -1,64 +1,72 @@ +# pylint: disable=no-member + import datetime import importlib import json import os from django.conf import settings -from django.http import HttpResponse, HttpResponseNotAllowed, JsonResponse, HttpResponseNotFound, FileResponse -from django.shortcuts import render, render_to_response +from django.http import HttpResponse, HttpResponseNotAllowed, JsonResponse, HttpResponseNotFound, \ + FileResponse +from django.shortcuts import render_to_response from django.utils import timezone from django.views.decorators.csrf import csrf_exempt from django.contrib.admin.views.decorators import staff_member_required -from .models import DataPoint, DataBundle, DataFile, DataSourceGroup, DataSource, ReportJob, generator_label, install_supports_jsonfield +from .models import DataPoint, DataBundle, DataFile, DataSourceGroup, DataSource, ReportJob, \ + generator_label, install_supports_jsonfield @csrf_exempt def add_data_point(request): - response = { 'message': 'Data point added successfully.' 
} - + response = {'message': 'Data point added successfully.'} + if request.method == 'CREATE': - response = HttpResponse(json.dumps(response, indent=2), content_type='application/json', status=201) + response = HttpResponse(json.dumps(response, indent=2), content_type='application/json', \ + status=201) response['Access-Control-Allow-Origin'] = '*' response['Access-Control-Allow-Methods'] = 'CREATE, POST' response['Access-Control-Request-Headers'] = 'Content-Type' response['Access-Control-Allow-Headers'] = 'Content-Type' - + point = json.loads(request.body) - - point = DataPoint(recorded=timezone.now()) - point.source = point['passive-data-metadata']['source'] - point.generator = point['passive-data-metadata']['generator'] - point.created = datetime.datetime.fromtimestamp(point['passive-data-metadata']['source'], tz=timezone.get_default_timezone()) - + + data_point = DataPoint(recorded=timezone.now()) + data_point.source = point['passive-data-metadata']['source'] + data_point.generator = point['passive-data-metadata']['generator'] + data_point.created = datetime.datetime.fromtimestamp(point['passive-data-metadata']['source'], tz=timezone.get_default_timezone()) # pylint: disable=line-too-long + if install_supports_jsonfield(): - point.properties = point + data_point.properties = point else: - point.properties = json.dumps(point, indent=2) - - point.save() + data_point.properties = json.dumps(point, indent=2) + + data_point.save() return response elif request.method == 'POST': - response = HttpResponse(json.dumps(response, indent=2), content_type='application/json', status=201) + response = HttpResponse(json.dumps(response, indent=2), \ + content_type='application/json', \ + status=201) + response['Access-Control-Allow-Origin'] = '*' response['Access-Control-Allow-Methods'] = 'CREATE, POST' response['Access-Control-Request-Headers'] = 'Content-Type' response['Access-Control-Allow-Headers'] = 'Content-Type' - + point = json.loads(request.POST['payload']) - - point = DataPoint(recorded=timezone.now()) - point.source = point['passive-data-metadata']['source'] - point.generator = point['passive-data-metadata']['generator'] - point.created = datetime.datetime.fromtimestamp(point['passive-data-metadata']['source'], tz=timezone.get_default_timezone()) + + data_point = DataPoint(recorded=timezone.now()) + data_point.source = point['passive-data-metadata']['source'] + data_point.generator = point['passive-data-metadata']['generator'] + data_point.created = datetime.datetime.fromtimestamp(point['passive-data-metadata']['source'], tz=timezone.get_default_timezone()) # pylint: disable=line-too-long if install_supports_jsonfield(): - point.properties = point + data_point.properties = point else: - point.properties = json.dumps(point, indent=2) - - point.save() + data_point.properties = json.dumps(point, indent=2) + + data_point.save() return response elif request.method == 'OPTIONS': @@ -67,25 +75,28 @@ def add_data_point(request): response['Access-Control-Allow-Methods'] = 'CREATE, POST' response['Access-Control-Request-Headers'] = 'Content-Type' response['Access-Control-Allow-Headers'] = 'Content-Type' - + return response - + return HttpResponseNotAllowed(['CREATE', 'POST']) @csrf_exempt def add_data_bundle(request): - response = { 'message': 'Data bundle added successfully, and ready for processing.' 
 @csrf_exempt
 def add_data_bundle(request):
-    response = { 'message': 'Data bundle added successfully, and ready for processing.' }
-
+    response = {'message': 'Data bundle added successfully, and ready for processing.'}
+
     if request.method == 'CREATE':
-        response = HttpResponse(json.dumps(response, indent=2), content_type='application/json', status=201)
+        response = HttpResponse(json.dumps(response, indent=2), \
+            content_type='application/json', \
+            status=201)
+
         response['Access-Control-Allow-Origin'] = '*'
         response['Access-Control-Allow-Methods'] = 'CREATE, POST'
         response['Access-Control-Request-Headers'] = 'Content-Type'
         response['Access-Control-Allow-Headers'] = 'Content-Type'
 
         points = json.loads(request.body)
-
+
         bundle = DataBundle(recorded=timezone.now())
 
         if install_supports_jsonfield():
@@ -94,18 +105,21 @@
             bundle.properties = json.dumps(points, indent=2)
 
         bundle.save()
-
+
         return response
     elif request.method == 'POST':
-        response = HttpResponse(json.dumps(response, indent=2), content_type='application/json', status=201)
+        response = HttpResponse(json.dumps(response, indent=2), \
+            content_type='application/json', \
+            status=201)
+
         response['Access-Control-Allow-Origin'] = '*'
         response['Access-Control-Allow-Methods'] = 'CREATE, POST'
         response['Access-Control-Request-Headers'] = 'Content-Type'
         response['Access-Control-Allow-Headers'] = 'Content-Type'
-
+
         try:
             points = json.loads(request.POST['payload'])
-
+
             bundle = DataBundle(recorded=timezone.now())
 
             if install_supports_jsonfield():
@@ -115,16 +129,18 @@
                 bundle.save()
         except ValueError:
-            response = { 'message': 'Unable to parse data bundle.' }
-            response = HttpResponse(json.dumps(response, indent=2), content_type='application/json', status=201)
-
-        for key, value in request.FILES.iteritems():
-            file = DataFile(data_bundle=bundle)
-            file.identifier = value.name
-            file.content_type = value.content_type
-            file.content_file.save(value.name, value)
-            file.save()
-
+            response = {'message': 'Unable to parse data bundle.'}
+            response = HttpResponse(json.dumps(response, indent=2), \
+                content_type='application/json', \
+                status=201)
+
+        for key, value in request.FILES.iteritems(): # pylint: disable=unused-variable
+            data_file = DataFile(data_bundle=bundle)
+            data_file.identifier = value.name
+            data_file.content_type = value.content_type
+            data_file.content_file.save(value.name, value)
+            data_file.save()
+
         return response
     elif request.method == 'OPTIONS':
         response = HttpResponse('', content_type='text/plain', status=200)
@@ -132,9 +148,9 @@
         response['Access-Control-Allow-Methods'] = 'CREATE, POST'
         response['Access-Control-Request-Headers'] = 'Content-Type'
         response['Access-Control-Allow-Headers'] = 'Content-Type'
-
+
         return response
-
+
     return HttpResponseNotAllowed(['CREATE', 'POST'])
 
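add_data_bundle above accepts a JSON list of points in the same 'payload' field, and
attaches any uploaded files to the bundle as DataFile records keyed by field name. A
minimal sketch, assuming a hypothetical URL and file (the bundle endpoint itself is
registered outside the urls.py excerpt above):

    import json

    import requests

    bundle = [point_a, point_b]  # a list of point dictionaries as in the sketch above

    requests.post('https://example.com/data/add-bundle.json',
                  data={'payload': json.dumps(bundle)},
                  files={'audio-sample': open('sample.wav', 'rb')})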
 @staff_member_required
 def pdk_home(request):
     for app in settings.INSTALLED_APPS:
         try:
             app_views = importlib.import_module(app + '.views')
-
+
             return app_views.custom_pdk_home(request)
         except ImportError:
             pass
         except AttributeError:
             pass
 
-    c = {}
-
+    context = {}
+
     if request.method == 'POST':
         if request.POST['operation'] == 'add_source':
             identifier = request.POST['source_identifier'].strip()
             name = request.POST['friendly_name'].strip()
-
+
             group = request.POST['assigned_group']
             group_name = request.POST['new_group_name'].strip()
-
+
             source = DataSource(identifier=identifier, name=name)
-
+
             if group == "-1":
                 pass
             elif group == "0":
                 group = DataSourceGroup.objects.filter(name=group_name).first()
-
-                if group == None:
+
+                if group is None:
                     group = DataSourceGroup(name=group_name)
                     group.save()
-
+
                 source.group = group
             else:
                 source.group = DataSourceGroup.objects.get(pk=int(group))
-
+
             source.save()
 
         if request.POST['operation'] == 'remove_source':
             DataSource.objects.filter(pk=int(request.POST['pk'])).delete()
-
-    c['groups'] = DataSourceGroup.objects.order_by('name')
-    c['solo_sources'] = DataSource.objects.filter(group=None).order_by('name')
-    return render_to_response('pdk_home.html', c)
+    context['groups'] = DataSourceGroup.objects.order_by('name')
+    context['solo_sources'] = DataSource.objects.filter(group=None).order_by('name')
+
+    return render_to_response('pdk_home.html', context)
 
 @staff_member_required
-def pdk_source(request, source_id):
-    c = {}
-
-    source_name = None
-    source_group = None
+def pdk_source(request, source_id): # pylint: disable=unused-argument
+    context = {}
 
     source = DataSource.objects.filter(identifier=source_id).first()
-
+
     if source is None:
         source = DataSource(identifier=source_id, name='Unknown')
         source.save()
-
-    c['source'] = source
-    return render_to_response('pdk_source.html', c)
+    context['source'] = source
+
+    return render_to_response('pdk_source.html', context)
 
 @staff_member_required
-def pdk_source_generator(request, source_id, generator_id):
-    c = {}
-
-    source_name = None
-    source_group = None
+def pdk_source_generator(request, source_id, generator_id): # pylint: disable=unused-argument
+    context = {}
 
     source = DataSource.objects.filter(identifier=source_id).first()
-
+
     if source is None:
         source = DataSource(identifier=source_id, name='Unknown')
         source.save()
-
-    c['source'] = source
-    c['generator'] = generator_id
-    c['generator_label'] = generator_label(generator_id)
-    c['viz_template'] = None
+    context['source'] = source
+    context['generator'] = generator_id
+    context['generator_label'] = generator_label(generator_id)
+
+    context['viz_template'] = None
 
     for app in settings.INSTALLED_APPS:
         try:
             pdk_api = importlib.import_module(app + '.pdk_api')
 
-            c['viz_template'] = pdk_api.viz_template(source, generator_id)
+            context['viz_template'] = pdk_api.viz_template(source, generator_id)
         except ImportError:
             pass
         except AttributeError:
             pass
 
-    return render_to_response('pdk_source_generator.html', c)
+    return render_to_response('pdk_source_generator.html', context)
 
 @staff_member_required
-def unmatched_sources(request):
+def unmatched_sources(request): # pylint: disable=unused-argument
     sources = []
-
+
     for point in DataPoint.objects.order_by('source').values_list('source', flat=True).distinct():
         sources.append(point)
 
@@ -242,88 +252,93 @@
 @staff_member_required
-def pdk_visualization_data(request, source_id, generator_id, page):
+def pdk_visualization_data(request, source_id, generator_id, page): # pylint: disable=unused-argument
     folder = settings.MEDIA_ROOT + '/pdk_visualizations/' + source_id + '/' + generator_id
-
+
     filename = 'visualization-' + page + '.json'
-
+
     if page == '0':
         filename = 'visualization.json'
-
+
     try:
-        with open(folder + '/' + filename) as data_file:
+        with open(folder + '/' + filename) as data_file:
             return HttpResponse(data_file.read(), content_type='application/json')
     except IOError:
         pass
-
+
     return HttpResponseNotFound()
 
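pdk_visualization_data above serves pre-generated files rather than querying the
database; page 0 maps onto visualization.json and higher pages onto numbered files.
The on-disk layout it expects (identifiers hypothetical):

    MEDIA_ROOT/pdk_visualizations/phone-123/pdk-location/visualization.json    # page 0
    MEDIA_ROOT/pdk_visualizations/phone-123/pdk-location/visualization-1.json  # page 1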
 @staff_member_required
-def pdk_download_report(request, report_id):
+def pdk_download_report(request, report_id): # pylint: disable=unused-argument
     job = ReportJob.objects.get(pk=int(report_id))
 
     filename = settings.MEDIA_ROOT + '/' + job.report.name
-
+
     response = FileResponse(open(filename, 'rb'), content_type='application/octet-stream')
-
+
     response['Content-Length'] = os.path.getsize(filename)
     response['Content-Disposition'] = 'attachment; filename=pdk-export.zip'
-
+
     return response
-
+
 @staff_member_required
 def pdk_export(request):
-    c = {}
-
-    c['sources'] = DataPoint.objects.all().order_by('source').values_list('source', flat=True).distinct()
-    c['generators'] = DataPoint.objects.all().order_by('generator_identifier').values_list('generator_identifier', flat=True).distinct()
-
-    c['message'] = ''
-    c['message_type'] = 'ok'
-
+    context = {}
+
+    context['sources'] = DataPoint.objects.all().order_by('source')\
+                                  .values_list('source', flat=True)\
+                                  .distinct()
+
+    context['generators'] = DataPoint.objects.all().order_by('generator_identifier')\
+                                     .values_list('generator_identifier', flat=True)\
+                                     .distinct()
+
+    context['message'] = ''
+    context['message_type'] = 'ok'
+
     if request.method == 'POST':
         export_sources = []
         export_generators = []
-
-        for source in c['sources']:
+
+        for source in context['sources']:
             key = 'source_' + source
-
+
             if key in request.POST:
                 export_sources.append(source)
 
-        for generator in c['generators']:
+        for generator in context['generators']:
             key = 'generator_' + generator
-
+
             if key in request.POST:
                 export_generators.append(generator)
-
-        if len(export_sources) == 0:
-            c['message_type'] = 'error'
-
-            if len(export_generators) == 0:
-                c['message'] = 'Please select one or more sources and generators to export data.'
+
+        if len(export_sources) == 0: # pylint: disable=len-as-condition
+            context['message_type'] = 'error'
+
+            if len(export_generators) == 0: # pylint: disable=len-as-condition
+                context['message'] = 'Please select one or more sources and generators to export data.' # pylint: disable=line-too-long
             else:
-                c['message'] = 'Please select one or more sources to export data.'
-        elif len(export_generators) == 0:
-            c['message_type'] = 'error'
-
-            c['message'] = 'Please select one or more generators to export data.'
+                context['message'] = 'Please select one or more sources to export data.'
+        elif len(export_generators) == 0: # pylint: disable=len-as-condition
+            context['message_type'] = 'error'
+
+            context['message'] = 'Please select one or more generators to export data.'
         else:
             job = ReportJob(requester=request.user, requested=timezone.now())
-
+
             params = {}
-
+
             params['sources'] = export_sources
             params['generators'] = export_generators
-
+
             if 'export_raw_json' in request.POST and request.POST['export_raw_json']:
                 params['raw_data'] = True
-
+
             job.parameters = params
-
+
             job.save()
-
-            c['message_type'] = 'ok'
-            c['message'] = 'Export job queued. Check your e-mail for a link to the output when the export is complete.'
-
-    return render_to_response('pdk_export.html', c)
+
+            context['message_type'] = 'ok'
+            context['message'] = 'Export job queued. Check your e-mail for a link to the output when the export is complete.' # pylint: disable=line-too-long
+
+    return render_to_response('pdk_export.html', context)
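The export form submits one checkbox per source and generator, named with the
'source_' and 'generator_' prefixes checked above. A minimal sketch of the
parameters a queued ReportJob ends up with (identifiers hypothetical):

    # POST fields: source_phone-123=on, generator_pdk-location=on, export_raw_json=on
    job.parameters = {
        'sources': ['phone-123'],
        'generators': ['pdk-location'],
        'raw_data': True,
    }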