Skip to content

Commit

Permalink
Merge branch 'release-1.7.1'
Browse files Browse the repository at this point in the history
  • Loading branch information
ilan-gold committed Oct 21, 2019
2 parents 1bacf5c + fec0f1b commit 5a4630b
Show file tree
Hide file tree
Showing 111 changed files with 7,127 additions and 7,105 deletions.
1 change: 0 additions & 1 deletion .gitignore
Expand Up @@ -51,7 +51,6 @@ codekit-config.json
/refinery/config/config.json
/refinery/ui/production/
/refinery/ui/development/
/refinery/ui/bower_components/
/refinery/ui/node_modules/
/refinery/ui/.tmp/
/refinery/ui/coverage/
Expand Down
12 changes: 5 additions & 7 deletions .travis.yml
Expand Up @@ -19,13 +19,13 @@ addons:

install:
- node --version
- NODE_VERSION=6.11.0
- NODE_VERSION=10.16.0
- nvm install $NODE_VERSION
- nvm use $NODE_VERSION
- node --version

- npm --version
- npm install -g npm@3.10.10
- npm install -g npm@6.9.0
- npm --version

- pip install -r requirements.txt --quiet
Expand Down Expand Up @@ -55,10 +55,9 @@ before_script:
# See http://www.stuartellis.eu/articles/erb/#running-erb-from-the-command-line
- erb config/config.json.erb > config/config.json
- python manage.py migrate --noinput --fake-initial
- npm install -g grunt-cli@0.1.13 bower@1.8.8 --progress false --quiet || ( cat npm-debug.log && false )
- npm install -g grunt-cli@0.1.13 --progress false --quiet || ( cat npm-debug.log && false )
- cd ui
- npm install --progress false --quiet || ( cat npm-debug.log && false )
- bower install --config.interactive=false --quiet
- cd ../

# Required for cypress tests; TODO: Move to puppet.
Expand All @@ -82,19 +81,18 @@ script:
- echo 'travis_fold:end:grunt'

- echo 'travis_fold:start:django-tests'
- coverage run manage.py test
- coverage run manage.py test --failfast
- echo 'travis_fold:end:django-tests'

- echo 'travis_fold:start:cypress'
- pushd ui && node_modules/.bin/cypress run && popd
- pushd ui && node_modules/.bin/cypress run --record && popd
- echo 'travis_fold:end:cypress'

- set +e # Currently, codecov does not always exit with 0, but that should not cause travis to fail.

after_success:
- echo 'travis_fold:start:codecov'
- codecov
- npm run codecov
- echo 'travis_fold:end:codecov'
notifications:
slack:
Expand Down
1,087 changes: 255 additions & 832 deletions CHANGELOG.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion deployment/puppet/Puppetfile
@@ -1,6 +1,6 @@
forge "https://forgeapi.puppetlabs.com"

mod 'puppet/nodejs', '2.1.0'
mod 'puppet/nodejs', '7.0.1'
mod 'puppet/python', '3.0.1'
mod 'puppet/rabbitmq', '9.0.0'
mod 'puppetlabs-apache', '5.0.0'
Expand Down
10 changes: 4 additions & 6 deletions deployment/puppet/Puppetfile.lock
Expand Up @@ -6,9 +6,8 @@ FORGE
puppetlabs-stdlib (< 7.0.0, >= 4.13.1)
puppet-archive (3.2.1)
puppetlabs-stdlib (< 6.0.0, >= 4.13.1)
puppet-nodejs (2.1.0)
puppetlabs-stdlib (< 5.0.0, >= 4.1.0)
treydock-gpg_key (< 1.0.0, >= 0.0.3)
puppet-nodejs (7.0.1)
puppetlabs-stdlib (< 7.0.0, >= 4.25.0)
puppet-python (3.0.1)
puppetlabs-stdlib (< 7.0.0, >= 4.19.0)
stahnma-epel (< 2.0.0, >= 1.2.2)
Expand Down Expand Up @@ -43,7 +42,7 @@ FORGE
puppetlabs-stdlib (< 6.0.0, >= 4.13.1)
puppetlabs-powershell (2.3.0)
puppetlabs-reboot (2.1.2)
puppetlabs-stdlib (4.25.1)
puppetlabs-stdlib (5.2.0)
puppetlabs-translate (1.2.0)
saz-timezone (5.1.1)
puppetlabs-stdlib (< 6.0.0, >= 2.6.0)
Expand All @@ -52,15 +51,14 @@ FORGE
puppetlabs-stdlib (>= 3.0.0)
stm-debconf (2.3.0)
tpdownes-sysctl (2.1.0)
treydock-gpg_key (0.1.0)

PATH
remote: refinery
specs:
refinery-refinery (0.0.1)

DEPENDENCIES
puppet-nodejs (= 2.1.0)
puppet-nodejs (= 7.0.1)
puppet-python (= 3.0.1)
puppet-rabbitmq (= 9.0.0)
puppetlabs-apache (= 5.0.0)
Expand Down
2 changes: 1 addition & 1 deletion deployment/puppet/refinery/manifests/postgresql.pp
Expand Up @@ -7,7 +7,7 @@
$rds_endpoint_address = $refinery::params::rds_endpoint_address,
) inherits refinery::params {
$server_version = '10'
$package_version = "${server_version}.9-1.pgdg16.04+1"
$package_version = "${server_version}.10-1.pgdg16.04+1"

if $deployment_platform == 'aws' {
$rds_settings = {
Expand Down
2 changes: 1 addition & 1 deletion deployment/puppet/refinery/manifests/solr.pp
Expand Up @@ -74,7 +74,7 @@
match => "^SOLR_HOME";
'solr_config_heap':
path => '/var/solr/solr.in.sh',
line => 'SOLR_HEAP="32m"',
line => 'SOLR_HEAP="64m"',
match => 'SOLR_HEAP=',
replace => $deployment_platform ? {
'aws' => false,
Expand Down
12 changes: 1 addition & 11 deletions deployment/puppet/refinery/manifests/ui.pp
Expand Up @@ -20,7 +20,7 @@
apt::source { 'nodejs':
ensure => 'present',
comment => 'Nodesource NodeJS repo',
location => 'https://deb.nodesource.com/node_6.x',
location => 'https://deb.nodesource.com/node_10.x',
release => 'trusty',
repos => 'main',
key => {
Expand All @@ -41,7 +41,6 @@
}
->
package {
'bower': ensure => '1.8.8', provider => 'npm';
'grunt-cli': ensure => '0.1.13', provider => 'npm';
}
->
Expand All @@ -61,15 +60,6 @@
group => $app_group,
}
->
exec { "bower_modules":
command => "/bin/rm -rf ${ui_app_root}/bower_components && /usr/bin/bower install --config.interactive=false",
cwd => $ui_app_root,
logoutput => on_failure,
user => $app_user,
group => $app_group,
environment => ["HOME=/home/${app_user}"],
}
->
exec { "grunt":
command => "/usr/bin/grunt make",
cwd => $ui_app_root,
Expand Down
30 changes: 0 additions & 30 deletions fabfile.py
Expand Up @@ -129,8 +129,6 @@ def update_refinery():
with cd(env.refinery_ui_dir):
run("npm prune --progress false")
run("npm update --progress false")
run("rm -rf bower_components")
run("bower update --config.interactive=false")
run("grunt make")
with prefix("workon {refinery_virtualenv_name}".format(**env)):
run("pip install -r {refinery_project_dir}/requirements.txt"
Expand All @@ -143,31 +141,3 @@ def update_refinery():
run("supervisorctl reload")
with cd(env.refinery_project_dir):
run("touch {refinery_app_dir}/config/wsgi_*.py".format(**env))


@task(alias="relaunch")
@with_settings(user=env.project_user)
def relaunch_refinery(dependencies=False, migrations=False):
    """Perform a relaunch of a Refinery Platform instance, including processing
    of grunt tasks
    dependencies: update bower and pip dependencies
    migrations: apply migrations
    """
    # NOTE: every run()/cd()/prefix() here executes on the remote host via
    # Fabric; statement order is significant (frontend build must precede
    # collectstatic, and services restart only after code/assets are updated).
    puts("Relaunching Refinery")
    with cd(os.path.join(env.refinery_app_dir, "ui")):
        if dependencies:
            # Refresh frontend packages before rebuilding UI assets
            run("bower update --config.interactive=false")
        run("grunt make")
    with prefix("workon {refinery_virtualenv_name}".format(**env)):
        if dependencies:
            run("pip install -r {refinery_project_dir}/requirements.txt"
                .format(**env))
        # Drop stale bytecode so removed/renamed modules can't be imported
        run("find . -name '*.pyc' -delete")
        if migrations:
            # --fake-initial: tolerate pre-existing tables from older deploys
            run("{refinery_app_dir}/manage.py migrate --noinput "
                "--fake-initial".format(**env))
        run("{refinery_app_dir}/manage.py collectstatic --noinput"
            .format(**env))
        run("supervisorctl restart all")
    with cd(env.refinery_project_dir):
        # Touching the WSGI files signals mod_wsgi to reload the application
        run("touch {refinery_app_dir}/config/wsgi_*.py".format(**env))
39 changes: 21 additions & 18 deletions refinery/analysis_manager/tasks.py
Expand Up @@ -419,9 +419,21 @@ def _galaxy_file_import(analysis_uuid, file_store_item_uuid, history_dict,
file_store_item_uuid, e)
run_analysis.update_state(state=celery.states.FAILURE)
return
file_store_url = file_store_item.get_datafile_url()
try:
file_url_absolute = core.utils.\
build_absolute_url(file_store_url)
except ValueError:
logger.error('{} is not a relative URL'.format(str(file_store_url)))
run_analysis.update_state(state=celery.states.FAILURE)
return
except RuntimeError:
logger.error('Could not build URL for {}'.format(str(file_store_url)))
run_analysis.update_state(state=celery.states.FAILURE)
return
library_dataset_dict = tool.upload_datafile_to_library_from_url(
library_dict["id"],
core.utils.get_absolute_url(file_store_item.get_datafile_url())
file_url_absolute
)
history_dataset_dict = tool.import_library_dataset_to_history(
history_dict["id"],
Expand Down Expand Up @@ -456,38 +468,32 @@ def _get_galaxy_download_task_ids(analysis):
"""Get file import tasks for Galaxy analysis results"""
logger.debug("Preparing to download analysis results from Galaxy")
task_id_list = []

# retrieving list of files to download for workflow
tool = _get_workflow_tool(analysis.uuid)
tool.create_analysis_output_node_connections()

galaxy_instance = analysis.workflow.workflow_engine.instance

try:
download_list = tool.get_galaxy_dataset_download_list()
except galaxy.client.ConnectionError as exc:
error_msg = (
error_msg = \
"Error downloading Galaxy history files for analysis '%s': %s"
)
logger.error(error_msg, analysis.name, exc.message)
analysis.set_status(Analysis.FAILURE_STATUS, error_msg)
analysis.galaxy_cleanup()
return task_id_list

# Iterating through files in current galaxy history
for results in download_list:
# download file if result state is "ok"
if results['state'] == 'ok':
file_extension = results["type"]
result_name = "{}.{}".format(results['name'], file_extension)

# size of file defined by galaxy
file_size = results['file_size']

file_store_item = FileStoreItem(source=urlparse.urljoin(
galaxy_instance.base_url,
"datasets/{}/display?to_ext=txt".format(results['dataset_id'])
))

# workaround to set the correct file type for zip archives of
# FastQC HTML reports produced by Galaxy dynamically
if file_extension == 'html':
Expand All @@ -504,16 +510,13 @@ def _get_galaxy_download_task_ids(analysis):
file_store_item.filetype = extension.filetype

file_store_item.save()

# adding history files to django model
temp_file = AnalysisResult(analysis_uuid=analysis.uuid,
file_store_uuid=file_store_item.uuid,
file_name=result_name,
file_type=file_extension)
temp_file.save()
analysis.results.add(temp_file)
analysis.save()

analysis.results.add(
AnalysisResult.objects.create(
analysis=analysis, file_store_uuid=file_store_item.uuid,
file_name=result_name, file_type=file_extension
)
)
# downloading analysis results into file_store
# only download files if size is greater than 1
if file_size > 0:
Expand Down
45 changes: 45 additions & 0 deletions refinery/analysis_manager/test_models.py
@@ -0,0 +1,45 @@
import uuid

from analysis_manager.models import AnalysisStatus
from analysis_manager.tests import AnalysisManagerTestBase


# models
class AnalysisStatusTests(AnalysisManagerTestBase):
    """Unit tests for the AnalysisStatus Galaxy state-transition helpers.

    Relies on ``self.analysis_status`` provided by ``AnalysisManagerTestBase``.
    """

    def test_set_galaxy_history_state_with_valid_state(self):
        self.analysis_status.set_galaxy_history_state(AnalysisStatus.PROGRESS)
        self.assertEqual(
            self.analysis_status.galaxy_history_state,
            AnalysisStatus.PROGRESS
        )

    def test_set_galaxy_history_state_with_invalid_state(self):
        with self.assertRaises(ValueError) as context:
            self.analysis_status.set_galaxy_history_state("NOT A VALID STATE")
        # BaseException.message was removed in Python 3; str(exception)
        # yields the same text on both Python 2 and 3 for a
        # single-argument exception, so use it instead of .message.
        self.assertEqual(
            str(context.exception),
            "Invalid Galaxy history state given"
        )

    def test_set_galaxy_import_state_with_valid_state(self):
        self.analysis_status.set_galaxy_import_state(AnalysisStatus.PROGRESS)
        self.assertEqual(
            self.analysis_status.galaxy_import_state,
            AnalysisStatus.PROGRESS
        )

    def test_set_galaxy_import_state_with_invalid_state(self):
        with self.assertRaises(ValueError) as context:
            self.analysis_status.set_galaxy_import_state("NOT A VALID STATE")
        # NOTE(review): the model appears to reuse the "history" wording for
        # the import-state error — confirm against AnalysisStatus if this
        # assertion ever fails.
        self.assertEqual(
            str(context.exception),
            "Invalid Galaxy history state given"
        )

    def test_set_galaxy_import_task_group_id(self):
        test_uuid = str(uuid.uuid4())
        self.analysis_status.set_galaxy_import_task_group_id(test_uuid)
        self.assertEqual(
            self.analysis_status.galaxy_import_task_group_id,
            test_uuid
        )

0 comments on commit 5a4630b

Please sign in to comment.