From 166745b5efd8d3cb9ffda79f3e77061283d98dd1 Mon Sep 17 00:00:00 2001 From: Andre Merzky Date: Mon, 1 Jun 2020 23:00:47 +0200 Subject: [PATCH 01/42] fix tags --- src/radical/entk/execman/rp/task_processor.py | 43 ++++++++++++++----- src/radical/entk/task/task.py | 28 +++++++++--- 2 files changed, 55 insertions(+), 16 deletions(-) diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index aa7722c28..b7e6aea6a 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -152,14 +152,36 @@ def resolve_arguments(args, placeholders): # ------------------------------------------------------------------------------ # -def resolve_tags(tag, parent_pipeline_name, placeholders): +def resolve_tags(tags, parent_pipeline_name, placeholders): + + # entk only handles co_location tags. If tags are given as strings, they + # get translated into `{'colocation': ''}`. Tags passed as dictionaies + # are checked to conform with above form. + # + # In both cases, the tag string is expanded with the given placeholders. + + if not tags: + return + + val = None + if isinstance(tags, str): + val = tags + + elif isinstance(tags, dict): + + if list(tags.keys()) != ['colocation']: + raise ValueError('unsupported task tags %s' % tags.keys()) + + val = tags['colocation'] + # Check self pipeline first for sname in placeholders[parent_pipeline_name]: for tname in placeholders[parent_pipeline_name][sname]: - if tag != tname: + if val != tname: continue - return placeholders[parent_pipeline_name][sname][tname]['rts_uid'] + return {'colocation': + placeholders[parent_pipeline_name][sname][tname]['rts_uid']} for pname in placeholders: @@ -169,12 +191,13 @@ def resolve_tags(tag, parent_pipeline_name, placeholders): for sname in placeholders[pname]: for tname in placeholders[pname][sname]: - if tag != tname: + if val != tname: continue - return placeholders[pname][sname][tname]['rts_uid'] + return {'colocation': + placeholders[pname][sname][tname]['rts_uid']} - raise ree.EnTKError(msg='Tag %s cannot be used as no previous task with ' - 'that name is found' % tag) + raise ree.EnTKError(msg='colocation tag %s cannot be used as no previous' + 'task with that name is found' % val) # ------------------------------------------------------------------------------ @@ -444,10 +467,10 @@ def create_cud_from_task(task, placeholders, prof=None): cud.sandbox = task.sandbox cud.post_exec = task.post_exec - if task.tag: + if task.tags: if task.parent_pipeline['name']: - cud.tag = resolve_tags( - tag=task.tag, + cud.tags = resolve_tags( + tags=task.tags, parent_pipeline_name=task.parent_pipeline['name'], placeholders=placeholders) diff --git a/src/radical/entk/task/task.py b/src/radical/entk/task/task.py index 2ac87ed48..d5b69bedd 100644 --- a/src/radical/entk/task/task.py +++ b/src/radical/entk/task/task.py @@ -466,12 +466,21 @@ def path(self): @property def tag(self): ''' - Set the tag for the task that can be used while scheduling by the RTS + DEPRECATED: use `self.tags` + ''' + + return self.tags + - :getter: return the tag of the current task + @property + def tags(self): ''' + Set the tags for the task that can be used while scheduling by the RTS - return self._tag + :getter: return the tags of the current task + ''' + + return self._tags @property @@ -826,11 +835,18 @@ def path(self, value): @tag.setter def tag(self, value): + # this method exists for backward compatibility + self.tags = value + + + @tags.setter + def tags(self, 
value): + if not isinstance(value, str): - raise ree.TypeError(entity='tag', expected_type=str, + raise ree.TypeError(entity='tags', expected_type=str, actual_type=type(value)) - self._tag = value + self._tags = value @parent_stage.setter @@ -889,7 +905,7 @@ def to_dict(self): 'exit_code' : self._exit_code, 'path' : self._path, - 'tag' : self._tag, + 'tags' : self._tags, 'parent_stage' : self._p_stage, 'parent_pipeline' : self._p_pipeline, From 2b5a7faf695e52978ef0286e8d2625c2e480104d Mon Sep 17 00:00:00 2001 From: Andre Merzky Date: Sat, 14 Nov 2020 22:57:42 +0100 Subject: [PATCH 02/42] merge from devel --- examples/misc/lfs_tagging_dd.py | 2 +- src/radical/entk/execman/rp/task_processor.py | 18 +-- src/radical/entk/task/task.py | 14 +- tests/test_component/test_task.py | 78 +++++------ tests/test_component/test_tproc_rp_2.py | 126 ++++++++++++++++++ .../tagging_lfs_rp_da_scheduler.py | 2 +- tests/test_integration/test_tproc_rp_2.py | 6 +- tests/test_issues/test_issue_324.py | 2 +- 8 files changed, 185 insertions(+), 63 deletions(-) create mode 100755 tests/test_component/test_tproc_rp_2.py diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index c2fab7dd7..d7bc2affe 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -57,7 +57,7 @@ def get_pipeline(shared_fs=False, size=1): t.cpu_reqs['threads_per_process'] = 24 t.cpu_reqs['thread_type'] = '' t.cpu_reqs['process_type'] = '' - t.tag = 't%s'%x + t.tags = 't%s'%x s2.add_tasks(t) diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index d2328cff6..e6db024f3 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -164,22 +164,12 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): if not tags: return - val = None - if isinstance(tags, str): - val = tags - - elif isinstance(tags, dict): - - if list(tags.keys()) != ['colocation']: - raise ValueError('unsupported task tags %s' % tags.keys()) - - val = tags['colocation'] - + colo_tag = tags['colocation'] # Check self pipeline first for sname in placeholders[parent_pipeline_name]: for tname in placeholders[parent_pipeline_name][sname]: - if val != tname: + if colo_tag != tname: continue return {'colocation': placeholders[parent_pipeline_name][sname][tname]['rts_uid']} @@ -192,13 +182,13 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): for sname in placeholders[pname]: for tname in placeholders[pname][sname]: - if val != tname: + if colo_tag != tname: continue return {'colocation': placeholders[pname][sname][tname]['rts_uid']} raise ree.EnTKError(msg='colocation tag %s cannot be used as no previous' - 'task with that name is found' % val) + 'task with that name is found' % colo_tag) # ------------------------------------------------------------------------------ diff --git a/src/radical/entk/task/task.py b/src/radical/entk/task/task.py index 63baeb2fc..075dadce8 100644 --- a/src/radical/entk/task/task.py +++ b/src/radical/entk/task/task.py @@ -908,17 +908,23 @@ def path(self, value): def tag(self, value): # this method exists for backward compatibility - self.tags = value + if not isinstance(value, str): + raise ree.TypeError(entity='tags', expected_type=str, + actual_type=type(value)) + self.tags = {'colocate': value} @tags.setter def tags(self, value): - if not isinstance(value, str): - raise ree.TypeError(entity='tags', expected_type=str, + if not isinstance(value, dict): + raise 
ree.TypeError(entity='tags', expected_type=dict, actual_type=type(value)) - self._tags = value + if list(tags.keys()) != ['colocation']: + raise ree.TypeError('unsupported tags %s' % tags.keys()) + + self.tags = value @parent_stage.setter diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 719e13b57..f854c0f79 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -29,7 +29,7 @@ class TestTask(TestCase): @mock.patch('radical.utils.generate_id', return_value='test.0000') def test_task_initialization(self, mocked_generate_id): ''' - **Purpose**: Test if the task attributes have, thus expect, the correct + **Purpose**: Test if the task attributes have, thus expect, the correct data types ''' @@ -84,35 +84,35 @@ def test_cpu_reqs(self, mocked_generate_id, mocked_init): 'process_type' : None, 'threads_per_process' : 1, 'thread_type' : None} - cpu_reqs = {'processes' : 2, - 'process_type' : None, - 'threads_per_process' : 1, + cpu_reqs = {'processes' : 2, + 'process_type' : None, + 'threads_per_process' : 1, 'thread_type' : 'OpenMP'} - task.cpu_reqs = {'processes' : 2, - 'process_type' : None, - 'threads_per_process' : 1, + task.cpu_reqs = {'processes' : 2, + 'process_type' : None, + 'threads_per_process' : 1, 'thread_type' : 'OpenMP'} self.assertEqual(task._cpu_reqs, cpu_reqs) - self.assertEqual(task.cpu_reqs, {'cpu_processes' : 2, - 'cpu_process_type' : None, - 'cpu_threads' : 1, + self.assertEqual(task.cpu_reqs, {'cpu_processes' : 2, + 'cpu_process_type' : None, + 'cpu_threads' : 1, 'cpu_thread_type' : 'OpenMP'}) with self.assertRaises(ree.MissingError): - task.cpu_reqs = {'cpu_processes' : 2, - 'cpu_process_type' : None, + task.cpu_reqs = {'cpu_processes' : 2, + 'cpu_process_type' : None, 'cpu_thread_type' : 'OpenMP'} with self.assertRaises(ree.TypeError): - task.cpu_reqs = {'cpu_processes' : 'a', - 'cpu_process_type' : None, + task.cpu_reqs = {'cpu_processes' : 'a', + 'cpu_process_type' : None, 'cpu_threads' : 1, 'cpu_thread_type' : 'OpenMP'} with self.assertRaises(ree.TypeError): - task.cpu_reqs = {'cpu_processes' : 1, - 'cpu_process_type' : None, + task.cpu_reqs = {'cpu_processes' : 1, + 'cpu_process_type' : None, 'cpu_threads' : 'a', 'cpu_thread_type' : 'OpenMP'} @@ -120,14 +120,14 @@ def test_cpu_reqs(self, mocked_generate_id, mocked_init): task.cpu_reqs = list() with self.assertRaises(ree.ValueError): - task.cpu_reqs = {'cpu_processes' : 1, - 'cpu_process_type' : None, + task.cpu_reqs = {'cpu_processes' : 1, + 'cpu_process_type' : None, 'cpu_threads' : 1, 'cpu_thread_type' : 'MPI'} with self.assertRaises(ree.ValueError): - task.cpu_reqs = {'cpu_processes' : 1, - 'cpu_process_type' : 'test', + task.cpu_reqs = {'cpu_processes' : 1, + 'cpu_process_type' : 'test', 'cpu_threads' : 1, 'cpu_thread_type' : 'OpenMP'} @@ -142,50 +142,50 @@ def test_gpu_reqs(self, mocked_generate_id, mocked_init): 'process_type' : None, 'threads_per_process' : 1, 'thread_type' : None} - gpu_reqs = {'processes' : 2, - 'process_type' : None, - 'threads_per_process' : 1, + gpu_reqs = {'processes' : 2, + 'process_type' : None, + 'threads_per_process' : 1, 'thread_type' : 'OpenMP'} - task.gpu_reqs = {'processes' : 2, - 'process_type' : None, - 'threads_per_process' : 1, + task.gpu_reqs = {'processes' : 2, + 'process_type' : None, + 'threads_per_process' : 1, 'thread_type' : 'OpenMP'} self.assertEqual(task._gpu_reqs, gpu_reqs) - self.assertEqual(task.gpu_reqs, {'gpu_processes' : 2, - 'gpu_process_type' : None, - 'gpu_threads' : 1, + 
self.assertEqual(task.gpu_reqs, {'gpu_processes' : 2, + 'gpu_process_type' : None, + 'gpu_threads' : 1, 'gpu_thread_type' : 'OpenMP'}) with self.assertRaises(ree.TypeError): task.gpu_reqs = list() with self.assertRaises(ree.MissingError): - task.gpu_reqs = {'gpu_processes' : 2, - 'gpu_process_type' : None, + task.gpu_reqs = {'gpu_processes' : 2, + 'gpu_process_type' : None, 'gpu_thread_type' : 'OpenMP'} with self.assertRaises(ree.TypeError): - task.gpu_reqs = {'gpu_processes' : 'a', - 'gpu_process_type' : None, + task.gpu_reqs = {'gpu_processes' : 'a', + 'gpu_process_type' : None, 'gpu_threads' : 1, 'gpu_thread_type' : 'OpenMP'} with self.assertRaises(ree.TypeError): - task.gpu_reqs = {'gpu_processes' : 1, - 'gpu_process_type' : None, + task.gpu_reqs = {'gpu_processes' : 1, + 'gpu_process_type' : None, 'gpu_threads' : 'a', 'gpu_thread_type' : 'OpenMP'} with self.assertRaises(ree.ValueError): - task.gpu_reqs = {'gpu_processes' : 1, - 'gpu_process_type' : None, + task.gpu_reqs = {'gpu_processes' : 1, + 'gpu_process_type' : None, 'gpu_threads' : 1, 'gpu_thread_type' : 'MPI'} with self.assertRaises(ree.ValueError): - task.gpu_reqs = {'gpu_processes' : 1, - 'gpu_process_type' : 'test', + task.gpu_reqs = {'gpu_processes' : 1, + 'gpu_process_type' : 'test', 'gpu_threads' : 1, 'gpu_thread_type' : 'OpenMP'} diff --git a/tests/test_component/test_tproc_rp_2.py b/tests/test_component/test_tproc_rp_2.py new file mode 100755 index 000000000..89cf094a1 --- /dev/null +++ b/tests/test_component/test_tproc_rp_2.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python + +import os +import pytest + +import radical.pilot as rp + +import radical.entk.exceptions as rse + +from radical.entk.execman.rp.task_processor import resolve_tags +from radical.entk.execman.rp.task_processor import resolve_arguments +from radical.entk.execman.rp.task_processor import create_task_from_cu + + +# MLAB = 'mongodb://entk:entk123@ds143511.mlab.com:43511/entk_0_7_4_release' +MLAB = os.environ.get('RADICAL_PILOT_DBURL') + + +# ------------------------------------------------------------------------------ +# +def test_create_task_from_cu(): + """ + **Purpose**: Test if the 'create_task_from_cu' function generates a Task + with the correct uid, parent_stage and parent_pipeline from + a RP ComputeUnit + """ + + session = rp.Session(dburl=MLAB) + umgr = rp.UnitManager(session=session) + cud = rp.ComputeUnitDescription() + cud.name = 'uid, name, parent_stage_uid, parent_stage_name, ' \ + 'parent_pipeline_uid, parent_pipeline_name' + cud.executable = '/bin/echo' + + cu = rp.ComputeUnit(umgr, cud) + t = create_task_from_cu(cu) + + assert t.uid == 'uid' + assert t.name == 'name' + assert t.parent_stage['uid'] == 'parent_stage_uid' + assert t.parent_stage['name'] == 'parent_stage_name' + assert t.parent_pipeline['uid'] == 'parent_pipeline_uid' + assert t.parent_pipeline['name'] == 'parent_pipeline_name' + + session.close() + + +# ------------------------------------------------------------------------------ +# +def test_resolve_args(): + + pipeline_name = 'p1' + stage_name = 's1' + t1_name = 't1' + t2_name = 't2' + + placeholders = { + pipeline_name: { + stage_name: { + t1_name: { + 'path' : '/home/vivek/t1', + 'rts_uid': 'unit.0002' + }, + t2_name: { + 'path' : '/home/vivek/t2', + 'rts_uid': 'unit.0003' + } + } + } + } + + arguments = ['$SHARED', + '$Pipeline_%s_Stage_%s_Task_%s' % (pipeline_name, stage_name, t1_name), + '$Pipeline_%s_Stage_%s_Task_%s' % (pipeline_name, stage_name, t2_name), + '$NODE_LFS_PATH/test.txt'] + + assert 
resolve_arguments(arguments, placeholders) == \ + ['$RP_PILOT_STAGING', '/home/vivek/t1', + '/home/vivek/t2', '$NODE_LFS_PATH/test.txt'] + + +# ------------------------------------------------------------------------------ +# +def test_resolve_tags(): + + pipeline_name = 'p1' + stage_name = 's1' + t1_name = 't1' + t2_name = 't2' + + placeholders = { + pipeline_name: { + stage_name: { + t1_name: { + 'path' : '/home/vivek/t1', + 'rts_uid': 'unit.0002' + }, + t2_name: { + 'path' : '/home/vivek/t2', + 'rts_uid': 'unit.0003' + } + } + } + } + + assert resolve_tags(tag=t1_name, + parent_pipeline_name=pipeline_name, + placeholders=placeholders) == {'colocate': 'unit.0002}' + + with pytest.raises(rse.EnTKError): + resolve_tags(tag='t3', + parent_pipeline_name=pipeline_name, + placeholders=placeholders) == {'colocate': 'unit.0002}' + + +# ------------------------------------------------------------------------------ +# +if __name__ == '__main__': + + test_create_task_from_cu() + test_resolve_args() + test_resolve_tags() + + +# ------------------------------------------------------------------------------ + diff --git a/tests/test_integration/tagging_lfs_rp_da_scheduler.py b/tests/test_integration/tagging_lfs_rp_da_scheduler.py index fb71471dd..058063799 100644 --- a/tests/test_integration/tagging_lfs_rp_da_scheduler.py +++ b/tests/test_integration/tagging_lfs_rp_da_scheduler.py @@ -49,7 +49,7 @@ def test_rp_da_scheduler_bw(): t.cpu_reqs['thread_type'] = '' t.cpu_reqs['process_type'] = '' t.download_output_data = ['hostname.txt > s2_t%s_hostname.txt'%(x)] - t.tag = 't%s'%x + t.tags = 't%s'%x s2.add_tasks(t) diff --git a/tests/test_integration/test_tproc_rp_2.py b/tests/test_integration/test_tproc_rp_2.py index 0ea6d87c8..42f0a644b 100644 --- a/tests/test_integration/test_tproc_rp_2.py +++ b/tests/test_integration/test_tproc_rp_2.py @@ -98,12 +98,12 @@ def test_resolve_tags(): } } - resolved = resolve_tags(tag=t1_name, parent_pipeline_name=pipeline_name, + resolved = resolve_tags(tags=t1_name, parent_pipeline_name=pipeline_name, placeholders=placeholders) - assert resolved == 'unit.0002' + assert resolved == {'colocate': 'unit.0002}' with pytest.raises(EnTKError): - resolve_tags(tag='t3', parent_pipeline_name=pipeline_name, + resolve_tags(tags='t3', parent_pipeline_name=pipeline_name, placeholders=placeholders) diff --git a/tests/test_issues/test_issue_324.py b/tests/test_issues/test_issue_324.py index de06cfd85..bd50c05e7 100644 --- a/tests/test_issues/test_issue_324.py +++ b/tests/test_issues/test_issue_324.py @@ -36,7 +36,7 @@ def test_issue_271(): 'stderr': 'err', 'exit_code': 555, 'path': 'here/it/is', - 'tag': 'task.0010', + 'tags': {'colocate': 'task.0010'}, 'parent_stage': {'uid': 's1', 'name': 'stage1'}, 'parent_pipeline': {'uid': 'p1', 'name': 'pipe1'}} From 7bc80d03baa592d1138db1915ddd6e0d6863fa0f Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 16 Nov 2020 10:35:09 -0500 Subject: [PATCH 03/42] Start supporting tags again --- src/radical/entk/task/task.py | 14 +++++---- tests/test_component/test_task.py | 52 +++++++++++++++---------------- 2 files changed, 34 insertions(+), 32 deletions(-) diff --git a/src/radical/entk/task/task.py b/src/radical/entk/task/task.py index 783a3d6c2..808488ca7 100644 --- a/src/radical/entk/task/task.py +++ b/src/radical/entk/task/task.py @@ -69,7 +69,7 @@ def __init__(self, from_dict=None): # to cuds and cus to tasks self._path = None self._exit_code = None - self._tag = None + self._tags = None # Keep track of res attained self._state_history = 
[res.INITIAL] @@ -513,7 +513,7 @@ def tag(self): DEPRECATED: use `self.tags` ''' - return self.tags + return self._tags @property @@ -914,7 +914,7 @@ def tag(self, value): if not isinstance(value, str): raise ree.TypeError(entity='tags', expected_type=str, actual_type=type(value)) - self.tags = {'colocate': value} + self._tags = {'colocate': value} @tags.setter @@ -924,10 +924,12 @@ def tags(self, value): raise ree.TypeError(entity='tags', expected_type=dict, actual_type=type(value)) - if list(tags.keys()) != ['colocation']: - raise ree.TypeError('unsupported tags %s' % tags.keys()) + if list(value.keys()) != ['colocation']: + raise ree.TypeError(expected_type=dict, + actual_type=type(value.get('colocation')), + entity='colocation') - self.tags = value + self._tags = value @parent_stage.setter diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index f854c0f79..264928e1e 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -36,13 +36,13 @@ def test_task_initialization(self, mocked_generate_id): t = Task() self.assertEqual(t._uid, 'test.0000') - self.assertEqual(t.name, '') - self.assertEqual(t.state, states.INITIAL) - self.assertEqual(t.state_history, [states.INITIAL]) - self.assertEqual(t.executable, '') - self.assertIsInstance(t.arguments, list) - self.assertIsInstance(t.pre_exec, list) - self.assertIsInstance(t.post_exec, list) + self.assertEqual(t._name, '') + self.assertEqual(t._state, states.INITIAL) + self.assertEqual(t._state_history, [states.INITIAL]) + self.assertEqual(t._executable, '') + self.assertIsInstance(t._arguments, list) + self.assertIsInstance(t._pre_exec, list) + self.assertIsInstance(t._post_exec, list) self.assertEqual(t._cpu_reqs['processes'], 1) self.assertIsNone(t._cpu_reqs['process_type']) @@ -53,25 +53,25 @@ def test_task_initialization(self, mocked_generate_id): self.assertEqual(t._gpu_reqs['threads_per_process'], 0) self.assertIsNone(t._gpu_reqs['thread_type']) - self.assertEqual(t.lfs_per_process, 0) - self.assertEqual(t.sandbox, '') - self.assertIsInstance(t.upload_input_data, list) - self.assertIsInstance(t.copy_input_data, list) - self.assertIsInstance(t.link_input_data, list) - self.assertIsInstance(t.move_input_data, list) - self.assertIsInstance(t.copy_output_data, list) - self.assertIsInstance(t.link_output_data, list) - self.assertIsInstance(t.move_output_data, list) - self.assertIsInstance(t.download_output_data, list) - self.assertEqual(t.stdout, '') - self.assertEqual(t.stderr, '') - self.assertIsNone(t.exit_code) - self.assertIsNone(t.tag) - self.assertIsNone(t.path) - self.assertIsNone(t.parent_pipeline['uid']) - self.assertIsNone(t.parent_pipeline['name']) - self.assertIsNone(t.parent_stage['name']) - self.assertIsNone(t.parent_stage['uid']) + self.assertEqual(t._lfs_per_process, 0) + self.assertEqual(t._sandbox, '') + self.assertIsInstance(t._upload_input_data, list) + self.assertIsInstance(t._copy_input_data, list) + self.assertIsInstance(t._link_input_data, list) + self.assertIsInstance(t._move_input_data, list) + self.assertIsInstance(t._copy_output_data, list) + self.assertIsInstance(t._link_output_data, list) + self.assertIsInstance(t._move_output_data, list) + self.assertIsInstance(t._download_output_data, list) + self.assertEqual(t._stdout, '') + self.assertEqual(t._stderr, '') + self.assertIsNone(t._exit_code) + self.assertIsNone(t._tag) + self.assertIsNone(t._path) + self.assertIsNone(t._p_pipeline['uid']) + self.assertIsNone(t._p_pipeline['name']) + 
self.assertIsNone(t._p_stage['name']) + self.assertIsNone(t._p_stage['uid']) # -------------------------------------------------------------------------- From bb48531ae9d72020bbee20a5b5de339d21a97d0b Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 16 Nov 2020 13:58:51 -0500 Subject: [PATCH 04/42] Task ready. --- src/radical/entk/task/task.py | 8 +- tests/test_component/test_task.py | 134 +++++++++++++++++++++++- tests/test_component/test_tproc_rp_2.py | 4 +- 3 files changed, 141 insertions(+), 5 deletions(-) diff --git a/src/radical/entk/task/task.py b/src/radical/entk/task/task.py index 808488ca7..d5f69a52c 100644 --- a/src/radical/entk/task/task.py +++ b/src/radical/entk/task/task.py @@ -1,7 +1,8 @@ - __copyright__ = 'Copyright 2014-2020, http://radical.rutgers.edu' __license__ = 'MIT' +import warnings + import radical.utils as ru from .. import exceptions as ree @@ -743,7 +744,6 @@ def gpu_reqs(self, value): 'gpu_process_type', 'gpu_thread_type']) if set(value.keys()).issubset(depr_expected_keys): - import warnings warnings.simplefilter("once") warnings.warn("GPU requirements keys are renamed using 'gpu_'" + "as a prefix for all keys.",DeprecationWarning) @@ -910,6 +910,10 @@ def path(self, value): @tag.setter def tag(self, value): + warnings.simplefilter("once") + warnings.warn("Attribute tag is depcrecated. Please use tags", + DeprecationWarning) + # this method exists for backward compatibility if not isinstance(value, str): raise ree.TypeError(entity='tags', expected_type=str, diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 264928e1e..6d991c83c 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -66,7 +66,7 @@ def test_task_initialization(self, mocked_generate_id): self.assertEqual(t._stdout, '') self.assertEqual(t._stderr, '') self.assertIsNone(t._exit_code) - self.assertIsNone(t._tag) + self.assertIsNone(t._tags) self.assertIsNone(t._path) self.assertIsNone(t._p_pipeline['uid']) self.assertIsNone(t._p_pipeline['name']) @@ -268,3 +268,135 @@ def test_dict_to_task(self): d = 'test' with pytest.raises(ree.TypeError): t = Task(from_dict=d) + + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_name(self, mocked_init): + + task = Task() + task._uid = 'test' + task._name = 'test_name' + self.assertEqual(task.name, 'test_name') + + task.name = 'task.0000' + self.assertEqual(task._name, 'task.0000') + + with self.assertRaises(ree.TypeError): + task.name = 0 + + with self.assertRaises(ree.ValueError): + task.name = 'task,0000' + + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_state_history(self, mocked_init): + + task = Task() + task._uid = 'test' + task._state_history = [states.INITIAL] + self.assertEqual(task.state_history, [states.INITIAL]) + + task.state_history = [states.SCHEDULED] + self.assertEqual(task._state_history, [states.SCHEDULED]) + + with self.assertRaises(ree.TypeError): + task.state_history = states.SCHEDULING + + with self.assertRaises(ree.ValueError): + task.state_history = ['EXECUTING'] + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_pre_exec(self, mocked_init): + + task = Task() + task._pre_exec = ['module load mymodule'] + self.assertEqual(task.pre_exec, ['module load 
mymodule']) + + task.pre_exec = ['module load mymodule2'] + self.assertEqual(task._pre_exec, ['module load mymodule2']) + with self.assertRaises(ree.TypeError): + task.pre_exec = 'module load mymodule' + + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_arguments(self, mocked_init): + + task = Task() + task._arguments = ['module load mymodule'] + self.assertEqual(task.arguments, ['module load mymodule']) + + task.arguments = ['module load mymodule2'] + self.assertEqual(task._arguments, ['module load mymodule2']) + with self.assertRaises(ree.TypeError): + task.arguments = 'module load mymodule' + + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_sandbox(self, mocked_init): + + task = Task() + task._sandbox = '/path/to/a/sandbox' + self.assertEqual(task.sandbox, '/path/to/a/sandbox') + + task.sandbox = '/path_to_a_sandbox' + self.assertEqual(task._sandbox, '/path_to_a_sandbox') + with self.assertRaises(ree.TypeError): + task.sandbox = [] + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_post_exec(self, mocked_init): + + task = Task() + task._post_exec = ['module load mymodule'] + self.assertEqual(task.post_exec, ['module load mymodule']) + + task.post_exec = ['module load mymodule2'] + self.assertEqual(task._post_exec, ['module load mymodule2']) + with self.assertRaises(ree.TypeError): + task.post_exec = 'module load mymodule' + + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_tag(self, mocked_init): + + task = Task() + task._tags = {'colocate':'tasks'} + self.assertEqual(task.tag, {'colocate':'tasks'}) + + task.tag = 'task' + self.assertEqual(task._tags, {'colocate':'task'}) + with self.assertRaises(ree.TypeError): + task.tag = {'colocate':'tasks'} + + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) + def test_tags(self, mocked_init): + + task = Task() + task._tags = {'colocate':'tasks'} + self.assertEqual(task.tag, {'colocate':'tasks'}) + + task.tags = {'colocation':'task'} + self.assertEqual(task._tags, {'colocation':'task'}) + + with self.assertRaises(ree.TypeError): + task.tags = 'task' + + with self.assertRaises(ree.TypeError): + task.tags = {'key':'task'} diff --git a/tests/test_component/test_tproc_rp_2.py b/tests/test_component/test_tproc_rp_2.py index 89cf094a1..12892f22a 100755 --- a/tests/test_component/test_tproc_rp_2.py +++ b/tests/test_component/test_tproc_rp_2.py @@ -105,12 +105,12 @@ def test_resolve_tags(): assert resolve_tags(tag=t1_name, parent_pipeline_name=pipeline_name, - placeholders=placeholders) == {'colocate': 'unit.0002}' + placeholders=placeholders) == {'colocate': 'unit.0002'} with pytest.raises(rse.EnTKError): resolve_tags(tag='t3', parent_pipeline_name=pipeline_name, - placeholders=placeholders) == {'colocate': 'unit.0002}' + placeholders=placeholders) # ------------------------------------------------------------------------------ From 763fef6a51c077d42cded987938fdbeaf057f6d7 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 18 Nov 2020 09:45:35 -0500 Subject: [PATCH 05/42] Tags introduced in task proc and task description --- 
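For reference, a minimal sketch of the lookup this patch converges on: tags are a dict with a single 'colocate' key, and resolve_tags() maps the tagged task name to the RTS uid stored in the placeholder dictionary built by the task manager. `lookup_colocate` below is a hypothetical stand-in (it raises ValueError where the real code raises EnTKError), and the placeholder layout follows tests/test_component/test_tproc_rp.py:

    placeholders = {
        'p1': {
            's1': {
                't1': {'path': '/home/vivek/t1', 'rts_uid': 'unit.0002'},
                't2': {'path': '/home/vivek/t2', 'rts_uid': 'unit.0003'},
            }
        }
    }

    def lookup_colocate(tags, parent_pipeline_name, placeholders):
        # hypothetical helper mirroring resolve_tags(): search the task's own
        # pipeline first, then every other pipeline, and return the RTS uid
        # recorded for the tagged task name
        colo_tag  = tags['colocate']
        pipelines = [parent_pipeline_name] + \
                    [p for p in placeholders if p != parent_pipeline_name]
        for pname in pipelines:
            for sname in placeholders[pname]:
                if colo_tag in placeholders[pname][sname]:
                    return {'colocate':
                            placeholders[pname][sname][colo_tag]['rts_uid']}
        raise ValueError('colocation tag %s not found' % colo_tag)

    assert lookup_colocate({'colocate': 't1'}, 'p1', placeholders) == \
           {'colocate': 'unit.0002'}
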
src/radical/entk/execman/rp/task_processor.py | 6 +++--- src/radical/entk/task/task.py | 6 +++--- tests/test_component/test_task.py | 4 ++-- tests/test_component/test_tproc_rp.py | 11 +++++++---- 4 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index e6db024f3..9806afc13 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -164,14 +164,14 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): if not tags: return - colo_tag = tags['colocation'] + colo_tag = tags['colocate'] # Check self pipeline first for sname in placeholders[parent_pipeline_name]: for tname in placeholders[parent_pipeline_name][sname]: if colo_tag != tname: continue - return {'colocation': + return {'colocate': placeholders[parent_pipeline_name][sname][tname]['rts_uid']} for pname in placeholders: @@ -184,7 +184,7 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): for tname in placeholders[pname][sname]: if colo_tag != tname: continue - return {'colocation': + return {'colocate': placeholders[pname][sname][tname]['rts_uid']} raise ree.EnTKError(msg='colocation tag %s cannot be used as no previous' diff --git a/src/radical/entk/task/task.py b/src/radical/entk/task/task.py index d5f69a52c..9590f8309 100644 --- a/src/radical/entk/task/task.py +++ b/src/radical/entk/task/task.py @@ -928,10 +928,10 @@ def tags(self, value): raise ree.TypeError(entity='tags', expected_type=dict, actual_type=type(value)) - if list(value.keys()) != ['colocation']: + if list(value.keys()) != ['colocate']: raise ree.TypeError(expected_type=dict, - actual_type=type(value.get('colocation')), - entity='colocation') + actual_type=type(value.get('colocate')), + entity='colocate') self._tags = value diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 6d991c83c..623854934 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -392,8 +392,8 @@ def test_tags(self, mocked_init): task._tags = {'colocate':'tasks'} self.assertEqual(task.tag, {'colocate':'tasks'}) - task.tags = {'colocation':'task'} - self.assertEqual(task._tags, {'colocation':'task'}) + task.tags = {'colocate':'task'} + self.assertEqual(task._tags, {'colocate':'task'}) with self.assertRaises(ree.TypeError): task.tags = 'task' diff --git a/tests/test_component/test_tproc_rp.py b/tests/test_component/test_tproc_rp.py index b47b5cf83..3d0fdabb9 100755 --- a/tests/test_component/test_tproc_rp.py +++ b/tests/test_component/test_tproc_rp.py @@ -116,12 +116,15 @@ def test_resolve_tags(self, mocked_Logger): } } - self.assertEqual(resolve_tags(tag=t1_name, + + + self.assertEqual(resolve_tags(tags={'colocation': 't1'}, parent_pipeline_name=pipeline_name, - placeholders=placeholders), 'unit.0002') + placeholders=placeholders), + {'colocation':'unit.0002'}) with self.assertRaises(ree.EnTKError): - resolve_tags(tag='t3', parent_pipeline_name=pipeline_name, + resolve_tags(tags={'colocation': 't3'}, parent_pipeline_name=pipeline_name, placeholders=placeholders) # ------------------------------------------------------------------------------ @@ -177,7 +180,7 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, 'gpu_threads': 6, 'gpu_process_type': 'POSIX', 'gpu_thread_type': None} - task.tag = None + task.tags = None task.lfs_per_process = 235 task.stderr = 'stderr' From 1e012a802709dec10bbeea665e9c498db0edb2d0 Mon Sep 17 00:00:00 2001 
From: Andre Merzky Date: Wed, 6 Jan 2021 10:17:08 +0100 Subject: [PATCH 06/42] use "tags" in `to_dict()` --- src/radical/entk/task.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index dfea2222e..6a528a5bb 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -929,7 +929,7 @@ def tag(self, value): warnings.simplefilter("once") warnings.warn("Attribute tag is depcrecated. Please use tags", - DeprecationWarning) + DeprecationWarning) # this method exists for backward compatibility if not isinstance(value, str): @@ -1009,7 +1009,7 @@ def to_dict(self): 'exit_code' : self._exit_code, 'path' : self._path, - 'tag' : self._tag, + 'tags' : self._tags, 'rts_uid' : self._rts_uid, 'parent_stage' : self._p_stage, From 866ecb999cffea78e4dab19c03c70d977b52758f Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 6 Jan 2021 18:03:01 -0500 Subject: [PATCH 07/42] Tag key is back. --- src/radical/entk/execman/rp/task_processor.py | 18 ++++++++++++------ src/radical/entk/task.py | 13 +++++++------ 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index a795cd1d4..0cb599e8b 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -456,12 +456,18 @@ def create_cud_from_task(task, placeholders, prof=None): cud.sandbox = task.sandbox cud.post_exec = task.post_exec - if task.tags: - if task.parent_pipeline['name']: - cud.tags = resolve_tags( - tags=task.tags, - parent_pipeline_name=task.parent_pipeline['name'], - placeholders=placeholders) + # FIXME: Use tags properly. + # if task.tags: + # if task.parent_pipeline['name']: + # cud.tags = resolve_tags( + # tags=task.tags, + # parent_pipeline_name=task.parent_pipeline['name'], + # placeholders=placeholders) + + if task.tag: + cud.tag = task.tag + else: + cud.tag = task.name cud.cpu_processes = task.cpu_reqs['cpu_processes'] cud.cpu_threads = task.cpu_reqs['cpu_threads'] diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 00c717776..469102102 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -75,6 +75,7 @@ def __init__(self, from_dict=None): self._path = None self._exit_code = None self._tags = None + self._tag = None # Keep track of res attained self._state_history = [res.INITIAL] @@ -522,10 +523,10 @@ def path(self): @property def tag(self): ''' - DEPRECATED: use `self.tags` + WARNING: It will be deprecated. ''' - return self._tags + return self._tag @property @@ -939,14 +940,13 @@ def path(self, value): def tag(self, value): warnings.simplefilter("once") - warnings.warn("Attribute tag is depcrecated. 
Please use tags", - DeprecationWarning) + warnings.warn("Attribute tag will be depcrecated", DeprecationWarning) # this method exists for backward compatibility if not isinstance(value, str): - raise ree.TypeError(entity='tags', expected_type=str, + raise ree.TypeError(entity='tag', expected_type=str, actual_type=type(value)) - self._tags = {'colocate': value} + self._tag = value @tags.setter @@ -1021,6 +1021,7 @@ def to_dict(self): 'exit_code' : self._exit_code, 'path' : self._path, 'tags' : self._tags, + 'tag' : self._tag, 'rts_uid' : self._rts_uid, 'parent_stage' : self._p_stage, From ba3247da2a76789c28148ceaf70142d6702772fd Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Thu, 7 Jan 2021 11:43:42 -0500 Subject: [PATCH 08/42] Test update --- tests/test_component/test_task.py | 15 +++++++++++---- tests/test_component/test_tproc_rp.py | 16 ++++++++++------ 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 66fbf14cf..1f608b1c4 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -67,6 +67,7 @@ def test_task_initialization(self, mocked_generate_id): self.assertEqual(t._stderr, '') self.assertIsNone(t._exit_code) self.assertIsNone(t._tags) + self.assertIsNone(t._tag) self.assertIsNone(t._path) self.assertIsNone(t._p_pipeline['uid']) self.assertIsNone(t._p_pipeline['name']) @@ -386,11 +387,11 @@ def test_post_exec(self, mocked_init): def test_tag(self, mocked_init): task = Task() - task._tags = {'colocate':'tasks'} + task._tag = {'colocate':'tasks'} self.assertEqual(task.tag, {'colocate':'tasks'}) - task.tag = 'task' - self.assertEqual(task._tags, {'colocate':'task'}) + task.tag = 'task.tag' + self.assertEqual(task._tag, 'task.tag') with self.assertRaises(ree.TypeError): task.tag = {'colocate':'tasks'} @@ -402,7 +403,7 @@ def test_tags(self, mocked_init): task = Task() task._tags = {'colocate':'tasks'} - self.assertEqual(task.tag, {'colocate':'tasks'}) + self.assertEqual(task.tags, {'colocate':'tasks'}) task.tags = {'colocate':'task'} self.assertEqual(task._tags, {'colocate':'task'}) @@ -412,6 +413,10 @@ def test_tags(self, mocked_init): with self.assertRaises(ree.TypeError): task.tags = {'key':'task'} + + # -------------------------------------------------------------------------- + # + @mock.patch.object(Task, '__init__', return_value=None) def test_task_to_dict(self, mocked_init): t = Task() @@ -449,6 +454,7 @@ def test_task_to_dict(self, mocked_init): t._stderr = 'Hello World' t._exit_code = 0 t._tag = None + t._tags = None t._path = 'some_path' t._p_pipeline = dict() t._p_pipeline['uid'] = 'pipe.0000' @@ -488,6 +494,7 @@ def test_task_to_dict(self, mocked_init): 'exit_code': 0, 'path': 'some_path', 'tag': None, + 'tags': None, 'rts_uid': 'unit.0000', 'parent_stage': {'name': 'stage.0000', 'uid': 'stage.0000'}, diff --git a/tests/test_component/test_tproc_rp.py b/tests/test_component/test_tproc_rp.py index 6386b15d3..efb63008c 100755 --- a/tests/test_component/test_tproc_rp.py +++ b/tests/test_component/test_tproc_rp.py @@ -118,13 +118,13 @@ def test_resolve_tags(self, mocked_Logger): - self.assertEqual(resolve_tags(tags={'colocation': 't1'}, + self.assertEqual(resolve_tags(tags={'colocate': 't1'}, parent_pipeline_name=pipeline_name, placeholders=placeholders), - {'colocation':'unit.0002'}) + {'colocate':'unit.0002'}) with self.assertRaises(ree.EnTKError): - resolve_tags(tags={'colocation': 't3'}, parent_pipeline_name=pipeline_name, + 
resolve_tags(tags={'colocate': 't3'}, parent_pipeline_name=pipeline_name, placeholders=placeholders) # ------------------------------------------------------------------------------ @@ -162,7 +162,7 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, task = mock.Mock() task.uid = 'task.0000' - task.name = 'task.0000' + task.name = 'task.name' task.parent_stage = {'uid' : 'stage.0000', 'name' : 'stage.0000'} task.parent_pipeline = {'uid' : 'pipe.0000', @@ -180,14 +180,14 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, 'gpu_threads': 6, 'gpu_process_type': 'POSIX', 'gpu_thread_type': None} - task.tags = None + task.tag = None task.lfs_per_process = 235 task.stderr = 'stderr' task.stdout = 'stdout' test_cud = create_cud_from_task(task, None) - self.assertEqual(test_cud.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') + self.assertEqual(test_cud.name, 'task.0000,task.name,stage.0000,stage.0000,pipe.0000,pipe.0000') self.assertEqual(test_cud.pre_exec, 'post_exec') self.assertEqual(test_cud.executable, '/bin/date') self.assertEqual(test_cud.arguments, 'test_args') @@ -206,7 +206,11 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, self.assertEqual(test_cud.stderr, 'stderr') self.assertEqual(test_cud.input_staging, 'inputs') self.assertEqual(test_cud.output_staging, 'outputs') + self.assertEqual(test_cud.tag, 'task.name') + task.tag = 'task.tag' + test_cud = create_cud_from_task(task, None) + self.assertEqual(test_cud.tag, 'task.tag') # ------------------------------------------------------------------------------ # From 83c541962bc1175267aa3b53950cfbe0220e6253 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Thu, 7 Jan 2021 11:53:15 -0500 Subject: [PATCH 09/42] small integration test between EnTK and RP --- src/radical/entk/task.py | 2 +- .../test_tmgr_rp/test_tproc_rp.py | 66 +++++++++++++++++++ 2 files changed, 67 insertions(+), 1 deletion(-) create mode 100755 tests/test_integration/test_tmgr_rp/test_tproc_rp.py diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 469102102..033aa658a 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -8,10 +8,10 @@ from . import exceptions as ree from . 
import states as res -import warnings warnings.simplefilter(action="once", category=DeprecationWarning, lineno=707) warnings.simplefilter(action="once", category=DeprecationWarning, lineno=764) + # ------------------------------------------------------------------------------ # class Task(object): diff --git a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py new file mode 100755 index 000000000..9fff54a34 --- /dev/null +++ b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py @@ -0,0 +1,66 @@ +# pylint: disable=protected-access, unused-argument +# pylint: disable=no-value-for-parameter + +from unittest import TestCase + +from radical.entk.execman.rp.task_processor import create_cud_from_task +from radical.entk import Task + + +class TestBase(TestCase): + + # ------------------------------------------------------------------------------ + # + def test_create_cud_from_task(self): + + task = Task() + task.uid = 'task.0000' + task.name = 'task.name' + task.parent_stage = {'uid' : 'stage.0000', + 'name' : 'stage.0000'} + task.parent_pipeline = {'uid' : 'pipe.0000', + 'name' : 'pipe.0000'} + task.pre_exec = ['post_exec'] + task.executable = '/bin/date' + task.arguments = ['test_args'] + task.sandbox = 'unit.0000' + task.post_exec = [''] + task.cpu_reqs = {'cpu_processes': 5, + 'cpu_threads': 6, + 'cpu_process_type': 'MPI', + 'cpu_thread_type': None} + task.gpu_reqs = {'gpu_processes': 5, + 'gpu_threads': 6, + 'gpu_process_type': None, + 'gpu_thread_type': None} + + task.lfs_per_process = 235 + task.stderr = 'stderr' + task.stdout = 'stdout' + + test_cud = create_cud_from_task(task, None) + self.assertEqual(test_cud.name, 'task.0000,task.name,stage.0000,stage.0000,pipe.0000,pipe.0000') + self.assertEqual(test_cud.pre_exec, ['post_exec']) + self.assertEqual(test_cud.executable, '/bin/date') + self.assertEqual(test_cud.arguments, ['test_args']) + self.assertEqual(test_cud.sandbox, 'unit.0000') + self.assertEqual(test_cud.post_exec, ['']) + self.assertEqual(test_cud.cpu_processes, 5) + self.assertEqual(test_cud.cpu_threads, 6) + self.assertEqual(test_cud.cpu_process_type, 'MPI') + self.assertIsNone(test_cud.cpu_thread_type) + self.assertEqual(test_cud.gpu_processes, 5) + self.assertEqual(test_cud.gpu_threads, 6) + self.assertEqual(test_cud.gpu_process_type, None) + self.assertIsNone(test_cud.gpu_thread_type) + self.assertEqual(test_cud.lfs_per_process, 235) + self.assertEqual(test_cud.stdout, 'stdout') + self.assertEqual(test_cud.stderr, 'stderr') + self.assertEqual(test_cud.input_staging, []) + self.assertEqual(test_cud.output_staging, []) + self.assertEqual(test_cud.tag, 'task.name') + + task.tag = 'task.tag' + test_cud = create_cud_from_task(task, None) + self.assertEqual(test_cud.tag, 'task.tag') + From 66d49bde857102af5f2dc0d2b4d8abf4b150ab8b Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Thu, 7 Jan 2021 17:18:23 -0600 Subject: [PATCH 10/42] Tag is ready --- src/radical/entk/execman/rp/task_processor.py | 2 +- src/radical/entk/task.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index 0cb599e8b..3f76a62e7 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -491,7 +491,7 @@ def create_cud_from_task(task, placeholders, prof=None): prof.prof('cud from task - done', uid=task.uid) logger.debug('CU %s created from Task %s' % (cud.name, task.uid)) - + 
return cud diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 033aa658a..593c95c6b 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -10,6 +10,7 @@ warnings.simplefilter(action="once", category=DeprecationWarning, lineno=707) warnings.simplefilter(action="once", category=DeprecationWarning, lineno=764) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=943) # ------------------------------------------------------------------------------ @@ -939,7 +940,6 @@ def path(self, value): @tag.setter def tag(self, value): - warnings.simplefilter("once") warnings.warn("Attribute tag will be depcrecated", DeprecationWarning) # this method exists for backward compatibility From 782cbd4643d0e332b2e3f6189c558a6c21f41661 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Thu, 7 Jan 2021 18:41:58 -0500 Subject: [PATCH 11/42] Update tag example --- examples/misc/lfs_tagging_dd.py | 125 +++++++++++++++++--------------- 1 file changed, 66 insertions(+), 59 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index d7bc2affe..fa6db4f85 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -1,41 +1,46 @@ #!/usr/bin/env python from radical.entk import Pipeline, Stage, Task, AppManager -from radical.entk.exceptions import * import os -import sys -import argparse -hostname = os.environ.get('RMQ_HOSTNAME','localhost') -port = int(os.environ.get('RMQ_PORT',5672)) +if os.environ.get('RADICAL_ENTK_VERBOSE') is None: + os.environ['RADICAL_ENTK_REPORT'] = 'True' +# Description of how the RabbitMQ process is accessible +# No need to change/set any variables if you installed RabbitMQ has a system +# process. If you are running RabbitMQ under a docker container or another +# VM, set "RMQ_HOSTNAME" and "RMQ_PORT" in the session where you are running +# this script. +hostname = os.environ.get('RMQ_HOSTNAME', 'localhost') +port = int(os.environ.get('RMQ_PORT', 5672)) +username = os.environ.get('RMQ_USERNAME') +password = os.environ.get('RMQ_PASSWORD') -def get_pipeline(shared_fs=False, size=1): +# Each task in this example prints the hostname of the node it executed. Tagged +# tasks should print the same hostname as the respective task in the first stage +# of the pipeline the following function returns. + +def get_pipeline(n=2): + + # We create a pipeline which has 3 stages. The tasks from the second and + # and third stage will execute at the same node as the respective tasks from + # the first stage. p = Pipeline() p.name = 'p' - n = 4 - s1 = Stage() s1.name = 's1' for x in range(n): + # The tasks from the first stage will execute at the first available node + # they fit. 
t = Task() - t.name = 't%s'%x - - # dd if=/dev/random bs= count= of= - - t.executable = 'dd' - - if not shared_fs: - t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s1_t%s.txt'%x] - else: - t.arguments = ['if=/dev/urandom','bs=%sM'%size, 'count=1', 'of=/home/vivek91/s1_t%s.txt'%x] - - t.cpu_reqs['processes'] = 1 - t.cpu_reqs['threads_per_process'] = 24 - t.cpu_reqs['thread_type'] = '' - t.cpu_reqs['process_type'] = '' + t.name = 't1.%04d' % x + t.executable = 'hostname' + t.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': , # Set enough threads for this task to get a whole node + 'cpu_process_type': None, + 'cpu_thread_type': None} t.lfs_per_process = 1024 s1.add_tasks(t) @@ -44,59 +49,61 @@ def get_pipeline(shared_fs=False, size=1): s2 = Stage() s2.name = 's2' - for x in range(n): + for x in range(2 * n): + # Tasks from this stage will execute on the node the task from stage 1 + # it depends executed. t = Task() - t.executable = 'dd' - - if not shared_fs: - t.arguments = ['if=$NODE_LFS_PATH/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=$NODE_LFS_PATH/s2_t%s.txt'%x] - else: - t.arguments = ['if=/home/vivek91/s1_t%s.txt'%x,'bs=%sM'%size, 'count=1', 'of=/home/vivek91/s2_t%s.txt'%x] - - t.cpu_reqs['processes'] = 1 - t.cpu_reqs['threads_per_process'] = 24 - t.cpu_reqs['thread_type'] = '' - t.cpu_reqs['process_type'] = '' - t.tags = 't%s'%x + t.name = 't2.%04d' % x + t.executable = 'hostname' + t.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': 1, + 'cpu_process_type': None, + 'cpu_thread_type': None} + t.lfs_per_process = 1024 + t.tag = 't1.%04d' % (x % 4) # As a tag we use the name of the task this task depends upon. s2.add_tasks(t) p.add_stages(s2) - return p - + s3 = Stage() + s3.name = 's3' + for x in range(n): + # Tasks from this stage will execute on the node the task from stage 1 + # it depends executed. + t = Task() + t.name = 't3.%04d' % x + t.executable = 'hostname' + t.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': 1, + 'cpu_process_type': None, + 'cpu_thread_type': None} + t.lfs_per_process = 1024 + t.tag = 't1.%04d' % x # As a tag we use the name of the task this task depends upon. + s3.add_tasks(t) -if __name__ == '__main__': + p.add_stages(s3) - args = argparse.ArgumentParser() - args.add_argument('sharedfs') - args.add_argument('size') + return p - args = args.parse_args() - if args.sharedfs == 'shared': - shared_fs = True - else: - shared_fs = False - size = args.size - print('SharedFS: ', shared_fs, size) - os.environ['RADICAL_PILOT_DBURL'] = 'mongodb://entk:entk123@ds159631.mlab.com:59631/da-lfs-test' +if __name__ == '__main__': + # Request at least two nodes res_dict = { - 'resource' : 'xsede.comet', - 'walltime' : 30, - 'cpus' : 120, - 'project' : 'unc100' - # 'project' : 'gk4', - # 'queue' : 'high' + 'resource' : '', + 'walltime' : , + 'cpus' : , + 'project' : '', + 'queue' : '' } - appman = AppManager(hostname=hostname, port=port) + appman = AppManager(hostname=hostname, port=port, username=username, password=password) appman.resource_desc = res_dict - p = get_pipeline(shared_fs=shared_fs, size=size) - appman.workflow = [p] + p = get_pipeline(n=2) # Select n to be greater or equal to the number of nodes. 
+ appman.workflow = set([p]) appman.run() From 71871cc2076fb0e4cf6f064b7b4435919ea989f6 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Fri, 8 Jan 2021 18:13:05 -0500 Subject: [PATCH 12/42] placeholders are based on uid not names --- src/radical/entk/execman/rp/task_manager.py | 29 +++++++------- src/radical/entk/execman/rp/task_processor.py | 40 ++++++------------- src/radical/entk/task.py | 12 +++--- tests/test_component/test_task.py | 7 +--- tests/test_component/test_tproc_rp.py | 29 ++++++-------- .../test_tmgr_rp/test_tproc_rp.py | 28 +++++++++++-- 6 files changed, 73 insertions(+), 72 deletions(-) diff --git a/src/radical/entk/execman/rp/task_manager.py b/src/radical/entk/execman/rp/task_manager.py index 2f0652bed..4beb4d5a6 100644 --- a/src/radical/entk/execman/rp/task_manager.py +++ b/src/radical/entk/execman/rp/task_manager.py @@ -217,23 +217,23 @@ def _process_tasks(self, task_queue, rmgr, rmq_conn_params): ''' placeholders = dict() - + placeholder_lock = mt.Lock() # ---------------------------------------------------------------------- - def load_placeholder(task, rts_uid): - - parent_pipeline = str(task.parent_pipeline['name']) - parent_stage = str(task.parent_stage['name']) + def load_placeholder(task): + with placeholder_lock: + parent_pipeline = str(task.parent_pipeline['uid']) + parent_stage = str(task.parent_stage['uid']) - if parent_pipeline not in placeholders: - placeholders[parent_pipeline] = dict() + if parent_pipeline not in placeholders: + placeholders[parent_pipeline] = dict() - if parent_stage not in placeholders[parent_pipeline]: - placeholders[parent_pipeline][parent_stage] = dict() + if parent_stage not in placeholders[parent_pipeline]: + placeholders[parent_pipeline][parent_stage] = dict() - if None not in [parent_pipeline, parent_stage, task.name]: - placeholders[parent_pipeline][parent_stage][task.name] = \ - {'path' : task.path, - 'rts_uid': rts_uid} + if None not in [parent_pipeline, parent_stage, task.uid]: + placeholders[parent_pipeline][parent_stage][task.uid] = \ + {'path': task.path, + 'uid': task.uid} # ---------------------------------------------------------------------- def unit_state_cb(unit, state): @@ -250,7 +250,7 @@ def unit_state_cb(unit, state): self._advance(task, 'Task', states.COMPLETED, mq_channel, '%s-cb-to-sync' % self._sid) - load_placeholder(task, unit.uid) + load_placeholder(task) task_as_dict = json.dumps(task.to_dict()) @@ -307,6 +307,7 @@ def unit_state_cb(unit, state): task = Task() task.from_dict(msg) bulk_tasks.append(task) + load_placeholder(task) bulk_cuds.append(create_cud_from_task( task, placeholders, self._prof)) diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index 3f76a62e7..f8027702f 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -153,7 +153,7 @@ def resolve_arguments(args, placeholders): # ------------------------------------------------------------------------------ # -def resolve_tags(tags, parent_pipeline_name, placeholders): +def resolve_tags(task, parent_pipeline_name, placeholders): # entk only handles co_location tags. If tags are given as strings, they # get translated into `{'colocation': ''}`. Tags passed as dictionaies @@ -161,8 +161,7 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): # # In both cases, the tag string is expanded with the given placeholders. 
- if not tags: - return + tags = task.tags if task.tags else {'colocate': task.uid} colo_tag = tags['colocate'] @@ -171,8 +170,7 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): for tname in placeholders[parent_pipeline_name][sname]: if colo_tag != tname: continue - return {'colocate': - placeholders[parent_pipeline_name][sname][tname]['rts_uid']} + return placeholders[parent_pipeline_name][sname][tname]['uid'] for pname in placeholders: @@ -184,11 +182,11 @@ def resolve_tags(tags, parent_pipeline_name, placeholders): for tname in placeholders[pname][sname]: if colo_tag != tname: continue - return {'colocate': - placeholders[pname][sname][tname]['rts_uid']} + return placeholders[pname][sname][tname]['uid'] - raise ree.EnTKError(msg='colocation tag %s cannot be used as no previous' - 'task with that name is found' % colo_tag) + return task.uid + #raise ree.EnTKError(msg='colocation tag %s cannot be used as no previous' + # 'task with that name is found' % colo_tag) # ------------------------------------------------------------------------------ @@ -445,6 +443,7 @@ def create_cud_from_task(task, placeholders, prof=None): prof.prof('cud_create', uid=task.uid) cud = rp.ComputeUnitDescription() + cud.uid = task.uid cud.name = '%s,%s,%s,%s,%s,%s' % (task.uid, task.name, task.parent_stage['uid'], task.parent_stage['name'], @@ -456,18 +455,9 @@ def create_cud_from_task(task, placeholders, prof=None): cud.sandbox = task.sandbox cud.post_exec = task.post_exec - # FIXME: Use tags properly. - # if task.tags: - # if task.parent_pipeline['name']: - # cud.tags = resolve_tags( - # tags=task.tags, - # parent_pipeline_name=task.parent_pipeline['name'], - # placeholders=placeholders) - - if task.tag: - cud.tag = task.tag - else: - cud.tag = task.name + if task.parent_pipeline['uid']: + cud.tag = resolve_tags(task=task, parent_pipeline_name=task.parent_pipeline['uid'], + placeholders=placeholders) cud.cpu_processes = task.cpu_reqs['cpu_processes'] cud.cpu_threads = task.cpu_reqs['cpu_threads'] @@ -491,9 +481,8 @@ def create_cud_from_task(task, placeholders, prof=None): prof.prof('cud from task - done', uid=task.uid) logger.debug('CU %s created from Task %s' % (cud.name, task.uid)) - - return cud + return cud except Exception: logger.exception('CU creation failed') @@ -512,11 +501,8 @@ def create_task_from_cu(cu, prof=None): attributes for a CU as for a CUD. Also, this is not required for the most part. - TODO: Add exit code, stdout, stderr and path attributes to a Task. - These can be extracted from a CU - :arguments: - :cu: RP Compute Unit + :cu: RADICAL.Pilot Compute Unit :return: Task """ diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 593c95c6b..f8acebb75 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -76,7 +76,6 @@ def __init__(self, from_dict=None): self._path = None self._exit_code = None self._tags = None - self._tag = None # Keep track of res attained self._state_history = [res.INITIAL] @@ -527,7 +526,7 @@ def tag(self): WARNING: It will be deprecated. ''' - return self._tag + return self._tags @property @@ -940,13 +939,13 @@ def path(self, value): @tag.setter def tag(self, value): - warnings.warn("Attribute tag will be depcrecated", DeprecationWarning) + warnings.warn("Attribute tag is depcrecated. 
Use tags instead", DeprecationWarning) # this method exists for backward compatibility if not isinstance(value, str): raise ree.TypeError(entity='tag', expected_type=str, actual_type=type(value)) - self._tag = value + self._tags = {'colocate': value} @tags.setter @@ -961,6 +960,10 @@ def tags(self, value): actual_type=type(value.get('colocate')), entity='colocate') + if not isinstance(value['colocate'], str): + raise ree.TypeError(entity='tag', expected_type=str, + actual_type=type(value)) + self._tags = value @@ -1021,7 +1024,6 @@ def to_dict(self): 'exit_code' : self._exit_code, 'path' : self._path, 'tags' : self._tags, - 'tag' : self._tag, 'rts_uid' : self._rts_uid, 'parent_stage' : self._p_stage, diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 1f608b1c4..0809bfa4d 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -67,7 +67,6 @@ def test_task_initialization(self, mocked_generate_id): self.assertEqual(t._stderr, '') self.assertIsNone(t._exit_code) self.assertIsNone(t._tags) - self.assertIsNone(t._tag) self.assertIsNone(t._path) self.assertIsNone(t._p_pipeline['uid']) self.assertIsNone(t._p_pipeline['name']) @@ -387,11 +386,11 @@ def test_post_exec(self, mocked_init): def test_tag(self, mocked_init): task = Task() - task._tag = {'colocate':'tasks'} + task._tags = {'colocate':'tasks'} self.assertEqual(task.tag, {'colocate':'tasks'}) task.tag = 'task.tag' - self.assertEqual(task._tag, 'task.tag') + self.assertEqual(task._tags, {'colocate': 'task.tag'}) with self.assertRaises(ree.TypeError): task.tag = {'colocate':'tasks'} @@ -453,7 +452,6 @@ def test_task_to_dict(self, mocked_init): t._stdout = 'Hello World' t._stderr = 'Hello World' t._exit_code = 0 - t._tag = None t._tags = None t._path = 'some_path' t._p_pipeline = dict() @@ -493,7 +491,6 @@ def test_task_to_dict(self, mocked_init): 'stderr': 'Hello World', 'exit_code': 0, 'path': 'some_path', - 'tag': None, 'tags': None, 'rts_uid': 'unit.0000', 'parent_stage': {'name': 'stage.0000', diff --git a/tests/test_component/test_tproc_rp.py b/tests/test_component/test_tproc_rp.py index efb63008c..7ea6701c6 100755 --- a/tests/test_component/test_tproc_rp.py +++ b/tests/test_component/test_tproc_rp.py @@ -133,11 +133,10 @@ def test_resolve_tags(self, mocked_Logger): @mock.patch('radical.utils.Logger') @mock.patch.object(radical.entk.execman.rp.task_processor, 'get_output_list_from_task', return_value='outputs') @mock.patch.object(radical.entk.execman.rp.task_processor, 'resolve_arguments', return_value='test_args') + @mock.patch.object(radical.entk.execman.rp.task_processor, 'resolve_tags', return_value='test_tag') @mock.patch.object(radical.entk.execman.rp.task_processor, 'get_input_list_from_task', return_value='inputs') - def test_create_cud_from_task(self, mocked_ComputeUnitDescription, - mocked_Logger, mocked_get_input_list_from_task, - mocked_get_output_list_from_task, - mocked_resolve_arguments): + def test_create_cud_from_task(self, mocked_ComputeUnitDescription, mocked_Logger, mocked_get_input_list_from_task, + mocked_get_output_list_from_task, mocked_resolve_arguments, mocked_resolve_tags): mocked_ComputeUnitDescription.name = None mocked_ComputeUnitDescription.pre_exec = None @@ -180,13 +179,14 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, 'gpu_threads': 6, 'gpu_process_type': 'POSIX', 'gpu_thread_type': None} - task.tag = None + task.tags = None task.lfs_per_process = 235 task.stderr = 'stderr' task.stdout = 'stdout' test_cud = 
create_cud_from_task(task, None) + self.assertEqual(test_cud.name, 'task.0000,task.name,stage.0000,stage.0000,pipe.0000,pipe.0000') self.assertEqual(test_cud.pre_exec, 'post_exec') self.assertEqual(test_cud.executable, '/bin/date') @@ -206,11 +206,7 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, self.assertEqual(test_cud.stderr, 'stderr') self.assertEqual(test_cud.input_staging, 'inputs') self.assertEqual(test_cud.output_staging, 'outputs') - self.assertEqual(test_cud.tag, 'task.name') - - task.tag = 'task.tag' - test_cud = create_cud_from_task(task, None) - self.assertEqual(test_cud.tag, 'task.tag') + self.assertEqual(test_cud.tag, 'test_tag') # ------------------------------------------------------------------------------ # @@ -379,8 +375,7 @@ def test_get_output_list_from_task(self, mocked_Logger): # @mock.patch('radical.pilot.ComputeUnitDescription') @mock.patch('radical.utils.Logger') - def test_issue_259(self, mocked_ComputeUnitDescription, - mocked_Logger): + def test_issue_259(self, mocked_ComputeUnitDescription, mocked_Logger): mocked_ComputeUnitDescription.name = None mocked_ComputeUnitDescription.pre_exec = None @@ -403,10 +398,10 @@ def test_issue_259(self, mocked_ComputeUnitDescription, mocked_ComputeUnitDescription.input_staging = None mocked_ComputeUnitDescription.output_staging = None - pipeline_name = 'p1' - stage_name = 's1' - t1_name = 't1' - t2_name = 't2' + pipeline_name = 'pipe.0000' + stage_name = 'stage.0000' + t1_name = 'task.0000' + t2_name = 'task.0001' placeholders = { pipeline_name: { @@ -450,7 +445,7 @@ def test_issue_259(self, mocked_ComputeUnitDescription, 'gpu_threads': 6, 'gpu_process_type': 'POSIX', 'gpu_thread_type': None} - task.tag = None + task.tags = None task.lfs_per_process = 235 task.stderr = 'stderr' diff --git a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py index 9fff54a34..5abce41a5 100755 --- a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py +++ b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py @@ -13,9 +13,29 @@ class TestBase(TestCase): # def test_create_cud_from_task(self): + pipeline_name = 'pipe.0000' + stage_name = 'stage.0000' + t1_name = 'task.0000' + t2_name = 'task.0001' + + placeholders = { + pipeline_name: { + stage_name: { + t1_name: { + 'path' : '/home/vivek/t1', + 'rts_uid': 'unit.0000' + }, + t2_name: { + 'path' : '/home/vivek/t2', + 'rts_uid': 'unit.0003' + } + } + } + } + task = Task() task.uid = 'task.0000' - task.name = 'task.name' + task.name = 'task.0000' task.parent_stage = {'uid' : 'stage.0000', 'name' : 'stage.0000'} task.parent_pipeline = {'uid' : 'pipe.0000', @@ -38,8 +58,8 @@ def test_create_cud_from_task(self): task.stderr = 'stderr' task.stdout = 'stdout' - test_cud = create_cud_from_task(task, None) - self.assertEqual(test_cud.name, 'task.0000,task.name,stage.0000,stage.0000,pipe.0000,pipe.0000') + test_cud = create_cud_from_task(task, placeholders) + self.assertEqual(test_cud.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') self.assertEqual(test_cud.pre_exec, ['post_exec']) self.assertEqual(test_cud.executable, '/bin/date') self.assertEqual(test_cud.arguments, ['test_args']) @@ -58,7 +78,7 @@ def test_create_cud_from_task(self): self.assertEqual(test_cud.stderr, 'stderr') self.assertEqual(test_cud.input_staging, []) self.assertEqual(test_cud.output_staging, []) - self.assertEqual(test_cud.tag, 'task.name') + self.assertEqual(test_cud.tag, 'unit.0000') task.tag = 'task.tag' test_cud = 
create_cud_from_task(task, None) From fe610189e15013f569bbd84086af0c654fa26a65 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 11 Jan 2021 15:06:18 -0500 Subject: [PATCH 13/42] Updating lfs tagging example --- examples/misc/lfs_tagging_dd.py | 94 +++++++++++++++------------------ 1 file changed, 44 insertions(+), 50 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index fa6db4f85..262579c1f 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -1,3 +1,4 @@ + #!/usr/bin/env python from radical.entk import Pipeline, Stage, Task, AppManager @@ -24,86 +25,79 @@ def get_pipeline(n=2): # We create a pipeline which has 3 stages. The tasks from the second and # and third stage will execute at the same node as the respective tasks from - # the first stage. - - p = Pipeline() - p.name = 'p' - - s1 = Stage() - s1.name = 's1' + # the first stage. + pipelines = list() for x in range(n): + p = Pipeline() + p.name = 'p.%04d' % x + + s1 = Stage() + s1.name = 's1' # The tasks from the first stage will execute at the first available node # they fit. - t = Task() - t.name = 't1.%04d' % x - t.executable = 'hostname' - t.cpu_reqs = {'cpu_processes': 1, - 'cpu_threads': , # Set enough threads for this task to get a whole node - 'cpu_process_type': None, + t1 = Task() + t1.name = 't1.%04d' % x + t1.executable = 'hostname' + t1.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': 1, # Set enough threads for this task to get a whole node + 'cpu_process_type': None, 'cpu_thread_type': None} - t.lfs_per_process = 1024 + t1.lfs_per_process = 10 + s1.add_tasks(t1) - s1.add_tasks(t) + p.add_stages(s1) - p.add_stages(s1) - - s2 = Stage() - s2.name = 's2' - for x in range(2 * n): + s2 = Stage() + s2.name = 's2' # Tasks from this stage will execute on the node the task from stage 1 # it depends executed. - t = Task() - t.name = 't2.%04d' % x - t.executable = 'hostname' - t.cpu_reqs = {'cpu_processes': 1, + t2 = Task() + t2.name = 't2.%04d' % x + t2.executable = 'hostname' + t2.cpu_reqs = {'cpu_processes': 1, 'cpu_threads': 1, 'cpu_process_type': None, 'cpu_thread_type': None} - t.lfs_per_process = 1024 - t.tag = 't1.%04d' % (x % 4) # As a tag we use the name of the task this task depends upon. - - s2.add_tasks(t) + t2.tag = t1.uid # As a tag we use the ID of the first task this task depends upon. + t2.lfs_per_process = 10 + s2.add_tasks(t2) - p.add_stages(s2) + p.add_stages(s2) - s3 = Stage() - s3.name = 's3' - for x in range(n): + s3 = Stage() + s3.name = 's3' # Tasks from this stage will execute on the node the task from stage 1 # it depends executed. - t = Task() - t.name = 't3.%04d' % x - t.executable = 'hostname' - t.cpu_reqs = {'cpu_processes': 1, + t3 = Task() + t3.name = 't3.%04d' % x + t3.executable = 'hostname' + t3.cpu_reqs = {'cpu_processes': 1, 'cpu_threads': 1, 'cpu_process_type': None, 'cpu_thread_type': None} - t.lfs_per_process = 1024 - t.tag = 't1.%04d' % x # As a tag we use the name of the task this task depends upon. - s3.add_tasks(t) - - - p.add_stages(s3) + t3.lfs_per_process = 10 + t3.tag = t1.uid # As a tag we use the ID of the first task this task depends upon. 
+ s3.add_tasks(t3) - return p + p.add_stages(s3) + pipelines.append(p) + return pipelines if __name__ == '__main__': - # Request at least two nodes + # Request at least two nodes res_dict = { - 'resource' : '', - 'walltime' : , - 'cpus' : , - 'project' : '', - 'queue' : '' + 'resource' : 'local.localhost', + 'walltime' : 20, + 'cpus' : 2, } appman = AppManager(hostname=hostname, port=port, username=username, password=password) appman.resource_desc = res_dict p = get_pipeline(n=2) # Select n to be greater or equal to the number of nodes. - appman.workflow = set([p]) + appman.workflow = set(p) appman.run() From f97333f3a0610d7b9008c6ea49034337651d589e Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 11 Jan 2021 16:09:30 -0500 Subject: [PATCH 14/42] tests updated based on tags changes --- tests/test_component/test_tproc_rp.py | 26 +++++++++++-------- .../test_tmgr_rp/test_tproc_rp.py | 12 ++++----- 2 files changed, 21 insertions(+), 17 deletions(-) diff --git a/tests/test_component/test_tproc_rp.py b/tests/test_component/test_tproc_rp.py index 7ea6701c6..9eb83523a 100755 --- a/tests/test_component/test_tproc_rp.py +++ b/tests/test_component/test_tproc_rp.py @@ -98,19 +98,24 @@ def test_resolve_tags(self, mocked_Logger): pipeline_name = 'p1' stage_name = 's1' - t1_name = 't1' + task = mock.Mock() + task.uid = 'task.0000' + task.tags = {'colocate': task.uid} + task2 = mock.Mock() + task2.uid = 'task.0001' + task2.tags = None t2_name = 't2' placeholders = { pipeline_name: { stage_name: { - t1_name: { + task.uid: { 'path' : '/home/vivek/t1', - 'rts_uid': 'unit.0002' + 'uid': 'unit.0002' }, t2_name: { 'path' : '/home/vivek/t2', - 'rts_uid': 'unit.0003' + 'uid': 'unit.0003' } } } @@ -118,14 +123,13 @@ def test_resolve_tags(self, mocked_Logger): - self.assertEqual(resolve_tags(tags={'colocate': 't1'}, + self.assertEqual(resolve_tags(task=task, parent_pipeline_name=pipeline_name, placeholders=placeholders), - {'colocate':'unit.0002'}) + 'unit.0002') - with self.assertRaises(ree.EnTKError): - resolve_tags(tags={'colocate': 't3'}, parent_pipeline_name=pipeline_name, - placeholders=placeholders) + self.assertEqual(resolve_tags(task=task2, parent_pipeline_name=pipeline_name, + placeholders=placeholders), 'task.0001') # ------------------------------------------------------------------------------ # @@ -408,11 +412,11 @@ def test_issue_259(self, mocked_ComputeUnitDescription, mocked_Logger): stage_name: { t1_name: { 'path' : '/home/vivek/t1', - 'rts_uid': 'unit.0002' + 'uid': 'unit.0002' }, t2_name: { 'path' : '/home/vivek/t2', - 'rts_uid': 'unit.0003' + 'uid': 'unit.0003' } } } diff --git a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py index 5abce41a5..a0549d855 100755 --- a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py +++ b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py @@ -23,11 +23,11 @@ def test_create_cud_from_task(self): stage_name: { t1_name: { 'path' : '/home/vivek/t1', - 'rts_uid': 'unit.0000' + 'uid': 'task.0000' }, t2_name: { 'path' : '/home/vivek/t2', - 'rts_uid': 'unit.0003' + 'uid': 'task.0003' } } } @@ -78,9 +78,9 @@ def test_create_cud_from_task(self): self.assertEqual(test_cud.stderr, 'stderr') self.assertEqual(test_cud.input_staging, []) self.assertEqual(test_cud.output_staging, []) - self.assertEqual(test_cud.tag, 'unit.0000') + self.assertEqual(test_cud.tag, 'task.0000') - task.tag = 'task.tag' - test_cud = create_cud_from_task(task, None) - self.assertEqual(test_cud.tag, 'task.tag') + task.tag = 
'task.0001' + test_cud = create_cud_from_task(task, placeholders) + self.assertEqual(test_cud.tag, 'task.0003') From 995f2151233406cfe46e05ec9fcd5aed5ab4aef0 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 12 Jan 2021 11:16:14 -0500 Subject: [PATCH 15/42] Tasks should have unique ids for tags to work properly --- src/radical/entk/task.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index f8acebb75..2668e9578 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -27,6 +27,7 @@ class Task(object): `uid` offset the uid count file in radical.utils and can potentially affect the profiling if not taken care. ''' + _uids = list() # FIXME: this should be converted into an RU/RS Attribute object, almost all # of the code is redundant with the attribute class... @@ -1073,6 +1074,11 @@ def _validate(self): executable has been specified for the task. ''' + if self._uid in Task._uids: + raise ree.EnTKError(msg='Task ID %s already exists' % self._uid) + else: + Task._uids.append(self._uid) + if self._state is not res.INITIAL: raise ree.ValueError(obj=self._uid, attribute='state', expected_value=res.INITIAL, From dff216b829719ab974c2078b490290cfeaa34f55 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 12 Jan 2021 11:42:44 -0500 Subject: [PATCH 16/42] linting --- examples/misc/lfs_tagging_dd.py | 96 +++++++++---------- src/radical/entk/execman/rp/task_manager.py | 1 + src/radical/entk/execman/rp/task_processor.py | 2 - src/radical/entk/task.py | 2 + 4 files changed, 51 insertions(+), 50 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index 262579c1f..6b0afa578 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -1,4 +1,3 @@ - #!/usr/bin/env python from radical.entk import Pipeline, Stage, Task, AppManager @@ -21,6 +20,7 @@ # tasks should print the same hostname as the respective task in the first stage # of the pipeline the following function returns. + def get_pipeline(n=2): # We create a pipeline which has 3 stages. The tasks from the second and @@ -28,60 +28,60 @@ def get_pipeline(n=2): # the first stage. pipelines = list() for x in range(n): - p = Pipeline() - p.name = 'p.%04d' % x + pipeline = Pipeline() + pipeline.name = 'p.%04d' % x - s1 = Stage() - s1.name = 's1' + stage1 = Stage() + stage1.name = 'stage1' # The tasks from the first stage will execute at the first available node # they fit. - t1 = Task() - t1.name = 't1.%04d' % x - t1.executable = 'hostname' - t1.cpu_reqs = {'cpu_processes': 1, - 'cpu_threads': 1, # Set enough threads for this task to get a whole node - 'cpu_process_type': None, - 'cpu_thread_type': None} - t1.lfs_per_process = 10 - s1.add_tasks(t1) - - p.add_stages(s1) - - s2 = Stage() - s2.name = 's2' + task1 = Task() + task1.name = 'task1.%04d' % x + task1.executable = 'hostname' + task1.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': 1, # Set enough threads for this task to get a whole node + 'cpu_process_type': None, + 'cpu_thread_type': None} + task1.lfs_per_process = 10 + stage1.add_tasks(task1) + + pipeline.add_stages(stage1) + + stage2 = Stage() + stage2.name = 'stage2' # Tasks from this stage will execute on the node the task from stage 1 # it depends executed. 
- t2 = Task() - t2.name = 't2.%04d' % x - t2.executable = 'hostname' - t2.cpu_reqs = {'cpu_processes': 1, - 'cpu_threads': 1, - 'cpu_process_type': None, - 'cpu_thread_type': None} - t2.tag = t1.uid # As a tag we use the ID of the first task this task depends upon. - t2.lfs_per_process = 10 - s2.add_tasks(t2) - - - p.add_stages(s2) - - s3 = Stage() - s3.name = 's3' + task2 = Task() + task2.name = 'task2.%04d' % x + task2.executable = 'hostname' + task2.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': 1, + 'cpu_process_type': None, + 'cpu_thread_type': None} + task2.tag = task1.uid # As a tag we use the ID of the first task this task depends upon. + task2.lfs_per_process = 10 + stage2.add_tasks(task2) + + + pipeline.add_stages(stage2) + + stage3 = Stage() + stage3.name = 'stage3' # Tasks from this stage will execute on the node the task from stage 1 # it depends executed. - t3 = Task() - t3.name = 't3.%04d' % x - t3.executable = 'hostname' - t3.cpu_reqs = {'cpu_processes': 1, - 'cpu_threads': 1, - 'cpu_process_type': None, - 'cpu_thread_type': None} - t3.lfs_per_process = 10 - t3.tag = t1.uid # As a tag we use the ID of the first task this task depends upon. - s3.add_tasks(t3) - - p.add_stages(s3) - pipelines.append(p) + task3 = Task() + task3.name = 'task3.%04d' % x + task3.executable = 'hostname' + task3.cpu_reqs = {'cpu_processes': 1, + 'cpu_threads': 1, + 'cpu_process_type': None, + 'cpu_thread_type': None} + task3.lfs_per_process = 10 + task3.tag = task1.uid # As a tag we use the ID of the first task this task depends upon. + stage3.add_tasks(task3) + + pipeline.add_stages(stage3) + pipelines.append(pipeline) return pipelines diff --git a/src/radical/entk/execman/rp/task_manager.py b/src/radical/entk/execman/rp/task_manager.py index 4beb4d5a6..210a3756a 100644 --- a/src/radical/entk/execman/rp/task_manager.py +++ b/src/radical/entk/execman/rp/task_manager.py @@ -218,6 +218,7 @@ def _process_tasks(self, task_queue, rmgr, rmq_conn_params): placeholders = dict() placeholder_lock = mt.Lock() + # ---------------------------------------------------------------------- def load_placeholder(task): with placeholder_lock: diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index f8027702f..dc0676e62 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -185,8 +185,6 @@ def resolve_tags(task, parent_pipeline_name, placeholders): return placeholders[pname][sname][tname]['uid'] return task.uid - #raise ree.EnTKError(msg='colocation tag %s cannot be used as no previous' - # 'task with that name is found' % colo_tag) # ------------------------------------------------------------------------------ diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 2668e9578..97492db0a 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -27,6 +27,8 @@ class Task(object): `uid` offset the uid count file in radical.utils and can potentially affect the profiling if not taken care. 
''' + + _uids = list() # FIXME: this should be converted into an RU/RS Attribute object, almost all From f4c93ba9d427af176d6be6d94ba349db59116351 Mon Sep 17 00:00:00 2001 From: Matteo Turilli Date: Wed, 13 Jan 2021 12:39:07 -0500 Subject: [PATCH 17/42] Rewrite the comments --- examples/misc/lfs_tagging_dd.py | 45 +++++++++++++++++---------------- 1 file changed, 23 insertions(+), 22 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index 6b0afa578..f474d853b 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -6,26 +6,22 @@ if os.environ.get('RADICAL_ENTK_VERBOSE') is None: os.environ['RADICAL_ENTK_REPORT'] = 'True' -# Description of how the RabbitMQ process is accessible # No need to change/set any variables if you installed RabbitMQ has a system -# process. If you are running RabbitMQ under a docker container or another -# VM, set "RMQ_HOSTNAME" and "RMQ_PORT" in the session where you are running -# this script. +# process. If you are running RabbitMQ in a Docker container or on a dedicated +# virtual machine, set the variables "RMQ_HOSTNAME" and "RMQ_PORT" in the shell +# environment in which you are running this script. hostname = os.environ.get('RMQ_HOSTNAME', 'localhost') port = int(os.environ.get('RMQ_PORT', 5672)) username = os.environ.get('RMQ_USERNAME') password = os.environ.get('RMQ_PASSWORD') -# Each task in this example prints the hostname of the node it executed. Tagged -# tasks should print the same hostname as the respective task in the first stage -# of the pipeline the following function returns. - - +# Each task of this example prints the hostname of the node on which it is +# executed. Tagged tasks should print the same hostname. def get_pipeline(n=2): - # We create a pipeline which has 3 stages. The tasks from the second and - # and third stage will execute at the same node as the respective tasks from - # the first stage. + # We create a pipeline with 3 stages, each with 1 task. The tasks of the + # second and third stage will execute on the same compute node on which the + # task of the first stage executed. pipelines = list() for x in range(n): pipeline = Pipeline() @@ -33,13 +29,13 @@ def get_pipeline(n=2): stage1 = Stage() stage1.name = 'stage1' - # The tasks from the first stage will execute at the first available node - # they fit. + # task1 of stage1 will execute on the first available and suitable node. task1 = Task() task1.name = 'task1.%04d' % x task1.executable = 'hostname' + # Set enough threads for task1 to get a whole compute node task1.cpu_reqs = {'cpu_processes': 1, - 'cpu_threads': 1, # Set enough threads for this task to get a whole node + 'cpu_threads': 1, 'cpu_process_type': None, 'cpu_thread_type': None} task1.lfs_per_process = 10 @@ -49,8 +45,8 @@ def get_pipeline(n=2): stage2 = Stage() stage2.name = 'stage2' - # Tasks from this stage will execute on the node the task from stage 1 - # it depends executed. + # task2 of stage2 depends on task1 of stage1, i.e., it cannot execute + # before task1 has completed its execution. task2 = Task() task2.name = 'task2.%04d' % x task2.executable = 'hostname' @@ -58,7 +54,9 @@ def get_pipeline(n=2): 'cpu_threads': 1, 'cpu_process_type': None, 'cpu_thread_type': None} - task2.tag = task1.uid # As a tag we use the ID of the first task this task depends upon. + # We use the ID of task1 as the tag of task2. In this way, task2 will + # execute on the same node on which task1 executed. 
+ task2.tag = task1.uid task2.lfs_per_process = 10 stage2.add_tasks(task2) @@ -67,8 +65,8 @@ def get_pipeline(n=2): stage3 = Stage() stage3.name = 'stage3' - # Tasks from this stage will execute on the node the task from stage 1 - # it depends executed. + # task3 of stage3 depends on task2 of stage2, i.e., it cannot execute + # before task2 has completed its execution. task3 = Task() task3.name = 'task3.%04d' % x task3.executable = 'hostname' @@ -77,7 +75,9 @@ def get_pipeline(n=2): 'cpu_process_type': None, 'cpu_thread_type': None} task3.lfs_per_process = 10 - task3.tag = task1.uid # As a tag we use the ID of the first task this task depends upon. + # We use the ID of task1 as the tag of task3. In this way, task3 will + # execute on the same node on which task1 and task2 executed. + task3.tag = task1.uid stage3.add_tasks(task3) pipeline.add_stages(stage3) @@ -98,6 +98,7 @@ def get_pipeline(n=2): appman = AppManager(hostname=hostname, port=port, username=username, password=password) appman.resource_desc = res_dict - p = get_pipeline(n=2) # Select n to be greater or equal to the number of nodes. + # Select n to be >= to the number of available compute nodes. + p = get_pipeline(n=2) appman.workflow = set(p) appman.run() From 26373e545c51f9e436d8c4bc2abb48bf027196ae Mon Sep 17 00:00:00 2001 From: Matteo Turilli Date: Wed, 13 Jan 2021 12:40:35 -0500 Subject: [PATCH 18/42] Fix comment --- examples/misc/lfs_tagging_dd.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index f474d853b..763dbd29b 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -88,7 +88,7 @@ def get_pipeline(n=2): if __name__ == '__main__': - # Request at least two nodes + # Request at least two compute nodes res_dict = { 'resource' : 'local.localhost', 'walltime' : 20, From 38cdad97b7c4044ef962e192e0ecc8ec5e4a61a3 Mon Sep 17 00:00:00 2001 From: Matteo Turilli Date: Wed, 13 Jan 2021 12:58:06 -0500 Subject: [PATCH 19/42] Linting --- examples/misc/lfs_tagging_dd.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index 763dbd29b..d99f2922e 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -18,10 +18,11 @@ # Each task of this example prints the hostname of the node on which it is # executed. Tagged tasks should print the same hostname. def get_pipeline(n=2): - - # We create a pipeline with 3 stages, each with 1 task. The tasks of the - # second and third stage will execute on the same compute node on which the - # task of the first stage executed. + ''' + We create a pipeline with 3 stages, each with 1 task. The tasks of the + second and third stage will be tagged so that they will execute on the same + compute node on which the task of the first stage executed. + ''' pipelines = list() for x in range(n): pipeline = Pipeline() From 8be50fc9bf9c1024191a17ec058e3cd6380780d6 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 13 Jan 2021 16:34:05 -0500 Subject: [PATCH 20/42] fixing warning lines --- src/radical/entk/task.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 97492db0a..d7d8bea9a 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -8,9 +8,9 @@ from . import exceptions as ree from . 
import states as res -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=707) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=764) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=943) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=721) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=778) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=945) # ------------------------------------------------------------------------------ From e5f22cd65da23407c4bbc2eae1ae883375378a23 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 13 Jan 2021 16:53:28 -0500 Subject: [PATCH 21/42] fixing tests --- src/radical/entk/task.py | 6 +++--- tests/test_component/test_task.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index c5be38f31..49df0b4a8 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -11,9 +11,9 @@ from . import exceptions as ree from . import states as res -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=721) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=778) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=945) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=725) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=783) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=951) diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 0809bfa4d..78b4051d0 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -298,7 +298,7 @@ def test_name(self, mocked_init): with self.assertRaises(ree.TypeError): task.name = 0 - with self.assertRaises(ree.ValueError): + with self.assertWarns(DeprecationWarning): task.name = 'task,0000' From ba86a763b7f1ad281ee908ee9356887e92a725de Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 13 Jan 2021 18:12:35 -0500 Subject: [PATCH 22/42] tests and linters happy --- examples/misc/lfs_tagging_dd.py | 1 + src/radical/entk/execman/rp/task_processor.py | 12 ++-- src/radical/entk/task.py | 7 +- tests/test_issues/test_issue_324.py | 72 +++++++++---------- 4 files changed, 44 insertions(+), 48 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index d99f2922e..6151e456c 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -15,6 +15,7 @@ username = os.environ.get('RMQ_USERNAME') password = os.environ.get('RMQ_PASSWORD') + # Each task of this example prints the hostname of the node on which it is # executed. Tagged tasks should print the same hostname. 
def get_pipeline(n=2): diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index dc0676e62..f590610c4 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -167,10 +167,8 @@ def resolve_tags(task, parent_pipeline_name, placeholders): # Check self pipeline first for sname in placeholders[parent_pipeline_name]: - for tname in placeholders[parent_pipeline_name][sname]: - if colo_tag != tname: - continue - return placeholders[parent_pipeline_name][sname][tname]['uid'] + if colo_tag in placeholders[parent_pipeline_name][sname]: + return placeholders[parent_pipeline_name][sname][colo_tag]['uid'] for pname in placeholders: @@ -179,10 +177,8 @@ def resolve_tags(task, parent_pipeline_name, placeholders): continue for sname in placeholders[pname]: - for tname in placeholders[pname][sname]: - if colo_tag != tname: - continue - return placeholders[pname][sname][tname]['uid'] + if colo_tag in placeholders[pname][sname]: + return placeholders[pname][sname][colo_tag]['uid'] return task.uid diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 49df0b4a8..263a033ca 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -11,10 +11,9 @@ from . import exceptions as ree from . import states as res -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=725) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=783) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=951) - +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=724) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=782) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=950) # ------------------------------------------------------------------------------ diff --git a/tests/test_issues/test_issue_324.py b/tests/test_issues/test_issue_324.py index bd50c05e7..0b217ff8d 100644 --- a/tests/test_issues/test_issue_324.py +++ b/tests/test_issues/test_issue_324.py @@ -1,44 +1,44 @@ from radical.entk import Task, states -''' -Test if task.from_dict accepts executable as a str or list -''' def test_issue_271(): + ''' + Test if task.from_dict accepts executable as a str or list + ''' - d = { 'uid': 're.Task.0000', - 'name': 't1', - 'state': states.DONE, - 'state_history': [states.INITIAL, states.DONE], - 'pre_exec': [], - 'executable': 'sleep', - 'arguments': [], - 'post_exec': [], - 'cpu_reqs': { 'processes': 1, - 'process_type': None, - 'threads_per_process': 1, - 'thread_type': None - }, - 'gpu_reqs': { 'processes': 0, - 'process_type': None, - 'threads_per_process': 0, - 'thread_type': None - }, - 'lfs_per_process': 1024, - 'upload_input_data': [], - 'copy_input_data': [], - 'link_input_data': [], - 'move_input_data': [], - 'copy_output_data': [], - 'move_output_data': [], - 'download_output_data': [], - 'stdout': 'out', - 'stderr': 'err', - 'exit_code': 555, - 'path': 'here/it/is', - 'tags': {'colocate': 'task.0010'}, - 'parent_stage': {'uid': 's1', 'name': 'stage1'}, - 'parent_pipeline': {'uid': 'p1', 'name': 'pipe1'}} + d = {'uid': 're.Task.0000', + 'name': 't1', + 'state': states.DONE, + 'state_history': [states.INITIAL, states.DONE], + 'pre_exec': [], + 'executable': 'sleep', + 'arguments': [], + 'post_exec': [], + 'cpu_reqs': {'processes': 1, + 'process_type': None, + 'threads_per_process': 1, + 'thread_type': None + }, + 'gpu_reqs': {'processes': 0, + 'process_type': None, + 
'threads_per_process': 0, + 'thread_type': None + }, + 'lfs_per_process': 1024, + 'upload_input_data': [], + 'copy_input_data': [], + 'link_input_data': [], + 'move_input_data': [], + 'copy_output_data': [], + 'move_output_data': [], + 'download_output_data': [], + 'stdout': 'out', + 'stderr': 'err', + 'exit_code': 555, + 'path': 'here/it/is', + 'tags': {'colocate': 'task.0010'}, + 'parent_stage': {'uid': 's1', 'name': 'stage1'}, + 'parent_pipeline': {'uid': 'p1', 'name': 'pipe1'}} t = Task() t.from_dict(d) From 97b88182ac54a58047ed86d2deb42f098ebaf4ac Mon Sep 17 00:00:00 2001 From: Andre Merzky Date: Thu, 28 Jan 2021 19:11:35 +0100 Subject: [PATCH 23/42] follow renaming in RP --- .../entk/execman/rp/resource_manager.py | 4 +- src/radical/entk/execman/rp/task_manager.py | 24 ++-- src/radical/entk/execman/rp/task_processor.py | 115 +++++++++--------- 3 files changed, 73 insertions(+), 70 deletions(-) diff --git a/src/radical/entk/execman/rp/resource_manager.py b/src/radical/entk/execman/rp/resource_manager.py index 1a3fbac07..203f90ec0 100644 --- a/src/radical/entk/execman/rp/resource_manager.py +++ b/src/radical/entk/execman/rp/resource_manager.py @@ -162,8 +162,8 @@ def _pilot_state_cb(pilot, state): 'job_name' : self._job_name } - # Create Compute Pilot with validated resource description - pdesc = rp.ComputePilotDescription(pd_init) + # Create Pilot with validated resource description + pdesc = rp.PilotDescription(pd_init) self._prof.prof('rreq created', uid=self._uid) # Launch the pilot diff --git a/src/radical/entk/execman/rp/task_manager.py b/src/radical/entk/execman/rp/task_manager.py index 2f0652bed..e6f5753fe 100644 --- a/src/radical/entk/execman/rp/task_manager.py +++ b/src/radical/entk/execman/rp/task_manager.py @@ -17,7 +17,7 @@ from ...exceptions import EnTKError from ... import states, Task from ..base import Base_TaskManager -from .task_processor import create_cud_from_task, create_task_from_cu +from .task_processor import create_td_from_task, create_task_from_rp # ------------------------------------------------------------------------------ @@ -83,7 +83,7 @@ def _tmgr(self, uid, rmgr, pending_queue, completed_queue, The new thread is responsible for pushing completed tasks (returned by the RTS) to the dequeueing queue. It also - converts Tasks into CUDs and CUs into (partially described) + converts Tasks into TDs and CUs into (partially described) Tasks. This conversion is necessary since the current RTS is RADICAL Pilot. Once Tasks are recovered from a CU, they are then pushed to the completed_queue. 
At all state @@ -236,21 +236,21 @@ def load_placeholder(task, rts_uid): 'rts_uid': rts_uid} # ---------------------------------------------------------------------- - def unit_state_cb(unit, state): + def task_state_cb(rp_task, state): try: - self._log.debug('Unit %s in state %s' % (unit.uid, unit.state)) + self._log.debug('Task %s in state %s' % (rp_task.uid, rp_task.state)) - if unit.state in rp.FINAL: + if rp_task.state in rp.FINAL: task = None - task = create_task_from_cu(unit, self._prof) + task = create_task_from_rp(rp_task, self._prof) self._advance(task, 'Task', states.COMPLETED, mq_channel, '%s-cb-to-sync' % self._sid) - load_placeholder(task, unit.uid) + load_placeholder(task, task.uid) task_as_dict = json.dumps(task.to_dict()) @@ -277,9 +277,9 @@ def unit_state_cb(unit, state): mq_connection = pika.BlockingConnection(rmq_conn_params) mq_channel = mq_connection.channel() - umgr = rp.UnitManager(session=rmgr._session) + umgr = rp.TaskManager(session=rmgr._session) umgr.add_pilots(rmgr.pilot) - umgr.register_callback(unit_state_cb) + umgr.register_callback(task_state_cb) try: @@ -300,20 +300,20 @@ def unit_state_cb(unit, state): task_queue.task_done() bulk_tasks = list() - bulk_cuds = list() + bulk_tds = list() for msg in body: task = Task() task.from_dict(msg) bulk_tasks.append(task) - bulk_cuds.append(create_cud_from_task( + bulk_tds.append(create_td_from_task( task, placeholders, self._prof)) self._advance(task, 'Task', states.SUBMITTING, mq_channel, '%s-tmgr-to-sync' % self._sid) - umgr.submit_units(bulk_cuds) + umgr.submit_tasks(bulk_tds) mq_connection.close() self._log.debug('Exited RTS main loop. TMGR terminating') except KeyboardInterrupt as ex: diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index b701e5735..35fd24f51 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -1,5 +1,6 @@ import os + import radical.pilot as rp import radical.utils as ru @@ -101,6 +102,8 @@ def resolve_placeholders(path, placeholders): raise +# ------------------------------------------------------------------------------ +# def resolve_arguments(args, placeholders): resolved_args = list() @@ -413,126 +416,126 @@ def get_output_list_from_task(task, placeholders): # ------------------------------------------------------------------------------ # -def create_cud_from_task(task, placeholders, prof=None): +def create_td_from_task(task, placeholders, prof=None): """ - Purpose: Create a Compute Unit description based on the defined Task. + Purpose: Create an RP Task description based on the defined Task. 
:arguments: :task: EnTK Task object :placeholders: dictionary holding the values for placeholders - :return: ComputeUnitDescription + :return: rp.TaskDescription """ try: - logger.debug('Creating CU from Task %s' % (task.uid)) + logger.debug('Creating Task from Task %s: %s' % (task.uid, task.sandbox)) if prof: - prof.prof('cud_create', uid=task.uid) - - cud = rp.ComputeUnitDescription() - cud.name = '%s,%s,%s,%s,%s,%s' % (task.uid, task.name, - task.parent_stage['uid'], - task.parent_stage['name'], - task.parent_pipeline['uid'], - task.parent_pipeline['name']) - cud.pre_exec = task.pre_exec - cud.executable = task.executable - cud.arguments = resolve_arguments(task.arguments, placeholders) - cud.sandbox = task.sandbox - cud.post_exec = task.post_exec + prof.prof('td_create', uid=task.uid) + + td = rp.TaskDescription() + td.name = '%s,%s,%s,%s,%s,%s' % (task.uid, task.name, + task.parent_stage['uid'], + task.parent_stage['name'], + task.parent_pipeline['uid'], + task.parent_pipeline['name']) + td.pre_exec = task.pre_exec + td.executable = task.executable + td.arguments = resolve_arguments(task.arguments, placeholders) + td.sandbox = task.sandbox + td.post_exec = task.post_exec if task.tag: if task.parent_pipeline['name']: - cud.tag = resolve_tags( + td.tag = resolve_tags( tag=task.tag, parent_pipeline_name=task.parent_pipeline['name'], placeholders=placeholders) - cud.cpu_processes = task.cpu_reqs['cpu_processes'] - cud.cpu_threads = task.cpu_reqs['cpu_threads'] - cud.cpu_process_type = task.cpu_reqs['cpu_process_type'] - cud.cpu_thread_type = task.cpu_reqs['cpu_thread_type'] - cud.gpu_processes = task.gpu_reqs['gpu_processes'] - cud.gpu_threads = task.gpu_reqs['gpu_threads'] - cud.gpu_process_type = task.gpu_reqs['gpu_process_type'] - cud.gpu_thread_type = task.gpu_reqs['gpu_thread_type'] + td.cpu_processes = task.cpu_reqs['cpu_processes'] + td.cpu_threads = task.cpu_reqs['cpu_threads'] + td.cpu_process_type = task.cpu_reqs['cpu_process_type'] + td.cpu_thread_type = task.cpu_reqs['cpu_thread_type'] + td.gpu_processes = task.gpu_reqs['gpu_processes'] + td.gpu_threads = task.gpu_reqs['gpu_threads'] + td.gpu_process_type = task.gpu_reqs['gpu_process_type'] + td.gpu_thread_type = task.gpu_reqs['gpu_thread_type'] if task.lfs_per_process: - cud.lfs_per_process = task.lfs_per_process + td.lfs_per_process = task.lfs_per_process - if task.stdout: cud.stdout = task.stdout - if task.stderr: cud.stderr = task.stderr + if task.stdout: td.stdout = task.stdout + if task.stderr: td.stderr = task.stderr - cud.input_staging = get_input_list_from_task(task, placeholders) - cud.output_staging = get_output_list_from_task(task, placeholders) + td.input_staging = get_input_list_from_task(task, placeholders) + td.output_staging = get_output_list_from_task(task, placeholders) if prof: - prof.prof('cud from task - done', uid=task.uid) + prof.prof('td from task - done', uid=task.uid) - logger.debug('CU %s created from Task %s' % (cud.name, task.uid)) + logger.debug('Task %s created from Task %s' % (td.name, task.uid)) - return cud + return td except Exception: - logger.exception('CU creation failed') + logger.exception('Task creation failed') raise # ------------------------------------------------------------------------------ # -def create_task_from_cu(cu, prof=None): +def create_task_from_rp(rp_task, prof=None): """ - Purpose: Create a Task based on the Compute Unit. + Purpose: Create a Task based on the RP Task. Details: Currently, only the uid, parent_stage and parent_pipeline are - retrieved. 
The exact initial Task (that was converted to a CUD) + retrieved. The exact initial Task (that was converted to a TD) cannot be recovered as the RP API does not provide the same - attributes for a CU as for a CUD. Also, this is not required for + attributes for a Task as for a TD. Also, this is not required for the most part. TODO: Add exit code, stdout, stderr and path attributes to a Task. - These can be extracted from a CU + These can be extracted from a RP Task :arguments: - :cu: RP Compute Unit + :task: RP Task :return: Task """ try: - logger.debug('Create Task from CU %s' % cu.name) + logger.debug('Create Task from Task %s' % rp_task.name) if prof: - prof.prof('task_create', uid=cu.name.split(',')[0].strip()) + prof.prof('task_create', uid=rp_task.name.split(',')[0].strip()) task = Task() - task.uid = cu.name.split(',')[0].strip() - task.name = cu.name.split(',')[1].strip() - task.parent_stage['uid'] = cu.name.split(',')[2].strip() - task.parent_stage['name'] = cu.name.split(',')[3].strip() - task.parent_pipeline['uid'] = cu.name.split(',')[4].strip() - task.parent_pipeline['name'] = cu.name.split(',')[5].strip() - task.rts_uid = cu.uid + task.uid = rp_task.name.split(',')[0].strip() + task.name = rp_task.name.split(',')[1].strip() + task.parent_stage['uid'] = rp_task.name.split(',')[2].strip() + task.parent_stage['name'] = rp_task.name.split(',')[3].strip() + task.parent_pipeline['uid'] = rp_task.name.split(',')[4].strip() + task.parent_pipeline['name'] = rp_task.name.split(',')[5].strip() + task.rts_uid = rp_task.uid - if cu.state == rp.DONE : task.exit_code = 0 - elif cu.state in [rp.FAILED, rp.CANCELED] : task.exit_code = 1 + if rp_task.state == rp.DONE : task.exit_code = 0 + elif rp_task.state in [rp.FAILED, rp.CANCELED] : task.exit_code = 1 - task.path = ru.Url(cu.sandbox).path + task.path = ru.Url(rp_task.sandbox).path if prof: - prof.prof('task_created', uid=cu.name.split(',')[0].strip()) + prof.prof('task_created', uid=task.uid) - logger.debug('Task %s created from CU %s' % (task.uid, cu.name)) + logger.debug('Task %s created from Task %s' % (task.uid, rp_task.name)) return task except Exception: - logger.exception('Task creation from CU failed, error') + logger.exception('Task creation from RP Task failed, error') raise From c500a907c5859ea79166a5473c86b0eadc412b95 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 11:08:33 -0500 Subject: [PATCH 24/42] update codecov version --- .github/workflows/python-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 27646a622..340d9f197 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -50,7 +50,7 @@ jobs: LOC=/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration - name: Codecov - uses: codecov/codecov-action@v1.0.15 + uses: codecov/codecov-action@v1.2.1 flake8: runs-on: ubuntu-latest From fa303b510e485412ad5b030234e3b0b2f3fa22f1 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 11:32:58 -0500 Subject: [PATCH 25/42] Update .codecov.yml --- .codecov.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.codecov.yml b/.codecov.yml index 25741db10..6b2c53024 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -6,4 +6,5 @@ coverage: threshold: 1% paths: - "src" + base: "pr" patch: off From 
408edf1fae74687101b08f24417ac7aa77c445bd Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 15:47:38 -0500 Subject: [PATCH 26/42] Addressing Andre's comment --- examples/misc/lfs_tagging_dd.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/examples/misc/lfs_tagging_dd.py b/examples/misc/lfs_tagging_dd.py index 6151e456c..183115632 100755 --- a/examples/misc/lfs_tagging_dd.py +++ b/examples/misc/lfs_tagging_dd.py @@ -11,7 +11,7 @@ # virtual machine, set the variables "RMQ_HOSTNAME" and "RMQ_PORT" in the shell # environment in which you are running this script. hostname = os.environ.get('RMQ_HOSTNAME', 'localhost') -port = int(os.environ.get('RMQ_PORT', 5672)) +port = int(os.environ.get('RMQ_PORT', '5672')) username = os.environ.get('RMQ_USERNAME') password = os.environ.get('RMQ_PASSWORD') @@ -20,10 +20,11 @@ # executed. Tagged tasks should print the same hostname. def get_pipeline(n=2): ''' - We create a pipeline with 3 stages, each with 1 task. The tasks of the - second and third stage will be tagged so that they will execute on the same - compute node on which the task of the first stage executed. + We create a pipeline with three stages, each with 1 task. The tasks of the + second and third stages are tagged to execute on the same compute node on + which the first stage's task executed. ''' + pipelines = list() for x in range(n): pipeline = Pipeline() @@ -47,8 +48,7 @@ def get_pipeline(n=2): stage2 = Stage() stage2.name = 'stage2' - # task2 of stage2 depends on task1 of stage1, i.e., it cannot execute - # before task1 has completed its execution. + task2 = Task() task2.name = 'task2.%04d' % x task2.executable = 'hostname' @@ -58,7 +58,7 @@ def get_pipeline(n=2): 'cpu_thread_type': None} # We use the ID of task1 as the tag of task2. In this way, task2 will # execute on the same node on which task1 executed. - task2.tag = task1.uid + task2.tags = {'colocate': task1.uid} task2.lfs_per_process = 10 stage2.add_tasks(task2) @@ -67,8 +67,7 @@ def get_pipeline(n=2): stage3 = Stage() stage3.name = 'stage3' - # task3 of stage3 depends on task2 of stage2, i.e., it cannot execute - # before task2 has completed its execution. + task3 = Task() task3.name = 'task3.%04d' % x task3.executable = 'hostname' @@ -79,7 +78,7 @@ def get_pipeline(n=2): task3.lfs_per_process = 10 # We use the ID of task1 as the tag of task3. In this way, task3 will # execute on the same node on which task1 and task2 executed. 
- task3.tag = task1.uid + task3.tag = {'colocate': task1.uid} stage3.add_tasks(task3) pipeline.add_stages(stage3) From db39ffcfeaa18f24d8ca30d72aa8d4fc92f380cd Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 17:30:36 -0500 Subject: [PATCH 27/42] Update python-app.yml --- .github/workflows/python-app.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 340d9f197..f720dcc1b 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -51,6 +51,10 @@ jobs: coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration - name: Codecov uses: codecov/codecov-action@v1.2.1 + - name: Coveralls + uses: coverallsapp/github-action@v1.1.2 + with: + github-token: ${{ secrets.COVERALL_TOKEN }} flake8: runs-on: ubuntu-latest From abfcbd33c2fad77dfcd2940251c01dea007e86fb Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 17:36:36 -0500 Subject: [PATCH 28/42] will it push a report? --- .github/workflows/python-app.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index f720dcc1b..cd954c6f0 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -36,6 +36,7 @@ jobs: pip install hypothesis pip install coverage pip install codecov + pip install coveralls pip install pytest pip install pytest-xdist pip install pytest-timeout From 0f45c1716e80557ee7859d281a55d8155201af90 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 17:48:22 -0500 Subject: [PATCH 29/42] will it now? --- .github/workflows/python-app.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index cd954c6f0..f455dfe56 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -52,8 +52,8 @@ jobs: coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration - name: Codecov uses: codecov/codecov-action@v1.2.1 - - name: Coveralls - uses: coverallsapp/github-action@v1.1.2 + - name: Coveralls Python + uses: AndreMiras/coveralls-python-action@v20201129 with: github-token: ${{ secrets.COVERALL_TOKEN }} From 8d8918b74ad8cc8f4c82c8f3f94a46e79624bb91 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 8 Feb 2021 17:55:03 -0500 Subject: [PATCH 30/42] coverall support --- .github/workflows/python-app.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 340d9f197..f455dfe56 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -36,6 +36,7 @@ jobs: pip install hypothesis pip install coverage pip install codecov + pip install coveralls pip install pytest pip install pytest-xdist pip install pytest-timeout @@ -51,6 +52,10 @@ jobs: coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration - name: Codecov uses: codecov/codecov-action@v1.2.1 + - name: Coveralls Python + uses: AndreMiras/coveralls-python-action@v20201129 + with: + github-token: ${{ secrets.COVERALL_TOKEN }} flake8: runs-on: ubuntu-latest From 213720403b1d5b15740d3a3074b53f3968c32e1e Mon Sep 17 00:00:00 2001 From: 
Ioannis Paraskevakos Date: Tue, 9 Feb 2021 09:54:47 -0500 Subject: [PATCH 31/42] Update .codecov.yml --- .codecov.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.codecov.yml b/.codecov.yml index 6b2c53024..83c8cdbb7 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,4 +1,5 @@ coverage: + require_ci_to_pass: no status: project: default: From cdc76e398c8430c5c65cf045f0d8c9ee84098c92 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 9 Feb 2021 10:20:09 -0500 Subject: [PATCH 32/42] coveralls update --- .github/workflows/python-app.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index f455dfe56..d3dd9031b 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -53,7 +53,7 @@ jobs: - name: Codecov uses: codecov/codecov-action@v1.2.1 - name: Coveralls Python - uses: AndreMiras/coveralls-python-action@v20201129 + uses: AndreMiras/coveralls-python-action@develop with: github-token: ${{ secrets.COVERALL_TOKEN }} From a649c310d9125df9f01db8e7c6fd9609d111cb5e Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 9 Feb 2021 10:28:25 -0500 Subject: [PATCH 33/42] Codecov using bash uploader --- .github/workflows/python-app.yml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index d3dd9031b..0b9e44520 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -47,11 +47,16 @@ jobs: RMQ_PORT: ${{ job.services.rabbitmq.ports[5672] }} # get randomly assigned published port RMQ_USERNAME: guest RMQ_PASSWORD: guest + LOC: /opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages run: | - LOC=/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages - coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration + coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration - name: Codecov - uses: codecov/codecov-action@v1.2.1 + env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + run: | + coverage combine; \ + coverage xml; \ + coverage report; \ + curl -s https://codecov.io/bash | bash - name: Coveralls Python uses: AndreMiras/coveralls-python-action@develop with: From adcd25dfa2c411d85c9c5eceac7a026c915c321a Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 9 Feb 2021 10:32:19 -0500 Subject: [PATCH 34/42] fixing GitHub Action syntax error --- .github/workflows/python-app.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 0b9e44520..15062c8e6 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -51,7 +51,8 @@ jobs: run: | coverage run --include=$LOC/radical/entk/* -m pytest -ra --timeout=600 -vvv --showlocals tests/test_component tests/test_utils/ tests/test_integration - name: Codecov - env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} run: | coverage combine; \ coverage xml; \ From 9c693725fd9fab7c43b8bfff578c836add009c5b Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 9 Feb 2021 12:27:55 -0500 Subject: [PATCH 35/42] fixes for tests and linters --- .github/workflows/python-app.yml | 2 +- src/radical/entk/__init__.py | 4 +++- 
src/radical/entk/execman/mock/task_manager.py | 6 ++++-- src/radical/entk/execman/rp/task_manager.py | 3 +-- tests/test_component/test_amgr.py | 2 +- tests/test_component/test_tmgr_rp.py | 1 - tests/test_component/test_tproc_rp.py | 20 +++++++++---------- 7 files changed, 20 insertions(+), 18 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 340d9f197..0b750001d 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -39,7 +39,7 @@ jobs: pip install pytest pip install pytest-xdist pip install pytest-timeout - pip install timeout_decorator + pip install timeout-decorator - name: Test with pytest env: RMQ_HOSTNAME: localhost diff --git a/src/radical/entk/__init__.py b/src/radical/entk/__init__.py index ade36b191..8925f85d9 100644 --- a/src/radical/entk/__init__.py +++ b/src/radical/entk/__init__.py @@ -1,4 +1,4 @@ - +# pylint: disable=unused-argument # ------------------------------------------------------------------------------ # from .pipeline import Pipeline @@ -17,10 +17,12 @@ import requests as req from packaging.version import parse as parse_version + def custom_formatwarning(msg, *args, **kwargs): # ignore everything except the message return str(msg) + '\n' + warnings.formatwarning = custom_formatwarning version_short, version_detail, version_base, version_branch, \ diff --git a/src/radical/entk/execman/mock/task_manager.py b/src/radical/entk/execman/mock/task_manager.py index a94d8266d..4f7b65214 100644 --- a/src/radical/entk/execman/mock/task_manager.py +++ b/src/radical/entk/execman/mock/task_manager.py @@ -244,13 +244,15 @@ def _process_tasks(self, task_queue, rmgr, rmq_conn_params): bulk_tasks.append(task) self._advance(task, 'Task', states.SUBMITTING, - mq_channel, '%s-tmgr-to-sync' % self._sid) + mq_channel, rmq_conn_params, + '%s-tmgr-to-sync' % self._sid) # this mock RTS immmedialtely completes all tasks for task in bulk_tasks: self._advance(task, 'Task', states.COMPLETED, - mq_channel, '%s-cb-to-sync' % self._sid) + mq_channel, rmq_conn_params, + '%s-cb-to-sync' % self._sid) task_as_dict = json.dumps(task.to_dict()) mq_channel.basic_publish( diff --git a/src/radical/entk/execman/rp/task_manager.py b/src/radical/entk/execman/rp/task_manager.py index 0b572555d..077bd79bd 100644 --- a/src/radical/entk/execman/rp/task_manager.py +++ b/src/radical/entk/execman/rp/task_manager.py @@ -246,7 +246,7 @@ def load_placeholder(task, rts_uid): 'rts_uid': rts_uid} # ---------------------------------------------------------------------- - def task_state_cb(rp_task, state): + def task_state_cb(rp_task, state, cb_data): try: @@ -364,7 +364,6 @@ def start_manager(self): self._log.warn('tmgr process already running!') return - try: self._prof.prof('creating tmgr process', uid=self._uid) diff --git a/tests/test_component/test_amgr.py b/tests/test_component/test_amgr.py index ccd5c4970..270c94450 100755 --- a/tests/test_component/test_amgr.py +++ b/tests/test_component/test_amgr.py @@ -184,4 +184,4 @@ def test_run_workflow(self, mocked_init, mocked_ResourceManager, appman._sync_thread.is_alive = mock.MagicMock(return_value=True) with self.assertRaises(ree.EnTKError): - appman._run_workflow() \ No newline at end of file + appman._run_workflow() diff --git a/tests/test_component/test_tmgr_rp.py b/tests/test_component/test_tmgr_rp.py index 7a22d50ef..c1fcca7ab 100644 --- a/tests/test_component/test_tmgr_rp.py +++ b/tests/test_component/test_tmgr_rp.py @@ -71,7 +71,6 @@ def test_start_manager(self, mocked_init, 
mocked_Logger, mocked_Profiler): tmgr._completed_queue = ['completed_queues'] tmgr._tmgr = _tmgr_side_effect - tmgr._tmgr_terminate = None tmgr._tmgr_process = None tmgr.start_manager() diff --git a/tests/test_component/test_tproc_rp.py b/tests/test_component/test_tproc_rp.py index 6aa7fba43..b12869a00 100755 --- a/tests/test_component/test_tproc_rp.py +++ b/tests/test_component/test_tproc_rp.py @@ -12,8 +12,8 @@ from radical.entk.execman.rp.task_processor import resolve_tags from radical.entk.execman.rp.task_processor import get_input_list_from_task from radical.entk.execman.rp.task_processor import get_output_list_from_task -from radical.entk.execman.rp.task_processor import create_cud_from_task -from radical.entk.execman.rp.task_processor import create_task_from_cu +from radical.entk.execman.rp.task_processor import create_td_from_task +from radical.entk.execman.rp.task_processor import create_task_from_rp try: import mock @@ -131,7 +131,7 @@ def test_resolve_tags(self, mocked_Logger): @mock.patch.object(radical.entk.execman.rp.task_processor, 'get_output_list_from_task', return_value='outputs') @mock.patch.object(radical.entk.execman.rp.task_processor, 'resolve_arguments', return_value='test_args') @mock.patch.object(radical.entk.execman.rp.task_processor, 'get_input_list_from_task', return_value='inputs') - def test_create_cud_from_task(self, mocked_ComputeUnitDescription, + def test_create_td_from_task(self, mocked_ComputeUnitDescription, mocked_Logger, mocked_get_input_list_from_task, mocked_get_output_list_from_task, mocked_resolve_arguments): @@ -183,7 +183,7 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, task.stderr = 'stderr' task.stdout = 'stdout' - test_cud = create_cud_from_task(task, None) + test_cud = create_td_from_task(task, None) self.assertEqual(test_cud.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') self.assertEqual(test_cud.pre_exec, 'post_exec') self.assertEqual(test_cud.executable, '/bin/date') @@ -209,7 +209,7 @@ def test_create_cud_from_task(self, mocked_ComputeUnitDescription, # @mock.patch('radical.entk.Task') @mock.patch('radical.utils.Logger') - def test_create_task_from_cu(self, mocked_Task, mocked_Logger): + def test_create_task_from_rp(self, mocked_Task, mocked_Logger): test_cud = mock.Mock() test_cud.name = 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000' test_cud.pre_exec = 'post_exec' @@ -241,7 +241,7 @@ def test_create_task_from_cu(self, mocked_Task, mocked_Logger): mocked_Task.path = None mocked_Task.rts_uid = None - task = create_task_from_cu(test_cud, None) + task = create_task_from_rp(test_cud, None) self.assertEqual(task.uid, 'task.0000') self.assertEqual(task.name, 'task.0000') self.assertEqual(task.parent_stage, {'uid': 'stage.0000', 'name': 'stage.0000'}) @@ -286,15 +286,15 @@ def test_issue_271(self, mocked_Task, mocked_Logger): mocked_Task.path = None mocked_Task.rts_uid = None - task = create_task_from_cu(test_cud, None) + task = create_task_from_rp(test_cud, None) self.assertEqual(task.exit_code, 0) test_cud.state = 'FAILED' - task = create_task_from_cu(test_cud, None) + task = create_task_from_rp(test_cud, None) self.assertEqual(task.exit_code, 1) test_cud.state = 'EXECUTING' - task = create_task_from_cu(test_cud, None) + task = create_task_from_rp(test_cud, None) self.assertIsNone(task.exit_code) # ------------------------------------------------------------------------------ @@ -479,7 +479,7 @@ def test_issue_259(self, mocked_ComputeUnitDescription, task.copy_output_data = 
['test_file > $SHARED/test_file'] task.move_output_data = ['test_file > $SHARED/test_file'] - test_cud = create_cud_from_task(task, placeholders) + test_cud = create_td_from_task(task, placeholders) self.assertEqual(test_cud.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') self.assertEqual(test_cud.pre_exec, 'post_exec') self.assertEqual(test_cud.executable, '/bin/date') From ed7ff0e48ba38336d458d1dee8e0077fef9040b8 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 9 Feb 2021 14:29:08 -0500 Subject: [PATCH 36/42] some additional refactoring --- src/radical/entk/execman/rp/task_manager.py | 45 +++++++++---------- src/radical/entk/execman/rp/task_processor.py | 7 --- tests/test_component/test_tproc_rp.py | 3 +- 3 files changed, 22 insertions(+), 33 deletions(-) diff --git a/src/radical/entk/execman/rp/task_manager.py b/src/radical/entk/execman/rp/task_manager.py index 0421d12df..f7cad1099 100644 --- a/src/radical/entk/execman/rp/task_manager.py +++ b/src/radical/entk/execman/rp/task_manager.py @@ -57,8 +57,6 @@ def __init__(self, sid, pending_queue, completed_queue, rmgr, rts='radical.pilot') self._rts_runner = None - self._rmq_ping_interval = int(os.getenv('RMQ_PING_INTERVAL', '10')) - self._log.info('Created task manager object: %s', self._uid) self._prof.prof('tmgr_create', uid=self._uid) @@ -121,9 +119,9 @@ def heartbeat_response(mq_channel, conn_params): nprops = pika.BasicProperties( correlation_id=props.correlation_id) channel.basic_publish(exchange='', - routing_key=self._hb_response_q, - properties=nprops, - body='response') + routing_key=self._hb_response_q, + properties=nprops, + body='response') except (pika.exceptions.ConnectionClosed, pika.exceptions.ChannelClosed): connection = pika.BlockingConnection(conn_params) @@ -131,9 +129,9 @@ def heartbeat_response(mq_channel, conn_params): nprops = pika.BasicProperties( correlation_id=props.correlation_id) channel.basic_publish(exchange='', - routing_key=self._hb_response_q, - properties=nprops, - body='response') + routing_key=self._hb_response_q, + properties=nprops, + body='response') self._log.info('Sent heartbeat response') @@ -253,11 +251,11 @@ def task_state_cb(rp_task, state, cb_data): channel = cb_data['channel'] conn_params = cb_data['params'] - self._log.debug('Task %s in state %s' % (rp_task.uid, rp_task.state)) + self._log.debug('Task %s in state %s' % (rp_task.uid, + rp_task.state)) if rp_task.state in rp.FINAL: - task = None task = create_task_from_rp(rp_task, self._prof) self._advance(task, 'Task', states.COMPLETED, @@ -269,16 +267,15 @@ def task_state_cb(rp_task, state, cb_data): task_as_dict = json.dumps(task.to_dict()) try: channel.basic_publish(exchange='', - routing_key='%s-completedq-1' % self._sid, - body=task_as_dict) + routing_key='%s-completedq-1' % self._sid, + body=task_as_dict) except (pika.exceptions.ConnectionClosed, pika.exceptions.ChannelClosed): connection = pika.BlockingConnection(conn_params) channel = connection.channel() channel.basic_publish(exchange='', - routing_key='%s-completedq-1' % self._sid, - body=task_as_dict) - + routing_key='%s-completedq-1' % self._sid, + body=task_as_dict) self._log.info('Pushed task %s with state %s to completed ' 'queue %s-completedq-1', @@ -287,7 +284,7 @@ def task_state_cb(rp_task, state, cb_data): except KeyboardInterrupt as ex: self._log.exception('Execution interrupted (probably by Ctrl+C)' ' exit callback thread gracefully...') - raise KeyboardInterrupt from ex + raise KeyboardInterrupt(ex) from ex except Exception as ex: 
self._log.exception('Error in RP callback thread: %s', ex) @@ -298,11 +295,11 @@ def task_state_cb(rp_task, state, cb_data): mq_connection = pika.BlockingConnection(rmq_conn_params) mq_channel = mq_connection.channel() - umgr = rp.TaskManager(session=rmgr._session) - umgr.add_pilots(rmgr.pilot) - umgr.register_callback(task_state_cb, - cb_data={'channel': mq_channel, - 'params' : rmq_conn_params}) + rp_tmgr = rp.TaskManager(session=rmgr._session) + rp_tmgr.add_pilots(rmgr.pilot) + rp_tmgr.register_callback(task_state_cb, + cb_data={'channel': mq_channel, + 'params' : rmq_conn_params}) try: @@ -322,14 +319,12 @@ def task_state_cb(rp_task, state, cb_data): task_queue.task_done() - bulk_tasks = list() bulk_tds = list() for msg in body: task = Task() task.from_dict(msg) - bulk_tasks.append(task) load_placeholder(task) bulk_tds.append(create_td_from_task( @@ -339,7 +334,7 @@ def task_state_cb(rp_task, state, cb_data): mq_channel, rmq_conn_params, '%s-tmgr-to-sync' % self._sid) - umgr.submit_tasks(bulk_tds) + rp_tmgr.submit_tasks(bulk_tds) mq_connection.close() self._log.debug('Exited RTS main loop. TMGR terminating') except KeyboardInterrupt as ex: @@ -351,7 +346,7 @@ def task_state_cb(rp_task, state, cb_data): raise EnTKError(ex) from ex finally: - umgr.close() + rp_tmgr.close() # -------------------------------------------------------------------------- diff --git a/src/radical/entk/execman/rp/task_processor.py b/src/radical/entk/execman/rp/task_processor.py index dd4ee4cf0..2c89cc5c7 100644 --- a/src/radical/entk/execman/rp/task_processor.py +++ b/src/radical/entk/execman/rp/task_processor.py @@ -452,13 +452,6 @@ def create_td_from_task(task, placeholders, prof=None): td.sandbox = task.sandbox td.post_exec = task.post_exec - if task.tag: - if task.parent_pipeline['name']: - td.tag = resolve_tags( - tag=task.tag, - parent_pipeline_name=task.parent_pipeline['name'], - placeholders=placeholders) - if task.parent_pipeline['uid']: td.tag = resolve_tags(task=task, parent_pipeline_name=task.parent_pipeline['uid'], placeholders=placeholders) diff --git a/tests/test_component/test_tproc_rp.py b/tests/test_component/test_tproc_rp.py index 40d8c87d0..6ea3ccfd1 100755 --- a/tests/test_component/test_tproc_rp.py +++ b/tests/test_component/test_tproc_rp.py @@ -142,6 +142,7 @@ def test_resolve_tags(self, mocked_Logger): def test_create_td_from_task(self, mocked_TaskDescription, mocked_Logger, mocked_get_input_list_from_task, mocked_get_output_list_from_task, + mocked_resolve_arguments, mocked_resolve_tags): mocked_TaskDescription.name = None mocked_TaskDescription.pre_exec = None @@ -191,7 +192,7 @@ def test_create_td_from_task(self, mocked_TaskDescription, task.stdout = 'stdout' test_td = create_td_from_task(task, None) - self.assertEqual(test_td.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') + self.assertEqual(test_td.name, 'task.0000,task.name,stage.0000,stage.0000,pipe.0000,pipe.0000') self.assertEqual(test_td.pre_exec, 'post_exec') self.assertEqual(test_td.executable, '/bin/date') self.assertEqual(test_td.arguments, 'test_args') From 3d988d938bfe12c314c1203e9e118921e25ac2d3 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Tue, 9 Feb 2021 14:45:03 -0600 Subject: [PATCH 37/42] Test and linting passes --- src/radical/entk/execman/rp/task_manager.py | 1 - tests/test_component/test_task.py | 49 ++++++++--------- tests/test_component/test_tmgr_rp.py | 1 - .../test_tmgr_rp/test_tproc_rp.py | 52 +++++++++---------- 4 files changed, 51 insertions(+), 52 deletions(-) diff --git 
a/src/radical/entk/execman/rp/task_manager.py b/src/radical/entk/execman/rp/task_manager.py index f7cad1099..0a1e09f25 100644 --- a/src/radical/entk/execman/rp/task_manager.py +++ b/src/radical/entk/execman/rp/task_manager.py @@ -4,7 +4,6 @@ __license__ = "MIT" -import os import json import pika import queue diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 78b4051d0..8781158e4 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py @@ -88,10 +88,10 @@ def test_cpu_reqs(self, mocked_generate_id, mocked_init): 'process_type' : None, 'threads_per_process' : 1, 'thread_type' : 'OpenMP'} - task.cpu_reqs = {'processes' : 2, - 'process_type' : None, - 'threads_per_process' : 1, - 'thread_type' : 'OpenMP'} + task.cpu_reqs = {'cpu_processes' : 2, + 'cpu_process_type' : None, + 'cpu_threads' : 1, + 'cpu_thread_type' : 'OpenMP'} self.assertEqual(task._cpu_reqs, cpu_reqs) self.assertEqual(task.cpu_reqs, {'cpu_processes' : 2, @@ -146,10 +146,10 @@ def test_gpu_reqs(self, mocked_generate_id, mocked_init): 'process_type' : None, 'threads_per_process' : 1, 'thread_type' : 'OpenMP'} - task.gpu_reqs = {'processes' : 2, - 'process_type' : None, - 'threads_per_process' : 1, - 'thread_type' : 'OpenMP'} + task.gpu_reqs = {'gpu_processes' : 2, + 'gpu_process_type' : None, + 'gpu_threads' : 1, + 'gpu_thread_type' : 'OpenMP'} self.assertEqual(task._gpu_reqs, gpu_reqs) self.assertEqual(task.gpu_reqs, {'gpu_processes' : 2, @@ -231,14 +231,14 @@ def test_dict_to_task(self): 'pre_exec' : ['bar'], 'executable': 'buz', 'arguments' : ['baz', 'fiz'], - 'cpu_reqs' : {'processes' : 1, - 'process_type' : None, - 'threads_per_process': 1, - 'thread_type' : None}, - 'gpu_reqs' : {'processes' : 0, - 'process_type' : None, - 'threads_per_process': 0, - 'thread_type' : None}} + 'cpu_reqs' : {'cpu_processes' : 1, + 'cpu_process_type': None, + 'cpu_threads' : 1, + 'cpu_thread_type' : None}, + 'gpu_reqs' : {'gpu_processes' : 0, + 'gpu_process_type': None, + 'gpu_threads' : 0, + 'gpu_thread_type' : None}} t = Task(from_dict=d) for k,v in d.items(): @@ -249,14 +249,14 @@ def test_dict_to_task(self): 'pre_exec' : ['bar'], 'executable': 'buz', 'arguments' : ['baz', 'fiz'], - 'cpu_reqs' : {'processes' : 1, - 'process_type' : None, - 'threads_per_process': 1, - 'thread_type' : None}, - 'gpu_reqs' : {'processes' : 0, - 'process_type' : None, - 'threads_per_process': 0, - 'thread_type' : None}} + 'cpu_reqs' : {'cpu_processes' : 1, + 'cpu_process_type': None, + 'cpu_threads' : 1, + 'cpu_thread_type' : None}, + 'gpu_reqs' : {'gpu_processes' : 0, + 'gpu_process_type': None, + 'gpu_threads' : 0, + 'gpu_thread_type' : None}} t = Task() t.from_dict(d) @@ -391,6 +391,7 @@ def test_tag(self, mocked_init): task.tag = 'task.tag' self.assertEqual(task._tags, {'colocate': 'task.tag'}) + with self.assertRaises(ree.TypeError): task.tag = {'colocate':'tasks'} diff --git a/tests/test_component/test_tmgr_rp.py b/tests/test_component/test_tmgr_rp.py index c1fcca7ab..f8e0efd84 100644 --- a/tests/test_component/test_tmgr_rp.py +++ b/tests/test_component/test_tmgr_rp.py @@ -49,7 +49,6 @@ def test_init(self, mocked_generate_id, mocked_getcwd, mocked_Logger, tmgr = RPTmgr('test_tmgr', ['pending_queues'], ['completed_queues'], rmgr, rmq_params) self.assertIsNone(tmgr._rts_runner) - self.assertEqual(tmgr._rmq_ping_interval, 10) # -------------------------------------------------------------------------- # diff --git a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py 
b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py index a0549d855..fadf02230 100755 --- a/tests/test_integration/test_tmgr_rp/test_tproc_rp.py +++ b/tests/test_integration/test_tmgr_rp/test_tproc_rp.py @@ -3,7 +3,7 @@ from unittest import TestCase -from radical.entk.execman.rp.task_processor import create_cud_from_task +from radical.entk.execman.rp.task_processor import create_td_from_task from radical.entk import Task @@ -11,7 +11,7 @@ class TestBase(TestCase): # ------------------------------------------------------------------------------ # - def test_create_cud_from_task(self): + def test_create_td_from_task(self): pipeline_name = 'pipe.0000' stage_name = 'stage.0000' @@ -58,29 +58,29 @@ def test_create_cud_from_task(self): task.stderr = 'stderr' task.stdout = 'stdout' - test_cud = create_cud_from_task(task, placeholders) - self.assertEqual(test_cud.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') - self.assertEqual(test_cud.pre_exec, ['post_exec']) - self.assertEqual(test_cud.executable, '/bin/date') - self.assertEqual(test_cud.arguments, ['test_args']) - self.assertEqual(test_cud.sandbox, 'unit.0000') - self.assertEqual(test_cud.post_exec, ['']) - self.assertEqual(test_cud.cpu_processes, 5) - self.assertEqual(test_cud.cpu_threads, 6) - self.assertEqual(test_cud.cpu_process_type, 'MPI') - self.assertIsNone(test_cud.cpu_thread_type) - self.assertEqual(test_cud.gpu_processes, 5) - self.assertEqual(test_cud.gpu_threads, 6) - self.assertEqual(test_cud.gpu_process_type, None) - self.assertIsNone(test_cud.gpu_thread_type) - self.assertEqual(test_cud.lfs_per_process, 235) - self.assertEqual(test_cud.stdout, 'stdout') - self.assertEqual(test_cud.stderr, 'stderr') - self.assertEqual(test_cud.input_staging, []) - self.assertEqual(test_cud.output_staging, []) - self.assertEqual(test_cud.tag, 'task.0000') + test_td = create_td_from_task(task, placeholders) + self.assertEqual(test_td.name, 'task.0000,task.0000,stage.0000,stage.0000,pipe.0000,pipe.0000') + self.assertEqual(test_td.pre_exec, ['post_exec']) + self.assertEqual(test_td.executable, '/bin/date') + self.assertEqual(test_td.arguments, ['test_args']) + self.assertEqual(test_td.sandbox, 'unit.0000') + self.assertEqual(test_td.post_exec, ['']) + self.assertEqual(test_td.cpu_processes, 5) + self.assertEqual(test_td.cpu_threads, 6) + self.assertEqual(test_td.cpu_process_type, 'MPI') + self.assertIsNone(test_td.cpu_thread_type) + self.assertEqual(test_td.gpu_processes, 5) + self.assertEqual(test_td.gpu_threads, 6) + self.assertEqual(test_td.gpu_process_type, None) + self.assertIsNone(test_td.gpu_thread_type) + self.assertEqual(test_td.lfs_per_process, 235) + self.assertEqual(test_td.stdout, 'stdout') + self.assertEqual(test_td.stderr, 'stderr') + self.assertEqual(test_td.input_staging, []) + self.assertEqual(test_td.output_staging, []) + self.assertEqual(test_td.tag, 'task.0000') - task.tag = 'task.0001' - test_cud = create_cud_from_task(task, placeholders) - self.assertEqual(test_cud.tag, 'task.0003') + task.tags = {'colocate': 'task.0001'} + test_td = create_td_from_task(task, placeholders) + self.assertEqual(test_td.tag, 'task.0003') From 1fb376bc6541ec520d510fb619b480df31be744f Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Wed, 10 Feb 2021 14:54:10 -0500 Subject: [PATCH 38/42] fixing python publish --- .github/workflows/python-publish.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 
a53913f8a..8057c675c 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -19,7 +19,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v2 with: - python-version: '3.x' + python-version: '3.6' - name: Install dependencies run: | python -m pip install --upgrade pip @@ -29,5 +29,5 @@ jobs: TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} run: | - python setup.py sdist bdist_wheel + python setup.py sdist twine upload dist/* From d6bc36ed173eb8f039d08ed25afe70f4f7f512a9 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 15 Feb 2021 09:38:09 -0500 Subject: [PATCH 39/42] updating codecov.yml --- .codecov.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.codecov.yml b/.codecov.yml index 83c8cdbb7..6b2c53024 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,5 +1,4 @@ coverage: - require_ci_to_pass: no status: project: default: From d1135f0ee5a10f68e648900b1f8fe49b5dd8a4b6 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 15 Feb 2021 09:38:35 -0500 Subject: [PATCH 40/42] removing coveralls --- .github/workflows/python-app.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 6e8fa4c52..dfa0e9513 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -58,10 +58,6 @@ jobs: coverage xml; \ coverage report; \ curl -s https://codecov.io/bash | bash - - name: Coveralls Python - uses: AndreMiras/coveralls-python-action@develop - with: - github-token: ${{ secrets.COVERALL_TOKEN }} flake8: runs-on: ubuntu-latest From dc5f0471bf3c337cebf4b0881e9024933a26a125 Mon Sep 17 00:00:00 2001 From: Ioannis Paraskevakos Date: Mon, 15 Feb 2021 17:17:43 -0500 Subject: [PATCH 41/42] naming warning update to exception --- src/radical/entk/pipeline.py | 14 ++++------ src/radical/entk/stage.py | 14 ++++------ src/radical/entk/task.py | 43 ++++++++++++++++++++++--------- tests/test_component/test_task.py | 2 +- 4 files changed, 42 insertions(+), 31 deletions(-) diff --git a/src/radical/entk/pipeline.py b/src/radical/entk/pipeline.py index 2f5193882..accba18ec 100644 --- a/src/radical/entk/pipeline.py +++ b/src/radical/entk/pipeline.py @@ -3,7 +3,6 @@ __license__ = 'MIT' import threading -import warnings import radical.utils as ru @@ -166,11 +165,10 @@ def name(self, value): actual_type=type(value)) if any(symbol in value for symbol in invalid_symbols): - warnings.warn(NAME_MESSAGE, DeprecationWarning) - # raise ree.ValueError(obj=self._uid, - # attribute='name', - # actual_value=value, - # expected_value=NAME_MESSAGE) + raise ree.ValueError(obj=self._uid, + attribute='name', + actual_value=value, + expected_value=NAME_MESSAGE) self._name = value @@ -266,9 +264,7 @@ def from_dict(self, d): raise ree.ValueError(obj=self._uid, attribute='name', actual_value=d['name'], - expected_value="Valid object names can " + - "contains letters, numbers and '.'. Any " - "other character is not allowed") + expected_value=NAME_MESSAGE) self._name = d['name'] diff --git a/src/radical/entk/stage.py b/src/radical/entk/stage.py index 918bd85c1..3e4dda831 100644 --- a/src/radical/entk/stage.py +++ b/src/radical/entk/stage.py @@ -4,7 +4,6 @@ import radical.utils as ru -import warnings from string import punctuation from . 
import exceptions as ree @@ -175,11 +174,10 @@ def name(self, value): actual_type=type(value)) if any(symbol in value for symbol in invalid_symbols): - warnings.warn(NAME_MESSAGE, DeprecationWarning) - # raise ree.ValueError(obj=self._uid, - # attribute='name', - # actual_value=value, - # expected_value=NAME_MESSAGE) + raise ree.ValueError(obj=self._uid, + attribute='name', + actual_value=value, + expected_value=NAME_MESSAGE) self._name = value @tasks.setter @@ -285,9 +283,7 @@ def from_dict(self, d): raise ree.ValueError(obj=self._uid, attribute='name', actual_value=d['name'], - expected_value="Valid object names can " + - "contains letters, numbers and '.'. Any " - "other character is not allowed") + expected_value=NAME_MESSAGE) self._name = d['name'] if 'state' in d: diff --git a/src/radical/entk/task.py b/src/radical/entk/task.py index 263a033ca..e397a26af 100644 --- a/src/radical/entk/task.py +++ b/src/radical/entk/task.py @@ -11,9 +11,9 @@ from . import exceptions as ree from . import states as res -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=724) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=782) -warnings.simplefilter(action="once", category=DeprecationWarning, lineno=950) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=728) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=786) +warnings.simplefilter(action="once", category=DeprecationWarning, lineno=954) # ------------------------------------------------------------------------------ @@ -592,11 +592,16 @@ def rts_uid(self): # @uid.setter def uid(self, value): - + invalid_symbols = punctuation.replace('.','') if not isinstance(value, str): raise ree.TypeError(expected_type=str, actual_type=type(value)) + if any(symbol in value for symbol in invalid_symbols): + raise ree.ValueError(obj=self._uid, + attribute='uid', + actual_value=value, + expected_value=NAME_MESSAGE) self._uid = value @rts_uid.setter @@ -616,11 +621,10 @@ def name(self, value): actual_type=type(value)) if any(symbol in value for symbol in invalid_symbols): - warnings.warn(NAME_MESSAGE, DeprecationWarning, stacklevel=2) - # raise ree.ValueError(obj=self._uid, - # attribute='name', - # actual_value=value, - # expected_value=NAME_MESSAGE) + raise ree.ValueError(obj=self._uid, + attribute='name', + actual_value=value, + expected_value=NAME_MESSAGE) self._name = value @@ -1051,13 +1055,28 @@ def from_dict(self, d): :return: None ''' + invalid_symbols = punctuation.replace('.','') # FIXME: uid, name, state and state_history to use setter type checks - if d.get('uid') is not None: self._uid = d['uid'] - if d.get('name') is not None: self._name = d['name'] + if d.get('uid') is not None: + if any(symbol in d['uid'] for symbol in invalid_symbols): + raise ree.ValueError(obj=self._uid, + attribute='uid', + actual_value=d['uid'], + expected_value=NAME_MESSAGE) + else: + self._uid = d['uid'] + + if d.get('name') is not None: + if any(symbol in d['name'] for symbol in invalid_symbols): + raise ree.ValueError(obj=self._uid, + attribute='name', + actual_value=d['name'], + expected_value=NAME_MESSAGE) + else: + self._name = d['name'] if 'state' not in d: self._state = res.INITIAL - else: # avoid adding state to state history, thus do typecheck here if not isinstance(d['state'], str): diff --git a/tests/test_component/test_task.py b/tests/test_component/test_task.py index 8781158e4..11e327107 100755 --- a/tests/test_component/test_task.py +++ b/tests/test_component/test_task.py 
@@ -298,7 +298,7 @@ def test_name(self, mocked_init): with self.assertRaises(ree.TypeError): task.name = 0 - with self.assertWarns(DeprecationWarning): + with self.assertRaises(ree.ValueError): task.name = 'task,0000' From c1cc422d698e9dfffe43dfb1795d5ff783ee6718 Mon Sep 17 00:00:00 2001 From: Hyungro Lee Date: Tue, 16 Feb 2021 11:03:15 -0500 Subject: [PATCH 42/42] version bump, feb 2021 --- VERSION | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/VERSION b/VERSION index 41336a1c0..dc1e644a1 100755 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.5.12 +1.6.0
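
Taken together, the API changes in this series alter how a Task is described. The sketch below is assembled from the updated unit tests in the patches above; it is an illustration rather than part of the patch series, and the task names 'sim.0000'/'sim.0001' are hypothetical. It shows the post-1.6.0 form: cpu_reqs/gpu_reqs use the cpu_*/gpu_* key names, tags takes a dictionary, and names containing punctuation other than '.' now raise ree.ValueError instead of a deprecation warning.

    from radical.entk import Task

    t = Task()
    t.name       = 'sim.0001'     # punctuation other than '.' now raises ree.ValueError
    t.executable = '/bin/date'

    # resource requirements now use the cpu_*/gpu_* key names
    t.cpu_reqs = {'cpu_processes'   : 2,
                  'cpu_process_type': None,
                  'cpu_threads'     : 1,
                  'cpu_thread_type' : 'OpenMP'}
    t.gpu_reqs = {'gpu_processes'   : 0,
                  'gpu_process_type': None,
                  'gpu_threads'     : 0,
                  'gpu_thread_type' : None}

    # tags is a dict; the deprecated string form `t.tag = 'sim.0000'` is still
    # accepted and gets translated into the same structure
    t.tags = {'colocate': 'sim.0000'}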
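
The colocation tag itself is resolved against the placeholder dictionary that the task manager maintains (pipeline -> stage -> task name -> metadata). The following is a minimal, self-contained sketch of that lookup, assuming the placeholder layout exercised by the integration test above; resolve_colocate_tag, the dict literal, and the uids are illustrative and not the actual resolve_tags implementation.

    placeholders = {
        'pipe.0000': {
            'stage.0000': {
                'task.0001': {'rts_uid': 'task.0003'},   # rts uid assigned by RP
            }
        }
    }

    def resolve_colocate_tag(tags, parent_pipeline, placeholders):
        # a task tagged {'colocate': <name>} reuses the RTS uid of the earlier
        # task with that name, so both are scheduled onto the same resource
        name = tags['colocate']
        for stage in placeholders.get(parent_pipeline, {}).values():
            if name in stage:
                return stage[name]['rts_uid']
        raise ValueError('colocation tag %s matches no previous task' % name)

    assert resolve_colocate_tag({'colocate': 'task.0001'},
                                'pipe.0000', placeholders) == 'task.0003'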