def set_up(self):
        """Create the user, scheduler, workflow, and two linked job nodes used by the tests."""
        base_dir = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(base_dir, 'workspace')
        self.user = User.objects.create_user(
            'tethys_super', '*****@*****.**', 'pass')

        key_dir = os.path.join(os.path.dirname(base_dir), 'files', 'keys')
        self.private_key = os.path.join(key_dir, 'testkey')
        self.private_key_pass = '******'

        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path=self.private_key,
            private_key_pass=self.private_key_pass,
        )
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Resolve the multi-table-inheritance parent row created by the save above.
        self.id_value = (CondorWorkflow.objects
                         .get(name='test name')
                         .condorpyworkflow_ptr_id)
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Node_child',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Node_1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()

        # Wire the child node in through the many-to-many parent relationship.
        self.condorworkflowjobnode.parent_nodes.add(
            self.condorworkflowjobnode_child)

        saved_workflow = CondorWorkflow.objects.get(name='test name')
        self.condorbase_id = saved_workflow.condorbase_ptr_id
        self.condorpyworkflow_id = saved_workflow.condorpyworkflow_ptr_id
Example #2
0
    def set_up(self):
        """Build a workflow owned by a fresh user and attach a single job node."""
        self.workspace_dir = os.path.join(os.path.dirname(__file__), 'workspace')

        self.user = User.objects.create_user(
            'tethys_super', '*****@*****.**', 'pass')

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='foo{id}',
            workspace=self.workspace_dir,
            user=self.user,
        )
        self.condorworkflow.save()

        # A workflow node needs a CondorPyWorkflow parent record; that record is
        # created implicitly when the CondorWorkflow above is saved, so look it up.
        self.id_value = (CondorWorkflow.objects
                         .get(name='foo{id}')
                         .condorpyworkflow_ptr_id)
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Job1_NodeA',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()
Example #3
0
    def set_up(self):
        """Create a scheduler, a workflow, two job nodes, and a plain workflow node."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super',
                                             '*****@*****.**', 'pass')

        self.scheduler = CondorScheduler(name='test_scheduler',
                                         host='localhost',
                                         username='******',
                                         password='******',
                                         private_key_path='test_path',
                                         private_key_pass='******')
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record created by the save above
        # (multi-table inheritance pointer id).
        self.id_value = CondorWorkflow.objects.get(
            name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        # One node can have many children nodes
        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Job1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        # One node can have many children nodes
        self.condorworkflowjobnode_child2 = CondorWorkflowJobNode(
            name='Job2',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child2.save()

        # Base (non-job) workflow node used by node-level tests.
        self.condorworkflownode = CondorWorkflowNode(
            name='test_condorworkflownode',
            workflow=self.condorpyworkflow,
        )
        self.condorworkflownode.save()
Example #4
0
    def set_up(self):
        """Create scheduler/workflow fixtures and two sibling job nodes (Job1_a, Job1_a1)."""
        test_models_dir = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(test_models_dir, 'workspace')

        files_dir = os.path.join(os.path.dirname(test_models_dir), 'files')
        self.private_key = os.path.join(files_dir, 'keys', 'testkey')
        self.private_key_pass = '******'

        self.user = User.objects.create_user('tethys_super',
                                             '*****@*****.**', 'pass')

        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path=self.private_key,
            private_key_pass=self.private_key_pass)
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record (multi-table inheritance
        # pointer id) created implicitly by the save above.
        self.id_value = CondorWorkflow.objects.get(
            name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)
        # Replace the condorpy object with a mock so tests don't touch condor.
        self.condorpyworkflow.condor_object = mock.MagicMock()

        self.condorworkflowjobnode_a = CondorWorkflowJobNode(
            name='Job1_a',
            workflow=self.condorpyworkflow,
            _attributes={'foo': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )

        self.condorworkflowjobnode_a.save()

        self.condorworkflowjobnode_a1 = CondorWorkflowJobNode(
            name='Job1_a1',
            workflow=self.condorpyworkflow,
            _attributes={'foo': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )

        self.condorworkflowjobnode_a1.save()
    def set_up(self):
        """Build a workflow owned by a fresh user and attach a single job node."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='foo{id}',
            workspace=self.workspace_dir,
            user=self.user,
        )
        self.condorworkflow.save()

        # To have a flow Node, we need to have a Condor Job which requires a CondorBase which requires a TethysJob
        self.id_value = CondorWorkflow.objects.get(name='foo{id}').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Job1_NodeA',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()
    def set_up(self):
        """Create a scheduler, a workflow, two job nodes, and a plain workflow node."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path='test_path',
            private_key_pass='******'
        )
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record (multi-table inheritance
        # pointer id) created implicitly by the save above.
        self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        # One node can have many children nodes
        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Job1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        # One node can have many children nodes
        self.condorworkflowjobnode_child2 = CondorWorkflowJobNode(
            name='Job2',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child2.save()

        # Base (non-job) workflow node used by node-level tests.
        self.condorworkflownode = CondorWorkflowNode(
            name='test_condorworkflownode',
            workflow=self.condorpyworkflow,
        )
        self.condorworkflownode.save()
    def set_up(self):
        """Create user/scheduler/workflow fixtures plus a parent/child job-node pair."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')
        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path='test_path',
            private_key_pass='******'
        )
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record (multi-table inheritance
        # pointer id) created implicitly by the save above.
        self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Node_child',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Node_1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()

        # Django model many to many relationship add method
        self.condorworkflowjobnode.parent_nodes.add(self.condorworkflowjobnode_child)

        # Pointer ids recorded so tear_down can detect whether the workflow
        # row still exists before deleting it.
        self.condorbase_id = CondorWorkflow.objects.get(name='test name').condorbase_ptr_id
        self.condorpyworkflow_id = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
Example #8
0
class CondorPyWorkflowTest(TethysTestCase):
    """Tests for CondorPyWorkflow model properties and node loading."""

    def set_up(self):
        """Create scheduler/workflow fixtures and two sibling job nodes."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super',
                                             '*****@*****.**', 'pass')

        self.scheduler = CondorScheduler(name='test_scheduler',
                                         host='localhost',
                                         username='******',
                                         password='******',
                                         private_key_path='test_path',
                                         private_key_pass='******')
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record created by the save above
        # (multi-table inheritance pointer id).
        self.id_value = CondorWorkflow.objects.get(
            name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode_a = CondorWorkflowJobNode(
            name='Job1_a',
            workflow=self.condorpyworkflow,
            _attributes={'foo': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )

        self.condorworkflowjobnode_a.save()

        self.condorworkflowjobnode_a1 = CondorWorkflowJobNode(
            name='Job1_a1',
            workflow=self.condorpyworkflow,
            _attributes={'foo': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )

        self.condorworkflowjobnode_a1.save()

        # Django model many to many relationship add method
        # self.condorworkflowjobnode.parent_nodes.add(self.condorworkflowjobnode_job)

    def tear_down(self):
        """Delete the model instances created in set_up."""
        self.scheduler.delete()
        self.condorworkflow.delete()
        self.condorworkflowjobnode_a.delete()
        self.condorworkflowjobnode_a1.delete()

        # pass

    def test_condorpy_workflow_prop(self):
        """condorpy_workflow should expose workspace and config of the model."""
        ret = self.condorworkflow.condorpy_workflow

        # Check Result
        # NOTE(review): repr shows 'test_name' although the model name is
        # 'test name' — presumably the condorpy DAG normalizes spaces; confirm.
        self.assertEqual('<DAG: test_name>', repr(ret))
        self.assertEqual(self.workspace_dir, ret._cwd)
        self.assertEqual('test_config', ret.config)

    @mock.patch('tethys_compute.models.condor.condor_py_workflow.Workflow')
    def test_max_jobs(self, mock_wf):
        """Setting max_jobs should round-trip and rebuild the condorpy Workflow."""
        max_jobs = {'foo': 5}
        self.condorpyworkflow.name = 'test_name'
        self.condorpyworkflow.workspace = 'test_dict'
        self.condorpyworkflow.max_jobs = max_jobs

        ret = self.condorpyworkflow.max_jobs

        # Check result
        self.assertEqual(5, ret['foo'])
        mock_wf.assert_called_with(config='test_config',
                                   max_jobs={'foo': 10},
                                   name='test_name',
                                   working_directory='test_dict')

    @mock.patch(
        'tethys_compute.models.condor.condor_py_workflow.CondorPyWorkflow.condorpy_workflow'
    )
    def test_config(self, mock_cw):
        """Config setter/getter should round-trip through the condorpy workflow."""
        test_config_value = 'test_config2'

        # Mock condorpy_workflow.config = test_config_value. We have already tested condorpy_workflow.
        mock_cw.config = test_config_value

        # Setter
        self.condorpyworkflow.config = test_config_value

        # Property
        ret = self.condorpyworkflow.config

        # Check result
        self.assertEqual('test_config2', ret)

    def test_nodes(self):
        """Nodes property should return nodes that reference the parent workflow."""
        ret = self.condorworkflow.nodes
        # Check result after loading nodes
        # self.assertEqual('Node_1', ret[0].name)

        # Check result in CondorPyWorkflow object
        self.assertEqual({'foo': 10}, ret[0].workflow.max_jobs)
        self.assertEqual('test_config', ret[0].workflow.config)

    def test_load_nodes(self):
        """load_nodes should populate the condorpy workflow's node_set."""
        # Before load nodes. Set should be empty
        ret_before = self.condorworkflow.condorpy_workflow.node_set
        list_before = []

        list_after = []
        for e in ret_before:
            list_before.append(e)

        # Check list_before is empty
        self.assertFalse(list_before)

        # Add parent
        self.condorworkflowjobnode_a1.add_parent(self.condorworkflowjobnode_a)

        # Execute load nodes
        self.condorworkflow.load_nodes()

        # After load nodes, Set should have two elements. One parent and one child
        ret_after = self.condorworkflow.condorpy_workflow.node_set
        # Convert to list for checking result
        for e in ret_after:
            list_after.append(e)

        # Check list_after is not empty
        self.assertTrue(list_after)

        # sort list and compare result
        list_after.sort(key=lambda node: node.job.name)
        self.assertEqual('Job1_a', list_after[0].job.name)
        self.assertEqual('Job1_a1', list_after[1].job.name)

    def test_add_max_jobs_throttle(self):
        """add_max_jobs_throttle should record the new throttle on the condorpy workflow."""
        # Set max_jobs
        self.condorworkflow.add_max_jobs_throttle('foo1', 20)

        # Get return value
        ret = self.condorworkflow.condorpy_workflow

        # Check result
        self.assertEqual(20, ret.max_jobs['foo1'])

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_job_node.CondorWorkflowJobNode.update_database_fields'
    )
    def test_update_database_fields(self, mock_update):
        """update_database_fields should cascade to every node of the workflow."""
        # Set attribute for node
        self.condorpyworkflow.update_database_fields()

        # Check if mock is called twice for node and child node
        self.assertTrue(mock_update.call_count == 2)

    @mock.patch(
        'tethys_compute.models.condor.condor_py_workflow.CondorPyWorkflow.condorpy_workflow'
    )
    def test_num_jobs(self, mock_condorpy_workflow_prop):
        """num_jobs should be delegated to the condorpy workflow."""
        ret = self.condorpyworkflow.num_jobs
        self.assertEqual(mock_condorpy_workflow_prop.num_jobs, ret)
class CondorWorkflowTest(TethysTestCase):
    """Tests for CondorWorkflow: execution, node lookup, deletion, and status updates."""

    def set_up(self):
        """Create user/scheduler/workflow fixtures plus a parent/child job-node pair."""
        test_models_dir = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(test_models_dir, 'workspace')
        self.user = User.objects.create_user('tethys_super',
                                             '*****@*****.**', 'pass')

        files_dir = os.path.join(os.path.dirname(test_models_dir), 'files')
        self.private_key = os.path.join(files_dir, 'keys', 'testkey')
        self.private_key_pass = '******'

        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path=self.private_key,
            private_key_pass=self.private_key_pass)
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record created by the save above
        # (multi-table inheritance pointer id).
        self.id_value = CondorWorkflow.objects.get(
            name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Node_child',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Node_1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()

        # Django model many to many relationship add method
        self.condorworkflowjobnode.parent_nodes.add(
            self.condorworkflowjobnode_child)

        # Pointer ids recorded so tear_down can detect whether the workflow
        # row still exists before deleting it.
        self.condorbase_id = CondorWorkflow.objects.get(
            name='test name').condorbase_ptr_id
        self.condorpyworkflow_id = CondorWorkflow.objects.get(
            name='test name').condorpyworkflow_ptr_id

    def tear_down(self):
        """Delete fixtures; skip workflow delete if a test already removed it."""
        self.scheduler.delete()

        # Some tests delete the workflow themselves (pre-delete tests), in
        # which case the pointer id no longer matches and we skip the delete.
        if self.condorworkflow.condorbase_ptr_id == self.condorbase_id:
            self.condorworkflow.delete()

        if os.path.exists(self.workspace_dir):
            shutil.rmtree(self.workspace_dir)

    def test_type(self):
        """type should identify the model as a CondorWorkflow."""
        ret = self.condorworkflow.type
        self.assertEqual('CondorWorkflow', ret)

    def test_condor_object_prop(self):
        """_condor_object should be a condorpy DAG carrying the model's settings."""
        ret = self.condorworkflow._condor_object

        # Check workflow return
        self.assertEqual({'foo': 10}, ret.max_jobs)
        self.assertEqual('test_config', ret.config)
        # NOTE(review): repr shows 'test_name' although the model name is
        # 'test name' — presumably the condorpy DAG normalizes spaces; confirm.
        self.assertEqual('<DAG: test_name>', repr(ret))

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorPyWorkflow.load_nodes'
    )
    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorBase.condor_object'
    )
    def test_execute(self, mock_co, mock_ln):
        """_execute should load nodes, submit with options, and record the cluster id."""
        # Mock submit to return a 111 cluster id
        mock_co.submit.return_value = 111

        # Execute
        self.condorworkflow._execute(options=['foo'])

        # We already tested load_nodes in CondorPyWorkflow, just mocked to make sure it's called here.
        mock_ln.assert_called()
        mock_co.submit.assert_called_with(options=['foo'])

        # Check cluster_id from _execute in condorbase
        self.assertEqual(111, self.condorworkflow.cluster_id)

    def test_get_job(self):
        """get_job should return the job node matching the given name."""
        ret = self.condorworkflow.get_job(job_name='Node_1')

        # Check result
        self.assertIsInstance(ret, CondorWorkflowJobNode)
        self.assertEqual('Node_1', ret.name)

    def test_get_job_does_not_exist(self):
        """get_job should return None for an unknown job name."""
        ret = self.condorworkflow.get_job(job_name='Node_2')
        # Check result
        self.assertIsNone(ret)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorBase.update_database_fields'
    )
    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorPyWorkflow.update_database_fields'
    )
    def test_update_database_fieds(self, mock_pw_update, mock_ba_update):
        """update_database_fields should delegate to both parent classes."""
        # Execute
        self.condorworkflow.update_database_fields()

        # Check if mock is called
        mock_pw_update.assert_called()
        mock_ba_update.assert_called()

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorWorkflow.update_database_fields'
    )
    def test_condor_workflow_presave(self, mock_update):
        """Saving should trigger update_database_fields via the pre-save signal."""
        # Excute
        self.condorworkflow.save()

        # Check if update_database_fields is called
        mock_update.assert_called()

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorWorkflow.condor_object'
    )
    def test_condor_job_pre_delete(self, mock_co):
        """Deleting should close the remote connection and remove the workspace files."""
        if not os.path.exists(self.workspace_dir):
            os.makedirs(self.workspace_dir)
            file_path = os.path.join(self.workspace_dir, 'test_file.txt')
            open(file_path, 'a').close()

        self.condorworkflow.delete()

        # Check if close_remote is called
        mock_co.close_remote.assert_called()

        # Check if file has been removed
        self.assertFalse(os.path.isfile(file_path))

    @mock.patch('tethys_compute.models.condor.condor_workflow.log')
    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorWorkflow.condor_object'
    )
    def test_condor_job_pre_delete_exception(self, mock_co, mock_log):
        """Errors during pre-delete cleanup should be logged, not raised."""
        mock_co.close_remote.side_effect = Exception('test error')
        self.condorworkflow.delete()

        # Check if close_remote is called
        mock_log.exception.assert_called_with('test error')

    def test__update_status_no_execute_time(self):
        """_update_status should report 'SUB' when the job never executed."""
        self.condorworkflow.execute_time = None
        ret = self.condorworkflow._update_status()
        self.assertEqual('SUB', ret)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorBase.condor_object'
    )
    def test__update_status_not_Running(self, mock_co):
        """A non-Running condor status should map directly onto the model status."""
        self.condorworkflow.execute_time = tz.now()
        mock_co.status = 'Completed'

        self.condorworkflow._update_status()

        self.assertEqual('COM', self.condorworkflow._status)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorBase.condor_object'
    )
    def test__update_status_Running_not_running_statuses(self, mock_co):
        """'Running' with only completed sub-jobs should map to 'VCP'."""
        self.condorworkflow.execute_time = tz.now()
        mock_co.status = 'Running'
        mock_co.statuses = {
            'Unexpanded': 0,
            'Idle': 0,
            'Running': 0,
            'Completed': 1
        }

        self.condorworkflow._update_status()

        self.assertEqual('VCP', self.condorworkflow._status)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorBase.condor_object'
    )
    def test__update_status_Running_no_statuses(self, mock_co):
        """'Running' with all-zero sub-job counts should map to 'SUB'."""
        self.condorworkflow.execute_time = tz.now()
        mock_co.status = 'Running'
        mock_co.statuses = {
            'Unexpanded': 0,
            'Idle': 0,
            'Running': 0,
            'Completed': 0
        }

        self.condorworkflow._update_status()

        self.assertEqual('SUB', self.condorworkflow._status)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow.CondorBase.condor_object'
    )
    def test__update_status_exception(self, mock_co):
        """A failure while reading condor status should set the status to 'ERR'."""
        self.condorworkflow.execute_time = tz.now()
        type(mock_co).status = mock.PropertyMock(side_effect=Exception)

        self.condorworkflow._update_status()

        self.assertEqual('ERR', self.condorworkflow._status)
Example #10
0
class CondorPyWorkflowJobNodeTest(TethysTestCase):
    """Tests for CondorWorkflowJobNode properties, update hooks, and save signals."""

    def set_up(self):
        """Create a workflow owned by a fresh user and attach a single job node."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super',
                                             '*****@*****.**', 'pass')

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='foo{id}',
            workspace=self.workspace_dir,
            user=self.user,
        )
        self.condorworkflow.save()

        # To have a flow Node, we need to have a Condor Job which requires a CondorBase which requires a TethysJob
        self.id_value = CondorWorkflow.objects.get(
            name='foo{id}').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Job1_NodeA',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()

    def tear_down(self):
        """Delete the model instances created in set_up."""
        self.condorworkflow.delete()
        self.condorworkflowjobnode.delete()

    def test_type_prop(self):
        """Job nodes should report type 'JOB'."""
        self.assertEqual('JOB', self.condorworkflowjobnode.type)

    def test_workspace_prop(self):
        """Job-node workspace should be the relative current directory."""
        self.assertEqual('.', self.condorworkflowjobnode.workspace)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_job_node.CondorPyJob.condorpy_job'
    )
    def test_job_prop(self, mock_cpj):
        """job should delegate to the CondorPyJob.condorpy_job property."""
        # Condorpy_job Prop is already tested in CondorPyJob Test case
        self.assertEqual(mock_cpj, self.condorworkflowjobnode.job)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_job_node.CondorWorkflowNode.update_database_fields'
    )
    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_job_node.CondorPyJob.update_database_fields'
    )
    def test_update_database_fields(self, mock_pj_update, mock_wfn_update):
        """update_database_fields should delegate to both parent classes once each."""
        # Execute
        self.condorworkflowjobnode.update_database_fields()

        # Check result
        mock_pj_update.assert_called_once()
        mock_wfn_update.assert_called_once()

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_job_node.CondorWorkflowNode.update_database_fields'
    )
    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_job_node.CondorPyJob.update_database_fields'
    )
    def test_receiver_pre_save(self, mock_pj_update, mock_wfn_update):
        """Saving the node should trigger the update hooks via the pre-save signal."""
        self.condorworkflowjobnode.save()

        # Check result
        mock_pj_update.assert_called_once()
        mock_wfn_update.assert_called_once()

    def test_job_post_save(self):
        """Post-save should expand the '{id}' placeholder in the job name."""
        # get the job
        tethys_job = TethysJob.objects.get(name='foo{id}')
        id_val = tethys_job.id

        # Run save to activate post save
        tethys_job.save()

        # Set up new name
        new_name = 'foo{id}'.format(id=id_val)

        # Get same tethys job with new name
        tethys_job = TethysJob.objects.get(name=new_name)

        # Check results
        self.assertIsInstance(tethys_job, TethysJob)
        self.assertEqual(new_name, tethys_job.name)
Example #11
0
class CondorWorkflowNodeTest(TethysTestCase):
    """Tests for the abstract CondorWorkflowNode base model."""

    def set_up(self):
        """Create a scheduler, a workflow, two job nodes, and a plain workflow node."""
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super',
                                             '*****@*****.**', 'pass')

        self.scheduler = CondorScheduler(name='test_scheduler',
                                         host='localhost',
                                         username='******',
                                         password='******',
                                         private_key_path='test_path',
                                         private_key_pass='******')
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the CondorPyWorkflow parent record created by the save above
        # (multi-table inheritance pointer id).
        self.id_value = CondorWorkflow.objects.get(
            name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(
            condorpyworkflow_id=self.id_value)

        # One node can have many children nodes
        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Job1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        # One node can have many children nodes
        self.condorworkflowjobnode_child2 = CondorWorkflowJobNode(
            name='Job2',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child2.save()

        self.condorworkflownode = CondorWorkflowNode(
            name='test_condorworkflownode',
            workflow=self.condorpyworkflow,
        )
        self.condorworkflownode.save()

    def tear_down(self):
        """Delete the model instances created in set_up."""
        self.condorworkflow.delete()
        self.condorworkflowjobnode_child.delete()
        self.condorworkflowjobnode_child2.delete()

    def test_type_abs_prop(self):
        """The abstract type hook should return None on the base node."""
        ret = self.condorworkflownode.type()

        # Check result
        self.assertIsNone(ret)

    def test_job_abs_prop(self):
        """The abstract job hook should return None on the base node."""
        ret = self.condorworkflownode.job()

        # Check result
        self.assertIsNone(ret)

    @mock.patch(
        'tethys_compute.models.condor.condor_workflow_node.CondorWorkflowNode.job'
    )
    def test_condorpy_node(self, mock_job):
        """condorpy_node should wrap the node's job in a condorpy Node."""
        mock_job_return = Job(name='test_job',
                              attributes={'foo': 'bar'},
                              num_jobs=1,
                              remote_input_files=['test_file.txt'],
                              working_directory=self.workspace_dir)
        mock_job.return_value = mock_job_return

        self.condorworkflownode.job = mock_job_return
        ret = self.condorworkflownode.condorpy_node

        # Check result
        self.assertEqual('<Node: test_job parents() children()>', repr(ret))

    def test_add_parents_and_parents_prop(self):
        """add_parent should register parents retrievable via the parents property."""
        # Add parent should add parent to condorwoflownode
        self.condorworkflownode.add_parent(self.condorworkflowjobnode_child)
        self.condorworkflownode.add_parent(self.condorworkflowjobnode_child2)

        # Get this Parent Nodes here
        ret = self.condorworkflownode.parents

        # Check result
        self.assertIsInstance(ret[0], CondorWorkflowJobNode)
        self.assertEqual('Job1', ret[0].name)
        self.assertIsInstance(ret[1], CondorWorkflowJobNode)
        self.assertEqual('Job2', ret[1].name)

    def test_update_database_fields(self):
        """The base node's update_database_fields should be a no-op returning None."""
        self.assertIsNone(self.condorworkflownode.update_database_fields())
class CondorPyWorkflowTest(TethysTestCase):
    """Unit tests for the CondorPyWorkflow model: condorpy DAG construction,
    max-jobs throttling, config handling, and node loading."""

    def set_up(self):
        # The workspace directory lives next to this test module.
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        # Dummy credentials -- no remote connection is made in these tests.
        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path='test_path',
            private_key_pass='******'
        )
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # CondorWorkflow inherits from CondorPyWorkflow (multi-table
        # inheritance); fetch the parent-model row via its pointer id.
        self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        # Two job nodes; individual tests wire them into a parent/child
        # relationship as needed.
        self.condorworkflowjobnode_a = CondorWorkflowJobNode(
            name='Job1_a',
            workflow=self.condorpyworkflow,
            _attributes={'foo': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )

        self.condorworkflowjobnode_a.save()

        self.condorworkflowjobnode_a1 = CondorWorkflowJobNode(
            name='Job1_a1',
            workflow=self.condorpyworkflow,
            _attributes={'foo': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )

        self.condorworkflowjobnode_a1.save()

    def tear_down(self):
        # Remove everything set_up created so tests stay isolated.
        self.scheduler.delete()
        self.condorworkflow.delete()
        self.condorworkflowjobnode_a.delete()
        self.condorworkflowjobnode_a1.delete()

    def test_condorpy_workflow_prop(self):
        """condorpy_workflow should build a condorpy DAG from the model fields."""
        ret = self.condorworkflow.condorpy_workflow

        # Check Result
        # NOTE(review): the repr shows 'test_name' even though the model name
        # is 'test name' -- presumably condorpy normalizes spaces; confirm.
        self.assertEqual('<DAG: test_name>', repr(ret))
        self.assertEqual(self.workspace_dir, ret._cwd)
        self.assertEqual('test_config', ret.config)

    @mock.patch('tethys_compute.models.condor.condor_py_workflow.Workflow')
    def test_max_jobs(self, mock_wf):
        """The max_jobs setter/getter should round-trip, and the condorpy
        Workflow should be constructed with the model's stored fields."""
        max_jobs = {'foo': 5}
        self.condorpyworkflow.name = 'test_name'
        self.condorpyworkflow.workspace = 'test_dict'
        self.condorpyworkflow.max_jobs = max_jobs

        ret = self.condorpyworkflow.max_jobs

        # Check result
        self.assertEqual(5, ret['foo'])
        mock_wf.assert_called_with(config='test_config', max_jobs={'foo': 10},
                                   name='test_name', working_directory='test_dict')

    @mock.patch('tethys_compute.models.condor.condor_py_workflow.CondorPyWorkflow.condorpy_workflow')
    def test_config(self, mock_cw):
        """The config setter/getter should round-trip through the (mocked)
        condorpy workflow."""
        test_config_value = 'test_config2'

        # Mock condorpy_workflow.config = test_config_value. We have already tested condorpy_workflow.
        mock_cw.config = test_config_value

        # Setter
        self.condorpyworkflow.config = test_config_value

        # Property
        ret = self.condorpyworkflow.config

        # Check result
        self.assertEqual('test_config2', ret)

    def test_nodes(self):
        """Each node returned by `nodes` should reference its owning workflow."""
        ret = self.condorworkflow.nodes

        # Check result in CondorPyWorkflow object
        self.assertEqual({'foo': 10}, ret[0].workflow.max_jobs)
        self.assertEqual('test_config', ret[0].workflow.config)

    def test_load_nodes(self):
        """load_nodes should populate the condorpy workflow's node_set with
        both the parent and child nodes."""
        # Before load nodes. Set should be empty
        ret_before = self.condorworkflow.condorpy_workflow.node_set
        list_before = []

        list_after = []
        for e in ret_before:
            list_before.append(e)

        # Check list_before is empty
        self.assertFalse(list_before)

        # Add parent
        self.condorworkflowjobnode_a1.add_parent(self.condorworkflowjobnode_a)

        # Execute load nodes
        self.condorworkflow.load_nodes()

        # After load nodes, Set should have two elements. One parent and one child
        ret_after = self.condorworkflow.condorpy_workflow.node_set
        # Convert to list for checking result
        for e in ret_after:
            list_after.append(e)

        # Check list_after is not empty
        self.assertTrue(list_after)

        # sort list (node_set iteration order is unspecified) and compare result
        list_after.sort(key=lambda node: node.job.name)
        self.assertEqual('Job1_a', list_after[0].job.name)
        self.assertEqual('Job1_a1', list_after[1].job.name)

    def test_add_max_jobs_throttle(self):
        """add_max_jobs_throttle should register a per-list max-jobs limit."""
        # Set max_jobs
        self.condorworkflow.add_max_jobs_throttle('foo1', 20)

        # Get return value
        ret = self.condorworkflow.condorpy_workflow

        # Check result
        self.assertEqual(20, ret.max_jobs['foo1'])

    @mock.patch('tethys_compute.models.condor.condor_workflow_job_node.CondorWorkflowJobNode.update_database_fields')
    def test_update_database_fields(self, mock_update):
        """update_database_fields should delegate to each of the two nodes."""
        self.condorpyworkflow.update_database_fields()

        # Check if mock is called twice for node and child node
        self.assertTrue(mock_update.call_count == 2)

    @mock.patch('tethys_compute.models.condor.condor_py_workflow.CondorPyWorkflow.condorpy_workflow')
    def test_num_jobs(self, mock_condorpy_workflow_prop):
        """num_jobs should pass through the condorpy workflow's num_jobs."""
        ret = self.condorpyworkflow.num_jobs
        self.assertEqual(mock_condorpy_workflow_prop.num_jobs, ret)
class CondorWorkflowNodeTest(TethysTestCase):
    """Unit tests for the CondorWorkflowNode base model: abstract hooks,
    condorpy node wrapping, and parent bookkeeping."""

    def set_up(self):
        # The workspace directory lives next to this test module.
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        # Dummy credentials -- no remote connection is made in these tests.
        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path='test_path',
            private_key_pass='******'
        )
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the inherited CondorPyWorkflow row via the multi-table
        # inheritance pointer id.
        self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        # One node can have many children nodes
        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Job1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        # One node can have many children nodes
        self.condorworkflowjobnode_child2 = CondorWorkflowJobNode(
            name='Job2',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child2.save()

        # The plain base-class node under test.
        self.condorworkflownode = CondorWorkflowNode(
            name='test_condorworkflownode',
            workflow=self.condorpyworkflow,
        )
        self.condorworkflownode.save()

    def tear_down(self):
        # Remove the DB rows created in set_up so tests stay isolated.
        # NOTE(review): self.scheduler is not deleted here, unlike
        # CondorPyWorkflowTest above -- confirm whether that is intentional.
        self.condorworkflow.delete()
        self.condorworkflowjobnode_child.delete()
        self.condorworkflowjobnode_child2.delete()

    def test_type_abs_prop(self):
        """Invoking the base node's `type` hook should yield None."""
        ret = self.condorworkflownode.type()

        # Check result
        self.assertIsNone(ret)

    def test_job_abs_prop(self):
        """Invoking the base node's `job` hook should yield None."""
        ret = self.condorworkflownode.job()

        # Check result
        self.assertIsNone(ret)

    @mock.patch('tethys_compute.models.condor.condor_workflow_node.CondorWorkflowNode.job')
    def test_condorpy_node(self, mock_job):
        """condorpy_node should wrap the node's job in a condorpy Node."""
        mock_job_return = Job(name='test_job',
                              attributes={'foo': 'bar'},
                              num_jobs=1,
                              remote_input_files=['test_file.txt'],
                              working_directory=self.workspace_dir)
        mock_job.return_value = mock_job_return

        self.condorworkflownode.job = mock_job_return
        ret = self.condorworkflownode.condorpy_node

        # Check result: repr encodes the job name plus empty parent/child sets.
        self.assertEqual('<Node: test_job parents() children()>', repr(ret))

    def test_add_parents_and_parents_prop(self):
        """add_parent should register both job nodes, retrievable via parents."""
        self.condorworkflownode.add_parent(self.condorworkflowjobnode_child)
        self.condorworkflownode.add_parent(self.condorworkflowjobnode_child2)

        # Get this Parent Nodes here
        ret = self.condorworkflownode.parents

        # Check result
        self.assertIsInstance(ret[0], CondorWorkflowJobNode)
        self.assertEqual('Job1', ret[0].name)
        self.assertIsInstance(ret[1], CondorWorkflowJobNode)
        self.assertEqual('Job2', ret[1].name)

    def test_update_database_fields(self):
        """The base node implementation performs no updates and returns None."""
        self.assertIsNone(self.condorworkflownode.update_database_fields())
class CondorPyWorkflowJobNodeTest(TethysTestCase):
    """Unit tests for CondorWorkflowJobNode: type/workspace/job properties,
    database-field synchronization, and save-signal receivers."""

    def set_up(self):
        # The workspace directory lives next to this test module.
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')

        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        # The workflow name contains a literal '{id}' placeholder; the
        # post-save handler is expected to substitute the job id (see
        # test_job_post_save below).
        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='foo{id}',
            workspace=self.workspace_dir,
            user=self.user,
        )
        self.condorworkflow.save()

        # To have a flow Node, we need to have a Condor Job which requires a CondorBase which requires a TethysJob
        self.id_value = CondorWorkflow.objects.get(name='foo{id}').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Job1_NodeA',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()

    def tear_down(self):
        # Remove the DB rows created in set_up so tests stay isolated.
        self.condorworkflow.delete()
        self.condorworkflowjobnode.delete()

    def test_type_prop(self):
        """Job nodes should report the 'JOB' node type."""
        self.assertEqual('JOB', self.condorworkflowjobnode.type)

    def test_workspace_prop(self):
        """A job node's workspace should be the current directory."""
        self.assertEqual('.', self.condorworkflowjobnode.workspace)

    @mock.patch('tethys_compute.models.condor.condor_workflow_job_node.CondorPyJob.condorpy_job')
    def test_job_prop(self, mock_cpj):
        # Condorpy_job Prop is already tested in CondorPyJob Test case
        self.assertEqual(mock_cpj, self.condorworkflowjobnode.job)

    @mock.patch('tethys_compute.models.condor.condor_workflow_job_node.CondorWorkflowNode.update_database_fields')
    @mock.patch('tethys_compute.models.condor.condor_workflow_job_node.CondorPyJob.update_database_fields')
    def test_update_database_fields(self, mock_pj_update, mock_wfn_update):
        """update_database_fields should delegate to both parent classes."""
        # Execute
        self.condorworkflowjobnode.update_database_fields()

        # Check result
        mock_pj_update.assert_called_once()
        mock_wfn_update.assert_called_once()

    @mock.patch('tethys_compute.models.condor.condor_workflow_job_node.CondorWorkflowNode.update_database_fields')
    @mock.patch('tethys_compute.models.condor.condor_workflow_job_node.CondorPyJob.update_database_fields')
    def test_receiver_pre_save(self, mock_pj_update, mock_wfn_update):
        """Saving a node should trigger update_database_fields via pre_save."""
        self.condorworkflowjobnode.save()

        # Check result
        mock_pj_update.assert_called_once()
        mock_wfn_update.assert_called_once()

    def test_job_post_save(self):
        """Saving the job should keep it retrievable under its formatted name."""
        # get the job
        tethys_job = TethysJob.objects.get(name='foo{id}')
        id_val = tethys_job.id

        # Run save to activate post save
        tethys_job.save()

        # Set up new name with the '{id}' placeholder filled in
        new_name = 'foo{id}'.format(id=id_val)

        # Get same tethys job with new name
        tethys_job = TethysJob.objects.get(name=new_name)

        # Check results
        self.assertIsInstance(tethys_job, TethysJob)
        self.assertEqual(new_name, tethys_job.name)
class CondorWorkflowTest(TethysTestCase):
    """Unit tests for the CondorWorkflow model: submission, job lookup,
    status updates, and delete-time cleanup."""

    def set_up(self):
        # The workspace directory lives next to this test module.
        path = os.path.dirname(__file__)
        self.workspace_dir = os.path.join(path, 'workspace')
        self.user = User.objects.create_user('tethys_super', '*****@*****.**', 'pass')

        # Dummy credentials -- no remote connection is made in these tests.
        self.scheduler = CondorScheduler(
            name='test_scheduler',
            host='localhost',
            username='******',
            password='******',
            private_key_path='test_path',
            private_key_pass='******'
        )
        self.scheduler.save()

        self.condorworkflow = CondorWorkflow(
            _max_jobs={'foo': 10},
            _config='test_config',
            name='test name',
            workspace=self.workspace_dir,
            user=self.user,
            scheduler=self.scheduler,
        )
        self.condorworkflow.save()

        # Fetch the inherited CondorPyWorkflow row via the multi-table
        # inheritance pointer id.
        self.id_value = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id
        self.condorpyworkflow = CondorPyWorkflow.objects.get(condorpyworkflow_id=self.id_value)

        self.condorworkflowjobnode_child = CondorWorkflowJobNode(
            name='Node_child',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode_child.save()

        self.condorworkflowjobnode = CondorWorkflowJobNode(
            name='Node_1',
            workflow=self.condorpyworkflow,
            _attributes={'test': 'one'},
            _num_jobs=1,
            _remote_input_files=['test1.txt'],
        )
        self.condorworkflowjobnode.save()

        # Django model many to many relationship add method:
        # wire Node_child in as a parent of Node_1.
        self.condorworkflowjobnode.parent_nodes.add(self.condorworkflowjobnode_child)

        self.condorbase_id = CondorWorkflow.objects.get(name='test name').condorbase_ptr_id
        self.condorpyworkflow_id = CondorWorkflow.objects.get(name='test name').condorpyworkflow_ptr_id

    def tear_down(self):
        self.scheduler.delete()

        # Some tests delete the workflow themselves; only delete it here if it
        # still matches the row created in set_up.
        if self.condorworkflow.condorbase_ptr_id == self.condorbase_id:
            self.condorworkflow.delete()

        if os.path.exists(self.workspace_dir):
            shutil.rmtree(self.workspace_dir)

    def test_type(self):
        """The workflow should report its concrete type name."""
        ret = self.condorworkflow.type
        self.assertEqual('CondorWorkflow', ret)

    def test_condor_object_prop(self):
        """_condor_object should be a condorpy DAG built from model fields."""
        ret = self.condorworkflow._condor_object

        # Check workflow return
        self.assertEqual({'foo': 10}, ret.max_jobs)
        self.assertEqual('test_config', ret.config)
        # NOTE(review): repr shows 'test_name' though the model name is
        # 'test name' -- presumably condorpy normalizes spaces; confirm.
        self.assertEqual('<DAG: test_name>', repr(ret))

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorPyWorkflow.load_nodes')
    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
    def test_execute(self, mock_co, mock_ln):
        """_execute should load nodes, submit, and record the cluster id."""
        # Mock submit to return a 111 cluster id
        mock_co.submit.return_value = 111

        # Execute
        self.condorworkflow._execute(options=['foo'])

        # We already tested load_nodes in CondorPyWorkflow, just mocked to make sure it's called here.
        mock_ln.assert_called()
        mock_co.submit.assert_called_with(options=['foo'])

        # Check cluster_id from _execute in condorbase
        self.assertEqual(111, self.condorworkflow.cluster_id)

    def test_get_job(self):
        """get_job should return the named job node."""
        ret = self.condorworkflow.get_job(job_name='Node_1')

        # Check result
        self.assertIsInstance(ret, CondorWorkflowJobNode)
        self.assertEqual('Node_1', ret.name)

    def test_get_job_does_not_exist(self):
        """get_job should return None for an unknown job name."""
        ret = self.condorworkflow.get_job(job_name='Node_2')
        self.assertIsNone(ret)

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.update_database_fields')
    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorPyWorkflow.update_database_fields')
    def test_update_database_fields(self, mock_pw_update, mock_ba_update):
        """update_database_fields should delegate to both parent classes."""
        # (Renamed from 'test_update_database_fieds' -- typo fix; test methods
        # are discovered by their 'test_' prefix, so no callers are affected.)
        self.condorworkflow.update_database_fields()

        # Check if mock is called
        mock_pw_update.assert_called()
        mock_ba_update.assert_called()

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.update_database_fields')
    def test_condor_workflow_presave(self, mock_update):
        """Saving the workflow should trigger update_database_fields."""
        self.condorworkflow.save()

        # Check if update_database_fields is called
        mock_update.assert_called()

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.condor_object')
    def test_condor_job_pre_delete(self, mock_co):
        """Deleting the workflow should close the remote and clear the workspace."""
        # BUG FIX: file_path was previously assigned only inside the makedirs
        # branch, so the final assertion raised NameError whenever the
        # workspace directory already existed. Create the marker file
        # unconditionally instead.
        if not os.path.exists(self.workspace_dir):
            os.makedirs(self.workspace_dir)
        file_path = os.path.join(self.workspace_dir, 'test_file.txt')
        open(file_path, 'a').close()

        self.condorworkflow.delete()

        # Check if close_remote is called
        mock_co.close_remote.assert_called()

        # Check if file has been removed
        self.assertFalse(os.path.isfile(file_path))

    @mock.patch('tethys_compute.models.condor.condor_workflow.log')
    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorWorkflow.condor_object')
    def test_condor_job_pre_delete_exception(self, mock_co, mock_log):
        """Errors while closing the remote should be logged, not raised."""
        mock_co.close_remote.side_effect = Exception('test error')
        self.condorworkflow.delete()

        # Check if the exception was logged
        mock_log.exception.assert_called_with('test error')

    def test__update_status_no_execute_time(self):
        """With no execute time the status should remain pending ('PEN')."""
        self.condorworkflow.execute_time = None
        ret = self.condorworkflow._update_status()
        self.assertEqual('PEN', ret)

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
    def test__update_status_not_Running(self, mock_co):
        """A non-running condor status should map directly ('Completed' -> 'COM')."""
        self.condorworkflow.execute_time = tz.now()
        mock_co.status = 'Completed'

        self.condorworkflow._update_status()

        self.assertEqual('COM', self.condorworkflow._status)

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
    def test__update_status_Running_not_running_statuses(self, mock_co):
        """'Running' with completed-but-not-running jobs should map to 'VCP'."""
        self.condorworkflow.execute_time = tz.now()
        mock_co.status = 'Running'
        mock_co.statuses = {'Unexpanded': 0, 'Idle': 0, 'Running': 0, 'Completed': 1}

        self.condorworkflow._update_status()

        self.assertEqual('VCP', self.condorworkflow._status)

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
    def test__update_status_Running_no_statuses(self, mock_co):
        """'Running' with all-zero job statuses should map to 'SUB'."""
        self.condorworkflow.execute_time = tz.now()
        mock_co.status = 'Running'
        mock_co.statuses = {'Unexpanded': 0, 'Idle': 0, 'Running': 0, 'Completed': 0}

        self.condorworkflow._update_status()

        self.assertEqual('SUB', self.condorworkflow._status)

    @mock.patch('tethys_compute.models.condor.condor_workflow.CondorBase.condor_object')
    def test__update_status_exception(self, mock_co):
        """Failures while reading condor status should map to 'ERR'."""
        self.condorworkflow.execute_time = tz.now()
        type(mock_co).status = mock.PropertyMock(side_effect=Exception)

        self.condorworkflow._update_status()

        self.assertEqual('ERR', self.condorworkflow._status)