def test_job_ajax_patch_req(self):
    """Exercise every error path of PATCH /study/process/job/ plus the
    successful removal of an errored job."""
    # Jobs can only be created through a workflow in the interface, so
    # build one here to obtain a real job id.
    command = Command(1)
    params_json = (
        '{"input_data": 1, "max_barcode_errors": 1.5, '
        '"barcode_type": "golay_12", "max_bad_run_length": 3, '
        '"rev_comp": false, "phred_quality_threshold": 3, '
        '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
        '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
    params = Parameters.load(command, json_str=params_json)
    owner = User('*****@*****.**')
    wf = ProcessingWorkflow.from_scratch(
        owner, params, name="Test processing workflow", force=True)
    job_id = list(wf.graph.nodes())[0].id

    # Path carries extra components after the job id
    obs = job_ajax_patch_req('remove', '/%s/somethingelse' % job_id)
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: missing job id'}
    self.assertEqual(obs, exp)

    # Path carries no job id at all
    obs = job_ajax_patch_req('remove', '/')
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: missing job id'}
    self.assertEqual(obs, exp)

    # Value in the path is not shaped like a job id
    obs = job_ajax_patch_req('remove', '/notAJobId')
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: '
                      'notAJobId is not a recognized job id'}
    self.assertEqual(obs, exp)

    # Well-formed id that does not exist
    obs = job_ajax_patch_req('remove',
                             '/6d368e16-2242-4cf8-87b4-a5dc40bc890b')
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: '
                      '6d368e16-2242-4cf8-87b4-a5dc40bc890b is not a '
                      'recognized job id'}
    self.assertEqual(obs, exp)

    # A job still in construction must be removed through its workflow
    obs = job_ajax_patch_req('remove', '/%s' % job_id)
    exp = {'status': 'error',
           'message': "Can't delete job %s. It is 'in_construction' "
                      "status. Please use /study/process/workflow/"
                      % job_id}
    self.assertEqual(obs, exp)

    # Only jobs whose status is 'error' may be deleted
    job = ProcessingJob(job_id)
    job._set_status('queued')
    obs = job_ajax_patch_req('remove', '/%s' % job_id)
    exp = {'status': 'error',
           'message': 'Only jobs in "error" status can be deleted.'}
    self.assertEqual(obs, exp)

    # PATCH operations other than 'remove' are rejected
    job._set_status('queued')
    obs = job_ajax_patch_req('add', '/%s' % job_id)
    exp = {'status': 'error',
           'message': 'Operation "add" not supported. Current supported '
                      'operations: remove'}
    self.assertEqual(obs, exp)

    # Success: an errored job can be removed
    job._set_error('Killed for testing')
    obs = job_ajax_patch_req('remove', '/%s' % job_id)
    exp = {'status': 'success', 'message': ''}
    self.assertEqual(obs, exp)
def test_complete_job(self):
    """Run the 'complete_job' private task through its three outcomes:
    success, plugin-reported failure, and internal error."""
    # --- Complete success: a new artifact is registered ---
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
    c_job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(
            Command.get_validator('BIOM'),
            values_dict={'template': pt.id,
                         'files': dumps({'BIOM': ['file']}),
                         'artifact_type': 'BIOM'}), True)
    c_job._set_status('running')
    fd, fp = mkstemp(suffix='_table.biom')
    close(fd)
    with open(fp, 'w') as fh:
        fh.write('\n')
    self._clean_up_files.append(fp)
    expected_artifacts = get_count('qiita.artifact') + 1
    payload = dumps(
        {'success': True, 'error': '',
         'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')],
                                     'artifact_type': 'BIOM'}}})
    job = self._create_job(
        'complete_job', {'job_id': c_job.id, 'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'success')
    self.assertEqual(get_count('qiita.artifact'), expected_artifacts)

    # --- Plugin reported a failure: job errors out and is logged ---
    payload = dumps({'success': False, 'error': 'Job failure'})
    job = self._create_job(
        'complete_job',
        {'job_id': 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d',
         'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    c_job = ProcessingJob('bcc7ebcd-39c1-43e4-af2d-822e3589f14d')
    self.assertEqual(c_job.status, 'error')
    self.assertEqual(c_job.log, LogEntry.newest_records(numrecords=1)[0])
    self.assertEqual(c_job.log.msg, 'Job failure')

    # --- Internal error: artifact file path does not exist ---
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
    c_job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(
            Command.get_validator('BIOM'),
            values_dict={'template': pt.id,
                         'files': dumps({'BIOM': ['file']}),
                         'artifact_type': 'BIOM'}), True)
    c_job._set_status('running')
    fp = '/surprised/if/this/path/exists.biom'
    payload = dumps(
        {'success': True, 'error': '',
         'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')],
                                     'artifact_type': 'BIOM'}}})
    job = self._create_job(
        'complete_job', {'job_id': c_job.id, 'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'error')
    self.assertIn('No such file or directory', c_job.log.msg)
def correct_redis_data(key, cmd, values_dict, user):
    """Corrects the data stored in the redis DB

    Parameters
    ----------
    key : str
        The redis key to fix
    cmd : qiita_db.software.Command
        Command to use to create the processing job
    values_dict : dict
        Dictionary used to instantiate the parameters of the command
    user : qiita_db.user.User
        The user that will own the job
    """
    info = r_client.get(key)
    if not info:
        # The key doesn't contain any information. Delete the key
        r_client.delete(key)
        return

    info = loads(info)
    if info['job_id'] is not None:
        # Both "key absent" and "flag false" mean the entry carries no
        # useful live-dump information, so they share the delete branch.
        if info.get('is_qiita_job'):
            try:
                # Existence check only: raises QiitaDBUnknownIDError if
                # the job is gone (the instance itself is not needed)
                ProcessingJob(info['job_id'])
                payload = {'job_id': info['job_id'],
                           'alert_type': info['status'],
                           'alert_msg': info['alert_msg']}
                r_client.set(key, dumps(payload))
            except (QiitaDBUnknownIDError, KeyError):
                # We somehow lost the information of this job
                # Simply delete the key
                r_client.delete(key)
        else:
            # These jobs don't contain any information on the live
            # dump. We can safely delete the key
            r_client.delete(key)
    else:
        # Job is null, we have the information here
        if info['status'] == 'success':
            # In the success case no information is stored. We can
            # safely delete the key
            r_client.delete(key)
        elif info['status'] == 'warning':
            # In case of warning the key message stores the warning
            # message. We need to create a new job, mark it as
            # successful and store the error message as expected by
            # the new structure
            params = Parameters.load(cmd, values_dict=values_dict)
            job = ProcessingJob.create(user, params)
            job._set_status('success')
            payload = {'job_id': job.id,
                       'alert_type': 'warning',
                       'alert_msg': info['message']}
            r_client.set(key, dumps(payload))
        else:
            # The status is error. The key message stores the error
            # message. We need to create a new job and mark it as
            # failed with the given error message
            params = Parameters.load(cmd, values_dict=values_dict)
            job = ProcessingJob.create(user, params)
            job._set_error(info['message'])
            payload = {'job_id': job.id}
            r_client.set(key, dumps(payload))
def test_job_ajax_patch_req(self):
    """Check the PATCH job handler: every rejection case, then a
    successful delete of a job in 'error' status."""
    # The interface only creates jobs via workflows, so go through one
    # to get a job id we can operate on.
    cmd = Command(1)
    raw_params = (
        '{"input_data": 1, "max_barcode_errors": 1.5, '
        '"barcode_type": "golay_12", "max_bad_run_length": 3, '
        '"rev_comp": false, "phred_quality_threshold": 3, '
        '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
        '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
    loaded = Parameters.load(cmd, json_str=raw_params)
    workflow = ProcessingWorkflow.from_scratch(
        User('*****@*****.**'), loaded,
        name="Test processing workflow", force=True)
    job_id = list(workflow.graph.nodes())[0].id

    missing_id = {'status': 'error',
                  'message': 'Incorrect path parameter: missing job id'}
    # Extra path component after the id -> missing job id
    self.assertEqual(
        job_ajax_patch_req('remove', '/%s/somethingelse' % job_id),
        missing_id)
    # Bare slash -> missing job id
    self.assertEqual(job_ajax_patch_req('remove', '/'), missing_id)

    # Not shaped like a job id
    self.assertEqual(
        job_ajax_patch_req('remove', '/notAJobId'),
        {'status': 'error',
         'message': 'Incorrect path parameter: '
                    'notAJobId is not a recognized job id'})

    # Valid-looking id that is unknown to the system
    self.assertEqual(
        job_ajax_patch_req('remove',
                           '/6d368e16-2242-4cf8-87b4-a5dc40bc890b'),
        {'status': 'error',
         'message': 'Incorrect path parameter: '
                    '6d368e16-2242-4cf8-87b4-a5dc40bc890b is not a '
                    'recognized job id'})

    # in_construction jobs must go through the workflow endpoint
    self.assertEqual(
        job_ajax_patch_req('remove', '/%s' % job_id),
        {'status': 'error',
         'message': "Can't delete job %s. It is 'in_construction' "
                    "status. Please use /study/process/workflow/"
                    % job_id})

    # Deletion is restricted to jobs in 'error' status
    job = ProcessingJob(job_id)
    job._set_status('queued')
    self.assertEqual(
        job_ajax_patch_req('remove', '/%s' % job_id),
        {'status': 'error',
         'message': 'Only jobs in "error" status can be deleted.'})

    # 'add' is not a supported PATCH operation
    job._set_status('queued')
    self.assertEqual(
        job_ajax_patch_req('add', '/%s' % job_id),
        {'status': 'error',
         'message': 'Operation "add" not supported. Current supported '
                    'operations: remove'})

    # Success path: errored job is removed
    job._set_error('Killed for testing')
    self.assertEqual(job_ajax_patch_req('remove', '/%s' % job_id),
                     {'status': 'success', 'message': ''})
def test_complete_job(self):
    """Drive the 'complete_job' private task to success, to a
    plugin-reported failure, and to an internal error."""

    def _make_validator_job():
        # Fresh prep template + BIOM validator job in 'running' state
        prep = npt.assert_warns(
            QiitaDBWarning, PrepTemplate.create,
            pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}),
            Study(1), '16S')
        vjob = ProcessingJob.create(
            User('*****@*****.**'),
            Parameters.load(
                Command.get_validator('BIOM'),
                values_dict={'template': prep.id,
                             'files': dumps({'BIOM': ['file']}),
                             'artifact_type': 'BIOM'}), True)
        vjob._set_status('running')
        return vjob

    def _success_payload(path):
        return dumps(
            {'success': True, 'error': '',
             'artifacts': {'OTU table': {'filepaths': [(path, 'biom')],
                                         'artifact_type': 'BIOM'}}})

    # --- Complete success ---
    c_job = _make_validator_job()
    fd, fp = mkstemp(suffix='_table.biom')
    close(fd)
    with open(fp, 'w') as fh:
        fh.write('\n')
    self._clean_up_files.append(fp)
    expected_count = get_count('qiita.artifact') + 1
    job = self._create_job(
        'complete_job', {'job_id': c_job.id,
                         'payload': _success_payload(fp)})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'success')
    self.assertEqual(get_count('qiita.artifact'), expected_count)

    # --- Plugin reported failure ---
    job = self._create_job(
        'complete_job',
        {'job_id': 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d',
         'payload': dumps({'success': False, 'error': 'Job failure'})})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    c_job = ProcessingJob('bcc7ebcd-39c1-43e4-af2d-822e3589f14d')
    self.assertEqual(c_job.status, 'error')
    self.assertEqual(c_job.log, LogEntry.newest_records(numrecords=1)[0])
    self.assertEqual(c_job.log.msg, 'Job failure')

    # --- Internal error: artifact file is missing ---
    c_job = _make_validator_job()
    fp = '/surprised/if/this/path/exists.biom'
    job = self._create_job(
        'complete_job', {'job_id': c_job.id,
                         'payload': _success_payload(fp)})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'error')
    self.assertIn('No such file or directory', c_job.log.msg)
def correct_redis_data(key, cmd, values_dict, user):
    """Corrects the data stored in the redis DB

    Parameters
    ----------
    key : str
        The redis key to fix
    cmd : qiita_db.software.Command
        Command to use to create the processing job
    values_dict : dict
        Dictionary used to instantiate the parameters of the command
    user : qiita_db.user.User
        The user that will own the job
    """
    info = r_client.get(key)
    if not info:
        # The key doesn't contain any information. Delete the key
        r_client.delete(key)
        return

    info = loads(info)
    if info['job_id'] is not None:
        # A missing 'is_qiita_job' flag and a false one mean the same
        # thing (no useful live-dump information), so collapse the two
        # original nested branches into one .get() check.
        if info.get('is_qiita_job'):
            try:
                # Existence check only: ProcessingJob raises
                # QiitaDBUnknownIDError when the id is unknown; the
                # instance itself was never used.
                ProcessingJob(info['job_id'])
                payload = {'job_id': info['job_id'],
                           'alert_type': info['status'],
                           'alert_msg': info['alert_msg']}
                r_client.set(key, dumps(payload))
            except (QiitaDBUnknownIDError, KeyError):
                # We somehow lost the information of this job
                # Simply delete the key
                r_client.delete(key)
        else:
            # These jobs don't contain any information on the live
            # dump. We can safely delete the key
            r_client.delete(key)
    elif info['status'] == 'success':
        # Job is null. In the success case no information is stored.
        # We can safely delete the key
        r_client.delete(key)
    elif info['status'] == 'warning':
        # In case of warning the key message stores the warning
        # message. We need to create a new job, mark it as
        # successful and store the error message as expected by
        # the new structure
        params = Parameters.load(cmd, values_dict=values_dict)
        job = ProcessingJob.create(user, params)
        job._set_status('success')
        payload = {'job_id': job.id,
                   'alert_type': 'warning',
                   'alert_msg': info['message']}
        r_client.set(key, dumps(payload))
    else:
        # The status is error. The key message stores the error
        # message. We need to create a new job and mark it as
        # failed with the given error message
        params = Parameters.load(cmd, values_dict=values_dict)
        job = ProcessingJob.create(user, params)
        job._set_error(info['message'])
        payload = {'job_id': job.id}
        r_client.set(key, dumps(payload))