def test_complete_job(self):
    # Complete success
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}),
        Study(1), '16S')
    c_job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(
            Command.get_validator('BIOM'),
            values_dict={'template': pt.id,
                         'files': dumps({'BIOM': ['file']}),
                         'artifact_type': 'BIOM'}),
        True)
    c_job._set_status('running')
    fd, fp = mkstemp(suffix='_table.biom')
    close(fd)
    with open(fp, 'w') as f:
        f.write('\n')
    self._clean_up_files.append(fp)
    exp_artifact_count = get_count('qiita.artifact') + 1
    payload = dumps(
        {'success': True, 'error': '',
         'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')],
                                     'artifact_type': 'BIOM'}}})
    job = self._create_job('complete_job',
                           {'job_id': c_job.id, 'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'success')
    self.assertEqual(get_count('qiita.artifact'), exp_artifact_count)

    # Complete job error
    payload = dumps({'success': False, 'error': 'Job failure'})
    job = self._create_job(
        'complete_job',
        {'job_id': 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d',
         'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    c_job = ProcessingJob('bcc7ebcd-39c1-43e4-af2d-822e3589f14d')
    self.assertEqual(c_job.status, 'error')
    self.assertEqual(c_job.log, LogEntry.newest_records(numrecords=1)[0])
    self.assertEqual(c_job.log.msg, 'Job failure')

    # Complete internal error
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}),
        Study(1), '16S')
    c_job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(
            Command.get_validator('BIOM'),
            values_dict={'template': pt.id,
                         'files': dumps({'BIOM': ['file']}),
                         'artifact_type': 'BIOM'}),
        True)
    c_job._set_status('running')
    fp = '/surprised/if/this/path/exists.biom'
    payload = dumps(
        {'success': True, 'error': '',
         'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')],
                                     'artifact_type': 'BIOM'}}})
    job = self._create_job('complete_job',
                           {'job_id': c_job.id, 'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'error')
    self.assertIn('No such file or directory', c_job.log.msg)
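# The `complete_job` payloads exercised above follow a small JSON contract.
# A minimal sketch of the two shapes the test sends (key names are taken
# from the test itself; this is illustrative, not a formal schema):
from json import dumps

# Successful completion: 'artifacts' maps each output artifact name to its
# files, given as (path, filepath-type) pairs, plus its artifact type.
success_payload = dumps(
    {'success': True, 'error': '',
     'artifacts': {'OTU table': {'filepaths': [('/path/to/table.biom',
                                                'biom')],
                                 'artifact_type': 'BIOM'}}})

# Failed completion: no artifacts; the error message ends up in the job log.
error_payload = dumps({'success': False, 'error': 'Job failure'})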
def artifact_post_req(user_id, filepaths, artifact_type, name,
                      prep_template_id, artifact_id=None):
    """Creates the initial artifact for the prep template

    Parameters
    ----------
    user_id : str
        User adding the artifact
    filepaths : dict of str
        Maps each file type to a comma-separated list of file names to
        attach to the artifact
    artifact_type : str
        The type of the artifact
    name : str
        Name to give the artifact
    prep_template_id : int or str castable to int
        Prep template to attach the artifact to
    artifact_id : int or str castable to int, optional
        The id of the imported artifact

    Returns
    -------
    dict of str
        A dictionary with the status of the operation:
        {'status': status, 'message': message}
    """
    prep_template_id = int(prep_template_id)
    prep = PrepTemplate(prep_template_id)
    study_id = prep.study_id

    # First check if the user has access to the study
    access_error = check_access(study_id, user_id)
    if access_error:
        return access_error

    user = User(user_id)

    if artifact_id:
        # if the artifact id has been provided, import the artifact
        qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
        cmd = qiita_plugin.get_command('copy_artifact')
        params = Parameters.load(cmd, values_dict={'artifact': artifact_id,
                                                   'prep_template': prep.id})
        job = ProcessingJob.create(user, params, True)
    else:
        uploads_path = get_mountpoint('uploads')[0][1]
        path_builder = partial(join, uploads_path, str(study_id))
        cleaned_filepaths = {}

        for ftype, file_list in filepaths.items():
            # JavaScript sends us this list as a comma-separated list
            for fp in file_list.split(','):
                # JavaScript will send this value as an empty string if the
                # list of files was empty. In such case, the split will
                # generate a single element containing the empty string.
                # Check for that case here and, if fp is not the empty
                # string, proceed to check if the file exists
                if fp:
                    # Check if filepath being passed exists for study
                    full_fp = path_builder(fp)
                    exists = check_fp(study_id, full_fp)
                    if exists['status'] != 'success':
                        return {'status': 'error',
                                'message': 'File does not exist: %s' % fp}
                    if ftype not in cleaned_filepaths:
                        cleaned_filepaths[ftype] = []
                    cleaned_filepaths[ftype].append(full_fp)

        # This should never happen, but it doesn't hurt to have an explicit
        # check, in case there is something odd with the JS
        if not cleaned_filepaths:
            return {'status': 'error',
                    'message': "Can't create artifact, no files provided."}

        # This try/except will catch the case when the plugins are not
        # activated, so there is no Validate for the given artifact_type
        try:
            command = Command.get_validator(artifact_type)
        except QiitaDBError as e:
            return {'status': 'error', 'message': str(e)}

        job = ProcessingJob.create(
            user,
            Parameters.load(command, values_dict={
                'template': prep_template_id,
                'files': dumps(cleaned_filepaths),
                'artifact_type': artifact_type,
                'name': name,
                'analysis': None,
            }),
            True)

    # Submit the job and cache its id so the UI can track its progress
    job.submit()
    r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep.id,
                 dumps({'job_id': job.id, 'is_qiita_job': True}))

    return {'status': 'success', 'message': ''}
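# A minimal usage sketch for artifact_post_req, assuming the files have
# already been uploaded to the study's uploads directory. The email, file
# names, file types and prep template id below are hypothetical, for
# illustration only.
response = artifact_post_req(
    user_id='demo@example.com',                       # hypothetical user
    filepaths={'raw_forward_seqs': 'reads_R1.fastq.gz',
               'raw_barcodes': 'barcodes.fastq.gz'},  # comma-separated lists
    artifact_type='FASTQ',
    name='Raw data 1',
    prep_template_id=1)                               # hypothetical id

if response['status'] != 'success':
    raise RuntimeError(response['message'])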
def artifact_post_req(user_id, filepaths, artifact_type, name,
                      prep_template_id, artifact_id=None):
    """Creates the initial artifact for the prep template

    Parameters
    ----------
    user_id : str
        User adding the artifact
    filepaths : dict of str
        Maps each file type to a comma-separated list of file names to
        attach to the artifact
    artifact_type : str
        The type of the artifact
    name : str
        Name to give the artifact
    prep_template_id : int or str castable to int
        Prep template to attach the artifact to
    artifact_id : int or str castable to int, optional
        The id of the imported artifact

    Returns
    -------
    dict of str
        A dictionary with the status of the operation:
        {'status': status, 'message': message}
    """
    prep_template_id = int(prep_template_id)
    prep = PrepTemplate(prep_template_id)
    study_id = prep.study_id

    # First check if the user has access to the study
    access_error = check_access(study_id, user_id)
    if access_error:
        return access_error

    if artifact_id:
        # if the artifact id has been provided, import the artifact
        job_id = safe_submit(user_id, copy_raw_data, prep, artifact_id)
        is_qiita_job = False
    else:
        uploads_path = get_mountpoint('uploads')[0][1]
        path_builder = partial(join, uploads_path, str(study_id))
        cleaned_filepaths = {}

        for ftype, file_list in viewitems(filepaths):
            # JavaScript sends us this list as a comma-separated list
            for fp in file_list.split(','):
                # JavaScript will send this value as an empty string if the
                # list of files was empty. In such case, the split will
                # generate a single element containing the empty string.
                # Check for that case here and, if fp is not the empty
                # string, proceed to check if the file exists
                if fp:
                    # Check if filepath being passed exists for study
                    full_fp = path_builder(fp)
                    exists = check_fp(study_id, full_fp)
                    if exists['status'] != 'success':
                        return {'status': 'error',
                                'message': 'File does not exist: %s' % fp}
                    if ftype not in cleaned_filepaths:
                        cleaned_filepaths[ftype] = []
                    cleaned_filepaths[ftype].append(full_fp)

        # This should never happen, but it doesn't hurt to have an explicit
        # check, in case there is something odd with the JS
        if not cleaned_filepaths:
            return {'status': 'error',
                    'message': "Can't create artifact, no files provided."}

        command = Command.get_validator(artifact_type)
        job = ProcessingJob.create(
            User(user_id),
            Parameters.load(command, values_dict={
                'template': prep_template_id,
                'files': dumps(cleaned_filepaths),
                'artifact_type': artifact_type}))
        job.submit()
        job_id = job.id
        is_qiita_job = True

    r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep.id,
                 dumps({'job_id': job_id, 'is_qiita_job': is_qiita_job}))

    return {'status': 'success', 'message': ''}
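# Both versions of the handler finish by caching the submitted job under the
# prep template's redis key. A sketch of how a status check might read that
# entry back; the key format string and redis client are assumed from the
# code above, and `prep_id` is a hypothetical prep template id.
from json import loads

raw = r_client.get(PREP_TEMPLATE_KEY_FORMAT % prep_id)
if raw is not None:
    info = loads(raw)
    # Jobs submitted through the older code path (safe_submit) are not
    # Qiita processing jobs, so only look the job up when the flag is set.
    if info['is_qiita_job']:
        status = ProcessingJob(info['job_id']).status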