def execute(job_id): """Executes a job through the plugin system Parameters ---------- job_id : str The id of the job to execute """ # Create the new job job = ProcessingJob(job_id) job_dir = join(get_work_base_dir(), job.id) software = job.command.software plugin_start_script = software.start_script plugin_env_script = software.environment_script # Get the command to start the plugin cmd = '%s "%s" "%s" "%s" "%s" "%s"' % ( qiita_config.plugin_launcher, plugin_env_script, plugin_start_script, qiita_config.base_url, job.id, job_dir) # Start the plugin std_out, std_err, return_value = system_call(cmd) if return_value != 0: # Something wrong happened during the plugin start procedure job.status = 'error' log = LogEntry.create( 'Runtime', "Error starting plugin '%s':\nStd output:%s\nStd error:%s" % (software.name, std_out, std_err)) job.log = log
def artifact_post_req(user, artifact_id):
    """Deletes the artifact

    Parameters
    ----------
    user : qiita_db.user.User
        The user requesting the action
    artifact_id : int
        Id of the artifact being deleted
    """
    aid = int(artifact_id)
    artifact = Artifact(aid)
    check_artifact_access(user, artifact)

    # Track the deletion under the analysis when there is one; otherwise
    # under the artifact's first prep template
    analysis = artifact.analysis
    if analysis:
        redis_key = "analysis_%s" % analysis.id
    else:
        redis_key = PREP_TEMPLATE_KEY_FORMAT % artifact.prep_templates[0].id

    plugin = Software.from_name_and_version('Qiita', 'alpha')
    params = Parameters.load(plugin.get_command('delete_artifact'),
                             values_dict={'artifact': aid})
    job = ProcessingJob.create(user, params, True)
    r_client.set(redis_key, dumps({'job_id': job.id, 'is_qiita_job': True}))
    job.submit()

    return {'job': job.id}
def _submit(ctx, user, parameters):
    """Submit a plugin job to a cluster

    Parameters
    ----------
    ctx : qiita_db.ware.Dispatch
        A Dispatch object to submit through
    user : qiita_db.user.User
        The user doing the submission
    parameters : qiita_db.software.Parameters
        The parameters of the job

    Returns
    -------
    str
        The job id
    """
    new_job = ProcessingJob.create(user, parameters)
    # Redis routing details consumed by the wrapper to publish progress
    deets = {
        'job_id': new_job.id,
        'pubsub': user.id,
        'messages': user.id + ':messages',
    }
    ctx.submit_async(_redis_wrap, execute, deets, new_job.id)
    return new_job.id
def job_ajax_get_req(job_id):
    """Returns the job information

    Parameters
    ----------
    job_id : str
        The job id

    Returns
    -------
    dict of objects
        A dictionary containing the job information
        {'status': str,
         'message': str,
         'job_id': str,
         'job_status': str,
         'job_step': str,
         'job_parameters': dict of {str: str}}
    """
    job = ProcessingJob(job_id)
    # Always reports success; the job's own state travels in job_status
    info = dict(status='success',
                message='',
                job_id=job.id,
                job_status=job.status,
                job_step=job.step,
                job_parameters=job.parameters.values)
    return info
def study_delete_req(study_id, user_id):
    """Delete a given study

    Parameters
    ----------
    study_id : int
        Study id to delete
    user_id : str
        User requesting the deletion

    Returns
    -------
    dict
        Status of deletion, in the format
        {status: status, message: message}
    """
    access_error = check_access(study_id, user_id)
    if access_error:
        return access_error

    # Offload the actual deletion to a Qiita-plugin job
    plugin = Software.from_name_and_version('Qiita', 'alpha')
    params = Parameters.load(plugin.get_command('delete_study'),
                             values_dict={'study': study_id})
    job = ProcessingJob.create(User(user_id), params, True)

    # Store the job id attaching it to the sample template id
    r_client.set(STUDY_KEY_FORMAT % study_id, dumps({'job_id': job.id}))
    job.submit()

    return {'status': 'success', 'message': ''}
def post(self, preprocessed_data_id): user = self.current_user # make sure user is admin and can therefore actually submit to EBI if user.level != 'admin': raise HTTPError(403, reason="User %s cannot submit to EBI!" % user.id) submission_type = self.get_argument('submission_type') if submission_type not in ['ADD', 'MODIFY']: raise HTTPError(403, reason="User: %s, %s is not a recognized " "submission type" % (user.id, submission_type)) study = Artifact(preprocessed_data_id).study state = study.ebi_submission_status if state == 'submitting': message = "Cannot resubmit! Current state is: %s" % state self.display_template(preprocessed_data_id, message, 'danger') else: qiita_plugin = Software.from_name_and_version('Qiita', 'alpha') cmd = qiita_plugin.get_command('submit_to_EBI') params = Parameters.load( cmd, values_dict={'artifact': preprocessed_data_id, 'submission_type': submission_type}) job = ProcessingJob.create(user, params, True) r_client.set('ebi_submission_%s' % preprocessed_data_id, dumps({'job_id': job.id, 'is_qiita_job': True})) job.submit() level = 'success' message = 'EBI submission started. Job id: %s' % job.id self.redirect("%s/study/description/%d?level=%s&message=%s" % ( qiita_config.portal_dir, study.id, level, url_escape(message)))
def prep_template_jobs_get_req(prep_id, user_id):
    """Returns graph of all artifacts created from the prep base artifact

    Parameters
    ----------
    prep_id : int
        Prep template ID to get graph for
    user_id : str
        User making the request

    Returns
    -------
    dict with the jobs information

    Notes
    -----
    Nodes are identified by the corresponding Artifact ID.
    """
    prep = PrepTemplate(int(prep_id))
    access_error = check_access(prep.study_id, user_id)
    if access_error:
        return access_error

    result = {}
    raw_info = r_client.get(PREP_TEMPLATE_KEY_FORMAT % prep_id)
    if raw_info:
        # Missing keys default to the empty string
        info = defaultdict(lambda: '', loads(raw_info))
        job = ProcessingJob(info['job_id'])
        result[job.id] = {'status': job.status,
                          'step': job.step,
                          'error': job.log.msg if job.log else ""}
    return result
def post(self, preprocessed_data_id):
    """Launch a VAMPS submission job for the given artifact.

    Parameters
    ----------
    preprocessed_data_id : int or str
        The id of the artifact being submitted

    Raises
    ------
    HTTPError
        403 if the requesting user is not an admin
    """
    user = self.current_user
    # make sure user is admin and can therefore actually submit to VAMPS
    if user.level != 'admin':
        # Fix: message goes through `reason` so tornado reports it to the
        # client (the second positional argument is only the server log
        # message), consistent with the other handlers in this module
        raise HTTPError(403, reason="User %s cannot submit to VAMPS!"
                        % user.id)
    msg = ''
    msg_level = 'success'

    plugin = Software.from_name_and_version('Qiita', 'alpha')
    cmd = plugin.get_command('submit_to_VAMPS')

    artifact = Artifact(preprocessed_data_id)
    # Check if the artifact is already being submitted to VAMPS
    is_being_submitted = any(
        [j.status in ('queued', 'running') for j in artifact.jobs(cmd=cmd)])
    # Fix: the old code compared this boolean against the string
    # 'submitting' (always False), so duplicate submissions were never
    # blocked; test the boolean directly
    if is_being_submitted:
        msg = "Cannot resubmit! Data is already being submitted"
        msg_level = 'danger'
        self.display_template(preprocessed_data_id, msg, msg_level)
    else:
        params = Parameters.load(
            cmd, values_dict={'artifact': preprocessed_data_id})
        job = ProcessingJob.create(user, params, True)
        job.submit()
        self.redirect('/study/description/%s' % artifact.study.study_id)
def artifact_post_req(user, artifact_id):
    """Deletes the artifact

    Parameters
    ----------
    user : qiita_db.user.User
        The user requesting the action
    artifact_id : int
        Id of the artifact being deleted
    """
    aid = int(artifact_id)
    artifact = Artifact(aid)
    check_artifact_access(user, artifact)

    # Track the deletion under the owning analysis if there is one,
    # otherwise under the artifact's first prep template
    analysis = artifact.analysis
    if analysis:
        redis_key = "analysis_%s" % analysis.id
    else:
        redis_key = PREP_TEMPLATE_KEY_FORMAT % artifact.prep_templates[0].id

    plugin = Software.from_name_and_version('Qiita', 'alpha')
    params = Parameters.load(plugin.get_command('delete_artifact'),
                             values_dict={'artifact': aid})
    job = ProcessingJob.create(user, params)
    r_client.set(redis_key, dumps({'job_id': job.id, 'is_qiita_job': True}))
    job.submit()
def post(self):
    """Retrieves the archiving information

    Returns
    -------
    dict
        The contents of the analysis keyed by sample id

    Notes
    -----
    Argument "job_id" must be the Qiita job_id which is used to infer
    the merging scheme. Argument "features" is a list of feature
    identifiers, e.g. Deblur sequences. Feature identifiers not found
    in the archive won't be included in the return dictionary.
    """
    job = ProcessingJob(self.get_argument('job_id'))
    feats = self.request.arguments['features']
    scheme = Archive.get_merging_scheme_from_job(job)
    self.write(Archive.retrieve_feature_values(
        archive_merging_scheme=scheme, features=feats))
def post(self, preprocessed_data_id):
    """Launch a VAMPS submission job for the given artifact.

    Parameters
    ----------
    preprocessed_data_id : int or str
        The id of the artifact being submitted

    Raises
    ------
    HTTPError
        403 if the requesting user is not an admin
    """
    user = self.current_user
    # make sure user is admin and can therefore actually submit to VAMPS
    if user.level != 'admin':
        # Fix: use `reason` so tornado sends the message to the client
        # (the second positional argument is only logged server-side)
        raise HTTPError(403, reason="User %s cannot submit to VAMPS!"
                        % user.id)
    msg = ''
    msg_level = 'success'

    plugin = Software.from_name_and_version('Qiita', 'alpha')
    cmd = plugin.get_command('submit_to_VAMPS')

    artifact = Artifact(preprocessed_data_id)
    # Check if the artifact is already being submitted to VAMPS
    is_being_submitted = any([
        j.status in ('queued', 'running') for j in artifact.jobs(cmd=cmd)
    ])
    # Fix: previously compared this boolean against the string
    # 'submitting', which is always False, so the resubmission guard
    # never triggered; test the boolean directly
    if is_being_submitted:
        msg = "Cannot resubmit! Data is already being submitted"
        msg_level = 'danger'
        self.display_template(preprocessed_data_id, msg, msg_level)
    else:
        params = Parameters.load(
            cmd, values_dict={'artifact': preprocessed_data_id})
        job = ProcessingJob.create(user, params)
        job.submit()
        self.redirect('/study/description/%s' % artifact.study.study_id)
def _create_job(self, cmd_name, values_dict):
    """Create and queue a Qiita-plugin job for use in tests.

    Parameters
    ----------
    cmd_name : str
        Name of the Qiita plugin command to run
    values_dict : dict
        Parameter values for the command

    Returns
    -------
    qiita_db.processing_job.ProcessingJob
        The newly created job, already set to 'queued'
    """
    self.user = User('*****@*****.**')
    plugin = Software.from_name_and_version('Qiita', 'alpha')
    command = plugin.get_command(cmd_name)
    job = ProcessingJob.create(
        self.user, Parameters.load(command, values_dict=values_dict), True)
    job._set_status('queued')
    return job
def sample_template_put_req(study_id, user_id, sample_template):
    """Updates a sample template using the given file

    Parameters
    ----------
    study_id : int
        The current study object id
    user_id : str
        The current user object id
    sample_template : str
        filename to use for updating

    Returns
    -------
    dict
        results dictonary in the format
        {'status': status,
         'message': msg,
         'file': sample_template}

        status can be success, warning, or error depending on result
        message has the warnings or errors
        file has the file name
    """
    # Bail out early when the template is missing or access is denied
    exists = _check_sample_template_exists(int(study_id))
    if exists['status'] != 'success':
        return exists
    access_error = check_access(int(study_id), user_id)
    if access_error:
        return access_error

    fp_rsp = check_fp(study_id, sample_template)
    if fp_rsp['status'] != 'success':
        # Unknown filepath, so return the error message
        return fp_rsp

    # Offload the update of the sample template to the cluster
    plugin = Software.from_name_and_version('Qiita', 'alpha')
    update_cmd = plugin.get_command('update_sample_template')
    job = ProcessingJob.create(
        User(user_id),
        Parameters.load(update_cmd,
                        values_dict={'study': int(study_id),
                                     'template_fp': fp_rsp['file']}))

    # Store the job id attaching it to the sample template id
    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                 dumps({'job_id': job.id}))
    job.submit()

    return {'status': 'success', 'message': '', 'file': sample_template}
def sample_template_handler_post_request(study_id, user, filepath,
                                         data_type=None):
    """Creates a new sample template

    Parameters
    ----------
    study_id: int
        The study to add the sample information
    user: qiita_db.user import User
        The user performing the request
    filepath: str
        The path to the sample template file
    data_type: str, optional
        If filepath is a QIIME mapping file, the data type of the prep
        information file

    Returns
    -------
    dict of {'job': str}
        job: the id of the job adding the sample information to the study

    Raises
    ------
    HTTPError
        404 if the filepath doesn't exist
    """
    # Check if the current user has access to the study
    sample_template_checks(study_id, user)

    # Check if the file exists
    fp_rsp = check_fp(study_id, filepath)
    if fp_rsp['status'] != 'success':
        # Fix: pass the message via `reason` so tornado reports it to the
        # client (the second positional argument is only the server log
        # message), matching the sibling version of this handler
        raise HTTPError(404, reason='Filepath not found')
    filepath = fp_rsp['file']

    is_mapping_file = looks_like_qiime_mapping_file(filepath)
    if is_mapping_file and not data_type:
        # Fix: same reason-vs-log_message issue as above
        raise HTTPError(400, reason='Please, choose a data type if '
                        'uploading a QIIME mapping file')

    # Offload the creation to a Qiita-plugin job and track it in redis
    qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
    cmd = qiita_plugin.get_command('create_sample_template')
    params = Parameters.load(
        cmd, values_dict={'fp': filepath, 'study_id': study_id,
                          'is_mapping_file': is_mapping_file,
                          'data_type': data_type})
    job = ProcessingJob.create(user, params, True)
    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                 dumps({'job_id': job.id}))
    job.submit()

    return {'job': job.id}
def sample_template_handler_post_request(study_id, user, filepath,
                                         data_type=None,
                                         direct_upload=False):
    """Creates a new sample template

    Parameters
    ----------
    study_id: int
        The study to add the sample information
    user: qiita_db.user import User
        The user performing the request
    filepath: str
        The path to the sample template file
    data_type: str, optional
        If filepath is a QIIME mapping file, the data type of the prep
        information file
    direct_upload: boolean, optional
        If filepath is a direct upload; if False we need to process the
        filepath as part of the study upload folder

    Returns
    -------
    dict of {'job': str}
        job: the id of the job adding the sample information to the study

    Raises
    ------
    HTTPError
        404 if the filepath doesn't exist
    """
    # Check if the current user has access to the study
    sample_template_checks(study_id, user)

    # Resolve the file inside the upload folder unless it was uploaded
    # directly
    if not direct_upload:
        fp_rsp = check_fp(study_id, filepath)
        if fp_rsp['status'] != 'success':
            raise HTTPError(404, reason='Filepath not found')
        filepath = fp_rsp['file']

    is_mapping_file = looks_like_qiime_mapping_file(filepath)
    if is_mapping_file and not data_type:
        raise HTTPError(400, reason='Please, choose a data type if '
                        'uploading a QIIME mapping file')

    # Offload the creation to a Qiita-plugin job and track it in redis
    plugin = Software.from_name_and_version('Qiita', 'alpha')
    create_cmd = plugin.get_command('create_sample_template')
    job = ProcessingJob.create(
        user,
        Parameters.load(create_cmd,
                        values_dict={'fp': filepath,
                                     'study_id': study_id,
                                     'is_mapping_file': is_mapping_file,
                                     'data_type': data_type}),
        True)
    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                 dumps({'job_id': job.id}))
    job.submit()

    return {'job': job.id}
def post(self, study_id):
    """Store the uploaded SSH key and launch a remote list/transfer job.

    Depending on the 'remote-request-type' argument, starts a Qiita job
    that either lists the files at a remote URL or downloads them into
    the study's upload folder.
    """
    method = self.get_argument('remote-request-type')
    url = self.get_argument('inputURL')
    # Body of the uploaded key file (bytes)
    ssh_key = self.request.files['ssh-key'][0]['body']
    status = 'success'
    message = ''
    try:
        study = Study(int(study_id))
    except QiitaDBUnknownIDError:
        raise HTTPError(404, reason="Study %s does not exist" % study_id)
    check_access(self.current_user, study, no_public=True, raise_error=True)

    # Persist the key under the study's upload folder with restrictive
    # permissions, since it grants access to the remote host
    _, upload_folder = get_mountpoint("uploads")[0]
    upload_folder = join(upload_folder, study_id)
    ssh_key_fp = join(upload_folder, '.key.txt')
    create_nested_path(upload_folder)
    with open(ssh_key_fp, 'wb') as f:
        f.write(ssh_key)
    chmod(ssh_key_fp, 0o600)

    qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
    if method == 'list':
        cmd = qiita_plugin.get_command('list_remote_files')
        params = Parameters.load(cmd, values_dict={
            'url': url, 'private_key': ssh_key_fp, 'study_id': study_id})
    elif method == 'transfer':
        cmd = qiita_plugin.get_command('download_remote_files')
        params = Parameters.load(cmd, values_dict={
            'url': url, 'private_key': ssh_key_fp,
            'destination': upload_folder})
    else:
        status = 'error'
        message = 'Not a valid method'

    # Only submit when a valid method produced parameters; the job id is
    # stored in redis so the upload page can poll its progress
    if status == 'success':
        job = ProcessingJob.create(self.current_user, params, True)
        job.submit()
        r_client.set(UPLOAD_STUDY_FORMAT % study_id,
                     dumps({'job_id': job.id}))

    self.write({'status': status, 'message': message})
def test_artifact_post_request(self):
    """Deleting an artifact requires access and runs a delete job."""
    # No access
    with self.assertRaises(QiitaHTTPError):
        artifact_post_req(User('*****@*****.**'), 1)

    artifact_post_req(User('*****@*****.**'), 2)
    # Wait until the job is completed
    wait_for_prep_information_job(1)
    # Check that the delete function has been actually called; the job id
    # is retrieved from the redis key the request stored
    job = ProcessingJob(loads(r_client.get('prep_template_1'))['job_id'])
    # Artifact 2 has children in the test DB, so the deletion must fail
    self.assertEqual(job.status, 'error')
    self.assertIn('Cannot delete artifact 2', job.log.msg)
def wait_for_processing_job(job_id):
    """Waits until a processing job is completed

    Parameters
    ----------
    job_id : str
        Job id
    """
    job = ProcessingJob(job_id)
    # Poll until the job reaches a terminal state
    while job.status not in ('success', 'error'):
        sleep(0.05)
    # One extra beat so backend bookkeeping can settle
    sleep(0.05)
def test_artifact_summary_post_request(self):
    """Summary generation: access control and job reuse."""
    # Requesting a summary on an inaccessible artifact must fail
    with self.assertRaises(QiitaHTTPError):
        artifact_summary_post_request(User('*****@*****.**'), 1)

    # A queued summary job is returned instead of creating a new one
    job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 2}))
    job._set_status('queued')
    obs = artifact_summary_post_request(User('*****@*****.**'), 2)
    self.assertEqual(obs, {'job': [job.id, 'queued', None]})
def test_artifact_summary_post_request(self):
    """Summary generation: access control and reuse of a queued job."""
    # No access -> error
    with self.assertRaises(QiitaHTTPError):
        artifact_summary_post_request(User('*****@*****.**'), 1)

    # An existing queued job for the summary command is returned as-is
    existing = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 2}))
    existing._set_status('queued')

    obs = artifact_summary_post_request(User('*****@*****.**'), 2)
    self.assertEqual(obs, {'job': [existing.id, 'queued', None]})
def display_template(self, study_id, msg):
    """Simple function to avoid duplication of code

    Renders the upload page, folding in the status of any pending or
    finished remote-files job stored in redis for this study.

    NOTE(review): the `msg` parameter appears unused here — the rendered
    message is derived from the job state instead; confirm with callers.
    """
    study_id = int(study_id)
    study = Study(study_id)
    user = self.current_user
    level = 'info'
    message = ''
    remote_url = ''
    remote_files = []
    check_access(user, study, no_public=True, raise_error=True)

    job_info = r_client.get(UPLOAD_STUDY_FORMAT % study_id)
    if job_info:
        # Missing keys default to the empty string
        job_info = defaultdict(lambda: '', loads(job_info))
        job_id = job_info['job_id']
        job = ProcessingJob(job_id)
        job_status = job.status
        processing = job_status not in ('success', 'error')
        url = job.parameters.values['url']
        if processing:
            # Job still running: tell the user what it is doing
            if job.command.name == 'list_remote_files':
                message = 'Retrieving remote files: listing %s' % url
            else:
                message = 'Retrieving remote files: download %s' % url
        elif job_status == 'error':
            level = 'danger'
            message = job.log.msg.replace('\n', '</br>')
            # making errors nicer for users
            if 'No such file' in message:
                message = 'URL not valid: <i>%s</i>, please review.' % url
        else:
            # Job finished successfully: show the stored listing/alert
            remote_url = job_info['url']
            remote_files = job_info['files']
            level = job_info['alert_type']
            message = job_info['alert_msg'].replace('\n', '</br>')

    # getting the ontologies
    self.render('upload.html',
                study_title=study.title,
                study_info=study.info,
                study_id=study_id,
                is_admin=user.level == 'admin',
                extensions=','.join(qiita_config.valid_upload_extension),
                max_upload_size=qiita_config.max_upload_size,
                level=level,
                message=message,
                remote_url=remote_url,
                remote_files=remote_files,
                files=get_files_from_uploads_folders(str(study_id)))
def wait_for_processing_job(job_id):
    """Waits until a processing job is completed

    Parameters
    ----------
    job_id : str
        Job id
    """
    job = ProcessingJob(job_id)
    # Poll until the job settles into a terminal state
    while job.status not in ('success', 'error'):
        sleep(0.8)
    # this print is useful for debugging
    if job.status == 'error':
        print("==> %s: %s" % (job.id, job.log.msg))
    # One extra beat so backend bookkeeping can settle
    sleep(0.8)
def post(self):
    """Launch a Qiita-plugin job that deletes the requested analysis."""
    analysis_id = int(self.get_argument('analysis_id'))
    user = self.current_user
    check_analysis_access(user, Analysis(analysis_id))

    plugin = Software.from_name_and_version('Qiita', 'alpha')
    params = Parameters.load(plugin.get_command('delete_analysis'),
                             values_dict={'analysis_id': analysis_id})
    job = ProcessingJob.create(user, params, True)

    # Store the job id attaching it to the sample template id
    r_client.set('analysis_delete_%d' % analysis_id,
                 dumps({'job_id': job.id}))
    job.submit()

    self.redirect("%s/analysis/list/" % (qiita_config.portal_dir))
def patch(self):
    """Updates / stores feature information in the archive.

    Notes
    -----
    Argument "path" must be the Qiita job_id which is used to infer
    the merging scheme. Argument "value" is a json string, i.e. result
    of a json.dump(obj) of a dictionary, keyed with feature identifiers.
    """
    job = ProcessingJob(self.get_argument('path'))
    payload = self.get_argument('value')
    scheme = Archive.get_merging_scheme_from_job(job)
    self.write(Archive.insert_features(scheme, loads(payload)))
def artifact_summary_post_request(user_id, artifact_id):
    """Launches the HTML summary generation and returns the job information

    Parameters
    ----------
    user_id : str
        The user making the request
    artifact_id : int or str
        The artifact id

    Returns
    -------
    dict of objects
        A dictionary containing the artifact summary information
        {'status': str,
         'message': str,
         'job': list of [str, str, str]}
    """
    aid = int(artifact_id)
    artifact = Artifact(aid)
    access_error = check_access(artifact.study.id, user_id)
    if access_error:
        return access_error

    # Reuse any queued/running/successful summary job so the HTML is only
    # generated once
    command = Command.get_html_generator(artifact.artifact_type)
    active = [j for j in artifact.jobs(cmd=command)
              if j.status in ['queued', 'running', 'success']]
    if active:
        job = active[0]
    else:
        # No prior job: create and submit a fresh one
        job = ProcessingJob.create(
            User(user_id),
            Parameters.load(command, values_dict={'input_data': aid}))
        job.submit()

    return {'status': 'success',
            'message': '',
            'job': [job.id, job.status, job.step]}
def test_patch(self):
    """PATCH remove on a job: rejected while in construction, allowed
    once the job is in a terminal (error) state."""
    # Create a new job - through a workflow since that is the only way
    # of creating jobs in the interface
    exp_command = Command(1)
    json_str = (
        '{"input_data": 1, "max_barcode_errors": 1.5, '
        '"barcode_type": "golay_12", "max_bad_run_length": 3, '
        '"rev_comp": false, "phred_quality_threshold": 3, '
        '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
        '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
    exp_params = Parameters.load(exp_command, json_str=json_str)
    exp_user = User('*****@*****.**')
    name = "Test processing workflow"
    # tests success
    wf = ProcessingWorkflow.from_scratch(exp_user, exp_params, name=name,
                                         force=True)
    graph = wf.graph
    nodes = graph.nodes()
    job_id = nodes[0].id

    # While the job is 'in_construction' the removal must be refused
    response = self.patch('/study/process/job/', {'op': 'remove',
                                                  'path': job_id})
    self.assertEqual(response.code, 200)
    exp = {'status': 'error',
           'message': "Can't delete job %s. It is 'in_construction' "
                      "status. Please use /study/process/workflow/"
                      % job_id}
    self.assertEqual(loads(response.body), exp)

    # Test success: once errored, the job can be removed
    ProcessingJob(job_id)._set_error('Killed for testing')
    response = self.patch('/study/process/job/', {'op': 'remove',
                                                  'path': job_id})
    self.assertEqual(response.code, 200)
    exp = {'status': 'success', 'message': ''}
    self.assertEqual(loads(response.body), exp)
def artifact_summary_post_request(user_id, artifact_id):
    """Launches the HTML summary generation and returns the job information

    Parameters
    ----------
    user_id : str
        The user making the request
    artifact_id : int or str
        The artifact id

    Returns
    -------
    dict of objects
        A dictionary containing the artifact summary information
        {'status': str,
         'message': str,
         'job': list of [str, str, str]}
    """
    aid = int(artifact_id)
    artifact = Artifact(aid)
    access_error = check_access(artifact.study.id, user_id)
    if access_error:
        return access_error

    # The summary is generated at most once: reuse any job for the HTML
    # generator command that is queued, running or already succeeded
    command = Command.get_html_generator(artifact.artifact_type)
    candidates = [j for j in artifact.jobs(cmd=command)
                  if j.status in ['queued', 'running', 'success']]
    if candidates:
        job = candidates[0]
    else:
        job = ProcessingJob.create(
            User(user_id),
            Parameters.load(command, values_dict={'input_data': aid}))
        job.submit()

    return {'status': 'success', 'message': '',
            'job': [job.id, job.status, job.step]}
def test_artifact_summary_post_request(self):
    """Summary request: access error dict and reuse of a queued job."""
    # No access -> error payload, not an exception
    obs = artifact_summary_post_request('*****@*****.**', 1)
    self.assertEqual(obs,
                     {'status': 'error',
                      'message': 'User does not have access to study'})

    # An already queued summary job is returned instead of a new one
    existing = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 2}))
    existing._set_status('queued')

    obs = artifact_summary_post_request('*****@*****.**', 2)
    self.assertEqual(obs,
                     {'status': 'success',
                      'message': '',
                      'job': [existing.id, 'queued', None]})
def post(self, study_id):
    """Store the uploaded SSH key and launch a remote list/transfer job.

    Depending on the 'remote-request-type' argument, starts a Qiita job
    that either lists the files at a remote URL or downloads them into
    the study's upload folder.
    """
    method = self.get_argument('remote-request-type')
    url = self.get_argument('inputURL')
    # Body of the uploaded key file (bytes)
    ssh_key = self.request.files['ssh-key'][0]['body']
    status = 'success'
    message = ''
    try:
        study = Study(int(study_id))
    except QiitaDBUnknownIDError:
        raise HTTPError(404, reason="Study %s does not exist" % study_id)
    check_access(
        self.current_user, study, no_public=True, raise_error=True)

    _, upload_folder = get_mountpoint("uploads")[0]
    upload_folder = join(upload_folder, study_id)
    ssh_key_fp = join(upload_folder, '.key.txt')
    create_nested_path(upload_folder)

    # Fix: the uploaded body is bytes, so the key must be written in
    # binary mode — text mode ('w') raises TypeError on Python 3
    with open(ssh_key_fp, 'wb') as f:
        f.write(ssh_key)
    # NOTE(review): the sibling handler also chmods the key to 0o600;
    # consider doing the same here — TODO confirm `chmod` is imported

    qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
    if method == 'list':
        cmd = qiita_plugin.get_command('list_remote_files')
        params = Parameters.load(cmd, values_dict={
            'url': url, 'private_key': ssh_key_fp, 'study_id': study_id})
    elif method == 'transfer':
        cmd = qiita_plugin.get_command('download_remote_files')
        params = Parameters.load(cmd, values_dict={
            'url': url, 'private_key': ssh_key_fp,
            'destination': upload_folder})
    else:
        status = 'error'
        message = 'Not a valid method'

    # Only submit when a valid method produced parameters; the job id is
    # stored in redis so the upload page can poll its progress
    if status == 'success':
        job = ProcessingJob.create(self.current_user, params, True)
        job.submit()
        r_client.set(
            UPLOAD_STUDY_FORMAT % study_id, dumps({'job_id': job.id}))

    self.write({'status': status, 'message': message})
def get(self):
    """Render the analyses listing page.

    Shows the user's own (shared + private) analyses and the public
    ones, folding in the status of any pending analysis-deletion jobs
    tracked in redis.
    """
    user = self.current_user
    is_local_request = is_localhost(self.request.headers['host'])

    uanalyses = user.shared_analyses | user.private_analyses
    user_analysis_ids = set([a.id for a in uanalyses])

    panalyses = Analysis.get_by_status('public')
    public_analysis_ids = set([a.id for a in panalyses])
    # Don't list an analysis twice if the user already owns/shares it
    public_analysis_ids = public_analysis_ids - user_analysis_ids

    user_analyses = generate_analysis_list(user_analysis_ids)
    public_analyses = generate_analysis_list(public_analysis_ids, True)

    dlop = partial(download_link_or_path, is_local_request)

    messages = {'info': '', 'danger': ''}
    for analysis_id in user_analysis_ids:
        job_info = r_client.get('analysis_delete_%d' % analysis_id)
        if job_info:
            job_info = defaultdict(lambda: '', loads(job_info))
            job_id = job_info['job_id']
            job = ProcessingJob(job_id)
            job_status = job.status
            processing = job_status not in ('success', 'error')
            if processing:
                messages['info'] += ('Analysis %s is being deleted<br/>'
                                     % analysis_id)
            elif job_status == 'error':
                messages['danger'] += (job.log.msg.replace('\n', '<br/>') +
                                       '<br/>')
            else:
                # Fix: this bucket was initialized to a list ([]) while a
                # string is concatenated below; `list += str` extends the
                # list character-by-character instead of building the
                # message — initialize to the empty string like the
                # 'info'/'danger' buckets above
                if job_info['alert_type'] not in messages:
                    messages[job_info['alert_type']] = ''
                # NOTE(review): this reads job.log for a successful job —
                # presumably the alert text; confirm log is set on success
                messages[job_info['alert_type']] += (
                    job.log.msg.replace('\n', '<br/>') + '<br/>')

    self.render("list_analyses.html", user_analyses=user_analyses,
                public_analyses=public_analyses, messages=messages,
                dlop=dlop)
def artifact_summary_post_request(user, artifact_id):
    """Launches the HTML summary generation and returns the job information

    Parameters
    ----------
    user : qiita_db.user.User
        The user making the request
    artifact_id : int or str
        The artifact id

    Returns
    -------
    dict of objects
        A dictionary containing the job summary information
        {'job': [str, str, str]}
    """
    aid = int(artifact_id)
    artifact = Artifact(aid)
    check_artifact_access(user, artifact)

    # The HTML summary is generated at most once: reuse any queued,
    # running or already-successful job for the generator command (there
    # is a single such job, hence index 0)
    command = Command.get_html_generator(artifact.artifact_type)
    candidates = [j for j in artifact.jobs(cmd=command)
                  if j.status in ['queued', 'running', 'success']]
    if candidates:
        job = candidates[0]
    else:
        job = ProcessingJob.create(
            user,
            Parameters.load(command, values_dict={'input_data': aid}),
            True)
        job.submit()

    return {'job': [job.id, job.status, job.step]}
def test_artifact_summary_post_request(self):
    """Summary request: access error dict and queued-job reuse."""
    # No access -> error payload
    obs = artifact_summary_post_request('*****@*****.**', 1)
    exp = {'status': 'error',
           'message': 'User does not have access to study'}
    self.assertEqual(obs, exp)

    # A queued summary job is returned instead of creating a new one
    queued = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 2}))
    queued._set_status('queued')

    obs = artifact_summary_post_request('*****@*****.**', 2)
    exp = {'status': 'success',
           'message': '',
           'job': [queued.id, 'queued', None]}
    self.assertEqual(obs, exp)
def artifact_summary_post_request(user, artifact_id):
    """Launches the HTML summary generation and returns the job information

    Parameters
    ----------
    user : qiita_db.user.User
        The user making the request
    artifact_id : int or str
        The artifact id

    Returns
    -------
    dict of objects
        A dictionary containing the job summary information
        {'job': [str, str, str]}
    """
    aid = int(artifact_id)
    artifact = Artifact(aid)
    check_artifact_access(user, artifact)

    # Reuse an existing summary job when one is queued, running or has
    # succeeded — only one such job exists, so index 0 is safe
    command = Command.get_html_generator(artifact.artifact_type)
    existing = [j for j in artifact.jobs(cmd=command)
                if j.status in ['queued', 'running', 'success']]
    if existing:
        job = existing[0]
    else:
        # Nothing to reuse: launch a fresh summary-generation job
        job = ProcessingJob.create(user, Parameters.load(
            command, values_dict={'input_data': aid}), True)
        job.submit()

    return {'job': [job.id, job.status, job.step]}
def _submit(ctx, user, parameters):
    """Submit a plugin job to a cluster

    Parameters
    ----------
    ctx : qiita_db.ware.Dispatch
        A Dispatch object to submit through
    user : qiita_db.user.User
        The user doing the submission
    parameters : qiita_db.software.Parameters
        The parameters of the job

    Returns
    -------
    str
        The job id
    """
    job = ProcessingJob.create(user, parameters)
    # Redis routing info consumed by the wrapper for progress messages
    redis_details = {
        'job_id': job.id,
        'pubsub': user.id,
        'messages': user.id + ':messages',
    }
    ctx.submit_async(_redis_wrap, execute, redis_details, job.id)
    return job.id
def sample_template_delete_req(study_id, user_id):
    """Deletes the sample template attached to the study

    Parameters
    ----------
    study_id : int
        The current study object id
    user_id : str
        The current user object id

    Returns
    -------
    dict
        results dictonary in the format
        {'status': status,
         'message': msg}

        status can be success, warning, or error depending on result
        message has the warnings or errors
    """
    # Bail out early when the template is missing or access is denied
    exists = _check_sample_template_exists(int(study_id))
    if exists['status'] != 'success':
        return exists
    access_error = check_access(int(study_id), user_id)
    if access_error:
        return access_error

    # Offload the deletion to a Qiita-plugin job
    plugin = Software.from_name_and_version('Qiita', 'alpha')
    job = ProcessingJob.create(
        User(user_id),
        Parameters.load(plugin.get_command('delete_sample_template'),
                        values_dict={'study': int(study_id)}))

    # Store the job id attaching it to the sample template id
    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                 dumps({'job_id': job.id}))
    job.submit()

    return {'status': 'success', 'message': ''}
def job_ajax_get_req(job_id):
    """Returns the job information

    Parameters
    ----------
    job_id : str
        The job id

    Returns
    -------
    dict of objects
        A dictionary containing the job information
        {'status': str,
         'message': str,
         'job_id': str,
         'job_status': str,
         'job_step': str,
         'job_parameters': dict of {str: str}}
    """
    job = ProcessingJob(job_id)
    cmd = job.command
    sw = cmd.software
    # Only errored jobs carry a log entry
    err = job.log.msg if job.log is not None else None
    return {'status': 'success',
            'message': '',
            'job_id': job.id,
            'job_external_id': job.external_id,
            'job_status': job.status,
            'job_step': job.step,
            'job_parameters': job.parameters.values,
            'job_error': err,
            'command': cmd.name,
            'command_description': cmd.description,
            'software': sw.name,
            'software_version': sw.version}
def get_sample_template_processing_status(st_id):
    """Returns the processing status of the sample template's latest job

    Parameters
    ----------
    st_id : int
        The sample template (study) id

    Returns
    -------
    (bool, str, str)
        Whether a job is still processing, the alert type and the alert
        message to show in the interface
    """
    raw_info = r_client.get(SAMPLE_TEMPLATE_KEY_FORMAT % st_id)
    if not raw_info:
        # No job has ever been attached to this sample template
        return False, '', ''

    # defaultdict avoids KeyErrors for payloads written without
    # alert_type/alert_msg
    info = defaultdict(lambda: '', loads(raw_info))
    job = ProcessingJob(info['job_id'])
    status = job.status

    if status not in ('success', 'error'):
        # queued/running/etc. -> still being processed
        return (True, 'info',
                'This sample template is currently being processed')
    if status == 'error':
        return False, 'danger', job.log.msg.replace('\n', '</br>')
    # Successful job: report whatever alert the job stored in redis
    return (False, info['alert_type'],
            info['alert_msg'].replace('\n', '</br>'))
def sample_template_handler_delete_request(study_id, user):
    """Deletes the sample template

    Parameters
    ----------
    study_id: int
        The study to delete the sample information
    user: qiita_db.user
        The user performing the request

    Returns
    -------
    dict of {'job': str}
        job: the id of the job deleting the sample information to the study

    Raises
    ------
    HTTPError
        404 If the sample template doesn't exist
    """
    # Verify study access and sample template existence before queueing
    # the deletion job
    sample_template_checks(study_id, user, check_exists=True)

    plugin = Software.from_name_and_version('Qiita', 'alpha')
    command = plugin.get_command('delete_sample_template')
    job_params = Parameters.load(command,
                                 values_dict={'study': int(study_id)})
    delete_job = ProcessingJob.create(user, job_params, True)

    # Store the job id attaching it to the sample template id so the UI
    # can track the deletion
    r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                 dumps({'job_id': delete_job.id}))
    delete_job.submit()

    return {'job': delete_job.id}
def test_artifact_summary_get_request(self):
    """Exercises artifact_summary_get_request for: no summary, summary in
    progress, existing summary, no access, public visibility, admin view
    and analysis artifacts."""
    user = User('*****@*****.**')
    # Artifact w/o summary
    obs = artifact_summary_get_request(user, 1)
    exp_files = [
        (1L, '1_s_G1_L001_sequences.fastq.gz (raw forward seqs)'),
        (2L, '1_s_G1_L001_sequences_barcodes.fastq.gz (raw barcodes)')]
    exp = {'name': 'Raw data 1',
           'artifact_id': 1,
           'artifact_type': 'FASTQ',
           'artifact_timestamp': '2012-10-01 09:10',
           'visibility': 'private',
           'editable': True,
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 1?\')) { '
                       'set_artifact_visibility(\'public\', 1) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '1?\')) { set_artifact_visibility(\'sandbox\', 1) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button>'),
           'processing_info': {},
           'files': exp_files,
           'is_from_analysis': False,
           'summary': None,
           'job': None,
           'errored_summary_jobs': []}
    self.assertEqual(obs, exp)

    # Artifact with summary being generated
    job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 1})
    )
    job._set_status('queued')
    obs = artifact_summary_get_request(user, 1)
    exp = {'name': 'Raw data 1',
           'artifact_id': 1,
           'artifact_type': 'FASTQ',
           'artifact_timestamp': '2012-10-01 09:10',
           'visibility': 'private',
           'editable': True,
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 1?\')) { '
                       'set_artifact_visibility(\'public\', 1) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '1?\')) { set_artifact_visibility(\'sandbox\', 1) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button>'),
           'processing_info': {},
           'files': exp_files,
           'is_from_analysis': False,
           'summary': None,
           'job': [job.id, 'queued', None],
           'errored_summary_jobs': []}
    self.assertEqual(obs, exp)

    # Artifact with summary
    fd, fp = mkstemp(suffix=".html")
    close(fd)
    with open(fp, 'w') as f:
        f.write('<b>HTML TEST - not important</b>\n')
    a = Artifact(1)
    a.set_html_summary(fp)
    self._files_to_remove.extend([fp, a.html_summary_fp[1]])
    exp_files.append(
        (a.html_summary_fp[0],
         '%s (html summary)' % basename(a.html_summary_fp[1])))
    exp_summary_path = relpath(
        a.html_summary_fp[1], qiita_config.base_data_dir)
    obs = artifact_summary_get_request(user, 1)
    exp = {'name': 'Raw data 1',
           'artifact_id': 1,
           'artifact_type': 'FASTQ',
           'artifact_timestamp': '2012-10-01 09:10',
           'visibility': 'private',
           'editable': True,
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 1?\')) { '
                       'set_artifact_visibility(\'public\', 1) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '1?\')) { set_artifact_visibility(\'sandbox\', 1) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button>'),
           'processing_info': {},
           'files': exp_files,
           'is_from_analysis': False,
           'summary': exp_summary_path,
           'job': None,
           'errored_summary_jobs': []}
    self.assertEqual(obs, exp)

    # No access
    demo_u = User('*****@*****.**')
    with self.assertRaises(QiitaHTTPError):
        obs = artifact_summary_get_request(demo_u, 1)

    # A non-owner/share user can't see the files
    a.visibility = 'public'
    obs = artifact_summary_get_request(demo_u, 1)
    exp = {'name': 'Raw data 1',
           'artifact_id': 1,
           'artifact_type': 'FASTQ',
           'artifact_timestamp': '2012-10-01 09:10',
           'visibility': 'public',
           'editable': False,
           'buttons': '',
           'processing_info': {},
           'files': [],
           'is_from_analysis': False,
           'summary': exp_summary_path,
           'job': None,
           'errored_summary_jobs': []}
    self.assertEqual(obs, exp)

    # returning to private
    a.visibility = 'private'

    # admin gets buttons
    obs = artifact_summary_get_request(User('*****@*****.**'), 2)
    exp_files = [
        (3L, '1_seqs.fna (preprocessed fasta)'),
        (4L, '1_seqs.qual (preprocessed fastq)'),
        (5L, '1_seqs.demux (preprocessed demux)')]
    exp = {'name': 'Demultiplexed 1',
           'artifact_id': 2,
           'artifact_type': 'Demultiplexed',
           'artifact_timestamp': '2012-10-01 10:10',
           'visibility': 'private',
           'editable': True,
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 2?\')) { '
                       'set_artifact_visibility(\'public\', 2) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '2?\')) { set_artifact_visibility(\'sandbox\', 2) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button> <a class="btn btn-primary '
                       'btn-sm" href="/ebi_submission/2"><span '
                       'class="glyphicon glyphicon-export"></span> '
                       'Submit to EBI</a> <a class="btn btn-primary '
                       'btn-sm" href="/vamps/2"><span class="glyphicon '
                       'glyphicon-export"></span> Submit to VAMPS</a>'),
           'processing_info': {
               'command': 'Split libraries FASTQ',
               'software': 'QIIME',
               'software_version': '1.9.1',
               'processing_parameters': {
                   'max_barcode_errors': '1.5', 'sequence_max_n': '0',
                   'max_bad_run_length': '3', 'phred_offset': u'auto',
                   'rev_comp': 'False', 'phred_quality_threshold': '3',
                   'input_data': '1', 'rev_comp_barcode': 'False',
                   'rev_comp_mapping_barcodes': 'False',
                   'min_per_read_length_fraction': '0.75',
                   'barcode_type': u'golay_12'}},
           'files': exp_files,
           'is_from_analysis': False,
           'summary': None,
           'job': None,
           'errored_summary_jobs': []}
    self.assertEqual(obs, exp)

    # analysis artifact
    obs = artifact_summary_get_request(user, 8)
    exp = {'name': 'noname',
           'artifact_id': 8,
           'artifact_type': 'BIOM',
           # this value changes on build so copy from obs
           'artifact_timestamp': obs['artifact_timestamp'],
           'visibility': 'sandbox',
           'editable': True,
           'buttons': '',
           'processing_info': {},
           'files': [(27, 'biom_table.biom (biom)')],
           'is_from_analysis': True,
           'summary': None,
           'job': None,
           'errored_summary_jobs': []}
    self.assertEqual(obs, exp)
def sample_template_handler_patch_request(user, req_op, req_path,
                                          req_value=None, req_from=None):
    """Patches the sample template

    Parameters
    ----------
    user: qiita_db.user.User
        The user performing the request
    req_op : str
        The operation to perform on the sample template
    req_path : str
        The path to the attribute to patch
    req_value : str, optional
        The new value
    req_from : str, optional
        The original path of the element

    Returns
    -------
    dict of {'job': str}
        The id of the job performing the patch operation

    Raises
    ------
    HTTPError
        400 If the path parameter doesn't follow the expected format
        400 If the given operation is not supported
    """
    req_path = [v for v in req_path.split('/') if v]
    # At this point we know the path should be at least length 2
    if len(req_path) < 2:
        raise HTTPError(400, reason='Incorrect path parameter')

    study_id = int(req_path[0])
    # Check if the current user has access to the study and if the sample
    # template exists
    sample_template_checks(study_id, user, check_exists=True)

    if req_op == 'remove':
        # Path format
        # column: study_id/columns/column_name
        # sample: study_id/samples/sample_id
        if len(req_path) != 3:
            raise HTTPError(400, reason='Incorrect path parameter')

        attribute = req_path[1]
        attr_id = req_path[2]

        # Delegate the deletion to the internal Qiita plugin so it runs
        # asynchronously
        qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
        cmd = qiita_plugin.get_command('delete_sample_or_column')
        params = Parameters.load(
            cmd, values_dict={'obj_class': 'SampleTemplate',
                              'obj_id': study_id,
                              'sample_or_col': attribute,
                              'name': attr_id})
        job = ProcessingJob.create(user, params, True)
        # Store the job id attaching it to the sample template id
        r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                     dumps({'job_id': job.id}))
        job.submit()
        return {'job': job.id}
    elif req_op == 'replace':
        # WARNING: Although the patch operation is a replace, is not a full
        # true replace. A replace is in theory equivalent to a remove + add.
        # In this case, the replace operation doesn't necessarily removes
        # anything (e.g. when only new columns/samples are being added to the)
        # sample information.
        # Path format: study_id/data
        # Forcing to specify data for extensibility. In the future we may want
        # to use this function to replace other elements of the sample
        # information
        if len(req_path) != 2:
            raise HTTPError(400, reason='Incorrect path parameter')

        attribute = req_path[1]

        if attribute == 'data':
            # Update the sample information
            if req_value is None:
                raise HTTPError(400, reason="Value is required when updating "
                                            "sample information")

            # Check if the file exists
            fp_rsp = check_fp(study_id, req_value)
            if fp_rsp['status'] != 'success':
                raise HTTPError(404, reason='Filepath not found')
            filepath = fp_rsp['file']

            # Delegate the update to the internal Qiita plugin
            qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
            cmd = qiita_plugin.get_command('update_sample_template')
            params = Parameters.load(
                cmd, values_dict={'study': study_id,
                                  'template_fp': filepath})
            job = ProcessingJob.create(user, params, True)

            # Store the job id attaching it to the sample template id
            r_client.set(SAMPLE_TEMPLATE_KEY_FORMAT % study_id,
                         dumps({'job_id': job.id}))
            job.submit()
            return {'job': job.id}
        else:
            raise HTTPError(404, reason='Attribute %s not found' % attribute)
    else:
        raise HTTPError(400, reason='Operation %s not supported. Current '
                                    'supported operations: remove, replace'
                                    % req_op)
def test_artifact_summary_get_request(self):
    """Exercises artifact_summary_get_request for: no summary, summary in
    progress, existing summary, no access, and public visibility."""
    # Artifact w/o summary
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp_p_jobs = [
        ['063e553b-327c-4818-ab4a-adfe58e49860', 'Split libraries FASTQ',
         'queued', None, None],
        ['bcc7ebcd-39c1-43e4-af2d-822e3589f14d', 'Split libraries',
         'running', 'demultiplexing', None]]
    exp_files = [
        (1L, '1_s_G1_L001_sequences.fastq.gz (raw forward seqs)'),
        (2L, '1_s_G1_L001_sequences_barcodes.fastq.gz (raw barcodes)')]
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'summary': None,
           'job': None,
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'private',
           'buttons': '<button onclick="if (confirm(\'Are you sure you '
                      'want to make public artifact id: 1?\')) { '
                      'set_artifact_visibility(\'public\', 1) }" '
                      'class="btn btn-primary btn-sm">Make public</button>'
                      ' <button onclick="if (confirm(\'Are you sure you '
                      'want to revert to sandbox artifact id: 1?\')) '
                      '{ set_artifact_visibility(\'sandbox\', 1) }" '
                      'class="btn btn-primary btn-sm">Revert to '
                      'sandbox</button>',
           'files': exp_files,
           'editable': True}
    self.assertEqual(obs, exp)

    # Artifact with summary being generated
    job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 1})
    )
    job._set_status('queued')
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'summary': None,
           'job': [job.id, 'queued', None],
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'private',
           'buttons': '<button onclick="if (confirm(\'Are you sure you '
                      'want to make public artifact id: 1?\')) { '
                      'set_artifact_visibility(\'public\', 1) }" '
                      'class="btn btn-primary btn-sm">Make public</button>'
                      ' <button onclick="if (confirm(\'Are you sure you '
                      'want to revert to sandbox artifact id: 1?\')) { '
                      'set_artifact_visibility(\'sandbox\', 1) }" '
                      'class="btn btn-primary btn-sm">Revert to '
                      'sandbox</button>',
           'files': exp_files,
           'editable': True}
    self.assertEqual(obs, exp)

    # Artifact with summary
    fd, fp = mkstemp(suffix=".html")
    close(fd)
    with open(fp, 'w') as f:
        f.write('<b>HTML TEST - not important</b>\n')
    a = Artifact(1)
    a.html_summary_fp = fp
    self._files_to_remove.extend([fp, a.html_summary_fp[1]])
    exp_files.append(
        (a.html_summary_fp[0],
         '%s (html summary)' % basename(a.html_summary_fp[1])))
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'summary': '<b>HTML TEST - not important</b>\n',
           'job': None,
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'private',
           'buttons': '<button onclick="if (confirm(\'Are you sure you '
                      'want to make public artifact id: 1?\')) { '
                      'set_artifact_visibility(\'public\', 1) }" '
                      'class="btn btn-primary btn-sm">Make public</button>'
                      ' <button onclick="if (confirm(\'Are you sure you '
                      'want to revert to sandbox artifact id: 1?\')) { '
                      'set_artifact_visibility(\'sandbox\', 1) }" '
                      'class="btn btn-primary btn-sm">Revert to '
                      'sandbox</button>',
           'files': exp_files,
           'editable': True}
    self.assertEqual(obs, exp)

    # No access
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'error',
           'message': 'User does not have access to study'}
    self.assertEqual(obs, exp)

    # A non-owner/share user can't see the files
    a.visibility = 'public'
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'summary': '<b>HTML TEST - not important</b>\n',
           'job': None,
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'public',
           'buttons': '',
           'files': [],
           'editable': False}
    self.assertEqual(obs, exp)
def test_complete_job(self):
    """Exercises the complete_job private task: successful artifact
    creation, reported job failure, and an internal error from a
    non-existent filepath."""
    # Complete success
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
    c_job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(
            Command.get_validator('BIOM'),
            values_dict={'template': pt.id,
                         'files': dumps({'BIOM': ['file']}),
                         'artifact_type': 'BIOM'}), True)
    c_job._set_status('running')
    fd, fp = mkstemp(suffix='_table.biom')
    close(fd)
    with open(fp, 'w') as f:
        f.write('\n')
    self._clean_up_files.append(fp)
    # Completing the job should create exactly one new artifact
    exp_artifact_count = get_count('qiita.artifact') + 1
    payload = dumps(
        {'success': True, 'error': '',
         'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')],
                                     'artifact_type': 'BIOM'}}})
    job = self._create_job('complete_job', {'job_id': c_job.id,
                                            'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'success')
    self.assertEqual(get_count('qiita.artifact'), exp_artifact_count)

    # Complete job error
    payload = dumps({'success': False, 'error': 'Job failure'})
    job = self._create_job(
        'complete_job', {'job_id': 'bcc7ebcd-39c1-43e4-af2d-822e3589f14d',
                         'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    # The completed job itself errors and logs the reported failure
    c_job = ProcessingJob('bcc7ebcd-39c1-43e4-af2d-822e3589f14d')
    self.assertEqual(c_job.status, 'error')
    self.assertEqual(c_job.log, LogEntry.newest_records(numrecords=1)[0])
    self.assertEqual(c_job.log.msg, 'Job failure')

    # Complete internal error
    pt = npt.assert_warns(
        QiitaDBWarning, PrepTemplate.create,
        pd.DataFrame({'new_col': {'1.SKD6.640190': 1}}), Study(1), '16S')
    c_job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(
            Command.get_validator('BIOM'),
            values_dict={'template': pt.id,
                         'files': dumps({'BIOM': ['file']}),
                         'artifact_type': 'BIOM'}), True)
    c_job._set_status('running')
    # A filepath that does not exist triggers an internal error during
    # artifact creation
    fp = '/surprised/if/this/path/exists.biom'
    payload = dumps(
        {'success': True, 'error': '',
         'artifacts': {'OTU table': {'filepaths': [(fp, 'biom')],
                                     'artifact_type': 'BIOM'}}})
    job = self._create_job('complete_job', {'job_id': c_job.id,
                                            'payload': payload})
    private_task(job.id)
    self.assertEqual(job.status, 'success')
    self.assertEqual(c_job.status, 'error')
    self.assertIn('No such file or directory', c_job.log.msg)
def prep_template_patch_req(user_id, req_op, req_path, req_value=None,
                            req_from=None):
    """Modifies an attribute of the prep template

    Parameters
    ----------
    user_id : str
        The id of the user performing the patch operation
    req_op : str
        The operation to perform on the prep information
    req_path : str
        The prep information and attribute to patch
    req_value : str, optional
        The value that needs to be modified
    req_from : str, optional
        The original path of the element

    Returns
    -------
    dict of {str, str, str}
        A dictionary with the following keys:
        - status: str, whether if the request is successful or not
        - message: str, if the request is unsuccessful, a human readable
          error
        - row_id: str, the row_id that we tried to delete
    """
    req_path = [v for v in req_path.split('/') if v]
    if req_op == 'replace':
        # The structure of the path should be /prep_id/attribute_to_modify/
        # so if we don't have those 2 elements, we should return an error
        if len(req_path) != 2:
            return {'status': 'error',
                    'message': 'Incorrect path parameter'}
        prep_id = int(req_path[0])
        attribute = req_path[1]

        # Check if the user actually has access to the prep template
        prep = PrepTemplate(prep_id)
        access_error = check_access(prep.study_id, user_id)
        if access_error:
            return access_error

        status = 'success'
        msg = ''
        if attribute == 'investigation_type':
            prep.investigation_type = req_value
        elif attribute == 'data':
            # Replacing the prep information file: validate it exists, then
            # delegate the update to the internal Qiita plugin
            fp = check_fp(prep.study_id, req_value)
            if fp['status'] != 'success':
                return fp
            fp = fp['file']
            qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
            cmd = qiita_plugin.get_command('update_prep_template')
            params = Parameters.load(
                cmd, values_dict={'prep_template': prep_id,
                                  'template_fp': fp})
            job = ProcessingJob.create(User(user_id), params, True)
            # Attach the job to the prep template so the UI can track it
            r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep_id,
                         dumps({'job_id': job.id}))
            job.submit()
        elif attribute == 'name':
            prep.name = req_value.strip()
        else:
            # We don't understand the attribute so return an error
            return {'status': 'error',
                    'message': 'Attribute "%s" not found. '
                               'Please, check the path parameter'
                               % attribute}

        return {'status': status, 'message': msg}
    elif req_op == 'remove':
        # The structure of the path should be:
        # /prep_id/row_id/{columns|samples}/name
        if len(req_path) != 4:
            return {'status': 'error',
                    'message': 'Incorrect path parameter'}
        prep_id = int(req_path[0])
        row_id = req_path[1]
        attribute = req_path[2]
        attr_id = req_path[3]

        # Check if the user actually has access to the study
        pt = PrepTemplate(prep_id)
        access_error = check_access(pt.study_id, user_id)
        if access_error:
            return access_error

        # Delegate the deletion to the internal Qiita plugin so it runs
        # asynchronously
        qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
        cmd = qiita_plugin.get_command('delete_sample_or_column')
        params = Parameters.load(
            cmd, values_dict={'obj_class': 'PrepTemplate',
                              'obj_id': prep_id,
                              'sample_or_col': attribute,
                              'name': attr_id})
        job = ProcessingJob.create(User(user_id), params, True)
        # Store the job id attaching it to the sample template id
        r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep_id,
                     dumps({'job_id': job.id}))
        job.submit()
        return {'status': 'success',
                'message': '',
                'row_id': row_id}
    else:
        return {'status': 'error',
                'message': 'Operation "%s" not supported. '
                           'Current supported operations: replace, remove'
                           % req_op,
                'row_id': '0'}
def artifact_post_req(user_id, filepaths, artifact_type, name,
                      prep_template_id, artifact_id=None):
    """Creates the initial artifact for the prep template

    Parameters
    ----------
    user_id : str
        User adding the artifact
    filepaths : dict of str
        Comma-separated list of files to attach to the artifact,
        keyed by file type
    artifact_type : str
        The type of the artifact
    name : str
        Name to give the artifact
    prep_template_id : int or str castable to int
        Prep template to attach the artifact to
    artifact_id : int or str castable to int, optional
        The id of the imported artifact

    Returns
    -------
    dict of objects
        A dictionary containing the new artifact ID
        {'status': status,
         'message': message,
         'artifact': id}
    """
    prep_template_id = int(prep_template_id)
    prep = PrepTemplate(prep_template_id)
    study_id = prep.study_id

    # First check if the user has access to the study
    access_error = check_access(study_id, user_id)
    if access_error:
        return access_error

    if artifact_id:
        # if the artifact id has been provided, import the artifact
        job_id = safe_submit(user_id, copy_raw_data, prep, artifact_id)
        is_qiita_job = False
    else:
        # Otherwise, validate the uploaded files and create the artifact
        # through a validator job
        uploads_path = get_mountpoint('uploads')[0][1]
        path_builder = partial(join, uploads_path, str(study_id))
        cleaned_filepaths = {}
        for ftype, file_list in viewitems(filepaths):
            # JavaScript sends us this list as a comma-separated list
            for fp in file_list.split(','):
                # JavaScript will send this value as an empty string if the
                # list of files was empty. In such case, the split will
                # generate a single element containing the empty string.
                # Check for that case here and, if fp is not the empty
                # string, proceed to check if the file exists
                if fp:
                    # Check if filepath being passed exists for study
                    full_fp = path_builder(fp)
                    exists = check_fp(study_id, full_fp)
                    if exists['status'] != 'success':
                        return {'status': 'error',
                                'message': 'File does not exist: %s' % fp}
                    if ftype not in cleaned_filepaths:
                        cleaned_filepaths[ftype] = []
                    cleaned_filepaths[ftype].append(full_fp)

        # This should never happen, but it doesn't hurt to actually have
        # a explicit check, in case there is something odd with the JS
        if not cleaned_filepaths:
            return {'status': 'error',
                    'message': "Can't create artifact, no files provided."}

        command = Command.get_validator(artifact_type)
        job = ProcessingJob.create(
            User(user_id),
            Parameters.load(command, values_dict={
                'template': prep_template_id,
                'files': dumps(cleaned_filepaths),
                'artifact_type': artifact_type
            }))
        job.submit()
        job_id = job.id
        is_qiita_job = True

    # Attach the job to the prep template so the UI can track its progress
    r_client.set(PREP_TEMPLATE_KEY_FORMAT % prep.id,
                 dumps({'job_id': job_id, 'is_qiita_job': is_qiita_job}))

    return {'status': 'success', 'message': ''}
def test_job_ajax_patch_req(self):
    """Exercises job_ajax_patch_req error paths (bad path, unknown job,
    in_construction job, non-error status, unsupported op) and the
    successful removal of an errored job."""
    # Create a new job - through a workflow since that is the only way
    # of creating jobs in the interface
    exp_command = Command(1)
    json_str = (
        '{"input_data": 1, "max_barcode_errors": 1.5, '
        '"barcode_type": "golay_12", "max_bad_run_length": 3, '
        '"rev_comp": false, "phred_quality_threshold": 3, '
        '"rev_comp_barcode": false, "rev_comp_mapping_barcodes": false, '
        '"min_per_read_length_fraction": 0.75, "sequence_max_n": 0}')
    exp_params = Parameters.load(exp_command, json_str=json_str)
    exp_user = User('*****@*****.**')
    name = "Test processing workflow"

    # tests success
    wf = ProcessingWorkflow.from_scratch(
        exp_user, exp_params, name=name, force=True)

    graph = wf.graph
    nodes = list(graph.nodes())
    job_id = nodes[0].id

    # Incorrect path parameter
    obs = job_ajax_patch_req('remove', '/%s/somethingelse' % job_id)
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: missing job id'}
    self.assertEqual(obs, exp)

    obs = job_ajax_patch_req('remove', '/')
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: missing job id'}
    self.assertEqual(obs, exp)

    # Job id is not like a job id
    obs = job_ajax_patch_req('remove', '/notAJobId')
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: '
                      'notAJobId is not a recognized job id'}
    self.assertEqual(obs, exp)

    # Job doesn't exist
    obs = job_ajax_patch_req('remove',
                             '/6d368e16-2242-4cf8-87b4-a5dc40bc890b')
    exp = {'status': 'error',
           'message': 'Incorrect path parameter: '
                      '6d368e16-2242-4cf8-87b4-a5dc40bc890b is not a '
                      'recognized job id'}
    self.assertEqual(obs, exp)

    # in_construction job
    obs = job_ajax_patch_req('remove', '/%s' % job_id)
    exp = {'status': 'error',
           'message': "Can't delete job %s. It is 'in_construction' "
                      "status. Please use /study/process/workflow/"
                      % job_id}
    self.assertEqual(obs, exp)

    # job status != 'error'
    job = ProcessingJob(job_id)
    job._set_status('queued')
    obs = job_ajax_patch_req('remove', '/%s' % job_id)
    exp = {'status': 'error',
           'message': 'Only jobs in "error" status can be deleted.'}
    self.assertEqual(obs, exp)

    # Operation not supported
    job._set_status('queued')
    obs = job_ajax_patch_req('add', '/%s' % job_id)
    exp = {'status': 'error',
           'message': 'Operation "add" not supported. Current supported '
                      'operations: remove'}
    self.assertEqual(obs, exp)

    # Test success
    job._set_error('Killed for testing')
    obs = job_ajax_patch_req('remove', '/%s' % job_id)
    exp = {'status': 'success',
           'message': ''}
    self.assertEqual(obs, exp)
def test_artifact_summary_get_request(self):
    """Exercises artifact_summary_get_request for: no summary, summary in
    progress, existing summary, no access, public visibility and the
    admin view with processing parameters."""
    # Artifact w/o summary
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp_p_jobs = [
        ['063e553b-327c-4818-ab4a-adfe58e49860', 'Split libraries FASTQ',
         'queued', None, None],
        ['bcc7ebcd-39c1-43e4-af2d-822e3589f14d', 'Split libraries',
         'running', 'demultiplexing', None]]
    exp_files = [
        (1L, '1_s_G1_L001_sequences.fastq.gz (raw forward seqs)'),
        (2L, '1_s_G1_L001_sequences_barcodes.fastq.gz (raw barcodes)')]
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'processing_parameters': {},
           'summary': None,
           'job': None,
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'private',
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 1?\')) { '
                       'set_artifact_visibility(\'public\', 1) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '1?\')) { set_artifact_visibility(\'sandbox\', 1) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button>'),
           'files': exp_files,
           'editable': True,
           'prep_id': 1,
           'study_id': 1}
    self.assertEqual(obs, exp)

    # Artifact with summary being generated
    job = ProcessingJob.create(
        User('*****@*****.**'),
        Parameters.load(Command(7), values_dict={'input_data': 1})
    )
    job._set_status('queued')
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'processing_parameters': {},
           'summary': None,
           'job': [job.id, 'queued', None],
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'private',
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 1?\')) { '
                       'set_artifact_visibility(\'public\', 1) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '1?\')) { set_artifact_visibility(\'sandbox\', 1) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button>'),
           'files': exp_files,
           'editable': True,
           'prep_id': 1,
           'study_id': 1}
    self.assertEqual(obs, exp)

    # Artifact with summary
    fd, fp = mkstemp(suffix=".html")
    close(fd)
    with open(fp, 'w') as f:
        f.write('<b>HTML TEST - not important</b>\n')
    a = Artifact(1)
    a.html_summary_fp = fp
    self._files_to_remove.extend([fp, a.html_summary_fp[1]])
    exp_files.append(
        (a.html_summary_fp[0],
         '%s (html summary)' % basename(a.html_summary_fp[1])))
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'processing_parameters': {},
           'summary': '<b>HTML TEST - not important</b>\n',
           'job': None,
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'private',
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 1?\')) { '
                       'set_artifact_visibility(\'public\', 1) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '1?\')) { set_artifact_visibility(\'sandbox\', 1) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button>'),
           'files': exp_files,
           'editable': True,
           'prep_id': 1,
           'study_id': 1}
    self.assertEqual(obs, exp)

    # No access
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'error',
           'message': 'User does not have access to study'}
    self.assertEqual(obs, exp)

    # A non-owner/share user can't see the files
    a.visibility = 'public'
    obs = artifact_summary_get_request('*****@*****.**', 1)
    exp = {'status': 'success',
           'message': '',
           'name': 'Raw data 1',
           'processing_parameters': {},
           'summary': '<b>HTML TEST - not important</b>\n',
           'job': None,
           'processing_jobs': exp_p_jobs,
           'errored_jobs': [],
           'visibility': 'public',
           'buttons': '',
           'files': [],
           'editable': False,
           'prep_id': 1,
           'study_id': 1}
    self.assertEqual(obs, exp)

    # returning to private
    a.visibility = 'sandbox'

    # admin gets buttons
    obs = artifact_summary_get_request('*****@*****.**', 2)
    exp_p_jobs = [
        ['d19f76ee-274e-4c1b-b3a2-a12d73507c55',
         'Pick closed-reference OTUs', 'error', 'generating demux file',
         'Error message']]
    exp_files = [
        (3L, '1_seqs.fna (preprocessed fasta)'),
        (4L, '1_seqs.qual (preprocessed fastq)'),
        (5L, '1_seqs.demux (preprocessed demux)')]
    exp = {'status': 'success',
           'files': exp_files,
           'errored_jobs': [],
           'editable': True,
           'visibility': 'private',
           'job': None,
           'message': '',
           'name': 'Demultiplexed 1',
           'processing_jobs': exp_p_jobs,
           'processing_parameters': {
               'max_barcode_errors': 1.5, 'sequence_max_n': 0,
               'max_bad_run_length': 3, 'phred_offset': u'auto',
               'rev_comp': False, 'phred_quality_threshold': 3,
               'input_data': 1, 'rev_comp_barcode': False,
               'rev_comp_mapping_barcodes': False,
               'min_per_read_length_fraction': 0.75,
               'barcode_type': u'golay_12'},
           'summary': None,
           'buttons': ('<button onclick="if (confirm(\'Are you sure you '
                       'want to make public artifact id: 2?\')) { '
                       'set_artifact_visibility(\'public\', 2) }" '
                       'class="btn btn-primary btn-sm">Make public'
                       '</button> <button onclick="if (confirm(\'Are you '
                       'sure you want to revert to sandbox artifact id: '
                       '2?\')) { set_artifact_visibility(\'sandbox\', 2) '
                       '}" class="btn btn-primary btn-sm">Revert to '
                       'sandbox</button> <a class="btn btn-primary '
                       'btn-sm" href="/vamps/2"><span class="glyphicon '
                       'glyphicon-export"></span> Submit to VAMPS</a>'),
           'study_id': 1,
           'prep_id': 1}
    self.assertEqual(obs, exp)
def job_ajax_patch_req(req_op, req_path, req_value=None, req_from=None):
    """Patches a job

    Parameters
    ----------
    req_op : str
        The operation to perform on the job
    req_path : str
        Path parameter with the job to patch
    req_value : str, optional
        The value that needs to be modified
    req_from : str, optional
        The original path of the element

    Returns
    -------
    dict of {str: str}
        A dictionary of the form: {'status': str, 'message': str} in which
        status is the status of the request ('error' or 'success') and
        message is a human readable string with the error message in case
        that status is 'error'.
    """
    # 'remove' is the only operation implemented so far
    if req_op != 'remove':
        return {
            'status': 'error',
            'message': 'Operation "%s" not supported. Current supported '
                       'operations: remove' % req_op
        }

    # The path must contain exactly one non-empty component: the job id
    path_parts = [part for part in req_path.split('/') if part]
    if len(path_parts) != 1:
        return {
            'status': 'error',
            'message': 'Incorrect path parameter: missing job id'
        }
    job_id = path_parts[0]

    try:
        job = ProcessingJob(job_id)
    except QiitaDBUnknownIDError:
        return {
            'status': 'error',
            'message': 'Incorrect path parameter: '
                       '%s is not a recognized job id' % job_id
        }
    except Exception as err:
        # A malformed id surfaces as a DB-level uuid syntax error; report
        # it the same way as an unknown id. Anything else is unexpected.
        err = str(err)
        if "invalid input syntax for uuid" in err:
            return {
                'status': 'error',
                'message': 'Incorrect path parameter: '
                           '%s is not a recognized job id' % job_id
            }
        return {
            'status': 'error',
            'message': 'An error occured while accessing the '
                       'job: %s' % err
        }

    job_status = job.status
    if job_status == 'error':
        # Errored jobs are simply hidden rather than removed
        job.hide()
        return {'status': 'success', 'message': ''}
    if job_status == 'in_construction':
        # Jobs under construction belong to a workflow; the workflow
        # endpoints must be used instead. This message targets developers,
        # not end users.
        return {
            'status': 'error',
            'message': "Can't delete job %s. It is 'in_construction' "
                       "status. Please use /study/process/workflow/" % job_id
        }
    # Any other status: deletion is not currently allowed (see issue #2307)
    return {
        'status': 'error',
        'message': 'Only jobs in "error" status can be deleted.'
    }
def correct_redis_data(key, cmd, values_dict, user):
    """Corrects the data stored in the redis DB

    Parameters
    ----------
    key: str
        The redis key to fix
    cmd : qiita_db.software.Command
        Command to use to create the processing job
    values_dict : dict
        Dictionary used to instantiate the parameters of the command
    user : qiita_db.user. User
        The user that will own the job
    """
    raw = r_client.get(key)
    if not raw:
        # The key holds no information at all; just drop it
        r_client.delete(key)
        return

    info = loads(raw)
    if info['job_id'] is not None:
        # Old-style (non-Qiita) jobs carry nothing useful in the live
        # dump, so their keys can be safely removed
        if not info.get('is_qiita_job'):
            r_client.delete(key)
            return
        try:
            # Instantiating verifies the job still exists; the result
            # itself is not needed
            ProcessingJob(info['job_id'])
            payload = {'job_id': info['job_id'],
                       'alert_type': info['status'],
                       'alert_msg': info['alert_msg']}
            r_client.set(key, dumps(payload))
        except (QiitaDBUnknownIDError, KeyError):
            # The job information was somehow lost; remove the stale key
            r_client.delete(key)
        return

    # job_id is null, so the outcome lives directly in the payload
    status = info['status']
    if status == 'success':
        # Successful runs store nothing worth keeping
        r_client.delete(key)
        return

    # For warnings and errors we must create a real job so the new
    # structure has something to point at
    params = Parameters.load(cmd, values_dict=values_dict)
    job = ProcessingJob.create(user, params)
    if status == 'warning':
        # Mark the job successful and carry the warning text forward
        job._set_status('success')
        payload = {'job_id': job.id,
                   'alert_type': 'warning',
                   'alert_msg': info['message']}
    else:
        # Status is error: fail the job with the stored error message
        job._set_error(info['message'])
        payload = {'job_id': job.id}
    r_client.set(key, dumps(payload))