def write_demux_files(self, prep_template, generate_hdf5=True):
    """Create a demux test artifact (shared helper to avoid duplicated code).

    Parameters
    ----------
    prep_template : PrepTemplate
        The prep template the new artifact is attached to.
    generate_hdf5 : bool, optional
        If True, build a real HDF5 demux file from FASTA_EXAMPLE;
        otherwise write an empty placeholder file.

    Returns
    -------
    Artifact
        The newly created "Demultiplexed" artifact.
    """
    fasta_path = join(self.temp_dir, 'seqs.fna')
    demux_path = join(self.temp_dir, 'demux.seqs')
    if generate_hdf5:
        # build a real HDF5 demux file from the example FASTA
        with open(fasta_path, 'w') as out:
            out.write(FASTA_EXAMPLE)
        with File(demux_path, "w") as h5:
            to_hdf5(fasta_path, h5)
    else:
        # an empty placeholder is enough when only the filepath matters
        with open(demux_path, 'w') as out:
            out.write('')
    if prep_template.artifact is None:
        return Artifact.create(
            [(demux_path, 6)], "Demultiplexed", prep_template=prep_template,
            can_be_submitted_to_ebi=True, can_be_submitted_to_vamps=True)
    proc_params = Parameters.from_default_params(
        DefaultParameters(1), {'input_data': prep_template.artifact.id})
    return Artifact.create(
        [(demux_path, 6)], "Demultiplexed",
        parents=[prep_template.artifact],
        processing_parameters=proc_params,
        can_be_submitted_to_ebi=True, can_be_submitted_to_vamps=True)
def write_demux_files(self, prep_template, generate_hdf5=True):
    """Create a demux test artifact (shared helper to avoid duplicated code).

    Parameters
    ----------
    prep_template : PrepTemplate
        The prep template the new artifact is attached to.
    generate_hdf5 : bool, optional
        If True, build a real HDF5 demux file from FASTA_EXAMPLE;
        otherwise write an empty placeholder file.

    Returns
    -------
    Artifact
        The newly created "Demultiplexed" artifact.
    """
    fasta_path = join(self.temp_dir, 'seqs.fna')
    demux_path = join(self.temp_dir, 'demux.seqs')
    if generate_hdf5:
        # build a real HDF5 demux file from the example FASTA
        with open(fasta_path, 'w') as out:
            out.write(FASTA_EXAMPLE)
        with File(demux_path, "w") as h5:
            to_hdf5(fasta_path, h5)
    else:
        # an empty placeholder is enough when only the filepath matters
        with open(demux_path, 'w') as out:
            out.write('')
    if prep_template.artifact is None:
        return Artifact.create([(demux_path, 6)], "Demultiplexed",
                               prep_template=prep_template)
    proc_params = Parameters.from_default_params(
        DefaultParameters(1), {'input_data': prep_template.artifact.id})
    return Artifact.create([(demux_path, 6)], "Demultiplexed",
                           parents=[prep_template.artifact],
                           processing_parameters=proc_params)
def setUp(self):
    """Create the fixture files and artifact shared by the tests.

    Fix: ``self._files_to_remove`` was assigned ``[self.update_fp]`` and
    then immediately clobbered with ``[]``, so ``update.txt`` leaked after
    every run. The redundant empty reassignment is removed.
    """
    uploads_path = get_mountpoint('uploads')[0][1]
    # Create prep test file to point at
    self.update_fp = join(uploads_path, '1', 'update.txt')
    with open(self.update_fp, 'w') as f:
        f.write("""sample_name\tnew_col\n1.SKD6.640190\tnew_value\n""")
    # files removed by tearDown; the artifact below is intentionally kept
    self._files_to_remove = [self.update_fp]
    # creating temporal files and artifact
    # NOTE: we don't need to remove the artifact created cause it's
    # used to test the delete functionality
    fd, fp = mkstemp(suffix='_seqs.fna')
    close(fd)
    with open(fp, 'w') as f:
        f.write(">1.sid_r4_0 M02034:17:000000000-A5U18:1:1101:15370:1394 "
                "1:N:0:1 orig_bc=CATGAGCT new_bc=CATGAGCT bc_diffs=0\n"
                "GTGTGCCAGCAGCCGCGGTAATACGTAGGG\n")
    # 4 Demultiplexed
    filepaths_processed = [(fp, 4)]
    # 1 for default parameters and input data
    exp_params = Parameters.from_default_params(DefaultParameters(1),
                                                {'input_data': 1})
    self.artifact = Artifact.create(filepaths_processed, "Demultiplexed",
                                    parents=[Artifact(1)],
                                    processing_parameters=exp_params)
def setUp(self):
    """Create the fixture files and artifact shared by the tests.

    Fix: ``self._files_to_remove`` was assigned ``[self.update_fp]`` and
    then immediately clobbered with ``[]``, so ``update.txt`` leaked after
    every run. The redundant empty reassignment is removed.
    """
    uploads_path = get_mountpoint('uploads')[0][1]
    # Create prep test file to point at
    self.update_fp = join(uploads_path, '1', 'update.txt')
    with open(self.update_fp, 'w') as f:
        f.write("""sample_name\tnew_col\n1.SKD6.640190\tnew_value\n""")
    # files removed by tearDown; the artifact below is intentionally kept
    self._files_to_remove = [self.update_fp]
    # creating temporal files and artifact
    # NOTE: we don't need to remove the artifact created cause it's
    # used to test the delete functionality
    fd, fp = mkstemp(suffix='_seqs.fna')
    close(fd)
    with open(fp, 'w') as f:
        f.write(">1.sid_r4_0 M02034:17:000000000-A5U18:1:1101:15370:1394 "
                "1:N:0:1 orig_bc=CATGAGCT new_bc=CATGAGCT bc_diffs=0\n"
                "GTGTGCCAGCAGCCGCGGTAATACGTAGGG\n")
    # 4 Demultiplexed
    filepaths_processed = [(fp, 4)]
    # 1 for default parameters and input data
    exp_params = Parameters.from_default_params(DefaultParameters(1),
                                                {'input_data': 1})
    self.artifact = Artifact.create(filepaths_processed, "Demultiplexed",
                                    parents=[Artifact(1)],
                                    processing_parameters=exp_params)
def post(self):
    """Queue a processing job for the selected preprocessed data and
    render the wait page that redirects back to the study description."""
    study_id = int(self.get_argument('study_id'))
    ppd_id = int(self.get_argument('preprocessed_data_id'))
    # the parameter-set dropdown name embeds the preprocessed data id
    param_id = self.get_argument('parameter-set-%s' % ppd_id)
    params = Parameters.from_default_params(
        DefaultParameters(param_id), {'input_data': ppd_id})
    job_id = plugin_submit(self.current_user, params)
    redirect = ('/study/description/%d?top_tab='
                'preprocessed_data_tab&sub_tab=%s' % (study_id, ppd_id))
    self.render('compute_wait.html', job_id=job_id, title='Processing',
                completion_redirect=redirect)
def post(self):
    """Queue a processing job for the selected preprocessed data and
    render the wait page that redirects back to the study description."""
    study_id = int(self.get_argument('study_id'))
    ppd_id = int(self.get_argument('preprocessed_data_id'))
    # the parameter-set dropdown name embeds the preprocessed data id
    param_id = self.get_argument('parameter-set-%s' % ppd_id)
    parameters = Parameters.from_default_params(
        DefaultParameters(param_id), {'input_data': ppd_id})
    self.render(
        'compute_wait.html',
        job_id=plugin_submit(self.current_user, parameters),
        title='Processing',
        completion_redirect=('/study/description/%d?top_tab='
                             'preprocessed_data_tab&sub_tab=%s'
                             % (study_id, ppd_id)))
def post(self):
    """Queue a preprocessing job for the prep template's raw data and
    render the wait page that redirects back to the study description."""
    study_id = int(self.get_argument('study_id'))
    prep_id = int(self.get_argument('prep_template_id'))
    raw_data = PrepTemplate(prep_id).artifact
    param_id = int(self.get_argument('preprocessing_parameters_id'))
    params = Parameters.from_default_params(
        DefaultParameters(param_id), {'input_data': raw_data.id})
    job_id = plugin_submit(self.current_user, params)
    redirect = ('/study/description/%d?top_tab='
                'prep_template_tab&sub_tab=%s' % (study_id, prep_id))
    self.render('compute_wait.html', job_id=job_id, title='Preprocessing',
                completion_redirect=redirect)
def post(self):
    """Queue a preprocessing job for the prep template's raw data and
    render the wait page that redirects back to the study description."""
    study_id = int(self.get_argument("study_id"))
    prep_id = int(self.get_argument("prep_template_id"))
    raw_data = PrepTemplate(prep_id).artifact
    param_id = int(self.get_argument("preprocessing_parameters_id"))
    parameters = Parameters.from_default_params(
        DefaultParameters(param_id), {"input_data": raw_data.id})
    self.render(
        "compute_wait.html",
        job_id=plugin_submit(self.current_user, parameters),
        title="Preprocessing",
        completion_redirect="/study/description/%d?top_tab="
        "prep_template_tab&sub_tab=%s" % (study_id, prep_id))
def test_submit_to_EBI(self):
    """Submit a demux artifact to EBI in VALIDATE mode and check that the
    study's submission status transitions to 'submitting' and then to the
    expected failure message (the test environment cannot complete a real
    transfer)."""
    # setting up test
    fna_fp = join(self.temp_dir, 'seqs.fna')
    demux_fp = join(self.temp_dir, 'demux.seqs')
    with open(fna_fp, 'w') as f:
        f.write(FASTA_EXAMPLE)
    with File(demux_fp, "w") as f:
        to_hdf5(fna_fp, f)
    pt = PrepTemplate(1)
    params = Parameters.from_default_params(DefaultParameters(1),
                                            {'input_data': pt.artifact.id})
    artifact = Artifact.create([(demux_fp, 6)], "Demultiplexed",
                               parents=[pt.artifact],
                               processing_parameters=params)
    # submit job
    job = self._create_job('submit_to_EBI', {
        'artifact': artifact.id,
        'submission_type': 'VALIDATE'
    })
    job._set_status('in_construction')
    job.submit()
    # wait for the job to fail, and check that the status is submitting
    # NOTE(review): this is a busy-wait with no sleep inside the loop;
    # presumably the job fails quickly in the test environment -- confirm
    checked_submitting = True
    while job.status != 'error':
        if checked_submitting:
            self.assertEqual('submitting',
                             artifact.study.ebi_submission_status)
            checked_submitting = False
    # once it fails wait for a few to check status again
    sleep(5)
    exp = 'Some artifact submissions failed: %d' % artifact.id
    obs = artifact.study.ebi_submission_status
    self.assertEqual(obs, exp)
    # make sure that the error is correct, we have 2 options
    if environ.get('ASPERA_SCP_PASS', '') != '':
        self.assertIn('1.SKM2.640199', job.log.msg)
    else:
        self.assertIn('ASCP Error:', job.log.msg)
    # wait for everything to finish to avoid DB deadlocks
    sleep(5)
def test_submit_to_EBI(self):
    """Submit a demux artifact to EBI in VALIDATE mode and check that the
    study's submission status transitions to 'submitting' and then to the
    expected failure message (the test environment cannot complete a real
    transfer)."""
    # setting up test
    fna_fp = join(self.temp_dir, 'seqs.fna')
    demux_fp = join(self.temp_dir, 'demux.seqs')
    with open(fna_fp, 'w') as f:
        f.write(FASTA_EXAMPLE)
    with File(demux_fp, "w") as f:
        to_hdf5(fna_fp, f)
    pt = PrepTemplate(1)
    params = Parameters.from_default_params(
        DefaultParameters(1), {'input_data': pt.artifact.id})
    artifact = Artifact.create(
        [(demux_fp, 6)], "Demultiplexed", parents=[pt.artifact],
        processing_parameters=params)
    # submit job
    job = self._create_job('submit_to_EBI', {
        'artifact': artifact.id,
        'submission_type': 'VALIDATE'})
    job._set_status('in_construction')
    job.submit()
    # wait for the job to fail, and check that the status is submitting
    # NOTE(review): busy-wait with no sleep inside the loop; presumably
    # the job fails quickly in the test environment -- confirm
    checked_submitting = True
    while job.status != 'error':
        if checked_submitting:
            self.assertEqual('submitting',
                             artifact.study.ebi_submission_status)
            checked_submitting = False
    # once it fails wait for a few to check status again
    sleep(5)
    exp = 'Some artifact submissions failed: %d' % artifact.id
    obs = artifact.study.ebi_submission_status
    self.assertEqual(obs, exp)
    # make sure that the error is correct, we have 2 options
    if environ.get('ASPERA_SCP_PASS', '') != '':
        self.assertIn('1.SKM2.640199', job.log.msg)
    else:
        self.assertIn('ASCP Error:', job.log.msg)
    # wait for everything to finish to avoid DB deadlocks
    sleep(5)
def workflow_handler_post_req(user_id, dflt_params_id, req_params):
    """Creates a new workflow in the system

    Parameters
    ----------
    user_id : str
        The user creating the workflow
    dflt_params_id : int
        The default parameters to use for the first command of the workflow
    req_params : str
        JSON representations of the required parameters for the first
        command of the workflow

    Returns
    -------
    dict of objects
        A dictionary containing the commands information
        {'status': str,
         'message': str,
         'workflow_id': int}
    """
    parameters = Parameters.from_default_params(
        DefaultParameters(dflt_params_id), loads(req_params))
    wf = ProcessingWorkflow.from_scratch(User(user_id), parameters)
    # this is safe as we are creating the workflow for the first time and
    # there is only one node. Remember networkx doesn't assure node order
    first_job = wf.graph.nodes()[0]
    cmd = first_job.command
    job_info = {'id': first_job.id,
                'inputs': [a.id for a in first_job.input_artifacts],
                'label': cmd.name,
                'outputs': cmd.outputs}
    return {'status': 'success',
            'message': '',
            'workflow_id': wf.id,
            'job': job_info}
def workflow_handler_post_req(user_id, dflt_params_id, req_params):
    """Creates a new workflow in the system

    Parameters
    ----------
    user_id : str
        The user creating the workflow
    dflt_params_id : int
        The default parameters to use for the first command of the workflow
    req_params : str
        JSON representations of the required parameters for the first
        command of the workflow

    Returns
    -------
    dict of objects
        A dictionary containing the commands information
        {'status': str,
         'message': str,
         'workflow_id': int}
    """
    parameters = Parameters.from_default_params(
        DefaultParameters(dflt_params_id), loads(req_params))
    wf = ProcessingWorkflow.from_scratch(User(user_id), parameters)
    # this is safe as we are creating the workflow for the first time and
    # there is only one node. Remember networkx doesn't assure node order
    first_job = wf.graph.nodes()[0]
    cmd = first_job.command
    return {'status': 'success',
            'message': '',
            'workflow_id': wf.id,
            'job': {'id': first_job.id,
                    'inputs': [a.id for a in first_job.input_artifacts],
                    'label': cmd.name,
                    'outputs': cmd.outputs}}
def test_download_study(self):
    """End-to-end check of the /download_study_bioms/<id> handler: builds
    a BIOM artifact on disk, then verifies the handler's file listing for
    authorized, unknown-study, unauthorized and public-visibility cases."""
    tmp_dir = mkdtemp()
    self._clean_up_files.append(tmp_dir)
    biom_fp = join(tmp_dir, 'otu_table.biom')
    smr_dir = join(tmp_dir, 'sortmerna_picked_otus')
    log_dir = join(smr_dir, 'seqs_otus.log')
    tgz = join(tmp_dir, 'sortmerna_picked_otus.tgz')
    # minimal on-disk fixture: a biom table, a picked-otus directory with
    # a log file, and a tgz placeholder
    with biom_open(biom_fp, 'w') as f:
        et.to_hdf5(f, "test")
    makedirs(smr_dir)
    with open(log_dir, 'w') as f:
        f.write('\n')
    with open(tgz, 'w') as f:
        f.write('\n')
    files_biom = [(biom_fp, 'biom'), (smr_dir, 'directory'), (tgz, 'tgz')]
    params = Parameters.from_default_params(
        next(Command(3).default_parameter_sets), {'input_data': 1})
    a = Artifact.create(files_biom, "BIOM", parents=[Artifact(2)],
                        processing_parameters=params)
    for x in a.filepaths:
        self._clean_up_files.append(x['fp'])
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 200)
    # the expected body is a regex; sizes that vary are matched [0-9]*
    # NOTE(review): the seqs_otus.log line hard-codes /BIOM/10/ while the
    # neighboring lines use {0} (the new artifact id) -- confirm this is
    # intentional and not a stale id
    exp = ('- 1256812 /protected/processed_data/'
           '1_study_1001_closed_reference_otu_table.biom processed_data/'
           '1_study_1001_closed_reference_otu_table.biom\n'
           '- [0-9]* /protected/templates/1_prep_1_[0-9]*-[0-9]*.txt '
           'mapping_files/4_mapping_file.txt\n'
           '- 1256812 /protected/processed_data/'
           '1_study_1001_closed_reference_otu_table.biom processed_data/'
           '1_study_1001_closed_reference_otu_table.biom\n'
           '- [0-9]* /protected/templates/1_prep_1_[0-9]*-[0-9]*.txt '
           'mapping_files/5_mapping_file.txt\n'
           '- 1256812 /protected/processed_data/1_study_1001_'
           'closed_reference_otu_table_Silva.biom processed_data/'
           '1_study_1001_closed_reference_otu_table_Silva.biom\n'
           '- [0-9]* /protected/templates/1_prep_1_[0-9]*-[0-9]*.txt '
           'mapping_files/6_mapping_file.txt\n'
           '- 1093210 /protected/BIOM/7/biom_table.biom '
           'BIOM/7/biom_table.biom\n'
           '- [0-9]* /protected/templates/1_prep_2_[0-9]*-[0-9]*.txt '
           'mapping_files/7_mapping_file.txt\n'
           '- [0-9]* /protected/BIOM/{0}/otu_table.biom '
           'BIOM/{0}/otu_table.biom\n'
           '- 1 /protected/BIOM/10/sortmerna_picked_otus/seqs_otus.log '
           'BIOM/{0}/sortmerna_picked_otus/seqs_otus.log\n'
           '- [0-9]* /protected/templates/1_prep_1_[0-9]*-[0-9]*.txt '
           'mapping_files/{0}_mapping_file.txt\n'.format(a.id))
    self.assertRegex(response.body.decode('ascii'), exp)
    # unknown study id is rejected
    response = self.get('/download_study_bioms/200')
    self.assertEqual(response.code, 405)
    # changing user so we can test the failures
    BaseHandler.get_current_user = Mock(
        return_value=User("*****@*****.**"))
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 405)
    a.visibility = 'public'
    response = self.get('/download_study_bioms/1')
    # returning visibility
    a.visibility = 'private'
    self.assertEqual(response.code, 200)
    # we should have the same files than the previous test, except artifact
    # and mapping file 7: position 6 and 7; thus removing 6 twice
    exp = exp.split('\n')
    exp.pop(6)
    exp.pop(6)
    exp = '\n'.join(exp)
    self.assertRegex(response.body.decode('ascii'), exp)
def test_download_study(self):
    """End-to-end check of the /download_study_bioms/<id> handler: builds
    a BIOM artifact on disk, then verifies the handler's file listing for
    authorized, unknown-study, unauthorized and public-visibility cases.

    NOTE(review): this variant uses Python-2-only idioms
    (``.next()``, ``assertRegexpMatches``, tuple-shaped ``a.filepaths``)
    -- confirm it is only run under Python 2."""
    tmp_dir = mkdtemp()
    self._clean_up_files.append(tmp_dir)
    biom_fp = join(tmp_dir, 'otu_table.biom')
    smr_dir = join(tmp_dir, 'sortmerna_picked_otus')
    log_dir = join(smr_dir, 'seqs_otus.log')
    tgz = join(tmp_dir, 'sortmerna_picked_otus.tgz')
    # minimal on-disk fixture: a biom table, a picked-otus directory with
    # a log file, and a tgz placeholder
    with biom_open(biom_fp, 'w') as f:
        et.to_hdf5(f, "test")
    makedirs(smr_dir)
    with open(log_dir, 'w') as f:
        f.write('\n')
    with open(tgz, 'w') as f:
        f.write('\n')
    files_biom = [(biom_fp, 'biom'), (smr_dir, 'directory'), (tgz, 'tgz')]
    params = Parameters.from_default_params(
        Command(3).default_parameter_sets.next(), {'input_data': 1})
    a = Artifact.create(files_biom, "BIOM", parents=[Artifact(2)],
                        processing_parameters=params)
    for _, fp, _ in a.filepaths:
        self._clean_up_files.append(fp)
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 200)
    # the expected body is a regex; timestamps are matched with [0-9]*
    exp = (
        '- 1256812 /protected/processed_data/1_study_1001_closed_'
        'reference_otu_table.biom processed_data/1_study_1001_closed_'
        'reference_otu_table.biom\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/4_mapping_file.txt\n'
        '- 1256812 /protected/processed_data/'
        '1_study_1001_closed_reference_otu_table.biom processed_data/'
        '1_study_1001_closed_reference_otu_table.biom\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/5_mapping_file.txt\n'
        '- 1256812 /protected/processed_data/'
        '1_study_1001_closed_reference_otu_table_Silva.biom processed_data'
        '/1_study_1001_closed_reference_otu_table_Silva.biom\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/6_mapping_file.txt\n'
        '- 36615 /protected/templates/1_prep_2_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/7_mapping_file.txt\n'
        '- 39752 /protected/BIOM/{0}/otu_table.biom '
        'BIOM/{0}/otu_table.biom\n'
        '- 1 /protected/BIOM/{0}/sortmerna_picked_otus/seqs_otus.log '
        'BIOM/{0}/sortmerna_picked_otus/seqs_otus.log\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-[0-9]*.'
        'txt mapping_files/{0}_mapping_file.txt\n'.format(a.id))
    self.assertRegexpMatches(response.body, exp)
    # unknown study id is rejected
    response = self.get('/download_study_bioms/200')
    self.assertEqual(response.code, 405)
    # changing user so we can test the failures
    BaseHandler.get_current_user = Mock(
        return_value=User("*****@*****.**"))
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 405)
    # a public artifact is downloadable even by the other user, but only
    # its own files (and mapping file) are listed
    a.visibility = 'public'
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 200)
    exp = ('- 39752 /protected/BIOM/{0}/otu_table.biom '
           'BIOM/{0}/otu_table.biom\n'
           '- 1 /protected/BIOM/{0}/sortmerna_picked_otus/seqs_otus.log '
           'BIOM/{0}/sortmerna_picked_otus/seqs_otus.log\n'
           '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-[0-9]*.'
           'txt mapping_files/{0}_mapping_file.txt\n'.format(a.id))
    self.assertRegexpMatches(response.body, exp)
def test_download_study(self):
    """End-to-end check of the /download_study_bioms/<id> handler: builds
    a BIOM artifact on disk, then verifies the handler's file listing for
    authorized, unknown-study, unauthorized and public-visibility cases.

    NOTE(review): this variant uses Python-2-only idioms
    (``.next()``, ``assertRegexpMatches``, tuple-shaped ``a.filepaths``)
    -- confirm it is only run under Python 2."""
    tmp_dir = mkdtemp()
    self._clean_up_files.append(tmp_dir)
    biom_fp = join(tmp_dir, 'otu_table.biom')
    smr_dir = join(tmp_dir, 'sortmerna_picked_otus')
    log_dir = join(smr_dir, 'seqs_otus.log')
    tgz = join(tmp_dir, 'sortmerna_picked_otus.tgz')
    # minimal on-disk fixture: a biom table, a picked-otus directory with
    # a log file, and a tgz placeholder
    with biom_open(biom_fp, 'w') as f:
        et.to_hdf5(f, "test")
    makedirs(smr_dir)
    with open(log_dir, 'w') as f:
        f.write('\n')
    with open(tgz, 'w') as f:
        f.write('\n')
    files_biom = [(biom_fp, 'biom'), (smr_dir, 'directory'), (tgz, 'tgz')]
    params = Parameters.from_default_params(
        Command(3).default_parameter_sets.next(), {'input_data': 1})
    a = Artifact.create(files_biom, "BIOM", parents=[Artifact(2)],
                        processing_parameters=params)
    for _, fp, _ in a.filepaths:
        self._clean_up_files.append(fp)
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 200)
    # the expected body is a regex; sizes that vary are matched [0-9]*
    exp = (
        '- 1256812 /protected/processed_data/1_study_1001_closed_'
        'reference_otu_table.biom processed_data/1_study_1001_closed_'
        'reference_otu_table.biom\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/4_mapping_file.txt\n'
        '- 1256812 /protected/processed_data/'
        '1_study_1001_closed_reference_otu_table.biom processed_data/'
        '1_study_1001_closed_reference_otu_table.biom\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/5_mapping_file.txt\n'
        '- 1256812 /protected/processed_data/'
        '1_study_1001_closed_reference_otu_table_Silva.biom processed_data'
        '/1_study_1001_closed_reference_otu_table_Silva.biom\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/6_mapping_file.txt\n'
        '- 36615 /protected/templates/1_prep_2_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/7_mapping_file.txt\n'
        '- [0-9]* /protected/BIOM/{0}/otu_table.biom '
        'BIOM/{0}/otu_table.biom\n'
        '- 1 /protected/BIOM/{0}/sortmerna_picked_otus/seqs_otus.log '
        'BIOM/{0}/sortmerna_picked_otus/seqs_otus.log\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-[0-9]*.'
        'txt mapping_files/{0}_mapping_file.txt\n'.format(a.id))
    self.assertRegexpMatches(response.body, exp)
    # unknown study id is rejected
    response = self.get('/download_study_bioms/200')
    self.assertEqual(response.code, 405)
    # changing user so we can test the failures
    BaseHandler.get_current_user = Mock(
        return_value=User("*****@*****.**"))
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 405)
    # a public artifact is downloadable even by the other user, but only
    # its own files (and mapping file) are listed
    a.visibility = 'public'
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 200)
    exp = (
        '- [0-9]* /protected/BIOM/{0}/otu_table.biom '
        'BIOM/{0}/otu_table.biom\n'
        '- 1 /protected/BIOM/{0}/sortmerna_picked_otus/seqs_otus.log '
        'BIOM/{0}/sortmerna_picked_otus/seqs_otus.log\n'
        '- 36615 /protected/templates/1_prep_1_qiime_[0-9]*-[0-9]*.'
        'txt mapping_files/{0}_mapping_file.txt\n'.format(a.id))
    self.assertRegexpMatches(response.body, exp)
def test_download_study(self):
    """End-to-end check of the /download_study_bioms/<id> handler: builds
    a BIOM artifact on disk, then verifies the handler's file listing for
    authorized, unknown-study, unauthorized and public-visibility cases."""
    tmp_dir = mkdtemp()
    self._clean_up_files.append(tmp_dir)
    biom_fp = join(tmp_dir, 'otu_table.biom')
    smr_dir = join(tmp_dir, 'sortmerna_picked_otus')
    log_dir = join(smr_dir, 'seqs_otus.log')
    tgz = join(tmp_dir, 'sortmerna_picked_otus.tgz')
    # minimal on-disk fixture: a biom table, a picked-otus directory with
    # a log file, and a tgz placeholder
    with biom_open(biom_fp, 'w') as f:
        et.to_hdf5(f, "test")
    makedirs(smr_dir)
    with open(log_dir, 'w') as f:
        f.write('\n')
    with open(tgz, 'w') as f:
        f.write('\n')
    files_biom = [(biom_fp, 'biom'), (smr_dir, 'directory'), (tgz, 'tgz')]
    params = Parameters.from_default_params(
        next(Command(3).default_parameter_sets), {'input_data': 1})
    a = Artifact.create(files_biom, "BIOM", parents=[Artifact(2)],
                        processing_parameters=params)
    for x in a.filepaths:
        self._clean_up_files.append(x['fp'])
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 200)
    # the expected body is a regex; the leading columns appear to be a
    # checksum and a size, variable ones matched with [0-9]* -- the fixed
    # numbers presumably come from the test database fixtures
    exp = (
        '1579715020 1256812 /protected/processed_data/1_study_1001_closed_'
        'reference_otu_table.biom processed_data/1_study_1001_closed_'
        'reference_otu_table.biom\n'
        '- [0-9]* /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/4_mapping_file.txt\n'
        '1579715020 1256812 /protected/processed_data/'
        '1_study_1001_closed_reference_otu_table.biom processed_data/'
        '1_study_1001_closed_reference_otu_table.biom\n'
        '- [0-9]* /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/5_mapping_file.txt\n'
        '1579715020 1256812 /protected/processed_data/'
        '1_study_1001_closed_reference_otu_table_Silva.biom processed_data'
        '/1_study_1001_closed_reference_otu_table_Silva.biom\n'
        '- [0-9]* /protected/templates/1_prep_1_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/6_mapping_file.txt\n'
        '1756512010 1093210 /protected/BIOM/7/biom_table.biom '
        'BIOM/7/biom_table.biom\n'
        '- [0-9]* /protected/templates/1_prep_2_qiime_[0-9]*-'
        '[0-9]*.txt mapping_files/7_mapping_file.txt\n'
        '[0-9]* [0-9]* /protected/BIOM/{0}/otu_table.biom '
        'BIOM/{0}/otu_table.biom\n'
        '- 1 /protected/BIOM/{0}/sortmerna_picked_otus/seqs_otus.log '
        'BIOM/{0}/sortmerna_picked_otus/seqs_otus.log\n'
        '- [0-9]* /protected/templates/1_prep_1_qiime_[0-9]*-[0-9]*.'
        'txt mapping_files/{0}_mapping_file.txt\n'.format(a.id))
    self.assertRegex(response.body.decode('ascii'), exp)
    # unknown study id is rejected
    response = self.get('/download_study_bioms/200')
    self.assertEqual(response.code, 405)
    # changing user so we can test the failures
    BaseHandler.get_current_user = Mock(
        return_value=User("*****@*****.**"))
    response = self.get('/download_study_bioms/1')
    self.assertEqual(response.code, 405)
    a.visibility = 'public'
    response = self.get('/download_study_bioms/1')
    # returning visibility
    a.visibility = 'private'
    self.assertEqual(response.code, 200)
    # we should have the same files than the previous test, except artifact
    # and mapping file 7: position 6 and 7; thus removing 6 twice
    exp = exp.split('\n')
    exp.pop(6)
    exp.pop(6)
    exp = '\n'.join(exp)
    self.assertRegex(response.body.decode('ascii'), exp)