def test_FP_options(self):
    '''
    The logic for creating amplicon FPs is independent of the logic for creating sample FPs
    '''
    with self.subTest():
        ret = config.get_serviceImpl().run_picrust2_pipeline(
            config.ctx, {
                **config.get_ws(),
                'amplicon_matrix_upa': enigma50by30,
                'fp_options': {
                    'create_amplicon_fps': False,
                    'create_sample_fps': False,
                },
                'output_name': 'an_output_name',
            })

        assert len(Var.objects_created) == 2, Var.objects_created

    with self.subTest():
        ret = config.get_serviceImpl().run_picrust2_pipeline(
            config.ctx, {
                **config.get_ws(),
                'amplicon_matrix_upa': enigma50by30,
                'fp_options': {
                    'create_amplicon_fps': True,
                    'create_sample_fps': True,
                },
                'output_name': 'an_output_name',
            })

        assert len(Var.objects_created) == 8, Var.objects_created
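# Purely illustrative sketch (not part of the suite): the two subTest blocks in
# test_FP_options differ only in fp_options, so a hypothetical helper like the
# one below could build the shared request dict. All keys and fixtures are the
# ones the test already uses; the helper name _picrust2_fp_params is made up here.
def _picrust2_fp_params(self, create_fps: bool) -> dict:
    return {
        **config.get_ws(),
        'amplicon_matrix_upa': enigma50by30,
        'fp_options': {
            'create_amplicon_fps': create_fps,
            'create_sample_fps': create_fps,
        },
        'output_name': 'an_output_name',
    }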
def test_nonePass_succeedCor(self):
    # Get obj with all 1s
    with patch.dict('OTUSampleMetadataCorrelation.util.kbase_obj.Var', values={'dfu': get_mock_dfu('enigma50by30')}):
        obj = AmpliconMatrix(enigma50by30).obj
        obj['data']['values'] = [[1 for j in range(30)] for i in range(50)]  # all 1s

    # Integrate the all-1s version into a mock dfu
    dfu = get_mock_dfu('enigma50by30', replace_obj={enigma50by30: obj})

    # Run with the all-1s obj patched in
    with patch(
            'OTUSampleMetadataCorrelation.OTUSampleMetadataCorrelationImpl.DataFileUtil',
            new=lambda *a, **kw: dfu):
        ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
            cfg.ctx, {
                "amp_mat_upa": enigma50by30,
                "sample_metadata": sample_metadata_50by30,
                "amp_params": {
                    "val_cutoff": None,
                    "sd_cutoff": None,
                    "tax_rank": None,
                    "tax_field": None,
                },
                "cor_params": {
                    "cor_cutoff": 0,
                    "cor_method": "pearson",
                    "p_adj_method": "bonferroni",
                    "p_adj_cutoff": 1
                },
                'workspace_name': self.wsName,
            })
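# Minimal sketch, assuming a MagicMock-based factory: this is not the repo's
# actual get_mock_dfu, only an illustration of how a DataFileUtil mock with
# per-UPA overrides (replace_obj) could serve fixture objects instead of
# hitting the Workspace. The real helper's signature and return shapes may differ.
from unittest.mock import MagicMock

def make_mock_dfu_sketch(fixture_objs: dict, replace_obj: dict = None):
    objs = {**fixture_objs, **(replace_obj or {})}  # selected UPAs overridden
    dfu = MagicMock()
    # Answer get_objects straight from the (possibly overridden) fixtures
    dfu.get_objects.side_effect = lambda params: {
        'data': [objs[upa] for upa in params['object_refs']]
    }
    return dfu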
def test_has_row_AttributeMapping_create_all(self):
    ret = config.get_serviceImpl().run_picrust2_pipeline(
        config.ctx, {
            **config.get_ws(),
            'amplicon_matrix_upa': enigma50by30,
            'output_name': 'an_output_name',
        })

    assert len(Var.objects_created) == 8, Var.objects_created
def test_has_no_row_AttributeMapping(self):
    '''
    Will not update/create the row AttributeMapping if there is none to begin with.
    The default is to create all the FPs.
    '''
    ret = config.get_serviceImpl().run_picrust2_pipeline(
        config.ctx, {
            **config.get_ws(),
            'amplicon_matrix_upa': enigma50by30_noAttrMaps_noSampleSet,
            'output_name': 'an_output_name',
        })

    assert len(Var.objects_created) == 6, Var.objects_created
def test_large_dataset(self):
    '''
    Run the pipeline on the large 17770x511 matrix, creating both amplicon and sample FPs
    '''
    ret = config.get_serviceImpl().run_picrust2_pipeline(
        config.ctx, {
            **config.get_ws(),
            'amplicon_matrix_upa': enigma17770by511,
            'fp_options': {
                'create_amplicon_fps': 1,
                'create_sample_fps': 1,
            },
            'output_name': 'an_output_name',
        })

    assert len(Var.objects_created) == 8, Var.objects_created
def test_userTest_data(self):
    ret = config.get_serviceImpl().run_picrust2_pipeline(
        config.ctx, {
            **config.get_ws(),
            'amplicon_matrix_upa': userTest,
            'functions': {
                'cog': 1,
                'pfam': 1,
                'tigrfam': 1,
                'pheno': 1,
            },
            'fp_options': {
                'create_amplicon_fps': 1,
                'create_sample_fps': 1,
            },
            'output_name': 'an_output_name',
        })

    assert len(Var.objects_created) == 14, Var.objects_created
def test_nonePass_filterPAdj(self):
    ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
        cfg.ctx, {
            "amp_mat_upa": enigma50by30,
            "sample_metadata": sample_metadata_50by30,
            "amp_params": {
                "val_cutoff": None,
                "sd_cutoff": None,
                "tax_rank": None,
                "tax_field": None,
            },
            "cor_params": {
                "cor_cutoff": 0,
                "cor_method": "pearson",
                "p_adj_method": "BH",
                "p_adj_cutoff": 0
            },
            'workspace_name': self.wsName,
        })
def _test_wTax(self, amp_mat_upa, sample_metadata, tax_rank, tax_field):
    ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
        cfg.ctx, {
            "amp_mat_upa": amp_mat_upa,
            "sample_metadata": sample_metadata,
            "amp_params": {
                "val_cutoff": None,
                "sd_cutoff": None,
                "tax_rank": tax_rank,
                "tax_field": tax_field,  # can be a list or a bare string
            },
            "cor_params": {
                "cor_cutoff": 0.1,
                "cor_method": "kendall",
                "p_adj_method": "BH",
                "p_adj_cutoff": 0.9
            },
            'workspace_name': self.wsName,
        })
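# Hypothetical callers of _test_wTax (the suite's real invocations may use
# other fixtures and ranks); shown only to illustrate that tax_field can be
# passed as a single-element list or as a bare string, per the comment in the
# helper above.
def test_wTax_genus_listField(self):
    self._test_wTax(enigma50by30, sample_metadata_50by30, 'genus', ['taxonomy'])

def test_wTax_phylum_strField(self):
    self._test_wTax(enigma50by30, sample_metadata_50by30, 'phylum', 'taxonomy')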
def test_small_3_results(self):
    '''
    Small 50x30 matrix with filtering and cutoffs tuned so that 3 results pass
    '''
    ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
        cfg.ctx, {
            "amp_mat_upa": enigma50by30,
            "sample_metadata": sample_metadata_50by30,
            "amp_params": {
                "val_cutoff": 1,
                "sd_cutoff": 1,
                "tax_rank": 'genus',
                "tax_field": ['taxonomy'],
            },
            "cor_params": {
                "cor_cutoff": 0.503,
                "cor_method": "kendall",
                "p_adj_method": "bonferroni",
                "p_adj_cutoff": 1,
            },
            'workspace_name': self.wsName,
        })
def test_large_defaultParams(self):
    '''
    Large 17770x511 matrix run with the default parameters
    '''
    ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
        cfg.ctx, {
            "amp_mat_upa": enigma17770by511,
            "sample_metadata": sample_metadata_17770by511,
            "amp_params": {
                "val_cutoff": None,
                "sd_cutoff": None,
                "tax_rank": None,
                "tax_field": None,
            },
            "cor_params": {
                "cor_cutoff": 0.5,
                "cor_method": "kendall",
                "p_adj_method": "BH",
                "p_adj_cutoff": 0.05,
            },
            'workspace_name': self.wsName,
        })
def test_small(self):
    '''
    Small 50x30 matrix with tax_rank 'genus' and permissive correlation cutoffs
    '''
    ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
        cfg.ctx, {
            "amp_mat_upa": enigma50by30,
            "sample_metadata": sample_metadata_50by30,
            "amp_params": {
                "val_cutoff": None,
                "sd_cutoff": None,
                "tax_rank": 'genus',
                "tax_field": ['taxonomy'],
            },
            "cor_params": {
                "cor_cutoff": 0.01,
                "cor_method": "pearson",
                "p_adj_method": "BH",
                "p_adj_cutoff": 0.9,
            },
            'workspace_name': self.wsName,
        })
def test_func_options(self):
    with self.subTest():
        ret = config.get_serviceImpl().run_picrust2_pipeline(
            config.ctx, {
                **config.get_ws(),
                'amplicon_matrix_upa': enigma50by30,
                'functions': {
                    'cog': 1,
                    'pfam': 1,
                    'tigrfam': 1,
                    'pheno': 1,
                },
                'fp_options': {
                    'create_amplicon_fps': 1,
                    'create_sample_fps': 1,
                },
                'output_name': 'an_output_name',
            })

        assert len(Var.objects_created) == 16, Var.objects_created
def test_nonePass_prelimFilter(self):
    '''
    All amplicons are filtered out by the value/SD cutoffs
    (taxonomy filtering alone can't eliminate all amplicons)
    '''
    ret = cfg.get_serviceImpl().run_OTUSampleMetadataCorrelation(
        cfg.ctx, {
            "amp_mat_upa": enigma50by30,
            "sample_metadata": sample_metadata_50by30,
            "amp_params": {
                "val_cutoff": 1e9,
                "sd_cutoff": 1e9,
                "tax_rank": None,
                "tax_field": None,
            },
            "cor_params": {
                "cor_cutoff": 0.5,
                "cor_method": "kendall",
                "p_adj_method": "BH",
                "p_adj_cutoff": 0.05
            },
            'workspace_name': self.wsName,
        })
def test_dup_GenomeSet(self):
    ret = config.get_serviceImpl().run_FAPROTAX(
        config.ctx, {
            **self.ws,
            'input_upa': refseq_dup,
        })