def setUp(self):
    """Write two alpha-diversity series to .qza files and register them
    as 'alpha_resources' before constructing the repo under test."""
    ConfigTestCase.setUp(self)
    TempfileTestCase.setUp(self)
    # Repo built before any resources are configured, for negative tests.
    self.no_resources_repo = AlphaRepo()

    chao1_path = self.create_tempfile(suffix='.qza').name
    faith_pd_path = self.create_tempfile(suffix='.qza').name
    chao1_series = pd.Series({'sample1': 7.15, 'sample2': 9.04},
                             name='chao1')
    faith_pd_series = pd.Series(
        {'sample3': 7.24, 'sample2': 9.04, 'sample4': 8.25},
        name='faith_pd')

    # Persist each series as a QIIME 2 artifact.
    for series, path in ((chao1_series, chao1_path),
                         (faith_pd_series, faith_pd_path)):
        Artifact.import_data("SampleData[AlphaDiversity]",
                             series).save(path)

    config.resources.update({
        'alpha_resources': {
            'chao1': chao1_path,
            'faith_pd': faith_pd_path,
        }
    })
    resources.update(config.resources)
    # Repo built after resources are registered.
    self.repo = AlphaRepo()
def setUp(self):
    """Save observed_otus and chao1 series as artifacts and register
    them under 'alpha_resources'."""
    super().setUp()
    self.series1_filename = self.create_tempfile(suffix='.qza').name
    self.series2_filename = self.create_tempfile(suffix='.qza').name
    self.series_1 = pd.Series(
        {
            'sample-foo-bar': 7.24,
            'sample-baz-qux': 8.25,
            'sample-3': 6.4,
        },
        name='observed_otus')
    self.series_2 = pd.Series(
        {'sample-foo-bar': 9.01, 'sample-qux-quux': 9.04},
        name='chao1')

    # Persist both series as QIIME 2 artifacts.
    for series, path in ((self.series_1, self.series1_filename),
                         (self.series_2, self.series2_filename)):
        Artifact.import_data("SampleData[AlphaDiversity]",
                             series).save(path)

    config.resources.update({
        'alpha_resources': {
            'observed_otus': self.series1_filename,
            'chao1': self.series2_filename,
        }
    })
    resources.update(config.resources)
def start(self):
    """Persist metadata, alpha-diversity, taxonomy, and feature-table
    fixtures to temp files, then register them in the resource config.

    Assumes self.metadata_table, self.faith_pd_data,
    self.taxonomy_greengenes_df and self.table were prepared elsewhere.
    """
    self.metadata_file = self.create_tempfile(suffix='.txt')
    metadata_path = self.metadata_file.name
    Metadata(self.metadata_table).save(metadata_path)

    self.faith_pd_file = self.create_tempfile(suffix='.qza')
    faith_pd_path = self.faith_pd_file.name
    Artifact.import_data(
        "SampleData[AlphaDiversity]",
        self.faith_pd_data,
    ).save(faith_pd_path)

    self.taxonomy_file = self.create_tempfile(suffix='.qza')
    taxonomy_path = self.taxonomy_file.name
    Artifact.import_data(
        "FeatureData[Taxonomy]",
        self.taxonomy_greengenes_df,
    ).save(taxonomy_path)

    self.table_file = self.create_tempfile(suffix='.qza')
    table_path = self.table_file.name
    Artifact.import_data(
        "FeatureTable[Frequency]",
        self.table,
    ).save(table_path)

    config.resources.update({
        'metadata': metadata_path,
        'alpha_resources': {
            'faith-pd': faith_pd_path,
        },
        'table_resources': {
            'greengenes': {
                'table': table_path,
                'feature-data-taxonomy': taxonomy_path,
            }
        },
    })
    resources.update(config.resources)
def build_app():
    """Build the connexion/Flask app, kick off asynchronous resource
    loading, and wire up API error handlers and CORS."""
    app = connexion.FlaskApp(__name__)
    app.app.json_encoder = NumPySafeJSONEncoder

    # Default configuration for resources is provided in
    # microsetta.config.resources; this config can be updated by a json
    # file passed to `build_app`.
    config_resources.update(SERVER_CONFIG.get('resources', {}))
    resources.update(config_resources)

    # Deep-copy so the background loader cannot mutate the shared config.
    elements = schema.make_elements(copy.deepcopy(config_resources))
    load_data = _pool.submit(atomic_update_resources, elements)
    futures.add(load_data)
    load_data.add_done_callback(lambda fut: futures.remove(load_data))

    app_file = resource_filename('microsetta_public_api.api',
                                 'microsetta_public_api.yml')
    # validate_responses needs to be False to support sending binary
    # files it seems, see https://github.com/zalando/connexion/issues/401
    app.add_api(app_file,
                validate_responses=SERVER_CONFIG.get('validate', True))

    # Map domain exceptions onto HTTP status handlers.
    for not_found_exc in (UnknownMetric, UnknownResource, UnknownID,
                          UnknownCategory):
        app.app.register_error_handler(not_found_exc, handle_404)
    for bad_request_exc in (IncompatibleOptions, InvalidParameter):
        app.app.register_error_handler(bad_request_exc, handle_400)

    CORS(app.app)
    return app
def setUp(self):
    """Create plotting fixtures: a metadata table plus two alpha
    series, all registered in the resource configuration."""
    super().setUp()
    self.plotting_metadata_path = self.create_tempfile(
        suffix='.txt').name
    self.plotting_metadata_table = pd.DataFrame(
        {
            'age_cat': ['30s', '40s', '50s', '30s', '30s', '50s'],
            'bmi_cat': ['normal', 'not', 'not', 'normal', 'not',
                        'normal'],
            'num_cat': [20, 30, 7.15, 8.25, 30, 7.15],
        },
        index=pd.Series(
            ['sample-1', 'sample-2', 'sample-3', 'sample-4',
             'sample-5', 'sample-6'],
            name='#SampleID'))
    Metadata(self.plotting_metadata_table).save(
        self.plotting_metadata_path)
    config.resources.update({'metadata': self.plotting_metadata_path})

    self.plotting_series1_filename = self.create_tempfile(
        suffix='.qza').name
    self.plotting_series2_filename = self.create_tempfile(
        suffix='.qza').name
    self.plotting_series_1 = pd.Series(
        {
            'sample-2': 7.24,
            'sample-4': 8.25,
            'sample-3': 6.4,
        },
        name='observed_otus')
    self.plotting_series_2 = pd.Series(
        {'sample-2': 9.01, 'sample-5': 9.04},
        name='chao1')

    # Persist both alpha series as QIIME 2 artifacts.
    for series, path in (
            (self.plotting_series_1, self.plotting_series1_filename),
            (self.plotting_series_2, self.plotting_series2_filename)):
        Artifact.import_data("SampleData[AlphaDiversity]",
                             series).save(path)

    config.resources.update({
        'alpha_resources': {
            'observed_otus': self.plotting_series1_filename,
            'chao1': self.plotting_series2_filename,
        }
    })
    resources.update(config.resources)
def setUp(self):
    """Save a small metadata table (including NaN rows) and construct
    the MetadataRepo under test."""
    TempfileTestCase.setUp(self)
    ConfigTestCase.setUp(self)
    self.metadata_filename = self.create_tempfile(suffix='.qza').name
    self.test_metadata = pd.DataFrame(
        {
            'age_cat': ['30s', '40s', '50s', '30s', np.nan],
            'num_cat': [7.24, 7.24, 8.25, 7.24, np.nan],
            'other': [1, 2, 3, 4, np.nan],
        },
        index=pd.Series(['a', 'b', 'c', 'd', 'e'], name='#SampleID'))
    Metadata(self.test_metadata).save(self.metadata_filename)
    config.resources.update({'metadata': self.metadata_filename})
    resources.update(config.resources)
    self.repo = MetadataRepo()
def build_app(resources_config_json=None):
    """Build the connexion app, optionally overriding the default
    resource configuration from a JSON file.

    Parameters
    ----------
    resources_config_json : str, optional
        Path to a JSON file whose contents are merged into
        config.resources.
    """
    app = connexion.FlaskApp(__name__)
    # Default configuration for resources is provided in
    # microsetta.config.resources; this config can be updated by a json
    # file passed to `build_app`.
    if resources_config_json is not None:
        with open(resources_config_json) as fp:
            config.resources.update(json.load(fp))
    resources.update(config.resources)

    app_file = resource_filename('microsetta_public_api.api',
                                 'microsetta_public_api.yml')
    app.add_api(app_file, validate_responses=True)
    return app
def setUp(self):
    """Register a seven-sample metadata table as a resource and prepare
    a sample querybuilder condition for query tests."""
    super().setUp()
    self.metadata_path = self.create_tempfile(suffix='.txt').name
    self.metadata_table = pd.DataFrame(
        {
            'age_cat': ['30s', '40s', '50s', '30s', '30s', '50s',
                        '50s'],
            'bmi_cat': ['normal', 'not', 'not', 'normal', 'not',
                        'normal', 'overweight'],
            'num_cat': [20, 30, 7.15, 8.25, 30, 7.15, np.nan],
        },
        index=pd.Series(
            ['sample-1', 'sample-2', 'sample-3', 'sample-4',
             'sample-5', 'sample-6', 'sample-7'],
            name='#SampleID'))
    Metadata(self.metadata_table).save(self.metadata_path)
    config.resources.update({'metadata': self.metadata_path})
    resources.update(config.resources)

    # Minimal jQuery-QueryBuilder-style rule set used by query tests.
    self.sample_querybuilder = {
        "condition": "AND",
        "rules": [
            {
                "id": "age_cat",
                "field": "age_cat",
                "type": "string",
                "input": "select",
                "operator": "equal",
                "value": "30s"
            },
        ]
    }
def setUp(self):
    """Create feature-table and taxonomy fixtures (QZA and biom) and
    register them under several 'table_resources' configurations."""
    super().setUp()
    # Temp-file destinations for the artifacts written below.
    self.table1_filename = self.create_tempfile(suffix='.qza').name
    self.taxonomy1_filename = self.create_tempfile(suffix='.qza').name
    self.table2_filename = self.create_tempfile(suffix='.qza').name
    self.taxonomy2_filename = self.create_tempfile(suffix='.qza').name
    self.table3_filename = self.create_tempfile(suffix='.qza').name
    self.var_table_filename = self.create_tempfile(suffix='.qza').name
    self.table_biom = self.create_tempfile(suffix='.biom').name
    self.taxonomy_greengenes_df_filename = self.create_tempfile(
        suffix='.qza').name

    self.table = biom.Table(
        np.array([[0, 1, 2], [2, 4, 6], [3, 0, 1]]),
        ['feature-1', 'feature-2', 'feature-3'],
        ['sample-1', 'sample-2', 'sample-3'])
    self.taxonomy_df = pd.DataFrame(
        [['feature-1', 'a; b; c', 0.123],
         ['feature-2', 'a; b; c; d; e', 0.345],
         ['feature-3', 'a; f; g; h', 0.678]],
        columns=['Feature ID', 'Taxon', 'Confidence'])
    self.taxonomy_greengenes_df = pd.DataFrame(
        [['feature-1', 'k__a;p__b; o__c', 0.123],
         ['feature-2', 'k__a; p__b; o__c; f__d; g__e', 0.34],
         ['feature-3', 'k__a; p__f; o__g; f__h', 0.678]],
        columns=['Feature ID', 'Taxon', 'Confidence'])
    self.taxonomy_greengenes_df.set_index('Feature ID', inplace=True)
    self.taxonomy_df.set_index('Feature ID', inplace=True)

    self.table2 = biom.Table(
        np.array([[0, 1, 2], [2, 4, 6], [3, 0, 1]]),
        ['feature-1', 'feature-X', 'feature-3'],
        ['sample-1', 'sample-2', 'sample-3'])
    self.taxonomy2_df = pd.DataFrame(
        [['feature-1', 'a; b; c', 0.123],
         ['feature-X', 'a; b; c; d; e', 0.34],
         ['feature-3', 'a; f; g; h', 0.678]],
        columns=['Feature ID', 'Taxon', 'Confidence'])
    self.taxonomy2_df.set_index('Feature ID', inplace=True)
    self.table3 = biom.Table(
        np.array([[1, 2], [0, 1]]),
        ['feature-X', 'feature-3'],
        ['sample-2', 'sample-3'])

    # Persist each fixture as a QIIME 2 artifact.
    for type_, data, path in (
            ("FeatureTable[Frequency]", self.table,
             self.table1_filename),
            ("FeatureData[Taxonomy]", self.taxonomy_df,
             self.taxonomy1_filename),
            ("FeatureTable[Frequency]", self.table2,
             self.table2_filename),
            ("FeatureData[Taxonomy]", self.taxonomy2_df,
             self.taxonomy2_filename),
            ("FeatureTable[Frequency]", self.table3,
             self.table3_filename),
            ("FeatureData[Taxonomy]", self.taxonomy_greengenes_df,
             self.taxonomy_greengenes_df_filename)):
        Artifact.import_data(type_, data).save(path)

    # Also write the first table as a raw HDF5 biom file.
    with biom_open(self.table_biom, 'w') as f:
        self.table.to_hdf5(f, 'test-table')

    # NOTE(review): 'table2' and 'table2-greengenes' intentionally(?)
    # reuse table1_filename rather than table2_filename — preserved
    # as-is; confirm against the tests that consume these resources.
    config.resources.update({
        'table_resources': {
            'table1': {
                'table': self.table1_filename,
            },
            'table2': {
                'table': self.table1_filename,
                'feature-data-taxonomy': self.taxonomy1_filename,
            },
            'table2-greengenes': {
                'table': self.table1_filename,
                'feature-data-taxonomy':
                    self.taxonomy_greengenes_df_filename,
            },
            'table-fish': {
                'table': self.table_biom,
                'feature-data-taxonomy': self.taxonomy1_filename,
                'table-format': 'biom'
            },
            'table5': {
                'table': self.table2_filename,
            },
            'table6': {
                'table': self.table_biom,
                'table-format': 'biom',
            },
            'table-cached-model': {
                'table': self.table1_filename,
                'feature-data-taxonomy': self.taxonomy1_filename,
                'cache-taxonomy': True,
            },
        }
    })
    resources.update(config.resources)
def setUp(self):
    """Create feature-table and taxonomy fixtures and register them as
    'table_resources' before and after constructing TaxonomyRepo."""
    TempfileTestCase.setUp(self)
    ConfigTestCase.setUp(self)
    # Repo built before any resources are configured, for negative tests.
    self.no_resources_repo = TaxonomyRepo()

    # Temp-file destinations for the artifacts written below.
    self.table1_filename = self.create_tempfile(suffix='.qza').name
    self.taxonomy1_filename = self.create_tempfile(suffix='.qza').name
    self.table2_filename = self.create_tempfile(suffix='.qza').name
    self.taxonomy2_filename = self.create_tempfile(suffix='.qza').name
    self.table3_filename = self.create_tempfile(suffix='.qza').name
    self.var_table_filename = self.create_tempfile(suffix='.qza').name
    self.table_biom = self.create_tempfile(suffix='.biom').name

    self.table = biom.Table(
        np.array([[0, 1, 2], [2, 4, 6], [3, 0, 1]]),
        ['feature-1', 'feature-2', 'feature-3'],
        ['sample-1', 'sample-2', 'sample-3'])
    self.taxonomy_df = pd.DataFrame(
        [['feature-1', 'a; b; c', 0.123],
         ['feature-2', 'a; b; c; d; e', 0.345],
         ['feature-3', 'a; f; g; h', 0.678]],
        columns=['Feature ID', 'Taxon', 'Confidence'])
    self.taxonomy_df.set_index('Feature ID', inplace=True)
    self.table2 = biom.Table(
        np.array([[0, 1, 2], [2, 4, 6], [3, 0, 1]]),
        ['feature-1', 'feature-X', 'feature-3'],
        ['sample-1', 'sample-2', 'sample-3'])
    self.taxonomy2_df = pd.DataFrame(
        [['feature-1', 'a; b; c', 0.123],
         ['feature-X', 'a; b; c; d; e', 0.34],
         ['feature-3', 'a; f; g; h', 0.678]],
        columns=['Feature ID', 'Taxon', 'Confidence'])
    self.taxonomy2_df.set_index('Feature ID', inplace=True)
    self.table3 = biom.Table(
        np.array([[1, 2], [0, 1]]),
        ['feature-X', 'feature-3'],
        ['sample-2', 'sample-3'])

    # Persist each fixture as a QIIME 2 artifact.
    for type_, data, path in (
            ("FeatureTable[Frequency]", self.table,
             self.table1_filename),
            ("FeatureData[Taxonomy]", self.taxonomy_df,
             self.taxonomy1_filename),
            ("FeatureTable[Frequency]", self.table2,
             self.table2_filename),
            ("FeatureData[Taxonomy]", self.taxonomy2_df,
             self.taxonomy2_filename),
            ("FeatureTable[Frequency]", self.table3,
             self.table3_filename)):
        Artifact.import_data(type_, data).save(path)

    # Also write the first table as a raw HDF5 biom file.
    with biom_open(self.table_biom, 'w') as f:
        self.table.to_hdf5(f, 'test-table')

    config.resources.update({
        'table_resources': {
            'table1': {
                'table': self.table1_filename,
            },
            'table2': {
                'table': self.table1_filename,
                'feature-data-taxonomy': self.taxonomy1_filename,
                'cache-taxonomy': False,
            },
            'table3': {
                'table': self.table3_filename,
                'feature-data-taxonomy': self.taxonomy2_filename,
                'cache-taxonomy': False,
                'variances': self.table3_filename,
            },
            'table4': {
                'table': self.table_biom,
                'feature-data-taxonomy': self.taxonomy1_filename,
                'cache-taxonomy': False,
                'table-format': 'biom'
            },
            'table5': {
                'table': self.table2_filename,
            },
            'table6': {
                'table': self.table_biom,
                'table-format': 'biom',
            },
            'cached-taxonomy-table': {
                'table': self.table1_filename,
                'feature-data-taxonomy': self.taxonomy1_filename,
                'cache-taxonomy': True,
            },
        }
    })
    resources.update(config.resources)
    # Repo built after resources are registered.
    self.repo = TaxonomyRepo()