def test_schemas(token, client, connection, schemas_simple):
    REST_schemas = client.get(
        '/list_schemas',
        headers=dict(Authorization=f'Bearer {token}')).json['schemaNames']
    assert set(REST_schemas) == set([
        s for s in dj.list_schemas(connection=connection)
        if s not in ('mysql', 'performance_schema', 'sys')
    ])
def test_schemas(token, client, connection, schemas_simple): REST_schemas = client.get( "/schema", headers=dict(Authorization=f"Bearer {token}")).json["schemaNames"] assert set(REST_schemas) == set([ s for s in dj.list_schemas(connection=connection) if s not in ("mysql", "performance_schema", "sys") ])
def test_invalid_schema_list_table(token, client, schema_main):
    # Test invalid schema response
    response = client.post(
        '/list_tables',
        headers=dict(Authorization=f'Bearer {token}'),
        json=dict(schemaName='invalid_schema'))
    assert response.status_code != 200
    assert 'invalid_schema' not in dj.list_schemas()
def test_invalid_schema_list_table(token, client, schema_main):
    # Test invalid schema response
    response = client.get(
        f'/schema/{"invalid_schema"}/table',
        headers=dict(Authorization=f"Bearer {token}"),
    )
    assert response.status_code != 200
    assert "invalid_schema" not in dj.list_schemas()
def connect_to_datajoint(self):
    if self.is_connected:
        return True
    # apply the stored configuration before connecting
    for key, value in self.config.items():
        dj.config[key] = value
    self.connection = dj.conn()
    self.is_connected = self.connection.is_connected
    if self.is_connected:
        # expose each accessible schema as a virtual module attribute
        for schema in dj.list_schemas():
            setattr(self, schema,
                    dj.create_virtual_module(f'{schema}.py', schema))
    return self.is_connected
def refresh_schema(self): """refresh container of schemas """ schemata = {} for schema in dj.list_schemas(): if schema in self["skip_schemas"]: continue # TODO error messages schemata[schema] = dj.VirtualModule( schema, schema, connection=self['connection'], add_objects=custom_attributes_dict, create_tables=True) # make sure jobs table has been created schemata[schema].schema.jobs self['schemata'] = schemata
def refresh_schema(self): """refresh container of schemas """ schemata = {} # direct loading if possible # TODO (also in app init) if self['init_database']: from loris.schema import (experimenters, core) schemata['experimenters'] = experimenters schemata['core'] = core if self['include_fly']: from loris.schema import anatomy, subjects schemata['anatomy'] = anatomy # move out schemata['subjects'] = subjects for schema, module_path in self["import_schema_module"]: # TODO test spec = importlib.util.spec_from_file_location(schema, module_path) module = importlib.util.module_from_spec(spec) spec.loader.exec_module(module) schemata[schema] = module for schema in dj.list_schemas(): if schema in self["skip_schemas"]: continue if schema in schemata: continue # TODO error messages schemata[schema] = dj.VirtualModule( schema, schema, connection=self['connection'], add_objects=custom_attributes_dict, create_tables=True) # make sure jobs table has been created schemata[schema].schema.jobs self['schemata'] = schemata
# + Each DataJoint table class inside the module corresponds to a table inside the schema in the database. For example, the class `ephys.EphysRecording` corresponds to the table `_ephys_recording` in the schema `neuro_ephys`.

# preview columns and contents in a table
imaging.Processing()

# + [markdown]
# By importing the modules for the first time, the schemas and tables will be created inside the database.
#
# Once created, importing the modules will not create the schemas and tables again, but the existing schemas/tables can be accessed and manipulated through the modules.

# + [markdown]
# ## DataJoint tools to explore schemas and tables
#
# + `dj.list_schemas()`: list all schemas the user has access to in the current database.

dj.list_schemas()

# + `dj.Diagram()`: plot tables and dependencies in a schema.

# plot diagram for all tables in a schema
dj.Diagram(imaging)
# -

# **Table tiers**:
#
# Manual table: green box, manually inserted table, expect new entries daily, e.g. Subject, ProbeInsertion.
# Lookup table: gray box, pre-inserted table, commonly used for general facts or parameters, e.g. Strain, ClusteringMethod, ClusteringParamSet.
# Imported table: blue oval, auto-processing table, the processing depends on the import of external files, e.g. Clustering requires output files from kilosort2.
# Computed table: red circle, auto-processing table, the processing does not depend on files external to the database, commonly used for analyses of data already in the database.
# Part table: plain text, an appendix to the master table; all the part entries of a given master entry represent an intact set of the master entry, e.g. Unit of a CuratedClustering.
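# + [markdown]
# A minimal sketch of composing diagrams, assuming the `ephys` virtual module mentioned above is also importable here (that import is not shown in this excerpt): diagrams can be added together to plot dependencies across schemas, and expanded by an integer number of levels around a single table.

# combine the diagrams of two schemas into a single dependency plot
dj.Diagram(imaging) + dj.Diagram(ephys)

# show the tables one level downstream of a single table
dj.Diagram(imaging.Processing) + 1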
def test_schema_list():
    schemas = dj.list_schemas()
    assert_true(schema.schema.database in schemas)
import datajoint as dj
import os

dj.config['enable_python_native_blobs'] = True

reference = dj.create_virtual_module('reference', 'ibl_reference')
subject = dj.create_virtual_module('subject', 'ibl_subject')
action = dj.create_virtual_module('action', 'ibl_action')
acquisition = dj.create_virtual_module('acquisition', 'ibl_acquisition')
data = dj.create_virtual_module('data', 'ibl_data')
behavior = dj.create_virtual_module('behavior', 'ibl_behavior')
behavior_analyses = dj.create_virtual_module('behavior_analyses', 'ibl_analyses_behavior')

accessible_schemas = dj.list_schemas()

if 'ibl_ephys' in accessible_schemas and \
        'ibl_storage' in accessible_schemas:

    schema = dj.schema('ibl_storage')

    @schema
    class S3Access(dj.Manual):
        definition = """
        s3_id:          tinyint        # unique id for each S3 pair
        ---
        access_key:     varchar(128)   # S3 access key
        secret_key:     varchar(128)   # S3 secret key
        """

    # attempt to get S3 access/secret key from different sources
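    # A hedged sketch of one way that lookup might proceed; the environment-variable
    # names S3_ACCESS_KEY / S3_SECRET_KEY, the fallback order, and the use of
    # s3_id=0 are assumptions for illustration, not taken from the original.
    access_key = os.environ.get('S3_ACCESS_KEY')
    secret_key = os.environ.get('S3_SECRET_KEY')
    if (access_key is None or secret_key is None) and len(S3Access()):
        # fall back to a key pair stored in the S3Access manual table
        access_key, secret_key = (S3Access() & 's3_id=0').fetch1(
            'access_key', 'secret_key')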
def test_schemas(token, client, connection):
    REST_schemas = client.get(
        '/api/list_schemas',
        headers=dict(Authorization=f'Bearer {token}')).json['schemaNames']
    expected_schemas = dj.list_schemas(connection=connection)
    assert set(REST_schemas) == set(expected_schemas)