def create_study_collection_table(self):
    """Create the Study <-> Collection association table in the data schema."""
    assoc_columns = [
        Column.define("Study", builtin_types.text, nullok=False),
        Column.define("Collection", builtin_types.text, nullok=False),
    ]
    assoc_fkeys = [
        ForeignKey.define(
            ["Study"], self.DATA, "Study", ["RID"],
            constraint_names=[[self.DATA, "Study_Collection_Study_fkey"]]),
        ForeignKey.define(
            ["Collection"], self.DATA, "Collection", ["RID"],
            constraint_names=[[self.DATA, "Study_Collection_Collection_fkey"]]),
    ]
    table_def = Table.define(
        "Study_Collection",
        column_defs=assoc_columns,
        # Both columns together form the key, so each (study, collection)
        # pairing may appear at most once.
        key_defs=[Key.define(["Study", "Collection"])],
        fkey_defs=assoc_fkeys,
        comment="Many-to-many associations between studies and collection")
    self.try_create_table(self.model.schemas[self.DATA], table_def)
def define_tdoc_Data_Dictionary():
    """Return the ERMrest definition for the PDB.Data_Dictionary table.

    The table tracks every available dictionary by name, category,
    version, and storage location.
    """
    def dict_column(name, comment):
        # All four columns are mandatory text fields.
        return Column.define(name, builtin_types.text, comment=comment, nullok=False)

    column_defs = [
        dict_column('Name', 'The name of the dictionary'),
        dict_column('Category', 'The category of the dictionary'),
        dict_column('Version', 'The version of the dictionary'),
        dict_column('Location', 'The location of the dictionary'),
    ]
    key_defs = [
        # A dictionary version is identified by (Name, Version).
        Key.define(['Name', 'Version'],
                   constraint_names=[['PDB', 'Data_Dictionary_primary_key']]),
        Key.define(['RID', 'Category'],
                   constraint_names=[['PDB', 'Data_Dictionary_RID_Category_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'Data_Dictionary_RID_key']]),
    ]
    fkey_defs = [
        # Name and Category are drawn from controlled vocabularies.
        ForeignKey.define(['Name'], 'Vocab', 'Data_Dictionary_Name', ['Name'],
                          constraint_names=[['PDB', 'Data_Dictionary_Name_fkey']],
                          on_update='CASCADE', on_delete='CASCADE'),
        ForeignKey.define(['Category'], 'Vocab', 'Data_Dictionary_Category', ['Name'],
                          constraint_names=[['PDB', 'Data_Dictionary_Category_fkey']],
                          on_update='CASCADE', on_delete='CASCADE'),
    ]
    return Table.define('Data_Dictionary',
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment='Dictionary containing all available versions.',
                        provide_system=True)
def create_specimen_table(self):
    """Create the Specimen table in the data schema.

    Fix: the table comment previously read "A biospeciment"; corrected to
    "A biospecimen".
    """
    schema = self.model.schemas[self.DATA]
    table_def = Table.define(
        "Specimen",
        column_defs=[
            Column.define("Species", builtin_types.text, nullok=False),
            Column.define("Sex", builtin_types.text, nullok=True),
            Column.define("Stage", builtin_types.text, nullok=True,
                          comment="developmental stage of this specimen"),
            Column.define("Anatomy", builtin_types.text, nullok=True),
            Column.define("Assay_Type", builtin_types.text, nullok=False,
                          comment="type of assay performed on this specimen"),
            Column.define("Internal_ID", builtin_types.text, nullok=True,
                          comment="data-provider-defined unique identifier")
        ],
        key_defs=[Key.define(["Internal_ID"])],
        fkey_defs=[
            # Vocabulary columns reference Name, except Anatomy, which
            # references the vocabulary term ID.
            ForeignKey.define(
                ["Species"], self.VOCABULARY, "Species", ["Name"],
                constraint_names=[[self.DATA, "Specimen_Species_fkey"]]),
            ForeignKey.define(
                ["Sex"], self.VOCABULARY, "Sex", ["Name"],
                constraint_names=[[self.DATA, "Specimen_Sex_fkey"]]),
            ForeignKey.define(
                ["Stage"], self.VOCABULARY, "Stage", ["Name"],
                constraint_names=[[self.DATA, "Specimen_Stage_fkey"]]),
            ForeignKey.define(
                ["Assay_Type"], self.VOCABULARY, "Assay_Type", ["Name"],
                constraint_names=[[self.DATA, "Specimen_Assay_Type_fkey"]]),
            ForeignKey.define(
                ["Anatomy"], self.VOCABULARY, "Anatomy", ["ID"],
                constraint_names=[[self.DATA, "Specimen_Anatomy_fkey"]])
        ],
        comment="A biospecimen")
    self.try_create_table(schema, table_def)
def define_tdoc_Conform_Dictionary():
    """Return the ERMrest definition for the PDB.Conform_Dictionary table.

    Associates an exported mmCIF file with the Data_Dictionary version
    it conforms to.
    """
    column_defs = [
        Column.define('Exported_mmCIF_RID', builtin_types.text,
                      comment='The reference to the RID of the exported mmCIF file.',
                      nullok=False),
        Column.define('Data_Dictionary_RID', builtin_types.text,
                      comment='The reference to the RID of the Data_Dictionary version for the mmCIF file.',
                      nullok=False)
    ]
    key_defs = [
        # Each (file, dictionary-version) pairing appears at most once.
        Key.define(['Exported_mmCIF_RID', 'Data_Dictionary_RID'],
                   constraint_names=[['PDB', 'Conform_Dictionary_primary_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'Conform_Dictionary_RID_key']]),
    ]
    fkey_defs = [
        ForeignKey.define(['Exported_mmCIF_RID'],
                          'PDB', 'Entry_mmCIF_File', ['RID'],
                          constraint_names=[['PDB', 'Conform_Dictionary_Entry_mmCIF_File_fkey']],
                          on_update='CASCADE', on_delete='CASCADE'),
        ForeignKey.define(['Data_Dictionary_RID'],
                          'PDB', 'Data_Dictionary', ['RID'],
                          constraint_names=[['PDB', 'Conform_Dictionary_Data_Dictionary_fkey']],
                          on_update='CASCADE', on_delete='CASCADE')
    ]
    return Table.define('Conform_Dictionary',
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment='Dictionary associated to the export mmCIF file.',
                        provide_system=True)
def create_file_table(self):
    """Create the File table for replicate data files."""
    file_columns = [
        Column.define("Replicate", builtin_types.text, nullok=False,
                      comment="Replicate that generated this file"),
        Column.define("File_URI", builtin_types.text, nullok=False,
                      comment="URI for this file"),
        Column.define("File_Name", builtin_types.text, nullok=False,
                      comment="Name of file when uploaded"),
        Column.define("File_Size", builtin_types.int8, nullok=False,
                      comment="Size of file in bytes"),
        Column.define("File_MD5", builtin_types.text, nullok=False,
                      comment="MD5 checksum of this file"),
        Column.define("File_SHA256", builtin_types.text, nullok=False,
                      comment="SHA256 checksum of this file"),
        Column.define("File_Type", builtin_types.text, nullok=False),
        Column.define("Caption", builtin_types.text, nullok=True),
    ]
    table_def = Table.define(
        "File",
        column_defs=file_columns,
        # MD5 and URI must each be unique across all files.
        key_defs=[Key.define(["File_MD5"]), Key.define(["File_URI"])],
        fkey_defs=[
            ForeignKey.define(
                ["Replicate"], self.DATA, "Replicate", ["RID"],
                constraint_names=[[self.DATA, "File_Replicate_fkey"]]),
            ForeignKey.define(
                ["File_Type"], self.VOCABULARY, "File_Type", ["Name"],
                constraint_names=[[self.DATA, "File_File_Type_fkey"]]),
        ],
        comment="Data files")
    self.try_create_table(self.model.schemas[self.DATA], table_def)
def update_PDB_ihm_cross_link_restraint(model):
    """Upgrade PDB.ihm_cross_link_restraint in place (idempotent helpers)."""
    # New optional pseudo-site flag column.
    flag_column = Column.define(
        'pseudo_site_flag', builtin_types.text,
        comment='A flag indicating if the cross link involves a pseudo site that is not part of the model representation',
        nullok=True)
    utils.create_column_if_not_exist(model, 'PDB', 'ihm_cross_link_restraint',
                                     flag_column)

    # pseudo_site_flag values come from Vocab.pseudo_site_flag.Name.
    # NOTE(review): the constraint is registered under the 'Vocab' schema
    # here, while other migrations in this file (e.g.
    # update_PDB_ihm_related_datasets) register under the owning table's
    # schema — confirm this is intentional.
    flag_fkey = ForeignKey.define(
        ['pseudo_site_flag'], 'Vocab', 'pseudo_site_flag', ['Name'],
        constraint_names=[['Vocab', 'ihm_cross_link_restraint_pseudo_site_flag_fkey']],
        on_update='CASCADE',
        on_delete='NO ACTION')
    utils.create_foreign_key_if_not_exists(
        model, 'PDB', 'ihm_cross_link_restraint',
        'ihm_cross_link_restraint_pseudo_site_flag_fkey', flag_fkey)

    # "combo1" composite key so other tables can reference
    # (RID, structure_id, id).
    utils.create_key_if_not_exists(model, 'PDB', 'ihm_cross_link_restraint',
                                   ['RID', 'structure_id', 'id'],
                                   'ihm_cross_link_restraint_combo1_key')
def create_source_file_table(self):
    """Create the source-file tracking table.

    Fix: corrected the Downloaded_From column comment, which previously
    read "downladoed from".
    """
    schema = self.model.schemas[self.source_file_schema]
    schema.create_table(
        Table.define(
            self.source_file_table,
            [
                Column.define("Species", builtin_types.text, nullok=False,
                              comment="Species this file represents"),
                Column.define("Downloaded_From", builtin_types.text,
                              comment="URL of remote file (e.g., from NCBI) that this was downloaded from"),
                Column.define("File_Name", builtin_types.text, nullok=False),
                Column.define("File_URL", builtin_types.text, nullok=False),
                # NOTE(review): int4 caps File_Bytes at ~2 GiB; confirm
                # source files never exceed that, otherwise use int8.
                Column.define("File_Bytes", builtin_types.int4, nullok=False),
                Column.define("File_MD5", builtin_types.text, nullok=False)
            ],
            # One source file per species.
            key_defs=[Key.define(["Species"])],
            fkey_defs=[
                ForeignKey.define(
                    ["Species"], self.species_schema, self.species_table,
                    self.adjust_fkey_columns_case(
                        self.species_schema, self.species_table, ["ID"]))
            ]))
def update_PDB_ihm_related_datasets(model):
    """Upgrade PDB.ihm_related_datasets in place (idempotent helpers)."""
    # New optional columns identifying a transformation applied to the
    # derived dataset.
    transformation_id_col = Column.define(
        'transformation_id', builtin_types.int8,
        comment='Identifier corresponding to the transformation matrix to be applied to the derived dataset in order to transform it to the primary dataset',
        nullok=True)
    utils.create_column_if_not_exist(model, 'PDB', 'ihm_related_datasets',
                                     transformation_id_col)

    transformation_rid_col = Column.define(
        'Transformation_RID', builtin_types.text,
        comment='Identifier to the transformation RID',
        nullok=True)
    utils.create_column_if_not_exist(model, 'PDB', 'ihm_related_datasets',
                                     transformation_rid_col)

    # "combo2"-style composite fkey: (Transformation_RID, transformation_id)
    # must match an (RID, id) pair in PDB.ihm_data_transformation.
    combo2_fkey = ForeignKey.define(
        ['Transformation_RID', 'transformation_id'],
        'PDB', 'ihm_data_transformation', ['RID', 'id'],
        constraint_names=[['PDB', 'ihm_related_datasets_ihm_data_transformation_combo2_fkey']],
        on_update='CASCADE',
        on_delete='NO ACTION')
    utils.create_foreign_key_if_not_exists(
        model, 'PDB', 'ihm_related_datasets',
        'ihm_related_datasets_ihm_data_transformation_combo2_fkey',
        combo2_fkey)
def create_replicate_table(self):
    """Create the Replicate table in the data schema."""
    replicate_columns = [
        Column.define("Experiment", builtin_types.text, nullok=False,
                      comment="Experiment that produced this replicate"),
        Column.define("Biological_Replicate_Number", builtin_types.int4,
                      nullok=False),
        Column.define("Technical_Replicate_Number", builtin_types.int4,
                      nullok=False),
        Column.define("Mapped_Reads", builtin_types.float8, nullok=True),
        Column.define("RNA_Reads", builtin_types.float8, nullok=True),
        Column.define("Specimen", builtin_types.text, nullok=False),
        Column.define("Description", builtin_types.text, nullok=True),
    ]
    # A replicate is identified by its experiment plus its biological and
    # technical replicate numbers.
    replicate_key = Key.define([
        "Experiment",
        "Biological_Replicate_Number",
        "Technical_Replicate_Number",
    ])
    replicate_fkeys = [
        ForeignKey.define(
            ["Experiment"], self.DATA, "Experiment", ["RID"],
            constraint_names=[[self.DATA, "Replicate_Experiment_fkey"]]),
        ForeignKey.define(
            ["Specimen"], self.DATA, "Specimen", ["RID"],
            constraint_names=[[self.DATA, "Replicate_Specimen_fkey"]]),
    ]
    table_def = Table.define(
        "Replicate",
        column_defs=replicate_columns,
        key_defs=[replicate_key],
        fkey_defs=replicate_fkeys,
        comment="A biological or technical replicate in a sequencing experiment")
    self.try_create_table(self.model.schemas[self.DATA], table_def)
def update_PDB_ihm_ensemble_info(model):
    """Upgrade PDB.ihm_ensemble_info in place (idempotent helpers)."""
    # New optional sub-sampling columns.
    utils.create_column_if_not_exist(
        model, 'PDB', 'ihm_ensemble_info',
        Column.define(
            'sub_sample_flag', builtin_types.text,
            comment='A flag that indicates whether the ensemble consists of sub samples',
            nullok=True))
    utils.create_column_if_not_exist(
        model, 'PDB', 'ihm_ensemble_info',
        Column.define(
            'sub_sampling_type', builtin_types.text,
            comment='Type of sub sampling',
            nullok=True))

    # Both new columns are drawn from same-named Vocab tables.
    # NOTE(review): constraint names are registered under the 'Vocab'
    # schema rather than the owning table's schema — confirm intentional.
    for column, constraint in (
            ('sub_sample_flag', 'ihm_ensemble_info_sub_sample_flag_fkey'),
            ('sub_sampling_type', 'ihm_ensemble_info_sub_sampling_type_fkey')):
        utils.create_foreign_key_if_not_exists(
            model, 'PDB', 'ihm_ensemble_info', constraint,
            ForeignKey.define([column], 'Vocab', column, ['Name'],
                              constraint_names=[['Vocab', constraint]],
                              on_update='CASCADE',
                              on_delete='NO ACTION'))

    # "combo1" composite key so other tables can reference
    # (RID, structure_id, ensemble_id).
    utils.create_key_if_not_exists(model, 'PDB', 'ihm_ensemble_info',
                                   ['RID', 'structure_id', 'ensemble_id'],
                                   'ihm_ensemble_info_combo1_key')
def tdef(tname):
    """Return the definition of one transitive-closure association table."""
    return Table.define(
        tname,
        column_defs=[
            Column.define(
                "descendant", builtin_types.text, nullok=False,
                comment="Contained dataset in transitive relationship."),
            Column.define(
                "ancestor", builtin_types.text, nullok=False,
                comment="Containing dataset in transitive relationship."),
        ],
        key_defs=[
            # Each (descendant, ancestor) pair appears at most once.
            Key.define(
                ["descendant", "ancestor"],
                constraint_names=[["CFDE", tname + "_assoc_key"]]),
        ],
        fkey_defs=[
            # Both endpoints reference CFDE.dataset.id.
            ForeignKey.define(
                ["descendant"], "CFDE", "dataset", ["id"],
                constraint_names=[["CFDE", tname + "_descendant_fkey"]]),
            ForeignKey.define(
                ["ancestor"], "CFDE", "dataset", ["id"],
                constraint_names=[["CFDE", tname + "_ancestor_fkey"]]),
        ],
        comment="Flattened, transitive closure of nested DatasetsInDatasets relationship.",
    )
def create_experiment_table(self):
    """Create the Experiment table in the data schema."""
    experiment_columns = [
        # The data provider's own identifier (RBK "Notes") is the natural key.
        Column.define("Internal_ID", builtin_types.text, nullok=False,
                      comment="data-provider-defined unique identifier"),
        Column.define("Study", builtin_types.text, nullok=False,
                      comment="study that this experiment is part of"),
        Column.define("Molecule_Type", builtin_types.text, nullok=False),
        Column.define("Cell_Count", builtin_types.int4, nullok=True),
        Column.define("Notes", builtin_types.markdown, nullok=True),
    ]
    experiment_fkeys = [
        ForeignKey.define(
            ["Study"], self.DATA, "Study", ["RID"],
            constraint_names=[[self.DATA, "Experiment_Study_fkey"]]),
        ForeignKey.define(
            ["Molecule_Type"], self.VOCABULARY, "Molecule_Type", ["Name"],
            constraint_names=[[self.DATA, "Experiment_Molecule_Type_fkey"]]),
    ]
    table_def = Table.define(
        "Experiment",
        column_defs=experiment_columns,
        key_defs=[Key.define(["Internal_ID"])],
        fkey_defs=experiment_fkeys,
        comment="A sequencing or metabolomics experiment")
    self.try_create_table(self.model.schemas[self.DATA], table_def)
def create_dbxref_table(self):
    """Create the gene cross-reference (dbxref) table."""
    dbxref_columns = [
        Column.define("Gene", builtin_types.text, nullok=False,
                      comment='Gene that this identifier refers to'),
        Column.define("Alternate_ID", builtin_types.text, nullok=False),
        Column.define("Alternate_Ontology", builtin_types.text, nullok=False,
                      comment='Ontology this identifier is from'),
        Column.define("Reference_URL", builtin_types.text, nullok=True,
                      comment='URL to a description, in this alternate ontology, of this gene'),
    ]
    dbxref_fkeys = [
        ForeignKey.define(
            ["Gene"], self.gene_schema, self.gene_table,
            self.adjust_fkey_columns_case(
                self.gene_schema, self.gene_table, ["ID"])),
        ForeignKey.define(
            ["Alternate_Ontology"], self.ontology_schema, self.ontology_table,
            self.adjust_fkey_columns_case(
                self.ontology_schema, self.ontology_table, ["ID"])),
    ]
    self.model.schemas[self.dbxref_schema].create_table(
        Table.define(
            self.dbxref_table,
            dbxref_columns,
            # A gene may carry each alternate identifier only once.
            key_defs=[Key.define(["Gene", "Alternate_ID"])],
            fkey_defs=dbxref_fkeys,
            comment='Alternate gene identifiers from different ontologies'))
def create_gene_table(self, extra_boolean_cols=None):
    """Create the gene vocabulary table.

    Fixes:
    - ``column_defs=column_defs`` referenced an undefined name (the local
      list is ``common_cols``), raising NameError at call time.
    - The Gene_Type fkey called ``adjust_fkey_columns_case`` without the
      schema/table arguments every other caller in this file supplies.
    - ``key_defs`` was a bare column-name list rather than a list of
      ERMrest key documents built with ``Key.define``.
    - ``extra_boolean_cols`` used a shared mutable default argument.

    :param extra_boolean_cols: optional names of extra boolean columns.
    :return: the created table object.
    """
    schema = self.model.schemas[self.gene_schema]
    common_cols = [
        Column.define("Gene_Type", builtin_types.text, nullok=False),
        Column.define("Species", builtin_types.text, nullok=False),
        Column.define("Chromosome", builtin_types.text),
        Column.define("Location", builtin_types.text,
                      comment="Location on chromosome"),
        Column.define("Source_Date", builtin_types.date,
                      comment="Last-updated date reported by the gene data source")
    ]
    for colname in (extra_boolean_cols or []):
        common_cols.append(Column.define(colname, builtin_types.boolean))
    fkey_defs = [
        ForeignKey.define(
            ["Gene_Type"], self.gene_type_schema, self.gene_type_table,
            self.adjust_fkey_columns_case(
                self.gene_type_schema, self.gene_type_table, ["ID"])),
        ForeignKey.define(
            ["Species"], self.species_schema, self.species_table,
            self.adjust_fkey_columns_case(
                self.species_schema, self.species_table, ["ID"])),
        ForeignKey.define(
            ["Chromosome"], self.chromosome_schema, self.chromosome_table,
            ["RID"])
    ]
    key_defs = [Key.define(["NCBI_GeneID"])]
    table = schema.create_table(
        Table.define_vocabulary(
            self.gene_table,
            '{prefix}:{{RID}}'.format(prefix=self.curie_prefix),
            column_defs=common_cols,
            key_defs=key_defs,
            fkey_defs=fkey_defs,
            comment="Genes"))
    return table
def dataset_property(srctable, srccolumn):
    """Return (table-name, table-def) for a denormalized dataset property.

    The new table pairs a dataset id with one property column copied from
    srctable, carrying over any single-column fkey on that property.
    """
    tname = 'dataset_denorm_%s' % srccolumn.name

    # Re-target each fkey of srctable whose foreign-key column set is
    # exactly {srccolumn.name} onto the denormalized table.
    prop_fkeys = []
    for fkey in srctable.foreign_keys:
        fk_colnames = set(c.name for c in fkey.foreign_key_columns)
        if fk_colnames == {srccolumn.name}:
            prop_fkeys.append(
                ForeignKey.define(
                    [srccolumn.name],
                    'CFDE',
                    fkey.referenced_columns[0].table.name,
                    [c.name for c in fkey.referenced_columns],
                    constraint_names=[['CFDE', '%s_prop_fkey' % tname]]))

    table_def = Table.define(
        tname,
        [
            Column.define("dataset", builtin_types.text, nullok=False),
            # Property column keeps the source column's nullability.
            Column.define(srccolumn.name, builtin_types.text, srccolumn.nullok),
        ],
        [
            Key.define(["dataset", srccolumn.name]),
        ],
        [
            ForeignKey.define(
                ["dataset"], "CFDE", "dataset", ["id"],
                constraint_names=[["CFDE", "%s_ds_fkey" % tname]]),
        ] + prop_fkeys,
    )
    return (tname, table_def)
def create_chromosome_table(self):
    """Create the chromosome table, keyed by (Name, Species)."""
    species_fkey = ForeignKey.define(
        ["Species"], self.species_schema, self.species_table,
        self.adjust_fkey_columns_case(
            self.species_schema, self.species_table, ["ID"]))
    table_def = Table.define(
        self.chromosome_table,
        [
            Column.define("Name", builtin_types.text, nullok=False),
            Column.define("Species", builtin_types.text, nullok=False),
        ],
        # Chromosome names are only unique within a species.
        key_defs=[Key.define(["Name", "Species"])],
        fkey_defs=[species_fkey])
    self.model.schemas[self.chromosome_schema].create_table(table_def)
def make_fkey(tname, fkdef):
    # Translate a frictionless-style foreignKey spec (fkdef) into an
    # ERMrest foreign-key document for table `tname`.
    # Consumed entries are pop()ed off fkdef/reference; whatever remains in
    # fkdef is preserved verbatim as the schema_tag annotation payload.
    fkcols = fkdef.pop("fields")
    # A single column may be given as a bare string; normalize to a list.
    fkcols = [fkcols] if isinstance(fkcols, str) else fkcols
    reference = fkdef.pop("reference")
    # Referenced schema defaults to the enclosing schema_name.
    pkschema = reference.pop("resourceSchema", schema_name)
    pktable = reference.pop("resource")
    # Empty resource means a self-reference to this table.
    pktable = tname if pktable == "" else pktable
    to_name = reference.pop("title", None)
    pkcols = reference.pop("fields")
    pkcols = [pkcols] if isinstance(pkcols, str) else pkcols
    constraint_name = fkdef.pop("constraint_name", None)
    if constraint_name is None:
        # don't run this if we don't need it...
        constraint_name = make_id(tname, fkcols, 'fkey')
    # Reject names over the 63-byte identifier limit (measured in UTF-8).
    if len(constraint_name.encode('utf8')) > 63:
        raise ValueError('Constraint name "%s" too long in %r' % (constraint_name, fkdef))

    def get_action(clause):
        # Map a frictionless on_delete/on_update action (case-insensitive,
        # default "no action") to its SQL spelling; unknown values raise.
        try:
            return {
                'cascade': 'CASCADE',
                'set null': 'SET NULL',
                'set default': 'SET DEFAULT',
                'restrict': 'RESTRICT',
                'no action': 'NO ACTION',
            }[fkdef.pop(clause, 'no action').lower()]
        except KeyError as e:
            raise ValueError(
                'unknown action "%s" for foreign key %s %s clause' % (e, constraint_name, clause))

    on_delete = get_action('on_delete')
    on_update = get_action('on_update')
    # Leftover fkdef keys ride along as the schema_tag annotation.
    annotations = {
        schema_tag: fkdef,
    }
    if to_name is not None:
        annotations[tag.foreign_key] = {"to_name": to_name}
    return ForeignKey.define(
        fkcols,
        pkschema, pktable, pkcols,
        constraint_names=[[schema_name, constraint_name]],
        on_delete=on_delete,
        on_update=on_update,
        annotations=annotations)
def define_tdoc_Supported_Dictionary():
    """Return the ERMrest definition for the PDB.Supported_Dictionary table."""
    column_defs = [
        Column.define('Data_Dictionary_RID', builtin_types.text,
                      comment='The reference to the RID of the latest Data_Dictionary version',
                      nullok=False),
        Column.define('Data_Dictionary_Category', builtin_types.text,
                      comment='The reference to the category of the latest Data_Dictionary version',
                      nullok=False)
    ]
    key_defs = [
        # At most one supported dictionary per category.
        Key.define(['Data_Dictionary_Category'],
                   constraint_names=[['PDB', 'Supported_Dictionary_primary_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'Supported_Dictionary_RID_key']]),
    ]
    fkey_defs = [
        # The (RID, Category) pair must exist in Data_Dictionary, keeping
        # the category here consistent with the referenced version row.
        ForeignKey.define(['Data_Dictionary_RID', 'Data_Dictionary_Category'],
                          'PDB', 'Data_Dictionary', ['RID', 'Category'],
                          constraint_names=[['PDB', 'Supported_Dictionary_fkey']],
                          on_update='CASCADE', on_delete='CASCADE')
    ]
    return Table.define('Supported_Dictionary',
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment='Dictionary containing the latest supported versions.',
                        provide_system=True)
def make_fkey(tname, fkdef, schema_name):
    """Translate a frictionless-style foreignKey spec into an ERMrest
    foreign-key document; leftover fkdef keys become the schema_tag
    annotation payload."""
    raw_fields = fkdef.pop("fields")
    # A single column may appear as a bare string; normalize to a list.
    fkcols = [raw_fields] if isinstance(raw_fields, str) else raw_fields
    reference = fkdef.pop("reference")
    pktable = reference.pop("resource")
    if pktable == "":
        # Empty resource means a self-reference to this table.
        pktable = tname
    raw_pkfields = reference.pop("fields")
    pkcols = [raw_pkfields] if isinstance(raw_pkfields, str) else raw_pkfields
    constraint = "%s_%s_fkey" % (tname, "_".join(fkcols))
    return ForeignKey.define(
        fkcols,
        schema_name, pktable, pkcols,
        constraint_names=[[schema_name, constraint]],
        annotations={
            schema_tag: fkdef,
        })
def make_fkey(tname, fkdef):
    """Translate a frictionless-style foreignKey spec into an ERMrest
    foreign-key document; an optional reference title becomes the
    foreign-key to_name annotation."""
    raw_fields = fkdef.pop("fields")
    # A single column may appear as a bare string; normalize to a list.
    fkcols = [raw_fields] if isinstance(raw_fields, str) else raw_fields
    reference = fkdef.pop("reference")
    pktable = reference.pop("resource")
    if pktable == "":
        # Empty resource means a self-reference to this table.
        pktable = tname
    to_name = reference.pop("title", None)
    raw_pkfields = reference.pop("fields")
    pkcols = [raw_pkfields] if isinstance(raw_pkfields, str) else raw_pkfields
    # Leftover fkdef keys ride along as the schema_tag annotation.
    annotations = {
        schema_tag: fkdef,
    }
    if to_name is not None:
        annotations[tag.foreign_key] = {"to_name": to_name}
    return ForeignKey.define(
        fkcols,
        schema_name, pktable, pkcols,
        constraint_names=[[schema_name, "%s_%s_fkey" % (tname, "_".join(fkcols))]],
        annotations=annotations)
def define_Vocab_table(table_name, table_comment):
    """Return a standard Vocab-schema vocabulary table definition.

    :param table_name: name of the vocabulary table.
    :param table_comment: comment to attach to the table.
    """
    def cnames(suffix):
        # Constraint names live in the Vocab schema and embed the table name.
        return [["Vocab", "{}_{}".format(table_name, suffix)]]

    column_defs = [
        Column.define("ID", builtin_types.ermrest_curie,
                      comment='The preferred Compact URI (CURIE) for this term.',
                      nullok=False,
                      default="PDB:{RID}"),
        Column.define("URI", builtin_types.ermrest_uri,
                      nullok=False,
                      default="/id/{RID}",
                      comment="The preferred URI for this term."),
        Column.define("Name", builtin_types.text, nullok=False),
        Column.define("Description", builtin_types.markdown, nullok=False),
        Column.define("Synonyms", builtin_types["text[]"],
                      nullok=True,
                      comment="Alternate human-readable names for this term."),
        Column.define("Owner", builtin_types.text,
                      comment='Group that can update the record.',
                      nullok=True)
    ]
    key_defs = [
        Key.define(["URI"], constraint_names=cnames("URI_key")),
        Key.define(["Name"], constraint_names=cnames("Name_key")),
        Key.define(["ID"], constraint_names=cnames("ID_key")),
        Key.define(["RID"], constraint_names=cnames("RID_key")),
    ]
    fkey_defs = [
        # System provenance columns reference the client table; Owner
        # references the catalog group table.
        ForeignKey.define(["RCB"], "public", "ERMrest_Client", ["ID"],
                          constraint_names=cnames("RCB_fkey"),
                          on_update="NO ACTION", on_delete="NO ACTION"),
        ForeignKey.define(["RMB"], "public", "ERMrest_Client", ["ID"],
                          constraint_names=cnames("RMB_fkey"),
                          on_update="NO ACTION", on_delete="NO ACTION"),
        ForeignKey.define(["Owner"], "public", "Catalog_Group", ["ID"],
                          constraint_names=cnames("Owner_fkey"),
                          on_update="NO ACTION", on_delete="NO ACTION")
    ]
    return Table.define(table_name,
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment=table_comment,
                        provide_system=True)
def define_tdoc_ihm_hdx_restraint():
    """Return the ERMrest definition for the PDB.ihm_hdx_restraint table."""
    table_name = 'ihm_hdx_restraint'
    comment = 'Details of restraint derived from hydrogen-deuterium (H/D) exchange experiments; can be uploaded as CSV/TSV file above; mmCIF category: ihm_hdx_restraint'

    column_defs = [
        Column.define('id', builtin_types.int8,
                      comment='A unique id for the H/D exchange restraint',
                      nullok=False),
        Column.define('feature_id', builtin_types.int8,
                      comment='An identifier for the peptide / residue feature',
                      nullok=False),
        Column.define('Feature_RID', builtin_types.text,
                      comment='Identifier to the feature RID',
                      nullok=False),
        Column.define('protection_factor', builtin_types.float8,
                      comment='The value of the protection factor determined from H/D exchange experiments',
                      nullok=True),
        Column.define('dataset_list_id', builtin_types.int8,
                      comment='Identifier to the H/D exchange input data from which the restraints are derived',
                      nullok=False),
        Column.define('Dataset_List_RID', builtin_types.text,
                      comment='Identifier to the dataset list RID',
                      nullok=False),
        Column.define('details', builtin_types.text,
                      comment='Additional details regarding the H/D exchange restraint',
                      nullok=True),
        Column.define('structure_id', builtin_types.text,
                      comment='Structure identifier',
                      nullok=False),
        Column.define('Entry_Related_File', builtin_types.text,
                      comment='A reference to the uploaded restraint file in the table Entry_Related_File.id',
                      nullok=True)
    ]

    key_defs = [
        Key.define(['structure_id', 'id'],
                   constraint_names=[['PDB', 'ihm_hdx_restraint_primary_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'ihm_hdx_restraint_RID_key']]),
    ]

    fkey_defs = [
        # Every row belongs to an entry.
        ForeignKey.define(['structure_id'], 'PDB', 'entry', ['id'],
                          constraint_names=[['PDB', 'ihm_hdx_restraint_structure_id_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        # "combo1" fkeys: RID plus the natural composite key.
        ForeignKey.define(['Feature_RID', 'structure_id', 'feature_id'],
                          'PDB', 'ihm_feature_list',
                          ['RID', 'structure_id', 'feature_id'],
                          constraint_names=[['PDB', 'ihm_hdx_restraint_ihm_feature_list_combo1_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        ForeignKey.define(['Dataset_List_RID', 'structure_id', 'dataset_list_id'],
                          'PDB', 'ihm_dataset_list',
                          ['RID', 'structure_id', 'id'],
                          constraint_names=[['PDB', 'ihm_hdx_restraint_ihm_dataset_list_combo1_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        ForeignKey.define(['Entry_Related_File'],
                          'PDB', 'Entry_Related_File', ['RID'],
                          constraint_names=[['PDB', 'ihm_hdx_restraint_Entry_Related_File_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION')
    ]

    return Table.define(table_name,
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment=comment,
                        provide_system=True)
Key.define( ["Step_RID","File_RID"], # this is a list to allow for compound keys constraint_names=[ [schema_name, "Step_OutputFile_Mapping_RID_key"] ], comment="Step plus file must be distinct.", annotations={}, ) ] fkey_defs = [ ForeignKey.define( ["Step_RID"], # this is a list to allow for compound foreign keys "Core", "Step", ["RID"], # this is a list to allow for compound keys on_update='CASCADE', on_delete='SET NULL', constraint_names=[ [schema_name, "Step_OutputFile_Mapping_Step_RID_fkey"] ], comment="", acls={}, acl_bindings={}, annotations={}, ),ForeignKey.define( ["File_RID"], # this is a list to allow for compound foreign keys "Core", "File", ["RID"], # this is a list to allow for compound keys on_update='CASCADE', on_delete='SET NULL', constraint_names=[ [schema_name, "Step_OutputFile_Mapping_File_RID_fkey"] ], comment="", acls={},
def define_tdoc_ihm_cross_link_pseudo_site():
    """Return the ERMrest definition for PDB.ihm_cross_link_pseudo_site."""
    table_name = 'ihm_cross_link_pseudo_site'
    comment = 'Details of pseudo sites involved in crosslinks; can be uploaded as CSV/TSV file above; mmCIF category: ihm_cross_link_pseudo_site'

    column_defs = [
        Column.define('id', builtin_types.int8,
                      comment='An identifier for a pseudo site involved in a crosslink',
                      nullok=False),
        Column.define('restraint_id', builtin_types.int8,
                      comment='An identifier for the crosslink restraint between a pair of residues',
                      nullok=False),
        Column.define('cross_link_partner', builtin_types.text,
                      comment='The identity of the crosslink partner corresponding to the pseudo site',
                      nullok=False),
        Column.define('pseudo_site_id', builtin_types.int8,
                      comment='The pseudo site identifier corresponding to the crosslink partner',
                      nullok=False),
        Column.define('model_id', builtin_types.int8,
                      comment='Identifier to the model that the pseudo site corresponds to',
                      nullok=True),
        Column.define('Entry_Related_File', builtin_types.text,
                      comment='A reference to the uploaded restraint file in the table Entry_Related_File.id.',
                      nullok=True),
        Column.define('structure_id', builtin_types.text,
                      comment='Structure identifier',
                      nullok=False),
        # RID convenience columns used by Chaise for the composite fkeys below.
        Column.define('Model_RID', builtin_types.text,
                      comment='Identifier to the model RID',
                      nullok=True),
        Column.define('Pseudo_Site_RID', builtin_types.text,
                      comment='Identifier to the pseudo site RID',
                      nullok=False),
        Column.define('Restraint_RID', builtin_types.text,
                      comment='Identifier to the restraint RID',
                      nullok=False)
    ]

    # Leaf table: no combo1/combo2 keys required here.
    key_defs = [
        Key.define(['structure_id', 'id'],
                   constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_primary_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_RID_key']]),
    ]

    fkey_defs = [
        # Every row belongs to an entry.
        ForeignKey.define(['structure_id'], 'PDB', 'entry', ['id'],
                          constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_structure_id_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        # Mandatory restraint reference ("combo1": RID + natural key).
        ForeignKey.define(['Restraint_RID', 'structure_id', 'restraint_id'],
                          'PDB', 'ihm_cross_link_restraint',
                          ['RID', 'structure_id', 'id'],
                          constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_ihm_cross_link_restraint_combo1_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        # Optional model reference ("combo2": RID + nullable natural key).
        ForeignKey.define(['Model_RID', 'model_id'],
                          'PDB', 'ihm_model_list', ['RID', 'model_id'],
                          constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_ihm_model_list_combo2_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        # Mandatory pseudo-site reference ("combo1").
        ForeignKey.define(['Pseudo_Site_RID', 'structure_id', 'pseudo_site_id'],
                          'PDB', 'ihm_pseudo_site',
                          ['RID', 'structure_id', 'id'],
                          constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_ihm_pseudo_site_combo1_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        # Controlled vocabulary for the crosslink partner.
        ForeignKey.define(['cross_link_partner'],
                          'Vocab', 'cross_link_partner', ['Name'],
                          constraint_names=[['Vocab', 'ihm_cross_link_pseudo_site_cross_link_partner_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION'),
        ForeignKey.define(['Entry_Related_File'],
                          'PDB', 'Entry_Related_File', ['RID'],
                          constraint_names=[['PDB', 'ihm_cross_link_pseudo_site_Entry_Related_File_fkey']],
                          on_update='CASCADE', on_delete='NO ACTION')
    ]

    return Table.define(table_name,
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment=comment,
                        provide_system=True)
Key.define( ["Name"], # this is a list to allow for compound keys constraint_names=[ [schema_name, "Instance_Name_key"] ], comment="Instance name must be distinct.", annotations={}, ) ] fkey_defs = [ ForeignKey.define( ["Domain_ID"], # this is a list to allow for compound foreign keys "Vocab", "Domain", ["id"], # this is a list to allow for compound keys on_update='CASCADE', on_delete='SET NULL', constraint_names=[ [schema_name, "Instance_Domain_RID_fkey"] ], comment="", acls={}, acl_bindings={}, annotations={}, ), ForeignKey.define( ["Level_ID"], # this is a list to allow for compound foreign keys "Vocab", "Instance_Level", ["id"], # this is a list to allow for compound keys on_update='CASCADE', on_delete='SET NULL', constraint_names=[ [schema_name, "Instance_Level_RID_fkey"] ], comment="",
def define_tdoc_ihm_data_transformation():
    """Return the ERMrest table definition for PDB.ihm_data_transformation.

    The table holds a 3x3 rotation matrix and a 3-element translation vector
    that can be applied to transform data (mmCIF category:
    ihm_data_transformation).
    """
    table_name = 'ihm_data_transformation'
    comment = 'Details of rotation matrix and translation vector that can be applied to transform data; mmCIF category: ihm_data_transformation'

    column_defs = [
        Column.define('id', builtin_types.int8,
                      comment='An identifier to the transformation matrix',
                      nullok=False)
    ]
    # Rotation-matrix cells are declared column-major ([1][1], [2][1], [3][1],
    # [1][2], ...) to preserve the original mmCIF column ordering.
    for col in (1, 2, 3):
        for row in (1, 2, 3):
            column_defs.append(
                Column.define(
                    'rot_matrix[{}][{}]'.format(row, col),
                    builtin_types.float8,
                    comment='Data item [{}][{}] of the rotation matrix used in the transformation'.format(row, col),
                    nullok=True))
    for idx in (1, 2, 3):
        column_defs.append(
            Column.define(
                'tr_vector[{}]'.format(idx),
                builtin_types.float8,
                comment='Data item [{}] of the translation vector used in the transformation'.format(idx),
                nullok=True))
    column_defs.append(
        Column.define('structure_id', builtin_types.text,
                      comment='Structure identifier',
                      nullok=False))

    # BV: This is a parent table with optional columns in the child table;
    # so a combo2 key (RID, id) is defined.
    key_defs = [
        Key.define(['structure_id', 'id'],
                   constraint_names=[['PDB', 'ihm_data_transformation_primary_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'ihm_data_transformation_RID_key']]),
        Key.define(['RID', 'id'],
                   constraint_names=[['PDB', 'ihm_data_transformation_combo2_key']])
    ]

    # BV: No outgoing fkeys other than structure_id.
    fkey_defs = [
        ForeignKey.define(['structure_id'], 'PDB', 'entry', ['id'],
                          constraint_names=[['PDB', 'ihm_data_transformation_structure_id_fkey']],
                          on_update='CASCADE',
                          on_delete='NO ACTION')
    ]

    return Table.define(table_name,
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment=comment,
                        provide_system=True)
def define_tdoc_ihm_derived_angle_restraint():
    """Return the ERMrest table definition for PDB.ihm_derived_angle_restraint.

    Angle restraints used in integrative modeling; the data can also be
    uploaded as a CSV/TSV file (mmCIF category: ihm_derived_angle_restraint).
    This is a leaf table, so no combo1/combo2 keys are required; the
    Feature_Id_*_RID / Dataset_List_RID columns exist so Chaise can resolve
    the composite combo1 foreign keys below.
    """
    table_name = 'ihm_derived_angle_restraint'
    comment = 'Details of angle restraints used in integrative modeling; can be uploaded as CSV/TSV file above; mmCIF category: ihm_derived_angle_restraint'

    ordinal = {1: 'first', 2: 'second', 3: 'third'}

    # The three restrained features follow an identical pattern.
    feature_cols = [
        Column.define(
            'feature_id_{}'.format(n), builtin_types.int8,
            comment='The feature identifier for the {} partner in the angle restraint'.format(ordinal[n]),
            nullok=False)
        for n in (1, 2, 3)
    ]
    feature_rid_cols = [
        Column.define(
            'Feature_Id_{}_RID'.format(n), builtin_types.text,
            comment='Identifier to the feature {} RID'.format(n),
            nullok=False)
        for n in (1, 2, 3)
    ]

    column_defs = [
        Column.define('id', builtin_types.int8,
                      comment='An identifier for the angle restraint',
                      nullok=False),
        Column.define('group_id', builtin_types.int8,
                      comment='An identifier to group the angle restraints',
                      nullok=True),
    ] + feature_cols + [
        Column.define(
            'group_conditionality', builtin_types.text,
            comment='If a group of angles are restrained together, this data item defines the conditionality based on which the restraint is applied in the modeling',
            nullok=True),
        Column.define(
            'angle_lower_limit', builtin_types.float8,
            comment='The lower limit to the threshold applied to this angle restraint',
            nullok=True),
        Column.define(
            'angle_upper_limit', builtin_types.float8,
            comment='The upper limit to the threshold applied to this angle restraint',
            nullok=True),
        Column.define(
            'angle_lower_limit_esd', builtin_types.float8,
            comment='The estimated standard deviation of the lower limit angle threshold applied',
            nullok=True),
        Column.define(
            'angle_upper_limit_esd', builtin_types.float8,
            comment='The estimated standard deviation of the upper limit angle threshold applied',
            nullok=True),
        Column.define(
            'probability', builtin_types.float8,
            comment='The probability that the angle restraint is correct',
            nullok=True),
        Column.define(
            'restraint_type', builtin_types.text,
            comment='The type of angle restraint applied',
            nullok=False),
        Column.define(
            'angle_threshold_mean', builtin_types.float8,
            comment='The angle threshold mean applied to the restraint',
            nullok=True),
        Column.define(
            'angle_threshold_mean_esd', builtin_types.float8,
            comment='The estimated standard deviation of the angle threshold mean applied to the restraint',
            nullok=True),
        Column.define(
            'dataset_list_id', builtin_types.int8,
            comment='Identifier to the input data from which the angle restraint is derived',
            nullok=False),
        Column.define(
            'Entry_Related_File', builtin_types.text,
            comment='A reference to the uploaded restraint file in the table Entry_Related_File.id.',
            nullok=True),
        Column.define(
            'structure_id', builtin_types.text,
            comment='Structure identifier',
            nullok=False),
    ] + feature_rid_cols + [
        Column.define(
            'Dataset_List_RID', builtin_types.text,
            comment='Identifier to the dataset list RID',
            nullok=False)
    ]

    # BV: leaf table; only a primary key and the RID key.
    key_defs = [
        Key.define(['structure_id', 'id'],
                   constraint_names=[['PDB', 'ihm_derived_angle_restraint_primary_key']]),
        Key.define(['RID'],
                   constraint_names=[['PDB', 'ihm_derived_angle_restraint_RID_key']]),
    ]

    # HT: own fkey to the entry table first.
    fkey_defs = [
        ForeignKey.define(['structure_id'], 'PDB', 'entry', ['id'],
                          constraint_names=[['PDB', 'ihm_derived_angle_restraint_structure_id_fkey']],
                          on_update='CASCADE',
                          on_delete='NO ACTION'),
    ]
    # HT: in annotations, a domain_filter constrains the RID list by
    # structure_id for each of these combo1 references.
    fkey_defs.extend(
        ForeignKey.define(
            ['Feature_Id_{}_RID'.format(n), 'structure_id', 'feature_id_{}'.format(n)],
            'PDB', 'ihm_feature_list', ['RID', 'structure_id', 'feature_id'],
            constraint_names=[['PDB', 'ihm_derived_angle_restraint_ihm_feature_list_{}_combo1_fkey'.format(n)]],
            on_update='CASCADE',
            on_delete='NO ACTION')
        for n in (1, 2, 3))
    fkey_defs.extend([
        ForeignKey.define(
            ['Dataset_List_RID', 'structure_id', 'dataset_list_id'],
            'PDB', 'ihm_dataset_list', ['RID', 'structure_id', 'id'],
            constraint_names=[['PDB', 'ihm_derived_angle_restraint_ihm_dataset_list_combo1_fkey']],
            on_update='CASCADE',
            on_delete='NO ACTION'),
        ForeignKey.define(
            ['group_conditionality'],
            'Vocab', 'ihm_derived_angle_restraint_group_conditionality', ['Name'],
            constraint_names=[['Vocab', 'ihm_derived_angle_restraint_group_conditionality_fkey']],
            on_update='CASCADE',
            on_delete='NO ACTION'),
        ForeignKey.define(
            ['restraint_type'],
            'Vocab', 'ihm_derived_angle_restraint_restraint_type', ['Name'],
            constraint_names=[['Vocab', 'ihm_derived_angle_restraint_restraint_type_fkey']],
            on_update='CASCADE',
            on_delete='NO ACTION'),
        ForeignKey.define(
            ['Entry_Related_File'],
            'PDB', 'Entry_Related_File', ['RID'],
            constraint_names=[['PDB', 'ihm_derived_angle_restraint_Entry_Related_File_fkey']],
            on_update='CASCADE',
            on_delete='NO ACTION'),
    ])

    return Table.define(table_name,
                        column_defs,
                        key_defs=key_defs,
                        fkey_defs=fkey_defs,
                        comment=comment,
                        provide_system=True)
["File_RID", "Metadata_RID" ], # this is a list to allow for compound keys constraint_names=[[schema_name, "File_Metadata_RID_key"]], comment="file&term must be distinct.", annotations={}, ) ] fkey_defs = [ ForeignKey.define( ["File_RID"], # this is a list to allow for compound foreign keys "Core", "File", ["RID"], # this is a list to allow for compound keys on_update='CASCADE', on_delete='SET NULL', constraint_names=[[schema_name, "File_Metadata_Mapping_File_fkey"]], comment="", acls={}, acl_bindings={}, annotations={}, ), ForeignKey.define( ["Metadata_RID"], # this is a list to allow for compound foreign keys "Vocab", "Metadata", ["id"], # this is a list to allow for compound keys on_update='CASCADE', on_delete='SET NULL', constraint_names=[[schema_name, "File_Metadata_Mapping_Term_fkey"]], comment="",
def replace_vocab_table(schema_name, old_table_name, new_table_name, replace_if_exists=False):
    """Replaces old vocab table with new and remaps all foreign keys from old to new.

    Steps: (1) create the new vocabulary table, (2) copy cleaned terms from
    the old table into it, (3) for every foreign key that referenced the old
    table's 'dbxref' column, rewrite the referring rows to the new curie 'id'
    values and recreate the fkey against the new table, (4) drop the interim
    'dbxref' column from the new table.

    :param schema_name: name of the schema containing both tables
    :param old_table_name: legacy vocabulary table to migrate from
    :param new_table_name: vocabulary table to create and migrate to
    :param replace_if_exists: if True, drop a pre-existing new_table_name first;
        if False, skip the whole migration when it already exists

    NOTE(review): relies on module-level globals (model, args, catalog,
    verbose/vverbose, clean_term, fkey_blacklist_pattern) — confirm they are
    initialized before this is called. args.dryrun gates every mutating step.
    """
    schema = model.schemas[schema_name]

    # Drop new_vocab table if exists (optional)
    if not args.dryrun and new_table_name in schema.tables:
        if replace_if_exists:
            verbose("Found {tname}. Dropping...".format(tname=new_table_name))
            schema.tables[new_table_name].delete(catalog, schema)
        else:
            verbose("Found {tname}. Skipping...".format(tname=new_table_name))
            return

    # Define and create new vocab table.
    # 'dbxref' is carried over temporarily so old fkey values can be remapped;
    # it is dropped again at the end of this function.
    extra_cols = [
        Column.define('dbxref', builtin_types['text'],
                      comment='Legacy database external reference (dbxref).')
    ]
    if args.altids:
        extra_cols = [
            Column.define('alternate_ids', builtin_types['text[]'],
                          comment='Alternate identifiers for this term.')
        ] + extra_cols
    vocab_table_def = Table.define_vocabulary(
        new_table_name, args.curie_template,
        uri_template='https://www.facebase.org/id/{RID}',
        column_defs=extra_cols)
    if not args.dryrun:
        new_table = schema.create_table(catalog, vocab_table_def)

    # Populate new vocab table: read the legacy columns, renaming them to the
    # standard vocabulary column names via keyword projection.
    datapaths = catalog.getPathBuilder()
    old_table_path = datapaths.schemas[schema_name].tables[old_table_name]
    kwargs = {
        'name': old_table_path.column_definitions['name'],
        'description': old_table_path.column_definitions['definition'],
        'synonyms': old_table_path.column_definitions['synonyms'],
        'dbxref': old_table_path.column_definitions['dbxref']
    }
    if args.altids:
        kwargs['alternate_dbxrefs'] = old_table_path.column_definitions[
            'alternate_dbxrefs']
    cleaned_terms = [
        clean_term(term) for term in old_table_path.entities(**kwargs)
    ]
    vverbose('Cleaned terms ready for insert into {tname}:'.format(
        tname=new_table_name))
    vverbose(list(cleaned_terms))

    # Create separate batches for insertion w/ defaults: terms without an 'id'
    # need the server to mint one, so they use different insert defaults.
    terms_w_ids = [
        term for term in cleaned_terms if term['id'] and len(term['id'])
    ]
    terms_w_no_ids = [
        term for term in cleaned_terms
        if not term['id'] or not len(term['id'])
    ]

    if not args.dryrun:
        new_table_path = datapaths.schemas[schema_name].tables[new_table_name]
        new_terms = list(new_table_path.insert(terms_w_ids, defaults=['uri']))
        new_terms += list(
            new_table_path.insert(terms_w_no_ids, defaults=['id', 'uri']))
        vverbose('New terms returned after insert into {tname}:'.format(
            tname=new_table_name))
        vverbose(list(new_terms))
    else:
        # This allows for best effort dryrun testing, though the local term
        # CURIEs will be faked from the dbxref prefix (text before the last ':').
        new_terms = cleaned_terms
        for term in new_terms:
            if not term['id']:
                term['id'] = term['dbxref'][:term['dbxref'].rindex(':')].upper(
                )

    # Create mapping of old dbxref to new id
    dbxref_to_id = {term['dbxref']: term['id'] for term in new_terms}

    # Find all references to old vocab table dbxref
    old_table = schema.tables[old_table_name]
    for fkey in old_table.referenced_by:
        if fkey_blacklist_pattern.match(fkey.names[0][1]):
            verbose('Skipping foreign key "{sname}:{cname}"'.format(
                sname=fkey.names[0][0], cname=fkey.names[0][1]))
            continue  # skip fkeys from vocab to vocab
        for i in range(len(fkey.referenced_columns)):
            # Get referenced column
            refcol = fkey.referenced_columns[i]
            # See if it references the dbxref of the old vocab table, if not skip
            if (refcol['schema_name'] != schema_name
                    or refcol['table_name'] != old_table_name
                    or refcol['column_name'] != 'dbxref'):
                continue
            # Get the corresponding referring table and its fkey column
            fkeycol = fkey.foreign_key_columns[i]
            reftable = model.schemas[fkeycol['schema_name']].tables[
                fkeycol['table_name']]
            verbose(
                'Found reference to "dbxref" from "{sname}:{tname}:{cname}"'.
                format(sname=fkeycol['schema_name'],
                       tname=fkeycol['table_name'],
                       cname=fkeycol['column_name']))

            # Delete the fkey first so the column values can be rewritten
            # without violating the constraint.
            if not args.dryrun:
                verbose('Deleting foreign key "{sname}:{cname}"'.format(
                    sname=fkey.names[0][0], cname=fkey.names[0][1]))
                fkey.delete(catalog, reftable)

            # Fix fkey column value
            verbose('Getting existing fkey column values')
            reftable_path = datapaths.schemas[fkeycol['schema_name']].tables[
                fkeycol['table_name']]
            entities = reftable_path.entities(
                reftable_path.RID,
                reftable_path.column_definitions[fkeycol['column_name']])

            # Map the old dbxref value to the new curie id value for the reference.
            # NOTE(review): an unmapped dbxref value raises KeyError here —
            # presumably every referring value exists in the old vocab table;
            # verify before running against production data.
            verbose('Remapping {count} fkey column values'.format(
                count=len(entities)))
            for entity in entities:
                if entity[fkeycol['column_name']]:
                    entity[fkeycol['column_name']] = dbxref_to_id[entity[
                        fkeycol['column_name']]]
            vverbose(list(entities))

            # Update referring table in batches of args.max_update rows
            if not args.dryrun:
                verbose(
                    'Updating fkey column values, {max_up} at a time'.format(
                        max_up=args.max_update))
                slice_ct = 0
                slice_sz = args.max_update
                updated = []
                while (slice_ct * slice_sz) < len(entities):
                    data = entities[(slice_ct * slice_sz):((1 + slice_ct) *
                                                           slice_sz)]
                    reftable_path.update(data, targets=[fkeycol['column_name']])
                    updated.extend(data)
                    slice_ct += 1
                if len(updated) != len(entities):
                    print(
                        'WARNING: only updated {up_count} of {ent_count} entities!'
                        .format(up_count=len(updated), ent_count=len(entities)))

            # Define new fkey pointing at the new vocab table's 'id' column,
            # preserving the old fkey's names, comment, acls and annotations.
            verbose(
                'Defining and creating new foreign key reference to new vocab table'
            )
            fkey.referenced_columns[i]['column_name'] = 'id'
            new_fkey = ForeignKey.define(
                [
                    fkey.foreign_key_columns[j]['column_name']
                    for j in range(len(fkey.foreign_key_columns))
                ],
                schema_name, new_table_name,
                [
                    fkey.referenced_columns[k]['column_name']
                    for k in range(len(fkey.referenced_columns))
                ],
                on_update=fkey.on_update or 'NO ACTION',
                on_delete=fkey.on_delete or 'NO ACTION',
                constraint_names=fkey.names or [],
                comment=fkey.comment or None,
                acls=fkey.acls or {},
                acl_bindings=fkey.acl_bindings or {},
                annotations=fkey.annotations or {})
            vverbose(new_fkey)
            if not args.dryrun:
                reftable.create_fkey(catalog, new_fkey)

    # All references now point at 'id'; the interim 'dbxref' column can go.
    if not args.dryrun:
        verbose('Dropping "dbxref" column from new vocab table')
        dbxref = new_table.column_definitions['dbxref']
        dbxref.delete(catalog, new_table)
def make_table(tdef):
    """Translate a datapackage-style table resource into a Table.define(...) doc.

    :param tdef: resource dict with "name", optional "description"/"title",
        and a nested "schema" dict holding "fields", optional "primaryKey",
        and optional "foreignKeys". NOTE: the dict is mutated in place — the
        "schema" key is popped out and both remnants are stashed back into
        the table annotations (pre-existing behavior).
    :return: deriva Table.define(...) document for the resource.
    """
    # System columns can be suppressed via the environment for catalogs that
    # do not want the ERMrest RID/RCT/RMT/RCB/RMB housekeeping columns.
    provide_system = not (os.getenv('SKIP_SYSTEM_COLUMNS', 'false').lower() == 'true')
    tname = tdef["name"]

    if provide_system:
        system_columns = Table.system_column_defs()
        system_keys = Table.system_key_defs()
        # Customize the system column templates with friendly comments and
        # display names (hoisted out of the loop — they are loop-invariant).
        system_comments = {
            'RID': 'Immutable record identifier (system-generated).',
            'RCT': 'Record creation time (system-generated).',
            'RMT': 'Record last-modification time (system-generated).',
            'RCB': 'Record created by (system-generated).',
            'RMB': 'Record last-modified by (system-generated).',
        }
        display_names = {
            'RCT': 'Creation Time',
            'RMT': 'Modification Time',
            'RCB': 'Created By',
            'RMB': 'Modified By',
        }
        for col in system_columns:
            cname = col['name']
            col['comment'] = system_comments[cname]
            if cname != 'RID':
                col['annotations'] = {
                    tag.display: {
                        "name": display_names[cname]
                    }
                }
        # Provenance columns reference the ERMrest client registry.
        system_fkeys = [
            ForeignKey.define(
                [cname], 'public', 'ERMrest_Client', ['ID'],
                constraint_names=[['CFDE', '%s_%s_fkey' % (tname, cname)]])
            for cname in ['RCB', 'RMB']
        ]
    else:
        system_columns = []
        system_keys = []
        system_fkeys = []

    tcomment = tdef.get("description")
    tdef_resource = tdef
    tdef = tdef_resource.pop("schema")

    keys = []
    keysets = set()  # frozensets of key columns, to de-duplicate key defs
    pk = tdef.pop("primaryKey", None)
    if isinstance(pk, str):
        pk = [pk]  # normalize scalar primaryKey to the list form
    if isinstance(pk, list):
        keys.append(make_key(tname, pk))
        keysets.add(frozenset(pk))

    # FIX: default to [] instead of None — a resource without "fields"
    # previously raised TypeError when the loop iterated over None.
    tdef_fields = tdef.pop("fields", [])
    for cdef in tdef_fields:
        # A field marked unique becomes a single-column key (unless it
        # duplicates the primary key).
        if cdef.get("constraints", {}).pop("unique", False):
            kcols = [cdef["name"]]
            if frozenset(kcols) not in keysets:
                keys.append(make_key(tname, kcols))
                keysets.add(frozenset(kcols))

    tdef_fkeys = tdef.pop("foreignKeys", [])
    title = tdef_resource.get("title", None)

    # Stash the stripped resource/schema remnants as annotations so the
    # original datapackage metadata stays attached to the catalog table.
    annotations = {
        resource_tag: tdef_resource,
        schema_tag: tdef,
    }
    if title is not None:
        annotations[tag.display] = {"name": title}

    return Table.define(
        tname,
        column_defs=system_columns + [make_column(cdef) for cdef in tdef_fields],
        key_defs=system_keys + keys,
        fkey_defs=system_fkeys + [make_fkey(tname, fkdef) for fkdef in tdef_fkeys],
        comment=tcomment,
        # System columns were added explicitly above, so Table.define must
        # not add a second copy.
        provide_system=False,
    )