def drop_indexes(self, engine: Engine, metadata: MetaData) -> None:
    """
    Drop all requested indexes from the source database.

    Args:
        engine: SQLAlchemy database Engine
        metadata: SQLAlchemy ORM MetaData whose tables are searched for
            the tables named in ``self._index_requests``
    """
    for tablename, tabledict in self._index_requests.items():
        index_names = list(tabledict.keys())
        # Requested table names may differ in case from the metadata's keys.
        tablename_casematch = get_case_insensitive_dict_key(
            metadata.tables, tablename)
        if not tablename_casematch:
            # BUG FIX: message previously said "add_indexes"; this is the
            # drop operation.
            log.warning(
                f"drop_indexes: Skipping index as table {tablename} absent")
            continue
        table = metadata.tables[tablename_casematch]
        # Calls the module-level drop_indexes() function, not this method.
        drop_indexes(engine, table, index_names)
def process_table(table: Table,
                  engine: Engine,
                  configoptions: PcmisConfigOptions) -> None:
    """
    Processes a PCMIS table by checking it has appropriate columns, perhaps
    adding a CRATE integer PK, and indexing it.

    Args:
        table: an SQLAlchemy Table to process
        engine: an SQLAlchemy Engine
        configoptions: an instance of :class:`PcmisConfigOptions`
    """
    tablename = table.name
    colnames = table.columns.keys()
    log.debug(f"TABLE: {tablename}; COLUMNS: {colnames}")
    pk_colnames = get_pk_colnames(table)
    assert len(pk_colnames) < 2, (
        f"Table {tablename} has >1 PK column; don't know what to do")
    if pk_colnames and not get_effective_int_pk_col(table):
        raise ValueError(f"Table {table!r} has a non-integer PK")
    needs_crate_pk = not pk_colnames
    required_cols = ([] if configoptions.print_sql_only
                     else [CRATE_COL_PK])
    if configoptions.drop_not_create:
        # ---------------------------------------------------------------------
        # DROP STUFF! Opposite order to creation (below)
        # ---------------------------------------------------------------------
        drop_indexes(engine, table, [CRATE_IDX_PK])
        drop_columns(engine, table, [CRATE_COL_PK])
        return
    # -------------------------------------------------------------------------
    # CREATE STUFF!
    # -------------------------------------------------------------------------
    # SQL Server requires Table-bound columns in order to generate DDL:
    if needs_crate_pk:
        crate_pk_col = make_bigint_autoincrement_column(
            CRATE_COL_PK, engine.dialect)
        table.append_column(crate_pk_col)
        add_columns(engine, table, [crate_pk_col])
    ensure_columns_present(engine, tablename=table.name,
                           column_names=required_cols)
    add_indexes(engine, table, [{
        'index_name': CRATE_IDX_PK,
        'column': CRATE_COL_PK,
        'unique': True,
    }])
def drop_indexes(self, engine: Engine, metadata: MetaData) -> None:
    """
    Drop indexes from the source database.

    Reverses the effects of :func:`create_indexes`.

    Args:
        engine: SQLAlchemy database Engine
        metadata: SQLAlchemy ORM Metadata
    """
    for tablename, tabledict in self._index_requests.items():
        index_names = list(tabledict.keys())
        # Requested table names may differ in case from the metadata's keys.
        tablename_casematch = get_case_insensitive_dict_key(
            metadata.tables, tablename)
        if not tablename_casematch:
            # BUG FIX: message previously said "add_indexes"; this is the
            # drop operation.
            log.warning(
                f"drop_indexes: Skipping index as table {tablename} absent")
            continue
        table = metadata.tables[tablename_casematch]
        # Calls the module-level drop_indexes() function, not this method.
        drop_indexes(engine, table, index_names)
def process_table(table: Table, engine: Engine, progargs: Any) -> None:
    """
    Processes a source table by checking its columns, perhaps adding a CRATE
    integer PK, and indexing it (or reversing those changes).

    Args:
        table: an SQLAlchemy Table to process
        engine: an SQLAlchemy Engine
        progargs: program arguments; reads ``print`` (generate SQL only) and
            ``drop_danger_drop`` (drop rather than create)
    """
    tablename = table.name
    column_names = table.columns.keys()
    # Modernized to f-strings (output unchanged).
    log.debug(f"TABLE: {tablename}; COLUMNS: {column_names}")
    existing_pk_cols = get_pk_colnames(table)
    assert len(existing_pk_cols) < 2, (
        f"Table {tablename} has >1 PK column; don't know what to do")
    if existing_pk_cols and not get_effective_int_pk_col(table):
        raise ValueError(f"Table {table!r} has a non-integer PK")
    adding_crate_pk = not existing_pk_cols
    required_cols = [CRATE_COL_PK] if not progargs.print else []
    if progargs.drop_danger_drop:
        # ---------------------------------------------------------------------
        # DROP STUFF! Opposite order to creation (below)
        # ---------------------------------------------------------------------
        drop_indexes(engine, table, [CRATE_IDX_PK])
        drop_columns(engine, table, [CRATE_COL_PK])
    else:
        # ---------------------------------------------------------------------
        # CREATE STUFF!
        # ---------------------------------------------------------------------
        # SQL Server requires Table-bound columns in order to generate DDL:
        if adding_crate_pk:
            crate_pk_col = make_bigint_autoincrement_column(
                CRATE_COL_PK, engine.dialect)
            table.append_column(crate_pk_col)
            add_columns(engine, table, [crate_pk_col])
        ensure_columns_present(engine, tablename=table.name,
                               column_names=required_cols)
        add_indexes(engine, table, [{
            'index_name': CRATE_IDX_PK,
            'column': CRATE_COL_PK,
            'unique': True
        }])
def drop_for_clindocs_table(table: Table, engine: Engine) -> None:
    """
    Drops the CRATE-created indexes and columns from a clinical-documents
    table.

    Args:
        table: an SQLAlchemy Table
        engine: an SQLAlchemy Engine
    """
    # Indexes first, then the columns they reference.
    index_names = [
        CRATE_IDX_RIONUM_SERIALNUM,
        CRATE_IDX_MAX_DOCVER,
        CRATE_IDX_LAST_DOC,
    ]
    column_names = [
        CRATE_COL_MAX_DOCVER,
        CRATE_COL_LAST_DOC,
    ]
    drop_indexes(engine, table, index_names)
    drop_columns(engine, table, column_names)
def drop_for_progress_notes(table: Table, engine: Engine) -> None:
    """
    Drops the CRATE-created view, indexes, and columns from a progress-notes
    table.

    Args:
        table: an SQLAlchemy Table
        engine: an SQLAlchemy Engine
    """
    # View first (it may depend on the indexed columns), then indexes,
    # then the columns themselves.
    drop_view(engine, VIEW_RCEP_CPFT_PROGRESS_NOTES_CURRENT)
    index_names = [
        CRATE_IDX_RIONUM_NOTENUM,
        CRATE_IDX_MAX_SUBNUM,
        CRATE_IDX_LAST_NOTE,
    ]
    column_names = [
        CRATE_COL_MAX_SUBNUM,
        CRATE_COL_LAST_NOTE,
    ]
    drop_indexes(engine, table, index_names)
    drop_columns(engine, table, column_names)
def drop_for_nonpatient_table(table: Table, engine: Engine) -> None:
    """
    Drops the CRATE-created PK index and PK column from a non-patient table.

    Args:
        table: an SQLAlchemy Table
        engine: an SQLAlchemy Engine
    """
    # Index first, then the column it references.
    drop_indexes(engine, table, [CRATE_IDX_PK])
    drop_columns(engine, table, [CRATE_COL_PK])
def drop_for_patient_table(table: Table, engine: Engine) -> None:
    """
    Drops the CRATE-created indexes and columns from a patient table.

    Args:
        table: an SQLAlchemy Table
        engine: an SQLAlchemy Engine
    """
    # Indexes first, then the columns they reference.
    index_names = [CRATE_IDX_PK, CRATE_IDX_RIONUM]
    column_names = [CRATE_COL_PK, CRATE_COL_RIO_NUMBER]
    drop_indexes(engine, table, index_names)
    drop_columns(engine, table, column_names)