class ISOEliminator:
    """Removes isomorphic duplicate cells from a library table.

    Cells sharing the same unified BSF signature are compared pairwise
    (via ``Cell.fetch_ids``), and all isomorphic duplicates of a kept
    representative are deleted from the table.
    """

    def __init__(self, db_config, table):
        self.db_ = ArkDBMySQL(db_config_file=db_config)
        self.table_ = table
        self.db_.set_table(self.table_)
        self.cell_ = Cell(self.db_)

    def get_iso_cell_ids_based_on_id(self, id_cell):
        """Return the ids of cells isomorphic to *id_cell*, excluding
        *id_cell* itself."""
        self.cell_.init_based_on_id(id_cell)
        id_list = self.cell_.fetch_ids()
        id_list.remove(id_cell)
        return id_list

    def eliminate_iso(self, start, cnt):
        """Delete isomorphic duplicates for the ``LIMIT start,cnt`` slice
        of distinct CELL_BSF_UNIFIED values.

        ``start``/``cnt`` partition the work so several runners can cover
        the table in disjoint slices (``runner_idx = start // cnt``).
        """
        query = f'SELECT DISTINCT CELL_BSF_UNIFIED FROM {self.table_} LIMIT {start},{cnt}'
        # BSF values come back as bytes from the driver; decode once up front.
        bsf_uni_list = [
            row['CELL_BSF_UNIFIED'].decode("utf-8")
            for row in self.db_.run_query_get_all_row(query)
        ]
        runner_idx = start // cnt
        for bsf in tqdm(bsf_uni_list, desc=f'Eliminating str iso[{runner_idx:02}]:'):
            query = f'SELECT idCELL FROM {self.table_} WHERE CELL_BSF_UNIFIED=%s'
            id_list = [
                row['idCELL']
                for row in self.db_.run_query_get_all_row(query, [bsf])
            ]
            removed_ids = set()
            for id_cell in tqdm(id_list, desc=f'Cells in {bsf}: '):
                # Skip cells already deleted as a duplicate of an earlier one.
                if id_cell in removed_ids:
                    continue
                dup_ids = self.get_iso_cell_ids_based_on_id(id_cell)
                removed_ids.update(dup_ids)
                if len(dup_ids) != 0:
                    # dup_ids are integer primary keys fetched from the DB,
                    # so interpolating them into the IN (...) list is safe.
                    str_temp = ', '.join([str(x) for x in dup_ids])
                    query = f'DELETE FROM {self.table_} WHERE idCELL IN ({str_temp})'
                    self.db_.run_sql_nocommit(query)
        # Fix: persist the batched DELETEs. The original never committed,
        # so the removals could be lost when the connection closed;
        # NonminimalEliminator commits at the end of its pass the same way.
        self.db_.commit()
class NonminimalEliminator:
    """Deletes cells flagged as structurally non-minimal.

    Each cell's netlist is run through a ``StructuralHypoChecker``
    configured with ``NonminimalityStrategy``; cells whose BSFs are not
    all distinct are removed from the table.
    """

    def __init__(self, db_config, table):
        self.db_ = ArkDBMySQL(db_config_file=db_config)
        self.table_ = table
        self.db_.set_table(self.table_)
        self.hypo_checker_ = StructuralHypoChecker()
        self.hypo_checker_.set_strategy(NonminimalityStrategy())

    def eliminate_nonminimal_cells(self, start, cnt, is_checking_bsf_weak=True):
        """Check the ``LIMIT start,cnt`` slice of the table and delete
        every non-minimal cell, committing once at the end.

        With *is_checking_bsf_weak* the weak-difference BSF test is used;
        otherwise the strict one.
        """
        sql = f'SELECT idCELL, CELL_NETLIST FROM {self.table_} LIMIT {start},{cnt}'
        deleted_ids = list()
        slice_idx = start // cnt
        rows = self.db_.run_query_get_all_row(sql)
        for record in tqdm(rows, desc=f'Eliminating nonminimal[{slice_idx:02}]:'):
            self.hypo_checker_.set_netlist(record['CELL_NETLIST'])
            self.hypo_checker_.check()
            if is_checking_bsf_weak:
                minimal = self.hypo_checker_.is_all_bsf_weak_diff()
            else:
                minimal = self.hypo_checker_.is_all_bsf_diff()
            if not minimal:
                self.db_.delete_nocommit(record['idCELL'], 'idCELL')
                deleted_ids.append(record['idCELL'])
        self.db_.commit()
def analyze_resistive_defect(db_config, table):
    """Run resistive-defect analysis on every cell in *table*.

    Iterates all idCELL values and lets ``ResistiveDefect`` insert the
    per-cell defect details into the database.
    """
    db = ArkDBMySQL(db_config_file=db_config)
    db.set_table(table)
    rows = db.run_query_get_all_row(f"SELECT idCELL FROM {db.get_table()}")
    cell_ids = [entry['idCELL'] for entry in rows]
    analyzer = ResistiveDefect(db)
    for cid in tqdm(cell_ids, desc='Resistive_defect'):
        analyzer.insert_defect_details_for_id_cell(cid)
def remove_non_shared_multi_cells(db_config, table):
    """Delete every cell tagged with the 'MultiCellIsoInput' family and
    print the resulting cell count of *table*."""
    db = ArkDBMySQL(db_config_file=db_config)
    db.set_table(table)
    rows = db.run_query_get_all_row(
        f"SELECT idCELL FROM {db.get_table()} WHERE CELL_FAMILY like '%MultiCellIsoInput%'"
    )
    target_ids = [entry['idCELL'] for entry in rows]
    for cid in target_ids:
        db.delete(cid, 'idCELL')
    # Commit only when something was actually deleted.
    if target_ids:
        db.commit()
    print(get_cell_cnt(db_config, table))
def update_bsf_uni_for_table(bsf_col, db_config_file, target_lib, start, cnt, force=False):
    """Propagate unified BSF strings from BSF_LIB into *target_lib*.

    For the ``LIMIT start,cnt`` slice of BSF_LIB, sets
    ``{bsf_col}_UNIFIED`` on every *target_lib* row whose ``{bsf_col}``
    matches. Unless *force* is given, the whole update is skipped when no
    row in *target_lib* still has a null ``{bsf_col}_UNIFIED``.
    """
    db = ArkDBMySQL(db_config_file=db_config_file)
    if not force:
        remaining = db.get_query_value(
            'CNT',
            f'SELECT COUNT(*) AS CNT FROM {target_lib} '
            f'WHERE {bsf_col}_UNIFIED is null'
        )
        if remaining == 0:
            print(f'{bsf_col}_UNIFIED column is set for all entries, skipping...')
            return
    # TODO: add fast mode, in which only null entries are selected and updated
    rows = db.run_query_get_all_row(f'SELECT * FROM BSF_LIB LIMIT {start},{cnt}')
    slice_idx = start // cnt
    update_sql = f'UPDATE {target_lib} SET {bsf_col}_UNIFIED = %s WHERE {bsf_col} = %s'
    for entry in tqdm(rows, desc=f'Update {bsf_col}_UNI[{slice_idx:02}]'):
        # BSF columns come back as bytes; decode before binding the params.
        db.run_sql_nocommit(
            update_sql,
            [entry['BSF_UNI'].decode("utf-8"), entry['BSF'].decode("utf-8")]
        )
    db.commit()
def tag_multi_cell(db_config, table):
    """Build pairwise multi-cells from small cells and tag matching
    library entries with their multi-cell family."""
    db = ArkDBMySQL(db_config_file=db_config)
    db.set_table(table)
    cell = Cell(db)
    # Select small cells: total transistor count at most 2.
    # NOTE(review): the original comment said "1~3 tx cells" but the
    # predicate is <=2 — confirm which is intended.
    small_netlists = [
        entry['CELL_NETLIST']
        for entry in db.run_query_get_all_row(
            f'SELECT CELL_NETLIST FROM {db.get_table()} WHERE CELL_PMOS_CNT+CELL_NMOS_CNT<=2'
        )
    ]
    # Combine every ordered pair of small cells into a multi-cell, then
    # search the library for the generated netlists and tag the hits.
    for left, right in tqdm(list(product(small_netlists, repeat=2)), desc='Multi-cell'):
        builder = MultiCell()
        iso_multi, shared_multi = builder.construct(left, right)
        for netlist in iso_multi:
            cell.init_based_on_netlist(netlist)
            cell.add_to_family('MultiCellIsoInput')
        for netlist in shared_multi:
            cell.init_based_on_netlist(netlist)
            cell.add_to_family('MultiCellSharedInput')