def setUpClass(cls, datasource_name='', metadata=None):
    """Create the shared sqlite engine for this test class.

    :param datasource_name: name under which the sqlite datasource is registered
    :param metadata: optional schema metadata handed through to create_sqlite
    """
    # Record the datasource name on the shared base class so tests can read it back.
    Unittest_with_sqlite.datasource_name = datasource_name
    # Build the sqlite engine, reflecting metadata from the database itself.
    create_sqlite(metadata=metadata,
                  datasource_name=datasource_name,
                  use_metadata_from_db=True,
                  echo=False)
def setUpClass(cls, datasource_name='', metadata=None, resources_dir=None, force_foreign_keys=True, use_metadata_from_db=True, import_data=True):
    """Create the sqlite engine, generate csv templates, and import fixture data.

    :param datasource_name: name under which the sqlite datasource is registered
    :param metadata: optional schema metadata handed through to create_sqlite
    :param resources_dir: directory holding csv fixtures; defaults to ../resources
    :param force_foreign_keys: enforce foreign keys in sqlite
    :param use_metadata_from_db: reflect metadata from the db instead of `metadata`
    :param import_data: when True, import csv fixtures (once per datasource/dir pair)
    """
    Unittest_with_sqlite.datasource_name = datasource_name
    # Create the db engine for sqlite.
    create_sqlite(use_metadata_from_db=use_metadata_from_db,
                  echo=False,
                  metadata=metadata,
                  datasource_name=datasource_name,
                  force_foreign_keys=force_foreign_keys)
    # Generate csv templates as test data in the sqlite.
    generate_cvs_templates(datasource_name=Unittest_with_sqlite.datasource_name)
    here = os.path.abspath(os.path.dirname(__file__))
    if resources_dir is None:
        resources_dir = os.path.abspath(os.path.join(here, '..', 'resources'))
    # Module-level cache ensures each datasource/resources pair is imported once.
    global csv_imported
    cache_key = datasource_name + '.' + resources_dir
    if import_data and not csv_imported.get(cache_key, False):
        import_csv_dir(resources_dir,
                       datasource_name=Unittest_with_sqlite.datasource_name)
        csv_imported[cache_key] = True
def test_list_of_tables_from_db(self):
    """Reflecting metadata from the db should expose a table listing."""
    create_sqlite(force_foreign_keys=True, use_metadata_from_db=True, echo=False)
    db_util = component.queryUtility(IDbUtil)
    sqlite_engine = db_util.get_engine()
    reflected_metadata = db_util.get_metadata()
    # Engine, metadata, and the reflected table names must all be present.
    self.assertIsNotNone(sqlite_engine)
    self.assertIsNotNone(reflected_metadata)
    self.assertIsNotNone(reflected_metadata.tables.keys())
def test_create_engine(self):
    """create_sqlite should register an IDbUtil with engine and metadata."""
    # Precondition: no sqlite-in-memory utility registered yet.
    self.assertIsNone(component.queryUtility(IDbUtil))
    create_sqlite(force_foreign_keys=True, use_metadata_from_db=False, echo=False)
    db_util = component.queryUtility(IDbUtil)
    sqlite_engine = db_util.get_engine()
    generated_metadata = db_util.get_metadata()
    self.assertIsNotNone(sqlite_engine)
    self.assertIsNotNone(generated_metadata)
def test_list_of_tables_by_using_foreign_keys_deps(self):
    """sorted_tables must order tables by foreign-key dependencies."""
    create_sqlite(force_foreign_keys=True, use_metadata_from_db=True, echo=False)
    db_util = component.queryUtility(IDbUtil)
    tables_in_dep_order = db_util.get_metadata().sorted_tables
    self.assertIsNotNone(tables_in_dep_order)
    for current_table in tables_in_dep_order:
        print('SORTED TABLES: ' + str(current_table))
    # fact_asmt_outcome_vw has Foreign keys from dim_asmt, dim_inst_hier,
    # and dim_section_subject, so it must sort after those tables.
    self.assertTrue(check_order_of_fact_asmt_outcome_vw(tables_in_dep_order))
def setUpClass(cls, datasource_name='', metadata=None, resources_dir=None, force_foreign_keys=True, use_metadata_from_db=True, import_data=True):
    """Set up sqlite for the test class: engine, csv templates, fixture import.

    :param datasource_name: name under which the sqlite datasource is registered
    :param metadata: optional schema metadata handed through to create_sqlite
    :param resources_dir: csv fixture directory; defaults to ../resources
    :param force_foreign_keys: enforce foreign keys in sqlite
    :param use_metadata_from_db: reflect metadata from the db instead of `metadata`
    :param import_data: when True, import csv fixtures (once per datasource/dir)
    """
    Unittest_with_sqlite.datasource_name = datasource_name
    # Create the db engine for sqlite.
    create_sqlite(use_metadata_from_db=use_metadata_from_db, echo=False, metadata=metadata,
                  datasource_name=datasource_name, force_foreign_keys=force_foreign_keys)
    # Create test data in the sqlite from generated csv templates.
    generate_cvs_templates(datasource_name=Unittest_with_sqlite.datasource_name)
    if resources_dir is None:
        here = os.path.abspath(os.path.dirname(__file__))
        resources_dir = os.path.abspath(os.path.join(here, '..', 'resources'))
    else:
        here = os.path.abspath(os.path.dirname(__file__))
    global csv_imported
    already_imported = csv_imported.get(datasource_name + '.' + resources_dir, False)
    # Import fixtures at most once per (datasource, resources dir) combination.
    if import_data and not already_imported:
        import_csv_dir(resources_dir, datasource_name=Unittest_with_sqlite.datasource_name)
        csv_imported[datasource_name + '.' + resources_dir] = True
def setUp(self):
    """Rebuild the in-memory sqlite from generated test metadata before each test."""
    test_metadata = generate_test_metadata()
    create_sqlite(metadata=test_metadata, use_metadata_from_db=False, echo=False)
def setUpClass(cls):
    """Create the shared sqlite engine for this test class."""
    # create db engine for sqlite
    # NOTE(review): echo=True makes sqlalchemy log every SQL statement; every
    # other create_sqlite call in this file uses echo=False — confirm the
    # verbose output here is intentional and not a debugging leftover.
    create_sqlite(use_metadata_from_db=True, echo=True)
def run_validation(metadata=None, missing_table_ignore=False, missing_field_ignore=False, dir_name='/please_specify_dir', verbose=False):
    '''
    run validation

    Validates a directory of csv files against the sqlite schema (table set,
    field set, and field order), then imports the data and loads the fact
    table. Returns 0 on success, 1 on any validation or import failure.
    The sqlite engine is always torn down on exit.

    @param metadata: specify metadata for validation
    @param missing_table_ignore: ignoring missing table from the list of csv
    @param missing_field_ignore: ignoring missing fields from the csv
    @param dir_name: the directory has all csv files
    @param verbose: verboseing sqlite output
    '''
    try:
        create_sqlite(use_metadata_from_db=False, echo=verbose, metadata=metadata)
        if not os.path.exists(dir_name):
            # No csv directory at all — nothing to validate.
            return 1
        # Map of table name -> csv file path found in the directory.
        csv_file_map = read_csv(dir_name)
        tables = get_list_of_tables()
        # check table consistency
        if not missing_table_ignore:
            missing_file_for_tables, unnecessary_files = check_tables(tables, csv_file_map)
            exit_me = False
            if len(missing_file_for_tables) > 0:
                print('No CSV file(s) for following table(s):')
                for table in missing_file_for_tables:
                    print('    ' + table.name)
                exit_me = True
            if len(unnecessary_files) > 0:
                print('Unnecessary CSV file(s):')
                for file in unnecessary_files:
                    print('    ' + file)
                exit_me = True
            if exit_me:
                return 1
        # check field consistency
        if not missing_field_ignore:
            exit_me = False
            for table in tables:
                if table.name in csv_file_map:
                    missing_fields, unnecessary_fields = check_fields(table, csv_file_map[table.name])
                    if len(missing_fields) > 0:
                        print('cvs[%s]: missing field(s):' % csv_file_map[table.name])
                        for field in missing_fields:
                            print('    ' + field)
                        exit_me = True
                    if len(unnecessary_fields) > 0:
                        print('cvs[%s]: unnecessary field(s):' % csv_file_map[table.name])
                        for field in unnecessary_fields:
                            print('    ' + field)
                        exit_me = True
            if exit_me:
                return 1
            # check fields are in order
            # use this checker when missing fields ignore flag is False
            for table in tables:
                if table.name in csv_file_map:
                    fields_in_order = check_fields_in_order(table, csv_file_map[table.name])
                    if not fields_in_order:
                        print('cvs[%s]: fields are not in order' % csv_file_map[table.name])
                        exit_me = True
            if exit_me:
                return 1
        # import data
        import_ok = import_csv_dir(dir_name)
        if not import_ok:
            print('failed to import csv data')
            return 1
        load_fact_asmt_outcome()
        return 0
    finally:
        # Always dispose of the sqlite engine, even on early return.
        destroy_sqlite()