def test_string_to_column_data(self):
    columns_as_str = '(table_name, (TEXT, NOT NULL, UNIQUE)), (column_data, (TEXT, NOT NULL))'
    column_data = DB_general.string_to_column_data(columns_as_str)
    assert column_data == DB_general.master_table_columns
    # do one-item tuples cause problems?
    column_data = {
        'Col1': ('TEXT',),
        'Col2': ('TEXT',),
        'Col3': ('INTEGER',)
    }
    column_data_as_str = '(Col1, (TEXT)), (Col2, (TEXT)), (Col3, (INTEGER))'
    assert column_data == DB_general.string_to_column_data(column_data_as_str)
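# A minimal sketch of the parsing the test above exercises (hypothetical; the
# real string_to_column_data belongs to DB_general). Each "(name, (spec, ...))"
# group becomes one dict entry mapping the column name to a tuple of its specs,
# so a single spec still yields a one-item tuple.
import re


def string_to_column_data_sketch(text: str) -> dict:
    column_data = {}
    for name, specs in re.findall(r'\((\w+), \(([^)]*)\)\)', text):
        column_data[name] = tuple(s.strip() for s in specs.split(','))
    return column_data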
def initiate_db(filepath, db_type: str, num_of_dummy_inserts: int = 10) -> DB_general:
    # make sure we can access filepath
    if not filepath.parent.exists():
        filepath.parent.mkdir(parents=True, exist_ok=True)
    db = DB_general(filepath)
    for name, table_dict in table_data.items():
        if name == db_type:
            tables = table_dict
            type_tuple = type_tuples[name]
            break
    else:
        raise Exception('Invalid db type')
    # create tables
    for table, column_data in tables.items():
        db.create_table(table, column_data)
    # insert dummy data to tables
    ph = cs.PasswordHasher() if 'hash' in type_tuple else None
    f = cs.Fernet(cs.Fernet.generate_key()) if 'key' in type_tuple else None
    for table, column_data in tables.items():
        t_tuple = type_tuple[:len(column_data.keys())]
        dummy_data = generate_dummy_data(t_tuple, num_of_dummy_inserts, f, ph)
        db.insert_many(table, column_data.keys(), dummy_data)
    return db
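# A hypothetical generate_dummy_data, only to illustrate how initiate_db uses
# it. Assumptions: the type tuple holds markers such as 'int', 'hash' and 'key'
# (only 'hash' and 'key' appear in the code above; 'int' and the plain-text
# fallback are guesses), and cs re-exports argon2.PasswordHasher and
# cryptography.fernet.Fernet, whose hash()/encrypt() calls are used below.
import random
import string


def generate_dummy_data_sketch(type_tuple, num_of_inserts, fernet=None, hasher=None):
    def word(length=8):
        return ''.join(random.choices(string.ascii_letters, k=length))

    rows = []
    for _ in range(num_of_inserts):
        row = []
        for marker in type_tuple:
            if marker == 'int':
                row.append(random.randint(0, 1000))
            elif marker == 'hash' and hasher is not None:
                row.append(hasher.hash(word()))
            elif marker == 'key' and fernet is not None:
                row.append(fernet.encrypt(word().encode()).decode())
            else:
                row.append(word())
        rows.append(tuple(row))
    return rows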
def default_paths(paths):
    def_paths = get_paths_from_str(paths, 0)
    # make sure the files exist
    if not def_paths[0].exists():
        generate_salt(def_paths[0])
    for path in def_paths[1:]:
        if not path.exists():
            DB_general(path)
    yield def_paths
    # remove files after the test
    for path in def_paths:
        if path.exists():
            path.unlink()
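# A hypothetical generate_salt, assuming it simply writes random bytes to the
# given path; the real helper may do more.
import os


def generate_salt_sketch(path, length: int = 16):
    path.write_bytes(os.urandom(length))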
def test_get_everything(db1):
    everything = db1.get_everything()
    assert 'tables' in everything
    assert 'test_table' in everything
    assert 'empty_table' in everything
    tables_rows = [
        (1, 'tables',
         DB_general.column_data_as_string(DB_general.master_table_columns))
    ]
    tables_rows.append(
        (2, 'test_table', '(Col1, (TEXT)), (Col2, (TEXT)), (Col3, (INTEGER))'))
    tables_rows.append(
        (3, 'empty_table', '(Col1, (TEXT)), (Col2, (TEXT)), (Col3, (INTEGER))'))
    all_stuff = {'tables': tables_rows}
    all_stuff['test_table'] = [(1, 'a', 'b', 1), (2, 'c', 'd', 2), (3, 'e', 'f', 3)]
    all_stuff['empty_table'] = []
    assert all_stuff == everything
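# One way get_everything could work, purely illustrative: read the master
# 'tables' table, then dump every listed table. This sketch uses sqlite3
# directly and prepends rowid, which is where the leading 1, 2, 3 in the
# expected tuples above would come from.
import sqlite3


def get_everything_sketch(db_path):
    everything = {}
    with sqlite3.connect(db_path) as conn:
        master_rows = conn.execute('SELECT rowid, * FROM tables').fetchall()
        everything['tables'] = master_rows
        for _, table_name, _ in master_rows:
            if table_name == 'tables':
                continue
            everything[table_name] = conn.execute(
                f'SELECT rowid, * FROM {table_name}').fetchall()
    return everything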
def test_table_row_as_dict(self):
    columns = ('col1', 'col2', 'col3')
    row = (1, 2, 3)
    row_dict = DB_general.table_row_as_dict(row, columns)
    assert row_dict == {'col1': 1, 'col2': 2, 'col3': 3}
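# Hypothetical equivalent of table_row_as_dict: pairing column names with row
# values is just a zip into a dict.
def table_row_as_dict_sketch(row, columns):
    return dict(zip(columns, row))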
def test_prepare_to_add_to_master_table(self):
    table = 'Table_Name'
    column_data = {'Col1': ('TEXT', 'UNIQUE')}
    result = DB_general.prepare_to_add_to_master_table(table, column_data)
    assert result[0] == ('table_name', 'column_data')
    assert result[1] == ('Table_Name', '(Col1, (TEXT, UNIQUE))')
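# A hypothetical equivalent of prepare_to_add_to_master_table, matching the
# assertions above: pair the master table's column names with the new table's
# name and its serialized column data.
def prepare_to_add_to_master_table_sketch(table, column_data):
    return (('table_name', 'column_data'),
            (table, DB_general.column_data_as_string(column_data)))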
def test_column_data_as_string(self):
    columns_as_str = '(table_name, (TEXT, NOT NULL, UNIQUE)), (column_data, (TEXT, NOT NULL))'
    column_data_as_str = DB_general.column_data_as_string(
        DB_general.master_table_columns)
    assert columns_as_str == column_data_as_str
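# A minimal sketch of the serialization direction (hypothetical; the real
# column_data_as_string is part of DB_general): the inverse of the parsing
# sketched earlier, joining each column name with its spec tuple.
def column_data_as_string_sketch(column_data: dict) -> str:
    return ', '.join(
        '({}, ({}))'.format(name, ', '.join(specs))
        for name, specs in column_data.items())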
def db_backup():
    db_path = Path(__file__).parent / 'test_db_backup.db'
    yield DB_general(db_path)
    db_path.unlink()
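# Hypothetical usage: pytest injects the DB_general instance yielded by the
# db_backup fixture above (assuming it is registered with @pytest.fixture),
# and the backing file is removed again when the test finishes.
def test_db_backup_yields_db(db_backup):
    assert isinstance(db_backup, DB_general)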