def create_database(ctx, database_path):
    """Open the output SQLite database, creating parent directories first.

    The database is opened in append mode when the click context requests
    table appending, otherwise in overwrite mode.
    """
    append_requested = ctx.obj.get(Context.IS_APPEND_TABLE)
    database = path.Path(database_path)
    parent_dir = database.dirname()

    if typepy.is_not_null_string(parent_dir):
        parent_dir.makedirs_p()

    open_mode = "a" if append_requested else "w"

    return simplesqlite.SimpleSQLite(database, open_mode)
def create_database(database_path, dup_table):
    """Open the output SQLite database, creating parent directories first.

    Returns a ``(connection, is_create_db)`` tuple; ``is_create_db`` is
    True when the database file did not already exist.
    """
    database = path.Path(database_path)
    parent_dir = database.dirname()

    if typepy.is_not_null_string(parent_dir):
        parent_dir.makedirs_p()

    newly_created = not database.isfile()
    open_mode = "a" if dup_table == DupDatabase.APPEND else "w"

    return (sqlite.SimpleSQLite(database, open_mode), newly_created)
def create_database(
    database_path: str, dup_table: DupDatabase, max_workers: int
) -> Tuple[sqlite.SimpleSQLite, bool]:
    """Open the output SQLite database, creating parent directories first.

    Returns a ``(connection, is_create_db)`` tuple; ``is_create_db`` is
    True when the database file did not already exist.
    """
    database = path.Path(database_path)
    parent_dir = database.dirname()

    if typepy.is_not_null_string(parent_dir):
        parent_dir.makedirs_p()

    newly_created = not database.isfile()
    open_mode = "a" if dup_table == DupDatabase.APPEND else "w"

    return (
        sqlite.SimpleSQLite(database, open_mode, max_workers=max_workers),
        newly_created,
    )
def test_normal_complex_json(self):
    """A nested JSON input is split into one table per sub-structure."""
    db_path = "test_complex_json.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        json_path = valid_complex_json_file()
        result = runner.invoke(cmd, ["-o", db_path, "file", json_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")
        expected_tables = {
            "ratings",
            "screenshots_1",
            "screenshots_2",
            "screenshots_3",
            "screenshots_4",
            "screenshots_5",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
        assert set(con.fetch_table_name_list()) == expected_tables
def create_database(database_path):
    """Create a new SQLite database file, making parent directories first."""
    database = path.Path(database_path)
    parent_dir = database.dirname()

    if dataproperty.is_not_empty_string(parent_dir):
        parent_dir.makedirs_p()

    return simplesqlite.SimpleSQLite(database, "w")
def test_normal_json(self):
    """Fetching a complex JSON URL creates one table per sub-structure."""
    url = "https://example.com/complex_json.json"
    # Mock the HTTP response so the test runs offline.
    responses.add(
        responses.GET,
        url,
        body=complex_json,
        content_type="text/plain; charset=utf-8",
        status=200,
    )
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(cmd, ["-o", self.db_path, "url", url])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(self.db_path, "r")
        expected_tables = {
            "ratings",
            "screenshots_1",
            "screenshots_2",
            "screenshots_3",
            "screenshots_4",
            "screenshots_5",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
        assert set(con.fetch_table_name_list()) == expected_tables
def open(self, file_path):
    """
    Open a SQLite database file.

    :param str file_path: SQLite database file path to open.
    """
    # Close any previously opened stream before replacing it, so the old
    # connection is not leaked.
    self.close()
    # "w" mode: existing content at file_path is overwritten.
    self.stream = simplesqlite.SimpleSQLite(file_path, "w")
def main():
    """Demonstrate verify_table_existence() for present and absent tables."""
    sample_table = "sample_table"
    connection = simplesqlite.SimpleSQLite("sample.sqlite", "w")
    connection.create_table_from_data_matrix(
        sample_table,
        ["attr_a", "attr_b"],
        [[1, "a"], [2, "b"]],
    )

    # Succeeds silently for an existing table.
    connection.verify_table_existence(sample_table)

    # Raises DatabaseError for a missing table.
    try:
        connection.verify_table_existence("not_existing")
    except simplesqlite.DatabaseError as e:
        print(e)
def main():
    """Demonstrate has_attr() for existing/missing columns and tables."""
    sample_table = "sample_table"
    connection = simplesqlite.SimpleSQLite("sample.sqlite", "w")
    connection.create_table_from_data_matrix(
        sample_table,
        ["attr_a", "attr_b"],
        [[1, "a"], [2, "b"]],
    )

    # Returns True/False for columns of an existing table.
    print(connection.has_attr(sample_table, "attr_a"))
    print(connection.has_attr(sample_table, "not_existing"))

    # Raises DatabaseError when the table itself is missing.
    try:
        print(connection.has_attr("not_existing", "attr_a"))
    except simplesqlite.DatabaseError as e:
        print(e)
def main():
    """Demonstrate check_connection() before and after closing."""
    connection = simplesqlite.SimpleSQLite("sample.sqlite", "w")

    print("---- connected to a database ----")
    connection.check_connection()

    print("---- disconnected from a database ----")
    connection.close()

    # After close() the connection check raises.
    try:
        connection.check_connection()
    except simplesqlite.NullDatabaseConnectionError as e:
        print(e)
def test_normal_multi_file_same_table_different_structure(self):
    """Same table name with different structure gets a suffixed table."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_multi_file_2_2(),
            valid_json_multi_file_2_3(),
        ]

        result = runner.invoke(cmd, ["file"] + file_list + ["-o", db_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")
        expected_table_list = ['multij2', 'multij2_1']
        actual_table_list = con.get_table_name_list()
        print("[expected]\n{}\n".format(expected_table_list))
        print("[actual]\n{}\n".format(actual_table_list))
        assert set(actual_table_list) == set(expected_table_list)

        expected_data_table = {
            "multij2": [
                (1, 4.0, 'a'),
                (2, 2.1, 'bb'),
                (3, 120.9, 'ccc'),
            ],
            "multij2_1": [
                (u'abc', u'a', 4.0),
                (u'abc', u'bb', 2.1),
                (u'abc', u'ccc', 120.9),
            ],
        }
        for table in con.get_table_name_list():
            expected_data = expected_data_table.get(table)
            actual_data = con.select("*", table_name=table).fetchall()
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data)

            print("--- table: {} ---".format(table))
            print("[expected]\n{}\n".format(expected_data))
            print("[actual]\n{}\n".format(actual_data))
            assert expected_data == actual_data, message
def main():
    """Create a sample Excel workbook, load it into SQLite, and dump it."""
    file_path = "sample_data.xlsx"

    def write_table(sheet, rows):
        # Write a 2-D table starting at cell A1.
        for row_idx, row in enumerate(rows):
            for col_idx, cell in enumerate(row):
                sheet.write(row_idx, col_idx, cell)

    # create sample data file ---
    workbook = xlsxwriter.Workbook(file_path)

    write_table(workbook.add_worksheet("samplesheet1"), [
        ["", "", "", ""],
        ["", "a", "b", "c"],
        ["", 1, 1.1, "a"],
        ["", 2, 2.2, "bb"],
        ["", 3, 3.3, "cc"],
    ])

    workbook.add_worksheet("samplesheet2")  # intentionally left empty

    write_table(workbook.add_worksheet("samplesheet3"), [
        ["", "", ""],
        ["", "", ""],
        ["aa", "ab", "ac"],
        [1, "hoge", "a"],
        [2, "", "bb"],
        [3, "foo", ""],
    ])

    workbook.close()

    # create table ---
    con = simplesqlite.SimpleSQLite("sample.sqlite", "w")
    loader = pytablereader.ExcelTableFileLoader(file_path)
    for table_data in loader.load():
        con.create_table_from_tabledata(table_data)

    # output ---
    for table_name in con.fetch_table_names():
        print("table: " + table_name)
        print(con.fetch_attr_names(table_name))
        result = con.select(select="*", table_name=table_name)
        for record in result.fetchall():
            print(record)
        print()
def test_normal_multi_file_same_table_same_structure(self):
    """Two inputs with identical structure are merged into one table."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_multi_file_2_1(),
            valid_json_multi_file_2_2(),
        ]

        result = runner.invoke(cmd, ["-o", db_path, "file"] + file_list)
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")
        expected_table_list = ["multij2", SourceInfo.get_table_name()]
        actual_table_list = con.fetch_table_name_list()
        print_test_result(expected=expected_table_list, actual=actual_table_list)
        assert set(actual_table_list) == set(expected_table_list)

        # Rows from both files are appended into the same table.
        expected_data_table = {
            "multij2": [
                (1, 4.0, "a"),
                (2, 2.1, "bb"),
                (3, 120.9, "ccc"),
                (1, 4.0, "a"),
                (2, 2.1, "bb"),
                (3, 120.9, "ccc"),
            ]
        }
        for table in con.fetch_table_name_list():
            if table == SourceInfo.get_table_name():
                continue

            expected_data = expected_data_table.get(table)
            actual_data = con.select("*", table_name=table).fetchall()
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data)

            print("--- table: {} ---".format(table))
            print_test_result(expected=expected_data, actual=actual_data)
            assert expected_data == actual_data, message
def __init__(self, database_source):
    """Wrap an already-open connection, or open a read-only one from a path.

    ``database_source`` may be either a live connection object (detected by
    a working ``is_connected()`` method) or a database file path.
    """
    try:
        reuse_connection = database_source.is_connected()
    except AttributeError:
        # Not a connection object: treat it as a file path.
        reuse_connection = False

    if reuse_connection:
        self._con = database_source
    else:
        self._con = simplesqlite.SimpleSQLite(database_source, "r")

    self._con_sql_master = None
    self._total_changes = None
    self._stream = None
def test_normal_complex_json(self):
    """A nested JSON input is split into one table per sub-structure."""
    db_path = "test_complex_json.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        json_path = valid_complex_json_file()
        result = runner.invoke(cmd, ["file", json_path, "-o", db_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")
        expected_tables = {
            'ratings',
            'screenshots_1',
            'screenshots_2',
            'screenshots_3',
            'screenshots_4',
            'screenshots_5',
            'tags',
            'versions',
            'root',
        }
        assert set(con.get_table_name_list()) == expected_tables
def test_smoke_database_connection(self, database_path, verbosity_level, output_format, expected_v):
    """Smoke-test the schema extractor against a live connection object."""
    con = simplesqlite.SimpleSQLite(database_path, "a")
    extractor = ss.SqliteSchemaExtractor(con, verbosity_level, output_format)

    # Basic sanity checks on the extractor's output.
    assert len(extractor.dumps()) > 10
    assert extractor.verbosity_level == expected_v
    assert extractor.get_database_schema() is not None
    for table_name in extractor.get_table_name_list():
        extractor.get_table_schema_text(table_name)

    # A table created after the extractor was built must still be visible.
    con.create_table_from_tabledata(
        ptr.TableData(
            "newtable",
            ["foo", "bar", "hoge"],
            [
                [1, 2.2, "aa"],
                [3, 4.4, "bb"],
            ],
        )
    )
    extractor.get_table_schema_text("newtable")
def main():
    """Load a Google Sheets workbook into SQLite and dump every table."""
    credentials_file = "sample-xxxxxxxxxxxx.json"

    # create table ---
    con = simplesqlite.SimpleSQLite("sample.sqlite", "w")
    loader = ptr.GoogleSheetsTableLoader(credentials_file)
    loader.title = "samplebook"
    for table_data in loader.load():
        con.create_table_from_tabledata(table_data)

    # output ---
    for table_name in con.fetch_table_names():
        print("table: " + table_name)
        print(con.fetch_attr_names(table_name))
        result = con.select(select="*", table_name=table_name)
        for record in result.fetchall():
            print(record)
        print()
def test_normal_format_ssv(self):
    """--format ssv parses space-separated values into a table."""
    db_path = "test_ssv.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        ssv_path = valid_ssv_file()
        result = runner.invoke(
            cmd, ["file", ssv_path, "-o", db_path, "--format", "ssv"])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")
        data = con.select_as_tabledata(table_name="ssv")
        expected = (
            "table_name=ssv, "
            "header_list=[USER, PID, %CPU, %MEM, VSZ, RSS, TTY, STAT, START, TIME, COMMAND], "
            "rows=5")
        assert str(data) == expected
def test_normal(self):
    """Valid inputs are converted; invalid files are skipped silently."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_single_file(),
            invalid_json_single_file(),
            valid_json_multi_file(),
            invalid_json_multi_file(),
            csv_file(),
            valid_excel_file(),
            invalid_excel_file(),
        ]

        result = runner.invoke(cmd, ["file"] + file_list + ["-o", db_path])
        assert result.exit_code == 0

        con = simplesqlite.SimpleSQLite(db_path, "r")

        # Only tables from the valid inputs must exist.
        expected_tables = [
            'json_a',
            'json_c',
            'json_b',
            'csv_a',
            'excel_sheet_a',
            'excel_sheet_c',
        ]
        assert set(con.get_table_name_list()) == set(expected_tables)

        expected_data = {
            "json_a": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "json_b": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "json_c": [(1, '4'), (2, 'NULL'), (3, '120.9')],
            "csv_a": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "excel_sheet_a": [(1.0, 1.1, 'a'), (2.0, 2.2, 'bb'), (3.0, 3.3, 'cc')],
            "excel_sheet_c": [(1.0, '1.1', 'a'), (2.0, '', 'bb'), (3.0, '3.3', '')],
        }
        for table in con.get_table_name_list():
            result = con.select("*", table_name=table)
            assert expected_data.get(table) == result.fetchall()
def test_normal_json(self):
    """Fetching a complex JSON URL creates one table per sub-structure."""
    url = "https://example.com/complex_json.json"
    # Mock the HTTP response so the test runs offline.
    responses.add(
        responses.GET,
        url,
        body=complex_json,
        content_type='text/plain; charset=utf-8',
        status=200)

    runner = CliRunner()
    with runner.isolated_filesystem():
        result = runner.invoke(cmd, ["url", url, "-o", self.db_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(self.db_path, "r")
        expected_tables = {
            'ratings',
            'screenshots_1',
            'screenshots_2',
            'screenshots_3',
            'screenshots_4',
            'screenshots_5',
            'tags',
            'versions',
            'root',
        }
        assert set(con.get_table_name_list()) == expected_tables
def make_database():
    """Build an example database with plain, indexed, and constrained tables.

    Returns the path of the created database file.
    """
    db_path = "example.sqlite"
    con = simplesqlite.SimpleSQLite(db_path, "w")

    con.create_table_from_data_matrix(
        table_name="sampletable0",
        attr_name_list=["attr_a", "attr_b"],
        data_matrix=[[1, 2], [3, 4]])

    con.create_table_from_data_matrix(
        table_name="sampletable1",
        attr_name_list=["foo", "bar", "hoge"],
        data_matrix=[
            [1, 2.2, "aa"],
            [3, 4.4, "bb"],
        ],
        index_attr_list=("foo", "hoge"))

    # Table exercising PRIMARY KEY / NOT NULL / UNIQUE constraints.
    con.create_table("constraints", [
        "primarykey_id INTEGER PRIMARY KEY",
        "notnull_value REAL NOT NULL",
        "unique_value INTEGER UNIQUE",
    ])

    return db_path
def database_path(tmpdir):
    """Fixture: build a temporary database with indexed and constrained tables.

    Returns the path of the created database file.
    """
    db_path = str(tmpdir.join("tmp.db"))
    con = simplesqlite.SimpleSQLite(db_path, "w")

    con.create_table_from_tabledata(
        ptr.TableData(
            "testdb0",
            ["attr_a", "attr_b"],
            [
                [1, 2],
                [3, 4],
            ]),
        index_attr_list=["attr_a"])

    con.create_table_from_tabledata(
        ptr.TableData(
            "testdb1",
            ["foo", "bar", "hoge"],
            [
                [1, 2.2, "aa"],
                [3, 4.4, "bb"],
            ]),
        index_attr_list=("foo", "hoge"))

    # Table exercising PRIMARY KEY / NOT NULL / UNIQUE constraints.
    con.create_table(
        "constraints",
        [
            "primarykey_id INTEGER PRIMARY KEY",
            "notnull_value REAL NOT NULL",
            "unique_value INTEGER UNIQUE",
        ])

    return db_path
#!/usr/bin/env python
# encoding: utf-8

"""Example: load a Google Sheets workbook into SQLite and dump every table."""

from __future__ import print_function

import pytablereader as ptr
import simplesqlite


credentials_file = "sample-xxxxxxxxxxxx.json"

# create table ---
con = simplesqlite.SimpleSQLite("sample.sqlite", "w")
loader = ptr.GoogleSheetsTableLoader(credentials_file)
loader.title = "samplebook"

for table_data in loader.load():
    con.create_table_from_tabledata(table_data)

# output ---
for table_name in con.fetch_table_names():
    print("table: " + table_name)
    print(con.fetch_attr_names(table_name))
    result = con.select(select="*", table_name=table_name)
    for record in result.fetchall():
        print(record)
    print()
def test_normal_multi(self):
    """Mixed valid/invalid inputs: valid ones convert, invalid ones skip."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_single_file(),
            invalid_json_single_file(),
            valid_json_multi_file(),
            invalid_json_multi_file(),
            valid_csv_file_1(),
            valid_csv_file_2(),
            invalid_csv_file(),
            valid_tsv_file(),
            invalid_tsv_file(),
            valid_excel_file(),
            invalid_excel_file_1(),
            invalid_excel_file_2(),
            valid_html_file(),
            invalid_html_file(),
            valid_ltsv_file(),
            invalid_ltsv_file(),
            valid_markdown_file(),
            not_supported_format_file(),
        ]

        result = runner.invoke(cmd, ["file"] + file_list + ["-o", db_path])
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")

        expected_tables = [
            'singlejson_json1',
            'multijson_table1',
            'multijson_table2',
            'csv_a',
            "rename_insert",
            'excel_sheet_a',
            'excel_sheet_c',
            'excel_sheet_d',
            "valid_ltsv_a",
            'testtitle_tablename',
            'testtitle_html2',
            'tsv_a',
            'valid_mdtable_markdown1',
        ]
        message = "expected-tables={}, actual-tables={}".format(
            expected_tables, con.get_table_name_list())
        assert set(con.get_table_name_list()) == set(expected_tables), message

        expected_data_table = {
            "singlejson_json1": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "multijson_table1": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "multijson_table2": [(1, 4.0), (2, None), (3, 120.9)],
            "csv_a": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "rename_insert": [
                (1, 55, 'D Sam', 31, 'Raven'),
                (2, 36, 'J Ifdgg', 30, 'Raven'),
                (3, 91, 'K Wedfb', 28, 'Raven'),
            ],
            "excel_sheet_a": [(1.0, 1.1, 'a'), (2.0, 2.2, 'bb'), (3.0, 3.3, 'cc')],
            "excel_sheet_c": [(1, 1.1, 'a'), (2, '', 'bb'), (3, 3.3, '')],
            "excel_sheet_d": [(1, 1.1, 'a'), (2, '', 'bb'), (3, 3.3, '')],
            "testtitle_tablename": [(1, 123.1, 'a'), (2, 2.2, 'bb'), (3, 3.3, 'ccc')],
            "valid_ltsv_a": [
                (1, 123.1, u'ltsv0', 1.0, u'1'),
                (2, 2.2, u'ltsv1', 2.2, u'2.2'),
                (3, 3.3, u'ltsv2', 3.0, u'cccc'),
            ],
            "testtitle_html2": [(1, 123.1), (2, 2.2), (3, 3.3)],
            "tsv_a": [(1, 4.0, 'tsv0'), (2, 2.1, 'tsv1'), (3, 120.9, 'tsv2')],
            "valid_mdtable_markdown1": [(1, 123.1, 'a'), (2, 2.2, 'bb'), (3, 3.3, 'ccc')],
        }
        for table in con.get_table_name_list():
            expected_data = expected_data_table.get(table)
            actual_data = con.select("*", table_name=table).fetchall()
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data)

            print("--- table: {} ---".format(table))
            print("[expected]\n{}\n".format(expected_data))
            print("[actual]\n{}\n".format(actual_data))
            assert expected_data == actual_data, message
def test_normal_multi(self):
    """Mixed valid/invalid inputs: valid ones convert, invalid ones skip."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_single_file(),
            invalid_json_single_file(),
            valid_json_multi_file(),
            invalid_json_multi_file(),
            valid_csv_file(),
            valid_csv_file2(),
            valid_excel_file(),
            invalid_excel_file(),
            invalid_excel_file2(),
            valid_html_file(),
            invalid_html_file(),
        ]

        result = runner.invoke(cmd, ["file"] + file_list + ["-o", db_path])
        assert result.exit_code == 0

        con = simplesqlite.SimpleSQLite(db_path, "r")

        expected_tables = [
            'singlejson_json1',
            'multijson_table1',
            'multijson_table2',
            'csv_a',
            "insert_csv",
            'excel_sheet_a',
            'excel_sheet_c',
            'excel_sheet_d',
            'htmltable_tablename',
            'htmltable_html2',
        ]
        message = "expected-tables={}, actual-tables={}".format(
            expected_tables, con.get_table_name_list())
        assert set(con.get_table_name_list()) == set(expected_tables), message

        expected_data_table = {
            "singlejson_json1": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "multijson_table1": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "multijson_table2": [(1, '4'), (2, 'NULL'), (3, '120.9')],
            "csv_a": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "insert_csv": [(1, 4.0, 'a'), (2, 2.1, 'bb'), (3, 120.9, 'ccc')],
            "excel_sheet_a": [(1.0, 1.1, 'a'), (2.0, 2.2, 'bb'), (3.0, 3.3, 'cc')],
            "excel_sheet_c": [(1.0, '1.1', 'a'), (2.0, '', 'bb'), (3.0, '3.3', '')],
            "excel_sheet_d": [(1.0, '1.1', 'a'), (2.0, '', 'bb'), (3.0, '3.3', '')],
            "htmltable_tablename": [(1, 123.1, 'a'), (2, 2.2, 'bb'), (3, 3.3, 'ccc')],
            "htmltable_html2": [(1, 123.1), (2, 2.2), (3, 3.3)],
        }
        for table in con.get_table_name_list():
            expected_data = expected_data_table.get(table)
            actual_data = con.select("*", table_name=table).fetchall()
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data)
            assert expected_data == actual_data, message
def test_normal_append(self):
    """--append accumulates rows; a plain rerun overwrites the table."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_multi_file_2_1(),
        ]
        table_name = "multij2"
        expected_table_list = [table_name]
        base_rows = [
            (1, 4.0, 'a'),
            (2, 2.1, 'bb'),
            (3, 120.9, 'ccc'),
        ]

        def verify_database(expected_data):
            # Re-open the database and check both table names and content.
            con = simplesqlite.SimpleSQLite(db_path, "r")
            actual_table_list = con.get_table_name_list()
            print_test_result(expected=expected_table_list, actual=actual_table_list)
            assert set(actual_table_list) == set(expected_table_list)

            actual_data = con.select("*", table_name=table_name).fetchall()
            print_test_result(expected=expected_data, actual=actual_data)
            assert expected_data == actual_data

        # first execution without --append option (new) ---
        result = runner.invoke(cmd, ["file"] + file_list + ["-o", db_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS
        verify_database(base_rows)

        # second execution with --append option ---
        result = runner.invoke(cmd, ["--append", "file"] + file_list + ["-o", db_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS
        verify_database(base_rows + base_rows)

        # third execution without --append option (overwrite) ---
        result = runner.invoke(cmd, ["file"] + file_list + ["-o", db_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS
        verify_database(base_rows)
def test_normal_multi_file_different_table(self):
    """Each valid input becomes its own table; invalid inputs are skipped."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_list = [
            valid_json_single_file(),
            invalid_json_single_file(),
            valid_json_multi_file_1(),
            valid_json_kv_file(),
            valid_csv_file_1_1(),
            valid_csv_file_2_1(),
            invalid_csv_file(),
            valid_tsv_file(),
            invalid_tsv_file(),
            valid_excel_file(),
            invalid_excel_file_1(),
            invalid_excel_file_2(),
            valid_html_file(),
            invalid_html_file(),
            valid_ltsv_file(),
            invalid_ltsv_file(),
            valid_markdown_file(),
            not_supported_format_file(),
        ]

        result = runner.invoke(cmd, ["-o", db_path, "file"] + file_list)
        assert result.exit_code == ExitCode.SUCCESS

        con = simplesqlite.SimpleSQLite(db_path, "r")

        expected_table_list = [
            "singlejson",
            "multij1",
            "multij2",
            "valid_kv",
            "csv_a",
            "rename_insert",
            "excel_sheet_a",
            "excel_sheet_c",
            "excel_sheet_d",
            "valid_ltsv_a",
            "testtitle_tablename",
            "testtitle_html2",
            "tsv_a",
            "valid_mdtable_markdown1",
            SourceInfo.get_table_name(),
        ]
        actual_table_list = con.fetch_table_name_list()
        print_test_result(expected=expected_table_list, actual=actual_table_list)
        assert set(actual_table_list) == set(expected_table_list)

        expected_data_table = {
            "singlejson": [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")],
            "multij1": [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")],
            "multij2": [(1, 4.0), (2, None), (3, 120.9)],
            "valid_kv": [("json_b", "hoge"), ("json_c", "bar")],
            "csv_a": [(1, 4.0, "a"), (2, 2.1, "bb"), (3, 120.9, "ccc")],
            "rename_insert": [
                (1, 55, "D Sam", 31, "Raven"),
                (2, 36, "J Ifdgg", 30, "Raven"),
                (3, 91, "K Wedfb", 28, "Raven"),
            ],
            "excel_sheet_a": [(1.0, 1.1, "a"), (2.0, 2.2, "bb"), (3.0, 3.3, "cc")],
            "excel_sheet_c": [(1, 1.1, "a"), (2, "", "bb"), (3, 3.3, "")],
            "excel_sheet_d": [(1, 1.1, "a"), (2, "", "bb"), (3, 3.3, "")],
            "testtitle_tablename": [(1, 123.1, "a"), (2, 2.2, "bb"), (3, 3.3, "ccc")],
            "valid_ltsv_a": [
                (1, 123.1, u'"ltsv0"', 1.0, u'"1"'),
                (2, 2.2, u'"ltsv1"', 2.2, u'"2.2"'),
                (3, 3.3, u'"ltsv2"', 3.0, u'"cccc"'),
            ],
            "testtitle_html2": [(1, 123.1), (2, 2.2), (3, 3.3)],
            "tsv_a": [(1, 4.0, "tsv0"), (2, 2.1, "tsv1"), (3, 120.9, "tsv2")],
            "valid_mdtable_markdown1": [(1, 123.1, "a"), (2, 2.2, "bb"), (3, 3.3, "ccc")],
        }
        for table in con.fetch_table_name_list():
            if table == SourceInfo.get_table_name():
                continue

            expected_data = expected_data_table.get(table)
            actual_data = con.select("*", table_name=table).fetchall()
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data)

            print("--- table: {} ---".format(table))
            print_test_result(expected=expected_data, actual=actual_data)
            # Row order is not guaranteed across loaders; compare sorted.
            assert sorted(expected_data) == sorted(actual_data), message