def test_normal_no_type_inference(self):
    """--no-type-inference must store every CSV value as a string."""
    runner = CliRunner()
    basename = "no_type_inference"
    file_path = "{}.csv".format(basename)
    db_path = "{}.sqlite".format(basename)

    with runner.isolated_filesystem():
        with open(file_path, "w") as f:
            f.write(
                dedent(
                    """\
                    "a","b"
                    11,"xyz"
                    22,"abc"
                    """
                )
            )
            f.flush()

        result = runner.invoke(
            cmd, ["--no-type-inference", "-o", db_path, "file", file_path]
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        tbldata = con.select_as_tabledata(basename)

        assert tbldata.headers == ["a", "b"]
        # numeric-looking values stay str because inference is disabled
        assert tbldata.rows == [("11", "xyz"), ("22", "abc")]
def test_normal_type_hint_header(self):
    """Headers such as "b integer" should drive per-column type conversion."""
    runner = CliRunner()
    basename = "type_hint_header"
    file_path = "{}.csv".format(basename)
    db_path = "{}.sqlite".format(basename)

    with runner.isolated_filesystem():
        with open(file_path, "w") as f:
            f.write(
                dedent(
                    """\
                    "a text","b integer","c real"
                    1,"1","1.1"
                    2,"2","1.2"
                    3,"3","1.3"
                    """
                )
            )
            f.flush()

        result = runner.invoke(
            cmd, ["--type-hint-header", "-o", db_path, "file", file_path]
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        tbldata = con.select_as_tabledata(basename)

        assert tbldata.headers == ["a text", "b integer", "c real"]
        # "text" column keeps str; "integer"/"real" columns are converted
        assert tbldata.rows == [("1", 1, 1.1), ("2", 2, 1.2), ("3", 3, 1.3)]
def __init__(
    self,
    device,
    ip_version,
    logger,
    tc_command_output,
    export_path=None,
    is_parse_filter_id=True,
    dump_db_path=None,
):
    """Set up the working database, attach models, and probe the devices."""
    # in-memory DB unless the caller asked for an on-disk dump file
    if dump_db_path is None:
        self.__con = connect_memdb()
    else:
        self.__con = SimpleSQLite(dump_db_path, "w")

    Filter.attach(self.__con)
    Filter.create()
    Qdisc.attach(self.__con)
    Qdisc.create()

    self.__device = device
    self.__ip_version = ip_version
    self.__tc_command_output = tc_command_output
    self.__logger = logger
    self.__export_path = export_path

    self.clear()

    self.__ifb_device = self.__get_ifb_from_device()
    self.__iptables_ctrl = IptablesMangleController(True, ip_version)

    self.is_parse_filter_id = is_parse_filter_id
def test_normal_json(self):
    """A complex JSON URL should be converted into one table per nested array."""
    url = "https://example.com/complex_json.json"
    responses.add(
        responses.GET,
        url,
        body=complex_json,
        content_type="text/plain; charset=utf-8",
        status=200,
    )
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(cmd, ["-o", self.db_path, "url", url])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(self.db_path, "r")
        # set literal instead of set([...]): same contents, idiomatic form
        expected = {
            "ratings",
            "screenshots_4",
            "screenshots_3",
            "screenshots_5",
            "screenshots_1",
            "screenshots_2",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
        assert set(con.fetch_table_names()) == expected
def test_normal(self, tmpdir, value, expected):
    """A TableData stored via create_table_from_tabledata reads back as the expected dict."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")
    con.create_table_from_tabledata(value)
    assert con.select_as_dict(table_name=value.table_name) == expected
def test_normal_json(self):
    """JSON from stdin converts to tables; the same payload fails as CSV."""
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(
            cmd, ["-o", self.db_path, "stdin", "json"], input=complex_json
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(self.db_path, "r")
        expected = {
            "ratings",
            "screenshots_4",
            "screenshots_3",
            "screenshots_5",
            "screenshots_1",
            "screenshots_2",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
        assert set(con.fetch_table_names()) == expected

        # feeding the JSON payload through the CSV path must fail conversion
        result = runner.invoke(
            cmd, ["-o", self.db_path, "stdin", "csv"], input=complex_json
        )
        assert result.exit_code == ExitCode.FAILED_CONVERT
def test_normal_type_hint_header(self):
    """Type-hint headers in a CSV file control the stored column types."""
    runner = CliRunner()
    basename = "type_hint_header"
    file_path = "{}.csv".format(basename)
    db_path = "{}.sqlite".format(basename)
    csv_text = dedent(
        """\
        "a text","b integer","c real"
        1,"1","1.1"
        2,"2","1.2"
        3,"3","1.3"
        """
    )

    with runner.isolated_filesystem():
        with open(file_path, "w") as f:
            f.write(csv_text)
            f.flush()

        result = runner.invoke(
            cmd, ["--type-hint-header", "-o", db_path, "file", file_path]
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        tbldata = con.select_as_tabledata(basename)

        assert tbldata.headers == ["a text", "b integer", "c real"]
        assert tbldata.rows == [("1", 1, 1.1), ("2", 2, 1.2), ("3", 3, 1.3)]
def test_normal_no_type_inference(self):
    """With --no-type-inference all values must remain strings."""
    runner = CliRunner()
    basename = "no_type_inference"
    file_path = "{}.csv".format(basename)
    db_path = "{}.sqlite".format(basename)
    csv_text = dedent(
        """\
        "a","b"
        11,"xyz"
        22,"abc"
        """
    )

    with runner.isolated_filesystem():
        with open(file_path, "w") as f:
            f.write(csv_text)
            f.flush()

        result = runner.invoke(
            cmd, ["--no-type-inference", "-o", db_path, "file", file_path]
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        tbldata = con.select_as_tabledata(basename)

        assert tbldata.headers == ["a", "b"]
        assert tbldata.rows == [("11", "xyz"), ("22", "abc")]
def test_normal_empty_header(self, tmpdir, table_name, attr_names, data_matrix, expected):
    """Attribute names stored for the parametrized header must match `expected`."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")
    con.create_table_from_data_matrix(table_name, attr_names, data_matrix)
    assert con.fetch_attr_names(table_name) == expected
def test_normal_complex_json(self):
    """A nested JSON file should expand into one table per embedded array."""
    db_path = "test_complex_json.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_path = valid_complex_json_file()
        result = runner.invoke(cmd, ["-o", db_path, "file", file_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        # set literal instead of set([...]): same contents, idiomatic form
        expected = {
            "ratings",
            "screenshots_4",
            "screenshots_3",
            "screenshots_5",
            "screenshots_1",
            "screenshots_2",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
        assert set(con.fetch_table_names()) == expected
def test_normal_type_hint_header(self):
    """CSV fetched from a URL with --type-hint-header converts columns per hint."""
    url = "https://example.com/type_hint_header.csv"
    responses.add(
        responses.GET,
        url,
        body=dedent(
            """\
            "a text","b integer","c real"
            1,"1","1.1"
            2,"2","1.2"
            3,"3","1.3"
            """
        ),
        content_type="text/plain; charset=utf-8",
        status=200,
    )
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(
            cmd, ["--type-hint-header", "-o", self.db_path, "url", url]
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(self.db_path, "r")
        # set literal instead of set([...]): idiomatic single-element set
        table_names = list(
            set(con.fetch_table_names()) - {SourceInfo.get_table_name()}
        )

        # table name may change with test execution order
        tbldata = con.select_as_tabledata(table_names[0])
        assert tbldata.headers == ["a text", "b integer", "c real"]
        assert tbldata.rows == [("1", 1, 1.1), ("2", 2, 1.2), ("3", 3, 1.3)]
def test_normal_json(self):
    """Complex JSON served over HTTP converts into the expected set of tables."""
    url = "https://example.com/complex_json.json"
    responses.add(
        responses.GET,
        url,
        body=complex_json,
        content_type="text/plain; charset=utf-8",
        status=200,
    )
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(cmd, ["-o", self.db_path, "url", url])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(self.db_path, "r")
        # set literal instead of set([...]): same contents, idiomatic form
        expected = {
            "ratings",
            "screenshots_4",
            "screenshots_3",
            "screenshots_5",
            "screenshots_1",
            "screenshots_2",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
        assert set(con.fetch_table_names()) == expected
def test_smoke(self, tmpdir, filename):
    """Smoke test: at least one table from the data file loads into SQLite."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    test_data_file_path = os.path.join(os.path.dirname(__file__), "data", filename)
    loader = ptr.TableFileLoader(test_data_file_path)

    success_count = 0
    for tabledata in loader.load():
        if tabledata.is_empty():
            continue

        print(ptw.dump_tabledata(tabledata))

        try:
            con.create_table_from_tabledata(
                ptr.SQLiteTableDataSanitizer(tabledata).sanitize()
            )
            success_count += 1
        except ValueError as e:
            # some tables in the fixture may be unsanitizable; count the rest
            print(e)

    con.commit()

    assert success_count > 0
def test_normal_type_hint_header(self):
    """URL CSV with type-hint headers should yield typed columns."""
    url = "https://example.com/type_hint_header.csv"
    responses.add(
        responses.GET,
        url,
        body=dedent(
            """\
            "a text","b integer","c real"
            1,"1","1.1"
            2,"2","1.2"
            3,"3","1.3"
            """
        ),
        content_type="text/plain; charset=utf-8",
        status=200,
    )
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(cmd, ["--type-hint-header", "-o", self.db_path, "url", url])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(self.db_path, "r")
        # set literal instead of set([...]): idiomatic single-element set
        table_names = list(set(con.fetch_table_names()) - {SourceInfo.get_table_name()})

        # table name may change with test execution order
        tbldata = con.select_as_tabledata(table_names[0])
        assert tbldata.headers == ["a text", "b integer", "c real"]
        assert tbldata.rows == [("1", 1, 1.1), ("2", 2, 1.2), ("3", 3, 1.3)]
def con(tmpdir):
    """Fixture: writable connection holding one two-column, two-row table."""
    db_file = tmpdir.join("tmp.db")
    connection = SimpleSQLite(str(db_file), "w")
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME, ["attr_a", "attr_b"], [[1, 2], [3, 4]]
    )
    return connection
def test_normal_complex_json(self):
    """Converting a complex JSON file creates one table per nested array."""
    db_path = "test_complex_json.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_path = valid_complex_json_file()
        result = runner.invoke(cmd, ["-o", db_path, "file", file_path])
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        actual = set(con.fetch_table_names())
        assert actual == {
            "ratings",
            "screenshots_4",
            "screenshots_3",
            "screenshots_5",
            "screenshots_1",
            "screenshots_2",
            "tags",
            "versions",
            "root",
            SourceInfo.get_table_name(),
        }
def test_normal_type_hint_header(self):
    """CSV from stdin with --type-hint-header converts per-column types."""
    csv_text = dedent(
        """\
        "a text","b integer","c real"
        1,"1","1.1"
        2,"2","1.2"
        3,"3","1.3"
        """
    )
    runner = CliRunner()

    with runner.isolated_filesystem():
        result = runner.invoke(
            cmd,
            ["--type-hint-header", "-o", self.db_path, "stdin", "csv"],
            input=csv_text,
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(self.db_path, "r")
        table_names = list(
            set(con.fetch_table_names()) - {SourceInfo.get_table_name()}
        )

        # table name may change with test execution order
        tbldata = con.select_as_tabledata(table_names[0])
        assert tbldata.headers == ["a text", "b integer", "c real"]
        assert tbldata.rows == [("1", 1, 1.1), ("2", 2, 1.2), ("3", 3, 1.3)]
def con_profile(tmpdir):
    """Fixture: profiling-enabled connection with one populated table."""
    db_file = tmpdir.join("tmp_profile.db")
    connection = SimpleSQLite(str(db_file), "w", profile=True)
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME, ["attr_a", "attr_b"], [[1, 2], [3, 4]]
    )
    connection.commit()
    return connection
def test_exception_empty_header(
    self, tmpdir, table_name, attr_name_list, data_matrix, index_attr_list, expected
):
    """Invalid header input must raise the parametrized exception type."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    with pytest.raises(expected):
        con.create_table_from_data_matrix(
            table_name, attr_name_list, data_matrix, index_attr_list
        )
def main():
    """Write a pandas DataFrame into "pandas_df.sqlite" as table "pandas_df"."""
    # open explicitly in "w" mode, consistent with the other samples;
    # relying on the default mode would append duplicate rows on rerun
    con = SimpleSQLite("pandas_df.sqlite", "w")
    con.create_table_from_dataframe(
        pandas.DataFrame(
            [[0, 0.1, "a"], [1, 1.1, "bb"], [2, 2.2, "ccc"]],
            columns=["id", "value", "name"],
        ),
        table_name="pandas_df",
    )
def test_normal_con(self, mode):
    """SimpleSQLite wraps both raw sqlite3 connections and other SimpleSQLite objects."""
    # a raw sqlite3 connection carries no backing file path
    con = SimpleSQLite(connect_memdb().connection, mode)
    assert con.database_path is None
    assert con.connection

    # wrapping another SimpleSQLite keeps its database path
    con = SimpleSQLite(connect_memdb(), mode)
    assert con.database_path
    assert con.connection
def con_mix(tmpdir):
    """Fixture: connection holding a table with int/float/str columns."""
    db_file = tmpdir.join("tmp_mixed_data.db")
    connection = SimpleSQLite(str(db_file), "w")
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME,
        ["attr_i", "attr_f", "attr_s"],
        [[1, 2.2, "aa"], [3, 4.4, "bb"]],
    )
    return connection
def test_normal(self, con, tmpdir):
    """dump() must produce a database whose contents equal the source."""
    dump_path = str(tmpdir.join("dump.db"))
    con.dump(dump_path)
    dumped = SimpleSQLite(dump_path, "r")

    assert con.fetch_num_records(TEST_TABLE_NAME) == dumped.fetch_num_records(
        TEST_TABLE_NAME
    )
    assert con.select_as_tabledata(TEST_TABLE_NAME) == dumped.select_as_tabledata(
        TEST_TABLE_NAME
    )
def con_mix(tmpdir):
    """Fixture: mixed-type (int/float/str) table in a writable database."""
    path = str(tmpdir.join("tmp_mixed_data.db"))
    connection = SimpleSQLite(path, "w")
    headers = ["attr_i", "attr_f", "attr_s"]
    rows = [[1, 2.2, "aa"], [3, 4.4, "bb"]]
    connection.create_table_from_data_matrix(TEST_TABLE_NAME, headers, rows)
    return connection
def con_a1():
    """Fixture: table A filled with rows [[11, 12], [13, 14]]."""
    connection = SimpleSQLite("tmp_a1.sqlite", "w")
    # positional arguments: (table_name, attribute names, data matrix)
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME_A, ["attr_a", "attr_b"], [[11, 12], [13, 14]]
    )
    return connection
def test_normal(self, con, tmpdir):
    """A dumped database must contain the same records as the original."""
    dump_path = str(tmpdir.join("dump.db"))
    con.dump(dump_path)
    con_dump = SimpleSQLite(dump_path, "r")

    assert con_dump.fetch_num_records(TEST_TABLE_NAME) == con.fetch_num_records(
        TEST_TABLE_NAME
    )
    assert con_dump.select_as_tabledata(TEST_TABLE_NAME) == con.select_as_tabledata(
        TEST_TABLE_NAME
    )
def con_b0():
    """Fixture: table B filled with rows [[101, 102], [103, 104]]."""
    connection = SimpleSQLite("tmp_b0.sqlite", "w")
    # positional arguments: (table_name, attribute names, data matrix)
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME_B, ["ba", "bb"], [[101, 102], [103, 104]]
    )
    return connection
def main():
    """Create a mixed-type sample table and print it as a pandas DataFrame."""
    con = SimpleSQLite("sample.sqlite", "w", profile=True)
    headers = ["a", "b", "c", "d", "e"]
    rows = [
        [1, 1.1, "aaa", 1, 1],
        [2, 2.2, "bbb", 2.2, 2.2],
        [3, 3.3, "ccc", 3, "ccc"],
    ]
    con.create_table_from_data_matrix("sample_table", headers, rows)
    print(con.select_as_dataframe(table_name="sample_table"))
def con_a0():
    """Fixture: table A filled with rows [[1, 2], [3, 4]]."""
    connection = SimpleSQLite("tmp_a0.sqlite", "w")
    # positional arguments: (table_name, attribute names, data matrix)
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME_A, ["attr_a", "attr_b"], [[1, 2], [3, 4]]
    )
    return connection
def open(self, file_path):
    """
    Open a SQLite database file.

    :param str file_path: SQLite database file path to open.
    """
    from simplesqlite import SimpleSQLite

    # release any previously opened stream before switching files
    self.close()

    self.stream = SimpleSQLite(file_path, "w")
def test_normal_primary_key(self, tmpdir, table_name, attr_names, data_matrix, expected):
    """The attribute passed as primary_key must be recorded in the schema."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    # NOTE(review): the parametrized table_name is deliberately overridden here
    table_name = TEST_TABLE_NAME
    con.create_table_from_data_matrix(
        table_name, attr_names, data_matrix, primary_key=attr_names[0]
    )

    assert con.schema_extractor.fetch_table_schema(table_name).primary_key == "AA"
def test_normal_number_header(self, tmpdir):
    """Integer attribute names are stored as their string representations."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    con.create_table_from_data_matrix("numbers", [1, 123456789], [[1, 2], [1, 2]])

    assert con.fetch_attr_names("numbers") == ["1", "123456789"]
def main():
    """Create an indexed sample table and print per-query profiling results."""
    con = SimpleSQLite("sample.sqlite", "w", profile=True)
    rows = [
        [1, 1.1, "aaa", 1, 1],
        [2, 2.2, "bbb", 2.2, 2.2],
        [3, 3.3, "ccc", 3, "ccc"],
    ]
    con.create_table_from_data_matrix(
        "sample_table", ["a", "b", "c", "d", "e"], rows, index_attrs=["a"]
    )

    for profile in con.get_profile():
        print(profile)
def test_normal_symbol_header(self, tmpdir):
    """Attribute names containing symbol characters are preserved as-is."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    table_name = "symbols"
    attr_names = ["a!bc#d$e%f&gh(i)j", "k@l[m]n{o}p;q:r_s.t/u"]
    # NOTE(review): these dict-row keys do not match attr_names; only the
    # header handling is asserted here -- confirm the intent upstream
    data_matrix = [
        {"ABCD>8.5": "aaa", "ABCD<8.5": 0},
        {"ABCD>8.5": "bbb", "ABCD<8.5": 9},
    ]

    con.create_table_from_data_matrix(table_name, attr_names, data_matrix)

    assert con.fetch_attr_names(table_name) == [
        "a!bc#d$e%f&gh(i)j",
        "k@l[m]n{o}p;q:r_s.t/u",
    ]
def test_except_add_primary_key_column(self, tmpdir):
    """Naming an existing attribute as the generated primary key must raise ValueError."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    with pytest.raises(ValueError):
        con.create_table_from_data_matrix(
            table_name="specify existing attr as a primary key",
            attr_names=["AA", "BB"],
            data_matrix=[["a", 11], ["bb", 12]],
            primary_key="AA",
            add_primary_key_column=True,
        )
def con(tmpdir):
    """Fixture: writable connection with a pre-populated two-column table."""
    path = str(tmpdir.join("tmp.db"))
    connection = SimpleSQLite(path, "w")
    # positional arguments: (table_name, attribute names, data matrix)
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME, ["attr_a", "attr_b"], [[1, 2], [3, 4]]
    )
    return connection
def test_normal_primary_key(self, tmpdir, table_name, attr_names, data_matrix, expected):
    """The schema must report the first attribute ("AA") as the primary key."""
    con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")

    table_name = TEST_TABLE_NAME  # parametrized name is deliberately replaced
    con.create_table_from_data_matrix(
        table_name, attr_names, data_matrix, primary_key=attr_names[0]
    )

    schema = con.schema_extractor.fetch_table_schema(table_name)
    assert schema.primary_key == "AA"
def __init__(self, g, event_queue, data_handle):
    """Wire up shared state, the local database, and the bitmex client."""
    self.g = g
    self.event_queue = event_queue
    self.data_handle = data_handle

    self.dbname = 'bitmex'
    self.create_table(self.dbname)
    self.data = None
    # mode 'a' -- presumably keeps existing rows across restarts; confirm
    # against the SimpleSQLite documentation
    self.con = SimpleSQLite(self.dbname, 'a')

    self.ex = cxt.bitmex({
        'apiKey': self.g.apiKey,
        'secret': self.g.apiSecret,
        'timeout': 60000,
    })
def test_normal_type_hints(
    self, tmpdir, table_name, attr_names, data_matrix, type_hints, expected
):
    """type_hints must coerce the stored values column by column."""
    con = SimpleSQLite(str(tmpdir.join("tmp_type_hints.db")), "w")

    con.create_table_from_data_matrix(
        table_name, attr_names, data_matrix, type_hints=type_hints
    )

    assert con.select_as_dict(table_name) == [
        OrderedDict([("int", 1), ("text", "001")]),
        OrderedDict([("int", 2), ("text", "010")]),
    ]
def main():
    """Demonstrate verify_attr_existence() success and failure paths."""
    table_name = "sample_table"
    con = SimpleSQLite("sample.sqlite", "w")
    con.create_table_from_data_matrix(
        table_name, ["attr_a", "attr_b"], [[1, "a"], [2, "b"]]
    )

    # existing attribute: passes silently
    con.verify_attr_existence(table_name, "attr_a")

    # missing attribute raises AttributeNotFoundError
    try:
        con.verify_attr_existence(table_name, "not_existing")
    except AttributeNotFoundError as e:
        print(e)

    # missing table raises DatabaseError
    try:
        con.verify_attr_existence("not_existing", "attr_a")
    except DatabaseError as e:
        print(e)
def test_normal_add_primary_key_column(self, tmpdir):
    """add_primary_key_column should prepend an auto-numbered key column."""
    con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")

    # default generated key column name is "id"
    con.create_table_from_data_matrix(
        table_name="table1",
        attr_names=["AA", "BB"],
        data_matrix=[["a", 11], ["bb", 12]],
        add_primary_key_column=True,
    )
    assert con.select_as_tabledata("table1") == TableData(
        table_name="table1",
        headers=["id", "AA", "BB"],
        rows=[[1, "a", 11], [2, "bb", 12]],
    )
    assert con.schema_extractor.fetch_table_schema("table1").primary_key == "id"

    # an explicit primary_key name is used for the generated column
    con.create_table_from_data_matrix(
        table_name="table2",
        attr_names=["AA", "BB"],
        data_matrix=[["a", 11], ["bb", 12]],
        primary_key="pkey",
        add_primary_key_column=True,
    )
    assert con.select_as_tabledata("table2") == TableData(
        table_name="table2",
        headers=["pkey", "AA", "BB"],
        rows=[[1, "a", 11], [2, "bb", 12]],
    )
    assert con.schema_extractor.fetch_table_schema("table2").primary_key == "pkey"
def test_normal_add_primary_key_column(self, tmpdir):
    """Generated key column: "id" by default, or the primary_key= name."""
    db_file = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_file), "w")

    # (table name, extra kwargs, expected generated key column)
    cases = [
        ("table1", {}, "id"),
        ("table2", {"primary_key": "pkey"}, "pkey"),
    ]
    for table, extra, key_name in cases:
        con.create_table_from_data_matrix(
            table_name=table,
            attr_names=["AA", "BB"],
            data_matrix=[["a", 11], ["bb", 12]],
            add_primary_key_column=True,
            **extra
        )
        assert con.select_as_tabledata(table) == TableData(
            table_name=table,
            headers=[key_name, "AA", "BB"],
            rows=[[1, "a", 11], [2, "bb", 12]],
        )
        assert con.schema_extractor.fetch_table_schema(table).primary_key == key_name
def test_normal_multi_file_same_table_same_structure(self):
    """Two JSON files with the same table and structure append into one table."""
    db_path = "test.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        files = [valid_json_multi_file_2_1(), valid_json_multi_file_2_2()]
        result = runner.invoke(cmd, ["-o", db_path, "file"] + files)
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        expected_tables = ["multij2", SourceInfo.get_table_name()]
        actual_tables = con.fetch_table_names()
        print_test_result(expected=expected_tables, actual=actual_tables)
        assert set(actual_tables) == set(expected_tables)

        # both inputs contribute the same three rows, so each appears twice
        expected_data_table = {
            "multij2": [
                (1, 4.0, "a"),
                (2, 2.1, "bb"),
                (3, 120.9, "ccc"),
                (1, 4.0, "a"),
                (2, 2.1, "bb"),
                (3, 120.9, "ccc"),
            ]
        }
        for table in con.fetch_table_names():
            if table == SourceInfo.get_table_name():
                continue

            expected_data = expected_data_table.get(table)
            actual_data = con.select("*", table_name=table).fetchall()
            message = "table={}, expected={}, actual={}".format(
                table, expected_data, actual_data
            )

            print("--- table: {} ---".format(table))
            print_test_result(expected=expected_data, actual=actual_data)
            assert expected_data == actual_data, message
def con_ro(tmpdir):
    """Fixture: read-only connection to a database populated beforehand."""
    path = str(tmpdir.join("tmp_readonly.db"))
    connection = SimpleSQLite(path, "w")
    connection.create_table_from_data_matrix(
        TEST_TABLE_NAME, ["attr_a", "attr_b"], [[1, 2], [3, 4]]
    )

    # reopen the same file in read-only mode
    connection.close()
    connection.connect(path, "r")
    return connection
def test_normal_format_ssv(self):
    """--format ssv parses space-separated values into table "ssv"."""
    db_path = "test_ssv.sqlite"
    runner = CliRunner()

    with runner.isolated_filesystem():
        file_path = valid_ssv_file()
        result = runner.invoke(
            cmd, ["-o", db_path, "file", file_path, "--format", "ssv"]
        )
        print_traceback(result)
        assert result.exit_code == ExitCode.SUCCESS

        con = SimpleSQLite(db_path, "r")
        actual = str(con.select_as_tabledata(table_name="ssv"))
        expected = (
            "table_name=ssv, "
            "headers=[USER, PID, %CPU, %MEM, VSZ, RSS, TTY, STAT, START, TIME, COMMAND], "
            "cols=11, rows=5"
        )
        assert actual == expected
def test_normal_file(
    self,
    tmpdir,
    json_text,
    filename,
    table_name,
    expected_table_name,
    expected_attr_names,
    expected_data_matrix,
):
    """create_table_from_json must load a JSON file into the expected table."""
    db_file = tmpdir.join("tmp.db")
    json_file = tmpdir.join(filename)
    with open(str(json_file), "w") as f:
        f.write(json_text)

    con = SimpleSQLite(str(db_file), "w")
    con.create_table_from_json(str(json_file), table_name)

    assert con.fetch_table_names() == [expected_table_name]
    assert con.fetch_attr_names(expected_table_name) == expected_attr_names

    rows = con.select(select="*", table_name=expected_table_name).fetchall()
    assert len(rows) == 3
    assert rows == expected_data_matrix
def test_normal(self, tmpdir, value, expected):
    """A TableData round-trips through create_table_from_tabledata/select."""
    con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")
    con.create_table_from_tabledata(value)

    assert con.fetch_table_names() == [value.table_name]
    assert con.fetch_attr_names(value.table_name) == value.headers

    fetched = con.select(select="*", table_name=value.table_name).fetchall()
    assert fetched == expected

    actual = con.select_as_tabledata(
        columns=value.headers, table_name=value.table_name
    )
    assert actual.equals(value)
def test_normal(self, tmpdir, value, type_hints, expected):
    """select_as_tabledata with type_hints converts the fetched values."""
    con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")
    con.create_table_from_tabledata(value)

    assert con.fetch_table_names() == [value.table_name]
    assert con.fetch_attr_names(value.table_name) == value.headers

    actual = con.select_as_tabledata(
        columns=value.headers, table_name=value.table_name, type_hints=type_hints
    )
    assert actual.value_matrix == expected
def test_normal(self, tmpdir, attr_names, data_matrix, index_attrs, expected_attr):
    """Creating indexes must not change the stored data or attribute types."""
    con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")
    table_name = TEST_TABLE_NAME
    con.create_table_from_data_matrix(
        table_name, attr_names, data_matrix, primary_key=None, index_attrs=index_attrs
    )

    # check data ---
    rows = con.select(select=AttrList(attr_names), table_name=table_name).fetchall()
    assert len(rows) == 3

    print_test_result(expected=expected_attr, actual=con.fetch_attr_type(table_name))
    assert con.fetch_attr_type(table_name) == expected_attr
def test_normal_text(
    self,
    tmpdir,
    csv_text,
    table_name,
    attr_names,
    expected_table_name,
    expected_attr_names,
    expected_data_matrix,
):
    """create_table_from_csv must load CSV text into the expected table."""
    con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")
    con.create_table_from_csv(csv_text, table_name, attr_names)

    assert con.fetch_table_names() == [expected_table_name]
    assert con.fetch_attr_names(expected_table_name) == expected_attr_names

    rows = con.select(select="*", table_name=expected_table_name).fetchall()
    assert len(rows) == 3
    assert rows == expected_data_matrix
#!/usr/bin/env python
# encoding: utf-8

"""Sample: create a table, then print its attributes, records, and types."""

import json

from simplesqlite import SimpleSQLite
import six


table_name = "sample_table"
con = SimpleSQLite("sample.sqlite", "w")

# create table -----
# NOTE(review): create_table_with_data / get_attribute_name_list /
# get_attr_type look like an older simplesqlite API than the sibling
# samples use -- confirm the pinned library version before renaming
data_matrix = [
    [1, 1.1, "aaa", 1, 1],
    [2, 2.2, "bbb", 2.2, 2.2],
    [3, 3.3, "ccc", 3, "ccc"],
]
con.create_table_with_data(
    table_name,
    attribute_name_list=["attr_a", "attr_b", "attr_c", "attr_d", "attr_e"],
    data_matrix=data_matrix)

# display values in the table -----
six.print_(con.get_attribute_name_list(table_name))
result = con.select(select="*", table_name=table_name)
for record in result.fetchall():
    six.print_(record)

# display data type for each column in the table -----
six.print_(json.dumps(con.get_attr_type(table_name), indent=4))
# create sample data file ---
with open(file_path, "w") as f:
    f.write(
        """{
        "table_a" : [
            {"attr_b": 4, "attr_c": "a", "attr_a": 1},
            {"attr_b": 2.1, "attr_c": "bb", "attr_a": 2},
            {"attr_b": 120.9, "attr_c": "ccc", "attr_a": 3}
        ],
        "table_b" : [
            {"a": 1, "b": 4},
            {"a": 2 },
            {"a": 3, "b": 120.9}
        ]
        }"""
    )

# create one table per top-level JSON key ---
con = SimpleSQLite("sample.sqlite", "w")
con.create_table_from_json(file_path)

# output every table's attributes and records ---
for table_name in con.fetch_table_names():
    print("table: " + table_name)
    print(con.fetch_attr_names(table_name))
    result = con.select(select="*", table_name=table_name)
    for record in result.fetchall():
        print(record)
    print()
#!/usr/bin/env python # encoding: utf-8 from __future__ import print_function import json from simplesqlite import SimpleSQLite con = SimpleSQLite("sample.sqlite", "w") data_matrix = [ [1, 1.1, "aaa", 1, 1], [2, 2.2, "bbb", 2.2, 2.2], [3, 3.3, "ccc", 3, "ccc"], ] con.create_table_from_data_matrix( table_name="sample_table", attr_name_list=["a", "b", "c", "d", "e"], data_matrix=data_matrix, index_attr_list=["a"]) print(json.dumps(con.get_sqlite_master(), indent=4))
#!/usr/bin/env python
# encoding: utf-8

"""Sample: update records matched by a WHERE condition."""

from __future__ import print_function

from simplesqlite import SimpleSQLite
from simplesqlite.query import Where


table_name = "sample_table"
con = SimpleSQLite("sample.sqlite", "w")
con.create_table_from_data_matrix(
    table_name, ["key", "value"], [[1, "aaa"], [2, "bbb"]]
)

print("---- before update ----")
for record in con.select(select="*", table_name=table_name).fetchall():
    print(record)
print()

# set value='ccc' on the row whose key equals 1
con.update(table_name, set_query="value = 'ccc'", where=Where(key="key", value=1))

print("---- after update ----")
for record in con.select(select="*", table_name=table_name).fetchall():
    print(record)
#!/usr/bin/env python
# encoding: utf-8

"""Sample: check_connection() before and after closing the connection."""

from simplesqlite import SimpleSQLite, NullDatabaseConnectionError
import six


con = SimpleSQLite("sample.sqlite", "w")

six.print_("---- connected to a database ----")
con.check_connection()

six.print_("---- disconnected from a database ----")
con.close()
# a closed connection must raise NullDatabaseConnectionError
try:
    con.check_connection()
except NullDatabaseConnectionError as e:
    six.print_(e)