def test_normal(self, table_name, header_list, record_list, expected):
    """TableData built from the given inputs must equal ``expected``.

    Bug fix: the debug label was misspelled ("actusl" -> "actual").
    """
    tabledata = TableData(table_name, header_list, record_list)

    # Dump both sides so a mismatch is easy to diagnose from the test log.
    print("expected: {}".format(ptw.dump_tabledata(expected)))
    print("actual: {}".format(ptw.dump_tabledata(tabledata)))

    assert tabledata == expected
def test_normal_csv(self, url, format_name):
    """TableUrlLoader fetches CSV over a mocked HTTP endpoint and yields the expected table."""
    responses.add(
        responses.GET,
        url,
        body='''"attr_a","attr_b","attr_c"
1,4,"a"
2,2.1,"bb"
3,120.9,"ccc"''',
        content_type='text/plain; charset=utf-8',
        status=200,
    )

    expected_list = [
        ptr.TableData(
            "csv1",
            ["attr_a", "attr_b", "attr_c"],
            [[1, 4, "a"], [2, "2.1", "bb"], [3, "120.9", "ccc"]],
        )
    ]

    loader = ptr.TableUrlLoader(url, format_name)
    assert loader.format_name == "csv"

    for actual, expected in zip(loader.load(), expected_list):
        print("[expected]\n{}".format(ptw.dump_tabledata(expected)))
        print("[actual]\n{}".format(ptw.dump_tabledata(actual)))
        assert actual == expected
def test_normal_csv(self, tmpdir, file_path, format_name):
    """TableFileLoader reads a CSV file from disk and yields the expected table."""
    # Strip symbols from the given path to derive a usable table/file name.
    filename = pv.replace_symbol(file_path, "")
    p_file_path = Path(six.text_type(tmpdir.join(filename + Path(file_path).ext)))
    p_file_path.parent.makedirs_p()

    with open(p_file_path, "w") as f:
        f.write(
            dedent(
                """\
                "attr_a","attr_b","attr_c"
                1,4,"a"
                2,2.1,"bb"
                3,120.9,"ccc"
                """
            )
        )

    expected_list = [
        TableData(
            filename,
            ["attr_a", "attr_b", "attr_c"],
            [[1, 4, "a"], [2, "2.1", "bb"], [3, "120.9", "ccc"]],
        )
    ]

    loader = ptr.TableFileLoader(p_file_path, format_name=format_name)
    assert loader.format_name == "csv"

    for actual, expected in zip(loader.load(), expected_list):
        print(dump_tabledata(expected))
        print(dump_tabledata(actual))
        assert actual.equals(expected)
def test_normal_(self, table_name, headers, dup_col_handler, expected):
    """normalize() resolves duplicated column names per ``dup_col_handler``."""
    sanitizer = SQLiteTableDataSanitizer(
        TableData(table_name, headers, []), dup_col_handler=dup_col_handler
    )
    normalized = sanitizer.normalize()

    print_test_result(expected=dump_tabledata(expected), actual=dump_tabledata(normalized))

    assert normalized.equals(expected)
def test_normal(self, table_text, table_name, expected):
    """LtsvTableTextLoader parses LTSV text into table data equal to ``expected``."""
    loader = ptr.LtsvTableTextLoader(table_text)
    loader.table_name = table_name

    for actual in loader.load():
        print("[expected]: {}".format(ptw.dump_tabledata(expected)))
        print("[actual]: {}".format(ptw.dump_tabledata(actual)))
        assert actual == expected
def test_normal(self, table_name, header_list, record_list, expected):
    """Sanitized table data must equal ``expected``."""
    source = TableData(table_name, header_list, record_list)
    new_tabledata = SQLiteTableDataSanitizer(source).sanitize()

    print("lhs: {}".format(ptw.dump_tabledata(new_tabledata)))
    print("rhs: {}".format(ptw.dump_tabledata(expected)))

    assert new_tabledata == expected
def test_normal(self, table_text, table_name, header_list, expected):
    """CsvTableTextLoader with explicit headers yields one of the expected tables."""
    loader = ptr.CsvTableTextLoader(table_text)
    loader.table_name = table_name
    loader.header_list = header_list

    for actual in loader.load():
        print(ptw.dump_tabledata(actual))
        # Dump every candidate so the log shows what was acceptable.
        for candidate in expected:
            print(ptw.dump_tabledata(candidate))

        assert actual.in_tabledata_list(expected)
def test_normal(self, table_name, headers, records, expected):
    """Normalized table data round-trips through an in-memory SQLite database."""
    normalized = SQLiteTableDataSanitizer(TableData(table_name, headers, records)).normalize()

    print_test_result(expected=dump_tabledata(expected), actual=dump_tabledata(normalized))

    con = connect_memdb()
    con.create_table_from_tabledata(normalized)

    # Data stored to SQLite must come back equal to the expected table.
    assert con.select_as_tabledata(normalized.table_name) == expected
    assert normalized.equals(expected)
def test_normal_ssv(self, tmpdir):
    """TableFileLoader parses space-separated (ssv) ps-style output into a table."""
    p_file_path = Path(six.text_type(tmpdir.join("testdata.txt")))
    p_file_path.parent.makedirs_p()

    with open(p_file_path, "w") as f:
        f.write(
            dedent(
                """\
                USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND
                root 1 0.0 0.4 77664 8784 ? Ss May11 0:02 /sbin/init
                root 2 0.0 0.0 0 0 ? S May11 0:00 [kthreadd]
                root 4 0.0 0.0 0 0 ? I< May11 0:00 [kworker/0:0H]
                root 6 0.0 0.0 0 0 ? I< May11 0:00 [mm_percpu_wq]
                root 7 0.0 0.0 0 0 ? S May11 0:01 [ksoftirqd/0]
                """
            )
        )

    expected_list = [
        TableData(
            "testdata",
            [
                "USER",
                "PID",
                "%CPU",
                "%MEM",
                "VSZ",
                "RSS",
                "TTY",
                "STAT",
                "START",
                "TIME",
                "COMMAND",
            ],
            [
                ["root", 1, 0, 0.4, 77664, 8784, "?", "Ss", "May11", "0:02", "/sbin/init"],
                ["root", 2, 0, 0, 0, 0, "?", "S", "May11", "0:00", "[kthreadd]"],
                ["root", 4, 0, 0, 0, 0, "?", "I<", "May11", "0:00", "[kworker/0:0H]"],
                ["root", 6, 0, 0, 0, 0, "?", "I<", "May11", "0:00", "[mm_percpu_wq]"],
                ["root", 7, 0, 0, 0, 0, "?", "S", "May11", "0:01", "[ksoftirqd/0]"],
            ],
        )
    ]

    loader = ptr.TableFileLoader(p_file_path, format_name="ssv")
    # NOTE(review): ssv is apparently reported as "csv" by format_name — confirm intended.
    assert loader.format_name == "csv"

    for actual, expected in zip(loader.load(), expected_list):
        print(dump_tabledata(expected))
        print(dump_tabledata(actual))
        assert actual.equals(expected)
def test_normal(self, tmpdir, test_id, table_text, filename, expected):
    """LtsvTableFileLoader loads a written LTSV file into ``expected``."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with io.open(file_path, "w", encoding="utf-8") as f:
        f.write(table_text)

    loader = ptr.LtsvTableFileLoader(file_path)

    for actual in loader.load():
        expected_dump = ptw.dump_tabledata(expected)
        actual_dump = ptw.dump_tabledata(actual)

        print("test-id={}".format(test_id))
        print("[expected]\n{}".format(expected_dump))
        print("[actual]\n{}".format(actual_dump))

        assert actual == expected
def test_normal_excel(self, tmpdir):
    """Tables written to a multi-sheet xlsx file are loaded back by TableFileLoader."""
    file_path = "/tmp/valid/test/data/validdata.xlsx"
    p_file_path = Path(str(tmpdir.join(file_path)))
    p_file_path.parent.makedirs_p()

    expected_tables = [
        TableData(
            "testsheet1",
            ["a1", "b1", "c1"],
            [["aa1", "ab1", "ac1"], [1.0, 1.1, "a"], [2.0, 2.2, "bb"], [3.0, 3.3, 'cc"dd"']],
        ),
        TableData(
            "testsheet3",
            ["a3", "b3", "c3"],
            [["aa3", "ab3", "ac3"], [4.0, 1.1, "a"], [5.0, "", "bb"], [6.0, 3.3, ""]],
        ),
    ]

    # Write one worksheet per expected table.
    writer = ExcelXlsxTableWriter()
    writer.open(p_file_path)
    for source in expected_tables:
        writer.from_tabledata(source)
        writer.write_table()
    writer.close()

    loader = ptr.TableFileLoader(p_file_path)
    assert loader.format_name == "excel"

    for actual in loader.load():
        print(dump_tabledata(actual))
        assert actual in expected_tables
def test_smoke(self, tmpdir, filename):
    """Smoke test: at least one non-empty table from the data file loads into SQLite."""
    db_path = tmpdir.join("tmp.db")
    con = SimpleSQLite(str(db_path), "w")

    test_data_file_path = os.path.join(os.path.dirname(__file__), "data", filename)
    loader = ptr.TableFileLoader(test_data_file_path)

    success_count = 0
    for tabledata in loader.load():
        if tabledata.is_empty():
            continue

        print(ptw.dump_tabledata(tabledata))

        try:
            sanitized = ptr.SQLiteTableDataSanitizer(tabledata).sanitize()
            con.create_table_from_tabledata(sanitized)
            success_count += 1
        except ValueError as e:
            # Best effort: some tables may not be convertible; just log and continue.
            print(e)

    con.commit()

    assert success_count > 0
def test_normal(self, table_text, table_name, expected_tabletuple_list):
    """Loader yields at least one table; each yielded table must be in the expected list.

    Bug fix: the "[expected]" debug loop previously dumped the actual
    ``tabledata`` on every iteration instead of each ``expected`` entry,
    making the log useless for diagnosing mismatches.
    """
    self.LOADER_CLASS.clear_table_count()
    loader = self.LOADER_CLASS(table_text)
    loader.table_name = table_name

    load = False
    for tabledata in loader.load():
        print("[actual]\n{}".format(dump_tabledata(tabledata)))
        print("[expected]")
        for expected in expected_tabletuple_list:
            print("{}".format(dump_tabledata(expected)))

        assert tabledata.in_tabledata_list(expected_tabletuple_list)
        load = True

    # Guard against loaders that silently yield nothing.
    assert load
def test_normal(self, table_text, table_name, expected_tabletuple_list):
    """HtmlTableTextLoader yields only tables contained in the expected list."""
    loader = ptr.HtmlTableTextLoader(table_text)
    loader.table_name = table_name

    for actual in loader.load():
        print("[actual]\n{}".format(dump_tabledata(actual)))
        assert actual.in_tabledata_list(expected_tabletuple_list)
def test_normal(self, tmpdir, table, header, value, expected):
    """Round-trip: write with SqliteTableWriter, reload with SqliteFileLoader."""
    db_path = str(tmpdir.join("test.sqlite"))

    writer = ptw.SqliteTableWriter()
    writer.open(db_path)
    writer.table_name = table
    writer.headers = header
    writer.value_matrix = value
    writer.write_table()
    writer.close()

    for loaded in SqliteFileLoader(db_path).load():
        expected_dump = ptw.dump_tabledata(expected)
        actual_dump = ptw.dump_tabledata(loaded)
        print_test_result(expected=expected_dump, actual=actual_dump)

        # Compare dumps rather than objects for a readable diff on failure.
        assert actual_dump == expected_dump
def test_normal(self, valid_excel_file_path, table_name, start_row, expected_list):
    """ExcelTableFileLoader honors table_name/start_row and yields expected tables."""
    loader = ptr.ExcelTableFileLoader(valid_excel_file_path)
    loader.table_name = table_name
    loader.start_row = start_row

    for actual in loader.load():
        print("[actual]\n{}".format(dump_tabledata(actual)))
        assert actual.in_tabledata_list(expected_list)
def test_normal(self, tmpdir, test_id, table_text, filename, table_name, expected_tabledata_list):
    """HtmlTableFileLoader loads a written HTML file into the expected tables, in order."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with io.open(file_path, "w", encoding="utf-8") as f:
        f.write(table_text)

    loader = ptr.HtmlTableFileLoader(file_path)
    loader.table_name = table_name

    for actual, expected in zip(loader.load(), expected_tabledata_list):
        print("--- test {} ---".format(test_id))
        print("[expected]\n{}".format(ptw.dump_tabledata(expected)))
        print("[actual]\n{}".format(ptw.dump_tabledata(actual)))
        print("")

        assert actual == expected
def test_normal(self, tmpdir, table, header, value, expected):
    """Round-trip: write with SqliteTableWriter, reload with ptr.SqliteFileLoader."""
    db_path = str(tmpdir.join("test.sqlite"))

    writer = ptw.SqliteTableWriter()
    writer.open(db_path)
    writer.table_name = table
    writer.header_list = header
    writer.value_matrix = value
    writer.write_table()
    writer.close()

    loader = ptr.SqliteFileLoader(db_path)
    for loaded in loader.load():
        expected_dump = ptw.dump_tabledata(expected)
        actual_dump = ptw.dump_tabledata(loaded)

        print("[expected]\n{}".format(expected_dump))
        print("[actual]\n{}".format(actual_dump))

        assert actual_dump == expected_dump
def test_normal(self, table_text, table_name, expected_tabletuple_list):
    """JsonTableTextLoader yields at least one table from the expected list."""
    ptr.JsonTableFileLoader.clear_table_count()
    loader = ptr.JsonTableTextLoader(table_text)
    loader.table_name = table_name

    load = False
    for actual in loader.load():
        print("[actual]\n{}".format(ptw.dump_tabledata(actual)))
        assert actual.in_tabledata_list(expected_tabletuple_list)
        load = True

    # Guard against loaders that silently yield nothing.
    assert load
def test_normal(self, tmpdir, writer_class, table, header, value, expected):
    """Round-trip through a parameterized Excel writer and ExcelTableFileLoader."""
    # xls output requires the optional xlwt dependency.
    if writer_class == ptw.ExcelXlsTableWriter and not HAS_XLWT:
        pytest.skip()

    xlsx_path = str(tmpdir.join("test.xlsx"))

    writer = writer_class()
    writer.open(xlsx_path)
    writer.make_worksheet(table)
    writer.header_list = header
    writer.value_matrix = value
    writer.write_table()
    writer.close()

    for loaded in ExcelTableFileLoader(xlsx_path).load():
        expected_dump = ptw.dump_tabledata(expected)
        actual_dump = ptw.dump_tabledata(loaded)
        print_test_result(expected=expected_dump, actual=actual_dump)

        assert actual_dump == expected_dump
def test_normal(self, tmpdir, writer_class, table, header, value, expected):
    """Round-trip through a parameterized Excel writer and ptr.ExcelTableFileLoader."""
    xlsx_path = str(tmpdir.join("test.xlsx"))

    writer = writer_class()
    writer.open(xlsx_path)
    writer.make_worksheet(table)
    writer.header_list = header
    writer.value_matrix = value
    writer.write_table()
    writer.close()

    loader = ptr.ExcelTableFileLoader(xlsx_path)
    for loaded in loader.load():
        expected_dump = ptw.dump_tabledata(expected)
        actual_dump = ptw.dump_tabledata(loaded)

        print("[expected]\n{}".format(expected_dump))
        print("[actual]\n{}".format(actual_dump))

        assert actual_dump == expected_dump
def test_normal_json(self, url, format_name):
    """TableUrlLoader detects JSON from a mocked URL and yields the expected rows."""
    responses.add(
        responses.GET,
        url,
        body=dedent("""\
            [
                {"attr_a": 1},
                {"attr_b": 2.1, "attr_c": "bb"}
            ]"""),
        content_type="text/plain; charset=utf-8",
        status=200,
    )

    expected_list = [
        TableData(
            "url_loader",
            ["attr_a", "attr_b", "attr_c"],
            [{"attr_a": 1}, {"attr_b": 2.1, "attr_c": "bb"}],
        )
    ]

    loader = ptr.TableUrlLoader(url, format_name)
    assert loader.format_name == "json"
    loader.table_name = "url_loader"

    for actual in loader.load():
        print("{} {}".format(actual, dump_tabledata(actual)))
        print(actual.rows)
        print("[expected]")
        for expected in expected_list:
            print(dump_tabledata(expected))

        assert actual.in_tabledata_list(expected_list)
def test_normal_fifo(self, tmpdir, table_text, fifo_name, expected):
    """Loader reads table text streamed through a named pipe (FIFO)."""
    fifo_path = str(tmpdir.join(fifo_name))
    os.mkfifo(fifo_path)

    loader = self.LOADER_CLASS(fifo_path)

    # The writer runs in a separate process: a FIFO blocks until both
    # ends are open, so the load loop must stay inside the executor scope.
    with ProcessPoolExecutor() as executor:
        executor.submit(fifo_writer, fifo_path, table_text)

        for actual in loader.load():
            print("[actual]\n{}".format(dump_tabledata(actual)))
            assert actual.in_tabledata_list(expected)
def test_normal_fifo(self, tmpdir, table_text, fifo_name, expected):
    """CsvTableFileLoader reads CSV text streamed through a named pipe (FIFO)."""
    fifo_path = str(tmpdir.join(fifo_name))
    os.mkfifo(fifo_path)

    loader = ptr.CsvTableFileLoader(fifo_path)

    # Keep the load loop inside the executor scope so writer and reader overlap.
    with ProcessPoolExecutor() as executor:
        executor.submit(fifo_writer, fifo_path, table_text)

        for actual in loader.load():
            print(dump_tabledata(actual))
            assert actual.in_tabledata_list(expected)
def test_normal_2(self, tmpdir, test_id, table_text, filename, encoding, header_list, expected):
    """CsvTableFileLoader with explicit headers and encoding yields an expected table."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with io.open(file_path, "w", encoding=encoding) as f:
        f.write(table_text)

    loader = ptr.CsvTableFileLoader(file_path)
    loader.header_list = header_list

    for actual in loader.load():
        print("test-id={}".format(test_id))
        print(ptw.dump_tabledata(actual))

        assert actual in expected
def test_normal(self, tmpdir, test_id, table_text, filename, headers, expected):
    """TsvTableFileLoader with explicit headers yields tables from ``expected``."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with io.open(file_path, "w", encoding="utf-8") as f:
        f.write(table_text)

    loader = ptr.TsvTableFileLoader(file_path)
    loader.headers = headers

    for actual in loader.load():
        print("test-id={}".format(test_id))
        print(dump_tabledata(actual))

        assert actual.in_tabledata_list(expected)
def test_smoke(self, tmpdir, filename):
    """Smoke test: each non-empty table in the data file dumps to a non-trivial string."""
    test_data_file_path = os.path.join(os.path.dirname(__file__), "data", filename)
    loader = ptr.TableFileLoader(test_data_file_path)

    loaded_count = 0
    for tabledata in loader.load():
        if tabledata.is_empty():
            continue

        # A dump shorter than this would indicate a degenerate table.
        assert len(ptw.dump_tabledata(tabledata)) > 10
        loaded_count += 1

    assert loaded_count > 0
def test_normal(self, tmpdir, table_text, filename, table_name, expected_tabletuple_list):
    """Loader reads the written file and yields at least one expected table."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with open(file_path, "w") as f:
        f.write(table_text)

    loader = self.LOADER_CLASS(file_path)

    load = False
    for actual in loader.load():
        print("[actual]\n{}".format(dump_tabledata(actual)))
        assert actual.in_tabledata_list(expected_tabletuple_list)
        load = True

    # Guard against loaders that silently yield nothing.
    assert load
def test_normal(self, tmpdir, test_id, tabledata, filename, header_list, expected):
    """SqliteFileLoader reads back tables previously stored via SimpleSQLite."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    con = SimpleSQLite(file_path, "w")
    con.create_table_from_tabledata(tabledata)

    loader = ptr.SqliteFileLoader(file_path)
    loader.header_list = header_list

    # Loop variable renamed to avoid shadowing the ``tabledata`` fixture.
    for loaded in loader.load():
        print("test-id={}".format(test_id))
        print(ptw.dump_tabledata(loaded))

        assert loaded in expected
def test_normal(self, tmpdir, test_id, table_text, filename, table_name, expected_tabledata_list):
    """MarkdownTableFileLoader yields at least one table from the expected list."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with open(file_path, "w") as f:
        f.write(table_text)

    loader = ptr.MarkdownTableFileLoader(file_path)
    loader.table_name = table_name

    load = False
    for actual in loader.load():
        print("--- test {} ---".format(test_id))
        print("\n[actual]\n{}".format(dump_tabledata(actual)))

        assert actual.in_tabledata_list(expected_tabledata_list)
        load = True

    # Guard against loaders that silently yield nothing.
    assert load
def test_normal(self, tmpdir, table_text, filename, table_name, expected_tabletuple_list):
    """JsonTableFileLoader yields at least one table contained in the expected list."""
    file_path = Path(str(tmpdir.join(filename)))
    file_path.parent.makedirs_p()

    with open(file_path, "w") as f:
        f.write(table_text)

    loader = ptr.JsonTableFileLoader(file_path)
    loader.table_name = table_name

    load = False
    for actual in loader.load():
        print("[actual]\n{}".format(ptw.dump_tabledata(actual)))

        assert actual in expected_tabletuple_list
        load = True

    # Guard against loaders that silently yield nothing.
    assert load
def test_normal_excel(self, tmpdir):
    """Tables written to a multi-sheet xlsx file are loaded back by TableFileLoader."""
    file_path = '/tmp/valid/test/data/validdata.xlsx'
    p_file_path = Path(str(tmpdir.join(file_path)))
    p_file_path.parent.makedirs_p()

    expected_tables = [
        TableData(
            table_name='testsheet1',
            header_list=['a1', 'b1', 'c1'],
            row_list=[
                ['aa1', 'ab1', 'ac1'],
                [1.0, 1.1, 'a'],
                [2.0, 2.2, 'bb'],
                [3.0, 3.3, 'cc"dd"'],
            ],
        ),
        TableData(
            table_name='testsheet3',
            header_list=['a3', 'b3', 'c3'],
            row_list=[
                ['aa3', 'ab3', 'ac3'],
                [4.0, 1.1, 'a'],
                [5.0, '', 'bb'],
                [6.0, 3.3, ''],
            ],
        ),
    ]

    # Write one worksheet per expected table.
    writer = ptw.ExcelXlsxTableWriter()
    writer.open(p_file_path)
    for source in expected_tables:
        writer.from_tabledata(source)
        writer.write_table()
    writer.close()

    loader = ptr.TableFileLoader(p_file_path)
    assert loader.format_name == "excel"

    for actual in loader.load():
        print(ptw.dump_tabledata(actual))
        assert actual in expected_tables