Example #1
0
    def test_normal_json(self, url, format_name):
        """TableUrlLoader should parse a JSON payload served as plain text
        when the format is supplied explicitly."""
        # Mock the HTTP endpoint; the MIME type is text/plain on purpose,
        # so format detection must come from ``format_name``, not the header.
        responses.add(
            responses.GET,
            url,
            body=dedent(
                """\
                [
                    {"attr_a": 1},
                    {"attr_b": 2.1, "attr_c": "bb"}
                ]"""
            ),
            content_type="text/plain; charset=utf-8",
            status=200,
        )
        expected_list = [
            TableData(
                "url_loader",
                ["attr_a", "attr_b", "attr_c"],
                [{"attr_a": 1}, {"attr_b": 2.1, "attr_c": "bb"}],
            )
        ]

        loader = ptr.TableUrlLoader(url, format_name)
        assert loader.format_name == "json"
        loader.table_name = "url_loader"

        for actual in loader.load():
            print("{} {}".format(actual, dumps_tabledata(actual)))
            print(actual.rows)
            print("[expected]")
            for expected in expected_list:
                print(dumps_tabledata(expected))

            assert actual.in_tabledata_list(expected_list)
Example #2
0
def io_test():
    """Write a sample CSV file, then demonstrate loading the same data
    twice: from the raw text and from the file on disk."""
    # --- prepare data ---
    file_path = "sample_data.csv"
    csv_text = "\n".join([
        '"attr_a","attr_b","attr_c"',
        '1,4,"a"',
        '2,2.1,"bb"',
        '3,120.9,"ccc"',
    ])
    with open(file_path, "w") as f:
        f.write(csv_text)

    # --- load from a csv text ---
    for table_data in ptr.CsvTableTextLoader(csv_text).load():
        print("\n".join([
            "load from text",
            "==============",
            "{:s}".format(ptw.dumps_tabledata(table_data)),
        ]))

    # --- load from a csv file ---
    for table_data in ptr.CsvTableFileLoader(file_path).load():
        print("\n".join([
            "load from file",
            "==============",
            "{:s}".format(ptw.dumps_tabledata(modify(table_data))),
        ]))
Example #3
0
    def test_normal_json(self):
        """TableTextLoader with format_name="json" should parse a JSON
        array of records into a single TableData."""
        text = dedent(
            """\
            [
                {"attr_a": 1},
                {"attr_b": 2.1, "attr_c": "bb"}
            ]"""
        )
        expected_list = [
            TableData(
                "json1",
                ["attr_a", "attr_b", "attr_c"],
                [{"attr_a": 1}, {"attr_b": 2.1, "attr_c": "bb"}],
            )
        ]

        loader = ptr.TableTextLoader(text, format_name="json")
        assert loader.format_name == "json"

        # Compare each loaded table with its expected counterpart in order.
        for actual, expected in zip(loader.load(), expected_list):
            print(dumps_tabledata(expected))
            print(dumps_tabledata(actual))

            assert actual.equals(expected)
Example #4
0
    def test_normal_csv(self, tmpdir, file_path, format_name):
        """TableFileLoader should parse a CSV file and report "csv" as the
        detected format regardless of the requested format_name variant."""
        filename = pv.replace_symbol(file_path, "")
        p_file_path = Path(
            six.text_type(tmpdir.join(filename + Path(file_path).ext)))
        p_file_path.parent.makedirs_p()

        csv_body = dedent("""\
            "attr_a","attr_b","attr_c"
            1,4,"a"
            2,2.1,"bb"
            3,120.9,"ccc"
            """)
        with open(p_file_path, "w") as f:
            f.write(csv_body)

        expected_list = [
            TableData(
                filename,
                ["attr_a", "attr_b", "attr_c"],
                [[1, 4, "a"], [2, "2.1", "bb"], [3, "120.9", "ccc"]],
            )
        ]

        loader = ptr.TableFileLoader(p_file_path, format_name=format_name)
        assert loader.format_name == "csv"

        for actual, expected in zip(loader.load(), expected_list):
            print(dumps_tabledata(expected))
            print(dumps_tabledata(actual))

            assert actual.equals(expected)
Example #5
0
    def test_normal_csv(self):
        """TableTextLoader with format_name="csv" should parse CSV text."""
        text = dedent(
            """\
            "attr_a","attr_b","attr_c"
            1,4,"a"
            2,2.1,"bb"
            3,120.9,"ccc"
            """
        )
        expected_list = [
            TableData(
                "csv1",
                ["attr_a", "attr_b", "attr_c"],
                [[1, 4, "a"], [2, "2.1", "bb"], [3, "120.9", "ccc"]],
            )
        ]

        loader = ptr.TableTextLoader(text, format_name="csv")
        assert loader.format_name == "csv"

        # Pairwise comparison of loaded vs. expected tables.
        for actual, expected in zip(loader.load(), expected_list):
            print(dumps_tabledata(expected))
            print(dumps_tabledata(actual))

            assert actual.equals(expected)
    def test_normal(self, table_text, table_name, expected):
        """LtsvTableTextLoader should parse LTSV text into *expected*."""
        loader = ptr.LtsvTableTextLoader(table_text)
        loader.table_name = table_name

        for actual in loader.load():
            print("[expected]: {}".format(dumps_tabledata(expected)))
            print("[actual]: {}".format(dumps_tabledata(actual)))

            assert actual.equals(expected)
Example #7
0
    def test_normal(self, provider_list, rows, table_name, expected):
        """A TableFaker with a fixed seed must generate deterministic data."""
        out = TableFaker(seed=1).generate(provider_list, rows, table_name=table_name)

        actual_table = dumps_tabledata(out, **dump_opts)
        expected_table = dumps_tabledata(expected, **dump_opts)
        print_test_result(expected=expected_table, actual=actual_table)

        # Both the textual dump and the TableData objects must match.
        assert expected_table == actual_table
        assert out == expected
Example #8
0
    def test_normal(self, table_text, table_name, headers, expected):
        """TsvTableTextLoader output must be contained in *expected*."""
        loader = ptr.TsvTableTextLoader(table_text)
        loader.table_name = table_name
        loader.headers = headers

        for actual in loader.load():
            # Dump the actual table and every expected candidate for debugging.
            print(dumps_tabledata(actual))
            for candidate in expected:
                print(dumps_tabledata(candidate))

            assert actual.in_tabledata_list(expected)
    def test_normal_(self, table_name, headers, dup_col_handler, expected):
        """SQLiteTableDataSanitizer must normalize duplicated column names
        according to ``dup_col_handler``."""
        sanitizer = SQLiteTableDataSanitizer(
            TableData(table_name, headers, []),
            dup_col_handler=dup_col_handler)
        new_tabledata = sanitizer.normalize()

        try:
            # pytablewriter is optional; used only for diagnostic output.
            from pytablewriter import dumps_tabledata

            print_test_result(expected=dumps_tabledata(expected),
                              actual=dumps_tabledata(new_tabledata))
        except ImportError:
            pass

        assert new_tabledata.equals(expected)
    def test_normal(self, tmpdir, test_id, table_text, filename, expected):
        """LtsvTableFileLoader should read an LTSV file written as UTF-8."""
        file_path = Path(str(tmpdir.join(filename)))
        file_path.parent.makedirs_p()
        with io.open(file_path, "w", encoding="utf-8") as f:
            f.write(table_text)

        for actual in ptr.LtsvTableFileLoader(file_path).load():
            print("test-id={}".format(test_id))
            print("[expected]\n{}".format(dumps_tabledata(expected)))
            print("[actual]\n{}".format(dumps_tabledata(actual)))

            assert actual.equals(expected)
Example #11
0
    def test_normal_specify_header_list(self):
        """generate() must honor explicitly supplied headers."""
        faker = TableFaker(seed=1)
        out = faker.generate(
            ("file_name", "file_path"),
            1,
            table_name="with headers",
            headers=("input", "output"),
        )
        expected = TableData(
            "with headers",
            ("input", "output"),
            [("shake.wav", "/prepare/last.jpeg")],
        )

        actual_table = dumps_tabledata(out, **dump_opts)
        expected_table = dumps_tabledata(expected, **dump_opts)
        print_test_result(expected=expected_table, actual=actual_table)

        assert expected_table == actual_table
        assert out == expected
Example #12
0
    def test_normal_excel(self, tmpdir):
        """Write two worksheets with ExcelXlsxTableWriter, then verify that
        TableFileLoader reads each of them back.

        Bug fix: ``write_table()`` was previously called once after the
        loop, so only the last tabledata set via ``from_tabledata()`` would
        have been written to the workbook; each table must be written as it
        is set so that both worksheets exist in the output file.
        """
        file_path = "/tmp/valid/test/data/validdata.xlsx"
        p_file_path = Path(str(tmpdir.join(file_path)))
        p_file_path.parent.makedirs_p()

        tabledata_list = [
            TableData(
                "testsheet1",
                ["a1", "b1", "c1"],
                [["aa1", "ab1", "ac1"], [1.0, 1.1, "a"], [2.0, 2.2, "bb"],
                 [3.0, 3.3, 'cc"dd"']],
            ),
            TableData(
                "testsheet3",
                ["a3", "b3", "c3"],
                [["aa3", "ab3", "ac3"], [4.0, 1.1, "a"], [5.0, "", "bb"],
                 [6.0, 3.3, ""]],
            ),
        ]

        writer = ExcelXlsxTableWriter()
        writer.open(p_file_path)
        for tabledata in tabledata_list:
            writer.from_tabledata(tabledata)
            # write each worksheet before loading the next table's data
            writer.write_table()
        writer.close()

        loader = ptr.TableFileLoader(p_file_path)

        assert loader.format_name == "excel"

        for tabledata in loader.load():
            print(dumps_tabledata(tabledata))

            assert tabledata in tabledata_list
    def test_normal(self, table_text, table_name, expected_tabletuple_list):
        """The loader must yield at least one table that is contained in
        ``expected_tabletuple_list``.

        Bug fix: the expected-table debug loop previously printed the
        actual ``tabledata`` on every iteration instead of each expected
        entry, making the "[expected]" output useless for diagnosis.
        """
        self.LOADER_CLASS.clear_table_count()
        loader = self.LOADER_CLASS(table_text)
        loader.table_name = table_name

        load = False
        for tabledata in loader.load():
            print("[actual]\n{}".format(dumps_tabledata(tabledata)))
            print("[expected]")
            for expected in expected_tabletuple_list:
                # was mistakenly dumping `tabledata` here
                print("{}".format(dumps_tabledata(expected)))

            assert tabledata.in_tabledata_list(expected_tabletuple_list)
            load = True

        # the loader must have produced at least one table
        assert load
    def test_normal(self, table_text, table_name, expected_tabletuple_list):
        """Each table the loader yields must appear in the expected set."""
        loader = self.LOADER_CLASS(table_text)
        loader.table_name = table_name

        for actual in loader.load():
            print("[actual]\n{}".format(dumps_tabledata(actual)))

            assert actual.in_tabledata_list(expected_tabletuple_list)
Example #15
0
    def test_normal(self, valid_excel_file_path, table_name, start_row, expected_list):
        """ExcelTableFileLoader should honor table_name and start_row."""
        loader = ptr.ExcelTableFileLoader(valid_excel_file_path)
        loader.table_name = table_name
        loader.start_row = start_row

        for actual in loader.load():
            print("[actual]\n{}".format(dumps_tabledata(actual)))
            assert actual.in_tabledata_list(expected_list)
Example #16
0
    def test_normal_ssv(self):
        """Space-separated (ssv) input should be parsed like CSV; the loader
        reports "csv" as its effective format name."""
        # Sample `ps aux`-style output: columns separated by runs of spaces.
        text = dedent(
            """\
            USER       PID %CPU %MEM    VSZ   RSS TTY      STAT START   TIME COMMAND
            root         1  0.0  0.4  77664  8784 ?        Ss   May11   0:02 /sbin/init
            root         2  0.0  0.0      0     0 ?        S    May11   0:00 [kthreadd]
            root         4  0.0  0.0      0     0 ?        I<   May11   0:00 [kworker/0:0H]
            root         6  0.0  0.0      0     0 ?        I<   May11   0:00 [mm_percpu_wq]
            root         7  0.0  0.0      0     0 ?        S    May11   0:01 [ksoftirqd/0]
            """
        )
        expected_list = [
            TableData(
                "csv1",
                [
                    "USER",
                    "PID",
                    "%CPU",
                    "%MEM",
                    "VSZ",
                    "RSS",
                    "TTY",
                    "STAT",
                    "START",
                    "TIME",
                    "COMMAND",
                ],
                [
                    ["root", 1, 0, 0.4, 77664, 8784, "?", "Ss", "May11", "0:02", "/sbin/init"],
                    ["root", 2, 0, 0, 0, 0, "?", "S", "May11", "0:00", "[kthreadd]"],
                    ["root", 4, 0, 0, 0, 0, "?", "I<", "May11", "0:00", "[kworker/0:0H]"],
                    ["root", 6, 0, 0, 0, 0, "?", "I<", "May11", "0:00", "[mm_percpu_wq]"],
                    ["root", 7, 0, 0, 0, 0, "?", "S", "May11", "0:01", "[ksoftirqd/0]"],
                ],
            )
        ]

        loader = ptr.TableTextLoader(text, format_name="ssv")
        assert loader.format_name == "csv"

        for actual, expected in zip(loader.load(), expected_list):
            print(dumps_tabledata(expected))
            print(dumps_tabledata(actual))

            assert actual.equals(expected)
Example #17
0
    def test_normal_locale(self):
        """A seeded ja_JP faker must produce this exact deterministic output,
        and successive generate() calls must differ (RNG state advances)."""
        faker = TableFaker(locale="ja_JP", seed=1)
        out = faker.generate(("name", "address"), rows=2)
        expected = TableData(
            None,
            ("name", "address"),
            [("山岸 裕樹", "三重県荒川区明石町14丁目4番16号"),
             ("村山 拓真", "北海道荒川区白金台15丁目19番4号 コート所野806")],
        )

        actual_table = dumps_tabledata(out, **dump_opts)
        expected_table = dumps_tabledata(expected, **dump_opts)
        print_test_result(expected=expected_table, actual=actual_table)

        assert expected_table == actual_table
        assert out == expected
        # a second draw from the same faker yields different data
        assert out != faker.generate(("name", "address"), rows=2)
    def test_normal(self, table_name, headers, records, expected):
        """Sanitized table data must round-trip through SQLite unchanged."""
        new_tabledata = SQLiteTableDataSanitizer(
            TableData(table_name, headers, records)).normalize()

        try:
            # pytablewriter is optional; used only for diagnostic output.
            from pytablewriter import dumps_tabledata

            print_test_result(expected=dumps_tabledata(expected),
                              actual=dumps_tabledata(new_tabledata))
        except ImportError:
            pass

        # The normalized data must be insertable and readable back intact.
        db = connect_memdb()
        db.create_table_from_tabledata(new_tabledata)
        assert db.select_as_tabledata(new_tabledata.table_name) == expected

        assert new_tabledata.equals(expected)
    def test_normal(self, tmpdir, table, header, value, expected):
        """SqliteTableWriter output must load back as the expected table."""
        db_path = str(tmpdir.join("test.sqlite"))

        writer = ptw.SqliteTableWriter()
        writer.open(db_path)
        writer.table_name = table
        writer.headers = header
        writer.value_matrix = value
        writer.write_table()
        writer.close()

        for loaded in SqliteFileLoader(db_path).load():
            expected_dump = ptw.dumps_tabledata(expected)
            actual_dump = ptw.dumps_tabledata(loaded)
            print_test_result(expected=expected_dump, actual=actual_dump)

            assert actual_dump == expected_dump
    def test_normal(self, tmpdir, table, header, value, expected):
        """Round-trip check: write a table to SQLite, read it back with
        SqliteFileLoader, and compare the textual dumps."""
        sqlite_path = str(tmpdir.join("test.sqlite"))

        writer = ptw.SqliteTableWriter()
        writer.open(sqlite_path)
        writer.table_name = table
        writer.headers = header
        writer.value_matrix = value
        writer.write_table()
        writer.close()

        for table_data in SqliteFileLoader(sqlite_path).load():
            dump_expected = ptw.dumps_tabledata(expected)
            dump_actual = ptw.dumps_tabledata(table_data)
            print_test_result(expected=dump_expected, actual=dump_actual)

            assert dump_actual == dump_expected
    def test_normal(self, tmpdir, writer_class, table, header, value, expected):
        """Excel writers must produce files ExcelTableFileLoader reads back."""
        # The xls writer requires the optional xlwt dependency.
        if writer_class == ptw.ExcelXlsTableWriter and not HAS_XLWT:
            pytest.skip()

        out_path = str(tmpdir.join("test.xlsx"))

        writer = writer_class()
        writer.open(out_path)
        writer.make_worksheet(table)
        writer.headers = header
        writer.value_matrix = value
        writer.write_table()
        writer.close()

        for loaded in ExcelTableFileLoader(out_path).load():
            expected_dump = ptw.dumps_tabledata(expected)
            actual_dump = ptw.dumps_tabledata(loaded)
            print_test_result(expected=expected_dump, actual=actual_dump)

            assert actual_dump == expected_dump
    def test_normal(self, tmpdir, table, header, value, expected):
        """PandasDataFramePickleWriter round trip: write a pickle, read it
        back via pandas, and compare the dumped tables."""
        test_filepath = tmpdir.join("test1.pkl")

        writer = ptw.PandasDataFramePickleWriter(
            table_name=table,
            headers=header,
            value_matrix=value,
        )
        writer.open(str(test_filepath))
        writer.write_table()

        print(expected, file=sys.stderr)

        # Reload the pickled DataFrame into a fresh writer for comparison.
        reader = ptw.PandasDataFramePickleWriter()
        reader.from_dataframe(pd.read_pickle(test_filepath))
        reader.table_name = expected.table_name

        expected_dump = ptw.dumps_tabledata(expected)
        actual_dump = ptw.dumps_tabledata(reader.tabledata)
        print_test_result(expected=expected_dump, actual=actual_dump)

        assert actual_dump == expected_dump
Example #23
0
    def test_normal(self, table_text, table_name, expected_tabletuple_list):
        """JsonTableTextLoader must yield at least one matching table."""
        ptr.JsonTableFileLoader.clear_table_count()
        loader = ptr.JsonTableTextLoader(table_text)
        loader.table_name = table_name

        loaded_any = False
        for actual in loader.load():
            print("[actual]\n{}".format(dumps_tabledata(actual)))

            assert actual.in_tabledata_list(expected_tabletuple_list)
            loaded_any = True

        assert loaded_any
    def test_normal_fifo(self, tmpdir, table_text, fifo_name, expected):
        """CsvTableFileLoader should read CSV streamed through a named pipe."""
        namedpipe = str(tmpdir.join(fifo_name))
        os.mkfifo(namedpipe)

        loader = ptr.CsvTableFileLoader(namedpipe)

        # Feed the pipe from another process: opening a FIFO for reading
        # blocks until a writer connects, so the writer must run concurrently.
        with ProcessPoolExecutor() as executor:
            executor.submit(fifo_writer, namedpipe, table_text)

            for actual in loader.load():
                print(dumps_tabledata(actual))

                assert actual.in_tabledata_list(expected)
    def test_normal_fifo(self, tmpdir, table_text, fifo_name, expected):
        """The class-under-test loader must read data streamed via a FIFO."""
        namedpipe = str(tmpdir.join(fifo_name))
        os.mkfifo(namedpipe)

        loader = self.LOADER_CLASS(namedpipe)

        # Writer runs in a separate process so the blocking FIFO open on the
        # read side can complete.
        with ProcessPoolExecutor() as executor:
            executor.submit(fifo_writer, namedpipe, table_text)

            for actual in loader.load():
                print("[actual]\n{}".format(dumps_tabledata(actual)))

                assert actual.in_tabledata_list(expected)
    def test_smoke(self, tmpdir, filename):
        """Smoke test: every non-empty table in the sample data file should
        dump to a non-trivial string."""
        data_path = os.path.join(os.path.dirname(__file__), "data", filename)

        success_count = 0
        for tabledata in ptr.TableFileLoader(data_path).load():
            if tabledata.is_empty():
                continue

            assert len(dumps_tabledata(tabledata)) > 10
            success_count += 1

        # at least one table must have been loaded successfully
        assert success_count > 0
    def test_normal(self, tmpdir, test_id, table_text, filename, table_name,
                    expected_tabledata_list):
        """HtmlTableFileLoader should extract the expected tables from an
        HTML file written as UTF-8."""
        file_path = Path(str(tmpdir.join(filename)))
        file_path.parent.makedirs_p()
        with io.open(file_path, "w", encoding="utf-8") as f:
            f.write(table_text)

        loader = ptr.HtmlTableFileLoader(file_path)
        loader.table_name = table_name

        for actual in loader.load():
            print("--- test {} ---".format(test_id))
            print("[actual]\n{}\n".format(dumps_tabledata(actual)))

            assert actual.in_tabledata_list(expected_tabledata_list)
    def test_normal(self, tmpdir, test_id, tabledata, filename, headers, expected):
        """SqliteFileLoader should read back a table written to a DB file."""
        file_path = Path(str(tmpdir.join(filename)))
        file_path.parent.makedirs_p()

        con = SimpleSQLite(file_path, "w")
        con.create_table_from_tabledata(tabledata)

        loader = ptr.SqliteFileLoader(file_path)
        loader.headers = headers

        # Distinct loop variable so the ``tabledata`` argument is not shadowed.
        for actual in loader.load():
            print("test-id={}".format(test_id))
            print(dumps_tabledata(actual))

            assert actual.in_tabledata_list(expected)
Example #29
0
    def test_normal(self, tmpdir, test_id, table_text, filename, headers,
                    expected):
        """TsvTableFileLoader should parse a TSV file with the given headers."""
        file_path = Path(str(tmpdir.join(filename)))
        file_path.parent.makedirs_p()
        with open(file_path, "w", encoding="utf-8") as f:
            f.write(table_text)

        loader = ptr.TsvTableFileLoader(file_path)
        loader.headers = headers

        for actual in loader.load():
            print("test-id={}".format(test_id))
            print(dumps_tabledata(actual))

            assert actual.in_tabledata_list(expected)
Example #30
0
    def test_normal(self, tmpdir, table_text, filename, table_name,
                    expected_tabletuple_list):
        """JsonTableFileLoader must yield at least one table contained in
        the expected collection."""
        file_path = Path(str(tmpdir.join(filename)))
        file_path.parent.makedirs_p()
        with open(file_path, "w") as f:
            f.write(table_text)

        loaded_any = False
        for actual in ptr.JsonTableFileLoader(file_path).load():
            print("[actual]\n{}".format(dumps_tabledata(actual)))

            assert actual.in_tabledata_list(expected_tabletuple_list)
            loaded_any = True

        assert loaded_any
    def test_smoke(self, tmpdir, filename):
        """Smoke test: every non-empty table loaded from the data file should
        be insertable into SQLite after sanitizing."""
        try:
            import pytablereader as ptr
        except ImportError:
            pytest.skip("requires pytablereader")

        con = SimpleSQLite(str(tmpdir.join("tmp.db")), "w")

        data_path = os.path.join(os.path.dirname(__file__), "data",
                                 filename)
        loader = ptr.TableFileLoader(data_path)

        success_count = 0
        for table_data in loader.load():
            if table_data.is_empty():
                continue

            try:
                # diagnostics only; pytablewriter is an optional dependency
                from pytablewriter import dumps_tabledata

                print(dumps_tabledata(table_data))
            except ImportError:
                pass

            try:
                con.create_table_from_tabledata(
                    SQLiteTableDataSanitizer(table_data).normalize())
                success_count += 1
            except ValueError as e:
                print(e)

        con.commit()

        assert success_count > 0
    def test_normal_format_name(self, value, format_name, expected):
        """dumps_tabledata must honor the format_name argument."""
        actual = dumps_tabledata(value, format_name=format_name)
        print_test_result(expected=expected, actual=actual)

        assert actual == expected
    def test_normal_kwargs(self, value, kwargs, expected):
        """dumps_tabledata must pass arbitrary keyword options through."""
        actual = dumps_tabledata(value, **kwargs)
        print_test_result(expected=expected, actual=actual)

        assert actual == expected
 def test_exception(self, value, expected):
     """dumps_tabledata should raise the *expected* exception type for an
     invalid *value*.

     NOTE(review): this block's 1-space indentation differs from the rest
     of the file — likely mangled during extraction; confirm against the
     original class body.
     """
     with pytest.raises(expected):
         dumps_tabledata(value)