Example #1
def test_sql_storage_mysql_constraints(mysql_url):
    dialect = SqlDialect(prefix="prefix_")
    source = Package("data/storage/constraints.json")
    storage = source.to_sql(mysql_url, dialect=dialect)
    target = Package.from_sql(mysql_url, dialect=dialect)

    # Assert metadata
    assert target.get_resource("constraints").schema == {
        "fields": [
            {"name": "required", "type": "string", "constraints": {"required": True}},
            {"name": "minLength", "type": "string"},  # constraint removal
            {"name": "maxLength", "type": "string"},  # constraint removal
            {"name": "pattern", "type": "string"},  # constraint removal
            {"name": "enum", "type": "string"},  # constraint removal
            {"name": "minimum", "type": "integer"},  # constraint removal
            {"name": "maximum", "type": "integer"},  # constraint removal
        ],
    }

    # Assert data
    assert target.get_resource("constraints").read_rows() == [
        {
            "required": "passing",
            "minLength": "passing",
            "maxLength": "passing",
            "pattern": "passing",
            "enum": "passing",
            "minimum": 5,
            "maximum": 5,
        },
    ]

    # Cleanup storage
    storage.delete_package(target.resource_names)
Example #2
def test_sql_parser(database_url):
    dialect = SqlDialect(table="table")
    with Resource(database_url, dialect=dialect) as resource:
        assert resource.schema == {
            "fields": [
                {"name": "id", "type": "integer", "constraints": {"required": True}},
                {"name": "name", "type": "string"},
            ],
            "primaryKey": ["id"],
        }
        assert resource.header == ["id", "name"]
        assert resource.read_rows() == [
            {"id": 1, "name": "english"},
            {"id": 2, "name": "中国人"},
        ]
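These examples rely on pytest fixtures (database_url, sqlite_url, mysql_url, postgresql_url) that are defined outside this listing. The sketch below shows one way a compatible database_url fixture could be built; the fixture name comes from the tests above, but the setup and table contents are only inferred from the assertions, so treat it as an assumption rather than the project's actual conftest.

import sqlite3

import pytest


@pytest.fixture
def database_url(tmpdir):
    # throwaway SQLite database with a table named "table" holding the two rows
    # asserted by the parser examples; mysql_url/postgresql_url would instead
    # point at running servers
    path = str(tmpdir.join("database.db"))
    conn = sqlite3.connect(path)
    conn.execute('CREATE TABLE "table" (id INTEGER PRIMARY KEY, name TEXT)')
    conn.executemany('INSERT INTO "table" (id, name) VALUES (?, ?)', [(1, "english"), (2, "中国人")])
    conn.commit()
    conn.close()
    yield "sqlite:///" + path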
Example #3
def test_table_sql_write_timezone(sqlite_url):
    source = "data/timezone.csv"
    dialect = SqlDialect(table="timezone")
    with Table(source) as table:
        table.write(sqlite_url, dialect=dialect)
    with Table(sqlite_url, dialect=dialect) as table:
        assert table.header == ["datetime", "time"]
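        # the source timezones are not kept by SQL storage, so all rows read back as naive 15:00 values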
        assert table.read_rows() == [
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
        ]
Example #4
def test_sql_parser_where(database_url):
    dialect = SqlDialect(table="table", where="name = '中国人'")
    with Resource(database_url, dialect=dialect) as resource:
        assert resource.header == ["id", "name"]
        assert resource.read_rows() == [
            {"id": 2, "name": "中国人"},
        ]
Example #5
def test_sql_parser_order_by_desc(database_url):
    dialect = SqlDialect(table="table", order_by="id desc")
    with Resource(database_url, dialect=dialect) as resource:
        assert resource.header == ["id", "name"]
        assert resource.read_rows() == [
            {"id": 2, "name": "中国人"},
            {"id": 1, "name": "english"},
        ]
Example #6
def test_program_extract_dialect_table_option_sql(database_url):
    table = "fruits"
    result = runner.invoke(program, f"extract {database_url} --table {table} --json")
    if IS_UNIX:
        assert result.exit_code == 0
        dialect = SqlDialect(table=table)
        with Resource(database_url, dialect=dialect) as resource:
            assert json.loads(result.stdout) == extract(resource)
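Example #6 additionally assumes a CLI test runner and the frictionless program object. A minimal setup matching the calls above might look like this; the import locations are assumptions based on frictionless v4 and Typer, not copied from the original test module.

import json
import platform

from typer.testing import CliRunner

from frictionless import Resource, extract, program
from frictionless.plugins.sql import SqlDialect

runner = CliRunner()
IS_UNIX = platform.system() != "Windows"  # the JSON output check is only run on Unix-like systems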
Example #7
def test_sql_storage_postgresql_integrity_different_order_issue_957(postgresql_url):
    dialect = SqlDialect(prefix="prefix_")
    source = Package("data/storage/integrity.json")
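    # move the referenced "integrity_main" resource to the end so that creation order differs from reference order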
    source.add_resource(source.remove_resource("integrity_main"))
    storage = source.to_sql(postgresql_url, dialect=dialect)
    target = Package.from_sql(postgresql_url, dialect=dialect)
    assert len(target.resources) == 2
    storage.delete_package(target.resource_names)
Example #8
def test_sql_storage_dialect_basepath_issue_964(sqlite_url):
    dialect = SqlDialect(table="test_table", basepath="data")
    with Resource(path="sqlite:///sqlite.db", dialect=dialect) as resource:
        assert resource.read_rows() == [
            {"id": 1, "name": "foo"},
            {"id": 2, "name": "bar"},
            {"id": 3, "name": "baz"},
        ]
Example #9
def test_table_sql_write(database_url):
    source = "data/table.csv"
    dialect = SqlDialect(table="name", order_by="id")
    with Table(source) as table:
        table.write(database_url, dialect=dialect)
    with Table(database_url, dialect=dialect) as table:
        assert table.header == ["id", "name"]
        assert table.read_data() == [[1, "english"], [2, "中国人"]]
Example #10
def test_sql_parser_write(database_url):
    source = Resource("data/table.csv")
    target = source.write(database_url, dialect=SqlDialect(table="name", order_by="id"))
    with target:
        assert target.header == ["id", "name"]
        assert target.read_rows() == [
            {"id": 1, "name": "english"},
            {"id": 2, "name": "中国人"},
        ]
Example #11
def test_sql_parser_headers_false(database_url):
    dialect = SqlDialect(table="table")
    layout = Layout(header=False)
    with Resource(database_url, dialect=dialect, layout=layout) as resource:
        assert resource.header == ["id", "name"]
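        # with header=False the column-name row is returned as data; "id" cannot be cast to integer, so it comes back as None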
        assert resource.read_rows() == [
            {"id": None, "name": "name"},
            {"id": 1, "name": "english"},
            {"id": 2, "name": "中国人"},
        ]
Example #12
def test_sql_storage_sqlite_integer_enum_issue_776(sqlite_url):
    dialect = SqlDialect(table="table")
    source = Resource(path="data/table.csv")
    source.infer()
    source.schema.get_field("id").constraints["enum"] = [1, 2]
    target = source.write(sqlite_url, dialect=dialect)
    assert target.read_rows() == [
        {"id": 1, "name": "english"},
        {"id": 2, "name": "中国人"},
    ]
Example #13
def test_sql_parser_write_timezone(sqlite_url):
    source = Resource("data/timezone.csv")
    target = source.write(sqlite_url, dialect=SqlDialect(table="timezone"))
    with target:
        assert target.header == ["datetime", "time"]
        assert target.read_rows() == [
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
            {"datetime": datetime.datetime(2020, 1, 1, 15), "time": datetime.time(15)},
        ]
Example #14
def test_package_to_zip_resource_sql(tmpdir, database_url):
    path = os.path.join(tmpdir, "package.zip")
    dialect = SqlDialect(table="table")
    source = Package(resources=[Resource(database_url, name="table", dialect=dialect)])
    source.to_zip(path)
    target = Package.from_zip(path)
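    # the SQL resource is stored by reference: the zip keeps the database URL rather than the data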
    assert target.get_resource("table").path == database_url
    assert target.get_resource("table").read_rows() == [
        {"id": 1, "name": "english"},
        {"id": 2, "name": "中国人"},
    ]
Example #15
def test_sql_storage_mysql_integrity(mysql_url):
    dialect = SqlDialect(prefix="prefix_")
    source = Package("data/storage/integrity.json")
    storage = source.to_sql(mysql_url, dialect=dialect)
    target = Package.from_sql(mysql_url, dialect=dialect)

    # Assert metadata (main)
    assert target.get_resource("integrity_main").schema == {
        "fields": [
            # added required
            {"name": "id", "type": "integer", "constraints": {"required": True}},
            {"name": "parent", "type": "integer"},
            {"name": "description", "type": "string"},
        ],
        "primaryKey": ["id"],
        "foreignKeys": [
            {"fields": ["parent"], "reference": {"resource": "", "fields": ["id"]}}
        ],
    }

    # Assert metadata (link)
    assert target.get_resource("integrity_link").schema == {
        "fields": [
            # added required
            {"name": "main_id", "type": "integer", "constraints": {"required": True}},
            # added required; removed unique
            {"name": "some_id", "type": "integer", "constraints": {"required": True}},
            # removed unique
            {"name": "description", "type": "string"},
        ],
        "primaryKey": ["main_id", "some_id"],
        "foreignKeys": [
            {
                "fields": ["main_id"],
                "reference": {"resource": "integrity_main", "fields": ["id"]},
            }
        ],
    }

    # Assert data (main)
    assert target.get_resource("integrity_main").read_rows() == [
        {"id": 1, "parent": None, "description": "english"},
        {"id": 2, "parent": 1, "description": "中国人"},
    ]

    # Assert data (link)
    assert target.get_resource("integrity_link").read_rows() == [
        {"main_id": 1, "some_id": 1, "description": "note1"},
        {"main_id": 2, "some_id": 2, "description": "note2"},
    ]

    # Cleanup storage
    storage.delete_package(target.resource_names)
Example #16
def test_sql_parser_write_string_pk_issue_777_sqlite(sqlite_url):
    source = Resource("data/table.csv")
    source.infer()
    source.schema.primary_key = ["name"]
    target = source.write(sqlite_url, dialect=SqlDialect(table="name"))
    with target:
        assert target.schema.primary_key == ["name"]
        assert target.header == ["id", "name"]
        assert target.read_rows() == [
            {"id": 1, "name": "english"},
            {"id": 2, "name": "中国人"},
        ]
Example #17
def test_sql_storage_mysql_types(mysql_url):
    dialect = SqlDialect(prefix="prefix_")
    source = Package("data/storage/types.json")
    storage = source.to_sql(mysql_url, dialect=dialect)
    target = Package.from_sql(mysql_url, dialect=dialect)

    # Assert metadata
    assert target.get_resource("types").schema == {
        "fields": [
            {"name": "any", "type": "string"},  # type fallback
            {"name": "array", "type": "string"},  # type fallback
            {"name": "boolean", "type": "integer"},  # type downgrade
            {"name": "date", "type": "date"},
            {"name": "date_year", "type": "date"},  # format removal
            {"name": "datetime", "type": "datetime"},
            {"name": "duration", "type": "string"},  # type fallback
            {"name": "geojson", "type": "string"},  # type fallback
            {"name": "geopoint", "type": "string"},  # type fallback
            {"name": "integer", "type": "integer"},
            {"name": "number", "type": "number"},
            {"name": "object", "type": "string"},  # type fallback
            {"name": "string", "type": "string"},
            {"name": "time", "type": "time"},
            {"name": "year", "type": "integer"},  # type downgrade
            {"name": "yearmonth", "type": "string"},  # type fallback
        ],
    }

    # Assert data
    assert target.get_resource("types").read_rows() == [
        {
            "any": "中国人",
            "array": '["Mike", "John"]',
            "boolean": True,
            "date": datetime.date(2015, 1, 1),
            "date_year": datetime.date(2015, 1, 1),
            "datetime": datetime.datetime(2015, 1, 1, 3, 0),
            "duration": "P1Y1M",
            "geojson": '{"type": "Point", "coordinates": [33, 33.33]}',
            "geopoint": "30,70",
            "integer": 1,
            "number": 7,
            "object": '{"chars": 560}',
            "string": "english",
            "time": datetime.time(3, 0),
            "year": 2015,
            "yearmonth": "2015-01",
        },
    ]

    # Cleanup storage
    storage.delete_package(target.resource_names)
Example #18
def test_sql_parser_write_string_pk_issue_777_mysql(mysql_url):
    source = Resource("data/table.csv")
    source.infer()
    source.schema.primary_key = ["name"]
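    # MySQL needs a bounded length to use a string column as a primary key, hence the maxLength constraint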
    source.schema.get_field("name").constraints["maxLength"] = 100
    target = source.write(mysql_url, dialect=SqlDialect(table="name"))
    with target:
        assert target.schema.primary_key == ["name"]
        assert target.header == ["id", "name"]
        assert target.read_rows() == [
            {"id": 1, "name": "english"},
            {"id": 2, "name": "中国人"},
        ]
Example #19
def test_table_sql(database_url):
    dialect = SqlDialect(table="data")
    with Table(database_url, dialect=dialect) as table:
        assert table.schema == {
            "fields": [
                {"name": "id", "type": "integer", "constraints": {"required": True}},
                {"name": "name", "type": "string"},
            ],
            "primaryKey": ["id"],
        }
        assert table.header == ["id", "name"]
        assert table.read_data() == [[1, "english"], [2, "中国人"]]
Example #20
def test_sql_storage_mysql_comment_support(mysql_url):
    dialect = SqlDialect(table="table")

    # Write
    source = Resource(path="data/table.csv")
    source.infer()
    source.schema.get_field("id").description = "integer field"
    source.schema.get_field("name").description = "string field"
    source.write(mysql_url, dialect=dialect)

    # Read
    target = Resource(mysql_url, dialect=dialect)
    with target:
        assert target.schema == {
            "fields": [
                {"name": "id", "type": "integer", "description": "integer field"},
                {"name": "name", "type": "string", "description": "string field"},
            ]
        }
        assert target.read_rows() == [
            {"id": 1, "name": "english"},
            {"id": 2, "name": "中国人"},
        ]
Example #21
def test_sql_parser_order_by(database_url):
    dialect = SqlDialect(table="table", order_by="id")
    with Table(database_url, dialect=dialect) as table:
        assert table.header == ["id", "name"]
        assert table.read_data() == [[1, "english"], [2, "中国人"]]
Example #22
def test_table_sql_headers_false(database_url):
    dialect = SqlDialect(table="data")
    with Table(database_url, dialect=dialect, headers=False) as table:
        assert table.header == []
        assert table.read_data() == [["id", "name"], [1, "english"], [2, "中国人"]]
Example #23
def test_table_sql_order_by_desc(database_url):
    dialect = SqlDialect(table="data", order_by="id desc")
    with Table(database_url, dialect=dialect) as table:
        assert table.header == ["id", "name"]
        assert table.read_data() == [[2, "中国人"], [1, "english"]]
Example #24
def test_table_format_sql(database_url):
    dialect = SqlDialect(table="data")
    with Table(database_url, dialect=dialect) as table:
        assert table.header == ["id", "name"]
        assert table.read_data() == [[1, "english"], [2, "中国人"]]
Example #25
def test_sql_parser_describe_to_yaml_issue_821(database_url):
    dialect = SqlDialect(table="table")
    resource = Resource(database_url, dialect=dialect)
    resource.infer()
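    # regression check for issue 821: metadata inferred from a SQL source must serialize to YAML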
    assert resource.to_yaml()