Example #1
File: cli.py Project: yy1117/synch
def etl(ctx: Context, schema: str, renew: bool, table: List[str]):
    alias = ctx.obj["alias"]
    tables = table
    if not tables:
        # No tables given on the command line: fall back to every table
        # configured for this schema.
        tables = Settings.get_source_db_database_tables_name(alias, schema)
    tables_pk = {}
    reader = get_reader(alias)
    for table in tables:
        # etl_full expects a mapping of table name -> primary key column.
        tables_pk[table] = reader.get_primary_key(schema, table)
    etl_full(alias, schema, tables_pk, renew)
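The same full-sync flow can be driven without the command wrapper. A minimal sketch, assuming get_reader and etl_full live at the module paths shown below (those import paths are assumptions based on these examples, not confirmed by this listing):

from typing import List

from synch.factory import get_reader          # assumed location of get_reader
from synch.replication.etl import etl_full    # assumed location of etl_full

def run_full_etl(alias: str, schema: str, tables: List[str], renew: bool = False):
    reader = get_reader(alias)
    # Build the table -> primary key mapping that etl_full expects (see Example #1).
    tables_pk = {table: reader.get_primary_key(schema, table) for table in tables}
    # renew is forwarded unchanged, matching the renew flag in Example #1.
    etl_full(alias, schema, tables_pk, renew)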
Example #2
def test_full_etl_postgres():
    database = get_postgres_database()

    # Seed one row in the Postgres source table.
    sql = "insert into test(id,amount) values(1,1)"
    get_reader(alias_postgres).execute(sql)

    # Full ETL of the "test" table, keyed on "id", with renew=True.
    etl_full(alias_postgres, database, {"test": "id"}, True)

    # The row should now be readable from the target database.
    sql = f"select * from {database}.test"
    ret = get_writer().execute(sql)
    assert ret == [(1, Decimal("1"))]
Example #3
def test_full_etl_mysql():
    database = get_mysql_database()

    # Seed one row in the MySQL source table; "id" is auto-generated.
    sql = f"insert into {database}.test(amount) values(1.00)"
    get_reader(alias_mysql).execute(sql)

    # Full ETL of the "test" table, keyed on "id", with renew=True.
    etl_full(alias_mysql, database, {"test": "id"}, True)

    # The row should now be readable from the target database.
    sql = f"select * from {database}.test"
    ret = get_writer().execute(sql)
    assert ret == [(1, Decimal("1"))]
Example #4
def consume(ctx: Context, schema: str, skip_error: bool, last_msg_id: str):
    alias = ctx.obj["alias"]
    reader = get_reader(alias)
    tables = Settings.get_source_db_database_tables_name(alias, schema)
    tables_pk = {}
    for table in tables:
        # Map every configured table to its primary key.
        tables_pk[table] = reader.get_primary_key(schema, table)

    # Run a full ETL first so the target starts from a complete snapshot.
    etl_full(alias, schema, tables_pk)
    table_dict = Settings.get_source_db_database_tables_dict(alias, schema)

    # Then switch to continuous replication, resuming from last_msg_id and
    # optionally skipping errors when skip_error is set.
    continuous_etl(
        alias, schema, tables_pk, table_dict, last_msg_id, skip_error,
    )
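The consume flow above is a two-phase pattern: a one-off full load followed by continuous replication. A minimal sketch of that pattern, reusing the positional arguments from Example #4 (the import path is again an assumption):

from synch.replication.etl import etl_full, continuous_etl  # assumed module path

def run_replication(alias, schema, tables_pk, table_dict, last_msg_id=None, skip_error=False):
    # Phase 1: full load so the target starts from a complete snapshot.
    etl_full(alias, schema, tables_pk)
    # Phase 2: stream incremental changes, resuming from last_msg_id if given
    # and optionally skipping errors when skip_error is set.
    continuous_etl(alias, schema, tables_pk, table_dict, last_msg_id, skip_error)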