def test_basic_both(mc):
    """ Initializes a sync project and does both a change in Mergin and in the database,
    and lets DB sync handle it: changes in PostgreSQL need to be rebased on top of
    changes in Mergin server.
    """

    project_name = 'test_sync_both'
    source_gpkg_path = os.path.join(TEST_DATA_DIR, 'base.gpkg')
    project_dir = os.path.join(TMP_DIR,
                               project_name + '_work')  # working directory

    init_sync_from_geopackage(mc, project_name, source_gpkg_path)

    conn = psycopg2.connect(DB_CONNINFO)

    # test that database schemas are created + tables are populated
    cur = conn.cursor()
    cur.execute(f"SELECT count(*) from {project_name}_main.simple")
    assert cur.fetchone()[0] == 3

    # make change in GPKG and push
    shutil.copy(os.path.join(TEST_DATA_DIR, 'inserted_1_A.gpkg'),
                os.path.join(project_dir, 'test_sync.gpkg'))
    mc.push_project(project_dir)

    # make a change in PostgreSQL
    cur = conn.cursor()
    cur.execute(
        f"INSERT INTO {project_name}_main.simple (name, rating) VALUES ('insert in postgres', 123)"
    )
    cur.execute("COMMIT")
    cur.execute(f"SELECT count(*) from {project_name}_main.simple")
    assert cur.fetchone()[0] == 4

    # first pull changes from Mergin to DB (+rebase changes in DB) and then push the changes from DB to Mergin
    dbsync_pull(mc)
    db_proj_info = _get_db_project_comment(conn, 'test_sync_both_base')
    assert db_proj_info["version"] == 'v2'
    dbsync_push(mc)
    db_proj_info = _get_db_project_comment(conn, 'test_sync_both_base')
    assert db_proj_info["version"] == 'v3'

    # pull new version of the project to the work project directory
    mc.pull_project(project_dir)

    # check that the insert has been applied to our GeoPackage
    gpkg_conn = sqlite3.connect(os.path.join(project_dir, 'test_sync.gpkg'))
    gpkg_cur = gpkg_conn.cursor()
    gpkg_cur.execute("SELECT count(*) FROM simple")
    assert gpkg_cur.fetchone()[0] == 5

    # check that the insert has been applied to the DB
    cur = conn.cursor()
    cur.execute(f"SELECT count(*) from {project_name}_main.simple")
    assert cur.fetchone()[0] == 5

    print("---")
    dbsync_status(mc)
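
# A hypothetical sketch of the _get_db_project_comment() helper used throughout these
# tests (the real helper lives in the test suite and is not shown here). It assumes
# dbsync records the Mergin project name and version as a JSON comment on the 'base'
# schema, and simply reads that comment back.
import json

def _get_db_project_comment(conn, schema):
    """ Return the JSON metadata stored as a comment on the given schema, or None """
    cur = conn.cursor()
    cur.execute(
        "SELECT obj_description(oid, 'pg_namespace') FROM pg_namespace WHERE nspname = %s",
        (schema,))
    row = cur.fetchone()
    return json.loads(row[0]) if row and row[0] else None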

# Example 2

def main():

    print(f"== starting mergin-db-sync daemon == version {__version__} ==")

    filename = 'config.ini'
    dbsync.load_config(filename)

    # load daemon-specific bits
    cfg = configparser.ConfigParser()
    cfg.read(filename)
    sleep_time = int(cfg['daemon']['sleep_time'])

    print("Logging in to Mergin...")
    mc = dbsync.create_mergin_client()

    if len(sys.argv) == 2:
        # optionally we can run initialization before starting the sync loop
        cmd = sys.argv[1]
        if cmd == '--init-from-gpkg':
            dbsync.dbsync_init(mc, from_gpkg=True)
        elif cmd == '--init-from-db':
            dbsync.dbsync_init(mc, from_gpkg=False)
        else:
            raise ValueError("Unknown command line option: " + cmd)

    while True:

        print(datetime.datetime.now())

        try:
            print("Trying to pull")
            dbsync.dbsync_pull(mc)

            print("Trying to push")
            dbsync.dbsync_push(mc)

            # re-create the Mergin client if its auth token expires in less than an hour
            delta = mc._auth_session['expire'] - datetime.datetime.now(datetime.timezone.utc)
            if delta.total_seconds() < 3600:
                mc = dbsync.create_mergin_client()

        except dbsync.DbSyncError as e:
            print("Error: " + str(e))

        print("Going to sleep")
        time.sleep(sleep_time)
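
# For reference, a minimal config.ini that the daemon above would read could look like
# the following (only the [daemon] section is consumed here; the Mergin and database
# settings loaded by dbsync.load_config() are not shown in this example, so they are
# omitted rather than guessed):
#
#   [daemon]
#   sleep_time = 10
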
def test_basic_push(mc):
    """ Initialize a project and test push of a new row from PostgreSQL to Mergin """

    project_name = 'test_sync_push'
    source_gpkg_path = os.path.join(TEST_DATA_DIR, 'base.gpkg')
    project_dir = os.path.join(TMP_DIR,
                               project_name + '_work')  # working directory

    init_sync_from_geopackage(mc, project_name, source_gpkg_path)

    conn = psycopg2.connect(DB_CONNINFO)

    # test that database schemas are created + tables are populated
    cur = conn.cursor()
    cur.execute("SELECT count(*) from test_sync_push_main.simple")
    assert cur.fetchone()[0] == 3

    # make a change in PostgreSQL
    cur = conn.cursor()
    cur.execute(
        "INSERT INTO test_sync_push_main.simple (name, rating) VALUES ('insert in postgres', 123)"
    )
    cur.execute("COMMIT")
    cur.execute("SELECT count(*) from test_sync_push_main.simple")
    assert cur.fetchone()[0] == 4

    # push the change from the DB to Mergin
    dbsync_push(mc)
    db_proj_info = _get_db_project_comment(conn, 'test_sync_push_base')
    assert db_proj_info["version"] == 'v2'

    # pull new version of the project to the work project directory
    mc.pull_project(project_dir)

    # check that the insert has been applied to our GeoPackage
    gpkg_conn = sqlite3.connect(os.path.join(project_dir, 'test_sync.gpkg'))
    gpkg_cur = gpkg_conn.cursor()
    gpkg_cur.execute("SELECT count(*) FROM simple")
    assert gpkg_cur.fetchone()[0] == 4

    print("---")
    dbsync_status(mc)

# Example 4

def main():

    filename = 'config.ini'
    dbsync.load_config(filename)

    # load daemon-specific bits
    cfg = configparser.ConfigParser()
    cfg.read(filename)
    sleep_time = int(cfg['daemon']['sleep_time'])

    if len(sys.argv) == 2:
        # optionally we can run initialization before starting the sync loop
        cmd = sys.argv[1]
        if cmd == '--init-from-gpkg':
            dbsync.dbsync_init(from_gpkg=True)
        elif cmd == '--init-from-db':
            dbsync.dbsync_init(from_gpkg=False)
        else:
            raise ValueError("Unknown command line option: " + cmd)

    while True:

        print(datetime.datetime.now())

        try:
            print("Trying to pull")
            dbsync.dbsync_pull()

            print("Trying to push")
            dbsync.dbsync_push()

        except dbsync.DbSyncError as e:
            print("Error: " + str(e))

        print("Going to sleep")
        time.sleep(sleep_time)
def test_init_from_gpkg(mc):
    project_name = 'test_init'
    source_gpkg_path = os.path.join(TEST_DATA_DIR, 'base.gpkg')
    project_dir = os.path.join(TMP_DIR, project_name + '_work')
    db_schema_main = project_name + '_main'
    db_schema_base = project_name + '_base'

    init_sync_from_geopackage(mc, project_name, source_gpkg_path)

    # test that database schemas are created + tables are populated
    conn = psycopg2.connect(DB_CONNINFO)
    cur = conn.cursor()
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 3
    # run again, nothing should change
    dbsync_init(mc, from_gpkg=True)
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 3
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["name"] == config.mergin_project_name
    assert db_proj_info["version"] == 'v1'

    # rename base schema to mimic some mismatch
    cur.execute(f"ALTER SCHEMA {db_schema_base} RENAME TO schema_tmp")
    conn.commit()
    with pytest.raises(DbSyncError) as err:
        dbsync_init(mc, from_gpkg=True)
    assert "The 'modified' schema exists but the base schema is missing" in str(
        err.value)
    # and revert back
    cur.execute(f"ALTER SCHEMA schema_tmp RENAME TO {db_schema_base}")
    conn.commit()

    # make a change in the GPKG and push it to the server to create pending changes; init should pass but not sync
    shutil.copy(os.path.join(TEST_DATA_DIR, 'inserted_1_A.gpkg'),
                os.path.join(project_dir, 'test_sync.gpkg'))
    mc.push_project(project_dir)
    # remove the local copy of the project (to mimic loss at docker restart)
    shutil.rmtree(config.project_working_dir)
    dbsync_init(mc, from_gpkg=True)
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 3
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v1'

    # remove the local working dir and download a different version from the server to mimic a version mismatch
    shutil.rmtree(config.project_working_dir)
    mc.download_project(config.mergin_project_name, config.project_working_dir,
                        'v2')
    # run init again; it should handle the local working dir properly (e.g. download the correct version) and pass, but not sync
    dbsync_init(mc, from_gpkg=True)
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v1'

    # pull server changes to db to make sure we can sync again
    dbsync_pull(mc)
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 4
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v2'

    # update a feature in the 'modified' schema to create a mismatch with the source geopackage; init should pass but not sync
    fid = 1
    cur.execute(f"SELECT * from {db_schema_main}.simple WHERE fid={fid}")
    old_value = cur.fetchone()[3]
    cur.execute(
        f"UPDATE {db_schema_main}.simple SET rating=100 WHERE fid={fid}")
    conn.commit()
    cur.execute(f"SELECT * from {db_schema_main}.simple WHERE fid={fid}")
    assert cur.fetchone()[3] == 100
    dbsync_init(mc, from_gpkg=True)
    # check geopackage has not been modified - after init we are not synced!
    gpkg_conn = sqlite3.connect(os.path.join(project_dir, 'test_sync.gpkg'))
    gpkg_cur = gpkg_conn.cursor()
    gpkg_cur.execute(f"SELECT * FROM simple WHERE fid={fid}")
    assert gpkg_cur.fetchone()[3] == old_value
    # push db changes to server (and download new version to local working dir) to make sure we can sync again
    dbsync_push(mc)
    mc.pull_project(project_dir)
    gpkg_cur.execute(f"SELECT * FROM simple WHERE fid={fid}")
    assert gpkg_cur.fetchone()[3] == 100
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v3'

    # update a feature in the 'base' schema to create a mismatch with the source geopackage and the 'modified' schema
    cur.execute(f"SELECT * from {db_schema_base}.simple")
    row = cur.fetchone()  # read fid and old value from the same row (two fetchone() calls would return two different rows)
    fid = row[0]
    old_value = row[3]
    cur.execute(
        f"UPDATE {db_schema_base}.simple SET rating=100 WHERE fid={fid}")
    conn.commit()
    cur.execute(f"SELECT * from {db_schema_base}.simple WHERE fid={fid}")
    assert cur.fetchone()[3] == 100
    with pytest.raises(DbSyncError) as err:
        dbsync_init(mc, from_gpkg=True)
    assert "The db schemas already exist but 'base' schema is not synchronized with source GPKG" in str(
        err.value)

    # modify the source file locally to introduce pending local changes
    shutil.copy(
        os.path.join(TEST_DATA_DIR, 'base.gpkg'),
        os.path.join(config.project_working_dir, config.mergin_sync_file))
    with pytest.raises(DbSyncError) as err:
        dbsync_init(mc, from_gpkg=True)
    assert "There are pending changes in the local directory - that should never happen" in str(
        err.value)

# Example 6

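# Setup sketch for the LISTEN/NOTIFY loop below (not part of the original snippet):
# it assumes a psycopg2 connection in autocommit mode that LISTENs on the 'geodiff'
# channel targeted by the NOTIFY rules sketched at the end of this file. The
# connection string is a placeholder.
import select

import psycopg2
import psycopg2.extensions

import dbsync

DB_CONNINFO = "host=localhost dbname=mergin_dbsync user=postgres"  # hypothetical
conn = psycopg2.connect(DB_CONNINFO)
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cur = conn.cursor()
cur.execute("LISTEN geodiff;")
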
while True:
    if select.select([conn], [], [], 5) == ([], [], []):
        print("Timeout")

        print("Trying to pull")
        dbsync.dbsync_pull()

    else:
        conn.poll()
        while conn.notifies:
            notify = conn.notifies.pop(0)
            print("Got NOTIFY:", notify.pid, notify.channel, notify.payload)

        # new stuff in the database - let's push a new version

        # what if the transaction is not committed yet?
        # Docs: "if a NOTIFY is executed inside a transaction, the notify events
        # are not delivered until and unless the transaction is committed"

        # TODO: do we need to wait before the changes are accessible?

        print("Trying to push")
        dbsync.dbsync_push()



# TODO: create on init
# CREATE RULE geodiff_rule_update_simple AS ON UPDATE TO gd_sync_base.simple DO ALSO NOTIFY geodiff;
# CREATE RULE geodiff_rule_insert_simple AS ON INSERT TO gd_sync_base.simple DO ALSO NOTIFY geodiff;
# CREATE RULE geodiff_rule_delete_simple AS ON DELETE TO gd_sync_base.simple DO ALSO NOTIFY geodiff;
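
# A minimal sketch of the "create on init" TODO above (an assumed helper, not part of
# dbsync): it installs the three NOTIFY rules for every table in the 'base' schema so
# that the LISTEN/NOTIFY loop above is woken up whenever data changes. The schema and
# channel names are assumptions taken from the comments above.
def create_notify_rules(conn, schema='gd_sync_base', channel='geodiff'):
    cur = conn.cursor()
    cur.execute(
        "SELECT table_name FROM information_schema.tables "
        "WHERE table_schema = %s AND table_type = 'BASE TABLE'", (schema,))
    for (table,) in cur.fetchall():
        for op in ('UPDATE', 'INSERT', 'DELETE'):
            cur.execute(
                f"CREATE OR REPLACE RULE geodiff_rule_{op.lower()}_{table} AS "
                f"ON {op} TO {schema}.{table} DO ALSO NOTIFY {channel}")
    conn.commit()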