def main():
    """Daemon entry point: load configuration, log in to Mergin, optionally run
    a one-off initialization, then pull/push in an endless sleep loop.

    Recognized command line options (exactly one may be given):
      --init-from-gpkg  initialize the database from the project GeoPackage
      --init-from-db    initialize the project GeoPackage from the database
    """
    print(f"== starting mergin-db-sync daemon == version {__version__} ==")

    config_filename = 'config.ini'
    dbsync.load_config(config_filename)

    # load daemon-specific bits
    parser = configparser.ConfigParser()
    parser.read(config_filename)
    sleep_time = int(parser['daemon']['sleep_time'])

    print("Logging in to Mergin...")
    mc = dbsync.create_mergin_client()

    if len(sys.argv) == 2:
        # optionally we can run initialization before starting the sync loop
        option = sys.argv[1]
        if option == '--init-from-gpkg':
            dbsync.dbsync_init(mc, from_gpkg=True)
        elif option == '--init-from-db':
            dbsync.dbsync_init(mc, from_gpkg=False)
        else:
            raise ValueError("Unknown command line option: " + option)

    while True:
        print(datetime.datetime.now())
        try:
            print("Trying to pull")
            dbsync.dbsync_pull(mc)
            print("Trying to push")
            dbsync.dbsync_push(mc)

            # check mergin client token expiration: re-create the client when
            # less than an hour of validity remains
            # NOTE(review): reads the private _auth_session attribute — assumes
            # it holds a tz-aware 'expire' datetime; confirm against the client API
            remaining = mc._auth_session['expire'] - datetime.datetime.now(datetime.timezone.utc)
            if remaining.total_seconds() < 3600:
                mc = dbsync.create_mergin_client()
        except dbsync.DbSyncError as e:
            print("Error: " + str(e))

        print("Going to sleep")
        time.sleep(sleep_time)
def init_sync_from_geopackage(mc, project_name, source_gpkg_path):
    """
    Initialize sync from given GeoPackage file:
    - (re)create Mergin project with the file
    - (re)create local project working directory and sync directory
    - configure DB sync and let it do the init (make copies to the database)

    :param mc: authenticated Mergin client used for project operations
    :param project_name: short project name (namespaced with API_USER)
    :param source_gpkg_path: path of the GeoPackage to seed the project with
    """
    full_project_name = API_USER + "/" + project_name
    project_dir = os.path.join(TMP_DIR, project_name + '_work')        # working directory
    sync_project_dir = os.path.join(TMP_DIR, project_name + '_dbsync')  # used by dbsync
    db_schema_main = project_name + '_main'
    db_schema_base = project_name + '_base'

    # FIX: close the connection when done — it was previously leaked
    conn = psycopg2.connect(DB_CONNINFO)
    try:
        cleanup(mc, full_project_name, [project_dir, sync_project_dir])
        cleanup_db(conn, db_schema_base, db_schema_main)
    finally:
        conn.close()

    # prepare a new Mergin project
    mc.create_project(project_name)
    mc.download_project(full_project_name, project_dir)
    shutil.copy(source_gpkg_path, os.path.join(project_dir, 'test_sync.gpkg'))
    mc.push_project(project_dir)

    # prepare sync dir
    mc.download_project(full_project_name, sync_project_dir)

    # prepare dbsync config
    config.geodiffinfo_exe = GEODIFFINFO_EXE
    config.mergin_username = API_USER
    config.mergin_password = USER_PWD
    config.mergin_url = SERVER_URL
    config.db_conn_info = DB_CONNINFO
    config.project_working_dir = sync_project_dir
    config.mergin_sync_file = 'test_sync.gpkg'
    config.db_driver = 'postgres'
    config.db_schema_modified = db_schema_main
    config.db_schema_base = db_schema_base

    # FIX: pass the Mergin client — sibling code calls dbsync_init(mc, from_gpkg=True),
    # so the client argument was missing here
    dbsync_init(mc, from_gpkg=True)
def main():
    """Daemon entry point (variant without an explicit Mergin client):
    load configuration, optionally run a one-off initialization, then
    pull/push in an endless sleep loop.

    Recognized command line options (exactly one may be given):
      --init-from-gpkg  initialize the database from the project GeoPackage
      --init-from-db    initialize the project GeoPackage from the database
    """
    config_filename = 'config.ini'
    dbsync.load_config(config_filename)

    # load daemon-specific bits
    parser = configparser.ConfigParser()
    parser.read(config_filename)
    sleep_time = int(parser['daemon']['sleep_time'])

    if len(sys.argv) == 2:
        # optionally we can run initialization before starting the sync loop
        option = sys.argv[1]
        if option == '--init-from-gpkg':
            dbsync.dbsync_init(from_gpkg=True)
        elif option == '--init-from-db':
            dbsync.dbsync_init(from_gpkg=False)
        else:
            raise ValueError("Unknown command line option: " + option)

    while True:
        print(datetime.datetime.now())
        try:
            print("Trying to pull")
            dbsync.dbsync_pull()
            print("Trying to push")
            dbsync.dbsync_push()
        except dbsync.DbSyncError as e:
            print("Error: " + str(e))

        print("Going to sleep")
        time.sleep(sleep_time)
def test_init_from_gpkg(mc):
    """End-to-end test of dbsync initialization from a source GeoPackage.

    Exercises: initial schema creation, idempotent re-init, detection of a
    missing base schema, re-init after losing the local working dir, re-init
    with a version mismatch, mismatches between DB and GPKG, and rejection of
    pending local changes.

    :param mc: Mergin client pytest fixture
    """
    project_name = 'test_init'
    source_gpkg_path = os.path.join(TEST_DATA_DIR, 'base.gpkg')
    project_dir = os.path.join(TMP_DIR, project_name + '_work')
    db_schema_main = project_name + '_main'
    db_schema_base = project_name + '_base'

    init_sync_from_geopackage(mc, project_name, source_gpkg_path)

    # test that database schemas are created + tables are populated
    conn = psycopg2.connect(DB_CONNINFO)
    cur = conn.cursor()
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 3

    # run again, nothing should change
    dbsync_init(mc, from_gpkg=True)
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 3
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["name"] == config.mergin_project_name
    assert db_proj_info["version"] == 'v1'

    # rename base schema to mimic some mismatch
    cur.execute(f"ALTER SCHEMA {db_schema_base} RENAME TO schema_tmp")
    conn.commit()
    with pytest.raises(DbSyncError) as err:
        dbsync_init(mc, from_gpkg=True)
    assert "The 'modified' schema exists but the base schema is missing" in str(err.value)
    # and revert back
    cur.execute(f"ALTER SCHEMA schema_tmp RENAME TO {db_schema_base}")
    conn.commit()

    # make change in GPKG and push to server to create pending changes, it should pass but not sync
    shutil.copy(os.path.join(TEST_DATA_DIR, 'inserted_1_A.gpkg'),
                os.path.join(project_dir, 'test_sync.gpkg'))
    mc.push_project(project_dir)
    # remove local copy of project (to mimic loss at docker restart)
    shutil.rmtree(config.project_working_dir)
    dbsync_init(mc, from_gpkg=True)
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 3
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v1'

    # let's remove local working dir and download different version from server to mimic versions mismatch
    shutil.rmtree(config.project_working_dir)
    mc.download_project(config.mergin_project_name,
                        config.project_working_dir, 'v2')
    # run init again, it should handle local working dir properly (e.g. download correct version) and pass but not sync
    dbsync_init(mc, from_gpkg=True)
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v1'

    # pull server changes to db to make sure we can sync again
    dbsync_pull(mc)
    cur.execute(f"SELECT count(*) from {db_schema_main}.simple")
    assert cur.fetchone()[0] == 4
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v2'

    # update some feature from 'modified' db to create mismatch with src geopackage, it should pass but not sync
    fid = 1
    cur.execute(f"SELECT * from {db_schema_main}.simple WHERE fid={fid}")
    old_value = cur.fetchone()[3]
    cur.execute(f"UPDATE {db_schema_main}.simple SET rating=100 WHERE fid={fid}")
    conn.commit()
    cur.execute(f"SELECT * from {db_schema_main}.simple WHERE fid={fid}")
    assert cur.fetchone()[3] == 100
    dbsync_init(mc, from_gpkg=True)

    # check geopackage has not been modified - after init we are not synced!
    gpkg_conn = sqlite3.connect(os.path.join(project_dir, 'test_sync.gpkg'))
    gpkg_cur = gpkg_conn.cursor()
    gpkg_cur.execute(f"SELECT * FROM simple WHERE fid={fid}")
    assert gpkg_cur.fetchone()[3] == old_value

    # push db changes to server (and download new version to local working dir) to make sure we can sync again
    dbsync_push(mc)
    mc.pull_project(project_dir)
    gpkg_cur.execute(f"SELECT * FROM simple WHERE fid={fid}")
    assert gpkg_cur.fetchone()[3] == 100
    db_proj_info = _get_db_project_comment(conn, db_schema_base)
    assert db_proj_info["version"] == 'v3'

    # update some feature from 'base' db to create mismatch with src geopackage and modified
    cur.execute(f"SELECT * from {db_schema_base}.simple")
    # FIX: originally two consecutive fetchone() calls read fid and old_value
    # from two DIFFERENT rows (each call advances the cursor); take both
    # columns from the same row instead
    row = cur.fetchone()
    fid = row[0]
    old_value = row[3]
    cur.execute(f"UPDATE {db_schema_base}.simple SET rating=100 WHERE fid={fid}")
    conn.commit()
    cur.execute(f"SELECT * from {db_schema_base}.simple WHERE fid={fid}")
    assert cur.fetchone()[3] == 100
    with pytest.raises(DbSyncError) as err:
        dbsync_init(mc, from_gpkg=True)
    assert "The db schemas already exist but 'base' schema is not synchronized with source GPKG" in str(err.value)

    # make local changes to src file to introduce local changes
    shutil.copy(
        os.path.join(TEST_DATA_DIR, 'base.gpkg'),
        os.path.join(config.project_working_dir, config.mergin_sync_file))
    with pytest.raises(DbSyncError) as err:
        dbsync_init(mc, from_gpkg=True)
    assert "There are pending changes in the local directory - that should never happen" in str(err.value)