def register(self):
    """Register/create the current host on the remote server if not already registered.

    POSTs to the ``clients`` resource. On HTTP 200 the server returns the
    client document; its ``_id`` is persisted under the ``client_id`` config
    key so later calls (and later syncs) become no-ops.
    """
    if Config.get_key("client_id"):
        # Already registered on a previous run; nothing to do.
        log.debug("Already registered ({0})".format(Config.get_key("client_id")))
        return
    r_kwargs = self.request_kwargs.copy()
    r = requests.post(self.get_resource("clients"), **r_kwargs)
    if r.status_code == 200:
        client = r.json()
        if client:
            Config.set_key("client_id", client["_id"])
    else:
        # Typo fix: "occured" -> "occurred".
        log.error("An error occurred during sync: {0}".format(r.text))
def register(self):
    """Create this host on the remote sync server unless a client_id is already stored."""
    if Config.get_key("client_id"):
        log.debug("Already registered ({0})".format(
            Config.get_key("client_id")))
        return
    post_kwargs = self.request_kwargs.copy()
    response = requests.post(self.get_resource("clients"), **post_kwargs)
    if response.status_code != 200:
        log.error("An error occured during sync: {0}".format(response.text))
        return
    client = response.json()
    if client:
        Config.set_key("client_id", client["_id"])
def sync(self):
    """Draft for implementing bakthat clients (hosts) backups data synchronization.

    Synchronize the Bakthat sqlite database via an HTTP POST request.
    Backups are never really deleted from the sqlite database; only the
    ``is_deleted`` key is updated.

    Sends the last server sync timestamp along with data updated since the
    last sync; the server returns backups updated on its side since then.
    On both sides, backups are created if they don't exist yet, or updated
    if the incoming version is newer.
    """
    log.debug("Start syncing")
    self.register()
    last_sync_ts = Config.get_key("sync_ts", 0)
    to_insert_in_mongo = [b._data for b in Backups.search(last_updated_gt=last_sync_ts)]
    data = dict(sync_ts=last_sync_ts, to_insert_in_mongo=to_insert_in_mongo)
    r_kwargs = self.request_kwargs.copy()
    log.debug("Initial payload: {0}".format(data))
    r_kwargs.update({"data": json.dumps(data)})
    r = requests.post(self.get_resource("backups/sync/status"), **r_kwargs)
    if r.status_code != 200:
        # Typo fix: "occured" -> "occurred".
        log.error("An error occurred during sync: {0}".format(r.text))
        return
    # Parse the response body once instead of three separate r.json() calls.
    payload = r.json()
    log.debug("Sync result: {0}".format(payload))
    # Guard against a missing/None key so the for-loop below can't raise TypeError.
    to_insert_in_bakthat = payload.get("to_insert_in_bakthat") or []
    sync_ts = payload.get("sync_ts")
    for newbackup in to_insert_in_bakthat:
        sqlite_backup = Backups.match_filename(newbackup["stored_filename"],
                                               newbackup["backend"])
        if sqlite_backup and newbackup["last_updated"] > sqlite_backup.last_updated:
            # Remote copy is newer than the local row: update in place.
            log.debug("Upsert {0}".format(newbackup))
            Backups.upsert(**newbackup)
        elif not sqlite_backup:
            # No local row at all: create it.
            log.debug("Create backup {0}".format(newbackup))
            Backups.create(**newbackup)
    Config.set_key("sync_ts", sync_ts)
    # Typo fix: "succcesful" -> "successful".
    log.debug("Sync successful")
def sync(self):
    """Draft for implementing bakthat clients (hosts) backups data synchronization.

    Synchronize the Bakthat sqlite database via an HTTP POST request.
    Backups are never really deleted from the sqlite database; only the
    ``is_deleted`` key is updated.

    Sends the last server sync timestamp along with data updated since the
    last sync; the server returns backups updated on its side since then.
    On both sides, backups are either created if they don't exist yet or
    updated if the incoming version is newer.
    """
    log.debug("Start syncing")
    self.register()
    last_sync_ts = Config.get_key("sync_ts", 0)
    to_insert_in_mongo = [
        b._data for b in Backups.search(last_updated_gt=last_sync_ts)
    ]
    data = dict(sync_ts=last_sync_ts, new=to_insert_in_mongo)
    r_kwargs = self.request_kwargs.copy()
    log.debug("Initial payload: {0}".format(data))
    r_kwargs.update({"data": json.dumps(data)})
    r = requests.post(self.get_resource("backups/sync"), **r_kwargs)
    if r.status_code != 200:
        # Typo fix: "occured" -> "occurred".
        log.error("An error occurred during sync: {0}".format(r.text))
        return
    # Parse the response body once instead of three separate r.json() calls.
    payload = r.json()
    log.debug("Sync result: {0}".format(payload))
    to_insert_in_bakthat = payload.get("updated", [])
    sync_ts = payload.get("sync_ts")
    for newbackup in to_insert_in_bakthat:
        log.debug("Upsert {0}".format(newbackup))
        Backups.upsert(**newbackup)
    Config.set_key("sync_ts", sync_ts)
    # Typo fix: "succcesful" -> "successful".
    log.debug("Sync successful")
def sync(self):
    """Draft for implementing bakthat clients (hosts) backups data synchronization.

    Synchronize the Bakthat sqlite database via an HTTP POST request.
    Backups are never really deleted from the sqlite database; only the
    ``is_deleted`` key is updated.

    Sends the last server sync timestamp along with data updated since the
    last sync; the server returns backups updated on its side since then.
    Both sides (bakthat and the sync server) upsert the latest data
    available:

    - if it doesn't exist yet, it will be created.
    - if it has been modified (e.g. deleted, since it's the only action we
      can take) it is updated.
    """
    log.debug("Start syncing")
    self.register()
    last_sync_ts = Config.get_key("sync_ts", 0)
    to_insert_in_mongo = [b._data for b in Backups.search(last_updated_gt=last_sync_ts)]
    data = dict(sync_ts=last_sync_ts, to_insert_in_mongo=to_insert_in_mongo)
    r_kwargs = self.request_kwargs.copy()
    log.debug("Initial payload: {0}".format(data))
    r_kwargs.update({"data": json.dumps(data)})
    r = requests.post(self.get_resource("backups/sync/status"), **r_kwargs)
    if r.status_code != 200:
        # Typo fix: "occured" -> "occurred".
        log.error("An error occurred during sync: {0}".format(r.text))
        return
    # Parse the response body once instead of three separate r.json() calls.
    payload = r.json()
    log.debug("Sync result: {0}".format(payload))
    # Guard against a missing/None key so the for-loop below can't raise TypeError.
    to_insert_in_bakthat = payload.get("to_insert_in_bakthat") or []
    sync_ts = payload.get("sync_ts")
    for newbackup in to_insert_in_bakthat:
        log.debug("Upsert {0}".format(newbackup))
        Backups.upsert(**newbackup)
    Config.set_key("sync_ts", sync_ts)
    # Typo fix: "succcesful" -> "successful".
    log.debug("Sync successful")
def reset_sync(self):
    """Forget all sync state: zero the last-sync timestamp and drop the client id."""
    log.debug("reset sync")
    for config_key, reset_value in (("sync_ts", 0), ("client_id", None)):
        Config.set_key(config_key, reset_value)