def updateSyncRecordHashAndSync():
    """Recompute the sync-record hash of the currently loaded roast and, if the
    profile is under sync, push pending local changes to the upload queue.

    Returns the fresh sync-record hash, or None when not connected or on error.
    """
    try:
        config.logger.info("controller:updateSyncRecordHashAndSync()")
        if not is_connected():
            return None
        roast_record = roast.getRoast()
        sync_record, sync_record_hash = roast.getSyncRecord(roast_record)
        if is_synced():
            applied_at = sync.getApplidedServerUpdatesModifiedAt()
            if applied_at is not None and "roast_id" in roast_record:
                sync.addSync(roast_record["roast_id"], applied_at)
                sync.setApplidedServerUpdatesModifiedAt(None)
            # we are connected and the profile is under sync
            if sync.syncRecordUpdated(roast_record):
                # we push updates on the sync record back to the server
                queue.addRoast(sync_record)
        return sync_record_hash
    except Exception as e:
        import sys
        _, _, exc_tb = sys.exc_info()
        config.logger.error(
            "controller: Exception in updateSyncRecordHashAndSync() line %s: %s",
            exc_tb.tb_lineno, e)
def updateSyncRecordHashAndSync():
    """Recompute the sync-record hash of the currently loaded roast and, if
    artisan.plus is ON and the profile is under sync, queue pending local
    changes for upload.

    Returns the fresh sync-record hash, or None when plus is off or on error.
    """
    try:
        config.logger.info("controller:updateSyncRecordHashAndSync()")
        if not is_on():
            return None
        roast_record = roast.getRoast()
        sync_record, sync_record_hash = roast.getSyncRecord(roast_record)
        if is_synced():  # check if profile is under sync already
            applied_at = sync.getApplidedServerUpdatesModifiedAt()
            if applied_at is not None and "roast_id" in roast_record:
                sync.addSync(roast_record["roast_id"], applied_at)
                sync.setApplidedServerUpdatesModifiedAt(None)
            # artisan.plus is ON and the profile is under sync
            if sync.syncRecordUpdated(roast_record):
                # we push updates on the sync record back to the server via the queue
                queue.addRoast(sync_record)
            elif ("roast_id" in roast_record
                    and queue.full_roast_in_queue(roast_record["roast_id"])):
                # this roast is not yet in the sync cache (it has not been
                # successfully uploaded), but a corresponding full roast record
                # is already queued, so we queue this updated sync_record too
                queue.addRoast(sync_record)
        return sync_record_hash
    except Exception as e:
        import sys
        _, _, exc_tb = sys.exc_info()
        config.logger.error(
            "controller: Exception in updateSyncRecordHashAndSync() line %s: %s",
            exc_tb.tb_lineno, e)
        return None
def setSyncRecordHash(roast_record=None):
    """Cache the hash of the sync record derived from roast_record (or from the
    current roast if None), guarded by the sync-record semaphore."""
    global cached_sync_record_hash
    try:
        config.logger.debug("sync:setSyncRecordHash()")
        sync_record_semaphore.acquire(1)
        cached_sync_record_hash = roast.getSyncRecord(roast_record)[1]
    except Exception as e:
        config.logger.error("sync: Exception in setSyncRecordHash() %s", e)
    finally:
        # release only if we still hold the semaphore
        if sync_record_semaphore.available() < 1:
            sync_record_semaphore.release(1)
def syncRecordUpdated(roast_record=None):
    """Return True iff the current sync record's hash differs from the cached
    one (i.e. the record changed since it was last cached); False on error."""
    try:
        config.logger.debug("sync:syncRecordUpdated()")
        sync_record_semaphore.acquire(1)
        current_hash = roast.getSyncRecord(roast_record)[1]
        return current_hash != cached_sync_record_hash
    except Exception as e:
        config.logger.error("sync: Exception in syncRecordUpdated() %s", e)
        return False
    finally:
        # release only if we still hold the semaphore
        if sync_record_semaphore.available() < 1:
            sync_record_semaphore.release(1)
def setSyncRecordHash(sync_record=None, h=None):
    """Cache the given sync record and its hash; if either argument is None,
    derive both from the current roast. Guarded by the sync-record semaphore."""
    global cached_sync_record_hash, cached_sync_record
    try:
        config.logger.debug("sync:setSyncRecordHash()")
        sync_record_semaphore.acquire(1)
        if sync_record is None or h is None:
            # no explicit record/hash given: compute both from the current roast
            cached_sync_record, cached_sync_record_hash = roast.getSyncRecord()
        else:
            cached_sync_record = sync_record
            cached_sync_record_hash = h
    except Exception as e:
        config.logger.error("sync: Exception in setSyncRecordHash() %s", e)
    finally:
        # release only if we still hold the semaphore
        if sync_record_semaphore.available() < 1:
            sync_record_semaphore.release(1)
def sync():
    """Check the loaded profile's sync record for consistency against the
    cached hash, then fetch pending updates from the server."""
    try:
        config.logger.info("sync:sync()")
        aw = config.app_window
        computed_hash = roast.getSyncRecord(roast.getRoast())[1]
        known_hash = aw.qmc.plus_sync_record_hash
        if known_hash is not None and known_hash == computed_hash:
            # we remember that consistent state to be able to detect future modifications
            setSyncRecordHash(h=computed_hash)
        else:
            # the sync record of the loaded profile is not consistent or missing,
            # offline changes (might) have been applied
            aw.qmc.fileDirty()  # set file dirty flag
            # clear sync record hash cache to trigger an upload of the modified
            # plus sync record on next save
            clearSyncRecordHash()
        # now we check for updates on the server side
        getUpdate(aw.qmc.roastUUID, aw.curFile)
    except Exception as e:
        import sys
        _, _, exc_tb = sys.exc_info()
        config.logger.error("sync: Exception in sync() line %s: %s", exc_tb.tb_lineno, e)
def applyServerUpdates(data):
    """Apply a (partial) roast update received from the artisan.plus server to
    the currently loaded profile.

    data: dict of roast attributes as delivered by the server. Every attribute
    present in data overwrites the corresponding profile value if it differs.
    Afterwards the cached sync record and its hash are recomputed so further
    local edits can be detected. If anything changed, the profile is marked
    dirty and the user is notified; the timestamp of the applied update is
    remembered in any case.
    """
    dirty = False          # any profile attribute changed
    title_changed = False  # batch number/prefix/pos or label changed => repaint title
    try:
        config.logger.debug("sync:applyServerUpdates()")
        config.logger.debug("sync: -> apply: %s", data)
        aw = config.app_window
        # weights arrive in Kg and are converted to the profile's weight unit
        if "amount" in data and data["amount"] is not None:
            w = aw.convertWeight(data["amount"],
                                 aw.qmc.weight_units.index("Kg"),
                                 aw.qmc.weight_units.index(aw.qmc.weight[2]))
            if w != aw.qmc.weight[0]:
                aw.qmc.weight[0] = w
                dirty = True
        if "end_weight" in data and data["end_weight"] is not None:
            w = aw.convertWeight(data["end_weight"],
                                 aw.qmc.weight_units.index("Kg"),
                                 aw.qmc.weight_units.index(aw.qmc.weight[2]))
            if w != aw.qmc.weight[1]:
                aw.qmc.weight[1] = w
                dirty = True
        if "batch_number" in data and data["batch_number"] != aw.qmc.roastbatchnr:
            aw.qmc.roastbatchnr = data["batch_number"]
            dirty = True
            title_changed = True
        if "batch_prefix" in data and data["batch_prefix"] != aw.qmc.roastbatchprefix:
            aw.qmc.roastbatchprefix = data["batch_prefix"]
            dirty = True
            title_changed = True
        if "batch_pos" in data and data["batch_pos"] != aw.qmc.roastbatchpos:
            aw.qmc.roastbatchpos = data["batch_pos"]
            dirty = True
            title_changed = True
        if "label" in data and data["label"] != aw.qmc.title:
            aw.qmc.title = data["label"]
            dirty = True
            title_changed = True
        if "location" in data and data["location"] is not None:
            if "hr_id" in data["location"] and data["location"]["hr_id"] != aw.qmc.plus_store:
                aw.qmc.plus_store = data["location"]["hr_id"]
                dirty = True
            if "label" in data["location"] and data["location"]["label"] != aw.qmc.plus_store_label:
                aw.qmc.plus_store_label = data["location"]["label"]
                dirty = True
        if "coffee" in data and data["coffee"] is not None:
            if "hr_id" in data["coffee"] and data["coffee"]["hr_id"] != aw.qmc.plus_coffee:
                aw.qmc.plus_coffee = data["coffee"]["hr_id"]
                dirty = True
            if "label" in data["coffee"] and data["coffee"]["label"] != aw.qmc.plus_coffee_label:
                aw.qmc.plus_coffee_label = data["coffee"]["label"]
                dirty = True
        # a coffee and a blend are mutually exclusive
        if aw.qmc.plus_coffee is not None:
            aw.qmc.plus_blend_label = None
            aw.qmc.plus_blend_spec = None
            aw.qmc.plus_blend_spec_labels = None
        if "blend" in data and data["blend"] is not None and "label" in data["blend"] \
                and "ingredients" in data["blend"] and data["blend"]["ingredients"]:
            try:
                ingredients = []
                for i in data["blend"]["ingredients"]:
                    entry = {}
                    entry["ratio"] = i["ratio"]
                    # just the hr_id as a string and not the full object
                    entry["coffee"] = i["coffee"]["hr_id"]
                    if "ratio_num" in i and i["ratio_num"] is not None:
                        entry["ratio_num"] = i["ratio_num"]
                    if "ratio_denom" in i and i["ratio_denom"] is not None:
                        entry["ratio_denom"] = i["ratio_denom"]
                    ingredients.append(entry)
                blend_spec = {
                    "label": data["blend"]["label"],
                    "ingredients": ingredients
                }
                blend_spec_labels = [
                    i["coffee"]["label"] for i in data["blend"]["ingredients"]
                ]
                aw.qmc.plus_blend_spec = blend_spec
                aw.qmc.plus_blend_spec_labels = blend_spec_labels
                dirty = True
            except Exception:
                # incomplete blend data from the server; keep the current blend
                pass
        if aw.qmc.plus_blend_spec is not None:
            aw.qmc.plus_coffee = None
            aw.qmc.plus_coffee_label = None
        # ensure that location is None if neither coffee nor blend is set
        if aw.qmc.plus_coffee is None and aw.qmc.plus_blend_spec is None and aw.qmc.plus_store is not None:
            aw.qmc.plus_store = None
        if "color_system" in data and data["color_system"] != aw.qmc.color_systems[aw.qmc.color_system_idx]:
            try:
                aw.qmc.color_system_idx = aw.qmc.color_systems.index(data["color_system"])
                dirty = True
            except Exception:  # was a bare except; narrowed
                # cloud color system not known by Artisan client
                pass
        if "ground_color" in data and data["ground_color"] != aw.qmc.ground_color:
            aw.qmc.ground_color = data["ground_color"]
            dirty = True
        if "whole_color" in data and data["whole_color"] != aw.qmc.whole_color:
            aw.qmc.whole_color = data["whole_color"]
            dirty = True
        if "machine" in data and data["machine"] != aw.qmc.roastertype:
            aw.qmc.roastertype = data["machine"]
            dirty = True
        if "notes" in data and data["notes"] != aw.qmc.roastingnotes:
            aw.qmc.roastingnotes = data["notes"]
            dirty = True
        if "density_roasted" in data and data["density_roasted"] != aw.qmc.density_roasted[0]:
            aw.qmc.density_roasted[0] = data["density_roasted"]
            dirty = True
        # BUGFIX: previously this compared data["moisture"] against
        # aw.qmc.density_roasted[0] (copy-paste from the block above) while
        # assigning aw.qmc.moisture_roasted, so real moisture changes could be
        # missed and identical values could mark the profile dirty
        if "moisture" in data and data["moisture"] != aw.qmc.moisture_roasted:
            aw.qmc.moisture_roasted = data["moisture"]
            dirty = True
        if "temperature" in data and data["temperature"] != aw.qmc.ambientTemp:
            aw.qmc.ambientTemp = data["temperature"]
            dirty = True
        if "pressure" in data and data["pressure"] != aw.qmc.ambient_pressure:
            aw.qmc.ambient_pressure = data["pressure"]
            dirty = True
        if "humidity" in data and data["humidity"] != aw.qmc.ambient_humidity:
            aw.qmc.ambient_humidity = data["humidity"]
            dirty = True
        if "roastersize" in data and data["roastersize"] != aw.qmc.roastersize:
            aw.qmc.roastersize = data["roastersize"]
            dirty = True
        if "roasterheating" in data and data["roasterheating"] != aw.qmc.roasterheating:
            aw.qmc.roasterheating = data["roasterheating"]
            dirty = True
        # here the sync record is taken from the profile's data after application
        # of the received server updates
        setSyncRecordHash()
        # note that this sync record does not contain null values not transferred
        # for attributes from the server side; to fix this, we update that sync
        # record with all attributes not in the server data set to null values.
        # This forces those non-null values from the profile to be transmitted
        # to the server on the next sync.
        updated_record = {}
        for key, value in cached_sync_record.items():  # module global, only read here
            if not (key in data):
                # we explicitly add the implicit null value (0 or "") for that key
                if key in roast.sync_record_zero_supressed_attributes and value != 0:
                    updated_record[key] = 0
                elif key in roast.sync_record_empty_string_supressed_attributes and value != "":
                    updated_record[key] = ""
                else:
                    updated_record[key] = value
        # FIX: recompute record+hash into locals and cache both under the
        # semaphore via setSyncRecordHash(); the previous direct assignment
        # wrote the module global cached_sync_record without holding the
        # semaphore and only shadowed cached_sync_record_hash in a local
        # (no `global` declaration for it)
        updated_sync_record, updated_sync_record_hash = roast.getSyncRecord(updated_record)
        setSyncRecordHash(updated_sync_record, updated_sync_record_hash)
    except Exception as e:
        config.logger.error("sync: Exception in applyServerUpdates() %s", e)
    finally:
        if title_changed:
            # we force an updatebackground to ensure proper repainting
            aw.setTitleSignal.emit(aw.qmc.title, True)
        if dirty:
            aw.qmc.fileDirty()
            aw.sendmessageSignal.emit(
                QApplication.translate(
                    "Plus", "Updated data received from artisan.plus", None),
                True, None)
        if "modified_at" in data:
            # we remember the timestamp of the applied server updates
            setApplidedServerUpdatesModifiedAt(data["modified_at"])
def run(self):
    """Worker loop: take upload tasks from the module-level queue and transmit
    them to the server, retrying transient failures.

    Loops forever; honors self.paused via the self.state condition variable.
    The config.queue_* settings control the start delay, per-task delay, the
    number of retries and the retry delay.
    """
    global queue  # module-level upload queue shared with the producers
    config.logger.debug("queue:run()")
    time.sleep(config.queue_start_delay)  # let the app finish starting up first
    self.resume() # unpause self
    item = None   # current task; kept across loop iterations until task_done()
    while True:
        time.sleep(config.queue_task_delay)
        with self.state:
            if self.paused:
                self.state.wait() # block until notified
        config.logger.debug("queue: -> qsize: %s",queue.qsize())
        config.logger.debug("queue: looking for next item to be fetched")
        try:
            if item is None:
                item = queue.get()  # blocks until a task is available
                time.sleep(config.queue_task_delay)
            config.logger.debug("queue: -> worker processing item: %s",item)
            iters = config.queue_retries + 1  # remaining transmission attempts
            while iters > 0:
                config.logger.debug("queue: -> remaining iterations: %s",iters)
                r = None  # HTTP response of the current send attempt (if any)
                try:
                    # we upload only full roast records, or partial updates in case they are under sync (registered in the sync cache)
                    if is_full_roast_record(item["data"]) or ("roast_id" in item["data"] and sync.getSync(item["data"]["roast_id"])):
                        controller.connect(clear_on_failure=False,interactive=False)
                        r = connection.sendData(item["url"],item["data"],item["verb"])
                        r.raise_for_status()  # raises on 4xx/5xx, handled below
                        # successfully transmitted, we add/update the roasts UUID sync-cache
                        iters = 0
                        self.addSyncItem(item)
                        # if current roast was just successfully uploaded, we set the syncRecordHash to the full sync record
                        # to track further edits. Note we take a fresh (full) SyncRecord here as the uploaded record might
                        # contain only changed attributes
                        sr,h = roast.getSyncRecord()
                        if item["data"]["roast_id"] == sr["roast_id"]:
                            sync.setSyncRecordHash(sync_record = sr, h = h)
                    else:
                        # partial sync updates for roasts not registered for syncing are ignored
                        iters = 0
                except ConnectionError as e:
                    try:
                        if controller.is_connected():
                            config.logger.debug("queue: -> connection error, disconnecting: %s",e)
                            # we disconnect
                            controller.disconnect(remove_credentials = False, stop_queue=True)
                    except:
                        pass
                    # we don't change the iter, but retry to connect after a delay in the next iteration
                    time.sleep(config.queue_retry_delay)
                except Exception as e:
                    config.logger.debug("queue: -> task failed: %s",e)
                    if r is not None:
                        config.logger.debug("queue: -> status code %s",r.status_code)
                    else:
                        config.logger.debug("queue: -> no status code")
                    if r is not None and r.status_code == 401:
                        # authentication failed
                        try:
                            if controller.is_connected():
                                config.logger.debug("queue: -> connection error, disconnecting: %s",e)
                                # we disconnect, but keep the queue running to let it automatically reconnect if possible
                                controller.disconnect(remove_credentials = False, stop_queue=False)
                        except:
                            pass
                        iters = iters - 1
                        # we retry to connect after a delay in the next iteration
                        time.sleep(config.queue_retry_delay)
                    elif r is not None and r.status_code == 409:
                        # conflict
                        iters = 0 # we don't retry, but remove the task as it is faulty
                    else:
                        # 500 internal server error, 429 Client Error: Too Many Requests, 404 Client Error: Not Found or others
                        # something went wrong we don't mark this task as done and retry
                        iters = iters - 1
                        time.sleep(config.queue_retry_delay)
            # we call task_done to remove the item from the queue
            queue.task_done()
            item = None
            config.logger.debug("queue: -> task done")
            config.logger.debug("queue: end of run:while paused=%s",self.paused)
        except Exception as e:
            # NOTE(review): broad silent swallow — an unexpected failure (e.g. a
            # malformed item dict) is ignored and the loop continues holding the
            # same item; presumably intentional to keep the worker alive — confirm
            pass