def test_apply_delta():
    """Test delta application.

    Some functionality is not used at the Scheduler, so is not covered
    by integration testing.
    """
    w_id = 'workflow_id'
    all_deltas = DELTAS_MAP[ALL_DELTAS]()
    all_deltas.workflow.time = time()
    updated = all_deltas.workflow.updated
    updated.id = 'workflow_id'
    updated.stamp = f'{w_id}@{all_deltas.workflow.time}'
    all_deltas.workflow.pruned = w_id

    store = deepcopy(DATA_TEMPLATE)
    # Sanity-check the pristine template before applying anything.
    assert store[WORKFLOW].id != w_id
    assert store[WORKFLOW].pruned is False

    for field, sub_delta in all_deltas.ListFields():
        apply_delta(field.name, sub_delta, store)

    # The update and prune deltas should now both be reflected.
    assert store[WORKFLOW].id == w_id
    assert store[WORKFLOW].pruned is True
def _reconcile_update(self, topic, delta, w_id):
    """Reconcile local with workflow data-store.

    Verify data-store is in sync by topic/element-type and on failure
    request entire set of respective data elements.

    Args:
        topic (str): topic of published data.
        delta (object): Published protobuf message data container.
        w_id (str): Workflow external ID.

    """
    # run_coroutine_threadsafe returns a concurrent.futures.Future whose
    # result() raises concurrent.futures.TimeoutError; that class is only
    # unified with asyncio.TimeoutError from Python 3.11, so catch both.
    from concurrent.futures import TimeoutError as FutureTimeoutError

    if topic == WORKFLOW:
        # Workflow-level fields carry no checksum to verify.
        return
    if topic == EDGES:
        s_att = 'id'
    else:
        s_att = 'stamp'
    local_checksum = generate_checksum(
        [getattr(e, s_att) for e in self.data[w_id][topic].values()])
    if local_checksum != delta.checksum:
        self.log.debug(
            f'Out of sync with {topic} of {w_id}... Reconciling.')
        try:
            # use threadsafe as client socket is in main loop thread.
            future = asyncio.run_coroutine_threadsafe(
                workflow_request(
                    self.workflows_mgr.workflows[w_id]['req_client'],
                    'pb_data_elements',
                    args={'element_type': topic}
                ),
                self.loop
            )
            _, new_delta_msg = future.result(self.RECONCILE_TIMEOUT)
            new_delta = DELTAS_MAP[topic]()
            new_delta.ParseFromString(new_delta_msg)
            # Replace the whole topic store with the freshly fetched set.
            self._clear_data_field(w_id, topic)
            apply_delta(topic, new_delta, self.data[w_id])
            self.data[w_id]['delta_times'][topic] = new_delta.time
        except (asyncio.TimeoutError, FutureTimeoutError):
            # BUG FIX: the two adjacent f-strings previously joined with
            # no space ("...{topic}took too long..."); and the
            # concurrent.futures timeout was not caught pre-3.11, so the
            # future was never cancelled.
            self.log.debug(
                f'The reconcile update coroutine {w_id} {topic} '
                f'took too long, cancelling the subscription/sync.'
            )
            future.cancel()
        except Exception as exc:
            self.log.exception(exc)
def apply_all_delta(self, w_id, delta):
    """Apply the AllDeltas delta."""
    # Per-topic timestamps of the last applied delta (shared mapping,
    # so writes below update the store in place).
    delta_times = self.data[w_id]['delta_times']
    for field, sub_delta in delta.ListFields():
        name = field.name
        new_time = getattr(sub_delta, 'time', 0.0)
        if sub_delta.reloaded:
            # A workflow reload invalidates this topic's data: wipe it
            # before delta application (protobuf message vs plain dict
            # need different clear calls).
            if name == WORKFLOW:
                self.data[w_id][name].Clear()
            else:
                self.data[w_id][name].clear()
            delta_times[name] = 0.0
        # Apply the delta if newer than the previously applied.
        if new_time >= delta_times[name]:
            apply_delta(name, sub_delta, self.data[w_id])
            delta_times[name] = new_time
            self.reconcile_update(name, sub_delta, w_id)
def _apply_all_delta(self, w_id, delta):
    """Apply the AllDeltas delta."""
    w_data = self.data[w_id]
    for field, sub_delta in delta.ListFields():
        name = field.name
        new_time = getattr(sub_delta, 'time', 0.0)
        if sub_delta.reloaded:
            # A workflow reload invalidates this topic's data: wipe it
            # and reset its applied-delta timestamp before application.
            self._clear_data_field(w_id, name)
            w_data['delta_times'][name] = 0.0
        # hard to catch errors in a threaded async app, so use try-except.
        try:
            # Apply the delta if newer than the previously applied.
            if new_time >= w_data['delta_times'][name]:
                apply_delta(name, sub_delta, w_data)
                w_data['delta_times'][name] = new_time
                if not sub_delta.reloaded:
                    # Freshly reloaded data needs no checksum reconcile.
                    self._reconcile_update(name, sub_delta, w_id)
        except Exception as exc:
            self.log.exception(exc)