def work(self, trace_set_paths, conf, keep_trace_sets=False, keep_scores=True, keep_ai=False):
    """
    Actions to be performed by workers on the trace sets given in trace_set_paths.

    :param trace_set_paths: list of paths to trace sets to process (resolved to absolute paths in place)
    :param conf: analysis configuration object passed through to process_trace_set_paths
    :param keep_trace_sets: if False, processed traces and the reference signal are stripped from the result
    :param keep_scores: if False, attack scores (correlations, distances) are stripped from the result
    :param keep_ai: if False, the AI object is stripped from the result
    :return: an EMResult with the analysis results, or None if trace_set_paths is not a list
    """
    resolve_paths(trace_set_paths)  # Get absolute paths

    # Guard clause: a worker can only operate on a list of paths
    if not isinstance(trace_set_paths, list):
        logger.error("Must provide a list of trace set paths to worker!")
        return None

    result = EMResult(task_id=self.request.id)  # Keep state and results

    # Process trace set paths and fill in results of analysis
    process_trace_set_paths(result, trace_set_paths, conf, request_id=self.request.id, keep_trace_sets=keep_trace_sets)

    if not keep_trace_sets:  # Do not return processed traces
        result.trace_sets = None
        result.reference_signal = None
    if not keep_scores:  # Do not return attack scores
        result.correlations = None
        result.distances = None
    if not keep_ai:
        result.ai = None  # Do not return AI object

    return result
def merge(self, to_merge, conf):
    """
    Merge a list of EMResults from multiple workers into a single EMResult.

    The merge strategy depends on which operation the configuration contains:
    correlation lists for 'attack'/'memattack'/'spattack', distance lists for
    'dattack', summed probabilities for 'pattack', and averaged mean traces
    for 'keyplot'. When conf.remote is set, the source tasks are forgotten
    afterwards to clean up the result backend.

    :param to_merge: list of EMResult objects (a single EMResult is also accepted)
    :param conf: analysis configuration object
    :return: merged EMResult, or None if there is nothing to merge
    """
    # Accept a single result as well as a list of results
    if isinstance(to_merge, EMResult):
        to_merge = [to_merge]

    # Is it useful to merge?
    if not to_merge:
        return None

    result = EMResult(task_id=self.request.id)

    # If we are attacking, merge the correlations
    # TODO this can be cleaned up
    if conf_has_op(conf, 'attack') or conf_has_op(conf, 'memattack') or conf_has_op(conf, 'spattack'):
        # Get size of correlations (TODO fixme: init the same way as in attack)
        shape = to_merge[0].correlations._n.shape

        # Init result and fold in every partial correlation list
        result.correlations = CorrelationList(shape)
        for m in to_merge:
            result.correlations.merge(m.correlations)
    elif conf_has_op(conf, 'dattack'):  # TODO just check for presence of to_merge.distances instead of doing this
        shape = to_merge[0].distances._n.shape
        result.distances = DistanceList(shape)
        for m in to_merge:
            result.distances.merge(m.distances)
    elif conf_has_op(conf, 'pattack'):
        # Probabilities are merged by elementwise summation
        shape = to_merge[0].probabilities.shape
        result.probabilities = np.zeros(shape)
        for m in to_merge:
            result.probabilities += m.probabilities
    elif conf_has_op(conf, 'keyplot'):
        # Collect all mean traces per subkey value, then average them
        result.means = {}
        tmp = defaultdict(list)
        for m in to_merge:
            for key, mean_traces in m.means.items():
                tmp[key].extend(mean_traces)

        for key, mean_traces in tmp.items():
            all_traces = np.array(mean_traces)
            print("Merging %d traces for subkey value %s" % (all_traces.shape[0], key))
            result.means[key] = np.mean(all_traces, axis=0)

    # Clean up tasks in the remote result backend
    if conf.remote:
        for m in to_merge:
            logger.warning("Deleting %s" % m.task_id)
            app.AsyncResult(m.task_id).forget()

    return result