def correlation(correlation_id):
    """Return the JSON representation of the requested correlation.

    Aborts with 404 when *correlation_id* is not a valid ObjectId or no
    matching correlation exists.  The response is guarded by an OAuth
    realm check for each root path the correlation covers.
    """
    try:
        correlation = Correlation.find_one(bson.ObjectId(correlation_id))
        if not correlation:
            abort(404)
        # Project the stored document down to the publicly exposed fields,
        # preserving this fixed key order in the JSON output.
        correlation = OrderedDict(
            [(key, correlation[key]) for key in
             ['interval', 'start', 'end', 'correlation', 'paths', 'group_by']])
    except bson.errors.InvalidId:
        abort(404)

    # Wrap the return function in the appropriate realm checks — one
    # require_oauth decoration per root path covered by the correlation.
    protected_func = (lambda x: x)
    for path in correlation['paths']:
        # BUGFIX: this previously assigned to `protected_funct` (typo), so
        # the OAuth wrappers were discarded and the endpoint ran unprotected.
        protected_func = PROVIDER.require_oauth(
            realm=path.split("/")[0])(protected_func)
    return protected_func(json.dumps(
        correlation, cls=correlations.jsonencoder.JSONEncoder))
def retrieve_cache(self, correlation_key):
    """Return cached ``Correlation`` objects for *correlation_key*.

    Results are filtered to the instance's optional ``self.start`` /
    ``self.end`` bounds and returned sorted by ascending start time.
    """
    params = {'key': correlation_key}
    if self.start:
        # BUGFIX: previously referenced the bare name `start`, which is
        # undefined in this scope — NameError whenever self.start is truthy.
        params['start'] = {'$gte': self.start}
    if self.end:
        params['end'] = {'$lte': self.end}
    cache = Correlation.get_collection().find(params).sort(
        'start', pymongo.ASCENDING)
    return [Correlation(**row) for row in cache]
def retrieve_cache(self, correlation_key):
    """Return cached ``Correlation`` objects for *correlation_key*.

    Results are filtered to the instance's optional ``self.start`` /
    ``self.end`` bounds and returned sorted by ascending start time.
    """
    params = {'key': correlation_key}
    if self.start:
        # BUGFIX: previously referenced the bare name `start`, which is
        # undefined in this scope — NameError whenever self.start is truthy.
        params['start'] = {'$gte': self.start}
    if self.end:
        params['end'] = {'$lte': self.end}
    cache = Correlation.get_collection().find(params).sort(
        'start', pymongo.ASCENDING)
    return [Correlation(**row) for row in cache]
def find_correlations(self, matrix, timestamps):
    """Scan *matrix* with a sliding window and return found Correlations.

    matrix: list of parallel data streams (equal-length sequences), one
        per path; timestamps: per-column JSON-encoded timestamps aligned
        with the streams' datapoints.
    Returns a list of Correlation objects, one per contiguous run of
    columns whose correlation stayed above the same gatekeeper threshold.
    NOTE(review): assumes all rows of matrix share len(matrix[0]) — only
    row 0's length is checked; confirm with callers.
    """
    correlations = []
    correlation = 0
    activated_threshold = None   # threshold tripped by the current window
    current_threshold = None     # threshold of the run being extended
    window_start = 0
    window_end = self.window_size
    matrix_row_len = len(matrix[0])
    # Make sure there are enough datapoints in our sliding
    # window to find a correlation.
    if matrix_row_len >= self.window_size:
        activated_threshold = None
        while window_end < matrix_row_len:
            try:
                activated_threshold = None
                # Keep the previous window's value: when a run ends we save
                # the correlation as it was *before* the breaking datapoint.
                last_correlation = correlation
                correlation = utils.correlate(
                    [stream[window_start:window_end] for stream in matrix])
                # First gatekeeper that fires wins; the rest are skipped.
                for gatekeeper in self.gatekeepers:
                    if not activated_threshold:
                        activated_threshold = gatekeeper(correlation)
                    else:
                        break
            except Exception as err:
                # Best-effort: log and fall through with the stale state
                # rather than aborting the whole scan.
                logging.error(("Error while finding correlations " +
                               "for user %s." % self.user['_id']),
                              exc_info=err)
            # Were we trying to accumulate datapoints when the
            # correlation went away or changed? Then save the buff.
            # We want to save the start of the last interval the
            # buff was applicable to as that buff's end, so we grab
            # the date *before* the date of the last datapoint, as
            # the last datapoint caused the correlation to end.
            if activated_threshold != current_threshold:
                # If we were extending out a correlation, then it must have
                # just dropped below the threshold. Save it and start the
                # window over.
                if current_threshold:
                    correlations.append(
                        Correlation(user_id=self.user['_id'],
                                    threshold=current_threshold,
                                    paths=self.paths,
                                    group_by=self.group_by,
                                    sort=self.sort,
                                    start=json.loads(
                                        timestamps[last_window_start]),
                                    end=json.loads(timestamps[window_end - 1]),
                                    correlation=last_correlation,
                                    key=self.generate_correlation_key()))
                    # Restart the window just past the saved run.
                    window_start = window_end
                    window_end = window_start + self.window_size
                else:
                    # A new run begins here; remember where it started.
                    last_window_start = window_start
                # Either way, update current_threshold.
                current_threshold = activated_threshold
            # Slide the window forward
            elif current_threshold == activated_threshold:
                window_start += 1
                window_end += 1
    # "if current_threshold," then we were extending out a correlation
    # when we reached the end of the matrix. Save the correlation.
    if current_threshold:
        correlations.append(
            Correlation(user_id=self.user['_id'],
                        threshold=current_threshold,
                        paths=self.paths,
                        group_by=self.group_by,
                        sort=self.sort,
                        start=json.loads(timestamps[window_start]),
                        end=json.loads(timestamps[window_end - 1]),
                        correlation=last_correlation,
                        key=self.generate_correlation_key()))
    return correlations