def gcs_test():
    """Exercise the CMA and WCMA aggregators against the GCS model set."""
    print("Testing CMA Aggregator: ")
    cma = aggregator.Aggregator(aggregator_type="cma", debug=True)
    cma.aggregate(MODEL_DIRECTORY_NAMES_GCS, OUTPUT_PATH_CMA_GCS)

    print("Testing WCMA Aggregator: ")
    wcma = aggregator.Aggregator(aggregator_type="wcma", debug=True)
    wcma.aggregate(MODEL_DIRECTORY_NAMES_GCS, OUTPUT_PATH_WCMA_GCS)

    # Aggregate repeatedly with the same instance: tests that sessions and
    # default graphs are reset between runs.
    for run in range(4):
        wcma.aggregate(MODEL_DIRECTORY_NAMES_GCS,
                       "{0}-{1}".format(OUTPUT_PATH_WCMA_GCS, run))
def group(self, attrIdx, count=False, aggrAttrIdx=None, aggrFunc=None):
    """Group rows by the attributes at attrIdx, computing aggregates.

    :param attrIdx: index (or list of indexes) of the grouping attributes.
    :param count: when True, also count rows per group (appended as an
        extra 'cnt' aggregate over a synthetic weight column).
    :param aggrAttrIdx: indexes of the attributes to aggregate.
    :param aggrFunc: aggregation function names, parallel to aggrAttrIdx.
    :returns: (table, idxmap) — the grouped table and a mapping from the
        requested attribute indexes to the new table's columns.
    """
    # TODO: remove attributes of intermediary tables in attribute store
    # Fix: the previous `aggrAttrIdx=[]` / `aggrFunc=[]` mutable defaults
    # were shared across calls and mutated below (append of cntIdx/'cnt'),
    # so counts accumulated between invocations.
    aggrAttrIdx = [] if aggrAttrIdx is None else aggrAttrIdx
    aggrFunc = [] if aggrFunc is None else aggrFunc
    attrIdx, aggrAttrIdx, aggrFunc = util.makelist(attrIdx, aggrAttrIdx,
                                                   aggrFunc)
    # Work on copies so caller-supplied lists are never mutated.
    aggrAttrIdx = list(aggrAttrIdx)
    aggrFunc = list(aggrFunc)
    assert len(aggrAttrIdx) == len(aggrFunc)
    tmptable, idxmap = self.copy()
    aggrAttrIdx = util.mapIdx(aggrAttrIdx, idxmap)
    if count:
        # Synthetic weight column of 1s; 'cnt' aggregates it into a count.
        cntIdx = tmptable.addAttr(gsql.WEIGHT_ATTR_NAME, val=Value(val=1))
        aggrAttrIdx.append(cntIdx)
        aggrFunc.append('cnt')
    # Find values for aggregation
    agg = aggregator.Aggregator(aggrFunc)
    aggCols = [tmptable.getColumn(idx) for idx in aggrAttrIdx]
    # Find groups of rows, and corresponding list of aggregation attributes
    tproj, _ = tmptable.project(attrIdx)
    groups = {}
    for i, row in enumerate(tproj.data):
        key = tuple(row)
        if key not in groups:
            groups[key] = []
        groups[key].append([col[i] for col in aggCols])
        # groups[key] is a list of lists: each inner list is the list of
        # aggregation values corresponding to this row
    # Create final table
    tfinal, _ = tmptable.project(attrIdx + aggrAttrIdx)
    for key in groups:
        aggvals = agg.calc(groups[key])
        newrow = list(key) + aggvals
        tfinal.data.append(newrow)
    idxmap = dict(zip(attrIdx + aggrAttrIdx, tfinal.columns))
    return tfinal, idxmap
def agreement_term_apply_remedy(self, agreement_id, term):
    """
    Method for the application of the remedy clause.

    Marks the term as violated, publishes the remedy to the RCB queue,
    creates the violation resource and link, then blocks (polling every
    15 seconds) until the term is fulfilled again, after which the
    violation resources are removed.
    """
    LOG.info(
        "Enforcing remedy for agreement {} called for term {}.".format(
            agreement_id, term))
    myrulesengine = rulesengine.RulesEngine()
    myrulesengine.update_term(agreement_id, term + ".term.state",
                              "violated")
    remedy = self._slo_terms_metrics[term]['remedy']
    # ToDo: interact with RCBaaS for charging the remedy
    self.__publish_to_rcb_queue(agreement_id, term, '', '',
                                self.device_id, self._violated_metrics,
                                self._slo_terms_metrics[term]['remedy'])
    extras = self.__get_extras(agreement_id)
    violation = self.__create_violation_resource(term, '', '',
                                                 self.device_id,
                                                 self._violated_metrics,
                                                 remedy, extras)
    link = self.__create_violation_link(agreement_id, violation, extras)
    LOG.info('Wait for term to become valid.')
    aggrator = aggregator.Aggregator()
    term_slo_metrics = aggrator.pull_term(
        term, agreement_id, self._slo_terms_metrics[term]['metrics'],
        self._device_id)
    # Fix: replaced Python-2-only dict.iteritems() loops with update(),
    # which is equivalent and works on both Python 2 and 3.
    self._metrics.update(term_slo_metrics)
    while self.agreement_term_violated(agreement_id, term):
        term_slo_metrics = aggrator.pull_term(
            term, agreement_id, self._slo_terms_metrics[term]['metrics'],
            self._device_id)
        self._metrics.update(term_slo_metrics)
        time.sleep(15)
    LOG.info('SLO term {} became valid again.'.format(term))
    myrulesengine.update_term(agreement_id, term + ".term.state",
                              "fulfilled")
    #self.__update_violation_end_time(violation, extras)
    # Deleting the violation resource while is no longer active
    self.__delete_violation(violation, extras)
    self.__delete_violation_link(agreement_id, violation, link, extras)
def __subscribe_term(self, agreement_id, attributes, device_ids,
                     template, term):
    '''
    Parse term's attributes and subscribe the term of an agreement to
    the Aggregator.

    Returns the {'remedy': ..., 'metrics': ...} record that was
    subscribed, or {} when the term carries no metrics.
    '''
    metrics = {}
    term_remedy = attributes[build_attr(term, 'term.remedy')]
    # Fix: materialize keys as a list — on Python 3, dict.keys() is a
    # view with no .remove(), so the original code would crash there.
    attributes_keys = list(attributes.keys())
    attributes_keys.remove(build_attr(term, 'term.remedy'))
    prefix = build_attr(template, term)
    for key in attributes_keys:
        if prefix in key:
            mixed_metrics = key.replace(prefix + '.', '')
            # Only top-level metric attributes (no further dotted parts).
            if len(mixed_metrics.split('.')) == 1:
                limiter_type = attributes[build_attr(
                    template, term, mixed_metrics, 'limiter_type')]
                metrics[mixed_metrics] = {
                    'value': attributes.get(key),
                    'limiter_type': limiter_type
                }
                if limiter_type == 'margin':
                    # Margin limiters additionally carry a limiter_value.
                    metrics[mixed_metrics]['limiter_value'] = attributes[
                        build_attr(template, term, mixed_metrics,
                                   'limiter_value')]
    if metrics:
        # Subscribe term to Aggregator
        aggrator = aggregator.Aggregator()
        aggrator.subscribe_term(term, agreement_id, {
            'remedy': term_remedy,
            'metrics': metrics
        }, device_ids)
        return {'remedy': term_remedy, 'metrics': metrics}
    else:
        return {}
def __metric_subscription(self, device_id, metric, slo_value,
                          limiter_type, margin_value):
    """
    Private method for implementing the thread loop.

    Polls the metric every 15 seconds for as long as the subscription
    exists, notifying the Aggregator whenever the value violates the SLO.
    """
    subscription_key = device_id + "#" + metric
    while subscription_key in DummyCollector._subscriptions:
        LOG.debug('Checking {}#{}'.format(device_id, metric))
        current_value = self.pull_metric(device_id, metric)
        violated = self.metric_violated(metric, slo_value, current_value,
                                        limiter_type, margin_value)
        if violated:
            aggregator.Aggregator().notification_event(
                device_id, metric, current_value)
        time.sleep(15)
def main(args):
    """Load the YAML config, build the Aggregator, and emit card groupings.

    :param args: parsed CLI namespace with config_path, skip_downloads
        and update_throttled_entries attributes.
    """
    logging.basicConfig(level=logging.DEBUG)
    with open(args.config_path, 'r') as fh:
        # Fix: plain yaml.load() without a Loader is unsafe on untrusted
        # input and raises TypeError on PyYAML >= 6; safe_load accepts the
        # stream directly.
        config = yaml.safe_load(fh)
    ag = aggregator.Aggregator(config, args.skip_downloads,
                               args.update_throttled_entries)
    # (A disabled triple-quoted debug block that printed per-card counts
    # via print_examples(ag) was removed here — it was a no-op string.)
    all_groupings = groupings.create_groupings(ag.grouping_specs,
                                               ag.num_other_cubes)
    groupings.GroupingProcessor(ag.cards, config['output_dir'],
                                all_groupings, True)
def start_engine(self, refresh_period):
    """
    Method for starting the Rules Engine. It gets the valid agreements
    from the registry and triggers the parsing. It takes as input the
    time interval of the loop.

    :param refresh_period: seconds to sleep between iterations; a value
        of 0 runs a single iteration and returns.
    """
    LOG.info(">>>>>>>>>>>>>> OCCI SLAaaS Rules Engine started! "
             "<<<<<<<<<<<<<<<<<")
    loop_status = True
    while loop_status:
        valid_agreements = self.__get_valid_agreements()
        agreement_keys = self.__parse_valid_agreements(valid_agreements)
        # REMOVE OLD POLICIES THAT HAVE EXPIRED FROM CACHE AND FROM DB
        expired_policies = []
        if len(self.active_policies.keys()) > len(agreement_keys):
            # Cached policies not among the currently valid agreements
            # are considered expired.
            expired_policies = list(
                set(self.active_policies.keys()) - set(agreement_keys))
        for key in expired_policies:
            # Check if agreement is under reasoning
            # Do not remove agreement until the reasoning is complete.
            if key not in RulesEngine._agreements_under_reasoning:
                LOG.info("Removing Agreement and policy for "
                         "Agreement ID: " + key)
                if key in RulesEngine._registry.resources.keys():
                    # Get Agreement Entity
                    agreement = RulesEngine._registry.resources[key]
                    # agreement = self.registry.get_resource(key, None)
                    # Change Terms state to "undefined"
                    terms = self.__get_slo_terms(agreement.attributes)
                    for term in terms:
                        self.update_term(key, term + ".term.state",
                                         "undefined")
                        # Unsubscribe every term
                        metricsinfos = DB.find({'agreement_id': key}, {
                            '_id': 0,
                            'terms': 1
                        })
                        mtrcs = metricsinfos[0]['terms'][term]
                        aggrator = aggregator.Aggregator()
                        if len(self.subscribed_devices[key]) > 0:
                            device_ids = self.subscribed_devices[key]
                            aggrator.unsubscribe_term(
                                term, key, mtrcs, device_ids)
                    # NOTE(review): nesting of the DB cleanup relative to
                    # the registry check is reconstructed from a flattened
                    # source — confirm against the original layout.
                    DB.remove({'agreement_id': key})
                del self.active_policies[key]
        if self.active_policies.keys():
            LOG.debug('Active agreements and policies are:')
            for key in self.active_policies.keys():
                LOG.debug(key)
        if refresh_period != 0:
            time.sleep(refresh_period)
        else:
            # A refresh period of 0 means "run once".
            loop_status = False
def __parse_valid_agreements(self, valid_agreements):
    """
    Method for parsing the active agreements and triggering the policy
    generation and Aggregator subscription.

    :param valid_agreements: iterable of agreement entities to process.
    :returns: list of agreement identifiers that were processed.
    """
    agreement_keys = []
    for agreement in valid_agreements:
        links = agreement.links
        agreement_id = agreement.identifier
        if len(links) > 0:
            # parse links
            rtrn_values = self.__get_devices(links, valid_agreements)
            device_ids = rtrn_values['devices']
            linked_agreements = rtrn_values['linked_agreements']
            skip_agreement = rtrn_values['skip_agreement_flag']
            # break loop if agreement link towards an agreement is
            # invalid.
            if skip_agreement:
                continue
            agreement_keys.append(agreement_id)
            if agreement_id not in self.active_policies.keys():
                LOG.info("New valid Agreement found: " + agreement_id)
                self.__subscribe_agreement_terms(agreement, device_ids,
                                                 linked_agreements)
            else:
                LOG.debug("Agreement %s already exists in active list."
                          % agreement_id)
                # check if resources/devices have changed
                current_links = \
                    self.active_agreements[agreement_id].links
                current_devices = self.__get_devices(
                    current_links, valid_agreements)
                removed_devices = []
                # Fix: iterate over a snapshot — the original iterated
                # the same list it removed from, which skips elements.
                for device in list(self.subscribed_devices[agreement_id]):
                    if device not in current_devices['devices']:
                        LOG.info(
                            'Device {} removed from agreement {}'.format(
                                device, agreement_id))
                        # unsubscribe device
                        removed_devices.append(device)
                        self.subscribed_devices[agreement_id]\
                            .remove(device)
                if len(removed_devices) > 0:
                    terms = self.__get_slo_terms(agreement.attributes)
                    policy_record = DB.find(
                        {'agreement_id': agreement_id})
                    for term in terms:
                        # Unsubscribe every term
                        mtrcs = policy_record[0]['terms'][term]
                        aggrator = aggregator.Aggregator()
                        aggrator.unsubscribe_term(term, agreement_id,
                                                  mtrcs, removed_devices)
                    temp = policy_record[0]
                    for device in removed_devices:
                        temp['devices'].remove(device)
                    DB.update({'agreement_id': agreement_id}, temp,
                              upsert=True)
                new_devices = []
                for device in current_devices['devices']:
                    if device not in \
                            self.subscribed_devices[agreement_id]:
                        LOG.info('New device {} for '
                                 'agreement {}'.format(
                                     device, agreement_id))
                        # create the new devices list used later
                        new_devices.append(device)
                        self.subscribed_devices[agreement_id]\
                            .append(device)
                if len(new_devices) > 0:
                    self.__subscribe_agreement_terms(
                        agreement, new_devices, linked_agreements)
        else:
            LOG.info('Valid agreement {} does not have linked resources.'.
                     format(agreement_id))
    return agreement_keys
def __init__(self):
    # Per-category aggregation counters — presumably tallying scraped
    # images, captions and deltas; confirm against Aggregator's contract.
    self.imgCounter = aggregator.Aggregator('image')
    self.capCounter = aggregator.Aggregator('captions')
    self.deltaCounter = aggregator.Aggregator('delta')
    self.failedPages = set()  # Pages to retry
def ireland_headlines():
    """Render the RSS feed template with Ireland headline links."""
    feed_links = aggregator.Aggregator().get_links("ireland")
    return render_template('rss_feed.html', content=feed_links)
def background_aggregator():
    """Render the RSS feed template with world headline links."""
    world_links = aggregator.Aggregator().get_links("world")
    return render_template('rss_feed.html', content=world_links)