def __init__(self, app):
    """Initialise the master: app handle, exclusive config DB, child
    bookkeeping maps, logger, and POSIX signal handlers."""
    self.app = app
    self.conf_db = get_conf_db(app, exclusive=True)
    # Bookkeeping for spawned worker processes and their subscriptions.
    self.proc = {}
    self.subs = {}
    self.log = get_worker_log('master')
    # Route shutdown and child-exit signals to the master's handlers.
    for signum, handler in (
        (signal.SIGTERM, self.termination_handler),
        (signal.SIGINT, self.termination_handler),
        (signal.SIGCHLD, self.child_handler),
    ):
        signal.signal(signum, handler)
def actual_worker(analytics_name, sub, app):
    """Long-running worker loop for one analytics definition.

    Loads the analytics config named *analytics_name* from the config DB,
    then consumes pub/sub messages from *sub* (presumably a redis PubSub
    object -- its `listen()` yields dicts with "type"/"channel"/"data";
    TODO confirm) and applies each transaction to the data DB.

    NOTE(review): this file was recovered from a whitespace-mangled source;
    the nesting below is a reconstruction and should be checked against the
    original (see inline notes on the ambiguous spots).
    """
    log = get_worker_log(analytics_name)
    try:
        conf_db = get_conf_db(app, exclusive=True)
        defn = conf_db.get("Analytics:ByName:%s" % analytics_name)
        analytics = Analytics(defn)
        # Per-analytics data DB override; falls back to the app default.
        if analytics["data_db"]:
            data_db = get_data_db(analytics["data_db"], app=app)
        else:
            data_db = get_data_db(app=app)
        measures = set(analytics["measures"])
        query_dimensions = set(analytics["query_dimensions"])
        slice_dimensions = set(analytics["slice_dimensions"])
        mapping = analytics["mapping"]
        for content in sub.listen():
            # Ignore subscribe/unsubscribe confirmations etc.
            if content["type"] == "message":
                try:
                    data = json.loads(content["data"])
                    transaction = data["payload"]
                    tr_type = data["tr_type"]
                    # "slice not query" / "query not slice" dimension sets.
                    snoq_dimensions = slice_dimensions - query_dimensions
                    qnos_dimensions = query_dimensions - slice_dimensions

                    def build_key_str(dimensions):
                        # Build a deterministic key from sorted dimension
                        # names interleaved with their parsed values taken
                        # from the current transaction (closure).
                        key = []
                        for dimension in sorted(list(dimensions)):
                            d_type = mapping[dimension]["type"]
                            function = DIMENSION_PARSERS_MAP[d_type]
                            field = mapping[dimension]["field"]
                            key.append(dimension)
                            key.append(function(transaction[field]))
                        return construct_key(key)

                    query_key_str = build_key_str(query_dimensions)
                    slice_key_str = build_key_str(slice_dimensions)
                    snoq_key_str = build_key_str(snoq_dimensions)
                    # Updating Reference count for qnos dimensions
                    for dimension in sorted(list(qnos_dimensions)):
                        field = mapping[dimension]["field"]
                        ref_count_key = construct_key('RefCount', slice_key_str, dimension)
                        if tr_type == "insert":
                            value = data_db.hincrby(ref_count_key, transaction[field], 1)
                        elif tr_type == "delete":
                            value = data_db.hincrby(ref_count_key, transaction[field], -1)
                            # NOTE(review): nesting reconstructed -- the
                            # zero-count cleanup is assumed to apply only to
                            # deletes; confirm against the original layout.
                            # Also: `value` is unbound if tr_type is neither
                            # "insert" nor "delete" (would raise NameError
                            # if the cleanup were outside this branch).
                            if value == 0:
                                data_db.hdel(ref_count_key, transaction[field])
                    # Each measure gets added one at a time
                    for m in measures:
                        # Only measures sourced from this message's channel.
                        if mapping[m]["resource"] != content["channel"]:
                            continue
                        key_str = construct_key(m, query_key_str, snoq_key_str)
                        function = MEASURING_FUNCTIONS_MAP[mapping[m]["type"]]
                        field = mapping[m].get("field", None)
                        conditions = mapping[m].get("conditions", [])
                        kwargs = {
                            "key_str": key_str,
                        }
                        # for/else gate: the else-branch runs only when no
                        # condition broke out, i.e. all conditions passed.
                        for condition in conditions:
                            condition_field = condition["field"]
                            equals = condition.get("equals", None)
                            not_equals = condition.get("not_equals", None)
                            if equals is not None:
                                if transaction[condition_field] != equals:
                                    break  # Failed equals condition
                            elif not_equals is not None:
                                if transaction[condition_field] == not_equals:
                                    break  # Failed not equals condition
                        else:
                            # All conditions passed
                            if field is not None:
                                kwargs["field_val"] = transaction[field]
                            function(data_db, tr_type, **kwargs)
                except Exception, e:
                    # Per-message failure: log and keep consuming.
                    # NOTE(review): `data` is unbound here if json.loads
                    # itself raised -- the json.dumps(data) debug line
                    # would then raise inside the handler.
                    log.error("Error while consuming transaction.\n%s" % traceback.format_exc())
                    log.debug("Resource was: %s" % content["channel"])
                    log.debug("Data was: %s" % json.dumps(data))
    except Exception, e:
        # Fatal worker failure: log and park the process.
        log.critical("Worker crashed.\nError was: %s" % str(e))
        log.debug("Traceback: %s" % traceback.format_exc())
        # NOTE(review): placement reconstructed -- the worker appears to
        # sleep awaiting a signal after a crash rather than exiting;
        # confirm this is intentional (master presumably reaps via SIGCHLD
        # only on exit).
        signal.pause()