class ParsingConfigurator(object):

    def __init__(self, parsingconf, aggregation_config=None):
        self.logger = CommonLogger()
        self.logger.debug("Read combaine config")
        self.metahost = None
        try:
            _combaine = parse_common_cfg("combaine")    # e.g. /etc/combaine/combaine.<ext>
            _parsing = parse_parsing_cfg(parsingconf)   # e.g. /etc/combaine/parsing/<parsingconf>.<ext>
            if aggregation_config is None:
                _aggregations = [(parse_agg_cfg(agg_name), agg_name)
                                 for agg_name in _parsing["agg_configs"]]
            else:
                _aggregations = [(parse_agg_cfg(aggregation_config), aggregation_config)]

            self.metahost = _aggregations[0][0].get('metahost') or _parsing.get('metahost')

            self.ds = _combaine["cloud_config"]["DistributedStorage"]
            self.df = _combaine["cloud_config"]["DataFetcher"]
            self.db = _combaine["cloud_config"]["LocalDatabase"]

            _ds = _parsing.get("DistributedStorage")
            _df = _parsing.get("DataFetcher")
            _db = _parsing.get("LocalDatabase")

            self.hosts_fetcher_http_hand = _combaine['Combainer'].get('HTTP_HAND')
            self.parser = _parsing.get("parser")

            # Sections from the parsing config override the global cloud_config sections
            if _ds is not None:
                self.logger.debug("Update DistributedStorage from parsing config")
                self.ds.update(_ds)
            if _df is not None:
                self.logger.debug("Update DataFetcher from parsing config")
                self.df.update(_df)
            if _db is not None:
                self.logger.debug("Update LocalDatabase from parsing config")
                self.db.update(_db)

            # Map aggregator type names from the config to aggregator class names
            agg_bind = {
                "summa": "AverageAggregator",
                "quant": "QuantAggregator",
                "average": "AverageAggregator",
                "uniq": "UniqAggregator",
            }

            self.aggregators = []
            self.resulthadlers = []
            for aggregator, _agg_name in _aggregations:
                for name, dic in aggregator["data"].iteritems():
                    tmp = dict()
                    tmp["name"] = _agg_name + "@" + name
                    tmp["query"] = dic.get("query", "EMPTY")
                    tmp["type"] = dic["type"]
                    tmp.update(dic)
                    if dic["type"] == "quant":
                        tmp["values"] = dic["values"]
                    # DIRTY HOOK: unknown types map to None and are dropped below
                    tmp["type"] = agg_bind.get(dic["type"])
                    if tmp["type"] is not None:
                        self.aggregators.append(tmp)

                if "ResultHandlers" in aggregator:
                    for name, dic in aggregator["ResultHandlers"].iteritems():
                        dic['type'] = name
                        dic['parsing_conf'] = _parsing
                        self.resulthadlers.append(dic)
        except Exception:
            self.logger.exception("Error while reading config")
            raise
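
# A minimal construction sketch, not from the source: "some_parsing" stands in
# for a real parsing config name resolved by parse_parsing_cfg(); the attribute
# names are taken from __init__ above.
configurator = ParsingConfigurator("some_parsing")
print configurator.metahost
print [agg["name"] for agg in configurator.aggregators]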
class _Socket(AbstractFetcher):

    def __init__(self, **config):
        self.log = CommonLogger()
        try:
            self.port = config.get('port', 89)
        except Exception as err:
            self.log.error("Error in init Socket getter: %s" % str(err))
            raise
class Tailer(AbstractFetcher):

    def __init__(self, **config):
        self.log = CommonLogger()
        self.filter = lambda x: True
        try:
            self.port = config.get('port', 89)
        except Exception as err:
            self.log.error("Error in init Tailer getter: %s" % str(err))
            raise
class _HTTP(AbstractFetcher):

    def __init__(self, **config):
        self.log = CommonLogger()
        try:
            self.http_get_url = config.get('url', '')
            self.port = config.get('port', 3132)
        except Exception as err:
            self.log.error("Error in init HTTP Fetcher: %s" % str(err))
            raise
class Timetail(AbstractFetcher):

    def __init__(self, **config):
        self.log = CommonLogger()
        try:
            url = config['timetail_url']
            self.port = config.get('timetail_port', 3132)
            log_name = config['logname']
            # Base GET URL; the timestamp is appended by the caller after "&time="
            self.http_get_url = "%(url)s%(log)s&time=" % {'url': url, 'log': log_name}
        except Exception:
            self.log.exception("Error in init Timetail getter")
            raise
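
# Construction sketch for the fetchers above (not from the source); the URL,
# port and log name are placeholders. Timetail concatenates timetail_url,
# logname and a trailing "&time=" suffix into its GET URL.
fetcher = Timetail(timetail_url="http://host.example.net:3132/timetail?log=",
                   logname="nginx/access.log")
# fetcher.http_get_url == "http://host.example.net:3132/timetail?log=nginx/access.log&time="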
def _combaine_config(path, name):
    path = path.rstrip('/')
    log = CommonLogger()
    # Collect existing candidates: <path>/<name>.<ext> for each valid extension
    candidates = [cfg for cfg in ("%s/%s.%s" % (path, name, ext)
                                  for ext in constants.VALID_CONFIG_EXTENSIONS)
                  if os.path.isfile(cfg)]
    if len(candidates) == 0:
        raise MissingConfigError("%s/%s" % (path, name))
    elif len(candidates) > 1:
        log.warning("More than one config with name %s. Using %s" % (name, candidates[0]))
    with open(candidates[0]) as f:
        _data = f.read()
    # Try YAML first, then JSON
    data = _handle_yaml(_data) or _handle_json(_data)
    if data is None:
        raise FormatError("%s/%s" % (path, name))
    return data
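
# Usage sketch (not from the source): assuming /etc/combaine/combaine.yaml or
# combaine.json exists and VALID_CONFIG_EXTENSIONS covers those extensions,
# this resolves and parses the main combaine config.
combaine_cfg = _combaine_config("/etc/combaine", "combaine")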
def aggregate_group(io):
    """Cloud wrapper."""
    message = ""
    try:
        message = io.read()
        group_name, config_name, agg_config_name, prev_time, cur_time = message.split(';')
        prev_time = int(prev_time)
        cur_time = int(cur_time)
    except Exception as err:
        io.write("failed;Wrong message format:%s;%s;%s" % (message, socket.gethostname(), str(err)))
        logger = CommonLogger()
        logger.error("Wrong message %s" % message)
        return
    else:
        try:
            res = Main(group_name, config_name, agg_config_name, prev_time, cur_time)
        except Exception as err:
            res = 'failed;Error: %s' % err
        finally:
            io.write(';'.join((res, message, socket.gethostname())))
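
# A sketch of the semicolon-separated message aggregate_group() expects (the
# group and config names and the timestamps are placeholders):
#   "<group>;<parsing_config>;<aggregation_config>;<prev_time>;<cur_time>"
example = "mygroup;my_parsing;my_aggregation;1371236940;1371237000"
group_name, config_name, agg_config_name, prev_time, cur_time = example.split(';')
prev_time, cur_time = int(prev_time), int(cur_time)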
class Agave(AbstractResultHandler):

    def __init__(self, **config):
        self.logger = CommonLogger()
        self.graph_name = config.get("graph_name")
        self.graph_template = config.get("graph_template")
        self.fields = config.get("Fields")
        self.template_dict = {"template": self.graph_template,
                              "title": self.graph_name,
                              "graphname": self.graph_name}
        self.logger.debug(self.template_dict)

    def __makeUrls(self, frmt_dict):
        self.template_dict.update(frmt_dict)
        url = ("/api/update/%(group)s/%(graphname)s?values=%(values)s"
               "&ts=%(time)i&template=%(template)s&title=%(title)s" % self.template_dict)
        self.__send_point(url)

    def __send_point(self, url):
        for agv_host in agave_hosts:
            conn = httplib.HTTPConnection(agv_host, timeout=1)
            headers = dict(agave_headers)  # copy so the shared header dict is not mutated
            headers["Host"] = agv_host + ":80"
            try:
                conn.request("GET", url, None, headers)
                _r = conn.getresponse()
                self.logger.info("%s %s %s %s %s" % (agv_host, _r.status, _r.reason,
                                                     _r.read().strip("\r\n"), url))
            except Exception:
                self.logger.exception("Unable to connect to agave host %s" % agv_host)
            else:
                _r.close()

    def send(self, data):
        for_send = collections.defaultdict(list)
        for aggres in data:
            for sbg_name, val in aggres.values:
                # Prefix subgroup names with the group name unless they coincide
                _sbg = sbg_name if sbg_name == aggres.groupname else "-".join((aggres.groupname, sbg_name))
                if isinstance(val, list):
                    # Quantile: pair per-quantile field names with their values
                    pairs = itertools.izip(self.fields[aggres.aggname], val)
                    _value = "+".join(("%s:%s" % pair for pair in pairs))
                else:
                    # Simple single value
                    _value = "%s:%s" % (aggres.aggname, val)
                for_send[_sbg].append(_value)
            time = aggres.time
        for name, values in for_send.iteritems():
            frmt_dict = {"group": name, "values": "+".join(values), "time": time}
            self.__makeUrls(frmt_dict)
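
# Construction sketch (not from the source): the graph name, template and the
# Fields mapping are placeholders. send() expects aggregation results exposing
# .groupname, .aggname, .values and .time, as used above.
handler = Agave(graph_name="http_timings",
                graph_template="timings",
                Fields={"my_aggregation@request_time": ["q50", "q95", "q99"]})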
class Aggregator(AbstractResultHandler):

    def __init__(self, **config):
        self.logger = CommonLogger()
        try:
            self.expression = make_eval_string_safe(config["expression"])
            self.logger.info("Evaluation expression: %s" % self.expression)
        except UnsafelyCodeError as err:
            self.logger.error(str(err))
            raise
        except KeyError as err:
            self.logger.error("MathExp. Missing config parameter: %s" % str(err))
            raise
        # Names of the aggregates referenced as ${name} placeholders in the expression
        self._aggs = pattern.findall(self.expression)

    def send(self, data):
        interest_results = [_ for _ in data if _.aggname in self._aggs]
        for subgroup_name, subgroup_data in make_template_placeholders(interest_results):
            code = self.expression
            for key, value in subgroup_data.iteritems():
                # e.g. re.subn(r"\${20x}", "A", "(${20x}+${30x})")
                code, _n = re.subn(r"\${%s}" % key, str(value), code)
            self.logger.debug("After substitution: %s" % code)
            try:
                res = eval(code)
                self.logger.info("MathExp: Result %s %s" % (subgroup_name, res))
            except Exception as err:
                self.logger.error("Exception in evaluation %s: %s" % (code, err))
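
# Standalone sketch of the substitution step above (not from the source):
# ${...} placeholders are replaced with their numeric values before eval().
code, _ = re.subn(r"\${%s}" % "20x", str(4), "(${20x}+${30x})")
code, _ = re.subn(r"\${%s}" % "30x", str(6), code)
print eval(code)  # -> 10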
class Elliptics(AbstractDistributedStorage):

    def __init__(self, **config):
        self.logger = CommonLogger()
        cfg = [tuple(_i.split(":")) for _i in config["proxy_hosts"]]
        random.shuffle(cfg)
        self.hostsinfo = cfg
        self.read_timeout = config.get("read_timeout", 0.5)
        self.write_timeout = config.get("write_timeout", 0.5)
        self.read_url = string.Template("http://${HOST}:${R_PORT}/get/${KEY}?ioflags=3072")
        self.write_url = string.Template("http://${HOST}:${W_PORT}/upload/${KEY}?ioflags=3072")

    def connect(self, namespace):
        return True

    def insert(self, raw_key, data):
        key = hashlib.md5(raw_key).hexdigest()
        # Iterate over a copy so unreachable hosts can be dropped on the fly
        for host, r_port, w_port in self.hostsinfo[:]:
            try:
                r = requests.post(self.write_url.substitute(KEY=key, HOST=host, W_PORT=w_port),
                                  data=PACK(data),
                                  timeout=self.write_timeout)
                if r.status_code == 200:  # because of an elliptics write cache bug
                    self.logger.debug("Elliptics: insert key %s (%s) successfully" % (key, raw_key))
                    return True
            except requests.exceptions.Timeout:
                self.hostsinfo.remove((host, r_port, w_port))
            except requests.exceptions.ConnectionError:
                self.logger.debug("Elliptics hosts: %s" % self.hostsinfo)
                self.hostsinfo.remove((host, r_port, w_port))
        self.logger.error("Elliptics: failed to insert key %s (%s)" % (key, raw_key))
        return False

    def read(self, raw_key, cache=False):
        key = hashlib.md5(raw_key).hexdigest()
        for host, r_port, w_port in self.hostsinfo[:]:
            try:
                r = requests.get(self.read_url.substitute(KEY=key, HOST=host, R_PORT=r_port),
                                 timeout=self.read_timeout)
                if r.ok:
                    self.logger.debug("Elliptics: read key %s (%s) successfully" % (key, raw_key))
                    ret = UNPACK(r.content)
                    r.close()
                    return ret
                elif r.status_code == 404:
                    self.logger.info("Elliptics: key %s (%s) is missing" % (key, raw_key))
                    return None
            except requests.exceptions.Timeout:
                self.hostsinfo.remove((host, r_port, w_port))
            except requests.exceptions.ConnectionError:
                self.hostsinfo.remove((host, r_port, w_port))
            except Exception:
                self.logger.exception("Read error in elliptics proxy")
        self.logger.error("Elliptics: failed to read key %s (%s)" % (key, raw_key))
        return None

    def remove(self, key):
        return "OK"

    def close(self):
        return True
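
# Round-trip sketch (not from the source): the host:read_port:write_port triple
# is a placeholder, and PACK/UNPACK come from the module that defines Elliptics.
storage = Elliptics(proxy_hosts=["elliptics-proxy.example.net:80:8080"])
storage.connect("combaine")
if storage.insert("mygroup;my_parsing;1371237000", {"metric": 1}):
    print storage.read("mygroup;my_parsing;1371237000")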