class ParsingConfigurator(object):
    """Merge the common combaine config, one parsing config and its
    aggregation configs into a single configuration object.

    :param parsingconf: name of the parsing config to load.
    :param aggregation_config: optional name of a single aggregation
        config; when ``None``, every config listed under the parsing
        config's ``agg_configs`` key is loaded instead.

    Any error while reading/merging the configs is logged and re-raised.
    """

    def __init__(self, parsingconf, aggregation_config=None):
        self.logger = CommonLogger()
        self.logger.debug("read combaine config")
        self.metahost = None
        try:
            _combaine = parse_common_cfg("combaine")
            _parsing = parse_parsing_cfg(parsingconf)
            if aggregation_config is None:
                _aggregations = [(parse_agg_cfg(agg_name), agg_name)
                                 for agg_name in _parsing["agg_configs"]]
            else:
                _aggregations = [(parse_agg_cfg(aggregation_config), aggregation_config)]
                # Aggregation config wins over the parsing config.
                self.metahost = (_aggregations[0][0].get('metahost')
                                 or _parsing.get('metahost'))

            # Cloud-wide defaults; each section may be overridden by the
            # parsing config below.
            self.ds = _combaine["cloud_config"]["DistributedStorage"]
            self.df = _combaine["cloud_config"]["DataFetcher"]
            self.db = _combaine["cloud_config"]["LocalDatabase"]
            self.hosts_fetcher_http_hand = _combaine['Combainer'].get('HTTP_HAND')
            self.parser = _parsing.get("parser")
            for attr, section in (("ds", "DistributedStorage"),
                                  ("df", "DataFetcher"),
                                  ("db", "LocalDatabase")):
                override = _parsing.get(section)
                if override is not None:
                    # BUG FIX: old code logged "Update ds" for all three sections.
                    self.logger.debug("Update %s from parsing" % attr)
                    getattr(self, attr).update(override)

            # Map the short type names used in configs to aggregator class names.
            agg_bind = {
                "summa": "AverageAggregator",
                "quant": "QuantAggregator",
                "average": "AverageAggregator",
                "uniq": "UniqAggregator",
            }
            self.aggregators = []
            # NOTE(review): attribute name keeps the historical typo
            # ("resulthadlers") because external callers read it.
            self.resulthadlers = []
            for aggregator, _agg_name in _aggregations:
                for name, dic in aggregator["data"].items():
                    tmp = dict(dic)
                    # dic's own "name"/"query" keys, when present, take
                    # precedence — same as the original tmp.update(dic).
                    tmp.setdefault("name", _agg_name + "@" + name)
                    tmp.setdefault("query", "EMPTY")
                    if dic["type"] == "quant":
                        # Enforce that quant configs carry explicit values
                        # (raises KeyError otherwise, caught below).
                        tmp["values"] = dic["values"]
                    # Unknown types map to None and are silently dropped.
                    tmp["type"] = agg_bind.get(dic["type"])
                    if tmp["type"] is not None:
                        self.aggregators.append(tmp)
                if "ResultHandlers" in aggregator:
                    for name, dic in aggregator["ResultHandlers"].items():
                        dic['type'] = name
                        dic['parsing_conf'] = _parsing
                        self.resulthadlers.append(dic)
        except Exception:
            self.logger.exception("Error in read config")
            raise
# Example #2
class Timetail(AbstractFetcher):
    """Fetcher that builds timetail HTTP URLs for a given log.

    Expected config keys:
        timetail_url  -- base URL of the timetail handler (required)
        timetail_port -- TCP port of the timetail daemon (default 3132)
        logname       -- name of the log to tail (required)

    Callers append a unix timestamp after the trailing ``time=``.
    """

    def __init__(self, **config):
        self.log = CommonLogger()
        try:
            url = config['timetail_url']
            # dict.get replaces the deprecated has_key() check.
            self.port = config.get('timetail_port', 3132)
            log_name = config['logname']
            self.http_get_url = "%(url)s%(log)s&time=" % {'url': url, 'log': log_name}
        except Exception:
            self.log.exception("Error in init Timetail getter")
            # BUG FIX: re-raise the original error instead of a fresh bare
            # `Exception`, so callers keep the real cause and traceback.
            # (Also fixes the Python-2-only `except Exception, err` syntax,
            # matching the `as err` style used elsewhere in this file.)
            raise
# Example #3
class Agave(AbstractResultHandler):
    """Result handler that pushes aggregation results to agave graphs
    via plain HTTP GET requests against every host in `agave_hosts`.
    """

    def __init__(self, **config):
        self.logger = CommonLogger()
        self.graph_name = config.get("graph_name")
        self.graph_template = config.get("graph_template")
        self.fields = config.get("Fields")
        self.template_dict = {
            "template": self.graph_template,
            "title": self.graph_name,
            "graphname": self.graph_name,
        }
        self.logger.debug(self.template_dict)

    def __makeUrls(self, frmt_dict):
        # Fill in the per-call fields (group/values/time) and fire the request.
        self.template_dict.update(frmt_dict)
        template = (
            "/api/update/%(group)s/%(graphname)s?values=%(values)s&ts=%(time)i&template=%(template)s&title=%(title)s"
            % self.template_dict
        )
        self.__send_point(template)

    def __send_point(self, url):
        # Best effort: every agave host is tried; failures are only logged.
        for agv_host in agave_hosts:
            conn = httplib.HTTPConnection(agv_host, timeout=1)
            # BUG FIX: copy the shared headers dict instead of mutating the
            # module-level `agave_headers` in place (the old code leaked the
            # last host's Host header into the global dict).
            headers = dict(agave_headers)
            headers["Host"] = agv_host + ":80"
            try:
                conn.request("GET", url, None, headers)
                _r = conn.getresponse()
                self.logger.info("%s %s %s %s %s" % (agv_host, _r.status, _r.reason, _r.read().strip("\r\n"), url))
            except Exception:
                self.logger.exception("Unable to connect to one agave")
            else:
                _r.close()

    def send(self, data):
        """Group values by (sub)group name and push one URL per group.

        NOTE(review): all groups are sent with the timestamp of the last
        result that carried values — preserved from the original code.
        """
        for_send = collections.defaultdict(list)
        for aggres in data:
            for sbg_name, val in aggres.values:
                _sbg = sbg_name if sbg_name == aggres.groupname else "-".join((aggres.groupname, sbg_name))
                if isinstance(val, list):  # quantile: one value per field
                    pairs = zip(self.fields[aggres.aggname], val)
                    _value = "+".join("%s:%s" % pair for pair in pairs)
                else:  # simple single value
                    _value = "%s:%s" % (aggres.aggname, val)
                for_send[_sbg].append(_value)
                time = aggres.time

        for name, val in for_send.items():
            frmt_dict = {"group": name, "values": "+".join(val), "time": time}
            self.__makeUrls(frmt_dict)
# Example #4
class Elliptics(AbstractDistributedStorage):
    """Distributed storage backed by an elliptics HTTP proxy.

    Keys are md5-hexed before use; every proxy host is tried in a random
    order and hosts that time out or refuse connections are dropped from
    the rotation for the lifetime of this object.
    """

    def __init__(self, **config):
        self.logger = CommonLogger()

        hosts = [tuple(entry.split(":")) for entry in config["proxy_hosts"]]
        random.shuffle(hosts)
        # List of (host, read_port, write_port) tuples; shrinks on failures.
        self.hostsinfo = hosts

        self.read_timeout = config.get("read_timeout", 0.5)
        self.write_timeout = config.get("write_timeout", 0.5)

        self.read_url = string.Template("http://${HOST}:${R_PORT}/get/${KEY}?ioflags=3072")
        self.write_url = string.Template("http://${HOST}:${W_PORT}/upload/${KEY}?ioflags=3072")

    def connect(self, namespace):
        # The HTTP proxy is stateless; nothing to establish.
        return True

    def insert(self, raw_key, data):
        """Write PACK(data) under raw_key; True on success, False otherwise."""
        key = hashlib.md5(raw_key).hexdigest()
        for hostinfo in list(self.hostsinfo):
            host, r_port, w_port = hostinfo
            target = self.write_url.substitute(KEY=key, HOST=host, W_PORT=w_port)
            try:
                resp = requests.post(target, data=PACK(data), timeout=self.write_timeout)
            except requests.exceptions.Timeout as err:
                self.hostsinfo.remove(hostinfo)
            except requests.exceptions.ConnectionError as err:
                self.logger.debug("Elliptics hosts: %s" % self.hostsinfo)
                self.hostsinfo.remove(hostinfo)
            else:
                if resp.status_code == 200: #because elliptics write cache bug
                    self.logger.debug("Elliptics: insert key %s (%s) succesfully" % (key, raw_key))
                    return True
        self.logger.error("Elliptics: failed to insert key %s (%s)" % (key, raw_key))
        return False

    def read(self, raw_key, cache=False):
        """Read and UNPACK the value under raw_key; None if missing/failed."""
        key = hashlib.md5(raw_key).hexdigest()
        for hostinfo in list(self.hostsinfo):
            host, r_port, w_port = hostinfo
            target = self.read_url.substitute(KEY=key, HOST=host, R_PORT=r_port)
            try:
                resp = requests.get(target, timeout=self.read_timeout)
                if resp.ok:
                    self.logger.debug("Elliptics: read key %s (%s) succesfully" % (key, raw_key))
                    payload = UNPACK(resp.content)
                    resp.close()
                    return payload
                if resp.status_code == 404:
                    self.logger.info("Elliptics: Key %s (%s) is missing" % (key, raw_key))
                    return None
            except requests.exceptions.Timeout as err:
                self.hostsinfo.remove(hostinfo)
            except requests.exceptions.ConnectionError as err:
                self.hostsinfo.remove(hostinfo)
            except Exception as err:
                self.logger.exception("Read error in elliptics proxy")
        self.logger.error("Elliptics: failed to read key %s (%s)" % (key, raw_key))
        return None

    def remove(self, key):
        # Deletion is not implemented for the proxy; report success.
        return "OK"

    def close(self):
        return True