Example #1
0
def main():
    """
    Resolve every vhost seen in recent haproxy logs, probe each one over
    HTTP, and dump the collected reachability data as JSON for the webmap.

    No parameters, no return value; writes /var/www/webapps/webmap/webmap.json.
    """
    project = "haproxy"
    tablename = "http_host"
    datalogger = DataLoggerWeb("https://datalogger-api.tirol-kliniken.cc/DataLogger")
    # datestring = datalogger.get_last_business_day_datestring()
    # haproxy logs lag behind, so go two days back instead
    datestring = (datetime.date.today() - datetime.timedelta(2)).isoformat()
    caches = datalogger.get_caches(project, tablename, datestring)
    # SECURITY NOTE: eval() on cache keys trusts the DataLogger backend
    # completely -- keys appear to be repr()-ed tuples like "('host:port',)".
    vhosts = [eval(key)[0].split(":")[0] for key in caches["ts"]["keys"].keys()]
    index = 1
    out_data = []
    out_data.append(("index", "vhost", "domain", "fqdn", "ip", "ip_reverse_hostname", "status_code", "x_backend_server", "duration"))
    filter_vhost = generate_filter_vhost()
    for vhost in vhosts:
        if filter_vhost(vhost) is True:
            logging.info("vhost %s filtered out", vhost)
            continue
        # defaults reported when resolution / probing fails
        ip = "unknown"
        hostname = "unknown"
        fqdn = "unknown"
        duration = -1.0
        status_code = 0
        x_backend_server = None
        res = None  # guard: the HTTP request below may fail before res is bound
        domain = ".".join(vhost.split(".")[1:])
        try:
            fqdn = socket.getfqdn(vhost)
            ip = socket.gethostbyname(vhost)
            hostname = socket.gethostbyaddr(ip)[0]
        except (socket.herror, socket.gaierror):
            pass
        if ip == "unknown":
            logging.info("could not resolv hostname %s , probably fake", vhost)
            continue
        if not ip.startswith("10."):
            # previously folded into the check above, which made this branch
            # unreachable; non-internal addresses are now reported explicitly
            logging.info("%s is external, skipping", vhost)
            continue
        try:
            starttime = time.time()
            res = requests.request("GET", "http://%s/" % vhost, timeout=10, stream=False)
            duration = time.time() - starttime
            status_code = res.status_code
        except (requests.exceptions.ConnectionError, requests.exceptions.InvalidURL):
            logging.info("ConnectionError or InvalidURL occured %s", vhost)
        except requests.exceptions.ReadTimeout:
            logging.info("RequestTimeout occured %s", vhost)
        if res is not None:  # originally a NameError when the request failed
            try:
                x_backend_server = res.headers['x-backend-server']
                if len(x_backend_server) == 8:
                    # TODO not exact, hack: 8 chars are treated as an encoded IP
                    ip_backend_server = decode_ip(x_backend_server)
                    x_backend_server = socket.gethostbyaddr(ip_backend_server)[0] # only hostname part
                else:
                    x_backend_server = socket.getfqdn(x_backend_server)
            except KeyError:
                pass
        logging.debug("%40s : %20s : %40s : %15s : %40s : %d : %s : %02f", vhost, domain, fqdn, ip, hostname, status_code, x_backend_server, duration)
        out_data.append((index, vhost, domain, fqdn, ip, hostname, status_code, x_backend_server, duration))
        index += 1
    # with-statement guarantees the file is closed even if json.dump raises
    with open("/var/www/webapps/webmap/webmap.json", "w") as outfile:
        json.dump({"last_update_ts" : str(datetime.date.today()), "data" : out_data}, outfile)
Example #2
0
def main():
    """
    Upload day-sorted haproxy raw logs to the DataLogger web application.

    Walks every datestring from a start date (given via --startdate or as
    --back days before today) to --enddate, skips days whose raw data is
    already cached on the backend, converts the day's logs to CSV and
    POSTs the result to the upload endpoint.
    """
    yesterday_datestring = (datetime.date.today() - datetime.timedelta(1)).isoformat()
    parser = argparse.ArgumentParser(description='generate TimeseriesArrays on local backend')
    parser.add_argument('--url', default="https://datalogger-api.tirol-kliniken.cc/DataLogger", help="url of DataLogger Webapplication")
    parser.add_argument('--logdir', default="/data1/haproxy_daily/", help="directory where to find day sorted haproxylogs")
    parser.add_argument("-b", '--back', help="how many days back from now")
    parser.add_argument("-s", '--startdate', help="start date in isoformat YYYY-MM-DD")
    parser.add_argument("-e", '--enddate', default=yesterday_datestring, help="stop date in isoformat YYYY-MM-DD")
    parser.add_argument("-q", '--quiet', action='store_true', help="set to loglevel ERROR")
    parser.add_argument("-v", '--verbose', action='store_true', help="set to loglevel DEBUG")
    args = parser.parse_args()
    if args.quiet is True:
        logging.getLogger("").setLevel(logging.ERROR)
    if args.verbose is True:
        logging.getLogger("").setLevel(logging.DEBUG)
    # originally "==", which also fired (with the wrong message) when
    # NEITHER option was given and made the else-branch below unreachable
    if (args.back is not None) and (args.startdate is not None):
        logging.error("option -b and -s are mutual exclusive, use only one")
        sys.exit(1)
    startdate = None
    if args.back is not None:
        startdate = (datetime.date.today() - datetime.timedelta(int(args.back))).isoformat()
    elif args.startdate is not None:
        startdate = args.startdate
    else:
        logging.error("you have to provide either -b or -s")
        sys.exit(1)
    # lets get started
    datalogger = DataLoggerWeb(args.url)
    project = "haproxy"
    tablename = "http_host"
    baseurl = "%s/upload_raw_file/" % args.url
    logdir = args.logdir # where to find haproxy logs
    keys = ("http_host", )
    values = ("bytes_read", "rsp_1xx", "rsp_2xx", "rsp_3xx", "rsp_4xx", "rsp_5xx", "rsp_other", "srv_queue", "backend_queue", "actconn", "feconn", "beconn", "srv_conn", "retries", "tq", "tw", "tc", "tr", "tt", "hits")
    ts_keyname = "ts"
    for datestring in datewalk(startdate, args.enddate):
        caches = datalogger.get_caches(project, tablename, datestring)
        if caches["tsa"]["raw"] is not None:
            logging.info("Skipping this datestring, raw data is already available")
            continue
        try:
            stringio = generate_datalogger_csv(logdir, datestring, keys, values, ts_keyname)
            # upload data
            files = {'myfile': stringio}
            url = "/".join((baseurl, project, tablename, datestring))
            logging.info("calling %s", url)
            response = requests.post(url, files=files)
            # parenthesized form works as a statement in py2 and a call in py3
            print(response.content)
        except zlib.error as exc:
            # zlib.error is not a StandardError subclass, but list the
            # narrower handler first anyway for clarity
            logging.error(exc)
        except StandardError as exc:
            # originally the exception was swallowed and the message had a
            # "%si" format typo
            logging.error("Exception %s on file datestring %s, skipping this date", exc, datestring)