def apache_stats():
    apache_logs_stats = []
    if not common.check_config_sections(['apache_logs']):
        return apache_logs_stats
    for website in common.config['apache_logs']:
        # Each entry is a single-key dict: {website_name: website_config}.
        website_name = website.keys()[0]
        website_config = website.values()[0]
        log_file_pattern = website_config.get('file', None)
        if not log_file_pattern:
            common.process_exception("no logfile pattern for website %s" % website)
            continue
        url_filter_string = website_config.get('url_filter', None)
        url_regex = re.compile(url_filter_string) if url_filter_string else None
        count, avg_time = process_logs(log_file_pattern, url_regex)
        date = common.now()
        print "LOGS date: %s website: %s count: %s duration: %s" % (
            date, website_name, count, avg_time)
        apache_logs_stats.extend([
            {'date': date, 't': 'LOG_REQUESTS-COUNT',
             'd1': common.HOSTNAME, 'd2': website_name, 'V': count},
            {'date': date, 't': 'LOG_REQUESTS-DURATION',
             'd1': common.HOSTNAME, 'd2': website_name, 'V': avg_time},
        ])
    return apache_logs_stats
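# The 'apache_logs' section is expected to be a list of single-key mappings,
# inferred from the keys()/values() accesses above (the site name, path, and
# filter below are illustrative only):
#
#   common.config['apache_logs'] = [
#       {'mysite': {'file': '/var/log/apache2/mysite-access.log*',
#                   'url_filter': '^/api/'}},
#   ]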
def dirs_size():
    sizes = []
    if not common.check_config_sections(['disks', 'dirs_size']):
        return sizes
    for directory in common.config['disks']['dirs_size']:
        if not os.path.exists(directory):
            common.process_exception("%s does not exist. skip..." % directory)
            continue
        cmd = "du -s %s" % directory
        size = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                shell=True).communicate()[0].split()[0]
        size = common.kb_to_mb(size)
        date = common.now()
        print "DSIZE date: %s directory: %s size: %s" % (date, directory, size)
        sizes.append({"date": date, "t": "DSIZE", "d1": common.HOSTNAME,
                      "d2": directory, "V": size})
    return sizes
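# 'du -s' prints the size in 1 KB blocks, hence the kb_to_mb() conversion
# above. A shell-free alternative sketch (note that it sums apparent file
# sizes, which can differ from the allocated blocks that du reports):

def dir_size_kb(directory):
    total = 0
    for root, dirs, files in os.walk(directory):
        for name in files:
            try:
                total += os.path.getsize(os.path.join(root, name))
            except OSError:
                pass  # file vanished or is unreadable; skip it
    return total / 1024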
def disks_stats():
    usages = []
    if not common.check_config_sections(['disks', 'mount_points']):
        return usages
    for mount_point in common.config['disks']['mount_points']:
        try:
            fs_stats = psutil.disk_usage(mount_point)
        except OSError as e:
            common.process_exception(e)
            continue
        used = common.b_to_mb(fs_stats.used)
        date = common.now()
        print "DISK date: %s mount_point: %s used: %s" % (date, mount_point, used)
        usages.append({"date": date, "t": "DISK-USAGE", "d1": common.HOSTNAME,
                       "d2": mount_point, "V": used})
    return usages
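# psutil.disk_usage() returns a named tuple with total, used, free (bytes)
# and percent, so 'V' above is the used space in megabytes. An emitted record
# looks like this (hostname and values illustrative):
#   {"date": "...", "t": "DISK-USAGE", "d1": "web01", "d2": "/", "V": 10240}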
def io_stats():
    io_perdev = []
    if not common.check_config_sections(['disks', 'block_devs']):
        return io_perdev
    counters = psutil.disk_io_counters(perdisk=True)
    for dev in common.config['disks']['block_devs']:
        counter = counters.get(dev, None)
        if not counter:
            common.process_exception(
                'cannot find counters for block device %s. skip..' % dev)
            continue
        date = common.now()
        print "DISK date: %s block_dev: %s reads: %s writes: %s" % (
            date, dev, counter.read_count, counter.write_count)
        io_perdev.extend([
            {"date": date, "t": "DISK-READS", "d1": common.HOSTNAME,
             "d2": dev, "V": counter.read_count},
            {"date": date, "t": "DISK-WRITES", "d1": common.HOSTNAME,
             "d2": dev, "V": counter.write_count},
        ])
    return io_perdev
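# psutil.disk_io_counters(perdisk=True) returns a dict keyed by kernel device
# name (e.g. 'sda', 'sda1'), so the names in config['disks']['block_devs']
# must match exactly; read_count and write_count are cumulative since boot.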
def network_stats():
    network_bytes = []
    if not common.check_config_sections(['networking', 'interfaces']):
        return network_bytes
    # Older psutil releases expose this as network_io_counters(); fall back
    # to the legacy name when net_io_counters() is unavailable.
    if hasattr(psutil, 'net_io_counters'):
        counters = psutil.net_io_counters(pernic=True)
    else:
        counters = psutil.network_io_counters(pernic=True)
    for interface in common.config['networking']['interfaces']:
        counter = counters.get(interface, None)
        if not counter:
            common.process_exception(
                'cannot find counters for interface %s. skip..' % interface)
            continue
        date = common.now()
        mb_rcv = common.b_to_mb(counter.bytes_recv)
        mb_sent = common.b_to_mb(counter.bytes_sent)
        print "NET date: %s interface: %s recv: %s sent: %s" % (
            date, interface, mb_rcv, mb_sent)
        network_bytes.extend([
            {"date": date, "t": "NET-RCV", "d1": common.HOSTNAME,
             "d2": interface, "V": mb_rcv},
            {"date": date, "t": "NET-SENT", "d1": common.HOSTNAME,
             "d2": interface, "V": mb_sent},
        ])
    return network_bytes
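# With pernic=True, psutil returns a dict keyed by interface name ('eth0',
# 'lo', ...). bytes_recv and bytes_sent are cumulative totals since boot, so
# these metrics grow monotonically rather than measuring per-interval traffic.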
def process_logs(log_name_pattern, url_regex=None):
    response_times = []
    for log_path in glob.glob(log_name_pattern):
        # Skip logs that have not changed since the previous run
        # (LAST_START_TIME is set elsewhere in the module).
        if os.path.getmtime(log_path) < LAST_START_TIME:
            continue
        log_name = os.path.basename(log_path)
        shift_file_path = os.path.join(common.DATA_DIR, log_name)
        shift = 0
        if os.path.exists(shift_file_path):
            try:
                with open(shift_file_path, 'r') as shift_file:
                    shift = int(shift_file.read())
            except IOError as e:
                common.process_exception(e, critical=True)
        try:
            with open(log_path, 'r') as apache_log:
                # Resume from the byte offset saved on the previous run, then
                # persist the new offset for the next one.
                apache_log.seek(shift, 0)
                for line in apache_log:
                    response_time = get_response_time(line, url_regex)
                    if response_time is not None:
                        response_times.append(response_time)
                shift = apache_log.tell()
            with open(shift_file_path, 'w') as shift_file:
                shift_file.write(str(shift))
        except IOError as e:
            common.process_exception(e, critical=True)
    count = len(response_times)
    avg_time = sum(response_times) / count if count > 0 else 0
    return (count, avg_time)
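# get_response_time() is defined elsewhere in the module. The sketch below is
# only an illustration of its assumed contract, based on how process_logs()
# calls it: parse one access-log line, optionally filter by request URL, and
# return the response time in milliseconds (None for non-matching lines).
# The regex assumes an Apache LogFormat that appends the duration as "<n>ms".

LINE_PARSER_RE = re.compile(r'[^"]*"[^ ]* ([^ ]*) [^"]*" [0-9]* ([0-9]+)ms .*')

def get_response_time_sketch(line, url_regex=None):
    match = LINE_PARSER_RE.match(line)
    if not match:
        return None
    url, duration_ms = match.groups()
    if url_regex and not url_regex.search(url):
        return None
    return int(duration_ms)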