def get_ntp_time():
    """Fetch the current time from the configured NTP server.

    Returns:
        tuple: (ntp_checked, timestamp_ms) where ntp_checked is True when
        the NTP server answered, and timestamp_ms is milliseconds since
        the epoch. On any failure the local wall clock is returned with
        ntp_checked False (best-effort fallback, deliberately broad catch:
        config errors, DNS failures and NTP timeouts are all non-fatal).
    """
    try:
        config = get_configs()
        ntp_host = config.get('ntp', 'ntp_host')
        client = ntplib.NTPClient()
        response = client.request(ntp_host)
        # int() works on both Python 2 (auto-promotes to long) and Python 3;
        # the original long() builtin does not exist under Python 3.
        return True, int(response.tx_time * 1000)
    except Exception:
        return False, int(time.time() * 1000)
def main(argv):
    """Collect host-level metrics (CPU load averages, boot time, NTP clock
    deviation, logged-in user count) and print them to stdout as a JSON
    list containing a single record.

    Args:
        argv: command-line arguments; currently unused but kept for the
            common collector entry-point signature.
    """
    config = get_configs()
    cache = CacheProxy('svr')
    try:
        ntp_host = config.get('ntp', 'ntp_host')
        agent_version = config.get('agent', 'agent_version')
        dimensions = {}
        metrics = {}
        # 'wmic cpu get LoadPercentage' prints a header token followed by the
        # load value (0-100); split()[1] picks the value. Windows-only.
        load_average_1m = int(
            os.popen('wmic cpu get LoadPercentage').read().split()[1])
        # Feed the per-minute sample into the rolling recorder so 5m/15m
        # averages can be derived across runs.
        avg_recorder = AverageRecorder(cache, "load_avg_")
        avg_recorder.record(load_average_1m, int(time.time() / 60))
        load_average_5m, load_average_15m = avg_recorder.get_avg()
        metrics['load_average_1m'] = load_average_1m / 100.0
        if load_average_5m is not None:
            metrics['load_average_5m'] = load_average_5m / 100.0
        if load_average_15m is not None:
            metrics['load_average_15m'] = load_average_15m / 100.0
    finally:
        # Close the cache even when wmic parsing or the recorder raises;
        # the original code leaked the handle on any exception above.
        cache.close()
    metrics['boot_time'] = int(psutil.boot_time())
    timestamp = int(time.time() * 1000)
    ntp_checked = True
    try:
        client = ntplib.NTPClient()
        response = client.request(ntp_host)
        timestamp = int(response.tx_time * 1000)
        # Positive deviation means the local clock runs ahead of NTP time.
        deviation = int(time.time() * 1000) - timestamp
        metrics['time_deviation'] = deviation
    except Exception:
        # NTP unreachable: keep the local timestamp and flag it unchecked.
        ntp_checked = False
    metrics['user_cnt'] = len(psutil.users())
    metrics['agent_version'] = agent_version
    out = {
        'dimensions': dimensions,
        'metrics': metrics,
        'timestamp': timestamp,
        'ntp_checked': ntp_checked
    }
    out_list = [out]
    print(json.dumps(out_list))
    sys.stdout.flush()
def main(argv):
    """Tail custom log files listed in the 'custom_path' config section and
    print newly appended, parseable lines to stdout as JSON.

    Per-source state (last log file seen, per-file read offset) is kept in
    a 'custom' cache so repeated runs resume where they left off. Fully
    consumed, rotated-away log files are deleted from disk.
    """
    config = get_configs()
    section = 'custom_path'
    if not config.has_section(section):
        # Nothing configured: no custom sources to collect.
        return
    cache = CacheProxy('custom')
    options = config.options(section)
    # NOTE(review): 'dely_limit' is the actual config key (typo preserved);
    # lines older than now - delay_limit ms are dropped.
    delay_limit = config.getint('custom_config', 'dely_limit')
    out_list = []
    ntp_checked, timestamp = time_util.get_ntp_time()
    for key in options:
        # Each option maps a source key to a directory of log files.
        dir_path = config.get(section, key)
        if check_valid(dir_path):
            key_out = {'data': [], 'source_key': key}
            log_list = get_log_list(dir_path)
            # Last (lexicographically newest) log name processed for this key.
            log_record = cache.get(key)
            for log in log_list:
                log_path = '%s/%s' % (dir_path, log)
                if log_record and log < log_record:
                    # Older than what we already consumed: rotated-out file,
                    # delete it and move on.
                    os.remove(log_path)
                    continue
                else:
                    # Remember the newest log name seen so far.
                    cache.set(key, log)
                if os.path.isfile(log_path) and os.access(log_path, os.R_OK):
                    delete = False
                    with open(log_path) as f:
                        # Per-file byte offset so we only read appended data.
                        offset_key = '%s-%s' % (key, log)
                        offset = cache.get(offset_key)
                        if offset:
                            f.seek(offset)
                        else:
                            offset = 0
                        while True:
                            line = f.readline()
                            if line:
                                # Python-2 str.decode; ascii guarantees one
                                # byte per char so the char count matches the
                                # byte offset used by f.seek().
                                offset += len(line.decode('ascii'))
                                cache.set(offset_key, offset)
                                line_dict = parse_line(line)
                                if line_dict:
                                    if ('timestamp' in line_dict
                                            ) and (line_dict['timestamp'] < long(
                                            time.time() * 1000) - delay_limit):
                                        # Too old to report: silently skip.
                                        pass
                                    else:
                                        data = {
                                            'dimensions': {},
                                            'metrics': line_dict,
                                            'timestamp': timestamp,
                                            'ntp_checked': ntp_checked
                                        }
                                        key_out['data'].append(data)
                            else:
                                # EOF. If this is not the latest log it will
                                # never grow again: drop its offset and mark
                                # the file for deletion.
                                if log_path != get_latest_log(dir_path):
                                    cache.delete(offset_key)
                                    delete = True
                                break
                    if delete:
                        os.remove(log_path)
            if key_out['data']:
                out_list.append(key_out)
    cache.close()
    if out_list:
        print(json.dumps(out_list))
        sys.stdout.flush()
def __init__(self):
    """Load the shared configuration used for script execution."""
    loaded_config = get_configs()
    self.script_config = loaded_config
def __init__(self, script):
    """Initialize a per-script cache rooted under the configured
    collectors directory.

    Args:
        script: name of the script; used as the cache file name under
            <collectors_dir>/cache/script/.
    """
    self.config = get_configs()
    base_dir = self.config.get('base', 'collectors_dir')
    cache_path = os.path.join(base_dir, 'cache/script/', script)
    self.cache = Cache(cache_path)