import json
import os
import sys
import time
import uuid

# CacheProxy, the READER_ERR_KEY / SENDER_ERR_KEY / OTHER_ERR_KEY constants,
# and the helpers used by main() below (get_configs, time_util, check_valid,
# get_log_list, get_latest_log, parse_line) are assumed to be defined
# elsewhere in this module or its imports.


class ErrorReport(object):
    """Persists error information and report settings in a local cache."""

    def __init__(self):
        self.cache = CacheProxy('err_cache')

    def __del__(self):
        self.cache.close()

    def get_report_id(self):
        # Reuse the cached report id; generate and cache one if absent.
        report_id = self.cache.get('report_id')
        if report_id is None:
            report_id = str(uuid.uuid1())
            self.cache.set('report_id', report_id)
        return report_id

    def record_err_info(self, err_name, err):
        # Store the error together with the time it was recorded.
        err_info = {err_name: err, 'timestamp': int(time.time())}
        self.cache.set(err_name, json.dumps(err_info))

    def pop_err_info(self, err_name):
        # Return the stored error (or None) and remove it from the cache.
        err_info_str = self.cache.get(err_name)
        self.cache.delete(err_name)
        if err_info_str is None:
            return None
        return json.loads(err_info_str)

    def set_report_enabled(self, enable):
        self.cache.set('report_enabled', 1 if enable else 0)

    def get_report_enabled(self):
        enabled = self.cache.get('report_enabled')
        if enabled is None:
            return False
        return bool(enabled)

    def record_reader_err(self, err):
        self.record_err_info(READER_ERR_KEY, err)

    def record_sender_err(self, err):
        self.record_err_info(SENDER_ERR_KEY, err)

    def record_other_err(self, err):
        self.record_err_info(OTHER_ERR_KEY, err)

    def pop_reader_err(self):
        return self.pop_err_info(READER_ERR_KEY)

    def pop_sender_err(self):
        return self.pop_err_info(SENDER_ERR_KEY)

    def pop_other_err(self):
        return self.pop_err_info(OTHER_ERR_KEY)
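
# --- Usage sketch (illustrative only) ---
# The function below is not part of the original module; the error text and
# call sequence are assumptions meant to show how ErrorReport's API fits
# together on top of whatever store backs CacheProxy.
def _error_report_example():
    report = ErrorReport()
    report.set_report_enabled(True)
    if report.get_report_enabled():
        report.record_reader_err('example reader failure')  # hypothetical message
        print(report.get_report_id())   # stable UUID cached under 'report_id'
        print(report.pop_reader_err())  # returns the stored entry and clears it
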
def main(argv):
    config = get_configs()
    section = 'custom_path'
    if not config.has_section(section):
        return
    cache = CacheProxy('custom')
    options = config.options(section)
    # Entries older than delay_limit milliseconds are dropped.
    # (The option name 'dely_limit' is kept as-is to match the config file.)
    delay_limit = config.getint('custom_config', 'dely_limit')
    out_list = []
    ntp_checked, timestamp = time_util.get_ntp_time()
    for key in options:
        dir_path = config.get(section, key)
        if not check_valid(dir_path):
            continue
        key_out = {'data': [], 'source_key': key}
        log_list = get_log_list(dir_path)
        log_record = cache.get(key)
        for log in log_list:
            log_path = '%s/%s' % (dir_path, log)
            # Remove logs older than the last one recorded for this key.
            if log_record and log < log_record:
                os.remove(log_path)
                continue
            cache.set(key, log)
            if os.path.isfile(log_path) and os.access(log_path, os.R_OK):
                delete = False
                with open(log_path) as f:
                    # Resume reading from the last cached offset for this file.
                    offset_key = '%s-%s' % (key, log)
                    offset = cache.get(offset_key)
                    if offset:
                        f.seek(offset)
                    else:
                        offset = 0
                    while True:
                        line = f.readline()
                        if line:
                            # Advance the stored offset by the line just read.
                            offset += len(line)
                            cache.set(offset_key, offset)
                            line_dict = parse_line(line)
                            if line_dict:
                                if ('timestamp' in line_dict and
                                        line_dict['timestamp'] <
                                        int(time.time() * 1000) - delay_limit):
                                    # Too old relative to delay_limit; skip it.
                                    continue
                                data = {
                                    'dimensions': {},
                                    'metrics': line_dict,
                                    'timestamp': timestamp,
                                    'ntp_checked': ntp_checked
                                }
                                key_out['data'].append(data)
                        else:
                            # End of file: drop it unless it is the newest
                            # log, which may still be written to.
                            if log_path != get_latest_log(dir_path):
                                cache.delete(offset_key)
                                delete = True
                            break
                if delete:
                    os.remove(log_path)
        if key_out['data']:
            out_list.append(key_out)
    cache.close()
    if out_list:
        print(json.dumps(out_list))
        sys.stdout.flush()
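
# --- Entry point (assumption) ---
# The original snippet does not show how main() is invoked; a conventional
# guard like this is assumed for running the collector as a script.
if __name__ == '__main__':
    main(sys.argv[1:])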