def test_entry_time_when_timezone(self):
    """An explicitly zoned start time survives the millis round-trip and is
    reported back normalized to a zero UTC offset."""
    aware_start = datetime.strptime('2018-04-03T18:25:22+0230', "%Y-%m-%dT%H:%M:%S%z")

    self.simulate_logging(execution_id='id1', start_time_millis=to_millis(aware_start))

    entry = self.logging_service.find_history_entry('id1', 'userX')

    # Aware datetimes compare by instant, so equality holds even though the
    # stored value carries a different (UTC) offset than the original +02:30.
    self.assertEqual(entry.start_time, aware_start)
    self.assertEqual(entry.start_time.utcoffset(), timedelta(hours=0, minutes=0))
def __migrate_old_files(context):
    """Upgrade pre-header log files in ``<log_folder>/processes`` to the new format.

    A file already in the new format starts with an ``id:`` line and contains
    the output-started marker. Every other ``.log`` file gets a synthesized
    header (id, user, script, start time) prepended: metadata is parsed from a
    ``<script>_<user>_<yymmdd>_<hhmmss>`` file name when possible, otherwise
    placeholders plus the file's creation time are used.

    :param context: object exposing a ``log_folder`` attribute
    """
    output_folder = os.path.join(context.log_folder, 'processes')

    if not os.path.exists(output_folder):
        return

    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    def is_new_format(log_file):
        # New-format files begin with an 'id:' header line and contain the
        # marker separating the header from the captured process output.
        with open(log_file, 'r') as f:
            if not f.readline().strip().startswith('id:'):
                return False
            for line in f:
                if line.strip() == execution.logging.OUTPUT_STARTED_MARKER:
                    return True
        return False

    old_files = [log_file for log_file in log_files if not is_new_format(log_file)]
    if not old_files:
        return

    # Collect ids already claimed by new-format files so generated ids
    # never collide with them.
    existing_ids = set()
    for file in log_files:
        correct, parameters_text = ExecutionLoggingService._read_parameters_text(file)
        if not correct:
            continue

        parameters = ExecutionLoggingService._parse_history_parameters(parameters_text)
        if not parameters or 'id' not in parameters:
            continue

        existing_ids.add(parameters['id'])

    # Infinite stream of decimal ids, skipping any already in use.
    id_generator = (str(counter) for counter in itertools.count())
    id_generator = filter(lambda candidate: candidate not in existing_ids, id_generator)

    # Raw string: '\d' in a plain literal is an invalid escape sequence
    # (DeprecationWarning since 3.6, a SyntaxError in newer Pythons).
    # Compiled once, outside the loop.
    filename_pattern = re.compile(r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))')

    for old_file in old_files:
        log_basename = os.path.basename(old_file)
        filename = os.path.splitext(log_basename)[0]

        match = filename_pattern.fullmatch(filename)
        if match:
            script_name = match.group(1)
            username = match.group(2)
            start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')
        else:
            script_name = 'unknown'
            username = '******'
            start_time = sec_to_datetime(os.path.getctime(old_file))

        # Same in both branches, so hoisted out of the if/else.
        new_id = next(id_generator)

        header_lines = [
            'id:' + new_id,
            'user_name:' + username,
            'user_id:' + username,
            'script:' + script_name,
            'start_time:' + str(to_millis(start_time)),
            'command:unknown',
            execution.logging.OUTPUT_STARTED_MARKER,
        ]
        new_begin = '\n'.join(header_lines) + '\n'

        file_content = new_begin + file_utils.read_file(old_file)
        file_utils.write_file(old_file, file_content)
def __migrate_old_files(self, output_folder):
    """Upgrade pre-header log files in *output_folder* to the new format.

    Files are processed oldest-first (by the trailing timestamp in the file
    name); migration stops at the first file already in the new format (an
    ``id:`` first line plus the output-started marker). Each old file gets a
    synthesized header prepended and is registered in ``self._ids_to_file_map``
    under its generated id.

    :param output_folder: directory containing the ``.log`` files
    """
    log_files = [os.path.join(output_folder, file)
                 for file in os.listdir(output_folder)
                 if file.lower().endswith('.log')]

    # from oldest to newest: names end with 'yymmdd_hhmmss.log' (17 chars)
    log_files.sort(key=lambda file_path: file_path[-17:])

    def is_new_format(log_file):
        # New-format files begin with an 'id:' header line and contain the
        # marker separating the header from the captured process output.
        with open(log_file, 'r') as f:
            if not f.readline().strip().startswith('id:'):
                return False
            for line in f:
                if line.strip() == OUTPUT_STARTED_MARKER:
                    return True
        return False

    # Everything before the first new-format file is considered old.
    old_files = []
    for log_file in log_files:
        if is_new_format(log_file):
            break
        old_files.append(log_file)

    if not old_files:
        return

    used_ids = set(self._ids_to_file_map.keys())

    def id_generator_function():
        # Infinite stream of decimal ids, skipping any already in use.
        counter = 0
        while True:
            candidate = str(counter)
            if candidate not in used_ids:
                yield candidate
            counter += 1

    id_generator = id_generator_function()

    # Raw string: '\d' in a plain literal is an invalid escape sequence
    # (DeprecationWarning since 3.6, a SyntaxError in newer Pythons).
    # Compiled once, outside the loop.
    filename_pattern = re.compile(r'(.+)_([^_]+)_((\d\d)(\d\d)(\d\d)_(\d\d)(\d\d)(\d\d))')

    for old_file in old_files:
        log_basename = os.path.basename(old_file)
        filename = os.path.splitext(log_basename)[0]

        match = filename_pattern.fullmatch(filename)
        if match:
            script_name = match.group(1)
            username = match.group(2)
            start_time = datetime.strptime(match.group(3), '%y%m%d_%H%M%S')
        else:
            script_name = 'unknown'
            username = '******'
            start_time = sec_to_datetime(os.path.getctime(old_file))

        # Same in both branches, so hoisted out of the if/else.
        new_id = next(id_generator)

        # NOTE(review): the original 'user:' line was corrupted in the source
        # ('user:'******'); reconstructed as 'user:' + username by parallel
        # with the module-level migration routine — confirm against history.
        header_lines = [
            'id:' + new_id,
            'user:' + username,
            'script:' + script_name,
            'start_time:' + str(to_millis(start_time)),
            'command:unknown',
            OUTPUT_STARTED_MARKER,
        ]
        new_begin = '\n'.join(header_lines) + '\n'

        file_content = new_begin + file_utils.read_file(old_file)
        file_utils.write_file(old_file, file_content)

        self._ids_to_file_map[new_id] = log_basename