    def __init__(self, logdir='', file_path=''):
        """Instance of LogReader.

        Args:
            logdir: Directory (or list of directories) containing VisualDL
                log files; multiple subfolders are allowed.
            file_path: Path of a single VisualDL log file to read directly.
        """
        if isinstance(logdir, str):
            self.dir = [logdir]
        else:
            self.dir = logdir

        self.reader = None
        self.readers = {}
        self.walks = None
        self._tags = {}
        self.name2tags = {}
        self.tags2name = {}

        self.file_readers = {}

        # Cached records, keyed as
        # {'run': {'scalar': {'tag1': data, 'tag2': data}}}
        self._log_datas = collections.defaultdict(
            lambda: collections.defaultdict(lambda: collections.defaultdict(
                list)))

        if file_path:
            # Single-file mode: read the log file eagerly and generate a
            # per-component accessor (such as get_scalar) bound to get_data.
            self._log_data = collections.defaultdict(
                lambda: collections.defaultdict(list))
            self.get_file_reader(file_path=file_path)
            remain = self.get_remain()
            self.read_log_data(remain=remain)
            components_name = components.keys()
            for name in components_name:
                exec("self.get_%s=partial(self.get_data, '%s')" % (name, name))
        elif logdir:
            # Directory mode: records are loaded lazily through the shared
            # data manager.
            self.data_manager = default_data_manager
            self.load_new_data(update=True)
        self._a_tags = {}

        self._model = ""
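
    # A minimal usage sketch of the file_path mode above (illustrative only;
    # the log file path and tag name below are hypothetical):
    #
    #   reader = LogReader(file_path='./log/vdlrecords.demo.log')
    #   # get_scalar was bound by exec() to partial(self.get_data, 'scalar'),
    #   # so it forwards its arguments to get_data for the 'scalar' component.
    #   loss_records = reader.get_scalar('train/loss')
    #
    # In logdir mode these accessors are not generated; data is served through
    # default_data_manager instead.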
            log_reader.name2tags.pop(key)
            log_reader.name2tags.update({temp_key: value})
            log_reader.tags2name.pop(value)
            log_reader.tags2name.update({value: temp_key})

            run2tag['runs'][run2tag['runs'].index(key)] = temp_key
        else:
            temp_key = key
        MODIFIED_RUNS.append(temp_key)
    return run2tag


# Generate one per-component wrapper around get_logs (e.g. get_scalar_tags).
for name in components.keys():
    exec("get_%s_tags=partial(get_logs, component='%s')" % (name, name))


def get_scalar(log_reader, run, tag):
    # Translate the run's display name via name2tags when a mapping exists.
    run = log_reader.name2tags[run] if run in log_reader.name2tags else run
    log_reader.load_new_data()
    records = log_reader.data_manager.get_reservoir("scalar").get_items(
        run, decode_tag(tag))
    # Each record becomes [wall time in ms, step id, value].
    results = [[s2ms(item.timestamp), item.id, item.value]
               for item in records]
    return results


def get_scalar_data(log_reader, run, tag, type='tsv'):
    run = log_reader.name2tags[run] if run in log_reader.name2tags else run
    log_reader.load_new_data()