def append_index_to_dic(self, dic, s, act, subact, ca, fno):
    """Insert the file path for (s, act, subact, ca, fno) into a nested dict.

    Builds/extends the nesting so that dic[s][act][subact][ca][fno] == path.
    Logs an error (without raising) when the resolved path does not exist on
    disk or when the same entry is added twice; an existing entry is never
    overwritten.

    :param dic: nested dictionary, updated in place
    :param s: subject
    :param act: action
    :param subact: sub-action
    :param ca: camera
    :param fno: sequence (frame) number
    :return: the updated dictionary (same object as ``dic``)
    """
    path, _, _ = self.get_name(s, act, subact, ca, fno)
    if not file_exists(path):
        self._logger.error("file found by path %s does not exist" % path)
    # setdefault creates each missing level on the way down, replacing the
    # original five-level if/else pyramid with a single chain.
    leaf = dic.setdefault(s, {}) \
              .setdefault(act, {}) \
              .setdefault(subact, {}) \
              .setdefault(ca, {})
    if fno in leaf:
        self._logger.error(" adding path %s twice " % path)
    else:
        leaf[fno] = path
    return dic
def save_index_file(self):
    """Persist ``self.index_file`` and ``self.all_metadata`` as one pickle.

    Writes ``[index_file, all_metadata]`` to
    ``self.index_file_loc / self.index_name``; an existing file is
    overwritten (logged at info level). Does nothing if there is no index
    to save.
    """
    if self.index_file is None:
        # Original logged the error and then pickled None anyway; bail out.
        self._logger.error("File to save is None")
        return
    self._logger.info('Saving index file...')
    file_path = os.path.join(self.index_file_loc, self.index_name)
    if file_exists(file_path):
        self._logger.info("Overwriting previous file")
    file_indices = [self.index_file, self.all_metadata]
    # Context manager guarantees the handle is closed even if dump raises.
    with open(file_path, "wb") as f:
        pkl.dump(file_indices, f)
def load_index_file(self):
    """Load the pickled [index_file, all_metadata] pair from disk.

    Reads ``self.index_file_loc / self.index_name`` and populates
    ``self.index_file`` and ``self.all_metadata``. If the file is missing,
    logs a warning and returns without touching either attribute.
    """
    self._logger.info(
        'Extract index file ... Note sampling might not correspond')
    file_path = os.path.join(self.index_file_loc, self.index_name)
    if not file_exists(file_path):
        self._logger.warning("index file to load does not exist")
        # Original fell through and crashed inside pkl.load after warning.
        return
    with open(file_path, "rb") as f:
        file_indices = pkl.load(f)
    self.index_file, self.all_metadata = file_indices
def load_image(self, s, act, subact, ca, fno):
    """Resolve the frame's path and return the extracted image.

    Looks the path up in the nested index when ``self.index_as_dict`` is
    set, otherwise derives it via ``get_name``. Logs an error and returns
    None when the file is missing.
    """
    if self.index_as_dict:
        path = self.index_file[s][act][subact][ca][fno]
    else:
        path, _, _ = self.get_name(s, act, subact, ca, fno)
    if file_exists(path):
        return self.extract_image(path)
    self._logger.error("path not loaded %s" % path)
def _dump_summary_info(self):
    """Create INFO.json on first run, otherwise reload the existing summary.

    Either way, ``self.training_info`` ends up holding the summary dict.
    """
    info_file_path = os.path.join(
        self.save_dir, self.training_name, 'INFO.json')
    if io.file_exists(info_file_path):
        info = io.read_from_json(info_file_path)
    else:
        info = self._summary_info()
        io.write_json(info_file_path, info)
    self.training_info = info
def load_batch_images(self, name, idx):
    """Load the saved batch arrays for step ``idx`` under folder ``name``.

    'image' is mandatory (``np.load`` will raise if the file is missing);
    the remaining arrays are loaded only when their file is present.

    :param name: sub-folder name (expected to be a key of ``self.scalars``)
    :param idx: batch index used as the file-name prefix
    :return: dict with 'image' plus any of 'image_T', 'image_T_gt',
             'pose', 'pose_gt' found on disk
    """
    dic = {}
    dir_path = os.path.join(self.path, name)
    # Diagnostics only: the original logs these and proceeds regardless.
    if name not in self.scalars.keys():
        self._logger.error("Key not found")
    if not os.path.isdir(dir_path):
        self._logger.error("Folder not found")
    path = os.path.join(dir_path, '%s.npy' % idx)
    if not file_exists(path):
        self._logger.error("File not found")
    dic['image'] = np.load(path)
    # Optional companions: one data-driven loop replaces four copy-pasted
    # load-if-exists stanzas.
    optional = (('%sT.npy', 'image_T'),
                ('%sT_gt.npy', 'image_T_gt'),
                ('%spose.npy', 'pose'),
                ('%spose_gt.npy', 'pose_gt'))
    for pattern, key in optional:
        path = os.path.join(dir_path, pattern % idx)
        if file_exists(path):
            dic[key] = np.load(path)
    return dic
def append_index_to_list(self, dic, s, act, subact, ca, fno):
    """Append one index entry, checking that its file exists on disk.

    The entry is appended even when the file is missing; the miss is only
    logged.

    :param dic: list of index entries
    :param s: subject
    :param act: action
    :param subact: sub-action
    :param ca: camera
    :param fno: sequence number
    :return: the same list with ``[s, act, subact, ca, fno]`` appended
    """
    path, _, _ = self.get_name(s, act, subact, ca, fno)
    if not file_exists(path):
        self._logger.error("file found by path %s does not exist" % path)
    entry = [s, act, subact, ca, fno]
    dic.append(entry)
    return dic
def save_backgrounds(self, subject_list):
    """Build and save the per-camera background stack for each subject.

    For every subject, stacks the backgrounds of the 4 cameras into a
    (4, max_size, max_size, 3) array and saves it as
    ``background_subject<subject>.npy`` under ``backgrounds_location``.

    :param subject_list: iterable of subject identifiers
    :return: None
    """
    for subject in subject_list:
        self.get_index_backgrounds(subject)
        backgrounds = np.zeros(
            (4, H36M_CONF.max_size, H36M_CONF.max_size, 3))
        # NOTE(review): 'b' is a free/global name here — presumably this
        # should be 'self'; confirm against the enclosing module.
        for ca in range(1, 5):
            backgrounds[ca - 1, :, :, :] = b.get_backgrounds(ca)
        self._logger.info('Saving background file...')
        file_path = os.path.join(
            backgrounds_location, "background_subject%s.npy" % subject)
        if file_exists(file_path):
            # Overwriting is expected behaviour (save_index_file logs the
            # same event at info level), so this is not an error.
            self._logger.warning("Overwriting previous file")
        np.save(file_path, backgrounds)
def _index_dataset(self) -> list:
    """Index dataset files for faster data loading.

    Uses a cached ``index.npy`` under ``self._path`` when available;
    otherwise walks the directory tree, saves the result, and returns it.

    Returns:
        list: list of indices ([file_path, internal_index])
    """
    index_file_path = os.path.join(self._path, 'index.npy')
    if not io.file_exists(index_file_path):
        # Cache miss: build the index from scratch and persist it.
        self._logger.info('Indexing Human3.6M dataset files...')
        indices = self._index_sub_dir(self._path)
        self._logger.info('Saving indexed dataset...')
        np.save(index_file_path, indices, allow_pickle=True)
        return indices
    self._logger.info(
        'Loading index file {}...'.format(index_file_path))
    return np.load(index_file_path, allow_pickle=True)
def load_mask(self, s, act, subact, ca, fno):
    """Return the extracted mask for the given frame.

    Logs an error and returns None when the mask file is missing.
    """
    path, _, _ = self.get_mask_name(s, act, subact, ca, fno)
    if file_exists(path):
        return self.extract_mask(path)
    self._logger.error("path not loaded %s" % path)
def load_logger(self):
    """Record the index of the last existing 'scalars<N>.pkl' file.

    Probes scalars0.pkl, scalars1.pkl, ... under ``self.path`` until one is
    missing; stores the highest existing index (-1 when none exist) in
    ``self.scalars_saved``.
    """
    last = -1
    while file_exists(
            os.path.join(self.path, 'scalars%s.pkl' % (last + 1))):
        last += 1
    self.scalars_saved = last
def __check_for_executable_script(script):
    """Validate that ``script`` exists and is executable.

    Both helpers are called with ``throw_exception=True``, so the first
    failing check raises.
    """
    for check in (file_exists, is_executable):
        check(script, throw_exception=True)