def __init__(self, data_path): """ Create dataset instance. Empty directories (for EXIF, matches, etc) will be created if they don't exist already. :param data_path: Path to directory containing dataset """ self.data_path = data_path # Load configuration. config_file = os.path.join(self.data_path, 'config.yaml') self.config = config.load_config(config_file) # Load list of images. image_list_file = os.path.join(self.data_path, 'image_list.txt') if os.path.isfile(image_list_file): with open(image_list_file) as fin: lines = fin.read().splitlines() self.set_image_list(lines) else: self.set_image_path(os.path.join(self.data_path, 'images')) # Create output folders. for p in [self.__exif_path(), self.__feature_path(), self.__matches_path()]: io.mkdir_p(p)
def load_opensfm_config(filepath):
    """
    :param filepath: path to the OpenSfM config.yaml file
    """
    # Delegate to OpenSfM's config module.
    return config.load_config(filepath)
def __init__(self, data_path):
    '''
    Create meta dataset instance for large scale reconstruction.

    :param data_path: Path to directory containing meta dataset
    '''
    self.data_path = data_path

    config_file = os.path.join(self.data_path, 'config.yaml')
    self.config = config.load_config(config_file)

    self._image_list_file_name = 'image_list_with_gps.tsv'
    self._clusters_file_name = 'clusters.npz'
    self._clusters_with_neighbors_file_name = 'clusters_with_neighbors.npz'

    io.mkdir_p(self._submodels_path())
def __init__(self, data_path):
    '''
    Create meta dataset instance for large scale reconstruction.

    :param data_path: Path to directory containing meta dataset
    '''
    self.data_path = data_path

    config_file = os.path.join(self.data_path, 'config.yaml')
    self.config = config.load_config(config_file)

    self._image_list_file_name = 'image_list_with_gps.tsv'
    self._clusters_file_name = 'clusters.npz'
    self._clusters_with_neighbors_file_name = 'clusters_with_neighbors.npz'
    self._clusters_with_neighbors_geojson_file_name = 'clusters_with_neighbors.geojson'

    io.mkdir_p(self._submodels_path())
def __init__(self, data_path): """ Create meta dataset instance for large scale reconstruction. :param data_path: Path to directory containing meta dataset """ self.data_path = os.path.abspath(data_path) config_file = os.path.join(self.data_path, "config.yaml") self.config = config.load_config(config_file) self._image_list_file_name = "image_list_with_gps.tsv" self._clusters_file_name = "clusters.npz" self._clusters_with_neighbors_file_name = "clusters_with_neighbors.npz" self._clusters_with_neighbors_geojson_file_name = ( "clusters_with_neighbors.geojson") self._clusters_geojson_file_name = "clusters.geojson" io.mkdir_p(self._submodels_path())
def _load_config(self):
    config_file = os.path.join(self.data_path, 'config.yaml')
    self.config = config.load_config(config_file)
pdr_shots_paths = []
for pdr_shots_type in ('./pdr_shots.txt', './osfm/pdr_shots.txt'):
    pdr_shots_paths.extend(glob.glob(pdr_shots_type))

if not pdr_shots_paths or not os.path.exists(pdr_shots_paths[0]):
    logger.error("pdr shots not found!")
    exit(0)

config_file_paths = []
for config_file_type in ('./config.yaml', './osfm/config.yaml'):
    config_file_paths.extend(glob.glob(config_file_type))

if not config_file_paths or not os.path.exists(config_file_paths[0]):
    logger.error("config file not found!")
    exit(0)
else:
    data_config = config.load_config(config_file_paths[0])

recon_file_paths = []
for recon_file_type in ('./reconstruction.json', './osfm/reconstruction.json'):
    recon_file_paths.extend(glob.glob(recon_file_type))

if not recon_file_paths or not os.path.exists(recon_file_paths[0]):
    logger.error("reconstructions not found!")
    exit(0)

pdr_gps_picker(plan_paths[0], pdr_shots_paths[0], recon_file_paths[0],
               data_config['reconstruction_scale_factor'],
               pdr_extrapolation_frames)
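The script above repeats the same lookup three times: glob in the working directory, then in ./osfm/, and abort if nothing matches. If one wanted to factor that out, a small helper along these lines would do; find_first_existing is a hypothetical name, not part of the original script, and it exits with a nonzero status instead of the original exit(0).

import glob
import logging
import sys

logger = logging.getLogger(__name__)

def find_first_existing(patterns, description):
    """Return the first glob match for any of the patterns, or exit with an error."""
    for pattern in patterns:
        matches = glob.glob(pattern)
        if matches:
            return matches[0]
    logger.error("%s not found!", description)
    sys.exit(1)

pdr_shots_path = find_first_existing(('./pdr_shots.txt', './osfm/pdr_shots.txt'), "pdr shots")
config_file_path = find_first_existing(('./config.yaml', './osfm/config.yaml'), "config file")
recon_file_path = find_first_existing(('./reconstruction.json', './osfm/reconstruction.json'), "reconstructions")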
def load_config(self):
    self.config = config.load_config(self._config_file())
def load_config(self):
    config_file = os.path.join(self.data_path, "config.yaml")
    self.config = config.load_config(config_file)
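All of these snippets delegate the actual work to OpenSfM's config.load_config. As a rough mental model only (a sketch, not the library's real implementation), such a loader overlays the values from config.yaml on a dictionary of defaults:

import os
import yaml

DEFAULT_CONFIG = {          # illustrative defaults only
    'feature_type': 'HAHOG',
    'processes': 1,
}

def load_config(config_file):
    """Return default settings overridden by any values found in config_file."""
    settings = dict(DEFAULT_CONFIG)
    if os.path.isfile(config_file):
        with open(config_file) as f:
            user_settings = yaml.safe_load(f) or {}
        settings.update(user_settings)
    return settings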