def migrate_workspace(directory):
    """Migrate a legacy MATLAB-toolkit workspace in *directory* to the YAML layout.

    Converts per-sequence ``<name>_time.txt`` timing files into per-run
    ``<name>_NNN_time.value`` files, writes a ``config.yaml`` and removes the
    old ``configuration.m``.

    Args:
        directory: path of the workspace to migrate.

    Raises:
        WorkspaceException: if the workspace is already initialized or no
            legacy configuration file is found.
    """
    import re
    # Import numpy as a namespace instead of `from numpy import ... all`,
    # which shadowed the builtin all().
    import numpy as np

    config_file = os.path.join(directory, "config.yaml")
    if os.path.isfile(config_file):
        raise WorkspaceException("Workspace already initialized")

    old_config_file = os.path.join(directory, "configuration.m")
    if not os.path.isfile(old_config_file):
        raise WorkspaceException("Old workspace config not detected")

    with open(old_config_file, "r") as fp:
        content = fp.read()

    # The stack name is recorded in the MATLAB config as
    # set_global_variable('stack', '<name>'); exactly one match is required.
    matches = re.findall(
        "set\\_global\\_variable\\('stack', '([A-Za-z0-9]+)'\\)", content)
    if len(matches) != 1:
        raise WorkspaceException("Experiment stack could not be retrieved")
    stack = matches[0]

    # Walk results/<tracker>/<experiment>/<sequence> and split the combined
    # timing file into one .value file per run column.
    for tracker_dir in [x for x in os.scandir(os.path.join(directory, "results")) if x.is_dir()]:
        for experiment_dir in [x for x in os.scandir(tracker_dir.path) if x.is_dir()]:
            for sequence_dir in [x for x in os.scandir(experiment_dir.path) if x.is_dir()]:
                timing_file = os.path.join(
                    sequence_dir.path, "{}_time.txt".format(sequence_dir.name))
                if not os.path.isfile(timing_file):
                    continue
                logger.info("Migrating %s", timing_file)
                times = np.genfromtxt(timing_file, delimiter=",")
                if len(times.shape) == 1:
                    # Single run: promote to a one-column matrix.
                    times = np.reshape(times, (times.shape[0], 1))
                for k in range(times.shape[1]):
                    # An all-zero column marks the first unused run slot;
                    # stop before writing it (and anything after it).
                    if np.all(times[:, k] == 0):
                        break
                    np.savetxt(
                        os.path.join(sequence_dir.path,
                                     "%s_%03d_time.value" % (sequence_dir.name, k + 1)),
                        times[:, k] / 1000, fmt='%.6e')
                os.unlink(timing_file)

    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are not
    # swallowed; use the module logger consistently instead of mixing in the
    # root logger (logging.warning / logging.info).
    try:
        resolve_stack(stack)
    except Exception:
        logger.warning(
            "Stack %s not found, you will have to manually edit and correct config file.",
            stack)

    with open(config_file, 'w') as fp:
        yaml.dump(dict(stack=stack, registry=["."]), fp)

    os.unlink(old_config_file)
    logger.info("Workspace %s migrated", directory)
def do_workspace(config, logger):
    """Initialize (or migrate) the workspace described by *config*.

    Args:
        config: command-line configuration object providing ``stack`` and
            ``workspace`` attributes.
        logger: logger used for user-facing messages.
    """
    from vot.workspace import initialize_workspace, migrate_workspace

    if config.stack is None and os.path.isfile(
            os.path.join(config.workspace, "configuration.m")):
        # A legacy MATLAB workspace is present; migrate it instead of
        # initializing a fresh one.
        migrate_workspace(config.workspace)
        return
    elif config.stack is None:
        # No stack given and nothing to migrate: list the options and bail.
        stacks = list_integrated_stacks()
        logger.error("Unable to continue without a stack")
        logger.error("List of available integrated stacks: ")
        for k, v in stacks.items():
            logger.error(" * %s - %s", k, v)
        return

    stack_file = resolve_stack(config.stack)
    if stack_file is None:
        # Bug fix: previously logged stack_file (always None on this path)
        # instead of the requested stack name.
        logger.error("Experiment stack %s not found", config.stack)
        return

    default_config = dict(stack=config.stack, registry=["./trackers.ini"])
    initialize_workspace(config.workspace, default_config)
    logger.info("Initialized workspace in '%s'", config.workspace)
def __init__(self, directory):
    """Load an initialized workspace rooted at *directory*.

    Reads config.yaml, resolves the experiment stack, downloads the
    dataset if needed and wires up the dataset/results/cache paths.

    Raises:
        WorkspaceException: if the workspace or its stack cannot be loaded.
    """
    config_path = os.path.join(directory, "config.yaml")
    if not os.path.isfile(config_path):
        raise WorkspaceException("Workspace not initialized")
    with open(config_path, 'r') as handle:
        # BaseLoader keeps every scalar as a plain string.
        self._config = yaml.load(handle, Loader=yaml.BaseLoader)

    if "stack" not in self._config:
        raise WorkspaceException(
            "Experiment stack not found in workspace configuration")

    stack_path = resolve_stack(self._config["stack"], directory)
    if stack_path is None:
        raise WorkspaceException("Experiment stack does not exist")
    with open(stack_path, 'r') as handle:
        metadata = yaml.load(handle, Loader=yaml.BaseLoader)
    self._stack = Stack(self, metadata)

    sequences_path = normalize_path(
        self._config.get("sequences", "sequences"), directory)
    self._download(sequences_path)
    self._dataset = VOTDataset(sequences_path)

    self._results = normalize_path(
        self._config.get("results", "results"), directory)
    self._cache = normalize_path("cache", directory)
    self._root = directory
def initialize(directory, config=None, download=True):
    """Create a new workspace in *directory*.

    Writes config.yaml, creates the sequences/results directories and an
    empty trackers.ini, and optionally downloads the stack's dataset.

    Args:
        directory: target workspace directory (created if missing).
        config: mapping dumped to config.yaml; must contain a "stack" key
            when *download* is requested. Defaults to an empty config.
        download: when True, resolve the stack and fetch its dataset.

    Raises:
        WorkspaceException: if the directory already contains a workspace.
    """
    # Avoid the shared-mutable-default pitfall of `config=dict()`; an
    # explicit None sentinel preserves the original default behavior.
    config = dict() if config is None else config

    config_file = os.path.join(directory, "config.yaml")
    if os.path.isfile(config_file):
        raise WorkspaceException("Workspace already initialized")

    os.makedirs(directory, exist_ok=True)

    with open(config_file, 'w') as fp:
        yaml.dump(config, fp)

    os.makedirs(os.path.join(directory, "sequences"), exist_ok=True)
    os.makedirs(os.path.join(directory, "results"), exist_ok=True)

    trackers_file = os.path.join(directory, "trackers.ini")
    if not os.path.isfile(trackers_file):
        # Create an empty tracker registry so the workspace is usable as-is.
        with open(trackers_file, 'w'):
            pass

    if download:
        # Try to retrieve dataset from stack and download it.
        # NOTE(review): raises KeyError if "stack" is missing from config,
        # matching the original behavior.
        stack_file = resolve_stack(config["stack"], directory)
        dataset_directory = normalize_path(
            config.get("sequences", "sequences"), directory)
        if stack_file is None:
            return
        dataset = None
        with open(stack_file, 'r') as fp:
            stack_metadata = yaml.load(fp, Loader=yaml.BaseLoader)
            dataset = stack_metadata["dataset"]
        if dataset:
            Workspace.download_dataset(dataset, dataset_directory)
def do_workspace(config, logger):
    """Initialize (or migrate) the workspace described by *config*.

    Args:
        config: command-line configuration object providing ``stack``,
            ``workspace`` and ``nodownload`` attributes.
        logger: logger used for user-facing messages.
    """
    from vot.workspace import WorkspaceException

    # Removed a stray debug print() of the legacy configuration path.
    if config.stack is None and os.path.isfile(
            os.path.join(config.workspace, "configuration.m")):
        # A legacy MATLAB workspace is present; migrate it instead of
        # initializing a fresh one.
        from vot.utilities.migration import migrate_matlab_workspace
        migrate_matlab_workspace(config.workspace)
        return
    elif config.stack is None:
        # No stack given and nothing to migrate: list the options and bail.
        stacks = list_integrated_stacks()
        logger.error("Unable to continue without a stack")
        logger.error("List of available integrated stacks: ")
        for k, v in stacks.items():
            logger.error(" * %s - %s", k, v)
        return

    stack_file = resolve_stack(config.stack)
    if stack_file is None:
        # Bug fix: previously logged stack_file (always None on this path)
        # instead of the requested stack name.
        logger.error("Experiment stack %s not found", config.stack)
        return

    default_config = dict(stack=config.stack, registry=["./trackers.ini"])

    try:
        Workspace.initialize(config.workspace, default_config,
                             download=not config.nodownload)
        logger.info("Initialized workspace in '%s'", config.workspace)
    except WorkspaceException as we:
        logger.error("Error during workspace initialization: %s", we)
def test_stacks(self):
    """Every integrated stack definition must load into a Stack object."""
    for name in list_integrated_stacks():
        try:
            with open(resolve_stack(name), 'r') as handle:
                metadata = yaml.load(handle, Loader=yaml.BaseLoader)
            Stack(name, NoWorkspace(), **metadata)
        except Exception as e:
            self.fail("Stack {}: {}".format(name, e))
def coerce(self, value, ctx):
    """Turn *value* (a stack name or an inline mapping) into a Stack.

    Raises:
        WorkspaceException: if a named stack cannot be resolved.
    """
    # Make sure analysis/experiment plugins are registered before the
    # stack is constructed.
    importlib.import_module("vot.analysis")
    importlib.import_module("vot.experiment")

    parent = ctx["parent"]

    if not isinstance(value, str):
        # Inline definition: expand the mapping directly.
        return Stack(None, parent, **value)

    stack_path = resolve_stack(value, parent.directory)
    if stack_path is None:
        raise WorkspaceException("Experiment stack does not exist")
    with open(stack_path, 'r') as handle:
        metadata = yaml.load(handle, Loader=yaml.BaseLoader)
    return Stack(value, parent, **metadata)
def __init__(self, directory):
    """Open an existing workspace rooted at *directory*.

    Loads config.yaml, resolves and instantiates the experiment stack,
    downloads the stack's dataset if one is declared, and collects the
    tracker registry paths.

    Raises:
        WorkspaceException: if the workspace is not initialized or its
            stack cannot be resolved.
    """
    # Keep the caller-supplied path as the root; use the normalized form
    # for all filesystem work below.
    self._root = directory
    directory = normalize_path(directory)
    config_file = os.path.join(directory, "config.yaml")
    if not os.path.isfile(config_file):
        raise WorkspaceException("Workspace not initialized")
    with open(config_file, 'r') as fp:
        # BaseLoader keeps every scalar as a plain string (no implicit typing).
        self._config = yaml.load(fp, Loader=yaml.BaseLoader)
    if not "stack" in self._config:
        raise WorkspaceException(
            "Experiment stack not found in workspace configuration")
    stack_file = resolve_stack(self._config["stack"], directory)
    if stack_file is None:
        raise WorkspaceException("Experiment stack does not exist")
    self._storage = LocalStorage(directory)
    with open(stack_file, 'r') as fp:
        stack_metadata = yaml.load(fp, Loader=yaml.BaseLoader)
        self._stack = Stack(self, **stack_metadata)
    dataset_directory = normalize_path(
        self._config.get("sequences", "sequences"), directory)
    if not self._stack.dataset is None:
        # NOTE(review): assumes download_dataset is a no-op when the data is
        # already present locally — confirm against its implementation.
        Workspace.download_dataset(self._stack.dataset, dataset_directory)
    self._dataset = VOTDataset(dataset_directory)
    # Registry entries are stored relative to the workspace; resolve them.
    self._registry = [
        normalize_path(r, directory)
        for r in self._config.get("registry", [])
    ]
def migrate_matlab_workspace(directory):
    """Migrate a legacy MATLAB-toolkit workspace in *directory* to the YAML layout.

    Extracts the stack name from configuration.m, converts per-sequence
    timing files into per-run .value files, reconstructs trackers.ini from
    any tracker_<id>.m description files, and writes config.yaml.

    Raises:
        WorkspaceException: if the workspace is already initialized or no
            legacy configuration file is found.
    """
    logger = logging.getLogger("vot")

    logger.info("Attempting to migrate workspace in %s", directory)

    def scan_text(pattern, content, default=None):
        # Return the single match of `pattern` in `content`, or `default`
        # when the pattern matches zero or multiple times.
        matches = re.findall(pattern, content)
        if not len(matches) == 1:
            return default
        return matches[0]

    config_file = os.path.join(directory, "config.yaml")
    if os.path.isfile(config_file):
        raise WorkspaceException("Workspace already initialized")

    old_config_file = os.path.join(directory, "configuration.m")
    if not os.path.isfile(old_config_file):
        raise WorkspaceException("Old workspace config not detected")

    with open(old_config_file, "r") as fp:
        content = fp.read()

    # The stack name is recorded in the MATLAB config as
    # set_global_variable('stack', '<name>').
    stack = scan_text("set\\_global\\_variable\\('stack', '([A-Za-z0-9-_]+)'\\)", content)

    if stack is None:
        raise WorkspaceException("Experiment stack could not be retrieved")

    tracker_ids = list()

    # Walk results/<tracker>/<experiment>/<sequence> and split the combined
    # timing file into one .value file per run column.
    for tracker_dir in [x for x in os.scandir(os.path.join(directory, "results")) if x.is_dir()]:
        if not is_valid_identifier(tracker_dir.name):
            logger.info("Results directory %s is not a valid identifier, skipping.", tracker_dir.name)
            continue
        logger.debug("Scanning results for %s", tracker_dir.name)
        tracker_ids.append(tracker_dir.name)
        for experiment_dir in [x for x in os.scandir(tracker_dir.path) if x.is_dir()]:
            for sequence_dir in [x for x in os.scandir(experiment_dir.path) if x.is_dir()]:
                timing_file = os.path.join(sequence_dir.path, "{}_time.txt".format(sequence_dir.name))
                if os.path.isfile(timing_file):
                    logger.debug("Migrating %s", timing_file)
                    times = np.genfromtxt(timing_file, delimiter=",")
                    if len(times.shape) == 1:
                        # Single run: promote to a one-column matrix.
                        times = np.reshape(times, (times.shape[0], 1))
                    for k in range(times.shape[1]):
                        # An all-zero column marks the first unused run slot;
                        # stop before writing it (and anything after it).
                        if np.all(times[:, k] == 0):
                            break
                        np.savetxt(os.path.join(sequence_dir.path, \
                            "%s_%03d_time.value" % (sequence_dir.name, k+1)), \
                            times[:, k] / 1000, fmt='%.6e')
                    os.unlink(timing_file)

    # Reconstruct tracker metadata from any tracker_<id>.m description files.
    trackers = dict()
    for tid in tracker_ids:
        old_description = os.path.join(directory, "tracker_{}.m".format(tid))
        label = tid
        if os.path.isfile(old_description):
            with open(old_description, "r") as fp:
                content = fp.read()
            # Prefer the declared tracker_label; fall back to the identifier.
            label = scan_text("tracker\\_label *= * ['\"](.*)['\"]", content, tid)
        trackers[tid] = dict(label=label, protocol="unknown", command="")

    if trackers:
        # Write a minimal trackers.ini; protocol/command must be filled in
        # manually by the user afterwards.
        with open(os.path.join(directory, "trackers.ini"), "w") as fp:
            for tid, tdata in trackers.items():
                fp.write("[" + tid + "]\n")
                for k, v in tdata.items():
                    fp.write(k + " = " + v + "\n")
                fp.write("\n\n")

    if resolve_stack(stack) is None:
        logger.warning("Stack %s not found, you will have to manually edit and correct config file.", stack)

    with open(config_file, 'w') as fp:
        yaml.dump(dict(stack=stack, registry=["."]), fp)

    # NOTE(review): the legacy configuration.m is deliberately left in place
    # (removal is disabled below) — confirm whether it should be deleted.
    #os.unlink(old_config_file)

    logger.info("Workspace %s migrated", directory)