class Experiment(Attributee):
    """Base class for a tracking evaluation experiment.

    Bundles the experiment configuration (realtime constraints, noise,
    injection, sequence transformers, analyses) and provides common helpers
    for building tracker runtimes and locating/storing results.
    """

    realtime = Nested(RealtimeConfig, default=None)
    noise = Nested(NoiseConfig, default=None)
    inject = Nested(InjectConfig, default=None)
    transformers = List(Object(transformer_resolver), default=[])
    analyses = List(Object(analysis_resolver), default=[])

    def __init__(self, _identifier: str, _storage: "LocalStorage", **kwargs):
        """Create an experiment.

        Args:
            _identifier (str): Unique name of the experiment.
            _storage (LocalStorage): Default storage for results and logs.
        """
        self._identifier = _identifier
        self._storage = _storage
        super().__init__(**kwargs)
        # TODO: validate analysis names

    @property
    def identifier(self) -> str:
        """str: Unique identifier of the experiment."""
        return self._identifier

    @property
    def storage(self) -> "Storage":
        """Storage: Default storage associated with the experiment."""
        return self._storage

    def _get_initialization(self, sequence: "Sequence", index: int):
        # The initialization region is taken from the groundtruth annotation.
        return sequence.groundtruth(index)

    def _get_runtime(self, tracker: "Tracker", sequence: "Sequence"):
        """Create a runtime for the tracker, wrapped in a realtime decorator
        when a realtime configuration is present."""
        if self.realtime is not None:  # idiom: was "not ... is None"
            grace = to_number(self.realtime.grace, min_n=0)
            fps = to_number(self.realtime.fps, min_n=0, conversion=float)
            # Frame interval comes from sequence metadata; the configured
            # fps is only a fallback when the sequence does not declare one.
            interval = 1 / float(sequence.metadata("fps", fps))
            return RealtimeTrackerRuntime(tracker.runtime(), grace, interval)
        return tracker.runtime()

    @abstractmethod
    def execute(self, tracker: "Tracker", sequence: "Sequence", force: bool = False, callback: typing.Callable = None):
        """Run the experiment for a tracker on a sequence. Implemented by subclasses."""
        raise NotImplementedError

    @abstractmethod
    def scan(self, tracker: "Tracker", sequence: "Sequence"):
        """Check which results exist for a tracker on a sequence. Implemented by subclasses."""
        raise NotImplementedError

    def results(self, tracker: "Tracker", sequence: "Sequence") -> "Results":
        """Return the results container for the given tracker and sequence,
        preferring the tracker's own storage when it has one.

        Note: the previous version imported vot.tracker.Results and
        vot.workspace.LocalStorage locally without using them (the return
        annotation is a string and is never evaluated here); the unused
        imports were removed.
        """
        if tracker.storage is not None:
            return tracker.storage.results(tracker, self, sequence)
        return self._storage.results(tracker, self, sequence)

    def log(self, identifier: str):
        """Open a timestamped log writer for the given identifier in the
        "logs" substorage."""
        return self._storage.substorage("logs").write(
            "{}_{:%Y-%m-%dT%H-%M-%S.%f%z}.log".format(identifier, datetime.now()))

    def transform(self, sequence: "Sequence"):
        """Apply all configured transformers to the sequence, in declared order."""
        for transformer in self.transformers:
            sequence = transformer(sequence)
        return sequence
class SupervisedExperiment(MultiRunExperiment):
    # Supervised experiment: the tracker is re-initialized from groundtruth
    # after every detected failure (overlap with groundtruth at or below
    # the failure_overlap threshold).

    # Number of frames to skip after a failure before re-initializing.
    skip_initialize = Integer(val_min=1, default=1)
    # Frame tags during which re-initialization is postponed.
    skip_tags = List(String(), default=[])
    # Overlap threshold at or below which a frame counts as a failure.
    failure_overlap = Float(val_min=0, val_max=1, default=0)

    def execute(self, tracker: Tracker, sequence: Sequence, force: bool = False, callback: Callable = None):
        """Run the tracker on the sequence for the configured number of
        repetitions, recording a trajectory (with initialization/failure
        markers and per-frame times) for each repetition.

        Args:
            tracker (Tracker): Tracker to evaluate.
            sequence (Sequence): Sequence to run on.
            force (bool): Re-run repetitions even when results already exist.
            callback (Callable): Optional progress callback, called with the
                fraction of completed repetitions.
        """
        results = self.results(tracker, sequence)
        with self._get_runtime(tracker, sequence) as runtime:
            for i in range(1, self.repetitions + 1):
                name = "%s_%03d" % (sequence.name, i)
                # Skip repetitions that already have stored results.
                if Trajectory.exists(results, name) and not force:
                    continue
                # NOTE(review): _can_stop presumably detects deterministic
                # trackers where further repetitions are redundant - confirm.
                if self._can_stop(tracker, sequence):
                    return
                trajectory = Trajectory(sequence.length)
                frame = 0
                # Outer loop: each pass (re)initializes the tracker.
                while frame < sequence.length:
                    _, properties, elapsed = runtime.initialize(
                        sequence.frame(frame),
                        self._get_initialization(sequence, frame))
                    properties["time"] = elapsed
                    trajectory.set(frame, Special(Special.INITIALIZATION), properties)
                    frame = frame + 1
                    # Inner loop: track until failure or end of sequence.
                    while frame < sequence.length:
                        region, properties, elapsed = runtime.update(
                            sequence.frame(frame))
                        properties["time"] = elapsed
                        if calculate_overlap(
                                region, sequence.groundtruth(frame),
                                sequence.size) <= self.failure_overlap:
                            # Failure: record marker, skip ahead, then break
                            # back to the re-initialization loop.
                            trajectory.set(frame, Special(Special.FAILURE), properties)
                            frame = frame + self.skip_initialize
                            if self.skip_tags:
                                # Postpone re-initialization while the current
                                # frame carries any of the skip tags.
                                while frame < sequence.length:
                                    if not [
                                            t for t in sequence.tags(frame)
                                            if t in self.skip_tags
                                    ]:
                                        break
                                    frame = frame + 1
                            break
                        else:
                            trajectory.set(frame, region, properties)
                            frame = frame + 1
                if callback:
                    # Report progress as the fraction of completed repetitions.
                    callback(i / self.repetitions)
                trajectory.write(results, name)
class Workspace(Attributee):
    """Workspace class represents the main junction of trackers, datasets and experiments.

    Each workspace performs given experiments on a provided dataset.
    """

    registry = List(
        String(transformer=lambda x, ctx: normalize_path(
            x, ctx["parent"].directory)))
    stack = StackLoader()
    sequences = String(default="sequences")
    report = Nested(ReportConfiguration)

    @staticmethod
    def initialize(directory: str, config: typing.Optional[typing.Dict] = None, download: bool = True) -> None:
        """Create the directory layout and configuration for a new workspace.

        Args:
            directory (str): Root for workspace storage
            config (typing.Optional[typing.Dict], optional): Workspace initial configuration. Defaults to None.
            download (bool, optional): Download the dataset immediately. Defaults to True.

        Raises:
            WorkspaceException: When a workspace cannot be created.
        """
        # Normalize config once: the previous version crashed with a
        # TypeError on config["stack"] when download=True and config=None.
        config = config if config is not None else dict()
        config_file = os.path.join(directory, "config.yaml")
        if os.path.isfile(config_file):
            raise WorkspaceException("Workspace already initialized")

        os.makedirs(directory, exist_ok=True)

        with open(config_file, 'w') as fp:
            yaml.dump(config, fp)

        os.makedirs(os.path.join(directory, "sequences"), exist_ok=True)
        os.makedirs(os.path.join(directory, "results"), exist_ok=True)

        if not os.path.isfile(os.path.join(directory, "trackers.ini")):
            open(os.path.join(directory, "trackers.ini"), 'w').close()

        if download:
            # Try to retrieve dataset from stack and download it; nothing to
            # do when the configuration does not declare a stack.
            if "stack" not in config:
                return
            stack_file = resolve_stack(config["stack"], directory)
            dataset_directory = normalize_path(
                config.get("sequences", "sequences"), directory)
            if stack_file is None:
                return
            dataset = None
            with open(stack_file, 'r') as fp:
                stack_metadata = yaml.load(fp, Loader=yaml.BaseLoader)
                dataset = stack_metadata["dataset"]
            if dataset:
                Workspace.download_dataset(dataset, dataset_directory)

    @staticmethod
    def download_dataset(dataset: str, directory: str) -> None:
        """Download the dataset if no dataset is present already.

        Args:
            dataset (str): Dataset URL or ID
            directory (str): Directory where the dataset is saved
        """
        if os.path.exists(os.path.join(directory, "list.txt")):
            # TODO: this has to be improved now that we also support other
            # datasets that may not have list.txt
            return  # was "return False"; normalized to match the -> None annotation
        from vot.dataset import download_dataset
        download_dataset(dataset, directory)
        _logger.info("Download completed")

    @staticmethod
    def load(directory):
        """Load a workspace from a given location.

        Args:
            directory (str): Root directory of the workspace.

        Raises:
            WorkspaceException: If the directory is not an initialized workspace.

        Returns:
            Workspace: The loaded workspace object.
        """
        directory = normalize_path(directory)
        config_file = os.path.join(directory, "config.yaml")
        if not os.path.isfile(config_file):
            raise WorkspaceException("Workspace not initialized")

        with open(config_file, 'r') as fp:
            config = yaml.load(fp, Loader=yaml.BaseLoader)

        return Workspace(directory, **config)

    def __init__(self, directory: str, **kwargs):
        """Do not call this constructor directly unless you know what you are
        doing, instead use the static Workspace.load method.

        Args:
            directory (str): Root directory of the workspace; may be None,
                in which case a NullStorage is used.
        """
        self._directory = directory
        self._storage = LocalStorage(
            directory) if directory is not None else NullStorage()
        super().__init__(**kwargs)
        dataset_directory = normalize_path(self.sequences, directory)
        if self.stack.dataset is not None:  # idiom: was "not ... is None"
            Workspace.download_dataset(self.stack.dataset, dataset_directory)
        self._dataset = load_dataset(dataset_directory)

    @property
    def directory(self) -> str:
        """Returns the root directory for the workspace.

        Returns:
            str: The absolute path to the root of the workspace.
        """
        return self._directory

    @property
    def dataset(self) -> Dataset:
        """Returns dataset associated with the workspace

        Returns:
            Dataset: The dataset object.
        """
        return self._dataset

    @property
    def storage(self) -> Storage:
        """Returns the storage object associated with this workspace.

        Returns:
            Storage: The storage object.
        """
        return self._storage

    def list_results(self, registry: "Registry") -> typing.List["Tracker"]:
        """Utility method that looks for all subfolders in the results folder
        and tries to resolve them as tracker references. It returns a list of
        Tracker objects, i.e. trackers that have at least some results or an
        existing results directory.

        Returns:
            [typing.List[Tracker]]: A list of trackers with results.
        """
        references = self._storage.substorage("results").folders()
        return registry.resolve(*references)
class ProgramGroup(Attributee, Serializable):
    # A named group of programs that are started and stopped together,
    # in an order derived from their declared dependencies.

    title = String(default="")
    description = String(default="")
    log = String(default=None)
    user = String(default=None)
    group = String(default=None)
    environment = Map(String())
    plugins = List(String(), default=[])
    programs = Map(ProgramDescription())

    def __init__(self, *args, _source: str = None, **kwargs):
        """Build the group: load plugins, register non-ignored programs and
        compute the startup order by topologically sorting dependencies.

        Args:
            _source (str, optional): Origin of this configuration
                (presumably the config file path - TODO confirm).
        """
        super().__init__(*args, **kwargs)
        self._programs = {}
        self._source = _source
        registry = plugin_registry()

        def load_plugin(name):
            # Prefer plugins from the registry; otherwise import by name.
            # NOTE(review): falls through to return None when the imported
            # class is not a Plugin subclass - confirm run_plugins tolerates
            # None entries in the plugin list.
            if name in registry:
                return registry[name]()
            plugin_cls = import_class(name)
            if issubclass(plugin_cls, Plugin):
                return plugin_cls()

        self._plugins = [load_plugin(x) for x in self.plugins]
        for identifier, item in self.programs.items():
            # Programs flagged with "ignore" are left out of the group.
            if getattr(item, "ignore", False):
                continue
            self._programs[identifier] = item
            run_plugins(self._plugins, 'on_program_init', item)
        run_plugins(self._plugins, 'on_group_init', self)
        # Build the dependency graph; every dependency must name a program
        # registered in this group.
        graph = {}
        for i, program in self._programs.items():
            dependencies = set()
            for d in program.depends:
                if not d in self._programs:
                    raise ValueError("Dependency %s not defined" % d)
                dependencies.add(d)
            graph[i] = dependencies
        # Topological sort yields blocks of mutually independent programs;
        # flatten them into a single startup sequence.
        blocks = toposort(graph)
        sequence = []
        for block in blocks:
            sequence.extend(list(block))
        self.startup_sequence = sequence

    def start(self):
        """Start all programs in dependency order, notifying plugins
        before and after each program and around the whole group."""
        run_plugins(self._plugins, 'on_group_start', self)
        for item in self.startup_sequence:
            run_plugins(self._plugins, 'on_program_start', self._programs[item])
            self._programs[item].start()
            run_plugins(self._plugins, 'on_program_started', self._programs[item])
        run_plugins(self._plugins, 'on_group_started', self)

    def stop(self, force=False):
        """Stop all programs in reverse startup order, notifying plugins.

        Args:
            force (bool): Passed through to each program's stop method.
        """
        run_plugins(self._plugins, 'on_group_stop', self)
        for item in reversed(self.startup_sequence):
            run_plugins(self._plugins, 'on_program_stop', self._programs[item])
            self._programs[item].stop(force)
            run_plugins(self._plugins, 'on_program_stopped', self._programs[item])
        run_plugins(self._plugins, 'on_group_stopped', self)

    def valid(self):
        """Return True if at least one program is valid and no required
        program is invalid (programs default to required=True)."""
        valid = 0
        for program in self._programs.values():
            if program.valid():
                valid = valid + 1
            elif getattr(program, "required", True):
                # An invalid required program fails the whole group.
                return False
        return valid > 0

    def announce(self, message):
        # Print a highlighted message to the terminal.
        print_colored(message, RED, True)
        print("")

    @property
    def source(self):
        # Configuration source recorded at construction time.
        return self._source
class ProgramHandler(Attributee):
    """Manages a single external program: launches it in a background thread,
    streams its output to the console and/or a log file, restarts it according
    to the configured policy and stops it on request."""

    command = String(readonly=False)
    directory = String(default=None, readonly=False)
    environment = Map(String(), readonly=False)
    required = Boolean(default=False)
    restart = Boolean(default=False)
    user = String(default=None)
    group = String(default=None)
    console = String(default=None)
    depends = List(String(), default=[])
    log = String(default=None)
    logappend = Boolean(default=False)
    delay = Integer(val_min=0, default=0)
    signal = Enumeration(_signals, default="term")
    auxiliary = Unclaimed(
        description="Remaining arguments, enables plugin configuration")

    def __init__(self, *args, _identifier: str = None, **kwargs):
        """Prepare the handler (daemon monitor thread, user/group ids,
        console color); the program is not started until start() is called."""
        super().__init__(*args, **kwargs)
        self.thread = threading.Thread(target=self.run)
        self.thread.daemon = True
        self.identifier = _identifier
        self.running = False
        self.process = None
        self._user_id = get_userid(self.user)
        self._group_id = get_groupid(self.group)
        self.color = next(_COLOR_POOL)
        self.observers = []
        self.attempts = 0
        self.logfile = None

    def observe(self, observer):
        """Register an observer object."""
        self.observers.append(observer)

    def start(self):
        """Start the background monitor thread (no-op if already running)."""
        if self.running:
            return
        self.thread.start()

    def run(self):
        """Monitor-thread body: spawn the program, forward its output, and
        restart it according to the restart policy until stopped."""
        self.running = True
        if self.log is not None:
            os.makedirs(os.path.dirname(self.log), exist_ok=True)

        environment = os.environ.copy()
        environment.update(self.environment)
        environment = {k: expandvars(v) for k, v in environment.items()}

        # BUGFIX: open modes were swapped - logappend=True must append ('a');
        # previously it truncated ('w') the log on every start and vice versa.
        if self.logappend:
            self.logfile = open(self.log, 'a') if self.log is not None else None
        else:
            self.logfile = open(self.log, 'w') if self.log is not None else None

        if self.logfile is not None:
            self.logfile.write("\n----- Starting log at %s ------\n\n" %
                               datetime.datetime.now())

        while self.running:
            returncode = None
            try:
                self.attempts = self.attempts + 1
                self.announce("Starting program (attempt %d)" % self.attempts)
                full_command = shlex.split(
                    expandvars(self.command, additional=environment))
                full_directory = expandvars(
                    self.directory if self.directory is not None else os.curdir,
                    additional=environment)
                if self.console and is_linux():
                    # Force line buffering so console output arrives promptly.
                    full_command.insert(0, 'stdbuf')
                    full_command.insert(1, '-oL')
                # Drop privileges to the configured user/group in the child.
                preexec_fn = prepare_and_demote(self._user_id[0],
                                                self._group_id[0],
                                                self._group_id[2])
                self.process = subprocess.Popen(full_command,
                                                shell=False,
                                                bufsize=0,
                                                stdout=subprocess.PIPE,
                                                stderr=subprocess.STDOUT,
                                                env=environment,
                                                cwd=full_directory,
                                                preexec_fn=preexec_fn)
                self.announce("PID = %d" % self.process.pid)
                # Pump output until the pipe closes (process exit).
                while True:
                    logline = self.process.stdout.readline()
                    if not logline:
                        break
                    logline = logline.decode("utf-8")
                    if self.console:
                        with _TERMINAL_LOCK:
                            print_colored(
                                "[%s]: " % self.identifier.ljust(20, ' '),
                                self.color)
                            # new line is already present
                            sys.stdout.write(logline)
                    if self.logfile is not None:
                        self.logfile.write(logline)
                        self.logfile.flush()
                self.process.wait()
                returncode = self.process.returncode
            except OSError as err:
                returncode = None
                self.announce("Error: %s" % str(err))

            if returncode is not None:  # idiom: was "!= None"
                if returncode < 0:
                    # Negative return code means the child died from a signal.
                    self.announce("Program has stopped (signal %d)" % -returncode)
                else:
                    self.announce("Program has stopped (exit code %d)" % returncode)
            else:
                self.announce("Execution stopped because of an error")
            self.process = None

            if not self.running:
                break
            if self.restart is False:
                break
            # A non-boolean restart value limits the number of attempts.
            if self.restart is not True and self.restart == self.attempts:
                # Typo fix: message previously read "Maximum numer".
                self.announce("Maximum number of attempts reached, giving up.")
                break
            self.announce("Restarting program.")
            time.sleep(1)

    def announce(self, message):
        """Print an identifier-prefixed message and mirror it to the log file."""
        with _TERMINAL_LOCK:
            print_colored("[%s]: " % self.identifier.ljust(20, ' '), self.color)
            print(message)
            if hasattr(self, "logfile") and self.logfile is not None:
                self.logfile.write(message)
                self.logfile.write("\n")

    def stop(self, force=False):
        """Stop the program: clear the running flag, send the configured
        signal, wait up to 5 seconds, then kill if still alive.

        Args:
            force (bool): When True, do not clear the running flag first
                (the restart loop may respawn unless the process dies).
        """
        if self.running and not force:
            self.running = False
        try:
            if self.process:
                self.announce("Stopping program.")
                self.process.send_signal(self.signal)
                #self.process.terminate()
                # send_signal(signal.CTRL_C_EVENT)
        except OSError:
            pass
        self.thread.join(5)
        try:
            if self.process:
                self.announce("Escalating, killing program.")
                self.process.kill()
        except OSError:
            pass

    def valid(self):
        # Is valid if it is running or it was not even executed
        return self.running or self.attempts == 0
class ReportConfiguration(Attributee):
    # Configuration of report generation: visual style, tracker ordering
    # and the list of report generators to run.

    style = Nested(StyleManager)
    sort = Nested(TrackerSorter)
    generators = List(Object(subclass=Generator), default=[])