class TrackerSorter(Attributee):
    """Orders trackers by the score of a selected analysis result.

    When no experiment/analysis is configured, trackers keep their
    original order.
    """

    experiment = String(default=None)  # identifier of the experiment to rank by
    analysis = String(default=None)  # name of the analysis within that experiment
    result = Integer(val_min=0, default=0)  # index of the result column used as score

    def __call__(self, experiments, trackers, sequences):
        """Return tracker indices sorted by descending score.

        Raises:
            RuntimeError: when the configured experiment or analysis
                cannot be located.
        """
        if self.experiment is None or self.analysis is None:
            return range(len(trackers))

        selected_experiment = next((e for e in experiments if e.identifier == self.experiment), None)
        if selected_experiment is None:
            raise RuntimeError("Experiment not found")

        selected_analysis = next((a for a in selected_experiment.analyses if a.name == self.analysis), None)
        if selected_analysis is None:
            raise RuntimeError("Analysis not found")

        # Block until the analysis produces its per-tracker results.
        values = selected_analysis.commit(selected_experiment, trackers, sequences).result()
        scores = [row[self.result] for row in values]
        # Stable descending sort of indices by score (ties keep original order).
        order = sorted(range(len(scores)), key=scores.__getitem__, reverse=True)
        return order
class Stack(Attributee):
    """A named collection of experiments bound to a workspace.

    Behaves as a read-only mapping-like container of experiments.
    """

    title = String()  # display title of the stack
    dataset = String(default="")  # dataset identifier, empty when none
    url = String(default="")  # reference URL for the stack
    deprecated = Boolean(default=False)  # marks stacks no longer maintained
    experiments = Map(Object(experiment_resolver))  # identifier -> experiment

    def __init__(self, workspace: "Workspace", **kwargs):
        """Bind the stack to its owning workspace before attribute setup."""
        self._workspace = workspace
        super().__init__(**kwargs)

    @property
    def workspace(self):
        """The workspace this stack belongs to."""
        return self._workspace

    def __iter__(self):
        """Iterate over experiment objects (not identifiers)."""
        yield from self.experiments.values()

    def __len__(self):
        """Number of experiments in the stack."""
        return len(self.experiments)

    def __getitem__(self, identifier):
        """Look up an experiment by its identifier."""
        return self.experiments[identifier]
class SupervisedExperiment(MultiRunExperiment):
    """Supervised (reset-based) experiment: the tracker is reinitialized after
    every detected failure, optionally skipping frames after the failure."""

    # Number of frames to skip after a failure before reinitializing (at least 1).
    skip_initialize = Integer(val_min=1, default=1)
    # If a post-failure frame carries any of these tags, keep skipping forward.
    skip_tags = List(String(), default=[])
    # Overlap at or below this value counts as tracking failure (0 = only total loss).
    failure_overlap = Float(val_min=0, val_max=1, default=0)

    def execute(self, tracker: Tracker, sequence: Sequence, force: bool = False, callback: Callable = None):
        """Run the tracker on a sequence for the configured number of repetitions.

        Existing trajectories are reused unless ``force`` is set. ``callback``
        (if given) receives the fraction of repetitions completed.
        """
        results = self.results(tracker, sequence)
        with self._get_runtime(tracker, sequence) as runtime:
            for i in range(1, self.repetitions+1):
                name = "%s_%03d" % (sequence.name, i)
                # Skip repetitions that already have stored results.
                if Trajectory.exists(results, name) and not force:
                    continue
                # NOTE(review): _can_stop presumably checks a deterministic-result
                # shortcut; when it triggers, remaining repetitions are abandoned.
                if self._can_stop(tracker, sequence):
                    return
                trajectory = Trajectory(sequence.length)
                frame = 0
                # Outer loop: each iteration (re)initializes the tracker at `frame`.
                while frame < sequence.length:
                    _, properties, elapsed = runtime.initialize(sequence.frame(frame), self._get_initialization(sequence, frame))
                    properties["time"] = elapsed
                    trajectory.set(frame, Special(Special.INITIALIZATION), properties)
                    frame = frame + 1
                    # Inner loop: track until failure or end of sequence.
                    while frame < sequence.length:
                        region, properties, elapsed = runtime.update(sequence.frame(frame))
                        properties["time"] = elapsed
                        if calculate_overlap(region, sequence.groundtruth(frame), sequence.size) <= self.failure_overlap:
                            # Failure: record marker, jump ahead, then break back to
                            # the outer loop which reinitializes at the new frame.
                            trajectory.set(frame, Special(Special.FAILURE), properties)
                            frame = frame + self.skip_initialize
                            if self.skip_tags:
                                # Keep advancing while the frame carries a skip tag.
                                while frame < sequence.length:
                                    if not [t for t in sequence.tags(frame) if t in self.skip_tags]:
                                        break
                                    frame = frame + 1
                            break
                        else:
                            trajectory.set(frame, region, properties)
                            frame = frame + 1
                if callback:
                    callback(i / self.repetitions)
                trajectory.write(results, name)
class Analysis(Attributee):
    """Abstract base class for performance analyses.

    Subclasses implement :meth:`compatible`, :meth:`compute`, :meth:`describe`
    and :meth:`axes`; :meth:`commit` schedules asynchronous computation via the
    default analysis processor.
    """

    name = String(default=None)  # optional user-facing name, excluded from the identifier

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._identifier_cache = None  # lazily computed by the identifier property

    def compatible(self, experiment: Experiment):
        """Return True when this analysis can process the given experiment."""
        raise NotImplementedError

    @property
    def title(self) -> str:
        """Human-readable title of the analysis."""
        raise NotImplementedError

    @property
    def identifier(self) -> str:
        """Stable identifier: fully qualified class name plus a hash of the
        configuration parameters (the ``name`` attribute is excluded so that
        renaming does not change the identity)."""
        # FIX: idiomatic identity test (was `if not self._identifier_cache is None`).
        if self._identifier_cache is not None:
            return self._identifier_cache
        params = self.dump()
        # FIX: pop with default instead of del — robust if dump omits "name".
        params.pop("name", None)
        confighash = arg_hash(**params)
        self._identifier_cache = class_fullname(self) + "@" + confighash
        return self._identifier_cache

    def describe(self) -> Tuple["Result"]:
        """Returns a tuple of descriptions of results"""
        raise NotImplementedError

    def compute(self, experiment: Experiment, trackers: List[Tracker], sequences: List[Sequence]) -> Grid:
        """Compute the analysis results for the given trackers and sequences."""
        raise NotImplementedError

    def axes(self):
        """Returns a tuple of axes of results or None if only a single result tuple is returned"""
        raise NotImplementedError

    def commit(self, experiment: Experiment, trackers: List[Tracker], sequences: List[Sequence]):
        """Schedule this analysis on the default processor; returns a future-like object."""
        from vot.analysis.processor import AnalysisProcessor
        return AnalysisProcessor.commit_default(self, experiment, trackers, sequences)
class MultiStartExperiment(Experiment):
    """Experiment that runs the tracker from multiple anchor frames, forward
    from each forward anchor and backward (reversed frames) from each backward
    anchor."""

    anchor = String(default="anchor")  # tag name used to locate anchor frames

    def scan(self, tracker: Tracker, sequence: Sequence):
        """Report which per-anchor trajectories already exist.

        Returns a tuple ``(complete, files, results)`` where ``complete`` is
        True only if every anchor has stored results.

        Raises:
            RuntimeError: when the sequence defines no anchors at all.
        """
        results = self.results(tracker, sequence)
        forward, backward = find_anchors(sequence, self.anchor)
        if not forward and not backward:
            raise RuntimeError("Sequence does not contain any anchors")
        complete = True
        files = []
        for anchor in forward + backward:
            name = "%s_%08d" % (sequence.name, anchor)
            if not Trajectory.exists(results, name):
                complete = False
            else:
                files.extend(Trajectory.gather(results, name))
        return complete, files, results

    def execute(self, tracker: Tracker, sequence: Sequence, force: bool = False, callback: Callable = None):
        """Run the tracker once per anchor, storing one trajectory per anchor.

        Existing trajectories are reused unless ``force`` is set; ``callback``
        (if given) receives the fraction of anchors processed.
        """
        results = self.results(tracker, sequence)
        forward, backward = find_anchors(sequence, self.anchor)
        if not forward and not backward:
            raise RuntimeError("Sequence does not contain any anchors")

        anchors = [(a, False) for a in forward] + [(a, True) for a in backward]
        total = len(anchors)
        done = 0

        for anchor, reverse in anchors:
            name = "%s_%08d" % (sequence.name, anchor)
            if Trajectory.exists(results, name) and not force:
                continue
            # Backward anchors run on the reversed prefix; forward ones on the suffix.
            frames = list(reversed(range(0, anchor + 1))) if reverse else list(range(anchor, sequence.length))
            proxy = FrameMapSequence(sequence, frames)
            trajectory = Trajectory(proxy.length)
            with self._get_runtime(tracker, sequence) as runtime:
                _, properties, elapsed = runtime.initialize(proxy.frame(0), self._get_initialization(proxy, 0))
                properties["time"] = elapsed
                trajectory.set(0, Special(Special.INITIALIZATION), properties)
                for frame in range(1, proxy.length):
                    region, properties, elapsed = runtime.update(proxy.frame(frame))
                    properties["time"] = elapsed
                    trajectory.set(frame, region, properties)
            trajectory.write(results, name)
            done += 1
            if callback:
                callback(done / total)
class AttributeMultiStart(SequenceAveragingAnalysis):
    """Per-attribute accuracy/robustness (AR) analysis for multi-start
    experiments: results are accumulated per frame tag and weighted by tag
    frequency when collapsed across sequences."""

    # NOTE(review): burnin is declared but not otherwise used in this class —
    # presumably initial frames were meant to be excluded from accuracy; confirm.
    burnin = Integer(default=10, val_min=0)
    grace = Integer(default=10, val_min=0)  # consecutive sub-threshold frames tolerated before declaring failure
    bounded = Boolean(default=True)  # clip overlap computation to the image bounds
    threshold = Float(default=0.1, val_min=0, val_max=1)  # overlap at/below this counts toward failure
    tags = List(String())  # attribute tags reported by the analysis

    @property
    def name(self):
        return "AR per-attribute analysis"

    def describe(self):
        """Describe results as interleaved (accuracy, robustness, count) per tag."""
        accuracy = [
            Measure("Accuracy: " + t, "A " + t, minimal=0, maximal=1,
                    direction=Sorting.DESCENDING) for t in self.tags
        ]
        # BUG FIX: label read "Robutsness" (typo, missing ": " separator) — now
        # consistent with the accuracy labels above.
        robustness = [
            Measure("Robustness: " + t, "R " + t, minimal=0, maximal=1,
                    direction=Sorting.DESCENDING) for t in self.tags
        ]
        length = [None] * len(self.tags)
        # FIX: initializer [] keeps reduce() from raising when tags is empty.
        return tuple(functools.reduce(
            operator.add,
            [[a, r, n] for a, r, n in zip(accuracy, robustness, length)], []))

    def compatible(self, experiment: Experiment):
        return isinstance(experiment, MultiStartExperiment)

    def collapse(self, tracker: Tracker, sequences: typing.List[Sequence],
                 results: typing.List[tuple]):
        """Average per-sequence results, weighting each tag by its frame count."""
        accuracy = Counter()
        robustness = Counter()
        attribute_total = Counter()
        for seq_acc, seq_rob, seq_attr_count in results:
            for t in seq_attr_count:
                accuracy[t] += (seq_acc[t] if t in seq_acc else 0) * seq_attr_count[t]
                robustness[t] += seq_rob * seq_attr_count[t]
                attribute_total[t] += seq_attr_count[t]
        # NOTE(review): a tag that never occurs in any sequence would divide by
        # zero here — assumed tags always appear at least once; confirm.
        accuracy = [accuracy[t] / attribute_total[t] for t in self.tags]
        robustness = [robustness[t] / attribute_total[t] for t in self.tags]
        length = [attribute_total[t] for t in self.tags]
        # FIX: initializer [] keeps reduce() from raising when tags is empty.
        return tuple(functools.reduce(
            operator.add,
            [[a, r, n] for a, r, n in zip(accuracy, robustness, length)], []))

    def subcompute(self, experiment: Experiment, tracker: Tracker, sequence: Sequence):
        """Compute per-tag accuracy, overall robustness, and tag frequencies
        for a single sequence.

        Raises:
            MissingResultsException: when any anchor trajectory is absent.
        """
        results = experiment.results(tracker, sequence)
        forward, backward = find_anchors(sequence, experiment.anchor)
        if len(forward) == 0 and len(backward) == 0:
            raise RuntimeError("Sequence does not contain any anchors")
        accuracy_ = Counter()
        tags_count_ = Counter()
        robustness_ = 0
        total_ = 0
        for i, reverse in [(f, False) for f in forward] + [(f, True) for f in backward]:
            name = "%s_%08d" % (sequence.name, i)
            if not Trajectory.exists(results, name):
                raise MissingResultsException()
            if reverse:
                proxy = FrameMapSequence(sequence, list(reversed(range(0, i + 1))))
            else:
                proxy = FrameMapSequence(sequence, list(range(i, sequence.length)))
            trajectory = Trajectory.read(results, name)
            # BUG FIX: was `proxy.size if self.burnin else None` — the bounded
            # flag (not burnin) controls clipping overlaps to the image size.
            # Behavior is unchanged for the defaults (both truthy), but the
            # bounded option now actually takes effect.
            overlaps = calculate_overlaps(trajectory.regions(), proxy.groundtruth(),
                                          proxy.size if self.bounded else None)
            grace = self.grace
            progress = len(proxy)
            # Failure detection: count down `grace` on consecutive sub-threshold
            # frames (empty groundtruth frames do not count); reset on recovery.
            for j, overlap in enumerate(overlaps):
                if overlap <= self.threshold and not proxy.groundtruth(j).is_empty():
                    grace = grace - 1
                    if grace == 0:
                        progress = j + 1 - self.grace  # subtract since we need actual point of the failure
                        break
                else:
                    grace = self.grace
            # Accumulate accuracy per tag over the successfully tracked prefix.
            for j in range(progress):
                overlap = overlaps[j]
                tags = proxy.tags(j)
                if len(tags) == 0:
                    tags = ['empty']
                for t in tags:
                    accuracy_[t] += overlap
                    tags_count_[t] += 1
            robustness_ += progress
            total_ += len(proxy)
        seq_robustness = robustness_ / total_
        seq_accuracy = {}
        for t in accuracy_:
            seq_accuracy[t] = accuracy_[t] / tags_count_[t]
        # calculate weights for each attribute
        attribute_counter = Counter()
        for frame_idx in range(len(sequence)):
            tags = sequence.tags(frame_idx)
            if len(tags) == 0:
                tags = ['empty']
            for t in tags:
                attribute_counter[t] += 1
        return seq_accuracy, seq_robustness, attribute_counter
class AttributeDifficultyLevelMultiStart(SequenceAveragingAnalysis):
    """Per-attribute difficulty analysis for multi-start experiments: for each
    tag, the fraction of tagged frames that were tracked without leading to a
    failure within ``fail_interval`` frames."""

    # NOTE(review): burnin is declared but not otherwise used in this class —
    # presumably initial frames were meant to be excluded; confirm.
    burnin = Integer(default=10, val_min=0)
    grace = Integer(default=10, val_min=0)  # consecutive sub-threshold frames tolerated before declaring failure
    bounded = Boolean(default=True)  # clip overlap computation to the image bounds
    threshold = Float(default=0.1, val_min=0, val_max=1)  # overlap at/below this counts toward failure
    fail_interval = Integer(default=30, val_min=1)  # frames before a failure that are still counted as "failed"
    tags = List(String())  # attribute tags reported by the analysis

    @property
    def name(self):
        return "Attribute difficulty"

    def describe(self):
        """One difficulty measure per tag, followed by per-tag frame counts."""
        return tuple([
            Measure(t, t, minimal=0, maximal=1, direction=Sorting.DESCENDING)
            for t in self.tags
        ] + [None] * len(self.tags))

    def compatible(self, experiment: Experiment):
        return isinstance(experiment, MultiStartExperiment)

    def collapse(self, tracker: Tracker, sequences: typing.List[Sequence],
                 results: typing.List[tuple]):
        """Average per-sequence difficulties, weighting each tag by its frame count."""
        attribute_difficulty = Counter()
        attribute_counter = Counter()
        for seq_tags_not_failed, seq_tags_count, seq_attr_count in results:
            for tag in seq_tags_count:
                if tag in seq_tags_not_failed:
                    seq_attr_difficulty = seq_tags_not_failed[tag] / seq_tags_count[tag]
                else:
                    seq_attr_difficulty = 0
                attribute_difficulty[tag] += seq_attr_difficulty * seq_attr_count[tag]
                attribute_counter[tag] += seq_attr_count[tag]
        # NOTE(review): a tag that never occurs in any sequence would divide by
        # zero here — assumed tags always appear at least once; confirm.
        return tuple([
            attribute_difficulty[tag] / attribute_counter[tag] for tag in self.tags
        ] + [attribute_counter[tag] for tag in self.tags])

    def subcompute(self, experiment: Experiment, tracker: Tracker, sequence: Sequence):
        """Count, per tag, total frames and frames not implicated in a failure
        for a single sequence.

        Raises:
            MissingResultsException: when any anchor trajectory is absent.
        """
        results = experiment.results(tracker, sequence)
        forward, backward = find_anchors(sequence, experiment.anchor)
        if len(forward) == 0 and len(backward) == 0:
            raise RuntimeError("Sequence does not contain any anchors")
        tags_count = Counter()
        tags_not_failed = Counter()
        for i, reverse in [(f, False) for f in forward] + [(f, True) for f in backward]:
            name = "%s_%08d" % (sequence.name, i)
            if not Trajectory.exists(results, name):
                raise MissingResultsException()
            if reverse:
                proxy = FrameMapSequence(sequence, list(reversed(range(0, i + 1))))
            else:
                proxy = FrameMapSequence(sequence, list(range(i, sequence.length)))
            trajectory = Trajectory.read(results, name)
            # BUG FIX: was `proxy.size if self.burnin else None` — the bounded
            # flag (not burnin) controls clipping overlaps to the image size.
            # Behavior is unchanged for the defaults (both truthy), but the
            # bounded option now actually takes effect.
            overlaps = calculate_overlaps(trajectory.regions(), proxy.groundtruth(),
                                          proxy.size if self.bounded else None)
            grace = self.grace
            progress = len(proxy)
            # Failure detection: count down `grace` on consecutive sub-threshold
            # frames (empty groundtruth frames do not count); reset on recovery.
            for j, overlap in enumerate(overlaps):
                if overlap <= self.threshold and not proxy.groundtruth(j).is_empty():
                    grace = grace - 1
                    if grace == 0:
                        progress = j + 1 - self.grace  # subtract since we need actual point of the failure
                        break
                else:
                    grace = self.grace
            for j in range(progress):
                tags = proxy.tags(j)
                if len(tags) == 0:
                    tags = ['empty']
                for t in tags:
                    tags_count[t] += 1
                    # A frame counts as "not failed" if the run completed or the
                    # frame is more than fail_interval frames before the failure.
                    if progress == len(proxy) or j < progress - self.fail_interval:
                        tags_not_failed[t] += 1
        # Tag frequencies over the whole sequence, used as collapse weights.
        attribute_counter = Counter()
        for frame_idx in range(len(sequence)):
            tags = sequence.tags(frame_idx)
            if len(tags) == 0:
                tags = ['empty']
            for t in tags:
                attribute_counter[t] += 1
        return tags_not_failed, tags_count, attribute_counter
class Workspace(Attributee):
    """A VOT workspace directory: configuration, sequence dataset, tracker
    registry references and result storage."""

    # Paths to tracker registry files, normalized relative to the workspace directory.
    registry = List(String(transformer=lambda x, ctx: normalize_path(
        x, ctx["parent"].directory)))
    stack = StackLoader()  # experiment stack configuration
    sequences = String(default="sequences")  # dataset subdirectory name
    report = Nested(ReportConfiguration)  # report generation options

    @staticmethod
    def initialize(directory, config=None, download=False):
        """Create the workspace skeleton (config, sequences, results,
        trackers.ini) in ``directory``; optionally download the stack dataset.

        Raises:
            WorkspaceException: when the directory is already initialized.
        """
        config_file = os.path.join(directory, "config.yaml")
        if os.path.isfile(config_file):
            raise WorkspaceException("Workspace already initialized")
        os.makedirs(directory, exist_ok=True)
        with open(config_file, 'w') as fp:
            yaml.dump(config if config is not None else dict(), fp)
        os.makedirs(os.path.join(directory, "sequences"), exist_ok=True)
        os.makedirs(os.path.join(directory, "results"), exist_ok=True)
        trackers_file = os.path.join(directory, "trackers.ini")
        if not os.path.isfile(trackers_file):
            # Create an empty tracker registry file.
            with open(trackers_file, 'w'):
                pass
        # BUG FIX: a leftover `download = False` assignment made the download
        # parameter dead code; removed so the caller's request takes effect.
        # The extra config guard protects the re-enabled path from a missing
        # "stack" entry.
        if download and config is not None and "stack" in config:
            # Try do retrieve dataset from stack and download it
            stack_file = resolve_stack(config["stack"], directory)
            dataset_directory = normalize_path(
                config.get("sequences", "sequences"), directory)
            if stack_file is None:
                return
            dataset = None
            with open(stack_file, 'r') as fp:
                stack_metadata = yaml.load(fp, Loader=yaml.BaseLoader)
                dataset = stack_metadata["dataset"]
            if dataset:
                Workspace.download_dataset(dataset, dataset_directory)

    @staticmethod
    def download_dataset(dataset, directory):
        """Download a dataset into ``directory`` unless it is already present.

        Returns False when the dataset already exists, True after a download.
        """
        if os.path.exists(os.path.join(directory, "list.txt")):
            return False
        from vot.dataset import download_dataset
        download_dataset(dataset, directory)
        logger.info("Download completed")
        # FIX: return a consistent boolean instead of an implicit None.
        return True

    @staticmethod
    def load(directory):
        """Load an existing workspace from ``directory``.

        Raises:
            WorkspaceException: when the directory was never initialized.
        """
        directory = normalize_path(directory)
        config_file = os.path.join(directory, "config.yaml")
        if not os.path.isfile(config_file):
            raise WorkspaceException("Workspace not initialized")
        with open(config_file, 'r') as fp:
            config = yaml.load(fp, Loader=yaml.BaseLoader)
        return Workspace(directory, **config)

    def __init__(self, directory, **kwargs):
        self._directory = directory
        self._storage = LocalStorage(
            directory) if directory is not None else VoidStorage()
        super().__init__(**kwargs)
        dataset_directory = normalize_path(self.sequences, directory)
        # BUG FIX: was `if not self.stack.dataset is None` — the dataset
        # attribute defaults to "" (never None), so the check always passed
        # and an empty dataset name could trigger a bogus download attempt.
        # Truthiness covers both None and the empty string.
        if self.stack.dataset:
            Workspace.download_dataset(self.stack.dataset, dataset_directory)
        self._dataset = VOTDataset(dataset_directory)

    @property
    def directory(self) -> str:
        """Root directory of the workspace."""
        return self._directory

    @property
    def dataset(self) -> Dataset:
        """The sequence dataset associated with this workspace."""
        return self._dataset

    @property
    def storage(self) -> LocalStorage:
        """Persistent storage rooted at the workspace directory."""
        return self._storage

    def cache(self, identifier) -> LocalStorage:
        """Return a cache substorage for ``identifier``; non-string identifiers
        are keyed by their fully qualified class name."""
        if not isinstance(identifier, str):
            identifier = class_fullname(identifier)
        return self._storage.substorage("cache").substorage(identifier)

    def list_results(self, registry: "Registry"):
        """Resolve tracker references found in the results storage against a registry."""
        references = self._storage.substorage("results").folders()
        return registry.resolve(*references)