Example 1
import datetime
import json
import typing

import yaml

# Tracker, Sequence, Storage, version and class_fullname are assumed to be
# provided by the surrounding toolkit package.
def generate_serialized(trackers: typing.List[Tracker], sequences: typing.List[Sequence], results, storage: "Storage", serializer: str):

    doc = dict()
    doc["toolkit"] = version
    doc["timestamp"] = datetime.datetime.now().isoformat()
    doc["trackers"] = {t.reference : t.describe() for t in trackers}
    doc["sequences"] = {s.name : s.describe() for s in sequences}

    doc["results"] = dict()

    # One entry per experiment, keyed by the experiment identifier.
    for experiment, analyses in results.items():
        exp = dict(parameters=experiment.dump(), type=class_fullname(experiment))
        exp["results"] = []
        for _, data in analyses.items():
            exp["results"].append(data)
        doc["results"][experiment.identifier] = exp

    if serializer == "json":
        with storage.write("results.json") as handle:
            json.dump(doc, handle, indent=2)
    elif serializer == "yaml":
        with storage.write("results.yaml") as handle:
            yaml.dump(doc, handle)
    else:
        raise RuntimeError("Unknown serializer")
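Here storage.write is used as a context manager that yields a writable text handle. A minimal file-backed stand-in (a hypothetical class for illustration, not the toolkit's actual Storage) shows the contract the function relies on:

import contextlib
import os

class DirectoryStorage:

    def __init__(self, root: str):
        # Hypothetical stand-in: resolve names to files inside a root directory.
        self._root = root
        os.makedirs(root, exist_ok=True)

    @contextlib.contextmanager
    def write(self, name: str):
        # Yield a text handle; the file is closed when the with-block exits.
        with open(os.path.join(self._root, name), "w") as handle:
            yield handle

With such an object, generate_serialized(trackers, sequences, results, DirectoryStorage("out"), "json") would write out/results.json.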
Example 2
    @property
    def identifier(self) -> str:
        # Reuse the cached identifier when it has already been computed.
        if self._identifier_cache is not None:
            return self._identifier_cache

        params = self.dump()
        confighash = arg_hash(**params)

        self._identifier_cache = class_fullname(self) + "@" + confighash

        return self._identifier_cache
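The identifier couples the fully qualified class name with a hash of the dumped parameters, so two instances of the same experiment class with different configurations never collide. arg_hash is not shown in these examples; a plausible stand-in (an assumption, the toolkit's own implementation may differ) hashes a canonical, order-independent rendering of the keyword arguments:

import hashlib

def arg_hash(**params) -> str:
    # Sort the keys so the hash does not depend on argument order.
    canonical = ";".join("{}={}".format(key, params[key]) for key in sorted(params))
    return hashlib.sha1(canonical.encode("utf-8")).hexdigest()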
Example 3
    def cache(self, *args):
        # Compose a cache directory path from the given arguments and ensure
        # that the directory exists.
        segments = []
        for arg in args:
            if arg is None:
                continue
            if isinstance(arg, str):
                segments.append(arg)
            elif isinstance(arg, (int, float)):
                segments.append(str(arg))
            else:
                segments.append(class_fullname(arg))

        path = os.path.join(self._cache, *segments)
        os.makedirs(path, exist_ok=True)

        return path
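The method flattens heterogeneous arguments into path segments: None values are skipped, strings pass through, numbers are stringified, and any other object contributes its fully qualified class name. Assuming self._cache is /tmp/cache, a call such as the following (names hypothetical) creates and returns a nested directory:

path = workspace.cache("analysis", None, 3, SomeAnalysis())
# path == "/tmp/cache/analysis/3/mypackage.SomeAnalysis"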
Example 4
# Assumes "from typing import List" together with the same standard-library
# and toolkit imports as in Example 1; this variant always writes JSON.
def generate_json_document(trackers: List[Tracker], sequences: List[Sequence], results, storage: Storage):

    doc = dict()
    doc["toolkit"] = version
    doc["timestamp"] = datetime.datetime.now().isoformat()
    doc["trackers"] = {t.reference : t.describe() for t in trackers}
    doc["sequences"] = {s.name : s.describe() for s in sequences}

    doc["results"] = dict()

    for experiment, analyses in results.items():
        exp = dict(parameters=experiment.dump(), type=class_fullname(experiment))
        exp["results"] = []
        for _, data in analyses.items():
            exp["results"].append(data)
        doc["results"][experiment.identifier] = exp

    with storage.write("results.json") as handle:
        json.dump(doc, handle, indent=2)
Example 5
def process_measures(workspace: "Workspace", trackers: List[Tracker]):
    # Evaluate every compatible measure for each tracker across all
    # experiments in the workspace stack.

    results = dict()

    for experiment in workspace.stack:

        results[experiment.identifier] = list()

        for tracker in trackers:

            tracker_results = {}
            tracker_results['tracker_name'] = tracker.identifier

            for measure in workspace.stack.measures(experiment):

                if not measure.compatible(experiment):
                    continue

                tracker_results[class_fullname(measure)] = measure.compute(
                    tracker, experiment)

            results[experiment.identifier].append(tracker_results)

    return results
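The returned value is a plain nested structure of dictionaries and lists, so it can be serialized directly; schematically:

# {
#     "<experiment identifier>": [
#         {
#             "tracker_name": "<tracker identifier>",
#             "<fully qualified measure class>": <computed value>,
#         },
#         ...  # one dictionary per tracker
#     ],
#     ...      # one entry per experiment in the stack
# }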
Example 6
    def cache(self, identifier) -> LocalStorage:
        # Normalize non-string identifiers to their fully qualified class name.
        if not isinstance(identifier, str):
            identifier = class_fullname(identifier)

        return LocalStorage(os.path.join(self._root, "cache", identifier))
Example 7
def process_stack_analyses(workspace: "Workspace", trackers: List[Tracker]):
    # Schedule all compatible analyses on the shared processor, then wait for
    # the asynchronous tasks to finish while reporting progress.

    processor = AnalysisProcessor.default()

    results = dict()
    condition = Condition()

    def insert_result(container: dict, key):
        # Completion callback: store the future's result (or log the failure)
        # and wake up the waiting loop below.
        def insert(future: Future):
            try:
                container[key] = future.result()
            except AnalysisError as e:
                e.print(logger)
            except Exception as e:
                logger.exception(e)
            with condition:
                condition.notify()
        return insert

    for experiment in workspace.stack:

        logger.debug("Traversing experiment %s", experiment.identifier)

        experiment_results = dict()

        results[experiment] = experiment_results

        sequences = [experiment.transform(sequence) for sequence in workspace.dataset]

        for analysis in experiment.analyses:

            if not analysis.compatible(experiment):
                continue

            logger.debug("Traversing analysis %s", class_fullname(analysis))

            with condition:
                experiment_results[analysis] = None
            promise = processor.commit(analysis, experiment, trackers, sequences)
            promise.add_done_callback(insert_result(experiment_results, analysis))

    if processor.total == 0:
        return results

    logger.debug("Waiting for %d analysis tasks to finish", processor.total)

    with Progress("Running analysis", processor.total) as progress:
        try:

            while True:

                progress.absolute(processor.total - processor.pending)
                if processor.pending == 0:
                    break

                with condition:
                    condition.wait(1)

        except KeyboardInterrupt:
            processor.cancel()
            progress.close()
            logger.info("Analysis interrupted by user, aborting.")
            return None

    return results
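The synchronization pattern is worth noting: each completed future notifies a shared Condition, and the main thread waits with a one-second timeout so it can keep the progress display current and stay responsive to KeyboardInterrupt. The same pattern in isolation, using only the standard library (a minimal sketch, not the toolkit's AnalysisProcessor), looks like this:

from concurrent.futures import ThreadPoolExecutor
from threading import Condition

condition = Condition()
results = {}
pending = set()

def insert_result(key):
    def insert(future):
        # Runs in a worker thread once the task finishes.
        results[key] = future.result()
        pending.discard(key)
        with condition:
            condition.notify()
    return insert

with ThreadPoolExecutor() as executor:
    for i in range(4):
        pending.add(i)
        future = executor.submit(lambda x: x * x, i)
        future.add_done_callback(insert_result(i))
    # Wake up at least once per second, e.g. to refresh a progress bar.
    while pending:
        with condition:
            condition.wait(1)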
Example 8
    def cache(self, identifier) -> LocalStorage:
        if not isinstance(identifier, str):
            identifier = class_fullname(identifier)

        return self._storage.substorage("cache").substorage(identifier)
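Examples 6 and 8 implement the same lookup against two different backends: Example 6 composes a filesystem path under self._root directly, while this version delegates to the storage abstraction through nested substorage calls. In both cases a non-string identifier is first normalized with class_fullname.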