Example #1
def download_uncompress(url, path):
    # Download a ZIP archive to a temporary file, extract it into path, then
    # remove the temporary file. Each phase drives its own Progress bar
    # through a callback (update_absolute for bytes, update_relative for
    # file counts).
    tmp_file = tempfile.mktemp() + ".zip"
    with Progress(unit='B', desc="Downloading", leave=False) as pbar:
        download(url, tmp_file, pbar.update_absolute)
    with Progress(unit='files', desc="Extracting", leave=True) as pbar:
        extract_files(tmp_file, path, pbar.update_relative)
    os.unlink(tmp_file)
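
A minimal usage sketch (the URL and target directory here are hypothetical; download_uncompress is the helper defined above):

    download_uncompress("https://example.com/sequences.zip", "./sequences")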
Example #2
    class EvaluationProgress(object):
        """Adapter that maps per-unit fractional progress onto a single bar."""

        def __init__(self, description, total):
            self.bar = Progress(description, total)
            self._finished = 0

        def __call__(self, progress):
            # Report fractional progress (clamped to [0, 1]) within the
            # current unit, on top of the units already finished.
            self.bar.absolute(self._finished + min(1, max(0, progress)))

        def push(self):
            # Mark the current unit as done and advance the bar.
            self._finished = self._finished + 1
            self.bar.absolute(self._finished)
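
A hypothetical usage sketch: one bar spans several sequences, and a tracker reports fractional progress inside each one (run_tracker is a placeholder, not part of the toolkit):

    progress = EvaluationProgress("Evaluating", total=len(sequences))
    for sequence in sequences:
        run_tracker(sequence, callback=progress)  # callback receives values in [0, 1]
        progress.push()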
Example #3
File: otb.py Project: LitingLin/toolkit
def download_dataset(path: str, otb50: bool = False):

    from vot.utilities.net import download_uncompress, join_url, NetworkException

    dataset = _SEQUENCES

    if otb50:
        dataset = {k: v for k, v in dataset.items() if k in _OTB50_SUBSET}

    with Progress("Downloading", len(dataset)) as progress:
        for name, metadata in dataset.items():
            name = metadata.get("base", name)
            if not os.path.isdir(os.path.join(path, name)):
                try:
                    download_uncompress(join_url(_BASE_URL, "%s.zip" % name),
                                        path)
                except NetworkException as ex:
                    raise DatasetException(
                        "Unable to download sequence data") from ex
                except IOError as ex:
                    raise DatasetException(
                        "Unable to extract sequence data, is the target directory writable and do you have enough space?"
                    ) from ex

            progress.relative(1)
Example #4
    def __init__(self, path, splits=False):
        super().__init__(path)

        if not splits and not TrackingNetDataset.check(path):
            raise DatasetException(
                "Unsupported dataset format, expected TrackingNet")

        sequences = []
        if not splits:
            for file in glob.glob(os.path.join(path, "anno", "*.txt")):
                sequences.append(file)
        else:
            # Special mode to load all training splits
            for split in ["TRAIN_%d" % i for i in range(0, 12)]:
                for file in glob.glob(
                        os.path.join(path, split, "anno", "*.txt")):
                    sequences.append(file)

        self._sequences = OrderedDict()

        with Progress("Loading dataset", len(sequences)) as progress:
            for sequence in sequences:
                name = os.path.splitext(os.path.basename(sequence))[0]
                self._sequences[name] = TrackingNetSequence(sequence,
                                                            dataset=self)
                progress.relative(1)
Example #5
    def __init__(self, path):
        super().__init__(path)

        if not os.path.isfile(os.path.join(path, "list.txt")):
            raise DatasetException("Dataset not available locally")

        with open(os.path.join(path, "list.txt"), 'r') as fd:
            names = fd.readlines()
        self._sequences = {
            name.strip(): VOTSequence(os.path.join(path, name.strip()),
                                      dataset=self)
            for name in Progress(
                names, desc="Loading dataset", unit="sequences")
        }
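
Unlike the other examples, this one uses Progress as an iterable wrapper (tqdm-style) rather than as a context manager, so no explicit relative() calls are needed. A minimal sketch assuming the same iterable interface (process is a hypothetical placeholder):

    for name in Progress(names, desc="Processing", unit="items"):
        process(name.strip())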
Example #6
File: otb.py Project: LitingLin/toolkit
    def __init__(self, path, otb50: bool = False):
        super().__init__(path)

        dataset = _SEQUENCES

        if otb50:
            dataset = {k: v for k, v in dataset.items() if k in _OTB50_SUBSET}

        self._sequences = OrderedDict()

        with Progress("Loading dataset", len(dataset)) as progress:

            for name in sorted(list(dataset.keys())):
                self._sequences[name.strip()] = OTBSequence(path,
                                                            name,
                                                            dataset=self)
                progress.relative(1)
Example #7
    def __init__(self, path):
        super().__init__(path)

        if not os.path.isfile(os.path.join(path, "list.txt")):
            raise DatasetException("Dataset not available locally")

        with open(os.path.join(path, "list.txt"), 'r') as fd:
            names = fd.readlines()

        self._sequences = OrderedDict()

        with Progress("Loading dataset", len(names)) as progress:

            for name in names:
                self._sequences[name.strip()] = VOTSequence(os.path.join(
                    path, name.strip()),
                                                            dataset=self)
                progress.relative(1)
Example #8
    def wait(self):

        if self.total == 0:
            return

        with Progress("Running analysis", self.total) as progress:
            try:

                while True:
                    progress.absolute(self.total - self.pending)
                    if self.pending == 0:
                        break

                    with self._wait_condition:
                        self._wait_condition.wait(1)

            except KeyboardInterrupt:
                self.cancel()
                progress.close()
Example #9
    def __init__(self, path, sequence_list="list.txt"):
        super().__init__(path)

        if not os.path.isabs(sequence_list):
            sequence_list = os.path.join(path, sequence_list)

        if not os.path.isfile(sequence_list):
            raise DatasetException("Sequence list does not exist")

        with open(sequence_list, 'r') as handle:
            names = handle.readlines()

        self._sequences = OrderedDict()

        with Progress("Loading dataset", len(names)) as progress:

            for name in names:
                self._sequences[name.strip()] = GOT10kSequence(os.path.join(
                    path, name.strip()),
                                                               dataset=self)
                progress.relative(1)
Example #10
def do_pack(config, logger):

    import zipfile, io
    from shutil import copyfileobj

    workspace = Workspace(config.workspace)

    logger.info("Loaded workspace in '%s'", config.workspace)

    registry = load_trackers(workspace.registry + config.registry)

    logger.info("Found data for %d trackers", len(registry))

    try:
        tracker = registry[config.tracker]
    except KeyError as ke:
        logger.error("Tracker not found %s", str(ke))
        return

    logger.info("Packaging results for tracker %s", tracker.identifier)

    all_files = []
    can_finish = True

    progress = Progress(desc="Scanning",
                        total=len(workspace.dataset) * len(workspace.stack))

    for experiment in workspace.stack:
        for sequence in workspace.dataset:
            transformers = experiment.workspace.stack.transformers(experiment)
            for transformer in transformers:
                sequence = transformer(sequence)
            complete, files, results = experiment.scan(tracker, sequence)
            all_files.extend([(f, experiment.identifier, sequence.name,
                               results) for f in files])
            if not complete:
                logger.error(
                    "Results are not complete for experiment %s, sequence %s",
                    experiment.identifier, sequence.name)
                can_finish = False
            progress.update_relative(1)

    if not can_finish:
        logger.error("Unable to continue, experiments not complete")
        return

    logger.info("Collected %d files, compressing to archive ...",
                len(all_files))

    archive_name = os.path.join(
        workspace.directory,
        "{}_{:%Y-%m-%dT%H-%M-%S.%f%z}.zip".format(tracker.identifier,
                                                  datetime.now()))

    progress = Progress(desc="Compressing", total=len(all_files))

    with zipfile.ZipFile(archive_name, 'w') as archive:
        for f in all_files:
            with io.TextIOWrapper(
                    archive.open(os.path.join(f[1], f[2], f[0]),
                                 mode="w")) as fout, f[3].read(f[0]) as fin:
                copyfileobj(fin, fout)
            progress.update_relative(1)

    logger.info("Result packaging successful, archive available in %s",
                archive_name)
Example #11
def process_stack_analyses(workspace: "Workspace", trackers: List[Tracker],
                           executor: Executor, cache: Cache):

    from vot.utilities import Progress
    from threading import Condition

    processor = AnalysisProcessor(executor, cache)

    results = dict()
    condition = Condition()

    def insert_result(container: dict, key):
        def insert(x):
            if isinstance(x, Exception):
                if isinstance(x, AnalysisError):
                    x.print(logger)
                else:
                    logger.exception(x)
            else:
                container[key] = x
            with condition:
                condition.notify()

        return insert

    for experiment in workspace.stack:

        logger.debug("Traversing experiment %s", experiment.identifier)

        experiment_results = dict()

        results[experiment] = experiment_results

        for analysis in experiment.analyses:

            if not analysis.compatible(experiment):
                continue

            logger.debug("Traversing analysis %s", analysis.name)

            with condition:
                experiment_results[analysis] = None
            processor.submit(analysis, experiment, trackers, workspace.dataset,
                             insert_result(experiment_results, analysis))

    if processor.total == 0:
        return results

    logger.debug("Waiting for %d analysis tasks to finish", processor.total)

    with Progress("Running analysis", processor.total) as progress:
        try:

            while True:
                progress.absolute(processor.total - processor.pending)
                if processor.pending == 0:
                    break

                with condition:
                    condition.wait(1)

        except KeyboardInterrupt:
            processor.cancel_all()
            progress.close()
            logger.info("Analysis interrupted by user, aborting.")
            return None

    return results
Example #12
    def __init__(self, description, total):
        self.bar = Progress(description, total)
        self._finished = 0
Example #13
    def __init__(self, description, total):
        self.bar = Progress(desc=description, total=total, unit="sequence")
        self._finished = 0
Example #14
    def download(self, url, path="."):
        from vot.utilities.net import download_uncompress, download_json, get_base_url, join_url, NetworkException

        if os.path.splitext(url)[1] == '.zip':
            logger.info(
                'Downloading sequence bundle from "%s". This may take a while ...',
                url)

            try:
                download_uncompress(url, path)
            except NetworkException as e:
                raise DatasetException(
                    "Unable to download dataset bundle. Please try to download the bundle manually from {} and uncompress it to {}."
                    .format(url, path)) from e
            except IOError as e:
                raise DatasetException(
                    "Unable to extract dataset bundle, is the target directory writable and do you have enough space?"
                ) from e

        else:

            meta = download_json(url)

            logger.info('Downloading sequence dataset "%s" with %s sequences.',
                        meta["name"], len(meta["sequences"]))

            base_url = get_base_url(url) + "/"

            with Progress("Donwloading", len(meta["sequences"])) as progress:
                for sequence in meta["sequences"]:
                    sequence_directory = os.path.join(path, sequence["name"])
                    os.makedirs(sequence_directory, exist_ok=True)

                    data = {
                        'name': sequence["name"],
                        'fps': sequence["fps"],
                        'format': 'default'
                    }

                    annotations_url = join_url(base_url,
                                               sequence["annotations"]["url"])

                    try:
                        download_uncompress(annotations_url,
                                            sequence_directory)
                    except NetworkException as e:
                        raise DatasetException(
                            "Unable to download annotations bundle") from e
                    except IOError as e:
                        raise DatasetException(
                            "Unable to extract annotations bundle, is the target directory writable and do you have enough space?"
                        ) from e

                    for cname, channel in sequence["channels"].items():
                        channel_directory = os.path.join(
                            sequence_directory, cname)
                        os.makedirs(channel_directory, exist_ok=True)

                        channel_url = join_url(base_url, channel["url"])

                        try:
                            download_uncompress(channel_url, channel_directory)
                        except NetworkException as e:
                            raise DatasetException(
                                "Unable to download channel bundle") from e
                        except IOError as e:
                            raise DatasetException(
                                "Unable to extract channel bundle, is the target directory writable and do you have enough space?"
                            ) from e

                        if "pattern" in channel:
                            data["channels." +
                                 cname] = cname + os.path.sep + channel[
                                     "pattern"]
                        else:
                            data["channels." + cname] = cname + os.path.sep

                    write_properties(
                        os.path.join(sequence_directory, 'sequence'), data)

                    progress.relative(1)

            with open(os.path.join(path, "list.txt"), "w") as fp:
                for sequence in meta["sequences"]:
                    fp.write('{}\n'.format(sequence["name"]))
Example #15
def do_pack(config, logger):

    import zipfile, io
    from shutil import copyfileobj

    workspace = Workspace.load(config.workspace)

    logger.info("Loaded workspace in '%s'", config.workspace)

    registry = Registry(workspace.registry + config.registry,
                        root=config.workspace)

    logger.info("Found data for %d trackers", len(registry))

    tracker = registry[config.tracker]

    logger.info("Packaging results for tracker %s", tracker.identifier)

    all_files = []
    can_finish = True

    with Progress("Scanning",
                  len(workspace.dataset) * len(workspace.stack)) as progress:

        for experiment in workspace.stack:
            for sequence in workspace.dataset:
                sequence = experiment.transform(sequence)
                complete, files, results = experiment.scan(tracker, sequence)
                all_files.extend([(f, experiment.identifier, sequence.name,
                                   results) for f in files])
                if not complete:
                    logger.error(
                        "Results are not complete for experiment %s, sequence %s",
                        experiment.identifier, sequence.name)
                    can_finish = False
                progress.relative(1)

    if not can_finish:
        logger.error("Unable to continue, experiments not complete")
        return

    logger.info("Collected %d files, compressing to archive ...",
                len(all_files))

    timestamp = datetime.now()

    archive_name = "{}_{:%Y-%m-%dT%H-%M-%S.%f%z}.zip".format(
        tracker.identifier, timestamp)

    with Progress("Compressing", len(all_files)) as progress:

        manifest = dict(
            identifier=tracker.identifier,
            configuration=tracker.configuration(),
            timestamp="{:%Y-%m-%dT%H-%M-%S.%f%z}".format(timestamp),
            platform=sys.platform,
            python=sys.version,
            toolkit=__version__)

        # mode="w" is required: ZipFile defaults to read mode, which fails on
        # a write-only stream.
        with zipfile.ZipFile(workspace.storage.write(archive_name,
                                                     binary=True),
                             mode="w") as archive:
            for f in all_files:
                info = zipfile.ZipInfo(filename=os.path.join(f[1], f[2], f[0]),
                                       date_time=timestamp.timetuple())
                with io.TextIOWrapper(archive.open(
                        info, mode="w")) as fout, f[3].read(f[0]) as fin:
                    copyfileobj(fin, fout)
                progress.relative(1)

            info = zipfile.ZipInfo(filename="manifest.yml",
                                   date_time=timestamp.timetuple())
            with io.TextIOWrapper(archive.open(info, mode="w")) as fout:
                yaml.dump(manifest, fout)

    logger.info("Result packaging successful, archive available in %s",
                archive_name)
Example #16
def process_stack_analyses(workspace: "Workspace", trackers: List[Tracker]):

    processor = AnalysisProcessor.default()

    results = dict()
    condition = Condition()

    def insert_result(container: dict, key):
        def insert(future: Future):
            try:
                container[key] = future.result()
            except AnalysisError as e:
                e.print(logger)
            except Exception as e:
                logger.exception(e)
            with condition:
                condition.notify()
        return insert

    for experiment in workspace.stack:

        logger.debug("Traversing experiment %s", experiment.identifier)

        experiment_results = dict()

        results[experiment] = experiment_results

        sequences = [experiment.transform(sequence) for sequence in workspace.dataset]

        for analysis in experiment.analyses:

            if not analysis.compatible(experiment):
                continue

            logger.debug("Traversing analysis %s", class_fullname(analysis))

            with condition:
                experiment_results[analysis] = None
            promise = processor.commit(analysis, experiment, trackers, sequences)
            promise.add_done_callback(insert_result(experiment_results, analysis))

    if processor.total == 0:
        return results

    logger.debug("Waiting for %d analysis tasks to finish", processor.total)

    with Progress("Running analysis", processor.total) as progress:
        try:

            while True:

                progress.absolute(processor.total - processor.pending)
                if processor.pending == 0:
                    break

                with condition:
                    condition.wait(1)

        except KeyboardInterrupt:
            processor.cancel()
            progress.close()
            logger.info("Analysis interrupted by user, aborting.")
            return None

    return results
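
Examples #8, #11, and #16 share a polling pattern worth isolating: completion callbacks notify a Condition, while the main thread alternates between checking a pending counter and a bounded wait so a KeyboardInterrupt stays responsive. A distilled sketch using only the standard library (no toolkit classes assumed; the counter value is hypothetical):

    import threading

    condition = threading.Condition()
    pending = 5  # hypothetical number of outstanding tasks

    def task_done():
        # Called from worker threads when a task completes.
        global pending
        with condition:
            pending -= 1
            condition.notify()

    def wait_all():
        # Poll with a 1-second timeout, mirroring the loops above.
        while True:
            with condition:
                if pending == 0:
                    break
                condition.wait(1)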