Example #1
    def download_import_files(self, task_id, data_dir):
        import_struct = self.api.simple_request('GetImportStructure', api_proto.ListFiles, api_proto.Id(id=task_id))
        progress = sly.ProgressCounter(subtask_name='Downloading',
                                       total_cnt=len(import_struct.files),
                                       ext_logger=self.logger,
                                       report_limit=len(import_struct.files) // 10)

        def close_fh(fh):
            fpath = fh.file_path
            if fh.close_and_check():
                progress.iter_done_report()
            else:
                self.logger.warning('file was skipped while downloading', extra={'file_path': fpath})

        file_handler = None
        for chunk in self.api.get_stream_with_data('GetImportFiles',
                                                   api_proto.ChunkFile,
                                                   api_proto.ImportRequest(task_id=task_id, files=import_struct.files)):
            new_fpath = chunk.file.path
            if new_fpath:  # a non-empty path marks the beginning of a new file
                if file_handler is not None:
                    close_fh(file_handler)
                real_fpath = osp.join(data_dir, new_fpath.lstrip('/'))
                self.logger.trace('download import file', extra={'file_path': real_fpath})
                file_handler = ChunkedFileWriter(file_path=real_fpath)

            file_handler.write(chunk.chunk)

        close_fh(file_handler)  # the stream is expected to yield at least one file
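All four examples funnel streamed chunks through ChunkedFileWriter, whose implementation is not shown on this page. As a rough guide, a minimal sketch of the interface the examples depend on (file_path, write, close_and_check, total_size) could look like the following; the buffer and total_size fields on the chunk message are inferred from Example #2, and the body is an assumption rather than the real class:

import os
import os.path as osp


class ChunkedFileWriter:
    """Hypothetical sketch: accumulate streamed chunks into a file on disk."""

    def __init__(self, file_path):
        self.file_path = file_path
        self.total_size = 0  # expected byte count, learned from the chunks
        self._written = 0
        os.makedirs(osp.dirname(file_path) or '.', exist_ok=True)
        self._fout = open(file_path, 'wb')

    def write(self, chunk):
        # each chunk is assumed to carry its payload in .buffer and the
        # expected full file size in .total_size
        if chunk.total_size:
            self.total_size = chunk.total_size
        self._fout.write(chunk.buffer)
        self._written += len(chunk.buffer)

    def close_and_check(self):
        # True only if the complete expected payload has arrived
        self._fout.close()
        return self._written == self.total_size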
Example #2
    def download_nn(self, nn_id, nn_hash, model_dir):
        if self.storage.nns.read_object(nn_hash, model_dir):
            self.logger.info('NN has been copied from local storage.')
            return

        nn_archive_path = os.path.join(constants.AGENT_TMP_DIR, sly.generate_random_string(30) + '.tar')
        fh = None
        progress = None
        for nn_chunk in self.api.get_stream_with_data('DownloadModel',
                                                      api_proto.Chunk,
                                                      api_proto.ModelDescription(id=nn_id, hash=nn_hash)):
            if fh is None:
                fh = ChunkedFileWriter(file_path=nn_archive_path)
            fh.write(nn_chunk)

            if progress is None:  # fh.total_size becomes known once the first chunk arrives
                progress = sly.progress_download_nn(fh.total_size, ext_logger=self.logger)
            progress.iters_done_report(len(nn_chunk.buffer))

        if not fh.close_and_check():
            self.logger.critical('file was skipped while downloading', extra={'file_path': fh.file_path})
            raise RuntimeError('Unable to download NN weights.')

        with tarfile.open(nn_archive_path) as archive:
            archive.extractall(model_dir)
        sly.silent_remove(nn_archive_path)
        self.logger.info('NN has been downloaded from server.')

        self.storage.nns.write_object(model_dir, nn_hash)
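The first and last lines of Example #2 show a cache-aside pattern: self.storage.nns is consulted by hash before any network transfer and repopulated after a successful download. A stand-in for such a store can be a directory-per-hash cache; the read_object/write_object names come from the example, while the rest is an assumption (shutil.copytree with dirs_exist_ok needs Python 3.8+):

import os
import shutil


class HashedObjectStorage:
    """Hypothetical sketch of a hash-keyed local cache, not the real class."""

    def __init__(self, root_dir):
        self.root_dir = root_dir

    def _obj_dir(self, obj_hash):
        # one subdirectory per object, named by a filesystem-safe hash
        return os.path.join(self.root_dir, obj_hash.replace('/', '_'))

    def read_object(self, obj_hash, dst_dir):
        # copy a cached object into dst_dir; False signals a cache miss
        src = self._obj_dir(obj_hash)
        if not os.path.isdir(src):
            return False
        shutil.copytree(src, dst_dir, dirs_exist_ok=True)
        return True

    def write_object(self, src_dir, obj_hash):
        # store src_dir under its hash so later read_object calls hit the cache
        dst = self._obj_dir(obj_hash)
        if not os.path.isdir(dst):
            shutil.copytree(src_dir, dst)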
Example #3
    def _download_annotations(self, pr_writer, image_id_to_ds):
        progress = sly.ProgressCounter('Download annotations',
                                       len(image_id_to_ds),
                                       ext_logger=self.logger)

        for batch_img_ids in sly.batched(
                list(image_id_to_ds.keys()),
                constants.BATCH_SIZE_DOWNLOAD_ANNOTATIONS):
            for chunk in self.api.get_stream_with_data(
                    'DownloadAnnotations', api_proto.ChunkImage,
                    api_proto.ImageArray(images=batch_img_ids)):
                img_id = chunk.image.id
                ds_name = image_id_to_ds[img_id]
                self.logger.trace('download_annotations',
                                  extra={'img_id': img_id})
                fh = ChunkedFileWriter(file_path=pr_writer.get_ann_path(
                    ds_name, chunk.image.title))
                fh.write(chunk.chunk)
                progress.iter_done_report()
                if not fh.close_and_check():
                    self.logger.warning('ann was skipped while downloading',
                                        extra={
                                            'img_id': img_id,
                                            'ann_path': fh.file_path
                                        })
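Examples #3 and #4 slice their ID lists with sly.batched before each streaming request, so one call never asks the server for more than a fixed number of items. If that helper is unavailable, an equivalent fits in a few lines (a sketch; the real sly.batched may differ in details):

def batched(seq, batch_size):
    # yield consecutive slices of seq, each at most batch_size items long
    for start in range(0, len(seq), batch_size):
        yield seq[start:start + batch_size]

For instance, list(batched([1, 2, 3, 4, 5], 2)) produces [[1, 2], [3, 4], [5]].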
Example #4
    def _download_images_from_remote(self, pr_writer, image_id_to_ds, img_infos):
        if len(img_infos) == 0:
            return

        infos_with_paths = [(info, pr_writer.get_img_path(image_id_to_ds[info.id], info.title, info.ext))
                            for info in img_infos]
        hash2path = {x[0].hash: x[1] for x in infos_with_paths}  # one destination path per unique hash
        unique_hashes = list(hash2path.keys())

        ready_paths = []
        ready_hashes = []
        progress = sly.ProgressCounter('Download remote images', len(unique_hashes), ext_logger=self.logger)

        def close_fh(fh):  # reads img_hash from the enclosing loop at call time
            fpath = fh.file_path
            if fh.close_and_check():
                ready_paths.append(fpath)
                ready_hashes.append(img_hash)
                progress.iter_done_report()
            else:
                self.logger.warning('file was skipped while downloading',
                                    extra={'img_path': fpath, 'img_hash': img_hash})

        # download by unique hashes
        for batch_img_hashes in sly.batched(unique_hashes, constants.BATCH_SIZE_DOWNLOAD_IMAGES):
            file_handler = None
            img_hash = None
            for chunk in self.api.get_stream_with_data('DownloadImages',
                                                       api_proto.ChunkImage,
                                                       api_proto.ImagesHashes(images_hashes=batch_img_hashes)):
                if chunk.image.hash:  # a non-empty hash marks the beginning of a new image
                    if file_handler is not None:
                        close_fh(file_handler)
                    img_hash = chunk.image.hash
                    self.logger.trace('download_images', extra={'img_hash': img_hash})
                    dst_fpath = hash2path[img_hash]
                    file_handler = ChunkedFileWriter(file_path=dst_fpath)

                file_handler.write(chunk.chunk)

            close_fh(file_handler)  # file_handler must not be None here

        # process non-unique hashes
        for info, dst_path in infos_with_paths:
            origin_path = hash2path[info.hash]
            if (origin_path != dst_path) and osp.isfile(origin_path):
                sly.ensure_base_path(dst_path)
                sly.copy_file(origin_path, dst_path)

        self._write_images_to_agent_storage(ready_paths, ready_hashes)
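Examples #1 and #4 share one demultiplexing protocol: a chunk whose header field (file path or image hash) is non-empty opens a new file, and every following chunk appends to the most recently opened writer. Distilled out of the examples for clarity (this is a restatement of their control flow, not code from the page):

def demux_stream(chunks, open_writer):
    # chunks: iterable of (header, payload) pairs; a truthy header marks the
    # start of a new file, and open_writer(header) returns a writer for it.
    # Like the examples, this assumes the very first chunk carries a header.
    writer = None
    for header, payload in chunks:
        if header:
            if writer is not None:
                yield writer  # the previous file is complete; hand it back
            writer = open_writer(header)
        writer.write(payload)
    if writer is not None:
        yield writer  # the last file in the stream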