def upload_nn(self, nn_id, nn_hash):
    """Archive the locally stored NN weights identified by *nn_hash* and
    stream the tar to the server as an 'UploadModel' request.

    :param nn_id: numeric/opaque model id, echoed in each ChunkModel message.
    :param nn_hash: content hash used both to locate the local storage dir
                    and as the 'x-model-hash' request header.
    :raises RuntimeError: if the NN is not present in local storage.
    """
    local_service_log = {'nn_id': nn_id, 'nn_hash': nn_hash}
    storage_nn_dir = self.storage.nns.check_storage_object(nn_hash)
    if storage_nn_dir is None:
        self.logger.critical('NN_NOT_FOUND', extra=local_service_log)
        # Bug fix: the original fell through here and later crashed inside
        # sly.archive_directory(None, ...) with an unrelated error. Fail fast
        # with an explicit exception instead.
        raise RuntimeError('NN_NOT_FOUND')

    # Pack the weights dir into a uniquely named temporary tar file.
    local_tar_path = os.path.join(constants.AGENT_TMP_DIR,
                                  sly.generate_random_string(30) + '.tar')
    sly.archive_directory(storage_nn_dir, local_tar_path)

    freader = ChunkedFileReader(local_tar_path, constants.NETW_CHUNK_SIZE)
    progress = sly.ProgressCounter("Upload NN", freader.splitter.chunk_cnt,
                                   ext_logger=self.logger)

    def chunk_generator():
        # Each yielded message carries one chunk of the tar plus the model
        # description; progress is reported after the chunk is consumed.
        for chunk_bytes in freader:
            current_chunk = api_proto.Chunk(buffer=chunk_bytes,
                                            total_size=freader.file_size)
            yield api_proto.ChunkModel(
                chunk=current_chunk,
                model=api_proto.ModelDescription(id=nn_id, hash=nn_hash))
            progress.iter_done_report()

    try:
        self.api.put_stream_with_data('UploadModel', api_proto.Empty,
                                      chunk_generator(),
                                      addit_headers={'x-model-hash': nn_hash})
    finally:
        # Remove the temporary tar even if the upload fails.
        sly.silent_remove(local_tar_path)
    self.logger.info('NN_UPLOADED', extra=local_service_log)
def upload_archive(self, dir_to_archive, archive_name):
    """Pack *dir_to_archive* into a temporary tar and stream it to the
    server as an 'UploadArchive' request; the tar is always cleaned up.

    :param dir_to_archive: directory whose contents are archived.
    :param archive_name: name sent via the 'x-archive-name' header.
    """
    tar_path = os.path.join(constants.AGENT_TMP_DIR,
                            sly.generate_random_string(30) + '.tar')
    self.logger.info("PACK_TO_ARCHIVE ...")
    sly.archive_directory(dir_to_archive, tar_path)

    reader = ChunkedFileReader(tar_path, constants.NETW_CHUNK_SIZE)
    counter = sly.ProgressCounter("Upload archive",
                                  reader.splitter.chunk_cnt,
                                  ext_logger=self.logger)

    def produce_chunks():
        # One Chunk message per file piece; progress ticks as each is sent.
        for piece in reader:
            yield api_proto.Chunk(buffer=piece, total_size=reader.file_size)
            counter.iter_done_report()

    try:
        self.api.put_stream_with_data(
            'UploadArchive', api_proto.Empty, produce_chunks(),
            addit_headers={'x-archive-name': archive_name})
    finally:
        sly.silent_remove(tar_path)
    self.logger.info('ARCHIVE_UPLOADED', extra={'archive_name': archive_name})
def chunk_generator():
    """Yield ChunkImage messages for every annotation file, processed in
    batches; progress is reported once per annotation (closure over
    img_ids / img_names / ann_paths / project_id / self)."""
    progress = sly.ProgressCounter('Upload annotations', len(img_ids),
                                   ext_logger=self.logger)
    triples = list(zip(img_ids, img_names, ann_paths))
    for batch_some in sly.batched(triples,
                                  constants.BATCH_SIZE_UPLOAD_ANNOTATIONS):
        for image_id, image_title, path in batch_some:
            img_msg = api_proto.Image(id=image_id, title=image_title,
                                      project_id=project_id)
            reader = ChunkedFileReader(path, constants.NETW_CHUNK_SIZE)
            for raw in reader:
                yield api_proto.ChunkImage(
                    chunk=api_proto.Chunk(buffer=raw,
                                          total_size=reader.file_size),
                    image=img_msg)
            self.logger.trace('annotation is uploaded',
                              extra={'img_name': image_title,
                                     'img_path': path})
            progress.iter_done_report()
def chunk_generator():
    """Yield ChunkImage messages for each annotation in *batch_some*
    (closure over batch_some / project_id / progress / self)."""
    for image_id, image_title, path in batch_some:
        img_msg = api_proto.Image(id=image_id, title=image_title,
                                  project_id=project_id)
        reader = ChunkedFileReader(path, constants.NETW_CHUNK_SIZE)
        for raw in reader:
            piece = api_proto.Chunk(buffer=raw, total_size=reader.file_size)
            yield api_proto.ChunkImage(chunk=piece, image=img_msg)
        self.logger.trace('annotation is uploaded',
                          extra={'img_name': image_title, 'img_path': path})
        progress.iter_done_report()
def chunk_generator():
    """Yield ChunkImage messages for each image file in *batch_paths_infos*
    (closure over batch_paths_infos / progress / self)."""
    for path, img_info in batch_paths_infos:
        self.logger.trace('image upload start', extra={'img_path': path})
        reader = ChunkedFileReader(path, constants.NETW_CHUNK_SIZE)
        for raw in reader:
            yield api_proto.ChunkImage(
                chunk=api_proto.Chunk(buffer=raw,
                                      total_size=reader.file_size),
                image=img_info)
        self.logger.trace('image uploaded', extra={'img_path': path})
        progress.iter_done_report()
def chunk_generator():
    """Yield ChunkImage messages for every image file, processed in
    batches; progress is reported once per image (closure over
    fpaths / infos / self)."""
    progress = sly.ProgressCounter('Upload images', len(fpaths),
                                   ext_logger=self.logger)
    pairs = list(zip(fpaths, infos))
    for batch in sly.batched(pairs, constants.BATCH_SIZE_UPLOAD_IMAGES):
        for path, img_info in batch:
            self.logger.trace('image upload start', extra={'img_path': path})
            reader = ChunkedFileReader(path, constants.NETW_CHUNK_SIZE)
            for raw in reader:
                yield api_proto.ChunkImage(
                    chunk=api_proto.Chunk(buffer=raw,
                                          total_size=reader.file_size),
                    image=img_info)
            self.logger.trace('image uploaded', extra={'img_path': path})
            progress.iter_done_report()