def download_nn(self, nn_id, nn_hash, model_dir):
    """Materialize NN weights into ``model_dir``.

    Tries the local storage cache first; on a miss, streams the model archive
    from the server chunk-by-chunk, extracts it into ``model_dir`` and then
    registers the result back into local storage.

    :param nn_id: server-side model id (passed through to the API call).
    :param nn_hash: content hash used as the local-storage key.
    :param model_dir: destination directory for the extracted weights.
    :raises RuntimeError: if the stream was empty or the downloaded file
        failed the writer's integrity check.
    """
    if self.storage.nns.read_object(nn_hash, model_dir):
        self.logger.info('NN has been copied from local storage.')
        return

    nn_archive_path = os.path.join(constants.AGENT_TMP_DIR,
                                   sly.generate_random_string(30) + '.tar')
    fh = None
    progress = None
    for nn_chunk in self.api.get_stream_with_data(
            'DownloadModel',
            api_proto.Chunk,
            api_proto.ModelDescription(id=nn_id, hash=nn_hash)):
        if fh is None:
            fh = ChunkedFileWriter(file_path=nn_archive_path)
        fh.write(nn_chunk)
        if progress is None:
            # fh.total_size becomes known only after the first chunk arrives
            progress = sly.progress_download_nn(fh.total_size, ext_logger=self.logger)
        progress.iters_done_report(len(nn_chunk.buffer))

    # Fix: the original dereferenced fh unconditionally, so an empty stream
    # crashed with AttributeError on None instead of a meaningful error.
    if fh is None:
        self.logger.critical('Empty stream while downloading NN weights.',
                             extra={'nn_id': nn_id, 'nn_hash': nn_hash})
        raise RuntimeError('Unable to download NN weights.')

    if not fh.close_and_check():
        self.logger.critical('file was skipped while downloading',
                             extra={'file_path': fh.file_path})
        raise RuntimeError('Unable to download NN weights.')

    # Fix: remove the temp archive even if extraction raises (it leaked before).
    try:
        with tarfile.open(nn_archive_path) as archive:
            archive.extractall(model_dir)
    finally:
        sly.silent_remove(nn_archive_path)

    self.logger.info('NN has been downloaded from server.')
    self.storage.nns.write_object(model_dir, nn_hash)
def upload_nn(self, nn_id, nn_hash):
    """Upload NN weights identified by ``nn_hash`` from local storage to the server.

    Packs the cached weights directory into a temporary tar archive and
    streams it in fixed-size chunks via the 'UploadModel' endpoint. The
    temporary archive is always removed, even if the upload fails.

    :param nn_id: server-side model id attached to every streamed chunk.
    :param nn_hash: content hash; local-storage key and 'x-model-hash' header.
    :raises RuntimeError: if the hash is not present in local storage.
    """
    local_service_log = {'nn_id': nn_id, 'nn_hash': nn_hash}
    storage_nn_dir = self.storage.nns.check_storage_object(nn_hash)
    if storage_nn_dir is None:
        self.logger.critical('NN_NOT_FOUND', extra=local_service_log)
        # Fix: the original fell through with storage_nn_dir=None and later
        # crashed inside sly.archive_directory with an unrelated error.
        raise RuntimeError('NN_NOT_FOUND')

    local_tar_path = os.path.join(constants.AGENT_TMP_DIR,
                                  sly.generate_random_string(30) + '.tar')
    sly.archive_directory(storage_nn_dir, local_tar_path)

    freader = ChunkedFileReader(local_tar_path, constants.NETW_CHUNK_SIZE)
    progress = sly.ProgressCounter("Upload NN",
                                   freader.splitter.chunk_cnt,
                                   ext_logger=self.logger)

    def chunk_generator():
        # Each streamed message pairs a data chunk with the model identity.
        for chunk_bytes in freader:
            current_chunk = api_proto.Chunk(buffer=chunk_bytes,
                                            total_size=freader.file_size)
            yield api_proto.ChunkModel(
                chunk=current_chunk,
                model=api_proto.ModelDescription(id=nn_id, hash=nn_hash))
            progress.iter_done_report()

    try:
        self.api.put_stream_with_data('UploadModel',
                                      api_proto.Empty,
                                      chunk_generator(),
                                      addit_headers={'x-model-hash': nn_hash})
    finally:
        # Always clean up the temporary archive, success or failure.
        sly.silent_remove(local_tar_path)

    self.logger.info('NN_UPLOADED', extra=local_service_log)
def upload_archive(self, dir_to_archive, archive_name):
    """Tar up ``dir_to_archive`` and stream it to the server's 'UploadArchive' endpoint.

    A temporary archive is written under the agent tmp dir, streamed in
    fixed-size chunks, and removed afterwards regardless of outcome.

    :param dir_to_archive: directory whose contents are uploaded.
    :param archive_name: value sent in the 'x-archive-name' header.
    """
    tmp_tar = os.path.join(constants.AGENT_TMP_DIR,
                           sly.generate_random_string(30) + '.tar')
    self.logger.info("PACK_TO_ARCHIVE ...")
    sly.archive_directory(dir_to_archive, tmp_tar)

    reader = ChunkedFileReader(tmp_tar, constants.NETW_CHUNK_SIZE)
    counter = sly.ProgressCounter("Upload archive",
                                  reader.splitter.chunk_cnt,
                                  ext_logger=self.logger)

    def stream_chunks():
        # Yield one protobuf Chunk per file slice, reporting progress as we go.
        for piece in reader:
            yield api_proto.Chunk(buffer=piece, total_size=reader.file_size)
            counter.iter_done_report()

    try:
        self.api.put_stream_with_data(
            'UploadArchive',
            api_proto.Empty,
            stream_chunks(),
            addit_headers={'x-archive-name': archive_name})
    finally:
        # The tmp archive must not outlive the upload attempt.
        sly.silent_remove(tmp_tar)

    self.logger.info('ARCHIVE_UPLOADED', extra={'archive_name': archive_name})