def upload_nn(self, nn_id, nn_hash):
    local_service_log = {'nn_id': nn_id, 'nn_hash': nn_hash}

    storage_nn_dir = self.storage.nns.check_storage_object(nn_hash)
    if storage_nn_dir is None:
        self.logger.critical('NN_NOT_FOUND', extra=local_service_log)
        raise RuntimeError('NN_NOT_FOUND')  # nothing to upload without local weights

    # pack the stored model directory into a temporary tar before streaming
    local_tar_path = os.path.join(constants.AGENT_TMP_DIR, sly.generate_random_string(30) + '.tar')
    sly.archive_directory(storage_nn_dir, local_tar_path)

    freader = ChunkedFileReader(local_tar_path, constants.NETW_CHUNK_SIZE)
    progress = sly.ProgressCounter("Upload NN", freader.splitter.chunk_cnt, ext_logger=self.logger)

    def chunk_generator():
        for chunk_bytes in freader:
            current_chunk = api_proto.Chunk(buffer=chunk_bytes, total_size=freader.file_size)
            yield api_proto.ChunkModel(chunk=current_chunk,
                                       model=api_proto.ModelDescription(id=nn_id, hash=nn_hash))
            progress.iter_done_report()

    try:
        self.api.put_stream_with_data('UploadModel', api_proto.Empty, chunk_generator(),
                                      addit_headers={'x-model-hash': nn_hash})
    finally:
        # remove the temporary archive whether or not the upload succeeded
        sly.silent_remove(local_tar_path)

    self.logger.info('NN_UPLOADED', extra=local_service_log)
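# For reference: a minimal sketch of the ChunkedFileReader interface assumed
# above. The real class lives elsewhere in the agent; the attribute names
# (file_size, splitter.chunk_cnt) are inferred from the call sites, not from
# the actual implementation.
import math
import os


class _SplitterSketch:
    def __init__(self, chunk_cnt):
        self.chunk_cnt = chunk_cnt


class _ChunkedFileReaderSketch:
    def __init__(self, file_path, chunk_size):
        self.file_path = file_path
        self.chunk_size = chunk_size
        self.file_size = os.path.getsize(file_path)
        self.splitter = _SplitterSketch(max(1, math.ceil(self.file_size / chunk_size)))

    def __iter__(self):
        # yield the file as fixed-size byte chunks; the last one may be shorter
        with open(self.file_path, 'rb') as f:
            while True:
                chunk = f.read(self.chunk_size)
                if not chunk:
                    break
                yield chunk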
def spawn_container(self, add_envs=None):
    if add_envs is None:
        add_envs = {}
    self.container = self._docker_api.containers.run(
        self.docker_image_name,
        runtime=self.docker_runtime,
        detach=True,
        # original format string had one placeholder for two arguments, which
        # silently dropped the random suffix and broke name uniqueness
        name='sly_task_{}_{}'.format(self.info['task_id'], sly.generate_random_string(5)),
        remove=False,
        volumes={self.dir_task_host: {'bind': '/sly_task_data', 'mode': 'rw'}},
        environment={'LOG_LEVEL': 'DEBUG', **add_envs},
        labels={'ecosystem': 'supervisely',
                'ecosystem_token': constants.TASKS_DOCKER_LABEL,
                'task_id': str(self.info['task_id'])},
        shm_size="1G",
        stdin_open=False,
        tty=False)
    self.logger.info('Docker container spawned', extra={'container_id': self.container.id,
                                                        'container_name': self.container.name})
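# Not part of the original class: a hedged sketch of how containers spawned
# above could later be found and removed via the labels they were given.
# It uses only standard docker-py calls (containers.list with a label filter,
# Container.stop, Container.remove); the function name is hypothetical.
import docker


def _remove_task_containers(task_id):
    client = docker.from_env()
    filters = {'label': 'task_id={}'.format(task_id)}
    for container in client.containers.list(all=True, filters=filters):
        container.stop(timeout=10)  # give the task a chance to exit cleanly
        container.remove()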
def download_nn(self, nn_id, nn_hash, model_dir):
    if self.storage.nns.read_object(nn_hash, model_dir):
        self.logger.info('NN has been copied from local storage.')
        return

    nn_archive_path = os.path.join(constants.AGENT_TMP_DIR, sly.generate_random_string(30) + '.tar')
    fh = None
    progress = None
    for nn_chunk in self.api.get_stream_with_data('DownloadModel',
                                                  api_proto.Chunk,
                                                  api_proto.ModelDescription(id=nn_id, hash=nn_hash)):
        if fh is None:
            fh = ChunkedFileWriter(file_path=nn_archive_path)
        fh.write(nn_chunk)
        if progress is None:  # fh.total_size is known only after the first chunk
            progress = sly.progress_download_nn(fh.total_size, ext_logger=self.logger)
        progress.iters_done_report(len(nn_chunk.buffer))

    if not fh.close_and_check():
        self.logger.critical('file was skipped while downloading', extra={'file_path': fh.file_path})
        raise RuntimeError('Unable to download NN weights.')

    with tarfile.open(nn_archive_path) as archive:
        archive.extractall(model_dir)
    sly.silent_remove(nn_archive_path)
    self.logger.info('NN has been downloaded from server.')
    self.storage.nns.write_object(model_dir, nn_hash)
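# For reference: a minimal sketch of the ChunkedFileWriter behavior assumed in
# download_nn. The attribute names (total_size, close_and_check, file_path) are
# taken from the call sites above; the actual implementation lives elsewhere
# in the agent.
class _ChunkedFileWriterSketch:
    def __init__(self, file_path):
        self.file_path = file_path
        self.total_size = None  # filled from the first chunk's total_size field
        self._fh = open(file_path, 'wb')
        self._written = 0

    def write(self, chunk):
        if self.total_size is None:
            self.total_size = chunk.total_size
        self._fh.write(chunk.buffer)
        self._written += len(chunk.buffer)

    def close_and_check(self):
        self._fh.close()
        # the download counts as valid only if every announced byte arrived
        return self._written == self.total_size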
def upload_archive(self, dir_to_archive, archive_name):
    local_tar_path = os.path.join(constants.AGENT_TMP_DIR, sly.generate_random_string(30) + '.tar')
    self.logger.info("PACK_TO_ARCHIVE ...")
    sly.archive_directory(dir_to_archive, local_tar_path)

    freader = ChunkedFileReader(local_tar_path, constants.NETW_CHUNK_SIZE)
    progress = sly.ProgressCounter("Upload archive", freader.splitter.chunk_cnt, ext_logger=self.logger)

    def chunk_generator():
        for chunk_bytes in freader:
            current_chunk = api_proto.Chunk(buffer=chunk_bytes, total_size=freader.file_size)
            yield current_chunk
            progress.iter_done_report()

    try:
        self.api.put_stream_with_data('UploadArchive', api_proto.Empty, chunk_generator(),
                                      addit_headers={'x-archive-name': archive_name})
    finally:
        sly.silent_remove(local_tar_path)

    self.logger.info('ARCHIVE_UPLOADED', extra={'archive_name': archive_name})
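# A hedged sketch of what api.put_stream_with_data presumably does under the
# hood: drive a client-streaming RPC with the chunk generator and pass the
# extra headers as metadata. The stub and method names here are assumptions
# for illustration, not the agent's actual transport code.
def _upload_archive_stream(stub, chunk_iterator, archive_name):
    # the consumer pulls chunks lazily, so iter_done_report above fires as
    # each chunk is actually sent, not when the generator is created
    metadata = (('x-archive-name', archive_name),)
    return stub.UploadArchive(chunk_iterator, metadata=metadata)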
def get_free_name(self, name):
    new_name = name
    if name in self.existing_names:
        new_name = name + '_' + generate_random_string(10)
    self.existing_names.add(new_name)
    return new_name
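# Standalone sketch of the same dedup logic for quick experimentation outside
# the class; generate_random_string is stubbed with uuid4 here, and the real
# helper may use a different alphabet or length.
import uuid


def _get_free_name_sketch(name, existing_names):
    new_name = name
    if name in existing_names:
        new_name = name + '_' + uuid.uuid4().hex[:10]
    existing_names.add(new_name)
    return new_name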
def _create_project(self, project_name, project_meta):
    remote_name = project_name
    for _ in range(3):
        project = self.api.simple_request('CreateProject',
                                          api_proto.Id,
                                          api_proto.Project(title=remote_name, meta=project_meta))
        if project.id != 0:  # created
            return project.id, remote_name
        # name collision on the server: retry with a random suffix
        remote_name = "{}_{}".format(project_name, sly.generate_random_string(5))
    raise RuntimeError('Unable to create project with random suffix.')
def _create_dataset(self, project_id, dataset_name):
    remote_name = dataset_name
    for _ in range(3):
        dataset = self.api.simple_request('CreateDataset',
                                          api_proto.Id,
                                          api_proto.ProjectDataset(project=api_proto.Project(id=project_id),
                                                                   dataset=api_proto.Dataset(title=remote_name)))
        if dataset.id != 0:  # created
            return dataset.id, remote_name
        # name collision on the server: retry with a random suffix
        remote_name = '{}_{}'.format(dataset_name, sly.generate_random_string(5))
    raise RuntimeError('Unable to create dataset with random suffix.')
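# _create_project and _create_dataset share the same retry-with-random-suffix
# pattern; a generic hedged sketch of that pattern follows. make_attempt is a
# hypothetical stand-in for the CreateProject/CreateDataset requests and is
# assumed to return 0 on a name conflict, mirroring the id checks above.
def _create_with_suffix(base_name, make_attempt, attempts=3):
    remote_name = base_name
    for _ in range(attempts):
        new_id = make_attempt(remote_name)
        if new_id != 0:  # a non-zero id means the server accepted the name
            return new_id, remote_name
        remote_name = '{}_{}'.format(base_name, sly.generate_random_string(5))
    raise RuntimeError('Unable to create object with random suffix.')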