def download_data_sources(self, only_meta=False):
    self.logger.info("download_data_sources started")
    data_sources = sly.get_data_sources(self.info['graph'])
    for proj, datasets in data_sources.items():
        pr_name = proj
        pr_proto = self.api.simple_request('GetProjectByName', api_proto.Project,
                                           api_proto.Project(title=pr_name))
        if pr_proto.id == -1:
            self.logger.critical('Project not found', extra={'project_name': pr_name})
            raise RuntimeError('Project not found')

        datasets_proto_arr = []
        if datasets != "*":
            # Resolve each requested dataset by name within the project.
            for ds_name in datasets:
                ds_proto = self.api.simple_request(
                    'GetDatasetByName', api_proto.Dataset,
                    api_proto.ProjectDataset(project=api_proto.Project(id=pr_proto.id),
                                             dataset=api_proto.Dataset(title=ds_name)))
                if ds_proto.id == -1:
                    self.logger.critical('Dataset not found', extra={
                        'project_id': pr_proto.id,
                        'project_title': pr_name,
                        'dataset_title': ds_name
                    })
                    raise RuntimeError('Dataset not found')
                datasets_proto_arr.append(api_proto.Dataset(id=ds_proto.id, title=ds_name))
        else:
            # "*" selects every dataset of the project.
            datasets_proto = self.api.simple_request('GetProjectDatasets', api_proto.DatasetArray,
                                                     api_proto.Id(id=pr_proto.id))
            datasets_proto_arr = datasets_proto.datasets

        if only_meta:
            # Fetch and persist only the project meta, without any image data.
            project_info = self.api.simple_request('GetProjectMeta', api_proto.Project,
                                                   api_proto.Id(id=pr_proto.id))
            pr_writer = sly.ProjectWriterFS(self.dir_data, project_info.title)
            pr_meta = sly.ProjectMeta(sly.json_loads(project_info.meta))
            pr_writer.write_meta(pr_meta)
        else:
            self.data_mgr.download_project(self.dir_data, pr_proto, datasets_proto_arr,
                                           download_images=self.download_images)
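# A minimal sketch of the mapping shape this method expects back from
# sly.get_data_sources; the project and dataset names below are hypothetical,
# only the structure (explicit dataset list vs. the "*" wildcard) comes from
# the code above:
#
#   {
#       'project_a': ['dataset_1', 'dataset_2'],  # explicit dataset selection
#       'project_b': '*',                         # all datasets of the project
#   }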
def download_step(self):
    self.logger.info("DOWNLOAD_DATA")
    sly.json_dump(self.info['config'], self.config_path)

    pr_info = self.info['project']
    project = api_proto.Project(id=pr_info['id'], title=pr_info['title'])
    datasets = [api_proto.Dataset(id=ds['id'], title=ds['title'])
                for ds in pr_info['datasets']]
    self.data_mgr.download_project(self.dir_data, project, datasets)

    if self.info.get('nn_model') is not None:
        nn_id = self.info['nn_model']['id']
        nn_hash = self.info['nn_model']['hash']
        self.logger.info('DOWNLOAD_NN', extra={'nn_id': nn_id, 'nn_hash': nn_hash})
        self.data_mgr.download_nn(nn_id, nn_hash, self.dir_model)
    else:
        # No source NN provided: the task starts from scratch.
        self.logger.info('Initializing task without source NN.')

    self.report_step_done(TaskStep.DOWNLOAD)
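# For reference, the slice of self.info that download_step reads. The keys are
# exactly those accessed above; the sample values are hypothetical:
#
#   {
#       'config': {...},                            # dumped to self.config_path
#       'project': {
#           'id': 7,
#           'title': 'my_project',
#           'datasets': [{'id': 1, 'title': 'ds1'}],
#       },
#       'nn_model': {'id': 42, 'hash': 'abc123'},   # optional for this task
#   }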
def _create_project(self, project_name, project_meta):
    remote_name = project_name
    for _ in range(3):
        project = self.api.simple_request('CreateProject', api_proto.Id,
                                          api_proto.Project(title=remote_name, meta=project_meta))
        if project.id != 0:  # non-zero id: the project was created
            return project.id, remote_name
        # Name collision: retry with a random suffix.
        remote_name = '{}_{}'.format(project_name, sly.generate_random_string(5))
    raise RuntimeError('Unable to create project with random suffix.')
def _create_dataset(self, project_id, dataset_name):
    remote_name = dataset_name
    for _ in range(3):
        dataset = self.api.simple_request(
            'CreateDataset', api_proto.Id,
            api_proto.ProjectDataset(project=api_proto.Project(id=project_id),
                                     dataset=api_proto.Dataset(title=remote_name)))
        if dataset.id != 0:  # non-zero id: the dataset was created
            return dataset.id, remote_name
        # Name collision: retry with a random suffix.
        remote_name = '{}_{}'.format(dataset_name, sly.generate_random_string(5))
    raise RuntimeError('Unable to create dataset with random suffix.')
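# _create_project and _create_dataset above share the same pattern: try the
# requested name, and when the API signals a collision by returning id == 0,
# retry under a randomly suffixed name. A minimal sketch of that pattern as a
# standalone helper; '_create_with_suffix_retries' and 'create_fn' are
# hypothetical names, not part of this module:
def _create_with_suffix_retries(base_name, create_fn, attempts=3):
    remote_name = base_name
    for _ in range(attempts):
        created_id = create_fn(remote_name)  # assumed to return 0 on name collision
        if created_id != 0:  # non-zero id: the entity was created
            return created_id, remote_name
        remote_name = '{}_{}'.format(base_name, sly.generate_random_string(5))
    raise RuntimeError('Unable to create entity with random suffix.')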
def download_step(self):
    # This variant requires a source NN; fail fast if the task has none.
    if self.info.get('nn_model') is None:
        self.logger.critical('TASK_NN_EMPTY')
        raise ValueError('TASK_NN_EMPTY')

    nn_id = self.info['nn_model']['id']
    nn_hash = self.info['nn_model']['hash']
    self.logger.info('DOWNLOAD_NN', extra={'nn_id': nn_id, 'nn_hash': nn_hash})
    self.data_mgr.download_nn(nn_id, nn_hash, self.dir_model)

    self.logger.info("DOWNLOAD_DATA")
    sly.json_dump(self.info['config'], self.config_path)
    pr_info = self.info['project']
    project = api_proto.Project(id=pr_info['id'], title=pr_info['title'])
    datasets = [api_proto.Dataset(id=ds['id'], title=ds['title'])
                for ds in pr_info['datasets']]
    self.data_mgr.download_project(self.dir_data, project, datasets)

    self.report_step_done(TaskStep.DOWNLOAD)