def set_project_meta(api, project_id, state):
    """Replace the project meta with exactly two classes built from *state*.

    The meta is first overwritten with an empty one so that any previously
    existing labels and classes are dropped, then the new two-class meta
    (foreground + stuff) is applied.  Returns ``(fg_class, st_class)``.
    """
    def _class_from_state(name_key, shape_key, color_key):
        # Build an ObjClass from the corresponding UI-state entries.
        return sly.ObjClass(state[name_key],
                            GET_GEOMETRY_FROM_STR(state[shape_key]),
                            color=sly.color.hex2rgb(state[color_key]))

    fg_class = _class_from_state(const.FG_NAME, const.FG_SHAPE, const.FG_COLOR)
    st_class = _class_from_state(const.ST_NAME, const.ST_SHAPE, const.ST_COLOR)
    meta = sly.ProjectMeta(
        obj_classes=sly.ObjClassCollection([fg_class, st_class]))
    # Clear previous labels and classes, then apply the fresh meta.
    api.project.update_meta(project_id, sly.ProjectMeta().to_json())
    api.project.update_meta(project_id, meta.to_json())
    return fg_class, st_class
def convert():
    """Import the 'RANGE' layout (d_images/*.jpg + d_masks/*.mat per dataset)
    into a freshly created Supervisely project under RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    all_dirs = os.path.join(sly.TaskPaths.DATA_DIR, 'RANGE')
    for ds_name, sample_names in read_datasets(all_dirs).items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        # These paths depend only on the dataset, so compute them once.
        subdir = os.path.join(all_dirs, ds_name)
        img_foto = os.path.join(subdir, 'd_images')
        img_mat = os.path.join(subdir, 'd_masks')
        for name in sample_names:
            src_img_path = os.path.join(img_foto, name + '.jpg')
            inst_path = os.path.join(img_mat, name + '.mat')
            # Import the sample only when both image and mask exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path, number_class,
                              pixel_color)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import the graz50 facade dataset (images + full-label masks) into a
    new Supervisely project, clearing RESULTS_DIR first."""
    sly.fs.clean_dir(sly.TaskPaths.RESULTS_DIR)
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    all_img = os.path.join(
        sly.TaskPaths.DATA_DIR,
        'graz50_facade_dataset/graz50_facade_dataset/images')
    all_ann = os.path.join(
        sly.TaskPaths.DATA_DIR,
        'graz50_facade_dataset/graz50_facade_dataset/labels_full')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(all_ann).items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        for name in sample_names:
            src_img_path = os.path.join(all_img, name + '.png')
            inst_path = os.path.join(all_ann, name + '.png')
            # Import the sample only when both image and mask exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path, default_classes_colors)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import COCO stuff+thing maps: pairs images from DATA_DIR with masks
    from 'stuffthingmaps_trainval2017', using the color map in labels.txt."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    imgs_dir = sly.TaskPaths.DATA_DIR
    inst_dir = os.path.join(sly.TaskPaths.DATA_DIR,
                            'stuffthingmaps_trainval2017')
    labels = os.path.join(sly.TaskPaths.DATA_DIR, 'labels.txt')
    number_class, pixel_color = read_colors(labels)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(inst_dir).items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        imgs_dir_new = os.path.join(imgs_dir, ds_name)
        inst_dir_new = os.path.join(inst_dir, ds_name)
        for name in sample_names:
            src_img_path = os.path.join(imgs_dir_new, name + '.jpg')
            inst_path = os.path.join(inst_dir_new, name + '.png')
            # Import the sample only when both image and mask exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path, number_class,
                              pixel_color)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import COCO-Text: images from 'train2017' combined with the text
    polygons parsed from COCO_Text.json."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'train2017')
    ann_file = os.path.join(sly.TaskPaths.DATA_DIR, 'COCO_Text.json')
    src_datasets = read_datasets(ann_file)
    photo_to_coords_text = read_coords_text(ann_file)
    NAME_ZERO_PADDING = 12  # COCO file names are zero-padded to 12 digits
    for ds_name, sample_names in src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        for name in sample_names:
            full_img_name = name.zfill(NAME_ZERO_PADDING) + '.jpg'
            src_img_path = os.path.join(imgs_dir, full_img_name)
            if os.path.isfile(src_img_path):
                try:
                    coords_text = photo_to_coords_text[int(name)]
                except KeyError:
                    # No text annotations for this image -- skip it entirely
                    # (progress for it is intentionally not reported).
                    continue
                ann = get_ann(src_img_path, coords_text)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Build the output project from images that have a matching mask,
    create a single-class ('leaf') meta, then convert each sample."""
    images_pathes = self._get_images_pathes()
    masks_map = self._get_masks_mapping()
    dataset_name = 'ds'
    out_pr = sly.ProjectStructure(self.settings['res_names']['project'])
    for image_fp in images_pathes:
        base_name = os.path.basename(image_fp)
        image_ext = os.path.splitext(image_fp)[1]
        image_id = os.path.splitext(base_name)[0]
        # Only images with a corresponding mask are imported.
        if base_name.replace(image_ext, '') in masks_map:
            dt = {"image_ext": ".png", "image_orig_path": image_fp}
            out_pr.add_item(dataset_name, image_id, dt)
    out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
    out_pr_fs.make_dirs()
    res_meta = sly.ProjectMeta()
    res_meta.classes.add({'title': 'leaf',
                          'shape': 'bitmap',
                          'color': sly.gen_new_color()})
    res_meta.to_dir(out_pr_fs.project_path)
    progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
    for sample_info in out_pr_fs:
        self._convert_sample(sample_info, masks_map)
        progress.iter_done_report()
def convert(self):
    """Convert every source dataset into a new Supervisely project.

    Samples that raise while building their annotation are logged with
    context and skipped; progress is reported per dataset.
    """
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     self.settings['res_names']['project']),
        sly.OpenMode.CREATE)
    progress = sly.Progress('Dataset:', len(self.src_datasets))
    for ds_name, samples_paths in self.src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        for src_img_path in samples_paths:
            try:
                ann_path = self.get_ann_path(src_img_path)
                if os.path.isfile(src_img_path) and os.path.isfile(ann_path):
                    ann = self.get_ann(src_img_path, ann_path)
                    ds.add_item_file(os.path.basename(src_img_path),
                                     src_img_path, ann=ann)
            except Exception as e:
                exc_str = str(e)
                sly.logger.warn(
                    'Input sample skipped due to error: {}'.format(exc_str),
                    exc_info=True,
                    extra={'exc_str': exc_str,
                           'dataset_name': ds_name,
                           'image_name': src_img_path})
        progress.iter_done_report()
    out_meta = sly.ProjectMeta(
        obj_classes=sly.ObjClassCollection(self.id_to_obj_class.values()))
    out_project.set_meta(out_meta)
def convert():
    """Import Pascal-Part style data: JPEG images plus MATLAB part
    annotations from 'Annotations_Part'."""
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'JPEGImages')
    inst_dir_trainval = os.path.join(sly.TaskPaths.DATA_DIR,
                                     'Annotations_Part')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(inst_dir_trainval).items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        for name in sample_names:
            src_img_path = os.path.join(imgs_dir, name + '.jpg')
            inst_path = os.path.join(inst_dir_trainval, name + '.mat')
            # Import the sample only when both image and annotation exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import the Pratheepan skin-segmentation dataset: photos from
    'Pratheepan_Dataset' paired with masks from 'Ground_Truth/GroundT_*'."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'Pratheepan_Dataset')
    inst_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'Ground_Truth')
    default_classes_colors = {'background': [1, 1, 1],
                              'skin': [255, 255, 255]}
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(inst_dir).items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        img_dir_temp = os.path.join(imgs_dir, ds_name)
        # Mask folders are prefixed 'GroundT_' per dataset split.
        inst_dir_temp = os.path.join(inst_dir, 'GroundT_' + ds_name)
        for name in sample_names:
            src_img_path = os.path.join(img_dir_temp, name + '.jpg')
            inst_path = os.path.join(inst_dir_temp, name + '.png')
            # Import the sample only when both image and mask exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path, default_classes_colors)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import the Paris Art Deco facades dataset: PNG images with per-pixel
    class indices stored in .txt label files."""
    sly.fs.clean_dir(sly.TaskPaths.RESULTS_DIR)
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    all_img = os.path.join(sly.TaskPaths.DATA_DIR,
                           'ParisArtDecoFacadesDataset-master/images')
    all_ann = os.path.join(sly.TaskPaths.DATA_DIR,
                           'ParisArtDecoFacadesDataset-master/labels')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    src_datasets = read_datasets(all_ann)
    # Label index -> class name / RGB color used by the dataset.
    number_class = {
        2: 'Door', 3: 'Shop', 4: 'Balcony', 5: 'Window',
        6: 'Wall', 7: 'Sky', 8: 'Roof', 1: 'Unknown',
    }
    pixel_color = {
        2: (255, 255, 0), 3: (0, 128, 0), 4: (0, 0, 255),
        5: (128, 255, 0), 6: (255, 0, 0), 7: (0, 255, 255),
        8: (211, 211, 211), 1: (0, 0, 0),
    }
    for ds_name, sample_names in src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        for name in sample_names:
            src_img_path = os.path.join(all_img, name + '.png')
            inst_path = os.path.join(all_ann, name + '.txt')
            # Import the sample only when both image and labels exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path, number_class,
                              pixel_color)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Convert all Mapillary source datasets into a new Supervisely project.

    Failed samples are logged and skipped; a progress line is emitted every
    10 items (the default interval is too coarse for ~18k samples).
    """
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     self.settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in self.src_datasets.items():
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))
        # By default progress for 18000 samples report only every 180 - too big.
        progress.report_every = 10
        ds = out_project.create_dataset(ds_name)
        for name in sample_names:
            img_name = name + '.jpg'
            src_img_path = os.path.join(self._imgs_dir(ds_name), img_name)
            inst_path = os.path.join(self._inst_dir(ds_name), name + '.png')
            try:
                ann = self._generate_annotation(src_img_path, inst_path)
                ds.add_item_file(img_name, src_img_path, ann=ann)
            except Exception as e:
                exc_str = str(e)
                sly.logger.warn(
                    'Input sample skipped due to error: {}'.format(exc_str),
                    exc_info=True,
                    extra={'exc_str': exc_str,
                           'dataset_name': ds_name,
                           'image': src_img_path})
            progress.iter_done_report()
        sly.logger.info(
            "Dataset '{}' samples processing is done.".format(ds_name),
            extra={})
    out_meta = sly.ProjectMeta(obj_classes=sly.ObjClassCollection(
        self._class_id_to_object_class.values()))
    out_project.set_meta(out_meta)
    sly.logger.info("Mapillary samples processing is done.", extra={})
def download_project(self, parent_dir, project, datasets, download_images=True):
    """Download a project's meta, optionally its images, and all annotations
    for the given *datasets* into *parent_dir*.

    ``datasets`` is an iterable of dataset protos; ``download_images=False``
    fetches only meta and annotations.
    """
    project_info = self.api.simple_request('GetProjectMeta',
                                           api_proto.Project,
                                           api_proto.Id(id=project.id))
    pr_writer = sly.ProjectWriterFS(parent_dir, project_info.title)
    pr_writer.write_meta(sly.ProjectMeta(sly.json_loads(project_info.meta)))
    # Map every image id to the title of the dataset it belongs to.
    image_id_to_ds = {}
    for dataset in datasets:
        image_array = self.api.simple_request('GetDatasetImages',
                                              api_proto.ImageArray,
                                              api_proto.Id(id=dataset.id))
        image_id_to_ds.update(
            {img_id: dataset.title for img_id in image_array.images})
    if download_images is True:
        self._download_images(pr_writer, image_id_to_ds)
    self._download_annotations(pr_writer, image_id_to_ds)
def convert():
    """Import ICDAR Challenge-4 text localization data, processing the
    train and test splits from their respective image/gt directories."""
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for directory in ['train', 'test']:
        if directory == 'train':
            imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR,
                                    'ch4_training_images')
            inst_dir = os.path.join(
                sly.TaskPaths.DATA_DIR,
                'ch4_training_localization_transcription_gt')
        else:
            imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR,
                                    'ch4_test_images')
            inst_dir = os.path.join(sly.TaskPaths.DATA_DIR,
                                    'Challenge4_Test_Task1_GT')
        src_datasets = read_datasets(inst_dir, directory)
        for ds_name, sample_names in src_datasets.items():
            ds = out_project.create_dataset(ds_name)
            progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                    len(sample_names))  # for logger
            for name in sample_names:
                src_img_path = os.path.join(imgs_dir, name + '.jpg')
                # Ground-truth files carry a 'gt_' prefix.
                inst_path = os.path.join(inst_dir, 'gt_' + name + '.txt')
                if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                    ann = get_ann(src_img_path, inst_path)
                    ds.add_item_file(name, src_img_path, ann=ann)
                progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Map the input dataset structure to the output project, write a
    bitmap-class meta from ``self.cls2col``, then convert each sample."""
    out_pr = sly.ProjectStructure(self.settings['res_names']['project'])
    for ds_name, sample_names in self.src_datasets.items():
        for name in sample_names:
            dt = {
                'src_img_path': osp.join(self._imgs_dir(ds_name),
                                         name + '.jpg'),
                'segm_path': osp.join(self._segm_dir(ds_name),
                                      name + '.png'),
                'inst_path': osp.join(self._inst_dir(ds_name),
                                      name + '.png'),
            }
            # Keep only samples whose image, segmentation and instance
            # files all exist on disk.
            if all(osp.isfile(p) for p in dt.values()):
                dt['image_ext'] = '.jpg'
                out_pr.add_item(ds_name, name, dt)
    out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
    out_pr_fs.make_dirs()
    res_meta = sly.ProjectMeta()
    for class_name, color in self.cls2col.items():
        res_meta.classes.add({'title': class_name,
                              'shape': 'bitmap',
                              'color': sly.color2code(color)})
    res_meta.to_dir(out_pr_fs.project_path)
    progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
    for sample_info in out_pr_fs:
        self._convert_sample(sample_info)
        progress.iter_done_report()
def convert(self):
    """Build the output project for all images in ``self.img_dir``, write a
    single-class ('untitled') meta, then convert each sample against the
    mask files found in ``self.ann_dir``."""
    images_pathes = self._get_files_list(self.img_dir)
    masks_pathes = self._get_files_list(self.ann_dir)
    # Mask base name (no extension) -> mask file name.
    masks_map = {os.path.splitext(mask_p)[0]: mask_p
                 for mask_p in masks_pathes}
    dataset_name = 'ds'
    out_pr = sly.ProjectStructure(self.settings['res_names']['project'])
    for image_fp in images_pathes:
        image_ext = os.path.splitext(image_fp)[1]
        image_id = os.path.splitext(image_fp)[0]
        dt = {"image_ext": ".png", "image_ext_in": image_ext}
        out_pr.add_item(dataset_name, image_id, dt)
    out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
    out_pr_fs.make_dirs()
    res_meta = sly.ProjectMeta()
    res_meta.classes.add({'title': 'untitled',
                          'shape': 'bitmap',
                          'color': sly.gen_new_color()})
    res_meta.to_dir(out_pr_fs.project_path)
    progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
    for sample_info in out_pr_fs:
        self._convert_sample(sample_info, masks_map)
        progress.iter_done_report()
def convert():
    """Import ADE20K-style data: JPEG images from ADEChallengeData2016
    paired with instance masks from 'annotations_instance'."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    all_img = os.path.join(sly.TaskPaths.DATA_DIR,
                           'ADEChallengeData2016/images')
    all_ann = os.path.join(sly.TaskPaths.DATA_DIR, 'annotations_instance')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    src_datasets = read_datasets(all_ann)
    # Seed class<->color mappings with the background entry.
    default_classes_colors = {'background': (10, 10, 10)}
    default_colors_classes = {(10, 10, 10): 'background'}
    for ds_name, sample_names in src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        all_img_temp = os.path.join(all_img, ds_name)
        all_ann_temp = os.path.join(all_ann, ds_name)
        for name in sample_names:
            src_img_path = os.path.join(all_img_temp, name + '.jpg')
            inst_path = os.path.join(all_ann_temp, name + '.png')
            # Import the sample only when both image and mask exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path,
                              default_classes_colors, default_colors_classes)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Build the output project from the images in ``self.dataset_dir``,
    write a bitmap-class meta from ``self.classes``, then convert samples.

    The dataset name is the basename of the (normalized) dataset directory.
    """
    images_pathes = self._get_images_pathes()
    dataset_name = os.path.basename(os.path.normpath(self.dataset_dir))
    out_pr = sly.ProjectStructure(self.settings['res_names']['project'])
    for image_fp in images_pathes:
        image_ext = os.path.splitext(image_fp)[1]
        image_name = os.path.splitext(image_fp)[0]
        out_pr.add_item(dataset_name, image_name, {"image_ext": image_ext})
    out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
    out_pr_fs.make_dirs()
    res_meta = sly.ProjectMeta()
    # NOTE: removed a leftover debug `print(self.classes)` that polluted
    # stdout on every conversion run.
    for class_name in self.classes:
        res_meta.classes.add({'title': class_name,
                              'shape': 'bitmap',
                              'color': sly.gen_new_color()})
    res_meta.to_dir(out_pr_fs.project_path)
    progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
    for sample_info in out_pr_fs:
        self._convert_sample(sample_info)
        progress.iter_done_report()
def construct_model_meta():
    """Load the label URL and ground-truth label files into globals, build
    the index->name reverse map, and set ``g.meta`` with one NONE-valued
    tag per ground-truth label."""
    g.labels_urls = sly.json.load_json_file(g.local_labels_urls_path)
    g.gt_labels = sly.json.load_json_file(g.local_gt_labels_path)
    g.gt_index_to_labels = {index: name
                            for name, index in g.gt_labels.items()}
    tag_metas = [sly.TagMeta(name, sly.TagValueType.NONE)
                 for name in g.gt_labels]
    g.meta = sly.ProjectMeta(tag_metas=sly.TagMetaCollection(tag_metas))
def get_empty_gallery(meta: sly.ProjectMeta = None):
    """Return ``(grid_columns, empty_gallery)`` for the gallery widget.

    :param meta: project meta to embed in the gallery payload; a fresh
        empty ``sly.ProjectMeta`` is used when omitted.
    :return: tuple of the column count (2) and the gallery content dict
        with empty annotations and one empty layout list per column.
    """
    # Fix: the original used `meta=sly.ProjectMeta()` as the default, which
    # is evaluated once and shared across all calls (mutable-default
    # anti-pattern).  Use a None sentinel instead.
    if meta is None:
        meta = sly.ProjectMeta()
    CNT_GRID_COLUMNS = 2
    empty_gallery = {
        "content": {
            "projectMeta": meta.to_json(),
            "annotations": {},
            "layout": [[] for _ in range(CNT_GRID_COLUMNS)],
        },
    }
    return CNT_GRID_COLUMNS, empty_gallery
def process_meta(input_meta):
    """Build the output meta for comparison results.

    Copies the tag metas from *input_meta*, keeps only object classes that
    appear in the module-level ``classes_mapping`` (as key or value), and
    for every ground-truth class adds false-positive / false-negative
    bitmap classes plus an ANY_NUMBER IoU image tag.
    """
    # NOTE(review): kwargs `img_tag_metas`/`objtag_metas` are passed through
    # from the input meta -- verify against the supervisely version in use.
    output_meta = sly.ProjectMeta(obj_classes=None,
                                  img_tag_metas=input_meta.img_tag_metas,
                                  objtag_metas=input_meta.obj_tags)
    for obj_class in input_meta.obj_classes:
        is_mapped = (obj_class.name in classes_mapping.keys()
                     or obj_class.name in classes_mapping.values())
        if is_mapped:
            output_meta = output_meta.add_obj_class(obj_class)
    for gt_class in classes_mapping:
        output_meta = output_meta.add_obj_class(
            sly.ObjClass(make_false_positive_name(gt_class), sly.Bitmap))
        output_meta = output_meta.add_obj_class(
            sly.ObjClass(make_false_negative_name(gt_class), sly.Bitmap))
        output_meta = output_meta.add_img_tag_meta(
            sly.TagMeta(make_iou_tag_name(gt_class),
                        sly.TagValueType.ANY_NUMBER))
    return output_meta
def convert(self):
    """Import raw (annotation-less) images into the output project.

    Each image is copied, rotated according to its EXIF orientation (the
    EXIF tag is then reset to 1), and paired with an empty annotation.
    """
    in_datasets = self._find_in_datasets()
    # Map the input structure to the output project structure.
    out_pr = sly.ProjectStructure(self.settings['res_names']['project'])
    for ds_name, ds_path in in_datasets:
        img_fnames = sly.ImportImgLister.list_images(ds_path)
        for name_with_ext in img_fnames:
            img_name, img_ext = osp.splitext(name_with_ext)
            src_img_path = osp.join(ds_path, name_with_ext)
            dt = {
                'src_img_path': src_img_path,
                'image_ext': img_ext,
            }
            out_pr.add_item(ds_name, img_name, dt)
        logger.info(
            'Found source dataset with raw images: "{}", {} sample(s).'.
            format(ds_name, len(img_fnames)))
    out_pr_fs = sly.ProjectFS(self.out_dir, out_pr)
    out_pr_fs.make_dirs()
    res_meta = sly.ProjectMeta()  # empty
    res_meta.to_dir(out_pr_fs.project_path)
    progress = sly.progress_counter_import(out_pr.name, out_pr.image_cnt)
    for sample_info in out_pr_fs:
        sample_data = sample_info.ia_data
        src_img_path = sample_data['src_img_path']
        sly.copy_file(src_img_path, sample_info.img_path)  # img is ready
        image = Image.open(sample_info.img_path)
        exif_data = pyexiv2.metadata.ImageMetadata(sample_info.img_path)
        exif_data.read()
        if exif_data.get_orientation() != 1:
            # Physically rotate the pixels, then reset the EXIF
            # orientation so viewers do not rotate the image twice.
            logger.debug('Image with flip/rot EXIF',
                         extra={'orientation': exif_data.get_orientation(),
                                'image_path': sample_info.img_path})
            image = sly.image_transpose_exif(image)
            image.save(sample_info.img_path)
            exif_data['Exif.Image.Orientation'] = pyexiv2.ExifTag(
                'Exif.Image.Orientation', 1)
            exif_data.modified = True
            exif_data.write()
        imsize_wh = image.size
        ann = sly.Annotation.new_with_objects(imsize_wh, [])
        sly.json_dump(ann.pack(), sample_info.ann_path)  # ann is ready
        progress.iter_done_report()
def download_data_sources(self, only_meta=False):
    """Resolve and download every data source referenced by the task graph.

    For each (project, datasets) pair: look up the project by name,
    resolve the requested datasets (or all of them when "*"), then either
    download only the project meta (``only_meta=True``) or delegate a full
    project download to the data manager.  Raises RuntimeError when a
    project or dataset cannot be found.
    """
    self.logger.info("download_data_sources started")
    data_sources = sly.get_data_sources(self.info['graph'])
    for proj, datasets in data_sources.items():
        pr_name = proj
        pr_proto = self.api.simple_request(
            'GetProjectByName', api_proto.Project,
            api_proto.Project(title=pr_name))
        if pr_proto.id == -1:
            self.logger.critical('Project not found',
                                 extra={'project_name': pr_name})
            raise RuntimeError('Project not found')
        datasets_proto_arr = []
        if datasets != "*":
            # Resolve each requested dataset by name within the project.
            for ds_name in datasets:
                ds_proto = self.api.simple_request(
                    'GetDatasetByName', api_proto.Dataset,
                    api_proto.ProjectDataset(
                        project=api_proto.Project(id=pr_proto.id),
                        dataset=api_proto.Dataset(title=ds_name)))
                if ds_proto.id == -1:
                    self.logger.critical('Dataset not found',
                                         extra={'project_id': pr_proto.id,
                                                'project_title': pr_name,
                                                'dataset_title': ds_name})
                    raise RuntimeError('Dataset not found')
                datasets_proto_arr.append(
                    api_proto.Dataset(id=ds_proto.id, title=ds_name))
        else:
            # "*" means every dataset of the project.
            datasets_proto = self.api.simple_request(
                'GetProjectDatasets', api_proto.DatasetArray,
                api_proto.Id(id=pr_proto.id))
            datasets_proto_arr = datasets_proto.datasets
        if only_meta is True:
            project_info = self.api.simple_request(
                'GetProjectMeta', api_proto.Project,
                api_proto.Id(id=pr_proto.id))
            pr_writer = sly.ProjectWriterFS(self.dir_data,
                                            project_info.title)
            pr_meta = sly.ProjectMeta(sly.json_loads(project_info.meta))
            pr_writer.write_meta(pr_meta)
        else:
            self.data_mgr.download_project(
                self.dir_data, pr_proto, datasets_proto_arr,
                download_images=self.download_images)
def convert():
    """Convert every source dataset into a new Supervisely project,
    accumulating the discovered classes into one collection.

    Samples that fail annotation generation are logged and counted as
    skipped; a summary is logged at the end.
    """
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    classes_collection = sly.ObjClassCollection()
    instance_classes, id_to_class, class_to_color = read_colors()
    src_datasets = read_datasets()
    skipped_count = 0
    samples_count = 0
    for ds_name, sample_names in src_datasets.items():
        dataset = out_project.create_dataset(ds_name)
        dataset_progress = sly.Progress('Dataset {!r}'.format(ds_name),
                                        len(sample_names))
        for name in sample_names:
            try:
                src_img_path = osp.join(images_dir(ds_name), name)
                inst_path = osp.join(instances_dir(ds_name), name)
                # generate_annotation also extends the class collection
                # with any classes first seen in this sample.
                ann, classes_collection = generate_annotation(
                    src_img_path, inst_path, id_to_class, class_to_color,
                    classes_collection)
                item_name = osp.splitext(name)[0]
                dataset.add_item_file(item_name, src_img_path, ann)
                samples_count += 1
            except Exception as e:
                exc_str = str(e)
                sly.logger.warn(
                    'Input sample skipped due to error: {}'.format(exc_str),
                    exc_info=True,
                    extra={'exc_str': exc_str,
                           'dataset_name': ds_name,
                           'image_name': name})
                skipped_count += 1
            dataset_progress.iter_done_report()
    sly.logger.info('Processed.', extra={'samples': samples_count,
                                         'skipped': skipped_count})
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_collection))
def process_meta(input_meta):
    """Derive a bbox-only meta from *input_meta*.

    Every object class is replaced by a Rectangle class named
    ``'<name>_bbox'`` (same color), tag metas are carried over, and
    'train'/'val' NONE-valued tags are added.  Returns
    ``(output_meta, classes_mapping)`` where the mapping is
    original name -> bbox class name.
    """
    classes_mapping = {}
    output_meta = sly.ProjectMeta(obj_classes=[],
                                  tag_metas=input_meta.tag_metas)
    for obj_class in input_meta.obj_classes:
        bbox_name = '{}_bbox'.format(obj_class.name)
        classes_mapping[obj_class.name] = bbox_name
        output_meta = output_meta.add_obj_class(
            sly.ObjClass(bbox_name, sly.Rectangle, color=obj_class.color))
    for split_tag in ('train', 'val'):
        output_meta = output_meta.add_tag_meta(
            sly.TagMeta(split_tag, sly.TagValueType.NONE))
    return output_meta, classes_mapping
def create_meta(config, side):
    """Build a ProjectMeta with one AnyGeometry class per entry in
    ``config[side]`` plus the standard case/validation/finished tags."""
    classes = [sly.ObjClass(class_name, sly.AnyGeometry)
               for class_name in config[side]]
    meta = sly.ProjectMeta().add_obj_classes(classes)
    return meta.add_tag_metas([
        sly.TagMeta("case_id", sly.TagValueType.ANY_STRING),
        sly.TagMeta("validation", sly.TagValueType.ONEOF_STRING,
                    possible_values=["accepted", "rejected"]),
        sly.TagMeta("finished", sly.TagValueType.NONE),
    ])
def merge(api: sly.Api, task_id, context, state, app_logger):
    """Merge the two configured projects' classes and tags into a new
    project, record provenance in its custom data, report the result to
    the UI, then stop the app."""
    classes = _merge(CLASSES_INFO, META1.obj_classes, META2.obj_classes,
                     state["mergeClasses"], state["resolveClasses"])
    tags = _merge(TAGS_INFO, META1.tag_metas, META2.tag_metas,
                  state["mergeTags"], state["resolveTags"])
    res_meta = sly.ProjectMeta(obj_classes=sly.ObjClassCollection(classes),
                               tag_metas=sly.TagMetaCollection(tags),
                               project_type=PROJECT1.type)
    res_project = api.project.create(
        state["workspaceId"], state["resultProjectName"],
        type=PROJECT1.type,
        description=f"{PROJECT1.name} + {PROJECT2.name}",
        change_name_if_conflict=True)
    api.project.update_meta(res_project.id, res_meta.to_json())
    # Keep track of which two projects this one was merged from.
    api.project.update_custom_data(res_project.id, {
        "project1": {"id": PROJECT1.id, "name": PROJECT1.name},
        "project2": {"id": PROJECT2.id, "name": PROJECT2.name},
    })
    fields = [
        {"field": "data.createdProjectId", "payload": res_project.id},
        {"field": "data.createdProjectName", "payload": res_project.name},
    ]
    api.app.set_fields(task_id, fields)
    app_logger.info("Project is created",
                    extra={'project_id': res_project.id,
                           'project_name': res_project.name})
    #api.task.set_output_project(task_id, res_project.id, res_project.name)
    my_app.stop()
def preview(api: sly.Api, task_id, context, state, app_logger):
    """Synthesize one preview image, upload it to team files, and push it
    into the UI gallery; no-op (with a warning) when there are no
    background images."""
    bg_images = update_bg_images(api, state)
    if len(bg_images) == 0:
        sly.logger.warn("There are no background images")
    else:
        cache_dir = os.path.join(app.data_dir, "cache_images_preview")
        sly.fs.mkdir(cache_dir)
        sly.fs.clean_dir(cache_dir)
        img, ann, res_meta = synthesize(api, task_id, state, meta,
                                        images_info, labels, bg_images,
                                        cache_dir)
        res_meta, ann = postprocess(state, ann, res_meta, sly.ProjectMeta())
        if (state["taskType"] == "inst-seg"
                and state["highlightInstances"] is True):
            res_meta, ann = highlight_instances(res_meta, ann)
        src_img_path = os.path.join(cache_dir, "res.png")
        dst_img_path = os.path.join(f"/flying_object/{task_id}", "res.png")
        sly.image.write(src_img_path, img)
        # Replace any previous preview file before uploading.
        file_info = None
        if api.file.exists(team_id, dst_img_path):
            api.file.remove(team_id, dst_img_path)
        file_info = api.file.upload(team_id, src_img_path, dst_img_path)
        gallery = dict(empty_gallery)
        gallery["content"]["projectMeta"] = res_meta.to_json()
        gallery["content"]["annotations"] = {
            "preview": {
                "url": file_info.full_storage_url,
                "figures": [label.to_json() for label in ann.labels],
            }
        }
        gallery["content"]["layout"] = [["preview"]]
        fields = [
            {"field": "data.gallery", "payload": gallery},
            {"field": "state.previewLoading", "payload": False},
        ]
        api.task.set_fields(task_id, fields)
def convert():
    """Import the PennFudan pedestrian dataset: PNG photos paired with
    '<name>_mask.png' instance masks."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    all_img = os.path.join(sly.TaskPaths.DATA_DIR, 'PennFudanPed/PNGImages')
    all_ann = os.path.join(sly.TaskPaths.DATA_DIR, 'PennFudanPed/PedMasks')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Mask value -> class name / RGB color.
    number_class = {
        10: 'background', 1: 'object1', 2: 'object2', 3: 'object3',
        4: 'object4', 5: 'object5', 6: 'object6', 7: 'object7',
        8: 'object8',
    }
    pixel_color = {
        10: (0, 0, 0), 1: (255, 255, 0), 2: (255, 0, 255),
        3: (0, 255, 255), 4: (0, 255, 0), 5: (255, 0, 0),
        6: (0, 0, 255), 7: (127, 0, 217), 8: (248, 248, 248),
    }
    for ds_name, sample_names in read_datasets(all_ann).items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))  # for logger
        for name in sample_names:
            src_img_path = os.path.join(all_img, name + '.png')
            inst_path = os.path.join(all_ann, name + '_mask' + '.png')
            # Import the sample only when both image and mask exist.
            if os.path.isfile(src_img_path) and os.path.isfile(inst_path):
                ann = get_ann(src_img_path, inst_path, number_class,
                              pixel_color)
                ds.add_item_file(name, src_img_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Convert Pascal VOC samples into a new Supervisely project.

    Image base names must be unique across ``self.imgs_dir``; samples
    whose segmentation (or, when enabled, instance) mask is missing are
    skipped with a warning, and per-sample errors are logged and skipped.
    """
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     self.settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Base name (no extension) -> full image path; duplicates are fatal.
    images_filenames = dict()
    for image_path in sly.fs.list_files(self.imgs_dir):
        image_name_noext = sly.fs.get_file_name(image_path)
        if image_name_noext in images_filenames:
            raise RuntimeError(
                'Multiple image with the same base name {!r} exist.'.format(
                    image_name_noext))
        images_filenames[image_name_noext] = image_path
    for ds_name, sample_names in self.src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name),
                                len(sample_names))
        for sample_name in sample_names:
            src_img_path = images_filenames[sample_name]
            src_img_filename = os.path.basename(src_img_path)
            segm_path = os.path.join(self.segm_dir,
                                     sample_name + MASKS_EXTENSION)
            inst_path = None
            if self.with_instances:
                inst_path = os.path.join(self.inst_dir,
                                         sample_name + MASKS_EXTENSION)
            # inst_path may legitimately be None when instances are off.
            if all((x is None) or os.path.isfile(x)
                   for x in [src_img_path, segm_path, inst_path]):
                try:
                    ann = self._get_ann(src_img_path, segm_path, inst_path)
                    ds.add_item_file(src_img_filename, src_img_path,
                                     ann=ann)
                except Exception as e:
                    exc_str = str(e)
                    sly.logger.warn(
                        'Input sample skipped due to error: {}'.format(
                            exc_str),
                        exc_info=True,
                        extra={'exc_str': exc_str,
                               'dataset_name': ds_name,
                               'image': src_img_path})
            else:
                sly.logger.warning(
                    "Processing '{}' skipped because no corresponding mask found."
                    .format(src_img_filename))
            progress.iter_done_report()
        sly.logger.info(
            'Dataset "{}" samples processing is done.'.format(ds_name),
            extra={})
    out_project.set_meta(sly.ProjectMeta(obj_classes=self.obj_classes))
    sly.logger.info('Pascal VOC samples processing is done.', extra={})
def upload_project_meta(api, project_id, config_yaml_info):
    """Build a detection project meta from the parsed YOLO config (one
    Rectangle class per name with its color, plus train/val tags), upload
    it to the project, and return it."""
    classes = [
        sly.ObjClass(name=class_name,
                     geometry_type=sly.Rectangle,
                     color=config_yaml_info["colors"][class_id])
        for class_id, class_name in enumerate(config_yaml_info["names"])
    ]
    tags_arr = [
        sly.TagMeta(name="train", value_type=sly.TagValueType.NONE),
        sly.TagMeta(name="val", value_type=sly.TagValueType.NONE),
    ]
    project_meta = sly.ProjectMeta(
        obj_classes=sly.ObjClassCollection(items=classes),
        tag_metas=sly.TagMetaCollection(items=tags_arr))
    api.project.update_meta(project_id, project_meta.to_json())
    return project_meta