def upload_project(self, parent_dir, project_name, new_title, legacy=False, add_to_existing=False):
    """Upload a local Supervisely project to the server.

    :param parent_dir: directory that contains the project directory
        (or, in legacy mode, the project directory itself).
    :param project_name: name of the local project; also used to look up
        the remote project when ``add_to_existing`` is True.
    :param new_title: desired remote name when creating a new project
        (a free variant of it is picked to avoid collisions).
    :param legacy: if True, ``parent_dir`` already IS the project dir.
    :param add_to_existing: if True, merge into an existing remote project
        instead of creating a new one.
    """
    # @TODO: reimplement and use path without splitting
    if legacy is False:
        project = sly.Project(os.path.join(parent_dir, project_name), sly.OpenMode.READ)
    else:
        project = sly.Project(parent_dir, sly.OpenMode.READ)
    if add_to_existing is True:
        # Reuse the remote project: merge its meta with the local one so
        # classes/tags from both survive.
        project_id = self.public_api.project.get_info_by_name(self.workspace_id, project_name).id
        meta_json = self.public_api.project.get_meta(project_id)
        existing_meta = sly.ProjectMeta.from_json(meta_json)
        project.set_meta(sly.ProjectMeta.merge_list([project.meta, existing_meta]))
    else:
        # Fresh project: pick a non-conflicting name derived from new_title.
        new_project_name = self.public_api.project.get_free_name(self.workspace_id, new_title)
        project_id = self.public_api.project.create(self.workspace_id, new_project_name).id
    self.public_api.project.update_meta(project_id, project.meta.to_json())
    for dataset in project:
        ds_name = dataset.name
        if add_to_existing is True:
            # Avoid clobbering an existing remote dataset with the same name.
            ds_name = self.public_api.dataset.get_free_name(project_id, ds_name)
        dataset_id = self.public_api.dataset.create(project_id, ds_name).id
        self.upload_dataset(dataset, dataset_id)
    self.logger.info('PROJECT_CREATED', extra={'event_type': sly.EventType.PROJECT_CREATED, 'project_id': project_id})
def _read_and_watermark(project_cwd, logo_file_name):
    # Open the project in *project_cwd*, load the logo image and watermark
    # the whole project.  Returns the opened project, or None when the logo
    # file is missing (caller turns that into exit code 1).
    sly.logger.info('Current working directory contents: {}'.format(
        os.listdir(project_cwd)))
    project = sly.Project(project_cwd, sly.OpenMode.READ)
    logo_path = os.path.join(project_cwd, logo_file_name)
    sly.logger.info('Trying to read logo file from path: {}'.format(logo_path))
    # BUG FIX: check existence BEFORE cv2.imread -- imread silently returns
    # None for a missing file, and the original read first / checked after.
    if not os.path.isfile(logo_path):
        sly.logger.error("No logo file found in the root directory.")
        sly.logger.info("Searched for the following logo path: {}".format(logo_path))
        return None
    logo_img = cv2.imread(logo_path)
    watermark_project(project, logo_img)
    return project


def convert():
    """Watermark every image of the uploaded project with logo.png and copy
    the result into RESULTS_DIR.

    The uploaded data may hold the project either directly in DATA_DIR or
    inside exactly one sub-directory; any other layout raises RuntimeError.
    Returns 1 (without copying) when no logo file is found.
    """
    task_settings = json.load(open(sly.TaskPaths.TASK_CONFIG_PATH, 'r'))
    logo_file_name = "logo.png"
    try:
        project_cwd = sly.TaskPaths.DATA_DIR
        sly.logger.info(
            'Import info: Uploaded folder is a Supervisely Project, working from {}'
            .format(project_cwd))
        project = _read_and_watermark(project_cwd, logo_file_name)
    except FileNotFoundError:
        # DATA_DIR itself is not a project -- expect exactly one sub-dir.
        possible_projects = sly.fs.get_subdirs(sly.TaskPaths.DATA_DIR)
        if len(possible_projects) != 1:
            raise RuntimeError(
                'Wrong input project structure, or multiple projects are passed.'
            )
        project_cwd = os.path.join(sly.TaskPaths.DATA_DIR, possible_projects[0])
        sly.logger.info(
            'Import info: Uploaded sub-directory is a Supervisely Project, working from {}'
            .format(project_cwd))
        project = _read_and_watermark(project_cwd, logo_file_name)
    # NOTE: the original duplicated the whole read/watermark body in both
    # branches and ended with a no-op `except Exception as e: raise e`;
    # other exceptions now simply propagate unchanged.
    if project is None:
        return 1
    sly.logger.info('Project info: {} dataset(s), {} images(s).'.format(
        len(project.datasets), project.total_items))
    project.validate()
    project.copy_data(sly.TaskPaths.RESULTS_DIR,
                      dst_name=task_settings['res_names']['project'],
                      _use_hardlink=True)
def modify_labels_by_multiprocess(project_path, allow_list=None):
    """Modify and save labels for every item of the allowed datasets,
    fanning the per-item work out to a process pool.

    :param project_path: path to a local Supervisely project directory.
    :param allow_list: dataset names to process; datasets not listed are
        skipped.  Defaults to no datasets (empty list).
    :return: True once all submitted tasks have completed.
    """
    # BUG FIX: the original used a mutable default argument (allow_list=[]),
    # which is shared across calls; use the None-sentinel idiom instead.
    if allow_list is None:
        allow_list = []
    project = sly.Project(directory=project_path, mode=sly.OpenMode.READ)
    data_fpaths = []
    for dataset in project:
        if dataset.name not in allow_list:
            sly.logger.info('This is invalid dataset: {}/{}'.format(
                project.name, dataset.name))
            continue
        sly.logger.info('Processing dataset: {}/{}'.format(
            project.name, dataset.name))
        for item_idx, item_name in enumerate(dataset):
            item_paths = dataset.get_item_paths(item_name)
            data_fpaths.append(item_paths)
    # Process-pool context: one task per item.
    with ProcessPoolExecutor(max_workers=30) as executor:
        # Submit all tasks; map each future back to its item paths.
        future_to_task = {
            executor.submit(modify_label, project_path, project.meta, data_fpath): data_fpath
            for data_fpath in data_fpaths
        }
        # Collect results as they finish (future.result() re-raises worker errors).
        for future in as_completed(future_to_task):
            data_fpath = future_to_task[future]
            ret_ = future.result()
            print('Modify & Save results : {}'.format(ret_))
    return True
def convert():
    """Convert the RANGE dataset layout (d_images/.jpg + d_masks/.mat)
    into a new Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    root_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'RANGE')
    for ds_name, sample_names in read_datasets(root_dir).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample in sample_names:
            ds_dir = os.path.join(root_dir, ds_name)
            image_path = os.path.join(ds_dir, 'd_images', sample + '.jpg')
            mask_path = os.path.join(ds_dir, 'd_masks', sample + '.mat')
            # Import only samples for which both files are present on disk.
            if all(os.path.isfile(p) or p is None for p in (image_path, mask_path)):
                ann = get_ann(image_path, mask_path, number_class, pixel_color)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import the graz50 facade dataset (images + full label masks) into a
    new Supervisely project in RESULTS_DIR."""
    sly.fs.clean_dir(sly.TaskPaths.RESULTS_DIR)
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    images_root = os.path.join(
        sly.TaskPaths.DATA_DIR,
        'graz50_facade_dataset/graz50_facade_dataset/images')
    labels_root = os.path.join(
        sly.TaskPaths.DATA_DIR,
        'graz50_facade_dataset/graz50_facade_dataset/labels_full')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(labels_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample in sample_names:
            image_path = os.path.join(images_root, sample + '.png')
            label_path = os.path.join(labels_root, sample + '.png')
            # Import only samples with both image and label mask present.
            if all(os.path.isfile(p) or p is None for p in (image_path, label_path)):
                ann = get_ann(image_path, label_path, default_classes_colors)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import ADEChallengeData2016 images with instance annotations into a
    new Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    images_root = os.path.join(sly.TaskPaths.DATA_DIR, 'ADEChallengeData2016/images')
    anns_root = os.path.join(sly.TaskPaths.DATA_DIR, 'annotations_instance')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Seed the lookups (both directions) with the fixed background color.
    default_classes_colors = {'background': (10, 10, 10)}
    default_colors_classes = {(10, 10, 10): 'background'}
    for ds_name, sample_names in read_datasets(anns_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        ds_images = os.path.join(images_root, ds_name)
        ds_anns = os.path.join(anns_root, ds_name)
        for sample in sample_names:
            image_path = os.path.join(ds_images, sample + '.jpg')
            mask_path = os.path.join(ds_anns, sample + '.png')
            # Import only samples with both image and mask present.
            if all(os.path.isfile(p) or p is None for p in (image_path, mask_path)):
                ann = get_ann(image_path, mask_path,
                              default_classes_colors, default_colors_classes)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def __init__(self, config, output_folder, net):
    """Initialize the layer and create an empty output project on disk."""
    Layer.__init__(self, config)
    self.net = net
    self.output_folder = output_folder
    self.out_project = sly.Project(directory=output_folder, mode=sly.OpenMode.CREATE)
    # Whether the net may need image data itself (per Net.may_require_images).
    self.net_change_images = self.net.may_require_images()
def convert_video():
    """Split uploaded videos into frames and store them as a new project.

    Each video becomes its own dataset; every ``step``-th frame is saved as
    a PNG item named ``frame_XXXXX``.

    :raises RuntimeError: when no video files are found in DATA_DIR.
    """
    task_settings = json.load(open(sly.TaskPaths.SETTINGS_PATH, 'r'))
    step = DEFAULT_STEP
    if 'step' in task_settings['options']:
        step = int(task_settings['options']['step'])
    else:
        sly.logger.warning(
            'step parameter not found. set to default: {}'.format(
                DEFAULT_STEP))
    video_paths = sly.fs.list_files(sly.TaskPaths.DATA_DIR,
                                    sly.video.ALLOWED_VIDEO_EXTENSIONS)
    # BUG FIX: the original guard was `len(video_paths) < 0`, which can never
    # be true, so an empty upload silently produced an empty project.
    if len(video_paths) == 0:
        raise RuntimeError("Videos not found")
    project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     task_settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for video_path in video_paths:
        ds_name = sly.fs.get_file_name(video_path)
        ds = project.create_dataset(ds_name=ds_name)
        vreader = skvideo.io.FFmpegReader(video_path)
        vlength = vreader.getShape()[0]
        # Progress is counted per frame (total = vlength), saved or not.
        progress = sly.Progress('Import video: {}'.format(ds_name), vlength)
        for frame_id, image in enumerate(vreader.nextFrame()):
            if frame_id % step == 0:
                img_name = "frame_{:05d}".format(frame_id)
                ds.add_item_np(img_name, image, img_ext='.png')
            progress.iter_done_report()
def convert():
    """Import the Pratheepan skin-segmentation dataset into a new
    Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    images_root = os.path.join(sly.TaskPaths.DATA_DIR, 'Pratheepan_Dataset')
    masks_root = os.path.join(sly.TaskPaths.DATA_DIR, 'Ground_Truth')
    default_classes_colors = {'background': [1, 1, 1], 'skin': [255, 255, 255]}
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(masks_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        ds_images = os.path.join(images_root, ds_name)
        # Ground-truth folders are prefixed with 'GroundT_'.
        ds_masks = os.path.join(masks_root, 'GroundT_' + ds_name)
        for sample in sample_names:
            image_path = os.path.join(ds_images, sample + '.jpg')
            mask_path = os.path.join(ds_masks, sample + '.png')
            # Import only samples with both image and mask present.
            if all(os.path.isfile(p) or p is None for p in (image_path, mask_path)):
                ann = get_ann(image_path, mask_path, default_classes_colors)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def __init__(self, config, output_folder, net):
    """Create the save_masks layer: validate its settings and prepare an
    empty output project in *output_folder*.

    :param config: layer config forwarded to ``Layer.__init__``
        (which populates ``self.settings``).
    :param output_folder: directory where the output project is created.
    :param net: the net this layer belongs to.
    :raises ValueError: on a negative gt_machine_color component, on a
        missing color mapping for an enabled output flag, or when no
        output target is enabled.
    """
    Layer.__init__(self, config)
    # Optional per-class machine-mask colors: every RGB component must be >= 0.
    if 'gt_machine_color' in self.settings:
        for cls in self.settings['gt_machine_color']:
            col = self.settings['gt_machine_color'][cls]
            # @TODO: is it required?
            # if np.min(col) != np.max(col):
            #     raise ValueError('"gt_machine_color"s should have equal rgb values, e.g.: [3, 3, 3].')
            if np.min(col) < 0:
                raise ValueError('Minimum "gt_machine_color" should be [0, 0, 0].')
    # Every enabled output flag must come with its color-mapping setting.
    # NOTE(review): odir_flag_mapping is declared elsewhere on the class;
    # each entry appears to be (out_dir, flag_name, mapping_name) -- confirm.
    for _, flag_name, mapping_name in self.odir_flag_mapping:
        if self.settings[flag_name]:
            if mapping_name not in self.settings:
                raise ValueError("Color mapping {} required if {} is true.".format(mapping_name, flag_name))
            # @TODO: maybe check if all classes are present
    # At least one of the mask output targets has to be requested.
    target_arr = ['masks_machine', 'masks_human']
    target_determ = any((self.settings[x] for x in target_arr))
    if not target_determ:
        raise ValueError("Some output target ({}) should be set to true.".format(', '.join(target_arr)))
    self.output_folder = output_folder
    self.net = net
    self.out_project = sly.Project(directory=output_folder, mode=sly.OpenMode.CREATE)
    # Deprecate warning: these settings are accepted but ignored.
    for param in ['images', 'annotations']:
        if param in self.settings:
            sly.logger.warning("'save_masks' layer: '{}' parameter is deprecated. Skipped.".format(param))
def convert():
    """Import COCO-Text images and their text-region annotations into a new
    Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'train2017')
    ann_file = os.path.join(sly.TaskPaths.DATA_DIR, 'COCO_Text.json')
    src_datasets = read_datasets(ann_file)
    photo_to_coords_text = read_coords_text(ann_file)
    NAME_ZERO_PADDING = 12
    for ds_name, sample_names in src_datasets.items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample in sample_names:
            # COCO file names are the numeric id zero-padded to 12 digits.
            full_img_name = sample.zfill(NAME_ZERO_PADDING) + '.jpg'
            image_path = os.path.join(imgs_dir, full_img_name)
            if all(os.path.isfile(p) or p is None for p in [image_path]):
                try:
                    coords_text = photo_to_coords_text[int(sample)]
                except KeyError:
                    # No text annotation for this image: skip it entirely.
                    continue
                ann = get_ann(image_path, coords_text)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import COCO stuffthingmaps segmentation masks into a new Supervisely
    project in RESULTS_DIR, using the color table from labels.txt."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    imgs_root = sly.TaskPaths.DATA_DIR
    masks_root = os.path.join(sly.TaskPaths.DATA_DIR, 'stuffthingmaps_trainval2017')
    labels_path = os.path.join(sly.TaskPaths.DATA_DIR, 'labels.txt')
    number_class, pixel_color = read_colors(labels_path)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(masks_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        ds_imgs = os.path.join(imgs_root, ds_name)
        ds_masks = os.path.join(masks_root, ds_name)
        for sample in sample_names:
            image_path = os.path.join(ds_imgs, sample + '.jpg')
            mask_path = os.path.join(ds_masks, sample + '.png')
            # Import only samples with both image and mask present.
            if all(os.path.isfile(p) or p is None for p in (image_path, mask_path)):
                ann = get_ann(image_path, mask_path, number_class, pixel_color)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Convert ``self.src_datasets`` into a new Supervisely project in
    RESULTS_DIR.

    Samples whose image or annotation file is missing are skipped quietly;
    samples that fail during processing are logged and skipped.
    """
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     self.settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Progress is reported per dataset, not per image.
    progress = sly.Progress('Dataset:', len(self.src_datasets))
    for ds_name, samples_paths in self.src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        for src_img_path in samples_paths:
            try:
                ann_path = self.get_ann_path(src_img_path)
                # Only import the sample when both files actually exist.
                if all(
                        (os.path.isfile(x) for x in [src_img_path, ann_path])):
                    ann = self.get_ann(src_img_path, ann_path)
                    ds.add_item_file(os.path.basename(src_img_path),
                                     src_img_path, ann=ann)
            except Exception as e:
                # Best-effort import: log the failure and continue.
                exc_str = str(e)
                sly.logger.warn(
                    'Input sample skipped due to error: {}'.format(
                        exc_str),
                    exc_info=True,
                    extra={
                        'exc_str': exc_str,
                        'dataset_name': ds_name,
                        'image_name': src_img_path,
                    })
        progress.iter_done_report()
    out_meta = sly.ProjectMeta(
        obj_classes=sly.ObjClassCollection(self.id_to_obj_class.values()))
    out_project.set_meta(out_meta)
def convert():
    """Import ICDAR challenge-4 train/test text-localization data into a new
    Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for directory in ['train', 'test']:
        # Train and test splits live in differently named folders.
        if directory == 'train':
            imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'ch4_training_images')
            inst_dir = os.path.join(
                sly.TaskPaths.DATA_DIR,
                'ch4_training_localization_transcription_gt')
        else:
            imgs_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'ch4_test_images')
            inst_dir = os.path.join(sly.TaskPaths.DATA_DIR, 'Challenge4_Test_Task1_GT')
        for ds_name, sample_names in read_datasets(inst_dir, directory).items():
            dataset = out_project.create_dataset(ds_name)
            progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
            for sample in sample_names:
                image_path = os.path.join(imgs_dir, sample + '.jpg')
                gt_path = os.path.join(inst_dir, 'gt_' + sample + '.txt')
                # Import only samples with both image and ground truth present.
                if all(os.path.isfile(p) or p is None for p in (image_path, gt_path)):
                    ann = get_ann(image_path, gt_path)
                    dataset.add_item_file(sample, image_path, ann=ann)
                progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Convert ``self.src_datasets`` (Mapillary-style data) into a new
    Supervisely project in RESULTS_DIR.

    Samples that fail annotation generation are logged and skipped.
    """
    out_project = sly.Project(os.path.join(sly.TaskPaths.RESULTS_DIR, self.settings['res_names']['project']),
                              sly.OpenMode.CREATE)
    for ds_name, sample_names in self.src_datasets.items():
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        progress.report_every = 10  # By default progress for 18000 samples report only every 180 - too big.
        ds = out_project.create_dataset(ds_name)
        for name in sample_names:
            img_name = name + '.jpg'
            src_img_path = os.path.join(self._imgs_dir(ds_name), img_name)
            inst_path = os.path.join(self._inst_dir(ds_name), name + '.png')
            try:
                ann = self._generate_annotation(src_img_path, inst_path)
                ds.add_item_file(img_name, src_img_path, ann=ann)
            except Exception as e:
                # Best-effort import: log the failure and continue.
                exc_str = str(e)
                sly.logger.warn('Input sample skipped due to error: {}'.format(exc_str),
                                exc_info=True,
                                extra={
                                    'exc_str': exc_str,
                                    'dataset_name': ds_name,
                                    'image': src_img_path,
                                })
            progress.iter_done_report()
        sly.logger.info("Dataset '{}' samples processing is done.".format(ds_name), extra={})
    out_meta = sly.ProjectMeta(obj_classes=sly.ObjClassCollection(self._class_id_to_object_class.values()))
    out_project.set_meta(out_meta)
    sly.logger.info("Mapillary samples processing is done.", extra={})
def convert():
    """Import the Paris Art Deco facades dataset into a new Supervisely
    project in RESULTS_DIR."""
    sly.fs.clean_dir(sly.TaskPaths.RESULTS_DIR)
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    images_root = os.path.join(sly.TaskPaths.DATA_DIR,
                               'ParisArtDecoFacadesDataset-master/images')
    labels_root = os.path.join(sly.TaskPaths.DATA_DIR,
                               'ParisArtDecoFacadesDataset-master/labels')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Facade-part ids -> class names and display colors.
    number_class = {2: 'Door', 3: 'Shop', 4: 'Balcony', 5: 'Window',
                    6: 'Wall', 7: 'Sky', 8: 'Roof', 1: 'Unknown'}
    pixel_color = {2: (255, 255, 0), 3: (0, 128, 0), 4: (0, 0, 255),
                   5: (128, 255, 0), 6: (255, 0, 0), 7: (0, 255, 255),
                   8: (211, 211, 211), 1: (0, 0, 0)}
    for ds_name, sample_names in read_datasets(labels_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample in sample_names:
            image_path = os.path.join(images_root, sample + '.png')
            label_path = os.path.join(labels_root, sample + '.txt')
            # Import only samples with both image and label file present.
            if all(os.path.isfile(p) or p is None for p in (image_path, label_path)):
                ann = get_ann(image_path, label_path, number_class, pixel_color)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Import PASCAL-Part annotations (.mat files) with their JPEG images
    into a new Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.TASK_CONFIG_PATH)
    images_root = os.path.join(sly.TaskPaths.DATA_DIR, 'JPEGImages')
    parts_root = os.path.join(sly.TaskPaths.DATA_DIR, 'Annotations_Part')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    for ds_name, sample_names in read_datasets(parts_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample in sample_names:
            image_path = os.path.join(images_root, sample + '.jpg')
            mat_path = os.path.join(parts_root, sample + '.mat')
            # Import only samples with both image and .mat annotation present.
            if all(os.path.isfile(p) or p is None for p in (image_path, mat_path)):
                ann = get_ann(image_path, mat_path)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert():
    """Copy an uploaded Supervisely project into RESULTS_DIR under the
    configured result name.

    The upload may hold the project either directly in DATA_DIR or inside
    exactly one sub-directory; any other layout raises RuntimeError.
    """
    task_settings = json.load(open(sly.TaskPaths.TASK_CONFIG_PATH, 'r'))
    try:
        project = sly.Project(sly.TaskPaths.DATA_DIR, sly.OpenMode.READ)
    except FileNotFoundError:
        # DATA_DIR itself is not a project -- expect exactly one sub-dir.
        possible_projects = sly.fs.get_subdirs(sly.TaskPaths.DATA_DIR)
        if len(possible_projects) != 1:
            raise RuntimeError('Wrong input project structure, or multiple projects are passed.')
        project = sly.Project(os.path.join(sly.TaskPaths.DATA_DIR, possible_projects[0]),
                              sly.OpenMode.READ)
    # NOTE: the original ended with `except Exception as e: raise e`, a
    # no-op re-raise; other exceptions now simply propagate unchanged.
    sly.logger.info(
        'Project info: {} dataset(s), {} images(s).'.format(len(project.datasets), project.total_items))
    project.validate()
    project.copy_data(sly.TaskPaths.RESULTS_DIR, task_settings['res_names']['project'])
def load_annotations(self, ann_file):
    """Load image infos for every item listed in *ann_file*, in parallel.

    :param ann_file: text file with one annotation entry per line.
    :return: list of per-image info objects produced by ``self._load_ann``.
    """
    self.project = sly.Project(self.img_prefix, sly.OpenMode.READ)
    anno_list = mmcv.list_from_file(ann_file)
    print('data loading ...')
    # 16 worker processes.  (The original pre-assigned `img_infos = list()`
    # twice; both assignments were dead -- immediately overwritten here.)
    img_infos = mmcv.track_parallel_progress(self._load_ann, anno_list, 16)
    print('data loading finished !!!')
    return img_infos
def main():
    """Run the DTL graph over every input project and report progress.

    Builds the Net from the task graph, then streams each (image, annotation)
    pair through it; per-image failures are logged and skipped.
    """
    task_helpers.task_verification(check_in_graph)
    logger.info('DTL started')
    helper = DtlHelper()
    try:
        # Graph initialization: build the net, persist the result meta and
        # precompute which dataset names collide across input projects.
        net = Net(helper.graph, helper.in_project_metas, helper.paths.results_dir)
        helper.save_res_meta(net.get_result_project_meta())
        datasets_conflict_map = calculate_datasets_conflict_map(helper)
    except Exception as e:
        logger.error("Error occurred on DTL-graph initialization step!")
        raise e
    # is_archive = net.is_archive()
    results_counter = 0
    for pr_name, pr_dir in helper.in_project_dirs.items():
        project = sly.Project(directory=pr_dir, mode=sly.OpenMode.READ)
        progress = progress_counter.progress_counter_dtl(
            pr_name, project.total_items)
        for dataset in project:
            for item_name in dataset:
                try:
                    img_desc = ImageDescriptor(
                        make_legacy_project_item(project, dataset, item_name),
                        datasets_conflict_map[project.name][dataset.name])
                    ann = json_utils.json_load(dataset.get_ann_path(item_name))
                    data_el = (img_desc, ann)
                    # net.start yields one result per produced output image.
                    export_output_generator = net.start(data_el)
                    for res_export in export_output_generator:
                        logger.trace("image processed", extra={
                            'img_name': res_export[0][0].get_img_name()
                        })
                        results_counter += 1
                except Exception as e:
                    # Per-image failures do not abort the whole run.
                    extra = {
                        'project_name': project.name,
                        'ds_name': dataset.name,
                        'image_name': item_name,
                        'exc_str': str(e),
                    }
                    logger.warn(
                        'Image was skipped because some error occurred',
                        exc_info=True, extra=extra)
                progress.iter_done_report()
    logger.info('DTL finished', extra={
        'event_type': EventType.DTL_APPLIED,
        'new_proj_size': results_counter
    })
def __init__(self, project_dir, data_prefix, pipeline, test_mode=False):
    """Wrap a local Supervisely project as a dataset for one split.

    *data_prefix* names the split to use from splits.json.
    """
    self.gt_labels = sly.json.load_json_file(
        os.path.join(project_dir, "gt_labels.json"))
    # Class names ordered by their numeric label value.
    Supervisely.CLASSES = sorted(self.gt_labels, key=self.gt_labels.get)
    self.split_name = data_prefix
    splits = sly.json.load_json_file(os.path.join(project_dir, "splits.json"))
    self.items = splits[self.split_name]
    self.project_fs = sly.Project(project_dir, sly.OpenMode.READ)
    super(Supervisely, self).__init__(data_prefix=self.split_name,
                                      pipeline=pipeline,
                                      test_mode=test_mode)
def __init__(self, config, output_folder, net):
    """Initialize the save layer and create its output project on disk."""
    Layer.__init__(self, config)
    self.net = net
    self.output_folder = output_folder
    self.out_project = sly.Project(directory=output_folder, mode=sly.OpenMode.CREATE)
    # Warn about settings that are still accepted but no longer used.
    for param in ('images', 'annotations'):
        if param in self.settings:
            sly.logger.warning(
                "'save' layer: '{}' parameter is deprecated. Skipped.".format(param))
def convert():
    """Convert the source datasets into a new Supervisely project in
    RESULTS_DIR, accumulating discovered object classes along the way.

    Failed samples are logged and counted as skipped; totals are reported
    at the end.
    """
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR,
                     settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Classes are collected incrementally by generate_annotation below.
    classes_collection = sly.ObjClassCollection()
    instance_classes, id_to_class, class_to_color = read_colors()
    src_datasets = read_datasets()
    skipped_count = 0
    samples_count = 0
    for ds_name, sample_names in src_datasets.items():
        dataset = out_project.create_dataset(ds_name)
        dataset_progress = sly.Progress('Dataset {!r}'.format(ds_name),
                                        len(sample_names))
        for name in sample_names:
            try:
                src_img_path = osp.join(images_dir(ds_name), name)
                inst_path = osp.join(instances_dir(ds_name), name)
                # generate_annotation returns a possibly-extended class
                # collection, so rebind it each iteration.
                ann, classes_collection = generate_annotation(
                    src_img_path, inst_path, id_to_class, class_to_color,
                    classes_collection)
                item_name = osp.splitext(name)[0]
                dataset.add_item_file(item_name, src_img_path, ann)
                samples_count += 1
            except Exception as e:
                # Best-effort import: log, count and continue.
                exc_str = str(e)
                sly.logger.warn(
                    'Input sample skipped due to error: {}'.format(exc_str),
                    exc_info=True,
                    extra={
                        'exc_str': exc_str,
                        'dataset_name': ds_name,
                        'image_name': name
                    })
                skipped_count += 1
            dataset_progress.iter_done_report()
    sly.logger.info('Processed.', extra={
        'samples': samples_count,
        'skipped': skipped_count
    })
    out_meta = sly.ProjectMeta(obj_classes=classes_collection)
    out_project.set_meta(out_meta)
def download_project(self, parent_dir, name, datasets_whitelist=None):
    """Download project *name* from the server into ``parent_dir/name``.

    When *datasets_whitelist* is given, only datasets whose names appear
    in it are fetched; otherwise every dataset is downloaded.
    """
    self.logger.info("DOWNLOAD_PROJECT", extra={'title': name})
    project_fs = sly.Project(os.path.join(parent_dir, name), sly.OpenMode.CREATE)
    project_id = self.public_api.project.get_info_by_name(self.workspace_id, name).id
    meta_json = self.public_api.project.get_meta(project_id)
    project_fs.set_meta(sly.ProjectMeta.from_json(meta_json))
    for dataset_info in self.public_api.dataset.get_list(project_id):
        # Skip datasets filtered out by the whitelist.
        if datasets_whitelist is not None and dataset_info.name not in datasets_whitelist:
            continue
        dataset = project_fs.create_dataset(dataset_info.name)
        self.download_dataset(dataset, dataset_info.id)
def test_from_supervisely():
    """Round-trip a tiny one-image Supervisely project through
    ``hub.Dataset.from_supervisely`` / ``to_supervisely``, once with a
    rectangle label and once with a polygon label.
    """
    import supervisely_lib as sly
    data_path = "./data/test_supervisely/from_to"
    # Start from a clean slate on every run.
    if os.path.exists(data_path):
        shutil.rmtree(data_path)
    project_name = "pixel_project"
    project_path = os.path.join(data_path, project_name)
    project = sly.Project(project_path, sly.OpenMode.CREATE)
    init_meta = project.meta
    # NOTE(review): pokes the private _project_type attribute to force an
    # images-type project -- relies on SDK internals.
    project.meta._project_type = "images"
    project_ds = project.create_dataset(project_name)
    # Single all-ones 30x30 RGB image as the only item.
    img = np.ones((30, 30, 3))
    project_ds.add_item_np("pixel.jpeg", img)
    item_path, item_ann_path = project_ds.get_item_paths("pixel.jpeg")
    ann = sly.Annotation.load_json_file(item_ann_path, project.meta)
    # --- variant 1: rectangle ("_bbox") label ---
    bbox_class = sly.ObjClass(name="_bbox", geometry_type=sly.Rectangle)
    meta_with_bboxes = project.meta.add_obj_classes([bbox_class])
    bbox_label = sly.Label(
        geometry=sly.Rectangle(0, 0, 10, 10),
        obj_class=meta_with_bboxes.obj_classes.get("_bbox"),
    )
    ann_with_bboxes = ann.add_labels([bbox_label])
    project_ds.set_ann("pixel.jpeg", ann_with_bboxes)
    project.set_meta(meta_with_bboxes)
    trans = hub.Dataset.from_supervisely(project)
    dataset = trans.store(os.path.join(data_path, "pixel_dataset_bbox"))
    project_back = dataset.to_supervisely(
        os.path.join(data_path, "pixel_project_bbox_back"))
    # --- variant 2: polygon ("_poly") label, after resetting the meta ---
    project.set_meta(init_meta)
    poly_class = sly.ObjClass(name="_poly", geometry_type=sly.Polygon)
    meta_with_poly = project.meta.add_obj_classes([poly_class])
    points = [[0, 0], [0, 10], [10, 0], [10, 10]]
    point_loc_points = [
        sly.geometry.point_location.PointLocation(*point) for point in points
    ]
    poly_label = sly.Label(
        geometry=sly.Polygon(exterior=point_loc_points, interior=[]),
        obj_class=meta_with_poly.obj_classes.get("_poly"),
    )
    ann_with_polys = ann.add_labels([poly_label])
    project_ds.set_ann("pixel.jpeg", ann_with_polys)
    project.set_meta(meta_with_poly)
    trans = hub.Dataset.from_supervisely(project)
    dataset = trans.store(os.path.join(data_path, "pixel_dataset_poly"))
    project_back = dataset.to_supervisely(
        os.path.join(data_path, "pixel_project_poly_back"))
def download_data_sources(self, only_meta=False):
    """Fetch every project referenced by the task graph.

    With ``only_meta=True`` only each project's meta is materialized
    locally; otherwise the listed datasets (or all of them for "*") are
    downloaded in full.
    """
    data_sources = _get_data_sources(self.info['graph'])
    for project_name, datasets in data_sources.items():
        project_id = self.public_api.project.get_info_by_name(
            self.data_mgr.workspace_id, project_name).id
        if only_meta is True:
            meta_json = self.public_api.project.get_meta(project_id)
            project = sly.Project(
                os.path.join(self.dir_data, project_name), sly.OpenMode.CREATE)
            project.set_meta(sly.ProjectMeta.from_json(meta_json))
        else:
            # "*" means every dataset; otherwise restrict to the listed ones.
            datasets_to_download = None if datasets == "*" else datasets
            self.data_mgr.download_project(self.dir_data, project_name,
                                           datasets_to_download)
def convert():
    """Import the Penn-Fudan pedestrian dataset (PNG images + PedMasks)
    into a new Supervisely project in RESULTS_DIR."""
    settings = load_json_file(sly.TaskPaths.SETTINGS_PATH)
    images_root = os.path.join(sly.TaskPaths.DATA_DIR, 'PennFudanPed/PNGImages')
    masks_root = os.path.join(sly.TaskPaths.DATA_DIR, 'PennFudanPed/PedMasks')
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Mask pixel values -> generic class names and display colors.
    number_class = {10: 'background', 1: 'object1', 2: 'object2',
                    3: 'object3', 4: 'object4', 5: 'object5',
                    6: 'object6', 7: 'object7', 8: 'object8'}
    pixel_color = {10: (0, 0, 0), 1: (255, 255, 0), 2: (255, 0, 255),
                   3: (0, 255, 255), 4: (0, 255, 0), 5: (255, 0, 0),
                   6: (0, 0, 255), 7: (127, 0, 217), 8: (248, 248, 248)}
    for ds_name, sample_names in read_datasets(masks_root).items():
        dataset = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample in sample_names:
            image_path = os.path.join(images_root, sample + '.png')
            mask_path = os.path.join(masks_root, sample + '_mask' + '.png')
            # Import only samples with both image and mask present.
            if all(os.path.isfile(p) or p is None for p in (image_path, mask_path)):
                ann = get_ann(image_path, mask_path, number_class, pixel_color)
                dataset.add_item_file(sample, image_path, ann=ann)
            progress.iter_done_report()
    out_project.set_meta(sly.ProjectMeta(obj_classes=classes_dict))
def convert(self):
    """Convert Pascal VOC style data into a new Supervisely project in
    RESULTS_DIR.

    Image base names must be unique; samples missing a mask are skipped
    with a warning, and per-sample failures are logged and skipped.

    :raises RuntimeError: when two images share the same base name.
    """
    out_project = sly.Project(
        os.path.join(sly.TaskPaths.RESULTS_DIR, self.settings['res_names']['project']),
        sly.OpenMode.CREATE)
    # Index images by base name so samples can be resolved regardless of
    # their extension; duplicates would make the lookup ambiguous.
    images_filenames = dict()
    for image_path in sly.fs.list_files(self.imgs_dir):
        image_name_noext = sly.fs.get_file_name(image_path)
        if image_name_noext in images_filenames:
            raise RuntimeError('Multiple image with the same base name {!r} exist.'.format(image_name_noext))
        images_filenames[image_name_noext] = image_path
    for ds_name, sample_names in self.src_datasets.items():
        ds = out_project.create_dataset(ds_name)
        progress = sly.Progress('Dataset: {!r}'.format(ds_name), len(sample_names))
        for sample_name in sample_names:
            src_img_path = images_filenames[sample_name]
            src_img_filename = os.path.basename(src_img_path)
            segm_path = os.path.join(self.segm_dir, sample_name + MASKS_EXTENSION)
            # Instance masks are optional; None entries pass the check below.
            inst_path = None
            if self.with_instances:
                inst_path = os.path.join(self.inst_dir, sample_name + MASKS_EXTENSION)
            if all((x is None) or os.path.isfile(x) for x in [src_img_path, segm_path, inst_path]):
                try:
                    ann = self._get_ann(src_img_path, segm_path, inst_path)
                    ds.add_item_file(src_img_filename, src_img_path, ann=ann)
                except Exception as e:
                    # Best-effort import: log the failure and continue.
                    exc_str = str(e)
                    sly.logger.warn('Input sample skipped due to error: {}'.format(exc_str),
                                    exc_info=True,
                                    extra={
                                        'exc_str': exc_str,
                                        'dataset_name': ds_name,
                                        'image': src_img_path,
                                    })
            else:
                sly.logger.warning("Processing '{}' skipped because no corresponding mask found."
                                   .format(src_img_filename))
            progress.iter_done_report()
        sly.logger.info('Dataset "{}" samples processing is done.'.format(ds_name), extra={})
    out_meta = sly.ProjectMeta(obj_classes=self.obj_classes)
    out_project.set_meta(out_meta)
    sly.logger.info('Pascal VOC samples processing is done.', extra={})
def process_random_object_image(api):
    """Pick a random image from a random dataset of the local project,
    upload it as the remote preview file and return
    ``(file_info, image, annotation)``."""
    project_fs = sly.Project(g.project_dir, sly.OpenMode.READ)
    dataset = choice(list(project_fs.datasets))
    img_dir = os.path.join(dataset.directory, "img")
    ann_dir = os.path.join(dataset.directory, "ann")
    candidates = [os.path.join(img_dir, img_name) for img_name in os.listdir(img_dir)]
    image_path = choice(candidates)
    # Annotation file is the image file name (with extension) plus ".json".
    ann_path = os.path.join(ann_dir, f"{get_file_name_with_ext(image_path)}.json")
    ann_json = load_json_file(ann_path)
    img = sly.image.read(image_path)
    ann = sly.Annotation.from_json(ann_json, g.project_meta)
    img_file_info = api.file.upload(g.team_id, image_path, remote_preview_path)
    return img_file_info, img, ann
def calculate_datasets_conflict_map(helper):
    """Return ``{project_name: {dataset_name: bool}}`` where the flag marks
    a dataset name that occurs in more than one input project."""
    # First pass: record which projects contain each dataset name.
    dataset_to_projects = {}
    for _, pr_dir in helper.in_project_dirs.items():
        project = sly.Project(directory=pr_dir, mode=sly.OpenMode.READ)
        for dataset in project:
            dataset_to_projects.setdefault(dataset.name, []).append(project.name)
    # Second pass: a dataset conflicts when it is shared by 2+ projects.
    datasets_conflict_map = {}
    for dataset_name, projects_names_list in dataset_to_projects.items():
        is_shared = len(projects_names_list) > 1
        for project_name in projects_names_list:
            datasets_conflict_map.setdefault(project_name, {})[dataset_name] = is_shared
    return datasets_conflict_map