def get_dataset_specs(args, model_specs):
    """Build the metadata dict used by the segmentation pipeline for one dataset.

    Seeds ``meta`` from a cached ``issegm/data/<dataset>/meta.pkl`` when
    present, then fills in label mappings, a color map and size/workspace
    settings. This variant only supports 'cityscapes'.

    Args:
        args: command-line namespace (unused here; kept for a uniform
            signature with the sibling variants).
        model_specs: dict with at least a 'dataset' key naming the dataset.

    Returns:
        dict with keys 'label_2_id', 'id_2_label', 'valid_labels', 'cmap',
        'ident_size', 'max_shape', 'cache_images', 'mx_workspace' (plus
        whatever was loaded from meta.pkl).

    Raises:
        NotImplementedError: for any dataset other than 'cityscapes'.
    """
    dataset = model_specs['dataset']
    meta = {}
    meta_path = osp.join('issegm/data', dataset, 'meta.pkl')
    if osp.isfile(meta_path):
        with open(meta_path) as f:
            meta = cPickle.load(f)
    label_2_id = None
    id_2_label = None
    ident_size = False
    cache_images = False
    mx_workspace = 1650
    if dataset == 'cityscapes':
        sys.path.insert(0, 'data/cityscapesScripts/cityscapesscripts/helpers')
        from labels import id2label, trainId2label
        # Raw label id -> trainId; ids skipped below keep the ignore value 255.
        label_2_id = 255 * np.ones((256, ))
        for l in id2label:
            if l in (-1, 255):
                continue
            label_2_id[l] = id2label[l].trainId
        # Inverse mapping: trainId -> raw label id.
        id_2_label = np.array(
            [trainId2label[_].id for _ in trainId2label if _ not in (-1, 255)])
        valid_labels = sorted(set(id_2_label.ravel()))
        # Per-label RGB colors for visualization.
        cmap = np.zeros((256, 3), dtype=np.uint8)
        for i in id2label.keys():
            cmap[i] = id2label[i].color
        ident_size = True
        max_shape = np.array((1024, 2048))
        mx_workspace = 8000
    else:
        # BUGFIX: this variant previously had no else-branch, so an unknown
        # dataset fell through with valid_labels/cmap/max_shape unbound and
        # raised NameError below. Fail fast like the sibling variants instead.
        raise NotImplementedError('Unknow dataset: {}'.format(dataset))
    meta['label_2_id'] = label_2_id
    meta['id_2_label'] = id_2_label
    meta['valid_labels'] = valid_labels
    meta['cmap'] = cmap
    meta['ident_size'] = ident_size
    # A max_shape already loaded from meta.pkl takes precedence.
    meta['max_shape'] = meta.get('max_shape', max_shape)
    meta['cache_images'] = cache_images
    meta['mx_workspace'] = mx_workspace
    return meta
def get_dataset_specs(args, model_specs):
    """Build the metadata dict for a single-domain run (cityscapes/gta/synthia).

    Seeds ``meta`` from a cached ``issegm/data/<dataset>/meta.pkl`` when
    present, then fills in label mappings, a color map, the maximum image
    shape and caching/workspace settings for the dataset named by
    ``model_specs['dataset']``.

    Args:
        args: command-line namespace; reads ``phase``, ``split`` and
            ``cache_images``.
        model_specs: dict with at least a 'dataset' key.

    Returns:
        dict with keys 'label_2_id', 'id_2_label', 'valid_labels', 'cmap',
        'ident_size', 'max_shape', 'cache_images', 'mx_workspace' (plus
        whatever was loaded from meta.pkl).

    Raises:
        NotImplementedError: for an unrecognized dataset name.
    """

    def _label_maps(id2label, trainId2label):
        # Shared by all branches (the same code was previously duplicated
        # three times): raw-id -> trainId (unmapped ids stay at the ignore
        # value 255), trainId -> raw-id, sorted valid raw ids, and an RGB cmap.
        label_2_id = 255 * np.ones((256, ))
        for l in id2label:
            if l in (-1, 255):
                continue
            label_2_id[l] = id2label[l].trainId
        id_2_label = np.array(
            [trainId2label[_].id for _ in trainId2label if _ not in (-1, 255)])
        valid_labels = sorted(set(id_2_label.ravel()))
        cmap = np.zeros((256, 3), dtype=np.uint8)
        for i in id2label.keys():
            cmap[i] = id2label[i].color
        return label_2_id, id_2_label, valid_labels, cmap

    dataset = model_specs['dataset']
    meta = {}
    meta_path = osp.join('issegm/data', dataset, 'meta.pkl')
    if osp.isfile(meta_path):
        with open(meta_path) as f:
            meta = cPickle.load(f)
    # Defaults; dataset branches override as needed.
    label_2_id = None
    id_2_label = None
    ident_size = False
    cmap = None
    cmap_path = 'data/shared/cmap.pkl'
    cache_images = args.phase == 'train'
    mx_workspace = 1650
    if dataset == 'cityscapes':
        sys.path.insert(0, 'data/cityscapesscripts/helpers')
        from labels import id2label, trainId2label
        label_2_id, id_2_label, valid_labels, cmap = _label_maps(
            id2label, trainId2label)
        ident_size = True
        max_shape = np.array((1024, 2048))
        if args.split in ('train+', 'trainval+'):
            cache_images = False
        if args.phase in ('val', ):
            mx_workspace = 8192
    elif dataset == 'gta':
        # GTA uses the cityscapes label tables; only the frame size differs.
        sys.path.insert(0, 'data/cityscapesscripts/helpers')
        from labels import id2label, trainId2label
        label_2_id, id_2_label, valid_labels, cmap = _label_maps(
            id2label, trainId2label)
        ident_size = True
        max_shape = np.array((1052, 1914))
        if args.phase in ('val', ):
            mx_workspace = 8192
    elif dataset == 'synthia':
        sys.path.insert(0, 'data/cityscapesscripts/helpers')
        from labels_synthia import id2label, trainId2label
        label_2_id, id_2_label, valid_labels, cmap = _label_maps(
            id2label, trainId2label)
        ident_size = True
        max_shape = np.array((760, 1280))
        if args.phase in ('val', ):
            mx_workspace = 8192
    else:
        raise NotImplementedError('Unknow dataset: {}'.format(dataset))
    # Fall back to a shared on-disk color map when none was built above.
    if cmap is None and cmap_path is not None:
        if osp.isfile(cmap_path):
            with open(cmap_path) as f:
                cmap = cPickle.load(f)
    meta['label_2_id'] = label_2_id
    meta['id_2_label'] = id_2_label
    meta['valid_labels'] = valid_labels
    meta['cmap'] = cmap
    meta['ident_size'] = ident_size
    # A max_shape already loaded from meta.pkl takes precedence.
    meta['max_shape'] = meta.get('max_shape', max_shape)
    meta['cache_images'] = args.cache_images if args.cache_images is not None else cache_images
    meta['mx_workspace'] = mx_workspace
    return meta
def get_dataset_specs(args, model_specs):
    """Assemble per-dataset metadata for the segmentation pipeline.

    Seeds ``meta`` from a cached ``issegm/data/<dataset>/meta.pkl`` when
    present, then fills in label mappings, a color map, maximum image shape
    and caching/workspace settings for the dataset named by
    ``model_specs['dataset']``.

    Args:
        args: command-line namespace; reads ``phase``, ``split``,
            ``cache_images`` and (for coco) ``data_root``.
        model_specs: dict with 'dataset' and optionally 'classes'/'balanced'.

    Returns:
        dict with keys 'label_2_id', 'id_2_label', 'valid_labels', 'cmap',
        'ident_size', 'max_shape', 'cache_images', 'mx_workspace' (plus
        whatever was loaded from meta.pkl).

    Raises:
        NotImplementedError: for an unrecognized dataset name.
    """
    dataset = model_specs['dataset']
    meta = {}
    meta_path = osp.join('issegm/data', dataset, 'meta.pkl')
    if osp.isfile(meta_path):
        with open(meta_path) as f:
            meta = cPickle.load(f)
    # Defaults; individual dataset branches override as needed.
    label_2_id = None
    id_2_label = None
    cmap = None
    cmap_path = 'data/shared/cmap.pkl'
    ident_size = False
    cache_images = args.phase == 'train'
    mx_workspace = 1650
    if dataset == 'ade20k':
        num_classes = model_specs.get('classes', 150)
        # Shift raw ids down by one (label_2_id[k] == k - 1) and send raw 0
        # to the ignore value 255.
        label_2_id = np.arange(-1, 150)
        label_2_id[0] = 255
        # Inverse: trainId k -> raw id k + 1, with trainId 255 -> raw 0.
        id_2_label = np.arange(1, 256 + 1)
        id_2_label[255] = 0
        valid_labels = range(1, 150 + 1)
        #
        if args.split == 'test':
            cmap_path = None
        #
        max_shape = np.array((2100, 2100))
        # Merge per-split class lists; requires meta.pkl to have provided
        # 'image_classes' (meta starts empty otherwise).
        if model_specs.get('balanced', False) and args.split == 'trainval':
            meta['image_classes']['trainval'] = meta['image_classes'][
                'train'] + meta['image_classes']['val']
    elif dataset == 'cityscapes':
        sys.path.insert(0, 'data/cityscapesScripts/cityscapesscripts/helpers')
        from labels import id2label, trainId2label
        #
        num_classes = model_specs.get('classes', 19)
        # Raw label id -> trainId; ids skipped below keep the ignore value 255.
        label_2_id = 255 * np.ones((256, ))
        for l in id2label:
            if l in (-1, 255):
                continue
            label_2_id[l] = id2label[l].trainId
        # Inverse mapping: trainId -> raw label id.
        id_2_label = np.array(
            [trainId2label[_].id for _ in trainId2label if _ not in (-1, 255)])
        valid_labels = sorted(set(id_2_label.ravel()))
        # Per-label RGB colors for visualization.
        cmap = np.zeros((256, 3), dtype=np.uint8)
        for i in id2label.keys():
            cmap[i] = id2label[i].color
        #
        ident_size = True
        #
        max_shape = np.array((1024, 2048))
        #
        if args.split in ('train+', 'trainval+'):
            cache_images = False
        #
        if args.phase in ('val', ):
            mx_workspace = 8000
    elif dataset == 'coco':
        sys.path.insert(0, osp.join(args.data_root, 'PythonAPI'))
        from pycocotools.coco import COCO
        coco = COCO(
            osp.join(args.data_root, 'annotations',
                     'instances_minival2014.json'))
        # COCO category ids are sparse; 0 is prepended as background.
        id_2_label = np.array([0] + sorted(coco.getCatIds()))
        assert len(id_2_label) == 81
        valid_labels = id_2_label.tolist()
        num_classes = model_specs.get('classes', 81)
        label_2_id = 255 * np.ones((256, ))
        for i, l in enumerate(id_2_label):
            label_2_id[l] = i
        #
        max_shape = np.array((640, 640))
    elif dataset == 'pascal-context':
        num_classes = model_specs.get('classes', 60)
        valid_labels = range(num_classes)
        #
        max_shape = np.array((500, 500))
    elif dataset == 'voc':
        num_classes = model_specs.get('classes', 21)
        valid_labels = range(num_classes)
        #
        if args.split in ('train++', ):
            max_shape = np.array((640, 640))
        else:
            max_shape = np.array((500, 500))
    elif dataset == 'segnet':
        num_classes = 13
        # NOTE(review): label_2_id is initialized to all-255 (ignore) but never
        # filled in for this dataset — presumably the labels are already train
        # ids or are remapped elsewhere; confirm against the data loader.
        label_2_id = 255 * np.ones((256, ))
        valid_labels = range(num_classes)
        #
        ident_size = True
        #
        max_shape = np.array((720, 960))
        #
        if args.split in ('train+', 'trainval+'):
            cache_images = False
        #
        if args.phase in ('val', ):
            mx_workspace = 8000
    else:
        raise NotImplementedError('Unknow dataset: {}'.format(dataset))
    # Fall back to a shared on-disk color map when none was built above.
    if cmap is None and cmap_path is not None:
        if osp.isfile(cmap_path):
            with open(cmap_path) as f:
                cmap = cPickle.load(f)
    meta['label_2_id'] = label_2_id
    meta['id_2_label'] = id_2_label
    meta['valid_labels'] = valid_labels
    meta['cmap'] = cmap
    meta['ident_size'] = ident_size
    # A max_shape already loaded from meta.pkl takes precedence.
    meta['max_shape'] = meta.get('max_shape', max_shape)
    meta[
        'cache_images'] = args.cache_images if args.cache_images is not None else cache_images
    meta['mx_workspace'] = mx_workspace
    return meta
def get_dataset_specs_tgt(args, model_specs):
    """Assemble source/target-domain metadata for domain-adaptation runs.

    Supports the GTA->Cityscapes and SYNTHIA->Cityscapes pairs, producing
    separate label mappings and max shapes for the source and target domains.
    During training it also loads the mined class-id arrays saved under
    ``args.mine_id_address``.

    Args:
        args: command-line namespace; reads ``dataset``, ``dataset_tgt``,
            ``mine_port``, ``mine_thresh``, ``mine_id_address``, ``phase``,
            ``split``, ``gpus`` and ``cache_images``.
        model_specs: unused in this function (kept for a uniform signature).

    Returns:
        dict with mining settings ('gpus', 'mine_port', 'mine_id',
        'mine_id_priority', 'mine_th') and per-domain keys
        ('label_2_id_src/tgt', 'id_2_label_src/tgt', 'valid_labels_src/tgt',
        'max_shape_src/tgt') plus 'cmap', 'ident_size', 'cache_images' and
        'mx_workspace'.

    Raises:
        NotImplementedError: for an unsupported dataset pair.
    """
    dataset = args.dataset
    dataset_tgt = args.dataset_tgt
    meta = {}
    mine_id = None
    mine_id_priority = None
    mine_port = args.mine_port
    mine_th = args.mine_thresh
    cmap_path = 'data/shared/cmap.pkl'
    cache_images = args.phase == 'train'
    mx_workspace = 1650
    sys.path.insert(0, 'data/cityscapesscripts/helpers')
    if args.phase == 'train':
        # Mined ids from a previous self-training round; overrides the
        # scalar args.mine_thresh with a per-class zero vector.
        mine_id = np.load(args.mine_id_address + '/mine_id.npy')
        mine_id_priority = np.load(args.mine_id_address +
                                   '/mine_id_priority.npy')
        mine_th = np.zeros(len(mine_id))  # trainId starts from 0
    if dataset == 'gta' and dataset_tgt == 'cityscapes':
        from labels import id2label, trainId2label
        #
        # Raw label id -> trainId for the target; unmapped ids stay 255.
        label_2_id_tgt = 255 * np.ones((256, ))
        for l in id2label:
            if l in (-1, 255):
                continue
            label_2_id_tgt[l] = id2label[l].trainId
        id_2_label_tgt = np.array(
            [trainId2label[_].id for _ in trainId2label if _ not in (-1, 255)])
        valid_labels_tgt = sorted(set(id_2_label_tgt.ravel()))
        # GTA shares the cityscapes label set, so source == target mappings.
        id_2_label_src = id_2_label_tgt
        label_2_id_src = label_2_id_tgt
        valid_labels_src = valid_labels_tgt
        #
        # Per-label RGB colors for visualization.
        cmap = np.zeros((256, 3), dtype=np.uint8)
        for i in id2label.keys():
            cmap[i] = id2label[i].color
        #
        ident_size = True
        #
        # max_shape_src = np.array((1052, 1914))
        max_shape_src = np.array((1024, 2048))
        max_shape_tgt = np.array((1024, 2048))
        #
        if args.split in ('train+', 'trainval+'):
            cache_images = False
        #
        if args.phase in ('val', ):
            mx_workspace = 8192
    elif dataset == 'synthia' and dataset_tgt == 'cityscapes':
        # SYNTHIA and cityscapes use different raw label sets, so source and
        # target get separate mapping tables.
        from labels_cityscapes_synthia import id2label as id2label_tgt
        from labels_cityscapes_synthia import trainId2label as trainId2label_tgt
        from labels_synthia import id2label as id2label_src
        label_2_id_src = 255 * np.ones((256, ))
        for l in id2label_src:
            if l in (-1, 255):
                continue
            label_2_id_src[l] = id2label_src[l].trainId
        label_2_id_tgt = 255 * np.ones((256, ))
        for l in id2label_tgt:
            if l in (-1, 255):
                continue
            label_2_id_tgt[l] = id2label_tgt[l].trainId
        id_2_label_tgt = np.array([
            trainId2label_tgt[_].id for _ in trainId2label_tgt if _ not in
            (-1, 255)
        ])
        valid_labels_tgt = sorted(set(id_2_label_tgt.ravel()))
        # No trainId -> raw-id inverse is defined for the SYNTHIA source.
        id_2_label_src = None
        valid_labels_src = None
        #
        cmap = np.zeros((256, 3), dtype=np.uint8)
        for i in id2label_tgt.keys():
            cmap[i] = id2label_tgt[i].color
        #
        ident_size = True
        #
        max_shape_src = np.array((760, 1280))
        max_shape_tgt = np.array((1024, 2048))
        #
        if args.split in ('train+', 'trainval+'):
            cache_images = False
        #
        if args.phase in ('val', ):
            mx_workspace = 8192
    else:
        raise NotImplementedError('Unknow dataset: {}'.format(args.dataset))
    # Fall back to a shared on-disk color map when none was built above.
    if cmap is None and cmap_path is not None:
        if osp.isfile(cmap_path):
            with open(cmap_path) as f:
                cmap = cPickle.load(f)
    meta['gpus'] = args.gpus
    meta['mine_port'] = mine_port
    meta['mine_id'] = mine_id
    meta['mine_id_priority'] = mine_id_priority
    meta['mine_th'] = mine_th
    meta['label_2_id_tgt'] = label_2_id_tgt
    meta['id_2_label_tgt'] = id_2_label_tgt
    meta['valid_labels_tgt'] = valid_labels_tgt
    meta['label_2_id_src'] = label_2_id_src
    meta['id_2_label_src'] = id_2_label_src
    meta['valid_labels_src'] = valid_labels_src
    meta['cmap'] = cmap
    meta['ident_size'] = ident_size
    # Shapes already present in meta take precedence over the branch values.
    meta['max_shape_src'] = meta.get('max_shape_src', max_shape_src)
    meta['max_shape_tgt'] = meta.get('max_shape_tgt', max_shape_tgt)
    meta[
        'cache_images'] = args.cache_images if args.cache_images is not None else cache_images
    meta['mx_workspace'] = mx_workspace
    return meta
def get_dataset_specs(args, model_specs):
    """Collect per-dataset metadata (label mappings, color map, shapes, caching).

    Optionally seeds the result from ``issegm/data/<dataset>/meta.pkl``, then
    fills in the remaining keys for the dataset in ``model_specs['dataset']``.
    Raises NotImplementedError for an unrecognized dataset name.
    """
    dataset = model_specs['dataset']
    meta = {}
    meta_path = osp.join('issegm/data', dataset, 'meta.pkl')
    if osp.isfile(meta_path):
        with open(meta_path) as f:
            meta = cPickle.load(f)
    # Defaults; the dataset branches below override what they need.
    label_2_id, id_2_label, cmap = None, None, None
    cmap_path = 'data/shared/cmap.pkl'
    ident_size = False
    cache_images = args.phase == 'train'
    mx_workspace = 1650
    if dataset == 'ade20k':
        num_classes = model_specs.get('classes', 150)
        # Raw id k -> trainId k - 1, with raw 0 sent to the ignore value 255.
        label_2_id = np.arange(-1, 150)
        label_2_id[0] = 255
        # Inverse: trainId k -> raw id k + 1, trainId 255 -> raw 0.
        id_2_label = np.arange(1, 257)
        id_2_label[255] = 0
        valid_labels = range(1, 151)
        if args.split == 'test':
            cmap_path = None
        max_shape = np.array((2100, 2100))
        if model_specs.get('balanced', False) and args.split == 'trainval':
            image_classes = meta['image_classes']
            image_classes['trainval'] = image_classes['train'] + image_classes['val']
    elif dataset == 'cityscapes':
        sys.path.insert(0, 'data/cityscapesScripts/cityscapesscripts/helpers')
        from labels import id2label, trainId2label
        num_classes = model_specs.get('classes', 19)
        # Raw id -> trainId; everything not mapped below stays at 255 (ignore).
        label_2_id = 255 * np.ones((256,))
        for raw_id in id2label:
            if raw_id not in (-1, 255):
                label_2_id[raw_id] = id2label[raw_id].trainId
        id_2_label = np.array([trainId2label[t].id
                               for t in trainId2label if t not in (-1, 255)])
        valid_labels = sorted(set(id_2_label.ravel()))
        # Per-label RGB colors for visualization.
        cmap = np.zeros((256, 3), dtype=np.uint8)
        for raw_id, spec in id2label.items():
            cmap[raw_id] = spec.color
        ident_size = True
        max_shape = np.array((1024, 2048))
        if args.split in ('train+', 'trainval+'):
            cache_images = False
        if args.phase in ('val',):
            mx_workspace = 8000
    elif dataset == 'coco':
        sys.path.insert(0, osp.join(args.data_root, 'PythonAPI'))
        from pycocotools.coco import COCO
        coco = COCO(osp.join(args.data_root, 'annotations',
                             'instances_minival2014.json'))
        # Category ids are sparse; prepend 0 as the background label.
        id_2_label = np.array([0] + sorted(coco.getCatIds()))
        assert len(id_2_label) == 81
        valid_labels = id_2_label.tolist()
        num_classes = model_specs.get('classes', 81)
        label_2_id = 255 * np.ones((256,))
        for train_id, raw_id in enumerate(id_2_label):
            label_2_id[raw_id] = train_id
        max_shape = np.array((640, 640))
    elif dataset == 'pascal-context':
        num_classes = model_specs.get('classes', 60)
        valid_labels = range(num_classes)
        max_shape = np.array((500, 500))
    elif dataset == 'voc':
        num_classes = model_specs.get('classes', 21)
        valid_labels = range(num_classes)
        use_big = args.split in ('train++',)
        max_shape = np.array((640, 640)) if use_big else np.array((500, 500))
    else:
        raise NotImplementedError('Unknow dataset: {}'.format(dataset))
    # Fall back to a shared on-disk color map when none was built above.
    if cmap is None and cmap_path is not None and osp.isfile(cmap_path):
        with open(cmap_path) as f:
            cmap = cPickle.load(f)
    meta['label_2_id'] = label_2_id
    meta['id_2_label'] = id_2_label
    meta['valid_labels'] = valid_labels
    meta['cmap'] = cmap
    meta['ident_size'] = ident_size
    # A max_shape already present in meta (from meta.pkl) takes precedence.
    meta['max_shape'] = meta.get('max_shape', max_shape)
    meta['cache_images'] = cache_images if args.cache_images is None else args.cache_images
    meta['mx_workspace'] = mx_workspace
    return meta