def __init__(self, core_dataset_name='buildchange', src_version='v0', dst_version='v1', imageset='shanghai', subimage_size=1024, gap=512, num_processor=16):
    """Prepare the source/destination folder layout and a worker pool for splitting.

    Inputs are read from the ``src_version`` tree; split results are written
    under ``dst_version`` in a folder whose name encodes the subimage size.
    """
    self.core_dataset_name = core_dataset_name
    self.src_version = src_version
    self.dst_version = dst_version
    self.imageset = imageset
    self.subimage_size = subimage_size
    self.gap = gap

    # Input locations for the raw data of this imageset.
    self.image_path = './data/{}/{}/{}/images'.format(core_dataset_name, src_version, imageset)
    self.merged_shp_path = './data/{}/{}/{}/merged_shp'.format(core_dataset_name, src_version, imageset)
    self.geo_path = './data/{}/{}/{}/geo_info'.format(core_dataset_name, src_version, imageset)

    # Output locations; the destination folder name carries the subimage size.
    dst_imageset = "{}_{}".format(imageset, subimage_size)
    self.image_save_path = './data/{}/{}/{}/images'.format(core_dataset_name, dst_version, dst_imageset)
    wwtool.mkdir_or_exist(self.image_save_path)
    self.label_save_path = './data/{}/{}/{}/labels'.format(core_dataset_name, dst_version, dst_imageset)
    wwtool.mkdir_or_exist(self.label_save_path)

    self.shp_parser = wwtool.ShpParse()
    self.pool = Pool(num_processor)
def __init__(self, rgb_file, foot_shp_file, geo_info, pred_segmentation_file, side_shp_file, pixel_anno):
    """Load all per-image inputs: RGB image, footprint shapes, masks and side annotations.

    Fixes over the previous version:
      * ``except:`` narrowed to ``except Exception`` so KeyboardInterrupt/SystemExit
        are not swallowed while reading the side shapefile;
      * ``self.side_annotations`` is always defined, even when the shapefile
        cannot be opened (it used to be left missing, crashing later reads).
    """
    self.rgb_file = rgb_file
    self.foot_shp_file = foot_shp_file
    self.geo_info = geo_info
    self.pred_segmentation_file = pred_segmentation_file
    self.side_shp_file = side_shp_file
    self.pixel_anno = pixel_anno

    # Polygons of pixels annotated with category 255 are treated as ignore regions.
    mask_parser = wwtool.MaskParse()
    objects = mask_parser(pixel_anno, category=255)
    self.ignore_polygons = [obj['polygon'] for obj in objects]

    self.rgb_image = cv2.imread(rgb_file)

    # Footprint polygons (converted to pixel coordinates) and their floor counts.
    shp_parser = wwtool.ShpParse()
    objects = shp_parser(foot_shp_file, geo_info)
    # NOTE(review): this reads the geo_info path as a grayscale image
    # ("annotated by floor height" per the original comment) — confirm it
    # shouldn't be reading pixel_anno or another mask file instead.
    self.foot_mask = cv2.imread(geo_info, 0)
    self.foot_polygons = [obj['converted_polygon'] for obj in objects]
    self.floors = [obj['converted_property']['Floor'] for obj in objects]
    self.pred_segmentation = cv2.imread(pred_segmentation_file, 0)

    # Always define the attribute so downstream code can iterate it even when
    # the side shapefile is missing or unreadable (best-effort load).
    self.side_annotations = []
    try:
        side_shp = geopandas.read_file(side_shp_file, encoding='utf-8')
        self.side_annotations = [side_coor for _, side_coor in side_shp.iterrows()]
    except Exception:
        print("Can't open this side shp file: {}".format(side_shp_file))

    self.offset_and_floorheigh_list = []
def __init__(self, core_dataset_name='buildchange', src_version='v0', dst_version='v1', imageset='shanghai', subimage_size=1024, gap=512, multi_processing=False, num_processor=16, show=False):
    """Set up paths and parsers for splitting images and pixel annotations into subimages.

    Output directories are recreated from scratch on every run so stale
    results from a previous split cannot leak into the new one.
    """

    def _recreate_dir(path):
        # Ensure ``path`` exists and is empty: drop previous contents if any.
        if os.path.exists(path):
            shutil.rmtree(path)
        os.makedirs(path)

    self.core_dataset_name = core_dataset_name
    self.src_version = src_version
    self.dst_version = dst_version
    self.imageset = imageset
    self.subimage_size = subimage_size
    self.gap = gap

    # Input locations for the raw data of this imageset.
    self.image_path = './data/{}/{}/{}/images'.format(
        core_dataset_name, src_version, imageset)
    self.merged_shp_path = './data/{}/{}/{}/merged_shp'.format(
        core_dataset_name, src_version, imageset)
    self.geo_path = './data/{}/{}/{}/geo_info'.format(
        core_dataset_name, src_version, imageset)
    self.pixel_anno_path = './data/{}/{}/{}/anno_v2'.format(
        core_dataset_name, src_version, imageset)

    # Fresh output locations; the folder name carries the subimage size.
    self.image_save_path = './data/{}/{}/{}/images'.format(
        core_dataset_name, dst_version, "{}_{}".format(imageset, subimage_size))
    _recreate_dir(self.image_save_path)
    self.label_save_path = './data/{}/{}/{}/labels'.format(
        core_dataset_name, dst_version, "{}_{}".format(imageset, subimage_size))
    _recreate_dir(self.label_save_path)

    self.shp_parser = wwtool.ShpParse()
    self.mask_parser = wwtool.MaskParse()
    self.multi_processing = multi_processing
    self.pool = Pool(num_processor)
    self.show = show
def __init__(self, core_dataset_name='buildchange', src_version='v0', dst_version='v1', imageset='shanghai', sub_imageset_fold='arg', subimage_size=1024, gap=512, multi_processing=False, num_processor=16, show=False):
    """Resolve per-sub-fold input/output paths and set up the splitting helpers."""
    self.core_dataset_name = core_dataset_name
    self.src_version = src_version
    self.dst_version = dst_version
    self.imageset = imageset
    self.sub_imageset_fold = sub_imageset_fold
    self.subimage_size = subimage_size
    self.gap = gap

    # All source paths share the (dataset, version, imageset, fold) prefix.
    src_parts = (core_dataset_name, src_version, imageset, sub_imageset_fold)
    self.image_path = './data/{}/{}/{}/{}/images'.format(*src_parts)
    self.roof_shp_path = './data/{}/{}/{}/{}/roof_shp_4326'.format(*src_parts)
    self.geo_path = './data/{}/{}/{}/{}/geo_info'.format(*src_parts)
    self.pixel_anno_v2_path = './data/{}/{}/{}/{}/pixel_anno_v2'.format(*src_parts)

    # Destination paths mirror the same layout under ``dst_version``.
    dst_parts = (core_dataset_name, dst_version, imageset, sub_imageset_fold)
    self.image_save_path = './data/{}/{}/{}/{}/images'.format(*dst_parts)
    wwtool.mkdir_or_exist(self.image_save_path)
    self.label_save_path = './data/{}/{}/{}/{}/labels'.format(*dst_parts)
    wwtool.mkdir_or_exist(self.label_save_path)

    self.shp_parser = wwtool.ShpParse()
    self.multi_processing = multi_processing
    self.pool = Pool(num_processor)
    self.show = show
def __setstate__(self, state):
    """Restore instance attributes from the pickled ``state`` dict."""
    self.__dict__.update(state)


if __name__ == '__main__':
    # Full configuration, kept for reference when processing every city:
    # cities = ['shanghai', 'beijing', 'jinan', 'haerbin', 'chengdu']
    # sub_imageset_folds = {'beijing': ['arg', 'google', 'ms', 'tdt'],
    #                       'chengdu': ['arg', 'google', 'ms', 'tdt'],
    #                       'haerbin': ['arg', 'google', 'ms'],
    #                       'jinan': ['arg', 'google', 'ms', 'tdt'],
    #                       'shanghai': ['arg', 'google', 'ms', 'tdt', 'PHR2016', 'PHR2017']}
    cities = ['shanghai']
    sub_imageset_folds = {'shanghai': ['arg']}
    dst_version = 'v2'

    # NOTE(review): removed unused locals from the original script
    # (core_dataset_name, src_version, sub_img_w/sub_img_h and the throwaway
    # MaskParse/ShpParse instances) — none were referenced by the loop below.
    for city in cities:
        convert = Simpletxt2Json(dst_version=dst_version,
                                 city=city,
                                 sub_imageset_folds=sub_imageset_folds,
                                 multi_processing=True,
                                 num_processor=8)
        convert.core()
        print(f"finish processing {city}")