def verify(self):
    """Verify every image set and drop the ones reported as invalid."""
    logger = ProgressLogger(len(self.image_sets), self.verify)
    # iterate over a copy so removing invalid sets does not skip elements
    for image_set in list(self.image_sets):
        res = image_set.verify()
        if res is not None:
            self.image_sets.remove(res)
        logger.log_progress()
    self.write_augmented()
    self.write_stats()
def delete_image_sets_by_idx(self, idx):
    """Delete the image sets at the given indices, on disk and in memory."""
    to_delete = [self.image_sets[i] for i in idx]
    logger = ProgressLogger(len(to_delete), self.delete_image_sets_by_idx)
    for image_set in to_delete:
        shutil.rmtree(image_set.root)
        self.image_sets.remove(image_set)
        logger.log_progress()
    self.write_augmented()
    self.write_stats()
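# A minimal usage sketch (assumes `dataset` is an instance of the surrounding
# class): drop the image sets at positions 3 and 7 from disk and memory.
#
#   dataset.delete_image_sets_by_idx([3, 7])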
def clean_invalid_masks(self, min_px=20):
    """Drop masks smaller than min_px pixels; remove image sets left empty."""
    logger = ProgressLogger(len(self.image_sets), self.clean_invalid_masks)
    # iterate over a copy so removing emptied sets does not skip elements
    for image_set in list(self.image_sets):
        res = image_set.clean_invalid_masks(min_px=min_px)
        if res is not None:
            self.image_sets.remove(res)
        logger.log_progress()
    self.write_augmented()
    self.write_stats()
def read_path(self):
    """Build an ImageSet for every subdirectory of self.path."""
    image_set_paths = [
        os.path.join(self.path, folder)
        for folder in os.listdir(self.path)
        if os.path.isdir(os.path.join(self.path, folder))
    ]
    image_sets = []
    logger = ProgressLogger(len(image_set_paths), self.read_path)
    for path in image_set_paths:
        basename = os.path.basename(path)
        # look up whether this folder holds augmented data
        aug = self.augmented.get(basename, False)
        image_sets.append(ImageSet(path, augmented=aug))
        logger.log_progress()
    return image_sets
def augment(self, ids, seq=None, iterations=1):
    """Create augmented copies of the image sets at the given indices."""
    if seq is None:
        seq = self.seq
    # continue numbering after the lexicographically largest folder name,
    # which is also the numerically largest thanks to zfill(9)
    idx = int(sorted(self.augmented)[-1]) + 1
    logger = ProgressLogger(len(ids) * iterations, self.augment)
    for j in ids:
        image_set = self.image_sets[j]
        # never augment an image set that is itself an augmentation
        if image_set.augmented:
            continue
        for i in range(iterations):
            path = os.path.join(self.path, str(idx).zfill(9))
            aug = image_set.augment(path, seq)
            if aug is not None:
                self.image_sets.append(aug)
            idx += 1
            logger.log_progress()
    self.write_augmented()
    self.augmented = self.read_augmented()
    self.write_stats()
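# A minimal usage sketch (the `Dataset` name and constructor are assumptions;
# `seq` is shown as an imgaug pipeline, which is what `image_set.augment`
# appears to expect):
#
#   import imgaug.augmenters as iaa
#
#   seq = iaa.Sequential([
#       iaa.Fliplr(0.5),               # mirror half of the chips
#       iaa.Affine(rotate=(-10, 10)),  # small random rotations
#   ])
#   dataset = Dataset(r"C:\data\chips")  # hypothetical path
#   dataset.augment(ids=range(len(dataset.image_sets)), seq=seq, iterations=2)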
def check_nulldata(path):
    """Fill NoData holes in every GeoTIFF under path.

    Repeatedly replaces NoData cells with the focal mean of a growing
    neighborhood until none remain, then overwrites the original file.
    """
    files = [
        os.path.join(path, file)
        for file in os.listdir(path)
        if file.endswith(".tif")
    ]
    logger = ProgressLogger(len(files), check_nulldata)
    for file in files:
        name = os.path.splitext(os.path.basename(file))[0]
        img = arcpy.sa.Raster(file)
        isn = arcpy.sa.IsNull(img)
        rect = 25
        i = 0
        # as long as IsNull yields both 0 and 1, NoData cells remain
        while isn.minimum != isn.maximum:
            os.makedirs(os.path.join(resources.temp, "{}_{}".format(name, i)),
                        exist_ok=True)
            # replace NoData cells with the focal mean of a rect x rect window
            img = arcpy.sa.Con(
                arcpy.sa.IsNull(img),
                arcpy.sa.FocalStatistics(
                    img, arcpy.sa.NbrRectangle(rect, rect, "CELL"), "MEAN"),
                img)
            cpy = arcpy.CopyRaster_management(
                img,
                os.path.join(resources.temp, "{}_{}".format(name, i),
                             "{}.tif".format(name)))
            del img
            cpy = cpy.getOutput(0)
            rect += 25
            img = arcpy.sa.Raster(cpy)
            isn = arcpy.sa.IsNull(img)
            if isn.minimum == isn.maximum:
                # raster is complete: overwrite the original file
                shutil.copy(
                    os.path.join(resources.temp, "{}_{}".format(name, i),
                                 "{}.tif".format(name)),
                    file)
                del img
            i += 1
        # clean up the intermediate results
        for j in range(i):
            shutil.rmtree(
                os.path.join(resources.temp, "{}_{}".format(name, j)))
        logger.log_progress()
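# A minimal usage sketch (hypothetical path; requires the Spatial Analyst
# extension for IsNull/FocalStatistics/Con):
#
#   arcpy.CheckOutExtension("Spatial")
#   check_nulldata(r"C:\data\chips\images")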
def evaluate(self, dataset_test, set_size):
    """Return the mean average precision (mAP) over a random test subset."""
    image_ids = np.random.choice(dataset_test.image_ids, set_size)
    APs = []
    logger = ProgressLogger(set_size, self.evaluate)
    for image_id in image_ids:
        # load image and ground-truth data
        image, image_meta, gt_class_id, gt_bbox, gt_mask = \
            modellib.load_image_gt(dataset_test, self.config, image_id,
                                   use_mini_mask=False)
        # run object detection
        results = self.model.detect([image], verbose=0)
        r = results[0]
        # compute AP for this image
        AP, precisions, recalls, overlaps = \
            utils.compute_ap(gt_bbox, gt_class_id, gt_mask,
                             r["rois"], r["class_ids"], r["scores"],
                             r["masks"])
        APs.append(AP)
        logger.log_progress()
    return np.mean(APs)
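# A minimal usage sketch (`trainer` and `dataset_test` are assumptions;
# note that np.random.choice samples with replacement, so a small set_size
# yields a noisy mAP estimate):
#
#   mAP = trainer.evaluate(dataset_test, set_size=50)
#   print("mAP over 50 random test images: {:.3f}".format(mAP))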
def create_training_chips(images, chips_dir, shapefile, value_field,
                          tile_size, stride, rotation=0):
    logger = ProgressLogger(len(images), create_training_chips)
    for image in images:
        arcpy.sa.ExportTrainingDataForDeepLearning(
            in_raster=image,
            out_folder=chips_dir,
            in_class_data=shapefile,
            image_chip_format="TIFF",
            tile_size_x=tile_size,
            tile_size_y=tile_size,
            output_nofeature_tiles="ONLY_TILES_WITH_FEATURES",
            stride_x=stride,
            stride_y=stride,
            metadata_format="RCNN_Masks",
            rotation_angle=rotation,
            class_value_field=value_field)
        logger.log_progress()
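# A minimal usage sketch (paths and field name are hypothetical): export
# 512 px chips with 50 % overlap plus a rotated pass for more variety.
#
#   images = [r"C:\data\ortho_1.tif", r"C:\data\ortho_2.tif"]
#   create_training_chips(images,
#                         chips_dir=r"C:\data\chips",
#                         shapefile=r"C:\data\roofs.shp",
#                         value_field="roof_type",
#                         tile_size=512,
#                         stride=256,
#                         rotation=90)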
def mesh(detections, splits, shapefile, dst_name="buildings_3d"):
    """Build a 3D building model from Mask R-CNN detections.

    Georeferences each predicted mask, vectorizes it, attaches roof type,
    score and height quantiles to its centroid, joins the centroids to the
    building footprints and extrudes the result via CityEngine rules.
    """

    # georeferencing
    def shift_and_rescale(image, mask_arr):
        basename = os.path.splitext(os.path.basename(image))[0]
        image = arcpy.sa.Raster(image)
        mask = arcpy.NumPyArrayToRaster(mask_arr.astype(np.uint8) * 255)
        arcpy.env.outputCoordinateSystem = image
        os.makedirs(resources.temp_shifted, exist_ok=True)
        shifted = os.path.join(resources.temp_shifted,
                               "{}.tif".format(basename))
        arcpy.Shift_management(mask, shifted,
                               image.extent.XMin - mask.extent.XMin,
                               image.extent.YMin - mask.extent.YMin,
                               in_snap_raster=image)
        # match LIDAR resolution
        os.makedirs(resources.temp_rescaled, exist_ok=True)
        rescaled = os.path.join(resources.temp_rescaled,
                                "{}.tif".format(basename))
        arcpy.Rescale_management(shifted, rescaled, image.meanCellWidth,
                                 image.meanCellHeight)
        return rescaled

    # vectorize masks
    def vectorize(georef_mask):
        basename = os.path.splitext(os.path.basename(georef_mask))[0]
        os.makedirs(resources.temp_vectorized, exist_ok=True)
        vectorized = os.path.join(resources.temp_vectorized,
                                  "{}.shp".format(basename))
        field = "VALUE"
        arcpy.RasterToPolygon_conversion(georef_mask, vectorized,
                                         "NO_SIMPLIFY", field)
        return vectorized

    def get_centroid(polygon, mask):
        mask_ras = arcpy.sa.Raster(mask)
        centroid = None
        with arcpy.da.SearchCursor(polygon, ["SHAPE@", "gridcode"]) as cursor:
            for row in cursor:
                # gridcode 255 marks mask polygons (the background is 0)
                if row[1] == 255:
                    if mask_ras.extent.contains(row[0]):
                        centroid = row[0].centroid
        del cursor
        return centroid

    # work with quantiles to suppress outliers
    def pick_heights(image_arr, mask_arr):
        heights = image_arr * mask_arr
        heights = heights[np.nonzero(heights)]
        if len(heights) == 0:
            return None, None, None, None, None
        q_000 = np.quantile(heights, 0.0)
        q_010 = np.quantile(heights, 0.1)
        q_050 = np.quantile(heights, 0.5)
        q_090 = np.quantile(heights, 0.9)
        q_100 = np.quantile(heights, 1.0)
        return q_000, q_010, q_050, q_090, q_100

    def write_centroids_to_feature_class(centroids):
        os.makedirs(resources.temp_centroids, exist_ok=True)
        fc = arcpy.CreateFeatureclass_management(resources.temp_centroids,
                                                 "centroids.shp", "POINT")
        fc = fc.getOutput(0)
        arcpy.AddField_management(fc, "Roof", "TEXT")
        arcpy.AddField_management(fc, "Score", "FLOAT")
        arcpy.AddField_management(fc, "Perc_000", "FLOAT")
        arcpy.AddField_management(fc, "Perc_010", "FLOAT")
        arcpy.AddField_management(fc, "Perc_050", "FLOAT")
        arcpy.AddField_management(fc, "Perc_090", "FLOAT")
        arcpy.AddField_management(fc, "Perc_100", "FLOAT")
        with arcpy.da.InsertCursor(fc, [
                "SHAPE@", "Roof", "Score", "Perc_000", "Perc_010",
                "Perc_050", "Perc_090", "Perc_100"
        ]) as cursor:
            # each entry already holds the eight values in field order
            for centroid in centroids:
                cursor.insertRow(centroid)
        del cursor
        return fc

    def get_footprints(centroids, shapefile):
        os.makedirs(resources.temp_footprints, exist_ok=True)
        output_feature_class = os.path.join(resources.temp_footprints,
                                            "unsorted.shp")
        # keep only footprints that contain a detection centroid
        arcpy.SpatialJoin_analysis(shapefile, centroids,
                                   output_feature_class,
                                   join_operation="JOIN_ONE_TO_MANY",
                                   join_type="KEEP_COMMON",
                                   match_option="CONTAINS")
        fields = [
            field.name for field in arcpy.ListFields(output_feature_class)
        ]
        valids = [
            "FID", "Shape", "Roof", "Score", "Perc_000", "Perc_010",
            "Perc_050", "Perc_090", "Perc_100"
        ]
        to_drop = [item for item in fields if item not in valids]
        arcpy.DeleteField_management(output_feature_class, to_drop)
        output = os.path.join(resources.temp_footprints, "footprints.shp")
        arcpy.Sort_management(output_feature_class, output,
                              "Shape ASCENDING", "UL")
        arcpy.Delete_management(output_feature_class)
        arcpy.DeleteIdentical_management(output, "Shape", None, 0)
        return output

    # generate the 3D model via CityEngine rule packages
    def create_3d_model(footprints, rule_package, name):
        if not os.path.exists(resources.building_gdb):
            arcpy.CreateFileGDB_management(
                os.path.dirname(resources.building_gdb),
                os.path.basename(resources.building_gdb))
        arcpy.FeaturesFromCityEngineRules_3d(
            footprints, rule_package,
            os.path.join(resources.building_gdb, name))
        return os.path.join(resources.building_gdb, name)

    if not isinstance(splits, list) or not isinstance(detections, list):
        raise TypeError("Images and detections must be passed as lists!")
    if len(splits) != len(detections):
        raise ValueError("Images and detections must have equal length!")

    centr_with_attr = []
    logger = ProgressLogger(len(splits), mesh)
    for i in range(len(splits)):
        image = splits[i]
        image_arr = skimage.io.imread(image)
        detection = detections[i]
        masks = detection["masks"]
        if len(detection["class_ids"]) == 0:
            logger.log_progress()
            continue
        for j in range(masks.shape[2]):
            mask_arr = masks[:, :, j]
            georef_mask = shift_and_rescale(image, mask_arr)
            vectorized = vectorize(georef_mask)
            centroid = get_centroid(vectorized, georef_mask)
            q_000, q_010, q_050, q_090, q_100 = pick_heights(
                image_arr, mask_arr)
            if centroid is None or q_000 is None:
                continue
            score = float(detection["scores"][j])
            roof = dataset.roofs[detection["class_ids"][j]]
            centr_with_attr.append(
                [centroid, roof, score, q_000, q_010, q_050, q_090, q_100])
        logger.log_progress()

    print("start writing centroids to feature class...")
    centroid_features = write_centroids_to_feature_class(centr_with_attr)
    print("done.")
    print("start picking footprints...")
    footprints = get_footprints(centroid_features, shapefile)
    print("done.")
    print("start meshing...")
    model = create_3d_model(footprints, resources.rulefile, dst_name)
    print("done.")
    return model
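# A minimal usage sketch (variable names are assumptions; each entry of
# `detections` is one result dict as returned by Mask R-CNN's
# model.detect(), aligned with the raster in `splits` it was computed on):
#
#   splits = [r"C:\data\split_1.tif", r"C:\data\split_2.tif"]
#   detections = [model.detect([skimage.io.imread(s)], verbose=0)[0]
#                 for s in splits]
#   model_path = mesh(detections, splits,
#                     shapefile=r"C:\data\footprints.shp",
#                     dst_name="buildings_3d")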