def buffer_by_field(in_layer_path: str, out_layer_path: str, field: str, epsg: int = None, min_buffer: float = None, centered: bool = False) -> None:
    """Generate buffered polygons using the buffer distance stored in a field on each feature.

    Args:
        in_layer_path (str): feature class of line features to buffer
        out_layer_path (str): path to the output polygon feature class
        field (str): field containing the buffer distance (in metres)
        epsg (int, optional): EPSG code of the output spatial reference. Defaults to None.
        min_buffer (float, optional): minimum buffer distance; used when a feature's field value is smaller than this. Defaults to None.
        centered (bool, optional): if True, buffer by half the field value so the total buffer width equals the field value. Defaults to False.
    """
    log = Logger('buffer_by_field')

    with get_shp_or_gpkg(out_layer_path, write=True) as out_layer, get_shp_or_gpkg(in_layer_path) as in_layer:
        conversion = in_layer.rough_convert_metres_to_vector_units(1)

        # Add input Layer Fields to the output Layer if it is the one we want
        out_layer.create_layer(ogr.wkbPolygon, epsg=epsg, fields=in_layer.get_fields())

        transform = VectorBase.get_transform(in_layer.spatial_ref, out_layer.spatial_ref)

        factor = 0.5 if centered else 1.0

        for feature, _counter, progbar in in_layer.iterate_features('Buffering features', write_layers=[out_layer]):
            geom = feature.GetGeometryRef()

            if geom is None:
                progbar.erase()  # get around the progressbar
                log.warning('Feature with FID={} has no geometry. Skipping'.format(feature.GetFID()))
                continue

            buffer_dist = feature.GetField(field) * conversion * factor
            geom.Transform(transform)
            # Guard against min_buffer being None before comparing against it
            geom_buffer = geom.Buffer(buffer_dist if min_buffer is None or buffer_dist > min_buffer else min_buffer)

            # Create output Feature
            out_feature = ogr.Feature(out_layer.ogr_layer_def)
            out_feature.SetGeometry(geom_buffer)

            # Add field values from input Layer
            for i in range(0, out_layer.ogr_layer_def.GetFieldCount()):
                out_feature.SetField(out_layer.ogr_layer_def.GetFieldDefn(i).GetNameRef(), feature.GetField(i))

            out_layer.ogr_layer.CreateFeature(out_feature)
            out_feature = None
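
# Usage sketch (hypothetical paths and field name, kept commented out so the module's
# behaviour is unchanged): buffer each flowline by the value stored in its 'BFwidth'
# field, treating values below 1 m as 1 m and splitting the buffer evenly on each side.
#
# buffer_by_field(
#     'project/inputs/inputs.gpkg/flowlines',                     # hypothetical input layer
#     'project/intermediates/intermediates.gpkg/buffered_flowlines',  # hypothetical output layer
#     'BFwidth',                                                  # hypothetical field holding buffer distance in metres
#     epsg=4326,
#     min_buffer=1.0,
#     centered=True)
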
def copy_feature_class(in_layer_path: str, out_layer_path: str, epsg: int = None,
                       attribute_filter: str = None,
                       clip_shape: BaseGeometry = None,
                       clip_rect: List[float] = None,
                       buffer: float = 0,
                       ) -> None:
    """Copy a feature class from one location to another

    This method is capable of reprojecting the geometries as they are copied.
    It is also possible to filter features by attributes and to clip the features
    to another geometry.

    Args:
        in_layer_path (str): Input layer path
        out_layer_path (str): Output layer path
        epsg (int, optional): EPSG code to use for the transformation. Defaults to None.
        attribute_filter (str, optional): SQL attribute filter applied to the input features. Defaults to None.
        clip_shape (BaseGeometry, optional): Shapely geometry used to clip the input features. Defaults to None.
        clip_rect (List[float], optional): Bounding rectangle [minx, miny, maxx, maxy] used to clip the input features. Defaults to None.
        buffer (float, optional): Buffer the output features by this distance (in metres). Defaults to 0.
    """
    log = Logger('copy_feature_class')

    # NOTE: open the out_layer first so that the write operation gets dataset-open priority
    with get_shp_or_gpkg(out_layer_path, write=True) as out_layer, \
            get_shp_or_gpkg(in_layer_path) as in_layer:

        # Add input Layer Fields to the output Layer if it is the one we want
        out_layer.create_layer_from_ref(in_layer, epsg=epsg)

        transform = VectorBase.get_transform(in_layer.spatial_ref, out_layer.spatial_ref)

        buffer_convert = 0
        if buffer != 0:
            buffer_convert = in_layer.rough_convert_metres_to_vector_units(buffer)

        # This is the callback method that will be run on each feature
        for feature, _counter, progbar in in_layer.iterate_features("Copying features", write_layers=[out_layer], clip_shape=clip_shape, clip_rect=clip_rect, attribute_filter=attribute_filter):
            geom = feature.GetGeometryRef()

            if geom is None:
                progbar.erase()  # get around the progressbar
                log.warning('Feature with FID={} has no geometry. Skipping'.format(feature.GetFID()))
                continue
            if geom.GetGeometryType() in VectorBase.LINE_TYPES:
                if geom.Length() == 0.0:
                    progbar.erase()  # get around the progressbar
                    log.warning('Feature with FID={} has no Length. Skipping'.format(feature.GetFID()))
                    continue

            # Buffer the shape if we need to
            if buffer_convert != 0:
                geom = geom.Buffer(buffer_convert)

            geom.Transform(transform)

            # Create output Feature
            out_feature = ogr.Feature(out_layer.ogr_layer_def)
            out_feature.SetGeometry(geom)

            # Add field values from input Layer
            for i in range(0, out_layer.ogr_layer_def.GetFieldCount()):
                out_feature.SetField(out_layer.ogr_layer_def.GetFieldDefn(i).GetNameRef(), feature.GetField(i))

            out_layer.ogr_layer.CreateFeature(out_feature)
            out_feature = None
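
# Usage sketch (hypothetical paths and filter, kept commented out so it does not run on import):
# copy NHD flowlines into a project GeoPackage, reprojecting to EPSG:4326 and keeping only
# features that pass an attribute filter.
#
# copy_feature_class(
#     'nhd/NHDFlowline.shp',                     # hypothetical input shapefile
#     'project/inputs/inputs.gpkg/flowlines',    # hypothetical output GeoPackage layer
#     epsg=4326,
#     attribute_filter="FCode = 46006",          # hypothetical filter expression
#     buffer=0)
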
def rvd(huc: int, flowlines_orig: Path, existing_veg_orig: Path, historic_veg_orig: Path,
        valley_bottom_orig: Path, output_folder: Path, reach_codes: List[str],
        flow_areas_orig: Path, waterbodies_orig: Path, meta=None):
    """Generate segmented reaches on the flowline network and calculate RVD from the historic and existing vegetation rasters

    Args:
        huc (integer): Watershed ID (4 or 8 digit HUC code)
        flowlines_orig (Path): Segmented flowlines feature layer
        existing_veg_orig (Path): LANDFIRE version 2.00 EVT raster, with adjacent XML metadata file
        historic_veg_orig (Path): LANDFIRE version 2.00 BPS raster, with adjacent XML metadata file
        valley_bottom_orig (Path): VBET polygon feature layer
        output_folder (Path): destination folder for project output
        reach_codes (List[str]): NHD reach codes for features to include in outputs
        flow_areas_orig (Path): NHD flow area polygon feature layer
        waterbodies_orig (Path): NHD waterbodies polygon feature layer
        meta (Dict[str, str]): dictionary of riverscapes metadata key: value pairs
    """
    log = Logger("RVD")
    log.info('RVD v.{}'.format(cfg.version))

    try:
        int(huc)
    except ValueError:
        raise Exception('Invalid HUC identifier "{}". Must be an integer'.format(huc))

    if not (len(huc) == 4 or len(huc) == 8):
        raise Exception('Invalid HUC identifier. Must be a 4 or 8 digit integer')

    safe_makedirs(output_folder)

    project, _realization, proj_nodes = create_project(huc, output_folder)

    # Incorporate project metadata to the riverscapes project
    if meta is not None:
        project.add_metadata(meta)

    log.info('Adding inputs to project')
    _prj_existing_path_node, prj_existing_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['EXVEG'], existing_veg_orig)
    _prj_historic_path_node, prj_historic_path = project.add_project_raster(proj_nodes['Inputs'], LayerTypes['HISTVEG'], historic_veg_orig)

    # TODO: Don't forget the att_filter
    # _prj_flowlines_node, prj_flowlines = project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'], flowlines, att_filter="\"ReachCode\" Like '{}%'".format(huc))

    # Copy in the vectors we need
    inputs_gpkg_path = os.path.join(output_folder, LayerTypes['INPUTS'].rel_path)
    intermediates_gpkg_path = os.path.join(output_folder, LayerTypes['INTERMEDIATES'].rel_path)
    outputs_gpkg_path = os.path.join(output_folder, LayerTypes['OUTPUTS'].rel_path)

    # Make sure we're starting with empty/fresh geopackages
    GeopackageLayer.delete(inputs_gpkg_path)
    GeopackageLayer.delete(intermediates_gpkg_path)
    GeopackageLayer.delete(outputs_gpkg_path)

    # Copy our input layers and also find the difference in the geometry for the valley bottom
    flowlines_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['FLOWLINES'].rel_path)
    vbottom_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['VALLEY_BOTTOM'].rel_path)

    copy_feature_class(flowlines_orig, flowlines_path, epsg=cfg.OUTPUT_EPSG)
    copy_feature_class(valley_bottom_orig, vbottom_path, epsg=cfg.OUTPUT_EPSG)

    with GeopackageLayer(flowlines_path) as flow_lyr:
        # Set the output spatial ref as this for the whole project
        out_srs = flow_lyr.spatial_ref
        meter_conversion = flow_lyr.rough_convert_metres_to_vector_units(1)
        distance_buffer = flow_lyr.rough_convert_metres_to_vector_units(1)

    # Transform issues reading 102003 as an EPSG id. Using the spatial ref WKT seems to work,
    # however ArcGIS has problems loading feature classes written with this method...
    raster_srs = ogr.osr.SpatialReference()
    ds = gdal.Open(prj_existing_path, 0)
    raster_srs.ImportFromWkt(ds.GetProjectionRef())
    raster_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER)
    transform_shp_to_raster = VectorBase.get_transform(out_srs, raster_srs)

    gt = ds.GetGeoTransform()
    cell_area = ((gt[1] / meter_conversion) * (-gt[5] / meter_conversion))

    # Create the output feature class fields
    with GeopackageLayer(outputs_gpkg_path, layer_name='ReachGeometry', delete_dataset=True) as out_lyr:
        out_lyr.create_layer(ogr.wkbMultiLineString, spatial_ref=out_srs, options=['FID=ReachID'], fields={
            'GNIS_NAME': ogr.OFTString,
            'ReachCode': ogr.OFTString,
            'TotDASqKm': ogr.OFTReal,
            'NHDPlusID': ogr.OFTReal,
            'WatershedID': ogr.OFTInteger
        })

    metadata = {
        'RVD_DateTime': datetime.datetime.now().isoformat(),
        'Reach_Codes': reach_codes
    }

    # Execute the SQL to create the lookup tables in the RVD geopackage SQLite database
    watershed_name = create_database(huc, outputs_gpkg_path, metadata, cfg.OUTPUT_EPSG, os.path.join(os.path.abspath(os.path.dirname(__file__)), '..', 'database', 'rvd_schema.sql'))

    project.add_metadata({'Watershed': watershed_name})

    geom_vbottom = get_geometry_unary_union(vbottom_path, spatial_ref=raster_srs)

    flowareas_path = None
    if flow_areas_orig:
        flowareas_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['FLOW_AREA'].rel_path)
        copy_feature_class(flow_areas_orig, flowareas_path, epsg=cfg.OUTPUT_EPSG)
        geom_flow_areas = get_geometry_unary_union(flowareas_path)
        # Difference with existing vbottom
        geom_vbottom = geom_vbottom.difference(geom_flow_areas)
    else:
        del LayerTypes['INPUTS'].sub_layers['FLOW_AREA']

    waterbodies_path = None
    if waterbodies_orig:
        waterbodies_path = os.path.join(inputs_gpkg_path, LayerTypes['INPUTS'].sub_layers['WATERBODIES'].rel_path)
        copy_feature_class(waterbodies_orig, waterbodies_path, epsg=cfg.OUTPUT_EPSG)
        geom_waterbodies = get_geometry_unary_union(waterbodies_path)
        # Difference with existing vbottom
        geom_vbottom = geom_vbottom.difference(geom_waterbodies)
    else:
        del LayerTypes['INPUTS'].sub_layers['WATERBODIES']

    # Add the inputs to the XML
    _nd, _in_gpkg_path, _sublayers = project.add_project_geopackage(proj_nodes['Inputs'], LayerTypes['INPUTS'])

    # Filter the flow lines to just the required features and then segment to desired length
    # TODO: These are brat methods that need to be refactored to use VectorBase layers
    cleaned_path = os.path.join(outputs_gpkg_path, 'ReachGeometry')
    build_network(flowlines_path, flowareas_path, cleaned_path, waterbodies_path=waterbodies_path, epsg=cfg.OUTPUT_EPSG, reach_codes=reach_codes, create_layer=False)

    # Generate Voronoi polygons
    log.info("Calculating Voronoi Polygons...")

    # Add all the points (including islands) to the list
    flowline_thiessen_points_groups = centerline_points(cleaned_path, distance_buffer, transform_shp_to_raster)
    flowline_thiessen_points = [pt for group in flowline_thiessen_points_groups.values() for pt in group]
    simple_save([pt.point for pt in flowline_thiessen_points], ogr.wkbPoint, raster_srs, "Thiessen_Points", intermediates_gpkg_path)

    # Exterior is the shell and there is only ever 1
    myVorL = NARVoronoi(flowline_thiessen_points)

    # Generate Thiessen Polys
    myVorL.createshapes()

    # Dissolve by flowlines
    log.info("Dissolving Thiessen Polygons")
    dissolved_polys = myVorL.dissolve_by_property('fid')

    # Clip Thiessen Polys
    log.info("Clipping Thiessen Polygons to Valley Bottom")
    clipped_thiessen = clip_polygons(geom_vbottom, dissolved_polys)

    # Save Intermediates
    simple_save(clipped_thiessen.values(), ogr.wkbPolygon, raster_srs, "Thiessen", intermediates_gpkg_path)
    simple_save(dissolved_polys.values(), ogr.wkbPolygon, raster_srs, "ThiessenPolygonsDissolved", intermediates_gpkg_path)
    simple_save(myVorL.polys, ogr.wkbPolygon, raster_srs, "ThiessenPolygonsRaw", intermediates_gpkg_path)
    _nd, _inter_gpkg_path, _sublayers = project.add_project_geopackage(proj_nodes['Intermediates'], LayerTypes['INTERMEDIATES'])

    # OLD METHOD FOR AUDIT
    # dissolved_polys2 = dissolve_by_points(flowline_thiessen_points_groups, myVorL.polys)
    # simple_save(dissolved_polys2.values(), ogr.wkbPolygon, out_srs, "ThiessenPolygonsDissolved_OLD", intermediates_gpkg_path)

    # Load Vegetation Rasters
    log.info("Loading Existing and Historic Vegetation Rasters")
    vegetation = {}
    vegetation["EXISTING"] = load_vegetation_raster(prj_existing_path, outputs_gpkg_path, True, output_folder=os.path.join(output_folder, 'Intermediates'))
    vegetation["HISTORIC"] = load_vegetation_raster(prj_historic_path, outputs_gpkg_path, False, output_folder=os.path.join(output_folder, 'Intermediates'))

    for epoch in vegetation.keys():
        for name in vegetation[epoch].keys():
            if not f"{epoch}_{name}" == "HISTORIC_LUI":
                project.add_project_raster(proj_nodes['Intermediates'], LayerTypes[f"{epoch}_{name}"])

    if vegetation["EXISTING"]["RAW"].shape != vegetation["HISTORIC"]["RAW"].shape:
        raise Exception('Vegetation raster shapes are not equal Existing={} Historic={}. Cannot continue'.format(vegetation["EXISTING"]["RAW"].shape, vegetation["HISTORIC"]["RAW"].shape))

    # Vegetation zone calculations
    riparian_zone_arrays = {}
    riparian_zone_arrays["RIPARIAN_ZONES"] = ((vegetation["EXISTING"]["RIPARIAN"] + vegetation["HISTORIC"]["RIPARIAN"]) > 0) * 1
    riparian_zone_arrays["NATIVE_RIPARIAN_ZONES"] = ((vegetation["EXISTING"]["NATIVE_RIPARIAN"] + vegetation["HISTORIC"]["NATIVE_RIPARIAN"]) > 0) * 1
    riparian_zone_arrays["VEGETATION_ZONES"] = ((vegetation["EXISTING"]["VEGETATED"] + vegetation["HISTORIC"]["VEGETATED"]) > 0) * 1

    # Save Intermediate Rasters
    for name, raster in riparian_zone_arrays.items():
        save_intarr_to_geotiff(raster, os.path.join(output_folder, "Intermediates", f"{name}.tif"), prj_existing_path)
        project.add_project_raster(proj_nodes['Intermediates'], LayerTypes[name])

    # Calculate Riparian Departure per Reach
    riparian_arrays = {f"{epoch.capitalize()}{(name.capitalize()).replace('Native_riparian', 'NativeRiparian')}Mean": array for epoch, arrays in vegetation.items() for name, array in arrays.items() if name in ["RIPARIAN", "NATIVE_RIPARIAN"]}

    # Vegetation Cell Counts
    raw_arrays = {f"{epoch}": array for epoch, arrays in vegetation.items() for name, array in arrays.items() if name == "RAW"}

    # Generate Vegetation Conversions
    vegetation_change = (vegetation["HISTORIC"]["CONVERSION"] - vegetation["EXISTING"]["CONVERSION"])
    save_intarr_to_geotiff(vegetation_change, os.path.join(output_folder, "Intermediates", "Conversion_Raster.tif"), prj_existing_path)
    project.add_project_raster(proj_nodes['Intermediates'], LayerTypes['VEGETATION_CONVERSION'])

    # Load conversion types dictionary from database
    conn = sqlite3.connect(outputs_gpkg_path)
    conn.row_factory = dict_factory
    curs = conn.cursor()
    curs.execute('SELECT * FROM ConversionTypes')
    conversion_classifications = curs.fetchall()
    curs.execute('SELECT * FROM vwConversions')
    conversion_ids = curs.fetchall()

    # Split vegetation change classes into binary arrays
    vegetation_change_arrays = {
        c['FieldName']: (vegetation_change == int(c["TypeValue"])) * 1 if int(c["TypeValue"]) in np.unique(vegetation_change) else None
        for c in conversion_classifications
    }
    # Calculate average and unique cell counts per reach
    progbar = ProgressBar(len(clipped_thiessen.keys()), 50, "Extracting array values by reach...")
    counter = 0
    discarded = 0
    with rasterio.open(prj_existing_path) as dataset:
        unique_vegetation_counts = {}
        reach_average_riparian = {}
        reach_average_change = {}
        for reachid, poly in clipped_thiessen.items():
            counter += 1
            progbar.update(counter)
            # We can discount a lot of shapes here.
            if not poly.is_valid or poly.is_empty or poly.area == 0 or poly.geom_type not in ["Polygon", "MultiPolygon"]:
                discarded += 1
                continue

            raw_values_unique = {}
            change_values_mean = {}
            riparian_values_mean = {}
            reach_raster = np.ma.masked_invalid(
                features.rasterize(
                    [poly],
                    out_shape=dataset.shape,
                    transform=dataset.transform,
                    all_touched=True,
                    fill=np.nan))
            for raster_name, raster in raw_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    raw_values_unique[raster_name] = np.unique(np.ma.filled(current_raster, fill_value=0), return_counts=True)
                else:
                    raw_values_unique[raster_name] = []
            for raster_name, raster in riparian_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    riparian_values_mean[raster_name] = np.ma.mean(current_raster)
                else:
                    riparian_values_mean[raster_name] = 0.0
            for raster_name, raster in vegetation_change_arrays.items():
                if raster is not None:
                    current_raster = np.ma.masked_array(raster, mask=reach_raster.mask)
                    change_values_mean[raster_name] = np.ma.mean(current_raster)
                else:
                    change_values_mean[raster_name] = 0.0
            unique_vegetation_counts[reachid] = raw_values_unique
            reach_average_riparian[reachid] = riparian_values_mean
            reach_average_change[reachid] = change_values_mean

    progbar.finish()

    with SQLiteCon(outputs_gpkg_path) as gpkg:
        # Ensure all reaches are present in the ReachAttributes table before storing RVD output values
        gpkg.curs.execute('INSERT INTO ReachAttributes (ReachID) SELECT ReachID FROM ReachGeometry;')

        errs = 0
        for reachid, epochs in unique_vegetation_counts.items():
            for epoch in epochs.values():
                insert_values = [[reachid, int(vegetationid), float(count * cell_area), int(count)] for vegetationid, count in zip(epoch[0], epoch[1]) if vegetationid != 0]
                try:
                    gpkg.curs.executemany('''INSERT INTO ReachVegetation (
                        ReachID,
                        VegetationID,
                        Area,
                        CellCount)
                        VALUES (?,?,?,?)''', insert_values)
                # Sqlite can't report on SQL errors so we have to print good log messages to help intuit what the problem is
                except sqlite3.IntegrityError as err:
                    # This is likely a constraint error.
                    errstr = "Integrity Error when inserting records: ReachID: {} VegetationIDs: {}".format(reachid, str(list(epoch[0])))
                    log.error(errstr)
                    errs += 1
                except sqlite3.Error as err:
                    # This is any other kind of error
                    errstr = "SQL Error when inserting records: ReachID: {} VegetationIDs: {} ERROR: {}".format(reachid, str(list(epoch[0])), str(err))
                    log.error(errstr)
                    errs += 1
        if errs > 0:
            raise Exception('Errors were found inserting records into the database. Cannot continue.')
        gpkg.conn.commit()

    # Load RVD departure levels from the DepartureLevels database table
    with SQLiteCon(outputs_gpkg_path) as gpkg:
        gpkg.curs.execute('SELECT LevelID, MaxRVD FROM DepartureLevels ORDER BY MaxRVD ASC')
        departure_levels = gpkg.curs.fetchall()

    # Calculate Average Departure for Riparian and Native Riparian
    riparian_departure_values = riparian_departure(reach_average_riparian, departure_levels)
    write_db_attributes(outputs_gpkg_path, riparian_departure_values, departure_type_columns)

    # Add Conversion Code, Type to Vegetation Conversion
    with SQLiteCon(outputs_gpkg_path) as gpkg:
        gpkg.curs.execute('SELECT LevelID, MaxValue, NAME FROM ConversionLevels ORDER BY MaxValue ASC')
        conversion_levels = gpkg.curs.fetchall()
    reach_values_with_conversion_codes = classify_conversions(reach_average_change, conversion_ids, conversion_levels)
    write_db_attributes(outputs_gpkg_path, reach_values_with_conversion_codes, rvd_columns)

    # # Write Output to GPKG table
    # log.info('Insert values to GPKG tables')

    # # TODO move this to the write_attributes method
    # with get_shp_or_gpkg(outputs_gpkg_path, layer_name='ReachAttributes', write=True, ) as in_layer:
    #     # Create each field and store the name and index in a list of tuples
    #     field_indices = [(field, in_layer.create_field(field, field_type)) for field, field_type in {
    #         "FromConifer": ogr.OFTReal,
    #         "FromDevegetated": ogr.OFTReal,
    #         "FromGrassShrubland": ogr.OFTReal,
    #         "FromDeciduous": ogr.OFTReal,
    #         "NoChange": ogr.OFTReal,
    #         "Deciduous": ogr.OFTReal,
    #         "GrassShrubland": ogr.OFTReal,
    #         "Devegetation": ogr.OFTReal,
    #         "Conifer": ogr.OFTReal,
    #         "Invasive": ogr.OFTReal,
    #         "Development": ogr.OFTReal,
    #         "Agriculture": ogr.OFTReal,
    #         "ConversionCode": ogr.OFTInteger,
    #         "ConversionType": ogr.OFTString}.items()]

    #     for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
    #         reach = feature.GetFID()
    #         if reach not in reach_values_with_conversion_codes:
    #             continue

    #         # Set all the field values and then store the feature
    #         for field, _idx in field_indices:
    #             if field in reach_values_with_conversion_codes[reach]:
    #                 if not reach_values_with_conversion_codes[reach][field]:
    #                     feature.SetField(field, None)
    #                 else:
    #                     feature.SetField(field, reach_values_with_conversion_codes[reach][field])
    #         in_layer.ogr_layer.SetFeature(feature)

    #     # Create each field and store the name and index in a list of tuples
    #     field_indices = [(field, in_layer.create_field(field, field_type)) for field, field_type in {
    #         "EXISTING_RIPARIAN_MEAN": ogr.OFTReal,
    #         "HISTORIC_RIPARIAN_MEAN": ogr.OFTReal,
    #         "RIPARIAN_DEPARTURE": ogr.OFTReal,
    #         "EXISTING_NATIVE_RIPARIAN_MEAN": ogr.OFTReal,
    #         "HISTORIC_NATIVE_RIPARIAN_MEAN": ogr.OFTReal,
    #         "NATIVE_RIPARIAN_DEPARTURE": ogr.OFTReal}.items()]

    #     for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
    #         reach = feature.GetFID()
    #         if reach not in riparian_departure_values:
    #             continue

    #         # Set all the field values and then store the feature
    #         for field, _idx in field_indices:
    #             if field in riparian_departure_values[reach]:
    #                 if not riparian_departure_values[reach][field]:
    #                     feature.SetField(field, None)
    #                 else:
    #                     feature.SetField(field, riparian_departure_values[reach][field])
    #         in_layer.ogr_layer.SetFeature(feature)

    # with sqlite3.connect(outputs_gpkg_path) as conn:
    #     cursor = conn.cursor()
    #     errs = 0
    #     for reachid, epochs in unique_vegetation_counts.items():
    #         for epoch in epochs.values():
    #             insert_values = [[reachid, int(vegetationid), float(count * cell_area), int(count)] for vegetationid, count in zip(epoch[0], epoch[1]) if vegetationid != 0]
    #             try:
    #                 cursor.executemany('''INSERT INTO ReachVegetation (
    #                     ReachID,
    #                     VegetationID,
    #                     Area,
    #                     CellCount)
    #                     VALUES (?,?,?,?)''', insert_values)
    #             # Sqlite can't report on SQL errors so we have to print good log messages to help intuit what the problem is
    #             except sqlite3.IntegrityError as err:
    #                 # This is likely a constraint error.
    #                 errstr = "Integrity Error when inserting records: ReachID: {} VegetationIDs: {}".format(reachid, str(list(epoch[0])))
    #                 log.error(errstr)
    #                 errs += 1
    #             except sqlite3.Error as err:
    #                 # This is any other kind of error
    #                 errstr = "SQL Error when inserting records: ReachID: {} VegetationIDs: {} ERROR: {}".format(reachid, str(list(epoch[0])), str(err))
    #                 log.error(errstr)
    #                 errs += 1
    #     if errs > 0:
    #         raise Exception('Errors were found inserting records into the database. Cannot continue.')
    #     conn.commit()

    # Add intermediates and the report to the XML
    # project.add_project_geopackage(proj_nodes['Intermediates'], LayerTypes['INTERMEDIATES'])  # already added above
    project.add_project_geopackage(proj_nodes['Outputs'], LayerTypes['OUTPUTS'])

    # Add the report to the XML
    report_path = os.path.join(project.project_dir, LayerTypes['REPORT'].rel_path)
    project.add_report(proj_nodes['Outputs'], LayerTypes['REPORT'], replace=True)

    report = RVDReport(report_path, project)
    report.write()

    log.info('RVD complete')
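
# Invocation sketch (hypothetical paths; in practice this tool is driven by a CLI wrapper
# that parses arguments and supplies the LANDFIRE rasters, NHD layers and VBET valley bottom).
# Kept commented out so importing this module has no side effects.
#
# rvd(
#     '17060304',                                  # 8-digit HUC as a string of digits
#     'hydro/flowlines.gpkg/network_intersected',  # hypothetical segmented flowlines layer
#     'landfire/existing_veg.tif',                 # LANDFIRE EVT raster
#     'landfire/historic_veg.tif',                 # LANDFIRE BPS raster
#     'vbet/vbet.gpkg/vbet_full',                  # hypothetical valley bottom polygon layer
#     'output/rvd',                                # project output folder
#     ['33400', '46003', '46006', '46007'],        # NHD reach codes to include
#     'hydro/hydro.gpkg/flowareas',                # hypothetical NHD flow area layer
#     'hydro/hydro.gpkg/waterbodies',              # hypothetical NHD waterbodies layer
#     meta={'Operator': 'example'})
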