def createRefDTMMosaic(in_md_path, out_md_path, v_unit):
    """Create a referenced DTM mosaic dataset and attach a contour raster function.

    The raster function chain (meters, intl feet, or US survey feet) is chosen
    from the vertical-unit name. Skips creation when the output already exists.
    """
    start = datetime.now()
    if arcpy.Exists(out_md_path):
        arcpy.AddMessage("Referenced mosaic dataset exists " + out_md_path)
    else:
        # Reference only the DTM rows (TypeID = 1) of the source mosaic.
        arcpy.CreateReferencedMosaicDataset_management(
            in_dataset=in_md_path,
            out_mosaic_dataset=out_md_path,
            where_clause="TypeID = 1")

        # Default is meters; override below when the vertical unit is a foot variant.
        raster_function_path = Raster.Contour_Meters_function_chain_path
        units = str(v_unit).upper()
        if any(token in units for token in ("FEET", "FOOT", "FT")):
            raster_function_path = Raster.Contour_IntlFeet_function_chain_path
            if "US" in units or "SURVEY" in units:
                arcpy.AddMessage("Using US FOOT Raster Function")
                raster_function_path = Raster.Contour_Feet_function_chain_path
            else:
                arcpy.AddMessage("Using INT FOOT Raster Function")
        else:
            arcpy.AddMessage("Using METER Raster Function")

        arcpy.EditRasterFunction_management(
            in_mosaic_dataset=out_md_path,
            edit_mosaic_dataset_item="EDIT_MOSAIC_DATASET",
            edit_options="REPLACE",
            function_chain_definition=raster_function_path,
            location_function_name="")
        Utility.addToolMessages()

        arcpy.CalculateStatistics_management(
            in_raster_dataset=out_md_path,
            x_skip_factor=SKIP_FACTOR,
            y_skip_factor=SKIP_FACTOR,
            ignore_values="",
            skip_existing="OVERWRITE",
            area_of_interest="Feature Set")

        doTime(start, "Created referenced mosaic dataset " + out_md_path)
def importMosaicDatasetGeometries(md_path, footprint_path, lasd_boundary_path):
    """Replace a mosaic dataset's footprints and/or boundary from feature classes.

    Either source may be None, in which case that geometry import is skipped.
    Footprints join on Name/name; the boundary joins on OBJECTID.
    """
    geometry_imports = [
        ("FOOTPRINT", "Name", footprint_path, "name"),
        ("BOUNDARY", "OBJECTID", lasd_boundary_path, "OBJECTID"),
    ]
    for target_type, target_join, source_fc, source_join in geometry_imports:
        if source_fc is not None:
            arcpy.ImportMosaicDatasetGeometry_management(
                md_path,
                target_featureclass_type=target_type,
                target_join_field=target_join,
                input_featureclass=source_fc,
                input_join_field=source_join)
            Utility.addToolMessages()
def getRasterStats(ProjectUID, ProjectID, curr_raster, raster_path, group,
                   elevation_type, raster_format, raster_PixelType, nodata,
                   horz_cs_name, horz_unit_name, horz_cs_wkid, vert_cs_name,
                   vert_unit_name, rows):
    """Build one metadata row for a raster and append it to ``rows``.

    Extracts the raster's boundary polygon, calculates statistics, appends
    the raster properties, and returns the cell size.
    """
    # NOTE: Order here must match field list in CMDRConfig
    mem_boundary = "in_memory\MemBoundary"
    if arcpy.Exists(mem_boundary):
        arcpy.Delete_management(mem_boundary)
        Utility.addToolMessages()

    arcpy.RasterDomain_3d(raster_path, mem_boundary, "POLYGON")[0]
    Utility.addToolMessages()
    boundary = Utility.getExistingRecord(in_table=mem_boundary,
                                         field_names=['SHAPE@'],
                                         uidIndex=-1)[0][0]

    new_row = [ProjectUID, ProjectID, boundary, curr_raster, raster_path,
               group, elevation_type, raster_format, nodata, raster_PixelType]

    arcpy.CalculateStatistics_management(in_raster_dataset=raster_path,
                                         skip_existing="OVERWRITE")
    cell_size = getRasterProperties(raster_path, new_row)

    new_row.extend([horz_cs_name,
                    horz_unit_name,
                    horz_cs_wkid,
                    vert_cs_name,
                    vert_unit_name,
                    None])  # Vert WKID, we can't know this in python
    rows.append(new_row)
    return cell_size
def createReferenceddMosaicDataset(in_md_path, out_md_path, spatial_ref, raster_v_unit):
    """Create a referenced mosaic dataset (DHM) with the canopy-density function.

    NOTE: the trailing message reminds the operator to re-point the MR Point
    Density function at the project's POINT_COUNT_LAST mosaic manually.
    """
    start = datetime.datetime.now()

    arcpy.CreateReferencedMosaicDataset_management(
        in_dataset=in_md_path,
        out_mosaic_dataset=out_md_path,
        coordinate_system=spatial_ref,
        number_of_bands="1",
        pixel_type="32_BIT_SIGNED",
        where_clause="",
        in_template_dataset="",
        extent="",
        select_using_features="SELECT_USING_FEATURES",
        lod_field="",
        minPS_field="",
        maxPS_field="",
        pixelSize="",
        build_boundary="BUILD_BOUNDARY")

    # Replace the dataset-level function chain with canopy density.
    arcpy.EditRasterFunction_management(
        in_mosaic_dataset=out_md_path,
        edit_mosaic_dataset_item="EDIT_MOSAIC_DATASET",
        edit_options="REPLACE",
        function_chain_definition=Raster.Canopy_Density_function_chain_path,
        location_function_name="")
    Utility.addToolMessages()

    arcpy.AddMessage(
        "\tNOTE: !!! Please edit the MR Point Density function. Change to replace input to 'Multiply LAST by 100' with this project's POINT_COUNT_LAST mosaic dataset.\n\n\t{}\n"
        .format(out_md_path))
    doTime(start, "Created DHM '{}'".format(out_md_path))
def DefineBuildOverviews(cellsizeOVR, MasterMD, MasterMD_overview_path, AreaToBuildOVR):
    """Define and build overviews for the master mosaic dataset.

    Overview tiles are 5120x5120 LZW-compressed images written under
    MasterMD_overview_path (which must live in the ArcGIS Server Data Store).
    The first overview level uses cellsizeOVR; each further level doubles it
    (overview_factor=2). AreaToBuildOVR bounds the area to build (here the
    extent of the entire master mosaic dataset).
    """
    arcpy.AddMessage(
        "\nCell size of First level Overview: {0}".format(cellsizeOVR))

    arcpy.DefineOverviews_management(
        in_mosaic_dataset=MasterMD,
        overview_image_folder=MasterMD_overview_path,
        in_template_dataset=AreaToBuildOVR,
        extent="",
        pixel_size=cellsizeOVR,
        number_of_levels="",
        tile_rows="5120",
        tile_cols="5120",
        overview_factor="2",
        force_overview_tiles="NO_FORCE_OVERVIEW_TILES",
        resampling_method="BILINEAR",
        compression_method="LZW",
        compression_quality="100")
    Utility.addToolMessages()

    # Build every defined overview; stale images are left as-is
    # (regenerate_stale_images="IGNORE_STALE_IMAGES").
    arcpy.BuildOverviews_management(
        MasterMD,
        "#",
        define_missing_tiles="NO_DEFINE_MISSING_TILES",
        generate_overviews="GENERATE_OVERVIEWS",
        generate_missing_images="GENERATE_MISSING_IMAGES",
        regenerate_stale_images="IGNORE_STALE_IMAGES")
    Utility.addToolMessages()

    # Report the post-overview record count of the master mosaic dataset.
    countMasterRastersOVR = int(arcpy.GetCount_management(MasterMD).getOutput(0))
    arcpy.AddMessage(
        "After Building Overviews Master Mosaic Dataset: {0} has {1} row(s).".
        format(MasterMD, countMasterRastersOVR))
    return
def CreateMasterMosaicDatasets(wmxJobID):
    """Create one master mosaic dataset per derived product for a WMX job.

    Looks up the MD Master row for ``wmxJobID``, then for each product type
    (DTM, DSM, DLM, DHM, DCM, INT) creates a "<MasterMDName>_<product>.gdb"
    file GDB under the master path, copies the master boundary feature class
    into it (filtered to this job), and delegates mosaic-dataset creation to
    CreateMasterMosaicDataset().
    """
    Utility.printArguments(["wmxJobID"], [wmxJobID], "B02 CreateMasterMosaicDatasets")
    Utility.setWMXJobDataAsEnvironmentWorkspace(wmxJobID)

    # Fetch master-mosaic configuration for this WMX job from the CMDR.
    MDMaster = CMDR.MDMaster()
    mdMaster_row = MDMaster.getMDMaster(wmxJobID)
    parent_path = MDMaster.getMDParentPath(mdMaster_row)
    mdMaster_path = MDMaster.getMDPath(mdMaster_row)
    MasterMDName = MDMaster.getMDName(mdMaster_row)
    MasterMDCellSize_Meters = MDMaster.getMDCellSize(mdMaster_row)
    # mdMaster_aoi = MDMaster.getMDAOI(mdMaster_row)

    if arcpy.Exists(parent_path):
        if not os.path.exists(mdMaster_path):
            os.makedirs(mdMaster_path)
        # master_fgdb_path = os.path.join(mdMaster_path, MasterMDName)

        # One file GDB (and mosaic dataset) per derived product type.
        md_list = [FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
                   FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT]
        for md_name in md_list:
            local_fgdb_name = "{}_{}.gdb".format(MasterMDName, md_name)
            arcpy.AddMessage("local_fgdb_name '{}'".format(local_fgdb_name))
            local_fgdb_path = os.path.join(mdMaster_path, local_fgdb_name)
            arcpy.AddMessage("local_fgdb_path '{}'".format(local_fgdb_path))
            if not os.path.exists(local_fgdb_path):
                arcpy.AddMessage("creating MD master fGDB '{} / {}'".format(mdMaster_path, local_fgdb_name))
                arcpy.CreateFileGDB_management(mdMaster_path, local_fgdb_name)
                Utility.addToolMessages()

            # Select only this job's row from the MD Master feature class.
            where_clause = "{} = {}".format(arcpy.AddFieldDelimiters(MDMaster.fclass, CMDRConfig.field_MDMaster_WMXJobID), wmxJobID)
            # mdMasterLayer = "MDMasterLayer"
            # arcpy.MakeFeatureLayer_management(in_features= MDMaster.fclass, out_layer = mdMasterLayer, where_clause=where_clause)

            # Copy the (job-filtered) master boundary into the local fGDB once.
            local_fgdb_MDMasterFC = os.path.join(local_fgdb_path, MasterMDName)
            if not arcpy.Exists(local_fgdb_MDMasterFC):
                arcpy.FeatureClassToFeatureClass_conversion (in_features=MDMaster.fclass, out_path=local_fgdb_path, out_name=MasterMDName, where_clause=where_clause)

            CreateMasterMosaicDataset(local_fgdb_path, md_name, local_fgdb_MDMasterFC, MasterMDCellSize_Meters)
    else:
        arcpy.AddError("MD Master parent path doesn't exist '{}'. Cannot continue.".format(parent_path))
def getRasterProperties(rasterObjectPath, newRow):
    """Append each CMDRConfig raster property value to ``newRow``.

    Iterates CMDRConfig.Raster_PropertyTypes in order, appending the tool's
    output (or None when the property cannot be read) so that ``newRow`` stays
    aligned with the CMDR field list.

    Returns:
        The CELLSIZEX value if encountered, otherwise 0.
    """
    cellSize = 0
    for PropertyType in CMDRConfig.Raster_PropertyTypes:
        try:
            propValue = arcpy.GetRasterProperties_management(
                rasterObjectPath, PropertyType)
            if propValue is not None:
                propValue = propValue[0]  # unwrap the arcpy Result object
            newRow.append(propValue)
            Utility.addToolMessages()
            if PropertyType == "CELLSIZEX":
                cellSize = newRow[-1]
        except Exception:
            # Was a bare `except:` — narrowed so SystemExit/KeyboardInterrupt
            # aren't swallowed. Some properties legitimately fail for some
            # rasters; record None to keep the row aligned and continue.
            Utility.addToolMessages()  # surface the error message
            newRow.append(None)
    return cellSize
def addStandardMosaicDatasetFields(md_path):
    """Add the standard indexed metadata fields to a mosaic dataset and turn
    on UTC editor tracking.

    Fields added (in order): Project ID (text 100), Project Date (date),
    Raster Path (text 800), Project Source (text 20); each gets an alias
    derived from its name and an attribute index.
    """
    arcpy.AddMessage("Adding fields to Mosaic Dataset '{}'".format(md_path))

    # (adder, field constant, extra kwargs) — order matters: it fixes the
    # field order on the dataset.
    field_specs = [
        (Utility.addAndCalcFieldText, CMDRConfig.PROJECT_ID, {"field_length": "100"}),
        (Utility.addAndCalcFieldDate, CMDRConfig.PROJECT_DATE, {}),
        (Utility.addAndCalcFieldText, CMDRConfig.RASTER_PATH, {"field_length": "800"}),
        (Utility.addAndCalcFieldText, CMDRConfig.PROJECT_SOURCE, {"field_length": "20"}),
    ]
    for add_field, field_name, extras in field_specs:
        add_field(dataset_path=md_path,
                  field_name=field_name,
                  field_alias=field_name.replace("_", " "),
                  add_index=True,
                  **extras)

    # Track who created/edited rows and when, stored in UTC.
    arcpy.EnableEditorTracking_management(
        in_dataset=md_path,
        creator_field="created_user",
        creation_date_field="created_date",
        last_editor_field="last_edited_user",
        last_edit_date_field="last_edited_date",
        add_fields="ADD_FIELDS",
        record_dates_in="UTC")
    Utility.addToolMessages()
def addMasterBoundary(master_fgdb_path, master_md_name, local_fgdb_MDMasterFC):
    """Seed the master mosaic dataset's boundary and replace it with the
    supplied master boundary feature class.

    A dummy triangle is inserted into the AMD_*_BND feature class first,
    because ImportMosaicDatasetGeometry cannot replace an *empty* boundary.
    The source boundary must be in Web Mercator; otherwise an error is raised
    and nothing is changed.
    """
    # This is necessary, since the ImportMosaicDatasetGeometry tool won't replace an empty boundary
    # MasterBoundaryFC = RasterConfig.MasterBoundaryFC
    if arcpy.Exists(local_fgdb_MDMasterFC):
        arcpy.AddMessage("MD Master Boundary Feature Class: {0}".format(local_fgdb_MDMasterFC))
        descBound = arcpy.Describe(local_fgdb_MDMasterFC)
        SpatRefBound = descBound.SpatialReference.name
        arcpy.AddMessage("Master Boundary Feature Class has spatial reference: {0}".format(SpatRefBound))
        if SpatRefBound == "WGS_1984_Web_Mercator_Auxiliary_Sphere":
            # record count of the specified boundary feature class should be 1
            # result = arcpy.GetCount_management(local_fgdb_MDMasterFC)
            # Utility.addToolMessages()
            # countRows = int(result.getOutput(0))
            #
            # @TODO Not sure we need this check
            # if countRows == 1:
            # AMD_<name>_BND is the mosaic dataset's internal boundary table.
            fc = master_fgdb_path + r"\AMD_" + master_md_name + r"_BND"
            arcpy.AddMessage("Master Mosaic Boundary feature class name: {0}".format(fc))
            fields = ['SHAPE@']
            # Placeholder triangle; its only purpose is to make the boundary
            # non-empty so the import below can replace it.
            array = arcpy.Array([arcpy.Point(0, 0), arcpy.Point(1, 0), arcpy.Point(1, 1)])
            polygon = arcpy.Polygon(array)
            with arcpy.da.InsertCursor(fc, fields) as cursor:  # @UndefinedVariable
                cursor.insertRow([polygon])
            del cursor
            # Replace the boundary row (just created) with the row in MasterBoundaryFC
            master_md_path = os.path.join(master_fgdb_path, master_md_name)
            arcpy.AddMessage("Importing boundary to Master GDB...")
            arcpy.ImportMosaicDatasetGeometry_management(master_md_path, target_featureclass_type="BOUNDARY", target_join_field="OBJECTID", input_featureclass=local_fgdb_MDMasterFC, input_join_field="OBJECTID")
            Utility.addToolMessages()
        # else:
        #    arcpy.AddError("\nExiting: {0} should contain only 1 row.".format(local_fgdb_MDMasterFC))
        else:
            arcpy.AddError("Spatial reference of the supplied Master Boundary is not Web Mercator ('{}') Cannot continue.".format(SpatRefBound))
    else:
        arcpy.AddWarning("Master Boundary feature class not found: {0}. Continuing without it".format(local_fgdb_MDMasterFC))
def checkRecordCount(in_path):
    """Repair geometry on a feature class and return its record count.

    Repair failures and count failures are reported as warnings rather than
    raised (best-effort behavior is preserved). Returns the count as an int,
    0 when it could not be determined.
    """
    record_count = 0
    try:
        arcpy.RepairGeometry_management(in_features=in_path, delete_null="DELETE_NULL")
        Utility.addToolMessages()
    except Exception:
        # Narrowed from a bare `except:`; best-effort repair, warn and go on.
        Utility.addToolMessages()
        arcpy.AddWarning("\tWARNING: Failed to repair geometry of {}".format(in_path))
    try:
        # BUG FIX: GetCount_management(...)[0] yields a *string*, so the
        # `record_count <= 0` test below could never fire (and is a TypeError
        # on Python 3). Convert to int so the comparison is numeric.
        record_count = int(arcpy.GetCount_management(in_path)[0])
        arcpy.AddMessage("\t{} has {} records".format(in_path, record_count))
    except Exception:
        Utility.addToolMessages()
        arcpy.AddWarning("\tWARNING: Failed to count records in {}".format(in_path))
    if record_count <= 0:
        arcpy.AddWarning("\tWARNING: NO RECORDS IN {}".format(in_path))
    return record_count
def CreateMasterMosaicDataset(master_fgdb_path, master_md_name, MDMasterFC_path, masterCellSize_meters):
    """Create and fully configure one master mosaic dataset in an existing fGDB.

    Creates a 1-band 32-bit-float Web Mercator mosaic dataset, seeds its
    boundary from MDMasterFC_path, adds the standard metadata fields, then
    temporarily adds a placeholder raster so SetMosaicDatasetProperties /
    SetRasterProperties can be applied, and finally removes that raster.
    Does nothing (warns) if the mosaic dataset already exists; errors if the
    fGDB is missing.

    NOTE: ~40 lines of commented-out legacy AddField/AddIndex and
    SetMosaicDatasetProperties example calls were removed; the live
    configuration below is unchanged.
    """
    Utility.printArguments(["master_fgdb_path", "master_md_name", "MDMasterFC_path", "masterCellSize_meters"],
                           [master_fgdb_path, master_md_name, MDMasterFC_path, masterCellSize_meters],
                           "B02 CreateMasterMosaicDataset")

    # Ensure the Master gdb exists
    if os.path.exists(master_fgdb_path):
        master_md_path = os.path.join(master_fgdb_path, master_md_name)
        arcpy.AddMessage("Full Master Mosaic Name: {0}".format(master_md_path))

        if not arcpy.Exists(master_md_path):
            SpatRefMaster = RasterConfig.SpatRef_WebMercator

            # Create the Master Mosaic Dataset
            arcpy.CreateMosaicDataset_management(master_fgdb_path,
                                                 master_md_name,
                                                 coordinate_system=SpatRefMaster,
                                                 num_bands="1",
                                                 pixel_type="32_BIT_FLOAT",
                                                 product_definition="NONE",
                                                 product_band_definitions="#")
            Utility.addToolMessages()

            # If a boundary is specified (it is optional)...
            # Write one record to the boundary so it can be subsequently
            # replaced by the Import Mosaic Dataset Geometry tool
            addMasterBoundary(master_fgdb_path, master_md_name, MDMasterFC_path)

            Raster.addStandardMosaicDatasetFields(md_path=master_md_path)

            transmissionFields = CMDRConfig.TRANSMISSION_FIELDS
            arcpy.AddMessage("transmissionFields: {0}".format(transmissionFields))

            # Add a temporary placeholder raster so dataset-level properties
            # can be set; it is removed again at the end of this branch.
            arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset=master_md_path,
                                                       raster_type="Raster Dataset",
                                                       input_path=RasterConfig.MasterTempRaster,
                                                       update_cellsize_ranges="UPDATE_CELL_SIZES",
                                                       update_boundary="UPDATE_BOUNDARY",
                                                       update_overviews="NO_OVERVIEWS",
                                                       maximum_pyramid_levels="",
                                                       maximum_cell_size="0",
                                                       minimum_dimension="1500",
                                                       spatial_reference="",
                                                       filter="#",
                                                       sub_folder="SUBFOLDERS",
                                                       duplicate_items_action="ALLOW_DUPLICATES",
                                                       build_pyramids="NO_PYRAMIDS",
                                                       calculate_statistics="NO_STATISTICS",
                                                       build_thumbnails="NO_THUMBNAILS",
                                                       operation_description="#",
                                                       force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE")
            Utility.addToolMessages()

            # Non-default choices: LERC compression, ByAttribute mosaic
            # method ordered on Project Date with a far-future order_base,
            # ELEVATION source type, 25000x25000 max image size, and the
            # CMDR transmission field list.
            arcpy.SetMosaicDatasetProperties_management(master_md_path,
                                                        rows_maximum_imagesize="25000",
                                                        columns_maximum_imagesize="25000",
                                                        allowed_compressions="LERC;JPEG;None;LZ77",
                                                        default_compression_type="LERC",
                                                        JPEG_quality="75",
                                                        LERC_Tolerance="0.001",
                                                        resampling_type="BILINEAR",
                                                        clip_to_footprints="NOT_CLIP",
                                                        footprints_may_contain_nodata="FOOTPRINTS_MAY_CONTAIN_NODATA",
                                                        clip_to_boundary="NOT_CLIP",
                                                        color_correction="NOT_APPLY",
                                                        allowed_mensuration_capabilities="#",
                                                        default_mensuration_capabilities="NONE",
                                                        allowed_mosaic_methods="NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;Seamline;None",
                                                        default_mosaic_method="ByAttribute",
                                                        order_field=CMDRConfig.PROJECT_DATE,
                                                        order_base="3000",
                                                        sorting_order="ASCENDING",
                                                        mosaic_operator="FIRST",
                                                        blend_width="0",
                                                        view_point_x="600",
                                                        view_point_y="300",
                                                        max_num_per_mosaic="40",
                                                        cell_size_tolerance="0.8",
                                                        cell_size="{0} {0}".format(masterCellSize_meters),
                                                        metadata_level="BASIC",
                                                        transmission_fields=transmissionFields,
                                                        use_time="DISABLED",
                                                        start_time_field=CMDRConfig.PROJECT_DATE,
                                                        end_time_field="#",
                                                        time_format="#",
                                                        geographic_transform="#",
                                                        max_num_of_download_items="40",
                                                        max_num_of_records_returned="2000",
                                                        data_source_type="ELEVATION",
                                                        minimum_pixel_contribution="1",
                                                        processing_templates="None",
                                                        default_processing_template="None")
            Utility.addToolMessages()

            # Set elevation statistics and the default nodata value.
            # (Original comment said "Min = -300"; the live call uses min 0.)
            arcpy.SetRasterProperties_management(master_md_path,
                                                 data_type="ELEVATION",
                                                 statistics="1 0 2000 # #",
                                                 stats_file="#",
                                                 nodata="1 {}".format(RasterConfig.NODATA_DEFAULT))
            Utility.addToolMessages()

            # Remove the placeholder raster now that properties are set.
            arcpy.RemoveRastersFromMosaicDataset_management(in_mosaic_dataset=master_md_path,
                                                            where_clause="1=1",
                                                            update_boundary="UPDATE_BOUNDARY",
                                                            mark_overviews_items="MARK_OVERVIEW_ITEMS",
                                                            delete_overview_images="DELETE_OVERVIEW_IMAGES",
                                                            delete_item_cache="DELETE_ITEM_CACHE",
                                                            remove_items="REMOVE_MOSAICDATASET_ITEMS",
                                                            update_cellsize_ranges="UPDATE_CELL_SIZES")
            Utility.addToolMessages()
        else:
            arcpy.AddWarning("Master Mosaic Dataset already exists: {0}. Cannot continue".format(master_md_path))
    else:
        arcpy.AddError("Master Geodatabase doesn't exist {0}".format(master_fgdb_path))

    arcpy.AddMessage("Operation complete")
def processJob(ProjectJob, project, createQARasters=False, createMissingRasters=True, overrideBorderPath=None):
    """Run the A04 LAS analysis workflow for one project.

    Validates the delivered LAS files and their spatial reference, builds the
    per-file statistics and QA rasters, creates the project LAS dataset,
    derives the raster boundary/footprints, and adds the QA mosaics to the
    project MXD.

    Raises:
        Exception: when no valid spatial reference can be determined.

    Returns:
        (las_qainfo, lasd_boundary) — lasd_boundary is None when processing
        stopped before boundary creation.
    """
    aaa = datetime.now()
    a = aaa
    lasd_boundary = None

    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectUID = ProjectJob.getUID(project)
    target_path = ProjectFolder.derived.path

    # Get the LAS QA Info to determine if it is classified or not
    las_qainfo = getLasQAInfo(ProjectFolder)
    if las_qainfo.num_las_files <= 0:
        arcpy.AddError(
            "Project with Job ID {} has no .las files in DELIVERED LAS_CLASSIFIED or LAS_UNCLASSIFIED folders, CANNOT CONTINUE."
            .format(ProjectFolder.projectId))
    else:
        ProjectFolders.createAnalysisFolders(target_path, las_qainfo.isClassified)

        # Make the STAT folder if it doesn't already exist
        stat_out_folder = ProjectFolder.derived.stats_path
        if not os.path.exists(stat_out_folder):
            os.makedirs(stat_out_folder)
            arcpy.AddMessage(
                "created Derived STAT folder '{}'".format(stat_out_folder))
        else:
            arcpy.AddMessage("STAT folder '{}'".format(stat_out_folder))

        # Make the scratch file GDB for the project
        if not os.path.exists(las_qainfo.filegdb_path):
            arcpy.CreateFileGDB_management(target_path, las_qainfo.filegdb_name)
            Utility.addToolMessages()
        else:
            arcpy.AddMessage(
                "Derived fGDB sand box already exists. Using '{}'".format(
                    las_qainfo.filegdb_path))

        # Determine the spatial reference from PRJ/LAS; the three branches
        # below null it out (with operator guidance) when it is missing,
        # non-standard, or unknown.
        las_qainfo.lasd_spatial_ref = checkSpatialOnLas(
            las_qainfo.las_directory, target_path, createQARasters,
            las_qainfo.isClassified)

        if las_qainfo.lasd_spatial_ref is None:
            arcpy.AddError(
                "ERROR: Neither spatial reference in PRJ or LAS files are valid CANNOT CONTINUE."
            )
            arcpy.AddError(
                "ERROR: Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
        elif not las_qainfo.isValidSpatialReference():
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "ERROR: Spatial Reference for the las files is not standard (see above)"
            )
            arcpy.AddError(
                "ERROR: Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
            try:
                arcpy.AddError("ERROR: '{}'".format(
                    Utility.getSpatialReferenceInfo(
                        las_qainfo.lasd_spatial_ref)))
            except:
                pass
        elif las_qainfo.isUnknownSpatialReference():
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "ERROR: Please provide a projection file (.prj) that provides a valid transformation in the LAS directory."
            )
            arcpy.AddError(
                "ERROR: Please create a projection file (.prj) in the LAS folder using the '3D Analyst Tools/Conversion/From File/Point File Information' tool."
            )
            arcpy.AddError(
                "ERROR: Spatial Reference for the las files is not standard")
            try:
                arcpy.AddError("ERROR: '{}'".format(
                    Utility.getSpatialReferenceInfo(
                        las_qainfo.lasd_spatial_ref)))
            except:
                pass

        if las_qainfo.lasd_spatial_ref is None:
            raise Exception(
                "Error: Spatial Reference is invalid, unknown, or not specified."
            )
        else:
            # (Removed-by-comment legacy code: earlier versions looked for a
            # .prj next to the las files here and let it override the LAS SR.)
            # prj_Count, prj_File = Utility.fileCounter(las_qainfo.las_directory, '.prj')
            # if prj_Count > 0 and prj_File is not None and len(str(prj_File)) > 0:
            #     prj_spatial_ref = os.path.join(las_qainfo.las_directory, prj_File)
            # if prj_Count > 0:
            #     las_qainfo.setProjectionFile(prj_File)
            #     las_spatial_ref = os.path.join(las_qainfo.las_directory, prj_File)
            #     arcpy.AddMessage("Found a projection file with the las files, OVERRIDE LAS SR (if set) '{}'".format(las_spatial_ref))
            #     arcpy.AddMessage(Utility.getSpatialReferenceInfo(las_qainfo.getSpatialReference()))
            # else:
            #     arcpy.AddMessage("Using projection (coordinate system) from las files if available.")

            fileList = getLasFileProcessList(las_qainfo.las_directory,
                                             target_path, createQARasters,
                                             las_qainfo.isClassified)
            createLasStatistics(fileList, target_path,
                                las_qainfo.lasd_spatial_ref,
                                las_qainfo.isClassified, createQARasters,
                                createMissingRasters, overrideBorderPath)

            # Create the project's las dataset. Don't do this before you
            # validated that each .las file has a .lasx
            if os.path.exists(las_qainfo.las_dataset_path):
                arcpy.AddMessage("Using existing LAS Dataset {}".format(
                    las_qainfo.las_dataset_path))
                # arcpy.AddMessage("Deleting existing LAS Dataset {}".format(las_qainfo.las_dataset_path))
                # arcpy.Delete_management(las_qainfo.las_dataset_path)
            else:
                a = datetime.now()
                # note: don't use method in A04_B because we don't want to
                # compute statistics this time
                arcpy.CreateLasDataset_management(
                    input=las_qainfo.las_directory,
                    out_las_dataset=las_qainfo.las_dataset_path,
                    folder_recursion="RECURSION",
                    in_surface_constraints="",
                    spatial_reference=las_qainfo.lasd_spatial_ref,
                    compute_stats="NO_COMPUTE_STATS",
                    relative_paths="RELATIVE_PATHS",
                    create_las_prj="FILES_MISSING_PROJECTION")
                Utility.addToolMessages()
                a = doTime(
                    a, "Created LAS Dataset '{}'".format(
                        las_qainfo.las_dataset_path))

            desc = arcpy.Describe(las_qainfo.las_dataset_path)
            # las_qainfo.lasd_spatial_ref = desc.SpatialReference
            las_qainfo.LASDatasetPointCount = desc.pointCount
            las_qainfo.LASDatasetFileCount = desc.fileCount
            arcpy.AddMessage(
                "LASDatasetPointCount {} and LASDatasetFileCount {}".format(
                    desc.pointCount, desc.fileCount))

            lasd_boundary, las_footprint = A04_C_ConsolidateLASInfo.createRasterBoundaryAndFootprints(
                las_qainfo.filegdb_path, target_path, ProjectID,
                ProjectFolder.path, ProjectUID)

            mxd = createMXD(las_qainfo, target_path, ProjectID)

            # if createQARasters:
            arcpy.AddMessage("Creating QA raster mosaics")
            mosaics = A04_C_ConsolidateLASInfo.createQARasterMosaics(
                las_qainfo.isClassified, las_qainfo.filegdb_path,
                las_qainfo.lasd_spatial_ref, target_path, mxd, las_footprint,
                lasd_boundary)

            # Best-effort: add each QA mosaic to the project MXD. The nested
            # try/excepts deliberately swallow failures (e.g. the MXD is open
            # in ArcMap) so one bad mosaic doesn't abort the whole job.
            if mxd is not None:
                a = datetime.now()
                try:
                    mxd_path = mxd.filePath
                    for [md_path, md_name] in mosaics:
                        arcpy.AddMessage(
                            "Adding QA raster mosaic {} to mxd {}".format(
                                md_path, mxd_path))
                        try:
                            if not arcpy.Exists(md_path):
                                a = doTime(
                                    a,
                                    "\tMD doesn't exist {}. Can't add to MXD {}. Is it open?"
                                    .format(md_path, mxd_path))
                            else:
                                df = mxd.activeDataFrame
                                if isLayerExist(mxd, df, md_name):
                                    a = doTime(
                                        a, "\t MD {} already exists in MXD {}".
                                        format(md_name, mxd_path))
                                else:
                                    if len(str(md_name)) > 0:
                                        try:
                                            lyr_md = arcpy.MakeMosaicLayer_management(
                                                in_mosaic_dataset=md_path,
                                                out_mosaic_layer=md_name
                                            ).getOutput(0)
                                            df = mxd.activeDataFrame
                                            arcpy.mapping.AddLayer(
                                                df, lyr_md, 'BOTTOM')
                                            # lyr_md.visible = False
                                            mxd.save()
                                            a = doTime(
                                                a,
                                                "\tAdded MD {} to MXD {} as {}"
                                                .format(
                                                    md_name, mxd_path, lyr_md))
                                        except:
                                            a = doTime(
                                                a,
                                                "\tfailed to add MD {} to MXD {}. Is it open?"
                                                .format(md_path, mxd_path))
                        except:
                            try:
                                a = doTime(
                                    a,
                                    "\tfailed to add MD to MXD {}. Is it open?"
                                    .format(mxd_path))
                            except:
                                pass
                    mxd.save()
                except:
                    try:
                        a = doTime(
                            a, "\tfailed to save MXD {}. Is it open?".format(
                                mxd_path))
                    except:
                        pass

    bbb = datetime.now()
    td = (bbb - aaa).total_seconds()
    arcpy.AddMessage("Completed {} in {}".format(las_qainfo.las_dataset_path, td))

    return las_qainfo, lasd_boundary
def PublishMDMasterMosaicDataset(jobID, serverFunctionPath, update=False, runCount=0):
    """
    Publish (or update) the master mosaic datasets for a WMX job as ArcGIS Server
    image services.

    For each derived-product mosaic dataset (DTM, DSM, DLM, DHM, DCM, INT) found
    under the MD master path, this builds an image service definition draft,
    analyzes it, stages it to a .sd file, and uploads it to the server described
    by the MD master CMDR record.

    jobID              -- WMX job id used to look up the MDMaster CMDR record.
    serverFunctionPath -- folder of server-side raster functions; may be None.
    update             -- when True, skip contour-master publishing and treat the
                          upload as an update of an existing service.
    runCount           -- recursion guard: on upload failure with runCount < 1 the
                          function re-invokes itself once with update=True.

    Side effects only (publishes services, writes .sddraft/.sd files); no return value.
    """
    Utility.printArguments(["jobID", "serverFunctionPath"],
                           [jobID, serverFunctionPath],
                           "B03 PublishMDMasterMosaicDataset")
    # startupType is passed through to UploadServiceDefinition_server unchanged.
    startupType = None
    # serverFunctionPath = Raster.getServerRasterFunctionsPath(jobID)
    ssFunctions = None
    if serverFunctionPath is not None:
        ssFunctions = Raster.getServerSideFunctions(serverFunctionPath)
        # ssFunctions = Raster.getServerSideFunctions()
    # NOTE(review): if serverFunctionPath is None, ssFunctions stays None and the
    # ssFunctions.split(";") call below will raise AttributeError — confirm callers
    # always pass a non-None path.
    Utility.setWMXJobDataAsEnvironmentWorkspace(jobID)
    MDMaster = CMDR.MDMaster()
    mdMaster_row = MDMaster.getMDMaster(jobID)
    if mdMaster_row is not None:
        mdMaster_path = MDMaster.getMDPath(
            mdMaster_row
        )  # ProjectFolders.getProjectFolderFromDBRow(ProjectJob, project)
        MasterMDName = MDMaster.getMDName(
            mdMaster_row)  # ProjectJob.getProjectID(project)
        serverConnectionFile = MDMaster.getMDConFilePath(mdMaster_row)
        folderName = MDMaster.getMDServiceFolder(mdMaster_row)
        cellSize_m = MDMaster.getMDCellSize(mdMaster_row)
        Utility.printArguments([
            "folderName", "serverConnectionFile", "MasterMDName",
            "mdMaster_path", "cellSize_m"
        ], [
            folderName, serverConnectionFile, MasterMDName, mdMaster_path,
            cellSize_m
        ], "PublishMDMasterMosaicDataset")
        if not update:
            # Fresh publish: the contour master service is published first.
            B04PublishContourMaster.publishContourMaster(
                mdMaster_path, serverConnectionFile, MasterMDName, folderName)
        # ProjectState = MDMaster#ProjectJob.getState(project)
        # ProjectYear = ProjectJob.getYear(project)
        # ProjectAlias = ProjectJob.getAlias(project)
        # ProjectAliasClean = ProjectJob.getAliasClean(project)
        # @TODO Add more info here!
        serviceDescription = "Elevation master service '{}'.".format(
            MasterMDName)
        serviceTags = ",".join(
            [MasterMDName, "Master", "Elevation", "Mosaic", "Dataset"])
        md_list = [
            FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
            FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT
        ]
        for md_name in md_list:
            # Each derived product lives in its own "<MasterMDName>_<md_name>.gdb".
            serviceName = "{}_{}".format(MasterMDName, md_name)
            local_fgdb_name = "{}.gdb".format(serviceName)
            arcpy.AddMessage("local_fgdb_name '{}'".format(local_fgdb_name))
            local_fgdb_path = os.path.join(mdMaster_path, local_fgdb_name)
            arcpy.AddMessage("local_fgdb_path '{}'".format(local_fgdb_path))
            project_md_path = os.path.join(local_fgdb_path, md_name)
            arcpy.AddMessage("MD path '{}'".format(project_md_path))
            if arcpy.Exists(project_md_path):
                # Get the units of the Mosaic Dataset
                descMD = arcpy.Describe(project_md_path)
                SpatRefMD = descMD.SpatialReference
                SpatRefUnitsMD = SpatRefMD.linearUnitName
                SpatRefNameMD = SpatRefMD.name
                # Retrieve some properties from the Mosaic Dataset to place in the tags field
                cellsizeResult = arcpy.GetRasterProperties_management(
                    project_md_path, property_type="CELLSIZEX", band_index="")
                Utility.addToolMessages()
                cellsizeX = cellsizeResult.getOutput(0)
                # Informational only: a cell-size mismatch does not stop publishing.
                if cellsizeX != cellSize_m:
                    arcpy.AddMessage(
                        "mosaic dataset cell size {} != requested cell size {}"
                        .format(cellsizeX, cellSize_m))
                arcpy.AddMessage(
                    "Spatial Reference name of Mosaic Dataset: {0}".format(
                        SpatRefNameMD))
                arcpy.AddMessage(
                    "Spatial Reference X,Y Units of Mosaic Dataset: {0}".
                    format(SpatRefUnitsMD))
                # NOTE: serviceDescription/serviceTags are re-assigned from their own
                # previous values, so they accumulate across loop iterations.
                serviceDescription = "{} {} horizontal spatial reference is {} and cell size is {} {}".format(
                    md_name, serviceDescription, SpatRefNameMD, cellsizeX,
                    SpatRefUnitsMD)
                serviceTags = ",".join([
                    serviceTags,
                    str(cellsizeX), SpatRefUnitsMD, SpatRefNameMD
                ])
                arcpy.AddMessage("Service Tags: {0}".format(serviceTags))
                arcpy.AddMessage(
                    "Service description: {0}".format(serviceDescription))
                # Look for RasterPath in the list of allowed fields, and if found, don't publish
                # the mosaic dataset. Exposing the contents of RasterPath could compromise the
                # security of the Image Service.
                # allowedFieldListMD = descMD.AllowedFields
                # arcpy.AddMessage("AllowedFields in MD Properties: {0}".format(allowedFieldListMD))
                # if True or "RASTERPATH;" not in allowedFieldListMD.upper():
                # Create a list to manipulate server-side functions
                # Bring Hillshade to the top of the list so it is default
                ssFunctionsLst = ssFunctions.split(";")
                if len(ssFunctionsLst) > 0:
                    foundHillshade = False
                    for i, s in enumerate(ssFunctionsLst):
                        if 'HILLSHADE' in s.upper():
                            arcpy.AddMessage(
                                "Will re-order SS Functions so Hillshade is default"
                            )
                            foundHillshade = True
                            break
                    # if Hillshade is found then re-order the list
                    # (relies on 'i' keeping the break index from the loop above)
                    if foundHillshade:
                        ssFunctionsLst.insert(0, ssFunctionsLst.pop(i))
                        arcpy.AddMessage(
                            "Re-ordered SS Functions so Hillshade is default")
                # convert the list of server-side functions into a comma delimited string
                ssFunctionsList = ",".join(ssFunctionsLst)
                arcpy.AddMessage(
                    "Server-side Functions: {0}\n".format(ssFunctionsList))
                # Create image service definition draft
                arcpy.AddMessage(
                    "Creating image service definition draft file...")
                # The .sddraft is written two levels above the MD master path.
                wsPath = os.path.dirname(os.path.dirname(mdMaster_path))
                sddraftPath = os.path.join(wsPath, serviceName + ".sddraft")
                arcpy.Delete_management(sddraftPath)
                arcpy.CreateImageSDDraft(project_md_path,
                                         sddraftPath,
                                         serviceName,
                                         "ARCGIS_SERVER",
                                         connection_file_path=None,
                                         copy_data_to_server=False,
                                         folder_name=folderName,
                                         summary=serviceDescription,
                                         tags=serviceTags)
                # Edit the service definition draft if user specified server-side functions
                # or if user wants to enable download on the Image Service
                updateSDServerSideFunctions(ssFunctionsLst, ssFunctionsList,
                                            sddraftPath, update)
                # @ TODO Include this to overwrite a service
                # doc = DOM.parse(sddraftPath)
                # tagsType = doc.getElementsByTagName('Type')
                # for tagType in tagsType:
                #     if tagType.parentNode.tagName == 'SVCManifest':
                #         if tagType.hasChildNodes():
                #             tagType.firstChild.data = "esriServiceDefinitionType_Replacement"
                #
                # tagsState = doc.getElementsByTagName('State')
                # for tagState in tagsState:
                #     if tagState.parentNode.tagName == 'SVCManifest':
                #         if tagState.hasChildNodes():
                #             tagState.firstChild.data = "esriSDState_Published"
                # Analyze service definition draft
                arcpy.AddMessage("Analyzing service definition draft file...")
                analysis = arcpy.mapping.AnalyzeForSD(sddraftPath)
                for key in ('messages', 'warnings', 'errors'):
                    arcpy.AddMessage("----" + key.upper() + "---")
                    analysis_vars = analysis[key]
                    # Python 2 idiom (iteritems); keys are (message, code) pairs.
                    for ((message, code), data) in analysis_vars.iteritems():  # @UnusedVariable
                        msg = (" ", message, " (CODE %i)" % code)
                        arcpy.AddMessage("".join(msg))
                if analysis['errors'] == {}:
                    arcpy.AddMessage(
                        "Staging and publishing service definition...")
                    # StageService
                    arcpy.AddMessage("Staging sddraft file to sd file")
                    sdPath = sddraftPath.replace(".sddraft", ".sd")
                    arcpy.Delete_management(sdPath)
                    arcpy.StageService_server(sddraftPath, sdPath)
                    try:
                        # UploadServiceDefinition
                        if not update:
                            arcpy.AddMessage(
                                "Publishing mosaic data set as image service.")
                        else:
                            arcpy.AddMessage(
                                "Updating mosaic data set as image service.")
                        arcpy.UploadServiceDefinition_server(
                            sdPath, serverConnectionFile, "#", "#", "#", "#",
                            startupType)
                    except Exception as e:
                        # First failure: assume the service already exists and retry
                        # the whole publish once in update mode, then stop this loop.
                        if runCount < 1:
                            arcpy.AddMessage(
                                "image service already exists, trying to update instead."
                            )
                            PublishMDMasterMosaicDataset(
                                jobID, serverFunctionPath, True, 1)
                            break
                        else:
                            raise e
                else:
                    # if the sddraft analysis contained errors, display them
                    arcpy.AddError(analysis['errors'])
                # else:
                #     arcpy.AddError("Exiting: Found 'RasterPath' in list of allowed MD fields. Please remove this field from the list before publishing.")
                #     arcpy.AddError("   To remove RasterPath from the list, go to Mosaic Dataset Properties, Defaults tab, Allowed Fields...")
            else:
                arcpy.AddWarning(
                    "Project mosaic dataset not found '{}'.".format(
                        project_md_path))
        # FOR LOOP
        #
        # else:
        #     arcpy.AddError("Project file geodatabase not found '{}'. Please add this before proceeding.".format(ProjectMDs_fgdb_path))
    else:
        arcpy.AddError(
            "MDMaster record not found in the CMDR. Please add this to the CMDR before proceeding."
        )
    arcpy.AddMessage("Operation complete")
def createQARasterMosaics(isClassified, gdb_path, spatial_reference, target_folder, mxd, footprint_path=None, lasd_boundary_path=None):
    """
    Build the QA raster mosaic datasets for a project and return them.

    For every statistics method in STATS_METHODS (crossed with DATASET_NAMES when
    the data is classified), creates a mosaic dataset from the matching raster
    folder via createQARasterMosaicDataset. Simplified copies of the footprint
    and boundary feature classes are built lazily (once, on first use) and are
    deleted again before returning. Finally a CANOPY_DENSITY mosaic is created as
    a referenced MD over POINT_COUNT_ALL.

    isClassified       -- when False a single generic (empty) dataset name is used.
    gdb_path           -- file geodatabase receiving the mosaic datasets.
    spatial_reference  -- spatial reference for the mosaics.
    target_folder      -- root folder containing the QA raster folders.
    mxd                -- map document passed through to createQARasterMosaicDataset.
    footprint_path     -- optional footprint feature class to simplify.
    lasd_boundary_path -- optional boundary feature class to simplify.

    Returns: list of mosaic entries; entries from createQARasterMosaicDataset plus
    one [dhm_md_path, CANOPY_DENSITY] pair.
    """
    mosaics = []
    # Simplified geometries are created at most once across all loop iterations.
    simple_footprint_path = None
    simple_lasd_boundary_path = None
    stats_methods = STATS_METHODS
    for method in stats_methods:
        arcpy.AddMessage("Creating {} MDS".format(method))
        for dataset_name in DATASET_NAMES:
            name = dataset_name
            if not isClassified:
                # Using a generic name for non-classified data
                name = ""
            md_name = method
            if len(name) > 0:
                md_name = "{}{}".format(method, name)
            # name[1:] drops the dataset name's leading character to form the
            # folder name (empty string when name is "").
            input_folder = os.path.join(target_folder, method, name[1:])
            arcpy.AddMessage("Creating {} MD from {}".format(md_name, input_folder))
            try:
                if simple_footprint_path is None:
                    simple_footprint_path = "{}_Simple".format(footprint_path)
                    arcpy.SimplifyPolygon_cartography(in_features=footprint_path,
                                                      out_feature_class=simple_footprint_path,
                                                      algorithm="POINT_REMOVE",
                                                      tolerance=Raster.boundary_interval,
                                                      minimum_area="0 SquareMeters",
                                                      error_option="RESOLVE_ERRORS",
                                                      collapsed_point_option="NO_KEEP")
                    Utility.addToolMessages()
                    deleteFields(simple_footprint_path)
                    #try:
                    #    arcpy.DeleteField_management(in_table=simple_footprint_path, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
                    #except:
                    #    pass
                if simple_lasd_boundary_path is None:
                    simple_lasd_boundary_path = "{}_Simple".format(lasd_boundary_path)
                    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_path,
                                                      out_feature_class=simple_lasd_boundary_path,
                                                      algorithm="POINT_REMOVE",
                                                      tolerance=Raster.boundary_interval,
                                                      minimum_area="0 SquareMeters",
                                                      error_option="RESOLVE_ERRORS",
                                                      collapsed_point_option="NO_KEEP")
                    Utility.addToolMessages()
                    deleteFields(simple_lasd_boundary_path)
                    #try:
                    #    arcpy.DeleteField_management(in_table=simple_lasd_boundary_path, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
                    #except:
                    #    pass
            except:
                # Best-effort: mosaic creation proceeds even without simplified shapes.
                arcpy.AddWarning("Failed to create simplified footprints and boundaries in '{}'".format(gdb_path))
            qa_md = createQARasterMosaicDataset(md_name, gdb_path,
                                                spatial_reference, input_folder, mxd,
                                                simple_footprint_path, simple_lasd_boundary_path)
            if qa_md is not None:
                mosaics.append(qa_md)
    md_name = CANOPY_DENSITY
    dhm_md_path = os.path.join(gdb_path, md_name)
    # NOTE(review): this entry is appended before the existence check / creation
    # attempt below, so it is returned even if creation fails — confirm intended.
    mosaics.append([dhm_md_path, md_name])
    if arcpy.Exists(dhm_md_path):
        arcpy.AddMessage("{} already exists.".format(md_name))
    else:
        try:
            vert_cs_name, vert_unit_name = Utility.getVertCSInfo(spatial_reference)  # @UnusedVariable
            # No need to update boundary and footprints since it will inherit from the original
            pc_all_md_path = os.path.join(gdb_path, "POINT_COUNT_ALL")
            createReferenceddMosaicDataset(pc_all_md_path, dhm_md_path, spatial_reference, vert_unit_name)
        except:
            arcpy.AddMessage("Failed to create {}".format(dhm_md_path))
    # Drop the temporary simplified feature classes.
    deleteFileIfExists(simple_footprint_path, True)
    deleteFileIfExists(simple_lasd_boundary_path, True)
    return mosaics
def createBoundaryFeatureClass(raster_footprint, target_raster_boundary, statistics_fields="", alter_field_infos=None):
    """
    Derive a clean project boundary polygon from the per-raster footprints.

    Pipeline (each stage writes a numbered temporary "<target>N" feature class and
    deletes its predecessor):
      0. multipart -> singlepart copy of the footprints
      1. +10 m buffer (closes small gaps between adjacent footprints)
      2. dissolve with the requested summary statistics
      3. eliminate interior holes up to 10000 SquareMiles
      4. BEND_SIMPLIFY with 20 m tolerance
      final: -10 m buffer back to the original extent, dissolved to one feature,
             then the stage-2 statistics fields are joined on via OBJECTID.

    raster_footprint       -- input footprint feature class.
    target_raster_boundary -- output boundary feature class (overwritten).
    statistics_fields      -- statistics_fields string for Dissolve_management.
    alter_field_infos      -- optional list of [old_name, new_name, alias] triples
                              used to rename the dissolve summary fields.

    No return value; the result is written to target_raster_boundary.
    """
    a = datetime.datetime.now()
    aa = a  # overall start time, reported at the end
    deleteFields(raster_footprint)
    lasd_boundary_0 = "{}0".format(target_raster_boundary)
    lasd_boundary_1 = "{}1".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_0, True)
    deleteFileIfExists(lasd_boundary_1, True)
    arcpy.AddMessage("\tMultipart to Singlepart")
    arcpy.MultipartToSinglepart_management(in_features=raster_footprint, out_feature_class=lasd_boundary_0)
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_0, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_0)
    arcpy.AddMessage("\tBuffering")
    arcpy.Buffer_analysis(in_features=lasd_boundary_0,
                          out_feature_class=lasd_boundary_1,
                          buffer_distance_or_field="10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="NONE",
                          method="PLANAR")
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_1, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_1)
    deleteFileIfExists(lasd_boundary_0, True)
    lasd_boundary_2 = "{}2".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_2, True)
    arcpy.AddMessage("\tDissolving with statistics: {}".format(statistics_fields))
    arcpy.Dissolve_management(
        in_features=lasd_boundary_1,
        out_feature_class=lasd_boundary_2,
        statistics_fields=statistics_fields
    )
    Utility.addToolMessages()
    arcpy.RepairGeometry_management(in_features=lasd_boundary_2, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_2)
    a = doTime(a, "\tDissolved to {}".format(lasd_boundary_2))
    if alter_field_infos is not None:
        for alter_field_info in alter_field_infos:
            # Best-effort rename: a missing summary field is silently skipped.
            try:
                alterField(lasd_boundary_2, alter_field_info[0], alter_field_info[1], alter_field_info[2])
            except:
                pass
        a = doTime(a, "\tRenamed summary fields")
    lasd_boundary_3 = "{}3".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_3, True)
    arcpy.EliminatePolygonPart_management(in_features=lasd_boundary_2,
                                          out_feature_class=lasd_boundary_3,
                                          condition="AREA",
                                          part_area="10000 SquareMiles",
                                          part_area_percent="0",
                                          part_option="CONTAINED_ONLY")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_3, delete_null="DELETE_NULL")
    deleteFileIfExists(lasd_boundary_1, True)
    deleteFields(lasd_boundary_3)
    lasd_boundary_4 = "{}4".format(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)
    arcpy.SimplifyPolygon_cartography(in_features=lasd_boundary_3,
                                      out_feature_class=lasd_boundary_4,
                                      algorithm="BEND_SIMPLIFY",
                                      tolerance="20 Meters",
                                      minimum_area="0 Unknown",
                                      error_option="RESOLVE_ERRORS",
                                      collapsed_point_option="NO_KEEP",
                                      in_barriers="")
    arcpy.RepairGeometry_management(in_features=lasd_boundary_4, delete_null="DELETE_NULL")
    deleteFields(lasd_boundary_4)
    #try:
    #    arcpy.DeleteField_management(in_table=lasd_boundary_4, drop_field="Id;ORIG_FID;InPoly_FID;SimPgnFlag;MaxSimpTol;MinSimpTol")
    #except:
    #    pass
    deleteFileIfExists(lasd_boundary_3, True)
    deleteFileIfExists(target_raster_boundary, True)
    # -10 m buffer undoes the +10 m expansion applied in stage 1.
    arcpy.Buffer_analysis(in_features=lasd_boundary_4,
                          out_feature_class=target_raster_boundary,
                          buffer_distance_or_field="-10 Meters",
                          line_side="FULL",
                          line_end_type="ROUND",
                          dissolve_option="ALL",
                          method="PLANAR")
    arcpy.RepairGeometry_management(in_features=target_raster_boundary, delete_null="DELETE_NULL")
    deleteFields(target_raster_boundary)
    deleteFileIfExists(lasd_boundary_4, True)
    if alter_field_infos is not None and len(alter_field_infos) > 0:
        # Re-attach the (renamed) dissolve statistics to the final boundary.
        fields = ";".join([field[1] for field in alter_field_infos])
        arcpy.JoinField_management(in_data=target_raster_boundary,
                                   in_field="OBJECTID",
                                   join_table=lasd_boundary_2,
                                   join_field="OBJECTID",
                                   fields=fields)
        Utility.addToolMessages()
    deleteFileIfExists(lasd_boundary_2, True)
    a = doTime(aa, "Dissolved las footprints to dataset boundary {} ".format(target_raster_boundary))
def updateMDLASGeometry(geometry_type, filegdb_path, md_path, area_percent, point_interval): ''' geometry_type = ["BOUNDARY", "FOOTPRINT"] ''' geometry_name = "LAS" # Create an in-memory feature class to hold the geometry geometry_export = os.path.join(filegdb_path, "{}_{}_Export".format(geometry_type, geometry_name)) if arcpy.Exists(geometry_export): arcpy.Delete_management(geometry_export) Utility.addToolMessages() # Export the geometry to the in-memory feature class arcpy.ExportMosaicDatasetGeometry_management(md_path, geometry_export, where_clause="#", geometry_type=geometry_type) Utility.addToolMessages() # Remove the holes and save to a feature class in the file geodatabase geometry_no_holes = os.path.join(filegdb_path, "{}_{}_NoHoles".format(geometry_type, geometry_name)) if arcpy.Exists(geometry_no_holes): arcpy.Delete_management(geometry_no_holes) Utility.addToolMessages() arcpy.EliminatePolygonPart_management(geometry_export, geometry_no_holes, condition="PERCENT", part_area="0 SquareMeters", part_area_percent=area_percent, part_option="CONTAINED_ONLY") Utility.addToolMessages() # Smooth the polygons geometry_smooth = os.path.join(filegdb_path, "{}_{}".format(geometry_type, geometry_name)) if arcpy.Exists(geometry_smooth): arcpy.Delete_management(geometry_smooth) Utility.addToolMessages() arcpy.SmoothPolygon_cartography(geometry_no_holes, geometry_smooth, "PAEK", point_interval, "FIXED_ENDPOINT", "NO_CHECK") Utility.addToolMessages() # Clean up if arcpy.Exists(geometry_export): arcpy.Delete_management(geometry_export) Utility.addToolMessages() if arcpy.Exists(geometry_no_holes): arcpy.Delete_management(geometry_no_holes) Utility.addToolMessages() # import simplified Footprints/boundary arcpy.ImportMosaicDatasetGeometry_management(md_path, target_featureclass_type=geometry_type, target_join_field="OBJECTID", input_featureclass=geometry_smooth, input_join_field="OBJECTID") Utility.addToolMessages()
def contour_prep(in_fc, scheme_poly, scratch, footprint_path, name):
    """
    Prepare contour annotation artifacts for one map section.

    Works in a per-section scratch folder 'T<name>': copies the annotation MXD
    template, repoints its Contour layers at in_fc, clips the tiling scheme to
    this section's footprint, runs TiledLabelsToAnnotation at the four reference
    scales (1128/2257/4514/9028), then exports each annotation feature class to a
    .lyr file, a point shapefile, and a FeatureOutlineMasks masking shapefile.

    in_fc          -- contour feature class (split into its gdb + fc name below).
    scheme_poly    -- tiling-scheme polygons, clipped to the section footprint.
    scratch        -- root scratch folder; per-section artifacts go in T<name>.
    footprint_path -- footprint feature class; filtered with name='<name>'.
    name           -- section name used to derive every per-section path.

    The whole body is wrapped in a retry loop (TRIES_ALLOWED attempts); on the
    first attempt previously created artifacts are cleared since they can't be
    trusted. Side effects only; no return value.
    """
    a = datetime.datetime.now()
    aa = a  # overall start time for the final 'Finished' report
    # arcpy.AddMessage('Started: {}'.format(name))
    db = os.path.split(in_fc)[0]
    fc = os.path.split(in_fc)[1]
    arcpy.env.overwriteOutput = True
    filter_folder = os.path.join(scratch, 'T{}'.format(name))
    if not os.path.exists(filter_folder):
        os.makedirs(filter_folder)
    section_mxd_name = os.path.join(filter_folder, 'T{}.mxd'.format(name))
    scratch_db = os.path.join(filter_folder, 'T{}.gdb'.format(name))
    target_scheme_polys = os.path.join(filter_folder, 'T{}SP.shp'.format(name))
    target_scheme_polys_fgdb = os.path.join(scratch_db, 'T{}SP'.format(name))
    # Utility.printArguments(["in_fc", "scheme_poly", "scratch", "name", "db", "fc", "filter_folder", "section_mxd_name", "scratch_db", "target_scheme_polys"],
    #                        [in_fc, scheme_poly, scratch, name, db, fc, filter_folder, section_mxd_name, scratch_db, target_scheme_polys], "C02_B Contour Prep")
    # Annotation feature classes produced by TiledLabelsToAnnotation, one per
    # reference scale (the suffix pairs below follow the tool's naming).
    anno1128 = os.path.join(scratch_db, r"Contours_1128Anno1128")
    anno2257 = os.path.join(scratch_db, r"Contours_2257Anno2256")
    anno4514 = os.path.join(scratch_db, r"Contours_4514Anno4513")
    anno9028 = os.path.join(scratch_db, r"Contours_9028Anno9027")
    anno_paths = [anno1128, anno2257, anno4514, anno9028]
    annoShp1128 = os.path.join(filter_folder, r"Contours_1128Anno1128.shp")
    annoShp2257 = os.path.join(filter_folder, r"Contours_2257Anno2256.shp")
    annoShp4514 = os.path.join(filter_folder, r"Contours_4514Anno4513.shp")
    annoShp9028 = os.path.join(filter_folder, r"Contours_9028Anno9027.shp")
    annoShp_paths = [annoShp1128, annoShp2257, annoShp4514, annoShp9028]  # @UnusedVariable
    mask1128 = os.path.join(filter_folder, r"Mask1128.shp")
    mask2257 = os.path.join(filter_folder, r"Mask2256.shp")
    mask4514 = os.path.join(filter_folder, r"Mask4513.shp")
    mask9028 = os.path.join(filter_folder, r"Mask9027.shp")
    mask_paths = [mask1128, mask2257, mask4514, mask9028]
    annoLyr1128 = os.path.join(filter_folder, r"Contours_1128Anno1128.lyr")
    annoLyr2257 = os.path.join(filter_folder, r"Contours_2257Anno2256.lyr")
    annoLyr4514 = os.path.join(filter_folder, r"Contours_4514Anno4513.lyr")
    annoLyr9028 = os.path.join(filter_folder, r"Contours_9028Anno9027.lyr")
    annoLyr_paths = [annoLyr1128, annoLyr2257, annoLyr4514, annoLyr9028]
    clearScratch = True
    TRIES_ALLOWED = 10
    if not isProcessFile(scratch, name):
        arcpy.AddMessage("{}: All artifacts exist".format(name))
    else:
        created1 = False
        tries1 = 0
        while not created1 and tries1 <= TRIES_ALLOWED:
            tries1 = tries1 + 1
            try:
                # Clear out everything that was created before, we cant trust it
                if clearScratch:
                    clearScratch = False
                    clearScratchFiles(section_mxd_name, anno_paths,
                                      mask_paths, annoLyr_paths)
                # Inner retry loop: copying the MXD template can fail transiently
                # (file locks), so retry with a growing sleep.
                mxd_tries1 = 0
                while not os.path.exists(
                        section_mxd_name) and mxd_tries1 < TRIES_ALLOWED:
                    mxd_tries1 = mxd_tries1 + 1
                    try:
                        if not os.path.exists(filter_folder):
                            os.makedirs(filter_folder)
                            arcpy.AddMessage(
                                '\tREPEAT: Made section Scratch Folder Name: {}'
                                .format(filter_folder))
                        else:
                            arcpy.AddMessage(
                                '\tEXISTS: Section Scratch Folder Name: {}'.
                                format(filter_folder))
                        arcpy.AddMessage(
                            '\tSection MXD Name: {}'.format(section_mxd_name))
                        shutil.copyfile(ContourConfig.MXD_ANNO_TEMPLATE,
                                        section_mxd_name)
                        a = Utility.doTime(
                            a, "\t{}: Saved a copy of the mxd template to '{}'".
                            format(name, section_mxd_name))
                        arcpy.AddMessage(
                            '\tSection MXD Name {} exists? {}'.format(
                                section_mxd_name,
                                os.path.exists(section_mxd_name)))
                    except Exception as e:
                        # Back off proportionally to the attempt count, then
                        # recreate the scratch folder from scratch.
                        time.sleep(mxd_tries1)
                        arcpy.AddWarning(
                            'Copying Section MXD Failed: {}'.format(
                                section_mxd_name))
                        arcpy.AddWarning('Error: {}'.format(e))
                        type_, value_, traceback_ = sys.exc_info()
                        tb = traceback.format_exception(
                            type_, value_, traceback_, 3)
                        arcpy.AddWarning('Error: \n{}: {}\n{}\n'.format(
                            type_, value_, tb[1]))
                        try:
                            arcpy.AddMessage(
                                '\t\t\t: Removing folder: {}'.format(
                                    filter_folder))
                            shutil.rmtree(filter_folder)
                            arcpy.AddMessage(
                                '\t\t\t: folder{} exists? {}'.format(
                                    filter_folder,
                                    os.path.exists(filter_folder)))
                            if not os.path.exists(filter_folder):
                                os.makedirs(filter_folder)
                                arcpy.AddMessage(
                                    '\tREPEAT: Made section Scratch Folder Name: {}'
                                    .format(filter_folder))
                            arcpy.AddMessage(
                                '\t\t\t: folder{} exists? {}'.format(
                                    filter_folder,
                                    os.path.exists(filter_folder)))
                        except:
                            arcpy.AddWarning(
                                '\t\t\t: folder{} exists: {}'.format(
                                    filter_folder,
                                    os.path.exists(filter_folder)))
                        if mxd_tries1 >= TRIES_ALLOWED:
                            raise e
                # Set MXD For Processing
                mxd = arcpy.mapping.MapDocument(section_mxd_name)
                # Set Layers to Reference Input FC
                broken = arcpy.mapping.ListBrokenDataSources(mxd)
                for item in broken:
                    if item.name.startswith(r'Contour'):
                        item.replaceDataSource(db, "FILEGDB_WORKSPACE", fc)
                mxd.save()
                a = Utility.doTime(
                    a, "\t{}: Fixed broken paths in '{}'".format(
                        name, section_mxd_name))
                # Create FGDB For Annotation Storage
                if arcpy.Exists(scratch_db):
                    pass
                else:
                    arcpy.CreateFileGDB_management(filter_folder,
                                                   'T{}.gdb'.format(name))
                    a = Utility.doTime(
                        a, "\t{}: Created 'T{}.gdb' at {}".format(
                            name, name, filter_folder))
                if arcpy.Exists(target_scheme_polys):
                    arcpy.AddMessage("\t{}: Scheme Poly exists: {}".format(
                        name, target_scheme_polys))
                else:
                    # Filter for Section of Input FC
                    feat = arcpy.MakeFeatureLayer_management(
                        in_features=footprint_path,
                        out_layer=name,
                        where_clause="name='{}'".format(name))
                    a = Utility.doTime(
                        a, "\t{}: Created feature layer '{}'".format(name, feat))
                    arcpy.Clip_analysis(in_features=scheme_poly,
                                        clip_features=feat,
                                        out_feature_class=target_scheme_polys,
                                        cluster_tolerance="")
                if arcpy.Exists(target_scheme_polys_fgdb):
                    arcpy.Delete_management(target_scheme_polys_fgdb)
                # Copy the clipped scheme polys into the FGDB, retrying on
                # transient failures (silent except + sleep).
                created = False
                tries = 0
                while not created and tries <= TRIES_ALLOWED:
                    tries = tries + 1
                    try:
                        arcpy.CopyFeatures_management(
                            in_features=target_scheme_polys,
                            out_feature_class=target_scheme_polys_fgdb)
                        a = Utility.doTime(
                            a, "\t{}: Copied target scheme polys '{}'".format(
                                name, target_scheme_polys))
                        created = True
                    except:
                        time.sleep(1)
                # Reference Annotation FCs created with TiledLabelsToAnnotation
                df = arcpy.mapping.ListDataFrames(mxd, 'Layers')[0]
                a = Utility.doTime(
                    a, "\t{}: Got data frame '{}'".format(name, df))
                # Only 'Contours ' layers get labels, restricted to this section.
                for lyr in arcpy.mapping.ListLayers(mxd):
                    try:
                        lyr.showLabels = False
                        if lyr.name.upper().startswith("CONTOURS "):
                            lyr.showLabels = True
                            if lyr.supports("DEFINITIONQUERY"):
                                lyr.definitionQuery = "{} and name = '{}'".format(
                                    lyr.definitionQuery, name)
                    except:
                        pass  # some layers don't support labels. If not, just move one
                a = Utility.doTime(
                    a, "\t{}: Creating annotation from tiled labels".format(name))
                # Create Annotation with Filtered FC Extent
                arcpy.TiledLabelsToAnnotation_cartography(
                    map_document=mxd.filePath,
                    data_frame='Layers',
                    polygon_index_layer=target_scheme_polys,
                    out_geodatabase=scratch_db,
                    out_layer='GroupAnno',
                    anno_suffix='Anno',
                    reference_scale_value='9028',
                    reference_scale_field="Tile_Scale",
                    tile_id_field="FID",
                    feature_linked="STANDARD",
                    generate_unplaced_annotation=
                    "NOT_GENERATE_UNPLACED_ANNOTATION")
                Utility.addToolMessages()
                mxd.save()
                a = Utility.doTime(
                    a, "\t{}: Exported tiled labels to annotation '{}'".format(
                        name, target_scheme_polys))
                # Create layer files for each of the Anno feature classes, and add to the map
                # Each entry: [anno fc, .lyr path, layer name, point .shp path]
                annotation_set = [
                    [anno1128, annoLyr1128, "Cont_1128Anno1128", annoShp1128],
                    [anno2257, annoLyr2257, "Cont_2257Anno2256", annoShp2257],
                    [anno4514, annoLyr4514, "Cont_4514Anno4513", annoShp4514],
                    [anno9028, annoLyr9028, "Cont_9028Anno9027", annoShp9028]
                ]
                # Create .lyr Files & Add to MXD
                df = arcpy.mapping.ListDataFrames(mxd, 'Layers')[0]
                for anno in annotation_set:
                    lyr_path = anno[1]
                    if not arcpy.Exists(anno[0]):
                        arcpy.AddWarning(
                            "{}: WARNING: Annotation Layer Missing: {}".format(
                                name, anno[0]))
                    else:
                        if arcpy.Exists(lyr_path):
                            arcpy.AddMessage(
                                "\t{}: Annotation Layer Exists: {}".format(
                                    name, lyr_path))
                        else:
                            arcpy.MakeFeatureLayer_management(anno[0], anno[2])
                            arcpy.SaveToLayerFile_management(
                                in_layer=anno[2],
                                out_layer=lyr_path,
                                is_relative_path='ABSOLUTE',
                                version='CURRENT')
                            arcpy.AddMessage(
                                "\t{}: Annotation Layer Exported: {}".format(
                                    name, lyr_path))
                        shp_path = anno[3]
                        if os.path.exists(shp_path):
                            arcpy.AddMessage(
                                "\t{}: Annotation shapefile Exported: {}".
                                format(name, shp_path))
                        else:
                            arcpy.FeatureToPoint_management(
                                in_features=anno[0],
                                out_feature_class=shp_path,
                                point_location="INSIDE")
                            arcpy.AddMessage(
                                "\t{}: Annotation shapefile Exported: {}".
                                format(name, shp_path))
                        # Only add the layer to the map if it isn't already there.
                        addLayer = True
                        for cur_lyr in arcpy.mapping.ListLayers(mxd):
                            if cur_lyr.name.upper().startswith(
                                    str(anno[2]).upper()):
                                addLayer = False
                                break
                        if addLayer:
                            add_lyr = arcpy.mapping.Layer(lyr_path)
                            arcpy.mapping.AddLayer(df, add_lyr, 'BOTTOM')
                            mxd.save()
                a = Utility.doTime(
                    a, "\t{}: Exported layer files for annotation set {}".format(
                        name, annotation_set))
                for lyr_path in annoLyr_paths:  # arcpy.ListFiles('Contours*.lyr_path'):
                    try:
                        # The 4-char reference scale is encoded in the .lyr name
                        # (e.g. "...Anno1128.lyr" -> "1128").
                        ref_scale = lyr_path[-8:-4]
                        mask_fc = os.path.join(filter_folder,
                                               r'Mask{}.shp'.format(ref_scale))
                        if arcpy.Exists(mask_fc):
                            arcpy.AddMessage(
                                "\t{}: Mask Layer Exists: {}".format(
                                    name, mask_fc))
                        else:
                            if os.path.exists(lyr_path):
                                arcpy.FeatureOutlineMasks_cartography(
                                    input_layer=lyr_path,
                                    output_fc=mask_fc,
                                    reference_scale=ref_scale,
                                    spatial_reference=ContourConfig.
                                    WEB_AUX_SPHERE,
                                    margin='0 Points',
                                    method='BOX',
                                    mask_for_non_placed_anno='ALL_FEATURES',
                                    attributes='ALL')
                            else:
                                arcpy.AddWarning(
                                    "t{}: WARNING: Can't create masking layer. Layer file missing {}"
                                    .format(name, lyr_path))
                    except Exception as e:
                        arcpy.AddError('{}: Exception: {}'.format(name, e))
                        pass
                mxd.save()
                a = Utility.doTime(
                    a, "\t{}: Created masking polygons".format(name))
                Utility.doTime(aa, 'Finished: {}'.format(name))
                created1 = True
                del mxd
            except Exception as e:
                # Log full tracebacks; only re-raise once the retry budget is spent.
                arcpy.AddError('Exception: {}'.format(e))
                tb = sys.exc_info()[2]
                tbinfo = traceback.format_tb(tb)[0]
                arcpy.AddError(
                    "PYTHON ERRORS:\nTraceback info:\n{}\nError Info:\n{}".
                    format(tbinfo, str(sys.exc_info()[1])))
                arcpy.AddError("ArcPy ERRORS:\n{}\n".format(
                    arcpy.GetMessages(2)))
                if tries1 > TRIES_ALLOWED:
                    arcpy.AddError('Dropped: {}'.format(name))
                    raise e
def processJob(ProjectJob, project, ProjectUID, serverConnectionFile,
               serverFunctionPath, update=False, runCount=0):
    """Publish each of this project's mosaic datasets as an image service.

    For every mosaic dataset name (DTM, DSM, DLM, DHM, DCM, INT) found in
    the project's PUBLISHED file geodatabases this:
      1. repairs the mosaic's internal raster paths when the project
         directory has been moved for publishing,
      2. builds service tags and a description from the mosaic's spatial
         reference and cell size,
      3. creates and analyzes an image service definition draft (.sddraft),
      4. stages/publishes it by shelling out to A07_B_StageSD.py, and
      5. deletes DHM/DCM mosaics so they can be re-created afterwards
         (publishing breaks these referenced mosaics).

    Parameters:
        ProjectJob, project, ProjectUID: CMDR job row objects/values used to
            look up project folders, IDs and metadata.
        serverConnectionFile: ArcGIS Server connection file used when
            staging the service definition.
        serverFunctionPath: folder of server-side raster functions, or None
            to publish without any.
        update: forwarded to updateSDServerSideFunctions() when editing the
            .sddraft.
        runCount: retained for the commented-out retry logic; otherwise
            unused.

    Returns nothing; progress and failures are reported via arcpy messages.
    NOTE(review): Python 2 / arcpy 10.x code ('<>' operator, iteritems,
    arcpy.mapping).
    """
    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectState = ProjectJob.getState(project)
    ProjectYear = ProjectJob.getYear(project)
    ProjectAlias = ProjectJob.getAlias(project)
    ProjectAliasClean = ProjectJob.getAliasClean(project)
    project_wmx_jobid = ProjectJob.getWMXJobID(project)
    Deliver = CMDR.Deliver()
    #delivery = Deliver.getDeliver(project_wmx_jobid)
    delivery = Deliver.getDeliver(ProjectID)
    dateDeliver = Deliver.getDeliverDate(delivery)
    # Services are created in a started state.
    startupType = "STARTED"
    Utility.printArguments([
        "ProjectJob", "project", "ProjectUID", "serverConnectionFile",
        "serverFunctionPath", "update", "runCount", "ProjectFolder",
        "ProjectID", "ProjectState", "ProjectYear", "ProjectAlias",
        "ProjectAliasClean", "startupType"
    ], [
        ProjectJob, project, ProjectUID, serverConnectionFile,
        serverFunctionPath, update, runCount, ProjectFolder, ProjectID,
        ProjectState, ProjectYear, ProjectAlias, ProjectAliasClean,
        startupType
    ], "A07_A Publish Project")

    # serverFunctionPath = Raster.getServerRasterFunctionsPath(jobID)
    # Semicolon-delimited list of server-side raster function paths, or
    # None when no function folder was supplied.
    ssFunctions = None
    if serverFunctionPath is not None:
        ssFunctions = Raster.getServerSideFunctions(serverFunctionPath)
    # Services are grouped into a server folder named after the state.
    folderName = ProjectState

    # If the project has been moved for publishing, update the project directory
    old_path, new_path = updateJobDirectory(project_wmx_jobid, ProjectJob,
                                            project)
    old_ProjectID = ProjectID
    arcpy.AddMessage(
        "\n\n-----------------------------------------------------------")
    try:
        arcpy.AddMessage("Job directory paths: \n\tOLD: {}\n\tNEW: {}".format(
            old_path, new_path))
        doRepath = True
        if str(old_path).lower().strip() == str(new_path).lower().strip():
            # Paths look identical, but the mosaic's own records may still
            # point at an older folder: read the Project_Dir of the first
            # record of the DTM mosaic to recover the true old location.
            arcpy.AddMessage(
                "Job directory paths old/new match, checking MD first record project folder value"
            )
            filegdb_name = "{}_{}.gdb".format(
                ProjectFolder.published.fgdb_name[:-4], FoldersConfig.DTM)
            arcpy.AddMessage("checking fgdb '{}' ".format(filegdb_name))
            dtm_md_path = os.path.join(new_path, ProjectID,
                                       FoldersConfig.published_dir,
                                       filegdb_name, FoldersConfig.DTM)
            arcpy.AddMessage(
                "checking MD '{}' first record project folder value".format(
                    dtm_md_path))
            record_project_path, uid = Utility.getExistingRecord(
                dtm_md_path, ["Project_Dir"], 0)
            arcpy.AddMessage("first record is {}".format(record_project_path))
            record_project_path = list(record_project_path)[0]
            arcpy.AddMessage(
                "MD first record project folder value is {}".format(
                    record_project_path))
            # add a slash back in because strings remove it and remove the project name at the end
            old_path, old_ProjectID = os.path.split("\{}".format(
                str(record_project_path).strip()))
            arcpy.AddMessage(
                "Job directory paths: \n\tOLD: {}\n\tNEW: {}\n\tOLD Project ID: {}\n\tNEW Project ID: {}"
                .format(old_path, new_path, old_ProjectID, ProjectID))
            if str(old_path).lower().strip() == str(new_path).lower().strip():
                doRepath = False
                arcpy.AddMessage("Job directory paths match, doRepath = False")
    except:
        # NOTE(review): bare except silently ignores any failure of the
        # repath detection above; doRepath then stays at its default.
        pass
    arcpy.AddMessage(
        "-----------------------------------------------------------\n\n")

    # One pass per derivative product mosaic dataset.
    md_list = [
        FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
        FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT
    ]
    for md_name in md_list:
        update_paths_success = False
        # @TODO Add more info here!
        serviceDescription = "for project '{}' within state {} published in the year {}".format(
            ProjectAlias, ProjectState, ProjectYear)
        serviceTags = ",".join(
            [ProjectID, ProjectAliasClean, ProjectState, str(ProjectYear)])
        # Each product lives in its own "<fgdb>_<product>.gdb"; strip a
        # trailing ".gdb" from the base name before appending the suffix.
        filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name,
                                          md_name)
        if ProjectFolder.published.fgdb_name.endswith(".gdb"):
            filegdb_name = "{}_{}.gdb".format(
                ProjectFolder.published.fgdb_name[:-4], md_name)
        #ProjectMDs_fgdb_path = os.path.join(ProjectFolder.published.path, filegdb_name)
        new_project_path = os.path.join(new_path, ProjectID)
        old_project_path = os.path.join(old_path, ProjectID)
        # The project may also have been renamed, not just moved.
        if str(ProjectID).lower().strip() != str(
                old_ProjectID).lower().strip():
            old_project_path = os.path.join(old_path, old_ProjectID)
        #arcpy.AddMessage("OLD File Geodatabase Path: {0}".format(ProjectMDs_fgdb_path))
        new_publish_path = os.path.join(new_project_path, "PUBLISHED")
        old_publish_path = os.path.join(old_project_path, "PUBLISHED")
        #arcpy.AddMessage("OLD File Geodatabase Path: {0}".format(ProjectMDs_fgdb_path))
        new_delivered_path = os.path.join(new_project_path, "DELIVERED")
        old_delivered_path = os.path.join(old_project_path, "DELIVERED")
        new_projectMDs_fgdb_path = os.path.join(new_publish_path, filegdb_name)
        arcpy.AddMessage(
            "File Geodatabase Path: {0}".format(new_projectMDs_fgdb_path))

        # Ensure the master_md_path exists
        if arcpy.Exists(new_projectMDs_fgdb_path):
            project_md_path = os.path.join(new_projectMDs_fgdb_path, md_name)
            arcpy.AddMessage(
                "Mosaic Dataset Path: {0}".format(project_md_path))
            if arcpy.Exists(project_md_path):
                # --- Repair internal raster paths after a folder move ----
                try:
                    arcpy.AddMessage(
                        "Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}"
                        .format(new_projectMDs_fgdb_path, old_project_path,
                                new_project_path))
                    arcpy.RepairMosaicDatasetPaths_management(
                        in_mosaic_dataset=project_md_path,
                        paths_list="# {0} {1}".format(old_project_path,
                                                      new_project_path),
                        where_clause="1=1")
                    Utility.addToolMessages()
                    #arcpy.AddMessage("Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}".format(new_projectMDs_fgdb_path, old_delivered_path, new_delivered_path))
                    #arcpy.RepairMosaicDatasetPaths_management(in_mosaic_dataset=project_md_path, paths_list="# {0} {1}".format(old_delivered_path, new_delivered_path), where_clause="1=1")
                    update_paths_success = True
                except:
                    # Height models are rebuilt later anyway, so only warn
                    # for the other products.
                    if md_name <> FoldersConfig.DHM and md_name <> FoldersConfig.DCM:
                        arcpy.AddWarning(
                            "Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}"
                            .format(project_md_path))
                    # Best effort: dump current paths to a table so they can
                    # be inspected/fixed by hand.
                    try:
                        out_table = "{}_Paths".format(project_md_path)
                        arcpy.ExportMosaicDatasetPaths_management(
                            in_mosaic_dataset=project_md_path,
                            out_table=out_table,
                            where_clause="1=1",
                            export_mode="ALL",
                            types_of_paths="RASTER;ITEM_CACHE")
                        Utility.addToolMessages()
                        arcpy.AddMessage(
                            "List of repaired Mosaic Dataset Paths: {}".format(
                                out_table))
                    except:
                        pass

                # Repeat the repair for the companion "_OCS" mosaic, if any.
                project_md_ocs_path = "{}_OCS".format(project_md_path)
                if arcpy.Exists(project_md_ocs_path):
                    try:
                        arcpy.AddMessage(
                            "Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}"
                            .format(project_md_ocs_path, old_project_path,
                                    new_project_path))
                        arcpy.RepairMosaicDatasetPaths_management(
                            in_mosaic_dataset=project_md_ocs_path,
                            paths_list="# {0} {1}".format(
                                old_project_path, new_project_path),
                            where_clause="1=1")
                        Utility.addToolMessages()
                    except:
                        arcpy.AddWarning(
                            "Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}"
                            .format(project_md_ocs_path))
                        try:
                            out_table = "{}_Paths".format(project_md_ocs_path)
                            arcpy.ExportMosaicDatasetPaths_management(
                                in_mosaic_dataset=project_md_ocs_path,
                                out_table=out_table,
                                where_clause="1=1",
                                export_mode="ALL",
                                types_of_paths="RASTER;ITEM_CACHE")
                            Utility.addToolMessages()
                            arcpy.AddMessage(
                                "List of repaired Mosaic Dataset Paths: {}".
                                format(out_table))
                        except:
                            pass

                # --- Build service metadata from the mosaic properties ---
                serviceName = "{}_{}".format(ProjectID, md_name)
                arcpy.AddMessage("Service Name: {0}".format(serviceName))
                # Retrieve some properties from the Mosaic Dataset to place in the tags field
                cellsizeResult = arcpy.GetRasterProperties_management(
                    project_md_path, property_type="CELLSIZEX", band_index="")
                Utility.addToolMessages()
                cellsizeX = cellsizeResult.getOutput(0)
                # Get the units of the Mosaic Dataset
                descMD = arcpy.Describe(project_md_path)
                SpatRefMD = descMD.SpatialReference
                SpatRefUnitsMD = SpatRefMD.linearUnitName
                SpatRefNameMD = SpatRefMD.name
                arcpy.AddMessage(
                    "Spatial Reference name of Mosaic Dataset: {0}".format(
                        SpatRefNameMD))
                arcpy.AddMessage(
                    "Spatial Reference X,Y Units of Mosaic Dataset: {0}".
                    format(SpatRefUnitsMD))
                # append the cellsize and units of the Mosaic Dataset to the tags
                serviceTags = "{}, {}, {}".format(serviceTags, cellsizeX,
                                                  SpatRefUnitsMD)
                serviceDescription = "{} {}. Horizontal spatial reference is {} and cell size is {} {}.".format(
                    md_name, serviceDescription, SpatRefNameMD, cellsizeX,
                    SpatRefUnitsMD)
                serviceDescription = "{}. Please note that cell size does not refer to the underlying data's cell size.".format(
                    serviceDescription)
                serviceDescription = "{}. You must check the meta-data for the underlying elevation data's resolution information (cell width, cell height, and Lidar point spacing).".format(
                    serviceDescription)
                arcpy.AddMessage("Service Tags: {0}".format(serviceTags))
                arcpy.AddMessage(
                    "Service description: {0}".format(serviceDescription))

                # Look for RasterPath in the list of allowed fields, and if found, don't publish
                # the mosaic dataset. Exposing the contents of RasterPath could compromise the
                # security of the Image Service.
                allowedFieldListMD = descMD.AllowedFields
                arcpy.AddMessage("AllowedFields in MD Properties: {0}".format(
                    allowedFieldListMD))
                # NOTE(review): 'True or' short-circuits the RasterPath
                # security check described above, so the else branch below
                # is unreachable — confirm whether this was meant to stay
                # disabled.
                if True or "RASTERPATH;" not in allowedFieldListMD.upper():
                    # Create a list to manipulate server-side functions
                    # Bring Hillshade to the top of the list so it is default
                    ssFunctionsLst = list([])
                    ssFunctionsList = ""
                    if ssFunctions is not None:
                        ssFunctionsLst = ssFunctions.split(";")
                        if len(ssFunctionsLst) > 0:
                            foundHillshade = False
                            # Intensity gets a METER function as default;
                            # everything else defaults to a HILLSHADE.
                            if md_name <> FoldersConfig.INT:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'HILLSHADE' in s.upper():
                                        arcpy.AddMessage(
                                            "Will re-order SS Functions for {} so {} is default"
                                            .format(md_name, s))
                                        foundHillshade = True
                                        break
                            else:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'METER' in s.upper():
                                        arcpy.AddMessage(
                                            "Will re-order SS Functions for {} so {} is default"
                                            .format(md_name, s))
                                        foundHillshade = True
                                        break
                            # if Hillshade is found then re-order the list
                            # Don't apply hillshade to intensity
                            if foundHillshade:
                                # Move the matched function (index i from the
                                # loop above) to the front of the list.
                                ssFunctionsLst.insert(0, ssFunctionsLst.pop(i))
                                # NOTE(review): format string below has no
                                # '{}' placeholder, so the re-ordered list is
                                # never actually shown in the message.
                                arcpy.AddMessage(
                                    "Re-ordered SS Functions to (first is default): "
                                    .format(ssFunctionsLst))
                            # convert the list of server-side functions into a comma delimited string
                            ssFunctionsList = ",".join(ssFunctionsLst)
                            arcpy.AddMessage(
                                "Server-side Functions: {0}\n".format(
                                    ssFunctionsList))

                    # Create image service definition draft
                    arcpy.AddMessage(
                        "Creating image service definition draft file: ")
                    # .sddraft is written next to the .gdb (two levels above
                    # the mosaic dataset path).
                    wsPath = os.path.dirname(os.path.dirname(project_md_path))
                    sddraftPath = os.path.join(wsPath, serviceName + ".sddraft")
                    arcpy.Delete_management(sddraftPath)
                    arcpy.AddMessage("\tMDPath='{}'".format(project_md_path))
                    arcpy.AddMessage("\tSDPath='{}'".format(sddraftPath))
                    arcpy.AddMessage("\tServiceName='{}'".format(serviceName))
                    arcpy.AddMessage("\tFolderName='{}'".format(folderName))
                    arcpy.AddMessage(
                        "\tSummary='{}'".format(serviceDescription))
                    arcpy.AddMessage("\tTags='{}'".format(serviceTags))
                    arcpy.CreateImageSDDraft(project_md_path,
                                             sddraftPath,
                                             serviceName,
                                             "ARCGIS_SERVER",
                                             connection_file_path=None,
                                             copy_data_to_server=False,
                                             folder_name=folderName,
                                             summary=serviceDescription,
                                             tags=serviceTags)

                    # Edit the service definition draft if user specified server-side functions
                    # or if user wants to enable download on the Image Service
                    updateSDServerSideFunctions(ssFunctionsLst,
                                                ssFunctionsList, sddraftPath,
                                                update)

                    # Analyze service definition draft
                    arcpy.AddMessage(
                        "Analyzing service definition draft file...")
                    analysis = arcpy.mapping.AnalyzeForSD(sddraftPath)
                    for key in ('messages', 'warnings', 'errors'):
                        arcpy.AddMessage("----" + key.upper() + "---")
                        analysis_vars = analysis[key]
                        for ((message, code), data
                             ) in analysis_vars.iteritems():  # @UnusedVariable
                            msg = (" ", message, " (CODE %i)" % code)
                            arcpy.AddMessage("".join(msg))

                    if analysis['errors'] == {}:
                        arcpy.AddMessage(
                            "Staging and publishing service definition...")
                        # StageService
                        arcpy.AddMessage("Staging sddraft file to sd file")
                        sdPath = sddraftPath.replace(".sddraft", ".sd")
                        arcpy.Delete_management(sdPath)
                        # Staging runs in a separate 32-bit process via the
                        # helper script (bit32=True).
                        RunUtil.runTool(r'ngce\pmdm\a\A07_B_StageSD.py', [
                            sddraftPath, sdPath, serverConnectionFile,
                            startupType
                        ],
                                        bit32=True,
                                        log_path=ProjectFolder.derived.path)
                        # arcpy.StageService_server(sddraftPath, sdPath)
                        # # try:
                        # #     # UploadServiceDefinition
                        # #     arcpy.AddMessage("Publishing mosaic data set as image service.")
                        # #     arcpy.UploadServiceDefinition_server(sdPath, serverConnectionFile, "#", "#", "#", "#", startupType)
                        # # except Exception as e:
                        # #     if runCount < 1:
                        # ##         PublishMosaicDataset(jobID, serverConnectionFile, True, 1)
                        # #         processJob(ProjectJob, project, ProjectUID, serverConnectionFile, serverFunctionPath, update=True, runCount=1)
                        # #         break
                        # #     else:
                        # #         raise e
                    else:
                        # if the sddraft analysis contained errors, display them
                        arcpy.AddError(analysis['errors'])
                else:
                    arcpy.AddError(
                        "Exiting: Found 'RasterPath' in list of allowed MD fields. Please remove this field from the list before publishing."
                    )
                    arcpy.AddError(
                        " To remove RasterPath from the list, go to Mosaic Dataset Properties, Defaults tab, Allowed Fields..."
                    )

                # Clean up and delete the .sd file
                # NOTE(review): sdPath is only assigned inside the
                # no-analysis-errors branch above; if that branch was not
                # taken this line would raise NameError — verify intended
                # placement.
                Utility.deleteFileIfExists(sdPath, False)

                # For some reason publishing breaks the referenced mosaics.
                # The function paths also don't update properly.
                # So delete them and re-create later.
                if md_name == FoldersConfig.DHM or md_name == FoldersConfig.DCM:
                    arcpy.AddMessage(
                        "Deleting Mosaic Dataset to recreate later {}".format(
                            project_md_path))
                    Utility.deleteFileIfExists(project_md_path, True)
            else:
                arcpy.AddWarning(
                    "Project mosaic dataset not found '{}'.".format(
                        project_md_path))
        else:
            arcpy.AddError(
                "Project file geodatabase not found '{}'. Please add this before proceeding."
                .format(new_projectMDs_fgdb_path))
    # FOR LOOP
    ##
    ## Re-create the MD if it is FoldersConfig.DHM, FoldersConfig.DCM
    ##
    A06_A_CreateProjectMosaicDataset.CreateProjectMDs(project_wmx_jobid,
                                                      dateDeliver=dateDeliver)
def processJob(ProjectJob, project, ProjectUID, masterParentDir,
               masterService):
    """Ingest a project's mosaic datasets into the master mosaic datasets.

    For each non-height product (DTM, DSM, DLM, INT), validates that the
    project mosaic is single-band 32-bit float, then appends its rows to the
    corresponding master mosaic via AddRastersToMosaicDataset with the
    "Table" raster type, re-applies the master's mosaic properties (so
    clip-to-footprint stays NOT_CLIP), and compacts the master geodatabase.

    Parameters:
        ProjectJob, project, ProjectUID: CMDR job row objects/values used to
            locate the project's published file geodatabases.
        masterParentDir: parent directory containing the master service
            folder/geodatabases.
        masterService: master service name, optionally prefixed with a
            folder ("folder/name" or "folder\\name").

    Returns nothing; progress and failures are reported via arcpy messages.
    """
    # Split an optional "folder/name" (or "folder\name") service path.
    masterServiceFolder = None
    masterName = masterService
    index = masterService.find("/")
    if index < 0:
        index = masterService.find("\\")
    if index >= 0:
        masterServiceFolder = masterService[0:index]
        masterName = masterService[index + 1:]
    master_md_name = masterName  # RasterConfig.MASTER_MD_NAME
    Utility.printArguments(["Master Folder", "Master Name", "Master MD Name"],
                           [masterServiceFolder, masterName, master_md_name],
                           "A08 AddPrjectToMaster")

    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    # projectID = ProjectJob.getProjectID(project)
    ProjectMDs_fgdb_path = ProjectFolder.published.fgdb_path
    arcpy.AddMessage("Project file GDB Path: {}".format(ProjectMDs_fgdb_path))

    # Don't need to do height models
    md_list = [
        FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
        FoldersConfig.INT
    ]
    # Ensure the master_md_path exists
    for md_name in md_list:
        # Project mosaic lives in "<fgdb base>_<product>.gdb/<product>".
        projectMD_path = os.path.join(
            "{}_{}.gdb".format(ProjectMDs_fgdb_path[:-4], md_name), md_name)
        arcpy.AddMessage("Project {} Mosaic Dataset Path: {}".format(
            md_name, projectMD_path))
        if arcpy.Exists(projectMD_path):
            master_md_path = os.path.join(
                masterParentDir, masterService,
                "{}_{}.gdb".format(masterName, md_name), md_name)
            master_md_path = os.path.normpath(master_md_path)
            arcpy.AddMessage("Master {} MD Path: {}".format(
                md_name, master_md_path))
            if arcpy.Exists(master_md_path):
                # project_md_path = os.path.join(ProjectMDs_fgdb_path, md_name)
                # if arcpy.Exists(project_md_path):
                # Get a record count from the Master MD
                result = arcpy.GetCount_management(master_md_path)
                countMasterRasters = int(result.getOutput(0))
                arcpy.AddMessage(
                    "Before ingest Master Mosaic Dataset: {0} has {1} row(s)".
                    format(master_md_path, countMasterRasters))
                # # # Get the maximum value of ItemTS From the Master Mosaic Dataset
                # # # The value of ItemTS is based on the last time the row was modified. Knowing
                # # # the current maximum value of ItemTS in the Master will help us determine which rows were
                # # # added as a result of the subsequent call to "Add Raster"
                # # if countMasterRasters > 0:
                # #     fc = r"in_memory/MaxItemTS"
                # #     arcpy.Statistics_analysis(master_md_path,fc,statistics_fields="ItemTS MAX",case_field="#")
                # #
                # #     fields = ['MAX_ITEMTS']
                # #     with arcpy.da.SearchCursor(fc, fields) as cursor:
                # #         for row in cursor:
                # #             MaxItemTSValue = float(row[0])
                # # else:
                # #     MaxItemTSValue = 0.0
                # #
                # # arcpy.AddMessage("Maximum value for ItemTS before adding Project MD rows to Master: {0}".format(MaxItemTSValue))
                # project_md_path = project_md_path.strip("'")
                # # Ensure the project_md_path exists
                # if not arcpy.Exists(project_md_path):
                #     arcpy.AddError("\nExiting: Project Mosaic Dataset doesn't exist: {0}".format(project_md_path))
                #     continue

                # Get a record count from the Project MD just to be sure we have data to ingest
                result = arcpy.GetCount_management(projectMD_path)
                countProjRasters = int(result.getOutput(0))
                if countProjRasters > 0:
                    arcpy.AddMessage("{0} has {1} raster product(s).".format(
                        projectMD_path, countProjRasters))

                    # Gather project_md_path metadata such as spatial reference and cell size
                    descProjectMD = arcpy.Describe(projectMD_path)
                    descProjectMDSR = descProjectMD.SpatialReference
                    ProjectMDSpatialRef = descProjectMD.SpatialReference.exportToString(
                    )
                    arcpy.AddMessage("Ingesting: {0}".format(projectMD_path))
                    # arcpy.AddMessage("Spatial reference of the Project MD is: \n\n{0}\n".format(ProjectMDSpatialRef))
                    # arcpy.AddMessage("Length of SR string is {0}:".format(len(ProjectMDSpatialRef)))

                    # Ensure the project_md_path is 1-band 32-bit floating point (i.e. is an elevation raster)
                    bandCountresult = arcpy.GetRasterProperties_management(
                        projectMD_path,
                        property_type="BANDCOUNT",
                        band_index="")
                    bandCount = int(bandCountresult.getOutput(0))
                    if bandCount == 1:
                        bitDepthresult = arcpy.GetRasterProperties_management(
                            projectMD_path,
                            property_type="VALUETYPE",
                            band_index="")
                        bitDepth = int(bitDepthresult.getOutput(0))
                        # VALUETYPE 9 == 32-bit float (see warning below).
                        if bitDepth == 9:
                            # Determine the cell size of the Project Mosaic Dataset
                            cellsizeResult = arcpy.GetRasterProperties_management(
                                projectMD_path,
                                property_type="CELLSIZEX",
                                band_index="")
                            cellsize = float(cellsizeResult.getOutput(0))
                            arcpy.AddMessage(
                                "Cell size of Project MD: {0} {1}".format(
                                    cellsize, descProjectMDSR.linearUnitName))

                            # Add the rows from the Project MD to the Master MD using the
                            # Table raster type, and don't update the cell size ranges or the boundary
                            raster_type = "Table"
                            arcpy.AddRastersToMosaicDataset_management(
                                master_md_path,
                                raster_type,
                                projectMD_path,
                                update_cellsize_ranges="NO_CELL_SIZES",
                                update_boundary="NO_BOUNDARY",
                                update_overviews="NO_OVERVIEWS",
                                maximum_pyramid_levels="#",
                                maximum_cell_size="0",
                                minimum_dimension="1500",
                                spatial_reference=ProjectMDSpatialRef,
                                filter="#",
                                sub_folder="NO_SUBFOLDERS",
                                duplicate_items_action="ALLOW_DUPLICATES",
                                build_pyramids="NO_PYRAMIDS",
                                calculate_statistics="NO_STATISTICS",
                                build_thumbnails="NO_THUMBNAILS",
                                operation_description="#",
                                force_spatial_reference=
                                "NO_FORCE_SPATIAL_REFERENCE")
                            Utility.addToolMessages()
                            # messages = arcpy.GetMessages()
                            # arcpy.AddMessage("\nResults output from AddRastersToMosaicDataset are: \n{0}\n".format(messages))

                            # Get another record count from the Master MD
                            result = arcpy.GetCount_management(master_md_path)
                            countMasterRasters = int(result.getOutput(0))
                            arcpy.AddMessage(
                                "After ingest Master Mosaic Dataset: {0} has {1} row(s)"
                                .format(master_md_path, countMasterRasters))

                            # NOTE: The following section is commented, as setting Category to 2 for overviews created on the project_md_path doesn't work well
                            # # # Reset Category to 2 for all overview records ingested from the Project MD (for some reason
                            # # # the table raster type sets all rows to Category of 1).
                            # # where_clause = "ItemTS > " + str(MaxItemTSValue) + " AND UPPER(Name) LIKE 'OV_%'"
                            # # arcpy.AddMessage("Mosaic Layer where clause: {0}".format(where_clause))
                            # # arcpy.MakeMosaicLayer_management(master_md_path,"MasterMDLayer",where_clause,template="#",band_index="#",
                            # #                                  mosaic_method="BY_ATTRIBUTE",order_field="ProjectDate",order_base_value="3000",
                            # #                                  lock_rasterid="#",sort_order="ASCENDING",mosaic_operator="LAST",cell_size="1")
                            # #
                            # # messages = arcpy.GetMessages()
                            # # arcpy.AddMessage("\nResults output from MakeMosaicLayer are: \n{0}\n".format(messages))
                            # #
                            # # arcpy.CalculateField_management("MasterMDLayer", field="Category", expression="2", expression_type="VB", code_block="")
                            # # messages = arcpy.GetMessages()
                            # # arcpy.AddMessage("\nResults output from CalculateField are: \n{0}\n".format(messages))

                            # Build the boundary
                            # NOTE: if the boundary has been set to a large shape, then APPEND should have no effect
                            # on the existing boundary
                            #arcpy.BuildBoundary_management(master_md_path, where_clause="", append_to_existing="APPEND", simplification_method="NONE")
                            #messages = arcpy.GetMessages()
                            #arcpy.AddMessage("\nResults output from BuildBoundary are: \n{0}\n".format(messages))

                            # set mosaic properties on the master *AGAIN* to ensure that clip to footprint doesn't get re-set
                            # Clip to footprint is somehow getting reset in 10.3. It should be set so that footprints are NOT clipping data (NOT_CLIP)
                            transmissionFields = "Name;LowPS;CenterX;CenterY;Project_Date;Project_ID;area;el_type;format;h_name;h_unit;h_wkid;v_name;v_unit;cell_h;cell_w;height;width;nodata;pixel;unc_size;xmax;xmin;ymax;ymin;zdev;ZMax;zmean;ZMin;zran;Version;ra_pt_ct;ra_pt_sp;ra_zmax;ra_zmin;ra_zran;PointCount;PointSpacing;is_class;LAS_ZMax;LAS_ZMin"
                            arcpy.AddMessage("\ntransmissionFields: {0}".format(
                                transmissionFields))
                            arcpy.SetMosaicDatasetProperties_management(
                                master_md_path,
                                rows_maximum_imagesize="25000",
                                columns_maximum_imagesize="25000",
                                allowed_compressions="LERC;JPEG;None;LZ77",
                                default_compression_type="LERC",
                                JPEG_quality="75",
                                LERC_Tolerance="0.001",
                                resampling_type="BILINEAR",
                                clip_to_footprints="NOT_CLIP",
                                footprints_may_contain_nodata=
                                "FOOTPRINTS_MAY_CONTAIN_NODATA",
                                clip_to_boundary="NOT_CLIP",
                                color_correction="NOT_APPLY",
                                allowed_mensuration_capabilities="#",
                                default_mensuration_capabilities="NONE",
                                allowed_mosaic_methods=
                                "NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;Seamline;None",
                                default_mosaic_method="ByAttribute",
                                order_field=CMDRConfig.PROJECT_DATE,
                                order_base="3000",
                                sorting_order="ASCENDING",
                                mosaic_operator="FIRST",
                                blend_width="0",
                                view_point_x="600",
                                view_point_y="300",
                                max_num_per_mosaic="40",
                                cell_size_tolerance="0.8",
                                cell_size="1 1",
                                metadata_level="BASIC",
                                transmission_fields=transmissionFields,
                                use_time="DISABLED",
                                start_time_field=CMDRConfig.PROJECT_DATE,
                                end_time_field="#",
                                time_format="#",
                                geographic_transform="#",
                                max_num_of_download_items="40",
                                max_num_of_records_returned="2000",
                                data_source_type="ELEVATION",
                                minimum_pixel_contribution="1",
                                processing_templates="None",
                                default_processing_template="None")
                            messages = arcpy.GetMessages()
                            arcpy.AddMessage(
                                "\nResults output from SetMosaicDatasetProperties are: \n{0}\n"
                                .format(messages))

                            ## 20180505 EIronside: Removed overview generation because they were taking a long time and things seem to work ok without them.
                            ## # define the location of the mosaic dataset overviews
                            ## loc = master_md_path.rfind(".gdb")
                            ## # arcpy.AddMessage("loc = {0}".format(loc))
                            ## # MasterMD_overview_path = master_md_path[:loc] + r".Overviews" + master_md_path[loc+4:]
                            ## MasterMD_overview_path = master_md_path[:loc] + r".Overviews"
                            ## arcpy.AddMessage("Mosaic Dataset Overview Location: {0}".format(MasterMD_overview_path))
                            # Define and Build overviews
                            # Begin building service overviews at low scale (305.74811 Meters)
                            #cellsizeOVR = 305.74811
                            #see what happens without overviews...
                            #DefineBuildOverviews(cellsizeOVR, master_md_path, MasterMD_overview_path, projectMD_path)
                            ## 20180505 EIronside: Removed overview generation because they were taking a long time and things seem to work ok without them.

                            # Compact the master .gdb after the bulk insert.
                            arcpy.Compact_management(
                                in_workspace=os.path.dirname(master_md_path))
                            arcpy.AddMessage(
                                "Completed Project Mosaic Dataset: {}\n\n".
                                format(projectMD_path))
                        else:
                            arcpy.AddWarning(
                                "\nProject Mosaic bit depth is not 32-bit Floating Point. Ingoring mosaic dataset."
                            )
                    else:
                        arcpy.AddWarning(
                            "Project band count is not 1 (expecting single band elevation data). Ingoring mosaic dataset."
                        )
                else:
                    arcpy.AddWarning(
                        "Count of rasters in project mosaic dataset is 0. Please add some rasters to the project."
                    )
                # else:
                #     arcpy.AddWarning("Project Mosaic Dataset path is not found '{}'. Please create it before proceeding.".format(project_md_path))
            else:
                arcpy.AddError(
                    "Master Mosaic Dataset path is not found '{}'. Please create it before proceeding."
                    .format(master_md_path))
        else:
            arcpy.AddWarning(
                "Project Mosaic Dataset path is not found '{}'. Please create it before proceeding."
                .format(projectMD_path))
def createVectorBoundaryC(f_path, f_name, raster_props, stat_out_folder, vector_bound_path, minZ, maxZ, bound_path, elev_type):
    """Derive an attributed vector boundary polygon for one raster file.

    Pipeline (each stage writes a temp shapefile B5..B1 in stat_out_folder):
    RasterDomain -> MultipartToSinglepart -> EliminatePolygonPart ->
    SimplifyPolygon -> Dissolve, then the dissolved footprint is attributed
    (path, name, area, elevation type, z-range plus every raster property in
    KEY_LIST) and clipped against bound_path into vector_bound_path.  The
    temp shapefiles are deleted at the end.

    Parameters:
        f_path: path of the source raster.
        f_name: base name used to build the temp B1..B5 shapefile names.
        raster_props: dict of raster properties keyed by KEY_LIST entries
            (values written into the output fields).
        stat_out_folder: folder for intermediate shapefiles.
        vector_bound_path: output boundary feature class.
        minZ, maxZ: z-range values (only echoed in the startup message here).
        bound_path: feature class used to clip the final boundary.
        elev_type: elevation product type written to the ELEV_TYPE field.

    Returns nothing; the result is vector_bound_path on disk.
    """
    a = datetime.now()
    arcpy.AddMessage("\tCreating {} bound for '{}' using min z '{}' and max z'{}'".format(elev_type, f_path, minZ, maxZ))
    # Temp outputs for each pipeline stage (B5 = raw domain ... B1 = dissolved).
    vector_1_bound_path = os.path.join(stat_out_folder, "B1_{}.shp".format(f_name))
    vector_2_bound_path = os.path.join(stat_out_folder, "B2_{}.shp".format(f_name))
    vector_3_bound_path = os.path.join(stat_out_folder, "B3_{}.shp".format(f_name))
    vector_4_bound_path = os.path.join(stat_out_folder, "B4_{}.shp".format(f_name))
    vector_5_bound_path = os.path.join(stat_out_folder, "B5_{}.shp".format(f_name))
    # Start from a clean slate in case of a previous partial run.
    deleteFileIfExists(vector_bound_path, useArcpy=True)
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)

    # Stage 1: polygon outline of the raster's data extent.
    arcpy.RasterDomain_3d(in_raster=f_path, out_feature_class=vector_5_bound_path, out_geometry_type="POLYGON")
    Utility.addToolMessages()
    # Stage 2: explode multipart outline into single parts.
    arcpy.MultipartToSinglepart_management(in_features=vector_5_bound_path, out_feature_class=vector_4_bound_path)
    Utility.addToolMessages()
    checkRecordCount(vector_4_bound_path)
    # Stage 3: drop fully-contained holes smaller than 10000 sq. miles.
    arcpy.EliminatePolygonPart_management(in_features=vector_4_bound_path, out_feature_class=vector_3_bound_path, condition="AREA", part_area="10000 SquareMiles", part_area_percent="0", part_option="CONTAINED_ONLY")
    Utility.addToolMessages()
    checkRecordCount(vector_3_bound_path)
    # Stage 4: simplify the outline with point-remove at C_SIMPLE_DIST meters.
    arcpy.SimplifyPolygon_cartography(
        in_features=vector_3_bound_path,
        out_feature_class=vector_2_bound_path,
        algorithm="POINT_REMOVE",
        tolerance="{} Meters".format(C_SIMPLE_DIST),
        minimum_area="0 Unknown",
        error_option="RESOLVE_ERRORS",
        collapsed_point_option="NO_KEEP",
        in_barriers=""
    )
    Utility.addToolMessages()
    checkRecordCount(vector_2_bound_path)
    arcpy.AddMessage('ZFlag: ' + arcpy.env.outputZFlag)
    arcpy.AddMessage('MFlag: ' + arcpy.env.outputMFlag)
    # Stage 5: dissolve everything into one (multipart) footprint.
    arcpy.Dissolve_management(in_features=vector_2_bound_path, out_feature_class=vector_1_bound_path, dissolve_field="", statistics_fields="", multi_part="MULTI_PART", unsplit_lines="DISSOLVE_LINES")
    Utility.addToolMessages()
    checkRecordCount(vector_1_bound_path)
    deleteFields(vector_1_bound_path)

    record_count = checkRecordCount(vector_1_bound_path)
    footprint_area = 0
    # NOTE(review): this keeps only the LAST row's area; presumably the
    # dissolve above yields a single multipart row — confirm, otherwise the
    # area is understated.
    for row in arcpy.da.SearchCursor(vector_1_bound_path, ["SHAPE@"]):  # @UndefinedVariable
        shape = row[0]
        footprint_area = shape.getArea("PRESERVE_SHAPE", "SQUAREMETERS")
    if footprint_area <= 0:
        arcpy.AddMessage("\tWARNGING: Area is 0 in {} '{}' bound '{}'".format(elev_type, f_path, vector_bound_path))

    # Add the standard attribute fields (definitions come from FIELD_INFO).
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[PATH][0], field_alias=FIELD_INFO[PATH][1], field_type=FIELD_INFO[PATH][2], field_length=FIELD_INFO[PATH][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[NAME][0], field_alias=FIELD_INFO[NAME][1], field_type=FIELD_INFO[NAME][2], field_length=FIELD_INFO[NAME][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[AREA][0], field_alias=FIELD_INFO[AREA][1], field_type=FIELD_INFO[AREA][2], field_length=FIELD_INFO[AREA][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[ELEV_TYPE][0], field_alias=FIELD_INFO[ELEV_TYPE][1], field_type=FIELD_INFO[ELEV_TYPE][2], field_length=FIELD_INFO[ELEV_TYPE][3])
    addField(in_table=vector_1_bound_path, field_name=FIELD_INFO[RANGE][0], field_alias=FIELD_INFO[RANGE][1], field_type=FIELD_INFO[RANGE][2], field_length=FIELD_INFO[RANGE][3])
    deleteFields(vector_1_bound_path)

    arcpy.AddMessage(raster_props)
    # Add one field per raster property and populate it via the addField
    # 'expression' argument; TEXT values are quoted for the field calculator.
    for field_name in KEY_LIST:
        # brief pause between schema edits (shapefile edits can race on disk)
        time.sleep(0.25)
        field_shpname = FIELD_INFO[field_name][0]
        field_alias = FIELD_INFO[field_name][1]
        field_type = FIELD_INFO[field_name][2]
        field_length = FIELD_INFO[field_name][3]
        field_value = raster_props[field_name]
        if field_type == "TEXT":
            # Strip a trailing backslash so it can't escape the closing quote.
            if str(field_value).endswith('\\'):
                field_value = str(field_value)[0:-1]
            field_value = r'"{}"'.format(field_value)
        addField(in_table=vector_1_bound_path, field_name=field_shpname, field_alias=field_alias, field_type=field_type, field_length=field_length, expression=field_value)

    # Fill the standard fields from the raster path/name and computed area.
    b_f_path, b_f_name = os.path.split(f_path)
    b_f_name = os.path.splitext(b_f_name)[0]
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[PATH][0], expression='"{}"'.format(b_f_path), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[NAME][0], expression='"{}"'.format(b_f_name), expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[AREA][0], expression=footprint_area, expression_type="PYTHON_9.3")
    arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[ELEV_TYPE][0], expression='"{}"'.format(elev_type), expression_type="PYTHON_9.3")
    # Z range = MAX - MIN field values; best effort (fields may be absent).
    try:
        z_expr = "!{}! - !{}!".format(FIELD_INFO[MAX][0], FIELD_INFO[MIN][0])
        arcpy.CalculateField_management(in_table=vector_1_bound_path, field=FIELD_INFO[RANGE][0], expression=z_expr, expression_type="PYTHON_9.3")
    except:
        pass

    # Clip the attributed footprint to the supplied boundary to produce the
    # final output.
    deleteFileIfExists(vector_bound_path, True)
    arcpy.Clip_analysis(in_features=vector_1_bound_path, clip_features=bound_path, out_feature_class=vector_bound_path, cluster_tolerance="")
    Utility.addToolMessages()
    checkRecordCount(vector_bound_path)
    deleteFields(vector_bound_path)

    #debug = False
    #try:
    #    debug = (str(f_path).find("alamazoo") >= 0)
    #except:
    #    debug = False
    #if not debug:
    # Remove the intermediate shapefiles.
    deleteFileIfExists(vector_1_bound_path, useArcpy=True)
    deleteFileIfExists(vector_2_bound_path, useArcpy=True)
    deleteFileIfExists(vector_3_bound_path, useArcpy=True)
    deleteFileIfExists(vector_4_bound_path, useArcpy=True)
    deleteFileIfExists(vector_5_bound_path, useArcpy=True)
    #else:
    #    arcpy.AddMessage("\tleaving artifacts for {} '{}'".format(elev_type, vector_bound_path))

    doTime(a, "\tCreated BOUND {}".format(vector_bound_path))