def AddMDMasterToCMDR(wmxJobId, masterParentDir, masterName, masterCellSize_m, masterServerConnectionFilePath):
    """Register (or update) a Mosaic Dataset Master record in the CMDR.

    If masterName contains a path separator ('/' or '\\'), the leading
    segment is peeled off and used as the service folder name.  Falls back
    to CMDRConfig.DEFAULT_MDMASTER_NAME when no master name is supplied.
    Emits arcpy errors (without raising) when required inputs are missing.
    """
    masterServiceFolder = None
    # Split "<folder>/<name>" (or backslash variant) into folder + name.
    sep_pos = masterName.find("/")
    if sep_pos < 0:
        sep_pos = masterName.find("\\")
    if sep_pos >= 0:
        masterServiceFolder = masterName[:sep_pos]
        masterName = masterName[sep_pos + 1:]

    Utility.printArguments([
        "wmxJobId", "masterParentDir", "masterName", "masterCellSize_m",
        "masterServerConnectionFilePath", "masterServiceFolder"
    ], [
        wmxJobId, masterParentDir, masterName, masterCellSize_m,
        masterServerConnectionFilePath, masterServiceFolder
    ], "B01 AddMDMasterToCMDR")

    # Validate inputs; report (not raise) on failure, mirroring tool style.
    if wmxJobId is None or int(wmxJobId) <= 0:
        arcpy.AddError("Master wmx Job ID is not set. cannot continue.")
    elif masterParentDir is None:
        arcpy.AddError("Master parent directory not set. cannot continue.")
    else:
        if masterName is None:
            masterName = CMDRConfig.DEFAULT_MDMASTER_NAME

        # Point the geoprocessing environment at the job's data workspace,
        # then persist the master record (edit session handled in Utility).
        Utility.setWMXJobDataAsEnvironmentWorkspace(wmxJobId)
        master_aoi = Utility.getJobAoi(wmxJobId)

        md_master = CMDR.MDMaster()
        md_master.addOrUpdate(
            wmx_job_ID=wmxJobId,
            parent_dir=masterParentDir,
            master_name=masterName,
            masterServerConnectionFilePath=masterServerConnectionFilePath,
            masterCellSize_m=masterCellSize_m,
            masterServiceFolder=masterServiceFolder,
            master_AOI=master_aoi)
        # mdMaster_row = list(md_master.getExistingMDRow(wmxJobId))
        # updateMasterDomain(md_master.getMDPath(mdMaster_row), wmxWorkspace)

    arcpy.AddMessage("Operation complete")
def CreateMasterMosaicDatasets(wmxJobID):
    """Create one master file geodatabase + mosaic dataset per derivative type.

    Looks up the MDMaster record for the given WMX job, then for each
    derivative (DTM, DSM, DLM, DHM, DCM, INT):
      * creates '<MasterName>_<type>.gdb' under the master path if missing,
      * copies the master's AOI feature class (filtered to this WMX job)
        into that fGDB if missing,
      * delegates mosaic dataset creation to CreateMasterMosaicDataset.

    Emits an arcpy error (does not raise) if the parent path doesn't exist.
    """
    Utility.printArguments(["wmxJobID"], [wmxJobID],
                           "B02 CreateMasterMosaicDatasets")
    Utility.setWMXJobDataAsEnvironmentWorkspace(wmxJobID)

    MDMaster = CMDR.MDMaster()
    mdMaster_row = MDMaster.getMDMaster(wmxJobID)
    parent_path = MDMaster.getMDParentPath(mdMaster_row)
    mdMaster_path = MDMaster.getMDPath(mdMaster_row)
    MasterMDName = MDMaster.getMDName(mdMaster_row)
    MasterMDCellSize_Meters = MDMaster.getMDCellSize(mdMaster_row)
    # mdMaster_aoi = MDMaster.getMDAOI(mdMaster_row)

    if arcpy.Exists(parent_path):
        # Ensure the on-disk master folder exists before creating fGDBs in it.
        if not os.path.exists(mdMaster_path):
            os.makedirs(mdMaster_path)
        # master_fgdb_path = os.path.join(mdMaster_path, MasterMDName)

        md_list = [FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
                   FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT]
        for md_name in md_list:
            local_fgdb_name = "{}_{}.gdb".format(MasterMDName, md_name)
            arcpy.AddMessage("local_fgdb_name '{}'".format(local_fgdb_name))
            local_fgdb_path = os.path.join(mdMaster_path, local_fgdb_name)
            arcpy.AddMessage("local_fgdb_path '{}'".format(local_fgdb_path))

            # Create the per-derivative file geodatabase only once.
            if not os.path.exists(local_fgdb_path):
                arcpy.AddMessage("creating MD master fGDB '{} / {}'".format(
                    mdMaster_path, local_fgdb_name))
                arcpy.CreateFileGDB_management(mdMaster_path, local_fgdb_name)
                Utility.addToolMessages()

            # Filter the master feature class down to this WMX job's record.
            where_clause = "{} = {}".format(
                arcpy.AddFieldDelimiters(MDMaster.fclass,
                                         CMDRConfig.field_MDMaster_WMXJobID),
                wmxJobID)
            # mdMasterLayer = "MDMasterLayer"
            # arcpy.MakeFeatureLayer_management(in_features=MDMaster.fclass, out_layer=mdMasterLayer, where_clause=where_clause)

            local_fgdb_MDMasterFC = os.path.join(local_fgdb_path, MasterMDName)
            if not arcpy.Exists(local_fgdb_MDMasterFC):
                arcpy.FeatureClassToFeatureClass_conversion(
                    in_features=MDMaster.fclass,
                    out_path=local_fgdb_path,
                    out_name=MasterMDName,
                    where_clause=where_clause)

            # Sibling helper (defined elsewhere in this module) builds the MD.
            CreateMasterMosaicDataset(local_fgdb_path, md_name,
                                      local_fgdb_MDMasterFC,
                                      MasterMDCellSize_Meters)
    else:
        arcpy.AddError("MD Master parent path doesn't exist '{}'. Cannot continue.".format(parent_path))
def CreateContourCache(jobID, serverConnectionFile):
    """Driver for the C03 contour-cache tool: resolve the project for a WMX
    job and hand off to processJob.

    Emits an arcpy error (does not raise) when no project is found.

    NOTE(review): the visible processJob in this file requires a
    serverFunctionPath argument with no default; this call passes only four
    arguments.  Presumably this driver targets a different processJob
    (module-level import not visible here) -- confirm against the imports.
    """
    Utility.printArguments(["WMX Job ID", "serverConnectionFile"],
                           [jobID, serverConnectionFile],
                           "C03 CreateContourCache")
    Utility.setWMXJobDataAsEnvironmentWorkspace(jobID)
    ProjectJob = CMDR.ProjectJob()
    project, ProjectUID = ProjectJob.getProject(jobID)  # @UnusedVariable
    if project is not None:
        processJob(ProjectJob, project, ProjectUID, serverConnectionFile)
    else:
        arcpy.AddError("Failed to find project for job.")
    arcpy.AddMessage("Operation complete")
def processJob(ProjectJob, project, ProjectUID, serverConnectionFile, serverFunctionPath, update=False, runCount=0):
    """A07_A: repair mosaic dataset paths after a project move, then stage a
    service definition for each derivative mosaic dataset and publish it.

    For each derivative (DTM, DSM, DLM, DHM, DCM, INT) under the project's
    PUBLISHED fGDB this:
      1. repairs raster paths (old project dir -> new project dir), plus the
         optional *_OCS sibling mosaic,
      2. builds service name/tags/description from MD properties,
      3. creates an .sddraft, applies server-side functions, analyzes it, and
         stages/publishes via the 32-bit A07_B_StageSD.py helper,
      4. deletes DHM/DCM mosaics afterwards so they can be recreated
         (publishing is known to break these referenced mosaics).

    Python 2 code: uses '<>' and dict.iteritems().
    """
    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
        ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectState = ProjectJob.getState(project)
    ProjectYear = ProjectJob.getYear(project)
    ProjectAlias = ProjectJob.getAlias(project)
    ProjectAliasClean = ProjectJob.getAliasClean(project)
    project_wmx_jobid = ProjectJob.getWMXJobID(project)
    Deliver = CMDR.Deliver()
    # delivery = Deliver.getDeliver(project_wmx_jobid)
    delivery = Deliver.getDeliver(ProjectID)
    dateDeliver = Deliver.getDeliverDate(delivery)
    startupType = "STARTED"
    Utility.printArguments([
        "ProjectJob", "project", "ProjectUID", "serverConnectionFile",
        "serverFunctionPath", "update", "runCount", "ProjectFolder",
        "ProjectID", "ProjectState", "ProjectYear", "ProjectAlias",
        "ProjectAliasClean", "startupType"
    ], [
        ProjectJob, project, ProjectUID, serverConnectionFile,
        serverFunctionPath, update, runCount, ProjectFolder, ProjectID,
        ProjectState, ProjectYear, ProjectAlias, ProjectAliasClean,
        startupType
    ], "A07_A Publish Project")

    # serverFunctionPath = Raster.getServerRasterFunctionsPath(jobID)
    ssFunctions = None
    if serverFunctionPath is not None:
        ssFunctions = Raster.getServerSideFunctions(serverFunctionPath)
    # Services are published into a folder named after the project's state.
    folderName = ProjectState

    # If the project has been moved for publishing, update the project directory
    old_path, new_path = updateJobDirectory(project_wmx_jobid, ProjectJob,
                                            project)
    old_ProjectID = ProjectID

    arcpy.AddMessage(
        "\n\n-----------------------------------------------------------")
    # Best-effort probe: when old/new paths already match, check the DTM
    # mosaic's first-record Project_Dir to detect an earlier move that the
    # CMDR doesn't know about.  Any failure here is deliberately swallowed.
    try:
        arcpy.AddMessage("Job directory paths: \n\tOLD: {}\n\tNEW: {}".format(
            old_path, new_path))
        doRepath = True  # NOTE(review): computed but never read afterwards
        if str(old_path).lower().strip() == str(new_path).lower().strip():
            arcpy.AddMessage(
                "Job directory paths old/new match, checking MD first record project folder value"
            )
            filegdb_name = "{}_{}.gdb".format(
                ProjectFolder.published.fgdb_name[:-4], FoldersConfig.DTM)
            arcpy.AddMessage("checking fgdb '{}' ".format(filegdb_name))
            dtm_md_path = os.path.join(new_path, ProjectID,
                                       FoldersConfig.published_dir,
                                       filegdb_name, FoldersConfig.DTM)
            arcpy.AddMessage(
                "checking MD '{}' first record project folder value".format(
                    dtm_md_path))
            record_project_path, uid = Utility.getExistingRecord(
                dtm_md_path, ["Project_Dir"], 0)
            arcpy.AddMessage("first record is {}".format(record_project_path))
            record_project_path = list(record_project_path)[0]
            arcpy.AddMessage(
                "MD first record project folder value is {}".format(
                    record_project_path))
            # add a slash back in because strings remove it and remove the project name at the end
            old_path, old_ProjectID = os.path.split("\{}".format(
                str(record_project_path).strip()))
            arcpy.AddMessage(
                "Job directory paths: \n\tOLD: {}\n\tNEW: {}\n\tOLD Project ID: {}\n\tNEW Project ID: {}"
                .format(old_path, new_path, old_ProjectID, ProjectID))
            if str(old_path).lower().strip() == str(new_path).lower().strip():
                doRepath = False
                arcpy.AddMessage("Job directory paths match, doRepath = False")
    except:
        pass
    arcpy.AddMessage(
        "-----------------------------------------------------------\n\n")

    md_list = [
        FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
        FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT
    ]
    for md_name in md_list:
        update_paths_success = False
        # @TODO Add more info here!
        serviceDescription = "for project '{}' within state {} published in the year {}".format(
            ProjectAlias, ProjectState, ProjectYear)
        serviceTags = ",".join(
            [ProjectID, ProjectAliasClean, ProjectState, str(ProjectYear)])

        # Derive '<fgdb base>_<derivative>.gdb'; strip a trailing '.gdb' first.
        filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name,
                                          md_name)
        if ProjectFolder.published.fgdb_name.endswith(".gdb"):
            filegdb_name = "{}_{}.gdb".format(
                ProjectFolder.published.fgdb_name[:-4], md_name)
        # ProjectMDs_fgdb_path = os.path.join(ProjectFolder.published.path, filegdb_name)

        new_project_path = os.path.join(new_path, ProjectID)
        old_project_path = os.path.join(old_path, ProjectID)
        # The project ID itself may also have changed with the move.
        if str(ProjectID).lower().strip() != str(
                old_ProjectID).lower().strip():
            old_project_path = os.path.join(old_path, old_ProjectID)
        new_publish_path = os.path.join(new_project_path, "PUBLISHED")
        old_publish_path = os.path.join(old_project_path, "PUBLISHED")
        new_delivered_path = os.path.join(new_project_path, "DELIVERED")
        old_delivered_path = os.path.join(old_project_path, "DELIVERED")
        new_projectMDs_fgdb_path = os.path.join(new_publish_path, filegdb_name)
        arcpy.AddMessage(
            "File Geodatabase Path: {0}".format(new_projectMDs_fgdb_path))

        # Ensure the master_md_path exists
        if arcpy.Exists(new_projectMDs_fgdb_path):
            project_md_path = os.path.join(new_projectMDs_fgdb_path, md_name)
            arcpy.AddMessage(
                "Mosaic Dataset Path: {0}".format(project_md_path))
            if arcpy.Exists(project_md_path):
                # --- 1. Repair raster paths old -> new ---------------------
                try:
                    arcpy.AddMessage(
                        "Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}"
                        .format(new_projectMDs_fgdb_path, old_project_path,
                                new_project_path))
                    arcpy.RepairMosaicDatasetPaths_management(
                        in_mosaic_dataset=project_md_path,
                        paths_list="# {0} {1}".format(old_project_path,
                                                      new_project_path),
                        where_clause="1=1")
                    Utility.addToolMessages()
                    # arcpy.AddMessage("Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}".format(new_projectMDs_fgdb_path, old_delivered_path, new_delivered_path))
                    # arcpy.RepairMosaicDatasetPaths_management(in_mosaic_dataset=project_md_path, paths_list="# {0} {1}".format(old_delivered_path, new_delivered_path), where_clause="1=1")
                    update_paths_success = True
                except:
                    # DHM/DCM are rebuilt later, so a repair failure there
                    # doesn't warrant a warning.
                    if md_name <> FoldersConfig.DHM and md_name <> FoldersConfig.DCM:
                        arcpy.AddWarning(
                            "Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}"
                            .format(project_md_path))
                    # Export the path table so an operator can fix by hand.
                    try:
                        out_table = "{}_Paths".format(project_md_path)
                        arcpy.ExportMosaicDatasetPaths_management(
                            in_mosaic_dataset=project_md_path,
                            out_table=out_table,
                            where_clause="1=1",
                            export_mode="ALL",
                            types_of_paths="RASTER;ITEM_CACHE")
                        Utility.addToolMessages()
                        arcpy.AddMessage(
                            "List of repaired Mosaic Dataset Paths: {}".format(
                                out_table))
                    except:
                        pass

                # Same repair for the optional overview/cache sibling MD.
                project_md_ocs_path = "{}_OCS".format(project_md_path)
                if arcpy.Exists(project_md_ocs_path):
                    try:
                        arcpy.AddMessage(
                            "Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}"
                            .format(project_md_ocs_path, old_project_path,
                                    new_project_path))
                        arcpy.RepairMosaicDatasetPaths_management(
                            in_mosaic_dataset=project_md_ocs_path,
                            paths_list="# {0} {1}".format(
                                old_project_path, new_project_path),
                            where_clause="1=1")
                        Utility.addToolMessages()
                    except:
                        arcpy.AddWarning(
                            "Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}"
                            .format(project_md_ocs_path))
                        try:
                            out_table = "{}_Paths".format(project_md_ocs_path)
                            arcpy.ExportMosaicDatasetPaths_management(
                                in_mosaic_dataset=project_md_ocs_path,
                                out_table=out_table,
                                where_clause="1=1",
                                export_mode="ALL",
                                types_of_paths="RASTER;ITEM_CACHE")
                            Utility.addToolMessages()
                            arcpy.AddMessage(
                                "List of repaired Mosaic Dataset Paths: {}".
                                format(out_table))
                        except:
                            pass

                # --- 2. Build service metadata from MD properties ----------
                serviceName = "{}_{}".format(ProjectID, md_name)
                arcpy.AddMessage("Service Name: {0}".format(serviceName))
                # Retrieve some properties from the Mosaic Dataset to place in the tags field
                cellsizeResult = arcpy.GetRasterProperties_management(
                    project_md_path, property_type="CELLSIZEX", band_index="")
                Utility.addToolMessages()
                cellsizeX = cellsizeResult.getOutput(0)

                # Get the units of the Mosaic Dataset
                descMD = arcpy.Describe(project_md_path)
                SpatRefMD = descMD.SpatialReference
                SpatRefUnitsMD = SpatRefMD.linearUnitName
                SpatRefNameMD = SpatRefMD.name
                arcpy.AddMessage(
                    "Spatial Reference name of Mosaic Dataset: {0}".format(
                        SpatRefNameMD))
                arcpy.AddMessage(
                    "Spatial Reference X,Y Units of Mosaic Dataset: {0}".
                    format(SpatRefUnitsMD))

                # append the cellsize and units of the Mosaic Dataset to the tags
                serviceTags = "{}, {}, {}".format(serviceTags, cellsizeX,
                                                  SpatRefUnitsMD)
                serviceDescription = "{} {}. Horizontal spatial reference is {} and cell size is {} {}.".format(
                    md_name, serviceDescription, SpatRefNameMD, cellsizeX,
                    SpatRefUnitsMD)
                serviceDescription = "{}. Please note that cell size does not refer to the underlying data's cell size.".format(
                    serviceDescription)
                serviceDescription = "{}. You must check the meta-data for the underlying elevation data's resolution information (cell width, cell height, and Lidar point spacing).".format(
                    serviceDescription)

                arcpy.AddMessage("Service Tags: {0}".format(serviceTags))
                arcpy.AddMessage(
                    "Service description: {0}".format(serviceDescription))

                # Look for RasterPath in the list of allowed fields, and if found, don't publish
                # the mosaic dataset. Exposing the contents of RasterPath could compromise the
                # security of the Image Service.
                allowedFieldListMD = descMD.AllowedFields
                arcpy.AddMessage("AllowedFields in MD Properties: {0}".format(
                    allowedFieldListMD))
                # NOTE(review): 'True or' makes this condition always pass --
                # the RasterPath guard is currently disabled.
                if True or "RASTERPATH;" not in allowedFieldListMD.upper():
                    # --- 3. Order server-side functions so the preferred ----
                    # renderer (Hillshade, or *Meter* for intensity) is default.
                    ssFunctionsLst = list([])
                    ssFunctionsList = ""
                    if ssFunctions is not None:
                        ssFunctionsLst = ssFunctions.split(";")
                        if len(ssFunctionsLst) > 0:
                            foundHillshade = False
                            if md_name <> FoldersConfig.INT:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'HILLSHADE' in s.upper():
                                        arcpy.AddMessage(
                                            "Will re-order SS Functions for {} so {} is default"
                                            .format(md_name, s))
                                        foundHillshade = True
                                        break
                            else:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'METER' in s.upper():
                                        arcpy.AddMessage(
                                            "Will re-order SS Functions for {} so {} is default"
                                            .format(md_name, s))
                                        foundHillshade = True
                                        break
                            # if Hillshade is found then re-order the list
                            # Don't apply hillshade to intensity
                            if foundHillshade:
                                ssFunctionsLst.insert(0, ssFunctionsLst.pop(i))
                                # NOTE(review): format string below lacks a
                                # '{}' placeholder, so the list is never shown.
                                arcpy.AddMessage(
                                    "Re-ordered SS Functions to (first is default): "
                                    .format(ssFunctionsLst))

                            # convert the list of server-side functions into a comma delimited string
                            ssFunctionsList = ",".join(ssFunctionsLst)
                            arcpy.AddMessage(
                                "Server-side Functions: {0}\n".format(
                                    ssFunctionsList))

                    # --- 4. Create image service definition draft ----------
                    arcpy.AddMessage(
                        "Creating image service definition draft file: ")
                    wsPath = os.path.dirname(os.path.dirname(project_md_path))
                    sddraftPath = os.path.join(wsPath, serviceName + ".sddraft")
                    arcpy.Delete_management(sddraftPath)

                    arcpy.AddMessage("\tMDPath='{}'".format(project_md_path))
                    arcpy.AddMessage("\tSDPath='{}'".format(sddraftPath))
                    arcpy.AddMessage("\tServiceName='{}'".format(serviceName))
                    arcpy.AddMessage("\tFolderName='{}'".format(folderName))
                    arcpy.AddMessage(
                        "\tSummary='{}'".format(serviceDescription))
                    arcpy.AddMessage("\tTags='{}'".format(serviceTags))
                    arcpy.CreateImageSDDraft(project_md_path, sddraftPath,
                                             serviceName,
                                             "ARCGIS_SERVER",
                                             connection_file_path=None,
                                             copy_data_to_server=False,
                                             folder_name=folderName,
                                             summary=serviceDescription,
                                             tags=serviceTags)

                    # Edit the service definition draft if user specified server-side functions
                    # or if user wants to enable download on the Image Service
                    updateSDServerSideFunctions(ssFunctionsLst,
                                                ssFunctionsList, sddraftPath,
                                                update)

                    # --- 5. Analyze, then stage + publish ------------------
                    arcpy.AddMessage(
                        "Analyzing service definition draft file...")
                    analysis = arcpy.mapping.AnalyzeForSD(sddraftPath)
                    for key in ('messages', 'warnings', 'errors'):
                        arcpy.AddMessage("----" + key.upper() + "---")
                        analysis_vars = analysis[key]
                        for ((message, code), data
                             ) in analysis_vars.iteritems():  # @UnusedVariable
                            msg = (" ", message, " (CODE %i)" % code)
                            arcpy.AddMessage("".join(msg))

                    if analysis['errors'] == {}:
                        arcpy.AddMessage(
                            "Staging and publishing service definition...")
                        # StageService
                        arcpy.AddMessage("Staging sddraft file to sd file")
                        sdPath = sddraftPath.replace(".sddraft", ".sd")
                        arcpy.Delete_management(sdPath)
                        # Staging must run 32-bit, hence the external helper.
                        RunUtil.runTool(r'ngce\pmdm\a\A07_B_StageSD.py', [
                            sddraftPath, sdPath, serverConnectionFile,
                            startupType
                        ],
                                        bit32=True,
                                        log_path=ProjectFolder.derived.path)
                        # arcpy.StageService_server(sddraftPath, sdPath)
                        # # try:
                        # #     # UploadServiceDefinition
                        # #     arcpy.AddMessage("Publishing mosaic data set as image service.")
                        # #     arcpy.UploadServiceDefinition_server(sdPath, serverConnectionFile, "#", "#", "#", "#", startupType)
                        # # except Exception as e:
                        # #     if runCount < 1:
                        # #         # PublishMosaicDataset(jobID, serverConnectionFile, True, 1)
                        # #         processJob(ProjectJob, project, ProjectUID, serverConnectionFile, serverFunctionPath, update=True, runCount=1)
                        # #         break
                        # #     else:
                        # #         raise e
                    else:
                        # if the sddraft analysis contained errors, display them
                        arcpy.AddError(analysis['errors'])
                else:
                    arcpy.AddError(
                        "Exiting: Found 'RasterPath' in list of allowed MD fields. Please remove this field from the list before publishing."
                    )
                    arcpy.AddError(
                        " To remove RasterPath from the list, go to Mosaic Dataset Properties, Defaults tab, Allowed Fields..."
                    )
                # Clean up and delete the .sd file
                # NOTE(review): sdPath is only bound on the successful-analysis
                # branch above; on other branches this line would raise
                # NameError on the first iteration -- confirm intended.
                Utility.deleteFileIfExists(sdPath, False)

                # For some reason publishing breaks the referenced mosaics.
                # The function paths also don't update properly.
                # So delete them and re-create later.
                if md_name == FoldersConfig.DHM or md_name == FoldersConfig.DCM:
                    arcpy.AddMessage(
                        "Deleting Mosaic Dataset to recreate later {}".format(
                            project_md_path))
                    Utility.deleteFileIfExists(project_md_path, True)
            else:
                arcpy.AddWarning(
                    "Project mosaic dataset not found '{}'.".format(
                        project_md_path))
        else:
            arcpy.AddError(
                "Project file geodatabase not found '{}'. Please add this before proceeding."
                .format(new_projectMDs_fgdb_path))
    # FOR LOOP

    ##
    ## Re-create the MD if it is FoldersConfig.DHM, FoldersConfig.DCM
    ##
    A06_A_CreateProjectMosaicDataset.CreateProjectMDs(project_wmx_jobid,
                                                      dateDeliver=dateDeliver)
def ImportContourCacheToMaster(jobID, serverConnectionFilePath, masterServiceName, update=False, runCount=0):
    """C04: import a project's pre-built contour cache tiles into the master
    contour map service, clipped to the project's DTM boundary.

    Parameters mirror the WMX tool signature; 'update' and 'runCount' are
    accepted but not read in this body.
    """
    a = datetime.datetime.now()   # per-step timer (advanced by doTime)
    aa = a                        # overall timer for the final message
    cache_dir = ContourConfig.CACHE_FOLDER
    # @TODO: Remove this workaround once fix is validated on NGCE
    # Force a known-good server connection path unless the supplied one
    # already points at the expected host.
    if serverConnectionFilePath is None or len(
            str(serverConnectionFilePath)) <= 1 or str(
                serverConnectionFilePath).lower().find(
                    "aiotxftw3gi013".lower()) < 0:
        serverConnectionFilePath = "//aiotxftw6na01data/SMB03/elevation/WorkflowManager/arcgis on aiotxftw3gi013.usda.net"

    Utility.printArguments([
        "WMX Job ID", "serverConnectionFilePath", "cache_dir",
        "masterServiceName", "update", "runCount"
    ], [
        jobID, serverConnectionFilePath, cache_dir, masterServiceName, update,
        runCount
    ], "C04 ImportContourCacheToMaster")

    Utility.setWMXJobDataAsEnvironmentWorkspace(jobID)

    ProjectJob = CMDR.ProjectJob()
    project, ProjectUID = ProjectJob.getProject(jobID)  # @UnusedVariable
    if project is not None:
        projectID = ProjectJob.getProjectID(project)
        ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(
            ProjectJob, project)
        # con_folder = ProjectFolder.derived.contour_path
        # contour_file_gdb_path = os.path.join(con_folder, CONTOUR_GDB_NAME)
        # PublishFolder = ProjectFolder.published.path
        derived_filegdb_path = ProjectFolder.derived.fgdb_path
        # contourMerged_Name = (ContourConfig.MERGED_FGDB_NAME).format(projectID)
        # contour_pub_file_gdb_path = os.path.join(PublishFolder, contourMerged_Name)
        # contourMxd_Name = ContourConfig.CONTOUR_MXD_NAME
        # contourMxd_path = os.path.join(PublishFolder, contourMxd_Name)
        # ContourFC = os.path.join(contour_pub_file_gdb_path, ContourConfig.CONTOUR_FC_WEBMERC)
        # ContourBoundFC = os.path.join(contour_pub_file_gdb_path, ContourConfig.CONTOUR_BOUND_FC_WEBMERC)

        # Project DTM boundary doubles as the import's area of interest.
        ContourBoundFC = A05_C_ConsolidateRasterInfo.getRasterBoundaryPath(
            derived_filegdb_path, DTM)
        projectServiceName = "{}_{}".format(
            projectID, ContourConfig.CONTOUR_2FT_SERVICE_NAME
        )  # arcpy.GetParameterAsText(3)
        projectFolder = ProjectJob.getState(
            project)  # arcpy.GetParameterAsText(4)

        # Get input parameters: cache layout is
        # <CACHE_FOLDER>/[<state>_]<projectService>/Layers
        projectCache = os.path.join(ContourConfig.CACHE_FOLDER,
                                    projectServiceName, "Layers")
        if projectFolder is not None and len(projectFolder) > 0:
            projectCache = os.path.join(
                ContourConfig.CACHE_FOLDER,
                "{}_{}".format(projectFolder, projectServiceName),
                "Layers")  # arcpy.GetParameterAsText(0) #YES
        areaOfInterest = ContourBoundFC  # arcpy.GetParameterAsText(1) #YES
        # serverConnectionFilePath = serverConnectionFilePath  # arcpy.GetParameterAsText(2)

        # Target master service path; strip a trailing '.ags' if present.
        masterService = os.path.join(
            serverConnectionFilePath, "{}_{}.MapServer".format(
                masterServiceName,
                ContourConfig.CONTOUR_2FT_SERVICE_NAME))  # YES
        if serverConnectionFilePath.endswith(".ags"):
            masterService = os.path.join(
                serverConnectionFilePath[:-4], "{}_{}.MapServer".format(
                    masterServiceName, ContourConfig.CONTOUR_2FT_SERVICE_NAME))
        arcpy.AddMessage(
            "Location of master service is: {0}".format(masterService))
        # scales = ContourConfig.CONTOUR_SCALES_STRING
        # cachingInstances = ContourConfig.CACHE_INSTANCES  # This should be increased based on server resources

        Utility.printArguments([
            "projectCache", "areaOfInterest", "projectFolder",
            "projectServiceName", "ContourBoundFC", "masterService"
        ], [
            projectCache, areaOfInterest, projectFolder, projectServiceName,
            ContourBoundFC, masterService
        ], "C04 ImportContourCacheToMaster")

        # Import cache tiles from a project service into the master service
        a = doTime(
            a, "Ready to start import of '{}' into '{}'".format(
                projectCache, masterService))
        arcpy.ImportMapServerCache_server(
            input_service=masterService,
            source_cache_type="CACHE_DATASET",
            source_cache_dataset=projectCache,
            source_tile_package="",
            upload_data_to_server="DO_NOT_UPLOAD",
            scales=ContourConfig.CONTOUR_SCALES_STRING,
            num_of_caching_service_instances=ContourConfig.CACHE_INSTANCES,
            area_of_interest=areaOfInterest,
            overwrite="OVERWRITE"  # @TODO: Verify this is right
        )
        a = doTime(
            a, "TWO: Finished import of '{}' into '{}'".format(
                projectCache, masterService))
        a = doTime(
            a, "Import of '{}' into '{}' finished".format(projectCache,
                                                          masterService))
    else:
        a = doTime(a, "Failed to find project for job.")

    doTime(aa, "Operation complete")
def PublishMDMasterMosaicDataset(jobID, serverFunctionPath, update=False, runCount=0):
    """B03: publish each master mosaic dataset (one per derivative) as an
    image service, optionally publishing the contour master first.

    On an upload failure with runCount < 1, retries once recursively with
    update=True, then stops processing further derivatives (break).
    Python 2 code: uses dict.iteritems().
    """
    Utility.printArguments(["jobID", "serverFunctionPath"],
                           [jobID, serverFunctionPath],
                           "B03 PublishMDMasterMosaicDataset")
    startupType = None

    # serverFunctionPath = Raster.getServerRasterFunctionsPath(jobID)
    ssFunctions = None
    if serverFunctionPath is not None:
        ssFunctions = Raster.getServerSideFunctions(serverFunctionPath)
        # ssFunctions = Raster.getServerSideFunctions()
    Utility.setWMXJobDataAsEnvironmentWorkspace(jobID)

    MDMaster = CMDR.MDMaster()
    mdMaster_row = MDMaster.getMDMaster(jobID)
    if mdMaster_row is not None:
        mdMaster_path = MDMaster.getMDPath(
            mdMaster_row
        )  # ProjectFolders.getProjectFolderFromDBRow(ProjectJob, project)
        MasterMDName = MDMaster.getMDName(
            mdMaster_row)  # ProjectJob.getProjectID(project)
        serverConnectionFile = MDMaster.getMDConFilePath(mdMaster_row)
        folderName = MDMaster.getMDServiceFolder(mdMaster_row)
        cellSize_m = MDMaster.getMDCellSize(mdMaster_row)
        Utility.printArguments([
            "folderName", "serverConnectionFile", "MasterMDName",
            "mdMaster_path", "cellSize_m"
        ], [
            folderName, serverConnectionFile, MasterMDName, mdMaster_path,
            cellSize_m
        ], "PublishMDMasterMosaicDataset")

        # First-time publish also publishes the contour master service.
        if not update:
            B04PublishContourMaster.publishContourMaster(
                mdMaster_path, serverConnectionFile, MasterMDName, folderName)

        # ProjectState = MDMaster  # ProjectJob.getState(project)
        # ProjectYear = ProjectJob.getYear(project)
        # ProjectAlias = ProjectJob.getAlias(project)
        # ProjectAliasClean = ProjectJob.getAliasClean(project)
        # @TODO Add more info here!
        serviceDescription = "Elevation master service '{}'.".format(
            MasterMDName)
        serviceTags = ",".join(
            [MasterMDName, "Master", "Elevation", "Mosaic", "Dataset"])

        md_list = [
            FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
            FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT
        ]
        for md_name in md_list:
            serviceName = "{}_{}".format(MasterMDName, md_name)
            local_fgdb_name = "{}.gdb".format(serviceName)
            arcpy.AddMessage("local_fgdb_name '{}'".format(local_fgdb_name))
            local_fgdb_path = os.path.join(mdMaster_path, local_fgdb_name)
            arcpy.AddMessage("local_fgdb_path '{}'".format(local_fgdb_path))
            project_md_path = os.path.join(local_fgdb_path, md_name)
            arcpy.AddMessage("MD path '{}'".format(project_md_path))

            if arcpy.Exists(project_md_path):
                # Get the units of the Mosaic Dataset
                descMD = arcpy.Describe(project_md_path)
                SpatRefMD = descMD.SpatialReference
                SpatRefUnitsMD = SpatRefMD.linearUnitName
                SpatRefNameMD = SpatRefMD.name
                # Retrieve some properties from the Mosaic Dataset to place in the tags field
                cellsizeResult = arcpy.GetRasterProperties_management(
                    project_md_path, property_type="CELLSIZEX", band_index="")
                Utility.addToolMessages()
                cellsizeX = cellsizeResult.getOutput(0)
                # Informational only; cellsizeX is a string from getOutput().
                if cellsizeX != cellSize_m:
                    arcpy.AddMessage(
                        "mosaic dataset cell size {} != requested cell size {}"
                        .format(cellsizeX, cellSize_m))

                arcpy.AddMessage(
                    "Spatial Reference name of Mosaic Dataset: {0}".format(
                        SpatRefNameMD))
                arcpy.AddMessage(
                    "Spatial Reference X,Y Units of Mosaic Dataset: {0}".
                    format(SpatRefUnitsMD))

                # NOTE(review): serviceDescription/serviceTags are re-assigned
                # from their previous values each iteration, so metadata
                # accumulates across derivatives -- confirm intended.
                serviceDescription = "{} {} horizontal spatial reference is {} and cell size is {} {}".format(
                    md_name, serviceDescription, SpatRefNameMD, cellsizeX,
                    SpatRefUnitsMD)
                serviceTags = ",".join([
                    serviceTags, str(cellsizeX), SpatRefUnitsMD, SpatRefNameMD
                ])

                arcpy.AddMessage("Service Tags: {0}".format(serviceTags))
                arcpy.AddMessage(
                    "Service description: {0}".format(serviceDescription))

                # Look for RasterPath in the list of allowed fields, and if found, don't publish
                # the mosaic dataset. Exposing the contents of RasterPath could compromise the
                # security of the Image Service.
                # allowedFieldListMD = descMD.AllowedFields
                # arcpy.AddMessage("AllowedFields in MD Properties: {0}".format(allowedFieldListMD))
                # if True or "RASTERPATH;" not in allowedFieldListMD.upper():

                # Create a list to manipulate server-side functions
                # Bring Hillshade to the top of the list so it is default.
                # NOTE(review): raises AttributeError if serverFunctionPath
                # was None (ssFunctions stays None) -- confirm callers.
                ssFunctionsLst = ssFunctions.split(";")
                if len(ssFunctionsLst) > 0:
                    foundHillshade = False
                    for i, s in enumerate(ssFunctionsLst):
                        if 'HILLSHADE' in s.upper():
                            arcpy.AddMessage(
                                "Will re-order SS Functions so Hillshade is default"
                            )
                            foundHillshade = True
                            break
                    # if Hillshade is found then re-order the list
                    if foundHillshade:
                        ssFunctionsLst.insert(0, ssFunctionsLst.pop(i))
                        arcpy.AddMessage(
                            "Re-ordered SS Functions so Hillshade is default")

                # convert the list of server-side functions into a comma delimited string
                ssFunctionsList = ",".join(ssFunctionsLst)
                arcpy.AddMessage(
                    "Server-side Functions: {0}\n".format(ssFunctionsList))

                # Create image service definition draft
                arcpy.AddMessage(
                    "Creating image service definition draft file...")
                wsPath = os.path.dirname(os.path.dirname(mdMaster_path))
                sddraftPath = os.path.join(wsPath, serviceName + ".sddraft")
                arcpy.Delete_management(sddraftPath)
                arcpy.CreateImageSDDraft(project_md_path, sddraftPath,
                                         serviceName, "ARCGIS_SERVER",
                                         connection_file_path=None,
                                         copy_data_to_server=False,
                                         folder_name=folderName,
                                         summary=serviceDescription,
                                         tags=serviceTags)

                # Edit the service definition draft if user specified server-side functions
                # or if user wants to enable download on the Image Service
                updateSDServerSideFunctions(ssFunctionsLst, ssFunctionsList,
                                            sddraftPath, update)

                # @TODO Include this to overwrite a service
                # doc = DOM.parse(sddraftPath)
                # tagsType = doc.getElementsByTagName('Type')
                # for tagType in tagsType:
                #     if tagType.parentNode.tagName == 'SVCManifest':
                #         if tagType.hasChildNodes():
                #             tagType.firstChild.data = "esriServiceDefinitionType_Replacement"
                # tagsState = doc.getElementsByTagName('State')
                # for tagState in tagsState:
                #     if tagState.parentNode.tagName == 'SVCManifest':
                #         if tagState.hasChildNodes():
                #             tagState.firstChild.data = "esriSDState_Published"

                # Analyze service definition draft
                arcpy.AddMessage("Analyzing service definition draft file...")
                analysis = arcpy.mapping.AnalyzeForSD(sddraftPath)
                for key in ('messages', 'warnings', 'errors'):
                    arcpy.AddMessage("----" + key.upper() + "---")
                    analysis_vars = analysis[key]
                    for ((message, code),
                         data) in analysis_vars.iteritems():  # @UnusedVariable
                        msg = (" ", message, " (CODE %i)" % code)
                        arcpy.AddMessage("".join(msg))

                if analysis['errors'] == {}:
                    arcpy.AddMessage(
                        "Staging and publishing service definition...")
                    # StageService
                    arcpy.AddMessage("Staging sddraft file to sd file")
                    sdPath = sddraftPath.replace(".sddraft", ".sd")
                    arcpy.Delete_management(sdPath)
                    arcpy.StageService_server(sddraftPath, sdPath)
                    try:
                        # UploadServiceDefinition
                        if not update:
                            arcpy.AddMessage(
                                "Publishing mosaic data set as image service.")
                        else:
                            arcpy.AddMessage(
                                "Updating mosaic data set as image service.")
                        arcpy.UploadServiceDefinition_server(
                            sdPath, serverConnectionFile, "#", "#", "#", "#",
                            startupType)
                    except Exception as e:
                        # One retry in update mode; further failures re-raise.
                        if runCount < 1:
                            arcpy.AddMessage(
                                "image service already exists, trying to update instead."
                            )
                            PublishMDMasterMosaicDataset(
                                jobID, serverFunctionPath, True, 1)
                            break
                        else:
                            raise e
                else:
                    # if the sddraft analysis contained errors, display them
                    arcpy.AddError(analysis['errors'])
                # else:
                #     arcpy.AddError("Exiting: Found 'RasterPath' in list of allowed MD fields. Please remove this field from the list before publishing.")
                #     arcpy.AddError("  To remove RasterPath from the list, go to Mosaic Dataset Properties, Defaults tab, Allowed Fields...")
            else:
                arcpy.AddWarning(
                    "Project mosaic dataset not found '{}'.".format(
                        project_md_path))
        # FOR LOOP
        #
        # else:
        #     arcpy.AddError("Project file geodatabase not found '{}'. Please add this before proceeding.".format(ProjectMDs_fgdb_path))
    else:
        arcpy.AddError(
            "MDMaster record not found in the CMDR. Please add this to the CMDR before proceeding."
        )
    arcpy.AddMessage("Operation complete")
def updateCMDR(ProjectJob, project, las_qainfo, updatedBoundary):
    """Push LAS QA results and the updated project boundary into the CMDR.

    Writes extent, LAS point counts/spacing/density, horizontal and vertical
    spatial-reference info, and the validated Z range onto the project's
    Deliver record, then propagates the updated boundary AOI to the
    ProjectJob, QC, Deliver and Contract records.

    ProjectJob      -- CMDR.ProjectJob instance used to update the job AOI.
    project         -- project row passed through to ProjectJob.updateJobAOI.
    las_qainfo      -- LAS QA info object; must expose ProjectID, the point
                       count/spacing/Z attributes read below, and
                       getSpatialReference().
    updatedBoundary -- polygon geometry of the recalculated project boundary.

    NOTE(review): updatedBoundary.extent is dereferenced immediately below,
    before the `is not None` check near the end, so a None boundary would
    already have raised here — confirm whether the late None check is dead.
    """
    # Capture the bounding box of the recalculated boundary up front.
    bound_XMin = updatedBoundary.extent.XMin
    bound_YMin = updatedBoundary.extent.YMin
    bound_XMax = updatedBoundary.extent.XMax
    bound_YMax = updatedBoundary.extent.YMax
    # Geodesic-safe area in square meters, recorded on the Deliver row below.
    updatedBoundary_Area = updatedBoundary.getArea("PRESERVE_SHAPE",
                                                   "SQUAREMETERS")
    arcpy.AddMessage("Getting DEM Statistics")
    # Augments las_qainfo with DEM statistics (sibling helper in this file).
    las_qainfo = getProjectDEMStatistics(las_qainfo)
    arcpy.AddMessage("Getting SR Info")
    # Horizontal coordinate system name, linear unit and well-known ID.
    sr_horz_alias = las_qainfo.getSpatialReference().name
    sr_horz_unit = las_qainfo.getSpatialReference().linearUnitName
    sr_horz_wkid = las_qainfo.getSpatialReference().factoryCode
    arcpy.AddMessage("SR horizontal alias & unit: {} {}".format(
        sr_horz_alias, sr_horz_unit))
    # Vertical CS details come from the project utility helper.
    sr_vert_alias, sr_vert_unit = Utility.getVertCSInfo(
        las_qainfo.getSpatialReference())
    arcpy.AddMessage("SR vertical alias & unit: {} {}".format(
        sr_vert_alias, sr_vert_unit))
    arcpy.AddMessage("Updating CMDR Deliver features")
    # NOTE: the Deliver row is fetched once and mutated in place by the
    # setters; the edit session is presumably handled inside CMDR — confirm.
    Deliver = CMDR.Deliver()
    deliver = list(Deliver.getDeliver(las_qainfo.ProjectID))
    Deliver.setCountLasFiles(deliver, las_qainfo.num_las_files)
    Deliver.setCountLasPointsDTM(deliver, las_qainfo.pt_count_dtm)
    Deliver.setCountLasPointsDSM(deliver, las_qainfo.pt_count_dsm)
    Deliver.setPointSpacingDTM(deliver, las_qainfo.pt_spacing_dtm)
    Deliver.setPointSpacingDSM(deliver, las_qainfo.pt_spacing_dsm)
    Deliver.setBoundXMin(deliver, bound_XMin)
    Deliver.setBoundYMin(deliver, bound_YMin)
    Deliver.setBoundXMax(deliver, bound_XMax)
    Deliver.setBoundYMax(deliver, bound_YMax)
    # Density = (1 / spacing)^2 points per square unit; guarded against
    # division by zero when spacing is unset.
    if las_qainfo.pt_spacing_dtm > 0:
        Deliver.setPointDensityDTM(deliver,
                                   pow((1.0 / las_qainfo.pt_spacing_dtm), 2))
    if las_qainfo.pt_spacing_dsm > 0:
        Deliver.setPointDensityDSM(deliver,
                                   pow((1.0 / las_qainfo.pt_spacing_dsm), 2))
    Deliver.setDeliverArea(deliver, updatedBoundary_Area)
    Deliver.setHorzSRName(deliver, sr_horz_alias)
    Deliver.setHorzUnit(deliver, sr_horz_unit)
    Deliver.setHorzSRWKID(deliver, sr_horz_wkid)
    Deliver.setVertSRName(deliver, sr_vert_alias)
    Deliver.setVertUnit(deliver, sr_vert_unit)
    # Clamp recorded Z ranges to the plausible elevation range for the
    # vertical unit (LAS helper may adjust out-of-range values).
    las_qainfo.minZ_dsm, las_qainfo.maxZ_dsm = LAS.validateZRange(
        sr_vert_unit, las_qainfo.minZ_dsm, las_qainfo.maxZ_dsm)
    las_qainfo.minZ_dtm, las_qainfo.maxZ_dtm = LAS.validateZRange(
        sr_vert_unit, las_qainfo.minZ_dtm, las_qainfo.maxZ_dtm)
    Deliver.setValidZMax(deliver, las_qainfo.maxZ_dsm)
    # If the DSM max is still out of range, fall back to the (clamped) DTM
    # values. NOTE(review): setValidZMin only runs on this out-of-range
    # branch, so in-range projects never get a ValidZMin written — confirm
    # whether it should execute unconditionally.
    if las_qainfo.maxZ_dsm >= LAS.maxValidElevation(sr_vert_unit):
        Deliver.setValidZMax(
            deliver,
            min(las_qainfo.maxZ_dtm, LAS.maxValidElevation(sr_vert_unit)))
        Deliver.setValidZMin(
            deliver,
            max(las_qainfo.minZ_dtm, LAS.minValidElevation(sr_vert_unit)))
    Deliver.setIsLASClassified(deliver, las_qainfo.isClassified)
    # Propagate the recalculated boundary to every CMDR table that carries
    # its own AOI copy.
    if updatedBoundary is not None:
        arcpy.AddMessage("Updating CMDR ProjectJob AOI")
        ProjectJob.updateJobAOI(project, updatedBoundary)
        arcpy.AddMessage("Updating CMDR QC AOI")
        QC = CMDR.QC()
        qc = QC.getQC(las_qainfo.ProjectID)
        QC.updateAOI(qc, updatedBoundary)
        arcpy.AddMessage("Updating CMDR Delivery AOI")
        Deliver.updateAOI(deliver, updatedBoundary)
        arcpy.AddMessage("Updating CMDR Contract AOI")
        Contract = CMDR.Contract()
        contract = Contract.getContract(las_qainfo.ProjectID)
        Contract.updateAOI(contract, updatedBoundary)
def AddPrjectToCMDR(strProjID, strAlias, strState, strYear, strJobId,
                    strParentDir, strArchiveDir):
    """Register a project in every CMDR table (A02 workflow step).

    Validates the project ID, derives the project directory and cleaned
    alias, sets the WMX job data workspace as the current workspace, then
    adds/updates the project in the Contract, ProjectJob, Deliver, QC and
    Publish tables, reusing the UID assigned by the Contract table.

    Emits arcpy errors instead of raising when the project ID is missing;
    timing of each step is reported via doTime.
    """
    Utility.printArguments(
        ["ProjID", "Alias", "State", "Year", "JobID", "ParentDir",
         "ArchiveDir"],
        [strProjID, strAlias, strState, strYear, strJobId, strParentDir,
         strArchiveDir],
        "A02 AddPrjectToCMDR")

    step_timer = datetime.now()
    overall_start = step_timer

    if strProjID is None:
        arcpy.AddError("Project ID is empty. Please supply a project ID.")
    else:
        # Alias may hold spaces/invalid characters; the cleaned form is the
        # alias with those characters stripped.
        clean_alias = Utility.cleanString(strAlias)
        # The project directory lives directly under the parent directory.
        project_dir_path = os.path.join(strParentDir, strProjID)
        Utility.printArguments(["ProjDir", "AliasClean"],
                               [project_dir_path, clean_alias],
                               "A02 AddPrjectToCMDR")

        # Point the current workspace at the CMDR stored in the WMX job data
        # workspace, then pull the job's AOI geometry.
        Utility.setWMXJobDataAsEnvironmentWorkspace(strJobId)
        aoi_geometry = Utility.getJobAoi(strJobId)

        # NOTE: Edit session handled in Utility.
        # Contract is written first because it assigns the project UID that
        # every other table row references.
        contract_obj = CMDR.Contract()
        contract_row = contract_obj.addOrUpdateProject(
            project_ID=strProjID,
            project_UID=None,
            project_AOI=aoi_geometry)
        strUID = contract_obj.getProjectUID(contract_row)
        step_timer = doTime(
            step_timer,
            "Added project '{}' to CMDR Contract with UID '{}'".format(
                strProjID, strUID))

        project_job_obj = CMDR.ProjectJob()
        project_job_obj.addOrUpdateProject(wmx_job_id=strJobId,
                                           project_Id=strProjID,
                                           alias=strAlias,
                                           alias_clean=clean_alias,
                                           state=strState,
                                           year=strYear,
                                           parent_dir=strParentDir,
                                           archive_dir=strArchiveDir,
                                           project_dir=project_dir_path,
                                           UID=strUID,
                                           project_AOI=aoi_geometry)
        step_timer = doTime(
            step_timer,
            "Added project '{}' to CMDR Project Job".format(strProjID))

        deliver_obj = CMDR.Deliver()
        deliver_obj.addOrUpdateProject(project_Id=strProjID,
                                       UID=strUID,
                                       project_AOI=aoi_geometry)
        step_timer = doTime(
            step_timer,
            "Added project '{}' to CMDR Deliver".format(strProjID))

        qc_obj = CMDR.QC()
        qc_obj.addOrUpdateProject(project_Id=strProjID,
                                  UID=strUID,
                                  project_AOI=aoi_geometry)
        step_timer = doTime(
            step_timer,
            "Added project '{}' to CMDR QAQC".format(strProjID))

        publish_obj = CMDR.Publish()
        publish_obj.addOrUpdateProject(project_Id=strProjID,
                                       UID=strUID,
                                       project_AOI=aoi_geometry)
        step_timer = doTime(
            step_timer,
            "Added project '{}' to CMDR Publish".format(strProjID))

    # Report total elapsed time for the whole A02 step.
    doTime(overall_start, "Operation Complete: A02 Add Project to CMDR")