def processJob(project_job, project, strUID):
    a = datetime.datetime.now()
    aa = a

    # in_cont_fc = r'C:\Users\jeff8977\Desktop\NGCE\CONTOUR\Contours.gdb\Contours_ABC'
    # scratch_path = r'C:\Users\jeff8977\Desktop\NGCE\CONTOUR\Scratch'

    project_folder = ProjectFolders.getProjectFolderFromDBRow(project_job, project)
    derived = project_folder.derived
    project_fgdb_path = derived.fgdb_path
    con_folder = derived.contour_path
    contour_file_gdb_path = os.path.join(con_folder, CONTOUR_GDB_NAME)
    footprint_path = A05_C_ConsolidateRasterInfo.getRasterFootprintPath(fgdb_path=project_fgdb_path, elev_type=DTM)

    # Set up the scratch directory
    scratch_path = os.path.join(con_folder, 'C02Scratch')
    if not os.path.exists(scratch_path):
        os.makedirs(scratch_path)

    in_cont_fc = os.path.join(contour_file_gdb_path, CONTOUR_NAME_WM)
    a = Utility.doTime(a, "Set up for run")

    # Create Base Tiling Scheme for Individual Raster Selection
    base_scheme_poly = gen_base_tiling_scheme(in_cont_fc, scratch_path)
    a = Utility.doTime(a, "Generated tiling scheme")

    # Collect Unique Names from Input Feature Class
    name_list_len = -1
    name_list = list(set([row[0] for row in arcpy.da.SearchCursor(footprint_path, ['name'])]))  # @UndefinedVariable
    try:
        name_list_len = len(name_list)
    except:
        pass
    a = Utility.doTime(a, "Retrieved name list of size {}".format(name_list_len))

    buildAnnotations(scratch_path, in_cont_fc, base_scheme_poly, name_list, footprint_path, False)
    a = Utility.doTime(a, "Built annotations")

    updated_name_list = getContourPrepList(scratch_path, name_list)
    a = Utility.doTime(a, "Got Contour Prep List")
    if len(updated_name_list) > 0:
        arcpy.AddWarning("Failed to build artifacts for {} tiles".format(len(updated_name_list)))
        for fail in updated_name_list:
            arcpy.AddWarning("\t{}: Failed".format(fail))
        a = Utility.doTime(a, "Finished getting failed artifacts")
        # raise Exception("Failed to build artifacts for {} tiles".format(len(updated_name_list)))

    # Merge Multiprocessing Results
    res_db, res_dir = handle_merge(scratch_path)
    a = Utility.doTime(a, "Merged results")

    # Create Final MXD
    build_results_mxd(in_cont_fc, res_db, res_dir)
    RunUtil.runTool(r'ngce\pmdm\C\C02_B_PrepContForPub.py',
                    [in_cont_fc, res_db, res_dir],
                    bit32=True,
                    log_path=project_folder.derived.path)
    # build_results_mxd(in_cont_fc, res_db, res_dir)

    a = Utility.doTime(aa, "Processed Job")
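# ---------------------------------------------------------------------------
# Utility.doTime (used above, and as a bare doTime in the A04/A05 functions
# below) is not defined in this section. The sketch below is a minimal
# stand-in inferred from its call sites: log the elapsed time under a label,
# then return a fresh timestamp. The real helper may differ.
# ---------------------------------------------------------------------------
import datetime

import arcpy


def doTime(a, msg):
    """Log time elapsed since 'a' and return a new start time (assumption-based sketch)."""
    b = datetime.datetime.now()
    arcpy.AddMessage("{} in {}".format(msg, b - a))
    return b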
'''
Created on Feb 12, 2016

@author: eric5946
'''
import arcpy

from ngce.cmdr.JobUtil import getLogFolderFromWMXJobID
from ngce.pmdm import RunUtil

# from ngce.pmdm.a import A06CreateProjectMosaicDataset

PATH = r'ngce\pmdm\a\A06_A_CreateProjectMosaicDataset.py'

jobID = arcpy.GetParameterAsText(0)
dateDeliver = arcpy.GetParameterAsText(1)
# @TODO: add the start and end dates

args = [jobID, dateDeliver]
arcpy.AddMessage(args)

# A06CreateProjectMosaicDataset.CreateProjectMosaicDataset(jobID)
RunUtil.runTool(PATH, args, log_path=getLogFolderFromWMXJobID(jobID))
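# ---------------------------------------------------------------------------
# RunUtil.runTool is an internal helper not shown in this section. Below is a
# hypothetical sketch inferred from its call sites (run a tool script in a
# separate Python process, optionally under the 32-bit interpreter, and log
# its output). The interpreter selection and log naming are assumptions; the
# real implementation may differ.
# ---------------------------------------------------------------------------
import os
import subprocess
import sys

import arcpy


def runTool(script_path, args, bit32=False, log_path=None):
    """Run a geoprocessing script out-of-process and echo its output (sketch)."""
    python_exe = sys.executable
    if bit32:
        # assumption: a 32-bit interpreter lives beside the 64-bit one
        python_exe = python_exe.replace("x64", "")
    cmd = [python_exe, script_path] + [str(arg) for arg in args]
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    if log_path is not None:
        log_file = os.path.join(log_path, os.path.basename(script_path) + ".log")
        with open(log_file, 'a') as f:
            f.write(output)
    arcpy.AddMessage(output)
    return proc.returncode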
def processJob(ProjectJob, project, ProjectUID, serverConnectionFile,
               serverFunctionPath, update=False, runCount=0):
    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectState = ProjectJob.getState(project)
    ProjectYear = ProjectJob.getYear(project)
    ProjectAlias = ProjectJob.getAlias(project)
    ProjectAliasClean = ProjectJob.getAliasClean(project)
    project_wmx_jobid = ProjectJob.getWMXJobID(project)

    Deliver = CMDR.Deliver()
    # delivery = Deliver.getDeliver(project_wmx_jobid)
    delivery = Deliver.getDeliver(ProjectID)
    dateDeliver = Deliver.getDeliverDate(delivery)

    startupType = "STARTED"
    Utility.printArguments(
        ["ProjectJob", "project", "ProjectUID", "serverConnectionFile",
         "serverFunctionPath", "update", "runCount", "ProjectFolder",
         "ProjectID", "ProjectState", "ProjectYear", "ProjectAlias",
         "ProjectAliasClean", "startupType"],
        [ProjectJob, project, ProjectUID, serverConnectionFile,
         serverFunctionPath, update, runCount, ProjectFolder, ProjectID,
         ProjectState, ProjectYear, ProjectAlias, ProjectAliasClean,
         startupType],
        "A07_A Publish Project")

    # serverFunctionPath = Raster.getServerRasterFunctionsPath(jobID)
    ssFunctions = None
    if serverFunctionPath is not None:
        ssFunctions = Raster.getServerSideFunctions(serverFunctionPath)

    folderName = ProjectState

    # If the project has been moved for publishing, update the project directory
    old_path, new_path = updateJobDirectory(project_wmx_jobid, ProjectJob, project)
    old_ProjectID = ProjectID

    arcpy.AddMessage("\n\n-----------------------------------------------------------")
    try:
        arcpy.AddMessage("Job directory paths: \n\tOLD: {}\n\tNEW: {}".format(old_path, new_path))
        doRepath = True
        if str(old_path).lower().strip() == str(new_path).lower().strip():
            arcpy.AddMessage("Job directory paths old/new match, checking MD first record project folder value")
            filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name[:-4], FoldersConfig.DTM)
            arcpy.AddMessage("checking fgdb '{}' ".format(filegdb_name))
            dtm_md_path = os.path.join(new_path, ProjectID, FoldersConfig.published_dir, filegdb_name, FoldersConfig.DTM)
            arcpy.AddMessage("checking MD '{}' first record project folder value".format(dtm_md_path))
            record_project_path, uid = Utility.getExistingRecord(dtm_md_path, ["Project_Dir"], 0)
            arcpy.AddMessage("first record is {}".format(record_project_path))
            record_project_path = list(record_project_path)[0]
            arcpy.AddMessage("MD first record project folder value is {}".format(record_project_path))
            # add a slash back in (string handling removes it) and strip the project name off the end
            old_path, old_ProjectID = os.path.split("\\{}".format(str(record_project_path).strip()))
            arcpy.AddMessage("Job directory paths: \n\tOLD: {}\n\tNEW: {}\n\tOLD Project ID: {}\n\tNEW Project ID: {}".format(
                old_path, new_path, old_ProjectID, ProjectID))
            if str(old_path).lower().strip() == str(new_path).lower().strip():
                doRepath = False
                arcpy.AddMessage("Job directory paths match, doRepath = False")
    except:
        pass
    arcpy.AddMessage("-----------------------------------------------------------\n\n")

    md_list = [FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM,
               FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT]
    for md_name in md_list:
        update_paths_success = False
        # @TODO: Add more info here!
serviceDescription = "for project '{}' within state {} published in the year {}".format( ProjectAlias, ProjectState, ProjectYear) serviceTags = ",".join( [ProjectID, ProjectAliasClean, ProjectState, str(ProjectYear)]) filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name, md_name) if ProjectFolder.published.fgdb_name.endswith(".gdb"): filegdb_name = "{}_{}.gdb".format( ProjectFolder.published.fgdb_name[:-4], md_name) #ProjectMDs_fgdb_path = os.path.join(ProjectFolder.published.path, filegdb_name) new_project_path = os.path.join(new_path, ProjectID) old_project_path = os.path.join(old_path, ProjectID) if str(ProjectID).lower().strip() != str( old_ProjectID).lower().strip(): old_project_path = os.path.join(old_path, old_ProjectID) #arcpy.AddMessage("OLD File Geodatabase Path: {0}".format(ProjectMDs_fgdb_path)) new_publish_path = os.path.join(new_project_path, "PUBLISHED") old_publish_path = os.path.join(old_project_path, "PUBLISHED") #arcpy.AddMessage("OLD File Geodatabase Path: {0}".format(ProjectMDs_fgdb_path)) new_delivered_path = os.path.join(new_project_path, "DELIVERED") old_delivered_path = os.path.join(old_project_path, "DELIVERED") new_projectMDs_fgdb_path = os.path.join(new_publish_path, filegdb_name) arcpy.AddMessage( "File Geodatabase Path: {0}".format(new_projectMDs_fgdb_path)) # Ensure the master_md_path exists if arcpy.Exists(new_projectMDs_fgdb_path): project_md_path = os.path.join(new_projectMDs_fgdb_path, md_name) arcpy.AddMessage( "Mosaic Dataset Path: {0}".format(project_md_path)) if arcpy.Exists(project_md_path): try: arcpy.AddMessage( "Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}" .format(new_projectMDs_fgdb_path, old_project_path, new_project_path)) arcpy.RepairMosaicDatasetPaths_management( in_mosaic_dataset=project_md_path, paths_list="# {0} {1}".format(old_project_path, new_project_path), where_clause="1=1") Utility.addToolMessages() #arcpy.AddMessage("Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}".format(new_projectMDs_fgdb_path, old_delivered_path, new_delivered_path)) #arcpy.RepairMosaicDatasetPaths_management(in_mosaic_dataset=project_md_path, paths_list="# {0} {1}".format(old_delivered_path, new_delivered_path), where_clause="1=1") update_paths_success = True except: if md_name <> FoldersConfig.DHM and md_name <> FoldersConfig.DCM: arcpy.AddWarning( "Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}" .format(project_md_path)) try: out_table = "{}_Paths".format(project_md_path) arcpy.ExportMosaicDatasetPaths_management( in_mosaic_dataset=project_md_path, out_table=out_table, where_clause="1=1", export_mode="ALL", types_of_paths="RASTER;ITEM_CACHE") Utility.addToolMessages() arcpy.AddMessage( "List of repaired Mosaic Dataset Paths: {}".format( out_table)) except: pass project_md_ocs_path = "{}_OCS".format(project_md_path) if arcpy.Exists(project_md_ocs_path): try: arcpy.AddMessage( "Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}" .format(project_md_ocs_path, old_project_path, new_project_path)) arcpy.RepairMosaicDatasetPaths_management( in_mosaic_dataset=project_md_ocs_path, paths_list="# {0} {1}".format( old_project_path, new_project_path), where_clause="1=1") Utility.addToolMessages() except: arcpy.AddWarning( "Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. 
{}" .format(project_md_ocs_path)) try: out_table = "{}_Paths".format(project_md_ocs_path) arcpy.ExportMosaicDatasetPaths_management( in_mosaic_dataset=project_md_ocs_path, out_table=out_table, where_clause="1=1", export_mode="ALL", types_of_paths="RASTER;ITEM_CACHE") Utility.addToolMessages() arcpy.AddMessage( "List of repaired Mosaic Dataset Paths: {}". format(out_table)) except: pass serviceName = "{}_{}".format(ProjectID, md_name) arcpy.AddMessage("Service Name: {0}".format(serviceName)) # Retrieve some properties from the Mosaic Dataset to place in the tags field cellsizeResult = arcpy.GetRasterProperties_management( project_md_path, property_type="CELLSIZEX", band_index="") Utility.addToolMessages() cellsizeX = cellsizeResult.getOutput(0) # Get the units of the Mosaic Dataset descMD = arcpy.Describe(project_md_path) SpatRefMD = descMD.SpatialReference SpatRefUnitsMD = SpatRefMD.linearUnitName SpatRefNameMD = SpatRefMD.name arcpy.AddMessage( "Spatial Reference name of Mosaic Dataset: {0}".format( SpatRefNameMD)) arcpy.AddMessage( "Spatial Reference X,Y Units of Mosaic Dataset: {0}". format(SpatRefUnitsMD)) # append the cellsize and units of the Mosaic Dataset to the tags serviceTags = "{}, {}, {}".format(serviceTags, cellsizeX, SpatRefUnitsMD) serviceDescription = "{} {}. Horizontal spatial reference is {} and cell size is {} {}.".format( md_name, serviceDescription, SpatRefNameMD, cellsizeX, SpatRefUnitsMD) serviceDescription = "{}. Please note that cell size does not refer to the underlying data's cell size.".format( serviceDescription) serviceDescription = "{}. You must check the meta-data for the underlying elevation data's resolution information (cell width, cell height, and Lidar point spacing).".format( serviceDescription) arcpy.AddMessage("Service Tags: {0}".format(serviceTags)) arcpy.AddMessage( "Service description: {0}".format(serviceDescription)) # Look for RasterPath in the list of allowed fields, and if found, don't publish # the mosaic dataset. Exposing the contents of RasterPath could compromise the # security of the Image Service. 
                allowedFieldListMD = descMD.AllowedFields
                arcpy.AddMessage("AllowedFields in MD Properties: {0}".format(allowedFieldListMD))
                # NOTE: the 'True or' below bypasses the RasterPath check
                if True or "RASTERPATH;" not in allowedFieldListMD.upper():
                    # Create a list to manipulate server-side functions.
                    # Bring Hillshade to the top of the list so it is the default.
                    ssFunctionsLst = list([])
                    ssFunctionsList = ""
                    if ssFunctions is not None:
                        ssFunctionsLst = ssFunctions.split(";")
                        if len(ssFunctionsLst) > 0:
                            foundHillshade = False
                            if md_name != FoldersConfig.INT:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'HILLSHADE' in s.upper():
                                        arcpy.AddMessage("Will re-order SS Functions for {} so {} is default".format(md_name, s))
                                        foundHillshade = True
                                        break
                            else:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'METER' in s.upper():
                                        arcpy.AddMessage("Will re-order SS Functions for {} so {} is default".format(md_name, s))
                                        foundHillshade = True
                                        break

                            # If Hillshade is found then re-order the list.
                            # Don't apply hillshade to intensity.
                            if foundHillshade:
                                ssFunctionsLst.insert(0, ssFunctionsLst.pop(i))
                                arcpy.AddMessage("Re-ordered SS Functions to (first is default): {}".format(ssFunctionsLst))

                        # Convert the list of server-side functions into a comma-delimited string
                        ssFunctionsList = ",".join(ssFunctionsLst)
                        arcpy.AddMessage("Server-side Functions: {0}\n".format(ssFunctionsList))

                    # Create image service definition draft
                    arcpy.AddMessage("Creating image service definition draft file: ")
                    wsPath = os.path.dirname(os.path.dirname(project_md_path))
                    sddraftPath = os.path.join(wsPath, serviceName + ".sddraft")
                    arcpy.Delete_management(sddraftPath)

                    arcpy.AddMessage("\tMDPath='{}'".format(project_md_path))
                    arcpy.AddMessage("\tSDPath='{}'".format(sddraftPath))
                    arcpy.AddMessage("\tServiceName='{}'".format(serviceName))
                    arcpy.AddMessage("\tFolderName='{}'".format(folderName))
                    arcpy.AddMessage("\tSummary='{}'".format(serviceDescription))
                    arcpy.AddMessage("\tTags='{}'".format(serviceTags))
                    arcpy.CreateImageSDDraft(project_md_path,
                                             sddraftPath,
                                             serviceName,
                                             "ARCGIS_SERVER",
                                             connection_file_path=None,
                                             copy_data_to_server=False,
                                             folder_name=folderName,
                                             summary=serviceDescription,
                                             tags=serviceTags)

                    # Edit the service definition draft if user specified server-side functions
                    # or if user wants to enable download on the Image Service
                    updateSDServerSideFunctions(ssFunctionsLst, ssFunctionsList, sddraftPath, update)

                    # Analyze service definition draft
                    arcpy.AddMessage("Analyzing service definition draft file...")
                    analysis = arcpy.mapping.AnalyzeForSD(sddraftPath)
                    for key in ('messages', 'warnings', 'errors'):
                        arcpy.AddMessage("----" + key.upper() + "---")
                        analysis_vars = analysis[key]
                        for ((message, code), data) in analysis_vars.iteritems():  # @UnusedVariable
                            msg = (" ", message, " (CODE %i)" % code)
                            arcpy.AddMessage("".join(msg))

                    if analysis['errors'] == {}:
                        arcpy.AddMessage("Staging and publishing service definition...")
                        # StageService
                        arcpy.AddMessage("Staging sddraft file to sd file")
                        sdPath = sddraftPath.replace(".sddraft", ".sd")
                        arcpy.Delete_management(sdPath)
                        RunUtil.runTool(r'ngce\pmdm\a\A07_B_StageSD.py',
                                        [sddraftPath, sdPath, serverConnectionFile, startupType],
                                        bit32=True,
                                        log_path=ProjectFolder.derived.path)
                        # arcpy.StageService_server(sddraftPath, sdPath)
                        # try:
                        #     # UploadServiceDefinition
                        #     arcpy.AddMessage("Publishing mosaic data set as image service.")
                        #     arcpy.UploadServiceDefinition_server(sdPath, serverConnectionFile, "#", "#", "#", "#", startupType)
                        # except Exception as e:
                        #     if runCount < 1:
                        #         # PublishMosaicDataset(jobID, serverConnectionFile, True, 1)
                        #         processJob(ProjectJob, project, ProjectUID, serverConnectionFile,
                        #                    serverFunctionPath, update=True, runCount=1)
                        #         break
                        #     else:
                        #         raise e
                    else:
                        # if the sddraft analysis contained errors, display them
                        arcpy.AddError(analysis['errors'])
                else:
                    arcpy.AddError("Exiting: Found 'RasterPath' in list of allowed MD fields. Please remove this field from the list before publishing.")
                    arcpy.AddError(" To remove RasterPath from the list, go to Mosaic Dataset Properties, Defaults tab, Allowed Fields...")

                # Clean up and delete the .sd file
                Utility.deleteFileIfExists(sdPath, False)

                # For some reason publishing breaks the referenced mosaics.
                # The function paths also don't update properly.
                # So delete them and re-create later.
                if md_name == FoldersConfig.DHM or md_name == FoldersConfig.DCM:
                    arcpy.AddMessage("Deleting Mosaic Dataset to recreate later {}".format(project_md_path))
                    Utility.deleteFileIfExists(project_md_path, True)
            else:
                arcpy.AddWarning("Project mosaic dataset not found '{}'.".format(project_md_path))
        else:
            arcpy.AddError("Project file geodatabase not found '{}'. Please add this before proceeding.".format(new_projectMDs_fgdb_path))
    # FOR LOOP
    ##
    ## Re-create the MD if it is FoldersConfig.DHM, FoldersConfig.DCM
    ##
    A06_A_CreateProjectMosaicDataset.CreateProjectMDs(project_wmx_jobid, dateDeliver=dateDeliver)
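# ---------------------------------------------------------------------------
# Utility.deleteFileIfExists is not shown in this section. A hypothetical
# sketch inferred from the call sites above, where the second argument appears
# to select arcpy-based deletion (geodatabase items such as mosaic datasets)
# over plain file removal; the real helper may differ.
# ---------------------------------------------------------------------------
import os

import arcpy


def deleteFileIfExists(path, useArcpy=False):
    """Delete a dataset or file if it exists (assumption-based sketch)."""
    if useArcpy:
        if arcpy.Exists(path):
            arcpy.Delete_management(path)
    elif os.path.exists(path):
        os.remove(path)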
def processJob(ProjectJob, project, ProjectUID, serverConnectionFile):
    cache_path = ContourConfig.CACHE_FOLDER
    projectID = ProjectJob.getProjectID(project)
    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(ProjectJob, project)
    derived_filegdb_path = ProjectFolder.derived.fgdb_path
    contour_folder = ProjectFolder.derived.contour_path
    # PublishFolder = ProjectFolder.published.path
    # contour_file_gdb_path = os.path.join(contour_folder, CONTOUR_GDB_NAME)
    # contourMerged_file_gdb_path = os.path.join(PublishFolder, CONTOUR_NAME_WM)
    # @TODO: move all the derived contour stuff to a published location

    # P:\OK_SugarCreekElaine_2006\DERIVED\CONTOUR\SCRATCH\RESULTS\Results.mxd
    contourMxd_Name = "Results.mxd"  # ContourConfig.CONTOUR_MXD_NAME
    contourMxd_path = os.path.join(contour_folder, "C02Scratch", "RESULTS", contourMxd_Name)
    # ContourBoundFC = os.path.join(contourMerged_file_gdb_path, ContourConfig.CONTOUR_BOUND_FC_WEBMERC)
    ContourBoundFC = A05_C_ConsolidateRasterInfo.getRasterBoundaryPath(derived_filegdb_path, DTM)

    temp = os.path.join(contour_folder, "temp")
    if os.path.exists(temp):
        shutil.rmtree(temp)
    os.mkdir(temp)

    # Get input parameters
    mxd = contourMxd_path  # .replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added replace method 22 Mar 2019 BJN
    # arcpy.GetParameterAsText(0)

    # 2018051 EI: Switched to using envelope here to create all cache tiles. Use AOI for import in C04
    # areaOfInterest = ContourBoundFC  # arcpy.GetParameterAsText(1)
    areaOfInterest = ""
    updateExtents = ContourBoundFC  # .replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added replace method 22 Mar 2019 BJN
    # arcpy.GetParameterAsText(1)
    localServer = serverConnectionFile  # arcpy.GetParameterAsText(2)
    serviceName = "{}_{}".format(projectID, ContourConfig.CONTOUR_2FT_SERVICE_NAME)  # arcpy.GetParameterAsText(3)
    folder = ProjectJob.getState(project)  # arcpy.GetParameterAsText(4)

    # Using the temp folder to create service definition files
    sddraft = os.path.join(temp, "{}.sddraft".format(serviceName))  # .replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added replace method 22 Mar 2019 BJN
    sd = os.path.join(temp, "{}.sd".format(serviceName))  # .replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added replace method 22 Mar 2019 BJN
    tilingScheme = ContourConfig.TILING_SCHEME  # .replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added replace method 22 Mar 2019 BJN
    # cwd + "\\NRCS_tilingScheme.xml"  # Cache template file

    # -------------------------------------------------------------------------------
    # -------------------------------------------------------------------------------
    # The following paths and values can be modified if needed

    # Path to the local cache folder where project tiles will be created
    # cacheFolder = cache_path  # r"C:\arcgisserver\directories\arcgiscache"
    # cacheDir = os.path.join(cache_path, serviceName)
    # if folder is not None and len(folder) > 0:
    #     cacheDir = os.path.join(cache_path, "{}_{}".format(folder, serviceName))
    # if os.path.exists(cacheDir):
    #     now = datetime.datetime.now()
    #     updatedCacheDir = "{}_{}{}{}_{}{}{}".format(cacheDir,
    #                                                 ("0000{}".format(now.year))[-4:],
    #                                                 ("00{}".format(now.month))[-2:],
    #                                                 ("00{}".format(now.day))[-2:],
    #                                                 ("00{}".format(now.hour))[-2:],
    #                                                 ("00{}".format(now.minute))[-2:],
    #                                                 ("00{}".format(now.second))[-2:])
    #     arcpy.AddMessage("The existing cache folder will be moved to: {0}".format(updatedCacheDir))
    #     shutil.move(cacheDir, updatedCacheDir)

    # Other map service properties
    cachingInstances = ContourConfig.CACHE_INSTANCES  # This should be increased based on server resources
    # -------------------------------------------------------------------------------
    # -------------------------------------------------------------------------------
    Utility.printArguments(
        ["mxd", "areaOfInterest", "updateExtents", "serviceName", "folder",
         "sddraft", "sd", "tilingScheme", "cache_path"],
        [mxd, areaOfInterest, updateExtents, serviceName, folder, sddraft, sd,
         tilingScheme, cache_path],
        "C03 CreateContourCache")

    # List of scales to create tiles at. If additional scales are needed, the tiling
    # scheme file needs to be updated as well as this list
    scales = ContourConfig.CONTOUR_SCALES_STRING

    # Other map service properties that should not be modified
    updateMode = "RECREATE_ALL_TILES"  # @TODO: Can we change this to recreate missing?
    waitForJobCompletion = "WAIT"  # @TODO: What if we don't wait??
    cache_dir_path = os.path.join(cache_path, "{}_{}".format(folder, serviceName))
    if os.path.exists(cache_dir_path):
        arcpy.AddMessage("Cache directory already exists, only recreating empty tiles: {0}".format(cache_dir_path))
        updateMode = "RECREATE_EMPTY_TILES"
        waitForJobCompletion = "DO_NOT_WAIT"

    # Construct path for local cached service
    inputService = os.path.join(localServer, folder, serviceName + ".MapServer")
    if localServer.endswith(".ags"):
        inputService = os.path.join(localServer[:-4], folder, serviceName + ".MapServer")
    arcpy.AddMessage("Location of new service will be: {0}".format(inputService))

    # Create a MapDocument object from the input MXD
    mapDoc = arcpy.mapping.MapDocument(mxd)

    # Create the SDDraft file for the local cached service
    arcpy.AddMessage("Creating draft service definition: {0}".format(sddraft))
    arcpy.mapping.CreateMapSDDraft(mapDoc, sddraft, serviceName, "ARCGIS_SERVER",
                                   localServer, folder_name=folder)

    # # Parse the SDDraft file in order to modify service properties before publishing
    # doc = DOM.parse(sddraft)
    # # Set the antialiasing mode to 'Fast'
    # newAntialiasingMode = "Fast"
    # keys = doc.getElementsByTagName('Key')
    # for key in keys:
    #     if key.hasChildNodes():
    #         if key.firstChild.data == 'antialiasingMode':
    #             # Modify the antialiasing mode
    #             arcpy.AddMessage("Updating anti-aliasing to: {}".format(newAntialiasingMode))
    #             key.nextSibling.firstChild.data = newAntialiasingMode
    #
    # # Save a new SDDraft file
    outsddraft = os.path.join(temp, serviceName + "_aa.sddraft")
    # f = open(outsddraft, 'w')
    # doc.writexml(f)
    # f.close()
    updateSDDraft(sddraft, outsddraft)

    # Analyze the SDDraft file
    arcpy.AddMessage("Analyzing draft service definition: {}".format(outsddraft))
    analysis = arcpy.mapping.AnalyzeForSD(outsddraft)

    # Check for analyzer errors
    if analysis['errors'] == {}:
        RunUtil.runTool(r'ngce\pmdm\c\C03_B_StageSD.py',
                        [outsddraft, sd, localServer],
                        bit32=True,
                        log_path=ProjectFolder.derived.path)
        # arcpy.AddMessage("Staging service definition {}".format(sd))
        # arcpy.StageService_server(outsddraft, sd)
        # arcpy.AddMessage("Uploading service definition {} to server {}".format(sd, localServer))
        # arcpy.UploadServiceDefinition_server(sd, localServer)
        # arcpy.AddMessage("Service publishing completed")
    else:
        # If the SDDraft analysis contained errors, display them
        arcpy.AddError("\nERROR\nErrors encountered during analysis of the MXD: " + str(analysis['errors']))
        os.remove(sddraft)
        os.remove(outsddraft)
        raise Exception("\nERROR\nErrors encountered during analysis of the MXD: " + str(analysis['errors']))

    try:
        # Create the cache schema for the local project service
        arcpy.AddMessage("Creating cache schema for service {} in: {}".format(inputService, cache_path))
        arcpy.CreateMapServerCache_server(inputService, cache_path, "PREDEFINED",
                                          predefined_tiling_scheme=tilingScheme,
                                          scales=scales)  # , scales_type="STANDARD", num_of_scales=len(scales))
        arcpy.AddMessage("Cache schema created for local project service")
    except arcpy.ExecuteError:
        arcpy.AddWarning(arcpy.GetMessages(2))

    # Create the cache tiles for the local project service
    ts = time.time()
    st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
    arcpy.AddMessage("Cache creation started at: {0}".format(st))
    Utility.printArguments(
        ["inputService", "scales", "updateMode", "cachingInstances",
         "areaOfInterest", "updateExtents", "waitForJobCompletion"],
        [inputService, scales, updateMode, cachingInstances, areaOfInterest,
         updateExtents, waitForJobCompletion],
        'arcpy.ManageMapServerCacheTiles_server')  # Added 16 April 2016 BJN
    arcpy.ManageMapServerCacheTiles_server(inputService, scales, updateMode,
                                           cachingInstances, areaOfInterest,
                                           updateExtents, waitForJobCompletion)
    ts = time.time()
    st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
    arcpy.AddMessage("Cache creation completed at: {0}".format(st))

    # Clean up the Service Definition file from the temp folder
    os.remove(sd)
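# ---------------------------------------------------------------------------
# updateSDDraft is defined elsewhere in this module. Below is a minimal sketch
# of what it presumably does, reconstructed from the commented-out DOM code
# above (copy the sddraft with the anti-aliasing mode set to 'Fast'); the real
# helper may set additional service properties.
# ---------------------------------------------------------------------------
import xml.dom.minidom as DOM

import arcpy


def updateSDDraft(sddraft, outsddraft, newAntialiasingMode="Fast"):
    """Rewrite an .sddraft with the antialiasingMode property updated (sketch)."""
    doc = DOM.parse(sddraft)
    for key in doc.getElementsByTagName('Key'):
        if key.hasChildNodes() and key.firstChild.data == 'antialiasingMode':
            arcpy.AddMessage("Updating anti-aliasing to: {}".format(newAntialiasingMode))
            key.nextSibling.firstChild.data = newAntialiasingMode
    f = open(outsddraft, 'w')
    doc.writexml(f)
    f.close()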
'''
Created on Feb 12, 2016

@author: eric5946
'''
import arcpy
import os

from ngce.pmdm import RunUtil

# from ngce.pmdm.a import A02AddProjectToCMDR

PATH = r'ngce\pmdm\a\A02AddProjectToCMDR.py'

strProjID = arcpy.GetParameterAsText(0)
strAlias = arcpy.GetParameterAsText(1)
strState = arcpy.GetParameterAsText(2)
strYear = arcpy.GetParameterAsText(3)
strJobId = arcpy.GetParameterAsText(4)
strParentDir = arcpy.GetParameterAsText(5)
strArchiveDir = arcpy.GetParameterAsText(6)

args = [strProjID, strAlias, strState, strYear, strJobId, strParentDir, strArchiveDir]
project_path = os.path.join(strParentDir, strProjID)

# A02AddProjectToCMDR.AddPrjectToCMDR(strProjID, strAlias, strState, strYear, strJobId, strParentDir, strArchiveDir)
RunUtil.runTool(PATH, args, log_path=os.path.join(project_path, "DERIVED"))
def processRastersInFolder(fileList, target_path, publish_path, elev_type, bound_path,
                           z_min, z_max, v_name, v_unit, h_name, h_unit, h_wkid,
                           spatial_ref, runAgain=True):
    a = datetime.now()
    path = os.path.join(RunUtil.TOOLS_PATH, "A05_B_RevalueRaster.py")
    Utility.printArguments(
        ["fileList", "target_path", "publish_path", "elev_type", "bound_path",
         "spatial_ref", "runAgain"],
        [(None if fileList is None else len(fileList)), target_path, publish_path,
         elev_type, bound_path, spatial_ref, runAgain],
        "processRastersInFolder")

    grouping = PROCESS_CHUNKS
    if not runAgain:
        grouping = int(PROCESS_CHUNKS / 2)
    if grouping <= 1:
        grouping = 2
    total = len(fileList)
    if total > 0:
        fileList_repeat = []

        procCount = int(os.environ['NUMBER_OF_PROCESSORS'])
        if procCount > 4:
            procCount = procCount - PROCESS_SPARES
        if procCount <= 0:
            procCount = 1
        arcpy.AddMessage("processRastersInFolder: Using {}/{} Processors to process {} files in groups of {}".format(
            procCount, (procCount + PROCESS_SPARES), total, grouping))

        processList = []
        indx = 0
        for f_paths in grouper(fileList, grouping):
            f_paths = [x for x in f_paths if x is not None]
            f_path = ",".join(f_paths)
            indx = indx + len(f_paths)
            arcpy.AddMessage(' processRastersInFolder: Working on {} {}/{}'.format(elev_type, indx, total))
            args = [f_path, elev_type, target_path, publish_path, bound_path,
                    str(z_min), str(z_max), v_name, v_unit, h_name, h_unit,
                    str(h_wkid), spatial_ref]

            try:
                p, l = RunUtil.runToolx64_async(path, args, "A05_B", target_path)
                # track each group alongside its process handle so a failed
                # group can be identified when that process finishes
                processList.append([p, l, f_path])
                # give time for things to wake up
                time.sleep(PROCESS_DELAY)
            except:
                exc_type, exc_value, tb = sys.exc_info()
                tbinfo = traceback.format_tb(tb)[0]
                pymsg = "processRastersInFolder: PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + \
                        "\nError Info:\n " + str(exc_type) + ": " + str(exc_value) + "\n"
                arcpy.AddWarning(pymsg)
                msgs = "processRastersInFolder: GP ERRORS:\n" + arcpy.GetMessages(2) + "\n"
                arcpy.AddWarning(msgs)
                sys.exit(1)

            # Throttle: don't submit another group until a process slot frees up
            waitForResults = True
            first = True
            while waitForResults:
                if not first:
                    time.sleep(1)
                first = False
                # arcpy.AddMessage('processRastersInFolder: Looping LEN Process List = {} ProcCount = {} is greater = {}'.format(len(processList), procCount, (len(processList) >= procCount)))
                # iterate over a snapshot so finished entries can be removed safely
                for entry in list(processList):
                    p, l, done_path = entry
                    if p.poll() is not None:
                        # error log messages are handled in endRun_async
                        retCode = RunUtil.endRun_async(path, p, l)
                        if retCode != 0:
                            fileList_repeat.append(done_path)
                        processList.remove(entry)
                waitForResults = (len(processList) >= int(procCount))

        # Wait for last subprocesses to complete
        arcpy.AddMessage(" processRastersInFolder: Waiting for process list to clear {} jobs".format(len(processList)))
        while len(processList) > 0:
            for entry in list(processList):
                p, l, done_path = entry
                if p.poll() is not None:
                    RunUtil.endRun_async(path, p, l)
                    processList.remove(entry)
                    arcpy.AddMessage(" processRastersInFolder: Waiting for process list to clear {} jobs".format(len(processList)))
                else:
                    # arcpy.AddMessage("processRastersInFolder: Waiting for process list to clear {} jobs".format(len(processList)))
                    time.sleep(PROCESS_DELAY)

        if runAgain and len(fileList_repeat) > 0:
            # try to clean up any errors along the way
            processRastersInFolder(fileList, target_path, publish_path, elev_type,
                                   bound_path, z_min, z_max, v_name, v_unit,
                                   h_name, h_unit, h_wkid, spatial_ref,
                                   runAgain=False)
        try:
            shutil.rmtree(os.path.join(target_path, elev_type, "TEMP"), True)
        except:
            pass

    doTime(a, 'processRastersInFolder: All jobs completed.')
def createLasStatistics(fileList, target_path, spatial_reference=None, isClassified=True,
                        createQARasters=False, createMissingRasters=True,
                        overrideBorderPath=None, runAgain=True):
    a = datetime.now()
    path = os.path.join(RunUtil.TOOLS_PATH, "A04_B_CreateLASStats.py")
    Utility.printArguments(
        ["fileList", "target_path", "spatial_reference", "isClassified",
         "createQARasters", "createMissingRasters", "overrideBorderPath"],
        [fileList, target_path, spatial_reference, isClassified, createQARasters,
         createMissingRasters, overrideBorderPath],
        "createLasStatistics")

    grouping = PROCESS_CHUNKS
    if not runAgain:
        grouping = int(PROCESS_CHUNKS / 2)
    if grouping <= 1:
        grouping = 2
    total = len(fileList)
    if total > 0:
        fileList_repeat = []

        procCount = int(os.environ['NUMBER_OF_PROCESSORS'])
        if procCount > 4:
            procCount = procCount - PROCESS_SPARES
        arcpy.AddMessage("\tUsing {}/{} Processors to process {} files in groups of {}".format(
            procCount, (procCount + PROCESS_SPARES), total, grouping))

        processList = []
        indx = 0
        for f_paths in Utility.grouper(fileList, grouping):
            f_paths = [x for x in f_paths if x is not None]
            f_path = ",".join(f_paths)
            indx = indx + len(f_paths)
            # arcpy.AddMessage('\t Working on {}/{}: {}'.format(indx, total, f_path))
            arcpy.AddMessage('\t Working on {}/{}'.format(indx, total))
            args = [f_path, target_path, spatial_reference, "{}".format(isClassified),
                    "{}".format(createQARasters), "{}".format(createMissingRasters),
                    overrideBorderPath]

            try:
                p, l = RunUtil.runToolx64_async(path, args, "A04_B", target_path)
                # track each group alongside its process handle so a failed
                # group can be identified when that process finishes
                processList.append([p, l, f_path])
                # give time for things to wake up
                time.sleep(PROCESS_DELAY)
            except:
                exc_type, exc_value, tb = sys.exc_info()
                tbinfo = traceback.format_tb(tb)[0]
                pymsg = " PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + \
                        "\nError Info:\n " + str(exc_type) + ": " + str(exc_value) + "\n"
                arcpy.AddWarning(pymsg)
                msgs = "GP ERRORS:\n" + arcpy.GetMessages(2) + "\n"
                arcpy.AddWarning(msgs)
                sys.exit(1)

            # Throttle: don't submit another group until a process slot frees up
            waitForResults = True
            first = True
            while waitForResults:
                if not first:
                    time.sleep(1)
                first = False
                # arcpy.AddMessage('Looping LEN Process List = {} ProcCount = {} is greater = {}'.format(len(processList), procCount, (len(processList) >= procCount)))
                # iterate over a snapshot so finished entries can be removed safely
                for entry in list(processList):
                    p, l, done_path = entry
                    if p.poll() is not None:
                        # error log messages are handled in endRun_async
                        retCode = RunUtil.endRun_async(path, p, l)
                        if retCode != 0:
                            fileList_repeat.append(done_path)
                        processList.remove(entry)
                waitForResults = (len(processList) >= int(procCount))

        # Wait for last subprocesses to complete
        arcpy.AddMessage("\tWaiting for process list to clear {} jobs".format(len(processList)))
        while len(processList) > 0:
            for entry in list(processList):
                p, l, done_path = entry
                if p.poll() is not None:
                    retCode = RunUtil.endRun_async(path, p, l)
                    if retCode != 0:
                        fileList_repeat.append(done_path)
                    processList.remove(entry)
                    if len(processList) > 0:
                        arcpy.AddMessage("\tWaiting for process list to clear {} jobs".format(len(processList)))
                else:
                    # arcpy.AddMessage("Waiting for process list to clear {} jobs".format(len(processList)))
                    time.sleep(PROCESS_DELAY)

        # BRUCE's code here
        if runAgain and len(fileList_repeat) > 0:
            # try to clean up any errors along the way
            createLasStatistics(fileList, target_path, spatial_reference, isClassified,
                                createQARasters, createMissingRasters,
                                overrideBorderPath, runAgain=False)
        elif not runAgain and len(fileList_repeat) > 0:
            arcpy.AddError("Error processing .las files.")
            raise Exception("Error processing .las files.")

    doTime(a, 'createLasStatistics: All jobs completed.')
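# ---------------------------------------------------------------------------
# Utility.grouper (used above, and as a bare 'grouper' in processRastersInFolder)
# is not shown in this section. A minimal sketch based on the standard
# itertools grouper recipe, which matches the usage above: fixed-size chunks
# padded with None, which the callers then filter out. The real helper may
# differ.
# ---------------------------------------------------------------------------
from itertools import izip_longest  # Python 2; itertools.zip_longest in Python 3


def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks: grouper('ABCDEFG', 3) --> ABC DEF Gxx."""
    args = [iter(iterable)] * n
    return izip_longest(fillvalue=fillvalue, *args)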