def repairMosaicDatasetPaths(mosaicDatasetPath, pathsList):
    """Repair broken source-raster paths in a mosaic dataset.

    Runs the Repair Mosaic Dataset Paths geoprocessing tool and converts any
    failure into a status pair instead of raising, so callers can decide how
    to log/report the error.

    Args:
        mosaicDatasetPath: Path to the mosaic dataset to repair.
        pathsList: Paths-remapping list accepted by
            arcpy.RepairMosaicDatasetPaths_management.

    Returns:
        list: ``[success, pymsg]`` where ``success`` is a bool and ``pymsg``
        is "" on success, otherwise a short description of the exception.
    """
    success = True
    pymsg = ""
    try:
        arcpy.RepairMosaicDatasetPaths_management(mosaicDatasetPath, pathsList)
    except Exception:
        # Narrowed from a bare 'except:'; the original also returned from a
        # 'finally' block, which silently swallowed SystemExit and
        # KeyboardInterrupt. Those now propagate as they should.
        success = False
        pymsg = "\tError Info:\n\t" + str(sys.exc_info()[1])
    return [success, pymsg]
def mosaic_repair(mdname, paths, query="#"):
    """Thin wrapper around the Repair Mosaic Dataset Paths GP tool.

    Args:
        mdname: Mosaic dataset whose source paths should be repaired.
        paths: Paths-remapping list for the tool.
        query: Optional selection clause; "#" means repair all items.
    """
    tool_args = (mdname, paths, query)
    arcpy.RepairMosaicDatasetPaths_management(*tool_args)
def processJob(ProjectJob, project, ProjectUID, serverConnectionFile, serverFunctionPath, update=False, runCount=0):
    """Publish each of a project's derived mosaic datasets as an image service.

    For every mosaic dataset name configured in FoldersConfig (DTM, DSM, DLM,
    DHM, DCM, INT) this routine: repairs the MD's internal raster paths after
    a possible job-directory move, builds service metadata (tags/description)
    from the MD's spatial reference and cell size, creates an .sddraft,
    analyzes it, and stages it to an .sd via an external 32-bit tool run.
    Finally it re-creates the DHM/DCM mosaic datasets it deleted during
    publishing.

    Args:
        ProjectJob: project-job accessor object (project type -- presumably
            from CMDR/WMX; provides getProjectID/getState/... -- TODO confirm).
        project: the project DB row that the accessors above read from.
        ProjectUID: unique project identifier (only logged here).
        serverConnectionFile: ArcGIS Server connection file used when staging.
        serverFunctionPath: folder of server-side raster functions, or None.
        update (bool): forwarded to updateSDServerSideFunctions.
        runCount (int): retry counter; only logged -- the retry logic that
            used it is commented out below.
    """
    # --- Gather project metadata from the job row (all project types) ---
    ProjectFolder = ProjectFolders.getProjectFolderFromDBRow(ProjectJob, project)
    ProjectID = ProjectJob.getProjectID(project)
    ProjectState = ProjectJob.getState(project)
    ProjectYear = ProjectJob.getYear(project)
    ProjectAlias = ProjectJob.getAlias(project)
    ProjectAliasClean = ProjectJob.getAliasClean(project)
    project_wmx_jobid = ProjectJob.getWMXJobID(project)
    Deliver = CMDR.Deliver()
    #delivery = Deliver.getDeliver(project_wmx_jobid)
    delivery = Deliver.getDeliver(ProjectID)
    dateDeliver = Deliver.getDeliverDate(delivery)
    startupType = "STARTED"
    Utility.printArguments([
        "ProjectJob", "project", "ProjectUID", "serverConnectionFile",
        "serverFunctionPath", "update", "runCount", "ProjectFolder",
        "ProjectID", "ProjectState", "ProjectYear", "ProjectAlias",
        "ProjectAliasClean", "startupType"
    ], [
        ProjectJob, project, ProjectUID, serverConnectionFile,
        serverFunctionPath, update, runCount, ProjectFolder, ProjectID,
        ProjectState, ProjectYear, ProjectAlias, ProjectAliasClean,
        startupType
    ], "A07_A Publish Project")

    # serverFunctionPath = Raster.getServerRasterFunctionsPath(jobID)
    # Server-side raster functions as a ';'-separated string, or None.
    ssFunctions = None
    if serverFunctionPath is not None:
        ssFunctions = Raster.getServerSideFunctions(serverFunctionPath)
    # Services are grouped into a server folder named after the state.
    folderName = ProjectState

    # If the project has been moved for publishing, update the project directory
    old_path, new_path = updateJobDirectory(project_wmx_jobid, ProjectJob, project)
    old_ProjectID = ProjectID
    arcpy.AddMessage("\n\n-----------------------------------------------------------")
    try:
        arcpy.AddMessage("Job directory paths: \n\tOLD: {}\n\tNEW: {}".format(old_path, new_path))
        doRepath = True
        if str(old_path).lower().strip() == str(new_path).lower().strip():
            # Paths look identical; fall back to inspecting the first record
            # of the DTM mosaic dataset to recover the path it was built with.
            arcpy.AddMessage("Job directory paths old/new match, checking MD first record project folder value")
            filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name[:-4], FoldersConfig.DTM)
            arcpy.AddMessage("checking fgdb '{}' ".format(filegdb_name))
            dtm_md_path = os.path.join(new_path, ProjectID, FoldersConfig.published_dir, filegdb_name, FoldersConfig.DTM)
            arcpy.AddMessage("checking MD '{}' first record project folder value".format(dtm_md_path))
            record_project_path, uid = Utility.getExistingRecord(dtm_md_path, ["Project_Dir"], 0)
            arcpy.AddMessage("first record is {}".format(record_project_path))
            record_project_path = list(record_project_path)[0]
            arcpy.AddMessage("MD first record project folder value is {}".format(record_project_path))
            # add a slash back in because strings remove it and remove the project name at the end
            old_path, old_ProjectID = os.path.split("\{}".format(str(record_project_path).strip()))
            arcpy.AddMessage("Job directory paths: \n\tOLD: {}\n\tNEW: {}\n\tOLD Project ID: {}\n\tNEW Project ID: {}".format(old_path, new_path, old_ProjectID, ProjectID))
            if str(old_path).lower().strip() == str(new_path).lower().strip():
                doRepath = False
                arcpy.AddMessage("Job directory paths match, doRepath = False")
    except:
        # NOTE(review): bare except silently ignores any failure of the
        # first-record inspection above; doRepath then keeps its value.
        pass
    arcpy.AddMessage("-----------------------------------------------------------\n\n")

    # NOTE(review): doRepath is computed but never read below -- the repair
    # runs for every MD regardless. Confirm whether that is intended.
    md_list = [FoldersConfig.DTM, FoldersConfig.DSM, FoldersConfig.DLM, FoldersConfig.DHM, FoldersConfig.DCM, FoldersConfig.INT]
    for md_name in md_list:
        # NOTE(review): update_paths_success is set but never read.
        update_paths_success = False
        # @TODO Add more info here!
        serviceDescription = "for project '{}' within state {} published in the year {}".format(ProjectAlias, ProjectState, ProjectYear)
        serviceTags = ",".join([ProjectID, ProjectAliasClean, ProjectState, str(ProjectYear)])
        # Per-MD file geodatabase name: "<fgdb base>_<md name>.gdb".
        filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name, md_name)
        if ProjectFolder.published.fgdb_name.endswith(".gdb"):
            filegdb_name = "{}_{}.gdb".format(ProjectFolder.published.fgdb_name[:-4], md_name)
        #ProjectMDs_fgdb_path = os.path.join(ProjectFolder.published.path, filegdb_name)
        new_project_path = os.path.join(new_path, ProjectID)
        old_project_path = os.path.join(old_path, ProjectID)
        # The project folder itself may have been renamed, not just moved.
        if str(ProjectID).lower().strip() != str(old_ProjectID).lower().strip():
            old_project_path = os.path.join(old_path, old_ProjectID)
        #arcpy.AddMessage("OLD File Geodatabase Path: {0}".format(ProjectMDs_fgdb_path))
        new_publish_path = os.path.join(new_project_path, "PUBLISHED")
        # NOTE(review): old_publish_path / *_delivered_path are only used by
        # the commented-out repair call below.
        old_publish_path = os.path.join(old_project_path, "PUBLISHED")
        #arcpy.AddMessage("OLD File Geodatabase Path: {0}".format(ProjectMDs_fgdb_path))
        new_delivered_path = os.path.join(new_project_path, "DELIVERED")
        old_delivered_path = os.path.join(old_project_path, "DELIVERED")
        new_projectMDs_fgdb_path = os.path.join(new_publish_path, filegdb_name)
        arcpy.AddMessage("File Geodatabase Path: {0}".format(new_projectMDs_fgdb_path))

        # Ensure the master_md_path exists
        if arcpy.Exists(new_projectMDs_fgdb_path):
            project_md_path = os.path.join(new_projectMDs_fgdb_path, md_name)
            arcpy.AddMessage("Mosaic Dataset Path: {0}".format(project_md_path))
            if arcpy.Exists(project_md_path):
                try:
                    # Remap every stored source path from the old project
                    # folder to the new one ("# <old> <new>" = all rasters).
                    arcpy.AddMessage("Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}".format(new_projectMDs_fgdb_path, old_project_path, new_project_path))
                    arcpy.RepairMosaicDatasetPaths_management(in_mosaic_dataset=project_md_path, paths_list="# {0} {1}".format(old_project_path, new_project_path), where_clause="1=1")
                    Utility.addToolMessages()
                    #arcpy.AddMessage("Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}".format(new_projectMDs_fgdb_path, old_delivered_path, new_delivered_path))
                    #arcpy.RepairMosaicDatasetPaths_management(in_mosaic_dataset=project_md_path, paths_list="# {0} {1}".format(old_delivered_path, new_delivered_path), where_clause="1=1")
                    update_paths_success = True
                except:
                    # DHM/DCM get deleted and re-created later, so a failed
                    # repair on them is not worth a warning.
                    if md_name <> FoldersConfig.DHM and md_name <> FoldersConfig.DCM:
                        arcpy.AddWarning("Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}".format(project_md_path))
                    # Best effort: dump the MD's paths to a table for manual review.
                    try:
                        out_table = "{}_Paths".format(project_md_path)
                        arcpy.ExportMosaicDatasetPaths_management(in_mosaic_dataset=project_md_path, out_table=out_table, where_clause="1=1", export_mode="ALL", types_of_paths="RASTER;ITEM_CACHE")
                        Utility.addToolMessages()
                        arcpy.AddMessage("List of repaired Mosaic Dataset Paths: {}".format(out_table))
                    except:
                        pass
                # Repeat the repair for the sibling "<md>_OCS" dataset, when
                # present (presumably an overview/cache MD -- TODO confirm).
                project_md_ocs_path = "{}_OCS".format(project_md_path)
                if arcpy.Exists(project_md_ocs_path):
                    try:
                        arcpy.AddMessage("Repairing Mosaic Dataset Paths: {}\n\told: {}\n\tnew: {}".format(project_md_ocs_path, old_project_path, new_project_path))
                        arcpy.RepairMosaicDatasetPaths_management(in_mosaic_dataset=project_md_ocs_path, paths_list="# {0} {1}".format(old_project_path, new_project_path), where_clause="1=1")
                        Utility.addToolMessages()
                    except:
                        arcpy.AddWarning("Failed to update paths, mosaic dataset paths should be verified and updated by hand if necessary. {}".format(project_md_ocs_path))
                        try:
                            out_table = "{}_Paths".format(project_md_ocs_path)
                            arcpy.ExportMosaicDatasetPaths_management(in_mosaic_dataset=project_md_ocs_path, out_table=out_table, where_clause="1=1", export_mode="ALL", types_of_paths="RASTER;ITEM_CACHE")
                            Utility.addToolMessages()
                            arcpy.AddMessage("List of repaired Mosaic Dataset Paths: {}".format(out_table))
                        except:
                            pass

                serviceName = "{}_{}".format(ProjectID, md_name)
                arcpy.AddMessage("Service Name: {0}".format(serviceName))
                # Retrieve some properties from the Mosaic Dataset to place in the tags field
                cellsizeResult = arcpy.GetRasterProperties_management(project_md_path, property_type="CELLSIZEX", band_index="")
                Utility.addToolMessages()
                cellsizeX = cellsizeResult.getOutput(0)
                # Get the units of the Mosaic Dataset
                descMD = arcpy.Describe(project_md_path)
                SpatRefMD = descMD.SpatialReference
                SpatRefUnitsMD = SpatRefMD.linearUnitName
                SpatRefNameMD = SpatRefMD.name
                arcpy.AddMessage("Spatial Reference name of Mosaic Dataset: {0}".format(SpatRefNameMD))
                arcpy.AddMessage("Spatial Reference X,Y Units of Mosaic Dataset: {0}".format(SpatRefUnitsMD))
                # append the cellsize and units of the Mosaic Dataset to the tags
                serviceTags = "{}, {}, {}".format(serviceTags, cellsizeX, SpatRefUnitsMD)
                serviceDescription = "{} {}. Horizontal spatial reference is {} and cell size is {} {}.".format(md_name, serviceDescription, SpatRefNameMD, cellsizeX, SpatRefUnitsMD)
                serviceDescription = "{}. Please note that cell size does not refer to the underlying data's cell size.".format(serviceDescription)
                serviceDescription = "{}. You must check the meta-data for the underlying elevation data's resolution information (cell width, cell height, and Lidar point spacing).".format(serviceDescription)
                arcpy.AddMessage("Service Tags: {0}".format(serviceTags))
                arcpy.AddMessage("Service description: {0}".format(serviceDescription))

                # Look for RasterPath in the list of allowed fields, and if found, don't publish
                # the mosaic dataset. Exposing the contents of RasterPath could compromise the
                # security of the Image Service.
                allowedFieldListMD = descMD.AllowedFields
                arcpy.AddMessage("AllowedFields in MD Properties: {0}".format(allowedFieldListMD))
                # NOTE(review): the 'True or' short-circuit disables the
                # RasterPath security check described above -- every MD is
                # published. Confirm whether this override is intentional.
                if True or "RASTERPATH;" not in allowedFieldListMD.upper():
                    # Create a list to manipulate server-side functions
                    # Bring Hillshade to the top of the list so it is default
                    ssFunctionsLst = list([])
                    ssFunctionsList = ""
                    if ssFunctions is not None:
                        ssFunctionsLst = ssFunctions.split(";")
                        if len(ssFunctionsLst) > 0:
                            foundHillshade = False
                            # INT (intensity) gets a METER function as its
                            # default instead of hillshade.
                            if md_name <> FoldersConfig.INT:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'HILLSHADE' in s.upper():
                                        arcpy.AddMessage("Will re-order SS Functions for {} so {} is default".format(md_name, s))
                                        foundHillshade = True
                                        break
                            else:
                                for i, s in enumerate(ssFunctionsLst):
                                    if 'METER' in s.upper():
                                        arcpy.AddMessage("Will re-order SS Functions for {} so {} is default".format(md_name, s))
                                        foundHillshade = True
                                        break
                            # if Hillshade is found then re-order the list
                            # Don't apply hillshade to intensity
                            if foundHillshade:
                                ssFunctionsLst.insert(0, ssFunctionsLst.pop(i))
                                # NOTE(review): format string below has no
                                # '{}' placeholder, so the list is never
                                # actually printed.
                                arcpy.AddMessage("Re-ordered SS Functions to (first is default): ".format(ssFunctionsLst))
                            # convert the list of server-side functions into a comma delimited string
                            ssFunctionsList = ",".join(ssFunctionsLst)
                            arcpy.AddMessage("Server-side Functions: {0}\n".format(ssFunctionsList))

                    # Create image service definition draft
                    arcpy.AddMessage("Creating image service definition draft file: ")
                    wsPath = os.path.dirname(os.path.dirname(project_md_path))
                    sddraftPath = os.path.join(wsPath, serviceName + ".sddraft")
                    arcpy.Delete_management(sddraftPath)
                    arcpy.AddMessage("\tMDPath='{}'".format(project_md_path))
                    arcpy.AddMessage("\tSDPath='{}'".format(sddraftPath))
                    arcpy.AddMessage("\tServiceName='{}'".format(serviceName))
                    arcpy.AddMessage("\tFolderName='{}'".format(folderName))
                    arcpy.AddMessage("\tSummary='{}'".format(serviceDescription))
                    arcpy.AddMessage("\tTags='{}'".format(serviceTags))
                    arcpy.CreateImageSDDraft(project_md_path, sddraftPath, serviceName, "ARCGIS_SERVER", connection_file_path=None, copy_data_to_server=False, folder_name=folderName, summary=serviceDescription, tags=serviceTags)

                    # Edit the service definition draft if user specified server-side functions
                    # or if user wants to enable download on the Image Service
                    updateSDServerSideFunctions(ssFunctionsLst, ssFunctionsList, sddraftPath, update)

                    # Analyze service definition draft
                    arcpy.AddMessage("Analyzing service definition draft file...")
                    analysis = arcpy.mapping.AnalyzeForSD(sddraftPath)
                    for key in ('messages', 'warnings', 'errors'):
                        arcpy.AddMessage("----" + key.upper() + "---")
                        analysis_vars = analysis[key]
                        for ((message, code), data) in analysis_vars.iteritems():  # @UnusedVariable
                            msg = (" ", message, " (CODE %i)" % code)
                            arcpy.AddMessage("".join(msg))
                    if analysis['errors'] == {}:
                        arcpy.AddMessage("Staging and publishing service definition...")
                        # StageService
                        arcpy.AddMessage("Staging sddraft file to sd file")
                        sdPath = sddraftPath.replace(".sddraft", ".sd")
                        arcpy.Delete_management(sdPath)
                        # Staging runs out-of-process in 32-bit Python via an
                        # external helper script.
                        RunUtil.runTool(r'ngce\pmdm\a\A07_B_StageSD.py', [sddraftPath, sdPath, serverConnectionFile, startupType], bit32=True, log_path=ProjectFolder.derived.path)
                        # arcpy.StageService_server(sddraftPath, sdPath)
                        # # try:
                        # #    # UploadServiceDefinition
                        # #    arcpy.AddMessage("Publishing mosaic data set as image service.")
                        # #    arcpy.UploadServiceDefinition_server(sdPath, serverConnectionFile, "#", "#", "#", "#", startupType)
                        # # except Exception as e:
                        # #    if runCount < 1:
                        # ##        PublishMosaicDataset(jobID, serverConnectionFile, True, 1)
                        # #        processJob(ProjectJob, project, ProjectUID, serverConnectionFile, serverFunctionPath, update=True, runCount=1)
                        # #        break
                        # #    else:
                        # #        raise e
                    else:
                        # if the sddraft analysis contained errors, display them
                        arcpy.AddError(analysis['errors'])
                else:
                    arcpy.AddError("Exiting: Found 'RasterPath' in list of allowed MD fields. Please remove this field from the list before publishing.")
                    arcpy.AddError(" To remove RasterPath from the list, go to Mosaic Dataset Properties, Defaults tab, Allowed Fields...")

                # Clean up and delete the .sd file
                # NOTE(review): sdPath is only assigned in the successful
                # analysis branch above; on other paths this raises NameError
                # (or reuses a stale value from a previous loop iteration).
                Utility.deleteFileIfExists(sdPath, False)

                # For some reason publishing breaks the referenced mosaics.
                # The function paths also don't update properly.
                # So delete them and re-create later.
                if md_name == FoldersConfig.DHM or md_name == FoldersConfig.DCM:
                    arcpy.AddMessage("Deleting Mosaic Dataset to recreate later {}".format(project_md_path))
                    Utility.deleteFileIfExists(project_md_path, True)
            else:
                arcpy.AddWarning("Project mosaic dataset not found '{}'.".format(project_md_path))
        else:
            arcpy.AddError("Project file geodatabase not found '{}'. Please add this before proceeding.".format(new_projectMDs_fgdb_path))
    # FOR LOOP
    ##
    ## Re-create the MD if it is FoldersConfig.DHM, FoldersConfig.DCM
    ##
    A06_A_CreateProjectMosaicDataset.CreateProjectMDs(project_wmx_jobid, dateDeliver=dateDeliver)
# Process: Remove Rasters From Mosaic Dataset arcpy.RemoveRastersFromMosaicDataset_management( gdbideam_raster_GOES16_C13, g_ESRI_variable_8, "UPDATE_BOUNDARY", "MARK_OVERVIEW_ITEMS", "DELETE_OVERVIEW_IMAGES", "DELETE_ITEM_CACHE", "REMOVE_MOSAICDATASET_ITEMS", "UPDATE_CELL_SIZES") # Process: Make Table View try: arcpy.MakeTableView_management(gdbideam_raster_GOES16_C13, raster_GOES16_C13_View, "", "", g_ESRI_variable_9) except: pass # Process: Delete Identical try: arcpy.DeleteIdentical_management(raster_GOES16_C13_View, g_ESRI_variable_10, "", "0") except: pass # Process: Calculate Value (3) arcpy.CalculateValue_management( g_ESRI_variable_11, "# Variables locales\\nzOrderField = \"zorder\"\\ndateField = \"Fecha\"\\n\\n# Importar librerias\\nimport arcpy\\ntry:\\n def evaluateZorder(table):\\n # Creación de cursor de actualización\\n sqlClause = (None, 'ORDER BY {0} DESC'.format(dateField))\\n flag = 0\\n with arcpy.da.UpdateCursor(table, [zOrderField, dateField], sql_clause = sqlClause) as cursor:\\n for row in cursor:\\n if flag == 0:\\n row[0] = -10\\n flag = 1\\n else:\\n row[0] = 0\\n cursor.updateRow(row)\\n return True\\nexcept:\\n pass", "Boolean") # Process: Repair Mosaic Dataset Paths arcpy.RepairMosaicDatasetPaths_management(gdbideam_raster_GOES16_C13, RPATHS, "") arcpy.AddMessage("Finished script")
def main(): total_success = True # Check arguments results = check_args() if not results: sys.exit(0) root_path, remap_paths = results try: print '\n{}'.format('=' * 80) print 'Repair Mosaic Datasets' print '{}\n'.format('=' * 80) print '{:<15}{}'.format('Root folder:', root_path) print '{:<15}{}\n'.format('Remap paths:', remap_paths) print 'Searching {} looking for file geodatabases...'.format(root_path) gdbs = getFileGeodatabases(root_path) for gdb in gdbs: print '\n\n{}'.format('=' * 80) print 'Found file geodatabase: {}'.format(gdb) print '\tChecking for existence of non-referenced mosaic datasets...' # Get any mosaic datasets in geodatabase arcpy.env.workspace = gdb mosaic_datasets = arcpy.ListDatasets('*', 'Mosaic') # Modify list to contain only non-reference mosaic datasets mosaic_datasets[:] = [ mosaic_dataset for mosaic_dataset in mosaic_datasets if not arcpy.Describe(mosaic_dataset).referenced ] if len(mosaic_datasets) == 0: print '\tNone found.' else: print '\tFound {} non-referenced mosaic dataset(s)...'.format( len(mosaic_datasets)) for mosaic_dataset in mosaic_datasets: print '\n\t{}'.format('-' * 70) print '\tRepairing paths in mosaic dataset {}...'.format( mosaic_dataset) results = arcpy.RepairMosaicDatasetPaths_management( mosaic_dataset, remap_paths) if results.maxSeverity == 2: total_success = False print '\n{}'.format(results.getMessages()) except: total_success = False # Get the traceback object tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] # Concatenate information together concerning the error into a message string pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str( sys.exc_info()[1]) # Print Python error messages for use in Python / Python Window print print "***** ERROR ENCOUNTERED *****" print pymsg + "\n" finally: if total_success: print "\n\nDone. Review output for errors.\n" sys.exit(0) else: print "\n\nDone. ERROR(s) occurred during mosaic dataset repair.\n" sys.exit(1)
#arcpy.AddMessage("continent is " + continent) #Run the Nutrition Metrics tool #End commented out section ###-----------------------------------------------------------------------------------### #global to increment over records rec = 0 #This block of code is used to ensure that a downloaded mosaic #dataset will be identified regardless of where the folder is stored. #This code won't be needed for the web version. sourceRasters_stepOne = os.path.split(mosaicDB)[0] sourceRasters_stepTwo = os.path.split(sourceRasters_stepOne)[0] paths = "* " + "'" + str(sourceRasters_stepTwo) + "'" arcpy.RepairMosaicDatasetPaths_management(mosaicDB, paths) #Define no data values bands = 1 nodataval = "" nodatarange = "BAND_1 0 1000000" query = "#" mode = "#" arcpy.DefineMosaicDatasetNoData_management(mosaicDB, bands, nodataval, nodatarange, query, mode) #This is the end of the block of code to identify the source rasters of a mosaic dataset. #This code will only need to be implemented on the desktop version of the tool. #It also should only be implemented if the mosaic dataset that's being used was downloaded #as part of the tool's sample data.