def addOrUpdateProject(self, wmx_job_id, project_Id, alias, alias_clean, state, year, parent_dir, archive_dir, project_dir, UID, project_AOI):
    """Insert or update this project's record, matched on its WMX job id.

    Returns the row list that was written (order must mirror self.fields).
    """
    # Where WMXJobID = '<Job_ID>'
    job_id_field = arcpy.AddFieldDelimiters(self.fclass, field_ProjectJob_WMXJobID)
    where_clause = "{} = {}".format(job_id_field, wmx_job_id)
    # Values in the same order as the field_ProjectJob_* field list.
    row = [
        UID,          # field_ProjectJob_UID
        wmx_job_id,   # field_ProjectJob_WMXJobID
        project_Id,   # field_ProjectJob_ProjID
        alias,        # field_ProjectJob_Alias
        alias_clean,  # field_ProjectJob_AliasClean
        state,        # field_ProjectJob_State
        year,         # field_ProjectJob_Year
        parent_dir,   # field_ProjectJob_ParentDir
        archive_dir,  # field_ProjectJob_ArchDir
        project_dir,  # field_ProjectJob_ProjDir
        project_AOI,  # field_ProjectJob_SHAPE
    ]
    Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        where_clause=where_clause,
        rowValueList=row)
    return row
def buildAnnotations(scratch_path, in_cont_fc, base_scheme_poly, name_list, footprint_path, runAgain=True):
    """Build contour-annotation tile artifacts in parallel, retrying once.

    Checks which tiles are out of date via getContourPrepList, fans
    contour_prep out over a process pool, then re-checks; if anything is
    still stale and runAgain is True, recurses a single time with
    runAgain=False so at most two passes are attempted.
    """
    a = datetime.datetime.now()
    updated_name_list = getContourPrepList(scratch_path, name_list)
    if len(updated_name_list) <= 0:
        arcpy.AddMessage("All tile artifacts up to date")
    else:
        # Run Contour Preparation for Each Unique Name Found within Input FC
        # CPU_HANDICAP cores are held back for the rest of the machine.
        pool = Pool(processes=cpu_count() - CPU_HANDICAP)
        # NOTE(review): this maps over the full name_list rather than the
        # filtered updated_name_list — presumably contour_prep skips tiles
        # that are already up to date; confirm, otherwise finished work is
        # redone on every pass.
        pool.map(
            partial(contour_prep, in_cont_fc, base_scheme_poly, scratch_path,
                    footprint_path), name_list)
        pool.close()
        pool.join()
        # sometimes things fail for no reason, so try again
        updated_name_list = getContourPrepList(scratch_path, name_list)
        if len(updated_name_list) > 0:
            if runAgain:
                arcpy.AddWarning("WARNING: Building annotations again.")
                # Second (and last) attempt: runAgain=False stops recursion.
                buildAnnotations(scratch_path, in_cont_fc, base_scheme_poly,
                                 name_list, footprint_path, False)
    Utility.doTime(a, "Finished building annotations")
def createRefDTMMosaic(in_md_path, out_md_path, v_unit):
    """Create a referenced DTM mosaic (TypeID = 1 rows) with a contour
    raster function chosen from the vertical unit, then calculate stats.

    Skips all work if out_md_path already exists.
    """
    timer = datetime.now()
    if arcpy.Exists(out_md_path):
        arcpy.AddMessage("Referenced mosaic dataset exists " + out_md_path)
    else:
        arcpy.CreateReferencedMosaicDataset_management(
            in_dataset=in_md_path,
            out_mosaic_dataset=out_md_path,
            where_clause="TypeID = 1")

        # Pick the contour function chain from the vertical unit name.
        unit_text = str(v_unit).upper()
        unit_is_feet = ("FEET" in unit_text) or ("FOOT" in unit_text) or ("FT" in unit_text)
        if not unit_is_feet:
            arcpy.AddMessage("Using METER Raster Function")
            raster_function_path = Raster.Contour_Meters_function_chain_path
        elif ("US" in unit_text) or ("SURVEY" in unit_text):
            arcpy.AddMessage("Using US FOOT Raster Function")
            raster_function_path = Raster.Contour_Feet_function_chain_path
        else:
            arcpy.AddMessage("Using INT FOOT Raster Function")
            raster_function_path = Raster.Contour_IntlFeet_function_chain_path

        arcpy.EditRasterFunction_management(
            in_mosaic_dataset=out_md_path,
            edit_mosaic_dataset_item="EDIT_MOSAIC_DATASET",
            edit_options="REPLACE",
            function_chain_definition=raster_function_path,
            location_function_name="")
        Utility.addToolMessages()

        arcpy.CalculateStatistics_management(
            in_raster_dataset=out_md_path,
            x_skip_factor=SKIP_FACTOR,
            y_skip_factor=SKIP_FACTOR,
            ignore_values="",
            skip_existing="OVERWRITE",
            area_of_interest="Feature Set")
    doTime(timer, "Created referenced mosaic dataset " + out_md_path)
def importMosaicDatasetGeometries(md_path, footprint_path, lasd_boundary_path):
    """Replace the mosaic dataset's FOOTPRINT and BOUNDARY geometries from
    the given feature classes; either source may be None to skip it.
    """
    geometry_imports = [
        # (target type, target join field, source fc, source join field)
        ("FOOTPRINT", "Name", footprint_path, "name"),
        ("BOUNDARY", "OBJECTID", lasd_boundary_path, "OBJECTID"),
    ]
    for fc_type, join_field, source_fc, source_join in geometry_imports:
        if source_fc is not None:
            arcpy.ImportMosaicDatasetGeometry_management(
                md_path,
                target_featureclass_type=fc_type,
                target_join_field=join_field,
                input_featureclass=source_fc,
                input_join_field=source_join)
            Utility.addToolMessages()
def createReferenceddMosaicDataset(in_md_path, out_md_path, spatial_ref, raster_v_unit):
    """Create a single-band referenced mosaic dataset and attach the canopy
    density function chain. Statistics are intentionally not calculated here.
    """
    start = datetime.datetime.now()
    arcpy.CreateReferencedMosaicDataset_management(
        in_dataset=in_md_path,
        out_mosaic_dataset=out_md_path,
        coordinate_system=spatial_ref,
        number_of_bands="1",
        pixel_type="32_BIT_SIGNED",
        where_clause="",
        in_template_dataset="",
        extent="",
        select_using_features="SELECT_USING_FEATURES",
        lod_field="",
        minPS_field="",
        maxPS_field="",
        pixelSize="",
        build_boundary="BUILD_BOUNDARY")

    arcpy.EditRasterFunction_management(
        in_mosaic_dataset=out_md_path,
        edit_mosaic_dataset_item="EDIT_MOSAIC_DATASET",
        edit_options="REPLACE",
        function_chain_definition=Raster.Canopy_Density_function_chain_path,
        location_function_name="")
    Utility.addToolMessages()

    # Manual follow-up step required: point the function at this project's
    # POINT_COUNT_LAST mosaic dataset.
    arcpy.AddMessage(
        "\tNOTE: !!! Please edit the MR Point Density function. Change to replace input to 'Multiply LAST by 100' with this project's POINT_COUNT_LAST mosaic dataset.\n\n\t{}\n"
        .format(out_md_path))
    doTime(start, "Created DHM '{}'".format(out_md_path))
def getRasterStats(ProjectUID, ProjectID, curr_raster, raster_path, group, elevation_type, raster_format, raster_PixelType, nodata, horz_cs_name, horz_unit_name, horz_cs_wkid, vert_cs_name, vert_unit_name, rows):
    """Compute a raster's boundary polygon and statistics, append the
    assembled record to 'rows', and return the raster cell size.

    NOTE: Order here must match field list in CMDRConfig.
    """
    mem_boundary = "in_memory\MemBoundary"
    if arcpy.Exists(mem_boundary):
        arcpy.Delete_management(mem_boundary)
        Utility.addToolMessages()
    # Extract the raster's extent polygon into the in-memory workspace.
    arcpy.RasterDomain_3d(raster_path, mem_boundary, "POLYGON")[0]
    Utility.addToolMessages()
    boundary = Utility.getExistingRecord(
        in_table=mem_boundary, field_names=['SHAPE@'], uidIndex=-1)[0][0]

    newRow = [
        ProjectUID, ProjectID, boundary, curr_raster, raster_path, group,
        elevation_type, raster_format, nodata, raster_PixelType
    ]
    arcpy.CalculateStatistics_management(
        in_raster_dataset=raster_path, skip_existing="OVERWRITE")
    # getRasterProperties appends the property values onto newRow.
    cellSize = getRasterProperties(raster_path, newRow)
    newRow.extend([
        horz_cs_name,
        horz_unit_name,
        horz_cs_wkid,
        vert_cs_name,
        vert_unit_name,
        None,  # Vert WKID, we can't know this in python
    ])
    rows.append(newRow)
    return cellSize
def DefineBuildOverviews(cellsizeOVR, MasterMD, MasterMD_overview_path, AreaToBuildOVR):
    """Define and build overviews for the master mosaic dataset.

    cellsizeOVR is the pixel size of the first overview level.
    AreaToBuildOVR bounds where overview tiles are defined (currently the
    whole master MD extent). The overview TIFF folder must live inside the
    ArcGIS Server Data Store.
    """
    arcpy.AddMessage(
        "\nCell size of First level Overview: {0}".format(cellsizeOVR))
    # Define the overview tiles: 5120x5120 TIFFs, factor-2 levels, LZW.
    arcpy.DefineOverviews_management(
        in_mosaic_dataset=MasterMD,
        overview_image_folder=MasterMD_overview_path,
        in_template_dataset=AreaToBuildOVR,
        extent="",
        pixel_size=cellsizeOVR,
        number_of_levels="",
        tile_rows="5120",
        tile_cols="5120",
        overview_factor="2",
        force_overview_tiles="NO_FORCE_OVERVIEW_TILES",
        resampling_method="BILINEAR",
        compression_method="LZW",
        compression_quality="100")
    Utility.addToolMessages()

    # Build the overview images; stale images are ignored, not regenerated.
    arcpy.BuildOverviews_management(
        MasterMD,
        "#",
        define_missing_tiles="NO_DEFINE_MISSING_TILES",
        generate_overviews="GENERATE_OVERVIEWS",
        generate_missing_images="GENERATE_MISSING_IMAGES",
        regenerate_stale_images="IGNORE_STALE_IMAGES")
    Utility.addToolMessages()

    # Report the post-build record count from the master MD.
    countMasterRastersOVR = int(arcpy.GetCount_management(MasterMD).getOutput(0))
    arcpy.AddMessage(
        "After Building Overviews Master Mosaic Dataset: {0} has {1} row(s).".
        format(MasterMD, countMasterRastersOVR))
    return
def validateRasterSpaitialRef(ProjectFolder, start_dir, elev_type, target_path, v_name, v_unit, h_name, h_unit, h_wkid):
    """Validate the spatial reference of delivered raster files.

    Returns the validated spatial reference, or None if it is missing,
    non-standard/unknown, or not shared by every raster file.
    """
    las_qainfo = LAS.QALasInfo(
        ProjectFolder, True)  # isclassified doesn't matter, disposbale las qa info
    las_qainfo.lasd_spatial_ref = checkSpatialOnRaster(
        start_dir, elev_type, target_path, v_name, v_unit, h_name, h_unit, h_wkid)
    if las_qainfo.lasd_spatial_ref is None:
        arcpy.AddError(
            "ERROR: Neither spatial reference in PRJ or {} files are valid CANNOT CONTINUE."
            .format(elev_type))
        arcpy.AddError(
            "ERROR: Please add a valid projection file (.prj) to the DELIVERED\{} folder."
            .format(elev_type))
    elif not las_qainfo.isValidSpatialReference():
        # BUG FIX: capture the SR description BEFORE clearing the reference;
        # previously lasd_spatial_ref was set to None first, so the error
        # message always reported 'None' instead of the offending SR.
        sr_info = Utility.getSpatialReferenceInfo(las_qainfo.lasd_spatial_ref)
        las_qainfo.lasd_spatial_ref = None
        arcpy.AddError(
            "ERROR: Spatial Reference for the {} files is not standard: '{}'".
            format(elev_type, sr_info))
        arcpy.AddError(
            "ERROR: Please add a valid projection file (.prj) to the DELIVERED\{} folder."
            .format(elev_type))
    elif las_qainfo.isUnknownSpatialReference():
        # Same fix as above: report the SR before discarding it.
        sr_info = Utility.getSpatialReferenceInfo(las_qainfo.lasd_spatial_ref)
        las_qainfo.lasd_spatial_ref = None
        arcpy.AddError(
            "ERROR: Spatial Reference for the {} files is not standard: '{}'".
            format(elev_type, sr_info))
        arcpy.AddError(
            "ERROR: Please add a valid projection file (.prj) to the DELIVERED\{} folder."
            .format(elev_type))
    if las_qainfo.lasd_spatial_ref is not None:
        # Verify every raster file carries the same spatial reference.
        f_list, all_matching = getFileProcessList(
            start_dir, elev_type, target_path, None,
            return_first=False, check_sr=True)  # @UnusedVariable
        if not all_matching:
            las_qainfo.lasd_spatial_ref = None
            arcpy.AddError(
                "Not all raster files have same spatial reference. Please make sure all files have the same spatial reference."
            )
    return las_qainfo.lasd_spatial_ref
def CreateContoursFromMD(strJobId):
    """C01 tool entry point: generate contours from the mosaic dataset for
    the project attached to the given WMX job id.
    """
    Utility.printArguments(["WMXJobID"], [strJobId], "C01 CreateContoursFromMD")
    start_time = datetime.now()
    project_job, project, strUID = getProjectFromWMXJobID(strJobId)  # @UnusedVariable
    processJob(project_job, project, strUID)
    doTime(start_time, "Operation Complete: C01 Create Contours From MD")
def processJob(ProjectJob, project, strUID):
    """A03: zip the whole project directory and move the archive to the
    project's archive directory.

    Changes the process working directory to the project dir for the
    duration of the zip (it is NOT restored afterwards — NOTE(review):
    confirm downstream code doesn't rely on the original cwd).
    """
    a = datetime.now()
    aa = a
    archive_dir = ProjectJob.getArchiveDir(project)
    basedir = ProjectJob.getProjectDir(project)
    archive_name = ProjectJob.getProjectID(project)
    Utility.printArguments(["ArchiveDir", "BaseDir", "ArchiveName"],
                           [archive_dir, basedir, archive_name],
                           "A03 ProjectZipArchive")
    if archive_dir is None or basedir is None or archive_name is None:
        arcpy.AddError(
            'Failed to retrieve project info: archive_dir={} base_dir={} archive_name={}'
            .format(archive_dir, basedir, archive_name))
    else:
        # ## Currently archiving entire project directory.
        # ## Uncomment following to just archive the ORIGINAL folder
        # basedir = os.path.join(basedir, FoldersConfig.original_dir)
        # ## Uncomment following to just archive the DELIVEREDfolder
        # basedir = os.path.join(basedir, FoldersConfig.delivered_dir)
        cwd = os.getcwd()
        arcpy.AddMessage('Changeing working directory from {} to {}'.format(
            cwd, basedir))
        os.chdir(basedir)
        arcpy.AddMessage('Current working directory is {}'.format(os.getcwd()))
        # archive contents of folder basedir
        arcpy.AddMessage('archiving contents of directory {} to {}.zip'.format(
            basedir, archive_name))
        # shutil.make_archive(archive_name, 'zip', basedir)
        # NOTE(review): the zip is created inside basedir while basedir is
        # being walked — the in-progress archive may be picked up and added
        # to itself; confirm os.walk ordering makes this benign.
        with zipfile.ZipFile(archive_name + '.zip', "w", zipfile.ZIP_DEFLATED,
                             allowZip64=True) as zf:
            for root, _, filenames in os.walk(basedir):
                for name in filenames:
                    name = os.path.join(root, name)
                    name = os.path.normpath(name)
                    # NOTE(review): doTime is called with a single argument
                    # here (elsewhere it takes (start, message)) — verify
                    # this overload exists.
                    a = doTime('adding to archive {} file {}'.format(
                        archive_name, name))
                    zf.write(name, name)
        # move the file to the archive directory
        a = doTime(
            'moving zip file to archive directory {}'.format(archive_dir))
        shutil.move("{}.zip".format(archive_name), archive_dir)
        a = doTime('Moved archive {} file to {}'.format(
            archive_name, archive_dir))
    doTime(aa, "Operation Complete: A03 Zip project and move to archive folder")
def PrepareContoursForJob(strJobId):
    """C02 tool entry point: prepare contours/annotations for publishing
    for the project attached to the given WMX job id.
    """
    Utility.printArguments(["WMXJobID"], [strJobId],
                           "C02 PrepareContoursForPublishing")
    start_time = datetime.datetime.now()
    project_job, project, strUID = getProjectFromWMXJobID(strJobId)  # @UnusedVariable
    processJob(project_job, project, strUID)
    doTime(start_time, "Operation Complete: C02 Create Contour Annotoations")
def updateDeliver(self, row, project_Id):
    """Overwrite the Deliver record whose ProjectID equals project_Id and
    return the (copied) row that was written.
    """
    # Where ProjectID = '<project_ID>'
    proj_field = arcpy.AddFieldDelimiters(self.fclass, field_Deliver_ProjID)
    where_clause = "{} = '{}'".format(proj_field, project_Id)
    row = list(row)
    Utility.updateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        rowValueList=row,
        where_clause=where_clause)
    return row
def AddMDMasterToCMDR(wmxJobId, masterParentDir, masterName, masterCellSize_m, masterServerConnectionFilePath):
    """B01: register (or update) a mosaic-dataset Master record in the CMDR.

    If masterName contains a path separator, the part before it is treated
    as the service folder and stripped from the name.
    """
    # Split an optional service folder prefix off the master name.
    masterServiceFolder = None
    sep_idx = masterName.find("/")
    if sep_idx < 0:
        sep_idx = masterName.find("\\")
    if sep_idx >= 0:
        masterServiceFolder = masterName[0:sep_idx]
        masterName = masterName[sep_idx + 1:]

    Utility.printArguments([
        "wmxJobId", "masterParentDir", "masterName", "masterCellSize_m",
        "masterServerConnectionFilePath", "masterServiceFolder"
    ], [
        wmxJobId, masterParentDir, masterName, masterCellSize_m,
        masterServerConnectionFilePath, masterServiceFolder
    ], "B01 AddMDMasterToCMDR")

    # Validate inputs before touching the CMDR (guard-style inversion of
    # the original nested ifs; messages are unchanged).
    if wmxJobId is None or int(wmxJobId) <= 0:
        arcpy.AddError("Master wmx Job ID is not set. cannot continue.")
    elif masterParentDir is None:
        arcpy.AddError("Master parent directory not set. cannot continue.")
    else:
        if masterName is None:
            masterName = CMDRConfig.DEFAULT_MDMASTER_NAME
        # get CMDR from job data workspace and set as current workspace
        Utility.setWMXJobDataAsEnvironmentWorkspace(wmxJobId)
        # get job AOI geometry
        master_AOI = Utility.getJobAoi(wmxJobId)
        # NOTE: Edit session handled in Utility
        Master = CMDR.MDMaster()
        Master.addOrUpdate(
            wmx_job_ID=wmxJobId,
            parent_dir=masterParentDir,
            master_name=masterName,
            masterServerConnectionFilePath=masterServerConnectionFilePath,
            masterCellSize_m=masterCellSize_m,
            masterServiceFolder=masterServiceFolder,
            master_AOI=master_AOI)
    arcpy.AddMessage("Operation complete")
def updateProject(self, row):
    """Persist a project row, keyed by the WMX job id stored inside it."""
    job_id = self.getWMXJobID(row)
    job_id_field = arcpy.AddFieldDelimiters(self.fclass, field_ProjectJob_WMXJobID)
    where_clause = "{} = {}".format(job_id_field, job_id)
    Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        where_clause=where_clause,
        rowValueList=row)
    return row
def AddProjectToMaster(strJobId, MasterMDs_parent_path, masterService):
    """A08 entry point: add a project's data to the master mosaic dataset.

    Looks up the project from the WMX job id and delegates to processJob.
    """
    aa = datetime.now()
    Utility.printArguments(["WMX Job ID", "masterParentDir", "masterService"],
                           [strJobId, MasterMDs_parent_path, masterService],
                           "A08 AddPrjectToMaster")
    ProjectJob, project, strUID = getProjectFromWMXJobID(
        strJobId)  # @UnusedVariable
    # BUG FIX: was passing the undefined name 'master_md_name' (NameError at
    # runtime); the intended value is the 'masterService' parameter.
    processJob(ProjectJob, project, strUID, MasterMDs_parent_path, masterService)
    doTime(aa, "Operation Complete: A06 Publish Mosaic Dataset")
def run_merge(lists, results):
    """Merge each set of input feature classes into one FC under 'results'.

    The output name is taken from the first member of each set; sets whose
    output already exists are skipped.
    """
    arcpy.env.overwriteOutput = True
    timer = datetime.datetime.now()
    for fc_set in lists:
        output_name = os.path.split(fc_set[0])[1]
        merge_fc_path = os.path.join(results, output_name)
        if not arcpy.Exists(merge_fc_path):
            arcpy.Merge_management(fc_set, merge_fc_path)
            timer = Utility.doTime(timer, "Merged {}".format(merge_fc_path))
        else:
            timer = Utility.doTime(timer,
                                   "Merged exists: {}".format(merge_fc_path))
def addOrUpdateProject(self, project_Id, UID, project_AOI):
    """Insert or update the QC record for a project, matched on ProjectID.

    Returns the row that was written.
    """
    # Where ProjectID = '<project_ID>'
    proj_field = arcpy.AddFieldDelimiters(self.fclass, field_QC_ProjID)
    where_clause = "{} = '{}'".format(proj_field, project_Id)
    row = [UID, project_Id, project_AOI]
    Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        where_clause=where_clause,
        rowValueList=row)
    return row
def RemoveDEMErrantValues(strJobId):
    """A05 entry point: remove errant values from the project's DEM rasters.

    Raises Exception if processJob reports any raster errors.
    """
    aa = datetime.now()
    Utility.printArguments(["WMX Job ID"], [strJobId], "A05 RemoveDEMErrantValues")
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("Spatial")
    try:
        ProjectJob, project, strUID = getProjectFromWMXJobID(strJobId)  # @UnusedVariable
        errorMsg = processJob(ProjectJob, project, strUID)
    finally:
        # FIX: always release the extension licenses, even when processing
        # raises (previously an exception left both extensions checked out).
        arcpy.CheckInExtension("3D")
        arcpy.CheckInExtension("Spatial")
    doTime(aa, "Operation Complete: A05 Remove DEM Errant Values")
    if len(errorMsg) > 0:
        raise Exception("Failed to process {} raster data correctly".format(
            " ".join(errorMsg)))
def updateJobDirectory(project_wmx_jobid, ProjectJob, project):
    """Re-point a project's parent and project directories at the current
    job directories and persist the change.

    Returns (old_parent_path, new_parent_path).
    """
    project = list(project)
    arcpy.AddMessage("Working with project: {}".format(project))
    job_parent_dir, job_project_dir = Utility.getJobProjectDirs(
        project_wmx_jobid)
    arcpy.AddMessage(
        "Splitting project directory: {}".format(job_project_dir))
    old_path, project_id = os.path.split(job_project_dir)
    # old_path = old_path.replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added line 3 April 2019 BJN
    arcpy.AddMessage("Old parent directory: {}".format(old_path))
    arcpy.AddMessage("ProjectID: {}".format(project_id))
    new_path = job_parent_dir  # .replace('aiotxftw6na01data', 'aiotxftw6na01')  # Added replace method 3 April 2019 BJN
    arcpy.AddMessage("New parent directory: {}".format(new_path))
    new_project_path = os.path.join(new_path, project_id)
    arcpy.AddMessage("New project directory: {}".format(new_project_path))
    ProjectJob.setParentDir(project, new_path)
    ProjectJob.setProjectDir(project, new_project_path)
    arcpy.AddMessage(
        "Set project directories: \n\tParent: {}\n\tProject: {}".format(
            new_path, new_project_path))
    ProjectJob.updateProject(project)
    arcpy.AddMessage(
        "Updated project directories: \n\tParent: {}\n\tProject: {}".format(
            new_path, new_project_path))
    return old_path, new_path
def saveOrUpdateRasterFileStat(self, row, project_id, file_name, elevation_type, group):
    '''
    Insert or update a raster-file statistics record, matched on
    project id + file name + elevation type + group.

    NOTE: This depends on the fields being in the correct order!
    @see: CMDRConfig.fields_RasterFileStat for the correct order
    '''
    # Build the compound key clause field-by-field.
    key_pairs = [
        (field_RasterFileStat_ProjID, project_id),
        (field_RasterFileStat_Name, file_name),
        (field_RasterFileStat_ElevType, elevation_type),
        (field_RasterFileStat_Group, group),
    ]
    where_clause = " and ".join(
        "{} = '{}'".format(arcpy.AddFieldDelimiters(self.fclass, fld), val)
        for fld, val in key_pairs)
    return Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        rowValueList=row,
        where_clause=where_clause)
def saveOrUpdate(self, row):
    """Insert or update a LAS file record, matched on project id, file name
    and LAS class.

    A None LAS class is matched with 'is null' rather than an equality test.
    """
    project_id = row[self.fields.index(field_LASFileInfo_ProjID)]
    file_name = row[self.fields.index(field_LASFileInfo_File_Name)]
    las_class = row[self.fields.index(field_LASFileInfo_File_LAS_Class)]
    where_clause = "{} = '{}'".format(
        arcpy.AddFieldDelimiters(self.fclass, field_LASFileInfo_ProjID),
        project_id)
    where_clause = "{} and {} = '{}'".format(
        where_clause,
        arcpy.AddFieldDelimiters(self.fclass, field_LASFileInfo_File_Name),
        file_name)
    if las_class is None:
        # FIX: the 'is null' template has only two placeholders; the stray
        # third format() argument (las_class) was silently ignored — removed.
        where_clause = "{} and {} is null".format(
            where_clause,
            arcpy.AddFieldDelimiters(self.fclass,
                                     field_LASFileInfo_File_LAS_Class))
    else:
        where_clause = "{} and {} = {}".format(
            where_clause,
            arcpy.AddFieldDelimiters(self.fclass,
                                     field_LASFileInfo_File_LAS_Class),
            las_class)
    return Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        rowValueList=row,
        where_clause=where_clause)
def saveOrUpdate(self, row):
    """Insert or update a LASD statistics record, matched on project id,
    file name, category and item.
    """
    field_index = self.fields.index
    project_id = row[field_index(field_LASFileInfo_ProjID)]
    file_name = row[field_index(field_LASFileInfo_File_Name)]
    category = row[field_index(field_LASDStatInfo_Category)]
    item = row[field_index(field_LASDStatInfo_Item)]
    # Compound key: all four fields must match.
    key_pairs = [
        (field_LASFileInfo_ProjID, project_id),
        (field_LASFileInfo_File_Name, file_name),
        (field_LASDStatInfo_Category, category),
        (field_LASDStatInfo_Item, item),
    ]
    where_clause = " and ".join(
        "{} = '{}'".format(arcpy.AddFieldDelimiters(self.fclass, fld), val)
        for fld, val in key_pairs)
    return Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        rowValueList=row,
        where_clause=where_clause)
def updateSDDraft(sddraftPath, outsddraft, update=False):
    """Rewrite a service definition draft XML for publishing.

    Sets MinInstances=0, MaxInstances=ContourConfig.CACHE_INSTANCES and
    antialiasingMode='Fast'. When update is True, flips the manifest so
    publishing REPLACES the existing service instead of creating a new one.
    The edited draft is written to outsddraft.
    """
    ## sddraftPath = sddraftPath.replace('aiotxftw6na01data', 'aiotxftw6na01') #Added replace method 22 Mar 2019 BJN
    ## outsddraft = outsddraft.replace('aiotxftw6na01data', 'aiotxftw6na01') #Added replace method 22 Mar 2019 BJN
    Utility.printArguments(["sddraftPath", "outsddraft", 'update'],
                           [sddraftPath, outsddraft, update],
                           "C03 Update SD Draft XML")
    newAntialiasingMode = "Fast"
    dom = DOM.parse(sddraftPath)
    keys = dom.getElementsByTagName('Key')
    arcpy.AddMessage(
        "Editing minInstances setting in service definition draft file...")
    for key in keys:
        # Property values live in the element following each <Key>.
        # Set the min and max instances
        if key.firstChild.data == 'MinInstances':
            key.nextSibling.firstChild.data = 0
        elif key.firstChild.data == 'MaxInstances':
            key.nextSibling.firstChild.data = ContourConfig.CACHE_INSTANCES
        # Set the antialiasing mode to 'Fast'
        if key.hasChildNodes():
            if key.firstChild.data == 'antialiasingMode':
                arcpy.AddMessage("Updating anti-aliasing to: {}".format(
                    newAntialiasingMode))
                key.nextSibling.firstChild.data = newAntialiasingMode
    if update:
        arcpy.AddMessage("Changing publish from CREATE to UPDATE service...")
        for tagType in dom.getElementsByTagName('Type'):
            if tagType.parentNode.tagName == 'SVCManifest':
                if tagType.hasChildNodes():
                    tagType.firstChild.data = "esriServiceDefinitionType_Replacement"
        for tagState in dom.getElementsByTagName('State'):
            if tagState.parentNode.tagName == 'SVCManifest':
                if tagState.hasChildNodes():
                    tagState.firstChild.data = "esriSDState_Published"
    # FIX: write via a context manager so the file handle is closed even if
    # writexml() raises (was a bare open()/close() pair).
    with open(outsddraft, 'w') as f:
        dom.writexml(f)
def updateProjIDAOI(project_id, row, aoi, fclass, fields, uid_index):
    """Update a record matched on ProjectID, optionally replacing its AOI
    shape, and return the (copied) row that was written.
    """
    # Where WMXJobID = '<Job_ID>'
    proj_field = arcpy.AddFieldDelimiters(fclass, field_Contract_ProjID)
    where_clause = "{} = '{}'".format(proj_field, project_id)
    # Shape field must exist even when aoi is None (matches prior behavior).
    shape_index = fields.index(field_ProjectJob_SHAPE)
    row = list(row)
    if aoi is not None:
        row[shape_index] = aoi
    Utility.addOrUpdateRecord(
        in_table=fclass,
        field_names=fields,
        uidIndex=uid_index,
        where_clause=where_clause,
        rowValueList=row)
    return row
def updateJobAOI(self, project_row, project_AOI):
    """Replace the AOI shape on a project row and persist it, matched on
    the row's WMX job id. Returns the updated (copied) row.
    """
    job_id = self.getWMXJobID(project_row)
    # Where WMXJobID = '<Job_ID>'
    job_id_field = arcpy.AddFieldDelimiters(self.fclass, field_ProjectJob_WMXJobID)
    where_clause = "{} = {}".format(job_id_field, job_id)
    updated_row = list(project_row)
    updated_row[self.fields.index(field_ProjectJob_SHAPE)] = project_AOI
    Utility.addOrUpdateRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=self.uid_index,
        where_clause=where_clause,
        rowValueList=updated_row)
    return updated_row
def getMDMaster(self, jobID):
    """Return the MDMaster row for the given WMX job id (the UID returned
    by Utility.getExistingRecord is discarded).
    """
    job_field = arcpy.AddFieldDelimiters(self.fclass, field_MDMaster_WMXJobID)
    where_clause = "{} = {}".format(job_field, jobID)
    row, _uid = Utility.getExistingRecord(self.fclass, self.fields,
                                          self.uid_index, where_clause)
    return row
def getRasterProperties(rasterObjectPath, newRow):
    """Append each CMDRConfig.Raster_PropertyTypes value for the raster to
    newRow (None on failure) and return the CELLSIZEX value (0 if absent).
    """
    cellSize = 0
    for PropertyType in CMDRConfig.Raster_PropertyTypes:
        try:
            propValue = arcpy.GetRasterProperties_management(
                rasterObjectPath, PropertyType)
            if propValue is not None:
                propValue = propValue[0]
            newRow.append(propValue)
            Utility.addToolMessages()
            if PropertyType == "CELLSIZEX":
                cellSize = newRow[-1]
        except Exception:
            # FIX: narrowed from a bare 'except:' so KeyboardInterrupt and
            # SystemExit are no longer swallowed; a failed property still
            # records a null placeholder to keep the row aligned.
            Utility.addToolMessages()  # surfaces the tool's error message
            newRow.append(None)
    return cellSize
def getExistingMDRow(self, wmx_job_ID):
    """Fetch the existing MDMaster record for the given WMX job id."""
    job_field = arcpy.AddFieldDelimiters(self.fclass, field_MDMaster_WMXJobID)
    result = Utility.getExistingRecord(
        in_table=self.fclass,
        field_names=self.fields,
        uidIndex=CMDRConfig.uid_index_MDMaster,
        where_clause="{} = {}".format(job_field, wmx_job_ID))
    return result[0]
def CreateContourCache(jobID, serverConnectionFile):
    """C03 entry point: build the contour cache for the project attached to
    the given WMX job id.
    """
    Utility.printArguments(["WMX Job ID", "serverConnectionFile"],
                           [jobID, serverConnectionFile],
                           "C03 CreateContourCache")
    Utility.setWMXJobDataAsEnvironmentWorkspace(jobID)
    project_job = CMDR.ProjectJob()
    project, project_uid = project_job.getProject(jobID)  # @UnusedVariable
    if project is None:
        arcpy.AddError("Failed to find project for job.")
    else:
        processJob(project_job, project, project_uid, serverConnectionFile)
    arcpy.AddMessage("Operation complete")
def gen_base_tiling_scheme(base_fc, scratch):
    """Generate (once) the polygon tiling scheme used as the base for
    contour tiling, next to the input feature class's geodatabase.

    Returns the path of the 'Base_Tiling_Scheme' feature class.
    """
    arcpy.env.overwriteOutput = True
    db, fc = os.path.split(base_fc)
    base_tiling_scheme = os.path.join(db, 'Base_Tiling_Scheme')
    if arcpy.Exists(base_tiling_scheme):
        arcpy.AddMessage("Tiling Scheme Exists: {}".format(base_tiling_scheme))
        return base_tiling_scheme

    timer = datetime.datetime.now()
    # Copy the template MXD into the scratch folder (only once).
    template_mxd = arcpy.mapping.MapDocument(ContourConfig.MXD_TEMPLATE)
    working_mxd_path = os.path.join(scratch, 'Tiling_Scheme.mxd')
    if not os.path.exists(working_mxd_path):
        template_mxd.saveACopy(working_mxd_path)

    # Repair the contour layers so they reference the input feature class.
    working_mxd = arcpy.mapping.MapDocument(working_mxd_path)
    for layer in arcpy.mapping.ListBrokenDataSources(working_mxd):
        if layer.name.startswith(r'Contour'):
            layer.replaceDataSource(db, "FILEGDB_WORKSPACE", fc)
    working_mxd.save()

    # Generate the tiling-scheme polygons for the repaired MXD.
    arcpy.MapServerCacheTilingSchemeToPolygons_cartography(
        map_document=working_mxd.filePath,
        data_frame='Layers',
        tiling_scheme=ContourConfig.TILING_SCHEME,
        output_feature_class=base_tiling_scheme,
        use_map_extent='USE_MAP_EXTENT',
        clip_to_horizon='CLIP_TO_HORIZON',
        antialiasing='ANTIALIASING',
        levels="9027.977411;4513.988705;2256.994353;1128.497176")
    Utility.doTime(
        timer, "Generated base tiling scheme {}".format(base_tiling_scheme))
    # JWS - 3/29
    del template_mxd
    return base_tiling_scheme