resilience_str = arcpy.GetParameterAsText(4)
    do_criteria_bool = arcpy.GetParameter(5)
    criteria_dir_zip = arcpy.GetParameterAsText(6)

    exposure_str = exposure_str.split(',')
    exposure_str = [str.strip() for str in exposure_str]

    sensitivity_str = sensitivity_str.split(',')
    sensitivity_str = [str.strip() for str in sensitivity_str]
	
    resilience_str = resilience_str.split(',')
    resilience_str = [str.strip() for str in resilience_str]
	
	#Unzip all input zipped files & folders:
    ## HABITAT ##
    arcpy.AddMessage("Reading Habitat Shapefile ...")
    unZipFile(layer_dir_zip, "habitat")
    layer_dir = os.path.join(arcpy.env.scratchFolder, "habitat") 	
    ## STRESSORS ##	
    arcpy.AddMessage("Unzipping Stressor Layers Folder ...")
    unZipFile(stressors_dir_zip, "stressors")
    stressors_dir = os.path.join(arcpy.env.scratchFolder, "stressors")
    ## SPATIAL CRITERIA ##	
    if criteria_dir_zip:
        arcpy.AddMessage("Unzipping Spatial Criteria Layers Folder ...")
        unZipFile(criteria_dir_zip, "sp_criteria")
        criteria_dir = os.path.join(arcpy.env.scratchFolder, "sp_criteria") 
    else:
        criteria_dir = None 	
	
    #create dictionary of args to pass onto the InVEST model execute function
 |    http://www.apache.org/licenses/LICENSE-2.0
 |
 | Unless required by applicable law or agreed to in writing, software
 | distributed under the License is distributed on an "AS IS" BASIS,
 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 | See the License for the specific language governing permissions and
 | limitations under the License.
 ------------------------------------------------------------------------------
 """
# dlaReplaceByField.py - use Replace field settings to replace content in a database or service.
# --------------------------------------------------------------------------------------------------------------
'''
Tool to replace data by field value or where clause. The script calls dlaPublish.publish with one or more
xml file names separated by semi colons - the way that a multiple file parameter is passed from Geoprocessing tools.

Data will be deleted in the target dataset using the Replace By Settings, and all data from the source will be appended.
'''
import dlaPublish, arcpy, dla

# Must be set before publish() runs: switches dlaPublish from its default
# behaviour to the Replace-By-Field logic described in the header comment.
dlaPublish.useReplaceSettings = True  # setting this to True will use ReplaceByField logic

arcpy.AddMessage("Replacing by Field Value")

# Geoprocessing multi-value parameter: one or more xml config file names
# separated by ';' (the way a multi-file parameter is passed from GP tools).
xmlFileNames = arcpy.GetParameterAsText(
    0)  # xml file name as a parameter, multiple values separated by ;
dla._errorCount = 0  # reset the module-level error counter before processing

dlaPublish.publish(xmlFileNames)  # perform the processing

# Emit the final status message for the tool run.
dla.writeFinalMessage("Data Assistant - Replace Data")
Esempio n. 3
0
def main(argc, argv):
    """Entry point for MDCS: parse '-flag:value' command-line arguments,
    configure the Base/logger objects and run the requested solution
    commands against the given config file.

    Args:
        argc: Legacy parameter; immediately recomputed from ``argv`` and
              kept only for backward compatibility with existing callers.
        argv: sys.argv-style argument list; argv[1:] hold '-flag:value'
              tokens (e.g. '-i:config.xml', '-c:AR+CM').

    Returns:
        False when the input config file is missing/not found, otherwise
        the result of ``solutions.run(...)``. Prints usage and exits(1)
        when fewer than two arguments are supplied.
    """
    argc = len(argv)  # recomputed defensively; the passed-in value is ignored
    if argc < 2:
        # Command-line argument codes:
        #   -i: config file
        #   -c: command codes
        #   -m: mosaic dataset name
        #   -s: source data paths (as inputs to command (AR))
        #   -l: full path to log file (including file name)
        user_args = [
        "-m: Mosaic dataset path including GDB and MD name [e.g. c:\WorldElevation.gdb\Portland]",
        "-s: Source data paths. (As inputs to command (AR)",
        "-l: Log file output path [path+file name]",
        "-artdem: Update DEM path in ART file"
        ]

        print("\nMDCS.py v5.7 [20130801]\nUsage: MDCS.py -c:<Optional:command> -i:<config_file>"
              "\n\nFlags to override configuration values,")

        for arg in user_args:
            print(arg)

        print("\nNote: Commands can be combined with '+' to do multiple operations."
              "\nAvailable commands:")

        user_cmds = solutionsLib.Solutions().getAvailableCommands()
        for key in user_cmds:
            print("\t" + key + ' = ' + user_cmds[key]['desc'])
        sys.exit(1)

    base = Base.Base()
    global log
    log = logger.Logger()
    base.setLog(log)

    argIndx = 1
    md_path_ = artdem = config = com = log_folder = code_base = ''

    while argIndx < argc:
        values = argv[argIndx].split(':')
        # Skip tokens that are too short or start with neither '-' nor '#'.
        if (len(values[0]) < 2
                or (values[0][:1] != '-' and values[0][:1] != '#')):
            argIndx += 1
            continue

        exSubCode = values[0][1:].lower()   # full flag name, e.g. 'artdem'
        subCode = values.pop(0)[1].lower()  # single-letter code, e.g. 'm'

        value = ':'.join(values).strip()    # rejoin values that themselves contain ':'

        if subCode == 'c':
            com = value.replace(' ', '')  # remove spaces in between.
        elif subCode == 'i':
            config = value
        elif subCode == 'm':
            md_path_ = value
        elif subCode == 's':
            base.m_sources = value
        elif subCode == 'l':
            log_folder = value
        elif subCode == 'b':
            code_base = value
        elif exSubCode == 'artdem':
            artdem = value
        elif subCode == 'p':
            # Dynamic parameter of the form '<value>$<KEY>'.
            pMax = value.rfind('$')
            if pMax == -1:
                argIndx += 1
                continue

            dynamic_var = value[pMax + 1:].upper()
            v = value[0:pMax]
            if dynamic_var.strip() != '':
                # First occurrence wins; later duplicates are ignored.
                if dynamic_var not in base.m_dynamic_params:
                    base.m_dynamic_params[dynamic_var] = v

        argIndx += 1

    if code_base != '':
        base.setCodeBase(code_base)

    if md_path_ != '':
        # Split '<workspace>/<geodatabase>/<mosaic>' into its components.
        (p, f) = os.path.split(md_path_)
        f = f.strip()
        const_gdb_ext_len_ = len(base.const_geodatabase_ext)
        ext = p[-const_gdb_ext_len_:].lower()
        if ((ext == base.const_geodatabase_ext.lower()
             or ext == base.const_geodatabase_SDE_ext.lower()) and f != ''):
            p = p.replace('\\', '/')
            w = p.split('/')
            workspace_ = ''
            for i in range(0, len(w) - 1):
                workspace_ += w[i] + '/'

            base.m_workspace = workspace_
            base.m_geodatabase = w[-1]
            base.m_mdName = f

    if not os.path.isfile(config):
        errMessage = u"Error: Input config file is not specified/not found! " + config
        arcpy.AddMessage(errMessage)
        return False

    if artdem != '':
        (base.m_art_ws, base.m_art_ds) = os.path.split(artdem)
        base.m_art_apply_changes = True

    comInfo = {
        'AR': {
            'cb': postAddData
        }  #assign a callback function to run custom user code when adding rasters.
    }

    configName, ext = os.path.splitext(config)
    configName = os.path.basename(configName)

    if com == '':
        com = base.const_cmd_default_text

    # '#gprun' as the first argument marks a Geoprocessing-tool invocation.
    if argv[1].lower() == '#gprun':
        log.isGPRun = True
    log.Project('MDCS')
    log.LogNamePrefix(configName)
    log.StartLog()

    log_output_folder = os.path.join(os.path.dirname(solutionLib_path), 'logs')

    if log_folder != '':
        (path, fileName) = os.path.split(log_folder)
        if path != '':
            log_output_folder = path
        if fileName != '':
            log.LogFileName(fileName)

    log.SetLogFolder(log_output_folder)
    solutions = solutionsLib.Solutions(base)

    success = solutions.run(config, com, comInfo)

    log.Message("Done...", log.const_general_text)

    log.WriteLog('#all')  #persist information/errors collected.
    # Previously the run result was computed but dropped; return it so
    # callers can detect failure (consistent with the False return above).
    return success
def ProcessRoutine(ArgVariables):
    """Drive the metadata-wizard workflow for one input data set.

    Exports/creates an FGDC-CSDGM metadata record for the input, updates
    its spatial sections, opens the external MetadataWizard editor for
    user review, then re-imports the edited record and cleans up temp
    files.

    Relies on module-level globals set up elsewhere in the file
    (InputData, WorkingDir, CreateStandAloneXML, UseStartTemplate,
    CustomStarterTemplate, GenericTemplate, MDTools, SpatialRefTools,
    desc, InputIsXML/CSV/Excel/GDB, python_exe, mdwiz_py_fname, ...) --
    only their use is visible here; confirm initialization against the
    rest of the file. ``ArgVariables`` itself is not referenced in this
    body.
    """
    try:

        # Echo the effective tool parameters for the GP message log.
        arcpy.AddMessage("\nInputData: " + InputData)
        arcpy.AddMessage("WorkingDir: " + WorkingDir)
        arcpy.AddMessage("CreateStandAloneXML: " + CreateStandAloneXML)
        arcpy.AddMessage("UseStartTemplate: " + UseStartTemplate)
        arcpy.AddMessage("StarterTemplate: " + CustomStarterTemplate)

        myDataType, myFeatType = Get_Data_Type(
        )  #Determine data type, and feature type if applicable
        arcpy.AddMessage("Data type being evaluated: " + myDataType)
        arcpy.AddMessage("Feature type being evaluated: " + myFeatType + "\n")

        SourceFile = os.path.split(os.path.splitext(InputData)[0])[
            1]  #The name of the input file. No extension. No full path.
        OriginalMDRecord = os.path.join(
            WorkingDir, SourceFile +
            "_Original.xml")  #File pointer to unmodified original.
        FGDCXML = os.path.join(
            WorkingDir, SourceFile +
            "_FGDC.xml")  #File pointer to the copy we will modify/update.

        #Create and keep 'Original' metadata copy in working directory.
        # Best-effort: failure to back up the original is deliberately ignored.
        try:
            MDTools.CreateCopyMDRecord(InputData, OriginalMDRecord)
        except:
            pass

        #After we made a copy of the input's original MD, start process from custom template if it is toggled.
        # NOTE: the toggle arrives as the string "true"/"false", not a bool.
        if str(UseStartTemplate) == "true":
            try:
                arcpy.MetadataImporter_conversion(
                    CustomStarterTemplate, InputData
                )  # This imports only: does not convert and does not sync
                arcpy.AddMessage(
                    "The user's custom starter record is now being imported into the input data set...\n"
                )
            except:
                arcpy.AddWarning("!!!!!!!")
                arcpy.AddWarning(
                    "There was a problem importing from the Custom Starter Template. Please ensure that the file is here: ("
                    + CustomStarterTemplate + ")")
                arcpy.AddWarning("!!!!!!!\n")
                sys.exit(1)

        try:  #Extract any existing metadata, and translate to FGDC format if necessary.
            ExportFGDC_MD_Utility.GetMDContent(
                InputData, FGDCXML, WorkingDir
            )  #Export (translate if necessary) input metadata to FGDC format. Remove ESRI 'sync' & 'reminder' elements.
        except:
            # No extractable metadata: start the record from the generic template.
            arcpy.AddMessage(
                "No metadata could be found for this record. A new file will be created.\n"
            )
            MDTools.CreateCopyMDRecord(GenericTemplate, FGDCXML)

        MDTools.RemoveNameSpace(
            FGDCXML
        )  #Eliminate namespace tags from root element in xml if present (appear when tool is run on spatial data sets).
        MDTools.CheckMasterNodes(
            FGDCXML
        )  #Ensure all the key FGDC-CSDGM nodes are present in the record.


        if not InputIsXML and not InputIsCSV \
                and not InputIsExcel and not InputIsGDB \
                and desc.DatasetType != "Table": #Only attempt to extract/update spatial properties from spatial data sets.

            # Probe the extent first so a missing coordinate system fails fast.
            try:
                GCS_ExtentList = Get_LatLon_BndBox()[1]
            except:
                arcpy.AddWarning("!!!!!!!")
                arcpy.AddWarning(
                    "A problem was encountered when attempting to retrieve the spatial extent of the input data set. Please review the tool documentation and ensure the data set is a valid input and ENSURE THAT A COORDINATE SYSTEM HAS BEEN DEFINED."
                )
                arcpy.AddWarning("!!!!!!!\n")
                sys.exit()

            #Get/Update Bounding Coordinates
            # NOTE(review): Get_LatLon_BndBox() is re-invoked several times
            # below; presumably idempotent -- confirm before caching.
            GCS_ExtentList = Get_LatLon_BndBox()[1]
            Local_ExtentList = Get_LatLon_BndBox()[0]
            if "nan" in str(Local_ExtentList):
                arcpy.AddWarning(
                    "No spatial extent could be found for the input spatial data set. Please review the 'Bounding Extent' in the final metadata record. (Values will be set to maximum global extent).\n"
                )
            arcpy.AddMessage("Bounding Coordinates (Local): " +
                             str(Local_ExtentList))
            arcpy.AddMessage("Bounding Coordinates (Geographic): " +
                             str(GCS_ExtentList) + "\n")

            # Geographic extent list order appears to be [W, S, E, N].
            WestBC = Get_LatLon_BndBox()[1][0]
            EastBC = Get_LatLon_BndBox()[1][2]
            NorthBC = Get_LatLon_BndBox()[1][3]
            SouthBC = Get_LatLon_BndBox()[1][1]
            MDTools.WriteBoundingInfo(FGDCXML, WestBC, EastBC, NorthBC,
                                      SouthBC)

            #Get/Update Spatial Data Organization
            SpatialDataOrgInfo = Get_Spatial_Data_OrgInfo(
                InputData, myDataType, myFeatType)
            MDTools.WriteSpatialDataOrgInfo(FGDCXML, SpatialDataOrgInfo)

            #Get/Update Spatial Reference Information
            SpatialReferenceInfo = SpatialRefTools.SpatialRefInfo(
                GCS_PrjFile, InputData, WorkingDir, GCS_ExtentList)
            MDTools.WriteSpatialRefInfo(FGDCXML, SpatialReferenceInfo)
            #Handle vertical coordinate system?

        #Get/Update Geospatial Presentation Form. Also updates Format Name (within Distribution Info).
        #(Skip this step and leave existing content if tool input is XML).
        if not InputIsXML:
            MDTools.WriteGeospatialForm(FGDCXML, myDataType, myFeatType)

        #Get/Update Native Environment Details
        #This will be used as a switch to determine which .exe for the EA builder needs to be run (for either 10.0, 10.1, or 10.2).
        #The version info is also written out to the XML record in the 'Native Environment' section.
        ESRIVersion = GetESRIVersion_WriteNativeEnv(FGDCXML)

        #Get/Update Metadata Date of Editing
        Now = datetime.datetime.now()
        MDDate = Now.strftime("%Y%m%d")
        MDTools.WriteMDDate(FGDCXML, MDDate)

        #Update Entity/Attribute Section
        # Decide what dataset-contents file (if any) to hand to the wizard:
        # CSV/Excel go through as-is; other non-XML/non-GDB inputs are
        # introspected and pickled; XML/GDB pass nothing.
        if InputIsCSV or InputIsExcel:
            contents_fname = InputData
        elif not InputIsXML and not InputIsGDB:
            data_contents = introspector.introspect_dataset(InputData)
            input_fname = os.path.split(InputData)[1]
            contents_fname = os.path.join(WorkingDir, input_fname + ".p")
            pickle.dump(data_contents, open(contents_fname, "wb"))
        else:
            contents_fname = ''

        #Rerun FGDC Translator tool to handle newly-added elements that are out of order in XML tree.
        MDTools.ReRunFGDCTranslator(FGDCXML)

        #Re-import new metadata to the data set to capture E/A tool changes. If input file is a stand alone .xml this step is skipped
        if not InputIsXML:
            try:
                arcpy.MetadataImporter_conversion(
                    FGDCXML, InputData
                )  # This imports only: does not convert and does not sync
            except BaseException as e:
                arcpy.AddWarning("There was a problem during the metadata"
                                 " importation process.\n{}".format(str(e)))

        #Open up Metadata Editor and allow user to review/update
        outXML = os.path.splitext(FGDCXML)[0] + "temp.xml"
        #Arg = '"' + MetadataEditor + '"' + " " + '"' + FGDCXML + '"' + " " + '"' + outXML + '"' + " " + '"' + Browser + '"' #Start and end quotes are necessary to handle spaces in file names and IE Path when passing to Command Prompt.
        #Arg = '"' + MetadataEditor + '"' + " " + '"' + FGDCXML + '"' + " " + '"' + outXML + '"' + " "
        # Quoting protects paths containing spaces when passed to the shell.
        Arg = '"%s" "%s" "%s"' % (python_exe, mdwiz_py_fname, FGDCXML)
        if contents_fname:
            Arg += ' "{}"'.format(contents_fname)
        arcpy.AddWarning(Arg)
        arcpy.AddMessage("*************************")
        arcpy.AddMessage(
            "\nPLEASE UPDATE/REVIEW THE METADATA INFO IN THE POP-UP WINDOW.")
        arcpy.AddMessage("(Allow a moment for the window to open).\n")
        arcpy.AddMessage("*************************")
        # Best-effort audio cue; silently skipped if the wav is unavailable.
        try:
            winsound.PlaySound(
                r"C:\Windows\Media\Cityscape\Windows Exclamation.wav",
                winsound.SND_FILENAME)
        except:
            pass

        # Launch the MetadataWizard UI and block until the user closes it.
        p = subprocess.Popen(Arg,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        output, error = p.communicate()

        if output:
            arcpy.AddMessage("MetadataWizard output:\n  {}".format(output))
        if error:
            arcpy.AddWarning(sys.executable)
            arcpy.AddWarning("An error was encountered opening "
                             "the MetadataWizard application:\n")
            arcpy.AddWarning("Error> error  {}".format(error.strip()))
            sys.exit(1)

        # NOTE(review): communicate() above already waits; this wait() is redundant.
        p.wait()

        try:
            MDTools.RemoveStyleSheet(
                FGDCXML
            )  #MP actually removes the stylesheet in VB.NET app... this is a redundancy here.
            # MDTools.ReplaceXML(FGDCXML, outXML)
        except:
            arcpy.AddWarning(
                "No content was saved in the Metadata Editor window. The metadata record was not updated.\n"
            )

        #Re-import new metadata to the data set to capture user edits from the Metadata Editor window.
        try:
            arcpy.MetadataImporter_conversion(
                FGDCXML, InputData
            )  # This imports only: does not convert and does not sync
            arcpy.AddMessage(
                "The updated metadata record is now being re-imported into the input data set...\n"
            )
        except:
            arcpy.AddMessage(
                "There was a problem during the metadata importation process!")

        #Remove the Error Report file generated by MP from the Main Metadata Editor.
        MP_ErrorReport = os.path.splitext(
            FGDCXML)[0] + "temp_MP_ErrorReport.xml"
        try:
            os.remove(MP_ErrorReport)
        except:
            pass

        #Remove FGDC XML file if the toggle to preserve 'stand-alone' file is configured to FALSE. This appears to be passed as a string rather than boolean.
        if str(CreateStandAloneXML) == "false":
            try:
                arcpy.Delete_management(FGDCXML)
                arcpy.AddMessage(
                    "The Wizard will now remove the stand-alone FGDC XML, as requested in the tool interface...\n"
                )
            except:
                arcpy.AddMessage(
                    "There was a problem removing the stand-alone XML file. Try removing the file (%s) manually from the working directory.\n"
                    % FGDCXML)

        #Remove the 'ArcpyTranslate.xml' temp file that gets created when exporting from ESRI metadata to FGDC.
        try:
            os.remove(os.path.join(WorkingDir, 'ArcpyTranslate.xml'))
        except:
            pass

    except arcpy.ExecuteError:
        # Geoprocessing-specific failure handler defined elsewhere in the file.
        arcpyError()
    except:
        # Generic Python failure handler defined elsewhere in the file.
        pythonError()
Esempio n. 5
0
			   'passWord': "******",
			   'port':'6080'}

	dd = CreateContectionFile()
	dd.loginInfo = logDict
	path =os.path.join(tempfile.mkdtemp(),'server.ags')

	print path

	dd.filePath = path
	dd.CreateContectionFile()
	clsPublishservice=publishServices()

	#get

	file=r"C:\huaweitest\mxds\new"

	fileList=clsPublishservice.GetMxFileList(file)

	clusterName='default'

	servic_dir=''
	t_begin=datetime.now()
	clsPublishservice.publishServices(fileList,path,clusterName,copy_data_to_server=False,folder=servic_dir)

	t_end=datetime.now()

	time_passed=(t_end-t_begin).seconds

	arcpy.AddMessage("Publishing time:")
	arcpy.AddMessage(str(time_passed))
Esempio n. 6
0
# Script Name: GageWatershed
#
# Created By:  David Tarboton
# Date:        1/25/14

# Import ArcPy site-package and os modules
import arcpy
import os
import subprocess

# Inputs
# D8 flow direction grid (parameter 0); resolve the layer to its on-disk path.
inlyr = arcpy.GetParameterAsText(0)
desc = arcpy.Describe(inlyr)
p = str(desc.catalogPath)
arcpy.AddMessage("\nInput D8 Flow Direction Grid: " + p)

# Outlets feature layer (parameter 1).
ogrfile = arcpy.GetParameterAsText(1)
desc = arcpy.Describe(ogrfile)
shfl1 = str(desc.catalogPath)
extn = os.path.splitext(shfl1)[1]  # get extension of the file

# If the input is already a shapefile use it as-is; otherwise convert the
# features to JSON. The comparison is case-insensitive so '.SHP' paths are
# also recognized as shapefiles (the original '==' check missed them).
if extn.lower() == ".shp":
    shfl = shfl1
else:
    arcpy.AddMessage("Extracting json outlet file from: " + shfl1)
    basename = os.path.basename(shfl1)  # last component of the outlet path
    # NOTE(review): dirname comes from the flow-direction grid path `p`,
    # not from the outlet file -- presumably intentional so the .json lands
    # next to the grid; confirm.
    dirname = os.path.dirname(p)  # get directory
    arcpy.env.workspace = dirname  # does not work without specifying the workspace
    arcpy.FeaturesToJSON_conversion(shfl1, basename +
                                    ".json")  # convert feature to json
Esempio n. 7
0
def install_package(overwrite=False, r_library_path=r_library_path):
    """Install ArcGIS R bindings onto this machine.

    Args:
        overwrite: When True, reinstall even if the bridge is already
                   present; any non-True value is treated as False.
        r_library_path: Target R library location (module-level default).

    Side effects: downloads the current release, runs ``Rcmd INSTALL``,
    and (for ArcMap 10.3.1 or a Pro/10.3 side-by-side setup) creates a
    link or copy of the package under the ArcMap install directory.
    Exits via ``sys.exit()`` on unrecoverable errors.
    """
    # Coerce to a strict boolean; anything other than literal True means False
    # (preserves the original's `is True` semantics).
    overwrite = overwrite is True
    if not overwrite:
        if PACKAGE_VERSION:
            msg = "The ArcGIS R bridge is installed, and overwrite is disabled."
            arcpy.AddError(msg)
            sys.exit()

    (install_dir, arc_version, product) = arcgis_platform()
    arcmap_needs_link = False

    # check that we're in a sane installation environment
    validate_environment()

    # detect if we have a 10.3.1 install that needs linking
    if product == 'Pro' and arcmap_exists("10.3"):
        arcmap_needs_link = True
        msg_base = "Pro side by side with 10.3 detected,"
        if arcmap_install_path is not None:
            msg = "{} installing bridge for both environments.".format(
                msg_base)
            arcpy.AddMessage(msg)
        else:
            # BUG FIX: previously `.format(msg_base)` bound only to the second
            # string literal (method call binds tighter than `+`), so the `{}`
            # placeholder was emitted literally and msg_base never inserted.
            msg = ("{} but unable to find install path. ArcGIS bridge "
                   "must be manually installed in ArcGIS 10.3.").format(msg_base)
            arcpy.AddWarning(msg)

    # if we're going to install the bridge in 10.3.1, create the appropriate
    # directory before trying to install.
    # (Parentheses added for clarity; `and` already binds tighter than `or`.)
    if (arc_version == '10.3.1' and product == 'ArcMap') or arcmap_needs_link:
        r_integration_dir = os.path.join(arcmap_install_path, "Rintegration")
        # TODO escalate privs here? test on non-admin user
        if not os.path.exists(r_integration_dir):
            try:
                # Probe writability first so we fail before a partial mkdir.
                write_test = os.path.join(install_dir, 'test.txt')
                with open(write_test, 'w') as f:
                    f.write('test')
                os.remove(write_test)
                os.makedirs(r_integration_dir)
            except IOError:
                arcpy.AddError(
                    "Insufficient privileges to create 10.3.1 bridge directory."
                    " Please start {} as an administrator, by right clicking"
                    " the icon, selecting \"Run as Administrator\", then run this"
                    " script again.".format(product))
                sys.exit()

    # set an R-compatible temporary folder, if needed.
    orig_tmpdir = os.getenv("TMPDIR")
    if not orig_tmpdir:
        set_env_tmpdir()

    download_url = release_info()[0]
    if download_url is None:
        arcpy.AddError(
            "Unable to get current release information. Check internet connection."
        )
        sys.exit()

    # we have a release, write it to disk for installation
    with mkdtemp() as temp_dir:
        zip_name = os.path.basename(download_url)
        package_path = os.path.join(temp_dir, zip_name)
        save_url(download_url, package_path)
        if os.path.exists(package_path):
            # TODO -- need to do UAC escalation here?
            # call the R installation script
            execute_r('Rcmd', 'INSTALL', package_path)
        else:
            arcpy.AddError("No package found at {}".format(package_path))

    # return TMPDIR to its original value; only need it for Rcmd INSTALL
    set_env_tmpdir(orig_tmpdir)

    # at 10.3.1, we _must_ have the bridge installed at the correct location.
    # create a symlink that connects back to the correct location on disk.
    if (arc_version == '10.3.1' and product == 'ArcMap') or arcmap_needs_link:
        link_dir = os.path.join(r_integration_dir, PACKAGE_NAME)

        if os.path.exists(link_dir):
            if junctions_supported(link_dir) or hardlinks_supported(link_dir):
                # os.rmdir uses RemoveDirectoryW, and can delete a junction
                os.rmdir(link_dir)
            else:
                shutil.rmtree(link_dir)

        # set up the link
        r_package_path = r_pkg_path()

        if r_package_path:
            arcpy.AddMessage("R package path: {}.".format(r_package_path))
        else:
            arcpy.AddError("Unable to locate R package library. Link failed.")
            sys.exit()

        detect_msg = "ArcGIS 10.3.1 detected."
        if junctions_supported(link_dir) or hardlinks_supported(link_dir):
            arcpy.AddMessage("{} Creating link to package.".format(detect_msg))
            kdll.CreateSymbolicLinkW(link_dir, r_package_path, 1)
        else:
            # working on a non-NTFS volume, copy instead
            vol_info = getvolumeinfo(link_dir)
            arcpy.AddMessage(
                "{} Drive type: {}. Copying package files.".format(
                    detect_msg, vol_info[0]))
            # NOTE: this will need to be resynced when the package is updated,
            #       if installed from the R side.
            shutil.copytree(r_package_path, link_dir)
Esempio n. 8
0
# Load packages and modules
import sys, arcpy, os
from wdpa.qa import arcgis_table_to_df, find_wdpa_rows, poly_checks, INPUT_FIELDS_POLY
from wdpa.export import output_errors_to_excel

# Load input
# argv[1]: input WDPA polygon table/feature class; argv[2]: output location
input_poly = sys.argv[1]
output_path = sys.argv[2]

inputfilename = os.path.basename(input_poly)

# Let us welcome our guest of honour
arcpy.AddMessage('\nAll hail the WDPA\n')

# Convert Polygon table to pandas DataFrame
arcpy.AddMessage('Converting to pandas DataFrame')
poly_df = arcgis_table_to_df(input_poly, INPUT_FIELDS_POLY)
result = dict()  # maps check name -> rows of poly_df that failed that check

# Run the checks
arcpy.AddMessage('--- Running QA checks on Polygons ---')
for poly_check in poly_checks:  # each poly_check supplies a descriptive 'name' and a callable 'func'
    arcpy.AddMessage('Running:' + poly_check['name'])
    # checks are not currently optimised, thus return all pids regardless
    wdpa_pid = poly_check['func'](poly_df, True)

    # For each check, obtain the rows that contain errors
    if wdpa_pid.size > 0:
        result[poly_check['name']] = find_wdpa_rows(poly_df, wdpa_pid)

# Write output to file
Esempio n. 9
0
#
# ---------------------------------------------------------------------------
# Builds constant "roughness length" and "liquid water saturation" rasters
# matching an elevation raster's cell size and extent.
# NOTE(review): this fragment references names that are not defined in the
# visible code (`elevation_raster`, `CreateConstantRaster` -- presumably
# from `arcpy.sa` via a star import elsewhere); confirm against the full file.

# Import system modules
import sys, os, random, arcpy
import functionlib as fl

# Create the Geoprocessor object
#gp = arcgisscripting.create()

# Set the necessary product code
arcpy.SetProduct("ArcInfo")
arcpy.env.overwriteoutput = 1

# Check out spatial analyst license
arcpy.AddMessage("\tChecking availability of spatial analyst license...")
if arcpy.CheckExtension("spatial") == "Available":
    arcpy.CheckOutExtension("spatial")
else:
    arcpy.AddWarning(
        "\tSpatial Analyst is needed to run this tool.  Processing aborted...")
    # NOTE(review): raising a string is a Python 2 idiom; under Python 3 this
    # raises TypeError instead of the intended "LicenseError".
    raise "LicenseError"

# Script arguments...

# Positional sys.argv inputs (1-5) and GetParameter inputs (1-4) are mixed;
# the sys.argv values are not used in this visible fragment.
aExtent = sys.argv[1]
pointLayer = sys.argv[2]
roadLayer = sys.argv[3]
minCoreSize = sys.argv[4]
stinfDistance = sys.argv[5]
rdinfDistance = sys.argv[6]
conRL = arcpy.GetParameter(1)
conRL_ouRaster = arcpy.GetParameterAsText(2)
conH2OSat = arcpy.GetParameter(3)
conH2OSat_outRaster = arcpy.GetParameterAsText(4)

#Set up workspace
scratchWS = arcpy.env.scratchWorkspace
scratchGDB = arcpy.env.scratchGDB
#output cell size and processing extent should be the same as elevation raster
arcpy.env.cellSize = elevation_raster
output_cell_size = arcpy.env.cellSize
arcpy.env.extent = elevation_raster
extent = arcpy.env.extent
arcpy.env.overwriteOutput = True
arcpy.env.parallelProcessingFactor = "75%"
arcpy.Delete_management("in_memory")

#Get coordinate system information
desc = arcpy.Describe(elevation_raster)
coordSystem = desc.spatialReference

arcpy.AddMessage("Creating constant roughness length raster")
rlConstant = CreateConstantRaster(conRL, "FLOAT", output_cell_size, extent)
arcpy.DefineProjection_management(rlConstant, coordSystem)
rlConstant.save(conRL_ouRaster)

arcpy.AddMessage("Creating constant liquid water saturation raster")
waterConstant = CreateConstantRaster(conH2OSat, "FLOAT", output_cell_size,
                                     extent)
arcpy.DefineProjection_management(waterConstant, coordSystem)
waterConstant.save(conH2OSat_outRaster)
Esempio n. 11
0
def main(fcInputSegments,
         fcInputAttrbNetwork,
         tempWorkspace):
    """Generate stream-network node feature classes (braid-to-braid,
    braid-to-mainstem, tributary confluence), merge them, and join node
    counts back onto the segmented stream network.

    Args:
        fcInputSegments: Segmented stream network feature class.
        fcInputAttrbNetwork: Network previously attributed by the
            Generate Network Attributes tool (must carry '_edgetype_').
        tempWorkspace: Workspace for the intermediate datasets.

    Side effects only: creates/overwrites temporary datasets in
    tempWorkspace and adds summary fields to the input segment layer.
    """

    arcpy.env.overwriteOutput = True

    # Turn off Z and M geometry
    arcpy.env.outputMFlag = "Disabled"
    arcpy.env.outputZFlag = "Disabled"

    # Prep temporary files and layers
    arcpy.MakeFeatureLayer_management(fcInputSegments, "lyrInputSegments")
    arcpy.MakeFeatureLayer_management(fcInputAttrbNetwork, "lyrInputAttrbNetwork")
    fcInputAttrbNetworkTemp = gis_tools.newGISDataset(tempWorkspace, "fcInputAttrbNetworkTemp")
    arcpy.CopyFeatures_management("lyrInputAttrbNetwork", fcInputAttrbNetworkTemp)
    fcBraidDslv = gis_tools.newGISDataset(tempWorkspace, "fcBraidDslv")
    fcSegmentDslv = gis_tools.newGISDataset(tempWorkspace, "fcSegmentDslv")
    fcNodeBraidToBraid = gis_tools.newGISDataset(tempWorkspace, "fcNodeBraidToBraid")
    fcNodeBraidToBraidSingle = gis_tools.newGISDataset(tempWorkspace, "fcNodeBraidToBraidSingle")
    fcNodeBraidToBraidDslv = gis_tools.newGISDataset(tempWorkspace, "fcNodeBraidToBraidDslv")
    fcNodeBraidToMainstem = gis_tools.newGISDataset(tempWorkspace, "fcNodeBraidToMainstem")
    fcNodeBraidToMainstemSingle = gis_tools.newGISDataset(tempWorkspace, "fcNodeBraidToMainstemSingle")
    fcNodeBraidToMainstemDslv = gis_tools.newGISDataset(tempWorkspace, "fcNodeBraidToMainstemDslv")
    fcNodeTribConfluence = gis_tools.newGISDataset(tempWorkspace, "fcNodeTribConfluence")
    fcNodeTribConfluenceSingle = gis_tools.newGISDataset(tempWorkspace, "fcNodeTribConfuenceSingle")
    fcNodeTribConfluenceDslv = gis_tools.newGISDataset(tempWorkspace, "fcNodeTribConfluenceDslv")
    fcNodesAll = gis_tools.newGISDataset(tempWorkspace, "fcNodesAll")
    fcNodesToSegments = gis_tools.newGISDataset(tempWorkspace, "fcNodesToSegments")

    # Check if the segmented stream network has a field named LineOID
    if findField(fcInputSegments, "SegmentID"):
        LineOID = "SegmentID"
        pass
    else:
        arcpy.AddMessage("SegmentID attribute field not found in input stream feature class. Using ObjectID field...")
        LineOID = arcpy.Describe(fcInputSegments).OIDFieldName

    # Check if the attributed network as been run through the Generate Network Attributes tool.
    # NOTE(review): AddError does not halt execution here; processing continues
    # even when the '_edgetype_' field is missing -- confirm this is intended.
    if findField(fcInputAttrbNetworkTemp, "_edgetype_"):
        pass
    else:
        arcpy.AddError("The attributed network input is missing the '_edgetype_' field. Please run the "
                       "network through the Generate Network Attributes tool before running this tool.")

    # Braid-to-braid nodes: self-intersections of the dissolved braid edges.
    arcpy.AddMessage("GNAT CTT: Generating braid-to-braid nodes...")
    arcpy.MakeFeatureLayer_management(fcInputAttrbNetworkTemp, "lyrInputAttrbNetworkTemp")
    arcpy.SelectLayerByAttribute_management("lyrInputAttrbNetworkTemp","NEW_SELECTION", """ "_edgetype_" = 'braid' """)
    arcpy.Dissolve_management("lyrInputAttrbNetworkTemp", fcBraidDslv, "#", "#", "SINGLE_PART")
    arcpy.Intersect_analysis([fcBraidDslv], fcNodeBraidToBraid, "ONLY_FID", "#", "POINT")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToBraid, "lyrNodeBraidToBraid")
    arcpy.MultipartToSinglepart_management("lyrNodeBraidToBraid", fcNodeBraidToBraidSingle)
    arcpy.MakeFeatureLayer_management(fcNodeBraidToBraidSingle, "lyrNodeBraidToBraidSingle")
    arcpy.Dissolve_management("lyrNodeBraidToBraidSingle", fcNodeBraidToBraidDslv, "#", "#", "SINGLE_PART")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToBraidDslv, "lyrNodeBraidToBraidDslv")
    arcpy.AddField_management("lyrNodeBraidToBraidDslv", "NODE_TYPE", "TEXT")
    arcpy.CalculateField_management("lyrNodeBraidToBraidDslv", "NODE_TYPE", '"BB"', "PYTHON_9.3")

    # Braid-to-mainstem nodes: intersections of braid edges with segments.
    arcpy.AddMessage("GNAT CTT: Generating braid-to-mainstem nodes...")
    arcpy.Intersect_analysis([fcBraidDslv,fcInputSegments],fcNodeBraidToMainstem, "#", "#", "POINT")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToMainstem, "lyrNodeBraidToMainstem")
    arcpy.MultipartToSinglepart_management("lyrNodeBraidToMainstem", fcNodeBraidToMainstemSingle)
    arcpy.MakeFeatureLayer_management(fcNodeBraidToMainstemSingle, "lyrNodeBraidToMainstemSingle")
    arcpy.Dissolve_management("lyrNodeBraidToMainstemSingle", fcNodeBraidToMainstemDslv, "#", "#", "SINGLE_PART")
    arcpy.MakeFeatureLayer_management(fcNodeBraidToMainstemDslv, "lyrNodeBraidToMainstemDslv")
    arcpy.AddField_management("lyrNodeBraidToMainstemDslv", "NODE_TYPE", "TEXT")
    arcpy.CalculateField_management("lyrNodeBraidToMainstemDslv", "NODE_TYPE", '"BM"', "PYTHON_9.3")

    # Tributary confluence nodes: self-intersections of dissolved segments.
    arcpy.AddMessage("GNAT CTT: Generating tributary nodes...")
    arcpy.Dissolve_management("lyrInputSegments", fcSegmentDslv, "#", "#", "SINGLE_PART")
    arcpy.Intersect_analysis([fcSegmentDslv], fcNodeTribConfluence, "ONLY_FID", "#", "POINT")
    arcpy.MakeFeatureLayer_management(fcNodeTribConfluence, "lyrNodeTribConfluence")
    arcpy.MultipartToSinglepart_management("lyrNodeTribConfluence", fcNodeTribConfluenceSingle)
    arcpy.MakeFeatureLayer_management(fcNodeTribConfluenceSingle, "lyrNodeTribConfluenceSingle")
    arcpy.Dissolve_management("lyrNodeTribConfluenceSingle", fcNodeTribConfluenceDslv, "#", "#", "SINGLE_PART")
    arcpy.MakeFeatureLayer_management(fcNodeTribConfluenceDslv, "lyrNodeTribConfluenceDslv")
    arcpy.AddField_management("lyrNodeTribConfluenceDslv", "NODE_TYPE", "TEXT")
    arcpy.CalculateField_management("lyrNodeTribConfluenceDslv", "NODE_TYPE", '"TC"', "PYTHON_9.3")

    # Merge nodes feature classes together
    arcpy.AddMessage("GNAT CTT: Merge and save node feature class...")
    node_list = ["lyrNodeBraidToBraidDslv", "lyrNodeBraidToMainstemDslv", "lyrNodeTribConfluenceDslv"]
    fieldMapping = nodeFieldMap(node_list)
    arcpy.Merge_management(node_list, fcNodesAll, fieldMapping)
    arcpy.MakeFeatureLayer_management(fcNodesAll, "lyrNodesAll")

    # Spatial join nodes to segmented stream network
    arcpy.SpatialJoin_analysis("lyrInputSegments", "lyrNodesAll", fcNodesToSegments, "JOIN_ONE_TO_MANY",
                               "KEEP_COMMON", "#", "INTERSECT")

    # Summarize each node type by attribute field LineOID
    arcpy.AddMessage("GNAT CTT: Summarize nodes per stream segments...")
    arcpy.MakeFeatureLayer_management(fcNodesToSegments, "lyrNodesToSegments")

    # Spatial join each summary table as a new attribute field to final segment network
    node_types = ["BB", "BM", "TC"]
    for n in node_types:
        join_node_summary("lyrInputSegments", n, "lyrNodesToSegments", LineOID, tempWorkspace)

    arcpy.AddMessage("GNAT CTT: Processing complete.")
Esempio n. 12
0
def FAIL(sectionText, err):
    """Log a failure banner for *sectionText* to the geoprocessing messages.

    Emits the exception object and its args, the active exception's type and
    traceback line number (so this should be called from inside an ``except``
    block), and any warning/error messages arcpy has accumulated.
    """
    divider = '======================'
    arcpy.AddMessage(divider)
    arcpy.AddMessage('FAIL: {0}'.format(sectionText))
    arcpy.AddMessage('exception:')
    arcpy.AddMessage(err)
    arcpy.AddMessage(err.args)
    exc_type, _, exc_tb = sys.exc_info()
    arcpy.AddMessage(exc_type)
    arcpy.AddMessage(exc_tb.tb_lineno)
    arcpy.AddMessage('----------------------')
    arcpy.AddMessage('arcpy messages:')
    arcpy.AddMessage(arcpy.GetMessages(1))  # warnings
    arcpy.AddMessage(arcpy.GetMessages(2))  # errors
    arcpy.AddMessage(divider)
    return
Esempio n. 13
0
def createTables(surveyGDB, outWorkspace, prefix):
    '''Creates the domains, tables and relationships of the survey in the target workspace.

    surveyGDB    -- source geodatabase holding the survey schema
    outWorkspace -- target workspace that receives prefixed copies
    prefix       -- string prepended (as "<prefix>_") to every created object
    '''
    arcpy.AddMessage('\t-Creating Tables')
    arcpy.env.workspace = surveyGDB
    allTables = getSurveyTables(surveyGDB)

    dscW = arcpy.Describe(arcpy.env.workspace)
    # Migrate the domains: survey coded-value domains are prefixed 'cvd'.
    # Each one is round-tripped through an in_memory table so it can be
    # recreated (REPLACE) in the target workspace.
    arcpy.AddMessage('\t\t-Creating Domains')
    for domainName in dscW.domains:
        if domainName[0:3] == 'cvd':
            # BUG FIX: was '\t\t\t-'.format(domainName), which has no
            # placeholder and silently dropped the domain name.
            arcpy.AddMessage('\t\t\t-{0}'.format(domainName))
            tempTable = 'in_memory\\{0}'.format(domainName)
            arcpy.DomainToTable_management(surveyGDB, domainName, tempTable, 'CODE', 'DESC')
            arcpy.TableToDomain_management(tempTable, 'CODE', 'DESC', outWorkspace, domainName, update_option='REPLACE')
            arcpy.Delete_management(tempTable)

    arcpy.AddMessage("\t\t-Creating Feature Classes & Tables")
    for table in allTables:
        dsc = arcpy.Describe(table)
        newTableName = "{0}_{1}".format(prefix, table)
        # BUG FIX: was "templateTable = template=os.path.join(...)", an
        # accidental chained assignment that also bound a stray 'template' name.
        templateTable = os.path.join(surveyGDB, table)

        if dsc.datatype == u'FeatureClass':
            newTable = arcpy.CreateFeatureclass_management(outWorkspace, newTableName, "POINT", template=templateTable, spatial_reference=dsc.spatialReference)
        else:
            newTable = arcpy.CreateTable_management(outWorkspace, newTableName, template=templateTable)
        arcpy.AddMessage("\t\t\t-Created {0}".format(newTableName))

        # Re-attach any domains the source fields used (templates copy the
        # schema but not domain assignments across workspaces).
        tableFields = arcpy.ListFields(table)
        for field in tableFields:
            if field.domain != '':
                arcpy.AssignDomainToField_management(newTable, field.name, field.domain)
        # Enterprise (remote) geodatabases need the new table registered as versioned.
        if dscW.workspaceType == "RemoteDatabase":
            arcpy.RegisterAsVersioned_management(newTable)

    arcpy.AddMessage('\t\t-Creating Relationships')
    # Reconnect relationship classes, checking for attachments.
    CARDINALITIES = {
        'OneToOne': "ONE_TO_ONE",
        'OneToMany': "ONE_TO_MANY",
        'ManyToMany': "MANY_TO_MANY"
    }

    for child in [(c.name, c.datatype) for c in dscW.children if c.datatype == u'RelationshipClass']:
        dscRC = arcpy.Describe(child[0])
        RCOriginTable = dscRC.originClassNames[0]
        RCDestTable = dscRC.destinationClassNames[0]
        newOriginTable = "{0}_{1}".format(prefix, RCOriginTable)
        newOriginPath = os.path.join(outWorkspace, newOriginTable)
        if dscRC.isAttachmentRelationship:
            # Simple case - attachments have a dedicated tool
            arcpy.EnableAttachments_management(newOriginPath)
        else:
            newDestTable = "{0}_{1}".format(prefix, RCDestTable)
            newDestPath = os.path.join(outWorkspace, newDestTable)
            newRC = os.path.join(outWorkspace, "{0}_{1}".format(prefix, child[0]))
            relationshipType = "COMPOSITE" if dscRC.isComposite else "SIMPLE"
            # Fall back to Survey123-style labels when none are defined.
            fwd_label = dscRC.forwardPathLabel if dscRC.forwardPathLabel != '' else 'Repeat'
            bck_label = dscRC.backwardPathLabel if dscRC.backwardPathLabel != '' else 'MainForm'
            msg_dir = dscRC.notification.upper()
            cardinality = CARDINALITIES[dscRC.cardinality]
            attributed = "ATTRIBUTED" if dscRC.isAttributed else "NONE"
            # Map {role: key field} so primary/foreign keys can be looked up by role.
            originClassKeys = {key[1]: key[0] for key in dscRC.originClassKeys}
            originPrimaryKey = originClassKeys[u'OriginPrimary']
            originForeignKey = originClassKeys[u'OriginForeign']
            arcpy.CreateRelationshipClass_management(newOriginPath, newDestPath, newRC, relationshipType, fwd_label, bck_label, msg_dir, cardinality, attributed, originPrimaryKey, originForeignKey)
Esempio n. 14
0
def getReplica(token, serviceURL, serviceInfo, now, outDir=None, outDB="outSurvey.zip", lastSync=None):
    '''Downloads the full replica of a feature service and processes it client-side.

    token       -- valid ArcGIS token for the service
    serviceURL  -- feature service root URL
    serviceInfo -- service definition JSON (layers, tables, syncCapabilities)
    now         -- timestamp supplied by the caller (unused here; kept for API compatibility)
    outDir      -- download directory; a temp directory is created when None
    outDB       -- filename for the downloaded replica zip
    lastSync    -- unused here; kept for API compatibility
    Returns the path to the extracted file geodatabase directory.
    '''
    # See http://resources.arcgis.com/en/help/arcgis-rest-api/#/Create_Replica/02r3000000rp000000/
    arcpy.AddMessage('\t-Getting Replica')
    createReplicaURL = '{0}/createReplica/?f=json&token={1}'.format(serviceURL, token)
    # World extent, attachments embedded in the file gdb, asynchronous job,
    # no sync registration ("none").
    replicaParameters = {
        "geometry": "-180,-90,180,90",
        "geometryType": "esriGeometryEnvelope",
        "inSR": 4326,
        "transportType": "esriTransportTypeUrl",
        "returnAttachments": True,
        "returnAttachmentsDatabyURL": False,
        "async": True,
        "syncModel": "none",
        "dataFormat": "filegdb",
    }
    if "syncCapabilities" in serviceInfo:
        if serviceInfo["syncCapabilities"]["supportsAttachmentsSyncDirection"] == True:
            replicaParameters["attachmentsSyncDirection"] = "bidirectional"
    # Request every layer and every table in the service.
    layerList = [str(l["id"]) for l in serviceInfo["layers"]]
    layerList.extend(str(t["id"]) for t in serviceInfo["tables"])
    replicaParameters["layers"] = ", ".join(layerList)
    createReplReq = urllib2.urlopen(createReplicaURL, urllib.urlencode(replicaParameters))

    # This is asynchronous, so we get a jobId to check periodically for completion.
    thisJob = json.loads(createReplReq.read())
    if "statusUrl" not in thisJob:
        raise Exception("invalid job: {0}".format(thisJob))
    jobUrl = thisJob["statusUrl"]
    resultUrl = ""
    # Check for a max 1000 times (10000 sec = 2hr 46 min)
    sanityCounter = 1000
    while resultUrl == "":
        checkReq = urllib2.urlopen("{0}?f=json&token={1}".format(jobUrl, token))
        statusText = checkReq.read()
        arcpy.AddMessage(statusText)
        status = json.loads(statusText)
        if "resultUrl" in status.keys():
            resultUrl = status["resultUrl"]
        if sanityCounter < 0:
            raise Exception('took too long to make replica')
        if status["status"] == "Failed" or status["status"] == "CompletedWithErrors":
            raise Exception('Create Replica Issues: {0}'.format(status["status"]))
        arcpy.AddMessage('\t\t-Check {0}: {1}'.format(str(1001 - sanityCounter), status["status"]))
        sanityCounter = sanityCounter - 1
        time.sleep(10)
    # Download the file geodatabase zip produced by the job.
    resultReq = urllib2.urlopen("{0}?token={1}".format(resultUrl, token))
    if outDir is None:  # FIX: identity comparison for None (was '== None')
        outDir = tempfile.mkdtemp()
    arcpy.AddMessage('\t-Temporary Directory: {0}'.format(outDir))
    outFile = os.path.join(outDir, outDB)
    with open(outFile, 'wb') as output:
        output.write(resultReq.read())

    # Unzip and return the extracted .gdb directory path; the gdb directory
    # name is the first path component of any zipped member.
    with zipfile.ZipFile(outFile, 'r') as zipGDB:
        surveyGDB = zipGDB.namelist()[0].split('/')[0]
        zipGDB.extractall(outDir)
    print(surveyGDB)
    return os.path.join(outDir, surveyGDB)
# ---------------------------------------------------------------------------

# Import arcpy module
import os
import arcpy


# Dynamic variables:
rasterFolder = arcpy.GetParameterAsText(0)  # folder containing the rasters to project

# Set the workspace environment to the input raster folder
arcpy.env.workspace = rasterFolder

rasters = arcpy.ListRasters()

# Target spatial reference supplied as a parameter object, serialized for
# DefineProjection.
spatialReference = arcpy.GetParameter(1)
projection = spatialReference.exportToString()

# Process: Define Projection on every raster found in the folder.
# BUG FIX: the original tested 'rasters != None', so an empty list fell into
# the "N rasters found" branch; a simple truthiness test handles both the
# None and the empty-list cases.
if rasters:
    tot = len(rasters)
    arcpy.AddMessage("{0} rasters found in input folder".format(str(tot)))
    i = 0.0
    for raster in rasters:
        i = i + 1.0
        arcpy.DefineProjection_management(raster, projection)
        # Report progress as a percentage of rasters processed.
        msg = "{0:.2%} : Projection for raster {1} was set.".format((i / tot), str(raster))
        arcpy.AddMessage(msg)
else:
    arcpy.AddWarning("No rasters found in input folder {0}".format(str(rasterFolder)))
Esempio n. 16
0
#////////////////////////////Getting Input Parameters//////////////////////////
out_folder_path = arcpy.GetParameterAsText(
    0)  # The folder containing the exported files
exc = arcpy.GetParameterAsText(1)  # Excel file with point_x/point_y/probability columns
train_1 = arcpy.GetParameterAsText(
    2)  # Train data, located in Rec_folder as train_1.shp
test_1 = arcpy.GetParameterAsText(
    3)  # Validation data, located in Rec_folder as test_1.shp
koordinat = arcpy.GetParameterAsText(4)  # Coordinate system of the map
raster_name = arcpy.GetParameterAsText(5)  # The name of the LSM (landslide susceptibility map)
cell_size = arcpy.GetParameterAsText(6)  # Output cell size
field = arcpy.GetParameterAsText(
    7
)  # Probability field name: the column holding probability values. Default is "ones".
#////////////////////////////////////Starting Analysis/////////////////////////
arcpy.AddMessage(field)
arcpy.env.workspace = out_folder_path
arcpy.CreateFileGDB_management(out_folder_path, "g.gdb")
arcpy.AddMessage("{} file is imported".format(exc))

# Import the Excel sheet into the new file geodatabase.
arcpy.ExcelToTable_conversion(exc, "g.gdb")

# NOTE(review): the Excel data was imported into "g.gdb" above, but the XY
# event layer reads from "g.dbf" — confirm both refer to the same table.
arcpy.MakeXYEventLayer_management("g.dbf", "point_x", "point_y", "deneme",
                                  koordinat, field)

# Rasterize the event layer on the probability field to produce the LSM.
arcpy.FeatureToRaster_conversion("deneme", field, raster_name, cell_size)
arcpy.AddMessage("Susceptibility map is saved as {}".format(raster_name))
#///////////////////Calculating AUC Values/////////////////////////////////////
arcpy.AddMessage("ROC is calculating")
# Maximum raster value, used to scale the ROC thresholds.
mx = float(
    arcpy.GetRasterProperties_management(raster_name, "MAXIMUM").getOutput(0))
Esempio n. 17
0
def RunTest():
    """Smoke-test TestRangeFanByBearingLimits: validates the input points,
    runs the Range Fan By Bearing Limits model, and exits with code -1
    unless the output feature class contains at least one feature.

    FIX: the Python-2-only 'print' statements are a SyntaxError under
    Python 3; the parenthesized single-argument form below behaves
    identically on both interpreters.
    """
    try:
        arcpy.AddMessage("Starting Test: TestRangeFanByBearingLimits")

        # WORKAROUND
        print("Creating New Scratch Workspace (Workaround)")
        TestUtilities.createScratch()

        inputPointsFC = os.path.join(TestUtilities.inputGDB,
                                     "sampleRangePoints")
        outputRangeFansFC = os.path.join(TestUtilities.outputGDB,
                                         "RangeFansByBearingLimits")
        toolbox = TestUtilities.toolbox

        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))

        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "Range")

        inputFeatureCount = int(
            arcpy.GetCount_management(inputPointsFC).getOutput(0))
        print("Input FeatureClass: " + str(inputPointsFC))
        print("Input Feature Count: " + str(inputFeatureCount))

        if (inputFeatureCount < 1):
            print("Invalid Input Feature Count: " + str(inputFeatureCount))

        # Model parameters under test.
        maximumRangeInMeters = 2000
        leftBearingInDegrees = 45
        rightBearingInDegrees = 75

        ########################################################3
        # Execute the Model under test:
        arcpy.RangeFanByBearingLimits_Range(inputPointsFC,
                                            maximumRangeInMeters,
                                            leftBearingInDegrees,
                                            rightBearingInDegrees,
                                            outputRangeFansFC)
        ########################################################3

        # Verify the results
        outputFeatureCount = int(
            arcpy.GetCount_management(outputRangeFansFC).getOutput(0))
        print("Output FeatureClass: " + str(outputRangeFansFC))
        print("Output Feature Count: " + str(outputFeatureCount))

        if (outputFeatureCount < 1):
            print("Invalid Output Feature Count: " + str(outputFeatureCount))
            raise Exception("Test Failed")

        # WORKAROUND: delete scratch db
        print("Deleting Scratch Workspace (Workaround)")
        TestUtilities.deleteScratch()

        print("Test Successful")

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)
Esempio n. 18
0
# Output/field name constants used later in the broadband coverage workflow.
terrifix_dissolve = "terrifix_dissolve"
TERRIFIXMAX = "TERRIFIXMAX"

# Process: Make Feature Layer
# Work on a copy so the source metrics feature class is left untouched.
# NOTE(review): CopyFeatures_management writes to the string "baseMetricsMove",
# while MakeFeatureLayer_management below reads the bare name baseMetricsMove —
# confirm that variable is defined earlier in the full script.
arcpy.CopyFeatures_management(baseMetrics, "baseMetricsMove")

arcpy.MakeFeatureLayer_management(
    baseMetricsMove, "baseMetricsLayer", "", workspace,
    "OBJECTID OBJECTID VISIBLE NONE;Shape Shape VISIBLE NONE;FID_plssMerge_Dissolve FID_plssMerge_Dissolve VISIBLE NONE;OIT_ID OIT_ID VISIBLE NONE;FID_censusid FID_censusid VISIBLE NONE;COUNTYFP10 COUNTYFP10 VISIBLE NONE;GEOID10 GEOID10 VISIBLE NONE;POP100 POP100 VISIBLE NONE;HU100 HU100 VISIBLE NONE;HseHolds HseHolds VISIBLE NONE;Adjusted_HseHolds Adjusted_HseHolds VISIBLE NONE;FREQUENCY FREQUENCY VISIBLE NONE;Shape_Length Shape_Length VISIBLE NONE;Shape_Area Shape_Area VISIBLE NONE"
)

# Process: select 25/3 wireline
# Wireline records advertising at least 25 Mbps down / 3 Mbps up, excluding
# end-user category '2'.
arcpy.Select_analysis(
    plss, "wirelineSelect",
    "MAXADDOWN >= 25 AND MAXADUP >=  3 AND ENDUSERCAT <> '2'")
arcpy.AddMessage(
    "Selected all wireline meeting 25mbps download and 3mbps upload.")

# Process: select 25/3 fixed wireless
# TRANSTECH 70-71 selects the fixed-wireless technology codes.
arcpy.Select_analysis(
    wireless, "wirelessSelect",
    "TRANSTECH BETWEEN 70 AND 71 AND MAXADDOWN >= 25 AND MAXADUP >=  3 AND ENDUSERCAT <> '2'"
)
arcpy.AddMessage(
    "Selected all wireless meeting 25mbps download and 3mbps upload.")

# Process: Spatial Join
# Attach wireless advertised speeds to the PLSS quarter-quarter polygons.
arcpy.SpatialJoin_analysis(OIT_PLSS_QQ, wirelessSelect, "wirelessSpatialJoin",
                           "JOIN_ONE_TO_ONE", "KEEP_COMMON")
arcpy.AddMessage(
    "Spatial joined wireless maximum advertised speeds into 'OIT_PLSS_QQ'.")
Esempio n. 19
0
# Extract OSM amenity layers (schools, hospitals, places of worship) for a
# single country into a freshly created file geodatabase.
import arcpy

arcpy.env.overwriteOutput = True

gdb_folder = arcpy.GetParameterAsText(0)   # folder that will hold the new GDB
osm_points = arcpy.GetParameterAsText(1)   # OSM points shapefile
zone_shp = arcpy.GetParameterAsText(2)     # country polygons shapefile
gdb_name = arcpy.GetParameterAsText(3)     # name for the new GDB (no extension)

arcpy.CreateFileGDB_management(gdb_folder, gdb_name + '.gdb')
arcpy.AddMessage('Created new File GDB: {}.gdb'.format(gdb_name))
arcpy.env.workspace = gdb_folder + "\\" + gdb_name + '.gdb'

amenities = ['school', 'hospital', 'place_of_worship']
country = arcpy.GetParameterAsText(4)      # country name to extract

# Clip the OSM points down to the selected country polygon.
arcpy.MakeFeatureLayer_management(zone_shp, 'zoneclip', '"NAME" = ' + "'" + country + "'")
arcpy.Clip_analysis(osm_points, 'zoneclip', 'clipshp')
arcpy.AddMessage('Objects are cut for a given area ({})'.format(country))

# One output feature class per amenity type, tagged with source and OSM id.
for amenity in amenities:
    arcpy.MakeFeatureLayer_management('clipshp', 'clip', '"amenity" = ' + "'" + amenity + "'")
    out_name = 'zones_' + amenity
    arcpy.CopyFeatures_management('clip', out_name)
    arcpy.AddField_management(out_name, 'source', 'TEXT')
    arcpy.AddField_management(out_name, 'GID', 'DOUBLE')
    with arcpy.da.UpdateCursor(out_name, ['source', 'GID', 'id']) as cursor:
        for row in cursor:
            row[0] = "OpenStreetMap"
            row[1] = row[2]
            cursor.updateRow(row)
    arcpy.AddMessage('Created file for location ' + amenity)

# The intermediate country-wide clip is no longer needed.
arcpy.Delete_management('clipshp')
Esempio n. 20
0
    arcpy.SetProgressor('step', "6_7_数据检查", 0, count, 1)
    targetValueList, delTargetValueList = check(datas)

    arcpy.AddMessage("6_7_生成输出图层")
    createLayer(targetpath, outname, tempoutname, delTargetValueList)

    arcpy.AddMessage("6_7_更新")
    result = arcpy.GetCount_management(targetpath)
    count = int(result.getOutput(0))
    arcpy.SetProgressor('step', "6_7_更新", 0, count, 1)
    UpdateDatas(outname, targetValueList, delTargetValueList)


if __name__ == "__main__":

    # Progress message (Chinese; left untranslated because it is runtime output).
    arcpy.AddMessage("6_7_开始获取变化类型为3的图斑")

    # Script-tool parameters: target layer, photo path, workspace environment.
    targetpath = arcpy.GetParameterAsText(0)
    photopath = arcpy.GetParameterAsText(1)
    enviroment = arcpy.GetParameterAsText(2)

    # Fixed output names for the result layer and its scratch copy.
    outname = "output_6_7"
    tempoutname = "output_6_7_temp"

    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = enviroment

    # start() presumably drives the check/create/update steps defined earlier
    # in the full script — not visible in this fragment.
    start(targetpath, photopath, outname, tempoutname)

    # Hand the produced layer back to the script tool as the derived output.
    arcpy.SetParameterAsText(3, outname)
# Resolve the python interpreter and main script of the Metadata Wizard.
# python_dir / pymdwiz_dir / installDir / WorkingDir are set earlier in the
# full script (not visible in this fragment).
python_exe = os.path.join(python_dir, 'pythonw.exe')
arcpy.AddWarning("python_exe :" + python_exe)

mdwiz_py_fname = os.path.join(pymdwiz_dir, 'pymdwizard', 'MetadataWizard.py')
arcpy.AddWarning("mdwiz_py_fname :" + mdwiz_py_fname)

# WGS 1984 projection file shipped with the install, used as a reference SR.
WGS84file = os.path.join(installDir, "WGS 1984.prj")

#Check/create the working directory at the user-specified location. This is also checked for in the toolbox validation script.
if not os.path.exists(WorkingDir):
    try:
        os.makedirs(WorkingDir)
    except:
        arcpy.AddMessage(
            "The user-specified working directory could not be located or created. Ensure that write access is granted."
        )
        sys.exit(1)

# Bail out early if either the interpreter or the main script is missing.
if not os.path.exists(python_exe):
    arcpy.AddWarning(
        "\nThe python executable associated with this version of the Wizard could not be found. Tool should be here: ("
        + python_exe + ")")
    sys.exit(1)
if not os.path.exists(mdwiz_py_fname):
    arcpy.AddWarning(
        "\nThe main python script file for this tool could not be found. Tool should be here: ("
        + mdwiz_py_fname + ")")
    sys.exit(1)

###Spatial Reference (reference objects set at global level)-----------------------------------------------
Esempio n. 22
0
		
	# Input data	
	#-----------------------------------------------------------------------------
	fcDesc = arcpy.Describe(fcBuf)
	FileGDB = os.path.dirname(fcDesc.catalogPath)

	inHUC =os.path.split(FileGDB)[1][4:16]
	
	theSR = arcpy.Describe(fcBuf).spatialReference.projectionCode
	
	bBox = getBbox(fcBuf, theSR)
	
	for Year in YearsList:
		
   	    if Year not in YrList:
   		    arcpy.AddMessage("Invalid Year...%s" % Year)
   		    sys.exit(0)
   		
   	    cSize = 30
   	
   	    for yr in list56:
   		    if yr == Year:
   			    cSize = 56
   		
   	    env.workspace = FileGDB
   	    env.scratchWorkspace = env.scratchFolder
   
   	    # Get data
   	    outTemp = getNASS(Year,bBox,cSize)
   	    
   	    Rslt = arcpy.GetRasterProperties_management(outTemp, "UNIQUEVALUECOUNT")
def _ptvct_info(sdts_type, obj_count):
    """Build the FGDC <ptvctinf> element for one SDTS object type.

    obj_count is the feature count as a *string* on success, or the integer
    0 sentinel when the count could not be obtained; in the sentinel case the
    <ptvctcnt> element is omitted entirely.
    """
    if obj_count != 0:
        return ("<ptvctinf><sdtsterm>" +
                "<sdtstype>" + sdts_type + "</sdtstype>" +
                "<ptvctcnt>" + obj_count + "</ptvctcnt>" +
                "</sdtsterm></ptvctinf>")
    return ("<ptvctinf><sdtsterm>" +
            "<sdtstype>" + sdts_type + "</sdtstype>" +
            "</sdtsterm></ptvctinf>")


def _safe_feature_count(dataset, err_msg):
    """Return str(arcpy.GetCount_management(dataset)); on any failure, log
    err_msg and return the integer 0 sentinel (meaning 'omit the count')."""
    try:
        return str(arcpy.GetCount_management(dataset))
    except:
        arcpy.AddMessage(err_msg)
        return 0


def Get_Spatial_Data_OrgInfo(InputDS, myDataType, myFeatType):
    """Assemble the FGDC Spatial Data Organization Information content for InputDS.

    myDataType -- "Raster", "Vector" or "GeometricNetwork"
    myFeatType -- "Polygon", "Polyline", "Point" or "None" ('None' applies to
        rasters, tables, feature datasets and XML files)
    Returns the concatenated <direct> + <ptvctinf>/<rastinfo> XML string, or
    None for combinations with no spatial organization (e.g. tables).

    Does not handle VPF data as this is rare/unseen.
    """
    ### Indirect ==========================================
    # Consider adding indirect spatial reference? Leave this out for now.
    # indspref = "<indspref>[Insert a descriptive location reference here]</indspref>"

    ### Direct Spatial Reference===========================
    Direct_Spatial_Reference_Method = ["Point", "Vector", "Raster"]

    DirectSpatialRef = ""
    if myDataType == "Vector":
        if myFeatType == "Point":
            DirectSpatialRef = "<direct>" + Direct_Spatial_Reference_Method[0] + "</direct>"
        else:
            DirectSpatialRef = "<direct>" + Direct_Spatial_Reference_Method[1] + "</direct>"
    elif myDataType == "Raster":
        DirectSpatialRef = "<direct>" + Direct_Spatial_Reference_Method[2] + "</direct>"
    elif myDataType == "GeometricNetwork":
        # BUG FIX: DirectSpatialRef was never assigned for network data,
        # raising NameError later. Geometric networks are vector data.
        DirectSpatialRef = "<direct>" + Direct_Spatial_Reference_Method[1] + "</direct>"

    ### Point and Vector object information=================
    if myFeatType in ["Point", "Polyline"]:
        if myFeatType == "Point":
            SDTS_Type = "Entity point"  # Usually preferred over "Point"--see meta standard
        else:
            # In most cases these will be accurate, but the user may want a
            # more specific type in some cases.
            if os.path.splitext(InputDS)[1] == ".shp":
                SDTS_Type = "String"  # shapefiles can never have topology
            else:
                SDTS_Type = "Link"  # other feature classes MAY have topology
        ObjCount = _safe_feature_count(
            InputDS,
            "Error obtaining object count for the vector data set. The count information will be left blank. \n"
        )
        return DirectSpatialRef + _ptvct_info(SDTS_Type, ObjCount)

    elif myFeatType == "Polygon":
        SDTS_Type = "G-polygon"
        ObjCount = _safe_feature_count(
            InputDS,
            "Error obtaining object count for vector (polygon) data set. The count information will be left blank. \n"
        )
        return DirectSpatialRef + _ptvct_info(SDTS_Type, ObjCount)

    elif myDataType == "GeometricNetwork":
        SDTS_Type = "Network chain, nonplanar graph"
        net_err = "Error obtaining object count for vector (line) data set. The count information will be left blank.\n"

        # Locate the Polyline feature class participating in the network.
        NetDS = ""
        NetWS = os.path.dirname(InputDS)
        desc = arcpy.Describe(InputDS)
        FClist = desc.featureClassNames  # names of all participating features
        ObjCount = 0  # BUG FIX: previously unbound when FClist was empty
        for iFClist in FClist:
            try:
                desc2 = arcpy.Describe(os.path.join(NetWS, iFClist))
                if desc2.shapeType == "Polyline":
                    NetDS = os.path.join(NetWS, iFClist)
            except:
                pass
            # NOTE: count/exists check kept inside the loop to preserve the
            # original behavior of re-counting (and re-logging) per iteration.
            if arcpy.Exists(NetDS):
                ObjCount = _safe_feature_count(NetDS, net_err)
            else:
                arcpy.AddMessage(net_err)
                ObjCount = 0
        return DirectSpatialRef + _ptvct_info(SDTS_Type, ObjCount)

    ### Raster object information ================================
    elif myDataType == "Raster":
        # Raster_Object_Type = ["Point", "Pixel", "Grid Cell", "Voxel"] (options)
        RasterType = "Grid Cell"  # This is the most probable answer
        try:
            RowCount = str(arcpy.GetRasterProperties_management(InputDS, "ROWCOUNT"))
            ColCount = str(arcpy.GetRasterProperties_management(InputDS, "COLUMNCOUNT"))
            BandCount = str(arcpy.GetRasterProperties_management(InputDS, "BANDCOUNT"))
            ROI = ("<rastinfo>" +
                   "<rasttype>" + RasterType + "</rasttype>" +
                   "<rowcount>" + RowCount + "</rowcount>" +
                   "<colcount>" + ColCount + "</colcount>" +
                   "<vrtcount>" + BandCount + "</vrtcount>" +
                   "</rastinfo>")
        except:
            arcpy.AddMessage(
                "Error obtaining row/column count information for the raster data set. The count information will be left blank.\n"
            )
            # Omit row, column, and band count if unable to extract.
            ROI = ("<rastinfo>" +
                   "<rasttype>" + RasterType + "</rasttype>" +
                   "</rastinfo>")
        return DirectSpatialRef + ROI
    # Other combinations (e.g. tables, feature datasets) have no content:
    # fall through and implicitly return None, as before.
Esempio n. 24
0
# Quote the path arguments so paths containing spaces survive the shell.
dem_grid_file = '"' + dem_grid_file + '"'
dpsi_raster_file = '"' + dpsi_raster_file + '"'
py_script_to_execute = os.path.join(this_script_dir, 'RoadSurfaceErosion.py')
py_script_to_execute = '"' + py_script_to_execute + '"'
# Assemble the command line for the external RoadSurfaceErosion.py run.
cmd = py_script_to_execute + \
      ' --dp ' + dp_shapefile + \
      ' --rd ' + rd_shapefile + \
      ' --mdb ' + graip_db_file + \
      ' --z ' + dem_grid_file + \
      ' --dpsi ' + dpsi_raster_file

# Optional stream-connected flag from the tool checkbox parameter.
if str(is_stream_connected) == 'true':
    cmd += ' --sc '

# show executing command
arcpy.AddMessage('\nEXECUTING COMMAND:\n' + cmd)

# Capture the contents of shell command and print it to the arcgis dialog box
# NOTE(review): shell=True with a string-built command is risky if any input
# path can contain shell metacharacters; consider a list argv with shell=False.
process = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
arcpy.AddMessage('\nProcess started:\n')
start_message = "Please wait a few minutes. Computation is in progress ..."
arcpy.AddMessage('\n' + start_message + '\n')
# communicate() blocks until the child finishes and returns its stdout.
streamdata = process.communicate()[0]

# Relay each line of the child's output to the geoprocessing messages.
messages = streamdata.split("\n")
for msg in messages:
    arcpy.AddMessage(msg)
if process.returncode == 0:
    # join roadlines shape file with the roadline graip database table
# arcpy.env.cellSize =Output_Template_Raster

# OutputPrefix: string prepended to every generated .asc filename.
OutputPrefix = arcpy.GetParameterAsText(3)

# Preamble
arcpy.env.workspace = Input_Raster_Folder

# Save the current snap/extent environments before overriding them with the
# template raster so every output aligns to the template grid.
tempEnvironment0 = arcpy.env.snapRaster
arcpy.env.snapRaster = Output_Template_Raster
tempEnvironment1 = arcpy.env.extent
arcpy.env.extent = Output_Template_Raster


# Process Layers: resample each raster to the template cell size, then export
# it as an ESRI ASCII grid.
rList = arcpy.ListRasters()
count_rasters = len(rList)
i = 1
for r in rList:
    arcpy.AddMessage(
        "Processing Raster layer {0} of {1}.".format(i, count_rasters))
    # Process: Resample
    # NOTE(review): cellRaster is defined earlier in the full script (not
    # visible in this fragment) — presumably the template cell size.
    arcpy.Resample_management(r, "temp{0}".format(r), cellRaster, "MAJORITY")

    # Process: Raster to ASCII
    arcpy.RasterToASCII_conversion("temp{0}".format(
        r), "{1}\\{2}{0}.asc".format(r, Output_Raster_Folder, OutputPrefix))
    i = i+1
    arcpy.AddMessage("New layer {0}{1}.asc created.".format(OutputPrefix, r))
arcpy.AddMessage("ASCii files created in {0}".format(Output_Raster_Folder))
                ################################################################

                if end_points == "START":
                    insert.insertRow((start, oid, 0))

                elif end_points == "END":
                    insert.insertRow((end, oid, length))

                elif end_points == "BOTH":
                    insert.insertRow((start, oid, 0))
                    insert.insertRow((end, oid, length))

                arcpy.SetProgressorPosition()

            except Exception as e:
                arcpy.AddMessage(str(e.message))

                ################################################################

# The OID field name of the input polyline feature class, used as the join key.
line_keyfield = str(arcpy.ListFields(polyline, "", "OID")[0].name)

mem_point_fl = arcpy.MakeFeatureLayer_management(mem_point, "Points_memory")

# Join the generated endpoint records back to their source lines via LineOID.
arcpy.AddJoin_management(mem_point_fl, "LineOID", polyline, line_keyfield)

# If the caller requested an in_memory result, hand the joined layer back as
# the derived output parameter; otherwise persist it to the output path.
if "in_memory" in output:
    arcpy.SetParameter(8, mem_point_fl)

else:
    arcpy.CopyFeatures_management(mem_point_fl, output)
def RunTest():
    """Smoke-test the Range Rings model: validates the inputs, runs the tool,
    and exits with code -1 unless both outputs contain features."""
    try:
        arcpy.AddMessage("Starting Test: RangeRings")

        # WORKAROUND
        print("Creating New Scratch Workspace (Workaround)")
        TestUtilities.createScratch()

        samplePoints = os.path.join(TestUtilities.inputGDB,
                                    "sampleRangePoints")
        ringsOutput = os.path.join(TestUtilities.outputGDB, "RangeRings")
        radialsOutput = os.path.join(TestUtilities.outputGDB, "RangeRadials")
        toolbox = TestUtilities.toolbox

        # Check For Valid Input
        for candidate in (samplePoints, toolbox):
            desc = arcpy.Describe(candidate)
            if desc == None:
                raise Exception("Bad Input")
            print("Valid Object: " + desc.Name)

        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))

        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "InC")

        pointCount = int(arcpy.GetCount_management(samplePoints).getOutput(0))
        print("Input FeatureClass: " + str(samplePoints))
        print("Input Feature Count: " + str(pointCount))

        if pointCount < 1:
            print("Invalid Input Feature Count: " + str(pointCount))

        # Tool parameters under test.
        ringCount = 5
        ringSpacing = 1000.0
        units = "METERS"
        radialCount = 8

        # Execute the Model under test:
        arcpy.RangeRings_InC(samplePoints, ringCount, ringSpacing, units,
                             radialCount, ringsOutput, radialsOutput)

        # Verify the results
        ringFeatures = int(
            arcpy.GetCount_management(ringsOutput).getOutput(0))
        print("Output FeatureClass: " + str(ringsOutput))
        print("Output Feature Count: " + str(ringFeatures))

        radialFeatures = int(
            arcpy.GetCount_management(radialsOutput).getOutput(0))
        print("Output FeatureClass: " + str(radialsOutput))
        print("Output Feature Count: " + str(radialFeatures))

        if ringFeatures < 1 or radialFeatures < 1:
            print("Invalid Output Feature Count: " +
                  str(ringFeatures) + ":" +
                  str(radialFeatures))
            raise Exception("Test Failed")

        # WORKAROUND: delete scratch db
        print("Deleting Scratch Workspace (Workaround)")
        TestUtilities.deleteScratch()

        print("Test Successful")

    except arcpy.ExecuteError:
        # Surface the tool's own messages and signal failure to the caller.
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Re-package arbitrary Python errors for the geoprocessing window.
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)
Esempio n. 28
0
# Script Name: DinfDistDown
#
# Created By:  David Tarboton
# Date:        9/29/11

# Import ArcPy site-package and os modules
import arcpy
import os
import subprocess

# Inputs
# Each layer parameter is resolved to its on-disk catalog path via Describe,
# then echoed to the geoprocessing messages.
inlyr = arcpy.GetParameterAsText(0)
desc = arcpy.Describe(inlyr)
ang = str(desc.catalogPath)  # D-Infinity flow direction grid path
arcpy.AddMessage("\nInput D-Infinity Flow Direction Grid: " + ang)

inlyr1 = arcpy.GetParameterAsText(1)
desc = arcpy.Describe(inlyr1)
fel = str(desc.catalogPath)  # pit-filled elevation grid path
arcpy.AddMessage("Input Pit Filled Elevation Grid: " + fel)

inlyr2 = arcpy.GetParameterAsText(2)
desc = arcpy.Describe(inlyr2)
src = str(desc.catalogPath)  # stream raster grid path
arcpy.AddMessage("Input Stream Raster Grid: " + src)

# Statistical/distance method choices passed through to the TauDEM tool.
statisticalmethod = arcpy.GetParameterAsText(3)
arcpy.AddMessage("Statistical Method: " + statisticalmethod)

distancemethod = arcpy.GetParameterAsText(4)
arcpy.AddMessage("Distance Method: " + distancemethod)
# Esempio n. 29
# 0
"""Created by: Juel Paul/Land Analytical
Date:  September 30, 2020
--------------------------------------------------------------------------------"""

# Import modules
import arcpy
from arcpy import env
import os

inGDB = arcpy.GetParameterAsText(0)

# Set workspace and environment variables
arcpy.env.workspace = inGDB
arcpy.env.overwriteOutput = True

arcpy.AddMessage("Searching the geodatabase now...")

fcList = arcpy.ListFeatureClasses("*", "point")

fcCount = len(fcList)

arcpy.AddMessage("{0} application points layers in this geodatabase.".format(fcCount))

fnamesDict = {
    "Applicant_" : "Applicant_",
    "Applicatio" : "Applicatio",
    "Applicant1" : "Applicant1",
    "Applicat_1" : "Applicat_1",
    "ApplicantLandName" : "ApplicantLandName",
    "Submission" : "Submission",
    "Site_Locat" : "Site_Locat",
def data_driven_raster_reclassify(In_Reference_Suit, In_Suit_Var,
                                  Out_Suit_Prox, Invert_Boolean):
    """Reclassify proximity to a variable layer into a 1-9 suitability raster.

    Builds a Euclidean-distance raster from ``In_Suit_Var``, computes the
    mean and standard deviation of that surface over the reference area
    (``In_Reference_Suit``) with Zonal Statistics, and uses them to drive a
    data-driven remap: distances below the mean score 9, and each further
    quarter standard deviation drops the score by one, bottoming out at 1
    (scores are flipped by the module-level ``invert_suitability_value``
    when ``Invert_Boolean`` is set).

    Parameters:
        In_Reference_Suit -- feature class defining the study area; an
                             ``All_Same`` LONG field is added/calculated on it
        In_Suit_Var       -- layer that distances are measured from
        Out_Suit_Prox     -- output path for the reclassified raster
        Invert_Boolean    -- when true, invert the 1-9 suitability scale

    Side effects: mutates ``In_Reference_Suit`` (new field), writes and then
    deletes a temporary ``outTable``. Errors are reported with print()
    rather than re-raised (sample-script style preserved from the original).
    """
    try:
        # Progressor setup: 7 steps reported to the ArcGIS UI.
        arcpy.SetProgressor("step", "Creating Euclidean Distance raster...", 0,
                            7, 1)

        # Distance from every cell to the nearest feature of the variable layer.
        arcpy.AddMessage("Creating Euclidean Distance from Variable Layer")
        EuDist_Ra = EucDistance(In_Suit_Var)

        # Add a constant-valued field so the entire reference area forms a
        # single zone for Zonal Statistics.
        arcpy.SetProgressorLabel(
            "Appending and calculating a common field for Zonal Statistics by table..."
        )
        arcpy.SetProgressorPosition()
        arcpy.AddMessage(
            "Adding and Calculating Common Field for Zonal Statistics by Table"
        )
        Added_Field_st1 = arcpy.AddField_management(In_Reference_Suit,
                                                    "All_Same", "LONG")
        Calced_Field_st2 = arcpy.CalculateField_management(
            Added_Field_st1, "All_Same", 1, "PYTHON")

        # Feature layer wrapping the (now single-zone) reference features.
        arcpy.SetProgressorLabel(
            "Making Reference Feature Layer with new Field...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Making Variable Feature Layer with new Field")
        Zonal_Input = arcpy.MakeFeatureLayer_management(Calced_Field_st2)

        # One-zone statistics of the distance raster; MEAN and STD drive the
        # remap breakpoints below.
        arcpy.SetProgressorLabel(
            "Calculating Zonal Statistics for remap table...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Calculating Zonal Statistics")
        Zonal_Stat_Prox = ZonalStatisticsAsTable(Zonal_Input, "All_Same",
                                                 EuDist_Ra, "outTable")

        arcpy.SetProgressorLabel(
            "Declaring Cursors to read Zonal Statistics table...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Declaring cursors to read Zonal Statistics table")
        # FIX: the original opened two SearchCursors and never closed them,
        # leaking table locks. One cursor in a `with` block reads both fields
        # from the single row (one zone => one row).
        with arcpy.da.SearchCursor(Zonal_Stat_Prox, ["STD", "MEAN"]) as cursor:
            Std_Dev, Mean = next(cursor)
        Qrt_StD = Std_Dev / 4  # one quarter standard deviation
        arcpy.AddMessage("Retrieved Mean of {0} and Std Dev of {1}".format(
            Mean, Std_Dev))

        arcpy.SetProgressorLabel(
            "Calculating Statistics for Distance Raster...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Calculating Statistics of Distance Raster")
        EuDist_Ra_wStats = arcpy.CalculateStatistics_management(EuDist_Ra)

        # The raster's maximum distance caps the final remap interval.
        arcpy.SetProgressorLabel(
            "Retrieving maximum value from value raster...")
        arcpy.SetProgressorPosition()
        Max_Value_Result = arcpy.GetRasterProperties_management(
            EuDist_Ra_wStats, "MAXIMUM")
        Max_Ra_Value = float(Max_Value_Result.getOutput(0))
        arcpy.AddMessage(
            "Maximum Raster Value of {0} is used as the final value in the remap table."
            .format(Max_Ra_Value))

        # Remap table, built programmatically instead of the original
        # hand-written nine-row literal (identical break points and scores):
        # [0, Mean] -> 9, then each quarter-std-dev band drops the score by
        # one, ending with [Mean + 7*Qrt_StD, Max + 1] -> 1.
        # Max_Ra_Value + 1 stands in for infinity (float("inf") is rejected
        # by RemapRange, per the original's note).
        breaks = ([0, Mean] +
                  [Mean + Qrt_StD * k for k in range(1, 8)] +
                  [Max_Ra_Value + 1])
        myremap = RemapRange(
            [[lo, hi, invert_suitability_value(9 - i, Invert_Boolean)]
             for i, (lo, hi) in enumerate(zip(breaks, breaks[1:]))])

        # Process: Reclassify the distance raster with the data-driven remap.
        arcpy.SetProgressorLabel("Starting Data Driven Reclassification...")
        arcpy.SetProgressorPosition()
        arcpy.AddMessage("Starting Data Driven Reclassification")
        Data_Driven_Reclass = Reclassify(EuDist_Ra_wStats, "Value", myremap)
        Data_Driven_Reclass.save(Out_Suit_Prox)

        # Finishing messages and clean up.
        output_Name = (os.path.split(Out_Suit_Prox)[1])
        arcpy.AddMessage(
            "Finished Data Driven Reclassification of {0}".format(output_Name))
        arcpy.AddMessage("Final Reclassification: {0}".format(myremap))
        arcpy.ResetProgressor()
        arcpy.Delete_management(
            Zonal_Stat_Prox
        )  # delete temporary table- edit script if you want to save it.
    except arcpy.ExecuteError:
        print(arcpy.GetMessages(2))
    except Exception as e:
        # FIX: print(e) instead of e.args[0], which raised IndexError for
        # exceptions constructed without arguments.
        print(e)