Example #1
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: TestCreateMiscCADRGMosaicDataset")
        
        toolbox = TestUtilities.toolbox
        arcpy.ImportToolbox(toolbox, "DefenseScannedMaps")
        arcpy.env.overwriteOutput = True

        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))

        webMercator = arcpy.SpatialReference("WGS 1984 Web Mercator (Auxiliary Sphere)")

        inputName = "ScannedMapsMisc_Test"
        inputMosaicDatasetFullPath = os.path.join(TestUtilities.inputGDB, inputName)
        
        if arcpy.Exists(inputMosaicDatasetFullPath):
            print("deleting: " + inputMosaicDatasetFullPath)
            arcpy.Delete_management(inputMosaicDatasetFullPath)
           
        ########################################################
        # Execute the Model under test:
        arcpy.CreateCADRGMosaicDataset_DefenseScannedMaps(TestUtilities.inputGDB, inputName, webMercator)
        ########################################################

        # Check For Valid Input
        inputFeatureCount = int(arcpy.GetCount_management(inputMosaicDatasetFullPath).getOutput(0)) 
        print("Input FeatureClass: " + str(inputMosaicDatasetFullPath))
        print("Input Feature Count: " +  str(inputFeatureCount))
        
        if inputFeatureCount > 0:
            print("Mosaic Dataset has already been created and populated")

        print("Test Successful")

    except arcpy.ExecuteError: 
        # Get the tool error messages 
        msgs = arcpy.GetMessages() 
        arcpy.AddError(msgs) 
    
        # return a system error code
        sys.exit(-1)
        
    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
    
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
    
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
    
        # return a system error code
        sys.exit(-1)
Example #2
import sys
sys.path.insert(
    0,
    r'\\srvfile01\bdgeocientifica$\Addins_Geoprocesos\MapaGeologico\scripts')

import arcpy
from configs.settings import *

arcpy.ImportToolbox(Services().FEATURE_TO_POLYGON_SERVICE_TOOLBOX,
                    "FeatureToPolygon_Service")
Example #3
    def test_HLZ_Touchdown_Points_002(self):
        ''' This test is for some of the default values in the HLZ Touchdown tool. '''
        try:
            arcpy.AddMessage("test_HLZ_Touchdown_Points_002")
            # move TestSunPositionAndHillshade code in here
            print("Importing toolbox... ")
            arcpy.ImportToolbox(TestUtilities.toolbox, "tdpoints")
            arcpy.env.overwriteOutput = True

            # Inputs
            print("Setting up inputs... ")
            inputAirframeTable = "#"
            inputAirframeString = "#"
            inputSuitableAreas = self.inputSuitableAreas
            inputSlope = self.inputSlope
            outputGeodatabase = self.outputGeodatabase
            outputCenterpoints = "#"
            outputCircles = "#"

            # Testing
            print("Running tool (HLZ Touchdown Points) ...")
            arcpy.HLZTouchdownPoints_tdpoints(inputAirframeTable,
                                              inputAirframeString,
                                              inputSuitableAreas, inputSlope,
                                              outputGeodatabase,
                                              outputCenterpoints,
                                              outputCircles)

            print("Comparing expected results...")
            # count output center points
            countCenterPoints = arcpy.GetCount_management(
                os.path.join(self.outputGeodatabase,
                             self.outputCenterpoints)).getOutput(0)
            # count output circles
            countOutputCircles = arcpy.GetCount_management(
                os.path.join(self.outputGeodatabase,
                             self.outputCircles)).getOutput(0)

            self.assertEqual(countCenterPoints, float(934))
            self.assertEqual(countOutputCircles, float(934))

            #TODO: make sure center points fall within circles

        except arcpy.ExecuteError:
            # Get the arcpy error messages
            msgs = arcpy.GetMessages()
            #TODO: need to add 'msgs' to logger
            print(msgs)

        except:
            # Get the traceback object
            tb = sys.exc_info()[2]
            tbinfo = traceback.format_tb(tb)[0]

            # Concatenate information together concerning the error into a message string
            pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n"\
                + str(sys.exc_info()[1])
            msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

            #TODO: need to add 'msgs' and 'pymsg' to logger

            # Print Python error messages for use in Python / Python Window
            print(pymsg + "\n")
            print(msgs)
Example #4
# Usage: Publish_ZMap_Grid <Input_File_Projection> <Input_File> <Summary> <Tags>
# Description:
#   Publish Geographix export ZMap Grid file to Portal as Map Tile Service
#   - Dependency: ExproDat Team-GIS Data Assistant to convert Geographix export to a tif image
# ---------------------------------------------------------------------------

import os
import tempfile
from shutil import copyfile

# Import arcpy module
import arcpy

# Load required toolboxes
arcpy.ImportToolbox(
    "C:/Users/cliang/AppData/Roaming/ESRI/Desktop10.4/ArcToolbox/My Toolboxes/Team-GIS Data Assistant.tbx"
)

# Script arguments
Input_File = arcpy.GetParameterAsText(0)

Input_File_Projection = arcpy.GetParameterAsText(1)

Tile_Format = arcpy.GetParameterAsText(2)

Levels_Of_Details = arcpy.GetParameterAsText(3)

Service_Summary = arcpy.GetParameterAsText(4)

Service_Tags = arcpy.GetParameterAsText(5)
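
# The snippet ends right after reading its arguments. Elsewhere in these examples
# (see visfromodel.py below) the generated scripts substitute a default whenever a
# parameter arrives as '#'. A minimal sketch of that pattern applied to two of the
# arguments above; the default values are placeholders, not values from the original script.
if Tile_Format == '#' or not Tile_Format:
    Tile_Format = "PNG"  # placeholder default if unspecified
if Levels_Of_Details == '#' or not Levels_Of_Details:
    Levels_Of_Details = "10"  # placeholder default if unspecified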
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: TestWriteMessageFile")

        # Prior to this, run TestTemplateConfig.py to verify the expected configuration exists

        inputPointsFC = os.path.join(TestUtilities.inputGDB,
                                     r"FriendlyOperations/FriendlyUnits")
        desc = arcpy.Describe(inputPointsFC)
        if desc == None:
            print "--> Bad Input Object: " + str(inputPointsFC)
            raise Exception('Bad Input')

        outputMessageFile = os.path.join(
            TestUtilities.outputMessagePath,
            r"Test-WriteMessageFileFromMilitaryFeatures.xml")
        outputMessageFileDebugFormat = outputMessageFile.replace(
            '.xml', '-Debug.xml')

        toolbox = TestUtilities.toolbox

        # Set environment settings
        print "Running from: " + str(TestUtilities.currentPath)
        print "Geodatabase path: " + str(TestUtilities.geodatabasePath)
        print "Message File path: " + str(TestUtilities.outputMessagePath)

        arcpy.env.overwriteOutput = True
        arcpy.ImportToolbox(toolbox, "MFT")

        ########################################################
        # Execute the Model(s) under test:
        standard = "2525"
        messageTypeField = "#"
        orderBy = "#"
        disableGeoTransform = "#"

        toolOutput = arcpy.WriteMessageFileFromMilitaryFeatures_MFT(
            inputPointsFC, outputMessageFile, standard, messageTypeField,
            orderBy, disableGeoTransform)

        disableGeoTransform = "True"

        # Also run the "Debug Format" tool (that maps everything to unknown , doesn't translate points)
        arcpy.WriteMessageFileFromMilitaryFeatures_MFT(
            inputPointsFC, outputMessageFileDebugFormat, standard,
            messageTypeField, orderBy, disableGeoTransform)
        ########################################################

        # Verify the results
        # 1: Check the expected return value
        returnedValue = toolOutput.getOutput(0)
        if (returnedValue <> outputMessageFile):
            print "Unexpected Return Value: " + str(returnedValue)
            print "Expected: " + str(outputMessageFile)
            raise Exception("Test Failed")

        #2: Check Output File Exists
        if not (arcpy.Exists(outputMessageFile)):
            print "Expected output file does not exist: " + outputMessageFile
            raise Exception("Test Failed")

        print "Test Successful"

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)
# visfromodel.py
# Created on: 2013-08-28 11:53:51.00000
#   (generated by ArcGIS/ModelBuilder)
# Usage: visfromodel <Observer_location> <Distance__value_or_field_> <Height_above_surface__meters_> <Elevation_Surface_URL> <wkid> <polygon>
# Description:
# Shows areas that are visible and not visible to observers by a point location and a distance.
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy

# Check out any necessary licenses
arcpy.CheckOutExtension("spatial")

# Load required toolboxes
arcpy.ImportToolbox("C:/agsresources/visibility/VisibilityUtilities.tbx")

# Set Geoprocessing environments
arcpy.env.mask = ""

# Script arguments
Observer_location = arcpy.GetParameterAsText(0)
if Observer_location == '#' or not Observer_location:
    Observer_location = "Database Connections\\defensesolution.sde\\defensesolution.sde.observer" # provide a default value if unspecified

Distance__value_or_field_ = arcpy.GetParameterAsText(1)
if Distance__value_or_field_ == '#' or not Distance__value_or_field_:
    Distance__value_or_field_ = "1000 Meters" # provide a default value if unspecified

Height_above_surface__meters_ = arcpy.GetParameterAsText(2)
if Height_above_surface__meters_ == '#' or not Height_above_surface__meters_:
Example #7
# ---------------------------------------------------------------------------
# Raster_It.py
# Created on: 2012-07-25 08:43:00.00000
#   (generated by ArcGIS/ModelBuilder)
# Description: 
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy

# Load required toolboxes
arcpy.ImportToolbox("Model Functions")


# Local variables:
v1890_1920 = "C:\\CHANG\\PRISM\\ppt\\PACE_Rasters\\1890-1920"
v1890_1920__2_ = "C:\\CHANG\\PRISM\\ppt\\PACE_Rasters\\1890-1920"

# Process: Iterate Rasters
arcpy.IterateRasters_mb(v1890_1920, "", "IMG", "RECURSIVE")

# Process: Mosaic To New Raster
arcpy.MosaicToNewRaster_management("C:\\CHANG\\PRISM\\ppt\\PACE_Rasters\\1890-1920\\us_ppt_1895.04.img", v1890_1920__2_, "1890-1920ppt", "", "8_BIT_UNSIGNED", "", "1", "MEAN", "FIRST")
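
# Iterate Rasters is a ModelBuilder-only iterator, so the exported call above does not
# actually loop when run as a standalone script. A minimal sketch of the equivalent
# scripted loop, assuming the same folder of IMG rasters, using arcpy.ListRasters:
import arcpy

arcpy.env.workspace = "C:\\CHANG\\PRISM\\ppt\\PACE_Rasters\\1890-1920"
# Walk the IMG rasters the model iterator would have visited.
for raster in arcpy.ListRasters("*", "IMG"):
    print(raster)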

Example #8
    def execute(self, parameters, messages):
        """The source code of the tool."""
        in_drainage_line = parameters[0].valueAsText
        rivid_field = parameters[1].valueAsText
        fromnode_field = parameters[2].valueAsText
        tonode_field = parameters[3].valueAsText
        divergence_field = parameters[4].valueAsText
        out_csv_file = parameters[5].valueAsText
        in_max_nbr_upstreams = parameters[6].value

        #check to see if a NextDownID already exists
        Next_Down_ID = "NextDownID"
        fieldList = arcpy.ListFields(in_drainage_line, Next_Down_ID)
        fieldCount = len(fieldList)
        if (fieldCount > 0):
            arcpy.AddMessage(
                "Skipping the addition of the NextDownID field as it already exists. If you want to re-calculate this field, delete it and re-run this tool ..."
            )
        else:
            arcpy.AddMessage("Adding NextDownID field ...")
            #Add NextDownID field
            result = arcpy.GetCount_management(in_drainage_line)
            number_of_features = int(result.getOutput(0))
            rivid_list = np.zeros(number_of_features, dtype=np.int32)
            fromnode_list = np.zeros(number_of_features, dtype=np.int32)
            tonode_list = np.zeros(number_of_features, dtype=np.int32)
            divergence_list = np.zeros(number_of_features, dtype=np.int32)
            field_names = [
                rivid_field, fromnode_field, tonode_field, divergence_field
            ]
            for feature_idx, row in enumerate(
                    sorted(arcpy.da.SearchCursor(in_drainage_line,
                                                 field_names))):
                rivid_list[feature_idx] = row[0]
                fromnode_list[feature_idx] = row[1]
                tonode_list[feature_idx] = row[2]
                divergence_list[feature_idx] = row[3]

            #-------------------------------------------------------------------------------
            #Compute connectivity (based on: https://github.com/c-h-david/rrr/blob/master/src/rrr_riv_tot_gen_all_nhdplus.py)
            #-------------------------------------------------------------------------------
            fromnode_list[fromnode_list == 0] = -9999
            #Some NHDPlus v1 reaches have FLOWDIR='With Digitized' but no info in VAA table

            fromnode_list[divergence_list == 2] = -9999
            #Virtually disconnect the upstream node of all minor divergences
            divergence_list = []  #don't need this anymore

            arcpy.AddField_management(in_drainage_line, Next_Down_ID, "LONG",
                                      "", "", "", "", "NULLABLE",
                                      "NON_REQUIRED", "")

            #determine the downstream reach for each reach
            with arcpy.da.UpdateCursor(in_drainage_line,
                                       [rivid_field, Next_Down_ID]) as cursor:
                for row in cursor:
                    rivid_index = np.where(rivid_list == row[0])[0][0]
                    try:
                        row[1] = rivid_list[np.where(
                            fromnode_list == tonode_list[rivid_index])[0][0]]
                    except IndexError:
                        row[1] = -1  #this is an outlet
                        pass
                    cursor.updateRow(row)

            # Delete cursor and row objects to remove locks on the data
            del row
            del cursor

            #empty remaining unnecessary lists
            rivid_list = []
            fromnode_list = []
            tonode_list = []

        #Create Network Connectivity File
        script_directory = os.path.dirname(__file__)
        arcpy.ImportToolbox(
            os.path.join(os.path.dirname(script_directory), "RAPID Tools.pyt"))
        arcpy.CreateNetworkConnectivityFile_RAPIDTools(in_drainage_line,
                                                       rivid_field,
                                                       Next_Down_ID,
                                                       out_csv_file,
                                                       in_max_nbr_upstreams)

        return
Example #9
from UpdateRunAccessibility import main as ura
from SetEndogVar import main as sev
from CreateScenario import main as cs
from CombineTransit import main as ct
import os
import arcpy

iteration = 4

skim_dir = r'I:\000JFLOOD\Cube Land\Data\2045 Forecasts\2045 LU4 Skims'
scenario_location = r'P:\MPO\40 RTP and Air Quality\2045 LRTP\08_LandUseModel_Forecasting\Land Use Model Runs\BY2015'
scenario = 'FY2045Iter4'
previous = 'FY2045Iter3'

tbx_file = r'P:\MPO\20_Data\IndyGeoTools\IndyGeoTools.tbx'
arcpy.ImportToolbox(tbx_file, 'IndyGeoTools')

scenario_dir = os.path.join(scenario_location, scenario)
previous_dir = os.path.join(scenario_location, previous)

auto_skim = os.path.join(skim_dir, 'Highway.csv')
transit_skim = os.path.join(skim_dir, 'Transit.csv')

zone_file = os.path.join(scenario_dir, r'MODEL\Inputs\ZONES.dbf')
taz_file = os.path.join(scenario_dir, r'FILES\TAZ_OUT.dbf')
endogvar_file = os.path.join(previous_dir, r'FILES\EndogVarOut.csv')

#Main script
#After skim csv creation
print 'Combining Transit Skims in {}\n'.format(skim_dir)
ct(skim_dir)
    return datetimeStart


def appendFieldAfter(fc, datetimeStart):
    ''' add the date/time field values after the change '''
    afterRows = arcpy.da.SearchCursor(fc, ["OID@", "datetimestart"])
    for row in afterRows:
        before = datetimeStart[row[0]]
        after = row[1]
        datetimeStart[row[0]] = [before, after]
    return datetimeStart


try:
    print("Importing toolbox... ")
    arcpy.ImportToolbox(TestUtilities.toolbox, "ajdustdates")
    arcpy.env.overwriteOutput = True

    #Set tool param variables
    print("Creating feature set... ")
    inputFeatures = createFeatureSet()

    # get 'datetimestart' field values and OIDs
    # {<OID>:[<before>,<after>]}
    print("getting 'before' dates... ")
    checkDates = getFieldBefore(inputFeatures)

    #Testing
    arcpy.AddMessage("Running tool: Change Sample Data Dates to Recent Dates")
    arcpy.ChangeSampleDataDatestoRecentDates_ajdustdates(inputFeatures)
import arcpy, sys, os

try:
    arcpy.ImportToolbox(
        "C:\Program Files (x86)\ET SpatialTechniques\ET GeoWizards 11.3 Concurrent for ArcGIS 10.3\ET GeoWizards.tbx"
    )
    arcpy.gp.toolbox = "C:\Program Files (x86)\ET SpatialTechniques\ET GeoWizards 11.3 Concurrent for ArcGIS 10.3\ET GeoWizards.tbx"
except:
    try:
        arcpy.ImportToolbox(
            "C:/Program Files (x86)/ET SpatialTechniques/ET GeoWizards 11.0 Concurrent for ArcGIS 10.3/ET GeoWizards.tbx"
        )
        arcpy.gp.toolbox = "C:/Program Files (x86)/ET SpatialTechniques/ET GeoWizards 11.0 Concurrent for ArcGIS 10.3/ET GeoWizards.tbx"
    except:
        arcpy.AddError(
            "\n~~~~ ~~~~ YOU MUST HAVE ET GEOWIZARDS INSTALLED AND LICENSED TO RUN THIS STEP! ~~~~ ~~~~\n"
        )
        sys.exit()

arcpy.env.overwriteOutput = True
arcpy.Delete_management("in_memory")
####### FUNCTIONS


def split_following_num(s):
    prev_char = ''
    for i, char in enumerate(s):
        if char == '_' and prev_char in '0123456789':
            return s[:i].upper()
        prev_char = char
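
# The ET GeoWizards import at the top of this example tries the 11.3 install path and
# falls back to the 11.0 path. A sketch of the same fallback written as a loop over
# candidate toolbox paths (the paths reuse the ones above and are purely illustrative):
import sys
import arcpy

candidate_toolboxes = [
    r"C:\Program Files (x86)\ET SpatialTechniques\ET GeoWizards 11.3 Concurrent for ArcGIS 10.3\ET GeoWizards.tbx",
    r"C:\Program Files (x86)\ET SpatialTechniques\ET GeoWizards 11.0 Concurrent for ArcGIS 10.3\ET GeoWizards.tbx",
]
for tbx in candidate_toolboxes:
    try:
        arcpy.ImportToolbox(tbx)
        arcpy.gp.toolbox = tbx
        break  # first toolbox that imports successfully wins
    except Exception:
        continue
else:
    arcpy.AddError("ET GeoWizards must be installed and licensed to run this step.")
    sys.exit()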
Example #12
def create_layer_file(input_items,
                      meta_folder,
                      voyager_server,
                      hdrs,
                      show_progress=False):
    """Creates a layer for input items in the appropriate meta folders."""
    created = 0
    skipped = 0
    errors = 0
    global processed_count

    for input_item in input_items:
        try:
            lyr = None
            id = input_item[0]
            path = input_item[1]
            name = input_item[2]
            location = input_item[3]
            layer_folder = os.path.join(meta_folder, id[0], id[1:4])
            lyr_mxd = arcpy.mapping.MapDocument(mxd_path)
            dsc = arcpy.Describe(path)

            # Create layer folder if it does not exist.
            if not os.path.exists(layer_folder):
                os.makedirs(layer_folder)

            if not os.path.exists(
                    os.path.join(layer_folder, '{0}.layer.lyr'.format(id))):
                # os.makedirs(layer_folder)
                try:
                    if dsc.dataType in ('FeatureClass', 'Shapefile',
                                        'ShapeFile'):
                        feature_layer = arcpy.MakeFeatureLayer_management(
                            path, os.path.basename(path))
                        lyr = arcpy.SaveToLayerFile_management(
                            feature_layer,
                            os.path.join(layer_folder,
                                         '{0}.layer.lyr'.format(id)))
                    elif dsc.dataType == 'RasterDataset':
                        raster_layer = arcpy.MakeRasterLayer_management(
                            path,
                            os.path.splitext(os.path.basename(path))[0])
                        lyr = arcpy.SaveToLayerFile_management(
                            raster_layer,
                            os.path.join(layer_folder,
                                         '{0}.layer.lyr'.format(id)))
                    elif dsc.dataType in ('CadDrawingDataset',
                                          'FeatureDataset'):
                        arcpy.env.workspace = path
                        lyr_mxd = arcpy.mapping.MapDocument(mxd_path)
                        data_frame = arcpy.mapping.ListDataFrames(lyr_mxd)[0]
                        group_layer = arcpy.mapping.ListLayers(
                            lyr_mxd, 'Group Layer', data_frame)[0]
                        for fc in arcpy.ListFeatureClasses():
                            dataset_name = os.path.splitext(
                                os.path.basename(path))[0]
                            l = arcpy.MakeFeatureLayer_management(
                                fc, '{0}_{1}'.format(dataset_name,
                                                     os.path.basename(fc)))
                            arcpy.mapping.AddLayerToGroup(
                                data_frame, group_layer, l.getOutput(0))
                        arcpy.ResetEnvironments()
                        group_layer.saveACopy(
                            os.path.join(layer_folder,
                                         '{0}.layer.lyr'.format(id)))
                        lyr = '{0}.layer.lyr'.format(id)
                    elif dsc.catalogPath.lower().endswith(
                            '.tab') or dsc.catalogPath.lower().endswith(
                                '.mif'):
                        arcpy.ImportToolbox(
                            r"C:\Program Files (x86)\DataEast\TAB Reader\Toolbox\TAB Reader.tbx"
                        )
                        lyr = arcpy.GPTabsToArcGis_TR(
                            dsc.catalogPath, False, '', True, True,
                            os.path.join(layer_folder,
                                         '{0}.layer.lyr'.format(id)))
                    else:
                        skipped += 1
                        status_writer.send_status(
                            _('Invalid input type: {0}').format(dsc.name))
                        skipped_reasons[name] = _(
                            'Invalid input type: {0}').format(dsc.dataType)
                        continue
                except arcpy.ExecuteError:
                    errors += 1
                    status_writer.send_status(arcpy.GetMessages(2))
                    errors_reasons[name] = arcpy.GetMessages(2)
                    continue
                except RuntimeError as re:
                    errors += 1
                    status_writer.send_status(re.message)
                    errors_reasons[name] = re.message
                    continue
                except AssertionError as ae:
                    status_writer.send_status(
                        _('FAIL: {0}. MXD - {1}').format(repr(ae), mxd_path))
            else:
                lyr = os.path.join(layer_folder, '{0}.layer.lyr'.format(id))
            created += 1

            # Update the index.
            if lyr:
                try:
                    update_index(path, lyr, id, name, location, voyager_server,
                                 hdrs)
                except (IndexError, ImportError) as ex:
                    status_writer.send_state(status.STAT_FAILED, ex)
                processed_count += 1
                status_writer.send_percent(
                    processed_count / result_count,
                    _('Created: {0}').format('{0}.layer.lyr'.format(id)),
                    'create_layer_file')
        except IOError as io_err:
            processed_count += 1
            status_writer.send_percent(processed_count / result_count,
                                       _('Skipped: {0}').format(input_item),
                                       'create_layer_file')
            status_writer.send_status(_('FAIL: {0}').format(repr(io_err)))
            errors_reasons[input_item] = repr(io_err)
            errors += 1
            pass
    return created, errors, skipped
Example #13
# from scripts.functionGeneral import functionCode

#sys.path.insert(0, r'\\srvfile01\bdgeocientifica$\Addins_Geoprocesos\MapaGeologico\scripts')

#sys.path.insert(0, rutaX().rutay('scripts'))
# from configs.model import *
from configs.nls import *
from configs.statics import *
from functionGeneral import *
import arcpy
import pythonaddins
import string
import threading

arcpy.env.overwriteOutput = True
arcpy.ImportToolbox(Tools().TOOLS_GEOLOGY_MAPS)


# should come out 2
class configGdb(object):
    """Implementation for addin_addin.conngdb (Button)"""
    def __init__(self):
        self.enabled = True
        self.checked = False
        self.dirname = os.path.dirname(Conexion().path)  #json
        self.connection = True if os.path.exists(Conexion().path) else False

    def onClick(self):
        ubi = pythonaddins.OpenDialog(nls().ConfigGDB().title, False, "#",
                                      nls().ConfigGDB().namebutton,
                                      lambda x: x, "Geodatabase (GDB)")
Example #14
import arcpy
import TestUtilities
import os


class LicenseError(Exception):
    pass


try:
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")

    else:
        raise LicenseError

    arcpy.ImportToolbox(TestUtilities.toolbox)
    arcpy.env.overwriteOutput = True
    arcpy.env.scratchWorkspace = TestUtilities.scratchGDB

    inputPolyArea = os.path.join(TestUtilities.inputGDB, "samplePolygonArea")
    inputRoads = os.path.join(TestUtilities.inputGDB, "roads")
    inputSurface = os.path.join(TestUtilities.inputGDB, "Jbad_SRTM_USGS_EROS")
    psOutputRV = os.path.join(TestUtilities.outputGDB, "PathSlopeOutputRV")
    classValue = '0 3 1;3 10 2;10 15 3;15 20 4;20 30 5;30 45 6;45 60 7;60 85 8;85 10000000000000 9;NODATA 0'

    #Testing Path Slope by Reclass Values
    arcpy.AddMessage("Starting Test: Path Slope by Reclass Values")
    TestUtilities.createScratch()
    arcpy.PathSlopeByRanges_path(inputPolyArea, inputRoads, inputSurface,
                                 'DEGREE', classValue, psOutputRV)
Example #15
#Imports
import sys, os, traceback
import arcpy
from arcpy import env

delete_me = []

try:

    # Load required toolboxes
    scriptpath = sys.path[0]
    #toolboxpath = os.path.join(scriptpath,"..\\Position Analysis Tools.tbx")
    #toolboxpath = os.path.join(scriptpath,"..\\Import and Conversion Tools_10.3.tbx")
    toolboxpath = os.path.join(scriptpath, "..\\Military Analyst Tools.tbx")
    #arcpy.ImportToolbox(toolboxpath)
    arcpy.ImportToolbox(toolboxpath, "ma")

    # Script arguments
    Input_Table = arcpy.GetParameterAsText(0)
    arcpy.AddMessage(Input_Table)
    if Input_Table == '#' or not Input_Table:
        Input_Table = "C:\\Workspace\\Data\\Geometry Importers\\linewizard.dbf"  # provide a default value if unspecified

    Input_Coordinate_Format = arcpy.GetParameterAsText(1)
    if Input_Coordinate_Format == '#' or not Input_Coordinate_Format:
        Input_Coordinate_Format = "DD"  # provide a default value if unspecified

    X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_ = arcpy.GetParameterAsText(
        2)
    if X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_ == '#' or not X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_:
        X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_ = "Lond"  # provide a default value if unspecified
Example #16
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 24 12:47:57 2015

@author: Ben Hudson
"""
import pylab as plt
import numpy as np
import dask
import xray
import archook  #The module which locates arcgis
archook.get_arcpy()
import arcpy  # Hopefully will cut ArcPy out of this at a later date

#Arcpy can now find TAUDEM routines
arcpy.ImportToolbox('C:\Program Files\TauDEM\TauDEM5Arc\TauDEM Tools.tbx')

# FUNCTIONS


def alterBedDEM(bed_numpy, errbed_numpy):

    #loading may not be the correct term
    # uniform distribution is maybe too aggressive, see what Morgleghem says in paper
    randomErrorLoading = np.random.uniform(-1.0, 1.0, np.shape(errbed_numpy))

    #triangle distribution
    #randomErrorLoading = np.random.triangular(-1.0,0,1.0,np.shape(errbed_numpy))

    #multiply randomErrorLoading, by the error bed
    changeBedBy = randomErrorLoading * errbed_numpy
Example #17
import arcpy
import os
from arcpy.sa import *
arcpy.ImportToolbox(r'G:\Working\ArcMap Stuff\pkRasterHelper.tbx')

#Python List uses square brackets
ages = ['Adult', 'Elders', 'Mature', 'MidAge', 'Total', 'Youth']

#Python Dictionary uses curly brackets and key: values (including nested lists and dictionaries)
suicideloc = {'I': 'Inc', 'R': 'Res'}

# Python List
scales = ['1k', '2k']

# Python List
genders = ['P', 'M', 'F']

for gender in genders:
    for scale in scales:
        for age in ages:
            for k, v in suicideloc.items():
                source_raster = r'F:\Suicide_Vs_Population\LITS_20200729\Suicide_{0}_2008-2017_{1}_KDs.gdb\Rescale_{1}_KD_Suicide_{2}_08_17_{3}_{4}'.format(
                    k, scale, v, age, gender)
                if arcpy.Exists(source_raster):
                    pop_raster = r'F:\BaseMaps\GNAF-AllGeoms-Population-ATS-2020\Population_KD_Resources.gdb\Rescale_Con_KD_ComboPopRes_{}_{}_50_{}'.format(
                        age, gender, scale)
                    min_raster = r'L:\Work\Papers\NationalHotSpots\SuicideVsPopulation\Suicide_Minuses.gdb\xMinus_{}_{}_{}_{}'.format(
                        v, age, gender, scale)
                    con_raster = r'L:\Work\Papers\NationalHotSpots\SuicideVsPopulation\Suicide_Minuses.gdb\xCon_Minus_{}_{}_{}_{}'.format(
                        v, age, gender, scale)
                    rescale_raster = r'L:\Work\Papers\NationalHotSpots\SuicideVsPopulation\Suicide_Minuses.gdb\xRescaleLin_Con_Minus_{}_{}_{}_{}'.format(
Example #18
# toolboxLocation =r'D:\TauDEM\TauDEM5Arc\TauDEM Tools.tbx', ''

print("Clipping Raster...")
polygonForClipping = path + "Avalanche.gdb/AreaPolygon"
baseDEM = path + "DEM_Tirol_5m.tif"
clippedDEM = path + "ClippedDEM" + version + ".tif"

extentFile = arcpy.Describe(polygonForClipping)
extentOfPolygon = str(extentFile.extent.XMin) + " " + str(extentFile.extent.YMin) + " " + str(extentFile.extent.XMax) + " " + str(extentFile.extent.YMax)

arcpy.Clip_management(baseDEM, extentOfPolygon, clippedDEM, polygonForClipping, "0", "ClippingGeometry", "NO_MAINTAIN_EXTENT")
print("Clip Raster successful\n")

print("Removing Pits...")
# Pit Remove
arcpy.ImportToolbox(r'D:\TauDEM\TauDEM5Arc\TauDEM Tools.tbx', '')
arcpy.PitRemove(clippedDEM, None, None, 8, rpath + "ClippedDEM" + version + "fel.tif")

pitRemovedDEM = rpath + "ClippedDEM" + version + "fel.tif"
arcpy.Delete_management(clippedDEM)
print("Pit Remove successful\n")

print("Calculating Flow Direction...")
# Dinf Flow Direction
arcpy.ImportToolbox(r'D:\TauDEM\TauDEM5Arc\TauDEM Tools.tbx', '')
arcpy.DinfFlowDir(pitRemovedDEM, 8, rpath + "ClippedDEM" + version + "ang.tif", rpath + "ClippedDEM" + version + "slp.tif")

flowDirDEM = rpath + "ClippedDEM" + version + "ang.tif"
print("Flow Direction successful\n")

# Delete .slp
Example #19
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------------
# CDU.py
# Created on: 2019-12-12 12:09:02.00000
#   (generated by ArcGIS/ModelBuilder)
# Description:
# ---------------------------------------------------------------------------

# Import arcpy module
import arcpy

# Load required toolboxes
arcpy.ImportToolbox(
    "C:/Users/ferregutie/AppData/Roaming/ESRI/Desktop10.6/ArcToolbox/My Toolboxes/URBAN.tbx"
)

# Local variables:
Coordinate_catastali = ""
json = ""
poligono = ""
output_json = ""
output_testo_PI = ""
output_json__2_ = ""
output_testo_PAT = ""

# Process: da coordinate catastali a layertemporaneo (from cadastral coordinates to a temporary layer)
arcpy.gp.toolbox = "C:/Users/ferregutie/AppData/Roaming/ESRI/Desktop10.6/ArcToolbox/My Toolboxes/URBAN.tbx"
# Warning: the toolbox C:/Users/ferregutie/AppData/Roaming/ESRI/Desktop10.6/ArcToolbox/My Toolboxes/URBAN.tbx DOES NOT have an alias.
# Please assign this toolbox an alias to avoid tool name collisions
# And replace arcpy.gp.coordinateCatastaliToLayer(...) with arcpy.coordinateCatastaliToLayer_ALIAS(...)
arcpy.gp.coordinateCatastaliToLayer(Coordinate_catastali)
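
# The generated warning above points to the usual fix: give ImportToolbox an explicit
# module name so the tool can be called directly instead of through arcpy.gp. A minimal
# sketch, assuming a hypothetical module name of "urban":
import arcpy

arcpy.ImportToolbox(
    "C:/Users/ferregutie/AppData/Roaming/ESRI/Desktop10.6/ArcToolbox/My Toolboxes/URBAN.tbx",
    "urban")
Coordinate_catastali = ""
# Imported tools are then addressed as <toolname>_<module name>.
arcpy.coordinateCatastaliToLayer_urban(Coordinate_catastali)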
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: RangeRings")

        # WORKAROUND
        print("Creating New Scratch Workspace (Workaround)")
        TestUtilities.createScratch()

        inputPointsFC = os.path.join(TestUtilities.inputGDB,
                                     "sampleRangePoints")
        outputRangeRingsFC = os.path.join(TestUtilities.outputGDB,
                                          "RangeRings")
        outputRangeRadialsFC = os.path.join(TestUtilities.outputGDB,
                                            "RangeRadials")
        toolbox = TestUtilities.toolbox

        # Check For Valid Input
        objects2Check = []
        objects2Check.extend([inputPointsFC, toolbox])
        for object2Check in objects2Check:
            desc = arcpy.Describe(object2Check)
            if desc == None:
                raise Exception("Bad Input")
            else:
                print("Valid Object: " + desc.Name)

        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))

        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "Position")

        inputFeatureCount = int(
            arcpy.GetCount_management(inputPointsFC).getOutput(0))
        print("Input FeatureClass: " + str(inputPointsFC))
        print("Input Feature Count: " + str(inputFeatureCount))

        if (inputFeatureCount < 1):
            print("Invalid Input Feature Count: " + str(inputFeatureCount))

        numberOfRings = 5
        ringInterval = 1000.0
        distanceUnits = "METERS"
        numberOfRadials = 8

        ########################################################
        # Execute the Model under test:
        arcpy.RangeRings_Position(inputPointsFC, numberOfRings, ringInterval,
                                  distanceUnits, numberOfRadials,
                                  outputRangeRingsFC, outputRangeRadialsFC)
        ########################################################

        # Verify the results
        outputFeatureCountRings = int(
            arcpy.GetCount_management(outputRangeRingsFC).getOutput(0))
        print("Output FeatureClass: " + str(outputRangeRingsFC))
        print("Output Feature Count: " + str(outputFeatureCountRings))

        outputFeatureCountRadials = int(
            arcpy.GetCount_management(outputRangeRadialsFC).getOutput(0))
        print("Output FeatureClass: " + str(outputRangeRadialsFC))
        print("Output Feature Count: " + str(outputFeatureCountRadials))

        if (outputFeatureCountRings < 1) or (outputFeatureCountRadials < 1):
            print("Invalid Output Feature Count: " +
                  str(outputFeatureCountRings) + ":" +
                  str(outputFeatureCountRadials))
            raise Exception("Test Failed")

        # WORKAROUND: delete scratch db
        print("Deleting Scratch Workspace (Workaround)")
        TestUtilities.deleteScratch()

        print("Test Successful")

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)
Example #21
# Requirements: ArcGIS Desktop Standard
#------------------------------------------------------------------------------

#Imports
import sys, os, traceback
import arcpy
from arcpy import env

delete_me = []

try:

    # Load required toolboxes
    scriptpath = sys.path[0]
    toolboxpath = os.path.join(scriptpath, "..\\Position Analysis Tools.tbx")
    arcpy.ImportToolbox(toolboxpath)

    # Script arguments
    Input_Table = arcpy.GetParameterAsText(0)
    arcpy.AddMessage(Input_Table)
    if Input_Table == '#' or not Input_Table:
        Input_Table = "C:\\Workspace\\Data\\Geometry Importers\\linewizard.dbf"  # provide a default value if unspecified

    Input_Coordinate_Format = arcpy.GetParameterAsText(1)
    if Input_Coordinate_Format == '#' or not Input_Coordinate_Format:
        Input_Coordinate_Format = "DD"  # provide a default value if unspecified

    X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_ = arcpy.GetParameterAsText(
        2)
    if X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_ == '#' or not X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_:
        X_Field__Longitude__UTM__MGRS__USNG__GARS__GeoRef_ = "Lond"  # provide a default value if unspecified
            for vb in vars_rtc:
                print(vb, file=f)
        else:
            print('No valid file.')

# run the batch files

subprocess.call([batfile_wm])
print('Watermap Extent mosaic dataset complete.')
subprocess.call([batfile_rgb])
print('RGB mosaic dataset complete.')
subprocess.call([batfile_rtc])
print('RTC mosaic dataset complete.')

# create AID packages
arcpy.ImportToolbox(r"C:\Users\ASF\Documents\COVID19\Disasters\Esri\AID_GPtools\AID_Management.pyt")
gdb = r'C:\Users\ASF\Documents\COVID19\Disasters''\\'+dirtag+'\\MosaicDatasets''\\'+projtag+'_'+today+'.gdb'

print('Generating watermap extent AID package...')
md_wm = gdb+'\\'+'watermap_extent'
aid_wm = r'C:\Users\ASF\Documents\COVID19\Disasters''\\'+dirtag+'\\AID_Packages\\'+projtag+'_WatermapExtent_'+today+'.zmd'

with arcpy.EnvManager(scratchWorkspace=r"C:\Users\ASF\Documents\COVID19\Disasters\Hurricanes\Hurricanes.gdb", workspace=r"C:\Users\ASF\Documents\COVID19\Disasters\Hurricanes\Hurricanes.gdb"):
    try:
        arcpy.AID.AIDISDP(md_wm, aid_wm, None)
    except:
        print("AID errors generated and ignored.")
        pass
print('Watermap extent AID package complete.')

print('Generating RGB AID package...')
Example #23
    def arcgis_zstat_selected_points_analysis(self, logger=defaultLogger):

        #arcpy.ImportToolbox("Model Functions")
        arcpy.ImportToolbox(TBX_LOCATION)
        arcpy.gp.toolbox = TBX_STR

        #Split points into separate files
        self.string_args['ext'] = 'dbf'
        intersectParam1 = (
            SEL_BUFFERS_90M_FILE + ' #;' + 
            (BUFFERS_FOLDER + BUFFER_FILE).format(**self.string_args) + ' #'
        )

        intersectSHP = TEMP_GRID_FOLDER.format(**self.string_args) + 'intersect_sel_lakes.shp'
        dbfFile2 = (SEL_POINTS_FOLDER + SEL_POINTS_FILE).format(**self.string_args)

        if not os.path.exists(intersectSHP):
            arcpy.Intersect_analysis(intersectParam1, intersectSHP, "ALL", "", "INPUT")

        if not os.path.exists(dbfFile2) and not os.path.exists(dbfFile2.replace('dbf','csv')):
            arcpy.AddField_management(
                intersectSHP, "Zone_FID", "LONG", "", "", "", "", "NULLABLE", "NON_REQUIRED", ""
            )
            arcpy.CalculateField_management(intersectSHP, "Zone_FID", "[FID]", "VB", "")

            arcpy.ExportXYv_stats(
                intersectSHP, "FID;SubSite;SiteCode;Count;CDOM", "COMMA",
                dbfFile2, "ADD_FIELD_NAMES"
            )

        if (not os.path.exists((SEL_SPLIT_FOLDER + 'FID_00.shp').format(**self.string_args)) 
            and not os.path.exists((SEL_SPLIT_FOLDER + 'FID_0.shp').format(**self.string_args))):

            arcpy.gp.SplitLayerByAttributes(
                intersectSHP, "FID", "FID_", SEL_SPLIT_FOLDER.format(**self.string_args)
            )

        for [band_name, band_folder] in self.band_parameters:
            self.string_args['band']=band_name
            outFolder1 = (TEMP_GRID_FOLDER + 'ext_{band}').format(**self.string_args)
            outFolder2 = (TEMP_GRID_FOLDER + 'calc_{band}').format(**self.string_args)


            #Iterate through each file created when splitting points
            for iterationFile in glob((SEL_SPLIT_FOLDER + 'FID_*.shp').format(**self.string_args)):
                FID = iterationFile.split('\\')[-1].split('.')[0]
                dbfFile1 = (
                    SEL_TEMP_DBF_FOLDER + SEL_BANDS_FILE_CALC).format(FID=FID, **self.string_args
                )
                if not os.path.exists(dbfFile1) and not os.path.exists(dbfFile1[0:-3] + 'csv'):
                    print(dbfFile1)
                    arcpy.gp.ExtractByMask_sa(band_folder, iterationFile, outFolder1)
                    arcpy.gp.RasterCalculator_sa("Int(\"{}\" * 0)".format(outFolder1), outFolder2)
                    time.sleep(5)
                    arcpy.BuildRasterAttributeTable_management(outFolder2, "NONE")
                    arcpy.gp.ZonalStatisticsAsTable_sa(
                        outFolder2, "VALUE", outFolder1, dbfFile1, "DATA", "ALL"
                    )

        logger.info('Performed selected points analysis for scene {scene}'
            .format(**self.string_args))
        return None
Example #24
tables = arcpy.ListTables()
# featureClasses = []
fds = arcpy.ListDatasets()
# arcpy.AddMessage(fds)
# for fd in fds:
#     arcpy.env.workspace = fd
#     arcpy.AddMessage(fd)
#     fc1 = arcpy.ListFeatureClasses()
#     arcpy.AddMessage(fcl)
#     if fc1 <> None:
#       for fc in fc1:
#         featureClasses.append(fd+'/'+fc)
rasters = arcpy.ListRasters()

arcpy.ImportToolbox(egis.Toolbox, "usgs")


transDir = arcpy.GetInstallInfo("desktop")["InstallDir"]
translator = os.path.join(transDir, r"Metadata\Translator\ARCGIS2FGDC.xml")
arcpy.env.scratchWorkspace = arcpy.env.scratchGDB #After banging head against it this stopped me from having an "ERROR 000584: Implementation of this Tool's Validate is invalid."

def purgeGeoprocessingFGDC(table,metadataFile):
    addMsgAndPrint('  exporting metadata from ' + table)
    addMsgAndPrint('  exporting metadata to '+metadataFile)
    arcpy.ExportMetadata_conversion(table,translator,metadataFile)
    addMsgAndPrint('  clearing internal metadata')
    arcpy.ClearMetadata_usgs(table)
    addMsgAndPrint('  importing metadata from '+metadataFile)
    arcpy.ImportMetadata_conversion (metadataFile,"FROM_FGDC",table)
    
Example #25
                print layer.name,
            print
    # Stage and upload the service if the sddraft analysis did not contain errors
    if analysis['errors'] == {}:
        try:
            print "Staging service to create service definition"
            arcpy.StageService_server(Sddraft, Sd)

            print "Uploading the service definition and publishing image service"
            arcpy.UploadServiceDefinition_server(Sd, con)

            print "Service successfully published"
        except arcpy.ExecuteError:
            e = sys.exc_info()[1]
            print(e.args[0])
            print arcpy.GetMessages() + "\n\n"
            sys.exit("Failed to stage and upload service")
    else:
        print "Service could not be published because errors were found during analysis."
        print arcpy.GetMessages()
# if Image Service already exists Refresh Service
else:
    try:
        print collection_id + " Image Service exists, Update Service"
        tbx = arcpy.ImportToolbox(out_folder_path+"\\"+ out_name +";System/PublishingTools")
        tbx.RefreshService(collection_id+"Service","ImageServer",workspace_gdb,"#")
    except arcpy.ExecuteError:
        e = sys.exc_info()[1]
        print(e.args[0])
        print arcpy.GetMessages() + "\n\n"
        sys.exit("Failed in Refreshing Service")
Example #26
##*************************************************##
##                                                 ##
## Shouldn't need any changes below                ##
##                                                 ##
##*************************************************##

# import modules
import arcpy, os, xlrd, xlwt, time
from arcpy import env
from arcpy.sa import *
from datetime import datetime
print str(datetime.now())
start_time = time.time()

# import CustomGrid Toolbox ########### Note: need imports first, but not sure where you guys saved this
arcpy.ImportToolbox(r"B:\Aerial Imagery\Data\CustomGridTools.tbx"
                    )  #############################

# list of edge sizes in meters
buff = [10, 20, 30, 40]

# citrus raster name
loc_id = region + "_" + year

# citrus shape file
citrus = path + "\\" + region + ".shp"

# update path string
path = path + r"\20" + year + "\\" + region

# path to classified rasters
classified_raster = path + r"\Classified"
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: CoordinateConversion")
        
        # WORKAROUND
        print("Creating New Scratch Workspace (Workaround)")
        TestUtilities.createScratch()
            
        inputTable =  os.path.join(TestUtilities.csvPath, "SigActs.csv")
        outputDbf =  os.path.join(TestUtilities.scratchPath, "test_coordinate_cc.dbf")
        toolbox = TestUtilities.toolbox        
        
        # Set environment settings
        print("Running from: " + str(TestUtilities.currentPath))
        print("Geodatabase path: " + str(TestUtilities.geodatabasePath))
        
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "InC")
    
        inputFeatureCount = int(arcpy.GetCount_management(inputTable).getOutput(0)) 
        print("Input FeatureClass: " + str(inputTable))
        print("Input Feature Count: " +  str(inputFeatureCount))
            
        if (inputFeatureCount < 1) :
            print("Invalid Input Feature Count: " +  str(inputFeatureCount))
                       
        coordinateConversionFrom = 'MGRS'
        coordinateFieldX = 'Location'
        coordinateFieldY = None
        
        ########################################################
        arcpy.ConvertCoordinates_InC(inputTable, coordinateConversionFrom, coordinateFieldX, coordinateFieldY, outputDbf)
        ########################################################
    
        # Verify the results    
        outputFeatureCount = int(arcpy.GetCount_management(outputDbf).getOutput(0)) 
        print("Output FeatureClass: " + str(outputDbf))
        print("Output Feature Count: " +  str(outputFeatureCount))
                    
        if outputFeatureCount != inputFeatureCount:
            print("Input / Output Feature Count don't match: " + str(inputFeatureCount) + ":" + str(outputFeatureCount))
            raise Exception("Test Failed")            
            
        # WORKAROUND: delete scratch db
        print("Deleting Scratch Workspace (Workaround)")
        TestUtilities.deleteScratch()        
        
        print("Test Successful")
                
    except arcpy.ExecuteError: 
        # Get the tool error messages 
        msgs = arcpy.GetMessages() 
        arcpy.AddError(msgs) 
    
        # return a system error code
        sys.exit(-1)
        
    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
    
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
    
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
    
        # return a system error code
        sys.exit(-1)
def RunTest():
    try:
        print("Starting Test: TestImportPatrolRpt")
        print("Setting up inputs and environment...")
        inputLinesFC = os.path.join(TestUtilities.outputGDB, "TrackLines")
        inputPatrolReportTable = os.path.join(TestUtilities.patrolReportTable)
        inputEnemySightingsTable = os.path.join(
            TestUtilities.enemySightingsTable)
        xmlPatrolReport = os.path.join(TestUtilities.xmlPatrolReport)
        arcpy.env.overwriteOutput = True
        arcpy.ImportToolbox(TestUtilities.toolbox, "pdc")

        # How many rows before joining?
        beforePRRowCount = int(
            arcpy.GetCount_management(inputPatrolReportTable).getOutput(0))
        print("Number of rows in Patrol Report Table before: " +
              str(beforePRRowCount))
        beforeESRowCount = int(
            arcpy.GetCount_management(inputEnemySightingsTable).getOutput(0))
        print("Number of rows in Enemy Sightings Table before: " +
              str(beforeESRowCount))

        print("Selecting track OBJECTID = 2 for report...")
        arcpy.MakeFeatureLayer_management(inputLinesFC, "inputLinesFC_layer")
        selectionType = "NEW_SELECTION"
        expression = "OBJECTID = 2"
        arcpy.SelectLayerByAttribute_management("inputLinesFC_layer",
                                                selectionType, expression)
        tempInputTracks = os.path.join(TestUtilities.scratchGDB, "tracks")
        arcpy.CopyFeatures_management("inputLinesFC_layer", tempInputTracks)
        print("Input features have " +
              str(arcpy.GetCount_management(tempInputTracks).getOutput(0)) +
              " rows")

        print("Checking input field type...")
        trackIDFieldName = "TrackGUID"
        trackIDField = None
        fields = arcpy.ListFields(tempInputTracks, trackIDFieldName)
        for field in fields:
            trackIDField = field

        print("Executing ImportPatrolReport_pdc...")
        import types
        print("trackIDField python type: " + str(type(trackIDField)))
        print("trackIDField.type: " + str(trackIDField.type))
        print("trackIDField.name: " + str(trackIDField.name))
        ########################################################
        # Execute the Model under test:
        # ImportPatrolReport_pdc (Track_Lines, Track_ID_Field, Infopath_Patrol_Report_XML, PatrolReport_Table, EnemySightings_Table)
        arcpy.ImportPatrolReport_pdc(tempInputTracks, trackIDField.name,
                                     xmlPatrolReport, inputPatrolReportTable,
                                     inputEnemySightingsTable)
        ########################################################

        print("Verify the results...")
        afterPRRowCount = int(
            arcpy.GetCount_management(inputPatrolReportTable).getOutput(0))
        afterESRowCount = int(
            arcpy.GetCount_management(inputEnemySightingsTable).getOutput(0))

        if not (afterPRRowCount > beforePRRowCount):
            print("ERROR: Patrol Report rows before: " +
                  str(beforePRRowCount) + ", after: " + str(afterPRRowCount))
            raise Exception("Test Failed")
        elif not (afterESRowCount > beforeESRowCount):
            print("ERROR: Enemy Sightings rows before: " +
                  str(beforeESRowCount) + ", after: " + str(afterESRowCount))
            raise Exception("Test Failed")
        else:
            print("Test Successful")
            print("Number of rows in Patrol Report Table after: " +
                  str(afterPRRowCount))
            print("Number of rows in Enemy Sightings Table after: " +
                  str(afterESRowCount))

    except arcpy.ExecuteError:
        # Get the tool error messages
        msgs = arcpy.GetMessages()
        arcpy.AddError(msgs)

        # return a system error code
        sys.exit(-1)

    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]

        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
            sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"

        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        print(pymsg)
        arcpy.AddError(msgs)
        print(msgs)

        # return a system error code
        sys.exit(-1)
def RunTest():
    try:
        arcpy.AddMessage("Starting Test: LocalPeaks")
        
        if arcpy.CheckExtension("Spatial") == "Available":
            arcpy.CheckOutExtension("Spatial")
        else:
            # Raise a custom exception
            raise Exception("LicenseError")        
        
        # WORKAROUND
        print "Creating New Scratch Workspace (Workaround)"    
        TestUtilities.createScratch()
            
        # Verify the expected configuration exists
        inputPolygonFC =  os.path.join(TestUtilities.inputGDB, "samplePolygonArea")
        inputSurface =  os.path.join(TestUtilities.defaultGDB, "Jbad_SRTM_USGS_EROS")
        outputPointsFC =  os.path.join(TestUtilities.outputGDB, "LocalPeaks")
        toolbox = TestUtilities.toolbox
        
        # Check For Valid Input
        objects2Check = []
        objects2Check.extend([inputPolygonFC, inputSurface, toolbox])
        for object2Check in objects2Check :
            desc = arcpy.Describe(object2Check)
            if desc == None :
                raise Exception("Bad Input")
            else :
                print "Valid Object: " + desc.Name 
        
        # Set environment settings
        print "Running from: " + str(TestUtilities.currentPath)
        print "Geodatabase path: " + str(TestUtilities.geodatabasePath)
        
        arcpy.env.overwriteOutput = True
        arcpy.env.scratchWorkspace = TestUtilities.scratchGDB
        arcpy.ImportToolbox(toolbox, "VandR")
    
        inputFeatureCount = int(arcpy.GetCount_management(inputPolygonFC).getOutput(0)) 
        print "Input FeatureClass: " + str(inputPolygonFC)
        print "Input Feature Count: " +  str(inputFeatureCount)
            
        if (inputFeatureCount < 1) :
            print "Invalid Input Feature Count: " +  str(inputFeatureCount)                    
           
        numberOfPeaks = 3
           
        ########################################################
        # Execute the Model under test:   
        arcpy.FindLocalPeaks_VandR(inputPolygonFC, numberOfPeaks, inputSurface, outputPointsFC)
        ########################################################
    
        # Verify the results    
        outputFeatureCount = int(arcpy.GetCount_management(outputPointsFC).getOutput(0)) 
        print "Output FeatureClass: " + str(outputPointsFC)
        print "Output Feature Count: " +  str(outputFeatureCount)
                
        if (outputFeatureCount < 3) :
            print "Invalid Output Feature Count: " +  str(outputFeatureCount) 
            raise Exception("Test Failed")
            
        # WORKAROUND: delete scratch db
        print "Deleting Scratch Workspace (Workaround)"    
        TestUtilities.deleteScratch()        
        
        print "Test Successful"
                
    except arcpy.ExecuteError: 
        # Get the tool error messages 
        msgs = arcpy.GetMessages() 
        arcpy.AddError(msgs) 
    
        # return a system error code
        sys.exit(-1)
        
    except Exception as e:
        # Get the traceback object
        tb = sys.exc_info()[2]
        tbinfo = traceback.format_tb(tb)[0]
    
        # Concatenate information together concerning the error into a message string
        pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
        msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
    
        # Return python error messages for use in script tool or Python Window
        arcpy.AddError(pymsg)
        arcpy.AddError(msgs)
    
        # return a system error code
        sys.exit(-1)
        
    finally:
        # Check in the Spatial Analyst extension
        arcpy.CheckInExtension("Spatial")        
    startPos = "UL"
elif (labelStartPos == "Lower-Left"):
    startPos = "LL"
elif (labelStartPos == "Lower-Right"):
    startPos = "LR"

# Import the custom toolbox with the fishnet tool in it, and run this. This had to be added to a model
# because of a bug; the model now allows you to pass variables to the Create Fishnet tool.
#UPDATE
if isPro:
    toolboxPath = os.path.dirname(
        arcpy.env.workspace) + "\\ClearingOperations.tbx"
else:
    toolboxPath = os.path.dirname(os.path.dirname(
        arcpy.env.workspace)) + "\\toolboxes\ClearingOperations.tbx"
arcpy.ImportToolbox(toolboxPath, "ClearingOperations")
arcpy.AddMessage("Creating Fishnet Grid")
arcpy.CreateFishnet_ClearingOperations(tempOutput,
                                       originCoordinate, yAxisCoordinate, 0, 0,
                                       str(numberCellsHo),
                                       str(numberCellsVert),
                                       oppCornerCoordinate, "NO_LABELS",
                                       fullExtent, "POLYGON")

# Sort the grid upper left to lower right, and delete the in memory one
arcpy.AddMessage("Sorting the grid for labeling")
tempSort = "tempSort"
arcpy.Sort_management(tempOutput, tempSort, [["Shape", "ASCENDING"]], startPos)
arcpy.Delete_management("in_memory")

# Add a field which will be used to add the grid labels