def main(*argv):
    userName = None
    password = None
    org_url = None
    layerNames = None
    layerName = None
    sql = None
    fst = None
    fs = None
    results = None
    fl = None
    existingDef = None
    try:

        proxy_port = None
        proxy_url = None    

        securityinfo = {}
        securityinfo['security_type'] = 'Portal'  # LDAP, NTLM, OAuth, Portal, PKI
        securityinfo['username'] = argv[0]
        securityinfo['password'] = argv[1]
        securityinfo['org_url'] = argv[2]
        securityinfo['proxy_url'] = proxy_url
        securityinfo['proxy_port'] = proxy_port
        securityinfo['referer_url'] = None
        securityinfo['token_url'] = None
        securityinfo['certificatefile'] = None
        securityinfo['keyfile'] = None
        securityinfo['client_id'] = None
        securityinfo['secret_id'] = None   
        
        fsId = argv[3]
        layerNames = argv[4]
        sql = argv[5]
        toggleEditCapabilities = argv[6]

        fst = featureservicetools.featureservicetools(securityinfo)
        if fst.valid:

            fs = fst.GetFeatureService(itemId=fsId,returnURLOnly=False)

            outputPrinter("Logged in successful")
            if not fs is None:
                if str(toggleEditCapabilities).upper() == 'TRUE':
                    existingDef = fst.EnableEditingOnService(url=fs.url)
                for layerName in layerNames.split(','):
                    fl = fst.GetLayerFromFeatureService(fs=fs,layerName=layerName,returnURLOnly=False)
                    if fl is not None:
                        outputPrinter(message="Attempting to delete features matching this query: %s " % sql)
                        results = fl.deleteFeatures(where=sql)

                        if 'error' in results:
                            outputPrinter(message="Error in response from server: " % results['error'],typeOfMessage='error')
                            arcpy.SetParameterAsText(7, "false")
                            break

                        else:
                            outputPrinter (message="%s features deleted" % len(results['deleteResults']) )
                            if str(toggleEditCapabilities).upper() == 'TRUE':
                                existingDef = fst.EnableEditingOnService(url=fs.url)
                            arcpy.SetParameterAsText(7, "true")
                    else:
                        outputPrinter(message="Layer %s was not found, please check your credentials and layer name" % layerName,typeOfMessage='error')
                        arcpy.SetParameterAsText(7, "false")
                        break
            else:
                outputPrinter(message="Feature Service with id %s was not found" % fsId,typeOfMessage='error')
                arcpy.SetParameterAsText(7, "false")

        else:
            outputPrinter(message="Security handler not created, exiting")
            arcpy.SetParameterAsText(7, "false")

    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        outputPrinter(message="error on line: %s" % line,typeOfMessage='error')
        outputPrinter(message="error in file name: %s" % filename,typeOfMessage='error')
        outputPrinter(message="with error message: %s" % synerror,typeOfMessage='error')
        outputPrinter(message="ArcPy Error Message: %s" % arcpy.GetMessages(2),typeOfMessage='error')
        arcpy.SetParameterAsText(7, "false")
    except common.ArcRestHelperError as e:
        outputPrinter(message=e,typeOfMessage='error')
        arcpy.SetParameterAsText(7, "false")
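
# A minimal entry-point sketch for the delete tool above (an assumption, not part
# of the original script; it presumes sys is imported at module level and that the
# seven positional arguments arrive in the order read inside main()):
if __name__ == '__main__':
    # username, password, org URL, feature service item ID,
    # comma-separated layer names, SQL where clause, toggle-edit flag
    main(*sys.argv[1:])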
    arcpy.AddMessage("check_layer type: %s" % desc.shapeType)
    if desc.shapeType == 'Polygon':
        intersect_layer = check_layer
    else:
        intersect_layer = os.path.join(current_workspace,
                                       "buffer_%s" % uuid.uuid4().hex)
        arcpy.Buffer_analysis(check_layer, intersect_layer, "0.1")

    inFeatures = [probe_path, check_layer]
    intersectOutput = os.path.join(
        current_workspace, "IntersectOutputResult_%s" % uuid.uuid4().hex)
    clusterTolerance = 0
    arcpy.Intersect_analysis(inFeatures, intersectOutput, "", clusterTolerance,
                             "input")

    with arcpy.da.SearchCursor(intersectOutput,
                               ["TIPO_OMO", "SHAPE@area"]) as cursor:
        arcpy.AddMessage("cursor: %s" % cursor)
        for row in cursor:
            zona = row[0]
            if zona in zone:
                zone[zona] += row[1]
            else:
                zone[zona] = row[1]

    del cursor

arcpy.AddMessage(json.dumps(zone, indent=3))

arcpy.SetParameterAsText(1, json.dumps(zone))
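
# For reference, the area-accumulation loop above can be written with
# collections.defaultdict, which removes the explicit membership check
# (a sketch of the same pattern, not a drop-in replacement for the cursor code):
def accumulate_area_by_zone(rows):
    from collections import defaultdict
    totals = defaultdict(float)
    for zona, area in rows:
        totals[zona] += area
    return dict(totals)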
# Example #3
Lat, Lon = point[20:]
feature_type = 'proxy'

#arcpy.AddMessage(str(fid))

try:
    # Save to proxy table in GDB
    conn = dbutil.getDbConn('wtg_gdb')

    dev = (tag_id, animal_id, timeVal, feature_type)
    row = {'latitude': Lat,
           'longitude': Lon,
           'original_fid': fid,
           'source': 'On land ellipse proxy'}
    # instantiate object
    proxyObj = tables.Proxy(*dev, **row)
    feature_id = dbutil.dbTransact(conn,proxyObj.sql_insert(),proxyObj.param_dict())
    arcpy.AddMessage('FID = '+str(feature_id))

    if feature_id:
        conn.commit()
        arcpy.AddMessage('FID = '+str(feature_id))
    else:
        arcpy.AddMessage("didn't work")
except Exception as e:
    print('error ' + str(e))
    conn.rollback()
"""
lyr = arcpy.MakeFeatureLayer_management(point_file, point_name)
arcpy.SetParameterAsText(1, lyr)
# get file path of this python module
pyScript = sys.argv[0]


# define function for executing R-Scripts
def executeRScript(rScript, arguments):
    arcpy.SetProgressor("default", "Executing R Script...")
    args = ["R", "--slave", "--vanilla", "--args"]
    args.extend(arguments)
    scriptSource = open(rScript, 'r')
    rCommand = subprocess.Popen(args,
                                stdin=scriptSource,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True)
    outString, errString = rCommand.communicate()
    scriptSource.close()
    if errString and "...completed execution of R-script" not in outString:
        arcpy.AddMessage(errString)
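

# A Python 3 variant of executeRScript above using subprocess.run; a sketch that
# assumes the same R command line and completion marker (function name hypothetical):
def executeRScriptPy3(rScript, arguments):
    import subprocess
    arcpy.SetProgressor("default", "Executing R Script...")
    args = ["R", "--slave", "--vanilla", "--args"] + list(arguments)
    with open(rScript, "r") as scriptSource:
        result = subprocess.run(args, stdin=scriptSource,
                                capture_output=True, text=True)
    if result.stderr and "...completed execution of R-script" not in result.stdout:
        arcpy.AddMessage(result.stderr)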


# get planning entities with highest resolution per field
thisScriptPath = os.path.dirname(pyScript)
rScriptPath = os.path.join(
    os.path.abspath(os.path.join(thisScriptPath, os.pardir)), "R")
rScript = os.path.join(rScriptPath, "updateTrafficAccidentTable.r")
executeRScript(rScript, [modelFolder, rScriptPath])

arcpy.SetParameterAsText(1, modelFolder)
	#remove entire folder and all its content
	shutil.rmtree(os.path.join(arcpy.env.scratchFolder, "threats"))
	#remove entire folder and all its content
	if access_uri_zip:
		shutil.rmtree(os.path.join(arcpy.env.scratchFolder, "access"))
		
	os.chdir(os.path.join(arcpy.env.scratchFolder, "output"))
	for lyr in glob.glob("*.tif"):
		try:	
			##This step is a workaround to correct proj alignment issue in ArcGIS with output from InVEST
			DefineProj(args[u'landuse_cur_uri'], lyr)
			if "deg_sum_out_c" in lyr:
				arcpy.AddMessage("Clampling Habitat Degradation Output ...")
				ClampRaster(lyr, "deg_c.tif")
				out_rast = os.path.join(arcpy.env.scratchFolder, "output", "deg_c.tif")
				arcpy.SetParameterAsText(6, out_rast)
			else:
				arcpy.AddMessage("Clampling Habitat Quality Output ...")
				ClampRaster(lyr, "quality_c.tif")
				out_rast = os.path.join(arcpy.env.scratchFolder, "output", "quality_c.tif")
				arcpy.SetParameterAsText(7, out_rast)
		except arcpy.ExecuteError:
			arcpy.AddError(arcpy.GetMessages(2))	
		except Exception as ex:
			arcpy.AddError(ex.args[0])		
    
	### Create output maps zipped file ###
	arcpy.AddMessage("Creating output zipped folder with all maps output...")
	out_dir = os.path.join(arcpy.env.scratchFolder, "output")
	files = [os.path.join(out_dir, file) for file in os.listdir(out_dir) if file.endswith(".tif")]
	#create zipfile object and specify 'w' for 'write' mode
import sys
import os
import arcpy

try:
    from scripts.stn.stn_mdb_to_csv import mdb_to_csv
except ImportError:
    sys.path.append(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
    from scripts.stn.stn_mdb_to_csv import mdb_to_csv

mdb_path = arcpy.GetParameterAsText(0)
yr = int(arcpy.GetParameterAsText(1))
out_dir = arcpy.GetParameterAsText(2)
debug = str(arcpy.GetParameterAsText(3)) == 'true'

csv_dict = mdb_to_csv(mdb_path, yr, out_dir, debug=debug)

arcpy.AddMessage(csv_dict)

arcpy.SetParameterAsText(4, csv_dict['DT_RDWY_RTE'])
arcpy.SetParameterAsText(5, csv_dict['DT_RDWY_LINK'])
arcpy.SetParameterAsText(6, csv_dict['DT_RDWY_CHN'])
arcpy.SetParameterAsText(7, csv_dict['DT_RDWY_RTE_LINK'])
arcpy.SetParameterAsText(8, csv_dict['DT_RDWY_LINK_CHN'])
arcpy.SetParameterAsText(9, csv_dict['DT_RDWY_LINK_HIST'])
arcpy.SetParameterAsText(10, csv_dict['DT_RWRL_CUMT_MILG'])
arcpy.SetParameterAsText(11, csv_dict['DT_RP'])
arcpy.SetParameterAsText(12, csv_dict['DT_REF_SITE'])
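
# The repeated SetParameterAsText calls above map fixed table names onto output
# parameters 4-12; the same mapping written as a loop (a sketch, same behaviour):
table_keys = ['DT_RDWY_RTE', 'DT_RDWY_LINK', 'DT_RDWY_CHN', 'DT_RDWY_RTE_LINK',
              'DT_RDWY_LINK_CHN', 'DT_RDWY_LINK_HIST', 'DT_RWRL_CUMT_MILG',
              'DT_RP', 'DT_REF_SITE']
for offset, key in enumerate(table_keys):
    arcpy.SetParameterAsText(4 + offset, csv_dict[key])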
# Example #7
def mainFunction(
    featureClasses, tables, csvFiles, csvXYFieldNames, ftpSite, ftpFolder,
    ftpUsername, ftpPassword, gpService
):  # Get parameters from the ArcGIS Desktop tool, separated by commas (var1 is the 1st parameter, var2 the 2nd, var3 the 3rd)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        # Get the arcgis version
        arcgisVersion = arcpy.GetInstallInfo()['Version']
        # Setup scratch folder differently depending on ArcGIS version
        if (arcgisVersion == "10.0"):
            # Setup geodatabase to load data into in temporary workspace
            arcpy.env.scratchWorkspace = r"F:\Temp"
            tempFolder = arcpy.CreateFolder_management(
                arcpy.env.scratchWorkspace, "WebData-" + str(uuid.uuid1()))
        else:
            # Setup geodatabase to load data into in temporary workspace
            tempFolder = arcpy.CreateFolder_management(
                arcpy.env.scratchFolder, "WebData-" + str(uuid.uuid1()))
        arcpy.CreateFileGDB_management(tempFolder, "Data", "CURRENT")
        geodatabase = os.path.join(str(tempFolder), "Data.gdb")

        arcpy.AddMessage("Copying datasets...")
        # Load the feature classes and tables into a list if input values provided
        if (len(featureClasses) > 0):
            # Remove out apostrophes
            featureclassList = str(featureClasses).replace("'", "").split(";")
            # Loop through the feature classes
            for eachFeatureclass in featureclassList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachFeatureclass)
                # Copy feature class into geodatabase using the same dataset name
                arcpy.CopyFeatures_management(
                    eachFeatureclass,
                    os.path.join(geodatabase, describeDataset.name), "", "0",
                    "0", "0")

        if (len(tables) > 0):
            tableList = str(tables).replace("'", "").split(";")
            # Loop through of the tables
            for eachTable in tableList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachTable)
                # Copy feature class into geodatabase using the same dataset name
                arcpy.TableSelect_analysis(
                    eachTable, os.path.join(geodatabase, describeDataset.name),
                    "")

        # If CSV files provided
        if (len(csvFiles) > 0):
            csvList = str(csvFiles).replace("'", "").split(";")
            # Loop through of the CSVs
            for eachCSV in csvList:
                # Create a Describe object from the dataset
                describeDataset = arcpy.Describe(eachCSV)
                datasetName = describeDataset.name.split(".")
                # Change CSV name if starts with a digit
                if datasetName[0].isdigit():
                    datasetName[0] = "Layer" + datasetName[0]
                # Create feature layer and convert to feature class
                csvFields = str(csvXYFieldNames).replace("'", "").split(",")
                # Copy feature class into geodatabase using the same dataset name
                arcpy.MakeXYEventLayer_management(
                    eachCSV, csvFields[0], csvFields[1], "Layer",
                    "PROJCS['NZGD_2000_New_Zealand_Transverse_Mercator',GEOGCS['GCS_NZGD_2000',DATUM['D_NZGD_2000',SPHEROID['GRS_1980',6378137.0,298.257222101]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Transverse_Mercator'],PARAMETER['False_Easting',1600000.0],PARAMETER['False_Northing',10000000.0],PARAMETER['Central_Meridian',173.0],PARAMETER['Scale_Factor',0.9996],PARAMETER['Latitude_Of_Origin',0.0],UNIT['Meter',1.0]];-4020900 1900 10000;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision",
                    "")
                arcpy.CopyFeatures_management(
                    "Layer", os.path.join(geodatabase, datasetName[0]), "",
                    "0", "0", "0")

        # Check input datasets are provided before zipping up
        if ((len(featureClasses) > 0) or (len(tables) > 0)
                or (len(csvFiles) > 0)):
            arcpy.AddMessage("Zipping data...")
            # Setup the zip file
            if (arcgisVersion == "10.0"):
                zipFile = os.path.join(arcpy.env.scratchWorkspace,
                                       "WebData-" + str(uuid.uuid1()) + ".zip")
            else:
                zipFile = os.path.join(arcpy.env.scratchFolder,
                                       "WebData-" + str(uuid.uuid1()) + ".zip")
            zippedFolder = zipfile.ZipFile(zipFile, "w", allowZip64=True)

            # Zip up the geodatabase
            root_len = len(os.path.abspath(str(tempFolder)))
            # For each of the directories in the folder
            for root, dirs, files in os.walk(str(tempFolder)):
                archive_root = os.path.abspath(root)[root_len:]
                # For each file
                for f in files:
                    fullpath = os.path.join(root, f)
                    archive_name = os.path.join(archive_root, f)
                    zippedFolder.write(fullpath, archive_name)
            # Close zip file
            zippedFolder.close()

            # EXTERNAL FUNCTION - Send data to server
            FTPUpload.mainFunction(zipFile, ftpSite, ftpFolder, ftpUsername,
                                   ftpPassword)
        else:
            #--------------------------------------------Logging--------------------------------------------#
            arcpy.AddError("Process stopped: No datasets provided")
            # Log error
            if (enableLogging == "true"):
                logger.error("Process stopped: No datasets provided")
            #-----------------------------------------------------------------------------------------------#

        # Call geoprocessing service to update data on server
        arcpy.AddMessage("Updating data on server...")
        arcpy.ImportToolbox(gpService, "toolbox")
        arcpy.DataUpdateFromZip_toolbox("Existing")

        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(
                    e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
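
# The function above relies on module-level names that are not part of this
# excerpt (enableLogging, logger, logMessage, sendErrorEmail, output). A minimal
# sketch of the assumed configuration (log file name and values hypothetical):
import logging
enableLogging = "true"
sendErrorEmail = "false"
output = None
logger = logging.getLogger(__name__)
logMessage = logging.FileHandler("DataWebUpload.log")  # hypothetical log file
logMessage.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(logMessage)
logger.setLevel(logging.INFO)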
# Example #8
    for name in fields:
        fieldNames.append(name.name)

    with arcpy.da.SearchCursor(master_18_19, ['*'],
                               where_clause=expression) as cursor:
        for row in cursor:
            result_Dict = {}
            foundList.append(getAPNValue[i])
            for i2 in range(len(row)):
                if fieldNames[i2] not in (u'created_user', u'created_date',
                                          u'last_edited_user', u'last_edited_date'):
                    result_Dict[fieldNames[i2]] = row[i2]

            jsonString = json.dumps(result_Dict)
            APN_List.append(jsonString)

    del cursor

for i2 in getAPNValue:
    if i2 not in foundList:
        missingList.append(i2)

arcpy.AddMessage(APN_List)
arcpy.SetParameterAsText(1, APN_List)
arcpy.SetParameterAsText(2, missingList)

print APN_List
            outStreamLink = StreamLink(FlowAccum, FlowDir)

        # Converts a raster representing a linear network to features representing the linear network.
        # creates field grid_code
        arcpy.SetProgressorLabel("Creating Stream Linear network")
        StreamToFeature(outStreamLink, FlowDir, streams, "SIMPLIFY")
        AddMsgAndPrint(
            "\nSuccessfully created stream linear network using a flow accumulation value >= "
            + str(acreThresholdVal))

        # ------------------------------------------------------------------------------------------------ Delete unwanted datasets
        ##        arcpy.Delete_management(Fill_hydroDEM)
        ##        arcpy.Delete_management(outStreamLink)

        # ------------------------------------------------------------------------------------------------ Compact FGDB
        arcpy.SetProgressorLabel("Compacting FGDB: " +
                                 os.path.basename(watershedGDB_path))
        arcpy.Compact_management(watershedGDB_path)
        AddMsgAndPrint("\nSuccessfully Compacted FGDB: " +
                       os.path.basename(watershedGDB_path))

        # ------------------------------------------------------------------------------------------------ Prepare to Add to Arcmap

        arcpy.SetParameterAsText(3, streams)
        if not reuseCulverts:
            arcpy.SetParameterAsText(4, culverts)

        AddMsgAndPrint("\nAdding Layers to ArcGIS Pro Session")

    except:
        print_exception()
# Example #10
    thisScriptPath = os.path.dirname(pyScript)
    rScriptPath = os.path.join(
        os.path.abspath(os.path.join(thisScriptPath, os.pardir)), "R")
    rScript = os.path.join(rScriptPath, "repairAAADataTable.r")

    # prepare communication
    arcpy.SetProgressor("default", "Executing R Script...")
    args = [
        "R", "--slave", "--vanilla", "--args", dataPath, ModelDataFolderName,
        rScriptPath
    ]

    # run R-script
    scriptSource = open(rScript, 'r')
    rCommand = subprocess.Popen(args,
                                stdin=scriptSource,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                shell=True)

    # get console prints, warnings and errors
    outString, errString = rCommand.communicate()
    scriptSource.close()

    # send warnings and errors to ArcGIS
    if errString and "...completed execution of R-script" not in outString:
        arcpy.AddMessage(errString)

# send parameter to ArcGIS
arcpy.SetParameterAsText(3, dataPath)
# Example #11
##        tWorkspace = CreateTempWorkspace(outWorkspace)
##else:
##    tWorkspace = CreateTempWorkspace(outWorkspace)
##arcpy.Workspace = outWorkspace
##tmpRaster = tWorkspace + os.sep + 'tmpRast'

# Rescale inputs if requested.  Rescaled inputs will be stored in outRescale.
if rescale == 'true':
    cost2 = fl.Rescale(cost1, outRaster, Min, Max)
    cost1 = cost2

# Invert any inputs with this option selected
if invertCost1 == 'true':
    cost2 = fl.InvertRaster(cost1, outRaster)

arcpy.AddMessage("Outraster is: " + outRaster)
# Add Cost Raster to Display

cost2.save(outRaster)

try:
    arcpy.SetParameterAsText(5, outRaster)
    params = arcpy.GetParameterInfo()
except:
    arcpy.AddError(arcpy.GetMessages(2))

# Clean up temporary workspace
# CleanFiles(tWorkspace)
arcpy.Delete_management("in_memory")
del arcpy
def mainFunction(
    portalUrl, portalAdminName, portalAdminPassword, folderName, itemAccess,
    groupID, tags
):  # Get parameters from the ArcGIS Desktop tool, separated by commas (var1 is the 1st parameter, var2 the 2nd, var3 the 3rd)
    try:
        # --------------------------------------- Start of code --------------------------------------- #
        printMessage("Connecting to Portal - " + portalUrl + "...", "info")
        # Generate token for portal
        token = generateToken(portalAdminName, portalAdminPassword, portalUrl)

        # Get the folder ID for the folder
        folderID = getfolderID(portalUrl, portalAdminName, folderName, token)

        if folderID:
            # Get all the items in the folder and iterate through these
            getItemsFolder(portalUrl, portalAdminName, folderID, itemAccess,
                           groupID, tags, token)
        else:
            printMessage("Folder does not exist in this users content...",
                         "error")
            sys.exit()

        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameterAsText(1, output)
                # ArcGIS desktop not installed
                else:
                    return output
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(
                            e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(
                            e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage, "error")
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
# Example #13
import sys
import os
import arcpy

arcpy.env.overwriteOutput = True

try:
    from scripts.util import make_guid
except ImportError:
    sys.path.append(
        os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
    from scripts.util import make_guid

arcpy.SetParameterAsText(0, make_guid())
            os.path.abspath(os.path.join(thisScriptPath, os.pardir)), "R")
        rScript = os.path.join(rScriptPath,
                               "repairSpecialCharactersInExcelFile.r")

        # prepare communication
        arcpy.SetProgressor("default", "Executing R Script...")
        args = [
            "R", "--slave", "--vanilla", "--args", shp, outCSVPath, rScriptPath
        ]

        # run R-script
        scriptSource = open(rScript, 'rb')
        rCommand = subprocess.Popen(args,
                                    stdin=scriptSource,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE,
                                    shell=True)

        # get console prints, warnings and errors
        resString, errString = rCommand.communicate()
        scriptSource.close()

        # send warnings and errors to ArcGIS
        if errString and "...completed execution of R script" not in resString:
            arcpy.AddMessage(errString)

except:
    arcpy.AddMessage("Unable to find house address data.")

arcpy.SetParameterAsText(1, outCSVPath)
            if popTab[i][0] in userTab[11]:
                birthYearFieldName = popTab[i][0]
            if popTab[i][0] in userTab[12]:
                genderFieldName = popTab[i][0]
            if popTab[i][0] in userTab[13]:
                migrationFieldNames.append(popTab[i][0])
            if popTab[i][0] in userTab[19]:
                maritalStatusFieldName = popTab[i][0]
        
except:
    arcpy.AddMessage("Unable to find house address data.")            



# send parameter to ArcGIS
arcpy.SetParameterAsText(4, coordinatesPath)
arcpy.SetParameterAsText(5, addressPath)
arcpy.SetParameterAsText(6, geometriesPath)
arcpy.SetParameterAsText(7, populationDataPath)
arcpy.SetParameterAsText(8, xFieldName)
arcpy.SetParameterAsText(9, yFieldName)
arcpy.SetParameterAsText(10, streetFieldName)
arcpy.SetParameterAsText(11, houseNumberFieldName)
arcpy.SetParameterAsText(12, houseNumberAdditionFieldName)
arcpy.SetParameterAsText(13, cityFieldName)
arcpy.SetParameterAsText(14, ZIPCodeFieldName)
arcpy.SetParameterAsText(15, popStreetFieldName)
arcpy.SetParameterAsText(16, popHouseNumberFieldName)
arcpy.SetParameterAsText(17, popHouseAddNumberFieldName)
arcpy.SetParameterAsText(18, popCityFieldName)
arcpy.SetParameterAsText(19, popZIPFieldName)
def mainFunction(portalUrl,portalAdminName,portalAdminPassword,featureServiceURL,dateTimeField,maxAge): # Get parameters from the ArcGIS Desktop tool, separated by commas (var1 is the 1st parameter, var2 the 2nd, var3 the 3rd)
    try:
        # --------------------------------------- Start of code --------------------------------------- #     
        printMessage("Connecting to Portal - " + portalUrl + "...","info")
        # Generate token for portal
        token = generateToken(portalAdminName, portalAdminPassword, portalUrl)

        # Get the current time and max time for the query
        currentTime = datetime.datetime.now()
        maxTime = datetime.datetime.now() - datetime.timedelta(minutes = int(maxAge))
        unixmaxTime = time.mktime(maxTime.timetuple()) * 1000

        printMessage("Querying Feature Service - " + featureServiceURL + "/query" + "...","info")
        # Setup parameters for web map query
        queryParams = {}
        queryParams['f'] = 'json'
        queryParams['token'] = token
        queryParams['where'] = '1=1'
        queryParams['outFields'] = '*'
        # Python version check
        if sys.version_info[0] >= 3:
            # Python 3.x
            # Encode parameters
            params = urllib.parse.urlencode(queryParams)
        else:
            # Python 2.x
            # Encode parameters
            params = urllib.urlencode(queryParams)
        params = params.encode('utf-8')

        # POST the request
        requestURL = urllib2.Request(featureServiceURL + "/query",params)
        response = urllib2.urlopen(requestURL)
        # Python version check
        if sys.version_info[0] >= 3:
            # Python 3.x
            # Read json response
            responseJSON = json.loads(response.read().decode('utf8'))
        else:
            # Python 2.x
            # Read json response
            responseJSON = json.loads(response.read())    

        # Log results
        if "error" in responseJSON:
            errDict = responseJSON['error']
            message =  "Error Code: %s \n Message: %s" % (errDict['code'],
            errDict['message'])
            printMessage(message,"error")
        else:
            featuresToDelete = []
            deleteFeaturesQuery = ""
            count = 0
            for feature in responseJSON["features"]:
                featureDateTime = feature["attributes"][dateTimeField]
                # Get the features that are older than the max time
                if (featureDateTime < unixmaxTime):
                    featuresToDelete.append(feature["attributes"]["objectid"])

                    # If the first feature
                    if count == 0:
                        deleteFeaturesQuery = "OBJECTID = '" + str(feature["attributes"]["objectid"]) + "'"
                    else:
                        deleteFeaturesQuery = deleteFeaturesQuery + " OR " + "OBJECTID = '" + str(feature["attributes"]["objectid"]) + "'"
                count = count + 1
     
            printMessage("Object IDs of features to delete - " + str(featuresToDelete) + "...","info")
            printMessage("Querying Feature Service - " + featureServiceURL + "/deleteFeatures" + "...","info")
            # Setup parameters for web map query
            deleteParams = {}
            deleteParams['f'] = 'json'
            deleteParams['token'] = token
            deleteParams['where'] = deleteFeaturesQuery
            # Python version check
            if sys.version_info[0] >= 3:
                # Python 3.x
                # Encode parameters
                params = urllib.parse.urlencode(deleteParams)
            else:
                # Python 2.x
                # Encode parameters
                params = urllib.urlencode(deleteParams)
            params = params.encode('utf-8')

            # POST the request
            requestURL = urllib2.Request(featureServiceURL + "/deleteFeatures",params)
            response = urllib2.urlopen(requestURL)
            printMessage(response.read(),"info") 
            # Python version check
            if sys.version_info[0] >= 3:
                # Python 3.x
                # Read json response
                responseJSON = json.loads(response.read().decode('utf8'))
            else:
                # Python 2.x
                # Read json response
                responseJSON = json.loads(response.read())    

            # Log results
            if "error" in responseJSON:
                errDict = responseJSON['error']
                message =  "Error Code: %s \n Message: %s" % (errDict['code'],
                errDict['message'])
                printMessage(message,"error")
            else:
                printMessage(responseJSON,"info")         
        # --------------------------------------- End of code --------------------------------------- #
        # If called from gp tool return the arcpy parameter   
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                # If ArcGIS desktop installed
                if (arcgisDesktop == "true"):
                    arcpy.SetParameterAsText(1, output)
                # ArcGIS desktop not installed
                else:
                    return output 
        # Otherwise return the result          
        else:
            # Return the output if there is any
            if output:
                return output      
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []
    # If arcpy error
    except arcpy.ExecuteError:           
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)   
        printMessage(errorMessage,"error")           
        # Logging
        if (enableLogging == "true"):
            # Log error          
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []   
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""         
        # Build and show the error message
        # If many arguments
        if (e.args):
            for i in range(len(e.args)):        
                if (i == 0):
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = unicode(e.args[i]).encode('utf-8')
                else:
                    # Python version check
                    if sys.version_info[0] >= 3:
                        # Python 3.x
                        errorMessage = errorMessage + " " + str(e.args[i]).encode('utf-8').decode('utf-8')
                    else:
                        # Python 2.x
                        errorMessage = errorMessage + " " + unicode(e.args[i]).encode('utf-8')
        # Else just one argument
        else:
            errorMessage = e
        printMessage(errorMessage,"error")
        # Logging
        if (enableLogging == "true"):
            # Log error            
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")            
            # Remove file handler and close log file        
            logMessage.flush()
            logMessage.close()
            logger.handlers = []   
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)            
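
# Note on the delete query built inside mainFunction above: the OR-chained
# "OBJECTID = '...'" clauses can be expressed more compactly with an IN list
# (a sketch; assumes featuresToDelete holds the collected object IDs):
def buildDeleteQuery(featuresToDelete):
    return "OBJECTID IN ({})".format(", ".join(str(oid) for oid in featuresToDelete))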
# Example #17
import arcpy, os, sys

source = arcpy.GetParameterAsText(0)

message = []

message.append("Looking for file: {}".format(source))

if arcpy.Exists(source):
    message.append("File {} found. Loading...".format(source))
    worked = True
else:
    message.append("File not found or corrupt. Nothing loaded.")
    worked = False

string = "\nUpload process complete. FAILED"
if worked:
    string = "\nUpload process complete. SUCCESS!"

message.append(string)

arcpy.SetParameterAsText(1,"This is a message.")



# Example #18
tableName = sys.argv[1] # Or use arcpy.GetParameterAsText(0)
fieldName = sys.argv[2] # Or use arcpy.GetParameterAsText(1)

print "Checking table:", tableName
print "For fieldname", fieldName

# List every field in the table and collect the field names; the field
# exists only if fieldName matches one of them exactly.
fields = arcpy.ListFields(tableName)

fieldNames = [field.name for field in fields]

if fieldName in fieldNames: # if the fieldName is already a field in the table, set exists to true and does_not_exist to false
        print "Field", fieldName, "found in", tableName
        arcpy.AddMessage("Field " + fieldName + " found in " + tableName)
        arcpy.SetParameterAsText(2, "True") # Sets the 3rd parameter, Exists, to true (0-based)
        arcpy.SetParameterAsText(3, "False") # Sets the 4th parameter, Does_Not_Exist, to false (0-based)



else: # else the fieldName is not yet a field in the table, so set exists to false and does_not_exist to true
        print "Field", fieldName, "NOT found in", tableName
        arcpy.AddMessage("Field " + fieldName + " NOT found in " + tableName)
        arcpy.SetParameterAsText(2, "False") # Sets the 3rd parameter, Exists, to false
        arcpy.SetParameterAsText(3, "True") # Sets the 4th parameter, Does_Not_Exist, to true

raw_input("Click ENTER to close the window")

    arcpy.AddMessage("Done!")
    arcpy.AddMessage("Output written to %s is:" % outGDB)
    outFClist = []
    for direction in stoplist:
        outPolysFC = outPolysname
        outStopsFC = outStopsname
        if direction is not None:
            outStopsFC += str(direction)
            outPolysFC += str(direction)
        outFClist.append(outStopsFC)
        outFClist.append(outPolysFC)
        arcpy.AddMessage("- " + outStopsFC)
        arcpy.AddMessage("- " + outPolysFC)

    # Tell the tool that this is output. This will add the output to the map.
    outFClistwpaths = [os.path.join(outGDB, fc) for fc in outFClist]
    arcpy.SetParameterAsText(8, ';'.join(outFClistwpaths))

except CustomError:
    arcpy.AddError("Failed to create buffers around stops for this route.")
    pass

except:
    arcpy.AddError("Failed to create buffers around stops for this route.")
    raise

finally:
    if OverwriteOutput:
        arcpy.env.overwriteOutput = OverwriteOutput
# Example #20
    # =====================================USER/TOOLBOX INPUTS===============================================

    # project data
    project_fc_param = arcpy.GetParameterAsText(0)  #
    proj_name = arcpy.GetParameterAsText(1)  # os.path.basename(project_fc)
    proj_juris = arcpy.GetParameterAsText(2)  # project jurisdiction
    project_type = arcpy.GetParameterAsText(
        3
    )  # params.ptype_commdesign  # params.ptype_fwy, params.ptype_arterial, or params.ptype_sgr

    #comm design tool doesn't use perf outcomes, adt, speed limit, or PCI, so user shouldn't enter these
    if project_type == params.ptype_commdesign:
        performance_outcomes = 'All'
        arcpy.SetParameterAsText(
            4, performance_outcomes
        )  # user doesn't choose perf outcomes for comm design program
        arcpy.SetParameterAsText(5, 0)  # avg daily traffic;
        arcpy.SetParameterAsText(6, 0)  #posted speed limit
        arcpy.SetParameterAsText(7, 0)  # pavement condition index
        adt = project_speedlim = pci = 0  # set all of these equal to zero since they're not used.
    else:
        performance_outcomes = arcpy.GetParameterAsText(
            4
        )  # ['Reduce VMT', 'Reduce Congestion', 'Encourage Non-SOV Travel']
        adt = int(arcpy.GetParameterAsText(
            5))  # avg daily traffic; user-entered value
        project_speedlim = int(arcpy.GetParameterAsText(
            6))  #posted speed limit; user-entered value
        pci = int(arcpy.GetParameterAsText(
            7))  # pavement condition index, will be user-entered value
# Example #21
        arcpy.AddError(txt)
        
def checkFile(fileName):
    if not os.path.exists(fileName):
        msg("{} not found.\nExiting.".format(fileName),"error")
        sys.exit(1)
    else: return
    
## ---Set derived variables---
msg("Locating scenario geodatabase")
upliftGDB = os.path.join(statsRootFldr,"{}_Uplift.gdb".format(scenarioName))
checkFile(upliftGDB)

#Set the output table
outFC = os.path.join(upliftGDB,"{}_Uplift{}".format(scenarioName,HUCFilter))
arcpy.SetParameterAsText(4,outFC)

## ---Processes---
#Get a list of species uplift tables in the uplift GDB corresponding to the uplift filter
msg("Getting list of species tables")
arcpy.env.workspace = upliftGDB
sppTbls = arcpy.ListTables("*{}".format(HUCFilter))   #Get all the species tables (they have two underscores in them...)
#Exit if no tables were found. 
if len(sppTbls) == 0:
    msg("No results for HUC {}.\nExiting".format(HUCFilter),"error")
    sys.exit(1)

#Select catchments into the output feature class
msg("Initializing output feature class")
msg("...Selecting features in HUC {}".format(HUCFilter))
whereClause = "REACHCODE LIKE '{}%'".format(HUCFilter)
    if os.path.exists(gdbpath):

        arcpy.Delete_management(gdbpath)

    arcpy.CreateFileGDB_management(folder, name)

    return folder + "/" + name


if __name__ == "__main__":

    arcpy.AddMessage("1_创建规划院对接数据")

    folder = arcpy.GetParameterAsText(0)
    xiannname = arcpy.GetParameterAsText(1)
    xzqdm = arcpy.GetParameterAsText(2)

    folder = folder.replace("\\", "/")

    name = xzqdm + xiannname

    arcpy.AddMessage("1_创建文件夹路径")
    shapedir, photodir = createFolder(folder, name)

    arcpy.AddMessage("1_创建临时GDB")
    tempgdb = createTempGBD(folder, name)

    arcpy.SetParameterAsText(3, shapedir)
    arcpy.SetParameterAsText(4, tempgdb)
    arcpy.SetParameterAsText(5, photodir)
    arcpy.AddMessage("1_结束")
        iGroupIndex = int(arcpy.GetParameterAsText(2))
        iStats = int(arcpy.GetParameterAsText(3))
        pOutFile = ""
        if (pOutFile == ""):
            (pOutFile, ext) = os.path.splitext(pQHFile)
            pOutFile = "{}_stats{}".format(pOutFile, ext)

        #pParams = (pQHFile, pFilter, pOutFile, iQHType, 0)
        pProcessor = fdgroupbystats.ClassOp()
        pProcessor.DebugLevel = debugLevel
        pParams = (pQHFile, iValueIndex, iGroupIndex, iStats, pOutFile)
        (sOK, pOutFile, sMsg) = pProcessor.execute(
            pParams)  #pQHFile, pOutFile, iValueIndex, iGroupIndex, iStats)
        if (sOK == apwrutils.C_OK):
            arcpy.AddMessage("pOutFile={}".format(pOutFile))
            arcpy.SetParameterAsText(4, pOutFile)
            if (bImport):
                (fDir, fName,
                 fExt) = apwrutils.Utils.getFilePathExtName(pOutFile)
                pTable = "{}_Tbl".format(fName)
                dds = time.clock()
                #try:
                #    i = 0
                #    recs = []
                #    names = ""
                #    formats = (np.int, np.float32, np.uint32, np.str, np.float32)
                #    #..expecting the first line of the file contains the "," delimited field names
                #    with open(pOutFile, 'r') as f:
                #         for s in f:
                #             if(i==0):
                #                 names = s.split(",")
# Example #24
def print_fc_prop(desc):
    write_it(desc.name + ":")
    write_it("\tShape Type: " + desc.shapeType)
    write_it("\tShape Field Name: " + desc.shapeFieldName)
    write_it("\tFeature Type: " + desc.featureType)
    write_it("\tHas Spatial Index: " + str(desc.hasSpatialIndex))
    write_it("\tHas M: " + str(desc.hasM))
    write_it("\tHas Z: " + str(desc.hasZ))



def write_it(string):
    print string
    outFile.write(string + "\n")

        

#**********************************************************
# Main
arcpy.env.workspace = gdb
fcList = arcpy.ListFeatureClasses()
for fc in fcList:
    desc = arcpy.Describe(fc)
    print_fc_prop(desc)
    write_it("\n")


outFile.close()
arcpy.SetParameterAsText(2, output)
print "Done!"
def mainFunction(
    agsServerSite, username, password, service
):  # Get parameters from the ArcGIS Desktop tool, separated by commas (var1 is the 1st parameter, var2 the 2nd, var3 the 3rd)
    try:
        # --------------------------------------- Start of code --------------------------------------- #

        # Get the server site details
        protocol, serverName, serverPort, context = splitSiteURL(agsServerSite)

        # If any of the variables are blank
        if (serverName is None or serverPort is None or protocol is None
                or context is None):
            return -1

        # Add on slash to context if necessary
        if not context.endswith('/'):
            context += '/'

        # Add on admin to context if necessary
        if not context.endswith('admin/'):
            context += 'admin/'

        # Get token
        token = getToken(username, password, serverName, serverPort, protocol)

        # If token received
        if (token != -1):
            # Check server web adaptor
            webAdaptors = getWebAdaptor(serverName, serverPort, protocol,
                                        token)

            for webAdaptor in webAdaptors:
                # Get the server site details on web adaptor
                protocolWeb, serverNameWeb, serverPortWeb, contextWeb = splitSiteURL(
                    webAdaptor['webAdaptorURL'])

                # Query arcgis server via the web adaptor
                webStatusVersion = checkWebAdaptor(serverNameWeb,
                                                   serverPortWeb, protocolWeb,
                                                   contextWeb, token)

                if (webStatusVersion != -1):
                    arcpy.AddMessage("ArcGIS Server With Web Adaptor " +
                                     webAdaptor['webAdaptorURL'] +
                                     " is running correctly on version " +
                                     str(webStatusVersion) + "...")
                    # Logging
                    if (enableLogging == "true"):
                        logger.info("ArcGIS Server With Web Adaptor " +
                                    webAdaptor['webAdaptorURL'] +
                                    " is running correctly on version " +
                                    str(webStatusVersion) + "...")
                # Else
                else:
                    arcpy.AddError(
                        "There is an issue with the web adaptor - " +
                        webAdaptor['webAdaptorURL'])
                    # Logging
                    if (enableLogging == "true"):
                        logger.error(
                            "There is an issue with the web adaptor - " +
                            webAdaptor['webAdaptorURL'])
                    # Email
                    if (sendErrorEmail == "true"):
                        # Send email
                        sendEmail("There is an issue with the web adaptor - " +
                                  webAdaptor['webAdaptorURL'])

            # List to hold services and their status
            servicesStatus = []

            # If a service is provided
            if (len(str(service)) > 0):
                # Query the service status
                realtimeStatus = getServiceStatus(serverName, serverPort,
                                                  protocol, service, token)
                # Check the service
                serviceInfo = checkService(serverName, serverPort, protocol,
                                           service, token)

                serviceDetails = {
                    'status': realtimeStatus,
                    'info': serviceInfo,
                    'service': service
                }
                servicesStatus.append(serviceDetails)
            # Else
            else:
                # Get all services
                services = getServices(serverName, serverPort, protocol, token)
                # Query all services
                # Iterate through services
                for eachService in services:
                    # Query the service status
                    realtimeStatus = getServiceStatus(serverName, serverPort,
                                                      protocol, eachService,
                                                      token)
                    # Check the service
                    serviceInfo = checkService(serverName, serverPort,
                                               protocol, eachService, token)

                    serviceDetails = {
                        'status': realtimeStatus,
                        'info': serviceInfo,
                        'service': eachService
                    }
                    servicesStatus.append(serviceDetails)

            stoppedServices = 0
            errorServices = 0
            errors = []
            # Iterate through services
            for eachServicesStatus in servicesStatus:
                # If status is stopped add to stopped counter
                if (eachServicesStatus['status'] == "STOPPED"):
                    stoppedServices = stoppedServices + 1
                else:
                    # If error with service add to error counter
                    if 'error' in eachServicesStatus['info']:
                        errorServices = errorServices + 1
                        errors.append(
                            eachServicesStatus['info']['error']['message'])

            # If any services are stopped/have errors
            if (stoppedServices > 0) or (errorServices > 0):
                arcpy.AddError(
                    str(stoppedServices) + " services are stopped...")
                arcpy.AddError(str(errorServices) + " services have errors...")
                for error in errors:
                    arcpy.AddError(error)
                # Logging
                if (enableLogging == "true"):
                    logger.error(
                        str(stoppedServices) + " services are stopped")
                    logger.error(str(errorServices) + " services have errors")
                    for error in errors:
                        logger.error(error)
                # Email
                if (sendErrorEmail == "true"):
                    errorMessage = str(
                        stoppedServices) + " services are stopped" + "\n"
                    errorMessage += str(
                        errorServices) + " services have errors" + "\n" + "\n"
                    for error in errors:
                        errorMessage += error + "\n"
                    # Send email
                    sendEmail(errorMessage)
            else:
                arcpy.AddMessage("All services are running correctly...")
                # Logging
                if (enableLogging == "true"):
                    logger.info("All services are running correctly...")

        # --------------------------------------- End of code --------------------------------------- #

        # If called from gp tool return the arcpy parameter
        if __name__ == '__main__':
            # Return the output if there is any
            if output:
                arcpy.SetParameterAsText(1, output)
        # Otherwise return the result
        else:
            # Return the output if there is any
            if output:
                return output
        # Logging
        if (enableLogging == "true"):
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        pass
    # If arcpy error
    except arcpy.ExecuteError:
        # Build and show the error message
        errorMessage = arcpy.GetMessages(2)
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
    # If python error
    except Exception as e:
        errorMessage = ""
        # Build and show the error message
        for i in range(len(e.args)):
            if (i == 0):
                errorMessage = unicode(e.args[i]).encode('utf-8')
            else:
                errorMessage = errorMessage + " " + unicode(
                    e.args[i]).encode('utf-8')
        arcpy.AddError(errorMessage)
        # Logging
        if (enableLogging == "true"):
            # Log error
            logger.error(errorMessage)
            # Log end of process
            logger.info("Process ended.")
            # Remove file handler and close log file
            logging.FileHandler.close(logMessage)
            logger.removeHandler(logMessage)
        if (sendErrorEmail == "true"):
            # Send email
            sendEmail(errorMessage)
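
# splitSiteURL is used by mainFunction above but is not included in this excerpt;
# a minimal sketch of the assumed behaviour (returns protocol, server name, port
# and context, e.g. "https://gis.example.com:6443/arcgis" ->
# ("https", "gis.example.com", "6443", "arcgis")):
def splitSiteURLSketch(agsServerSite):
    from urllib.parse import urlparse  # Python 3
    parsed = urlparse(agsServerSite)
    port = str(parsed.port) if parsed.port else ("443" if parsed.scheme == "https" else "80")
    return parsed.scheme, parsed.hostname, port, parsed.path.strip("/")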
# Example #26
def Calculate(self, parameters, messages):
    import importlib
    try:
        import arcsdm.sdmvalues
        import arcsdm.workarounds_93
        try:
            importlib.reload(arcsdm.sdmvalues)
            importlib.reload(arcsdm.workarounds_93)
        except:
            reload(arcsdm.sdmvalues)
            reload(arcsdm.workarounds_93)
        gp.OverwriteOutput = 1
        gp.LogHistory = 1
        EvidenceLayer = parameters[0].valueAsText

        # Test if EvidenceLayer has attribute table or not #AL 090620
        test_raster = arcpy.Raster(EvidenceLayer)
        if not test_raster.hasRAT:
            arcpy.AddError(
                "ERROR: EvidenceLayer does not have an attribute table. Use 'Build Raster Attribute Table' tool to add it."
            )
            raise

        # Test data type of Evidence Layer #AL 150520,030620
        evidenceDescr = arcpy.Describe(EvidenceLayer)
        evidenceCoord = evidenceDescr.spatialReference.name
        arcpy.AddMessage("Evidence Layer is " + EvidenceLayer +
                         " and its data type is " + evidenceDescr.datatype +
                         " and coordinate system is " + evidenceCoord)
        if (evidenceDescr.datatype == "RasterBand"):
            # Try to change RasterBand to RasterDataset #AL 210720
            evidence1 = os.path.split(EvidenceLayer)
            evidence2 = os.path.split(evidence1[0])
            if (evidence1[1] == evidence2[1] or evidence1[1][:4] == "Band"):
                EvidenceLayer = evidence1[0]
                evidenceDescr = arcpy.Describe(EvidenceLayer)
                arcpy.AddMessage("Evidence Layer is now " + EvidenceLayer +
                                 " and its data type is " +
                                 evidenceDescr.datatype)
            else:
                arcpy.AddError(
                    "ERROR: Data Type of Evidence Layer cannot be RasterBand, use Raster Dataset."
                )
                raise ErrorExit
        valuetype = gp.GetRasterProperties(EvidenceLayer, 'VALUETYPE')
        valuetypes = {1: 'Integer', 2: 'Float'}
        #if valuetype != 1:
        # valuetype: 0 = 1-bit, 1 = 2-bit, 2 = 4-bit, 3 = 8-bit unsigned integer, 4 = 8-bit signed integer, 5 = 16-bit unsigned integer
        # 6 = 16-bit signed integer, 7 = 32-bit unsigned integer, 8 = 32-bit signed integer, 9 = 32-bit floating point
        # 10 = 64-bit double precision, 11 = 8-bit complex, 12 = 16-bit complex, 13 = 32-bit complex, 14 = 64-bit complex
        if valuetype > 8:  # <==RDB  07/01/2010 - new  integer valuetype property value for arcgis version 10
            gp.adderror(
                'ERROR: ' + EvidenceLayer +
                ' is not an integer-type raster because VALUETYPE is ' +
                str(valuetype))  #AL 040520
            raise ErrorExit
        CodeName = parameters[1].valueAsText  #gp.GetParameterAsText(1)
        TrainingSites = parameters[2].valueAsText
        # Test coordinate system of Training sites and confirm it is same than Evidence Layer #AL 150520
        trainingDescr = arcpy.Describe(TrainingSites)
        trainingCoord = trainingDescr.spatialReference.name
        if (evidenceCoord != trainingCoord):
            arcpy.AddError("ERROR: Coordinate System of Evidence Layer is " +
                           evidenceCoord + " and Training points it is " +
                           trainingCoord + ". These must be same.")
            raise ErrorExit
        Type = parameters[3].valueAsText
        wtstable = parameters[4].valueAsText

        # If using non gdb database, lets add .dbf
        # If using GDB database, remove numbers and underscore from the beginning of the Weights table name (else block) #AL 061020
        wdesc = arcpy.Describe(gp.workspace)
        if (wdesc.workspaceType == "FileSystem"):
            if not (wtstable.endswith('.dbf')):
                wtstable += ".dbf"
        else:
            wtsbase = os.path.basename(wtstable)
            while len(wtsbase) > 0 and (wtsbase[:1] <= "9"
                                        or wtsbase[:1] == "_"):
                wtsbase = wtsbase[1:]
            wtstable = os.path.dirname(wtstable) + "\\" + wtsbase
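            # e.g. a GDB table name such as "2_wts" is trimmed here to "wts",
            # matching the comment above about stripping leading digits and
            # underscores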
        Confident_Contrast = float(parameters[5].valueAsText)
        #Unitarea = float( parameters[6].valueAsText)
        Unitarea = float(parameters[6].value)
        MissingDataValue = int(
            parameters[7].valueAsText)  # Python 3 fix, long -> int
        #gp.AddMessage("Debug step 12");
        arcsdm.sdmvalues.appendSDMValues(gp, Unitarea, TrainingSites)
        arcpy.AddMessage("=" * 10 + " Calculate weights " + "=" * 10)
        # Process: ExtractValuesToPoints
        arcpy.AddMessage("%-20s %s (%s)" % ("Creating table:", wtstable, Type))

        #tempTrainingPoints = gp.createscratchname("OutPoints", "FC", "shapefile", gp.scratchworkspace)
        #gp.ExtractValuesToPoints_sa(TrainingSites, EvidenceLayer, tempTrainingPoints, "NONE", "VALUE_ONLY")
        assert isinstance(EvidenceLayer, object)
        tempTrainingPoints = arcsdm.workarounds_93.ExtractValuesToPoints(
            gp, EvidenceLayer, TrainingSites, "TPFID")
        # Process: Summarize Frequency and manage fields

        #Statistics = gp.createuniquename("WtsStatistics.dbf")

        Statistics = gp.createuniquename("WtsStatistics")
        if gp.exists(Statistics): gp.Delete_management(Statistics)
        gp.Statistics_analysis(tempTrainingPoints, Statistics,
                               "rastervalu sum", "rastervalu")
        # Process: Create the table

        gp.CreateTable_management(os.path.dirname(wtstable),
                                  os.path.basename(wtstable), Statistics)

        gp.AddField_management(wtstable, "Count", "long")
        gp.AddField_management(wtstable, "Area", 'double')
        gp.AddField_management(wtstable, "AreaUnits", 'double')
        gp.AddField_management(wtstable, "CLASS", "long")
        if CodeName != None and len(CodeName) > 0:
            gp.AddField_management(wtstable, "CODE", "text", "5", "#", "#",
                                   "Symbol")
        gp.AddField_management(wtstable, "AREA_SQ_KM", "double")
        gp.AddField_management(wtstable, "AREA_UNITS", "double")
        gp.AddField_management(wtstable, "NO_POINTS", "long")
        gp.AddField_management(wtstable, "WPLUS", "double", "10", "4", "#",
                               "W+")
        gp.AddField_management(wtstable, "S_WPLUS", "double", "10", "4", "#",
                               "W+ Std")
        gp.AddField_management(wtstable, "WMINUS", "double", "10", "4", "#",
                               "W-")
        gp.AddField_management(wtstable, "S_WMINUS", "double", "10", "4", "#",
                               "W- Std")
        # Database table field name cannot be same as alias name when ArcGIS Pro with File System Workspace is used. #AL
        gp.AddField_management(wtstable, "CONTRAST", "double", "10", "4", "#",
                               "Contrast_")
        gp.AddField_management(wtstable, "S_CONTRAST", "double", "10", "4",
                               "#", "Contrast_Std")
        gp.AddField_management(wtstable, "STUD_CNT", "double", "10", "4", "#",
                               "Studentized_Contrast")
        gp.AddField_management(wtstable, "GEN_CLASS", "long", "#", "#", "#",
                               "Generalized_Class")
        gp.AddField_management(wtstable, "WEIGHT", "double", "10", "6", "#",
                               "Generalized_Weight")
        gp.AddField_management(wtstable, "W_STD", "double", "10", "6", "#",
                               "Generalized_Weight_Std")
        OIDName = gp.Describe(wtstable).OIDFieldName

        #Fill output table rows depending on Type
        desc = gp.describe(EvidenceLayer)
        cellsize = desc.MeanCellWidth
        if desc.datatype == 'RasterLayer': EvidenceLayer = desc.catalogpath
        if Type == "Descending":
            wtsrows = gp.InsertCursor(wtstable)
            try:
                rows = gp.SearchCursor(EvidenceLayer, '', '', '', 'Value D')
            except:
                # Test if EvidenceLayer has attribute table or not #AL 090620
                test_raster = arcpy.Raster(EvidenceLayer)
                if not test_raster.hasRAT:
                    arcpy.AddError(
                        "ERROR: EvidenceLayer does not have an attribute table. Use 'Build Raster Attribute Table' tool to add it."
                    )
                    raise
            row = rows.Next()
            while row:
                #gp.AddMessage("Inserting row.")
                wtsrow = wtsrows.NewRow()
                wtsrow.rastervalu = row.Value
                wtsrow.SetValue('class', row.Value)
                if CodeName != None and len(CodeName) > 0:
                    wtsrow.Code = row.GetValue(CodeName)
                #This related to Access Personal geodatabase bug
                #arcpy.AddMessage("DEBUG: Rowcount:%s"%(str(row.Count)));
                wtsrow.Count = row.Count
                statsrows = gp.SearchCursor(Statistics,
                                            'rastervalu = %i' % row.Value)
                rowFreq = 0
                if statsrows:
                    statsrow = statsrows.Next()
                    if statsrow:
                        rowFreq = statsrow.Frequency
                wtsrow.Frequency = rowFreq
                #gp.addmessage('Desc: Class: %d, Count: %d,  Freq: %d'%(row.Value,row.Count, rowFreq))
                wtsrows.InsertRow(wtsrow)
                row = rows.next()
            del wtsrows, wtsrow

        else:  # Ascending or Categorical or Unique
            wtsrows = gp.InsertCursor(wtstable)
            try:
                rows = gp.SearchCursor(EvidenceLayer)
            except:
                # Test if EvidenceLayer has attribute table or not #AL 090620
                test_raster = arcpy.Raster(EvidenceLayer)
                if not test_raster.hasRAT:
                    arcpy.AddError(
                        "ERROR: EvidenceLayer does not have an attribute table. Use 'Build Raster Attribute Table' tool to add it."
                    )
                    raise
            row = rows.Next()
            wtsrow = wtsrows.NewRow()  # Unicamp added 080818 (AL 210720)
            while row:
                wtsrow = wtsrows.NewRow()
                wtsrow.rastervalu = row.Value
                wtsrow.SetValue('class', row.Value)
                if CodeName != None and len(CodeName) > 0:
                    wtsrow.Code = row.GetValue(CodeName)
                #arcpy.AddMessage("DEBUG: Rowcount:%s"%(str(row.Count)));
                wtsrow.Count = row.Count
                statsrows = gp.SearchCursor(Statistics,
                                            'rastervalu = %i' % row.Value)
                if statsrows:
                    statsrow = statsrows.Next()
                    if statsrow:
                        wtsrow.Frequency = statsrow.Frequency
                    else:
                        wtsrow.Frequency = 0
                wtsrows.InsertRow(wtsrow)
                row = rows.Next()
            del wtsrows, wtsrow
        del row, rows
        # Calculate fields
        #gp.AddMessage('Calculating weights...')
        #gp.AddMessage("[count] * %f * %f /1000000.0"%(cellsize,cellsize))
        arcpy.CalculateField_management(
            wtstable, "area", "!count! * %f / 1000000.0" % (cellsize**2),
            "PYTHON_9.3")
        arcpy.CalculateField_management(wtstable, "areaunits",
                                        "!area! / %f" % Unitarea, "PYTHON_9.3")

        #gp.CalculateField_management (wtstable, "area", "!count! * %f / 1000000.0"%(cellsize**2))
        #gp.CalculateField_management (wtstable, "areaunits", "!area! / %f"% Unitarea)
        # Calculate accumulative fields
        if Type in ("Ascending", "Descending"):
            wtsrows = gp.UpdateCursor(wtstable)
            wtsrows.reset()
            wtsrow = wtsrows.Next()
            lastTotalArea = 0  # Unicamp added 080818 (AL 210720)
            lastTotalTP = 0  # Unicamp added 080818 (AL 210720)
            if wtsrow:
                if wtsrow.GetValue('class') != MissingDataValue:
                    lastTotalTP = wtsrow.Frequency
                    lastTotalArea = wtsrow.Area  # sq km
                    lastTotalAreaUnits = wtsrow.AreaUnits  # unit cells
                    wtsrow.NO_POINTS = lastTotalTP
                    wtsrow.AREA_SQ_KM = lastTotalArea  # sq km
                    wtsrow.AREA_UNITS = lastTotalAreaUnits  # unit cells
                else:
                    lastTotalTP = 0
                    lastTotalArea = 0
                    lastTotalAreaUnits = 0
                    wtsrow.NO_POINTS = wtsrow.Frequency
                    wtsrow.AREA_SQ_KM = wtsrow.Area  # sq km
                    wtsrow.AREA_UNITS = wtsrow.AreaUnits  # unit cells
                #gp.addmessage('%s: Freq: %d, Area: %f,  UnitAreas: %f'%(Type, wtsrow.Frequency,wtsrow.Area, wtsrow.AreaUnits))
                wtsrows.UpdateRow(wtsrow)
                wtsrow = wtsrows.Next()
            while wtsrow:
                if wtsrow.GetValue('class') != MissingDataValue:
                    lastTotalTP += wtsrow.Frequency
                    lastTotalArea += wtsrow.Area
                    lastTotalAreaUnits += wtsrow.AreaUnits
                    wtsrow.NO_POINTS = lastTotalTP
                    wtsrow.AREA_SQ_KM = lastTotalArea  # sq km
                    wtsrow.AREA_UNITS = lastTotalAreaUnits  # unit cells
                else:
                    wtsrow.NO_POINTS = wtsrow.Frequency
                    wtsrow.AREA_SQ_KM = wtsrow.Area  #sq km
                    wtsrow.AREA_UNITS = wtsrow.AreaUnits  # unit cells
                #gp.addmessage('%s: Freq: %d, Area: %f,  UnitAreas: %f'%(Type,wtsrow.Frequency,wtsrow.Area, wtsrow.AreaUnits))
                wtsrows.UpdateRow(wtsrow)
                wtsrow = wtsrows.Next()
            totalArea = lastTotalArea  # sq km
            totalTPs = lastTotalTP
            del wtsrow, wtsrows
        #Calculate non-accumulative fields
        elif Type in ("Categorical", "Unique"):
            totalArea = 0
            totalTPs = 0
            wtsrows = gp.UpdateCursor(wtstable)
            wtsrow = wtsrows.Next()
            while wtsrow:
                wtsrow.NO_POINTS = wtsrow.Frequency
                wtsrow.AREA_SQ_KM = wtsrow.Area  # sq km
                wtsrow.AREA_UNITS = wtsrow.AreaUnits  # unit cells
                #gp.addMessage("Debug class: " + str(wtsrow.GetValue('class')));

                if wtsrow.getValue("class") != MissingDataValue:
                    totalTPs += wtsrow.Frequency
                    totalArea += wtsrow.Area
                wtsrows.UpdateRow(wtsrow)
                wtsrow = wtsrows.Next()
            del wtsrow, wtsrows
        else:
            gp.AddWarning('Type %s not implemented' % Type)

        #Calculate weights, etc from filled-in fields
        wtsrows = gp.UpdateCursor(wtstable)
        wtsrow = wtsrows.Next()
        while wtsrow:
            #gp.AddMessage('Got to here...%i'%wtsrow.Class)
            #No calculations for missingdata class
            if wtsrow.GetValue('class') == MissingDataValue:
                wtsrow.wplus = 0.0
                wtsrow.s_wplus = 0.0
                wtsrow.wminus = 0.0
                wtsrow.s_wminus = 0.0
                wtsrow.contrast = 0.0
                wtsrow.s_contrast = 0.0
                wtsrow.stud_cnt = 0.0
            else:
                #gp.addMessage("Debug:" + str((wtsrow.NO_POINTS, wtsrow.AREA_SQ_KM, Unitarea, totalTPs, totalArea, Type)));
                wts = MakeWts(wtsrow.NO_POINTS, wtsrow.AREA_SQ_KM, Unitarea,
                              totalTPs, totalArea, Type)
                if not wts:
                    gp.AddError("Weights calculation aborted.")
                    raise ErrorExit
                (wp, sp, wm, sm, c, sc, c_sc) = wts
                #gp.AddMessage( "Debug out: " +  str((wp,sp,wm,sm,c,sc,c_sc)))
                wtsrow.wplus = wp
                wtsrow.s_wplus = sp
                wtsrow.wminus = wm
                wtsrow.s_wminus = sm
                wtsrow.contrast = c
                wtsrow.s_contrast = sc
                wtsrow.stud_cnt = c_sc
            wtsrows.UpdateRow(wtsrow)
            wtsrow = wtsrows.Next()
        del wtsrow, wtsrows
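        # MakeWts, called in the loop above but defined elsewhere in this
        # toolbox, is assumed to return the standard weights-of-evidence
        # statistics for one class. With N_B unit cells and n_B training
        # points on the pattern, and N, n the study-area totals, roughly:
        #   W+ = ln( (n_B / n) / ((N_B - n_B) / (N - n)) )
        #   W- = ln( ((n - n_B) / n) / ((N - N_B - n + n_B) / (N - n)) )
        #   C  = W+ - W-,   studentized contrast = C / s(C),
        #   s(C)^2 = s(W+)^2 + s(W-)^2
        # This formulation is an assumption based on common WofE practice,
        # not something stated in this file.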

        #Generalize table
        #Get Study Area size in Evidence counts
        try:
            evRows = gp.SearchCursor(EvidenceLayer)
        except:
            # Test if EvidenceLayer has attribute table or not #AL 090620
            test_raster = arcpy.Raster(EvidenceLayer)
            if not test_raster.hasRAT:
                arcpy.AddError(
                    "ERROR: EvidenceLayer does not have an attribute table. Use 'Build Raster Attribute Table' tool to add it."
                )
                raise
        evRow = evRows.Next()
        studyArea = 0
        while evRow:
            studyArea = studyArea + evRow.Count
            evRow = evRows.Next()
        del evRow, evRows
        #gp.AddMessage("studyArea size(cells)=" + str(studyArea))

        #Get total number of training points
        ds = gp.GetCount_management(tempTrainingPoints)  #TP selected
        #gp.AddMessage("ds="+str(ds))

        Success = True  #Assume Valid Table: Has confident classes
        if Type in ("Ascending", "Descending", "Categorical"):
            #gp.AddMessage("Generalizing " + Type + "...")
            if Type != "Categorical":  #i.e., Ascending or Descending
                #Select confident rows
                WgtsTblRows = gp.SearchCursor(
                    wtstable, "STUD_CNT >= " + str(Confident_Contrast))
                #Get confidence row OID with maximum contrast
                WgtsTblRow = WgtsTblRows.Next()
                maxContrast = -9999999.0
                patNoTPs = 0
                patArea = 0.0
                maxOID = -1
                while WgtsTblRow:
                    if WgtsTblRow.Class != MissingDataValue:
                        if (WgtsTblRow.Contrast > maxContrast) and (
                                WgtsTblRow.STUD_CNT >= Confident_Contrast):
                            maxContrast = WgtsTblRow.Contrast
                            maxOID = WgtsTblRow.GetValue(OIDName)
                            maxWplus = WgtsTblRow.Wplus
                            maxWplus_Std = WgtsTblRow.S_Wplus
                            maxWminus = WgtsTblRow.Wminus
                            maxWminus_Std = WgtsTblRow.S_Wminus
                            maxStdContr = WgtsTblRow.STUD_CNT
                            patNoTPs += WgtsTblRow.No_points
                            patArea += WgtsTblRow.Area_units
                    WgtsTblRow = WgtsTblRows.Next()
                #Set state of calculation
                #gp.AddMessage("Max OID: " + str(maxOID))
                if maxOID >= 0:
                    #Select rows with OID <= maxOID and Set new field values
                    Where = OIDName + " <= " + str(maxOID)
                    WgtsTblRows = gp.UpdateCursor(wtstable, Where)
                    WgtsTblRow = WgtsTblRows.Next()
                    while WgtsTblRow:
                        """ Missing data row should be processed after Gen_Class=2 is complete.
                            Then MD row should be found. If found, get area and num points of
                            pattern=2 and compute MD std.
                        """
                        if WgtsTblRow.Class == MissingDataValue:
                            WgtsTblRow.Gen_Class = MissingDataValue
                            WgtsTblRow.Weight = 0.0
                            WgtsTblRow.W_Std = 0.0
                        else:
                            WgtsTblRow.Gen_Class = 2
                            WgtsTblRow.Weight = maxWplus
                            WgtsTblRow.W_Std = maxWplus_Std
                        WgtsTblRows.UpdateRow(WgtsTblRow)
                        WgtsTblRow = WgtsTblRows.Next()
                    #gp.AddMessage("Set IN rows.")

                    #Select rows with OID > maxOID and Set new field values
                    Where = OIDName + " > " + str(maxOID)
                    WgtsTblRows = gp.UpdateCursor(wtstable, Where)
                    WgtsTblRow = WgtsTblRows.Next()
                    while WgtsTblRow:
                        if WgtsTblRow.Class == MissingDataValue:
                            #gp.AddMessage("Setting missing data gen_class...")
                            WgtsTblRow.Gen_Class = MissingDataValue
                            WgtsTblRow.Weight = 0.0
                            WgtsTblRow.W_Std = 0.0
                        else:
                            WgtsTblRow.Gen_Class = 1
                            WgtsTblRow.Weight = maxWminus
                            WgtsTblRow.W_Std = maxWminus_Std
                        WgtsTblRows.UpdateRow(WgtsTblRow)
                        WgtsTblRow = WgtsTblRows.Next()
                    #gp.AddMessage("Set OUT rows.")
                else:
                    gp.AddWarning(
                        "No Contrast for type %s satisfied the user defined confidence level %s"
                        % (Type, Confident_Contrast))
                    gp.AddWarning("Table %s is incomplete." % wtstable)
                    #gp.Delete(wtstable)
                    Success = False  # Invalid Table: No confidence

            else:  #Categorical
                #Get Wts and Wts_Std for class values outside confidence
                Out_Area = 0
                Out_NumTPs = 0.0
                #Out_SumWStds = 0.0
                Out_Num = 0

                #>>>>>>>>>>>>>>>>Out Rows>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
                #Select rows having less than specified absolute confidence; they are assigned to Out_Gen_Class
                WhereClause = "(STUD_CNT > -%f) and (STUD_CNT < %f)" % (
                    Confident_Contrast, Confident_Contrast)
                #gp.AddMessage(WhereClause)
                WgtsTblRows = gp.SearchCursor(wtstable, WhereClause)
                WgtsTblRow = WgtsTblRows.Next()
                #Categorical might have a Class.Value = 0
                Out_Gen_Class = int(99)
                if WgtsTblRow:
                    #gp.AddMessage("Processing no-confidence rows...")
                    while WgtsTblRow:
                        #gp.AddMessage("Class="+str(WgtsTblRow.Class))
                        ##
                        ##                Missing data row should be processed after Outside classes are complete.
                        ##                Then MD row should be found. If found, get area and num points of
                        ##                Outside classes and compute MD std.
                        ##
                        if WgtsTblRow.Class != MissingDataValue:
                            #Process Out Rows for total TPs=Out_NumTPs, total Area=Out_Area, number=Out_Num
                            #Categorical might have a Class.Value = 0, therefore
                            #Give Outside generalized class a value=10^n + 99, some n >= 0...
                            if WgtsTblRow.Class >= Out_Gen_Class:
                                Out_Gen_Class += 100
                            Out_NumTPs += WgtsTblRow.no_points
                            Out_Area += WgtsTblRow.Area
                            Out_Num = Out_Num + 1
                        WgtsTblRow = WgtsTblRows.Next()

                    #Calculate Wts from Out Area and Out TPs for combined Out Rows
                    if Out_Num > 0:
                        if Out_NumTPs == 0: Out_NumTPs = 0.001
                        Wts = MakeWts(float(Out_NumTPs), Out_Area, Unitarea,
                                      totalTPs, totalArea, Type)
                        if not Wts:
                            gp.AddError("Weights calculation aborted.")
                            raise ErrorExit
                    #gp.AddMessage("Num Out TPs=%d, Area Out Rows=%f: %f, %f"%(Out_NumTPs,Out_Area,Wts[0],Wts[1]))
                    #gp.AddMessage("Got wts stats." + str(Wts))
                    #At,Aj,Adt,Adj = studyArea/Unit,float(ds),Out_Area/fac/Unit,float(Out_NumTPs)
                #<<<<<<<<<<<<<<<<<Out Rows<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<
                #Select all rows and Set new field values
                WgtsTblRows = gp.UpdateCursor(wtstable)
                WgtsTblRow = WgtsTblRows.Next()
                In_Num = 0
                while WgtsTblRow:
                    if WgtsTblRow.Class == MissingDataValue:
                        WgtsTblRow.Gen_Class = MissingDataValue
                        WgtsTblRow.Weight = 0.0
                        WgtsTblRow.W_Std = 0.0
                        #gp.AddMessage('got md std....')
                    elif abs(WgtsTblRow.STUD_CNT
                             ) >= Confident_Contrast:  #In Rows
                        WgtsTblRow.Gen_Class = WgtsTblRow.Class
                        WgtsTblRow.Weight = WgtsTblRow.Wplus
                        WgtsTblRow.W_Std = WgtsTblRow.S_Wplus
                        In_Num += 1
                    elif Out_Num > 0:  #Out Rows
                        if WgtsTblRow.Class == Out_Gen_Class:
                            gp.AddError(
                                "Categorical: Class value of the outside generalized class is same as an inside class."
                            )
                            raise ErrorExit
                        WgtsTblRow.Gen_Class = Out_Gen_Class
                        WgtsTblRow.Weight = Wts[2]
                        WgtsTblRow.W_Std = Wts[3]
                    WgtsTblRows.UpdateRow(WgtsTblRow)
                    #gp.AddMessage("Class=" + str(WgtsTblRow.Class))
                    WgtsTblRow = WgtsTblRows.Next()
                if In_Num == 0:
                    gp.AddWarning(
                        "No row Contrast for type %s satisfied the user confidence contrast = %s"
                        % (Type, Confident_Contrast))
                    gp.AddWarning("Table %s is incomplete." % wtstable)
                    Success = False  # Invalid Table: fails confidence test
        #end of Categorical generalization
        else:  #Type is Unique
            #gp.AddMessage("Setting Unique Generalization")
            WgtsTblRows = gp.UpdateCursor(wtstable)
            WgtsTblRow = WgtsTblRows.Next()
            while WgtsTblRow:
                WgtsTblRow.Gen_Class = WgtsTblRow.Class
                WgtsTblRow.Weight = 0.0
                WgtsTblRow.W_Std = 0.0
                WgtsTblRows.UpdateRow(WgtsTblRow)
                #gp.AddMessage("Class=" + str(WgtsTblRow.Class))
                WgtsTblRow = WgtsTblRows.Next()
        del WgtsTblRow, WgtsTblRows
        gp.AddMessage("Done creating table.")
        gp.AddMessage("Success: %s" % str(Success))
        #Delete extraneous fields
        gp.DeleteField_management(
            wtstable, "area;areaunits;count;rastervalu;frequency;sum_raster")
        #Set Output Parameter
        gp.SetParameterAsText(4, gp.Describe(wtstable).CatalogPath)
        arcpy.AddMessage("Setting success parameter..")
        arcpy.SetParameterAsText(8, Success)

    except ErrorExit:
        Success = False  # Invalid Table: Error
        gp.SetParameterAsText(8, Success)
        print('Aborting wts calculation')
    except arcpy.ExecuteError as e:
        #TODO: Clean up all these execute errors in final version
        arcpy.AddError("\n")
        arcpy.AddMessage("Calculate weights caught arcpy.ExecuteError: ")
        if (len(e.args) > 0):
            args = e.args[0]
            for argLine in args.split('\n'):
                arcpy.AddError(argLine)

        arcpy.AddMessage("-------------- END EXECUTION ---------------")
        raise arcpy.ExecuteError

    except Exception as msg:
        # get the traceback object
        import sys
        import traceback
        gp.AddMessage(msg)
        errors = gp.GetMessages(2)

        # generate a message string for any geoprocessing tool errors
        msgs = "\n\nCW - GP ERRORS:\n" + gp.GetMessages(2) + "\n"
        gp.AddMessage("GPMEs: " + str(len(errors)) + " " + gp.GetMessages(2))
        if (len(errors) > 0):
            gp.AddError(msgs)

        tb = sys.exc_info()[2]

        # tbinfo contains the line number that the code failed on and the code from that line
        tbinfo = traceback.format_tb(tb)[0]
        # concatenate information together concerning the error into a message string
        pymsg = "CW - PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + \
            "\nError Info:\n    " + str(sys.exc_info()[1]) + "\n"

        # return gp messages for use with a script tool
        if (len(errors) < 1):
            gp.AddError(pymsg)

        # print messages for use in Python/PythonWin
        print(pymsg)
        print(msgs)
        # also surface the messages through arcpy before re-raising
        arcpy.AddError(pymsg)
        arcpy.AddError("ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n")
        raise
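# ErrorExit is raised and caught in Calculate() above but defined elsewhere in
# the arcsdm module; a minimal stand-in, assuming it is no more than a sentinel
# exception type:
class ErrorExit(Exception):
    pass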
#Enable editor tracking
try:
    arcpy.EnableEditorTracking_management(fc_name, "CREATEUSER", "CREATEDATE",
                                          "EDITUSER", "EDITDATE",
                                          "NO_ADD_FIELDS", time_format)
    arcpy.AddMessage("Sasquatch enabled editor tracking on " + fc_name)
except arcpy.ExecuteError:
    msgs = arcpy.GetMessages(2)
    arcpy.AddError(msgs)
except:
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(sys.exc_info()[1])
    msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages(2) + "\n"
    arcpy.AddError(pymsg)
    arcpy.AddError(msgs)
arcpy.SetParameterAsText(6, gdb_name)
Example #28
def main():

    toolbox = TaskRepository()

    # user defined values
    input = arcpy.GetParameterAsText(0)

    # parameter
    coordinates = []

    # database
    database = r'data.gdb'

    # read the request json
    inputJSON = toolbox.createJSONObject(input)
    profiles_open_route = ['driving-car', 'foot-walking']
    profile_set_open_route = profiles_open_route[1]

    # get the epsg
    for e in inputJSON['options']:
        poi = e['poi']
        profile = e['profile']
        epsg_in = e['epsg']['input']
        try:
            epsg_out = e['epsg']['output']
            arcpy.AddMessage(
                "output epsg is set, coordinates will be returned in epsg {}".
                format(epsg_out))
        except KeyError:
            arcpy.AddMessage(
                "no output epsg set, coordinates will be returned in epsg {}".
                format(epsg_in))
            epsg_out = epsg_in

    # the shapefile with the green spaces centroids
    #supported: green_areas, public_transport
    arcpy.AddMessage("poi {} is set".format(poi))
    green_points = os.path.join(database, poi)

    #set the open route profile
    for x in profiles_open_route:
        if profile in x or profile == x:
            profile_set_open_route = x

    # get the coordinates and transform if needed
    for c in inputJSON['coordinates']:
        id = str(c['id'])
        x = c['x']
        y = c['y']
        x_string = str(x)
        y_string = str(y)
        comma = ','
        x_set = x
        y_set = y

        if comma in x_string:
            x_set = toolbox.replaceComma(x_string)

        if comma in y_string:
            y_set = toolbox.replaceComma(y_string)

        coordinates.append({
            'id': id,
            'x': x_set,
            'y': y_set,
            'org_x': c['x'],
            'org_y': c['y']
        })

    # set the workspace
    toolbox.setWorkspace(4326)
    temp_shapefile = toolbox.createShape(coordinates, epsg_in)

    #generate the intersected green_points shape to speed up performance
    green_points_interect = toolbox.Intersect(
        green_points,
        toolbox.getConvexHullShapefile(temp_shapefile, "3000 Meters"))

    #create the nearest table
    out_table = os.path.join("in_memory", "nearest_table")
    arcpy.AddMessage("calc distances")
    nearest_tabel = arcpy.GenerateNearTable_analysis(temp_shapefile,
                                                     green_points_interect,
                                                     out_table, "",
                                                     "NO_LOCATION", "NO_ANGLE",
                                                     "ALL", "1", "PLANAR")

    array_poi = []
    cursor_poi = arcpy.da.SearchCursor(green_points_interect,
                                       ['OBJECTID', "SHAPE@X", "SHAPE@Y"])
    for row in cursor_poi:
        array_poi.append({'FID': row[0], 'X': row[1], 'Y': row[2]})

    array_points = []
    cursor_points = arcpy.da.SearchCursor(
        temp_shapefile,
        ['OID', 'ORG_X', 'ORG_Y', "POINT_X", "POINT_Y", 'ORG_ID'])
    for row in cursor_points:
        array_points.append({
            'FID': row[0],
            'ORG_X': row[1],
            'ORG_Y': row[2],
            'X': float(row[3]),
            'Y': float(row[4]),
            'ID': row[5]
        })

    #merge everything in one table to calc the distances
    merge_array = []
    cursor_table = arcpy.da.SearchCursor(nearest_tabel, ['IN_FID', 'NEAR_FID'])
    for row in cursor_table:
        merge_array.append({
            'StartPointX':
            toolbox.getPointByFID(row[0], 'X', array_points),
            'StartPointY':
            toolbox.getPointByFID(row[0], 'Y', array_points),
            'EndPointX':
            toolbox.getPointByFID(row[1], 'X', array_poi),
            'EndPointY':
            toolbox.getPointByFID(row[1], 'Y', array_poi),
            'ORG_X':
            toolbox.getPointByFID(row[0], 'ORG_X', array_points),
            'ORG_Y':
            toolbox.getPointByFID(row[0], 'ORG_Y', array_points),
            'ID':
            toolbox.getPointByFID(row[0], 'ID', array_points)
        })

    i = 0
    result = []
    for x in merge_array:
        i += 1
        #toolbox.getDistanceBing([StartPoint.Y,StartPoint.X], [EndPoint.Y,EndPoint.X],profile_set_google)
        array_open_route = toolbox.getDistanceRouteOpenRouteService(
            [x['StartPointX'], x['StartPointY']],
            [x['EndPointX'], x['EndPointY']], profile_set_open_route)
        Endpoint = toolbox.transformPoint(float(x['EndPointX']),
                                          float(x['EndPointY']), 4326,
                                          int(epsg_out))
        Endpoint = [str(Endpoint.X), str(Endpoint.Y)]
        Start_key = ''
        #transform back in origin epsg or if set in user choice epsg
        if epsg_in != epsg_out:
            Startpoint = toolbox.transformPoint(float(x['StartPointX']),
                                                float(x['StartPointY']), 4326,
                                                int(epsg_out))
            Startpoint = [str(Startpoint.X), str(Startpoint.Y)]
            Start_key = '"startpoint":{"x":"' + Startpoint[
                0] + '","y":"' + Startpoint[1] + '"},'

        result.append({
            "x":
            str(x['ORG_X']),
            "y":
            str(x['ORG_Y']),
            "id":
            str(x['ID']),
            "values": [{
                "endpoint": [{
                    "x": str(Endpoint[0]),
                    "y": str(Endpoint[1])
                }],
                "distance_open_route": [{
                    "value": str(array_open_route[0]),
                    "unit": "m"
                }],
                "duration_open_route": [{
                    "value": str(array_open_route[1]),
                    "unit": "s"
                }]
            }]
        })
    toolbox.deleteWorkspace()
    arcpy.SetParameterAsText(1, json.dumps(result))
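# A minimal sketch of the request JSON that the tool above appears to expect,
# inferred only from the keys it reads ('options' with poi/profile/epsg,
# 'coordinates' with id/x/y); the concrete values and EPSG codes below are
# illustrative assumptions, not taken from the original source.
example_request = {
    "options": [{
        "poi": "green_areas",          # or "public_transport"
        "profile": "foot-walking",     # or "driving-car"
        "epsg": {"input": 25832, "output": 4326}   # "output" is optional
    }],
    "coordinates": [
        {"id": 1, "x": "606851,3", "y": "5793563,1"}  # comma decimals are handled
    ]
}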
Example #29
thisScriptPath = os.path.dirname(pyScript)
rScriptPath = os.path.join(
    os.path.abspath(os.path.join(thisScriptPath, os.pardir)), "R")
rScript = os.path.join(rScriptPath, "excludeDataSources.r")

# prepare communication
arcpy.SetProgressor("default", "Executing R Script...")
args = [
    "R", "--slave", "--vanilla", "--args", provDat, ModelDataFolderName,
    rScriptPath
]

# run R-script
scriptSource = open(rScript, 'r')
rCommand = subprocess.Popen(args,
                            stdin=scriptSource,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            shell=True)

# get console prints, warnings and errors
outString, errString = rCommand.communicate()
scriptSource.close()

# send warnings and errors to ArcGIS
if errString and "...completed execution of R-script" not in outString:
    arcpy.AddMessage(errString)

# send parameter to ArcGIS
arcpy.SetParameterAsText(2, provDat)
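# A possible hardening of the output handling above (an assumption, not part of
# the original tool): under Python 3, communicate() returns bytes, so the
# substring test against a str raises TypeError; decoding first and checking
# the return code makes failures explicit.
out_text = outString.decode("utf-8", errors="replace") if isinstance(outString, bytes) else outString
err_text = errString.decode("utf-8", errors="replace") if isinstance(errString, bytes) else errString
if rCommand.returncode != 0:
    arcpy.AddError(err_text)
elif err_text and "...completed execution of R-script" not in out_text:
    arcpy.AddMessage(err_text)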
Example #30
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        outputPrinter(message="error on line: %s" % line,typeOfMessage='error')
        outputPrinter(message="error in file name: %s" % filename,typeOfMessage='error')
        outputPrinter(message="with error message: %s" % synerror,typeOfMessage='error')
        outputPrinter(message="ArcPy Error Message: %s" % arcpy.GetMessages(2),typeOfMessage='error')
        arcpy.SetParameterAsText(7, "false")
    except common.ArcRestHelperError as e:
        outputPrinter(message=e,typeOfMessage='error')
        arcpy.SetParameterAsText(7, "false")
    except:
        line, filename, synerror = trace()
        outputPrinter(message="error on line: %s" % line,typeOfMessage='error')
        outputPrinter(message="error in file name: %s" % filename,typeOfMessage='error')
        outputPrinter(message="with error message: %s" % synerror,typeOfMessage='error')
        arcpy.SetParameterAsText(7, "false")
    finally:
        existingDef = None
        userName = None
        password = None
        org_url = None
        fsId = None
        layerNames = None
        layerName = None
        sql = None
        fst = None
        fs = None
        results = None
        fl = None

        del existingDef