def calc_CO2_emissions():

    # Get the value of the input parameter
    inputFC = arcpy.GetParameterAsText(0)  # Polyline feature class
    wildlife_units = arcpy.GetParameterAsText(
        1
    )  # Field from input FC representing number of wildlife units transferred
    capacity_per_trip = arcpy.GetParameter(
        2)  # Transportation capacity (wildlife units per trip)
    CO2_emission = arcpy.GetParameter(3)  # Amount of CO2 emission (kg/unit)
    scenarioFC = arcpy.GetParameterAsText(
        4)  # Scenario: Polyline feature class
    wildlife_units_scn = arcpy.GetParameterAsText(
        5
    )  # Field from scenario FC representing number of wildlife units transferred
    capacity_per_trip_scn = arcpy.GetParameter(
        6)  # Scenario: Transportation capacity (wildlife units per trip)
    CO2_emission_scn = arcpy.GetParameter(
        7)  # Scenario: Amount of CO2 emission (kg/unit)

    try:
        # create empty list to append all output layers
        outList = []

        ### ADD FIELD: creating new field to store CO2 total emission per trip ###
        arcpy.AddMessage(
            'Adding CO2 Emission Field to Input Feature Class ...')
        arcpy.SetProgressorLabel(
            'Adding CO2 Emission Field to Input Feature Class ...')

        # add new CO2 emission field
        arcpy.AddField_management(in_table=inputFC,
                                  field_name="CO2_EMISSIONS_KG",
                                  field_type="LONG")

        ### CALCULATE FIELD: computing total CO2 emission per trip ###
        arcpy.AddMessage('Calculating CO2 Emissions for Each Flow ...')
        arcpy.SetProgressorLabel('Calculating CO2 Emissions for Each Flow ...')

        tot_emissions = 0
        with arcpy.da.UpdateCursor(
                inputFC,
            ['SHAPE@LENGTH', wildlife_units, 'CO2_EMISSIONS_KG']) as cursor:
            for row in cursor:
                total_trips = math.ceil(float(row[1]) / capacity_per_trip)
                # SHAPE@LENGTH will likely be in meters (depending on the coordinate system)
                row[2] = row[0] * total_trips * CO2_emission
                tot_emissions += row[2]
                cursor.updateRow(row)

        outList.append(inputFC)
        arcpy.AddMessage(
            "The current scenario produces an estimated amount of CO2 equal to: "
            + str(tot_emissions) + " kilograms")

        if scenarioFC and scenarioFC != "#":
            ### ADD FIELD: creating new field to store CO2 total emission per trip ###
            arcpy.AddMessage(
                'Adding CO2 Emission Field to Scenario Feature Class ...')
            arcpy.SetProgressorLabel(
                'Adding CO2 Emission Field to Scenario Feature Class ...')

            # add new CO2 emission field
            arcpy.AddField_management(in_table=scenarioFC,
                                      field_name="CO2_EMISSIONS_KG",
                                      field_type="LONG")

            ### CALCULATE FIELD: computing total CO2 emission per trip ###
            arcpy.AddMessage(
                'Calculating CO2 Emissions for Each Flow in Scenario Feature Class...'
            )
            arcpy.SetProgressorLabel(
                'Calculating CO2 Emissions for Each Flow in Scenario Feature Class...'
            )

            tot_emissions_scn = 0
            with arcpy.da.UpdateCursor(
                    scenarioFC,
                ['SHAPE@LENGTH', wildlife_units_scn, 'CO2_EMISSIONS_KG'
                 ]) as cursor:
                for row in cursor:
                    total_trips_scn = math.ceil(
                        float(row[1]) / capacity_per_trip_scn)
                    # SHAPE@LENGTH will likely be in meters (depending on the coordinate system)
                    row[2] = row[0] * total_trips_scn * CO2_emission_scn
                    tot_emissions_scn += row[2]
                    cursor.updateRow(row)

            outList.append(scenarioFC)
            arcpy.AddMessage(
                "The future scenario produces an estimated amount of CO2 equal to: "
                + str(tot_emissions_scn) + " kilograms")

            diff_tot_emissions = tot_emissions_scn - tot_emissions
            if diff_tot_emissions > 0:
                arcpy.AddMessage(
                    "The future scenario will increase the estimated amount of CO2 by: "
                    + str(diff_tot_emissions) + " kilograms")
            elif diff_tot_emissions < 0:
                arcpy.AddMessage(
                    "The future scenario will decrease the estimated amount of CO2 by: "
                    + str(abs(diff_tot_emissions)) + " kilograms")
            else:
                arcpy.AddMessage(
                    "The future scenario will produce no change in the estimated amount of CO2!"
                )

        #### Set Parameters ####
        results = ";".join(outList)
        arcpy.SetParameterAsText(8, results)

    except Exception:
        e = sys.exc_info()[1]
        arcpy.AddError('An error occurred: {}'.format(e.args[0]))
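
For reference, the per-row arithmetic inside both update-cursor loops above reduces to one expression. A minimal standalone sketch (the function name and sample numbers are illustrative, not part of the tool):

import math

def estimate_co2_kg(length_m, units, capacity_per_trip, kg_per_unit):
    # A partial load still requires a full trip, so round the trip count up.
    trips = math.ceil(float(units) / capacity_per_trip)
    return length_m * trips * kg_per_unit

# A 12 km flow line moving 25 units at 10 units per trip takes 3 trips:
print(estimate_co2_kg(12000, 25, 10, 0.001))  # 36.0 kg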
Example #2
arcpy.AddFields_management(final_output_fc_path, stats_tbl_fields)

stats_table_fields_list = [f.name for f in stats_table_fields]
stats_table_fields_list.insert(0, 'SHAPE@')
# final_outfc_fields = ','.join(stats_table_fields_list)

cnt = int(arcpy.GetCount_management(in_mem_stats_tbl)[0])
arcpy.SetProgressor('step',
                    f'Inserting {cnt} rows into output feature class ...', 0,
                    cnt, 1)
# add features with geometry to the output feature class
counter = 1
with arcpy.da.SearchCursor(in_mem_stats_tbl, '*') as cursor:
    for row in cursor:
        arcpy.SetProgressorPosition(counter)
        arcpy.SetProgressorLabel(f'Inserting row {counter} of {cnt} ...')

        stats_cursor_fields = cursor.fields
        search_val = row[stats_cursor_fields.index(in_pxw_join_field)]

        if in_should_transform_fields and in_transform_fields.rowCount > 0:
            for i in range(0, in_transform_fields.rowCount):
                find_val = in_transform_fields.getValue(i, 0)
                rep_val = in_transform_fields.getValue(i, 1)

                if find_val and rep_val == 'None':
                    search_val = search_val.replace(find_val, '')
                elif find_val and rep_val == '':
                    search_val = f'{find_val}{search_val}'
                elif find_val == '' and rep_val:
                    search_val = f'{search_val}{rep_val}'
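
For reference, the three find/replace branches above reduce to a small pure function. This is only a sketch of that mapping (function name hypothetical); the comparisons against 'None' and '' appear to reflect how a geoprocessing value table hands back an empty replace cell versus an empty find cell:

def transform_value(value, find_val, rep_val):
    if find_val and rep_val == 'None':
        return value.replace(find_val, '')  # strip the find text
    if find_val and rep_val == '':
        return f'{find_val}{value}'         # prefix with the find text
    if find_val == '' and rep_val:
        return f'{value}{rep_val}'          # append the replace text
    return value

print(transform_value('PXW_123', 'PXW_', 'None'))  # -> '123'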
Example #3
import os, glob, ntpath, arcgis, arcpy, sys, time, timeit, datetime
from arcgis.gis import GIS
from arcgis.raster.analytics import create_image_collection


arcpy.SetProgressorLabel("Attempting to login...")
#import parameters
portal_url = arcpy.GetParameterAsText(0)
portal_username = arcpy.GetParameterAsText(1)
portal_password = arcpy.GetParameterAsText(2)
prj_name = arcpy.GetParameterAsText(3)
local_image_folder_path = arcpy.GetParameterAsText(4)

#Login
starttime = time.time()
try:
    gis = GIS(url=portal_url, username=portal_username, password=portal_password)
    gis_user = gis.users.me
except Exception:
    e = sys.exc_info()[1]
    print(e.args[0])
    arcpy.AddError(e.args[0])
    arcpy.AddError("Program came to an abrupt end; please correct the error reported above and try again.")
    sys.exit(1)
endtime = time.time()
arcpy.AddMessage("Logging in took: {} seconds".format(round(endtime - starttime,2)))


gis_user_folders = gis_user.folders
gis_user_folderTitles = [f.get("title") for f in gis_user_folders]

Example #4
            for row in rows:
                updateDict[musym] = str(row[0])
        del row, rows, wc

    #for k,v in updateDict.iteritems():
    #arcpy.AddMessage(k + v)
    aCnt = len(updateDict)

    arcpy.SetProgressor("Step", "Initializing tool", 0, aCnt, 1)

    c = 0

    for key in updateDict:
        time.sleep(0.05)
        c += 1
        arcpy.SetProgressorLabel("Updating " + key + " (" + str(c) + " of " +
                                 str(aCnt) + ")")
        upVal = updateDict.get(key)
        if len(upVal) > 6:
            arcpy.AddWarning('Illegal value for ' + key +
                             ', greater than 6 characters (' + upVal + ')')
            arcpy.SetProgressorPosition()
        elif upVal == 'None':
            arcpy.AddWarning('No update value specified for ' + key)
            arcpy.SetProgressorPosition()
        else:
            n = 0
            wc = '"AREASYMBOL" = ' "'" + areaParam + "' AND \"MUSYM\" = '" + key + "'"

            with arcpy.da.Editor(aWS) as edit:
                ##
    for interp in interpLst:

        outTbl = arcpy.ValidateTableName(interp)
        outTbl = outTbl.replace("__", "_")
        tblName = 'tbl_' + outTbl + aggMod

        if interp.find("{:}") <> -1:
            interp = interp.replace("{:}", ";")

        # for eSSA in areaList:
        for state in states:
            p = [x for x in areaList if x[:2] == state]
            theReq = ",".join(map("'{0}'".format, p))
            n = n + 1
            arcpy.SetProgressorLabel('Collecting ' + interp + ' for: ' +
                                     state + " (" + str(n) + ' of ' +
                                     str(jobCnt) + ')')

            #send the request
            intrpLogic, intrpData, intrpMsg = getIntrps(
                interp, theReq, aggMethod)

            #if it was successful...
            if intrpLogic:
                if len(intrpData) == 0:
                    AddMsgAndPrint(
                        'No records returned for ' + state + ': ' + interp, 1)
                    failInterps.append(state + ":" + interp)
                else:
                    AddMsgAndPrint('Response for ' + interp + ' on ' + state +
                                   ' = ' + intrpMsg)
Example #6
def deleteRows(source, targetUrl, expr):
    # delete Rows using chunks of _chunkSize
    retval = False
    error = False
    # delete section
    ids = getOIDs(targetUrl, expr)
    try:
        lenDeleted = 100
        #Chunk deletes using chunk size at a time
        rowsProcessed = 0
        numFeat = len(ids)
        if numFeat == 0:
            dla.addMessage("0 Rows to Delete, exiting")
            return True  # nothing to delete is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        arcpy.SetProgressor("default", "Deleting Rows")
        while rowsProcessed < numFeat and error == False:
            #Chunk deletes using chunk size at a time
            next = rowsProcessed + chunk
            msg = "Deleting rows " + str(rowsProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            oids = ",".join(str(e) for e in ids[rowsProcessed:next])
            url = targetUrl + '/deleteFeatures'
            token = getSigninToken()
            params = {'f': 'pjson', 'objectIds': oids, 'token': token}
            result = sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Delete rows from Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenDeleted = len(result['deleteResults'])
                    total = rowsProcessed + chunk
                    if total > numFeat:
                        total = numFeat
                    msg = str(lenDeleted) + " rows deleted, " + str(
                        total) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    error = True
                    dla.showTraceback()
                    dla.addMessage("Delete rows from Service failed")
                    dla.addError(json.dumps(result))
            rowsProcessed += chunk
    except:
        retval = False
        error = True
        dla.showTraceback()
        dla.addMessage("Delete rows from Service failed")
        pass

    return retval
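
Both this delete routine and the matching add routine in a later example page their requests in fixed-size chunks via list slicing. That pattern can be factored into a small helper; a sketch under the assumption of a plain Python list of IDs (helper name hypothetical):

def iter_chunks(items, size):
    # Yield successive slices of at most `size` items.
    for start in range(0, len(items), size):
        yield items[start:start + size]

ids = list(range(250))
for batch in iter_chunks(ids, 100):
    oids = ",".join(str(i) for i in batch)  # one deleteFeatures request per batch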
Example #7
def main():

    try:
        # Error checking and argument fetching
        if arcpy.GetArgumentCount() < 1:
            raise InvalidArgumentError("Required argument missing")

        # Set up the tool's parameters
        paramIndex = 0
        jobsTable = arcpy.GetParameterAsText(paramIndex)
        paramIndex += 1
        sqlQuery = arcpy.GetParameterAsText(paramIndex)
        paramIndex += 1
        wmxDbAlias = arcpy.GetParameterAsText(paramIndex)
        paramIndex += 1

        # Import the Workflow Manager toolbox
        wmxToolbox = getWorkflowManagerToolboxLocation()
        arcpy.ImportToolbox(wmxToolbox, "WMXAdminUtils")

        # Get the list of jobs matching the query
        result = arcpy.ListJobs_WMXAdminUtils(jobsTable, sqlQuery, wmxDbAlias)
        logPreviousToolMessages()
        numOutputs = result.outputCount

        if numOutputs <= 0:
            return
        
        # Output is a semicolon-delimited list of job IDs, so split up the
        # list, as required.
        jobListString = result.getOutput(0)
        if jobListString == None or len(jobListString) <= 0:
            arcpy.AddMessage("No jobs matched query")
            return

        jobsToDelete = jobListString.split(";")
        arcpy.AddMessage("Jobs to delete: " + str(jobListString))

        # Set up the progress bar
        arcpy.SetProgressor("step", "Deleting jobs...", 0, len(jobsToDelete), 1)

        # Delete each job
        jobCount = 0
        jobsDeleted = []
        for job in jobsToDelete:
            arcpy.SetProgressorLabel("Deleting job " + str(job))
            arcpy.DeleteJob_WMXAdminUtils(job, wmxDbAlias)
            logPreviousToolMessages()
            jobCount += 1
            jobsDeleted.append(job)
            
            arcpy.SetProgressorPosition(jobCount)

        # Set the return value for this tool (a multivalue containing the list of IDs
        # for the jobs that were deleted)
        jobsDeletedStr = ""
        for jobId in jobsDeleted:
            jobsDeletedStr += jobId + ";"

        jobsDeletedStr = jobsDeletedStr.rstrip(";")
        arcpy.SetParameterAsText(paramIndex, jobsDeletedStr)
        arcpy.AddMessage("Deleted jobs: " + jobsDeletedStr)

    except Exception as ex:
        arcpy.AddError("Caught exception: " + str(ex))
Example #8
result = workspace + "\\HAND"

drainage_net = workspace + "\\" + "drainage_net"
actual_drainage_network = workspace + "\\actual_dr"

riverf_shp = workspace + "\\" + "drainage_pt.shp"
riverfelevated_shp = workspace + "\\" + "drain_pt_val.shp"

start_of_drainages = "p_o_start"
start_of_drainages2 = "p_o_start2"
start_of_drainages_fullpath = workspace + "\\" + start_of_drainages + ".shp"
start_of_drainages2_fullpath = workspace + "\\" + start_of_drainages2 + ".shp"

########################################
arcpy.SetProgressor("step", "propering layers...", 0, 6, 1)
arcpy.SetProgressorLabel("message")
arcpy.SetProgressorPosition(1)
#############################################

arcpy.AddMessage("filling dem raster")
outFill = Fill(demRaster)
outFill.save(filled)


def sorter(myset):
    myset2 = {}
    for x in myset:
        myset2[x] = x
    import operator
    sorted_x = sorted(myset2.items(), key=operator.itemgetter(1))
    sorted2 = []
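
Nearly every snippet on this page follows the same step-progressor idiom: initialize with SetProgressor, update the label and position inside the loop, then reset. A minimal self-contained sketch of that pattern (meant to run inside a script tool; the item names are placeholders):

import time
import arcpy

items = ["roads", "rivers", "parcels"]
arcpy.SetProgressor("step", "Processing layers...", 0, len(items), 1)
for i, item in enumerate(items, start=1):
    arcpy.SetProgressorLabel("Processing {} ({} of {})".format(item, i, len(items)))
    time.sleep(0.1)  # stand-in for real work
    arcpy.SetProgressorPosition(i)
arcpy.ResetProgressor()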
Example #9
def main(argv=None):
    # main function - list the source and target datasets, then delete rows/append where there is a match on non-prefixed name
    dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    logname = os.path.join(outputFolder, 'gzCreateProjectFiles.log')
    gzSupport.startLog()

    success = True
    try:

        gzSupport.addMessage("Getting list of datasets for Target " +
                             targetGDB)
        targets = gzSupport.listDatasets(targetGDB)
        tNames = targets[0]
        tFullNames = targets[1]

        gzSupport.addMessage("Getting list of datasets for Source " +
                             sourceGDB)
        sources = gzSupport.listDatasets(sourceGDB)
        sNames = sources[0]
        sFullNames = sources[1]

        t = 0
        arcpy.SetProgressor("Step", "Creating Files...", 0, len(tNames), 1)

        for name in tNames:
            arcpy.SetProgressorPosition(t)
            arcpy.SetProgressorLabel("Creating file for " + name + "...")
            # for each source name
            if debug:
                gzSupport.addMessage(name)
            try:
                # look for the matching name in target names
                s = sNames.index(name)
            except:
                # will get here if no match
                s = -1
            if s > -1:
                # create file if there is a match
                fileName = outputFolder + os.sep + prefixStr + name.title() + ".xml"
                if os.path.exists(fileName):
                    os.remove(fileName)
                try:
                    #arcpy.AddToolbox(os.path.join(dir,"Gizinta.tbx"))
                    #arcpy.gzCreateProject_gizinta(sFullNames[s],tFullNames[t],fileName) # this doesn't always work...
                    gzCreateProject.createGzFile(sFullNames[s], tFullNames[t],
                                                 fileName)
                    retVal = True
                    gzSupport.addMessage("Created " + fileName)
                except:
                    retVal = False
                if retVal == False:
                    gzSupport.addMessage("Failed to create file for " + name)
                    gzSupport.showTraceback()
                    success = False
            else:
                gzSupport.addMessage("Skipping " + name)
            t = t + 1
    except:
        gzSupport.showTraceback()
        arcpy.AddError("Error creating project files")
        success = False

    finally:
        arcpy.ResetProgressor()
        arcpy.SetParameter(gzSupport.successParameterNumber, success)
        arcpy.env.workspace = targetGDB
        arcpy.RefreshCatalog(outputFolder)
        gzSupport.closeLog()
Example #10
def calc_CO2_emissions(flows_lyr):

    # Local variable:
    out_flows_CO2_fc = os.path.join(arcpy.env.scratchGDB, "CO2Emissions_fc")
    out_CO2_Name = "CO2 Emissions"

    try:
        #Create feature class from copy of input feature layer
        arcpy.AddMessage('Creating Feature Class from Input Feature Layer ...')
        arcpy.SetProgressorLabel(
            'Creating Feature Class from Input Feature Layer ...')
        arcpy.CopyFeatures_management(flows_lyr, out_flows_CO2_fc)

        ### ADD FIELD: creating new field to store CO2 total emission per trip ###
        arcpy.AddMessage('Adding CO2 Emission Field to Feature Class ...')
        arcpy.SetProgressorLabel(
            'Adding CO2 Emission Field to Feature Class ...')
        # add new CO2 emission field
        arcpy.AddField_management(in_table=out_flows_CO2_fc,
                                  field_name="CO2_EMISSIONS_KG",
                                  field_type="LONG")

        ### CALCULATE FIELD: creating new field to store CO2 total emission per trip ###
        arcpy.AddMessage('Calculating CO2 Emissions for Each Flow Line ...')
        arcpy.SetProgressorLabel(
            'Calculating CO2 Emissions for Each Flow Line ...')
        tot_emissions = 0
        ## Is there a field in the input feature layer representing quantity of flows?
        ## If not, assign a default value == 1 to the flow as units.
        if flow_units != '':
            #Check user input to make sure the transport units field specified matches one of the attributes of the inputFC
            fieldnames = [f.name for f in arcpy.ListFields(out_flows_CO2_fc)]
            if flow_units not in fieldnames:
                arcpy.AddError(
                    "ERROR: The chosen transportation units attribute does not exist in the input layer!"
                )
                raise arcpy.ExecuteError

            with arcpy.da.UpdateCursor(
                    out_flows_CO2_fc,
                    ['SHAPE@LENGTH', flow_units, 'CO2_EMISSIONS_KG']) as cursor:
                for row in cursor:
                    # Skip rows with missing or non-numeric flow values
                    if row[1] is None or str(row[1]).upper() in ("N/A", r"N\A", "NA"):
                        continue
                    total_trips = math.ceil(float(row[1]) / capacity_per_trip)
                    # SHAPE@LENGTH will likely be in meters (depending on the coordinate system)
                    row[2] = row[0] * total_trips * CO2_emission
                    tot_emissions += row[2]
                    cursor.updateRow(row)

            #Export feature layer to CSV##
            arcpy.AddMessage(
                'Exporting Flows CO2 Emissions Layer to CSV Table ...')
            outTable_CSV = os.path.join(arcpy.env.scratchFolder,
                                        "CO2_Emission_Table.csv")
            ExportToCSV(fc=out_flows_CO2_fc, output=outTable_CSV)

        #arcpy.AddMessage('Writing Total Estimated CO2 to Output Report File ...')
        #arcpy.SetProgressorLabel('Writing Total Estimated CO2 to Output Report File ...')
        #out_txt = os.path.join(arcpy.env.scratchFolder,"CO2_Report.txt")
        #file = open(out_txt,"w")
        #file.write("The current scenario produces a total estimated amount of released CO2 equal to: " + #str(tot_emissions) + " kilograms")
        #file.close()
        #arcpy.AddMessage("The current scenario produces a total estimated amount of released CO2 equal to: " + str(tot_emissions) + " kilograms")

        # Process: Create a feature layer from the joined feature class to send back as output to GP tools
        out_fl = arcpy.MakeFeatureLayer_management(out_flows_CO2_fc,
                                                   out_CO2_Name)

        # Execute FeatureClassToGeodatabase
        arcpy.AddMessage("Converting Feature Class to Shapefile...")
        arcpy.FeatureClassToShapefile_conversion(out_flows_CO2_fc,
                                                 arcpy.env.scratchFolder)

        #### Set Parameters ####
        arcpy.SetParameter(11, out_fl)
        #arcpy.SetParameter(5, out_txt)
        arcpy.SetParameter(12, outTable_CSV)

    except Exception:
        e = sys.exc_info()[1]
        arcpy.AddError('An error occurred: {}'.format(e.args[0]))
# calculatexy.py
# Created on: 2014-07-22 17:07:58.00000
#   (generated by Xuebin Wei)
# Description: 
# ---------------------------------------------------------------------------

import arcpy

md = arcpy.GetParameterAsText(0)
arcpy.env.workspace = md
tables = arcpy.ListTables()

arcpy.SetProgressor("step", "Adding X,Y to table...",0, 35, 1)

for table in tables:
	arcpy.SetProgressorLabel("Calculating table "+ str(table))
	if table == "aotizhongxin":
		x = 116.395001
		y = 39.985994
	elif table == "dingling":
		x = 116.24608
		y = 40.296149
	elif table == "changpin":
		x = 116.2264
		y = 40.224198
	elif table == "nongzhanguan":
		x = 116.464188
		y = 39.933936
	elif table == "daxing":
		x = 116.339936
		y = 39.736666
    def execute(self, pParams):
        sOK = apwrutils.C_OK
        (pFLAddress, pTSTbl, pTSTblSum) = pParams
        pEditor = None
        pTableView = ""
        try:
            pWorkspace = apwrutils.Utils.getWorkspace(pTSTbl)
            oDesc = arcpy.Describe(pTSTbl)
            sName = oDesc.name
            sQT = ""
            lFlds = arcpy.ListFields(pTSTbl, apwrutils.FN_FEATUREID)
            if (len(lFlds) == 0):
                arcpy.AddError("required field {} does not exist.".format(
                    apwrutils.FN_FEATUREID))
                sOK = apwrutils.C_NOTOK
                return (sOK, )
            pFld = lFlds[0]
            sType = pFld.type
            if (sType == 'String' or sType == 'Date'):
                sQT = "'"
            ds = time.clock()
            if (pTSTblSum == ""):
                pTSTblSum = os.path.join(pWorkspace, "{}_sum".format(sName))
            if (arcpy.Exists(pTSTblSum) == False):
                pTSTblSum = os.path.join(pWorkspace, "{}_sum".format(sName))
                arcpy.Statistics_analysis(in_table=pTSTbl,
                                          out_table=pTSTblSum,
                                          statistics_fields="FeatureID COUNT",
                                          case_field="FeatureID")
                arcpy.AddMessage("Create/pouplate {} table".format(
                    pTSTblSum, apwrutils.Utils.GetDSMsg(ds)))

            sTblWithCnt = "{}_WithCnt".format(sName)
            pTblWithCnt = os.path.join(pWorkspace, sTblWithCnt)
            pTableView = sTblWithCnt
            ds = time.clock()
            if (arcpy.Exists(pTblWithCnt)):
                arcpy.Delete_management(pTblWithCnt)
                arcpy.AddMessage("Deleting {}.  dt={}".format(
                    pTblWithCnt, apwrutils.Utils.GetDSMsg(ds)))

            arcpy.CreateTable_management(pWorkspace, sTblWithCnt)
            if (sQT == "'"):
                arcpy.AddField_management(pTblWithCnt, apwrutils.FN_FEATUREID,
                                          "TEXT", "", "", 30)
            else:
                arcpy.AddField_management(pTblWithCnt, apwrutils.FN_FEATUREID,
                                          "LONG")

            arcpy.AddField_management(pTblWithCnt, "H_CM", "LONG")
            arcpy.AddField_management(pTblWithCnt, FN_ADDRCNT, "LONG")
            arcpy.AddMessage("Creating {}.  dt={}".format(
                pTblWithCnt, apwrutils.Utils.GetDSMsg(ds)))

            nCnt = int(arcpy.GetCount_management(pTSTblSum)[0])
            nMod = 1
            if (nCnt > 100):
                nMod = int(nCnt / 99)
            ds1 = time.clock()
            nAdded = 0
            arcpy.SetProgressor('step',
                                'Processing 0 of {} records.'.format(nCnt), 0,
                                nCnt, nMod)
            arcpy.SetProgressorPosition(0)
            dHCnt = {}
            pEditor = arcpy.da.Editor(pWorkspace)
            pEditor.startEditing(False, False)
            #sIDs = ""

            with arcpy.da.InsertCursor(
                    pTblWithCnt,
                [apwrutils.FN_FEATUREID, "H_CM", FN_ADDRCNT]) as inRows:
                with arcpy.da.SearchCursor(pTSTblSum,
                                           [apwrutils.FN_FEATUREID]) as rows:
                    for i, row in enumerate(rows):
                        if (i % nMod) == 0:
                            nn = arcpy.GetCount_management(pTblWithCnt)[0]
                            sMsg = "Process {} of {} features. {} recs added to {}. dt={} ".format(
                                i + 1, nCnt, nn, pTblWithCnt,
                                apwrutils.Utils.GetDSMsg(ds1))
                            arcpy.SetProgressorLabel(sMsg)
                            arcpy.AddMessage(sMsg)
                            arcpy.SetProgressorPosition(i + 1)
                            ds1 = time.clock()

                        sFeatureID = row[0]
                        sWhere = "{} = {}{}{} and {} >=0 ".format(
                            apwrutils.FN_FEATUREID, sQT, sFeatureID, sQT,
                            "H_CM")
                        tblView = "tbl{}".format(sFeatureID)
                        if (arcpy.Exists(tblView)):
                            arcpy.Delete_management(tblView)
                        arcpy.MakeTableView_management(pTSTbl, tblView, sWhere)
                        dHCnt = dict()
                        with arcpy.da.UpdateCursor(
                                tblView, ["H_CM", FN_ADDRCNT]) as upRows:
                            for upRow in upRows:
                                intH = upRow[0]
                                try:
                                    if (intH in dHCnt) == False:
                                        nAddCnt = 0
                                        try:
                                            sWhereTS = "{} = {} and {} <= {}".format(
                                                "STATION_ID", sFeatureID,
                                                "HAND_cm", intH)
                                            thmName = "thm{}".format(intH)
                                            if (arcpy.Exists(thmName)):
                                                arcpy.Delete_management(
                                                    thmName)
                                            arcpy.MakeFeatureLayer_management(
                                                pFLAddress, thmName, sWhereTS)
                                            nAddCnt = int(
                                                arcpy.GetCount_management(
                                                    thmName)[0])
                                            pRowCnt = (sFeatureID, intH,
                                                       nAddCnt)
                                            arcpy.AddMessage(
                                                "sWhereTS: {} nCnt: {}".format(
                                                    sWhereTS, pRowCnt))
                                            dHCnt.setdefault(intH, nAddCnt)
                                            #if(sIDs==""):
                                            #    sIDs = sFeatureID
                                            #else:
                                            #    sIDs = "{},{}".format(sIDs, sFeatureID)
                                            if (nAddCnt > 0):
                                                nAdded = nAdded + 1
                                                #arcpy.AddMessage("Added={} nThisCnt={}".format(nAdded, nAddCnt))
                                                #arcpy.AddMessage(sIDs)
                                                inRows.insertRow(pRowCnt)
                                                upRow[1] = nAddCnt
                                                upRows.updateRow(upRow)
                                                arcpy.Delete_management(
                                                    thmName)
                                                arcpy.AddMessage(
                                                    "Added={} nThisCnt={}".
                                                    format(nAdded, nAddCnt))
                                        except:
                                            sMsg = "{} {}".format(
                                                arcpy.GetMessages(2), trace())
                                            arcpy.AddMessage(sMsg)
                                        finally:
                                            dHCnt.setdefault(intH, nAddCnt)

                                    else:
                                        nAddCnt = dHCnt[intH]
                                        if (nAddCnt > 0):
                                            upRow[1] = nAddCnt
                                            upRows.updateRow(upRow)
                                except:
                                    pass

            arcpy.MakeTableView_management(pTblWithCnt, pTableView)
        except:
            sMsg = "{} {}".format(arcpy.GetMessages(2), trace())
            arcpy.AddError(sMsg)
            sOK = apwrutils.C_NOTOK
        finally:
            if (pEditor != None):
                pEditor.stopEditing(True)

        return (sOK, pTableView)
Example #13
import arcpy
import sixbynine
import time

msg1 = arcpy.GetIDMessage("dt1")
msg2 = arcpy.GetIDMessage("dt2")
msg3 = arcpy.GetIDMessage("dt3")

arcpy.AddWarning(arcpy.GetIDMessage("dt1") + arcpy.GetIDMessage("dt2"))

arcpy.SetProgressor("step", "", 0, 100, 1)

prog_msg = arcpy.GetIDMessage("dt3") + arcpy.GetIDMessage("dt2")

for i in range(1, 10):
    arcpy.SetProgressorLabel(f"{i} {prog_msg}")
    time.sleep(1)

arcpy.SetParameter(0, sixbynine.compute())
            comFC = os.path.join(
                "IN_MEMORY",
                "xxComLines")  # temporary featureclass containing all lines

            # Read soils layer to get polygon OID and associated attribute value,
            # load this information into 'dAtt' dictionary. This will be used to
            # populate the left and right attributes of the new polyline featureclass.

            dAtt = dict()
            theFields = ["OID@", fld1Name]

            if AS == "*":
                # Processing entire layer instead of by AREASYMBOL
                fldQuery = ""
                AS = descInput['baseName']
                arcpy.SetProgressorLabel("Processing Common Lines for " + AS)

            else:
                # Processing just this AREASYMBOL
                fldQuery = arcpy.AddFieldDelimiters(
                    comFC, fld2Name) + " = '" + AS + "'"
                arcpy.SetProgressorLabel(AS + ": " + str(iCnt) + " of " +
                                         str(numOfareasymbols))

            # Isolate the features that pertain to a specific areasymbol
            arcpy.MakeFeatureLayer_management(inputFC, selLayer, fldQuery)
            numOfASfeatures = int(
                arcpy.GetCount_management(selLayer).getOutput(0))

            # process this survey
            if numOfASfeatures > 0:
Example #15
def publish(xmlFileNames):
    # function called from main or from another script, performs the data update processing
    global _useReplaceSettings
    dla._errCount = 0

    arcpy.SetProgressor("default","Data Assistant")
    arcpy.SetProgressorLabel("Data Assistant")
    xmlFiles = xmlFileNames.split(";")
    layers = []

    for xmlFile in xmlFiles: # multi value parameter, loop for each file
        xmlFile = dla.getXmlDocName(xmlFile)
        dla.addMessage("Configuration file: " + xmlFile)
        xmlDoc = dla.getXmlDoc(xmlFile) # parse the xml document
        if xmlDoc == None:
            return
        prj = dla.setProject(xmlFile,dla.getNodeValue(xmlDoc,"Project"))
        if prj == None:
            dla.addError("Unable to open your project, please ensure it is in the same folder as your current project or your Config file")
            return False

        source = dla.getDatasetPath(xmlDoc,"Source")
        target = dla.getDatasetPath(xmlDoc,"Target")
        targetName = dla.getDatasetName(target)
        dla.addMessage(source)
        dla.addMessage(target)

        if dlaService.checkLayerIsService(source) or dlaService.checkLayerIsService(target):
            token = dlaService.getSigninToken() # when signed in get the token and use this. Will be requested many times during the publish
            # exit here before doing other things if not signed in
            if token == None:
                dla.addError("User must be signed in for this tool to work with services")
                return False

        expr = getWhereClause(xmlDoc)
        if _useReplaceSettings == True and (expr == '' or expr == None):
            dla.addError("There must be an expression for replacing by field value, current value = " + str(expr))
            return False

        errs = False
        if dlaService.validateSourceUrl(source) == False:
            dla.addError("Source path does not appear to be a valid feature layer")
            errs = True

        if _useReplaceSettings == True:
            if dlaService.validateTargetReplace(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True
        elif _useReplaceSettings == False:
            if dlaService.validateTargetAppend(target) == False:
                dla.addError("Target path does not have correct privileges")
                errs = True

        if errs:
            return False


        dla.setWorkspace()

        if dla.isTable(source) or dla.isTable(target):
            datasetType = 'Table'
        else:
            datasetType = 'FeatureClass'

        if not dla.isStaged(xmlDoc):
            res = dlaExtractLayerToGDB.extract(xmlFile,None,dla.workspace,source,target,datasetType)
            if res != True:
                table = dla.getTempTable(targetName)
                msg = "Unable to export data, there is a lock on existing datasets or another unknown error"
                if arcpy.TestSchemaLock(table) != True and arcpy.Exists(table) == True:
                    msg = "Unable to export data, there is a lock on the intermediate feature class: " + table
                dla.addError(msg)
                print(msg)
                return
            else:
                res = dlaFieldCalculator.calculate(xmlFile,dla.workspace,targetName,False)
                if res == True:
                    dlaTable = dla.getTempTable(targetName)
                    res = doPublish(xmlDoc,dlaTable,target,_useReplaceSettings)
        else:
            dla.addMessage('Data previously staged, will proceed using intermediate dataset')
            dlaTable = dla.workspace + os.sep + dla.getStagingName(source,target)
            res = doPublish(xmlDoc,dlaTable,target,_useReplaceSettings)
            if res == True:
                dla.removeStagingElement(xmlDoc)
                xmlDoc.writexml(open(xmlFile, 'wt', encoding='utf-8'))
                dla.addMessage('Staging element removed from config file')

        arcpy.ResetProgressor()
        if res == False:
            err = "Data Assistant Update Failed, see messages for details"
            dla.addError(err)
            print(err)
        else:
            layers.append(target)

    arcpy.SetParameter(_outParam,';'.join(layers))
Example #16
    def execute(self, pParams):
        sOK = apwrutils.C_OK 
        ds = time.clock()
        ds1 = time.clock()
        arcpy.env.parallelProcessingFactor = "50%"
        if(self.DebugLevel>0): arcpy.AddMessage("arcpy.Exists({})={}, arcpy.env.scratchWorkspace={}".format(pScratchWorkspace, arcpy.Exists(pScratchWorkspace), arcpy.env.scratchWorkspace))
        if(pScratchWorkspace!=""): 
            if(arcpy.Exists(pScratchWorkspace)):
                arcpy.env.scratchWorkspace = pScratchWorkspace
        
        if(self.DebugLevel>0):
            arcpy.AddMessage("arcpy.Exists({})={}, arcpy.env.scratchWorkspace={}".format(pScratchWorkspace, arcpy.Exists(pScratchWorkspace), arcpy.env.scratchWorkspace))
            arcpy.AddMessage("arcpy.env.scratchFolder={}, arcpy.Exists(arcpy.env.scratchFolder)={}".format(arcpy.env.scratchFolder, arcpy.Exists(arcpy.env.scratchFolder)))        

        sp=" "
        try:
            (dem, multiplier, strRas, pFolder, pFLCatchment) = pParams  # strRas: stream raster used as the Nibble mask
            rdem = arcpy.sa.Raster(dem)
            cellSize = rdem.meanCellWidth
            arcpy.env.cellSize = cellSize
            arcpy.env.snapRaster = rdem.catalogPath   #SnapRaster
            arcpy.env.overwriteOutput = True
            arcpy.env.addOutputsToMap = False
            oDesc = arcpy.Describe(dem) 
            sNamedem = oDesc.name 
            timedem = arcpy.sa.Times(dem, multiplier)   # Process: Times
            arcpy.AddMessage("{}Apply multiplier to {}. dt={}".format(sp*2, sNamedem, apwrutils.Utils.GetDSMsg(ds1,"")))
            ds1 = time.clock()
            intDEM = arcpy.sa.Int(timedem)              # Process: Int   
            arcpy.AddMessage("{}Apply int on the multiplied raster {}. dt={}".format(sp*2,sNamedem, apwrutils.Utils.GetDSMsg(ds1,"")))

            pRFolderBase = os.path.join(pFolder, FD_HANDBase) 

            if(os.path.exists(pRFolderBase)==False):
                apwrutils.Utils.makeSureDirExists(pRFolderBase) 
            if((pFLCatchment==None) or (pFLCatchment=="")):
                pNibbleBase = os.path.join(pRFolderBase, LN_NibbleBase) 
                pLocalMin = os.path.join(pRFolderBase, LN_LocalMin) 
                pHandBase = os.path.join(pRFolderBase, LN_HANDBase)    
                ds1 = time.clock()
                pNibbleBase = arcpy.sa.Nibble(intDEM, strRas, "All_VALUES")  # Process: Nibble
                arcpy.AddMessage("{}Apply nibble operation on int dem of {} with {} as mask. dt={}".format(sp*2, sNamedem, strRas, apwrutils.Utils.GetDSMsg(ds1,"")))
                ds1 = time.clock()
                multiplier = float(multiplier)
                pLocalMin = arcpy.sa.FloatDivide(pNibbleBase, multiplier) # Process: Divide
                arcpy.AddMessage("{}Apply divid operation on int nibbling results. dt={}".format(sp*2,apwrutils.Utils.GetDSMsg(ds1,"")))
                ds1 = time.clock()
                pHandBase = arcpy.sa.Minus(dem, pLocalMin)    # Process: Minus
                arcpy.AddMessage("{}Apply arcpy.sa.Minus(dem, pLocalMin) to produce HANDBASE. dt={}".format(sp*2,apwrutils.Utils.GetDSMsg(ds1,"")))
                pHandBase = arcpy.sa.Con(pHandBase, 0, pHandBase, "value < 0")
                rdsHandBase = os.path.join(pRFolderBase, LN_HANDBase)
                rdsLocalMin = os.path.join(pRFolderBase, LN_LocalMin)     
                    
                pHandBase.save(rdsHandBase) 
                pLocalMin.save(rdsLocalMin) 
                if(pProcessor.DebugLevel>0):
                    rdsNibbleBase = os.path.join(pRFolderBase, LN_NibbleBase) 
                    pNibbleBase.save(rdsNibbleBase) 
            else:
                #..the following 3 names will be used for the final 
                pNibbleBase = os.path.join(pRFolderBase, LN_NibbleBase) 
                pLocalMin = os.path.join(pRFolderBase, LN_LocalMin) 
                pHandBase = os.path.join(pRFolderBase, LN_HANDBase)    
                 
                pCATPath = os.path.join(pFolder, "CATPath")
                apwrutils.Utils.makeSureDirExists(pCATPath) 

                pCatHandBasePath = os.path.join(pCATPath, "CATHandBase")
                apwrutils.Utils.makeSureDirExists(pCatHandBasePath)
                pCatLocalMinPath = os.path.join(pCATPath, "CatLocalMin")
                apwrutils.Utils.makeSureDirExists(pCatLocalMinPath)
                pCatNibbleBasePath = os.path.join(pCATPath, "CATNibbleBase")
                if(pProcessor.DebugLevel>0):
                    apwrutils.Utils.makeSureDirExists(pCatNibbleBasePath)

                oDesc = arcpy.Describe(pFLCatchment)
                oidFieldName = oDesc.oidFieldName
                sr = oDesc.spatialReference
                ds1 = time.clock()
                nCnt = int(arcpy.GetCount_management(pFLCatchment)[0])
                arcpy.AddMessage( "{}Processing {} catchments.".format(sp*2,nCnt))
                nMod = 1
                if (nCnt>10):
                    nMod = int(nCnt/10)
                #arcpy.SetProgressor('step', 'Processing {} catchments.'.format(nCnt), 0, nCnt, nMod)      
                multiplier = float(multiplier)
                sLocalMinNames = ""
                sNibbleNames = ""
                sHandBaseNames = ""
                ds1 = time.clock()
                dds1 = time.clock()
                with arcpy.da.SearchCursor(pFLCatchment, [apwrutils.FN_ShapeAt, oidFieldName]) as rows:
                    for i, row in enumerate(rows):
                        pPolyGeom = row[0]
                        oid = row[1]
                        try:
                            catStr = arcpy.sa.ExtractByMask(strRas, pPolyGeom)
                            catIntDem = arcpy.sa.ExtractByMask(intDEM, pPolyGeom)
                            pCatNibbleBase = arcpy.sa.Nibble(catIntDem, catStr, "All_VALUES")  # Process: Nibble
                            pCatHandBase = arcpy.sa.Minus(catIntDem, pCatNibbleBase)    # Process: Minus
                            pCatLocalMin = arcpy.sa.FloatDivide(pCatNibbleBase, multiplier) # Process: Divide
                            pCatHandBase = arcpy.sa.FloatDivide(pCatHandBase, multiplier) 
                            pCatHandBase = arcpy.sa.Con(pCatHandBase, 0, pCatHandBase, "value < 0")
                                          
                            sLocalMinName = "{}{}".format(LN_LM,oid)
                            sNibbleName = "{}{}".format(LN_NibbleBase,oid)
                            sHandBaseName =  "{}{}".format(LN_HB,oid)
                        
                            rdsLocalMin = os.path.join(pCatLocalMinPath, sLocalMinName)  
                            rdsHandBase = os.path.join(pCatHandBasePath, sHandBaseName ) 
                            pCatHandBase.save(rdsHandBase)                             
                            pCatLocalMin.save(rdsLocalMin)

                            #..Construct 2 raster names for later mosaic (3 if nibblebase is included)
                            if(sLocalMinNames==""):
                                sLocalMinNames = sLocalMinName
                            else:
                                sLocalMinNames = "{};{}".format(sLocalMinNames, sLocalMinName)

                            if(sHandBaseNames==""):
                                sHandBaseNames = sHandBaseName
                            else:
                                sHandBaseNames = "{};{}".format(sHandBaseNames, sHandBaseName)

                            if(pProcessor.DebugLevel>0): 
                                rdsNibbleBase = os.path.join(pCatNibbleBasePath, sNibbleName ) 
                                pCatNibbleBase.save(rdsNibbleBase) 
                                if(sNibbleNames==""):
                                    sNibbleNames = sNibbleName
                                else:
                                    sNibbleNames = "{};{}".format(sNibbleNames, sNibbleName)

                            if(i % nMod)==0:
                                sMsg = "{}Processing {} of {} catchments. ddt={}".format(sp*4, i+1, nCnt,  apwrutils.Utils.GetDSMsg(dds1,""))
                                arcpy.SetProgressorLabel(sMsg)
                                arcpy.AddMessage(sMsg) 
                                arcpy.SetProgressorPosition(i+1)
                                dds1 = time.clock()
                        except:
                            arcpy.AddWarning("{} {}".format(arcpy.GetMessages(2), trace()))
                            pass

                sMsg = "{}Completed processing {} of {} catchments. dt={}".format(sp*2,  nCnt, nCnt,  apwrutils.Utils.GetDSMsg(ds1,""))
                arcpy.AddMessage(sMsg) 
                ds1 = time.clock()
                #Construct mosaic DS:
                arcpy.env.workspace = pCatLocalMinPath
                arcpy.MosaicToNewRaster_management(sLocalMinNames, pRFolderBase, LN_LocalMin, sr, "8_BIT_UNSIGNED", cellSize, "1", "LAST","FIRST") 
                sMsg = "{}Mosaic {} catchment LocalMin to form the LocalMin raster. dt={}".format(sp*2,nCnt, apwrutils.Utils.GetDSMsg(ds1,"") )
                arcpy.AddMessage(sMsg) 
                ds1 = time.clock()

                arcpy.env.workspace = pCatHandBasePath
                arcpy.MosaicToNewRaster_management(sHandBaseNames, pRFolderBase, LN_HANDBase, sr, "8_BIT_UNSIGNED", cellSize, "1", "LAST","FIRST") 
                sMsg = "{}Mosaic {} catchment Handbase to form the HandBase raster. dt={}".format(sp*2,nCnt, apwrutils.Utils.GetDSMsg(ds1,"") )
                arcpy.AddMessage(sMsg) 
                ds1 = time.clock()

                if(pProcessor.DebugLevel>0):            
                    arcpy.env.workspace = pCatNibbleBasePath
                    arcpy.MosaicToNewRaster_management(sNibbleNames, pRFolderBase, LN_NibbleBase, sr, "8_BIT_UNSIGNED", cellSize, "1", "LAST","FIRST") 
                    sMsg = "{}Mosaic {} catchment Nibble to form the NibbleBase raster. dt={}".format(sp*2,nCnt, apwrutils.Utils.GetDSMsg(ds1,"") )
                    arcpy.AddMessage(sMsg) 
                    ds1 = time.clock()

            arcpy.AddMessage("Saving the Handbase result rasters. dt={}".format(apwrutils.Utils.GetDSMsg(ds,"")))
            ds = time.clock()
            sOK = apwrutils.C_OK   

        except:
            arcpy.AddError("{}{}".format(arcpy.GetMessages(2), trace()))
            sOK = apwrutils.C_NOTOK
        finally:
            arcpy.ResetEnvironments()
            if(self.DebugLevel>0):
                arcpy.AddMessage("arcpy.env.workspace={}".format(arcpy.env.workspace))
                arcpy.AddMessage("arcpy.env.cellSize={}".format(arcpy.env.cellSize))
                arcpy.AddMessage("arcpy.env.snapRaster={}".format(arcpy.env.snapRaster))
                arcpy.AddMessage("arcpy.env.overwriteOutput={}".format(arcpy.env.overwriteOutput ))
                arcpy.AddMessage("arcpy.env.addOutputsToMap={}".format(arcpy.env.addOutputsToMap))
                arcpy.AddMessage("arcpy.env.parallelProcessingFactor={}".format(arcpy.env.parallelProcessingFactor))
        return (sOK, pHandBase, pLocalMin)
Example #17
def addRows(source, targetUrl, expr):
    # add rows using _chunkSize
    retval = False
    error = False
    # add section
    try:
        arcpy.SetProgressor("default", "Adding Rows")
        arcpy.SetProgressorLabel("Adding Rows")
        rowjs = rowsToJson(source)
        url = targetUrl + '/addFeatures'
        try:
            numFeat = len(rowjs['features'])
        except:
            numFeat = 0
        if numFeat == 0:
            dla.addMessage("0 Rows to Add, exiting")
            return True  # nothing to add is OK
        if numFeat > _chunkSize:
            chunk = _chunkSize
        else:
            chunk = numFeat
        rowsProcessed = 0
        while rowsProcessed < numFeat and error == False:
            next = rowsProcessed + chunk
            rows = rowjs['features'][rowsProcessed:next]
            msg = "Adding rows " + str(rowsProcessed) + ":" + str(next)
            dla.addMessage(msg)
            arcpy.SetProgressorLabel(msg)
            token = getSigninToken()
            params = {
                'rollbackonfailure': 'true',
                'f': 'json',
                'token': token,
                'features': json.dumps(rows)
            }
            result = sendRequest(url, params)
            try:
                if result['error'] != None:
                    retval = False
                    dla.addMessage("Add rows to Service failed")
                    dla.addMessage(json.dumps(result))
                    error = True
            except:
                try:
                    lenAdded = len(result['addResults'])
                    total = rowsProcessed + chunk
                    if total > numFeat:
                        total = numFeat
                    msg = str(lenAdded) + " rows added, " + str(
                        total) + "/" + str(numFeat)
                    print(msg)
                    dla.addMessage(msg)
                    retval = True
                except:
                    retval = False
                    dla.addMessage(
                        "Add rows to Service failed. Unfortunately you will need to re-run this tool."
                    )
                    #dla.showTraceback()
                    #dla.addError(json.dumps(result))
                    error = True
            rowsProcessed += chunk
    except:
        retval = False
        dla.addMessage("Add rows to Service failed")
        dla.showTraceback()
        error = True
        pass

    return retval
Example #18
def main(argv=None):
    success = True
    try:
        if not arcpy.Exists(gzSupport.workspace):
            gzSupport.addMessage(gzSupport.workspace +
                                 " does not exist, attempting to create")
            gzSupport.createGizintaGeodatabase()
        else:
            gzSupport.compressGDB(gzSupport.workspace)
        if len(datasets) > 0:
            progBar = len(datasets) + 1
            arcpy.SetProgressor("step", "Importing Datasets...", 0, progBar, 1)
            deleteExistingRows(datasets)
            arcpy.SetProgressorPosition()
        for dataset in datasets:
            gzSupport.sourceIDField = dataset.getAttributeNode(
                "sourceIDField").nodeValue
            sourceName = dataset.getAttributeNode("sourceName").nodeValue
            targetName = dataset.getAttributeNode("targetName").nodeValue
            arcpy.SetProgressorLabel("Loading " + sourceName + " to " +
                                     targetName + "...")
            if not arcpy.Exists(os.path.join(sourceWorkspace, sourceName)):
                gzSupport.addError(
                    os.path.join(sourceWorkspace,
                                 sourceName + " does not exist, exiting"))
                return
            if not arcpy.Exists(os.path.join(gzSupport.workspace, targetName)):
                gzSupport.addMessage(
                    os.path.join(gzSupport.workspace, targetName) +
                    " does not exist")
                mode = "export"
            else:
                mode = "import"

            arcpy.env.workspace = gzSupport.workspace
            try:
                if mode == "import":
                    retVal = gzSupport.importDataset(sourceWorkspace,
                                                     sourceName, targetName,
                                                     dataset)
                elif mode == "export":
                    retVal = gzSupport.exportDataset(sourceWorkspace,
                                                     sourceName, targetName,
                                                     dataset)
                if retVal == False:
                    success = False
            except:
                gzSupport.showTraceback()
                success = False
                retVal = False
            gzSupport.logDatasetProcess(sourceName, targetName, retVal)
        arcpy.SetProgressorPosition()
    except:
        gzSupport.showTraceback()
        gzSupport.addError("A Fatal Error occurred")
        success = False
        gzSupport.logDatasetProcess("", "", False)
    finally:
        arcpy.ResetProgressor()
        arcpy.RefreshCatalog(gzSupport.workspace)
        arcpy.ClearWorkspaceCache_management(sourceWorkspace)
        arcpy.ClearWorkspaceCache_management(gzSupport.workspace)

    if success == False:
        gzSupport.addError(
            "Errors occurred during process, look in log files for more information"
        )
    if gzSupport.ignoreErrors == True:
        success = True

    gzSupport.closeLog()
    arcpy.SetParameter(SUCCESS, success)
Example #19
# Purpose: Compute the percentage of each route type that is present in a
# GTFS dataset and save the results to a table in the default project geodatabase

# Load required libraries
import arcpy
import numpy as np
import SSDataObject as ssdo

# Read parameters from the toolbox front end
inFC = arcpy.GetParameterAsText(0)
outTable = arcpy.GetParameterAsText(1)
dissolvedFC = 'dissolvedFC'

arcpy.AddMessage("Table will be saved to: " + outTable)

arcpy.SetProgressorLabel("Dissolving routes by route type...") # Update the user on our progress so far

# Dissolve the input shapes/routes feature class using 'route_type' as a dissolve field.
# This avoids double-counting parts of the networks where multiple routes use
# the same road/rail infrastructure.
arcpy.Dissolve_management(
    inFC,
    dissolvedFC,
    dissolve_field=["route_type"],
    statistics_fields=[["OBJECTID", "COUNT"], ["route_id", "COUNT"]],
    multi_part="MULTI_PART",
    unsplit_lines="DISSOLVE_LINES"
)

arcpy.SetProgressorLabel("Computing route statistics...")
Example #20
def SelectAddCauses():
    """Draws telecoupling causes on top of basemap interactively"""

    # Local variable:
    out_layer = "Causes_lyr"

    # Get the value of the input parameter
    inFeatureSet = arcpy.GetParameterAsText(0)
    in_RecordSet = arcpy.GetParameter(1)

    arcpy.SetProgressorLabel('Creating Causes Categories ...')
    arcpy.AddMessage('Creating Causes Categories ...')

    if inFeatureSet and inFeatureSet != "#":
        try:
            # Process: Make Feature Layer (temporary)
            arcpy.MakeFeatureLayer_management(in_features=inFeatureSet,
                                              out_layer=out_layer)
            arcpy.AddField_management(in_table=out_layer,
                                      field_name="DESCRIPTION",
                                      field_type="TEXT",
                                      field_length=100)

            causeTable = json.loads(in_RecordSet.JSON)

            idx = 0
            countRows = int(arcpy.GetCount_management(out_layer).getOutput(0))

            if countRows != len(causeTable['features']):
                arcpy.AddError(
                    "Number of records in 'Input Attribute Table' must equal "
                    "the number of causes on the map!")
                raise arcpy.ExecuteError
            else:
                with arcpy.da.UpdateCursor(out_layer, 'DESCRIPTION') as cursor:
                    for row in cursor:
                        row[0] = causeTable['features'][idx]['attributes'][
                            'DESCRIPTION']
                        # Update the cursor with the updated list
                        cursor.updateRow(row)
                        idx += 1
                del cursor

            # Process: Copy Feature Class
            outCauses_fc = os.path.join(arcpy.env.scratchGDB, "Causes")
            arcpy.CopyFeatures_management(out_layer, outCauses_fc)

            # Process: Delete Unwanted/Unnecessary fields
            arcpy.SetProgressorLabel('Removing Unwanted Fields ...')
            arcpy.AddMessage('Removing Unwanted Fields ...')
            arcpy.DeleteField_management(outCauses_fc, "Id")

            # Process: Export Data to CSV File
            arcpy.SetProgressorLabel(
                'Exporting Feature Class Attributes to CSV ...')
            arcpy.AddMessage('Exporting Feature Class Attributes to CSV ...')
            outTable_CSV = os.path.join(arcpy.env.scratchFolder,
                                        "Causes_Table.csv")
            ExportToCSV(fc=outCauses_fc, output=outTable_CSV)

            #### Set Parameters ####
            arcpy.SetParameterAsText(2, outCauses_fc)

        except Exception:
            e = sys.exc_info()[1]
            arcpy.AddError('An error occurred: {}'.format(e.args[0]))
    else:
        arcpy.AddError('No Features Have Been Added to the Map!')
def main():
    scratch_datasets = []
    new_fields = [
        'Crash_Count', 'Crash_Count_Weight', 'Crash_Frequency', 'Crash_Rate',
        'Weighted_Crash_Frequency', 'Weighted_Crash_Rate'
    ]

    try:
        streets_intersection = arcpy.GetParameterAsText(0)
        crashes = arcpy.GetParameterAsText(1)
        time_interval, time_unit = arcpy.GetParameterAsText(2).split(' ')
        time_interval = float(time_interval)
        if time_unit == 'Years':
            time_interval = time_interval * 365
        elif time_unit == 'Weeks':
            time_interval = time_interval * 7
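        # e.g. "2 Years" -> 730.0 days and "6 Weeks" -> 42.0 days; day-based
        # intervals pass through unchanged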
        snap_distance = arcpy.GetParameterAsText(3)
        weight_field = arcpy.GetParameterAsText(4)
        weight_table = arcpy.GetParameter(5)
        adt_field = arcpy.GetParameterAsText(6)
        output_crash_rates = arcpy.GetParameterAsText(7)
        params = arcpy.GetParameterInfo()
        shape_type = arcpy.Describe(streets_intersection).shapeType

        weight_provided = False
        if weight_field is not None and weight_field != '':
            weight_provided = True

        adt_provided = False
        if adt_field is not None and adt_field != '':
            adt_provided = True

        arcpy.SetProgressorLabel("Creating Temporary Crash Layer...")
        arcpy.MakeFeatureLayer_management(crashes, "Crash Layer")
        crashes_snap = os.path.join(arcpy.env.scratchGDB, "Crash_Snap")
        if arcpy.Exists(crashes_snap):
            arcpy.Delete_management(crashes_snap)
        arcpy.CopyFeatures_management("Crash Layer", crashes_snap)
        scratch_datasets.append(crashes_snap)

        crash_count_field = new_fields[0]
        crash_count_weight_field = new_fields[1]
        arcpy.AddField_management(crashes_snap,
                                  crash_count_field,
                                  "Double",
                                  field_alias="Crash Count")
        fields = [crash_count_field]
        if weight_provided:
            arcpy.AddField_management(crashes_snap,
                                      crash_count_weight_field,
                                      "Double",
                                      field_alias="Crash Count Weight")
            fields.append(crash_count_weight_field)
            fields.append(weight_field)
            for field in arcpy.Describe(crashes).fields:
                if field.name == weight_field:
                    if field.domain is not None and field.domain != '':
                        database = get_workspace(crashes)
                        if database is not None:
                            for domain in arcpy.da.ListDomains(database):
                                if domain.name == field.domain:
                                    if domain.domainType == 'CodedValue':
                                        # Swap coded-value descriptions in the
                                        # weight table for their stored keys
                                        for key, value in domain.codedValues.items():
                                            for i in range(weight_table.rowCount):
                                                if weight_table.getValue(i, 0) == value:
                                                    weight_table.setValue(i, 0, str(key))
                                    break

        with arcpy.da.UpdateCursor(crashes_snap, fields) as cursor:
            for row in cursor:
                row[0] = 1.0
                if len(fields) == 3:
                    value = str(row[2])
                    for i in range(0, weight_table.rowCount):
                        if value == weight_table.getValue(i, 0):
                            row[1] = weight_table.getValue(i, 1)
                            break
                cursor.updateRow(row)

        if (shape_type == "Polyline"):
            arcpy.SetProgressorLabel("Snapping Crashes to Nearest Street...")
        else:
            arcpy.SetProgressorLabel(
                "Snapping Crashes to Nearest Intersection...")
        snapEnv = [streets_intersection, "EDGE", snap_distance]
        arcpy.Snap_edit(crashes_snap, [snapEnv])

        fms = arcpy.FieldMappings()
        desc = arcpy.Describe(streets_intersection)
        for field in desc.fields:
            if field.type == 'Geometry' or field.type == 'OID' or field.name in new_fields:
                continue
            if shape_type == "Polyline" and field.name == desc.AreaFieldName:
                continue
            fm = arcpy.FieldMap()
            fm.addInputField(streets_intersection, field.name)
            fms.addFieldMap(fm)
        fm = arcpy.FieldMap()
        fm.addInputField(crashes_snap, crash_count_field)
        fm.mergeRule = 'Sum'
        fms.addFieldMap(fm)
        if weight_provided:
            fm = arcpy.FieldMap()
            fm.addInputField(crashes_snap, crash_count_weight_field)
            fm.mergeRule = 'Sum'
            fms.addFieldMap(fm)
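
        # With the 'Sum' merge rule, the spatial join below totals the
        # per-crash counts of all crashes landing on the same street segment
        # or intersection after the snap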

        crashes_join = os.path.join(arcpy.env.scratchGDB, "Crash")
        if arcpy.Exists(crashes_join):
            arcpy.Delete_management(crashes_join)
        arcpy.SpatialJoin_analysis(streets_intersection, crashes_snap,
                                   crashes_join, "JOIN_ONE_TO_ONE", "KEEP_ALL",
                                   fms, "Intersect", "0 Feet")
        scratch_datasets.append(crashes_join)

        if weight_provided:
            with arcpy.da.UpdateCursor(crashes_join,
                                       [crash_count_weight_field]) as cursor:
                for row in cursor:
                    if row[0] == 0:
                        row[0] = None
                    cursor.updateRow(row)

        arcpy.SetProgressorLabel("Calculating Crash Statistics")
        templateDir = os.path.dirname(__file__)
        crash_frequency_field = new_fields[2]
        crash_rate_field = new_fields[3]
        weighted_crash_frequency_field = new_fields[4]
        weighted_crash_rate_field = new_fields[5]

        add_fields = []
        fields = [crash_count_field]

        if (shape_type == "Polyline"):
            fields.append('SHAPE@')
            add_fields = [[crash_frequency_field, "Crashes Per Mile Per Year"],
                          [
                              crash_rate_field,
                              "Crashes Per Million Vehicle Miles"
                          ],
                          [
                              weighted_crash_frequency_field,
                              "Weighted Crashes Per Mile Per Year"
                          ],
                          [
                              weighted_crash_rate_field,
                              "Weighted Crashes Per Million Vehicle Miles"
                          ]]
        else:
            add_fields = [
                [crash_frequency_field, "Crashes Per Year"],
                [crash_rate_field, "Crashes Per Million Entering Vehicles"],
                [weighted_crash_frequency_field, "Weighted Crashes Per Year"],
                [
                    weighted_crash_rate_field,
                    "Weighted Crashes Per Million Entering Vehicles"
                ]
            ]

        arcpy.AddField_management(crashes_join,
                                  add_fields[0][0],
                                  "Double",
                                  field_alias=add_fields[0][1])
        fields.append(add_fields[0][0])
        if adt_provided:
            arcpy.AddField_management(crashes_join,
                                      add_fields[1][0],
                                      "Double",
                                      field_alias=add_fields[1][1])
            fields.append(add_fields[1][0])
            fields.append(adt_field)
        if weight_provided:
            fields.append(crash_count_weight_field)
            arcpy.AddField_management(crashes_join,
                                      add_fields[2][0],
                                      "Double",
                                      field_alias=add_fields[2][1])
            fields.append(add_fields[2][0])
            if adt_provided:
                arcpy.AddField_management(crashes_join,
                                          add_fields[3][0],
                                          "Double",
                                          field_alias=add_fields[3][1])
                fields.append(add_fields[3][0])

        with arcpy.da.UpdateCursor(crashes_join, fields) as cursor:
            # Map field names to row indices once, instead of repeated
            # cursor.fields.index() calls
            idx = {name: i for i, name in enumerate(cursor.fields)}
            for row in cursor:
                if row[idx[crash_count_field]] is None:
                    continue

                miles = 1.0
                if 'SHAPE@' in idx:
                    miles = row[idx['SHAPE@']].getLength('GEODESIC', 'MILES')
                years = time_interval / 365

                row[idx[crash_frequency_field]] = (
                    row[idx[crash_count_field]] / (years * miles))

                if (crash_count_weight_field in idx
                        and row[idx[crash_count_weight_field]] is not None):
                    row[idx[weighted_crash_frequency_field]] = (
                        row[idx[crash_count_weight_field]] / (years * miles))

                if adt_field in idx and row[idx[adt_field]] is not None:
                    row[idx[crash_rate_field]] = (
                        row[idx[crash_count_field]] * 1000000 /
                        (time_interval * row[idx[adt_field]] * miles))
                    if (crash_count_weight_field in idx
                            and row[idx[crash_count_weight_field]] is not None):
                        row[idx[weighted_crash_rate_field]] = (
                            row[idx[crash_count_weight_field]] * 1000000 /
                            (time_interval * row[idx[adt_field]] * miles))
                cursor.updateRow(row)
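
        # Worked example: with Crash_Count = 12, time_interval = 730 days
        # (2 years), a 0.5-mile segment, and ADT = 10000:
        #   frequency = 12 / ((730 / 365) * 0.5) = 12 crashes per mile per year
        #   rate = 12 * 1000000 / (730 * 10000 * 0.5) ~= 3.29 crashes per
        #   million vehicle miles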

        arcpy.SetProgressorLabel("Creating Crash Rate Layer...")
        field_info = ""
        fields_to_hide = ['Join_Count', 'TARGET_FID', new_fields[0]]
        if weight_provided:
            fields_to_hide.append(new_fields[1])
        field_list = arcpy.ListFields(crashes_join)
        for field in field_list:
            if field.name in fields_to_hide:
                field_info = "{0}{1} {1} HIDDEN;".format(
                    field_info, field.name)
            else:
                field_info = "{0}{1} {1} VISIBLE;".format(
                    field_info, field.name)
        arcpy.MakeFeatureLayer_management(crashes_join,
                                          "Output Crash Layer",
                                          field_info=field_info[:-1])
        arcpy.SelectLayerByAttribute_management(
            "Output Crash Layer", "NEW_SELECTION",
            '{0} IS NOT NULL'.format(new_fields[2]))
        arcpy.CopyFeatures_management("Output Crash Layer", output_crash_rates)

        lyrx_json = _CRASH_RATE_POINT
        if (shape_type == "Polyline"):
            lyrx_json = _CRASH_RATE_POLYLINE
        with tempfile.NamedTemporaryFile(delete=False) as temp_lyrx:
            temp_lyrx.write(lyrx_json.encode())
        lyrx_path = "{0}.lyrx".format(temp_lyrx.name)
        os.rename(temp_lyrx.name, lyrx_path)
        params[7].symbology = lyrx_path
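
        # Writing the embedded .lyrx JSON to a temp file and assigning it to
        # the output parameter's symbology applies the packaged renderer to
        # the tool's result layer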

    finally:
        for dataset in scratch_datasets:
            if arcpy.Exists(dataset):
                arcpy.Delete_management(dataset)

    if i_desc.dataType == 'Folder':
        # Assumed reconstruction around the surviving fragment: walk the folder
        # for .las files ('i' and 'i_desc' come from an enclosing loop not shown)
        for dirpath, dirnames, filenames in os.walk(i):
            input_las_files.extend([
                os.path.join(dirpath, filename)
                for filename in filenames
                if os.path.splitext(filename)[1] == '.las'
            ])
    elif i_desc.dataType == 'LasDataset':
        input_las_files.append(i)

# Get CPU Count
cpu_ct = min(cpu_count(), len(input_las_files))  # never more workers than files
arcpy.AddMessage(
    f'Total CPU Count: {cpu_count()} | File Count: {len(input_las_files)} | CPUs Used: {cpu_ct}'
)

# Classify Ground Points
if ground_method == 'AGGRESSIVE':
    # The aggressive method needs an initial STANDARD pass; the requested
    # method then runs in the block below
    arcpy.SetProgressorLabel('Classifying Ground Points: Standard Method...')
    classify_ground_arg = ['STANDARD', reuse_ground, dem_resolution]
    classify_ground_args = [[las_file] + classify_ground_arg + global_args
                            for las_file in input_las_files]
    with Pool(processes=cpu_ct) as pool:
        results = pool.starmap(lidarlas.ClassifyLasGround,
                               classify_ground_args)

if ground_method:
    arcpy.SetProgressorLabel(
        f'Classifying Ground Points: {ground_method.capitalize()} Method...')
    classify_ground_arg = [ground_method, reuse_ground, dem_resolution]
    classify_ground_args = [[las_file] + classify_ground_arg + global_args
                            for las_file in input_las_files]
    with Pool(processes=cpu_ct) as pool:
        results = pool.starmap(lidarlas.ClassifyLasGround,
                               classify_ground_args)

## get all the fields in Project FC:
projectFields = arcpy.ListFields(projects)
projectFieldNames = []
for field in projectFields:
    projectFieldNames.append(field.name)

## set progressor (the progress indicator bar in the geoprocessing window)
arcpy.SetProgressor("step", "Calculating distances... ", 0, len(input_dict), 1)

for inputFC in input_dict:
    ## if an input is provided, calculate the distance to it
    if input_dict[inputFC] != "":
        feature = input_dict[inputFC]
        fieldName = inputFC

        arcpy.SetProgressorLabel("Calculating distances for " + str(feature))
        ## calculate distances
        arcpy.Near_analysis(projects, feature, "", "NO_LOCATION", "NO_ANGLE")

        ## Add the new field
        if not (fieldName in projectFieldNames):
            arcpy.AddField_management(projects, fieldName, "DOUBLE")

        if (fieldName == "d_tra" or fieldName == "d_sub"):
            arcpy.CalculateField_management(
                projects, fieldName,
                "!NEAR_DIST! * " + str(transmissionDistMultiplier),
                "PYTHON_9.3")
        else:
            arcpy.CalculateField_management(projects, fieldName, "!NEAR_DIST!",
                                            "PYTHON_9.3")

    # This distance is designed to minimize no-data errors when the HEL
    # Determination tool's resampled 3-meter DEM doesn't perfectly snap with
    # results from this tool.
    arcpy.Buffer_analysis(source_clu, clu_buffer, "410 Meters", "FULL", "",
                          "ALL", "")

    # --------------------------------------------------------------------------------------------------------- Clip out the DEMs that were entered
    arcpy.AddMessage("\nClipping Raster Layers...")
    x = 0
    del_list = []  # temporary clips to clean up after the merge is done
    mergeRasters = ""

    while x < datasets:
        current_dem = source_dems[x].replace("'", "")
        out_clip = temp_dem + "_" + str(x)

        arcpy.SetProgressorLabel("Clipping " + current_dem + " " + str(x + 1) +
                                 " of " + str(datasets))

        try:
            AddMsgAndPrint("\tClipping " + current_dem + " " + str(x + 1) +
                           " of " + str(datasets))
            extractedDEM = arcpy.sa.ExtractByMask(current_dem, clu_buffer)
            extractedDEM.save(out_clip)
        except:
            arcpy.AddError(
                "\nThe input CLU fields may not cover the input DEM files. Clip & Merge failed...Exiting!\n"
            )
            sys.exit()

        # Create merge statement
        if x == 0:
            # Start list of layers to merge
Example #25
# Imports assumed by this snippet (Python 2 style: long and xrange are used)
import time as t
from datetime import datetime

import arcpy
import pandas as pd
from opensky_api import OpenSkyApi

api = OpenSkyApi()

l = []  # one row per aircraft state vector

fois = arcpy.GetParameterAsText(0)  # number of polling iterations
airport = arcpy.GetParameterAsText(1)

workspace = r"C:\Users\Hong\Documents\ArcGIS\ADS-B"
arcpy.env.workspace = workspace
arcpy.env.overwriteOutput = True
max_steps = long(fois) + 4  # avoid shadowing the built-in max()
arcpy.SetProgressor("step", "Get real-time ADS-B data...", 0, max_steps, 1)

for i in xrange(long(fois)):
    states = api.get_states(bbox=box(airport))
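    # box() is a helper defined elsewhere in the source script; get_states
    # expects bbox as (min_lat, max_lat, min_lon, max_lon)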
    arcpy.SetProgressorLabel("Loading {}".format(i))
    for s in states.states:
        time = datetime.fromtimestamp(s.time_position).isoformat()
        l.append([
            s.callsign, s.longitude, s.latitude, s.geo_altitude, s.heading,
            s.icao24, time
        ])
    t.sleep(15)
    arcpy.SetProgressorPosition()

try:
    arcpy.SetProgressorLabel("Saving to {}.csv".format(time))
    col = [
        'callsign', 'lon', 'lat', 'geoaltitude', 'heading', 'icao24', 'time'
    ]
    data = pd.DataFrame(l, columns=col)
# Main - wrap everything in a try statement
try:
    #--------------------------------------------------------------------- Check for ArcInfo License exit if not available
    if not arcpy.ProductInfo() == "ArcInfo":
        arcpy.AddError("\nThis tool requires an ArcInfo/Advanced license level for ArcGIS Desktop. Exiting...\n")
        sys.exit()

    # Check out Spatial Analyst License
    if arcpy.CheckExtension("Spatial") == "Available":
        arcpy.CheckOutExtension("Spatial")
    else:
        arcpy.AddError("\nSpatial Analyst Extension not enabled. Please enable Spatial Analyst from the Tools/Extensions menu. Exiting...\n")
        sys.exit()

    arcpy.SetProgressorLabel("Setting Variables")
    #------------------------------------------------------------------------  Input Parameters
    InputContours = arcpy.GetParameterAsText(0)
    dam = arcpy.GetParameterAsText(1)
    #outputPoolName = arcpy.GetParameterAsText(2)
    #outputDamName = arcpy.GetParameterAsText(3)

    # ------------------------------------------------------------------------ Define Variables
    InputContours = arcpy.Describe(InputContours).CatalogPath

    # Exit if Contour layer not created from NRCS Engineering tools
    if InputContours.find('.gdb') > 0 and InputContours.find("_Contours") > 0:
        watershedGDB_path = InputContours[:InputContours.find('.')+4]
    else:
        arcpy.AddError("Input contours layer was not generated using the \"NRCS Engineering Tools\". Exiting...\n")
        sys.exit()
# main
import string, os, sys, traceback, locale, arcpy
from arcpy import env

try:
    arcpy.env.overwriteOutput = True

    # Script arguments...
    saLayer = arcpy.GetParameterAsText(0)  # input folder

    #PrintMsg(" \nGetting record count from SDM tables", 0)
    dCount = GetSDMCount(saLayer)  # dictionary containing SDM record counts

    arcpy.SetProgressor("step",
                        "Getting table record count from Soil Data Access...",
                        1, len(dCount), 1)
    tblList = sorted(dCount.keys())
    PrintMsg(" \nTABLE, COUNT", 0)
    for tbl in tblList:
        arcpy.SetProgressorLabel(tbl)
        PrintMsg(tbl + ", " + Number_Format(dCount[tbl], 0, False), 0)

    PrintMsg(" ", 0)

except MyError, e:
    # Example: raise MyError, "This is an error message"
    PrintMsg(str(e) + " \n ", 2)

except:
    errorMsg()
def AddMediaFlows():

    # Local variable:
    out_layer_src_fl = "source_lyr"
    out_view_tbl = "layer_view"
    out_layer_pnt_fc = r"in_memory\country_lyr"
    out_media_flows_fc = r"in_memory\media_lyr"
    out_merged_flows_fc = r"in_memory\merged_lyr"
    out_name_merge_fl = "Merged Media Flows"
    out_name_fl = "Media Flows"

    try:

        ### 1. Create temp feature layer from source point and add XY coordinates ###
        # Process: Make Feature Layer (temporary)
        arcpy.AddMessage('Creating Feature Layer from Source Point ...')
        arcpy.SetProgressorLabel(
            'Creating Feature Layer from Source Point ...')
        arcpy.MakeFeatureLayer_management(in_features=source_FeatureSet,
                                          out_layer=out_layer_src_fl)
        #### Add XY Coordinates To Point Layer ####
        arcpy.AddMessage('Adding XY Coordinates to Source Point Layer ...')
        arcpy.SetProgressorLabel(
            'Adding XY Coordinates to Source Point Layer ...')
        arcpy.AddXY_management(out_layer_src_fl)

        ### 2. The user should add only ONE source location; if so, store its XY coordinates in a list ###
        countRows = int(
            arcpy.GetCount_management(out_layer_src_fl).getOutput(0))

        if countRows > 1:
            arcpy.AddError(
                "ERROR: You must specify only ONE source location on the map!")
            raise arcpy.ExecuteError
        else:
            with arcpy.da.SearchCursor(out_layer_src_fl,
                                       ['POINT_X', 'POINT_Y']) as cursor:
                for row in cursor:
                    srcPnt_XY = [row[0], row[1]]
            del cursor

        ### 3. Create List of Strings Based on Selected Input Feature Layer Field ###
        fNames_lst = []
        # Search Cursor: the following only works with ArcGIS 10.1+ ###
        arcpy.SetProgressorLabel(
            'Creating List of Values From Chosen Input Feature Layer Field ...'
        )
        arcpy.AddMessage(
            'Creating List of Values From Chosen Input Feature Layer Field ...'
        )
        with arcpy.da.SearchCursor(input_fc, input_fc_field) as cursor:
            for row in cursor:
                fNames_lst.append(row[0])
        del cursor

        ### 4. Read HTML input report file and parse its content grabbing desired tags ###
        arcpy.SetProgressorLabel('Reading and Parsing HTML File ...')
        arcpy.AddMessage('Reading and Parsing HTML File ...')
        soup = BeautifulSoup(open(input_html), 'html.parser')
        links_geo = soup.find_all(
            lambda tag: tag.name == 'p' and tag.get('class') == ['c1'])
        #links_p = soup.find_all('p')
        text_elements_geo = [
            links_geo[i].find_all(text=True) for i in range(len(links_geo))
        ]
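
        # text_elements_geo holds one list of text nodes per matched
        # <p class="c1"> element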

        ### 5. Initialize a dictionary to store frequency of geographic locations to be mapped: ###
        ### Keys ---> unique values of the selected input feature field;
        ### Values ---> count frequency of word match between parsed HTML string and dictionary key (e.g. country name) ###
        #country_parse = {k: None for k in country_lst}
        arcpy.SetProgressorLabel(
            'Creating Dictionary with Frequency Counts of Geographic Locations from Parsed HTML ...'
        )
        arcpy.AddMessage(
            'Creating Dictionary with Frequency Counts of Geographic Locations from Parsed HTML ...'
        )
        country_parse = {}
        for el in text_elements_geo:
            for geo in fNames_lst:
                if len(el) == 1 and geo in el[0]:
                    if geo not in country_parse:
                        country_parse[geo] = 1
                    else:
                        country_parse[geo] += 1
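
        # e.g. country_parse might end up as {'Brazil': 4, 'China': 2} if those
        # names appear 4 and 2 times in the parsed paragraphs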

        ### 6. Create a temporary point layer from the input Polygon feature class ###
        arcpy.SetProgressorLabel(
            'Creating Temporary Point Layer from Input Feature Layer ...')
        arcpy.AddMessage(
            'Creating Temporary Point Layer from Input Feature Layer ...')
        ### Process: Feature To Point Layer ###
        arcpy.FeatureToPoint_management(input_fc, out_layer_pnt_fc, "INSIDE")

        ### 7. Add Fields Required as Input by the XY To Line GP tool ###
        arcpy.SetProgressorLabel(
            'Adding New Field to Temporary Point Layer ...')
        arcpy.AddMessage('Adding New Field to Temporary Point Layer ...')
        #### Add Fields to temporary feature layer ###
        arcpy.AddField_management(in_table=out_layer_pnt_fc,
                                  field_name="FROM_X",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=out_layer_pnt_fc,
                                  field_name="FROM_Y",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=out_layer_pnt_fc,
                                  field_name="TO_X",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=out_layer_pnt_fc,
                                  field_name="TO_Y",
                                  field_type="DOUBLE")
        arcpy.AddField_management(in_table=out_layer_pnt_fc,
                                  field_name="Frequency",
                                  field_type="SHORT")
        #### Add XY Coordinates To Point Layer ####
        arcpy.AddXY_management(out_layer_pnt_fc)

        ### 8. Fill Out Values for All Newly Added Fields in the Temp Point Feature Layer ###
        arcpy.SetProgressorLabel(
            'Transferring Values from Dictionary to Temporary Point Feature Layer ...'
        )
        arcpy.AddMessage(
            'Transferring Values from Dictionary to Temporary Point Feature Layer ...'
        )
        fields_selection = [
            'FROM_X', 'FROM_Y', input_fc_field, 'TO_X', 'TO_Y', 'POINT_X',
            'POINT_Y', 'Frequency'
        ]
        with arcpy.da.UpdateCursor(out_layer_pnt_fc,
                                   fields_selection) as cursor:
            for row in cursor:
                if row[2] in country_parse:
                    row[0] = srcPnt_XY[0]
                    row[1] = srcPnt_XY[1]
                    row[3] = row[5]
                    row[4] = row[6]
                    row[7] = country_parse[row[2]]
                    # Update the cursor with the updated list
                    cursor.updateRow(row)
                else:
                    cursor.deleteRow()
        del cursor

        ### 9. Remove Unnecessary Fields From the Temp Point Feature Layer ###
        arcpy.SetProgressorLabel(
            'Removing Unnecessary Fields from Temporary Point Feature Layer ...'
        )
        arcpy.AddMessage(
            'Removing Unnecessary Fields from Temporary Point Feature Layer ...'
        )
        fields = arcpy.ListFields(out_layer_pnt_fc)
        keepFields = ['FROM_X', 'FROM_Y', 'TO_X', 'TO_Y', 'Frequency']
        dropFields = [
            f.name for f in fields if f.name not in keepFields
            and f.type != 'OID' and f.type != 'Geometry'
        ]
        # delete fields
        arcpy.DeleteField_management(out_layer_pnt_fc, dropFields)

        ### 10. Export temp feature class to CSV and use to draw flow lines ###
        arcpy.SetProgressorLabel(
            'Creating Table View from Temporary Feature Layer ...')
        arcpy.AddMessage(
            'Creating Table View from Temporary Feature Layer ...')
        arcpy.MakeTableView_management(in_table=out_layer_pnt_fc,
                                       out_view=out_view_tbl)

        ### 11. If Merging Box is Checked, Merge Temp Point Feature Class To Copy of Input Flow Layer ###
        if is_checked_table:
            arcpy.SetProgressorLabel(
                'Creating Media Information Radial Flow Lines ...')
            arcpy.AddMessage(
                'Creating Media Information Radial Flow Lines ...')
            arcpy.XYToLine_management(in_table=out_view_tbl,
                                      out_featureclass=out_media_flows_fc,
                                      startx_field='FROM_X',
                                      starty_field='FROM_Y',
                                      endx_field='TO_X',
                                      endy_field='TO_Y',
                                      line_type=lineType_str,
                                      id_field='Frequency',
                                      spatial_reference=out_layer_pnt_fc)
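
            # id_field='Frequency' carries each location's media-mention count
            # onto the corresponding output flow line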

            arcpy.SetProgressorLabel(
                'Merging Media Information Flows With Existing Flow Layer ...')
            arcpy.AddMessage(
                'Merging Media Information Flows With Existing Flow Layer ...')
            arcpy.Merge_management([out_media_flows_fc, input_flow_lyr],
                                   out_merged_flows_fc)
            # Process: Create a feature layer from the joined feature class to send back as output to GP tools
            out_fl = arcpy.MakeFeatureLayer_management(out_merged_flows_fc,
                                                       out_name_merge_fl)
            # Execute FeatureClassToGeodatabase
            arcpy.AddMessage("Converting Feature Class to Shapefile...")
            arcpy.FeatureClassToShapefile_conversion(out_merged_flows_fc,
                                                     arcpy.env.scratchFolder)
        else:
            arcpy.SetProgressorLabel(
                'Creating Media Information Radial Flow Lines ...')
            arcpy.AddMessage(
                'Creating Media Information Radial Flow Lines ...')
            arcpy.XYToLine_management(in_table=out_view_tbl,
                                      out_featureclass=out_media_flows_fc,
                                      startx_field='FROM_X',
                                      starty_field='FROM_Y',
                                      endx_field='TO_X',
                                      endy_field='TO_Y',
                                      line_type=lineType_str,
                                      id_field='Frequency',
                                      spatial_reference=out_layer_pnt_fc)
            # Process: Create a feature layer from the joined feature class to send back as output to GP tools
            out_fl = arcpy.MakeFeatureLayer_management(out_media_flows_fc,
                                                       out_name_fl)
            # Execute FeatureClassToGeodatabase
            arcpy.AddMessage("Converting Feature Class to Shapefile...")
            arcpy.FeatureClassToShapefile_conversion(out_media_flows_fc,
                                                     arcpy.env.scratchFolder)

        arcpy.SetParameter(7, out_fl)
        arcpy.ResetProgressor()

    except Exception:
        e = sys.exc_info()[1]
        arcpy.AddError('An error occurred: {}'.format(e.args[0]))
Example #29
    n = 0
    arcpy.SetProgressor('step', 'Starting Parent Material Group Name Tool...',
                        0, jobCnt, 1)

    compDict = dict()

    # for eSSA in areaList:
    for state in states:
        #p = [x for x in areaList if x[:2] == state]
        #theReq = ",".join(map("'{0}'".format, p))

        n = n + 1

        arcpy.SetProgressorLabel('Collecting parent material table for: ' +
                                 state + " (" + str(n) + ' of ' + str(jobCnt) +
                                 ')')

        #send the request
        #True, funcDict, cResponse
        pmLogic, pmData, pmMsg = getPMgrp(state, dBool)

        #if it was successful...
        if pmLogic:
            if len(pmData) == 0:
                AddMsgAndPrint('No records returned for ' + state, 1)
                failPM.append(state)
                arcpy.SetProgressorPosition()
            else:
                AddMsgAndPrint(
                    'Response for parent material table request on ' + state +
Example #30
def CanevasRestituion(fc, sh):

    try:
        #Setting the workspace
        environ = get_geodatabase_path(fc)
        arcpy.env.workspace = environ

        # Run the spatial join from the Reseaux network onto PL
        spatialJoin_ReseauxPL("PL", "Reseaux", "Reso_PL_Join")

        fields = ("Post_Name", "Code_Poste", "Code_Transfo", "Puiss_Nom", "X",
                  "Y")

        arcpy.AddMessage("Printing the header ...")
        sh.write(0, 0, "Nom Poste")
        sh.write(0, 1, "Coord_X")
        sh.write(0, 2, "Coord_Y")
        sh.write(0, 3, "Code Poste")
        sh.write(
            0, 4, "Code Transfo"
        )  # codeTransfo minus codePoste: in principle "01" remains
        sh.write(0, 5, "Puissance Transfo")
        sh.write(0, 6, "NB de PL inventories")
        sh.write(0, 7, "PL en section 3x70mm")
        sh.write(0, 8, "PL en section 3x50mm")
        sh.write(0, 9, "PL en section 4x25mm")
        sh.write(0, 10, "PL en section 4x16mm")
        sh.write(0, 11, "PL en section 2x16mm")
        sh.write(0, 12, "PL en Classique Monophase")
        sh.write(0, 13, "PL en Classique Triphase")
        sh.write(0, 14, "NB PL en direct")
        sh.write(0, 15, "NB BCC ouvertes")
        sh.write(0, 16, "NB Installation avec compact")
        sh.write(0, 17, "3x70mm")
        sh.write(0, 18, "3x50mm")
        sh.write(0, 19, "4x25mm")
        sh.write(0, 20, "4x16mm")
        sh.write(0, 21, "2x16mm")
        sh.write(0, 22, "Classique monophase")
        sh.write(0, 23, "Classique triphase")
        sh.write(0, 24, "NB Supports BT")
        sh.write(0, 25, "Casse")
        sh.write(0, 26, "Pourri")
        sh.write(0, 27, "Pourri-Operationnel")
        sh.write(0, 28, "Tombe")
        sh.write(0, 29, "NB lampes EP")
        arcpy.AddMessage("Header printed successfully ...")
        arcpy.AddMessage(
            "--------------------------------------------------------------------------------------"
        )
        line = 1

        fcPostePL = 'F:/GIS_PROJECT_GDB/COMPILE_GEODATABASE/NEWBELL/postes_pl'
        n = int(arcpy.GetCount_management(fcPostePL).getOutput(0))
        p = 1
        count = 0
        arcpy.SetProgressor(
            "step", "Step progressor: Processing from 0 to {0}".format(n), 0,
            n, p)

        time.sleep(readTime)
        cursor1 = arcpy.da.SearchCursor(
            fcPostePL,
            ["POSTES_DAN"])  # iterate over the table storing the PL transformers
        for row1 in cursor1:
            count += 1
            if (count % p) == 0:
                arcpy.SetProgressorLabel(
                    "Searching statistics for Poste " + row1[0] +
                    "... printing at rows {0}".format(count))
                arcpy.SetProgressorPosition(count)
            #----------------------------------------------------------------------------------------------------------------------------
            arcpy.AddMessage(
                "Printing data at the row {0} for Poste {1}".format(
                    count, row1[0]))
            # Retrieve the data and write it into the Excel sheet
            ##  sh.write(line, 0, row[0])  # poste name
            ##  sh.write(line, 1, row[4])  # X coordinate
            ##  sh.write(line, 2, row[5])  # Y coordinate
            ##  sh.write(line, 3, row[1])  # poste code
            ##  sh.write(line, 4, row[2])  # transformer code
            ##  sh.write(line, 5, row[3])
            codeTrans = row1[0] + "01"  # transformer code: poste code + "01"
            sh.write(line, 0, "EMPTY_NAME")  # poste name
            sh.write(line, 1, "EMPTY_X")  # X coordinate
            sh.write(line, 2, "EMPTY_Y")  # Y coordinate
            sh.write(line, 3, row1[0])  # poste code
            sh.write(line, 4, codeTrans)  # transformer code
            sh.write(line, 5, "EMPTY_POWER")

            #----------------------------------------------------------------------------------------------------------------------------
            #Count the number of PL for the current transfo
            nbPL = countFCByTransfo(
                "PL", "Code_Transfo LIKE '" + row1[0] +
                "%' AND Data_Creator IS NOT NULL")
            sh.write(line, 6, nbPL)

            #----------------------------------------------------------------------------------------------------------------------------
            # Number of PL by cable section and network
            PL3par70 = summaryOfField(
                "Reso_PL_Join", ("Stats3par70_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" + row1[0] +
                "%' AND Section_Cable LIKE '3x70mm%'", str(row1[0]))
            #PL3par70 = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE '3x70mm%'")
            sh.write(line, 7, PL3par70)

            PL3par50 = summaryOfField(
                "Reso_PL_Join", ("Stats3par50_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" + row1[0] +
                "%' AND Section_Cable LIKE '3x50mm%'", str(row1[0]))
            #PL3par50 = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE '3x50mm%'")
            sh.write(line, 8, PL3par50)

            PL4par25 = summaryOfField(
                "Reso_PL_Join", ("Stats4par25_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" + row1[0] +
                "%' AND Section_Cable LIKE '4x25mm%'", str(row1[0]))
            #PL4par25 = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE '4x25mm%'")
            sh.write(line, 9, PL4par25)

            PL4par16 = summaryOfField(
                "Reso_PL_Join", ("Stats4par16_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" + row1[0] +
                "%' AND Section_Cable LIKE '4x16mm%'", str(row1[0]))
            #PL4par16 = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE '4x16mm%'")
            sh.write(line, 10, PL4par16)

            PL2par16 = summaryOfField(
                "Reso_PL_Join", ("Stats2par16_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" + row1[0] +
                "%' AND Section_Cable LIKE '2x16mm%'", str(row1[0]))
            #PL2par16 = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE '2x16mm%'")
            sh.write(line, 11, PL2par16)

            PL_ClassMono = summaryOfField(
                "Reso_PL_Join", ("StatsClassMono_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" +
                row1[0] + "%' AND Section_Cable LIKE 'Classique Monophas%'",
                str(row1[0]))
            #PL_ClassMono = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE 'Classique Monophas%'")
            sh.write(line, 12, PL_ClassMono)

            PL_ClassTri = summaryOfField(
                "Reso_PL_Join", ("StatsClassTri_" + str(row1[0])),
                "Code_Transfo_1 LIKE '" + row1[0] +
                "%' AND Section_Cable LIKE 'Classique Triphas%'", str(row1[0]))
            #PL_ClassTri = countFCByTransfo("Reso_PL_Join", "Code_Transfo LIKE '"+row[1]+"%' AND Section_Cable LIKE 'Classique Triphas%'")
            sh.write(line, 13, PL_ClassTri)

            #-----------------------------------------------------------------------------------------------------------------------------
            # PL with a direct (fraudulent) connection
            PL_EnDirect = countFCByTransfo(
                "PL", "Code_Transfo LIKE '" + row1[0] +
                "%' AND Anomallies = 'AN032'")
            sh.write(line, 14, PL_EnDirect)

            # PL with an open BCC
            PL_OpenBCC = countFCByTransfo(
                "PL", "Code_Transfo LIKE '" + row1[0] +
                "%' AND BCC = 'BCC sans couvercle'")
            sh.write(line, 15, PL_OpenBCC)

            # PL with a compact circuit breaker
            PL_CompactDisj = countFCByTransfo(
                "PL", "Code_Transfo LIKE '" + row1[0] +
                "%' AND Calibre_Disjoncteur = 'Compact'")
            sh.write(line, 16, PL_CompactDisj)

            #-----------------------------------------------------------------------------------------------------------------------------
            # Network length by cable section
            len3par70 = lenghtReseauxByTransfo("Reseaux", row1[0], "3x70mm")
            sh.write(line, 17, len3par70)

            len3par50 = lenghtReseauxByTransfo("Reseaux", row1[0], "3x50mm")
            sh.write(line, 18, len3par50)

            len4par25 = lenghtReseauxByTransfo("Reseaux", row1[0], "4x25mm")
            sh.write(line, 19, len4par25)

            len4par16 = lenghtReseauxByTransfo("Reseaux", row1[0], "4x16mm")
            sh.write(line, 20, len4par16)

            len2par16 = lenghtReseauxByTransfo("Reseaux", row1[0], "2x16mm")
            sh.write(line, 21, len2par16)

            lenClassMono = lenghtReseauxByTransfo("Reseaux", row1[0],
                                                  "Classique Monophas")
            sh.write(line, 22, lenClassMono)

            lenClassTriph = lenghtReseauxByTransfo("Reseaux", row1[0],
                                                   "Classique Triphas")
            sh.write(line, 23, lenClassTriph)

            #-------------------------------------------------------------------------------------------------------------------------------
            # Number of supports (poles)
            Nbre_Support = countFCByTransfo(
                "Supports", "ID_Support LIKE '" + row1[0] + "%'")
            sh.write(line, 24, Nbre_Support)

            # Broken supports
            supportCasse = countFCByTransfo(
                "Supports",
                "ID_Support LIKE '" + row1[0] + "%' AND Etat LIKE 'Cass%'")
            sh.write(line, 25, supportCasse)

            SupportPourri = countFCByTransfo(
                "Supports",
                "ID_Support LIKE '" + row1[0] + "%' AND Etat = 'Pourri'")
            sh.write(line, 26, SupportPourri)

            SupportPourriOP = countFCByTransfo(
                "Supports", "ID_Support LIKE '" + row1[0] +
                "%' AND Etat = 'Pourri-Operationnel'")
            sh.write(line, 27, SupportPourriOP)

            SupportTombe = countFCByTransfo(
                "Supports",
                "ID_Support LIKE '" + row1[0] + "%' AND Etat LIKE 'Tomb%'")
            sh.write(line, 28, SupportTombe)

            #-------------------------------------------------------------------------------------------------------
            # Eclairage_Public
            NbreEP = countFCByTransfo("Eclairage_Public",
                                      "Code_Transfo LIKE '" + row1[0] + "%'")
            sh.write(line, 29, NbreEP)
            arcpy.AddMessage(
                "Data printed successfully at row {0}".format(count))
            arcpy.AddMessage(
                "--------------------------------------------------------------------------------------"
            )

            line += 1

        #Reset the Reso_PL_Join by deleting the current Reso_PL_Join feature class
        arcpy.Delete_management("Reso_PL_Join")
        arcpy.AddMessage(str(count) + " items found and printed.")
    except Exception as ex:
        arcpy.AddMessage("Rolling back all the printed data")
        # If an exception is caught, report the line number and error message
        tb = sys.exc_info()[2]
        arcpy.AddError("An exception occurred on line %i" % tb.tb_lineno)
        arcpy.AddError("Error message: " + str(ex))
        arcpy.AddMessage("RollBack done...")