Exemplo n.º 1
0
        SHP = arcpy.GetParameterAsText(0)
        # Çıktı Dosyası
        GDB = arcpy.GetParameterAsText(1)
        #GDB="C:\\150827\\Maras.gdb"
        # KMZ Yapılsın mı Yapılmasın mı
        KMZ = arcpy.GetParameterAsText(2)
    except:
        arcpy.AddMessage("\nBaşlangıç Değerlerini Almada Sıkıntı: " +
                         arcpy.GetMessages(2))
        raise Exception
    try:
        for x in range(0, number, 1):
            whereClause = "FID" + "=" + str("%d") % (x)
            fields = ['FID', 'ProjectNam']
            with arcpy.da.SearchCursor(SHP, fields, whereClause) as cursor:
                for RES in cursor:
                    res2 = RES[1]
            duzelt = res2.split()
            YOL = GDB + "\\" + duzelt[0] + duzelt[1]

        if not arcpy.Exists(YOL):
            Clause = "ProjectNam" + " = '" + res2 + "'" + "AND PointType='K'"
            arcpy.SelectLayerByAttribute_management(SHP, "NEW_SELECTION",
                                                    Clause)
            arcpy.PointsToLine_management(SHP, YOL, "", "", "CLOSE")
    except:
        arcpy.AddMessage("\nHesaplamalarda Sıkıntı: " + arcpy.GetMessages(2))
        raise Exception
except:
    arcpy.AddError("\nError running script")
    raise Exception
def select(sql, instance, dbms_admin, dbms_admin_pwd):
    """Execute *sql* against the given DBMS instance and verify that the
    output road features carry the expected travel-time fields.

    sql            -- SQL statement to execute
    instance       -- database instance passed through to createConn
    dbms_admin     -- admin user name
    dbms_admin_pwd -- admin password

    Raises Exception when FT_MINUTES or TF_MINUTES is missing from
    `inputRoadFeatures` (a module-level name defined elsewhere in the file).
    """
    cursor = createConn(instance, dbms_admin, dbms_admin_pwd)
    try:
        cursor.execute(sql)
    except pypyodbc.Error as msg:  # `except X, msg` is Python-2-only syntax
        arcpy.AddError(msg)
    # check that the output fields are there
    fieldNames = [field.name for field in arcpy.ListFields(inputRoadFeatures)]
    if "FT_MINUTES" not in fieldNames or "TF_MINUTES" not in fieldNames:
        raise Exception(
            "Output field (TF_MINUTES or FT_MINUTES) are not present in output"
        )

    print("Test Passed")

except arcpy.ExecuteError:
    # Get the arcpy error messages
    msgs = arcpy.GetMessages()
    arcpy.AddError(msgs)
    print(msgs)

    # return a system error code
    sys.exit(-1)

except:
    # Get the traceback object
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]

    # Concatenate information together concerning the error into a message string
    pymsg = "PYTHON ERRORS:\nTraceback info:\n" + tbinfo + "\nError Info:\n" + str(
        sys.exc_info()[1])
    msgs = "ArcPy ERRORS:\n" + arcpy.GetMessages() + "\n"
Exemplo n.º 4
0
def solve_odcm():
    """Collect the GP tool inputs and run the ODCM command line tool
    (odcm.py) in a background subprocess, logging its output to a file."""
    # Read the tool parameters in declaration order.
    origins = arcpy.GetParameter(0)
    destinations = arcpy.GetParameter(1)
    network_data_source = arcpy.GetParameter(2)
    travel_mode = arcpy.GetParameterAsText(3)
    output_folder = arcpy.GetParameterAsText(4)
    # The command line tool needs "0" for "no cutoff" / "find all destinations".
    cutoff = arcpy.GetParameterAsText(5) or "0"
    target_count = arcpy.GetParameterAsText(6) or "0"
    max_od_size = arcpy.GetParameterAsText(7)
    num_workers = arcpy.GetParameterAsText(8)

    def _catalog_path(param):
        # A layer input exposes its catalog path via dataSource; any other
        # input is a GP value object whose .value holds the path string.
        return param.dataSource if hasattr(param, "dataSource") else param.value

    origins = _catalog_path(origins)
    destinations = _catalog_path(destinations)
    network_data_source = _catalog_path(network_data_source)

    # Directory containing this script (odcm.py lives next to it).
    script_dir = os.path.dirname(os.path.abspath(__file__))

    # Argument vector for the ODCM command line tool.
    command = [
        os.path.join(sys.exec_prefix, "python.exe"),
        os.path.join(script_dir, "odcm.py"),
        "--origins", origins,
        "--destinations", destinations,
        "--network-data-source", network_data_source,
        "--travel-mode", travel_mode,
        "--max-od-size", max_od_size,
        "--cutoff", cutoff,
        "--target-count", target_count,
        "--workers", num_workers,
        "--folder", output_folder,
    ]
    # Windows CREATE_NO_WINDOW flag: hide the console window when the GP tool
    # spawns the subprocess.
    create_no_window = 0x08000000
    # All tool output (success or failure) is captured in this log file.
    output_msg_file = os.path.join(script_dir, "odcm_outputs.txt")
    with open(output_msg_file, "w") as log_fp:
        try:
            subprocess.run(command,
                           stderr=subprocess.STDOUT,
                           stdout=log_fp,
                           check=True,
                           creationflags=create_no_window)
        except subprocess.CalledProcessError as ex:
            arcpy.AddError(
                f"Call to ODCM command line tool failed. Check {output_msg_file} for additional details."
            )
            arcpy.AddError(f"{ex}")
            raise SystemExit(-1)
Exemplo n.º 5
0
        # Lecture des paramètres
        arcpy.AddMessage(sys.argv)
        if len(sys.argv) > 1:
            env = sys.argv[1].upper()

        if len(sys.argv) > 2:
            tables = sys.argv[2].replace(";", ",")

        #Définir l'objet pour copier les données des tables de SIB_PRO.
        oCopierDonneesTablesSib = CopierDonneesTablesSib()

        #Valider les paramètres obligatoires.
        oCopierDonneesTablesSib.validerParamObligatoire(env, tables)

        #Exécuter le traitement pour copier les données des tables de SIB_PRO.
        oCopierDonneesTablesSib.executer(env, tables)

    #Gestion des erreurs
    except Exception, err:
        #Afficher l'erreur
        arcpy.AddError(traceback.format_exc())
        arcpy.AddError(err.message)
        arcpy.AddError("- Fin anormale de l'application")
        #Sortir avec un code d'erreur
        sys.exit(1)

    #Afficher le message de succès du traitement
    arcpy.AddMessage("- Succès du traitement")
    #Sortir sans code d'erreur
    sys.exit(0)
Exemplo n.º 6
0
def appendSDMValues(gp, unitCell, TrainPts):
    """Log the arcsdm environment and training-site statistics.

    Emits (via gp/arcpy messages) the arcsdm version, ArcGIS install info,
    workspace / scratch workspace / mask / cell size checks, and the prior
    probability computed as (#training sites) / (mask area / unitCell).

    gp       -- geoprocessor object used for messaging and environment access
    unitCell -- unit cell area in km^2 (converted to float below)
    TrainPts -- training points dataset; only its row count and catalog path
                are read here

    Raises arcpy.ExecuteError on a failed environment check; any other
    exception is logged with traceback info and re-raised.
    """
    try:
        arcpy.AddMessage("\n" + "=" * 10 + " arcsdm values  " + "=" * 10)
        # arcsdm_version.txt sits next to this module; its first line is the
        # version string printed below.
        with open(
                os.path.join(os.path.dirname(__file__), "arcsdm_version.txt"),
                "r") as myfile:
            data = myfile.readlines()
        #Print version information
        arcpy.AddMessage("%-20s %s" % ("", data[0]))
        installinfo = arcpy.GetInstallInfo()

        arcpy.AddMessage("%-20s %s (%s)" %
                         ("Arcgis environment: ", installinfo['ProductName'],
                          installinfo['Version']))

        if not gp.workspace:
            gp.adderror('Workspace not set')
            raise arcpy.ExecuteError("Workspace not set!")
        if not (arcpy.Exists(gp.workspace)):
            gp.adderror('Workspace %s not found' % (gp.workspace))
            raise arcpy.ExecuteError('Workspace %s not found' % (gp.workspace))
        desc = arcpy.Describe(gp.workspace)
        gp.addmessage("%-20s %s (%s)" %
                      ("Workspace: ", gp.workspace, desc.workspaceType))

        if not gp.scratchworkspace:
            gp.adderror('Scratch workspace mask not set')
        wdesc = arcpy.Describe(gp.scratchworkspace)
        gp.addmessage(
            "%-20s %s (%s)" %
            ("Scratch workspace:", gp.scratchworkspace, wdesc.workspaceType))
        # TODO: These should be moved to common CHECKENV class/function TR

        # Tools wont work if type is different from eachother (joins do not work filesystem->geodatabase! TR
        if (wdesc.workspaceType != desc.workspaceType):
            gp.AddError(
                "Workspace and scratch workspace must be of the same type!")
            raise arcpy.ExecuteError("Workspace type mismatch")

        mapUnits = getMapUnits()
        mapUnits = mapUnits.lower().strip()
        if not mapUnits.startswith('meter'):
            gp.addError(
                'Incorrect output map units: Check units of study area.')
        conversion = getMapConversion(mapUnits)
        #gp.addMessage("Conversion from map units to km^2: " + str(conversion));
        gp.addmessage("%-20s %s" % ('Map Units:', mapUnits))

        if not gp.mask:
            gp.adderror('Study Area mask not set')
            raise arcpy.ExecuteError("Mask not set. Check Environments!")
            #AL
        else:
            if not arcpy.Exists(gp.mask):
                gp.addError("Mask " + gp.mask + " not found!")
                raise arcpy.ExecuteError("Mask not found")
            #gp.AddMessage("Mask set");
            desc = gp.describe(gp.mask)
            gp.addMessage(
                "%-20s %s" %
                ("Mask:", "\"" + desc.name + "\" and it is " + desc.dataType))
            if (desc.dataType == "FeatureLayer"
                    or desc.dataType == "FeatureClass"):
                arcpy.AddWarning(
                    'Warning: You should only use single value raster type masks!'
                )
            gp.addMessage("%-20s %s" %
                          ("Mask size:", str(getMaskSize(mapUnits))))
            #gp.AddMessage("Masksize: " + str(getMaskSize(mapUnits)));

        if not gp.cellsize:
            gp.adderror('Study Area cellsize not set')
        if (gp.cellsize == "MAXOF"):
            arcpy.AddWarning("Cellsize should have definitive value?")
            #raise arcpy.ExecuteError("SDMValues: Cellsize must have value");

        cellsize = arcpy.env.cellSize  #float(str(arcpy.env.cellSize).replace(",","."))
        gp.addmessage("%-20s %s" % ("Cell Size:", cellsize))
        #gp.addMessage("Debug: " + str(conversion));
        total_area = getMaskSize(
            mapUnits
        )  # Now the getMaskSize returns it correctly in sqkm   : * cellsize **2 * conversion
        #gp.addMessage("Debug));
        unitCell = float(unitCell)
        num_unit_cells = total_area / unitCell
        num_tps = gp.GetCount_management(TrainPts)
        gp.addmessage("%-20s %s" % ('# Training Sites:', num_tps))
        gp.addmessage("%-20s %s" %
                      ("Unit Cell Area:", "{}km^2, Cells in area: {} ".format(
                          unitCell, num_unit_cells)))

        if (num_unit_cells == 0):
            raise arcpy.ExecuteError("ERROR: 0 Cells in Area!")
            #AL
        priorprob = num_tps / num_unit_cells
        if not (0 < priorprob <= 1.0):
            arcpy.AddError(
                'Incorrect no. of training sites or unit cell area. TrainingPointsResult {}'
                .format(priorprob))
            raise arcpy.ExecuteError
            #raise SDMError('Incorrect no. of training sites or unit cell area. TrainingPointsResult {}'.format(priorprob))
        gp.addmessage("%-20s %0.6f" % ('Prior Probability:', priorprob))
        #gp.addmessage("Debug priorprob:" + str(getPriorProb(TrainPts, unitCell)))

        gp.addmessage("%-20s %s" %
                      ('Training Set:', gp.describe(TrainPts).catalogpath))
        gp.addmessage("%-20s %s" %
                      ('Study Area Raster:', gp.describe(gp.mask).catalogpath))
        gp.addmessage("%-20s %s" %
                      ('Study Area Area:', str(total_area) + "km^2"))
        #gp.addmessage('Map Units to Square Kilometers Conversion: %f'%conversion)
        arcpy.AddMessage("")
        # Empty line at end
    except arcpy.ExecuteError as e:
        if not all(e.args):
            arcpy.AddMessage("Calculate weights caught arcpy.ExecuteError: ")
            args = e.args[0]
            # NOTE(review): the result of split() is discarded, so this line
            # has no effect -- confirm whether a join/re-log was intended.
            args.split('\n')
            arcpy.AddError(args)
        arcpy.AddMessage("-------------- END EXECUTION ---------------")
        raise
    except:
        # get the traceback object
        tb = sys.exc_info()[2]
        # tbinfo contains the line number that the code failed on and the code from that line
        tbinfo = traceback.format_tb(tb)[0]
        gp.addError(tbinfo)
        # concatenate information together concerning the error into a message string
        #pymsg = "PYTHON ERRORS:\nTraceback Info:\n" + tbinfo + "\nError Info:\n    " + \
        #    str(sys.exc_type)+ ": " + str(sys.exc_value) + "\n"
        # generate a message string for any geoprocessing tool errors
        if len(gp.GetMessages(2)) > 0:
            msgs = "SDM GP ERRORS:\n" + gp.GetMessages(2) + "\n"
            gp.AddError(msgs)
        #gp.AddError(pymsg)
        raise
        arcpy.AddMessage("Running Normalize Transformation ......")
        maxVal = arcpy.GetRasterProperties_management(r, "MAXIMUM")
        minVal = arcpy.GetRasterProperties_management(r, "MINIMUM")
        maxRaster = CreateConstantRaster(maxVal, "FLOAT", descR.MeanCellHeight,
                                         descR.extent)
        if minVal != 0:
            minRaster = CreateConstantRaster(minVal, "FLOAT",
                                             descR.MeanCellHeight,
                                             descR.extent)
            outRaster = (r - minRaster) / (maxRaster - minRaster)
        else:
            outRaster = r / maxRaster

    elif analysisType == "Log":
        arcpy.AddMessage("Running Log Transformation ......")
        outRaster = Ln(r)

    else:
        #Has to be Square-root
        arcpy.AddMessage("Running Square-root Transformation ......")
        outRaster = SquareRoot(r)

    outRasterName = arcpy.GetParameterAsText(4)
    outRaster.save(outRasterName)

    #Set message about running
    arcpy.AddMessage(analysisType + " Transformation Complete")

except LicenseError:
    arcpy.AddError("Spatial Analyst license is unavailable")
Exemplo n.º 8
0
def virhe(Virheilmoitus):
    """Report *Virheilmoitus* as a geoprocessing error, then abort the script."""
    arcpy.AddError(Virheilmoitus)
    # Equivalent to sys.exit(): terminate with SystemExit (exit status 0).
    raise SystemExit
Exemplo n.º 9
0
    def readPointsShape(self, directory):
        """Read a point shapefile and, for each Shape_Area group, extract the
        points with the maximum and minimum Z, then hand the coordinates to
        the distance/elongation calculation.

        directory -- path of the shapefile to read.

        Returns 1 on success; 0 when the file cannot be read or the required
        columns (X, Y, Z, Shape_Area) are missing.
        """
        # Initialize working variables
        num_Zmax = []
        num_Zmin = []
        self.clc = calculosD()
        self.areas = []
        # Loop counters
        cont1 = 0
        cont2 = 0
        # Rows kept after duplicate removal
        max_drop = []
        min_drop = []
        # Selected point coordinates
        self.pointXmax = []
        self.pointYmax = []
        self.pointZmax = []
        self.pointXmin = []
        self.pointYmin = []
        self.pointZmin = []

        try:
            # Read the shapefile data
            sf = shapefile.Reader(directory)
            fields = [x[0] for x in sf.fields][1:]
            records = [y[:] for y in sf.records()]

            # Load the records into a DataFrame and group by Shape_Area
            self.data = pd.DataFrame(columns=fields, data=records)
            columnsneeded = ['X', 'Y', 'Z', 'Shape_Area']

            if columnsneeded == [
                    ele for ele in columnsneeded if (ele in self.data.columns)
            ]:
                s = self.data[['X', 'Y', 'Z']].groupby(self.data['Shape_Area'])

                # Maximum and minimum Z of each group
                maxT = pd.DataFrame(s['Z'].max())
                minT = pd.DataFrame(s['Z'].min())

                # Copy the min/max values into plain lists
                for i in maxT['Z']:
                    num_Zmax.append(i)

                for i in minT['Z']:
                    num_Zmin.append(i)
            else:
                raise KeyError

            # Extract the row holding each group's maximum Z
            for names, groups in s:
                self.areas.append(names)
                max_values = groups[groups['Z'] == num_Zmax[cont1]]
                # Temporarily hold the selected point's row(s)
                tempMax = max_values.drop_duplicates(subset=None,
                                                     keep="first",
                                                     inplace=False)

                # If several rows tie, keep the one with the largest X
                if len(tempMax) > 1:
                    p = tempMax[tempMax['X'] == tempMax['X'].max()]
                    max_drop.append(p)
                else:
                    max_drop.append(tempMax)
                cont1 = cont1 + 1

            # Extract the row holding each group's minimum Z
            for names, groups in s:
                min_values = groups[groups['Z'] == num_Zmin[cont2]]
                # Temporarily hold the selected point's row(s), dropping duplicates
                tempMin = min_values.drop_duplicates(subset=None,
                                                     keep="first",
                                                     inplace=False)
                # If several rows tie, keep the one with the smallest X
                if len(tempMin) > 1:
                    p = tempMin[tempMin['X'] == tempMin['X'].min()]
                    min_drop.append(p)
                else:
                    min_drop.append(tempMin)
                cont2 = cont2 + 1

            # Normalize the maximum-point values: the source format may use a
            # decimal comma instead of a decimal point
            for rows in max_drop:
                for x in rows.X:
                    if (isinstance(x, float)):
                        self.pointXmax.append(float(x))
                    else:
                        # Replace the decimal comma with a dot
                        xm = str(x).replace(',', '.')
                        self.pointXmax.append(float(xm))

                for y in rows.Y:
                    if (isinstance(y, float)):
                        self.pointYmax.append(float(y))
                    else:
                        # Replace the decimal comma with a dot
                        ym = str(y).replace(',', '.')
                        self.pointYmax.append(float(ym))

                for z in rows.Z:
                    if (isinstance(z, float)):
                        self.pointZmax.append(float(z))
                    else:
                        # Replace the decimal comma with a dot
                        zm = str(z).replace(',', '.')
                        self.pointZmax.append(float(zm))

            # Same normalization for the minimum points
            for rows in min_drop:
                for x in rows.X:
                    if (isinstance(x, float)):
                        self.pointXmin.append(float(x))
                    else:
                        xm = str(x).replace(',', '.')
                        self.pointXmin.append(float(xm))

                for y in rows.Y:
                    if (isinstance(y, float)):
                        self.pointYmin.append(float(y))
                    else:
                        ym = str(y).replace(',', '.')
                        self.pointYmin.append(float(ym))

                for z in rows.Z:
                    if (isinstance(z, float)):
                        self.pointZmin.append(float(z))
                    else:
                        zm = str(z).replace(',', '.')
                        self.pointZmin.append(float(zm))

            # Hand the data off for the distance/elongation computation
            self.clc.calcularDistaciaElongacion(self.pointXmax, self.pointXmin,
                                                self.pointYmax, self.pointYmin,
                                                self.pointZmax, self.pointZmin)

        except shapefile.ShapefileException:
            arcpy.AddError("No se ha podido leer el archivo especificado")
            arcpy.AddMessage("{0}".format(directory))
        except KeyError:
            # NOTE(review): if a KeyError were raised before `columnsneeded`
            # is assigned, this handler would hit a NameError -- confirm that
            # path is unreachable.
            arcpy.AddError(
                "El archivo debe contener las siguientes coordenadas de las cuencas: {0}"
                .format(columnsneeded))
        else:
            return 1
        return 0
def main():
    """Create one spatial notification (with an e-mail notifier) per polygon
    in the sample Quadrants feature class.

    Expects exactly one script argument: the name of an existing e-mail
    notification (GP parameter 0). Errors are reported via arcpy.AddError.
    """
    try:
        # Get the input parameters to this tool
        if arcpy.GetArgumentCount() != 1:
            raise Exception("Incorrect number of arguments")

        polygonFC = ".\\SampleData.gdb\\Quadrants"
        emailNotificationName = arcpy.GetParameterAsText(0)

        # Get any necessary licenses before importing the toolbox
        checkOutLicenses("", ["JTX"])

        # Import the Workflow Manager toolbox
        wmxToolbox = getWorkflowManagerToolboxLocation()
        arcpy.ImportToolbox(wmxToolbox, "WMXAdminUtils")
        arcpy.env.overwriteOutput = True

        # Make a feature layer from the polygon FC passed in;
        # features will be selected from this layer
        polygonLayer = "PolygonLayer"
        arcpy.MakeFeatureLayer_management(polygonFC, polygonLayer)

        # Iterate through all of the features in this feature class
        rows = arcpy.SearchCursor(polygonFC)
        counter = 1
        # Pre-bind row so the cleanup below cannot hit a NameError when the
        # feature class is empty (the loop variable would never be assigned).
        row = None
        for row in rows:
            # The OBJECTID field will be unique for all features,
            # so retrieve its value for each feature and create a new
            # selection based on this value.
            objectID = row.OBJECTID
            arcpy.SelectLayerByAttribute_management(
                polygonLayer, "NEW_SELECTION", "OBJECTID = " + str(objectID))

            # Create a new spatial notifier
            snName = "Monitor Region " + str(counter) + " (Sample 2)"
            snDesc = "This spatial notification will monitor all of the features in the region bounded by the polygon with OBJECTID " + str(
                objectID)
            summarize = "SUMMARIZE"
            arcpy.CreateSpatialNotificationWithEmailNotifier_WMXAdminUtils(
                snName, emailNotificationName, snDesc, summarize)
            logPreviousToolMessages()

            # Add an area condition for this spatial notifier
            #
            # NOTE: To see the full list of options available for each argument,
            # launch the GP tool from ArcMap or ArcCatalog.  The arguments
            # correspond to options provided by Workflow Manager.
            geomOp = "INTERSECTS"
            doNotUseInverse = "USE_OPERATION"
            useFeature = "USE_SELECTED_FEATURE"
            arcpy.AddAreaEvaluatorToSN_WMXAdminUtils(snName, geomOp,
                                                     doNotUseInverse,
                                                     useFeature, polygonLayer)
            logPreviousToolMessages()

            # Increment the counter and move on to the next feature
            counter += 1

        # Free up any cursor objects and layers (identity check against None
        # replaces the non-idiomatic `!= None` comparisons)
        if row is not None:
            del row
        if rows is not None:
            del rows

        arcpy.Delete_management(polygonLayer)

    except Exception as ex:  # was Python-2-only `except Exception, ex:`
        printUsage()
        arcpy.AddError("Caught exception: " + str(ex))
Exemplo n.º 11
0
#Import modules
Exemplo n.º 12
0
def outputError(msg):
    """Echo *msg* to standard output and register it as a geoprocessing error."""
    # Console output for command-line runs...
    print(msg)
    # ...and the GP message queue for tool runs.
    arcpy.AddError(msg)
Exemplo n.º 13
0
    ext = arcpy.Describe(DEM).extent
    area = (ext.width * ext.height) / 1000000
    IT = Init_Time.strftime('%Y-%m-%d %H:%M:%S')
    FT = Fin_Time.strftime('%Y-%m-%d %H:%M:%S')
    Sim_Time = (Fin_Time - Init_Time)
    import datetime
    Sim_Time = Sim_Time - datetime.timedelta(
        microseconds=Sim_Time.microseconds)

    R = open(RES, 'w')
    R.write("{: <25} {: <20}\n".format("DEM name:", DEM_name))
    R.write("{: <25} {: <20}\n".format("Threshold area [km^2]:",
                                       "%.2f" % (bl_tresh / 1000000)))
    R.write("{: <25} {: <20}\n".format("Resolution [m]:", "%.2f" % pixelsize))
    R.write("{: <25} {: <20}\n".format("Extension [km^2]:", "%.2f" % area))
    R.write("{: <25} {: <20}\n".format("Initial simulation time:", IT))
    R.write("{: <25} {: <20}\n".format("Final simulation time:", FT))
    R.write("{: <25} {: <20}\n".format("Simulation time:", Sim_Time))
    R.close()

    arcpy.AddMessage(' ')
    arcpy.AddMessage('HYDROBASE COMPLETED!')

    #---------------------------------------------------------------------------------------------------

except:
    arcpy.AddError(arcpy.GetMessages())
    arcpy.AddMessage(traceback.format_exc())
    #print arcpy.GetMessages()
# -*- coding: utf-8 -*-
# This script runs the Buffer tool. The user supplies the input and output paths, and the buffer distance.

# Import arcpy module
import arcpy
arcpy.env.overwriteOutput = True

try:
    # Script arguments: input features, output features, buffer distance
    inPath = arcpy.GetParameterAsText(0)
    outPath = arcpy.GetParameterAsText(1)
    bufferDistance = arcpy.GetParameterAsText(2)

    # Run the Buffer tool
    arcpy.Buffer_analysis(inPath, outPath, bufferDistance)

    # Report a success message
    arcpy.AddMessage("All done!")
except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are no
    # longer swallowed; tool failures are still caught and reported.
    arcpy.AddError("Could not complete the buffer")

    # Report any error messages that the Buffer tool might have generated
    arcpy.AddMessage(arcpy.GetMessages())
Exemplo n.º 15
0

class CustomError(Exception):
    """Sentinel exception raised to abort the script after the error has
    already been reported via arcpy.AddError."""


try:
    # Figure out what version of ArcGIS they're running
    BBB_SharedFunctions.DetermineArcVersion()
    ArcVersion = BBB_SharedFunctions.ArcVersion
    ProductName = BBB_SharedFunctions.ProductName
    if BBB_SharedFunctions.ProductName == "ArcGISPro" and ArcVersion in [
            "1.0", "1.1", "1.1.1"
    ]:
        arcpy.AddError(
            "The BetterBusBuffers toolbox does not work in versions of ArcGIS Pro prior to 1.2.\
You have ArcGIS Pro version %s." % ArcVersion)
        raise CustomError

    #----- Get input parameters -----
    # Output files and location
    outFile = arcpy.GetParameterAsText(0)
    # GTFS SQL dbase - must be created ahead of time.
    SQLDbase = arcpy.GetParameterAsText(1)
    BBB_SharedFunctions.ConnectToSQLDatabase(SQLDbase)
    # Points to Analyze
    inPointsLayer = arcpy.GetParameterAsText(2)
    # Unique ID for input points
    inLocUniqueID = arcpy.GetParameterAsText(3)

    # Weekday or specific date to analyze.
Exemplo n.º 16
0
# PARAMETERS
# ***************************************************************
# set the orientation data boolean here
isOrientationData = False

# Cross-section layer, use this as a reference to the feature layer
lineLayer = arcpy.GetParameterAsText(0)

#use this as the basename for intermediate files (because lineLayer may
#have slashes in the name/path)
lineLyrName = arcpy.Describe(lineLayer).name

# Validation: the cross-section layer must contain exactly one line.
#can't figure out how to put this in the validator class ??
result = arcpy.GetCount_management(lineLayer)
if int(result.getOutput(0)) > 1:
    arcpy.AddError(lineLayer + ' has more than one line in it.')
    raise SystemError

# elevation raster layer
dem = arcpy.GetParameterAsText(1)

# coordinate priority - corner from which the measuring will begin
# NOTE(review): getCPValue is defined elsewhere in this script -- presumably
# it maps the text choice to a corner code; verify against its definition.
cp = getCPValue(arcpy.GetParameterAsText(2))

# structural data points layer
ptLayer = arcpy.GetParameterAsText(3)

# collar Z field
ptz_field = arcpy.GetParameterAsText(4)

# strike field (NOTE(review): the parameter read for this field appears to
# have been truncated from this excerpt)
def run_CBA():
    """Join per-stratum economic data onto a feature class and compute the
    COSTS, REVENUES and RETURNS (= revenues - costs) of every feature.

    GP parameters: 0 input features, 1 join field on the input, 2 economics
    CSV, 3 join field on the economics table, 4 output layer name. Sets
    parameter 5 to the output feature class created in the scratch GDB.
    """
    # Get the value of the input parameters
    inputFC = arcpy.GetParameterAsText(0)  #Polygon or point feature class
    inputFC_join_field = arcpy.GetParameterAsText(
        1)  #field in the input feature class used for join
    economics_table = arcpy.GetParameterAsText(
        2)  # CSV file with data on economic costs and revenues
    economics_join_field = arcpy.GetParameterAsText(
        3
    )  #field in the economics table used for join with input feature class
    outLyrName = arcpy.GetParameterAsText(4)

    try:
        outCBA_fc = os.path.join(arcpy.env.scratchGDB, outLyrName)
        arcpy.CopyFeatures_management(inputFC, outCBA_fc)

        arcpy.AddMessage('Copying Input Feature and Adding New Fields ...')
        arcpy.SetProgressorLabel(
            'Copying Input Feature and Adding New Fields ...')

        # Add the three output fields.
        arcpy.AddField_management(outCBA_fc, "COSTS", "DOUBLE")
        arcpy.AddField_management(outCBA_fc, "REVENUES", "DOUBLE")
        arcpy.AddField_management(outCBA_fc, "RETURNS", "DOUBLE")

        # Copy the CSV to a temporary GDB table (workaround for bug in
        # MakeTableView -- ArcGIS 10.3.1).
        arcpy.AddMessage('Creating Join Dictionary for Economics Data ...')
        arcpy.SetProgressorLabel(
            'Creating Join Dictionary for Economics Data ...')

        strata_tab = os.path.join(arcpy.env.scratchGDB, "JoinTable")
        arcpy.CopyRows_management(economics_table, strata_tab)

        # Value fields: everything except the OID and the key/join field.
        flistObj = arcpy.ListFields(strata_tab)
        flist = [
            f.name for f in flistObj
            if f.type != "OID" and f.name != economics_join_field
        ]

        # strataDict[join_value][field_name] -> field value
        strataDict = {}
        for r in arcpy.SearchCursor(strata_tab):
            fieldvaldict = {}
            for field in flist:
                fieldvaldict[field] = r.getValue(field)

            strataDict[r.getValue(economics_join_field)] = fieldvaldict

        del strata_tab, flistObj

        arcpy.AddMessage('Calculating Returns from Costs and Revenues ...')
        arcpy.SetProgressorLabel(
            'Calculating Returns from Costs and Revenues ...')

        # Replace missing / "n/a" entries with 0.0. (.items() replaces the
        # Python-2-only .iteritems() used previously; same behavior, and it
        # also runs on Python 3.)
        for k in strataDict:
            for k2, v2 in strataDict[k].items():
                if v2 is None or v2 == "n/a" or v2 == r"n\a":
                    strataDict[k][k2] = 0.0

        with arcpy.da.UpdateCursor(
                outCBA_fc,
            [inputFC_join_field, 'COSTS', 'REVENUES', 'RETURNS'],
                where_clause="\"%s\" IS NOT NULL" %
                inputFC_join_field) as cursor:

            for row in cursor:
                strata = row[0]
                if strata not in strataDict:
                    arcpy.AddWarning(
                        "The attribute \"{}\" was not found in the economics table!"
                        .format(strata))
                    continue
                # Sum every *cost* field and every *revenue* field of this
                # stratum (field names containing 'cost' / 'revenue').
                costs = sum(
                    float(v) for k, v in strataDict[strata].items()
                    if 'cost' in k)
                revenues = sum(
                    float(v) for k, v in strataDict[strata].items()
                    if 'revenue' in k)

                row[1] = costs
                row[2] = revenues
                row[3] = revenues - costs

                cursor.updateRow(row)

        arcpy.AddWarning("Warning: negative monetary values in 'RETURNS' " + \
                         "can be the consequence of missing values in the financial table!!")

        # Publish the output feature class as the derived output parameter.
        arcpy.SetParameterAsText(5, outCBA_fc)

    except Exception:
        e = sys.exc_info()[1]
        # Format the exception itself: e.args may be empty, so the previous
        # e.args[0] could raise IndexError inside the handler.
        arcpy.AddError('An error occurred: {}'.format(e))
Exemplo n.º 18
0
def CreateMasterMosaicDataset(master_fgdb_path, master_md_name, MDMasterFC_path, masterCellSize_meters):
    """Create and configure the Master Mosaic Dataset inside an existing file GDB.

    Steps (in order):
      1. Verify the target file geodatabase exists.
      2. If the mosaic dataset does not already exist, create it in the
         Web Mercator spatial reference with one 32-bit float band.
      3. Add a boundary record and the standard metadata fields.
      4. Temporarily add a placeholder raster, set the desired mosaic dataset
         and raster properties, then remove the placeholder again.

    Args:
        master_fgdb_path:       path to the existing master file geodatabase.
        master_md_name:         name of the mosaic dataset to create.
        MDMasterFC_path:        boundary feature class (optional; consumed by
                                addMasterBoundary).
        masterCellSize_meters:  cell size applied via
                                SetMosaicDatasetProperties (meters).

    Returns:
        None. Progress and errors are reported via arcpy messages; if the
        mosaic dataset already exists a warning is issued and nothing is done.
    """
    Utility.printArguments(["master_fgdb_path", "master_md_name", "MDMasterFC_path", "masterCellSize_meters"],
                           [master_fgdb_path, master_md_name, MDMasterFC_path, masterCellSize_meters], "B02 CreateMasterMosaicDataset")


    # Ensure the Master gdb exists
    if os.path.exists(master_fgdb_path):
        master_md_path = os.path.join(master_fgdb_path, master_md_name)
        arcpy.AddMessage("Full Master Mosaic Name:             {0}".format(master_md_path))

        if not arcpy.Exists(master_md_path):


            # SpatRefMaster = "PROJCS['WGS_1984_Web_Mercator_Auxiliary_Sphere',GEOGCS['GCS_WGS_1984',DATUM['D_WGS_1984',SPHEROID['WGS_1984',6378137.0,298.257223563]],PRIMEM['Greenwich',0.0],UNIT['Degree',0.0174532925199433]],PROJECTION['Mercator_Auxiliary_Sphere'],PARAMETER['False_Easting',0.0],PARAMETER['False_Northing',0.0],PARAMETER['Central_Meridian',0.0],PARAMETER['Standard_Parallel_1',0.0],PARAMETER['Auxiliary_Sphere_Type',0.0],UNIT['Meter',1.0],AUTHORITY['EPSG',3857]]"
            SpatRefMaster = RasterConfig.SpatRef_WebMercator

            # Create the Master Mosaic Dataset
            # Single 32-bit float band: the master MD stores elevation values.
            arcpy.CreateMosaicDataset_management(master_fgdb_path, master_md_name,
                                                 coordinate_system=SpatRefMaster,
                                                 num_bands="1", pixel_type="32_BIT_FLOAT", product_definition="NONE", product_band_definitions="#")
            Utility.addToolMessages()

            # If a boundary is specified (it is optional)...
            # Write one record to the boundary so it can be subsequently replaced by the import Mosaic Dataset Geometry tool
            addMasterBoundary(master_fgdb_path, master_md_name, MDMasterFC_path)

            # Add the project metadata fields (ProjectID, ProjectDate, ...).
            Raster.addStandardMosaicDatasetFields(md_path=master_md_path)






#                         arcpy.AddField_management(master_md_path, field_name="ProjectID", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="100", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="ProjectDate", field_type="DATE", field_precision="#", field_scale="#",
#                                                   field_length="#", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="RasterPath", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="512", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
# #                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="ProjectSrs", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="100", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="ProjectSrsUnits", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="20", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="ProjectSrsUnitsZ", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="20", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="ProjectSource", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="20", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()
#                         arcpy.AddField_management(master_md_path, field_name="PCSCode", field_type="TEXT", field_precision="#", field_scale="#",
#                                                   field_length="20", field_alias="#", field_is_nullable="NULLABLE", field_is_required="NON_REQUIRED", field_domain="#")
#                         Utility.addToolMessages()

#                         arcpy.AddMessage("Creating Indexes on previously created fields in Master GDB...")

            # Create indexes on all metadata fields to facilitate query

#                         arcpy.AddIndex_management(master_md_path, fields="ProjectID", index_name="ProjectID", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()
#                         arcpy.AddIndex_management(master_md_path, fields="ProjectDate", index_name="ProjectDate", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()
#                         arcpy.AddIndex_management(master_md_path, fields="ProjectSrs", index_name="ProjectSrs", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()
#                         arcpy.AddIndex_management(master_md_path, fields="ProjectSrsUnits", index_name="ProjectSrsUnits", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()
#                         arcpy.AddIndex_management(master_md_path, fields="ProjectSrsUnitsZ", index_name="ProjectSrsUnitsZ", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()
#                         arcpy.AddIndex_management(master_md_path, fields="ProjectSource", index_name="ProjectSource", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()
#                         arcpy.AddIndex_management(master_md_path, fields="PCSCode", index_name="PCSCode", unique="NON_UNIQUE", ascending="ASCENDING")
#                         Utility.addToolMessages()

            # Set the desired Master MD properties (non-default parameters are listed below):
            #   default mosaic method is "BYATTRIBUTE" w ProjectDate
            #      order_base = 3000 (a year far into the future)
            #   default_compression_type="LERC"
            #   limited the transmission_fields
            #   start_time_field="ProjectDate" (in case we decide to enable time later)
            #   max_num_of_records_returned="2000" (default is 1000)
            #   max_num_of_download_items="40" (default is 20)
            #   max_num_per_mosaic = "40"      (default is 20)
            #   data_source_type="ELEVATION"
            #   cell_size = 1
            #   rows_maximum_imagesize="25000"
            #   columns_maximum_imagesize="25000"
            #   metadata_level = "BASIC"

            transmissionFields = CMDRConfig.TRANSMISSION_FIELDS
            arcpy.AddMessage("transmissionFields: {0}".format(transmissionFields))

            # NOTE(review): a temporary raster is added here and removed below;
            # presumably this is required so the property/statistics calls that
            # follow operate on a non-empty mosaic dataset — confirm.
            arcpy.AddRastersToMosaicDataset_management(in_mosaic_dataset=master_md_path, raster_type="Raster Dataset", input_path=RasterConfig.MasterTempRaster, update_cellsize_ranges="UPDATE_CELL_SIZES", update_boundary="UPDATE_BOUNDARY", update_overviews="NO_OVERVIEWS", maximum_pyramid_levels="", maximum_cell_size="0", minimum_dimension="1500", spatial_reference="", filter="#", sub_folder="SUBFOLDERS", duplicate_items_action="ALLOW_DUPLICATES", build_pyramids="NO_PYRAMIDS", calculate_statistics="NO_STATISTICS", build_thumbnails="NO_THUMBNAILS", operation_description="#", force_spatial_reference="NO_FORCE_SPATIAL_REFERENCE")
            Utility.addToolMessages()

            arcpy.SetMosaicDatasetProperties_management(master_md_path, rows_maximum_imagesize="25000", columns_maximum_imagesize="25000",
                                                      allowed_compressions="LERC;JPEG;None;LZ77", default_compression_type="LERC", JPEG_quality="75",
                                                      LERC_Tolerance="0.001", resampling_type="BILINEAR", clip_to_footprints="NOT_CLIP",
                                                      footprints_may_contain_nodata="FOOTPRINTS_MAY_CONTAIN_NODATA", clip_to_boundary="NOT_CLIP",
                                                      color_correction="NOT_APPLY", allowed_mensuration_capabilities="#",
                                                      default_mensuration_capabilities="NONE",
                                                      allowed_mosaic_methods="NorthWest;Center;LockRaster;ByAttribute;Nadir;Viewpoint;Seamline;None",
                                                      default_mosaic_method="ByAttribute", order_field=CMDRConfig.PROJECT_DATE, order_base="3000",
                                                      sorting_order="ASCENDING", mosaic_operator="FIRST", blend_width="0", view_point_x="600",
                                                      view_point_y="300", max_num_per_mosaic="40", cell_size_tolerance="0.8", cell_size="{0} {0}".format(masterCellSize_meters),
                                                      metadata_level="BASIC",
                                                      transmission_fields=transmissionFields,
                                                      use_time="DISABLED", start_time_field=CMDRConfig.PROJECT_DATE, end_time_field="#", time_format="#",
                                                      geographic_transform="#",
                                                      max_num_of_download_items="40", max_num_of_records_returned="2000",
                                                      data_source_type="ELEVATION", minimum_pixel_contribution="1", processing_templates="None",
                                                      default_processing_template="None")
            Utility.addToolMessages()

#             arcpy.SetMosaicDatasetProperties_management(in_mosaic_dataset="C:/temp/MDMaster/MDMaster_DSM.gdb/DSM", rows_maximum_imagesize="25000", columns_maximum_imagesize="25000", allowed_compressions="None;JPEG;LZ77;LERC", default_compression_type="None", JPEG_quality="75", LERC_Tolerance="0.001", resampling_type="BILINEAR", clip_to_footprints="NOT_CLIP", footprints_may_contain_nodata="FOOTPRINTS_MAY_CONTAIN_NODATA", clip_to_boundary="NOT_CLIP", color_correction="NOT_APPLY", allowed_mensuration_capabilities="#", default_mensuration_capabilities="NONE", allowed_mosaic_methods="ByAttribute;NorthWest;Center;LockRaster;Nadir;Viewpoint;Seamline;None", default_mosaic_method="ByAttribute", order_field="Project_Date", order_base="3000", sorting_order="ASCENDING", mosaic_operator="FIRST", blend_width="0", view_point_x="600", view_point_y="300", max_num_per_mosaic="40", cell_size_tolerance="0.8", cell_size="1 1", metadata_level="BASIC", transmission_fields="Name;MinPS;MaxPS;LowPS;HighPS;Tag;GroupName;ProductName;CenterX;CenterY;ZOrder;Shape_Length;Shape_Area;Project_ID;Project_Date;Porject_Source;Project_SR_XY;Project_SR_XY_Units;Project_SR_XY_Code;Project_SR_Z_Units", use_time="DISABLED", start_time_field="Project_Date", end_time_field="", time_format="", geographic_transform="", max_num_of_download_items="40", max_num_of_records_returned="2000", data_source_type="ELEVATION", minimum_pixel_contribution="1", processing_templates="None", default_processing_template="None")
            # set statistics Min = -300 and Max = 2000M
            # set nodata = default no data value
            arcpy.SetRasterProperties_management(master_md_path, data_type="ELEVATION", statistics="1 0 2000 # #", stats_file="#", nodata="1 {}".format(RasterConfig.NODATA_DEFAULT))
            Utility.addToolMessages()

            # Remove the temporary raster added above, leaving an empty,
            # fully configured mosaic dataset.
            arcpy.RemoveRastersFromMosaicDataset_management(in_mosaic_dataset=master_md_path, where_clause="1=1", update_boundary="UPDATE_BOUNDARY", mark_overviews_items="MARK_OVERVIEW_ITEMS", delete_overview_images="DELETE_OVERVIEW_IMAGES", delete_item_cache="DELETE_ITEM_CACHE", remove_items="REMOVE_MOSAICDATASET_ITEMS", update_cellsize_ranges="UPDATE_CELL_SIZES")
            Utility.addToolMessages()

        else:
            arcpy.AddWarning("Master Mosaic Dataset already exists: {0}. Cannot continue".format(master_md_path))
    else:
        arcpy.AddError("Master Geodatabase doesn't exist {0}".format(master_fgdb_path))

    arcpy.AddMessage("Operation complete")
Exemplo n.º 19
0
def getMaskSize(mapUnits):
    """Return the area of the analysis mask (arcpy.env.mask) in square km.

    The computed size is cached in the module-level ``globalmasksize`` so
    the (potentially expensive) calculation runs only once per session.

    Raster masks:  counts data (non-NoData) cells and multiplies by the
                   cell area; requires a numeric Cell Size in the
                   geoprocessing environment.
    Feature masks: sums the area of every feature in the mask layer.

    Args:
        mapUnits: map unit name of the study area; must start with 'meter'.

    Raises:
        ErrorExit: when the environment Cell Size is not numeric for a
            raster mask.
        arcpy.ExecuteError: when the mask data type is unsupported.
    """
    try:
        # Reuse the cached size if it was already computed.
        global globalmasksize
        if globalmasksize > 0:
            return globalmasksize
        desc = arcpy.Describe(arcpy.env.mask)

        # Mask datatypes: RasterLayer or RasterBand (AL added 040520)
        if desc.dataType == "RasterLayer" or desc.dataType == "RasterBand":
            # If mask type is raster, Cell Size must be numeric in Environment #AL 150520
            if not (str(arcpy.env.cellSize).replace('.', '', 1).replace(
                    ',', '', 1).isdigit()):
                arcpy.AddMessage("*" * 50)
                arcpy.AddError(
                    "ERROR: Cell Size must be numeric when mask is raster. Check Environments!"
                )
                arcpy.AddMessage("*" * 50)
                raise ErrorExit

            dwrite(" Counting raster size")
            dwrite("   File: " + desc.catalogpath)
            tulos = arcpy.GetRasterProperties_management(
                desc.catalogpath, "COLUMNCOUNT")
            tulos2 = arcpy.GetRasterProperties_management(
                desc.catalogpath, "ROWCOUNT")
            rows = int(tulos2.getOutput(0))
            columns = int(tulos.getOutput(0))

            # There is no simple way to query the NoData cell count directly,
            # so pull the raster into numpy and count data cells there. TR
            raster_array = arcpy.RasterToNumPyArray(desc.catalogpath,
                                                    nodata_to_value=-9999)
            dwrite("    Counting mask cells in numpy..." + str(columns) +
                   "x" + str(rows))
            # Vectorized count of non-NoData cells; replaces the original
            # O(rows*columns) Python double loop with one C-level pass.
            # (Assumes a single-band, 2-D array, as the original loop did.)
            count = int((raster_array != -9999).sum())
            dwrite("     count:" + str(count))
            # Convert the data-cell count to an area in map units.
            cellsize = float(str(arcpy.env.cellSize.replace(",", ".")))
            count = count * (cellsize * cellsize)

        # Mask datatypes: FeatureLayer, FeatureClass or ShapeFile (Unicamp added 241018/AL 210720)
        elif (desc.dataType == "FeatureLayer"
              or desc.dataType == "FeatureClass"
              or desc.dataType == "ShapeFile"):
            # Sum the area of every feature in the mask.
            maskrows = arcpy.SearchCursor(desc.catalogpath)
            shapeName = desc.shapeFieldName
            maskrow = maskrows.next()
            count = 0
            while maskrow:
                feat = maskrow.getValue(shapeName)
                count += feat.area
                maskrow = maskrows.next()
            dwrite(" count:" + str(count))

        # other datatypes are not allowed
        else:
            raise arcpy.ExecuteError(desc.dataType +
                                     " is not allowed as Mask!")

        # Mask Size calculation continues
        mapUnits = mapUnits.lower().strip()
        if not mapUnits.startswith('meter'):
            arcpy.AddError(
                'Incorrect output map units: Check units of study area.')
        conversion = getMapConversion(mapUnits)
        # Convert from square map units to square kilometers.
        count = count * conversion
        globalmasksize = count
        return count
    except arcpy.ExecuteError:
        # Geoprocessing errors are already reported by arcpy; just propagate.
        raise
    except:
        # get the traceback object
        tb = sys.exc_info()[2]
        # tbinfo contains the line number that the code failed on and the code from that line
        tbinfo = traceback.format_tb(tb)[0]
        arcpy.AddError(tbinfo)
        # generate a message string for any geoprocessing tool errors
        if len(arcpy.GetMessages(2)) > 0:
            msgs = "SDM GP ERRORS:\n" + arcpy.GetMessages(2) + "\n"
            arcpy.AddError(msgs)
        raise
Exemplo n.º 20
0
prj = ""
# Input feature layer / table whose row count drives the output flags.
# Bug fix: GetParameterAsText requires the parameter index (0); calling it
# with no argument raises a TypeError.
indata = arcpy.GetParameterAsText(0)
# Bug fix: GetCount_management returns a Result object, not an integer —
# comparing it to 0 does not test the row count. Extract the count first.
row_count = int(arcpy.GetCount_management(indata).getOutput(0))

try:
    # Publish two complementary boolean outputs so a model can branch on
    # whether the selection contains any rows.
    if row_count > 0:
        arcpy.SetParameterAsText(
            1,
            "true")  #The first parameter refers to the "HasSomething" variable
        arcpy.SetParameterAsText(
            2,
            "false")  #The second parameter refers to the "HasNothing" variable
        arcpy.AddMessage("Selection has rows")

    else:
        arcpy.SetParameterAsText(
            1, "false"
        )  #The first parameter refers to the "HasSomething" variable
        arcpy.SetParameterAsText(
            2,
            "true")  #The second parameter refers to the "HasNothing" variable
        arcpy.AddMessage("Selection has no rows")

except:
    # Report the Python traceback plus any geoprocessing messages.
    tb = sys.exc_info()[2]
    tbinfo = traceback.format_tb(tb)[0]
    # Bug fix: sys.exc_type is a long-deprecated alias (removed in Python 3);
    # use sys.exc_info() instead.
    pymsg = tbinfo + "\n" + str(sys.exc_info()[0])
    arcpy.AddError("Python Messages: " + pymsg + " GP Messages: " +
                   arcpy.GetMessages(2))
                row[f + 2] = nodeDict[streamID][nodeID][field]
                cursor.updateRow(row)


#enable garbage collection
gc.enable()

try:
    print("Step 2: Measure Channel Width")

    #keeping track of time
    startTime = time.time()

    # Check if the output exists
    if not arcpy.Exists(nodes_fc):
        arcpy.AddError("\nThis output does not exist: \n" +
                       "{0}\n".format(nodes_fc))
        sys.exit("This output does not exist: \n" + "{0}\n".format(nodes_fc))

    if overwrite_data is True:
        env.overwriteOutput = True
    else:
        env.overwriteOutput = False

    # Determine input spatial units
    proj_nodes = arcpy.Describe(nodes_fc).spatialReference
    proj_rb = arcpy.Describe(rb_fc).spatialReference
    proj_lb = arcpy.Describe(lb_fc).spatialReference

    # Check to make sure the rb_fc/lb_fc and input points are
    # in the same projection.
    if proj_nodes.name != proj_rb.name:
def main(*argv):
    """Compare two versions of a feature class and report the differences.

    argv[0] (fc_new):       newer feature class.
    argv[1] (fc_old):       older feature class.
    argv[2] (unique_field): field whose values uniquely identify features
                            in both datasets.
    argv[3] (out_gdb):      geodatabase receiving the output feature class.

    Produces a feature class "modifed_dataset" in out_gdb whose STATUS field
    marks each feature as "NEW FEATURE", "REMOVED FEATURE", or
    "GEOMETRY MODIFIED", and publishes it as output parameter 4.
    """
    try:
        fc_new = argv[0]
        fc_old = argv[1]
        unique_field = argv[2]
        out_gdb = argv[3]

        #  Local Variables
        #
        scratchGDB = env.scratchGDB
        dis_new = os.path.join(scratchGDB, "dis_new")
        dis_old = os.path.join(scratchGDB, "dis_old")
        out_fc = os.path.join(out_gdb, os.path.basename(fc_old))
        #  Logic
        #
        # Dissolve both inputs on the unique field so each value maps to a
        # single (possibly multipart) geometry before comparison.
        dis_new = arcpy.Dissolve_management(in_features=fc_new,
                                            out_feature_class=dis_new,
                                            dissolve_field=unique_field)[0]
        dis_old = arcpy.Dissolve_management(in_features=fc_old,
                                            out_feature_class=dis_old,
                                            dissolve_field=unique_field)[0]
        # NOTE(review): `fields` is unused below — from_featureclass is
        # called with fields=[unique_field] directly.
        fields = [unique_field, 'SHAPE@']
        new_sdf = SpatialDataFrame.from_featureclass(dis_new, fields=[unique_field])
        old_sdf = SpatialDataFrame.from_featureclass(dis_old, fields=[unique_field])
        #  Find Added and Removed Features
        #
        # Set arithmetic on the unique-field values identifies adds/deletes.
        unew = set(new_sdf[unique_field].unique().tolist())
        uold = set(old_sdf[unique_field].unique().tolist())
        adds = list(unew - uold)
        deletes = list(uold - unew)
        old_df = old_sdf[old_sdf[unique_field].isin(deletes)].copy()
        old_df['STATUS'] = "REMOVED FEATURE"
        new_df = new_sdf[new_sdf[unique_field].isin(adds)].copy()
        new_df['STATUS'] = "NEW FEATURE"
        # Find Geometry Differences
        #
        # Align both frames on the unique field, then element-wise compare
        # the SHAPE column to flag features whose geometry changed.
        df2 = new_sdf[~new_sdf[unique_field].isin(adds)].copy()
        df2.index = df2[unique_field]
        df1 = old_sdf[~old_sdf[unique_field].isin(deletes)].copy()
        df1.index = df1[unique_field]
        ne = df1 != df2
        ne = ne['SHAPE']
        updated = df2[ne].copy()
        updated['STATUS'] = "GEOMETRY MODIFIED"
        updated.reset_index(inplace=True, drop=True)
        # Release intermediates before concatenation to limit peak memory.
        del ne
        del df1
        del df2
        del new_sdf
        del old_sdf
        joined = pd.concat([updated,
                            old_df,
                            new_df])
        joined.reset_index(inplace=True, drop=True)
        del updated
        del new_df
        del old_df
        out_fc = joined.to_featureclass(out_gdb, "modifed_dataset")
        del joined
        arcpy.SetParameterAsText(4, out_fc)
    except arcpy.ExecuteError:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
        arcpy.AddError("ArcPy Error Message: %s" % arcpy.GetMessages(2))
    except FunctionError as f_e:
        messages = f_e.args[0]
        arcpy.AddError("error in function: %s" % messages["function"])
        arcpy.AddError("error on line: %s" % messages["line"])
        arcpy.AddError("error in file name: %s" % messages["filename"])
        arcpy.AddError("with error message: %s" % messages["synerror"])
        arcpy.AddError("ArcPy Error Message: %s" % messages["arc"])
    except:
        line, filename, synerror = trace()
        arcpy.AddError("error on line: %s" % line)
        arcpy.AddError("error in file name: %s" % filename)
        arcpy.AddError("with error message: %s" % synerror)
  
    
        featurelist = [Del1,Del2,Del3,Del4]        
        for feature in featurelist:
                arcpy.Delete_management(feature, "")



#..............................................................................................................






        #print "Features classes copied successfully for Quad Summary to Folder."+str(QPath8)
        print
        print"******************************************************************"
        print"*                  CONGRATULATIONS                               *"
        print"******************************************************************"
        print
        print "Please, run the 'LiDarPDelete' script ."



except Exception as e:
    print e.message
    arcpy.AddError(e.message)

 
Exemplo n.º 24
0
import arcpy, os
def getListLayerPathesOfMXD(inputMXDFile):
    """Report (as geoprocessing messages) the data source of every layer in an MXD."""
    mxd = arcpy.mapping.MapDocument(inputMXDFile)
    for layer in arcpy.mapping.ListLayers(mxd):
        # Not every layer type exposes a data source (e.g. group layers),
        # so check support before reading it.
        if layer.supports("DATASOURCE"):
            arcpy.AddMessage(layer.dataSource)

if __name__ == '__main__':
    inputMXDFile = arcpy.GetParameterAsText(0)
    # Bug fix: the original `else:` was mis-indented after a one-line `if`
    # statement, which is a SyntaxError. Rewritten as a proper if/else block.
    if os.path.splitext(inputMXDFile)[1] == ".mxd":
        getListLayerPathesOfMXD(inputMXDFile)
    else:
        arcpy.AddError("input file is not map document")
def createConn(instance, dbms_admin, dbms_admin_pwd):
    """Open a pypyodbc connection to the 'master' DB on a SQL Server instance.

    Uses SQL authentication when a password is supplied; otherwise falls back
    to Windows integrated security (Trusted_Connection). The connection is
    opened with autocommit enabled and its cursor is returned.

    NOTE(review): on a connection failure the error is reported via
    arcpy.AddError and the function implicitly returns None — callers that
    immediately call cursor.execute() will then fail on the None cursor.
    """
    try:
        if dbms_admin_pwd != '':
            # SQL authentication: explicit user id and password.
            cnnctnstring = 'DRIVER={0};SERVER={1};DATABASE=master;UID={2};PWD={3}'.format(
                'SQL Server', instance, dbms_admin, dbms_admin_pwd)
        else:
            # No password given: use Windows integrated security.
            cnnctnstring = 'DRIVER={0};SERVER={1};DATABASE=master;Trusted_Connection=Yes'.format(
                'SQL Server', instance)
        cnnctn = pypyodbc.connect(cnnctnstring, autocommit=True)
        cursor = cnnctn.cursor()
        return cursor
    # Python 2 except syntax — matches the rest of this file.
    except pypyodbc.Error, msg:
        arcpy.AddError(msg)
    except Exception as e:
        print e.args[0]
        arcpy.AddError(e.args[0])


# -----------------------------------------------------------------------------
# Execute T-SQL
# -----------------------------------------------------------------------------
def executeSQL(sql, instance, dbms_admin, dbms_admin_pwd):
    """Execute a single T-SQL statement on the given SQL Server instance.

    Opens a fresh connection via createConn, echoes the statement as a
    geoprocessing message, and executes it. Database errors are reported
    via arcpy.AddError and swallowed (execution continues).
    """
    cursor = createConn(instance, dbms_admin, dbms_admin_pwd)
    try:
        arcpy.AddMessage(sql)
        cursor.execute(sql)
    except pypyodbc.Error, msg:
        arcpy.AddError(msg)
        pass

    def execute(self, parameters, messages):
        """The source code of the tool.

        Creates a netCDF feature layer by auto-detecting the X and Y
        coordinate variables and the station (row) dimension of the
        selected variables, then delegating to
        arcpy.MakeNetCDFFeatureLayer_md.

        parameters[0]: input netCDF file path.
        parameters[1]: semicolon-delimited list of selected variable names.
        parameters[2]: output feature layer name.
        Output parameter 3 receives the created layer (forces it onto the
        display). Errors are reported via arcpy.AddError.
        """
        # Describe the supplied netCDF File.
        ncFP = arcpy.NetCDFFileProperties(parameters[0].valueAsText)
        selectedVars = parameters[1].valueAsText.split(';')

        # Coordinate variables can be identified via values of their standard
        # name attribute, their units attribute, or their axis attribute
        # (per the CF metadata conventions).

        XCoordNamesList = [
            "longitude", "projection_x_coordinate", "grid_longitude"
        ]
        # Bug fix: the last entry read "grid_longitude"; the CF standard
        # name for a rotated-pole Y coordinate is "grid_latitude".
        YCoordNamesList = [
            "latitude", "projection_y_coordinate", "grid_latitude"
        ]
        XUnitNamesList = [
            "degrees_east", "degree_east", "degree_E", "degrees_E", "degreeE",
            "degreesE"
        ]
        YUnitNamesList = [
            "degrees_north", "degree_north", "degree_N", "degrees_N",
            "degreeN", "degreesN"
        ]
        XAxisNameList = ["X"]
        YAxisNameList = ["Y"]

        # Case 1: Data are organized by a single station dimension and there
        # are spatial variables also organized by that station dimension.

        dimNames = ncFP.getDimensionsByVariable(str(selectedVars[0]))
        # Must assume that the station dimension is the right most dimension.
        station_dimension = dimNames[-1]
        varNames = ncFP.getVariablesByDimension(station_dimension)
        x_variable = ""
        y_variable = ""
        row_dimension = ""
        for var in varNames:
            if (debug):
                arcpy.AddMessage(var)
            # Identify the coordinate variable by its standard name, units, or
            # axis attribute. Each attribute may be absent, so default to a
            # "missing" sentinel.

            try:
                SNattributeValue = ncFP.getAttributeValue(var, "standard_name")
            except:
                SNattributeValue = "missing"

            try:
                UNattributeValue = ncFP.getAttributeValue(var, "units")
            except:
                UNattributeValue = "missing"

            try:
                AXattributeValue = ncFP.getAttributeValue(var, "axis")
            except:
                AXattributeValue = "missing"

            # Later matches deliberately override earlier ones: axis beats
            # units, which beats standard_name (same as the original order).
            if SNattributeValue in XCoordNamesList:
                x_variable = var
            if SNattributeValue in YCoordNamesList:
                y_variable = var
            if UNattributeValue in XUnitNamesList:
                x_variable = var
            if UNattributeValue in YUnitNamesList:
                y_variable = var
            if AXattributeValue in XAxisNameList:
                x_variable = var
            if AXattributeValue in YAxisNameList:
                y_variable = var

        # Case #2: Two dimensional lat/long coordinate variable
        # If unsuccessful in locating variable for x and y coordinates based on
        # station dimension, check to see if variable of interest is
        # georeferenced via a 2 dimensional lat/long coordinate variable

        # Coordinate Variable Method will only work if CDL conforms to the
        # CF 1.6 convention (section 2.4) that "All other dimensions should,
        # whenever possible, be placed to the left of the spatiotemporal
        # dimensions."

        if (not x_variable) or (not y_variable):
            try:
                coordAttributeValue = ncFP.getAttributeValue(
                    str(selectedVars[0]), "coordinates")
                coordVariables = coordAttributeValue.split(' ')
                for element in coordVariables:
                    try:
                        SNattributeValue = ncFP.getAttributeValue(
                            element, "standard_name")
                    except:
                        SNattributeValue = "missing"

                    try:
                        UNattributeValue = ncFP.getAttributeValue(
                            element, "units")
                    except:
                        UNattributeValue = "missing"

                    try:
                        AXattributeValue = ncFP.getAttributeValue(
                            element, "axis")
                    except:
                        AXattributeValue = "missing"

                    if SNattributeValue in XCoordNamesList:
                        x_variable = element
                    if SNattributeValue in YCoordNamesList:
                        y_variable = element
                    if UNattributeValue in XUnitNamesList:
                        x_variable = element
                    if UNattributeValue in YUnitNamesList:
                        y_variable = element
                    if AXattributeValue in XAxisNameList:
                        x_variable = element
                    if AXattributeValue in YAxisNameList:
                        y_variable = element

            except:
                # No "coordinates" attribute — leave x/y undetected.
                CoordAttributeValue = "missing"

        # Convert the python list of selected variable into a single
        # (comma delimited) string if necessary.
        # Bug fix: the original tested `selectedVars.count > 1`, which
        # compares the bound list.count method to an int instead of the
        # list length.
        if len(selectedVars) > 1:
            variable_list = ','.join([str(x) for x in selectedVars])
        else:
            variable_list = selectedVars[0]

        # Set the row dimensions parameter of the Make NetCDF Feature Layer tool
        # to the right-most dimension name of the first variable (this
        # should be the name of the station dimension.
        try:
            dimNames = ncFP.getDimensionsByVariable(selectedVars[0])
            row_dimension = str(dimNames[-1])
        except:
            row_dimension = ""

        if (x_variable) and (y_variable) and (row_dimension):
            if (debug):
                arcpy.AddWarning("netCDFFile Name: %s" % \
                    parameters[0].valueAsText)
                arcpy.AddWarning("Variable List: %s" % variable_list)
                arcpy.AddWarning("x_variable: %s" % x_variable)
                arcpy.AddWarning("y_variable: %s" % y_variable)
                arcpy.AddWarning("Output Feature Layer: %s " % \
                    parameters[2].valueAsText)
                arcpy.AddWarning("Row Dimensions: %s " % row_dimension)

            result1 = arcpy.MakeNetCDFFeatureLayer_md(
                parameters[0].valueAsText, variable_list, x_variable,
                y_variable, parameters[2].valueAsText, row_dimension)
            # Force the netCDF Feature Layer to be added to the display
            arcpy.SetParameter(3, result1)
        else:
            if (not x_variable) or (not y_variable):
                msg1 = "Unable to automatically determine x and y variables " \
                   + "from the netCDF file. Use Make NetCDF Feature Layer tool."
                arcpy.AddError(msg1)
            if (not row_dimension):
                msg1 = "Unable to automatically determine row dimension " \
                     + "variable(s) from the netCDF file. Use Make NetCDF " \
                    + "Feature Layer tool."
                arcpy.AddError(msg1)
        return
def addFieldsFromSchema(schemasFolder, featureClass, schema):
    """Add the fields defined in a field-schema CSV file to a feature class.

    Reads <schemasFolder>/Fields_<schema>.csv (excel dialect, one header
    row) and, for each data row: adds the field on its first occurrence,
    assigns any domain and default value (optionally scoped to a subtype
    code), and creates subtypes when a row flags a subtype field.

    CSV columns used (0-based):
      0: field name
      1: field type keyword (resolved via Utility.fieldTypeLookup)
      5: domain name (empty string = no domain)
      6: default value (empty string = no default)
      7: "True" if this field defines the feature class subtypes
      8: subtype code the domain/default applies to (empty = all/none)

    A failure on a single row is reported with arcpy.AddError and
    processing continues with the next row; a failure outside the row
    loop is reported and the function falls through to the finally block.
    """

    # Adds the fields defined in a field schema CSV file to the specified feature class

    try:
        arcpy.AddMessage("Starting: AddFieldsFromSchema")

        # Make sure the feature class exists

        if arcpy.Exists(featureClass):

            # Maps subtype code -> subtype description; populated only after
            # a row with column 7 == "True" is processed (see addSubtypes).
            subTypes = {}

            # Make sure the specified field schema CSV file exists

            fieldSchemaFile = os.path.join(schemasFolder,
                                           "Fields_" + schema + ".csv")
            if os.path.exists(fieldSchemaFile):
                with open(fieldSchemaFile, 'r') as csvFile:
                    reader = csv.reader(csvFile, dialect='excel')

                    # Skip the headers

                    header = next(reader)

                    # Read all the rows and add fields accordingly

                    fieldName = None

                    for line in reader:
                        try:
                            fieldType = Utility.fieldTypeLookup(line[1])
                            # Add a line as a new field if this is the first occurence of that "field"
                            # (consecutive rows with the same name only carry
                            # extra per-subtype domain/default assignments).

                            if line[0] != fieldName:
                                addAField(featureClass, fieldType, line)
                                fieldName = line[0]

                            # Set the domain for the field, including subtype code if necessary

                            if bool(line[5]):
                                if bool(line[8]):
                                    # Row targets one specific subtype code;
                                    # raises KeyError (caught below) if that
                                    # code was not created by addSubtypes yet.
                                    arcpy.AssignDomainToField_management(
                                        featureClass, line[0], line[5],
                                        line[8] + ": " + subTypes[line[8]])
                                else:
                                    if len(subTypes) > 0:
                                        # No code given but subtypes exist:
                                        # assign the domain to every subtype.
                                        subTypeItems = subTypes.items()
                                        for subTypeItem in subTypeItems:
                                            arcpy.AssignDomainToField_management(
                                                featureClass, line[0], line[5],
                                                subTypeItem[0] + ": " +
                                                subTypeItem[1])
                                    else:
                                        arcpy.AssignDomainToField_management(
                                            featureClass, line[0], line[5])

                            # Set the default value for the field

                            if bool(line[6]):
                                defaultValue = line[6]
                                # Cast the CSV text to the field's type so the
                                # default is stored with the right data type.
                                castedDefault = castValue(
                                    fieldType, defaultValue)

                                if bool(line[8]):
                                    arcpy.AssignDefaultToField_management(
                                        featureClass, line[0], castedDefault,
                                        line[8] + ": " + subTypes[line[8]])
                                else:
                                    arcpy.AssignDefaultToField_management(
                                        featureClass, line[0], castedDefault)

                            # Check to see if this field sets a subtype and if it does, go create the subtypes

                            if line[7] == "True":
                                subTypes = addSubtypes(schemasFolder, schema,
                                                       featureClass, line[0])

                        except Exception as err:
                            # Report the offending row and keep processing
                            # the remaining rows.
                            arcpy.AddError(
                                traceback.format_exception_only(
                                    type(err), err)[0].rstrip())
                            arcpy.AddError('Failed at: ' + ':'.join(line))
    except Exception as err:
        arcpy.AddError(
            traceback.format_exception_only(type(err), err)[0].rstrip())

    else:
        # Runs only when the try block completed without raising.
        arcpy.AddMessage("Success! - Completed: AddFieldsFromSchema")

    finally:
        arcpy.AddMessage("Exiting: AddFieldsFromSchema")
Exemplo n.º 28
0
# Append the template XML into the already-open output file.
# FIX: the input file was only closed on the success path; a context manager
# guarantees closure, and writelines replaces the manual per-line loop.
# NOTE(review): new_file is opened earlier in this script -- it must still be
# open and writable at this point.
with open("WATERS_Services.PointIndexingService.pyt.xml") as old_file:
    new_file.writelines(old_file)
new_file.close()
   
#------------------------------------------------------------------------------
#- Step 30
#- Import the toolbox
#------------------------------------------------------------------------------
arcpy.AddMessage("Importing the toolbox.")
try:
    owservices = arcpy.ImportToolbox(temp_tool)
except Exception as err:
    arcpy.AddError(err)
    # BUG FIX: the original "exit -1" was the no-op expression (exit minus 1)
    # and never terminated the script; abort with a non-zero status instead.
    import sys
    sys.exit(1)

#------------------------------------------------------------------------------
#- Step 40
#- Run Navigation Service
#------------------------------------------------------------------------------
arcpy.AddMessage("Dry running Navigation Service.");
try:
   __builtin__.dz_deployer = True;
   # the values provided below become the initial AGS defaults
   navsrv_results = owservices.NavigationService(
       pNavigationType='Upstream with Tributaries'       
      ,pStartPermanentIdentifier=None
      ,pStartReachCode=None
      ,pStartMeasure=None
Exemplo n.º 29
0
# Split a point feature class into one feature class per distinct value of
# its "ID" text field.
try:
    env.overwriteOutput = True
    #1)
    #Set Variables
    env.workspace = out_gdb = arcpy.GetParameterAsText(
        0)  #geodatabase within workspace
    allID = arcpy.GetParameterAsText(1)  #feature class within geodatabase

    for field in arcpy.ListFields(allID):
        print(field.name)

    # Collect distinct ID values, preserving first-appearance order.
    # FIX: a companion set makes the membership test O(1) instead of the
    # original O(n) list scan per row.
    idList = []
    seen = set()
    with arcpy.da.SearchCursor(allID, "ID") as cursor:
        for row in cursor:
            if row[0] not in seen:
                seen.add(row[0])
                idList.append(row[0])
    print(idList)

    # Export one feature class per distinct ID value.
    # FIX: loop variable renamed -- "id" shadowed the builtin.
    for featureId in idList:
        whereClause = "ID = '" + featureId + "'"
        arcpy.Select_analysis(allID, allID + "_" + featureId, whereClause)

    print("All the separate location by ID files have been created.")

except Exception:  #report any error messages
    # FIX: the bare except hid the exception type, and wrapping AddError /
    # AddMessage in print only printed their None return values.
    arcpy.AddError("Could not complete separate by attribute.")
    arcpy.AddMessage(arcpy.GetMessages())
                                        "'" + str(dateUTC) + "'", "PYTHON", "")
        arcpy.CalculateField_management(inShapeFile, "Time_UTC",
                                        "'" + str(timeUTC) + "'", "PYTHON", "")

        arcpy.CalculateField_management(inShapeFile, "Filename",
                                        "'" + myShapeFile + "'", "PYTHON", "")

        arcpy.AddMessage(
            "Shapefile attribution completed based information from the R1/R2 header file"
        )

        # Process: Updated the calculation in the Area_ha field
        arcpy.CalculateField_management(inShapeFile, "Area_ha",
                                        "!Shape!.area/10000", "PYTHON_9.3", "")
        arcpy.AddMessage(
            "Your Area field has been updated to take edits into account!")

    except arcpy.ExecuteError:
        #Return Geoprocessing tool specific errors
        line, filename, err = trace()
        arcpy.AddError("Geoprocessing error on " + line + " of " + filename +
                       " :")
        for msg in range(0, arcpy.GetMessageCount()):
            if arcpy.GetSeverity(msg) == 2:
                arcpy.AddReturnMessage(msg)
    except:
        #Returns Python and non-tool errors
        line, filename, err = trace()
        arcpy.AddError("Python error on " + line + " of " + filename)
        arcpy.AddError(err)