Example #1
def check_valid_table_name(table_name,
                           out_workspace,
                           error_code,
                           add_argument1=None,
                           add_argument2=None):
    '''Check if the table name is valid for the output workspace and fail with an error if the name
    is invalid'''

    valid_name = arcpy.ValidateTableName(table_name, out_workspace)
    if valid_name != table_name:
        arcpy.AddIDMessage("ERROR", error_code, add_argument1, add_argument2)
        raise arcpy.ExecuteError
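
A minimal, hypothetical driver for this helper (the table name, workspace path, and error code are assumptions; 354 is Esri's "name contains invalid characters" message ID; requires an ArcGIS Python session):

import arcpy

# Fails with Esri message 354 if "my table!" needs scrubbing for the GDB.
check_valid_table_name("my table!", r"C:\data\outputs.gdb", 354)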
Example #2
def gdf_to_tbl(gdf, tbl):
    # Drop the geometry column, if present; only attribute data is exported.
    gdf_cols = gdf.columns.values.tolist()
    if 'geometry' in gdf_cols:
        gdf_cols.remove('geometry')
        gdf = gdf[gdf_cols].copy()

    # Convert to a structured array and scrub each field name.
    # (ValidateFieldName is the usual tool for field names; this snippet
    # reuses ValidateTableName.)
    x = np.array(np.rec.fromrecords(gdf.values))
    names = gdf.dtypes.index.tolist()
    names = [str(arcpy.ValidateTableName(name)) for name in names]
    x.dtype.names = tuple(names)
    arcpy.da.NumPyArrayToTable(x, tbl)
    return tbl
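
A hedged sketch of driving gdf_to_tbl with a small GeoDataFrame (geopandas is an assumed dependency; the output table path is hypothetical):

import arcpy
import numpy as np
import geopandas as gpd  # assumed dependency of the snippet above

# 'site id' contains a space, so ValidateTableName will scrub the field name.
gdf = gpd.GeoDataFrame({'site id': [1, 2]},
                       geometry=gpd.points_from_xy([0.0, 1.0], [0.0, 1.0]))
gdf_to_tbl(gdf, r"C:\data\outputs.gdb\sites")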
Example #3
def make_service_name(service_info, output_workspace, output_folder_path_len):
    global service_output_name_tracking_list
    global output_type

    # establish a unique name that isn't too long
    # 160 character limit for filegeodatabase
    max_path_length = 230  # sanity length for windows systems
    if output_type == 'Workspace':
        max_name_len = 150  # based on fgdb
    else:
        max_name_len = max_path_length - output_folder_path_len

    parent_name = ''
    parent_id = ''
    service_name = service_info.get('name')
    service_id = str(service_info.get('id'))

    # clean up the service name (remove invalid characters)
    service_name_cl = service_name.encode(
        'ascii',
        'ignore')  # strip any non-ascii characters that may cause an issue
    # remove multiple underscores and any other problematic characters
    service_name_cl = re.sub(
        r'[_]+', '_', arcpy.ValidateTableName(service_name_cl,
                                              output_workspace))
    service_name_cl = service_name_cl.rstrip('_')

    if len(service_name_cl) > max_name_len:
        service_name_cl = service_name_cl[:max_name_len]

    service_name_len = len(service_name_cl)

    if service_info.get('parentLayer'):
        parent_name = service_info.get('parentLayer').get('name')
        parent_id = str(service_info.get('parentLayer').get('id'))

    if output_folder_path_len + service_name_len > max_path_length:  # path too long to write to disk
        # shorten the service name
        max_len = max_path_length - output_folder_path_len
        if max_len < service_name_len:
            service_name_cl = service_name_cl[:max_len]

    # check if name already exists
    if service_name_cl not in service_output_name_tracking_list:
        service_output_name_tracking_list.append(service_name_cl)
    else:
        if service_name_cl + "_" + service_id not in service_output_name_tracking_list:
            service_name_cl += "_" + service_id
            service_output_name_tracking_list.append(service_name_cl)
        else:
            service_name_cl += parent_id + "_" + service_id

    return service_name_cl
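
A hypothetical Python 2 driver showing the module-level globals this function expects (under Python 3 the encode() call above returns bytes and would need a decode()):

service_output_name_tracking_list = []
output_type = 'Workspace'

info = {'name': u'Parcels (2020)', 'id': 3}
name = make_service_name(info, r'C:\data\services.gdb', len(r'C:\data'))
# -> 'Parcels_2020' (invalid characters scrubbed, underscores collapsed)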
Example #4
def importallsheets(in_excel, outGDB):
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]

    print('{} sheets found: {}'.format(len(sheets), ','.join(sheets)))
    for sheet in sheets:
        out_table = os.path.join(
            outGDB,
            arcpy.ValidateTableName(
                '{0}_{1}'.format(os.path.basename(in_excel), sheet), outGDB))
        print('Converting {} to {}'.format(sheet, out_table))

        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
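
A hypothetical call (requires the xlrd package and an existing file geodatabase):

importallsheets(r"C:\data\field_notes.xls", r"C:\data\scratch.gdb")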
Example #5
def duplicados(capa, ruta):
    global gdb
    nombre_gdb = "duplicados_%s" % (
        datetime.datetime.now().strftime("%b_%d_%Y_%H_%M_%S"))
    nombre_gdb = nombre_gdb.replace(".", "")
    gdb = arcpy.CreateFileGDB_management(ruta, nombre_gdb)
    ##capa_copia=arcpy.CopyFeatures_management(in_features=capa,out_feature_class="in_memory\\%s"%(arcpy.ValidateTableName(arcpy.Describe(capa).name)))
    capa_copia = arcpy.CopyFeatures_management(
        in_features=capa,
        out_feature_class="%s\\%s" %
        (gdb, arcpy.ValidateTableName(arcpy.Describe(capa).name)))
    arcpy.AddField_management(capa_copia, "dupli", "TEXT", "", "", "", "",
                              "NULLABLE", "NON_REQUIRED", "")
    with arcpy.da.UpdateCursor(
            capa_copia,
        ["SHAPE@Y", "SHAPE@X", "SHAPE@AREA", "dupli"]) as cursor:
        for fila in cursor:
            Cx = float(fila[1])
            Cy = float(fila[0])
            CArea = float(fila[2])
            x = "{0:.2f}".format(Cx)  # keep only two decimal places
            y = "{0:.2f}".format(Cy)  # keep only two decimal places
            Area = "{0:.2f}".format(CArea)
            fila[3] = x + "_" + y + "_" + Area
            cursor.updateRow(fila)

    arcpy.FindIdentical_management(in_dataset=capa_copia,
                                   out_dataset=str(gdb) + "\\duplicados",
                                   fields=["dupli"],
                                   output_record_option="ONLY_DUPLICATES")
    arcpy.Delete_management(
        "%s\\%s" % (gdb, arcpy.ValidateTableName(arcpy.Describe(capa).name)))
    registros = int(arcpy.GetCount_management(str(gdb) + "\\duplicados")[0])
    return str(registros) + ";" + "%s\\duplicados" % (gdb)
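
A hypothetical call, passing a feature class and the folder where the duplicates GDB should be created:

resultado = duplicados(r"C:\data\parcels.gdb\lotes", r"C:\data\salidas")
print(resultado)  # "<count>;<path to the duplicados table>"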
Example #6
def importallsheets(in_excel, out_gdb):
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]
    print('{} sheets found: {}'.format(len(sheets), ','.join(sheets)))
    for sheet in sheets:
        # The out_table is based on the input excel file name plus
        # an underscore separator followed by the sheet name
        out_table = os.path.join(
            out_gdb,
            arcpy.ValidateTableName(
                "{0}_{1}".format(os.path.basename(in_excel), sheet), out_gdb))
        print('Converting {} to {}'.format(sheet, out_table))

        # Perform the conversion
        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
Example #7
def Eliminate(inFeatures, outFeatureClass, numero):
    # grilla, templGrilla, expresion_seleccion, selection, ex_where_clause and
    # capa_exclusion are module-level names assumed to be defined elsewhere.
    nombre = arcpy.ValidateTableName(
        "M" + str(datetime.datetime.now().strftime("%b_%S")) +
        str(random.randrange(0, 50000)) + str(numero), "in_memory")
    templfeatures = arcpy.ValidateTableName(
        "blocklayer2" + "_" +
        str(datetime.datetime.now().strftime("%b_%S")) +
        str(random.randrange(0, 50000)) + str(numero), "in_memory")
    arcpy.MakeFeatureLayer_management(grilla, templGrilla)
    arcpy.MakeFeatureLayer_management(inFeatures, templfeatures)
    fc_grilla = arcpy.SelectLayerByAttribute_management(
        templGrilla, "NEW_SELECTION", "PageNumber = %s" % str(numero))
    fc_select = arcpy.SelectLayerByLocation_management(
        templfeatures, "have_their_center_in", templGrilla)
    arcpy.CopyFeatures_management(templfeatures, "in_memory" + "\\" + nombre)
    arcpy.MakeFeatureLayer_management("in_memory" + "\\" + nombre,
                                      "in_memory" + "\\" + nombre + "_lyr")
    arcpy.AddField_management(in_table="in_memory" + "\\" + nombre + "_lyr",
                              field_name="Area",
                              field_type="DOUBLE")
    arcpy.CalculateField_management(in_table="in_memory" + "\\" + nombre + "_lyr",
                                    field="Area",
                                    expression="!SHAPE.area!",
                                    expression_type="PYTHON")
    fc_filtro = arcpy.SelectLayerByAttribute_management(
        "in_memory" + "\\" + nombre + "_lyr", "NEW_SELECTION",
        expresion_seleccion)
    arcpy.Eliminate_management(in_features="in_memory" + "\\" + nombre + "_lyr",
                               out_feature_class=outFeatureClass,
                               selection=selection,
                               ex_where_clause=ex_where_clause,
                               ex_features=capa_exclusion)
Example #8
def genPath(self):
    self.path = str(arcpy.env.workspace)
    self.base = arcpy.ValidateTableName("OSM")
    #        if (arcpy.Exists(outFC)):
    #            d = arcpy.Describe(outFC)
    #            path = d.path
    #            base = d.basename
    #        else:
    outFC = os.path.join(self.path, self.base)
    nr = 0
    #if (outFC == path):
    #    path = os.path.dirname(path)
    while arcpy.Exists(outFC):
        # Append an incrementing suffix until the name is unique
        # (the original never incremented nr, so the loop never ended).
        nr += 1
        self.base = arcpy.ValidateTableName("OSM" + str(nr))
        outFC = os.path.join(self.path, self.base)
    return outFC
Example #9
def importallsheets(in_excel, table_prefix, out_gdb):
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]

    arcpy.AddMessage('{} sheets found: {}'.format(len(sheets),
                                                  ','.join(sheets)))
    for sheet in sheets:
        out_table = os.path.join(
            out_gdb,
            arcpy.ValidateTableName("{0}_{1}".format(table_prefix, sheet),
                                    out_gdb))

        arcpy.AddMessage('Converting {} to {}'.format(sheet, out_table))

        # Perform the conversion
        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
Example #10
def create_summary_table(table_fields):
    """This function creates the output summary table """
    try:
        #   Create the output summary table
        arcpy.AddMessage("Generating Summary Table...")
        valid_table_name = arcpy.ValidateTableName("summary_table",
                                                   "in_memory")
        summary_table = arcpy.CreateTable_management("in_memory",
                                                     valid_table_name)

        #   Add the fields to Summary table
        for fldname in table_fields:
            arcpy.AddField_management(summary_table, fldname, "TEXT")
        arcpy.AddMessage("Summary Table is created.")
        return summary_table
    except arcpy.ExecuteError as error:
        arcpy.AddError("Error occurred during execution:" + str(error))
Example #11
def shapefile_to_aoi(shape_file_path):
    """This function dissolves the extracted shapefile and returns it"""
    try:
        shape_file_name = os.path.basename(shape_file_path)[:-4]
        arcpy.AddMessage("Found shapefile: {0}".format(shape_file_path))
        arcpy.AddMessage("Dissolving extracted shapefile...")
        valid_output_name = arcpy.ValidateTableName(
            shape_file_name + "_Output", "in_memory")
        output_fc_name = os.path.join("in_memory", valid_output_name)
        arcpy.Dissolve_management(shape_file_path, output_fc_name)
        #  Loading extracted shape file into feature set to be returned
        #  as Output Parameter
        arcpy.AddMessage("Complete.")
        return output_fc_name

    except Exception as error:
        arcpy.AddError("Error:" + str(error))
        return False
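
A hypothetical call against an extracted shapefile on disk:

aoi_fc = shapefile_to_aoi(r"C:\data\extracted\boundary.shp")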
Example #12
def validate_path(path):
    """
    If our path contains a DB name, make sure we have a valid DB name
    and not a standard file name.
    """
    dirname, file_name = os.path.split(path)
    file_base = os.path.splitext(file_name)[0]
    if dirname == '':
        # a relative path only, relying on the workspace
        dirname = arcpy.env.workspace
    path_ext = os.path.splitext(dirname)[1].lower()
    if path_ext in ['.mdb', '.gdb', '.sde']:
        # we're working in a database
        file_name = arcpy.ValidateTableName(file_base)
        if file_name != file_base:
            msg("Warning: renamed output table to {}".format(file_name))
    validated_path = os.path.join(dirname, file_name)
    return validated_path
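
A hypothetical call (assumes the module's msg() helper used above is in scope, since a rename triggers the warning):

out = validate_path(r"C:\data\results.gdb\my table")
# -> C:\data\results.gdb\my_table, plus a rename warning via msg()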
Example #13
def __setAttributes(self):
    """Set attributes of object based on Esri REST Endpoint for FeatureService"""
    values = {"f": "json"}
    layerInfo = self._getEsriRESTJSON(self.url, values)
    # Geometry type
    geometryType = getGeometryType(layerInfo['geometryType'])
    self.geometryType = geometryType
    # Name
    name = arcpy.ValidateTableName(layerInfo['name'])
    self.name = name
    # Spatial reference - both the wkid and the arcpy SpatialReference object,
    # in case it's in a wkt
    try:
        wkid = layerInfo['extent']['spatialReference']['wkid']
    except (KeyError, TypeError):
        wkid = 4326
    sr = arcpy.SpatialReference()
    sr.factoryCode = int(wkid)
    sr.create()
    self.sr = sr
    self.wkid = wkid
    # Fields used to update the feature class are a subset of all the fields
    # in a feature class
    fields = layerInfo['fields']
    updateFields = []
    for field in fields:
        if (field['type'] in [
                'esriFieldTypeOID', 'esriFieldTypeGeometry',
                'esriFieldTypeGUID'
        ] or 'shape' in field['name'].lower()
                or field['name'] in self.userFields):
            pass
        else:
            updateFields.append(field)
    updateFields.insert(0, {
        "name": 'Shape@',
        "type": "esriFieldTypeGeometry"
    })
    self.updateFields = updateFields
    # Max values
    if 'maxRecordCount' in layerInfo:  # has_key() is Python 2 only
        self.maxRecordCount = int(layerInfo['maxRecordCount'])
    else:
        self.maxRecordCount = 1000
Example #14
def copyFCtoGEO(InFileLocations, outFilegdbpath, file_list, failed_list):
    for fc in fcs_in_workspace(InFileLocations):
        try:
            basename, extension = os.path.splitext(fc)
            outFC = arcpy.ValidateTableName(basename)
            # Join once; the original concatenated the workspace path twice.
            outFeatureClass = os.path.join(outFilegdbpath, str(outFC))
            if not arcpy.Exists(outFeatureClass):
                addSRList = str(fc) + "," + str(outFC)
                print addSRList
                file_list.append(addSRList)
                arcpy.CopyFeatures_management(fc, outFeatureClass)
            else:
                print "FC already copied"
        except:
            print "Failed  " + str(fc)
            addFailed = str(fc)
            failed_list.append(addFailed)
    return file_list, failed_list
Example #15
def importallsheets(in_excel, out_gdb):
    # Function taken from ESRI documentation http://pro.arcgis.com/en/pro-app/tool-reference/conversion/excel-to-table.htm
    workbook = xlrd.open_workbook(in_excel)
    sheets = [sheet.name for sheet in workbook.sheets()]

    print('{} sheets found: {}'.format(len(sheets), ','.join(sheets)))
    for sheet in sheets:
        # The out_table is based on the input excel file name
        # an underscore (_) separator followed by the sheet name
        out_table = os.path.join(
            out_gdb,
            arcpy.ValidateTableName(
                "{0}_{1}".format(os.path.basename(in_excel), sheet),
                out_gdb))

        print('Converting {} to {}'.format(sheet, out_table))

        # Perform the conversion
        arcpy.ExcelToTable_conversion(in_excel, out_table, sheet)
    return
Example #16
def validatename(ws, out_basename, ext):
	# Validate the output name for the target workspace
	out_basename = str(out_basename)
	ext = str(ext)
	if ext != '.png':
		out_basename = arcpy.ValidateTableName(out_basename+ext,ws)
		if len(ext)>0:
			out_basename = out_basename[0:-len(ext)]
	if verbose:
		arcpy.AddMessage(u'basename = {}, with extension = {}'.format(out_basename,ext))
	if arcpy.Exists(os.path.join(ws,out_basename+ext)):
		i = 1
		out_basename_temp = out_basename + '_{}'.format(i)
		if verbose:
			arcpy.AddMessage(u'attempted basename = {}, with extension = {}'.format(out_basename_temp,ext))
		while arcpy.Exists(os.path.join(ws,out_basename_temp+ext)):
			i+=1
			out_basename_temp = out_basename + '_{}'.format(i)
			if verbose:
				arcpy.AddMessage(u'attempted basename = {}, with extension = {}'.format(out_basename_temp,ext))
		out_basename = out_basename_temp
	return out_basename
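
A hypothetical call (the function reads a module-level 'verbose' flag, set here for the sketch; an empty extension means a geodatabase output):

verbose = True
base = validatename(r"C:\data\outputs.gdb", "site map", "")
# -> "site_map", or "site_map_1", "site_map_2", ... if it already exists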
Example #17
def OsmToTempGdb(osmfile):  # MAKE SURE I KNOW WHAT THE GEODATABASE HERE IS REFERRING TO # allpopfcs
    """ _ """
    # print "workspace:", env.workspace
    # env.workspace = holdinggeodatabase
    # print "new workspace", env.workspace

    # Use "Load OM File" tool to convert OSM toolbox to .gdb feature class
    #   Find this tool at <https://www.arcgis.com/home/item.html?id=c18d3d0d5c62465db60f89225fdd2698>
    # OSMGPFileLoader_osmtools
    # (in_osmFile, in_conserveMemory, out_targetdataset, out_osmPoints, out_osmLines, out_osmPolygons)
    #should I clear scratchGDB here?
    validname = os.path.basename(osmfile[:-4])  # strip the ".osm" extension
    validatedTableName = arcpy.ValidateTableName(validname)
    nameOfTargetDataset = os.path.join(osm_gdb, validatedTableName)
    # unvalidatedNameOfTargetDataset = os.path.join(holdinggeodatabase, FeatureDatasetName)

    osmLineFC = os.path.join(osm_gdb, validatedTableName, (validatedTableName + "_osm_ln"))
    osmPointFC = os.path.join(osm_gdb, validatedTableName, (validatedTableName + "_osm_pt"))
    osmPolygonFC = os.path.join(osm_gdb, validatedTableName, (validatedTableName + "_osm_ply"))
    allthree = [osmLineFC, osmPointFC, osmPolygonFC]
    keystoadd = "name;addr:street;addr:city;addr:state;addr:postcode"
    print nameOfTargetDataset
    print osmPolygonFC
    print osmLineFC
    print osmPointFC
    arcpy.ImportToolbox(osmtoolbox)
    # Use toolbox to import the 3 FCs of OSM data for target country into scratchGDB
    try:
        arcpy.OSMGPFileLoader_osmtools(in_osmFile=osmfile, in_conserveMemory="CONSERVE_MEMORY",
                                       in_attributeselect=keystoadd, out_targetdataset=nameOfTargetDataset,
                                       out_osmPoints=osmPointFC, out_osmLines=osmLineFC, out_osmPolygons=osmPolygonFC)
    except Exception:
        print "an exception error was thrown during the OSM file loading process. Check if osm polygon was created"

    # May need to do another osmtools geoprocessing step.
    for fc in allthree:
        arcpy.OSMGPAttributeSelector_osmtools(in_osmfeatureclass=fc, in_attributeselect=keystoadd)
Example #18
def build_feature_table_version1(fgdb, spatial_ref, raw_name, attributes, protocol):
    """Create a feature class of PO observation items (features)."""
    valid_feature_name = arcpy.ValidateTableName(raw_name, fgdb)
    field_names = protocol["csv"]["features"]["feature_field_names"]
    field_types = protocol["csv"]["features"]["feature_field_types"]
    arcpy.CreateFeatureclass_management(
        fgdb, valid_feature_name, "POINT", "#", "#", "#", spatial_ref
    )
    view = arcpy.MakeTableView_management(
        os.path.join(fgdb, valid_feature_name), "view"
    )
    try:
        # Protocol Attributes
        for attribute in attributes:
            arcpy.AddField_management(
                view,
                attribute["name"],
                attribute["type"],
                "",
                "",
                "",
                attribute["alias"],
                "",
                "",
                attribute["domain"],
            )
        # Standard Attributes
        for i, field_name in enumerate(field_names):
            alias = field_name.replace("_", " ")
            arcpy.AddField_management(
                view, field_name, field_types[i], "", "", "", alias
            )
        # Link to related data
        arcpy.AddField_management(view, "GpsPoint_ID", "LONG")
        arcpy.AddField_management(view, "Observation_ID", "LONG")
    finally:
        arcpy.Delete_management(view)
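
A hypothetical driver with a minimal protocol mapping (the field names and types are assumptions; an empty attributes list skips the protocol-attribute loop):

protocol = {"csv": {"features": {"feature_field_names": ["Observer"],
                                 "feature_field_types": ["TEXT"]}}}
build_feature_table_version1(r"C:\data\obs.gdb", arcpy.SpatialReference(4326),
                             "raw name!", [], protocol)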
Example #19
def runTool(outGDB, SQLDbase, RouteText, inNetworkDataset, imp, BufferSize,
            restrictions, TrimSettings):
    try:
        OverwriteOutput = arcpy.env.overwriteOutput  # Get the original value so we can reset it.
        arcpy.env.overwriteOutput = True
        # Source FC names are not prepended to field names.
        arcpy.env.qualifiedFieldNames = False

        BBB_SharedFunctions.CheckArcVersion(min_version_pro="1.2")
        BBB_SharedFunctions.CheckOutNALicense()
        BBB_SharedFunctions.CheckWorkspace()

        # ===== Get trips and stops associated with this route =====

        # ----- Figure out which route the user wants to analyze based on the text input -----
        try:

            arcpy.AddMessage("Gathering route, trip, and stop information...")

            # Connect to or create the SQL file.
            conn = sqlite3.connect(SQLDbase)
            c = BBB_SharedFunctions.c = conn.cursor()

            # Get list of routes in the GTFS data
            routefetch = "SELECT route_short_name, route_long_name, route_id FROM routes;"
            c.execute(routefetch)
            # Extract the route_id based on what the user picked from the GUI list
            # It's better to do it by searching the database instead of trying to extract
            # the route_id from the text they chose because we don't know what kind of
            # characters will be in the route names and id, so parsing could be unreliable
            route_id = ""
            for route in c:
                routecheck = route[0] + ": " + route[1] + " [" + route[2] + "]"
                if routecheck == RouteText:
                    route_id = route[2]
                    route_short_name = route[0]
                    break

            if not route_id:
                arcpy.AddError("Could not parse route selection.")
                raise BBB_SharedFunctions.CustomError

            # Name feature classes
            outStopsname = arcpy.ValidateTableName("Stops_" + route_short_name,
                                                   outGDB)
            outPolysname = arcpy.ValidateTableName(
                "Buffers_" + route_short_name, outGDB)

        except:
            arcpy.AddError("Error determining route_id for analysis.")
            raise

        # ----- Get trips associated with route and split into directions -----
        try:
            # Some GTFS datasets use the same route_id to identify trips traveling in
            # either direction along a route. Others identify it as a different route.
            # We will consider each direction separately if there is more than one.

            # Get list of trips
            trip_route_dict = {}
            triproutefetch = '''
                SELECT trip_id, direction_id FROM trips
                WHERE route_id='%s'
                ;''' % route_id
            c.execute(triproutefetch)

            # Fill some dictionaries for use later.
            trip_dir_dict = {}  # {Direction: [trip_id, trip_id, ...]}
            for triproute in c:
                trip_dir_dict.setdefault(triproute[1], []).append(triproute[0])
            if not trip_dir_dict:
                arcpy.AddError(
                    "There are no trips in the GTFS data for the route \
you have selected (%s).  Please select a different route or fix your GTFS \
dataset." % RouteText)
                raise BBB_SharedFunctions.CustomError

        except:
            arcpy.AddError("Error getting trips associated with route.")
            raise

        # ----- Get list of stops associated with trips and split into directions -----
        try:
            # If a stop is used for trips going in both directions, count them separately.

            # Select unique set of stops used by trips in each direction
            stoplist = {}  # {Direction: [stop_id, stop_id, ...]}
            for direction in trip_dir_dict:
                stops = []
                for trip in trip_dir_dict[direction]:
                    stopsfetch = '''SELECT stop_id FROM stop_times
                                WHERE trip_id == ?'''
                    c.execute(stopsfetch, (trip, ))
                    for stop in c:
                        stops.append(stop[0])
                stoplist[direction] = list(set(stops))

            # If there is more than one direction, we will append the direction number
            # to the output fc names, so add an _ here for prettiness.
            if len(stoplist) > 1:
                arcpy.AddMessage(
                    "Route %s contains trips going in more than one \
direction. A separate feature class will be created for each direction, and the \
GTFS direction_id will be appended to the feature class name." %
                    route_short_name)
                outStopsname += "_"
                outPolysname += "_"

        except:
            arcpy.AddError("Error getting stops associated with route.")
            raise

        # ===== Create output =====

        # ----- Create a feature class of stops ------
        try:

            arcpy.AddMessage("Creating feature class of GTFS stops...")

            for direction in stoplist:
                stops = stoplist[direction]
                outputname = outStopsname
                if direction is not None:
                    outputname += str(direction)
                outStops = os.path.join(outGDB, outputname)

                outStops, outStopList = BBB_SharedFunctions.MakeStopsFeatureClass(
                    outStops, stops)

                # Add a route_id and direction_id field and populate it
                arcpy.management.AddField(outStops, "route_id", "TEXT")
                arcpy.management.AddField(outStops, "direction_id", "TEXT")
                fields = ["route_id", "direction_id"]
                with arcpy.da.UpdateCursor(outStops, fields) as cursor:
                    for row in cursor:
                        row[0] = route_id
                        row[1] = direction
                        cursor.updateRow(row)

        except:
            arcpy.AddError("Error creating feature class of GTFS stops.")
            raise

        #----- Create Service Areas around stops -----
        try:

            arcpy.AddMessage("Creating buffers around stops...")

            for direction in stoplist:
                outputname = outStopsname
                if direction is not None:
                    outputname += str(direction)
                outStops = os.path.join(outGDB, outputname)

                TrimPolys, TrimPolysValue = BBB_SharedFunctions.CleanUpTrimSettings(
                    TrimSettings)
                polygons = BBB_SharedFunctions.MakeServiceAreasAroundStops(
                    outStops, inNetworkDataset,
                    BBB_SharedFunctions.CleanUpImpedance(imp), BufferSize,
                    restrictions, TrimPolys, TrimPolysValue)

                # Join stop information to polygons and save as feature class
                arcpy.management.AddJoin(polygons, "stop_id", outStops,
                                         "stop_id")
                outPolys = outPolysname
                if direction is not None:
                    outPolys += str(direction)
                outPolysFC = os.path.join(outGDB, outPolys)
                arcpy.management.CopyFeatures(polygons, outPolysFC)

                # Add a route_id and direction_id field and populate it
                arcpy.management.AddField(outPolysFC, "route_id", "TEXT")
                arcpy.management.AddField(outPolysFC, "direction_id", "TEXT")
                fields = ["route_id", "direction_id"]
                with arcpy.da.UpdateCursor(outPolysFC, fields) as cursor:
                    for row in cursor:
                        row[0] = route_id
                        row[1] = direction
                        cursor.updateRow(row)

        except:
            arcpy.AddError("Error creating buffers around stops.")
            raise

        arcpy.AddMessage("Done!")
        arcpy.AddMessage("Output written to %s is:" % outGDB)
        outFClist = []
        for direction in stoplist:
            outPolysFC = outPolysname
            outStopsFC = outStopsname
            if direction is not None:
                outStopsFC += str(direction)
                outPolysFC += str(direction)
            outFClist.append(outStopsFC)
            outFClist.append(outPolysFC)
            arcpy.AddMessage("- " + outStopsFC)
            arcpy.AddMessage("- " + outPolysFC)

        # Tell the tool that this is output. This will add the output to the map.
        outFClistwpaths = [os.path.join(outGDB, fc) for fc in outFClist]
        arcpy.SetParameterAsText(8, ';'.join(outFClistwpaths))

    except BBB_SharedFunctions.CustomError:
        arcpy.AddError("Failed to create buffers around stops for this route.")
        pass

    except:
        arcpy.AddError("Failed to create buffers around stops for this route.")
        raise

    finally:
        if OverwriteOutput:
            arcpy.env.overwriteOutput = OverwriteOutput
Example #20
import arcpy
import glob
import os
import ogr
from connectionData import pathCad, pathShp, typeCadFile, field
arcpy.env.overwriteOutput = True
gdb = pathCad + "/dgn.gdb"
arcpy.env.workspace = gdb
arcpy.CreateFileGDB_management(pathCad, "dgn.gdb")
reference_scale = "1500"
#globTxt = "r,"+ pathCad +"/*."+typeCadFile
for file in glob.glob(r"data/dgn_dxf/*.dgn"):
    outDS = arcpy.ValidateTableName(
        os.path.splitext(os.path.basename(file))[0])
    arcpy.CADToGeodatabase_conversion(file, gdb, outDS, reference_scale)

datasetList = arcpy.ListDatasets('*', 'Feature')
for dataset in datasetList:
    # Use the full dataset path; a bare name would resolve relative to the cwd.
    arcpy.env.workspace = os.path.join(gdb, dataset)
    fcList = arcpy.ListFeatureClasses()
    for fc in fcList:
        #if fc == "Point" or fc == "Polyline" or fc == "Polygon":
        print fc
        inFeatures = fc
        outLocation = pathCad
        outFeatureClass = fc
        arcpy.FeatureClassToFeatureClass_conversion(inFeatures, outLocation,
                                                    outFeatureClass)
Example #21
try:
    
    geoResponse, geoVal = geoRequest(coorStr)

    if geoResponse:

        usrInterps = paramInterps.split(";")
        keys = ",".join(geoVal)

        arcpy.SetProgressor("step", None, 0, len(usrInterps), 1)

        for interp in usrInterps:
    
            tblName = "SSURGO_express_tbl" + interp + aggMethod
            tblName = arcpy.ValidateTableName(tblName)
            tblName = tblName.replace("___", "_")
            tblName = tblName.replace("__", "_")

            # path = os.path.dirname(tblName)
            # name = os.path.basename(tblName)

            intrpLogic, intrpData = tabRequest(interp)
    
            if intrpLogic:

                intrpRes = intrpData["Table"]

                if not arcpy.Exists(outLoc + os.sep + tblName):

                    columnNames = intrpRes.pop(0)
Example #22
def main(input_table=None,
         sr=None,
         output_loc=None,
         output_gdb=None,
         output_fc=None,
         genetic=None,
         identification=None,
         location=None,
         other=None,
         mode='toolbox',
         protected_map=config.protected_columns):

    # set mode based on how script is called.
    settings.mode = mode

    # First, create a geodatabase for all our future results.
    # TODO: can we generate this from a single value?
    gdb_path = os.path.abspath(os.path.join(output_loc, output_gdb + '.gdb'))

    # if the user is calling this from the command-line, they won't necessarily have
    # entered a full path for the FC output. Infer it from the input instead.
    if output_fc.lower().find('gdb') == -1:
        output_fc = os.path.join(gdb_path, output_fc)

    # check if we received a valid spatial reference -- if not, use WGS84.
    if sr in ('', None):
        # default spatial reference can be redefined.
        sr = config.sr

    try:
        # only try to create this GDB if it doesn't already exist.
        if not os.path.exists(gdb_path):
            # Process: Create File GDB
            # SYNTAX: CreateFileGDB_management (out_folder_path, out_name, {out_version})
            arcpy.CreateFileGDB_management(output_loc, output_gdb, "CURRENT")
            utils.msg("File geodatabase successfully created: %s" % gdb_path)
        else:
            utils.msg("File geodatabase already exists, skipping creation.")
    except Exception as e:
        utils.msg("Error creating file geodatabase",
                  mtype='error',
                  exception=e)
        sys.exit()

    # TODO: WE NEED TO DO A FULL CLASSIFICATION OF THE INPUT AND MANUALLY BUILD UP THE LAYER...
    # We'll have two columns per locus, need to import correctly

    # Start things off by importing the table directly. We still need to edit the header
    # because of ArcGIS' restrictions on table names.

    # do we have a text-based file?
    file_type = utils.file_type(input_table)
    if file_type == 'Text':
        # Generate a temporary copy of the input CSV which corrects it for
        # ArcGIS, stripping invalid column label characters.
        data_table = utils.validate_table(input_table)

        # TODO: use field mapping to handle the date-time field?
        utils.protect_columns(data_table, protected_map)
    else:
        data_table = input_table

    # write out our table, after additional validation.
    try:
        arcpy.env.overwriteOutput = settings.overwrite

        # generate table name based on input name
        (label, ext) = os.path.splitext(os.path.basename(input_table))

        # Validate label will produce a valid table name from our input file
        validated_label = arcpy.ValidateTableName(label)

        # write out our filtered table to ArcGIS
        arcpy.TableToTable_conversion(data_table, gdb_path, validated_label)

        if file_type == 'Text':
            # Delete the temporary table with validated names;
            # temp file is stored in the same spot as the original.
            temp_dir = os.path.dirname(input_table)
            temp_path = os.path.join(temp_dir, data_table)
            os.remove(temp_path)

    except Exception as e:
        utils.msg("Error converting table %s to GDB" % input_table,
                  mtype='error',
                  exception=e)
        sys.exit()

    input_csv = os.path.join(gdb_path, validated_label)
    utils.msg("Table successfully imported: \n %s" % input_csv)
    fields = [f.name.lower() for f in arcpy.ListFields(input_csv)]

    # initially, our date column is imported as text to prevent ArcGIS
    # from inadvertently munging it. Add a formatted date column.
    try:
        # TODO: make date field defined elsewhere.
        input_time_field = "Date_Time"
        field_name = 'Date_formatted'
        expression = 'formatDate(!{input_time_field}!)'.format(
            input_time_field=input_time_field)
        code_block = """
import dateutil.parser
def formatDate(input_date):
    parsed_date = dateutil.parser.parse(input_date)
    return parsed_date.strftime("%m/%d/%Y %H:%M:%S")"""
        # check if a formatted date field exists; if so skip this step
        if field_name.lower() not in fields:
            arcpy.AddField_management(input_csv, field_name, 'DATE')
            arcpy.CalculateField_management(input_csv, field_name, expression,
                                            "PYTHON_9.3", code_block)
            utils.msg("Added a formatted date field: {field_name}.".format(
                field_name=field_name))
    except Exception as e:
        utils.msg("Error parsing date information", mtype='error', exception=e)
        sys.exit()

    # coordinate columns
    x = y = None

    # Convert the table to a temporary spatial feature
    try:
        if location is None:
            raise Exception("Required location columns not set.")

        # A temporary XY Layer needed to create the feature class.
        # NOTE: This table is deleted when the script finishes
        temporary_layer = input_csv + '_xy_temp'

        # 'location', ArcGIS passes semicolon separated values
        loc_parts = location.split(";")

        # TODO: ArcGIS doesn't preserve order; do we need separate fields for these? or some other approach?
        if loc_parts[0].lower() in ['x', 'longitude', 'lon']:
            (x, y) = loc_parts[:2]
        else:
            (y, x) = loc_parts[:2]

        # Process: Make XY Event Layer.  This layer is temporary and will be
        # deleted upon script completion.
        # SYNTAX: arcpy.MakeXYEventLayer_management(table, in_x_field,
        #           in_y_field, out_layer, {spatial_reference}, {in_z_field})
        arcpy.MakeXYEventLayer_management(input_csv, x, y, temporary_layer, sr)
    except Exception as e:
        utils.msg("Error making XY Event Layer", mtype='error', exception=e)
        sys.exit()

    utils.msg("XY event layer successfully created.")

    # Copy our features to a permanent layer
    try:
        # for this step, overwrite any existing results
        arcpy.env.overwriteOutput = True

        # Process: Copy Features
        # SYNTAX: CopyFeatures_management (in_features, out_feature_class, {config_keyword}, {spatial_grid_1}, {spatial_grid_2}, {spatial_grid_3})
        arcpy.CopyFeatures_management(temporary_layer, output_fc, "", "0", "0",
                                      "0")
        utils.msg("Features succesfully created: \n %s" % output_fc)

    except Exception as e:
        utils.msg("Error copying features to a feature class",
                  mtype='error',
                  exception=e)
        sys.exit()

    utils.msg(
        "Feature Class successfully created, your SRGD file has been imported!"
    )

    try:
        haplotype_table = os.path.join(
            gdb_path, "{}_{}".format(validated_label, 'Haplotypes'))

        # look up our haplotype data
        haplotypes = utils.Haplotype(output_fc)

        # create a dictionary for inserting records
        dts = {
            'names': ('code', 'haplotype', 'count'),
            'formats': (numpy.uint16, 'S6', numpy.uint8)
        }

        # create a numpy formatted structure from this data
        array = numpy.rec.fromrecords(haplotypes.indexed, dtype=dts)

        # output the new table
        arcpy.da.NumPyArrayToTable(array, haplotype_table)

        utils.msg("Haplotype table created: \n {}".format(haplotype_table))

    except Exception as e:
        utils.msg("Error creating supplemental haplotype table",
                  mtype='error',
                  exception=e)
        sys.exit()

    # Because we can't pass around objects between this process and the calling
    # addin environment, dump out the settings to our shared configuration file.
    try:
        config.update('fc_path', output_fc.strip())
        config.update('x_coord', x)
        config.update('y_coord', y)

        var_types = {
            'identification': identification,
            'genetic': genetic,
            'location': location,
            'other': other
        }

        if identification is None:
            raise Exception("Required Identification columns not entered.")

        # the first ID field should be used as the default key.
        id_cols = identification.split(";")
        id_field = id_cols[0]
        for (i, col) in enumerate(id_cols):
            # FIXME this will always set individual_id to the primary key.
            if col.lower() == 'individual_id':
                id_field = id_cols[i]
        config.update('id_field', id_field)

        for (var, val) in var_types.items():
            if val is None:
                val = ''
            config.update('%s_columns' % var, val.strip())

    except Exception as e:
        msg = "Error creating output configuration file: %s" % config.config_path
        utils.msg(msg, mtype='error', exception=e)
        sys.exit()

    # clean up: remove intermediate steps.
    try:
        arcpy.Delete_management(temporary_layer)
    except Exception as e:
        utils.msg("Unable to delete temporary layer",
                  mtype='error',
                  exception=e)
        sys.exit()
Example #23
# Script arguments
input_surface = arcpy.GetParameterAsText(0)  #Input Surface
RADIUS2_to_infinity = arcpy.GetParameterAsText(
    1)  #Force visibility to infinity
towerClass = arcpy.GetParameterAsText(2)  #Defensive Position Feature Class
descField = arcpy.GetParameterAsText(3)  #Defensive Position Description Field
towerName = arcpy.GetParameterAsText(4)  #Defensive Position Description
towerHeightField = arcpy.GetParameterAsText(
    5)  #Defensive Position Height Field
# The name of the workspace in which the features should be stored
outWorkspace = arcpy.GetParameterAsText(6)  #Output Workspace
# The name of the featureclass in which the features should be stored
outFeatureClassName = arcpy.GetParameterAsText(7)  #Output Visibility

# Scrub the name
scrubbedFeatureClassName = arcpy.ValidateTableName(outFeatureClassName,
                                                   outWorkspace)

# Put it all together
output_rlos = os.path.join(outWorkspace, scrubbedFeatureClassName)
if RADIUS2_to_infinity == '#' or not RADIUS2_to_infinity:
    RADIUS2_to_infinity = "false"

terrestrial_refractivity_coefficient = 0.13
polygon_simplify = "SIMPLIFY"

delete_me = []
DEBUG = True
arcpy.AddMessage("Using scratch GDB of: " + arcpy.env.scratchWorkspace)

if RADIUS2_to_infinity == 'true':
    RADIUS2_to_infinity = True
Example #24
try:
    # inFC, outPath, and 'type' (a list that shadows the built-in) are
    # assumed to be defined earlier in the script.
    field = [u'RptGroup']
    with arcpy.da.SearchCursor(inFC, field) as scursor:
        for row in scursor:
            items = row[0].split("/")
            if items not in type:
                type.append(items)

    del scursor

    for t in type:
        expression = "RptGroup LIKE '%" + t[0] + "%'"
        tempWS = r"in_memory"
        fc = t[0] + "_H"
        gFC = arcpy.ValidateTableName(fc, tempWS)
        tempFC = os.path.join(tempWS, gFC)
        outFC = os.path.join(outPath, gFC)
        print expression, gFC

        if arcpy.Exists(outFC) == False:
            print("Selecting Data")

            arcpy.CreateFeatureclass_management(tempWS, gFC, "MULTIPOINT", inFC, "DISABLED", "DISABLED", inFC)
            f = [u'OBJECTID', u'Shape', u'MsgSerial', u'RptGroup']
            f1 = [u'OBJECTID', u'Shape', u'RptGroup']
            iCursor = arcpy.da.InsertCursor(tempFC, f1)
            with arcpy.da.SearchCursor(inFC, f1, expression) as cursor:
                for row in cursor:
                    iCursor.insertRow(row)
Example #25
def clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format):
    """Clips input results."""
    clipped = 0
    errors = 0
    skipped = 0
    fds = None
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.items():
        try:
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    if out_coordinate_system == 0:
                        service_layer = task_utils.ServiceLayer(ds)
                        wkid = service_layer.wkid
                        out_sr = arcpy.SpatialReference(wkid)
                        arcpy.env.outputCoordinateSystem = out_sr
                    else:
                        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                        arcpy.env.outputCoordinateSystem = out_sr

                    if not out_sr.name == gcs_sr.name:
                        try:
                            geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                            clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                        except (AttributeError, IndexError):
                            try:
                                clip_poly = gcs_clip_poly.projectAs(out_sr)
                            except AttributeError:
                                clip_poly = gcs_clip_poly
                        except ValueError:
                            clip_poly = gcs_clip_poly
                    else:
                        clip_poly = gcs_clip_poly

                    arcpy.env.overwriteOutput = True
                    service_layer = task_utils.ServiceLayer(ds, clip_poly.extent.JSON, 'esriGeometryEnvelope')
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*', eval(clip_poly.JSON))
                        feature_set = arcpy.FeatureSet()
                        if not out_name:
                            out_name = service_layer.service_layer_name
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.Clip_analysis(feature_set, clip_poly, out_name)
                        else:
                            clip_features = arcpy.Clip_analysis(feature_set, clip_poly, 'in_memory/features')
                            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(clip_features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt, '', 'clip_data')
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                for row in out_name:
                    try:
                        arcpy.env.overwriteOutput = True
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                try:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                except arcpy.ExecuteError:
                                    arcpy.DeleteField_management(layer_name, valid_field)
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                                layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                                existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                                new_fields = []
                                field_values = []
                                for field, value in row.items():
                                    valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if not valid_field in existing_fields:
                                        try:
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                        except arcpy.ExecuteError:
                                            arcpy.DeleteField_management(layer_name, valid_field)
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, gcs_clip_poly, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        processed_count += 1
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                        skipped_reasons[ds] = 'Invalid input type'
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # --------------------------------------------------------------------
            # If no output coord. system, get output spatial reference from input.
            # --------------------------------------------------------------------
            if out_coordinate_system == 0:
                try:
                    out_sr = dsc.spatialReference
                    arcpy.env.outputCoordinateSystem = out_sr
                except AttributeError:
                    out_sr = task_utils.get_spatial_reference(4326)
                    arcpy.env.outputCoordinateSystem = out_sr
            else:
                out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                arcpy.env.outputCoordinateSystem = out_sr

            # -------------------------------------------------
            # If the item is not a file, project the clip area.
            # -------------------------------------------------
            if dsc.dataType not in ('File', 'TextFile'):
                if not out_sr.name == gcs_sr.name:
                    try:
                        geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                        clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                    except (AttributeError, IndexError):
                        try:
                            clip_poly = gcs_clip_poly.projectAs(out_sr)
                        except AttributeError:
                            clip_poly = gcs_clip_poly
                    except ValueError:
                        clip_poly = gcs_clip_poly
                else:
                    clip_poly = gcs_clip_poly
                extent = clip_poly.extent


            # -----------------------------
            # Check the data type and clip.
            # -----------------------------

            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_poly, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_poly, os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_poly, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_poly, clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_poly, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_poly, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                gcs_clip_poly,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
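                        # Plain files cannot live inside a file geodatabase, so copy beside it.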
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_poly, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
    return clipped, errors, skipped
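A note on the pattern above: the examples pair arcpy.ValidateTableName with a
project-local helper (task_utils.create_unique_name) to avoid name collisions.
A minimal stand-in using only stock arcpy calls could look like the following
sketch; the helper name safe_output_name is hypothetical.

import arcpy

def safe_output_name(raw_name, workspace):
    # Sanitize the name for the target workspace, then let arcpy
    # append a numeric suffix if that name is already taken.
    valid = arcpy.ValidateTableName(raw_name, workspace)
    return arcpy.CreateUniqueName(valid, workspace)  # returns a full path

# e.g. safe_output_name('my table!', r'C:\data\out.gdb')
# -> r'C:\data\out.gdb\my_table' (or ...\my_table0 on a collision)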
Example #26
0
import arcpy
import os
arcpy.env.workspace = r'C:\Data'
outworkspace = r'D:\Documents\ArcGIS\Default.gdb'
fclist = arcpy.ListFeatureClasses()
for shapefile in fclist:
    fcname = arcpy.Describe(shapefile).basename
    newfcname = arcpy.ValidateTableName(fcname, outworkspace)
    outfc = os.path.join(outworkspace, newfcname)
    arcpy.CopyFeatures_management(shapefile, outfc)
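Worth noting: ValidateTableName only fixes illegal characters and length; it
does not check whether the dataset already exists, so the copy above can fail
(or overwrite, with overwriteOutput set) on a name clash. A defensive variant,
as a sketch reusing the environment from Example #26:

import os
import arcpy

for shapefile in arcpy.ListFeatureClasses():
    fcname = arcpy.Describe(shapefile).basename
    outfc = os.path.join(outworkspace,
                         arcpy.ValidateTableName(fcname, outworkspace))
    if not arcpy.Exists(outfc):  # skip instead of overwriting
        arcpy.CopyFeatures_management(shapefile, outfc)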
        print "Procesando secciones " + str(recuperalista(rango))
        arcpy.Delete_management("in_memory")
        rango = recuperalista(rango)

        for numero in rango:

            print "ejecutandose la seccion #: %s " % numero
            ruta = creadirs(numero)
            ruta = crearFGDB(ruta, numero)
            ##                ciclo=1
            ####                print inFeatures
            ##                Eliminate(inFeatures,ruta+"\\"+"cuadrox_"+str(numero),expression,ciclo)
            ##                ciclo=0
            nombre = arcpy.ValidateTableName(
                "f" + str(datetime.datetime.now().strftime("%b_%S")) +
                str(random.randrange(0, 5000)) + "_" + str(numero),
                "in_memory")
            path = r"in_memory\%s" % nombre
            ##                path1 = r"in_memory\%s" % nombre+"1"
            ##                path2 = r"in_memory\%s" % nombre+"2"
            ##                path3 = r"in_memory\%s" % nombre+"3"
            ##                path4 = r"in_memory\%s" % nombre+"4"
            ##                ciclo=2
            ##                Eliminate(ruta+"\\"+"cuadrox_"+str(numero),path,expression,ciclo)
            ##                Eliminate(path,path1,expression,ciclo)
            ##                Eliminate(path1,path2,expression,ciclo)
            ##                Eliminate(path2,path3,expression,ciclo)
            ##                Eliminate(path3,path4,expression,ciclo)

            ##                Eliminate(path4,ruta+"\\"+"cuadrox_"+str(numero)+"_Final",expression,ciclo)
            arcpy.CopyFeatures_management(inFeatures, path)
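The timestamp-plus-random scheme above makes collisions unlikely but not
impossible. If uniqueness is the only requirement, arcpy can generate the
scratch name itself; a sketch, assuming arcpy.CreateUniqueName accepts the
in_memory workspace as it does file workspaces:

import arcpy

# Let arcpy produce a collision-free name in the scratch workspace.
path = arcpy.CreateUniqueName("tmp_fc", "in_memory")
arcpy.CopyFeatures_management(inFeatures, path)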
Example #28
0
        elif inWatershed.find('.shp') > -1:
            watershedGDB_path = os.path.dirname(
                inWatershed[:inWatershed.find('.') + 4]
            ) + os.sep + os.path.basename(inWatershed).replace(
                ".shp", "").replace(" ", "_") + "_EngTools.gdb"

        else:
            AddMsgAndPrint(
                "\n\nWatershed Polygon must be either a feature class or shapefile... Exiting",
                2)
            exit()

        watershedFD = watershedGDB_path + os.sep + "Layers"
        watershedGDB_name = os.path.basename(watershedGDB_path)
        userWorkspace = os.path.dirname(watershedGDB_path)
        wsName = arcpy.ValidateTableName(
            os.path.splitext(os.path.basename(inWatershed))[0])

        # log File Path
        textFilePath = userWorkspace + os.sep + os.path.basename(
            userWorkspace).replace(" ", "_") + "_EngTools.txt"

        # record basic user inputs and settings to log file for future purposes
        logBasicSettings()

        # --------------------------------------------------- Temporary Datasets
        # These layers would not work as in_memory rasters, specifically for the Combine operation
        landuse = watershedGDB_path + os.sep + "NLCD"
        soilsGrid = watershedGDB_path + os.sep + "SOILS"
        LU_PLUS_SOILS = watershedGDB_path + os.sep + "LU_PLUS_SOILS"

        # --------------------------------------------------- Permanent Datasets
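The path arithmetic above (deriving an "_EngTools.gdb" beside the input) is
easy to get wrong with embedded dots; a small helper, shown as a sketch with
the hypothetical name eng_tools_gdb, makes the intent explicit:

import os

def eng_tools_gdb(in_path):
    # Place "<name>_EngTools.gdb" in the same folder as the input,
    # with spaces in the dataset name replaced by underscores.
    folder = os.path.dirname(in_path)
    base = os.path.splitext(os.path.basename(in_path))[0].replace(" ", "_")
    return os.path.join(folder, base + "_EngTools.gdb")

# eng_tools_gdb(r"C:\data\My Watershed.shp")
# -> r"C:\data\My_Watershed_EngTools.gdb"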
Example #29
0
        demFormat = demDesc['format']
        demCoordType = demSR.type

        if demCoordType == 'Projected':
            bProjectedCS = True
            demLinearUnits = demSR.linearUnitName
        else:
            bProjectedCS = False
            demLinearUnits = demSR.angularUnitName

        bImageService = False
        if demFormat == 'Image Service':
            bImageService = True

        # --------------------------------------------------------------------------------------------- Set Variables
        projectName = arcpy.ValidateTableName(
            os.path.basename(userWorkspace).replace(" ", "_"))
        textFilePath = userWorkspace + os.sep + projectName + "_EngTools.txt"

        watershedGDB_name = os.path.basename(userWorkspace).replace(
            " ", "_") + "_EngTools.gdb"  # replace spaces for new FGDB name
        watershedGDB_path = userWorkspace + os.sep + watershedGDB_name
        watershedFD = watershedGDB_path + os.sep + "Layers"
        AOIpath = arcpy.da.Describe(AOI)['catalogPath']

        # Permanent Datasets
        projectAOI = watershedFD + os.sep + projectName + "_AOI"
        Contours = watershedFD + os.sep + projectName + "_Contours_" + str(
            int(interval)).replace(".", "_") + "ft"
        DEM_aoi = watershedGDB_path + os.sep + projectName + "_DEM"
        Hillshade_aoi = watershedGDB_path + os.sep + projectName + "_Hillshade"
        depthGrid = watershedGDB_path + os.sep + projectName + "_DepthGrid"
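For reference, ValidateTableName replaces characters that are illegal in the
target workspace (spaces, dashes, and so on) with underscores, which is why
projectName above is safe to embed in dataset names. A quick check, with the
expected output shown as a comment (assuming a file-geodatabase context):

import arcpy

print(arcpy.ValidateTableName("survey data-2019", r"C:\data\out.gdb"))
# expected: survey_data_2019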
Example #30
0
def clip_data(input_items, out_workspace, clip_polygon, out_format):
    """Clips input results using the clip polygon.

    :param input_items: dictionary of items to be clipped (dataset path -> output name)
    :param out_workspace: the output workspace where results are created
    :param clip_polygon: the clip polygon geometry
    :param out_format: the type of output to be created (e.g. SHP for shapefile)
    """

    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    clipped = 0
    errors = 0
    skipped = 0
    fds = None

    for ds, out_name in input_items.iteritems():
        try:
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    clip_service_layers(ds, clip_polygon, out_name)
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data_by_features')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = repr(ex)
                    errors += 1
                    continue


            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()


            # ---------------------------------
            # Is the input geometry features?
            # ---------------------------------
            if isinstance(out_name, list):
                arcpy.env.overwriteOutput = True
                increment = task_utils.get_increment(result_count)
                for row in out_name:
                    try:
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'

                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            if out_format == 'SHP':
                                arcpy.DeleteField_management(layer_name, 'Id')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.iteritems():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if geom.type.upper() != arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(4326)
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                                layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                                if out_format == 'SHP':
                                    arcpy.DeleteField_management(layer_name, 'Id')
                                existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                                new_fields = []
                                field_values = []
                                for field, value in row.iteritems():
                                    valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if valid_field not in existing_fields:
                                        arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, clip_polygon, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)

                        processed_count += 1
                        if (processed_count % increment) == 0:
                            status_writer.send_percent(float(processed_count) / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        skipped_reasons[ds] = 'Invalid input type'
                        status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = repr(ex)
                        continue
                continue


            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # -----------------------------
            # Check the data type and clip.
            # -----------------------------

            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_polygon, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_polygon,
                                            os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_polygon, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if out_format != 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if out_format != 'SHP':
                            arcpy.Clip_analysis(fc, clip_polygon, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_polygon, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                if isinstance(clip_polygon, arcpy.Polygon):
                    extent = clip_polygon.extent
                else:
                    extent = arcpy.Describe(clip_polygon).extent
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_polygon, clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_polygon, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_polygon, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                clip_polygon,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
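                        # Plain files cannot live inside a file geodatabase, so copy beside it.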
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_polygon, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                skipped += 1
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = repr(ex)
            errors += 1
    return clipped, errors, skipped
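clip_data leans on module-level state (status_writer, result_count,
skipped_reasons, errors_reasons, files_to_package, and the counters declared
global) that the surrounding task framework initializes. A call might look
like this sketch; all values are hypothetical and the globals are assumed to
be set up elsewhere:

import arcpy

# input_items maps each dataset path to an output name ('' derives it
# from the input); any out_format other than 'SHP' implies a geodatabase.
input_items = {r"C:\data\roads.shp": ""}
clip_poly = arcpy.Polygon(arcpy.Array(
    [arcpy.Point(x, y) for x, y in [(0, 0), (0, 1), (1, 1), (1, 0)]]))
clipped, errors, skipped = clip_data(
    input_items, r"C:\data\out.gdb", clip_poly, "FGDB")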