def summarize_crashes(buffs,
                      out_features=arcpy.CreateUniqueName(
                          'crash_summary', 'in_memory'),
                      crash_service_url=DEFAULT_CRASH_URL):
    """downloads and summarizes crash information within buffered intersections

    Required:
        buffs -- intersection buffers
        out_features -- output features
        crash_service_url -- IOWA DOT Crash REST Service url
    """
    # dissolve buffers for REST API query
    dis = arcpy.CreateUniqueName('buff_dis', 'in_memory')
    arcpy.management.Dissolve(buffs, dis)

    # custom module: get layer from REST service
    lyr = restapi.MapServiceLayer(crash_service_url)

    # get geometry as JSON
    g = restapi.Geometry(dis)

    # query REST Service
    fs = lyr.query(geometry=g)
    crashes = arcpy.CreateUniqueName('crashes', 'in_memory')

    # export feature set
    restapi.exportFeatureSet(fs, crashes)

    # crash summary fields
    crash_sum_fields = [
        'FATALITIES', 'INJURIES', 'MAJINJURY', 'MININJURY', 'POSSINJURY',
        'UNKINJURY', 'PROPDMG', 'VEHICLES', 'TOCCUPANTS'
    ]

    # create field map for spatial join
    fms = arcpy.FieldMappings()
    fms.addTable(buffs)
    fms.addTable(crashes)

    # set field map merge option to 'Sum' to summarize crash results per intersection buffer
    for field_name in crash_sum_fields:
        fi = fms.findFieldMapIndex(field_name)
        field = fms.getFieldMap(fi)
        field.mergeRule = 'Sum'

        # update field map
        fms.replaceFieldMap(fi, field)

    # do spatial join
    arcpy.analysis.SpatialJoin(buffs, crashes, out_features, field_mapping=fms)
    return out_features
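A minimal usage sketch with a hypothetical buffer feature class; it assumes the restapi module and the DEFAULT_CRASH_URL constant are available as in the function above:

# Hypothetical input path; the buffers drive the query extent and the spatial join
buffs = r'C:\data\crashes.gdb\intersection_buffers'
summary = summarize_crashes(buffs)
print(arcpy.management.GetCount(summary))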
Example No. 2
def create_unique_name(name, gdb):
    """Creates and returns a valid and unique name for the geodatabase.
    :param name: name to be validated
    :param gdb: workspace path
    :rtype : str
    """
    import arcpy
    if gdb.endswith('.gdb'):
        valid_name = arcpy.ValidateTableName(name, gdb)
        unique_name = arcpy.CreateUniqueName(valid_name, gdb)
    else:
        valid_name = arcpy.ValidateTableName(name, gdb)
        unique_name = arcpy.CreateUniqueName(valid_name + '.shp', gdb)
    return unique_name
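A short sketch of the two branches, with hypothetical workspaces: a .gdb path gets a bare table name, while anything else is treated as a folder and gets a .shp suffix.

print(create_unique_name('crash summary', r'C:\data\work.gdb'))    # e.g. C:\data\work.gdb\crash_summary
print(create_unique_name('crash summary', r'C:\data\shapefiles'))  # e.g. C:\data\shapefiles\crash_summary.shp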
Example No. 3

def publish_service(service_name, results):
    """Publishes the results list to the ArcGIS Server specified in the config module."""
    # Create Service draft.
    sddraft = arcpy.CreateUniqueName(service_name + '.sddraft')
    info = arcpy.CreateGPSDDraft(results,
                                 sddraft,
                                 service_name,
                                 folder_name=ARC_SERVER['serviceFolder'],
                                 showMessages='INFO')

    sd = arcpy.CreateUniqueName(service_name + '.sd')
    info = arcpy.StageService_server(sddraft, sd)

    info = arcpy.UploadServiceDefinition_server(sd, ARC_SERVER['uri'])
    return sd
Example No. 4
def generate_gdb_filename(name_base="xt",
                          return_full=True,
                          gdb=None,
                          scratch=False):
    '''Returns the full path, or (basename, gdb) when return_full is False.'''
    if gdb is None:
        temp_gdb = get_temp_gdb()
    else:
        temp_gdb = gdb

    try:
        if scratch:
            filename = arcpy.CreateScratchName(name_base, workspace=temp_gdb)
        else:
            filename = arcpy.CreateUniqueName(name_base, temp_gdb)
    except:
        geoprocessing_log.error("Couldn't create GDB filename - {0:s}".format(
            traceback.format_exc()))
        raise

    temp_datasets.append(filename)  # add it to the tempfile registry

    if return_full:
        return filename
    else:
        return os.path.split(filename)[1], temp_gdb
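For example, a caller can pass an explicit workspace (hypothetical path here) to skip the temp-GDB lookup:

# Hypothetical workspace; return_full=False splits the result into (name, workspace)
full_path = generate_gdb_filename('roads_buffer', gdb=r'C:\temp\work.gdb')
name_only, workspace = generate_gdb_filename('roads_buffer', return_full=False, gdb=r'C:\temp\work.gdb')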
Example No. 5
    def __init__(self,
                 remote_assets,
                 fixed_assets,
                 near_table,
                 closest_facility,
                 folder,
                 search_threshold=MAX_SEARCH,
                 daisy_threshold=MAX_DAISY,
                 extend_route=False):

        self.remote = remote_assets
        self.fixed = fixed_assets
        self.near = near_table
        self.CF = common.wrappers.ClosestFacilityHelper(closest_facility)
        self.search = search_threshold
        self.daisy = daisy_threshold
        self.extend = extend_route
        self.gdb = arcpy.CreateUniqueName(FINAL_GDB, folder)
        self.field_map = arcpy.na.NAClassFieldMappings(
            network_analyst_layer=self.CF.na_layer,
            sub_layer_name=self.CF.facilities.name,
            use_location_fields=True,
            list_candidate_fields=arcpy.ListFields(self.remote))
        self.result = None
        self.route_fields = None
        self.near_array = None
        self.point_dict = None
        self.fixed_assets = []
Example No. 6
def intersectar(fcCarto,
                fcPlanif,
                scrWS,
                cfecha="FCH_TRAN",
                chas="AREA",
                cPctje="PCT_AVANCE",
                calcHcarto=True,
                calcHplanif=True,
                calcP=True):
    #-------------------------------------------------------------------------------------------------------------------------
    if calcHcarto:
        agregarHAS(fcCarto, chas)
    if calcHplanif:
        agregarHAS(fcPlanif, chas)
    oldWS = arcpy.env.workspace
    arcpy.env.workspace = scrWS
    salida = arcpy.CreateUniqueName("Intersect")
    print(salida)
    arcpy.Intersect_analysis([fcCarto, fcPlanif], salida, "ALL", eps, "INPUT")
    agregarHAS(salida, "HAS")
    if calcP:
        agregarPCTJE(salida, "HAS", cPctje, "AREA_1")
    agregarCampos(salida, cfecha)
    arcpy.env.workspace = oldWS
    return (corto(salida))
Example No. 7
def execute(in_netcdf, out_feat, levels=(20, 25, 30, 35, 40, 45), mask=None):
    # This is a very stupid fix for multiprocessing,
    # but I am not sure why arcpy.CheckoutExtension works
    arcpy.env.overwriteOutput = True
    arcpy.env.workspace = "in_memory"
    workspace = "in_memory"

    # Set all filenames
    temp_dir = os.path.dirname(os.path.abspath(in_netcdf))  # the derived .csv and .img are saved alongside the .nc
    layer1 = relocate(in_netcdf, temp_dir, ".img")
    if not os.path.exists(layer1):
        fn_csv = relocate(in_netcdf, temp_dir, ".csv")
        cnt_dir = os.path.dirname(in_netcdf) + "\\cnt"

        # Skip results that already exist (comment this out to re-process them)
        if os.path.exists(relocate(fn_csv, cnt_dir, ".shp")):
            print("%s existed. Skip!" % relocate(fn_csv, cnt_dir, ".shp"))
            return
        ds = netCDF4.Dataset(in_netcdf)
        # Level 7 == 3.5km
        refl_l = numpy.ravel(ds.variables["WNDSPD_850MB"])
        lon_l = numpy.ravel(ds.variables["lon"])
        lat_l = numpy.ravel(ds.variables["lat"])

        lon_l, lat_l = utils.projFunc(lon_l, lat_l)

        print(fn_csv)
        if not os.path.exists(fn_csv):
            f_csv = open(fn_csv, "w")
            f_csv.write("Id,X,Y,Reflect\n")

            for i in range(refl_l.shape[0]):
                if not refl_l[i] >= 10:
                    continue
                refl = refl_l[i]
                lat = lat_l[i]
                lon = lon_l[i]
                f_csv.write("%d,%f,%f,%f\n" % (i, lon, lat, refl))

            f_csv.close()
            print("NC to CSV:", fn_csv)
        else:
            print("Have CSV:", fn_csv)

        reflect = arcpy.CreateUniqueName(arcpy.ValidateTableName("reflect.shp"), workspace)
        arcpy.MakeXYEventLayer_management(fn_csv, 'X', 'Y', reflect, utils.spatialRef, "Reflect")
        arcpy.PointToRaster_conversion(reflect, "Reflect", layer1, cellsize=utils.resolution)
        arcpy.DefineProjection_management(layer1, utils.spatialRef)
        print("CSV to Rsater:", layer1)

    # Apply mask on if provided
    #if mask is not None:
    #    l2 = arcpy.sa.ExtractByMask(in_netcdf, mask)
    #    l21 = arcpy.sa.Con(l2, l2, 0, "VALUE >= 10")
    #else:
    # layer1 = in_netcdf
    # l21 = arcpy.sa.Con(layer1, layer1, 0, "VALUE >= 10")
    l22 = arcpy.sa.Con(arcpy.sa.IsNull(layer1), 0, layer1)
    arcpy.sa.ContourList(l22, out_feat, levels)
    print("Raster to Contour:", out_feat)
Example No. 8
def create_folder_in_scratch(folderName):
	# create the folders necessary for the job
	scratch = arcpy.env.scratchWorkspace
	#scratch = sys.path[0]
	folderPath = arcpy.CreateUniqueName(folderName, scratch)
	arcpy.CreateFolder_management(scratch, os.path.basename(folderPath))
	return folderPath
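A hypothetical call; arcpy.env.scratchWorkspace must already point at a writable folder:

arcpy.env.scratchWorkspace = r'C:\temp\scratch'  # hypothetical scratch location
report_folder = create_folder_in_scratch('report')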
Example No. 9
def make_output_path(raster, inLayerName, outLayerName, convert, formatList, zipFolderPath, scratchFolderPath, outputDataFolderName='data'):
	outFormat = formatList[1].lower()

	if convert:
		outwkspc = get_temp_location_path(zipFolderPath, outFormat, outputDataFolderName=outputDataFolderName)
	else:
		outwkspc = get_temp_location_path(scratchFolderPath, "gdb", outputDataFolderName=outputDataFolderName)

	if inLayerName.find("\\") > -1:
		inLayerName = inLayerName.split("\\")[-1]

	# make sure there are no spaces in the out raster name and make sure it's less than 13 chars
	if outFormat == "grid":
		if len(inLayerName) > 12:
			inLayerName = inLayerName[:12]
		if inLayerName.find(" ") > -1:
			inLayerName = inLayerName.replace(" ", "_")

	# make the output path
	tmpName = os.path.basename(arcpy.CreateUniqueName(outLayerName, outwkspc))
	tmpName = arcpy.ValidateTableName(tmpName, outwkspc)

	# do some extension housekeeping.
	# Raster formats and shp always need to put the extension at the end
	if raster or outFormat == "shp":
		if outFormat != "gdb" and outFormat != "mdb" and outFormat != "grid":
			tmpName += formatList[2].lower()

	outputpath = os.path.join(outwkspc, tmpName)

	return tmpName, outputpath
Example No. 10

    def preprocess_inputs(input_features, network_data_source, travel_mode, output_workspace):
        """Preprocess input features so that they can be processed in chunks.

        The function performs tasks such as spatially sorting the input features and calculating network locations
        for them.

        Args:
            input_features: The full catalog path to the input feature class.
            network_data_source: The catalog path to the network dataset used for analysis.
            travel_mode: Name of the travel mode used for the analysis.
            output_workspace: The catalog path of the output workspace in which to write the output feature class.
        Returns:
            The full catalog path of the processed feature class.

        """
        logger.info("Preprocessing %s", input_features)
        # Create output features in a feature class with the same name as input feature class.
        desc_input_features = arcpy.Describe(input_features)
        input_path = desc_input_features.catalogPath
        output_features = arcpy.CreateUniqueName(os.path.basename(input_path), output_workspace)

        # Spatially sort input features
        logger.debug("Spatially sorting %s", input_features)
        result = arcpy.management.Sort(input_features, output_features,
                                       [[desc_input_features.shapeFieldName, "ASCENDING"]], "PEANO")
        logger.debug(result.getMessages().split("\n")[-1])
        # Calculate network location fields if network data source is local
        if not ODCostMatrix.is_nds_service(network_data_source):
            logger.debug("Calculating network locations for %s", input_features)
            result = arcpy.na.CalculateLocations(output_features, network_data_source, "20 Miles",
                                                 ODCostMatrix.get_nds_search_criteria(network_data_source),
                                                 travel_mode=travel_mode)
            logger.debug(result.getMessages().split("\n")[-1])

        return output_features
Example No. 11
def create_submersion_matrix(debit, a, b, dem, mask, in_mask, out_sr):
    hhh = numpy.exp(a) * float(
        debit)**b  # hhh = (float(debit)/numpy.exp(b))**(1/a)

    pl = hhh < dem  # binary floodplain

    # Smooth the binary floodplain
    pl = scipy.ndimage.binary_opening(pl,
                                      structure=numpy.ones((3, 3)),
                                      iterations=1).astype(int)
    pl = scipy.ndimage.binary_fill_holes(pl)  # .astype(int)

    # Convert to polygons to eliminate areas not touching the river bed
    pl2 = arcpy.NumPyArrayToRaster(
        numpy.invert(pl).astype(int), out_sr[2], out_sr[1], out_sr[1])
    arcpy.DefineProjection_management(pl2, out_sr[0])
    pl2 = pl2 * in_mask
    polyg = arcpy.CreateScratchName("", "", "FeatureClass")
    polyg = arcpy.CreateUniqueName(polyg)
    arcpy.RasterToPolygon_conversion(pl2, polyg, "NO_SIMPLIFY")
    arcpy.MakeFeatureLayer_management(
        polyg, 'polyg_layer', 'GRIDCODE = 1'
    )  # For shapefiles the field is GRIDCODE, otherwise grid_code
    arcpy.SelectLayerByLocation_management('polyg_layer', "INTERSECT", mask,
                                           '', "NEW_SELECTION")
    new_mask = arcpy.CreateScratchName("", "", "RasterDataset")
    new_mask = arcpy.CreateUniqueName(new_mask)
    arcpy.PolygonToRaster_conversion('polyg_layer', 'GRIDCODE', new_mask,
                                     "CELL_CENTER", "NONE",
                                     out_sr[1])  # inDEM)

    # Compute the submersion depth
    pl = numpy.invert(pl).astype(int) * (dem - hhh)

    # Clean out undesirable values
    pl[pl < -100] = 0
    pl[pl > 0] = -0.001  # pixels with value > 0 are artifacts of smoothing the binary floodplain

    # Convert to raster and define the projection
    rhhh = arcpy.NumPyArrayToRaster(pl, out_sr[2], out_sr[1],
                                    out_sr[1]) * new_mask
    arcpy.DefineProjection_management(rhhh, out_sr[0])

    return rhhh
Example No. 12
def tmp(extension):
    """Generates a unique filename in the scratch folder using arcpy"""
    try:
        # Only works for ArcGIS 10.1+
        scratch = arcpy.env.scratchFolder
    except AttributeError:
        # Fallback for ArcGIS 10.0, which has no scratchFolder environment
        scratch = os.path.join(tempfile.gettempdir(), "scratch")
        if not os.path.exists(scratch):
            os.mkdir(scratch)
    return arcpy.CreateUniqueName("tmp." + extension, scratch)
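For example, a unique scratch path for a CSV (the extension is arbitrary):

csv_path = tmp('csv')  # e.g. ...\scratch\tmp.csv, or tmp0.csv if that name is taken
with open(csv_path, 'w') as f:
    f.write('Id,X,Y\n')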
Example No. 13
def _dissolve_multipart(geom_list):
    folder = tempfile.mkdtemp()

    shape_list = []
    for geom in geom_list:
        unique_name = arcpy.CreateUniqueName(os.path.join(folder, "xxx.shp"))
        arcpy.CopyFeatures_management(geom, unique_name)
        shape_list.append(unique_name)

    merge_file = arcpy.CreateUniqueName(os.path.join(folder, "xxx.shp"))
    arcpy.Merge_management(shape_list, merge_file)
    dissolve_file = arcpy.CreateUniqueName(os.path.join(folder, "xxx.shp"))

    arcpy.Dissolve_management(merge_file, dissolve_file)
    out_poly = None
    for row in arcpy.SearchCursor(dissolve_file):
        out_poly = copy.copy(row.SHAPE)
        break

    try:
        shutil.rmtree(folder, True)
    except Exception as e:
        logging.critical(e)
        logging.critical("Couldn't delete folder %s" % folder)

    return out_poly
Example No. 14
    def _spatially_sort_input(input_features, tracked_oid_name):
        """Spatially sort the input feature class.

        Also adds a field to the input feature class to preserve the original OID values. This field is called
        "OriginOID" for origins and "DestinationOID" for destinations.

        Args:
            input_features (str): Catalog path to the feature class to sort
            tracked_oid_name (str): New field name to store original OIDs.
        """
        arcpy.AddMessage(
            f"Spatially sorting input dataset {input_features}...")

        # Add a unique ID field so we don't lose OID info when we sort and can use these later in joins.
        # Note that if the original input was a shapefile, these IDs will likely be wrong because copying the original
        # input to the output geodatabase will have altered the original ObjectIDs.
        # Consequently, don't use shapefiles as inputs.
        desc = arcpy.Describe(input_features)
        if tracked_oid_name in [f.name for f in desc.fields]:
            arcpy.management.DeleteField(input_features, tracked_oid_name)
        arcpy.management.AddField(input_features, tracked_oid_name, "LONG")
        arcpy.management.CalculateField(input_features, tracked_oid_name,
                                        f"!{desc.oidFieldName}!")

        # Make a temporary copy of the inputs so the Sort tool can write its output to the input_features path,
        # which is the ultimate desired location.
        temp_inputs = arcpy.CreateUniqueName(
            "TempODInputs", arcpy.env.scratchGDB)  # pylint:disable = no-member
        arcpy.management.Copy(input_features, temp_inputs)

        # Spatially sort input features
        try:
            arcpy.management.Sort(temp_inputs, input_features,
                                  [[desc.shapeFieldName, "ASCENDING"]],
                                  "PEANO")
        except arcpy.ExecuteError:  # pylint:disable = no-member
            msgs = arcpy.GetMessages(2)
            if "000824" in msgs:  # ERROR 000824: The tool is not licensed.
                arcpy.AddWarning(
                    "Skipping spatial sorting because the Advanced license is not available."
                )
            else:
                arcpy.AddWarning(
                    f"Skipping spatial sorting because the tool failed. Messages:\n{msgs}"
                )

        # Clean up. Delete temporary copy of inputs
        arcpy.management.Delete([temp_inputs])
Example No. 15
def extract_photos_from_fc(infc, out_folder):
    if not os.path.exists(out_folder):
        os.mkdir(out_folder)
    fldBLOB = 'DATA'
    fldAttName = 'ATT_NAME'
    fldGlobID = 'REL_GLOBALID'
    flds = [fldBLOB, fldAttName, fldGlobID]
    # Attachment table
    tbl = "{}__ATTACH".format(infc)
    with arcpy.da.SearchCursor(tbl, flds) as cursor:
        for row in cursor:
            binaryRep = row[0]
            GlobID = row[2]
            uniquename = arcpy.CreateUniqueName("{}.jpg".format(GlobID),
                                                out_folder)
            with open(uniquename, 'wb') as f:
                f.write(binaryRep.tobytes())
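A hypothetical call; the feature class must have an attachment table named <infc>__ATTACH:

extract_photos_from_fc(r'C:\data\inspections.gdb\Poles', r'C:\photos\poles')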
Example No. 16

def crashes_as_json(buffs):
    """Prepares crashes for a GP service by converting the output to points as a FeatureSet()

    Required:
        buffs -- intersection buffers
    """
    crashes = summarize_crashes(buffs)

    # convert summarized buffer polygons to points
    points = arcpy.CreateUniqueName('CrashPoints', 'in_memory')

    # arcpy.FeatureToPoint requires an Advanced license; use a polyfill so it's not required at the ArcGIS Server level
    featureToPointPolyfill(crashes, points)

    # convert to JSON
    arcpy.SetParameter(1, arcpy.FeatureSet(points))
Example No. 17
def sobreposicao_camadas(camada1, camada2, dissolve=True, keep_atributes=False):
    log(id_imovel, 'Classificando tema {0}'.format(camada2))
    if not arcpy.Exists(camada2):
        log(id_imovel, 'Tema {} nao encontrado na base de insumos'.format(camada2))
        return None
    if keep_atributes:
        join_attributes = 'NO_FID'
        output = arcpy.CreateUniqueName('INTERSECT', '%scratchGDB%')
    else:
        join_attributes = 'ONLY_FID'
        output = arcpy.Geometry()
    intersect = arcpy.Intersect_analysis([camada1, camada2], output, output_type='INPUT', join_attributes=join_attributes)
    if not intersect:
        return None
    if not dissolve:
        return intersect
    return dissolve_poligonos(intersect)
Example No. 18
def AdoptParameter(provided_input, parameter_name, preserve_existing=True):
    """
    Copies the provided input into the geodatabase as the parameter_name
    parameter. If a feature class already exists with the parameter_name,
    a unique copy will be saved (with preserve_existing=True).
    Workspace must be defined as project's unique geodatabase before
    calling this function.
    :param provided_input: a feature class or shapefile
    :param parameter_name: the name to save the provided_input as (string)
    :param preserve_existing: True to avoid overwriting
    :return: the name of the adopted parameter as a string
    """
    # Save a copy of the existing feature class if it already exists
    if preserve_existing:
        if arcpy.Exists(parameter_name):
            new_parameter_name = arcpy.CreateUniqueName(parameter_name)
            arcpy.CopyFeatures_management(parameter_name, new_parameter_name)

    # Copy provided_input to temporary memory to allow overwriting
    arcpy.CopyFeatures_management(provided_input, "in_memory/tmp_provided")

    # Delete existing layers in the TOC matching the parameter_name
    if arcpy.ListInstallations()[0] == 'arcgispro':
        p = arcpy.mp.ArcGISProject("CURRENT")
        m = p.activeMap
        for _ in m.listLayers():
            arcpy.Delete_management(parameter_name)
    else:
        mxd = arcpy.mapping.MapDocument("CURRENT")
        for _ in arcpy.mapping.ListLayers(mxd, parameter_name):
            arcpy.Delete_management(parameter_name)

    # Delete feature classes in the geodatabase
    for _ in arcpy.ListFeatureClasses(parameter_name):
        arcpy.Delete_management(parameter_name)

    # Execute renaming
    adopted_parameter = arcpy.CopyFeatures_management(
        "in_memory/tmp_provided", parameter_name
        )

    # Clean up
    arcpy.Delete_management("in_memory")

    return adopted_parameter
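A hypothetical usage; the workspace must already be the project geodatabase, as the docstring requires:

arcpy.env.workspace = r'C:\projects\site.gdb'  # hypothetical project geodatabase
AdoptParameter(r'C:\incoming\boundary.shp', 'project_boundary', preserve_existing=True)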
Example No. 19
    def execute(self, parameters, messages):
        """The source code of the tool."""
        in_line_features = parameters[0].valueAsText
        route_id_field = parameters[1].valueAsText
        station_interval = parameters[2].value
        cross_section_width = parameters[3].value
        out_route_feature_class = parameters[4].valueAsText
        out_station_feature_class = parameters[5].valueAsText
        out_cross_section_feature_class = parameters[6].valueAsText

        arcpy.env.overwriteOutput = True

        # Create route from polyline by length
        create_route_by_length(in_features=in_line_features,
                               route_id_field=route_id_field,
                               route_feature_class=out_route_feature_class)
        # Create a unique in_memory name
        event_tbl = arcpy.CreateUniqueName('event_table', 'in_memory')

        create_point_event_table(out_route_feature_class,
                                 route_id_field_name=route_id_field,
                                 measure_interval=station_interval,
                                 out_table=event_tbl)

        # locate points along route
        event_properties = '{} {} {}'.format(route_id_field, "POINT",
                                             'Measure')
        arcpy.lr.MakeRouteEventLayer(in_routes=out_route_feature_class,
                                     route_id_field=route_id_field,
                                     in_table=event_tbl,
                                     in_event_properties=event_properties,
                                     out_layer='Point Events',
                                     add_angle_field=True,
                                     angle_type="NORMAL",
                                     point_event_type="POINT")

        # Save points to feature class
        arcpy.management.CopyFeatures('Point Events',
                                      out_station_feature_class)
        # Create cross section
        create_cross_section(out_station_feature_class, route_id_field,
                             cross_section_width,
                             out_cross_section_feature_class)

        return
Example No. 20
    def execute(self, parameters, messages):
        """The source code of the tool."""
        in_line_features = parameters[0].valueAsText
        route_id_field = parameters[1].valueAsText
        measure_interval = parameters[2].value
        out_point_feature_class = parameters[3].valueAsText
        out_route_feature_class = parameters[4].valueAsText

        arcpy.env.overwriteOutput = True

        # Create route from polyline by length
        create_route_by_length(in_features=in_line_features,
                               route_id_field=route_id_field,
                               route_feature_class=out_route_feature_class)

        # Create a set of UniqueNames for in_memory features
        event_tbl = arcpy.CreateUniqueName('event_table', 'in_memory')
        #point_fc = arcpy.CreateUniqueName('point_fc', 'in_memory')

        # Create point event table
        create_point_event_table(out_route_feature_class,
                                 route_id_field_name=route_id_field,
                                 measure_interval=measure_interval,
                                 out_table=event_tbl)
        # locate points along route
        event_properties = '{} {} {}'.format(route_id_field, "POINT",
                                             'Measure')
        arcpy.lr.MakeRouteEventLayer(in_routes=out_route_feature_class,
                                     route_id_field=route_id_field,
                                     in_table=event_tbl,
                                     in_event_properties=event_properties,
                                     out_layer='Point Events',
                                     add_angle_field=True,
                                     angle_type="NORMAL",
                                     point_event_type="POINT")
        # Save points to feature class
        arcpy.management.CopyFeatures('Point Events', out_point_feature_class)

        # Cleanup
        if arcpy.Exists(event_tbl):
            arcpy.management.Delete(event_tbl)
        if arcpy.Exists('Point Events'):
            arcpy.management.Delete('Point Events')
        return
Example No. 21
def create_route_by_length(in_features, route_id_field, route_feature_class):
    """Converts features into measured routes based on the length of the lines.
    The input line features that share a common identifier are merged to create
    a single route.
    Process should only be run on simple lines (no multi-part).
    If merging on common id, geometry direction should also align.
    """
    # Create a unique in_memory name
    the_line = arcpy.CreateUniqueName('theLine', 'in_memory')
    # Copy to memory so as not to alter the original, and for speed
    arcpy.management.CopyFeatures(in_features, the_line)
    # Add Fields
    arcpy.management.AddField(the_line,
                              'FromMeasure',
                              "DOUBLE",
                              field_is_nullable='NULLABLE',
                              field_is_required='NON_REQUIRED')
    arcpy.management.AddField(the_line,
                              'ToMeasure',
                              "DOUBLE",
                              field_is_nullable='NULLABLE',
                              field_is_required='NON_REQUIRED')
    # Update the in_memory table
    update_fields = ['FromMeasure', 'ToMeasure', 'SHAPE@LENGTH']
    with arcpy.da.UpdateCursor(the_line, update_fields) as cursor:
        for row in cursor:
            row[0] = 0
            row[1] = row[2]
            cursor.updateRow(row)

    # Create Route
    arcpy.lr.CreateRoutes(the_line,
                          route_id_field,
                          route_feature_class,
                          measure_source='TWO_FIELDS',
                          from_measure_field='FromMeasure',
                          to_measure_field='ToMeasure')

    # Delete the in_memory line
    arcpy.management.Delete(the_line)
    # Return the route
    return route_feature_class
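A hypothetical call; 'RouteID' stands in for whatever identifier field the input lines share:

routes = create_route_by_length(r'C:\data\streams.gdb\centerlines', 'RouteID',
                                arcpy.CreateUniqueName('routes', 'in_memory'))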
Example No. 22
def InitializeNames(gname,
                    tname=None,
                    temptable=False,
                    scratch=None,
                    replacechars=None):
    # Was this just a table name, or a GDB and table name?
    if temptable:
        local_gname, local_tname, local_fname = DecodeNames(gname, tname)

        if local_gname == "":
            local_gname = arcpy.env.scratchGDB if scratch is None else scratch

        local_tname = ParseTableName(local_tname, replacechars)

        return DecodeNames(
            arcpy.CreateUniqueName(ParseTableName(local_tname, replacechars),
                                   local_gname))
    else:
        local_tname = ParseTableName(tname, replacechars) if tname else None
        return DecodeNames(gname, local_tname)
Example No. 23
def union(fcCarto,
          fcPlanif,
          scrWS,
          consideraFecha=False,
          cfecha="FCH_TRAN",
          fcNoPlanif="Cosecha_No_Planificada",
          fcRemanente="Remanente",
          cPctje="PCT_MES"):
    #-------------------------------------------------------------------------------------------------------------------------
    oldWS = arcpy.env.workspace
    arcpy.env.workspace = scrWS
    union = "UNION"
    union = arcpy.CreateUniqueName(union)
    arcpy.Union_analysis([[fcCarto, 1], [fcPlanif, 2]], union, "ALL", eps)

    agregarCampos(union, cfecha)
    agregarHAS(union, "HAS")
    agregarPCTJE(union, "HAS", cPctje, "AREA_1")

    arcpy.env.workspace = oldWS
    return (corto(union))
Example No. 24
    def __init__(self, remote_assets, fixed_assets, network_dataset,
                 output_gdb):
        self.remote = remote_assets
        self.fixed = fixed_assets
        self.network = network_dataset
        self.gdb = arcpy.CreateUniqueName(TEMP_GDB, output_gdb)

        # Find the default cost
        impedance = filter(lambda f: f.usageType == "Cost" and f.useByDefault,
                           arcpy.Describe(self.network).attributes)[0].name
        cf = arcpy.na.MakeClosestFacilityLayer(self.network,
                                               common.unique_name("tempNA"),
                                               impedance)[0]
        desc = arcpy.Describe(cf)
        # Get the default locator information to build search_criteria
        self.criteria = [[
            getattr(desc.locators, "source{}".format(i)),
            getattr(desc.locators, "snapType{}".format(i))
        ] for i in range(desc.locatorCount)]
        self.tolerance = desc.searchTolerance
        common.delete(cf)
Example No. 25
def create_archive(archive_folder, output_file):
    """
    archive_folder: Folder to store zip file
    output_file: Feature class to be archived

    Creates a zip file containing a shapefile representation of the
    output_file.
    If the output_file is not a shapefile, it creates a temporary shapefile to
    add to the archive.
    """
    output_desc = arcpy.Describe(output_file)
    if not output_desc.dataType == 'ShapeFile':
        """
        If output_file isn't a shapefile, create a temporary one to
        use for archiving
        """
        temp_name = arcpy.CreateUniqueName(os.path.basename(output_file),
                                           arcpy.env.scratchFolder)
        temp_file = os.path.join(os.environ['TMP'], temp_name)
        arcpy.CopyFeatures_management(output_file, temp_file)
        output_file = temp_file
        logger.logMsg('Creating temporary shapefile %s for archiving' %
                      output_file)
    logger.logMsg('output_desc.file: %s' % output_desc.file)
    archive_file = output_desc.file
    archive_file += '_'
    archive_file += date.isoformat(datetime.now())
    archive_file += '.zip'
    archive_filepath = os.path.join(archive_folder, archive_file)
    logger.logMsg('Archiving %s to %s' % (output_file, archive_filepath))
    output_desc = arcpy.Describe(output_file)
    try:
        with zipfile.ZipFile(archive_filepath,
                             mode='w',
                             compression=zipfile.ZIP_DEFLATED) as zf:
            shape_zipper(output_file, zf)
            zip_info(zf)
    except arcpy.ExecuteError:
        logger.logError()
        raise
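A hypothetical call; logger, shape_zipper and zip_info must be in scope:

create_archive(r'C:\archives', r'C:\data\results\parcels.shp')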
Example No. 26
def accuracy_analysis(test_samples,classified_image): 
    confuse = {}
    for i in range(1,7):
        for j in range(1,7):
            confuse[i,j] = 0

    # Extract the classified values at the validation sample points from the classification image, producing a new validation point file
    outPointFeatures = arcpy.CreateUniqueName("c:\\data\\tmp\\xx.shp")
    ExtractValuesToPoints(test_samples,classified_image,outPointFeatures)

    # Build the confusion matrix
    cur = arcpy.SearchCursor(outPointFeatures)
    n = 0
    for row in cur:
        n = n + 1
        i = row.GrndTruth
        j = row.RASTERVALU
        confuse[i,j] = confuse[i,j] + 1
    del outPointFeatures

    # Compute the overall classification accuracy and the Kappa coefficient from the confusion matrix
    right_n = 0
    row_column = 0
    for k in range(1,7):
        right_n = right_n + confuse[k,k]
        row = 0
        for r in range(1,7):
            row = confuse[k,r] +row
        column = 0
        for c in range(1,7):
            column = confuse[c,k] +column
        row_column = row*column + row_column
    overall_precise = float(right_n)/float(n)
    Kappa = float(n*right_n - row_column)/float(n*n - row_column)
    print "n:",n
    print "overall_precise:",overall_precise
    print "Kappa:",Kappa
Example No. 27
height_files = {
    'multipoint': None,
    'tin': None,
    'raster': os.path.join(support_folder, 'dhm.tif'),
    'name': 'height'
}
intensity_files = {
    'multipoint': None,
    'tin': None,
    'raster': os.path.join(support_folder, 'intensity.tif'),
    'rawraster': os.path.join(support_folder, 'intensity.tif'),
    'name': 'intensity'
}

arcpy.AddMessage("Generating DSM")
surfaceLyr = arcpy.CreateUniqueName('First Return Layer')
arcpy.MakeLasDatasetLayer_management(
    in_las_dataset=inlasd,
    out_layer=surfaceLyr,
    class_code=None,
    return_values=['Single Return', 'First of Many'])
arcpy.LasDatasetToRaster_conversion(
    in_las_dataset=surfaceLyr,
    out_raster=surface_files['rawraster'],
    value_field='ELEVATION',
    interpolation_type=
    "TRIANGULATION NATURAL_NEIGHBOR NO_THINNING CLOSEST_TO_MEAN 0",
    # 'TRIANGULATION Linear {point_thinning_type} {point_selection_method} {resolution}',
    data_type='FLOAT',
    sampling_type='CELLSIZE',
    sampling_value=cell_edge_length)
Example No. 28
def clip_data(input_items, out_workspace, out_coordinate_system, gcs_sr, gcs_clip_poly, out_format):
    """Clips input results."""
    clipped = 0
    errors = 0
    skipped = 0
    fds = None
    global processed_count
    global layer_name
    global existing_fields
    global new_fields
    global field_values

    for ds, out_name in input_items.items():
        try:
            if not isinstance(out_name, list):
                out_name = ''
            # -----------------------------------------------
            # If the item is a service layer, process and continue.
            # -----------------------------------------------
            if ds.startswith('http'):
                try:
                    if out_coordinate_system == 0:
                        service_layer = task_utils.ServiceLayer(ds)
                        wkid = service_layer.wkid
                        out_sr = arcpy.SpatialReference(wkid)
                        arcpy.env.outputCoordinateSystem = out_sr
                    else:
                        out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                        arcpy.env.outputCoordinateSystem = out_sr

                    if not out_sr.name == gcs_sr.name:
                        try:
                            geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                            clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                        except (AttributeError, IndexError):
                            try:
                                clip_poly = gcs_clip_poly.projectAs(out_sr)
                            except AttributeError:
                                clip_poly = gcs_clip_poly
                        except ValueError:
                            clip_poly = gcs_clip_poly
                    else:
                        clip_poly = gcs_clip_poly

                    arcpy.env.overwriteOutput = True
                    service_layer = task_utils.ServiceLayer(ds, clip_poly.extent.JSON, 'esriGeometryEnvelope')
                    oid_groups = service_layer.object_ids
                    out_features = None
                    g = 0.
                    group_cnt = service_layer.object_ids_cnt
                    for group in oid_groups:
                        g += 1
                        group = [oid for oid in group if oid]
                        where = '{0} IN {1}'.format(service_layer.oid_field_name, tuple(group))
                        url = ds + "/query?where={}&outFields={}&returnGeometry=true&f=json".format(where, '*', eval(clip_poly.JSON))
                        feature_set = arcpy.FeatureSet()
                        if not out_name:
                            out_name = service_layer.service_layer_name
                        try:
                            feature_set.load(url)
                        except Exception:
                            continue
                        if not out_features:
                            out_features = arcpy.Clip_analysis(feature_set, clip_poly, out_name)
                        else:
                            clip_features = arcpy.Clip_analysis(feature_set, clip_poly, 'in_memory/features')
                            arcpy.Append_management(clip_features, out_features, 'NO_TEST')
                            try:
                                arcpy.Delete_management(clip_features)
                            except arcpy.ExecuteError:
                                pass
                        status_writer.send_percent(float(g) / group_cnt, '', 'clip_data')
                    processed_count += 1.
                    clipped += 1
                    status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(ds), 'clip_data')
                    continue
                except Exception as ex:
                    status_writer.send_state(status.STAT_WARNING, str(ex))
                    errors_reasons[ds] = ex.message
                    errors += 1
                    continue

            # -----------------------------------------------
            # Check if the path is a MXD data frame type.
            # ------------------------------------------------
            map_frame_name = task_utils.get_data_frame_name(ds)
            if map_frame_name:
                ds = ds.split('|')[0].strip()

            # -------------------------------
            # Is the input a geometry feature
            # -------------------------------
            if isinstance(out_name, list):
                for row in out_name:
                    try:
                        arcpy.env.overwriteOutput = True
                        name = os.path.join(out_workspace, arcpy.ValidateTableName(ds, out_workspace))
                        if out_format == 'SHP':
                            name += '.shp'
                        # Clip the geometry.
                        geo_json = row['[geo]']
                        geom = arcpy.AsShape(geo_json)
                        row.pop('[geo]')
                        if not arcpy.Exists(name):
                            if arcpy.env.outputCoordinateSystem:
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                            else:
                                arcpy.env.outputCoordinateSystem = 4326
                                arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                            layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                            existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                            new_fields = []
                            field_values = []
                            for field, value in row.items():
                                valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                new_fields.append(valid_field)
                                field_values.append(value)
                                try:
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                except arcpy.ExecuteError:
                                    arcpy.DeleteField_management(layer_name, valid_field)
                                    arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                        else:
                            if not geom.type.upper() == arcpy.Describe(name).shapeType.upper():
                                name = arcpy.CreateUniqueName(os.path.basename(name), out_workspace)
                                if arcpy.env.outputCoordinateSystem:
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())
                                else:
                                    arcpy.env.outputCoordinateSystem = 4326
                                    arcpy.CreateFeatureclass_management(out_workspace, os.path.basename(name), geom.type.upper())

                                layer_name = arcpy.MakeFeatureLayer_management(name, 'flayer')
                                existing_fields = [f.name for f in arcpy.ListFields(layer_name)]
                                new_fields = []
                                field_values = []
                                for field, value in row.items():
                                    valid_field = arcpy.ValidateFieldName(field, out_workspace)
                                    new_fields.append(valid_field)
                                    field_values.append(value)
                                    if not valid_field in existing_fields:
                                        try:
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')
                                        except arcpy.ExecuteError:
                                            arcpy.DeleteField_management(layer_name, valid_field)
                                            arcpy.AddField_management(layer_name, valid_field, 'TEXT')

                        clipped_geometry = arcpy.Clip_analysis(geom, gcs_clip_poly, arcpy.Geometry())
                        if clipped_geometry:
                            with arcpy.da.InsertCursor(layer_name, ["SHAPE@"] + new_fields) as icur:
                                icur.insertRow([clipped_geometry[0]] + field_values)
                        status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(row['name']), 'clip_data')
                        processed_count += 1
                        clipped += 1
                    except KeyError:
                        processed_count += 1
                        skipped += 1
                        status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                        skipped_reasons[ds] = 'Invalid input type'
                    except Exception as ex:
                        processed_count += 1
                        errors += 1
                        errors_reasons[ds] = ex.message
                        continue
                continue

            dsc = arcpy.Describe(ds)
            try:
                if dsc.spatialReference.name == 'Unknown':
                    status_writer.send_state(status.STAT_WARNING, _('{0} has an Unknown projection. Output may be invalid or empty.').format(dsc.name))
            except AttributeError:
                pass

            # --------------------------------------------------------------------
            # If no output coord. system, get output spatial reference from input.
            # --------------------------------------------------------------------
            if out_coordinate_system == 0:
                try:
                    out_sr = dsc.spatialReference
                    arcpy.env.outputCoordinateSystem = out_sr
                except AttributeError:
                    out_sr = task_utils.get_spatial_reference(4326)
                    arcpy.env.outputCoordinateSystem = out_sr
            else:
                out_sr = task_utils.get_spatial_reference(out_coordinate_system)
                arcpy.env.outputCoordinateSystem = out_sr

            # -------------------------------------------------
            # If the item is not a file, project the clip area.
            # -------------------------------------------------
            if dsc.dataType not in ('File', 'TextFile'):
                if not out_sr.name == gcs_sr.name:
                    try:
                        geo_transformation = arcpy.ListTransformations(gcs_sr, out_sr)[0]
                        clip_poly = gcs_clip_poly.projectAs(out_sr, geo_transformation)
                    except (AttributeError, IndexError):
                        try:
                            clip_poly = gcs_clip_poly.projectAs(out_sr)
                        except AttributeError:
                            clip_poly = gcs_clip_poly
                    except ValueError:
                        clip_poly = gcs_clip_poly
                else:
                    clip_poly = gcs_clip_poly
                extent = clip_poly.extent


            # -----------------------------
            # Check the data type and clip.
            # -----------------------------

            # Feature Class or ShapeFile
            if dsc.dataType in ('FeatureClass', 'ShapeFile', 'Shapefile'):
                if out_name == '':
                    name = arcpy.ValidateTableName(dsc.name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                else:
                    name = arcpy.ValidateTableName(out_name, out_workspace)
                    name = task_utils.create_unique_name(name, out_workspace)
                # Does the input exist in a feature dataset? If so, create the feature dataset if it doesn't exist.
                ws = os.path.dirname(ds)
                if any(ext in ws for ext in ('.gdb', '.mdb', '.sde')):
                    if os.path.splitext(ws)[1] in ('.gdb', '.mdb', '.sde'):
                        arcpy.Clip_analysis(ds, clip_poly, name)
                    else:
                        fds_name = os.path.basename(ws)
                        if not arcpy.Exists(os.path.join(out_workspace, fds_name)):
                            arcpy.CreateFeatureDataset_management(out_workspace, fds_name, dsc.spatialReference)
                        arcpy.Clip_analysis(ds, clip_poly, os.path.join(out_workspace, fds_name, os.path.basename(ds)))
                else:
                    arcpy.Clip_analysis(ds, clip_poly, name)

            # Feature dataset
            elif dsc.dataType == 'FeatureDataset':
                if not out_format == 'SHP':
                    fds_name = os.path.basename(task_utils.create_unique_name(dsc.name, out_workspace))
                    fds = arcpy.CreateFeatureDataset_management(out_workspace, fds_name)
                arcpy.env.workspace = ds
                for fc in arcpy.ListFeatureClasses():
                    try:
                        if not out_format == 'SHP':
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, fds.getOutput(0)))
                        else:
                            arcpy.Clip_analysis(fc, clip_poly, task_utils.create_unique_name(fc, out_workspace))
                    except arcpy.ExecuteError:
                        pass
                arcpy.env.workspace = out_workspace

            # Raster dataset
            elif dsc.dataType == 'RasterDataset':
                if out_name == '':
                    name = task_utils.create_unique_name(dsc.name, out_workspace)
                else:
                    name = task_utils.create_unique_name(out_name, out_workspace)
                ext = '{0} {1} {2} {3}'.format(extent.XMin, extent.YMin, extent.XMax, extent.YMax)
                arcpy.Clip_management(ds, ext, name, in_template_dataset=clip_poly, clipping_geometry="ClippingGeometry")

            # Layer file
            elif dsc.dataType == 'Layer':
                task_utils.clip_layer_file(dsc.catalogPath, clip_poly, arcpy.env.workspace)

            # Cad drawing dataset
            elif dsc.dataType == 'CadDrawingDataset':
                arcpy.env.workspace = dsc.catalogPath
                cad_wks_name = os.path.splitext(dsc.name)[0]
                for cad_fc in arcpy.ListFeatureClasses():
                    name = task_utils.create_unique_name('{0}_{1}'.format(cad_wks_name, cad_fc), out_workspace)
                    arcpy.Clip_analysis(cad_fc, clip_poly, name)
                arcpy.env.workspace = out_workspace

            # File
            elif dsc.dataType in ('File', 'TextFile'):
                if dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith('.kmz'):
                    name = os.path.splitext(dsc.name)[0]
                    kml_layer = arcpy.KMLToLayer_conversion(dsc.catalogPath, arcpy.env.scratchFolder, name)
                    group_layer = arcpy.mapping.Layer(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    for layer in arcpy.mapping.ListLayers(group_layer):
                        if layer.isFeatureLayer:
                            arcpy.Clip_analysis(layer,
                                                gcs_clip_poly,
                                                task_utils.create_unique_name(layer, out_workspace))
                    # Clean up temp KML results.
                    arcpy.Delete_management(os.path.join(arcpy.env.scratchFolder, '{0}.lyr'.format(name)))
                    arcpy.Delete_management(kml_layer[1])
                    del group_layer
                else:
                    if out_name == '':
                        out_name = dsc.name
                    if out_workspace.endswith('.gdb'):
                        f = arcpy.Copy_management(ds, os.path.join(os.path.dirname(out_workspace), out_name))
                    else:
                        f = arcpy.Copy_management(ds, os.path.join(out_workspace, out_name))
                    processed_count += 1.
                    status_writer.send_percent(processed_count / result_count, _('Copied file: {0}').format(dsc.name), 'clip_data')
                    status_writer.send_state(_('Copied file: {0}').format(dsc.name))
                    clipped += 1
                    if out_format in ('LPK', 'MPK'):
                        files_to_package.append(f.getOutput(0))
                    continue

            # Map document
            elif dsc.dataType == 'MapDocument':
                task_utils.clip_mxd_layers(dsc.catalogPath, clip_poly, arcpy.env.workspace, map_frame_name)
            else:
                processed_count += 1.
                status_writer.send_percent(processed_count / result_count, _('Invalid input type: {0}').format(ds), 'clip_data')
                status_writer.send_state(status.STAT_WARNING, _('Invalid input type: {0}').format(ds))
                skipped += 1
                skipped_reasons[ds] = _('Invalid input type: {0}').format(dsc.dataType)
                continue

            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Clipped: {0}').format(dsc.name), 'clip_data')
            status_writer.send_status(_('Clipped: {0}').format(dsc.name))
            clipped += 1
        # Continue. Process as many as possible.
        except Exception as ex:
            processed_count += 1.
            status_writer.send_percent(processed_count / result_count, _('Skipped: {0}').format(os.path.basename(ds)), 'clip_data')
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors_reasons[ds] = ex.message
            errors += 1
            pass
    return clipped, errors, skipped
Example No. 29
def execute(request):
    """Creates a GeoPDF.
    :param request: json as a dict.
    """
    added_to_map = 0
    errors = 0
    skipped = 0

    parameters = request['params']
    num_results, response_index = task_utils.get_result_count(parameters)
    docs = parameters[response_index]['response']['docs']
    input_items = task_utils.get_input_items(docs)
    input_rows = collections.defaultdict(list)
    for doc in docs:
        if 'path' not in doc:
            input_rows[doc['name']].append(doc)
    if num_results > task_utils.CHUNK_SIZE:
        status_writer.send_state(status.STAT_FAILED,
                                 'Reduce results to 25 or less.')
        return

    map_template = task_utils.get_parameter_value(parameters, 'map_template',
                                                  'value')
    base_map = task_utils.get_parameter_value(parameters, 'base_map', 'value')
    map_title = task_utils.get_parameter_value(parameters, 'map_title',
                                               'value')
    attribute_setting = task_utils.get_parameter_value(parameters,
                                                       'attribute_settings',
                                                       'value')
    author = task_utils.get_parameter_value(parameters, 'map_author', 'value')
    output_file_name = task_utils.get_parameter_value(parameters,
                                                      'output_file_name',
                                                      'value')
    if not output_file_name:
        output_file_name = 'output_pdf'
    try:
        map_view = task_utils.get_parameter_value(parameters, 'map_view',
                                                  'extent')
    except KeyError:
        map_view = None
        pass

    temp_folder = os.path.join(request['folder'], 'temp')
    if not os.path.exists(temp_folder):
        os.makedirs(temp_folder)

    if base_map == 'NONE':
        base_layer = None
    else:
        base_layer = arcpy.mapping.Layer(
            os.path.join(os.path.dirname(os.path.dirname(__file__)),
                         'supportfiles', 'basemaps',
                         '{0}.lyr'.format(base_map)))
    mxd_path = os.path.join(os.path.dirname(os.path.dirname(__file__)),
                            'supportfiles', 'frame', map_template)
    mxd = arcpy.mapping.MapDocument(mxd_path)
    data_frame = arcpy.mapping.ListDataFrames(mxd)[0]

    layers = []
    all_layers = []

    if input_rows:
        for name, rows in input_rows.iteritems():
            for row in rows:
                try:
                    name = arcpy.CreateUniqueName(name, 'in_memory')
                    # Create the geometry.
                    geo_json = row['[geo]']
                    geom = arcpy.AsShape(geo_json)
                    arcpy.CopyFeatures_management(geom, name)
                    feature_layer = arcpy.MakeFeatureLayer_management(
                        name, os.path.basename(name))
                    layer_file = arcpy.SaveToLayerFile_management(
                        feature_layer,
                        os.path.join(temp_folder,
                                     '{0}.lyr'.format(os.path.basename(name))))
                    layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                    all_layers.append(
                        arcpy.mapping.Layer(layer_file.getOutput(0)))
                    added_to_map += 1
                except KeyError:
                    skipped += 1
                    skipped_reasons[name] = 'No geographic information'
                    continue

    for i, item in enumerate(input_items, 1):
        try:
            # Is the item a mxd data frame.
            map_frame_name = task_utils.get_data_frame_name(item)
            if map_frame_name:
                item = item.split('|')[0].strip()

            dsc = arcpy.Describe(item)
            if dsc.dataType == 'Layer':
                layers.append(arcpy.mapping.Layer(dsc.catalogPath))

            elif dsc.dataType == 'FeatureClass' or dsc.dataType == 'ShapeFile':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                feature_layer = arcpy.MakeFeatureLayer_management(
                    item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    feature_layer,
                    os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))

            elif dsc.dataType == 'FeatureDataset':
                arcpy.env.workspace = item
                for fc in arcpy.ListFeatureClasses():
                    layer_file = arcpy.SaveToLayerFile_management(
                        arcpy.MakeFeatureLayer_management(
                            fc, '{0}_{1}'.format(fc, i)),
                        os.path.join(temp_folder, '{0}_{1}.lyr'.format(fc, i)))
                    layer = arcpy.mapping.Layer(layer_file.getOutput(0))
                    layer.name = fc
                    layers.append(layer)
                    all_layers.append(layer)

            elif dsc.dataType == 'RasterDataset':
                if os.path.basename(item) in [l.name for l in all_layers]:
                    layer_name = '{0}_{1}'.format(os.path.basename(item), i)
                else:
                    layer_name = os.path.basename(item)
                raster_layer = arcpy.MakeRasterLayer_management(
                    item, layer_name)
                layer_file = arcpy.SaveToLayerFile_management(
                    raster_layer,
                    os.path.join(temp_folder, '{0}.lyr'.format(layer_name)))
                layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))
                all_layers.append(arcpy.mapping.Layer(layer_file.getOutput(0)))

            elif dsc.catalogPath.endswith('.kml') or dsc.catalogPath.endswith(
                    '.kmz'):
                kml_name = os.path.splitext(dsc.name)[0]
                # Compare against existing layer names, not Layer objects.
                if kml_name in [l.name for l in all_layers]:
                    name = '{0}_{1}'.format(kml_name, i)
                else:
                    name = kml_name
                arcpy.KMLToLayer_conversion(dsc.catalogPath, temp_folder, name)
                layers.append(
                    arcpy.mapping.Layer(
                        os.path.join(temp_folder, '{0}.lyr'.format(name))))
                all_layers.append(
                    arcpy.mapping.Layer(
                        os.path.join(temp_folder, '{0}.lyr'.format(name))))

            elif dsc.dataType == 'MapDocument':
                input_mxd = arcpy.mapping.MapDocument(item)
                if map_frame_name:
                    df = arcpy.mapping.ListDataFrames(input_mxd,
                                                      map_frame_name)[0]
                    layers = arcpy.mapping.ListLayers(input_mxd, data_frame=df)
                else:
                    layers = arcpy.mapping.ListLayers(input_mxd)

            if layers:
                for layer in layers:
                    status_writer.send_status(
                        _('Adding layer {0}...').format(layer.name))
                    arcpy.mapping.AddLayer(data_frame, layer)
                    added_to_map += 1
                # Reset so these layers are not re-added with the next item.
                layers = []
            else:
                status_writer.send_status(
                    _('Invalid input type: {0}').format(item))
                skipped_reasons[item] = 'Invalid input type'
                skipped += 1
        except Exception as ex:
            status_writer.send_status(_('FAIL: {0}').format(repr(ex)))
            errors += 1
            errors_reasons[item] = repr(ex)

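    # The map view extent arrives as a space-delimited
    # 'XMin YMin XMax YMax' string.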
    if map_view:
        extent = map_view.split(' ')
        new_extent = data_frame.extent
        new_extent.XMin, new_extent.YMin = float(extent[0]), float(extent[1])
        new_extent.XMax, new_extent.YMax = float(extent[2]), float(extent[3])
        data_frame.extent = new_extent
    else:
        data_frame.zoomToSelectedFeatures()

    # Update text elements in map template.
    date_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                    'date')
    if date_element:
        date_element[0].text = 'Date: {0}'.format(task_utils.get_local_date())

    title_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                     'title')
    if title_element:
        title_element[0].text = map_title

    author_element = arcpy.mapping.ListLayoutElements(mxd, 'TEXT_ELEMENT',
                                                      'author')
    if author_element:
        author_element[0].text = '{0} {1}'.format(author_element[0].text,
                                                  author)

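    # The land-format templates carry corner-coordinate text elements whose
    # 'd', 'm' and 's' placeholders are replaced with degrees, minutes and
    # seconds derived from the data frame extent.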
    if map_template in ('ANSI_D_LND.mxd', 'ANSI_E_LND.mxd'):
        coord_elements = arcpy.mapping.ListLayoutElements(
            mxd, 'TEXT_ELEMENT', 'x*')
        coord_elements += arcpy.mapping.ListLayoutElements(
            mxd, 'TEXT_ELEMENT', 'y*')
        if coord_elements:
            for e in coord_elements:
                new_text = e.text
                if e.name == 'xmin':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMin)
                    if data_frame.extent.XMin > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'xmax':
                    dms = task_utils.dd_to_dms(data_frame.extent.XMax)
                    if data_frame.extent.XMax > 0:
                        new_text = new_text.replace('W', 'E')
                elif e.name == 'ymin':
                    dms = task_utils.dd_to_dms(data_frame.extent.YMin)
                    if data_frame.extent.YMin < 0:
                        new_text = new_text.replace('N', 'S')
                elif e.name == 'ymax':
                    dms = task_utils.dd_to_dms(data_frame.extent.YMax)
                    if data_frame.extent.YMax < 0:
                        new_text = new_text.replace('N', 'S')

                new_text = new_text.replace('d', str(dms[0]))
                new_text = new_text.replace('m', str(dms[1]))
                new_text = new_text.replace('s', str(dms[2]))
                e.text = new_text

    # Do this now so it does not affect zoom level or extent.
    if base_layer:
        status_writer.send_status(_('Adding basemap {0}...').format(base_map))
        arcpy.mapping.AddLayer(data_frame, base_layer, 'BOTTOM')

    if added_to_map > 0:
        status_writer.send_status(_('Exporting to PDF...'))
        arcpy.mapping.ExportToPDF(mxd,
                                  os.path.join(
                                      request['folder'],
                                      '{0}.pdf'.format(output_file_name)),
                                  layers_attributes=attribute_setting)
        # Create a thumbnail size PNG of the mxd.
        task_utils.make_thumbnail(
            mxd, os.path.join(request['folder'], '_thumb.png'), False)
    else:
        status_writer.send_state(status.STAT_FAILED,
                                 _('No results can be exported to PDF'))
        task_utils.report(os.path.join(request['folder'], '__report.json'),
                          added_to_map,
                          skipped,
                          skipped_details=skipped_reasons)
        return

    # Update state if necessary.
    if skipped > 0 or errors > 0:
        status_writer.send_state(
            status.STAT_WARNING,
            _('{0} results could not be processed').format(skipped + errors))
    task_utils.report(os.path.join(request['folder'],
                                   '__report.json'), added_to_map, skipped,
                      errors, errors_reasons, skipped_reasons)
Ejemplo n.º 30
def tmp(extension):
    """Returns a unique file path with the given extension in the arcpy
    scratch folder.
    """
    return arcpy.CreateUniqueName("tmp." + extension, arcpy.env.scratchFolder)
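
A minimal usage sketch, assuming an active arcpy session with a valid scratch folder; the source raster path is an illustrative assumption:

import arcpy

# Each call yields a fresh, non-colliding path in the scratch folder,
# e.g. tmp.tif on the first call, tmp0.tif on the next.
scratch_tif = tmp('tif')

# Hypothetical source raster (assumed path), copied to the scratch location.
arcpy.CopyRaster_management(r'C:\data\elevation.tif', scratch_tif)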