Example #1
def as_values(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
) -> Iterator[Any]:
    """Generate attribute values.

    Notes:
        Use ArcPy cursor token names for object IDs and geometry objects/properties.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be derived. If set to None, will use spatial reference of
            the dataset.
    """
    dataset_path = Path(dataset_path)
    cursor = arcpy.da.SearchCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=[field_name],
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    with cursor:
        for (value,) in cursor:
            yield value
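A minimal usage sketch for `as_values`; it assumes the function above is in scope, and the geodatabase path, field name, and where-clause below are hypothetical placeholders.

# Hypothetical dataset path, field name, and where-clause.
for value in as_values(
    "C:/data/example.gdb/parcels",
    field_name="ZONING",
    dataset_where_sql="CITY_NAME = 'Portland'",
):
    print(value)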
Example #2
def as_tuples(
    dataset_path: Union[Path, str],
    field_names: Iterable[str],
    *,
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
) -> Iterator[tuple]:
    """Generate tuples of feature attribute values.

    Notes:
        Use ArcPy cursor token names for object IDs and geometry objects/properties.

    Args:
        dataset_path: Path to dataset.
        field_names: Names of fields to include in the generated tuples. Attribute
            values will appear at the tuple index matching the position of their field
            name here.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be derived. If set to None, will use spatial reference of
            the dataset.
    """
    field_names = list(field_names)
    dataset_path = Path(dataset_path)
    cursor = arcpy.da.SearchCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=field_names,
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    with cursor:
        yield from cursor
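A minimal usage sketch for `as_tuples`, unpacking each tuple in field-name order; the dataset path and field names are hypothetical, and "SHAPE@" is the ArcPy cursor token for the geometry object.

# Hypothetical dataset path; geometry arrives as an ArcPy geometry object.
for parcel_id, geometry in as_tuples(
    "C:/data/example.gdb/parcels",
    field_names=["PARCEL_ID", "SHAPE@"],
):
    print(parcel_id, geometry.centroid if geometry else None)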
Example #3
def table_to_points(
    dataset_path: Union[Path, str],
    *,
    x_field_name: str,
    y_field_name: str,
    z_field_name: Optional[str] = None,
    output_path: Union[Path, str],
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = 4326,
    log_level: int = logging.INFO,
) -> Counter:
    """Convert coordinate table to a new point dataset.

    Args:
        dataset_path: Path to dataset.
        x_field_name: Name of field with x-coordinate.
        y_field_name: Name of field with y-coordinate.
        z_field_name: Name of field with z-coordinate.
        output_path: Path to output dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference of the output
            geometry will be derived. Default is 4326 (EPSG code for unprojected WGS84).
        log_level: Level to log the function at.

    Returns:
        Feature counts for original and output datasets.
    """
    dataset_path = Path(dataset_path)
    output_path = Path(output_path)
    LOG.log(
        log_level,
        "Start: Convert table rows `%s` to points in output `%s`.",
        dataset_path,
        output_path,
    )
    layer_name = unique_name()
    states = Counter()
    states["in original dataset"] = dataset.feature_count(dataset_path)
    view = DatasetView(dataset_path, dataset_where_sql=dataset_where_sql)
    # Assumes DatasetView creates its layer view (applying the where-clause) on entry.
    with view:
        arcpy.management.MakeXYEventLayer(
            table=view.name,
            out_layer=layer_name,
            in_x_field=x_field_name,
            in_y_field=y_field_name,
            in_z_field=z_field_name,
            spatial_reference=SpatialReference(spatial_reference_item).object,
        )
        dataset.copy(layer_name, output_path=output_path, log_level=logging.DEBUG)
    arcpy.management.Delete(layer_name)
    states["in output"] = dataset.feature_count(output_path)
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Convert.")
    return states
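A minimal usage sketch for `table_to_points`, converting a table of longitude/latitude coordinates into a point dataset; the paths and coordinate field names are hypothetical.

# Hypothetical paths & coordinate fields; 4326 = unprojected WGS84.
states = table_to_points(
    "C:/data/example.gdb/site_coordinates",
    x_field_name="LONGITUDE",
    y_field_name="LATITUDE",
    output_path="C:/data/example.gdb/site_points",
    spatial_reference_item=4326,
)
print(dict(states))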
Example #4
def project(
    dataset_path: Union[Path, str],
    *,
    output_path: Union[Path, str],
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = 4326,
    log_level: int = logging.INFO,
) -> Counter:
    """Project dataset features to a new dataset.

    Args:
        dataset_path: Path to dataset.
        output_path: Path to output dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference of the output
            geometry will be derived. Default is 4326 (EPSG code for unprojected WGS84).
        log_level: Level to log the function at.

    Returns:
        Feature counts for original and output datasets.
    """
    dataset_path = Path(dataset_path)
    output_path = Path(output_path)
    spatial_reference = SpatialReference(spatial_reference_item)
    LOG.log(
        log_level,
        "Start: Project `%s` to %s in output `%s`.",
        dataset_path,
        spatial_reference.name,
        output_path,
    )
    _dataset = Dataset(dataset_path)
    states = Counter()
    states["in original dataset"] = dataset.feature_count(dataset_path)
    # Project tool ignores view selections, so we create empty output & insert features.
    dataset.create(
        dataset_path=output_path,
        field_metadata_list=_dataset.user_fields,
        geometry_type=_dataset.geometry_type,
        spatial_reference_item=spatial_reference,
        log_level=logging.DEBUG,
    )
    features.insert_from_path(
        output_path,
        field_names=_dataset.user_field_names,
        source_path=dataset_path,
        source_where_sql=dataset_where_sql,
        log_level=logging.DEBUG,
    )
    states["in output"] = dataset.feature_count(output_path)
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Project.")
    return states
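A minimal usage sketch for `project`, reprojecting a dataset to WGS84; the input and output paths are hypothetical.

# Hypothetical paths; output geometry will be in EPSG:4326.
states = project(
    "C:/data/example.gdb/parcels",
    output_path="C:/data/example.gdb/parcels_wgs84",
    spatial_reference_item=4326,
)
print(dict(states))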
Example #5
def create(
    dataset_path: Union[Path, str],
    *,
    field_metadata_list: Optional[Iterable[Union[Field, dict]]] = None,
    geometry_type: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = 4326,
    log_level: int = logging.INFO,
) -> Dataset:
    """Create new dataset.

    Args:
        dataset_path: Path to dataset.
        field_metadata_list: Collection of field metadata instances or mappings.
        geometry_type: Type of geometry, if a spatial dataset. Will create a nonspatial
            dataset if set to None.
        spatial_reference_item: Item from which the spatial reference of the output
            geometry will be derived. Default is 4326 (EPSG code for unprojected WGS84).
        log_level: Level to log the function at.

    Returns:
        Dataset metadata instance for created dataset.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Create dataset `%s`.", dataset_path)
    if geometry_type:
        if spatial_reference_item is None:
            spatial_reference_item = 4326
        # ArcPy2.8.0: Convert Path to str.
        arcpy.management.CreateFeatureclass(
            out_path=str(dataset_path.parent),
            out_name=dataset_path.name,
            geometry_type=geometry_type,
            has_z="DISABLED",
            spatial_reference=SpatialReference(spatial_reference_item).object,
        )
    else:
        # ArcPy2.8.0: Convert Path to str.
        arcpy.management.CreateTable(
            out_path=str(dataset_path.parent),
            out_name=dataset_path.name,
        )
    if field_metadata_list:
        for field_metadata in field_metadata_list:
            if isinstance(field_metadata, Field):
                field_metadata = field_metadata.field_as_dict
            add_field(dataset_path, log_level=logging.DEBUG, **field_metadata)
    LOG.log(log_level, "End: Create.")
    return Dataset(dataset_path)
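A minimal usage sketch for `create`; the path is hypothetical, and the field-metadata mapping assumes its keys match the keyword arguments of `add_field` (not shown here).

# Hypothetical path; field metadata keys assumed to match `add_field` keywords.
dataset_meta = create(
    "C:/data/example.gdb/survey_points",
    field_metadata_list=[{"name": "SITE_ID", "type": "LONG"}],
    geometry_type="POINT",
    spatial_reference_item=4326,
)
print(dataset_meta)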
Example #6
def as_dicts(
    url: str,
    *,
    field_names: Optional[Iterable[str]] = None,
    service_where_sql: Optional[str] = None,
    include_geometry: bool = True,
    spatial_reference_item: SpatialReferenceSourceItem = None,
) -> Iterator[dict]:
    """Generate mappings of feature attribute name to value.

    Notes:
        Use ArcPy cursor token names for object IDs and geometry objects/properties.

    Args:
        url: URL for the service endpoint.
        field_names: Names of fields to include in generated dictionary. Names will be
            the keys in the dictionary mapping to their attribute values. If set to
            None, all fields will be included. Do not include the geometry field; use
            `include_geometry` to have it added.
        service_where_sql: SQL where-clause for service subselection.
        include_geometry: Add geometry attribute to dictionary under "SHAPE@" key if
            True.
        spatial_reference_item: Item from which the spatial reference of the output
            geometry will be derived. If set to None, will use spatial reference of the
            service.
    """
    # `spatial_reference_item = None` will return instance with wkid being None.
    wkid = SpatialReference(spatial_reference_item).wkid
    feature_layer = arcgis.features.FeatureLayer(url)
    feature_set = feature_layer.query(
        where=service_where_sql if service_where_sql else "1=1",
        out_fields="*" if field_names is None else list(field_names),
        out_sr=wkid,
    )
    for feature in feature_set.features:
        feature_dict = feature.attributes
        if include_geometry:
            if "spatialReference" not in feature.geometry:
                feature.geometry["spatialReference"] = {"wkid": wkid}
            feature_dict["SHAPE@"] = arcpy.AsShape(feature.geometry,
                                                   esri_json=True)
        yield feature_dict
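A minimal usage sketch for the service-based `as_dicts`; the feature-service URL, field names, and where-clause below are hypothetical placeholders.

# Hypothetical feature-service URL; geometry is returned under the "SHAPE@" key.
url = "https://example.com/arcgis/rest/services/Parcels/FeatureServer/0"
for feature in as_dicts(
    url,
    field_names=["PARCEL_ID", "ZONING"],
    service_where_sql="ZONING = 'R1'",
):
    print(feature["PARCEL_ID"], feature["SHAPE@"])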
Example #7
def as_dicts(
    dataset_path: Union[Path, str],
    field_names: Optional[Iterable[str]] = None,
    *,
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
) -> Iterator[dict]:
    """Generate dictionaries of feature attribute name to their value.

    Notes:
        Use ArcPy cursor token names for object IDs and geometry objects/properties.

    Args:
        dataset_path: Path to dataset.
        field_names: Names of fields to include in generated dictionary. Names will be
            the keys in the dictionary mapping to their attribute values. If set to
            None, all fields will be included.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be derived. If set to None, will use spatial reference of
            the dataset.
    """
    dataset_path = Path(dataset_path)
    if field_names:
        field_names = list(field_names)
    else:
        field_names = Dataset(dataset_path).field_names_tokenized
    cursor = arcpy.da.SearchCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=field_names,
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    with cursor:
        for feature in cursor:
            yield dict(zip(cursor.fields, feature))
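A minimal usage sketch for the dataset-based `as_dicts`; the path and field names are hypothetical. Omitting `field_names` would include every (tokenized) field in each dictionary.

# Hypothetical dataset path; dictionary keys match the field names given.
for feature in as_dicts(
    "C:/data/example.gdb/parcels",
    field_names=["PARCEL_ID", "SHAPE@"],
):
    print(feature["PARCEL_ID"], feature["SHAPE@"])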
Example #8
def closest_facility_route(
    dataset_path: Union[Path, str],
    *,
    id_field_name: str,
    facility_path: Union[Path, str],
    facility_id_field_name: str,
    network_path: Union[Path, str],
    dataset_where_sql: Optional[str] = None,
    facility_where_sql: Optional[str] = None,
    max_cost: Optional[Union[float, int]] = None,
    travel_from_facility: bool = False,
    travel_mode: str,
) -> Iterator[Dict[str, Any]]:
    """Generate route info dictionaries for closest facility to each location feature.

    Args:
        dataset_path: Path to dataset.
        id_field_name: Name of dataset ID field.
        facility_path: Path to facility dataset.
        facility_id_field_name: Name of facility dataset ID field.
        network_path: Path to network dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        facility_where_sql: SQL where-clause for the facility dataset subselection.
        max_cost: Maximum travel cost the search will allow, in the units of the cost
            attribute.
        travel_from_facility: Perform the analysis travelling from the facility if True,
            rather than toward the facility.
        travel_mode: Name of the network travel mode to use. Travel mode must exist in
            the network dataset.

    Yields:
        Closest facility route details.
        Keys:
            * dataset_id
            * facility_id
            * cost - Cost of route, in units of travel mode impedance.
            * geometry - Route geometry, in spatial reference of dataset.

    Raises:
        RuntimeError: When analysis fails.
    """
    dataset_path = Path(dataset_path)
    facility_path = Path(facility_path)
    network_path = Path(network_path)
    analysis = arcpy.nax.ClosestFacility(network_path)
    analysis.defaultImpedanceCutoff = max_cost
    distance_units = UNIT_PLURAL[SpatialReference(dataset_path).linear_unit]
    analysis.distanceUnits = getattr(arcpy.nax.DistanceUnits, distance_units)
    analysis.ignoreInvalidLocations = True
    if travel_from_facility:
        analysis.travelDirection = arcpy.nax.TravelDirection.FromFacility
    # ArcPy2.8.0: Convert Path to str.
    analysis.travelMode = arcpy.nax.GetTravelModes(str(network_path))[travel_mode]
    # Load facilities.
    field = Field(
        facility_path,
        Dataset(facility_path).oid_field_name
        if facility_id_field_name.upper() == "OID@"
        else facility_id_field_name,
    )
    field_description = [
        "source_id",
        field.type if field.type != "OID" else "LONG",
        "#",
        field.length,
        "#",
        "#",
    ]
    analysis.addFields(
        arcpy.nax.ClosestFacilityInputDataType.Facilities, [field_description]
    )
    cursor = analysis.insertCursor(
        arcpy.nax.ClosestFacilityInputDataType.Facilities,
        field_names=["source_id", "SHAPE@"],
    )
    _features = features.as_tuples(
        facility_path,
        field_names=[facility_id_field_name, "SHAPE@"],
        dataset_where_sql=facility_where_sql,
    )
    with cursor:
        for feature in _features:
            cursor.insertRow(feature)
    # Load dataset locations.
    field = Field(
        dataset_path,
        Dataset(dataset_path).oid_field_name
        if id_field_name.upper() == "OID@"
        else id_field_name,
    )
    field_description = [
        "source_id",
        field.type if field.type != "OID" else "LONG",
        "#",
        field.length,
        "#",
        "#",
    ]
    analysis.addFields(
        arcpy.nax.ClosestFacilityInputDataType.Incidents, [field_description]
    )
    cursor = analysis.insertCursor(
        arcpy.nax.ClosestFacilityInputDataType.Incidents,
        field_names=["source_id", "SHAPE@"],
    )
    _features = features.as_tuples(
        dataset_path,
        field_names=[id_field_name, "SHAPE@"],
        dataset_where_sql=dataset_where_sql,
    )
    with cursor:
        for feature in _features:
            cursor.insertRow(feature)
    # Solve & generate.
    result = analysis.solve()
    if not result.solveSucceeded:
        for message in result.solverMessages(arcpy.nax.MessageSeverity.All):
            LOG.error(message)
        raise RuntimeError("Closest facility analysis failed")

    facility_oid_id = dict(
        result.searchCursor(
            output_type=arcpy.nax.ClosestFacilityOutputDataType.Facilities,
            field_names=["FacilityOID", "source_id"],
        )
    )
    location_oid_id = dict(
        result.searchCursor(
            output_type=arcpy.nax.ClosestFacilityOutputDataType.Incidents,
            field_names=["IncidentOID", "source_id"],
        )
    )
    keys = ["FacilityOID", "IncidentOID", f"Total_{distance_units}", "SHAPE@"]
    cursor = result.searchCursor(
        output_type=arcpy.nax.ClosestFacilityOutputDataType.Routes, field_names=keys
    )
    with cursor:
        for row in cursor:
            route = dict(zip(keys, row))
            yield {
                "dataset_id": location_oid_id[route["IncidentOID"]],
                "facility_id": facility_oid_id[route["FacilityOID"]],
                "cost": route[f"Total_{distance_units}"],
                "geometry": route["SHAPE@"],
            }
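A minimal usage sketch for `closest_facility_route`; the dataset, facility, and network paths, the ID field names, and the travel-mode name are hypothetical and would need to exist in a real network dataset.

# Hypothetical paths, ID fields, and travel mode.
routes = closest_facility_route(
    "C:/data/example.gdb/incidents",
    id_field_name="INCIDENT_ID",
    facility_path="C:/data/example.gdb/fire_stations",
    facility_id_field_name="STATION_ID",
    network_path="C:/data/example.gdb/streets_nd",
    travel_mode="Driving Time",
)
for route in routes:
    print(route["dataset_id"], route["facility_id"], route["cost"])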
Example #9
def update_by_function(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    function: Union[FunctionType, partial],
    field_as_first_arg: bool = True,
    arg_field_names: Iterable[str] = (),
    kwarg_field_names: Iterable[str] = (),
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by passing them to a function.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        function: Function to return values from.
        field_as_first_arg: True if field value will be the first positional argument.
        arg_field_names: Field names whose values will be the function positional
            arguments (not including primary field).
        kwarg_field_names: Field names whose names & values will be the function keyword
            arguments.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be derived. If set to None, will use spatial reference of
            the dataset.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by function `%s`.",
        dataset_path,
        field_name,
        # Partials show all the pre-loaded arg & kwarg values, which is cumbersome.
        "partial version of function {}".format(function.func) if isinstance(
            function, partial) else "function `{}`".format(function),
    )
    arg_field_names = list(arg_field_names)
    kwarg_field_names = list(kwarg_field_names)
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=arg_field_names + kwarg_field_names + [field_name],
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for feature in cursor:
            old_value = feature[-1]
            args = feature[:len(arg_field_names)]
            if field_as_first_arg:
                args = [old_value] + args
            kwargs = dict(zip(kwarg_field_names, feature[len(arg_field_names) : -1]))
            new_value = function(*args, **kwargs)
            if same_value(old_value, new_value):
                states["unchanged"] += 1
            else:
                try:
                    cursor.updateRow(feature[:-1] + [new_value])
                    states["altered"] += 1
                except RuntimeError as error:
                    raise RuntimeError(
                        f"Update cursor failed: Offending value: `{new_value}`"
                    ) from error

    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
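A minimal usage sketch for `update_by_function`, deriving one field from another with a plain Python function; the path and field names are hypothetical.

# Hypothetical path & fields: recompute LENGTH_MILES from LENGTH_KM.
def kilometers_to_miles(kilometers):
    return None if kilometers is None else kilometers * 0.621371

states = update_by_function(
    "C:/data/example.gdb/roads",
    field_name="LENGTH_MILES",
    function=kilometers_to_miles,
    field_as_first_arg=False,
    arg_field_names=["LENGTH_KM"],
)
print(dict(states))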
Example #10
def update_by_field(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    source_field_name: str,
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values with values from another field.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        source_field_name: Name of field to get values from.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be derived. If set to None, will use spatial reference of
            the dataset.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by field `%s`.",
        dataset_path,
        field_name,
        source_field_name,
    )
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=[field_name, source_field_name],
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for old_value, new_value in cursor:
            if same_value(old_value, new_value):
                states["unchanged"] += 1
            else:
                try:
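                    # Source field is rewritten with its own value; only `field_name` changes.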
                    cursor.updateRow([new_value, new_value])
                    states["altered"] += 1
                except RuntimeError as error:
                    raise RuntimeError(
                        f"Update cursor failed: Offending value: `{new_value}`"
                    ) from error

    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
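A minimal usage sketch for `update_by_field`, copying values from one field into another; the path and field names are hypothetical.

# Hypothetical path & fields: copy ZONE_CODE values into ZONE_LABEL.
states = update_by_field(
    "C:/data/example.gdb/parcels",
    field_name="ZONE_LABEL",
    source_field_name="ZONE_CODE",
)
print(dict(states))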