Example #1
0
def delete(
    dataset_path: Union[Path, str],
    *,
    dataset_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Delete features in the dataset.

    Args:
        dataset_path: Path to dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each delete-state.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Delete features from `%s`.", dataset_path)
    feature_states = Counter()
    edit_session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    dataset_view = DatasetView(dataset_path, dataset_where_sql=dataset_where_sql)
    with dataset_view, edit_session:
        # Every feature in the (optionally filtered) view gets removed.
        feature_states["deleted"] = dataset_view.count
        arcpy.management.DeleteRows(in_rows=dataset_view.name)
        feature_states["remaining"] = dataset.feature_count(dataset_path)
    log_entity_states("features", feature_states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Delete.")
    return feature_states
Example #2
0
def update_by_value(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    value: Any,
    dataset_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by assigning a given value.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        value: Value to assign.
        dataset_where_sql: SQL where-clause for dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by given value.",
        dataset_path,
        field_name,
    )
    update_states = Counter()
    edit_session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    update_cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=[field_name],
        where_clause=dataset_where_sql,
    )
    with edit_session, update_cursor:
        for (current_value,) in update_cursor:
            if not same_value(current_value, value):
                try:
                    update_cursor.updateRow([value])
                except RuntimeError as error:
                    raise RuntimeError(
                        f"Update cursor failed: Offending value: `{value}`"
                    ) from error

                update_states["altered"] += 1
            else:
                update_states["unchanged"] += 1

    log_entity_states("attributes", update_states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return update_states
Example #3
0
def delete_by_id(
    dataset_path: Union[Path, str],
    delete_ids: Iterable[Union[Sequence[Any], Any]],
    id_field_names: Iterable[str],
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Delete features in dataset with given IDs.

    Note:
        There is no guarantee that the ID field(s) are unique.
        Use ArcPy cursor token names for object IDs and geometry objects/properties.

    Args:
        dataset_path: Path to dataset.
        delete_ids: ID sequences for features to delete. If id_field_names contains only
            one field, IDs may be provided as non-sequence single-value.
        id_field_names: Names of the feature ID fields.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each delete-state.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Delete features in `%s` with given IDs.",
            dataset_path)
    id_field_names = list(id_field_names)
    # A generator function is called to obtain the actual ID iterable.
    if inspect.isgeneratorfunction(delete_ids):
        delete_ids = delete_ids()
    # Normalize every ID to tuple form so rows can be matched directly.
    target_ids = {
        tuple(raw_id)
        if isinstance(raw_id, Iterable) and not isinstance(raw_id, str)
        else (raw_id,)
        for raw_id in delete_ids
    }
    states = Counter()
    if not target_ids:
        LOG.log(log_level, "No IDs provided.")
        states["deleted"] = 0
    else:
        # ArcPy2.8.0: Convert Path to str.
        cursor = arcpy.da.UpdateCursor(str(dataset_path),
                                       field_names=id_field_names)
        session = Editing(
            Dataset(dataset_path).workspace_path, use_edit_session)
        with session, cursor:
            for row in cursor:
                if tuple(row) in target_ids:
                    cursor.deleteRow()
                    states["deleted"] += 1
    states["unchanged"] = dataset.feature_count(dataset_path)
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Delete.")
    return states
Example #4
0
def update_by_expression(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    expression: str,
    expression_type: str = "Python",
    dataset_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Field:
    """Update attribute values using a (single) code-expression.

    Wraps arcpy.management.CalculateField.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        expression: String expression to evaluate values from.
        expression_type: Type of code expression represents. Allowed values include:
            "Arcade", "Python", "Python3", and "SQL". Case-insensitive.
        dataset_where_sql: SQL where-clause for dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Field metadata instance for field with updated attributes.

    Raises:
        AttributeError: If given expression type invalid.
    """
    dataset_path = Path(dataset_path)
    if expression_type.upper() not in {"ARCADE", "PYTHON", "PYTHON3", "SQL"}:
        raise AttributeError("Invalid expression_type")

    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by %s expression `%s`.",
        dataset_path,
        field_name,
        expression_type,
        expression,
    )
    # CalculateField's "PYTHON" keyword is superseded by "PYTHON3".
    if expression_type.upper() == "PYTHON":
        expression_type = "PYTHON3"
    edit_session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    dataset_view = DatasetView(dataset_path, dataset_where_sql=dataset_where_sql)
    with edit_session, dataset_view:
        arcpy.management.CalculateField(
            in_table=dataset_view.name,
            field=field_name,
            expression=expression,
            expression_type=expression_type,
        )
    LOG.log(log_level, "End: Update.")
    return Field(dataset_path, field_name)
Example #5
0
def eliminate_interior_rings(
    dataset_path: Union[Path, str],
    *,
    dataset_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Eliminate interior rings of polygon features.

    Args:
        dataset_path: Path to dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each ring-eliminate-state.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Eliminate interior rings in `%s`.",
            dataset_path)
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=["SHAPE@"],
        where_clause=dataset_where_sql,
    )
    _dataset = Dataset(dataset_path)
    session = Editing(_dataset.workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for (old_geometry, ) in cursor:
            # NOTE(review): ArcPy appears to mark the boundary between a
            # part's exterior ring and its interior rings with a null point
            # inside the part array — confirm against ArcPy geometry docs.
            # A geometry with no null points has no interior rings to remove.
            if not any(None in part for part in old_geometry):
                states["unchanged"] += 1
                continue

            # Rebuild the geometry part-by-part, truncating each part at its
            # first null point so only the exterior ring is kept.
            parts = arcpy.Array()
            for old_part in old_geometry:
                if None not in old_part:
                    # Part has no interior rings; keep it as-is.
                    parts.append(old_part)
                else:
                    # Copy points up to (not including) the null separator.
                    new_part = arcpy.Array()
                    for point in old_part:
                        if not point:
                            break

                        new_part.append(point)
                    parts.append(new_part)
            # Reassemble with the dataset's spatial reference preserved.
            new_geometry = arcpy.Polygon(parts,
                                         _dataset.spatial_reference.object)
            cursor.updateRow([new_geometry])
            states["rings eliminated"] += 1
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Eliminate.")
    return states
Example #6
0
def densify(
    dataset_path: Union[Path, str],
    *,
    dataset_where_sql: Optional[str] = None,
    distance: Union[float, int],
    only_curve_features: bool = False,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Add vertices at a given distance along feature geometry segments.

    Args:
        dataset_path: Path to dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        distance: Interval at which to add vertices, in units of the dataset.
        only_curve_features: Only densify curve features if True.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each densify-state.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Densify feature geometry in `%s`.",
            dataset_path)
    _dataset = Dataset(dataset_path)
    # Densify method on geometry object assumes meters if distance not string-with-unit.
    if _dataset.spatial_reference.linear_unit != "Meter":
        distance_unit = getattr(UNIT,
                                _dataset.spatial_reference.linear_unit.lower())
        # Dimensionless magnitude of the distance expressed in meters.
        distance_with_unit = (distance * distance_unit).to(
            UNIT.meter) / UNIT.meter
    else:
        # Bug fix: `distance_with_unit` was previously left unbound when the
        # dataset already measures in meters, raising NameError at densify.
        distance_with_unit = distance
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert Path to str.
        in_table=str(dataset_path),
        field_names=["SHAPE@"],
        where_clause=dataset_where_sql,
    )
    session = Editing(_dataset.workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for (old_geometry, ) in cursor:
            if old_geometry:
                # Non-curve features are skipped entirely (not counted) when
                # only densifying curves.
                if only_curve_features and not old_geometry.hasCurves:
                    continue

                new_geometry = old_geometry.densify(
                    method="GEODESIC", distance=distance_with_unit)
                cursor.updateRow((new_geometry, ))
                states["densified"] += 1
            else:
                # Null geometry: nothing to densify.
                states["unchanged"] += 1
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Densify.")
    return states
Example #7
0
def keep_by_location(
    dataset_path: Union[Path, str],
    *,
    location_path: Union[Path, str],
    dataset_where_sql: Optional[str] = None,
    location_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Keep features where geometry overlaps location-dataset geometry.

    Args:
        dataset_path: Path to dataset.
        location_path: Path to location-dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        location_where_sql: SQL where-clause for location-dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each keep-state.
    """
    dataset_path = Path(dataset_path)
    location_path = Path(location_path)
    LOG.log(
        log_level,
        "Start: Keep features in `%s` where location overlaps `%s`.",
        dataset_path,
        location_path,
    )
    keep_states = Counter()
    edit_session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    dataset_view = DatasetView(dataset_path, dataset_where_sql=dataset_where_sql)
    location_view = DatasetView(location_path,
                                dataset_where_sql=location_where_sql)
    with edit_session, dataset_view, location_view:
        # Select overlapping features, then invert the selection so the
        # non-overlapping features are the ones removed.
        arcpy.management.SelectLayerByLocation(
            in_layer=dataset_view.name,
            overlap_type="INTERSECT",
            select_features=location_view.name,
            selection_type="NEW_SELECTION",
        )
        arcpy.management.SelectLayerByLocation(
            in_layer=dataset_view.name, selection_type="SWITCH_SELECTION")
        keep_states["deleted"] = delete(
            dataset_view.name, log_level=logging.DEBUG)["deleted"]
    log_entity_states("features", keep_states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Keep.")
    return keep_states
Example #8
0
def insert_from_iters(
    dataset_path: Union[Path, str],
    field_names: Iterable[str],
    *,
    source_features: Iterable[Sequence[Any]],
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Insert features into dataset from sequences.

    Args:
        dataset_path: Path to dataset.
        field_names: Names of fields for insert. Names must be in the same order as
            their corresponding attributes in `source_features` elements.
        source_features: Features to insert.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each insert-state.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Insert features into `%s` from sequences.",
            dataset_path)
    field_names = list(field_names)
    # A generator function is called to obtain the actual feature iterable.
    if inspect.isgeneratorfunction(source_features):
        source_features = source_features()
    insert_states = Counter()
    edit_session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    # ArcPy2.8.0: Convert Path to str.
    insert_cursor = arcpy.da.InsertCursor(in_table=str(dataset_path),
                                          field_names=field_names)
    with edit_session, insert_cursor:
        for source_feature in source_features:
            insert_cursor.insertRow(tuple(source_feature))
            insert_states["inserted"] += 1
    log_entity_states("features", insert_states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Insert.")
    return insert_states
Example #9
0
def update_by_unique_id(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    dataset_where_sql: Optional[str] = None,
    initial_number: int = 1,
    start_after_max_number: bool = False,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by assigning a unique ID.

    Existing IDs are preserved, if unique.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        dataset_where_sql: SQL where-clause for dataset subselection.
        initial_number: Initial number for a proposed ID, if using a numeric data type.
            Superseded by `start_after_max_number`.
        start_after_max_number: Initial number will be one greater than the
            maximum existing ID number if True, if using a numeric data type.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by assigning unique IDs.",
        dataset_path,
        field_name,
    )
    cursor_kwargs = {
        # ArcPy2.8.0: Convert to str.
        "in_table": str(dataset_path),
        "field_names": [field_name],
        "where_clause": dataset_where_sql,
    }
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    # First run will clear duplicate IDs & gather used IDs.
    used_ids = set()
    # BUG-UNFILED: Use separate edit sessions (not a fan of this intermediate state).
    with session, arcpy.da.UpdateCursor(**cursor_kwargs) as cursor:
        for (id_value, ) in cursor:
            if id_value in used_ids:
                # Duplicate: null it out so the second pass reassigns it.
                cursor.updateRow([None])
            else:
                used_ids.add(id_value)
    _field = Field(dataset_path, field_name)
    # Bug fix: `used_ids` can contain None (first row without an ID is added
    # above), which made `max(used_ids)` raise TypeError; it also raised
    # ValueError when the subselection was empty. Exclude None & guard empty.
    numbered_ids = {_id for _id in used_ids if _id is not None}
    if start_after_max_number and numbered_ids:
        initial_number = max(numbered_ids) + 1
    id_pool = unique_ids(
        data_type=python_type(_field.type),
        string_length=_field.length,
        initial_number=initial_number,
    )
    states = Counter()
    # Second run will fill in missing IDs.
    # Bug fix: open a fresh cursor — the first cursor was released when its
    # `with` block exited & cannot be safely re-entered.
    with session, arcpy.da.UpdateCursor(**cursor_kwargs) as cursor:
        for (id_value, ) in cursor:
            if id_value is not None:
                states["unchanged"] += 1
            else:
                # Draw proposed IDs until one is unused.
                id_value = next(id_pool)
                while id_value in used_ids:
                    id_value = next(id_pool)
                try:
                    cursor.updateRow([id_value])
                    states["altered"] += 1
                    used_ids.add(id_value)
                except RuntimeError as error:
                    raise RuntimeError(
                        f"Update cursor failed: Offending value: `{id_value}`"
                    ) from error

    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #10
0
def update_by_overlay_count(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    overlay_dataset_path: Union[Path, str],
    dataset_where_sql: Optional[str] = None,
    overlay_where_sql: Optional[str] = None,
    tolerance: Optional[float] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by count of overlay features.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        overlay_dataset_path: Path to overlay-dataset.

    Keyword Args:
        dataset_where_sql: SQL where-clause for dataset subselection.
        overlay_where_sql: SQL where-clause for overlay-dataset subselection.
        tolerance: Tolerance for coincidence, in units of the dataset. If set to None,
            will use the default tolerance for the workspace of the dataset.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    overlay_dataset_path = Path(overlay_dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by overlay feature counts from `%s`.",
        dataset_path,
        field_name,
        overlay_dataset_path,
    )
    original_tolerance = arcpy.env.XYTolerance
    view = DatasetView(dataset_path,
                       field_names=[],
                       dataset_where_sql=dataset_where_sql)
    overlay_view = DatasetView(
        overlay_dataset_path,
        field_names=[],
        dataset_where_sql=overlay_where_sql,
    )
    try:
        with view, overlay_view:
            if tolerance is not None:
                arcpy.env.XYTolerance = tolerance
            temp_output_path = unique_path("output")
            # One-to-one join: output `Join_Count` holds the count of overlay
            # features intersecting each target feature.
            arcpy.analysis.SpatialJoin(
                target_features=view.name,
                join_features=overlay_view.name,
                # ArcPy2.8.0: Convert to str.
                out_feature_class=str(temp_output_path),
                join_operation="JOIN_ONE_TO_ONE",
                join_type="KEEP_COMMON",
                match_option="INTERSECT",
            )
    finally:
        # Bug fix: restore the environment tolerance even if the join raises,
        # so a failure cannot leak the override into later operations.
        arcpy.env.XYTolerance = original_tolerance
    cursor = arcpy.da.SearchCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(temp_output_path),
        field_names=["TARGET_FID", "Join_Count"],
    )
    with cursor:
        oid_overlay_count = dict(cursor)
    # ArcPy2.8.0: Convert to str.
    arcpy.management.Delete(str(temp_output_path))
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=["OID@", field_name],
        where_clause=dataset_where_sql,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for oid, old_value in cursor:
            # Features absent from the join output had zero overlay matches.
            new_value = oid_overlay_count.get(oid, 0)
            if same_value(old_value, new_value):
                states["unchanged"] += 1
            else:
                try:
                    cursor.updateRow([oid, new_value])
                    states["altered"] += 1
                except RuntimeError as error:
                    raise RuntimeError(
                        f"Update cursor failed: Offending value: `{new_value}`"
                    ) from error

    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #11
0
def update_by_mapping(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    mapping: Union[Mapping, FunctionType],
    key_field_names: Iterable[str],
    dataset_where_sql: Optional[str] = None,
    default_value: Any = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by finding them in a mapping.

    Notes:
        Mapping key must be a tuple if an iterable.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        mapping: Mapping to get values from.
        key_field_names: Names of mapping key fields.
        dataset_where_sql: SQL where-clause for dataset subselection.
        default_value: Value to assign mapping if key value not in mapping.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    key_field_names = list(key_field_names)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by mapping with key in `%s`.",
        dataset_path,
        field_name,
        key_field_names,
    )
    # A callable mapping-factory is invoked to obtain the actual mapping.
    if isinstance(mapping, EXECUTABLE_TYPES):
        mapping = mapping()
    single_key = len(key_field_names) == 1
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=key_field_names + [field_name],
        where_clause=dataset_where_sql,
    )
    states = Counter()
    with session, cursor:
        for feature in cursor:
            # Single-field keys are looked up bare; multi-field as tuples.
            key = feature[0] if single_key else tuple(feature[:-1])
            old_value = feature[-1]
            new_value = mapping.get(key, default_value)
            if same_value(old_value, new_value):
                states["unchanged"] += 1
                continue

            try:
                cursor.updateRow(feature[:-1] + [new_value])
            except RuntimeError as error:
                raise RuntimeError(
                    f"Update cursor failed: Offending value: `{new_value}`"
                ) from error

            states["altered"] += 1
    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #12
0
def update_by_joined_value(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    key_field_names: Iterable[str],
    join_dataset_path: Union[Path, str],
    join_field_name: str,
    join_key_field_names: Iterable[str],
    dataset_where_sql: Optional[str] = None,
    join_dataset_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by referencing a joinable field in another dataset.

    `key_field_names` & `join_key_field_names` must be the same length & same order.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        key_field_names: Names of relationship key fields.
        join_dataset_path: Path to join-dataset.
        join_field_name: Name of join-field.
        join_key_field_names: Names of relationship key fields on join-dataset.
        dataset_where_sql: SQL where-clause for dataset subselection.
        join_dataset_where_sql: SQL where-clause for join-dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        AttributeError: If key_field_names & join_key_field_names have different length.
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    join_dataset_path = Path(join_dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by joined values in `%s.%s`.",
        dataset_path,
        field_name,
        join_dataset_path,
        join_field_name,
    )
    key_field_names = list(key_field_names)
    join_key_field_names = list(join_key_field_names)
    if len(key_field_names) != len(join_key_field_names):
        raise AttributeError(
            "key_field_names & join_key_field_names not same length.")

    # Build a key-tuple -> join-value lookup from the join-dataset.
    search_cursor = arcpy.da.SearchCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(join_dataset_path),
        field_names=join_key_field_names + [join_field_name],
        where_clause=join_dataset_where_sql,
    )
    with search_cursor:
        key_join_value = {row[:-1]: row[-1] for row in search_cursor}
    update_cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=key_field_names + [field_name],
        where_clause=dataset_where_sql,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, update_cursor:
        for feature in update_cursor:
            old_value = feature[-1]
            # Keys absent from the join-dataset map to None.
            new_value = key_join_value.get(tuple(feature[:-1]))
            if same_value(old_value, new_value):
                states["unchanged"] += 1
                continue

            try:
                update_cursor.updateRow(feature[:-1] + [new_value])
            except RuntimeError as error:
                raise RuntimeError(
                    f"Update cursor failed: Offending value: `{new_value}`"
                ) from error

            states["altered"] += 1
    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #13
0
def update_by_function(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    function: FunctionType,
    field_as_first_arg: bool = True,
    arg_field_names: Iterable[str] = (),
    kwarg_field_names: Iterable[str] = (),
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update attribute values by passing them to a function.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        function: Function to return values from.
        field_as_first_arg: True if field value will be the first positional argument.
        arg_field_names: Field names whose values will be the function positional
            arguments (not including primary field).
        kwarg_field_names: Field names whose names & values will be the function keyword
            arguments.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be set to. If set to None, will use spatial reference of
            the dataset.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    # Partials show all the pre-loaded arg & kwarg values, which is cumbersome.
    if isinstance(function, partial):
        function_description = f"partial version of function {function.func}"
    else:
        function_description = f"function `{function}`"
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by function `%s`.",
        dataset_path,
        field_name,
        function_description,
    )
    arg_field_names = list(arg_field_names)
    kwarg_field_names = list(kwarg_field_names)
    arg_count = len(arg_field_names)
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=arg_field_names + kwarg_field_names + [field_name],
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for feature in cursor:
            # Row layout: positional-arg fields, keyword-arg fields, target field.
            old_value = feature[-1]
            positional_args = list(feature[:arg_count])
            if field_as_first_arg:
                positional_args.insert(0, old_value)
            keyword_args = dict(zip(kwarg_field_names, feature[arg_count:-1]))
            new_value = function(*positional_args, **keyword_args)
            if same_value(old_value, new_value):
                states["unchanged"] += 1
                continue

            try:
                cursor.updateRow(feature[:-1] + [new_value])
            except RuntimeError as error:
                raise RuntimeError(
                    f"Update cursor failed: Offending value: `{new_value}`"
                ) from error

            states["altered"] += 1
    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #14
0
def update_from_iters(
    dataset_path: Union[Path, str],
    field_names: Iterable[str],
    *,
    id_field_names: Iterable[str],
    source_features: Iterable[Sequence[Any]],
    delete_missing_features: bool = True,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Update features in dataset from sequences.

    Note:
        There is no guarantee that the ID field(s) are unique.
        Use ArcPy cursor token names for object IDs and geometry objects/properties.

    Args:
        dataset_path: Path to dataset.
        field_names: Names of fields for update. Names must be in the same order as
            their corresponding attributes in `source_features` elements.
        id_field_names: Names of the feature ID fields. All ID fields must also be in
            `field_names`.
        source_features: Features to insert.
        delete_missing_features: True if update should delete features missing
            from `source_features`.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each update-state.

    Raises:
        ValueError: When `id_field_names` is not a subset of `field_names`.
    """
    dataset_path = Path(dataset_path)
    LOG.log(log_level, "Start: Update features in `%s` from sequences.",
            dataset_path)
    field_names = list(field_names)
    id_field_names = list(id_field_names)
    if not set(id_field_names).issubset(field_names):
        raise ValueError("id_field_names must be a subset of field_names")

    # Hoist the ID-field positions: `list.index` is O(n) & was previously
    # recomputed for every feature in both the source & cursor loops.
    id_indexes = [field_names.index(field_name) for field_name in id_field_names]
    if inspect.isgeneratorfunction(source_features):
        source_features = source_features()
    dataset_ids = {
        tuple(freeze_values(*_id))
        for _id in as_tuples(dataset_path, id_field_names)
    }
    id_feature = {}
    insert_features = []
    for feature in source_features:
        feature = list(freeze_values(*feature))
        _id = tuple(feature[index] for index in id_indexes)
        if _id not in dataset_ids:
            insert_features.append(feature)
        else:
            id_feature[_id] = feature
    # Dataset IDs absent from the source are deletion candidates (if enabled).
    delete_ids = (dataset_ids - id_feature.keys()
                  if delete_missing_features else set())
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    if delete_ids or id_feature:
        # ArcPy2.8.0: Convert Path to str.
        cursor = arcpy.da.UpdateCursor(in_table=str(dataset_path),
                                       field_names=field_names)
        with session, cursor:
            for feature in cursor:
                _id = tuple(
                    freeze_values(*(feature[index] for index in id_indexes)))
                if _id in delete_ids:
                    cursor.deleteRow()
                    states["deleted"] += 1
                elif _id in id_feature:
                    # Pop so duplicate dataset rows with the same ID only get
                    # updated once; later duplicates fall through as unchanged.
                    new_feature = id_feature.pop(_id)
                    if not same_feature(feature, new_feature):
                        try:
                            cursor.updateRow(new_feature)
                        except RuntimeError as error:
                            raise RuntimeError(
                                f"Row failed to update. Offending row: {new_feature}"
                            ) from error

                        states["altered"] += 1
                    else:
                        states["unchanged"] += 1
                else:
                    states["unchanged"] += 1
    if insert_features:
        cursor = arcpy.da.InsertCursor(
            # ArcPy2.8.0: Convert Path to str.
            in_table=str(dataset_path),
            field_names=field_names,
        )
        with session, cursor:
            for new_feature in insert_features:
                try:
                    cursor.insertRow(new_feature)
                except RuntimeError as error:
                    raise RuntimeError(
                        f"Row failed to insert. Offending row: {new_feature}"
                    ) from error

                states["inserted"] += 1
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #15
0
def insert_from_path(
    dataset_path: Union[Path, str],
    field_names: Optional[Iterable[str]] = None,
    *,
    source_path: Union[Path, str],
    source_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Insert features into dataset from another dataset.

    Args:
        dataset_path: Path to dataset.
        field_names: Names of fields for insert. Fields must exist in both datasets. If
            set to None, all user fields present in both datasets will be inserted,
            along with the geometry field (if present).
        source_path: Path to dataset for features to insert.
        source_where_sql: SQL where-clause for source dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each insert-state.
    """
    dataset_path = Path(dataset_path)
    source_path = Path(source_path)
    LOG.log(
        log_level,
        "Start: Insert features into `%s` from `%s`.",
        dataset_path,
        source_path,
    )
    _dataset = Dataset(dataset_path)
    source_dataset = Dataset(source_path)
    if field_names is None:
        # Default to the (case-insensitive) fields common to both datasets.
        field_names = {
            name.lower() for name in _dataset.field_names_tokenized
        } & {name.lower() for name in source_dataset.field_names_tokenized}
    else:
        field_names = {name.lower() for name in field_names}
    # OIDs & area/length "fields" have no business being part of an insert.
    # Geometry itself is handled separately in append function.
    for i_dataset in [_dataset, source_dataset]:
        for field_name in chain(*i_dataset.field_name_token.items()):
            field_names.discard(field_name)
            field_names.discard(field_name.lower())
            field_names.discard(field_name.upper())
    field_names = list(field_names)
    # Create field maps.
    # ArcGIS Pro's no-test append is case-sensitive (verified 1.0-1.1.1).
    # Avoid this problem by using field mapping.
    # BUG-000090970 - ArcGIS Pro 'No test' field mapping in Append tool does not auto-
    # map to the same field name if naming convention differs.
    field_mapping = arcpy.FieldMappings()
    for field_name in field_names:
        field_map = arcpy.FieldMap()
        # ArcPy2.8.0: Convert Path to str (matches the cursor calls elsewhere;
        # addInputField does not reliably accept Path objects).
        field_map.addInputField(str(source_path), field_name)
        field_mapping.addFieldMap(field_map)
    session = Editing(_dataset.workspace_path, use_edit_session)
    states = Counter()
    view = DatasetView(
        source_path,
        name=unique_name("view"),
        dataset_where_sql=source_where_sql,
        # Must be nonspatial to append to nonspatial table.
        force_nonspatial=(not _dataset.is_spatial),
    )
    with view, session:
        arcpy.management.Append(
            inputs=view.name,
            # ArcPy2.8.0: Convert Path to str.
            target=str(dataset_path),
            schema_type="NO_TEST",
            field_mapping=field_mapping,
        )
        states["inserted"] = view.count
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Insert.")
    return states
Example #16
0
def update_node_ids(
    dataset_path: Union[Path, str],
    *,
    from_id_field_name: str,
    to_id_field_name: str,
    dataset_where_sql: Optional[str] = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Refresh from- & to-node ID values on dataset features.

    Args:
        dataset_path: Path to the dataset.
        from_id_field_name: Name of from-node ID field.
        to_id_field_name: Name of to-node ID field.
        dataset_where_sql: SQL where-clause for dataset subselection.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each update-state.
    """
    dataset_path = Path(dataset_path)
    LOG.log(
        log_level,
        "Start: Update node IDs in `%s` (from) & `%s` (to) for `%s`.",
        from_id_field_name,
        to_id_field_name,
        dataset_path,
    )
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=["OID@", from_id_field_name, to_id_field_name],
        where_clause=dataset_where_sql,
    )
    # OID -> {"from": ..., "to": ...} node-ID mapping (with node updates applied).
    oid_node = id_node_map(
        dataset_path,
        from_id_field_name=from_id_field_name,
        to_id_field_name=to_id_field_name,
        update_nodes=True,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for feature in cursor:
            oid = feature[0]
            node = oid_node[oid]
            new_feature = (oid, node["from"], node["to"])
            # Skip the write entirely when the node IDs already match.
            if same_feature(feature, new_feature):
                states["unchanged"] += 1
                continue
            try:
                cursor.updateRow(new_feature)
            except RuntimeError as error:
                raise RuntimeError(
                    f"Row failed to update. Offending row: `{new_feature}`"
                ) from error
            states["altered"] += 1

    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #17
0
def update_rows(
    dataset_path: Union[Path, str],
    *,
    field_name: str,
    id_field_names: Iterable[str],
    cmp_dataset_path: Union[Path, str],
    cmp_field_name: Optional[str] = None,
    cmp_id_field_names: Optional[Iterable[str]] = None,
    cmp_date: Optional[Union[date, _datetime]] = None,
    date_initiated_field_name: str = "date_initiated",
    date_expired_field_name: str = "date_expired",
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Add field value changes to tracking dataset from comparison dataset.

    Args:
        dataset_path: Path to tracking dataset.
        field_name: Name of field with tracked attribute.
        id_field_names: Names of the feature ID fields.
        cmp_dataset_path: Path to comparison dataset.
        cmp_field_name: Name of field with tracked attribute in comparison dataset. If
            set to None, will assume same as field_name.
        cmp_id_field_names: Names of the feature ID fields in comparison dataset. If
            set to None, will assume same as id_field_names.
        cmp_date: Date to mark comparison change. If set to None, will set to the date
            of execution.
        date_initiated_field_name: Name of tracking-row-initiated date field.
        date_expired_field_name: Name of tracking-row-expired date field.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Feature counts for each update-state.
    """
    dataset_path = Path(dataset_path)
    cmp_dataset_path = Path(cmp_dataset_path)
    LOG.log(
        log_level,
        "Start: Update tracking rows in `%s` from `%s`.",
        dataset_path,
        cmp_dataset_path,
    )
    id_field_names = list(id_field_names)
    if cmp_field_name is None:
        cmp_field_name = field_name
    cmp_id_field_names = (id_field_names if cmp_id_field_names is None else
                          list(cmp_id_field_names))
    if cmp_date is None:
        cmp_date = date.today()
    # Rows with a NULL expired date are the "current" tracking rows.
    current_where_sql = f"{date_expired_field_name} IS NULL"
    # Feature-ID tuple -> currently-tracked value.
    id_current_value = {
        row[:-1]: row[-1]
        for row in features.as_tuples(
            dataset_path,
            field_names=id_field_names + [field_name],
            dataset_where_sql=current_where_sql,
        )
    }
    # Feature-ID tuple -> value in the comparison dataset.
    id_cmp_value = {
        row[:-1]: row[-1]
        for row in features.as_tuples(cmp_dataset_path,
                                      field_names=cmp_id_field_names +
                                      [cmp_field_name])
    }
    changed_ids = set()
    # Current IDs absent from the comparison dataset get expired.
    expired_ids = {_id for _id in id_current_value if _id not in id_cmp_value}
    new_rows = []
    for _id, value in id_cmp_value.items():
        if _id not in id_current_value:
            # Newly-appearing ID: start a tracking row for it.
            new_rows.append(_id + (value, cmp_date))
        elif not same_value(value, id_current_value[_id]):
            # Value changed: expire the old row & add a replacement row.
            changed_ids.add(_id)
            new_rows.append(_id + (value, cmp_date))
    # ArcPy2.8.0: Convert Path to str.
    cursor = arcpy.da.UpdateCursor(
        in_table=str(dataset_path),
        field_names=id_field_names + [field_name, date_expired_field_name],
        where_clause=current_where_sql,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    states = Counter()
    with session, cursor:
        for row in cursor:
            _id = tuple(row[:len(id_field_names)])
            if _id in changed_ids or _id in expired_ids:
                # Keep the tracked value (row[-2]); stamp the expired date.
                cursor.updateRow(_id + (row[-2], cmp_date))
            else:
                states["unchanged"] += 1
    # Insert new/replacement tracking rows with the initiated date stamped.
    features.insert_from_iters(
        dataset_path,
        field_names=id_field_names + [field_name, date_initiated_field_name],
        source_features=new_rows,
        use_edit_session=use_edit_session,
        log_level=logging.DEBUG,
    )
    states["changed"] = len(changed_ids)
    states["expired"] = len(expired_ids)
    log_entity_states("features", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states
Example #18
0
def update_by_field(
    dataset_path: Union[Path, str],
    field_name: str,
    *,
    source_field_name: str,
    dataset_where_sql: Optional[str] = None,
    spatial_reference_item: SpatialReferenceSourceItem = None,
    use_edit_session: bool = False,
    log_level: int = logging.INFO,
) -> Counter:
    """Copy attribute values into a field from another field on the same dataset.

    Args:
        dataset_path: Path to dataset.
        field_name: Name of field.
        source_field_name: Name of field to get values from.
        dataset_where_sql: SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for any geometry
            properties will be set to. If set to None, will use spatial reference of
            the dataset.
        use_edit_session: True if edits are to be made in an edit session.
        log_level: Level to log the function at.

    Returns:
        Attribute counts for each update-state.

    Raises:
        RuntimeError: If attribute cannot be updated.
    """
    dataset_path = Path(dataset_path)
    LOG.log(
        log_level,
        "Start: Update attributes in `%s.%s` by field `%s`.",
        dataset_path,
        field_name,
        source_field_name,
    )
    session = Editing(Dataset(dataset_path).workspace_path, use_edit_session)
    cursor = arcpy.da.UpdateCursor(
        # ArcPy2.8.0: Convert to str.
        in_table=str(dataset_path),
        field_names=[field_name, source_field_name],
        where_clause=dataset_where_sql,
        spatial_reference=SpatialReference(spatial_reference_item).object,
    )
    states = Counter()
    with session, cursor:
        for current_value, new_value in cursor:
            # Skip the write when the target already matches the source.
            if same_value(current_value, new_value):
                states["unchanged"] += 1
                continue
            try:
                # Writing `new_value` back to the source field is a no-op.
                cursor.updateRow([new_value, new_value])
            except RuntimeError as error:
                raise RuntimeError(
                    f"Update cursor failed: Offending value: `{new_value}`"
                ) from error
            states["altered"] += 1

    log_entity_states("attributes", states, logger=LOG, log_level=log_level)
    LOG.log(log_level, "End: Update.")
    return states