Example #1
    def extract(self):
        """Extract review features.

        Returns:
            arcetl.diff.Differ: Reference to instance.
        """
        # Clear old attributes.
        self._id_attr.clear()
        for tag in self._dataset_tags:
            feats = attributes.as_dicts(
                dataset_path=self._dataset[tag]["path"],
                field_names=self._keys["load"],
                dataset_where_sql=self._dataset[tag]["where_sql"],
                spatial_reference_item=self._dataset["init"]["spatial_reference"],
            )
            for feat in feats:
                id_val = tuple(freeze_values(*(feat[key] for key in self._keys["id"])))
                self._id_attr[tag][id_val] = feat
        # Add overlay attributes.
        for tag in self._dataset_tags:
            view = DatasetView(
                dataset_path=self._dataset[tag]["path"],
                dataset_where_sql=self._dataset[tag]["where_sql"],
                field_names=self._keys["id"],
            )
            with view:
                for overlay in self._dataset["overlays"]:
                    field_maps = arcpy.FieldMappings()
                    for path, keys in [
                        (view.name, self._keys["id"]),
                        (overlay["path"], overlay["keys"]),
                    ]:
                        for key in keys:
                            field_map = arcpy.FieldMap()
                            field_map.addInputField(path, key)
                            field_maps.addFieldMap(field_map)
                    output_path = unique_path()
                    arcpy.analysis.SpatialJoin(
                        target_features=view.name,
                        join_features=overlay["path"],
                        out_feature_class=output_path,
                        field_mapping=field_maps,
                    )
                    for feat in attributes.as_dicts(
                        output_path, field_names=(self._keys["id"] + overlay["keys"])
                    ):
                        id_val = tuple(
                            freeze_values(*(feat[key] for key in self._keys["id"]))
                        )
                        if id_val in self._id_attr[tag]:
                            for key in overlay["keys"]:
                                # Use (path, field name) for attribute key.
                                self._id_attr[tag][id_val][
                                    (overlay["path"], key)
                                ] = feat[key]
                    arcpy.management.Delete(output_path)
        return self
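A minimal usage sketch of this method follows; the Differ constructor is not shown in this example, so its argument names and the dataset paths below are assumptions:

# Hypothetical setup: Differ's real __init__ signature is not shown above.
differ = Differ(
    init_dataset_path="C:/data/parcels_2023.gdb/parcels",
    new_dataset_path="C:/data/parcels_2024.gdb/parcels",
    identifier_field_names=["parcel_id"],
)
# extract() returns the instance, so it chains with later Differ steps.
differ.extract()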
Example #2
def feature_count(dataset_path, **kwargs):
    """Return number of features in dataset.

    Args:
        dataset_path (str): Path of the dataset.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.

    Returns:
        int: Number of features.
    """
    kwargs.setdefault("dataset_where_sql")
    view = DatasetView(dataset_path, **kwargs)
    with view:
        return view.count
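A short usage sketch, assuming feature_count is in scope; the path and where-clause are placeholders:

# Count only the features matching the subselection (path is a placeholder).
count = feature_count(
    "C:/data/city.gdb/roads", dataset_where_sql="SPEED_LIMIT > 25"
)
print(count)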
Example #3
def copy(dataset_path, output_path, **kwargs):
    """Copy features into a new dataset.

    Args:
        dataset_path (str): Path of the dataset.
        output_path (str): Path of output dataset.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        schema_only (bool): Copy only the schema, omitting the data, if True.
            Default is False.
        overwrite (bool): Overwrite the output dataset if it exists, if True. Default is
            False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.

    Raises:
        ValueError: If dataset type not supported.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("schema_only", False)
    kwargs.setdefault("overwrite", False)
    if kwargs["schema_only"]:
        kwargs["dataset_where_sql"] = "0=1"
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Copy dataset %s to %s.", dataset_path, output_path)
    meta = {"dataset": dataset_metadata(dataset_path)}
    view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    with view:
        if meta["dataset"]["is_spatial"]:
            exec_copy = arcpy.management.CopyFeatures
        elif meta["dataset"]["is_table"]:
            exec_copy = arcpy.management.CopyRows
        else:
            raise ValueError(
                "Dataset type of {} not supported.".format(dataset_path)
            )

        if kwargs["overwrite"] and arcpy.Exists(output_path):
            delete(output_path, log_level=None)
        exec_copy(view.name, output_path)
    log("End: Copy.")
    return Counter(copied=feature_count(output_path))
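A usage sketch, assuming copy is in scope; paths and the where-clause are placeholders:

# Copy a subset of features to a new dataset, replacing any prior output.
counts = copy(
    "C:/data/city.gdb/roads",
    output_path="C:/data/scratch.gdb/arterials",
    dataset_where_sql="ROAD_CLASS = 'arterial'",
    overwrite=True,
)
print(counts["copied"])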
Example #4
def update_by_expression(dataset_path, field_name, expression, **kwargs):
    """Update attribute values using a (single) code-expression.

    Wraps arcpy.management.CalculateField.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        expression (str): Python string expression to evaluate values from.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True. Default is
            False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Name of the field updated.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s using expression: `%s`.",
        field_name,
        dataset_path,
        expression,
    )
    meta = {"dataset": dataset_metadata(dataset_path)}
    session = Editor(meta["dataset"]["workspace_path"], kwargs["use_edit_session"])
    dataset_view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    with session, dataset_view:
        arcpy.management.CalculateField(
            in_table=dataset_view.name,
            field=field_name,
            expression=expression,
            expression_type="python_9.3",
        )
    log("End: Update.")
    return field_name
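A usage sketch, assuming update_by_expression is in scope; the path and field names are placeholders. The expression uses the !FIELD! syntax that CalculateField's python_9.3 expression type expects:

# Uppercase the street-name values for the selected rows.
update_by_expression(
    "C:/data/city.gdb/roads",
    field_name="STREET_NAME",
    expression="!STREET_NAME!.upper()",
    dataset_where_sql="STREET_NAME IS NOT NULL",
)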
Example #5
def planarize(dataset_path, output_path, **kwargs):
    """Planarize feature geometry into lines.

    Note:
        This method does not create topological linework; however, it does carry
        all attributes along, rather than just an ID attribute.

        Because this method breaks the new line geometry at intersections, it is
        useful for splitting line features that cross one another.

    Args:
        dataset_path (str): Path of the dataset.
        output_path (str): Path of the output dataset.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        tolerance (float): Tolerance for coincidence, in units of the dataset.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Path of the converted dataset.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("tolerance")
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Planarize geometry in %s to lines in %s.", dataset_path, output_path)
    view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    with view:
        arcpy.management.FeatureToLine(
            in_features=view.name,
            out_feature_class=output_path,
            cluster_tolerance=kwargs["tolerance"],
            attributes=True,
        )
    log("End: Planarize.")
    return output_path
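A usage sketch, assuming planarize is in scope; the paths and tolerance value are placeholders:

# Split crossing line features at their intersections, keeping attributes.
planarize(
    "C:/data/city.gdb/streets",
    output_path="C:/data/scratch.gdb/street_segments",
    tolerance=0.01,
)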
Example #6
def polygons_to_lines(dataset_path, output_path, topological=False, **kwargs):
    """Convert geometry from polygons to lines.

    Note:
        If topological is set to True, shared outlines will become a single,
        separate feature. Note that one cannot pass attributes to a topological
        transformation (as the values would not apply to all adjacent features).

        If an ID field name is specified, the output dataset will identify the
        input features that defined each line feature using the name & values from
        the provided field. This option is ignored when the output is
        non-topological lines, since the field carries over with the rest of the
        attributes.

    Args:
        dataset_path (str): Path of the dataset.
        output_path (str): Path of the output dataset.
        topological (bool): Flag to indicate lines should be topological, or merged
            where lines overlap.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        id_field_name (str): Name of the field to apply ID to lines from.
        tolerance (float): Tolerance for coincidence, in units of the dataset.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Path of the converted dataset.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("id_field_name")
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Convert polgyons in %s to lines in %s.", dataset_path, output_path)
    meta = {
        "dataset": dataset_metadata(dataset_path),
        "orig_tolerance": arcpy.env.XYTolerance,
    }
    view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    with view:
        if "tolerance" in kwargs:
            arcpy.env.XYTolerance = kwargs["tolerance"]
        arcpy.management.PolygonToLine(
            in_features=view.name,
            out_feature_class=output_path,
            neighbor_option=topological,
        )
        if "tolerance" in kwargs:
            arcpy.env.XYTolerance = meta["orig_tolerance"]
    if topological:
        for side in ["left", "right"]:
            meta[side] = {"oid_key": side.upper() + "_FID"}
            if kwargs["id_field_name"]:
                meta[side]["id_field"] = next(
                    field
                    for field in meta["dataset"]["fields"]
                    if field["name"].lower() == kwargs["id_field_name"].lower()
                )
                meta[side]["id_field"]["name"] = side + "_" + kwargs["id_field_name"]
                # Cannot create an OID-type field, so force to long.
                if meta[side]["id_field"]["type"].lower() == "oid":
                    meta[side]["id_field"]["type"] = "long"
                dataset.add_field_from_metadata(
                    output_path, meta[side]["id_field"], log_level=None
                )
                attributes.update_by_joined_value(
                    output_path,
                    field_name=meta[side]["id_field"]["name"],
                    join_dataset_path=dataset_path,
                    join_field_name=kwargs["id_field_name"],
                    on_field_pairs=[
                        (meta[side]["oid_key"], meta["dataset"]["oid_field_name"])
                    ],
                    log_level=None,
                )
            dataset.delete_field(output_path, meta[side]["oid_key"], log_level=None)
    else:
        dataset.delete_field(output_path, "ORIG_FID", log_level=None)
    log("End: Convert.")
    return output_path
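A usage sketch, assuming polygons_to_lines is in scope; the paths and ID field name are placeholders:

# Build topological boundary lines, tagging each line with the IDs of the
# polygons on its left and right sides.
polygons_to_lines(
    "C:/data/city.gdb/parcels",
    output_path="C:/data/scratch.gdb/parcel_boundaries",
    topological=True,
    id_field_name="parcel_id",
)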
Example #7
def update_by_overlay(
    dataset_path, field_name, overlay_dataset_path, overlay_field_name, **kwargs
):
    """Update attribute values by finding overlay feature value.

    Note:
        Since only one value will be selected in the overlay, operations with multiple
        overlaying features will respect the geoprocessing environment merge rule. This
        rule generally defaults to the value of the "first" feature.

        Only one overlay flag at a time can be used (e.g. "overlay_most_coincident",
        "overlay_central_coincident"). If multiple are set to True, the first one
        referenced in the code will be used. If no overlay flags are set, the operation
        will perform a basic intersection check, and the result will follow the
        geoprocessing environment merge rule for the update field.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        overlay_dataset_path (str): Path of the overlay-dataset.
        overlay_field_name (str): Name of the overlay-field.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        overlay_central_coincident (bool): Overlay will use the centrally-coincident
            value if True. Default is False.
        overlay_most_coincident (bool): Overlay will use the most coincident value if
            True. Default is False.
        overlay_where_sql (str): SQL where-clause for overlay dataset subselection.
        replacement_value: Value to replace a present overlay-field value with.
        tolerance (float): Tolerance for coincidence, in units of the dataset.
        use_edit_session (bool): Updates are done in an edit session if True. Default is
            False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("overlay_central_coincident", False)
    kwargs.setdefault("overlay_most_coincident", False)
    kwargs.setdefault("overlay_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by overlay values in %s on %s.",
        field_name,
        dataset_path,
        overlay_field_name,
        overlay_dataset_path,
    )
    meta = {
        "dataset": dataset_metadata(dataset_path),
        "original_tolerance": arcpy.env.XYTolerance,
    }
    join_kwargs = {"join_operation": "join_one_to_many", "join_type": "keep_all"}
    if kwargs["overlay_central_coincident"]:
        join_kwargs["match_option"] = "have_their_center_in"
    elif kwargs["overlay_most_coincident"]:
        # TODO: Implement overlay_most_coincident.
        raise NotImplementedError("overlay_most_coincident not yet implemented.")
    # With no overlay flags set, SpatialJoin defaults to the "intersect" match option.
    dataset_view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    overlay_copy = TempDatasetCopy(
        overlay_dataset_path,
        kwargs["overlay_where_sql"],
        field_names=[overlay_field_name],
    )
    with dataset_view, overlay_copy:
        # Avoid field name collisions with neutral name.
        overlay_copy.field_name = dataset.rename_field(
            overlay_copy.path,
            overlay_field_name,
            new_field_name=unique_name(overlay_field_name),
            log_level=None,
        )
        if "tolerance" in kwargs:
            arcpy.env.XYTolerance = kwargs["tolerance"]
        # Create temp output of the overlay.
        temp_output_path = unique_path("output")
        arcpy.analysis.SpatialJoin(
            target_features=dataset_view.name,
            join_features=overlay_copy.path,
            out_feature_class=temp_output_path,
            **join_kwargs
        )
        if "tolerance" in kwargs:
            arcpy.env.XYTolerance = meta["original_tolerance"]
    # Push overlay (or replacement) value from output to update field.
    if "replacement_value" in kwargs and kwargs["replacement_value"] is not None:
        function = lambda x: kwargs["replacement_value"] if x else None
    else:
        function = lambda x: x
    update_by_function(
        temp_output_path,
        field_name,
        function,
        field_as_first_arg=False,
        arg_field_names=[overlay_copy.field_name],
        log_level=None,
    )
    # Update values in original dataset.
    update_action_count = update_by_joined_value(
        dataset_path,
        field_name,
        join_dataset_path=temp_output_path,
        join_field_name=field_name,
        on_field_pairs=[(meta["dataset"]["oid_field_name"], "target_fid")],
        dataset_where_sql=kwargs["dataset_where_sql"],
        use_edit_session=kwargs["use_edit_session"],
        log_level=None,
    )
    dataset.delete(temp_output_path, log_level=None)
    for action, count in sorted(update_action_count.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return update_action_count
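A usage sketch, assuming update_by_overlay is in scope; the paths and field names are placeholders:

# Assign each parcel the zone code of the zoning polygon containing its center.
update_by_overlay(
    "C:/data/city.gdb/parcels",
    field_name="zone_code",
    overlay_dataset_path="C:/data/city.gdb/zoning",
    overlay_field_name="ZONE",
    overlay_central_coincident=True,
)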