def eval(self):
    """Evaluate differences between datasets.

    Populates properties: ids & diffs.

    Returns:
        arcetl.diff.Differ: Reference to the instance.
    """
    init_attrs, new_attrs = self._id_attr["init"], self._id_attr["new"]
    init_ids, new_ids = set(init_attrs), set(new_attrs)
    self.ids["added"] = new_ids - init_ids
    self.ids["persisted"] = new_ids & init_ids
    self.ids["removed"] = init_ids - new_ids
    # Seed diff containers. Added/removed diffs are fully determined by the ID
    # sets; the remaining types are filled while walking persisted features.
    for tag in self.diff_types:
        if tag in ["added", "removed"]:
            self.diffs[tag] = [
                self.diff_info(feat_id, tag) for feat_id in self.ids[tag]
            ]
        else:
            self.diffs[tag] = []
    self._displacement_links = []
    for feat_id in self.ids["persisted"]:
        old_feat, new_feat = init_attrs[feat_id], new_attrs[feat_id]
        # Geometry diff (also yields a displacement link).
        geoms = [old_feat.get("shape@"), new_feat.get("shape@")]
        if not same_value(*geoms):
            self.diffs["geometry"].append(
                self.diff_info(feat_id, "geometry", geometries=geoms)
            )
            self._displacement_links.append(
                self.displacement_link(feat_id, geometries=geoms)
            )
        # Attribute diffs on the comparison fields.
        for key in self._keys["cmp"]:
            vals = [old_feat[key], new_feat[key]]
            if not same_value(*vals):
                self.diffs["attribute"].append(
                    self.diff_info(feat_id, "attribute", values=vals, field_name=key)
                )
        # Overlay diffs, keyed by (overlay dataset path, field name).
        for overlay in self._dataset["overlays"]:
            for key in overlay["keys"]:
                vals = [
                    old_feat[(overlay["path"], key)],
                    new_feat[(overlay["path"], key)],
                ]
                if not same_value(*vals):
                    self.diffs["overlay"].append(
                        self.diff_info(
                            feat_id,
                            "overlay",
                            values=vals,
                            dataset_path=overlay["path"],
                            field_name=key,
                        )
                    )
    return self
def update_by_value(dataset_path, field_name, value, **kwargs):
    """Update attribute values by assigning a given value.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        value (object): Static value to assign.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    # Default is False per the documented contract and every sibling update
    # function (previously defaulted to True, contradicting the docstring).
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by given value.", field_name, dataset_path
    )
    meta = {"dataset": dataset_metadata(dataset_path)}
    session = Editor(meta["dataset"]["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=[field_name],
        where_clause=kwargs["dataset_where_sql"],
    )
    update_action_count = Counter()
    with session, cursor:
        for [old_value] in cursor:
            if same_value(old_value, value):
                update_action_count["unchanged"] += 1
            else:
                try:
                    cursor.updateRow([value])
                    update_action_count["altered"] += 1
                except RuntimeError:
                    # Surface the value that the geodatabase rejected, then re-raise.
                    LOG.error("Offending value is %s", value)
                    raise
    for action, count in sorted(update_action_count.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return update_action_count
def displacement_link(self, feature_id, geometries):
    """Create link feature representing displacement of given feature.

    Args:
        feature_id: ID values for feature.
        geometries (list of arcpy.Geometry): Init & new feature geometries.

    Returns:
        dict: Displacement link.
    """
    centroids = [geometry.centroid for geometry in geometries]
    if same_value(*centroids):
        # Identical centroids cannot define a valid line: wedge in a
        # "kick-out" point offset by one unit on each axis.
        anchor = centroids[0]
        kick_out = arcpy.Point(X=anchor.X + 1, Y=anchor.Y + 1, Z=anchor.Z)
        centroids.insert(1, kick_out)
    link = {
        "shape@": arcpy.Polyline(
            arcpy.Array(*centroids), self._dataset["init"]["spatial_reference"]
        )
    }
    # Carry the feature's ID values onto the link feature.
    for position, id_key in enumerate(self._keys["id"]):
        link[id_key] = feature_id[position]
    return link
def diff_info(self, feature_id, diff_tag, values=None, geometries=None, **kwargs):
    """Create info-dictionary for diff of given feature.

    Keyword arguments are generally related to customizing description values.

    Args:
        feature_id: ID values for feature.
        diff_tag (str): Type of diff to create row for (see diff_types property).
        values (list): Init & new values for attribute/overlay.
        geometries (list of arcpy.Geometry): Init & new feature geometries.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_path (str): Path of relevant dataset.
        field_name (str): Name of relevant field.

    Returns:
        dict: Diff information.
    """
    description = self._diff_type_description[diff_tag].format(**kwargs)
    diff = {"diff_type": diff_tag, "description": description}
    if values is None:
        diff["init_repr"] = diff["new_repr"] = None
    else:
        diff["init_repr"] = str(values[0])
        diff["new_repr"] = str(values[1])
    if geometries:
        if same_value(*geometries):
            diff["shape@"] = geometries[0]
        else:
            # Changed geometry: represent the diff as the convex hull of both.
            diff["shape@"] = geometries[0].union(geometries[1]).convexHull()
    else:
        diff["shape@"] = None
    # Carry the feature's ID values onto the diff row.
    for position, id_key in enumerate(self._keys["id"]):
        diff[id_key] = feature_id[position]
    return diff
def update_by_mapping(dataset_path, field_name, mapping, key_field_names, **kwargs):
    """Update attribute values by finding them in a mapping.

    Note:
        Mapping key must be a tuple if an iterable.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        mapping: Mapping to get values from.
        key_field_names (iter): Fields names whose values will comprise the mapping
            key.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        default_value: Value to return from mapping if key value on feature not
            present. Default is None.
        use_edit_session (bool): Updates are done in an edit session if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("default_value")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by mapping with key in %s.",
        field_name,
        dataset_path,
        key_field_names,
    )
    dataset_meta = dataset_metadata(dataset_path)
    map_keys = list(contain(key_field_names))
    cursor_keys = map_keys + [field_name]
    if isinstance(mapping, EXEC_TYPES):
        # Allow passing a callable/executable that lazily produces the mapping.
        mapping = mapping()
    session = Editor(dataset_meta["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=cursor_keys,
        where_clause=kwargs["dataset_where_sql"],
    )
    counts = Counter()
    with session, cursor:
        for row in cursor:
            # Single key field maps on the bare value; multiple on a tuple.
            map_key = row[0] if len(map_keys) == 1 else tuple(row[:-1])
            old_value = row[-1]
            new_value = mapping.get(map_key, kwargs["default_value"])
            if same_value(old_value, new_value):
                counts["unchanged"] += 1
                continue
            try:
                cursor.updateRow(row[:-1] + [new_value])
            except RuntimeError:
                # Surface the value that the geodatabase rejected, then re-raise.
                LOG.error("Offending value is %s", new_value)
                raise
            counts["altered"] += 1
    for action, count in sorted(counts.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return counts
def update_by_joined_value(
    dataset_path, field_name, join_dataset_path, join_field_name, on_field_pairs,
    **kwargs
):
    """Update attribute values by referencing a joinable field.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        join_dataset_path (str): Path of the join-dataset.
        join_field_name (str): Name of the join-field.
        on_field_pairs (iter): Field name pairs used to to determine join.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by joined values in %s on %s.",
        field_name,
        dataset_path,
        join_field_name,
        join_dataset_path,
    )
    dataset_meta = dataset_metadata(dataset_path)
    id_keys = [pair[0] for pair in on_field_pairs]
    join_id_keys = [pair[1] for pair in on_field_pairs]
    cursor_keys = id_keys + [field_name]
    # Pre-build the lookup from join-ID to join-field value.
    join_value = id_map(
        join_dataset_path, id_field_names=join_id_keys, field_names=join_field_name
    )
    session = Editor(dataset_meta["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=cursor_keys,
        where_clause=kwargs["dataset_where_sql"],
    )
    counts = Counter()
    with session, cursor:
        for row in cursor:
            # Single ID field joins on the bare value; multiple on a tuple.
            id_value = row[0] if len(id_keys) == 1 else tuple(row[:-1])
            old_value = row[-1]
            new_value = join_value.get(id_value)
            if same_value(old_value, new_value):
                counts["unchanged"] += 1
                continue
            try:
                cursor.updateRow(row[:-1] + [new_value])
            except RuntimeError:
                # Surface the value that the geodatabase rejected, then re-raise.
                LOG.error("Offending value is %s", new_value)
                raise
            counts["altered"] += 1
    for action, count in sorted(counts.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return counts
def update_by_geometry(dataset_path, field_name, geometry_properties, **kwargs):
    """Update attribute values by cascading through geometry properties.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        geometry_properties (iter): Geometry property names in object-access order to
            retrieve the update value.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        spatial_reference_item: Item from which the spatial reference for the output
            geometry property will be derived. If not specified or None, the spatial
            reference of the dataset is used.
        use_edit_session (bool): Updates are done in an edit session if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("spatial_reference_item")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by geometry properties %s.",
        field_name,
        dataset_path,
        geometry_properties,
    )
    meta = {
        "dataset": dataset_metadata(dataset_path),
        "spatial": spatial_reference_metadata(kwargs["spatial_reference_item"]),
    }
    session = Editor(meta["dataset"]["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=["shape@", field_name],
        where_clause=kwargs["dataset_where_sql"],
        spatial_reference=meta["spatial"]["object"],
    )
    update_action_count = Counter()
    with session, cursor:
        for feature in cursor:
            value = {"geometry": feature[0], "old": feature[-1]}
            # Walk the property chain on the geometry object to get the value.
            value["new"] = property_value(
                value["geometry"],
                GEOMETRY_PROPERTY_TRANSFORM,
                *contain(geometry_properties)
            )
            if same_value(value["old"], value["new"]):
                update_action_count["unchanged"] += 1
            else:
                try:
                    cursor.updateRow([value["geometry"], value["new"]])
                    update_action_count["altered"] += 1
                except RuntimeError:
                    # Surface the value that the geodatabase rejected, then re-raise.
                    LOG.error("Offending value is %s", value["new"])
                    raise
    for action, count in sorted(update_action_count.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return update_action_count
def update_by_function(dataset_path, field_name, function, **kwargs):
    """Update attribute values by passing them to a function.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        function (types.FunctionType): Function to get values from.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        field_as_first_arg (bool): True if field value will be the first positional
            argument. Default is True.
        arg_field_names (iter): Field names whose values will be the positional
            arguments (not including primary field).
        kwarg_field_names (iter): Field names whose names & values will be the method
            keyword arguments.
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("field_as_first_arg", True)
    kwargs.setdefault("arg_field_names", [])
    kwargs.setdefault("kwarg_field_names", [])
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by function %s.",
        field_name,
        dataset_path,
        function,
    )
    dataset_meta = dataset_metadata(dataset_path)
    arg_keys = list(contain(kwargs["arg_field_names"]))
    kwarg_keys = list(contain(kwargs["kwarg_field_names"]))
    # Cursor row layout: positional-arg fields, keyword-arg fields, target field.
    cursor_keys = arg_keys + kwarg_keys + [field_name]
    session = Editor(dataset_meta["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=cursor_keys,
        where_clause=kwargs["dataset_where_sql"],
    )
    counts = Counter()
    with session, cursor:
        for row in cursor:
            old_value = row[-1]
            func_args = list(row[: len(arg_keys)])
            if kwargs["field_as_first_arg"]:
                func_args = [old_value] + func_args
            func_kwargs = dict(zip(kwarg_keys, row[len(arg_keys) : -1]))
            new_value = function(*func_args, **func_kwargs)
            if same_value(old_value, new_value):
                counts["unchanged"] += 1
                continue
            try:
                cursor.updateRow(row[:-1] + [new_value])
            except RuntimeError:
                # Surface the value that the geodatabase rejected, then re-raise.
                LOG.error("Offending value is %s", new_value)
                raise
            counts["altered"] += 1
    for action, count in sorted(counts.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return counts
def update_by_feature_match(
    dataset_path, field_name, id_field_names, update_type, **kwargs
):
    """Update attribute values by aggregating info about matching features.

    Note:
        Currently, the sort_order update type uses functionality that only works with
        datasets contained in databases.

    Valid update_type codes:
        "flag_value": Apply the flag_value argument value to matched features.
        "match_count": Apply the count of matched features.
        "sort_order": Apply the position of the feature sorted with matches.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        id_field_names (iter): Field names used to identify a feature.
        update_type (str): Code indicating what values to apply to matched features.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        flag_value: Value to apply to matched features. Only used when update_type is
            "flag_value".
        sort_field_names (iter): Iterable of field names used to sort matched
            features. Only affects output when update_type="sort_order".
        use_edit_session (bool): Updates are done in an edit session if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.

    Raises:
        ValueError: If update_type is not a recognized code.
        TypeError: If a keyword argument required by the update_type is missing.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s"
        + " by feature-matching %s on identifiers (%s).",
        field_name,
        dataset_path,
        update_type.replace("_", " "),
        id_field_names,
    )
    if update_type not in ["flag_value", "match_count", "sort_order"]:
        raise ValueError("Invalid update_type.")
    # Certain update types require a companion keyword argument.
    for _type, kwarg in {
        "flag_value": "flag_value",
        "sort_order": "sort_field_names",
    }.items():
        if update_type == _type and kwarg not in kwargs:
            # Bug fix: format arguments were swapped (kwarg is the missing
            # argument, _type the triggering update type) & the template had a
            # stray ", ." trailer.
            raise TypeError(
                """{} is a required keyword argument when update_type == "{}".""".format(
                    kwarg, _type
                )
            )
    meta = {"dataset": dataset_metadata(dataset_path)}
    keys = {
        "id": list(contain(id_field_names)),
        "sort": list(contain(kwargs.get("sort_field_names", []))),
    }
    keys["feature"] = keys["id"] + [field_name]
    matcher = FeatureMatcher(dataset_path, keys["id"], kwargs["dataset_where_sql"])
    session = Editor(meta["dataset"]["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=keys["feature"],
        where_clause=kwargs["dataset_where_sql"],
        # sort_order relies on a database-backed ORDER BY clause.
        sql_clause=(
            (None, "order by " + ", ".join(keys["sort"]))
            if update_type == "sort_order"
            else None
        ),
    )
    update_action_count = Counter()
    with session, cursor:
        for feature in cursor:
            value = {
                "id": feature[0] if len(keys["id"]) == 1 else tuple(feature[:-1]),
                "old": feature[-1],
            }
            if update_type == "flag_value":
                if matcher.is_duplicate(value["id"]):
                    value["new"] = kwargs["flag_value"]
                else:
                    value["new"] = value["old"]
            elif update_type == "match_count":
                value["new"] = matcher.match_count(value["id"])
            elif update_type == "sort_order":
                value["new"] = matcher.increment_assigned(value["id"])
            if same_value(value["old"], value["new"]):
                update_action_count["unchanged"] += 1
            else:
                try:
                    cursor.updateRow(feature[:-1] + [value["new"]])
                    update_action_count["altered"] += 1
                except RuntimeError:
                    # Surface the value that the geodatabase rejected, then re-raise.
                    LOG.error("Offending value is %s", value["new"])
                    raise
    for action, count in sorted(update_action_count.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return update_action_count