Example #1
def union(dataset_path, field_name, union_dataset_path, union_field_name,
          **kwargs):
    """Assign union attribute to features, splitting where necessary.

    Note:
        This function processes features in chunks to avoid an unhelpful failure
        mode with large inputs: the union tool will 'succeed' with an empty-output
        warning but never create the output dataset. Running the tool against
        smaller subsets of the data generally avoids the problem.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the dataset's field to assign to.
        union_dataset_path (str): Path of the union dataset.
        union_field_name (str): Name of union dataset's field with values to assign.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        chunk_size (int): Number of features to process per loop. Default is 4096.
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        union_where_sql (str): SQL where-clause for the union dataset subselection.
        replacement_value: Value to replace overlay field values with.
        tolerance (float): Tolerance for coincidence, in dataset's units.
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the dataset updated.

    """
    kwargs.setdefault('chunk_size', 4096)
    kwargs.setdefault('dataset_where_sql')
    kwargs.setdefault('union_where_sql')
    kwargs.setdefault('tolerance')
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log(
        "Start: Union-set attributes in %s on %s by overlay values in %s on %s.",
        field_name,
        dataset_path,
        union_field_name,
        union_dataset_path,
    )
    if kwargs.get('replacement_value') is not None:
        update_function = lambda x: kwargs['replacement_value'] if x else None
    else:
        # Union writes an empty string when no union feature is present.
        # Normalize to None (the replacement function does this inherently).
        update_function = lambda x: None if x == '' else x
    view = {'dataset': arcobj.DatasetView(dataset_path, kwargs['dataset_where_sql'])}
    # Create a temporary copy of the union dataset.
    temp_union = arcobj.TempDatasetCopy(
        union_dataset_path,
        kwargs['union_where_sql'],
        field_names=[union_field_name],
    )
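    # DatasetView and TempDatasetCopy act as context managers: the with-block
    # below creates the view/copy and presumably cleans them up on exit.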
    with view['dataset'], temp_union:
        # Avoid field name collisions with neutral field name.
        temp_union.field_name = dataset.rename_field(
            temp_union.path,
            union_field_name,
            new_field_name=unique_name(union_field_name),
            log_level=None,
        )
        for view['chunk'] in view['dataset'].as_chunks(kwargs['chunk_size']):
            temp_output_path = unique_path('output')
            arcpy.analysis.Union(
                in_features=[view['chunk'].name, temp_union.path],
                out_feature_class=temp_output_path,
                join_attributes='all',
                cluster_tolerance=kwargs['tolerance'],
                gaps=False,
            )
            # Clean up bad or null geometry created in processing.
            arcpy.management.RepairGeometry(temp_output_path)
            # Push union (or replacement) value from temp output to update field.
            attributes.update_by_function(
                temp_output_path,
                field_name,
                update_function,
                field_as_first_arg=False,
                arg_field_names=[temp_union.field_name],
                log_level=None,
            )
            # Replace original chunk features with new features.
            features.delete(view['chunk'].name, log_level=None)
            features.insert_from_path(dataset_path,
                                      temp_output_path,
                                      log_level=None)
            dataset.delete(temp_output_path, log_level=None)
    log("End: Union.")
    return dataset_path
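
A minimal usage sketch (the geodatabase paths and field names here are hypothetical, and the module's helpers are assumed to be importable):

# Assign each parcel the zoning code of the union polygon covering it,
# processing 1024 features per pass to dodge the empty-output failure
# described in the docstring note.
union(
    'C:/data/work.gdb/parcels',
    'zone_code',
    union_dataset_path='C:/data/work.gdb/zoning',
    union_field_name='zone',
    chunk_size=1024,
)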
Example #2
def generate_service_rings(
    dataset_path,
    output_path,
    network_path,
    cost_attribute,
    ring_width,
    max_distance,
    **kwargs
):
    """Create facility service ring features using a network dataset.

    Args:
        dataset_path (str): Path of the dataset.
        output_path (str): Path of the output service rings dataset.
        network_path (str): Path of the network dataset.
        cost_attribute (str): Name of the network cost attribute to use.
        ring_width (float): Distance a service ring represents in travel, in the
            dataset's units.
        max_distance (float): Distance in travel from the facility the outer ring will
            extend to, in the dataset's units.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        id_field_name (str): Name of facility ID field.
        restriction_attributes (iter): Collection of network restriction attribute
            names to use.
        travel_from_facility (bool): Flag to indicate performing the analysis
            travelling from (True) or to (False) the facility. Default is False.
        detailed_features (bool): Flag to generate high-detail features. Default is
            False.
        overlap_facilities (bool): Flag to overlap different facility service areas.
            Default is True.
        trim_value (float): Distance from the network features to trim service areas
            at.
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the output service rings dataset.

    """
    kwargs.setdefault('dataset_where_sql')
    kwargs.setdefault('id_field_name')
    kwargs.setdefault('restriction_attributes')
    kwargs.setdefault('travel_from_facility', False)
    kwargs.setdefault('detailed_features', False)
    kwargs.setdefault('overlap_facilities', True)
    kwargs.setdefault('trim_value')
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log("Start: Generate service rings for %s.", dataset_path)
    # A bare numeric trim_value is assumed to be meters, so convert it to a
    # linear unit string in the dataset's units.
    if kwargs['trim_value'] is not None:
        trim_value = arcobj.linear_unit_string(kwargs['trim_value'], dataset_path)
    else:
        trim_value = None
    view = {'dataset': arcobj.DatasetView(dataset_path, kwargs['dataset_where_sql'])}
    with arcobj.ArcExtension('Network'):
        arcpy.na.MakeServiceAreaLayer(
            in_network_dataset=network_path,
            out_network_analysis_layer='service_area',
            impedance_attribute=cost_attribute,
            travel_from_to=(
                'travel_from' if kwargs['travel_from_facility'] else 'travel_to'
            ),
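            # E.g. ring_width=1, max_distance=5 -> break values '1 2 3 4 5'.
            # Note range() requires integer ring_width and max_distance here.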
            default_break_values=(
                ' '.join(
                    str(x) for x in range(ring_width, max_distance + 1, ring_width)
                )
            ),
            polygon_type=(
                'detailed_polys' if kwargs['detailed_features'] else 'simple_polys'
            ),
            merge=('no_merge' if kwargs['overlap_facilities'] else 'no_overlap'),
            nesting_type='rings',
            UTurn_policy='allow_dead_ends_and_intersections_only',
            restriction_attribute_name=kwargs['restriction_attributes'],
            polygon_trim=(True if trim_value else False),
            poly_trim_value=trim_value,
            hierarchy='no_hierarchy',
        )
        with view['dataset']:
            arcpy.na.AddLocations(
                in_network_analysis_layer="service_area",
                sub_layer="Facilities",
                in_table=view['dataset'].name,
                field_mappings='Name {} #'.format(kwargs['id_field_name']),
                search_tolerance=max_distance,
                match_type='match_to_closest',
                append='clear',
                snap_to_position_along_network='no_snap',
                exclude_restricted_elements=True,
            )
        arcpy.na.Solve(
            in_network_analysis_layer="service_area",
            ignore_invalids=True,
            terminate_on_solve_error=True,
        )
    dataset.copy('service_area/Polygons', output_path, log_level=None)
    dataset.delete('service_area', log_level=None)
    if kwargs['id_field_name']:
        meta = {
            'id_field': arcobj.field_metadata(dataset_path, kwargs['id_field_name'])
        }
        dataset.add_field_from_metadata(output_path, meta['id_field'], log_level=None)
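        # TYPE_ID_FUNCTION_MAP is presumably a module-level mapping from field
        # type (e.g. 'String') to a casting function applied to the ring 'Name'.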
        attributes.update_by_function(
            output_path,
            meta['id_field']['name'],
            function=TYPE_ID_FUNCTION_MAP[meta['id_field']['type']],
            field_as_first_arg=False,
            arg_field_names=['Name'],
            log_level=None,
        )
    log("End: Generate.")
    return output_path
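
A usage sketch under assumed inputs (the paths, cost attribute, and distances are hypothetical):

# Build 1-mile service rings out to 5 miles of travel from each station,
# carrying the station ID onto the rings for later joins. ring_width and
# max_distance must be integers here, since they feed range() above.
generate_service_rings(
    'C:/data/work.gdb/fire_stations',
    output_path='C:/data/work.gdb/station_rings',
    network_path='C:/data/network.gdb/streets_nd',
    cost_attribute='miles',
    ring_width=1,
    max_distance=5,
    id_field_name='station_id',
    travel_from_facility=True,
)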
Example #3
def overlay(dataset_path, field_name, overlay_dataset_path, overlay_field_name,
            **kwargs):
    """Assign overlay attribute to features, splitting where necessary.

    Note:
        Only one overlay flag can be used at a time. If multiple flags are set to
        True, the first one referenced in the code wins. If no overlay flag is
        set, the operation performs a basic intersect match, and the result is at
        the whim of the geoprocessing environment's merge rule for the update
        field.

        This function processes features in chunks to avoid an unhelpful failure
        mode with large inputs: the spatial join will 'succeed' with an
        empty-output warning but never create the output dataset. Running the
        tool against smaller subsets of the data generally avoids the problem.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the dataset's field to assign to.
        overlay_dataset_path (str): Path of the overlay dataset.
        overlay_field_name (str): Name of overlay dataset's field with values to
            assign.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        chunk_size (int): Number of features to process per loop. Default is 4096.
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        overlay_central_coincident (bool): Flag to overlay the centrally-coincident
            value. Default is False.
        overlay_most_coincident (bool): Flag to overlay the most coincident value.
            Default is False.
        overlay_where_sql (str): SQL where-clause for the overlay dataset subselection.
        replacement_value: Value to replace overlay field values with.
        tolerance (float): Tolerance for coincidence, in dataset's units.
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the dataset updated.

    """
    kwargs.setdefault('chunk_size', 4096)
    kwargs.setdefault('dataset_where_sql')
    kwargs.setdefault('overlay_central_coincident', False)
    kwargs.setdefault('overlay_most_coincident', False)
    kwargs.setdefault('overlay_where_sql')
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log(
        "Start: Overlay-set attributes in %s on %s by overlay values in %s on %s.",
        field_name,
        dataset_path,
        overlay_field_name,
        overlay_dataset_path,
    )
    # Check flags & set details for spatial join call.
    join_kwargs = {'join_operation': 'join_one_to_many', 'join_type': 'keep_all'}
    if kwargs['overlay_central_coincident']:
        join_kwargs['match_option'] = 'have_their_center_in'
    elif kwargs['overlay_most_coincident']:
        raise NotImplementedError("overlay_most_coincident not yet implemented.")
    else:
        join_kwargs['match_option'] = 'intersect'
    if kwargs.get('replacement_value') is not None:
        update_function = lambda x: kwargs['replacement_value'] if x else None
    else:
        update_function = lambda x: x
    meta = {'orig_tolerance': arcpy.env.XYTolerance}
    view = {'dataset': arcobj.DatasetView(dataset_path, kwargs['dataset_where_sql'])}
    # Create temporary copy of overlay dataset.
    temp_overlay = arcobj.TempDatasetCopy(
        overlay_dataset_path,
        kwargs['overlay_where_sql'],
        field_names=[overlay_field_name],
    )
    with view['dataset'], temp_overlay:
        # Avoid field name collisions with neutral field name.
        temp_overlay.field_name = dataset.rename_field(
            temp_overlay.path,
            overlay_field_name,
            new_field_name=unique_name(overlay_field_name),
            log_level=None,
        )
        if 'tolerance' in kwargs:
            arcpy.env.XYTolerance = kwargs['tolerance']
        for view['chunk'] in view['dataset'].as_chunks(kwargs['chunk_size']):
            temp_output_path = unique_path('output')
            arcpy.analysis.SpatialJoin(
                target_features=view['chunk'].name,
                join_features=temp_overlay.path,
                out_feature_class=temp_output_path,
                **join_kwargs
            )
            # Clean up bad or null geometry created in processing.
            arcpy.management.RepairGeometry(temp_output_path)
            # Push joined (or replacement) value from temp output to update field.
            attributes.update_by_function(
                temp_output_path,
                field_name,
                update_function,
                field_as_first_arg=False,
                arg_field_names=[temp_overlay.field_name],
                log_level=None,
            )
            # Replace original chunk features with new features.
            features.delete(view['chunk'].name, log_level=None)
            features.insert_from_path(dataset_path,
                                      temp_output_path,
                                      log_level=None)
            dataset.delete(temp_output_path, log_level=None)
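        # Restore the original tolerance. Note this is skipped if the loop
        # raises; wrapping the loop in try/finally would be more robust.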
        if 'tolerance' in kwargs:
            arcpy.env.XYTolerance = meta['orig_tolerance']
    log("End: Overlay.")
    return dataset_path
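
A usage sketch with hypothetical inputs, showing the centrally-coincident flag:

# Tag each parcel with the school district containing its center; without
# the flag, the join falls back to a plain intersect match.
overlay(
    'C:/data/work.gdb/parcels',
    'district',
    overlay_dataset_path='C:/data/work.gdb/school_districts',
    overlay_field_name='district_name',
    overlay_central_coincident=True,
)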