def update_by_domain_code(
    dataset_path,
    field_name,
    code_field_name,
    domain_name,
    domain_workspace_path,
    **kwargs
):
    """Update attribute values using a coded-values domain.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        code_field_name (str): Name of the field with related domain code.
        domain_name (str): Name of the domain.
        domain_workspace_path (str): Path of the workspace the domain is in.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True.
            Default is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by code in %s using domain %s.",
        field_name,
        dataset_path,
        code_field_name,
        domain_name,
    )
    meta = {"domain": domain_metadata(domain_name, domain_workspace_path)}
    update_action_count = update_by_function(
        dataset_path,
        field_name,
        function=meta["domain"]["code_description_map"].get,
        field_as_first_arg=False,
        arg_field_names=[code_field_name],
        dataset_where_sql=kwargs["dataset_where_sql"],
        use_edit_session=kwargs["use_edit_session"],
        log_level=None,
    )
    for action, count in sorted(update_action_count.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return update_action_count
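
# Example usage (a minimal, hypothetical sketch; the paths, field names, and domain
# name below are illustrative only): update a description field from its paired
# code field.
#     update_by_domain_code(
#         "C:/data/example.gdb/parcels",
#         field_name="zone_description",
#         code_field_name="zone_code",
#         domain_name="ZoneCodes",
#         domain_workspace_path="C:/data/example.gdb",
#     )
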
def add_field(dataset_path, field_name, field_type, **kwargs):
    """Add field to dataset.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        field_type (str): Data type of the field.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        field_is_nullable (bool): Field can be nullable if True. Default is True.
        field_is_required (bool): Field value will be required for feature if True.
            Default is False.
        field_length (int): Length of field. Only applies to text fields. Default is
            64.
        field_precision (int): Precision of field. Only applies to float/double fields.
        field_scale (int): Scale of field. Only applies to float/double fields.
        exist_ok (bool): If field already exists: will raise an error if False; will
            act as if field was added if True. Default is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Name of the field added.

    Raises:
        RuntimeError: If `exist_ok=False` and field already exists.
    """
    kwargs.setdefault("field_is_nullable", True)
    kwargs.setdefault("field_is_required", False)
    kwargs.setdefault("field_length", 64)
    kwargs.setdefault("field_precision")
    kwargs.setdefault("field_scale")
    kwargs.setdefault("exist_ok", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Add field %s to %s.", field_name, dataset_path)
    if arcpy.ListFields(dataset_path, field_name):
        LOG.info("Field already exists.")
        if not kwargs["exist_ok"]:
            raise RuntimeError("Cannot add existing field (exist_ok=False).")
    else:
        add_kwargs = {key: kwargs[key] for key in kwargs if key.startswith("field_")}
        arcpy.management.AddField(dataset_path, field_name, field_type, **add_kwargs)
    log("End: Add.")
    return field_name
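
# Example usage (hypothetical paths and names): add a 32-character text field,
# tolerating the case where it already exists.
#     add_field(
#         "C:/data/example.gdb/parcels",
#         field_name="zone_code",
#         field_type="TEXT",
#         field_length=32,
#         exist_ok=True,
#     )
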
def update_by_value(dataset_path, field_name, value, **kwargs):
    """Update attribute values by assigning a given value.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        value (object): Static value to assign.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True.
            Default is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s by given value.", field_name, dataset_path
    )
    meta = {"dataset": dataset_metadata(dataset_path)}
    session = Editor(meta["dataset"]["workspace_path"], kwargs["use_edit_session"])
    cursor = arcpy.da.UpdateCursor(
        in_table=dataset_path,
        field_names=[field_name],
        where_clause=kwargs["dataset_where_sql"],
    )
    update_action_count = Counter()
    with session, cursor:
        for [old_value] in cursor:
            if same_value(old_value, value):
                update_action_count["unchanged"] += 1
            else:
                try:
                    cursor.updateRow([value])
                    update_action_count["altered"] += 1
                except RuntimeError:
                    LOG.error("Offending value is %s", value)
                    raise
    for action, count in sorted(update_action_count.items()):
        log("%s attributes %s.", count, action)
    log("End: Update.")
    return update_action_count
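
# Example usage (hypothetical): flag a subset of features with a static value.
#     update_by_value(
#         "C:/data/example.gdb/parcels",
#         field_name="review_status",
#         value="pending",
#         dataset_where_sql="zone_code = 'R1'",
#     )
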
def copy(dataset_path, output_path, **kwargs):
    """Copy features into a new dataset.

    Args:
        dataset_path (str): Path of the dataset.
        output_path (str): Path of output dataset.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        schema_only (bool): Copy only the schema--omitting data--if True. Default is
            False.
        overwrite (bool): Overwrite the output dataset if it exists, if True. Default
            is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        collections.Counter: Counts for each feature action.

    Raises:
        ValueError: If dataset type not supported.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("schema_only", False)
    kwargs.setdefault("overwrite", False)
    if kwargs["schema_only"]:
        kwargs["dataset_where_sql"] = "0=1"
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Copy dataset %s to %s.", dataset_path, output_path)
    meta = {"dataset": dataset_metadata(dataset_path)}
    view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    with view:
        if meta["dataset"]["is_spatial"]:
            exec_copy = arcpy.management.CopyFeatures
        elif meta["dataset"]["is_table"]:
            exec_copy = arcpy.management.CopyRows
        else:
            raise ValueError("{} unsupported dataset type.".format(dataset_path))
        if kwargs["overwrite"] and arcpy.Exists(output_path):
            delete(output_path, log_level=None)
        exec_copy(view.name, output_path)
    log("End: Copy.")
    return Counter(copied=feature_count(output_path))
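
# Example usage (hypothetical): copy only the schema of a feature class into a
# scratch geodatabase, replacing any previous copy.
#     copy(
#         "C:/data/example.gdb/parcels",
#         output_path="C:/scratch/work.gdb/parcels_schema",
#         schema_only=True,
#         overwrite=True,
#     )
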
def delete(dataset_path, **kwargs):
    """Delete dataset.

    Args:
        dataset_path (str): Path of the dataset.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Path of deleted dataset.
    """
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Delete dataset %s.", dataset_path)
    arcpy.management.Delete(in_data=dataset_path)
    log("End: Delete.")
    return dataset_path
def delete_field(dataset_path, field_name, **kwargs):
    """Delete field from dataset.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Name of the field deleted.
    """
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Delete field %s on %s.", field_name, dataset_path)
    arcpy.management.DeleteField(in_table=dataset_path, drop_field=field_name)
    log("End: Delete.")
    return field_name
def build_locator(locator_path, **kwargs):
    """Build locator.

    Args:
        locator_path (str): Path of the locator.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the built locator.
    """
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log("Start: Build locator %s.", locator_path)
    arcpy.geocoding.RebuildAddressLocator(locator_path)
    log("End: Build.")
    return locator_path
def build_network(network_path, **kwargs):
    """Build network dataset.

    Args:
        network_path (str): Path of the network dataset.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the built network dataset.
    """
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log("Start: Build network %s.", network_path)
    with arcobj.ArcExtension('Network'):
        arcpy.na.BuildNetwork(in_network_dataset=network_path)
    log("End: Build.")
    return network_path
def create_file_geodatabase(
    geodatabase_path, xml_workspace_path=None, include_xml_data=False, **kwargs
):
    """Create new file geodatabase.

    Args:
        geodatabase_path (str): Path of the geodatabase.
        xml_workspace_path (str): Path of the XML workspace document to initialize
            the geodatabase with.
        include_xml_data (bool): Flag to include data stored in the XML workspace
            document, if it has any.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the created file geodatabase.
    """
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log("Start: Create file geodatabase %s.", geodatabase_path)
    if os.path.exists(geodatabase_path):
        LOG.warning("Geodatabase already exists.")
        return geodatabase_path

    arcpy.management.CreateFileGDB(
        out_folder_path=os.path.dirname(geodatabase_path),
        out_name=os.path.basename(geodatabase_path),
        out_version='current',
    )
    if xml_workspace_path:
        arcpy.management.ImportXMLWorkspaceDocument(
            target_geodatabase=geodatabase_path,
            in_file=xml_workspace_path,
            import_type=('data' if include_xml_data else 'schema_only'),
            config_keyword='defaults',
        )
    log("End: Create.")
    return geodatabase_path
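
# Example usage (hypothetical paths): create a geodatabase seeded from an XML
# workspace document, schema only.
#     create_file_geodatabase(
#         "C:/data/new.gdb",
#         xml_workspace_path="C:/templates/schema.xml",
#         include_xml_data=False,
#     )
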
def compress(workspace_path, disconnect_users=False, **kwargs):
    """Compress workspace (usually geodatabase).

    Args:
        workspace_path (str): Path of the workspace.
        disconnect_users (bool): Flag to disconnect users before compressing.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the compressed workspace.

    Raises:
        ValueError: If `workspace_path` doesn't reference a compressible geodatabase.
    """
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log("Start: Compress workspace %s.", workspace_path)
    meta = {'workspace': metadata(workspace_path)}
    # Disconnect only possible for enterprise workspaces.
    disconnect_users = disconnect_users and meta['workspace']['is_enterprise_database']
    if meta['workspace']['is_file_geodatabase']:
        _compress = arcpy.management.CompressFileGeodatabaseData
    elif meta['workspace']['is_enterprise_database']:
        _compress = arcpy.management.Compress
    else:
        raise ValueError("Compressing {} unsupported.".format(workspace_path))

    if disconnect_users:
        arcpy.AcceptConnections(sde_workspace=workspace_path, accept_connections=False)
        arcpy.DisconnectUser(sde_workspace=workspace_path, users='all')
    _compress(workspace_path)
    if disconnect_users:
        arcpy.AcceptConnections(sde_workspace=workspace_path, accept_connections=True)
    log("End: Compress.")
    return workspace_path
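
# Example usage (hypothetical connection file): compress an enterprise geodatabase,
# disconnecting connected users first.
#     compress("C:/connections/production.sde", disconnect_users=True)
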
def update_by_expression(dataset_path, field_name, expression, **kwargs):
    """Update attribute values using a (single) code-expression.

    Wraps arcpy.management.CalculateField.

    Args:
        dataset_path (str): Path of the dataset.
        field_name (str): Name of the field.
        expression (str): Python string expression to evaluate values from.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        dataset_where_sql (str): SQL where-clause for dataset subselection.
        use_edit_session (bool): Updates are done in an edit session if True.
            Default is False.
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Name of the field updated.
    """
    kwargs.setdefault("dataset_where_sql")
    kwargs.setdefault("use_edit_session", False)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log(
        "Start: Update attributes in %s on %s using expression: `%s`.",
        field_name,
        dataset_path,
        expression,
    )
    meta = {"dataset": dataset_metadata(dataset_path)}
    session = Editor(meta["dataset"]["workspace_path"], kwargs["use_edit_session"])
    dataset_view = DatasetView(dataset_path, kwargs["dataset_where_sql"])
    with session, dataset_view:
        arcpy.management.CalculateField(
            in_table=dataset_view.name,
            field=field_name,
            expression=expression,
            expression_type="python_9.3",
        )
    log("End: Update.")
    return field_name
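
# Example usage (hypothetical fields): derive a field value with a Python string
# expression evaluated per row by CalculateField (field tokens use !field! syntax).
#     update_by_expression(
#         "C:/data/example.gdb/parcels",
#         field_name="zone_code",
#         expression="!zone_description![:2].upper()",
#     )
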
def create(dataset_path, field_metadata_list=None, **kwargs):
    """Create new dataset.

    Args:
        dataset_path (str): Path of the dataset.
        field_metadata_list (iter): Collection of field metadata mappings.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        geometry_type (str): Type of geometry, if a spatial dataset.
        spatial_reference_item: Item from which the spatial reference of the output
            geometry will be derived. Default is 4326 (EPSG code for unprojected
            WGS84).
        log_level (str): Level to log the function at. Default is "info".

    Returns:
        str: Path of the dataset created.
    """
    kwargs.setdefault("geometry_type")
    kwargs.setdefault("spatial_reference_item", 4326)
    log = leveled_logger(LOG, kwargs.setdefault("log_level", "info"))
    log("Start: Create dataset %s.", dataset_path)
    meta = {"spatial": spatial_reference_metadata(kwargs["spatial_reference_item"])}
    create_kwargs = {
        "out_path": os.path.dirname(dataset_path),
        "out_name": os.path.basename(dataset_path),
    }
    if kwargs["geometry_type"]:
        exec_create = arcpy.management.CreateFeatureclass
        create_kwargs["geometry_type"] = kwargs["geometry_type"]
        create_kwargs["spatial_reference"] = meta["spatial"]["object"]
    else:
        exec_create = arcpy.management.CreateTable
    exec_create(**create_kwargs)
    if field_metadata_list:
        for field_meta in field_metadata_list:
            add_field_from_metadata(dataset_path, field_meta, log_level=None)
    log("End: Create.")
    return dataset_path
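
# Example usage (hypothetical; the field metadata keys shown are illustrative and
# depend on what add_field_from_metadata expects): create a point feature class in
# WGS84 with one text field.
#     create(
#         "C:/data/example.gdb/sites",
#         field_metadata_list=[{"name": "site_name", "type": "TEXT", "length": 64}],
#         geometry_type="POINT",
#         spatial_reference_item=4326,
#     )
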
def execute_sql(statement, database_path, **kwargs):
    """Execute SQL statement via ArcSDE's SQL execution interface.

    Only works if database_path resolves to an actual SQL database.

    Args:
        statement (str): SQL statement to execute.
        database_path (str): Path of the database to execute statement in.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        object: Return value from the SQL statement's execution. Likely return types:
            bool: True for successful execution of statement with no return value or
                returned rows. False if failure.
            list: A list of lists representing returned rows.
            object: A single return value.

    Raises:
        AttributeError: If statement SQL syntax is incorrect.
    """
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log("Start: Execute SQL statement.")
    conn = arcpy.ArcSDESQLExecute(server=database_path)
    try:
        result = conn.execute(statement)
    except AttributeError:
        LOG.exception("Incorrect SQL syntax.")
        raise
    finally:
        # Yeah, what can you do?
        del conn
    log("End: Execute.")
    return result
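
# Example usage (hypothetical connection file and table): run a scalar query
# against an enterprise database.
#     row_count = execute_sql(
#         "SELECT COUNT(*) FROM parcels", "C:/connections/production.sde"
#     )
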
def create_geodatabase_xml_backup(
    geodatabase_path, output_path, include_data=False, include_metadata=True, **kwargs
):
    """Create backup of geodatabase as XML workspace document.

    Args:
        geodatabase_path (str): Path of the geodatabase.
        output_path (str): Path of the XML workspace document to create.
        include_data (bool): Flag to include data in the output.
        include_metadata (bool): Flag to include metadata in the output.
        **kwargs: Arbitrary keyword arguments. See below.

    Keyword Args:
        log_level (str): Level to log the function at. Default is 'info'.

    Returns:
        str: Path of the created XML workspace document.
    """
    log = leveled_logger(LOG, kwargs.setdefault('log_level', 'info'))
    log(
        "Start: Create XML backup of geodatabase %s at %s.",
        geodatabase_path,
        output_path,
    )
    arcpy.management.ExportXMLWorkspaceDocument(
        in_data=geodatabase_path,
        out_file=output_path,
        export_type=('data' if include_data else 'schema_only'),
        storage_type='binary',
        export_metadata=include_metadata,
    )
    log("End: Create.")
    return output_path
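
# Example usage (hypothetical paths): export a schema-only XML backup, keeping
# metadata.
#     create_geodatabase_xml_backup(
#         "C:/data/example.gdb",
#         output_path="C:/backups/example_schema.xml",
#         include_data=False,
#     )
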