Example #1
import arcpy

# Import the geoprocessing environment settings
from arcpy import env

# Import Spatial Analyst tools
from arcpy.sa import *

# Check out the Spatial Analyst extension and report its license status
print("Spatial Analyst extension status: " + arcpy.CheckOutExtension("Spatial"))


# Set the workspace (use a raw string so the backslashes are not escapes)
env.workspace = r"C:\Users\MTA78\Desktop\GIS"

# Set input raster
inRaster = Raster("Water_Depth_Raster.tif")

# Dictionary of projected water-depth increase by year
DepthIncrease = {'2022':0.2,'2027':0.5,'2032':0.9, '2037':1.4, '2042':2.0, 
'2047':2.7, '2052':3.5, '2057':4.4, '2062':5.4, '2067':6.5}

# Output raster for each year (reuse inRaster rather than re-reading the TIFF)
WaterDepth2022 = inRaster + DepthIncrease["2022"]
WaterDepth2027 = inRaster + DepthIncrease["2027"]
WaterDepth2032 = inRaster + DepthIncrease["2032"]
WaterDepth2037 = inRaster + DepthIncrease["2037"]
WaterDepth2042 = inRaster + DepthIncrease["2042"]
WaterDepth2047 = inRaster + DepthIncrease["2047"]
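
# The per-year lines above could also be written as a loop; a minimal
# sketch, where the "Depth_<year>.tif" output names are assumptions:
for year, increase in sorted(DepthIncrease.items()):
    depth_raster = inRaster + increase
    depth_raster.save("Depth_{}.tif".format(year))  # hypothetical output name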
Example #2
def daymet_parameters(config_path, data_name='PPT'):
    """Calculate GSFLOW DAYMET Parameters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.
    data_name : {'PPT', 'TMAX', 'TMIN', 'ALL'}
        DAYMET data type (the default is 'PPT').

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'daymet_normals_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nGSFLOW DAYMET Parameters')

    # DAYMET
    daymet_ws = inputs_cfg.get('INPUTS', 'daymet_folder')
    daymet_proj_method = inputs_cfg.get('INPUTS', 'prism_projection_method')
    daymet_cs = inputs_cfg.getint('INPUTS', 'prism_cellsize')
    calc_jh_coef_flag = inputs_cfg.getboolean('INPUTS',
                                              'calc_prism_jh_coef_flag')
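    # Note: the INI reuses the PRISM option names (prism_projection_method,
    # prism_cellsize, calc_prism_jh_coef_flag) for these DAYMET settings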

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()
    # Check that DAYMET folder is valid
    if not os.path.isdir(daymet_ws):
        logging.error(
            '\nERROR: DAYMET folder ({}) does not exist'.format(daymet_ws))
        sys.exit()
    proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if daymet_proj_method.upper() not in proj_method_list:
        logging.error('\nERROR: DAYMET projection method must be: {}'.format(
            ', '.join(proj_method_list)))
        sys.exit()
    logging.debug('  Projection method:    {}'.format(
        daymet_proj_method.upper()))

    # Check other inputs
    if daymet_cs <= 0:
        logging.error('\nERROR: DAYMET cellsize must be greater than 0\n')
        sys.exit()

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS 0'
    env.workspace = hru.param_ws
    env.scratchWorkspace = hru.scratch_ws

    # DAYMET data names
    if data_name == 'ALL':
        data_name_list = ['PPT', 'TMAX', 'TMIN']
    else:
        data_name_list = [data_name]

    # Set month list
    month_list = ['{:02d}'.format(m) for m in range(1, 13)]
    # month_list.extend(['annual'])

    # Check fields
    logging.info('\nAdding DAYMET fields if necessary')
    for data_name in data_name_list:
        for month in month_list:
            support.add_field_func(hru.polygon_path,
                                   '{}_{}'.format(data_name, month), 'DOUBLE')

    # Process each DAYMET data type
    logging.info('\nProjecting/clipping DAYMET mean monthly rasters')
    for data_name in data_name_list:
        logging.info('\n{}'.format(data_name))
        daymet_normal_re = re.compile(
            r'daymet_(?P<type>%s)_30yr_normal_(?P<month>\d{2})\.img$' %
            data_name, re.IGNORECASE)

        # Search all files & subfolders in DAYMET folder
        #   for images that match data type
        input_raster_dict = dict()
        for root, dirs, files in os.walk(daymet_ws):
            for file_name in files:
                daymet_normal_match = daymet_normal_re.match(file_name)
                if daymet_normal_match:
                    month_str = daymet_normal_match.group('month')
                    input_raster_dict[month_str] = os.path.join(
                        root, file_name)
        if not input_raster_dict:
            logging.error(
                '\nERROR: No DAYMET rasters were found matching the following '
                'pattern:\n  {}\n\n'.format(daymet_normal_re.pattern))
            sys.exit()

        # DAYMET input data workspace
        # input_ws = os.path.join(daymet_ws, data_name.lower())
        # if not os.path.isdir(input_ws):
        #    logging.error('\nERROR: The DAYMET {} folder does not exist'.format(
        #        data_name.lower()))
        #    sys.exit()

        # DAYMET output data workspace
        output_ws = os.path.join(hru.param_ws, data_name.lower() + '_rasters')
        if not os.path.isdir(output_ws):
            os.mkdir(output_ws)

        # Remove all non year/month rasters in DAYMET temp folder
        logging.info('  Removing existing DAYMET files')
        for item in os.listdir(output_ws):
            if daymet_normal_re.match(item):
                os.remove(os.path.join(output_ws, item))

        # Extract, project/resample, clip
        # Process images by month
        zs_daymet_dict = dict()
        # env.extent = hru.extent
        for month in month_list:
            logging.info('  Month: {}'.format(month))

            # Projected/clipped DAYMET raster
            input_raster = input_raster_dict[month]
            # input_name = 'daymet_{}_30yr_normal_800mM2_{}_bil.bil'.format(
            #    data_name.lower(), input_month)
            # input_raster = os.path.join(input_ws, input_name)
            output_name = 'daymet_{}_normal_{}.img'.format(
                data_name.lower(), month)
            output_raster = os.path.join(output_ws, output_name)

            # Set preferred transforms
            input_sr = arcpy.sa.Raster(input_raster).spatialReference
            transform_str = support.transform_func(hru.sr, input_sr)
            if transform_str:
                logging.debug('  Transform: {}'.format(transform_str))

            # Project DAYMET rasters to HRU coordinate system
            # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
            support.project_raster_func(input_raster, output_raster, hru.sr,
                                        daymet_proj_method.upper(), daymet_cs,
                                        transform_str,
                                        '{} {}'.format(hru.ref_x, hru.ref_y),
                                        input_sr, hru)
            # arcpy.ProjectRaster_management(
            #    input_raster, output_raster, hru.sr,
            #    daymet_proj_method.upper(), daymet_cs, transform_str,
            #    '{} {}'.format(hru.ref_x, hru.ref_y),
            #    input_sr)

            # Save parameters for calculating zonal stats
            zs_field = '{}_{}'.format(data_name, month)
            zs_daymet_dict[zs_field] = [output_raster, 'MEAN']

            # Cleanup
            del input_raster, output_raster, output_name
            del input_sr, transform_str, zs_field

        # Cleanup
        # arcpy.ClearEnvironment('extent')

        # Calculate zonal statistics
        logging.info('\nCalculating DAYMET zonal statistics')
        support.zonal_stats_func(zs_daymet_dict, hru.polygon_path,
                                 hru.point_path, hru)
        del zs_daymet_dict
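
A hypothetical invocation of the function above; the INI path is an assumption, not taken from the source:
if __name__ == '__main__':
    daymet_parameters(config_path='gsflow_project.ini', data_name='ALL')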
Example #3
# Name: Network analysis application to DH in Missouri
# Description: Find the network of connected centroids in 1870 and 1890
#              and calculate statistics at the county level
# Requirements: Spatial and Network Analyst Extension
# Author: Giorgio Chiovelli

#Import arcpy module
import arcpy
from arcpy import env
import os
import csv

#Check out extensions and turn overwrite on
arcpy.CheckOutExtension("spatial")
arcpy.CheckOutExtension("network")
arcpy.env.overwriteOutput = True

print "overwrite on"

directory = "C:/Users/se.4537/Dropbox/PoliteconGIS/Bonn_2019/Lecture 5/GIS data/_workflow2/"
folder = directory + "trade_cost/"

workspace = directory + "dh.gdb/dh_class"
workspace2 = directory + "modified.gdb/"

# Define a workspace
arcpy.env.workspace = workspace
# Set the local variables

# note: the two network datasets have to be called dh_1870 and dh_1890
Example #4
def ring_route(nds, ring_name, stops, output_dir, co, cost_reduction='#'):
    """
    Calculates the protection route. Returns duct and fiber length in m. 
    
    :param nds: network dataset, nds path
    :param ring_name: the name for the route layer, string
    :param stops: all the nodes in the ring, point feature 
    :param output_dir: path to store the results, typically feature dataset path
    :param co: central office, point feature
    :param cost_reduction: not used
    :return: {'ff_ring_duct': duct, 'ff_ring_fiber': fiber}
    """
    arcpy.CheckOutExtension("Network")

    # Create points layer
    layer_path = os.path.join('in_memory', 'stops')
    stops_layer = arcpy.MakeFeatureLayer_management(stops, layer_path)

    # Create points layer for selecting the nearest
    layer_path = os.path.join('in_memory', 'stops_selection')
    stops_layer_selection = arcpy.MakeFeatureLayer_management(stops, layer_path)

    # Create CO layer
    co_layer_path = os.path.join('in_memory', 'co')
    co_layer = arcpy.MakeFeatureLayer_management(co, co_layer_path)

    # # Cost attribute for disjoint paths
    # sc_disjoint = 20000
    # mapping_disjoint = "Name Name #;Attr_Length # " + str(sc_disjoint) + "; BarrierType # 1"
    #
    # cycle = int(arcpy.GetCount_management(stops).getOutput(0))
    #
    # ids_list, id_field = get_ids(stops_layer)

    # # Nearest neighbor heuristics
    # ####################################################################################################################
    # # Start with CO, find nearest neighbor
    # arcpy.Near_analysis(co_layer, stops_layer, method='GEODESIC')
    #
    # # Get the closest id
    # with arcpy.da.SearchCursor(co_layer, 'NEAR_FID') as cursor:
    #     for row in cursor:
    #         stop_id = row[0]
    #
    # # Select the closest node
    # clause = '"{0}" = {1}'.format(id_field, stop_id)
    # arcpy.SelectLayerByAttribute_management(stops_layer, 'NEW_SELECTION', clause)
    #
    # # Compute the route
    # constraints = []
    # co_path = route(nds, ring_name, co_layer, stops_layer, mapping_disjoint, constraints, 0)
    # # Add to constraints
    # constraints.append(co_path)
    #
    # # Iterate through the nodes
    # sort_out_ids = []
    # for j in range(1, cycle):
    #     # Store the previous
    #     prev = stop_id
    #     # Outsort the previous
    #     sort_out_ids.append(prev)
    #     # Get the next node but not going back
    #
    #     clause = ''
    #     for i in range(len(sort_out_ids)):
    #         if i == 0 or i == len(sort_out_ids):
    #             clause += '"{0}" = {1}'.format(id_field, sort_out_ids[i])
    #         else:
    #             clause += ' OR "{0}" = {1}'.format(id_field, sort_out_ids[i])
    #
    #     arcpy.SelectLayerByAttribute_management(stops_layer_selection, 'NEW_SELECTION', clause)
    #     arcpy.SelectLayerByAttribute_management(stops_layer_selection, 'SWITCH_SELECTION')
    #
    #     arcpy.Near_analysis(stops_layer, stops_layer_selection, method='GEODESIC')
    #
    #     with arcpy.da.SearchCursor(stops_layer, 'NEAR_FID') as cursor:
    #         for row in cursor:
    #             stop_id = row[0]
    #
    #     clause = '"{0}" = {1}'.format(id_field, stop_id)
    #     arcpy.SelectLayerByAttribute_management(stops_layer, 'NEW_SELECTION', clause)
    #
    #     clause = '"{0}" = {1}'.format(id_field, prev)
    #     arcpy.SelectLayerByAttribute_management(stops_layer_selection, 'NEW_SELECTION', clause)
    #
    #     path_tmp = route(nds, ring_name, stops_layer_selection, stops_layer, mapping_disjoint, constraints, j)
    #
    #     constraints.append(path_tmp)
    #
    #     arcpy.SelectLayerByAttribute_management(stops_layer_selection, 'CLEAR_SELECTION')
    #
    # # The last node has to be connected to the CO
    # clause = '"{0}" = {1}'.format(id_field, stop_id)
    # arcpy.SelectLayerByAttribute_management(stops_layer, 'NEW_SELECTION', clause)
    #
    # co_path = route(nds, ring_name, co_layer, stops_layer, mapping_disjoint, constraints, cycle+1)
    # # Add to constraints
    # constraints.append(co_path)

    layer_object = arcpy.na.MakeRouteLayer(nds, ring_name, impedance_attribute='Length',
                                           find_best_order='FIND_BEST_ORDER',
                                           output_path_shape='TRUE_LINES_WITH_MEASURES',
                                           ordering_type='PRESERVE_BOTH').getOutput(0)

    na_classes = arcpy.na.GetNAClassNames(layer_object)

    arcpy.na.AddLocations(layer_object, na_classes["Stops"], co_layer)
    arcpy.na.AddLocations(layer_object, na_classes["Stops"], stops_layer)
    arcpy.na.AddLocations(layer_object, na_classes["Stops"], co_layer)

    arcpy.na.Solve(layer_object)

    tmp = arcpy.MakeFeatureLayer_management(arcpy.mapping.ListLayers(layer_object, na_classes["Routes"])[0],
                                            'ring')

    route_out_name = os.path.join(output_dir, '{0}_ring'.format(ring_name))
    check_exists(route_out_name)
    arcpy.CopyFeatures_management(tmp, route_out_name)

    fiber = post_processing_fiber(arcpy.mapping.ListLayers(layer_object, na_classes["Stops"])[0])

    return fiber
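
A hypothetical invocation of ring_route; every path below is an assumption for illustration:
fiber_result = ring_route(
    nds=r'C:\data\network.gdb\nd\streets_ND',
    ring_name='ring_01',
    stops=r'C:\data\network.gdb\ring_nodes',
    output_dir=r'C:\data\network.gdb\results',
    co=r'C:\data\network.gdb\central_office')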
Example #5
# add console logging
console_formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
console = logging.StreamHandler()
console.setLevel(logging.INFO)
console.setFormatter(console_formatter)
logging.getLogger().addHandler(console)

arcpy.env.overwriteOutput = True

with arcpy.EnvManager(scratchWorkspace=out_gdb, workspace=out_gdb):
	# arcpy.env.outputCoordinateSystem = arcpy.SpatialReference(3857)
	# Parallel processing doesn't ALWAYS help!
	# parallel_processing = '12'

	arcpy.CheckOutExtension("spatial")
	arcpy.CheckOutExtension("ImageAnalyst")

	try:
		txtMessage = "{0}** Starting Custom Differencing - " \
					 " - PointSet: {1}{0}" \
					 " - RasterTopic: {2}{0}" \
					 " - RasterPop: {3}{0}" \
					 " - Destination GDB: {4}{0}".format("\n", PointSet, RasterTopic, RasterPop, out_gdb)
		logging.info(txtMessage)

		# step 1
		# Build Custom Population Raster
		if arcpy.Exists(RasterPop):
			logging.info("{} Using Existing Population Raster: {}".format("\n", RasterPop))
		else:
Example #6
saveIntermediate = (saveIntermediate == 'true')

inFds = gdb + '/GeologicMap'
outFds = gdb + '/CrossSection' + outFdsTag

if arcpy.Exists(scratchws):
    scratch = scratchws
else:
    scratch = outFds
addMsgAndPrint('  Scratch directory is ' + scratch)

arcpy.env.overwriteOutput = True

try:
    arcpy.CheckOutExtension('3D')
except:
    addMsgAndPrint('\nCannot check out 3D-analyst extension.')
    sys.exit()

## Checking section line
addMsgAndPrint('  Checking section line')
idField = getIdField(xsLine)
##   does xsLine have 1-and-only-1 arc? if not, bail
i = numberOfRows(xsLine)
if i > 1:
    addMsgAndPrint('OOPS! More than one arc in ' + xsLine)
    sys.exit()
elif i == 0:
    addMsgAndPrint('OOPS! No arcs in ' + xsLine)
    sys.exit()
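
The snippet above relies on helpers (addMsgAndPrint, getIdField, numberOfRows) defined elsewhere in the original toolbox; a minimal sketch of numberOfRows, assuming a plain GetCount wrapper:
def numberOfRows(fc):
    # Count the features in a table or feature class with the GetCount tool
    return int(arcpy.GetCount_management(fc).getOutput(0))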
Example #7
def soil_raster_prep(config_path):
    """Prepare GSFLOW soil rasters

    Parameters
    ----------
    config_path : str
        Project configuration file (.ini) path.

    Returns
    -------
    None

    """
    # Initialize hru_parameters class
    hru = support.HRUParameters(config_path)

    # Open input parameter config file
    inputs_cfg = ConfigParser.ConfigParser()
    try:
        inputs_cfg.readfp(open(config_path))
    except Exception as e:
        logging.error('\nERROR: Config file could not be read, '
                      'is not an input file, or does not exist\n'
                      '  config_file = {}\n'
                      '  Exception: {}\n'.format(config_path, e))
        sys.exit()

    # Log DEBUG to file
    log_file_name = 'soil_prep_log.txt'
    log_console = logging.FileHandler(filename=os.path.join(
        hru.log_ws, log_file_name),
                                      mode='w')
    log_console.setLevel(logging.DEBUG)
    log_console.setFormatter(logging.Formatter('%(message)s'))
    logging.getLogger('').addHandler(log_console)
    logging.info('\nPrepare GSFLOW Soil Rasters')

    soil_orig_ws = inputs_cfg.get('INPUTS', 'soil_orig_folder')
    awc_name = inputs_cfg.get('INPUTS', 'awc_name')
    clay_pct_name = inputs_cfg.get('INPUTS', 'clay_pct_name')
    sand_pct_name = inputs_cfg.get('INPUTS', 'sand_pct_name')
    soil_proj_method = 'NEAREST'
    soil_cs = inputs_cfg.getint('INPUTS', 'soil_cellsize')
    fill_soil_nodata_flag = inputs_cfg.getboolean('INPUTS',
                                                  'fill_soil_nodata_flag')

    # Use Ksat to calculate ssr2gw_rate and slowcoef_lin
    ksat_name = inputs_cfg.get('INPUTS', 'ksat_name')

    # Read and apply soil depth raster
    # Otherwise soil depth will only be derived from rooting depth
    try:
        soil_depth_flag = inputs_cfg.getboolean('INPUTS', 'soil_depth_flag')
    except ConfigParser.NoOptionError:
        soil_depth_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'soil_depth_flag', soil_depth_flag))
    if soil_depth_flag:
        soil_depth_name = inputs_cfg.get('INPUTS', 'soil_depth_name')

    # Use geology based multipliers to adjust ssr2gw_rate
    # Otherwise default value set in config file will be used
    try:
        ssr2gw_mult_flag = inputs_cfg.getboolean('INPUTS', 'ssr2gw_mult_flag')
    except ConfigParser.NoOptionError:
        ssr2gw_mult_flag = False
        logging.info('  Missing INI parameter, setting {} = {}'.format(
            'ssr2gw_mult_flag', ssr2gw_mult_flag))
    if ssr2gw_mult_flag:
        ssr2gw_mult_name = inputs_cfg.get('INPUTS', 'ssr2gw_mult_name')

    # Check input paths
    if not arcpy.Exists(hru.polygon_path):
        logging.error('\nERROR: Fishnet ({}) does not exist'.format(
            hru.polygon_path))
        sys.exit()

    # All of the soil rasters must exist
    awc_orig_path = os.path.join(soil_orig_ws, awc_name)
    clay_pct_orig_path = os.path.join(soil_orig_ws, clay_pct_name)
    sand_pct_orig_path = os.path.join(soil_orig_ws, sand_pct_name)
    ksat_orig_path = os.path.join(soil_orig_ws, ksat_name)
    if soil_depth_flag:
        soil_depth_orig_path = os.path.join(soil_orig_ws, soil_depth_name)
    if ssr2gw_mult_flag:
        ssr2gw_mult_orig_path = os.path.join(soil_orig_ws, ssr2gw_mult_name)

    # Check that either the original or projected/clipped raster exists
    if not arcpy.Exists(awc_orig_path):
        logging.error('\nERROR: AWC raster does not exist')
        sys.exit()
    if not arcpy.Exists(clay_pct_orig_path):
        logging.error('\nERROR: Clay raster does not exist')
        sys.exit()
    if not arcpy.Exists(sand_pct_orig_path):
        logging.error('\nERROR: Sand raster does not exist')
        sys.exit()
    if not arcpy.Exists(ksat_orig_path):
        logging.error('\nERROR: Ksat raster does not exist')
        sys.exit()
    if soil_depth_flag and not arcpy.Exists(soil_depth_orig_path):
        logging.error('\nERROR: Soil depth raster does not exist')
        sys.exit()
    if ssr2gw_mult_flag and not arcpy.Exists(ssr2gw_mult_orig_path):
        logging.error('\nERROR: Geology based raster for ssr2gw multiplier '
                      'does not exist')
        sys.exit()

    # Check other inputs
    if soil_cs <= 0:
        logging.error('\nERROR: soil cellsize must be greater than 0')
        sys.exit()
    soil_proj_method_list = ['BILINEAR', 'CUBIC', 'NEAREST']
    if soil_proj_method.upper() not in soil_proj_method_list:
        logging.error('\nERROR: Soil projection method must be: {}'.format(
            ', '.join(soil_proj_method_list)))
        sys.exit()

    # Build output folder if necessary
    soil_temp_ws = os.path.join(hru.param_ws, 'soil_rasters')
    if not os.path.isdir(soil_temp_ws):
        os.mkdir(soil_temp_ws)
    # Output paths
    awc_path = os.path.join(soil_temp_ws, 'awc.img')
    clay_pct_path = os.path.join(soil_temp_ws, 'clay_pct.img')
    sand_pct_path = os.path.join(soil_temp_ws, 'sand_pct.img')
    ksat_path = os.path.join(soil_temp_ws, 'ksat.img')
    soil_depth_path = os.path.join(soil_temp_ws, 'soil_depth.img')
    ssr2gw_mult_path = os.path.join(soil_temp_ws, 'ssr2gw_mult.img')

    # Set ArcGIS environment variables
    arcpy.CheckOutExtension('Spatial')
    env.overwriteOutput = True
    env.pyramid = 'PYRAMIDS -1'
    # env.pyramid = 'PYRAMIDS 0'
    env.workspace = soil_temp_ws
    env.scratchWorkspace = hru.scratch_ws

    # Available Water Capacity (AWC)
    logging.info('\nProjecting/clipping AWC raster')
    soil_orig_sr = arcpy.sa.Raster(awc_orig_path).spatialReference
    logging.debug('  AWC GCS:  {}'.format(soil_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(awc_path):
        arcpy.Delete_management(awc_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, soil_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project soil raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(awc_orig_path, awc_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), soil_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    awc_orig_path, awc_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Percent clay
    logging.info('Projecting/clipping clay raster')
    soil_orig_sr = arcpy.sa.Raster(clay_pct_orig_path).spatialReference
    logging.debug('  Clay GCS: {}'.format(soil_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(clay_pct_path):
        arcpy.Delete_management(clay_pct_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, soil_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project soil raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(clay_pct_orig_path, clay_pct_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), soil_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    clay_pct_orig_path, clay_pct_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Percent sand
    logging.info('Projecting/clipping sand raster')
    soil_orig_sr = arcpy.sa.Raster(sand_pct_orig_path).spatialReference
    logging.debug('  Sand GCS: {}'.format(soil_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(sand_pct_path):
        arcpy.Delete_management(sand_pct_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, soil_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project soil raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(sand_pct_orig_path, sand_pct_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), soil_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    sand_pct_orig_path, sand_pct_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Hydraulic conductivity
    logging.info('Projecting/clipping ksat raster')
    ksat_orig_sr = arcpy.sa.Raster(ksat_orig_path).spatialReference
    logging.debug('  Ksat GCS: {}'.format(ksat_orig_sr.GCS.name))
    # Remove existing projected raster
    if arcpy.Exists(ksat_path):
        arcpy.Delete_management(ksat_path)
    # Set preferred transforms
    transform_str = support.transform_func(hru.sr, ksat_orig_sr)
    logging.debug('  Transform: {}'.format(transform_str))
    logging.debug('  Projection method: NEAREST')
    # Project ksat raster
    # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
    support.project_raster_func(ksat_orig_path, ksat_path, hru.sr,
                                soil_proj_method, soil_cs, transform_str,
                                '{} {}'.format(hru.ref_x,
                                               hru.ref_y), ksat_orig_sr, hru)
    # env.extent = hru.extent
    # arcpy.ProjectRaster_management(
    #    ksat_orig_path, ksat_path, hru.sr,
    #    soil_proj_method, soil_cs, transform_str,
    #    '{} {}'.format(hru.ref_x, hru.ref_y),
    #    soil_orig_sr)
    # arcpy.ClearEnvironment('extent')

    # Soil depth is only needed if clipping root depth
    if soil_depth_flag:
        logging.info('\nProjecting/clipping depth raster')
        soil_orig_sr = arcpy.sa.Raster(soil_depth_orig_path).spatialReference
        logging.debug('  Depth GCS: {}'.format(soil_orig_sr.GCS.name))
        # Remove existing projected raster
        if arcpy.Exists(soil_depth_path):
            arcpy.Delete_management(soil_depth_path)
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, soil_orig_sr)
        logging.debug('  Transform: {}'.format(transform_str))
        logging.debug('  Projection method: NEAREST')
        # Project soil raster
        # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
        support.project_raster_func(soil_depth_orig_path, soil_depth_path,
                                    hru.sr, soil_proj_method, soil_cs,
                                    transform_str,
                                    '{} {}'.format(hru.ref_x, hru.ref_y),
                                    soil_orig_sr, hru)
        # env.extent = hru.extent
        # arcpy.ProjectRaster_management(
        #    soil_depth_orig_path, soil_depth_path, hru.sr,
        #    soil_proj_method, soil_cs, transform_str,
        #    '{} {}'.format(hru.ref_x, hru.ref_y),
        #    soil_orig_sr)
        # arcpy.ClearEnvironment('extent')

    # Geology based multiplier for gravity drainage (ssr2gw multiplier)
    if ssr2gw_mult_flag:
        logging.info('\nProjecting/clipping ssr2gw multiplier raster')
        soil_orig_sr = arcpy.sa.Raster(ssr2gw_mult_orig_path).spatialReference
        logging.debug('  Depth GCS: {}'.format(soil_orig_sr.GCS.name))
        # Remove existing projected raster
        if arcpy.Exists(ssr2gw_mult_path):
            arcpy.Delete_management(ssr2gw_mult_path)
        # Set preferred transforms
        transform_str = support.transform_func(hru.sr, soil_orig_sr)
        logging.debug('  Transform: {}'.format(transform_str))
        logging.debug('  Projection method: NEAREST')
        # Project soil raster
        # DEADBEEF - Arc10.2 ProjectRaster does not honor extent
        support.project_raster_func(ssr2gw_mult_orig_path, ssr2gw_mult_path,
                                    hru.sr, soil_proj_method, soil_cs,
                                    transform_str,
                                    '{} {}'.format(hru.ref_x, hru.ref_y),
                                    soil_orig_sr, hru)

    # Fill soil nodata values using nibble
    if fill_soil_nodata_flag:
        logging.info('\nFilling soil nodata values using Nibble')
        soil_raster_list = [awc_path, clay_pct_path, sand_pct_path, ksat_path]
        if soil_depth_flag:
            soil_raster_list.append(soil_depth_path)
        for soil_raster_path in soil_raster_list:
            logging.info('  {}'.format(soil_raster_path))
            # DEADBEEF - Check if there is any nodata to be filled first?
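            # Nibble requires an integer input raster, so values are scaled
            # by 1000 and truncated to Int here, then scaled back by 0.001
            # after filling to preserve three decimal places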
            mask_obj = arcpy.sa.Int(1000 * arcpy.sa.SetNull(
                arcpy.sa.Raster(soil_raster_path) < 0,
                arcpy.sa.Raster(soil_raster_path)))
            input_obj = arcpy.sa.Con(arcpy.sa.IsNull(mask_obj), 0, mask_obj)
            nibble_obj = 0.001 * arcpy.sa.Nibble(input_obj, mask_obj,
                                                 'ALL_VALUES')
            nibble_obj.save(soil_raster_path)
            arcpy.BuildPyramids_management(soil_raster_path)
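
A hypothetical call for the soil prep step above; 'gsflow_project.ini' is an assumed path:
soil_raster_prep('gsflow_project.ini')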
Example #8
def viewshed_by_feat_class(inRaster,
                           observerDataset,
                           feat_class_folder,
                           output_folder,
                           snapRst=None,
                           visibilityRadius=50000):
    """
    Run viewshed for each feature class in a folder
    
    observerDataset should be a raster or a point/line feature class;
    observerDataset and inRaster should have a major extent than each one
    of the feature class in feat_class_folder
    
    A visibilityRadius is necessary to do a buffer of each feature class.
    Some entity could be outside of the feature class boundary but visible
    from within the same boundary.
    
    Restrictions:
    * ObserverDataset must have a table if a raster;
    
    TO BE MANTAINED?
    """

    import arcpy
    import os

    arcpy.CheckOutExtension('Spatial')

    from glass.oss.ops import create_folder
    from glass.prop.ff import vector_formats, raster_formats
    from glass.prop.rst import get_cellsize, rst_distinct
    from glass.anls.prox.bf import _buffer
    from glass.cpu.arcg.mng.rst.proc import clip_raster
    from glass.cpu.arcg.spanlst.surf import viewshed
    from glass.cpu.arcg.spanlst.rcls import reclassify

    # Check if observerDataset is a Raster or a Feature Class
    # Import methods to clip obserserverDataset
    RASTER_FORMATS = raster_formats()
    VECTOR_FORMATS = vector_formats()
    observerFormat = os.path.splitext(observerDataset)[1]

    if observerFormat in VECTOR_FORMATS:
        from glass.cpu.arcg.anls.exct import clip

    elif observerFormat in RASTER_FORMATS:
        from glass.to.shp.arcg import rst_to_pnt
        from glass.to.shp import rst_to_polyg

        # If Raster, get CELLSIZE of the observer dataset
        CELLSIZE = get_cellsize(observerDataset, gisApi='arcpy')
        REF_CELLSIZE = 500

        from glass.cpu.arcg.mng.sample import fishnet
        from glass.cpu.arcg.anls.ovlay import erase
        from glass.cpu.arcg.mng.feat import feat_to_pnt

    else:
        raise ValueError(('Could not identify if observerDataset '
                          'is a raster or a feature class'))

    # Create workspace for temporary files
    wTmp = create_folder(os.path.join(output_folder, 'tempfiles'))

    # When clipping the observerDataset (when it is a raster), there is a
    # chance of obtaining a raster with more values than the original raster.
    # Check values of the observerDataset
    UNIQUEVALUES = []
    if observerFormat in RASTER_FORMATS:
        for line in arcpy.SearchCursor(observerDataset):
            value = int(line.getValue("Value"))

            if value not in UNIQUEVALUES:
                UNIQUEVALUES.append(value)

            else:
                continue

    # List feature classes
    arcpy.env.workspace = feat_class_folder
    fclasses = arcpy.ListFeatureClasses()

    for fc in fclasses:
        # Create Buffer
        fcBuffer = _buffer(fc,
                           visibilityRadius,
                           os.path.join(wTmp, os.path.basename(fc)),
                           api='arcpy')
        # Clip inRaster
        clipInRst = clip_raster(inRaster,
                                fcBuffer,
                                os.path.join(
                                    wTmp, 'inrst_{}{}'.format(
                                        os.path.splitext(
                                            os.path.basename(fc))[0],
                                        os.path.splitext(inRaster)[1])),
                                snap=snapRst,
                                clipGeom=True)

        # Clip observerDataset
        # If Raster, convert to points
        if observerFormat in VECTOR_FORMATS:
            clipObs = clip(
                observerDataset, fcBuffer,
                os.path.join(
                    wTmp, 'obs_{}{}'.format(
                        os.path.splitext(os.path.basename(fc))[0],
                        os.path.splitext(observerDataset)[1])))

        elif observerFormat in RASTER_FORMATS:
            # Clip raster
            clipTmp = clip_raster(
                observerDataset,
                fcBuffer,
                os.path.join(
                    wTmp, 'obs_{}{}'.format(
                        os.path.splitext(os.path.basename(fc))[0],
                        os.path.splitext(observerDataset)[1])),
                snap=snapRst,
                clipGeom=None)

            # Check if clip has the same values as the original raster
            RST_UNIQUE = rst_distinct(clipTmp, gisApi='arcpy')
            if len(RST_UNIQUE) > len(UNIQUEVALUES):
                # Reclassify raster
                rules = {}
                for v in RST_UNIQUE:
                    if v in UNIQUEVALUES:
                        rules[v] = v
                    else:
                        rules[v] = 'NODATA'

                clipTmp = reclassify(
                    clipTmp,
                    'Value',
                    rules,
                    os.path.join(wTmp,
                                 'r_{}'.format(os.path.basename(clipTmp))),
                    template=clipTmp)

            if CELLSIZE < REF_CELLSIZE:
                # if cellsize is less than REF_CELLSIZE
                # Change cellsize to REF_CELLSIZE:

                # 1) Create fishnet REF_CELLSIZE
                fishNet = fishnet(os.path.join(
                    wTmp, 'fish_{}'.format(os.path.basename(fc))),
                                  clipTmp,
                                  cellWidth=REF_CELLSIZE,
                                  cellHeight=REF_CELLSIZE)

                # 2) Erase areas with NoData values
                # - Raster to shp
                cls_intPolygon = rst_to_polyg(
                    clipTmp,
                    os.path.join(wTmp,
                                 'cls_int_{}'.format(os.path.basename(fc))),
                    gisApi='arcpy')

                # - Erase areas of the fishnet that agrees with nodata values in the raster
                tmpErase = erase(
                    fishNet, cls_intPolygon,
                    os.path.join(wTmp,
                                 'nozones_{}'.format(os.path.basename(fc))))
                trueErase = erase(
                    fishNet, tmpErase,
                    os.path.join(wTmp,
                                 'fishint_{}'.format(os.path.basename(fc))))

                # 3) Convert erased fishnet to points
                clipObs = feat_to_pnt(
                    trueErase,
                    os.path.join(wTmp, 'obs_{}'.format(os.path.basename(fc))),
                    pnt_position="INSIDE")

            # Else - simple conversion to points
            else:
                clipObs = rst_to_pnt(
                    clipTmp,
                    os.path.join(wTmp, 'obs_{}'.format(os.path.basename(fc))))

        # Run viewshed
        viewshed(
            clipInRst, clipObs,
            os.path.join(
                output_folder, 'vis_{}{}'.format(
                    os.path.splitext(os.path.basename(fc))[0],
                    os.path.splitext(clipInRst)[1])))
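
A hypothetical invocation of viewshed_by_feat_class; the paths below are assumptions and the glass package must be importable:
viewshed_by_feat_class(
    inRaster=r'C:\data\dem.tif',
    observerDataset=r'C:\data\observers.shp',
    feat_class_folder=r'C:\data\zones',
    output_folder=r'C:\data\viewshed_out',
    visibilityRadius=50000)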
Example #9
def viewshed_by_feat_class2(inRaster,
                            observerDataset,
                            feat_class_folder,
                            output_folder,
                            snapRst=None,
                            visibilityRadius=20000,
                            epsg=3763):
    """
    See for each feature class in a folder if it is possible to see a 
    interest object from all poins in each feature class
    
    Why this method is different from viewshed_by_feat_class?
    viewshed_by_feat_class uses viewshed tool of ArcGIS;
    This one will calculate the visibility point by point, when the script
    identifies that one point is observable from another, it stops.
    
    TO BE MANTAINED?
    """

    import arcpy
    import numpy
    import os

    from glass.oss.ops import create_folder
    from glass.prop.ff import vector_formats, raster_formats
    from glass.cpu.arcg.lyr import feat_lyr
    from glass.prop.rst import get_cell_coord
    from glass.prop.ext import rst_ext
    from glass.prop.rst import rst_shape, rst_distinct, get_nodata, get_cellsize
    from glass.anls.prox.bf import _buffer
    from glass.cpu.arcg.mng.rst.proc import clip_raster
    from glass.cpu.arcg.spanlst.surf import viewshed
    from glass.cpu.arcg.spanlst.rcls import reclassify
    from glass.cpu.arcg._3D.view import line_of_sight
    from glass.to.rst import shp_to_raster
    from glass.to.rst.arcg import array_to_raster
    from glass.to.shp.arcg import geomArray_to_fc
    from glass.fm.rst import toarray_varcmap as rst_to_array

    arcpy.CheckOutExtension('Spatial')
    arcpy.env.overwriteOutput = True

    # Check if observerDataset is a Raster or a Feature Class
    RASTER_FORMATS = raster_formats()
    VECTOR_FORMATS = vector_formats()
    observerFormat = os.path.splitext(observerDataset)[1]

    if observerFormat in VECTOR_FORMATS:
        from glass.cpu.arcg.anls.exct import clip

    elif observerFormat in RASTER_FORMATS:
        from glass.to.shp.arcg import rst_to_pnt
        from glass.to.shp import rst_to_polyg

        # If raster, get CELLSIZE of the observer dataset
        CELLSIZE = get_cellsize(observerDataset, gisApi='arcpy')
        REF_CELLSIZE = 500

        from glass.cpu.arcg.mng.sample import fishnet
        from glass.cpu.arcg.anls.ovlay import erase
        from glass.cpu.arcg.mng.feat import feat_to_pnt

    else:
        raise ValueError(('Could not identify if observerDataset is a raster '
                          'or a feature class'))

    # Create workspace for temporary files
    wTmp = create_folder(os.path.join(output_folder, 'tempfiles'))

    # When clipping the observerDataset (when it is a raster), there is a
    # chance of obtaining a raster with more values than the original raster.
    # Check values of the observerDataset
    UNIQUEVALUES = []
    if observerFormat in RASTER_FORMATS:
        for line in arcpy.SearchCursor(observerDataset):
            # TODO: the raster might not have an attribute table
            value = int(line.getValue("Value"))

            if value not in UNIQUEVALUES:
                UNIQUEVALUES.append(value)

            else:
                continue

    # List feature classes
    arcpy.env.workspace = feat_class_folder
    fclasses = arcpy.ListFeatureClasses()

    for fc in fclasses:
        # Create Buffer
        fcBuffer = _buffer(
            fc,
            visibilityRadius,
            os.path.join(wTmp, os.path.basename(fc)),
        )

        # Clip inRaster
        clipInRst = clip_raster(inRaster,
                                fcBuffer,
                                os.path.join(
                                    wTmp, 'dem_{}{}'.format(
                                        os.path.splitext(
                                            os.path.basename(fc))[0],
                                        os.path.splitext(inRaster)[1])),
                                snap=snapRst,
                                clipGeom=True)

        # Clip observerDataset
        # If Raster, convert to points
        if observerFormat in VECTOR_FORMATS:
            clipObs = clip(
                observerDataset, fcBuffer,
                os.path.join(
                    wTmp, 'obs_{}{}'.format(
                        os.path.splitext(os.path.basename(fc))[0],
                        os.path.splitext(observerDataset)[1])))

        elif observerFormat in RASTER_FORMATS:
            # Clip Raster
            clipTmp = clip_raster(
                observerDataset,
                fcBuffer,
                os.path.join(
                    wTmp, 'obs_{}{}'.format(
                        os.path.splitext(os.path.basename(fc))[0],
                        os.path.splitext(observerDataset)[1])),
                snap=snapRst,
                clipGeom=True)

            # Check if the clip has the same values as the original raster
            RST_UNIQUE = rst_distinct(clipTmp, gisApi='arcpy')
            if len(RST_UNIQUE) > len(UNIQUEVALUES):
                # Reclassify raster
                rules = {}
                for v in RST_UNIQUE:
                    if v in UNIQUEVALUES:
                        rules[v] = v
                    else:
                        rules[v] = 'NODATA'

                clipTmp = reclassify(
                    clipTmp,
                    'Value',
                    rules,
                    os.path.join(wTmp,
                                 'r_{}'.format(os.path.basename(clipTmp))),
                    template=clipTmp)

            if CELLSIZE < REF_CELLSIZE:
                # if cellsize is less than REF_CELLSIZE
                # Change cellsize to REF_CELLSIZE:

                # 1) Create fishnet REF_CELLSIZE
                fishNet = fishnet(os.path.join(
                    wTmp, 'fish_{}'.format(os.path.basename(fc))),
                                  clipTmp,
                                  cellWidth=REF_CELLSIZE,
                                  cellHeight=REF_CELLSIZE)

                # 2) Erase areas with NoData Values
                # Raster to shp
                cls_intPolygon = rst_to_polyg(
                    clipTmp,
                    os.path.join(wTmp,
                                 'cls_int_{}'.format(os.path.basename(fc))),
                    gisApi='arcpy')

                # - Erase areas of the fishnet that have nodata values
                # in the raster
                tmpErase = erase(
                    fishNet, cls_intPolygon,
                    os.path.join(wTmp,
                                 'nozones_{}'.format(os.path.basename(fc))))

                trueErase = erase(
                    fishNet, tmpErase,
                    os.path.join(wTmp,
                                 'fishint_{}'.format(os.path.basename(fc))))

                # 3) Convert erased fishnet to points
                clipObs = feat_to_pnt(
                    trueErase,
                    os.path.join(wTmp, 'obs_{}'.format(os.path.basename(fc))),
                    pnt_position="INSIDE")

            else:
                clipObs = rst_to_pnt(
                    clipTmp,
                    os.path.join(wTmp, 'obs_{}'.format(os.path.basename(fc))))

        # Calculate visibility
        # Boundary to raster
        boundRst = shp_to_raster(
            fc,
            'FID',
            CELLSIZE,
            None,
            os.path.join(
                wTmp, '{}_{}'.format(
                    os.path.splitext(os.path.basename(fc))[0],
                    os.path.splitext(observerDataset)[1])),
            snap=clipInRst,
            api='arcpy')

        noDataVal = get_nodata(boundRst, gisApi='arcpy')

        boundArray = rst_to_array(boundRst)

        # Raster to array
        # For each cell, get cell position and create line of sight
        shape = rst_shape(boundRst, gisApi='arcpy')
        xmin, xmax, ymin, ymax = rst_ext(boundRst, gisApi='arcpy2')

        visibilityArray = numpy.zeros((shape[0], shape[1]))

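        # Mark cells outside the boundary with the nodata value so the
        # visibility loop below skips them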
        numpy.copyto(visibilityArray, boundArray, 'unsafe',
                     boundArray == noDataVal)

        for l in range(len(visibilityArray)):
            for c in range(len(visibilityArray[l])):
                if visibilityArray[l][c] == noDataVal:
                    continue

                # Get cell position
                x, y = get_cell_coord(l, c, xmin, ymin, CELLSIZE, CELLSIZE)

                # Get Line of sight
                cursor = arcpy.SearchCursor(clipObs)
                for line in cursor:
                    FID = line.getValue("FID")
                    geom = line.Shape.centroid
                    sightArray = [
                        {
                            "FID": 0,
                            "GEOM": [(x, y), (geom.X, geom.Y)]
                        },
                    ]

                    lineSight = geomArray_to_fc(
                        sightArray,
                        os.path.join(
                            wTmp, 'ls_{}_{}_{}_{}{}'.format(
                                os.path.splitext(os.path.basename(fc))[0],
                                str(l), str(c), str(FID),
                                os.path.splitext(fc)[1])), "POLYLINE", epsg)

                    lineSightRes = line_of_sight(
                        clipInRst, lineSight,
                        os.path.join(
                            wTmp, 'lsr_{}_{}_{}_{}{}'.format(
                                os.path.splitext(os.path.basename(fc))[0],
                                str(l), str(c), str(FID),
                                os.path.splitext(fc)[1])))

                    lyrLineSight = feat_lyr(lineSightRes)

                    cs = arcpy.SearchCursor(lyrLineSight)
                    lnh = cs.next()
                    vis = None
                    cnt = 0
                    while cnt == 0:
                        try:
                            vis = lnh.getValue("TarIsVis")
                        except:
                            pass
                        cnt += 1

                    if vis == 1:
                        visibilityArray[l][c] = 1
                        break
                    else:
                        continue

        # Generate Raster with visibility data
        visibilityRst = array_to_raster(
            visibilityArray, xmin, CELLSIZE, CELLSIZE,
            os.path.join(
                output_folder, 'vis_{}{}'.format(
                    os.path.splitext(os.path.basename(fc))[0],
                    os.path.splitext(clipInRst)[1])))

    return output_folder
Example #10
# Import system modules
import arcpy

# Set environment settings
arcpy.env.workspace = "C:/gapyexamples/data"

# Set local variables
inPointFeatures = "ca_ozone_pts.shp"
zField = "ozone"
outLayer = "outDIWB"
outRaster = "C:/gapyexamples/output/diwbout"
cellSize = 2000.0
power = 2
inBarrier = "ca_outline.shp"
bandwidth = ""
iterations = 10
weightField = ""
addBarrier = ""
cumuBarrier = ""
flowBarrier = ""
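# Empty strings leave the optional tool parameters at their defaults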

# Check out the ArcGIS Geostatistical Analyst extension license
arcpy.CheckOutExtension("GeoStats")

# Execute DiffusionInterpolationWithBarriers
arcpy.DiffusionInterpolationWithBarriers_ga(inPointFeatures, zField, outLayer,
                                            outRaster, cellSize, inBarrier,
                                            bandwidth, iterations, weightField,
                                            addBarrier, cumuBarrier,
                                            flowBarrier)
Example #11
def PreProcess(DEMRasterPath,min_size,buffer_dist,outPutRaster):
    arcpy.CheckOutExtension("Spatial")
    input_dir = os.path.split(outPutRaster)[0]
    gdb = os.path.split(outPutRaster)[0]

    if os.path.splitext(input_dir)[1].lower() == ".gdb":
        input_dir = os.path.split(input_dir)[0]
        #gdb = os.path.split(outPutRaster)[0]

    env.workspace = input_dir
    env.overwriteOutput = True

    if not arcpy.Exists(DEMRasterPath):
        print("The input raster does not exist")
        quit()

    ### Mean Focal Statistics
    ras_mf = arcpy.sa.FocalStatistics(DEMRasterPath,"Rectangle 3 3 CELL","MEAN","DATA")

    ### Fill depression
    ras_fill = arcpy.sa.Fill(ras_mf)
    ras_fill.save(os.path.join(input_dir,"dem_filled.tif"))
    ### Get sink
    ras_sink = ras_fill - ras_mf

    ### Convert foreground sink
    #ras_sink_fg = arcpy.sa.Con(ras_sink,ras_sink,"#",'"Value">0')
    #ras_sink_fg.save("ras_sink_fg")

    ### Convert sink to binary image
    ras_sink_bin = arcpy.sa.Con(ras_sink > 0,1)

    ### Region group
    ras_region = arcpy.sa.RegionGroup(ras_sink_bin,"FOUR","WITHIN","ADD_LINK")

    ### Convert raster to polygon
    region_poly_name = os.path.join(input_dir,"region_poly.shp")
    arcpy.RasterToPolygon_conversion(ras_region,region_poly_name,"NO_SIMPLIFY")

    ### Select polygon based on minimum size
    area_field = "Area"
    arcpy.AddField_management(region_poly_name, area_field, "DOUBLE")
    arcpy.CalculateField_management(region_poly_name,"Area","!shape.area@squaremeters!","PYTHON_9.3","#")
    sqlExp = area_field + ">=" + str(min_size)

    ##print sqlExp
    region_poly_select_name = os.path.join(input_dir,"sink_poly.shp")
    arcpy.Select_analysis(region_poly_name,region_poly_select_name,sqlExp)

    ### Calculate field
    arcpy.CalculateField_management(region_poly_select_name,"gridcode","1","PYTHON")

    #### Convert polygon to raster
    region_poly_ras = os.path.join(input_dir,"region_poly_ras.tif")
    arcpy.PolygonToRaster_conversion(region_poly_select_name,"gridcode",region_poly_ras,"CELL_CENTER","NONE","1")

    ### Convert foreground sink TO 0
    ras_sink_bg = ras_mf - ras_mf
    ras_sink_bg.save(os.path.join(input_dir,"ras_sink_bg.tif"))

    #ras_sink_final = "ras_sink_final"
    in_ras_list = [region_poly_ras,ras_sink_bg]
    ras_sink_final_name = arcpy.sa.CellStatistics(in_ras_list,"SUM","DATA")
    arcpy.env.extent = ras_mf.extent
    ras_sink_final_name = arcpy.sa.ApplyEnvironment(ras_sink_final_name)
    #ras_name.save(os.path.join(input_dir,"ras_name.tif"))
    ### Convert foreground sink
    dem_name = arcpy.sa.Con(ras_sink_final_name==1,ras_mf,ras_fill)
    dem_name = arcpy.sa.ApplyEnvironment(dem_name)
    dem_name.save(os.path.join(input_dir,"dem_final.tif"))

    ### buffer sink area
    sink_buffer_name = os.path.join(input_dir,"sink_buffer.shp")
    sqlExp = str(buffer_dist) + " Meters"
    arcpy.Buffer_analysis(region_poly_select_name,sink_buffer_name,sqlExp,"","","ALL","")

    dem_sink = arcpy.sa.ExtractByMask(dem_name,sink_buffer_name)
    dem_sink = arcpy.sa.ApplyEnvironment(dem_sink)
    arcpy.CopyRaster_management(dem_sink,outPutRaster)
    #dem_sink.save(outPutRaster)
    #dem_sink.save(os.path.join(input_dir,"sink_buffer.tif"))

    dem_sink_depth = ras_fill - dem_name
    dem_sink_depth_name = arcpy.sa.Con(dem_sink_depth>0,dem_sink)
    dem_sink_depth_name = arcpy.sa.ApplyEnvironment(dem_sink_depth_name)
    dem_sink_depth_name.save(os.path.join(input_dir,"sink_no_buffer.tif"))

    sink_depth = arcpy.sa.Con(dem_sink_depth>0,dem_sink_depth)
    sink_depth = arcpy.sa.ApplyEnvironment(sink_depth)
    sink_depth.save(os.path.join(input_dir,"sink_depth.tif"))

    arcpy.Delete_management(region_poly_name)
    arcpy.Delete_management(region_poly_ras)
    arcpy.Delete_management(sink_buffer_name)
    arcpy.Delete_management(os.path.join(input_dir,"ras_sink_bg.tif"))

    ### These data can be saved when needed.
    ### Save to a geodatabase
    if os.path.splitext(gdb)[1].lower() == ".gdb":
        arcpy.CopyRaster_management(os.path.join(input_dir,"sink_no_buffer.tif"),os.path.join(gdb,"sink_no_buffer"))
        arcpy.CopyRaster_management(os.path.join(input_dir,"dem_final.tif"),os.path.join(gdb,"dem_final"))
        arcpy.CopyRaster_management(os.path.join(input_dir,"sink_depth.tif"),os.path.join(gdb,"sink_depth"))
        arcpy.CopyRaster_management(os.path.join(input_dir,"dem_filled.tif"),os.path.join(gdb,"dem_filled"))
        arcpy.CopyFeatures_management(region_poly_select_name,os.path.join(gdb,"sink_poly"))
        arcpy.Delete_management(region_poly_select_name)
        arcpy.Delete_management(os.path.join(input_dir,"sink_no_buffer.tif"))
        arcpy.Delete_management(os.path.join(input_dir,"dem_final.tif"))
        arcpy.Delete_management(os.path.join(input_dir,"sink_depth.tif"))
        arcpy.Delete_management(os.path.join(input_dir,"dem_filled.tif"))

    #arcpy.Delete_management(region_poly_select_name)
    #arcpy.Delete_management(os.path.join(input_dir,"sink_no_buffer.tif"))
    #arcpy.Delete_management(os.path.join(input_dir,"dem_final.tif"))
    #arcpy.Delete_management(os.path.join(input_dir,"sink_depth.tif"))

    print("PreProcess Done!")
    return outPutRaster
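
A hypothetical invocation of PreProcess; the paths and the size/buffer values are assumptions for illustration:
PreProcess(r'C:\data\dem.tif', min_size=1000, buffer_dist=30,
           outPutRaster=r'C:\data\output.gdb\dem_sink')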
Example #12
import csv
import os
import glob
import shutil
from shutil import copyfile

import arcpy
import arcgisscripting

gp = arcgisscripting.create()

arcpy.CheckOutExtension("3D")
f = open('list.csv')
reader = csv.reader(f)
count = 0
for row in reader:
    count = count + 1
    print count
    print row[0]
    fileName = str(row[0])
    inCopy = "I:/APS2008Resource/" + fileName + ".aux.xml"
    outCopy = "I:/APS2008Resource/blockfiles/uncompress/working/"+ fileName + ".aux.xml"
    print fileName
    print inCopy
    print outCopy
    copyfile(inCopy, outCopy)
##    domain = tifFile[:-4] + '_domain.shp'
##    bufferDomain =  tifFile[:-4] + '_buffer.shp'
##    clip =  tifFile[:-4] + '_clip.tif'
##    print 'working on domain'
##    arcpy.RasterDomain_3d(tifFile, domain, "POLYGON")
Example #13
prjFile = os.path.join(
    ap.GetInstallInfo()["InstallDir"], sPath,
    sFile)  #Creating the path to the projection file without having to find it

spatialRef = ap.SpatialReference(prjFile)

#Create the shape file with a spatial reference defined
ap.DefineProjection_management(output1, spatialRef)

#Step 2
pts_feat = "bldg_centroids.shp"
raster = "manh_dem.tif"
output2 = "cen_el.shp"

#Spatial Analyst extension needed for this
ap.CheckOutExtension("Spatial")

#Extract the elevation values at the building centroids
#ERROR 010026: Unable to copy dataset C:\Users\dbaron\PycharmProjects\gtech731\manh_data\cen_el.shp to ./manh_data\cen_el.shp.
#What am I doing wrong here? This is exactly how you have it written in your example
ap.sa.ExtractValuesToPoints(pts_feat, raster, output2)

#Step 3
output3 = "bldg_elev.shp"

#Join the centroid elevations with the buildings shapefile
ap.SpatialJoin_analysis("manh_bldgs.shp", output2, output3)

#Print out the building names
rows = ap.SearchCursor(output2, "RASTERVALU <= 14")
Example #14
import os
import sys
import time
import uuid
import shutil
import logging
import multiprocessing
from functools import partial
# 3rd party imports
import arcpy
from arcpy import da
from arcpy import env
import numpy as np
import math

arcpy.CheckOutExtension('GeoStats')

###########################################################################
module = "positional_offset.py"
logging.basicConfig(stream=sys.stderr,
                    level=logging.DEBUG,
                    format='%(name)s (%(levelname)s): %(message)s')
log = logging.getLogger(module)
###########################################################################

env.overwriteOutput = True

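# WGS 84 / Pseudo-Mercator (Web Mercator)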
sr = arcpy.SpatialReference(3857)


class FunctionError(Exception):
Example #15
def main():

    """
    Pseudocode
    1. Get sun position from http://aa.usno.navy.mil/data/docs/AltAz.php
    2. Save data to excel
    3. Convert excel to table conversion
    4. set new variable for total hour and other parameters for hillshade
    5. To create the mean solar radiation raster make a search cursor that:
        (For loop 1)
            5.1 compute total hour
            5.2 also the parameters for hillshade, (altitude = row[1])
            5.3 Compute hillshade for each hour
            5.4 Add the rasters that are in the day time (solar)
        create a new list to store solar
    6. for loop 2 (for items in solar list)
        6.1 add all raster
        6.2 Find the average
    7. Create a map.
    """

    import arcpy
    from arcpy import env
    from arcpy.sa import *

    import csv
    import io

    #Opening csv files: https://docs.python.org/2/library/csv.html

    with open("Lab_10_Data.csv","rb") as File:
        reader = csv.reader(File)
        for row in reader:
            if row[1] == "-8.8":
                A1 = row[1]
                Alitude1 = float(A1)
            elif row[1] == "4.4":
                A2 = row[1]
                Alitude2 = float(A2)
            elif row[1] == "17.2":
                A3 = row[1]
                Alitude3 = float(A3)
            elif row[1] == "29.7":
                A4 = row[1]
                Alitude4 = float(A4)
            elif row[1] == "41.5":
                A5 = row[1]
                Alitude5 = float(A5)
            elif row[1] == "51.6":
                A6 = row[1]
                Alitude6 = float(A6)
            elif row[1] == "58.3":
                A7 = row[1]
                Alitude7 = float(A7)
            elif row[1] == "59.4":
                A8 = row[1]
                Alitude8 = float(A8)
            elif row[1] == "54.2":
                A9 = row[1]
                Alitude9 = float(A9)
            elif row[1] == "44.9":
                A10 = row[1]
                Alitude10 = float(A10)
            elif row[1] == "33.6":
                A11 = row[1]
                Alitude11 = float(A11)
            elif row[1] == "21.3":
                A12 = row[1]
                Alitude12 = float(A12)
            elif row[1] == "8.5":
                A13 = row[1]
                Alitude13 = float(A13)
            elif row[1] == "-4.6":
                A14 = row[1]
                Alitude14 = float(A14)

            if row[2] == "85.5":
                B1 = row[2]
                Azimuth1 = float(B1)
            elif row[2] == "93":
                B2 = row[2]
                Azimuth2 = float(B2)
            elif row[2] == "100.7":
                B3 = row[2]
                Azimuth3 = float(B3)
            elif row[2] == "109.7":
                B4 = row[2]
                Azimuth4 = float(B4)
            elif row[2] == "121.2":
                B5 = row[2]
                Azimuth5 = float(B5)
            elif row[2] == "137.4":
                B6 = row[2]
                Azimuth6 = float(B6)
            elif row[2] == "161":
                B7 = row[2]
                Azimuth7 = float(B7)
            elif row[2] == "190.1":
                B8 = row[2]
                Azimuth8 = float(B8)
            elif row[2] == "216.1":
                B9 = row[2]
                Azimuth9 = float(B9)
            elif row[2] == "234.4":
                B10 = row[2]
                Azimuth10 = float(B10)
            elif row[2] == "247.1":
                B11 = row[2]
                Azimuth11 = float(B11)
            elif row[2] == "256.7":
                B12 = row[2]
                Azimuth12 = float(B12)
            elif row[2] == "264.8":
                B13 = row[2]
                Azimuth13 = float(B13)
            elif row[2] == "272.3":
                B14 = row[2]
                Azimuth14 = float(B14)

    #http://desktop.arcgis.com/en/arcmap/10.3/tools/spatial-analyst-toolbox/hillshade.htm

    arcpy.env.workspace = "C:\Users\patri\Desktop\Geoprogramming\Lab10_RasterOps"

    arcpy.env.overwriteOutput= True

    arcpy.CheckOutExtension("Spatial")

    Input_Raster = "smdem"


    #These values did not work because the altitude was negative.
    """Hillshade1 = Hillshade(Input_Raster,Azimuth1,Alitude1,"SHADOWS")
    Hillshade14 = Hillshade(Input_Raster,Azimuth14,Alitude14,"SHADOWS")"""

    Hillshade2 = Hillshade(Input_Raster,Azimuth2,Alitude2,"SHADOWS")
    Hillshade3 = Hillshade(Input_Raster,Azimuth3,Alitude3,"SHADOWS")
    Hillshade4 = Hillshade(Input_Raster,Azimuth4,Alitude4,"SHADOWS")
    Hillshade5 = Hillshade(Input_Raster,Azimuth5,Alitude5,"SHADOWS")
    Hillshade6 = Hillshade(Input_Raster,Azimuth6,Alitude6,"SHADOWS")
    Hillshade7 = Hillshade(Input_Raster,Azimuth7,Alitude7,"SHADOWS")
    Hillshade8 = Hillshade(Input_Raster,Azimuth8,Alitude8,"SHADOWS")
    Hillshade9 = Hillshade(Input_Raster,Azimuth9,Alitude9,"SHADOWS")
    Hillshade10 = Hillshade(Input_Raster,Azimuth10,Alitude10,"SHADOWS")
    Hillshade11 = Hillshade(Input_Raster,Azimuth11,Alitude11,"SHADOWS")
    Hillshade12 = Hillshade(Input_Raster,Azimuth12,Alitude12,"SHADOWS")
    Hillshade13 = Hillshade(Input_Raster,Azimuth13,Alitude13,"SHADOWS")

    MEGA_Hillshade_List = [
        Hillshade2, Hillshade3, Hillshade4, Hillshade5, Hillshade6, Hillshade7,
        Hillshade8, Hillshade9, Hillshade10, Hillshade11, Hillshade12, Hillshade13
    ]

    MEGA_Hillshade_sum = sum(MEGA_Hillshade_List)

    MEGA_Daylight_hours = len(MEGA_Hillshade_List)

    Average_Solar_Radiation_Hillshade = MEGA_Hillshade_sum/MEGA_Daylight_hours

    Average_Solar_Radiation_Hillshade.save()

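# A more compact sketch of the same workflow: instead of one hard-coded
# variable per CSV row, collect the altitude/azimuth pairs in a loop and keep
# only daylight hours (altitude > 0). Same assumptions as above: column 1
# holds altitude, column 2 holds azimuth, "smdem" is the input DEM, and the
# Spatial extension is checked out.
def mean_hillshade_from_csv(csv_path="Lab_10_Data.csv", dem="smdem"):
    import csv
    from arcpy.sa import Hillshade
    hillshades = []
    with open(csv_path, "rb") as f:
        for row in csv.reader(f):
            try:
                altitude, azimuth = float(row[1]), float(row[2])
            except (IndexError, ValueError):
                continue  # skip header or malformed rows
            if altitude > 0:  # the sun is below the horizon otherwise
                hillshades.append(Hillshade(dem, azimuth, altitude, "SHADOWS"))
    # map algebra: summing Raster objects and dividing yields the mean raster
    return sum(hillshades) / len(hillshades)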
def S3_extract_all_by_inundation_boundary():
    # reduce file size by masking by inundation boundary
    infiles = [
        #'Z:/GeoData/Trueheart/Depth (Max).bathymetry029.tif',
        'Z:/GeoData/Temp/Agriculture2001.tif',
        'Z:/GeoData/Temp/ForestWaterWetland1992.tif',
        #'Z:/GeoData/LCB/NAIP/outputs/mosaic_ndvi_2016_0p6m.tif'
        ]
    outfiles = [
        #'masked_depth.tif',
        'masked_Agriculture2001.tif',
        'masked_ForestWaterWetland1992.tif',
        #'masked_mosaic_ndvi_2016_0p6m.tif'
        ]
    mask = 'Z:/GeoData/Trueheart/Inundation Boundary (Max Value_0).shp'
    for i in range(len(infiles)):
        arcpy_ExtractByMask(infiles[i],mask,outfiles[i],deleteAfter=False,clip=True,resample=True)
        

    
def S2_extract_depth_percentiles():
    # set input file and destination paths
    infile = u'Z:\GeoData\Trueheart\Depth (Max).bathymetry029.tif'
    dst = u'Z:\GeoData\Trueheart' # destination path
    # calculate percentiles
    pct = arcpy_CalculatePercentileFromRaster(inRaster=infile, 
    nbins=10, 
    omitValue=-9999.0, 
    trimMinMax=True,
    min=0,
    max=2.25)
    '''
    depth percentiles
    {'0.0%': 0.0009994507, 
    '10.0%': 0.27291107, 
    '20.0%': 0.46861267, 
    '30.0%': 0.62002563, 
    '40.0%': 0.7600937, 
    '50.0%': 0.899498, 
    '60.0%': 1.0399399, 
    '70.0%': 1.2348328, 
    '80.0%': 1.4948654, 
    '90.0%': 1.8216858, 
    '100.0%': 2.25}
    '''
    # select dictionary keys for lower and higher bounds of depth
    lower = ['0.0%','20.0%','40.0%','60.0%','80.0%']
    higher = ['20.0%','40.0%','60.0%','80.0%','100.0%']
    # for each range
    for i in range(len(lower)):
        low = pct[lower[i]] 
        high = pct[higher[i]]
        outfile = dst+'/depth_pct_{0:4.2f}-{1:4.2f}m.tif'.format(low,high)
        SQL = '{0} > {1} AND {0} < {2}'.format('VALUE',low,high)
        print('extracting file: {}'.format(outfile))
        arcpy_ExtractByAttributes(infile,SQL,outfile)
    
def S1_extract_land_use_classes():
    #   A. get raster of areas that have been forested since 1942, 1971, and 2001 respectively
    #       1992 and 2001 land use codes:
    agriculture = 2
    brush = 3
    forest = 4
    water = 5
    wetland = 6 
    #       set SQL conditions and format string
    conditions = " {0} = {1} OR {0} = {2} OR {0} = {3} "
    SQL = conditions.format('VALUE',forest,water,wetland)
    i=u"Z:\GeoData\LCB\LCLULCB92\lclulcb92"
    o="ForestWaterWetland1992.tif"
    arcpy_ExtractByAttributes(inRaster=i,inSQL=SQL,outRaster=o)
    #   B. get raster of areas that were not farms in 2001 (<> means "not equal to")
    conditions = " {0} <> {1}"
    SQL = conditions.format('VALUE',agriculture)
    i=u"Z:\GeoData\LCB\LCLULCB01\lclulcb01"
    o="Agriculture2001.tif"
    arcpy_ExtractByAttributes(inRaster=i,inSQL=SQL,outRaster=o)
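# For reference, the two format calls above expand to plain SQL strings, e.g.:
#   " {0} = {1} OR {0} = {2} OR {0} = {3} ".format('VALUE', 4, 5, 6)
#       -> ' VALUE = 4 OR VALUE = 5 OR VALUE = 6 '
#   " {0} <> {1}".format('VALUE', 2)
#       -> ' VALUE <> 2'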

def arcpy_ResampleFromTemplate(inRast,template,outRast,method='NEAREST'):
    Y = arcpy.GetRasterProperties_management(template,'CELLSIZEY')
    X = arcpy.GetRasterProperties_management(template,'CELLSIZEX')
    arcpy.Resample_management(inRast, outRast, "{} {}".format(X,Y), method)
    return()
 
def arcpy_SelectFeatureByAttribute(inFeature,inSQL,outFeature):
    arcpy.MakeFeatureLayer_management(inFeature, 'lyr') 
    # Write the selected features to a new featureclass
    # Within selected features, further select only those cities which have a population > 10,000   
    arcpy.SelectLayerByAttribute_management('lyr', 'NEW_SELECTION', inSQL)
    arcpy.CopyFeatures_management('lyr', outFeature) 


def arcpy_ExtractByAttributes(inRaster,inSQL="VALUE > 1000",outRaster="extracted",deleteAfter=True):
    # Description: Extracts the cells of a raster based on a logical query.
    # Requirements: Spatial Analyst Extension 
    # Check out the ArcGIS Spatial Analyst extension license
    # Execute ExtractByAttributes
    print("extracting ",inSQL," from ",inRaster, "\n...")
    attExtract = arcpy.sa.ExtractByAttributes(inRaster, inSQL) 
    # Save the output 
    print("success! Saving file: ",outRaster)
    attExtract.save(outRaster)
    if deleteAfter:
        del attExtract
    return()
   
   
def arcpy_CalculatePercentileFromRaster(inRaster, nbins=10, omitValue=None, trimMinMax=False, min=None, max=None):
    # requires arcpy and numpy
    array = arcpy.RasterToNumPyArray(inRaster)
    # remove no data values (if given), returning a flattened array
    print('removing no data values and flattening array.....')
    flatArray = array.flatten()
    if omitValue is not None:
        flatArray = flatArray[flatArray != omitValue]
    if trimMinMax:
        print('trimming min and max values...')
        flatArray = numpy.clip(flatArray,min,max)
    print('sorting array....')
    flatArray = numpy.sort(flatArray)
    # report some summary stats
    print('n = ', numpy.size(flatArray))
    print('min = ', numpy.min(flatArray))
    print('median = ', flatArray[int(numpy.size(flatArray) * 0.50)])
    print('max = ', numpy.max(flatArray))
    
    percentiles = [None]*nbins
    percentiles[0] = numpy.min(flatArray)
    # add percentile values in steps to the list of percentiles
    print('populating list with percentile values...')
    for i in range(1,nbins):
        percentiles[i] = flatArray[int(numpy.size(flatArray)*i/nbins)]
    percentiles.append(numpy.max(flatArray))
    pkeys = [str(k/nbins*100)+'%' for k in range(nbins+1)]
    pdict = dict(zip(pkeys,percentiles))
    print(pdict)
    return(pdict)
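# If exact percentiles are all that is needed, numpy's built-in routine can
# replace the manual sort-and-index bookkeeping above. A minimal sketch under
# the same assumptions (flattened raster, optional nodata value to drop):
def arcpy_CalculatePercentileFromRaster_np(inRaster, nbins=10, omitValue=None):
    values = arcpy.RasterToNumPyArray(inRaster).flatten()
    if omitValue is not None:
        values = values[values != omitValue]
    qs = [100.0 * k / nbins for k in range(nbins + 1)]
    # numpy.percentile sorts and interpolates internally
    return dict(zip(['{}%'.format(q) for q in qs], numpy.percentile(values, qs)))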


def arcpy_NormalizeRasterValues(inRaster,outRaster,maxValue=1,deleteAfter=False):
    # note that zonal statistics already calculates this so it may be faster to 
    # use http://help.arcgis.com/En/Arcgisdesktop/10.0/Help/index.html#//0017000000m7000000
    # load data, convert to array
    # requires arcpy and numpy
    orig_raster = arcpy.Raster(inRaster)
    array = arcpy.RasterToNumPyArray(inRaster)
    # do your math: min-max normalize to the range [0, maxValue]
    normalized_array = (array - array.min()) / float(array.max() - array.min()) * maxValue
    # back to a raster (use the Raster object for its georeferencing properties)
    normalized_raster = arcpy.NumPyArrayToRaster(
        in_array=normalized_array,
        lower_left_corner=orig_raster.extent.lowerLeft,
        x_cell_size=orig_raster.meanCellWidth,
        y_cell_size=orig_raster.meanCellHeight)
    # and scene
    normalized_raster.save(outRaster)
    if deleteAfter:
        del normalized_raster
    return()
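# Usage sketch for the normalizer above (paths are placeholders):
# arcpy_NormalizeRasterValues('Z:/GeoData/Temp/dem.tif',
#                             'Z:/GeoData/Temp/dem_norm.tif', maxValue=100)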
    

def arcpy_ExtractByMask(inRaster,inMask,outRaster,deleteAfter=False,clip=True,resample=True):
    # takes either a raster or a shapefile and extracts data from inRaster inside mask
    # requires spatial analyst extension
    import traceback
    if clip:
        try:
            e = arcpy.Describe(inMask).extent # create object of extent description
            extent = "{} {} {} {}".format(e.XMin,e.YMin,e.XMax,e.YMax)
        except:
            print("oops something went wrong",traceback.print_exc())
        print('clipping ',inRaster,'to extent:\n',extent)
        clipped = 'clip_tmp.tif'  # scratch output in the current workspace
        arcpy.Clip_management(inRaster,extent,clipped)
        inRaster = clipped
    if resample:
        # resample the mask onto the raster's cell size (assumes a raster mask)
        resampled = 'mask_resampled.tif'  # scratch output in the current workspace
        arcpy_ResampleFromTemplate(inMask,inRaster,resampled)
        inMask = resampled
    print("extracting by mask...")
    masked = arcpy.sa.ExtractByMask(inRaster,inMask)
    masked.save(outRaster)
    if deleteAfter:
        del masked

def load_modules():
    # Loads modules and links directory paths
    # (note: imports made here are local to this function)
    import os
    import glob
    import numpy
    import arcpy
    from arcpy import env 
    arcpy.CheckOutExtension("Spatial")
    return()
   
   
def print_dir_contents(dir_path=None):
    dir_path = dir_path or os.getcwd()
    print("Directory:\n", dir_path)
    print("-"*15,"contents","-"*15,"\n",os.listdir(dir_path))
    return()

def setup_workspace(interactive):
    # set local variables
    import os
    import glob
    import numpy
    import arcpy
    from arcpy import env 
    arcpy.CheckOutExtension("Spatial")
    print("Current environment workspace:\n", os.getcwd())
    
    if interactive:
        wd = env.workspace = input("copy environment workspace path below\n(e.g. ' Z:/awiegman/GeoData ')\n>>>")
        dirs = [wd]
        more = input("(A) press enter continue\n(B) press any other key then enter to link more directories)")
        while len(more)>0:
            dirs.append(input("copy environment workspace path below\n(e.g. ' Z:/awiegman/GeoData ')\n>>>"))
            print_dir_contents()
            more = input("(A) press enter continue\n(B) press any other key then enter to link more directories)")
        return(dirs)
    else:
        wd = env.workspace = 'Z:/awiegman/GeoData/Temp'
        print("New environment workspace: ",wd)
        return([wd])


if __name__ == '__main__':
    main(interactive=False)
Example #17
import os, numpy as np
if 1:  #arcpy2
    import arcpy
    from arcpy.sa import *
    arcpy.CheckOutExtension("Spatial")
    arcpy.CheckOutExtension("3D")
    arcpy.CheckOutExtension("GeoStats")
    arcpy.env.overwriteOutput = True
    cd = os.getcwd()  # assumed workspace: the current directory
    arcpy.env.workspace = cd
else:  #python3
    try:
        from osgeo import gdal
        from osgeo import ogr
        from osgeo import osr
    except:
        import ogr, osr, gdal
    from IPython import embed

if __name__ == '__main__':
    root = os.path.dirname(os.path.abspath(__file__))
    outdir = ""
    if not os.path.exists(outdir): os.makedirs(outdir)
    print('finished')
    print("add a line in working directory")
    embed()
Example #18
def main(lf_dir=str(), crit_lf=float(), prj_name=str(), unit=str(), version=str(), n_m=float(), txcr=float(), s_sed=2.68):
    """ derive and draw stabilizing features for vegetation plantings
    crit_lf = 2.5               # years of minimum plant survival without stabilization
    prj_name = "TBR"             # corresponding to folder name
    unit = "us" or "si"
    version = "v10"             # type() =  3-char str: vII
    n_m = Mannings n
    txcr = critical dimensionless bed shear stress for grain motion
    s_sed = relative grain density (specific gravity) of the sediment
    """
    logger = logging.getLogger("logfile")
    logger.info("STABILIZING TERRAIN ----- ----- ----- -----")
    error = False
    if unit == "us":
        area_units = "SQUARE_FEET_US"
        ft2_to_acres = config.ft2ac
        n_m = n_m / 1.49  # (s/ft^(1/3)) global Manning's n where k =1.49 converts to US customary
    else:
        area_units = "SQUARE_METERS"
        ft2_to_acres = 1.0

    arcpy.CheckOutExtension('Spatial')
    arcpy.gp.overwriteOutput = True

    dir2pp = config.dir2pm + prj_name + "_" + version + "\\"

    # folder settings
    ras_dir = dir2pp + "Geodata\\Rasters\\"
    shp_dir = dir2pp + "Geodata\\Shapefiles\\"
    quant_dir = dir2pp + "Quantities\\"

    # file and variable settings
    xlsx_target = dir2pp + prj_name + "_assessment_" + version + ".xlsx"
    feature_dict = {"Large wood": 211,
                    "Bioengineering (veget.)": 213,
                    "Bioengineering (mineral)": 214,
                    "Angular boulders (instream)": 215}

    # LOOK UP INPUT RASTERS
    try:
        project_ras = arcpy.Raster(ras_dir + "ProjectArea.tif")
    except:
        try:
            project_ras = arcpy.Raster(ras_dir + "projectarea.tif")
        except:
            logger.info("ERROR: Could not create Raster of the project area.")
            return -1

    try:
        hy_condition = lf_dir.split("_lyr")[0].split("\\")[-1].split("/")[-1]
        logger.info("Looking up hydraulic Rasters for %s ..." % hy_condition)
    except:
        logger.info("ERROR: Could not find hydraulic Rasters (associated with %s)." % lf_dir)
        return -1
    try:
        h = cPa.FlowDepth(hy_condition)
        u = cPa.FlowVelocity(hy_condition)
        info = cRIL.Info(hy_condition)
        lifespans = info.lifespan_read()
    except:
        logger.info("ERROR: Could not find hydraulic Rasters (01_Conditions/%s)." % hy_condition)
        return -1

    try:
        logger.info("Looking up grain lifespan Raster ...")
        max_lf_grains = arcpy.Raster(lf_dir + "lf_grains.tif")
    except:
        logger.info("ERROR: Could not find Lifespan Raster (%slf_grains.tif)." % lf_dir)
        return -1

    logger.info("Retrieving wood lifespan Raster ...")
    try:
        lf_wood = arcpy.Raster(lf_dir + "lf_wood.tif")
    except:
        lf_wood = Float(0.0)
        logger.info("WARNING: Could not find Lifespan Raster (%slf_wood.tif) -- continue anyway using 0-wood-lifespans ..." % lf_dir)

    logger.info("Retrieving bioengineering lifespan Raster ...")
    try:
        lf_bio = arcpy.Raster(lf_dir + "lf_bio_v_bio.tif")
    except:
        lf_bio = Float(0.0)
        logger.info("WARNING: Could not find Lifespan Raster (%slf_bio.tif) -- continue anyway using 0-bio-lifespans ..." % lf_dir)
    logger.info(" -- OK (Lifespan Rasters read)\n")

    # EVALUATE BEST STABILIZATION FEATURES
    tar_lf = fGl.get_closest_val_in_list(lifespans, crit_lf)
    if int(tar_lf) != int(crit_lf):
        logger.info(
            "WARNING: Substituting user-defined crit. lifespan ({0}) with {1} (Condition: {2}).".format(str(crit_lf),
                                                                                                        str(tar_lf),
                                                                                                        hy_condition))
    try:
        logger.info("Calculating required stable grains sizes to yield a lifespan of %s years ..." % str(tar_lf))
        arcpy.env.extent = max_lf_grains.extent
        i = lifespans.index(int(tar_lf))
        stab_grain_ras = Con(~IsNull(project_ras), Float(Square(u.rasters[i] * Float(n_m)) / ((Float(s_sed) - 1.0) * Float(txcr) * Power(h.rasters[i], (1.0 / 3)))))
    except arcpy.ExecuteError:
        logging.info("ExecuteERROR: (arcpy).")
        logging.info(arcpy.GetMessages(2))
        arcpy.AddError(arcpy.GetMessages(2))
        return -1
    except Exception as e:
        logging.info("ExceptionERROR: (arcpy).")
        logging.info(e.args[0])
        arcpy.AddError(e.args[0])
        return -1
    except:
        logging.info("ERROR: Could not calculate stable grain size Raster for %s." % str(tar_lf))
        logging.info(arcpy.GetMessages())
        return -1

    try:
        logger.info("Assigning stabilization features (hierarchy: Streamwood -> Bioengineering (other) -> Boulder paving")
        arcpy.env.extent = max_lf_grains.extent
        best_stab_i = Con(max_lf_grains <= crit_lf, Con(~IsNull(lf_wood), Con(lf_wood > crit_lf,
                                                                              Int(feature_dict["Large wood"])),
                                                        Con(~IsNull(lf_bio), Con(lf_bio > crit_lf,
                                                                                 Int(feature_dict["Bioengineering (veget.)"]),
                                                                                 Int(feature_dict["Bioengineering (mineral)"])),
                                                            Int(feature_dict["Angular boulders (instream)"]))))
        best_boulders = Con(max_lf_grains <= crit_lf, Con(IsNull(best_stab_i), Float(stab_grain_ras)))
        best_stab = Con(IsNull(best_stab_i), Con(~IsNull(best_boulders), Int(feature_dict["Angular boulders (instream)"])), Int(best_stab_i))
        logger.info(" -- OK (Stabilization assessment)\n")
    except:
        logger.info("ERROR: Best stabilization assessment failed.")
        return -1

    # SAVE RASTERS
    try:
        logger.info("Saving results Raster " + ras_dir + "terrain_stab.tif")
        best_stab.save(ras_dir + "terrain_stab.tif")
        logger.info(" -- OK (Raster saved.)")
    except:
        logger.info("ERROR: Result geofile saving failed.")
    try:
        logger.info("Saving results Raster " + ras_dir + "terrain_boulder_stab.tif")
        best_boulders.save(ras_dir + "terrain_boulder_stab.tif")
        logger.info(" -- OK (Stabilization Rasters saved)\n")
    except:
        logger.info("ERROR: Result geofile saving failed.")

    # SHAPEFILE CONVERSION AND STATS
    try:
        logger.info("Extracting quantities from geodata ...")
        logger.info(" >> Converting terrain_stab.tif to polygon shapefile ...")
        t_stab_shp = shp_dir + "Terrain_stab.shp"
        conversion_success = True
        try:
            arcpy.RasterToPolygon_conversion(best_stab, t_stab_shp, "NO_SIMPLIFY")
            if not fGl.verify_shp_file(t_stab_shp):
                logger.info("NO BIOENGINEERING STABILIZATION MEASURE IDENTIFIED (EMPTY: %s)." % t_stab_shp)
        except:
            conversion_success = False

        logger.info(" >> Converting terrain_boulder_stab.tif to layer ...")
        t_boulder_shp = shp_dir + "Terrain_boulder_stab.shp"
        try:
            arcpy.RasterToPolygon_conversion(Int(best_boulders + 1.0), t_boulder_shp, "NO_SIMPLIFY")
            if not fGl.verify_shp_file(t_boulder_shp):
                logger.info("NO BOULDER STABILIZATION MEASURE IDENTIFIED (EMPTY: %s)." % t_boulder_shp)
        except:
            if not conversion_success:
                logger.info("No stabilization requirement identified. Returning without action.")
                return -1

        logger.info(" >> Calculating area statistics ... ")
        try:
            arcpy.AddField_management(t_stab_shp, "F_AREA", "FLOAT", 9)
        except:
            logger.info("    * field F_AREA already exists or the dataset is opened by another software.")
        try:
            arcpy.CalculateGeometryAttributes_management(t_stab_shp, geometry_property=[["F_AREA", "AREA"]],
                                                         area_unit=area_units)
        except:
            logger.info("    * no terrain stabilization applicable ")

        logger.info(" >> Adding field (stabilizing feature) ... ")
        try:
            arcpy.AddField_management(t_stab_shp, "Stab_feat", "TEXT")
        except:
            logger.info("    * field Stab_feat already exists ")
        logger.info(" >> Evaluating field (stabilizing feature) ... ")
        inv_feature_dict = {v: k for k, v in feature_dict.items()}
        code_block = "inv_feature_dict = " + str(inv_feature_dict)
        try:
            arcpy.CalculateField_management(t_stab_shp, "Stab_feat", "inv_feature_dict[!gridcode!]", "PYTHON", code_block)
        except:
            logger.info("    * no plant stabilization added ... ")
        logger.info(" >> Exporting tables ...")
        arcpy.TableToTable_conversion(t_stab_shp, quant_dir, "terrain_stab.txt")
        logger.info(" -- OK (Quantity export)\n")
    except:
        logger.info("ERROR: Shapefile operations failed.")
        return -1

    # PREPARE AREA DATA (QUANTITIES)
    logger.info("Processing table statistics ...")
    write_dict = {}
    for k in feature_dict.keys():
        write_dict.update({k: 0.0})  # set to zero for surface count

    stat_data = fGl.read_txt(quant_dir + "terrain_stab.txt")
    logger.info(" >> Extracting relevant area sizes ...")

    for row in stat_data:
        try:
            write_dict[inv_feature_dict[int(row[0])]] += row[1]
        except:
            logger.info("      --- Unknown key: " + str(int(row[0])))
            error = True

    if unit == "us":
        logger.info(" >> Converting ft2 to acres ...")
        for k in write_dict.keys():
            write_dict[k] = write_dict[k] * float(ft2_to_acres)
    logger.info(" -- OK (Area extraction finished)\n")

    # WRITE AREA DATA TO EXCEL FILE
    logger.info("Writing results to costs workbook (sheet: from_geodata) ...")
    fGl.write_dict2xlsx(write_dict, xlsx_target, "E", "F", 12)

    # CLEAN UP useless shapefiles
    logger.info("Cleaning up redundant shapefiles ...")
    arcpy.env.workspace = shp_dir
    all_shps = arcpy.ListFeatureClasses()
    for shp in all_shps:
        if "_del" in str(shp):
            try:
                arcpy.Delete_management(shp)
            except:
                logger.info(str(shp) + " is locked. Remove manually to avoid confusion.")
    arcpy.env.workspace = dir2pp + "Geodata\\"
    logger.info(" -- OK (Clean up)\n")

    if not error:
        fGl.open_file(xlsx_target)
def NAtoCSV_trans(inSpace, inGdb, inNetworkDataset, impedanceAttribute,
                  accumulateAttributeName, inOrigins, inDestinations,
                  outNALayerName, outFile, outField):
    '''
    Same as NAtoCSV, but removed "oneway" since the Transit network does not have this attribute
    Also changed the SNAP code to "ions" for all stations
    '''
    fields = outField
    import arcpy
    from arcpy import env
    try:
        #Check out the Network Analyst extension license
        if arcpy.CheckExtension("Network") == "Available":
            arcpy.CheckOutExtension("Network")
        else:
            # Warn the user; the tool cannot run without Network Analyst
            print "Network license unavailable, make sure you have network analyst extension installed."

        #Set environment settings
        env.workspace = inSpace + inGdb
        env.overwriteOutput = True

        #Create a new OD Cost matrix layer.
        outNALayer = arcpy.na.MakeODCostMatrixLayer(
            inNetworkDataset, outNALayerName, impedanceAttribute, "#", "#",
            accumulateAttributeName, "ALLOW_UTURNS", "#", "NO_HIERARCHY", "#",
            "NO_LINES", "#")

        #Get the layer object from the result object. The OD cost matrix layer can
        #now be referenced using the layer object.
        outNALayer = outNALayer.getOutput(0)

        #Get the names of all the sublayers within the OD cost matrix layer.
        subLayerNames = arcpy.na.GetNAClassNames(outNALayer)

        #Stores the layer names that we will use later
        originsLayerName = subLayerNames["Origins"]
        destinationsLayerName = subLayerNames["Destinations"]
        linesLayerName = subLayerNames["ODLines"]

        #Adjust field names
        #Exploit the fact that the detector feature is named hd_ML_snap, (or AllStationsML)
        #change the field mapping of Name to id_stn
        oriField = "Name ID_TAZ12A #"
        oriSort = "ID_TAZ12A"
        destField = "Name ID_TAZ12A #"
        destSort = "ID_TAZ12A"
        searchMetro = "BlueLine_Split SHAPE;GoldLine_split SHAPE;GreenLine_split SHAPE;LABus_prj NONE;LABus_prj_conn NONE;Metro_Tiger_Conn SHAPE;Orange_Conn SHAPE;OrangeLine_split SHAPE;RedLine_split SHAPE;Silver_Conn SHAPE;SilverLine_split SHAPE;TAZ_Tiger_Conn NONE;tl_2012_LAC_prj NONE;BusStopsWLines_prj NONE;Metro_Tiger_Conn_pt SHAPE;Orange_Conn_pt SHAPE;Silver_Conn_pt SHAPE;PreBusDPS_ND_Junctions NONE"
        searchTAZ = "BlueLine_Split NONE;GoldLine_split NONE;GreenLine_split NONE;LABus_prj NONE;LABus_prj_conn NONE;Metro_Tiger_Conn NONE;Orange_Conn NONE;OrangeLine_split NONE;RedLine_split NONE;Silver_Conn NONE;SilverLine_split NONE;TAZ_Tiger_Conn SHAPE;tl_2012_LAC_prj NONE;BusStopsWLines_prj NONE;Metro_Tiger_Conn_pt NONE;Orange_Conn_pt NONE;Silver_Conn_pt NONE;PreBusDPS_ND_Junctions NONE"
        print "Origins: ", inOrigins, " Destinations: ", inDestinations
        if "Station" in inOrigins:
            oriField = "Name id_stn #"
            oriSort = "id_stn"
            arcpy.AddLocations_na(outNALayer,
                                  originsLayerName,
                                  inOrigins,
                                  oriField,
                                  sort_field=oriSort,
                                  append="CLEAR",
                                  search_criteria=searchMetro)
            print "loaded stations onto transit network (search_criteria)"
        else:
            arcpy.AddLocations_na(outNALayer,
                                  originsLayerName,
                                  inOrigins,
                                  oriField,
                                  sort_field=oriSort,
                                  append="CLEAR",
                                  search_criteria=searchTAZ)
            print "loaded stations onto network"

        if "Station" in inDestinations:
            destField = "Name id_stn #"
            destSort = "id_stn"
            arcpy.AddLocations_na(outNALayer,
                                  destinationsLayerName,
                                  inDestinations,
                                  destField,
                                  sort_field=destSort,
                                  append="CLEAR",
                                  search_criteria=searchMetro)
            print "loaded stations onto transit network (search_criteria)"
        else:
            arcpy.AddLocations_na(outNALayer,
                                  destinationsLayerName,
                                  inDestinations,
                                  destField,
                                  sort_field=destSort,
                                  append="CLEAR",
                                  search_criteria=searchTAZ)
            print "loaded stations onto network"

        #Solve the OD cost matrix layer
        print "Begin Solving"
        arcpy.na.Solve(outNALayer)
        print "Done Solving"

        # Extract lines layer, export to CSV
        for lyr in arcpy.mapping.ListLayers(outNALayer):
            if lyr.name == linesLayerName:
                with open(outFile, 'w') as f:
                    #f.write(','.join(fields)+'\n') # csv headers
                    with arcpy.da.SearchCursor(lyr, fields) as cursor:
                        print "Successfully created lines searchCursor.  Exporting to " + outFile
                        for row in cursor:
                            f.write(','.join([str(r) for r in row]) + '\n')

        # Deleting using del outNALayer is not enough.  Need to delete within arcpy to release
        arcpy.Delete_management(outNALayer)

    except Exception as e:
        # If an error occurred, print line number and error message
        import sys
        tb = sys.exc_info()[2]
        print "An error occurred in NAtoCSV_Trans line %i" % tb.tb_lineno
        print str(e)

    finally:
        #Check the network analyst extension license back in, regardless of errors.
        arcpy.CheckInExtension("Network")
    def make_pdf_maps(self, *args):
        ## accepts args[0] as alternative directory for input layouts (mxd_dir)
        self.start_logging()
        self.logger.info(
            "----- ----- ----- ----- ----- ----- ----- ----- -----")
        self.logger.info("PDF - MAPPING")
        self.logger.info("Map format: ANSI E landscape (w = 44in, h = 34in)")
        self.logger.info(
            "----- ----- ----- ----- ----- ----- ----- ----- -----")
        self.logger.info(
            "                                                     ")

        try:
            length = args[0].__len__()
            if length > 3:
                self.output_mxd_dir = args[0]
                fg.chk_dir(self.input_dir)
        except:
            pass

        arcpy.CheckOutExtension("Spatial")
        arcpy.env.workspace = self.output_mxd_dir
        arcpy.env.overwriteOutput = True

        # new_src = arcpy.mapping.Layer(self.layout_dir + "lf_sym_ras.lyr")

        mxd_list = arcpy.ListFiles(
            "*.mxd")  # gets all layouts in arcpy.env.workspace

        for layout in mxd_list:
            self.logger.info(" >> Preparing map assembly: " +
                             str(layout[:-4]) + ".pdf (takes a while) ...")
            try:

                __outputPDF__ = arcpy.mapping.PDFDocumentCreate(
                    self.output_dir + str(layout[:-4]) + ".pdf")
                ## Instantiate mxd and df -- both need to be global variables here!!
                self.mxd = arcpy.mapping.MapDocument(self.output_mxd_dir +
                                                     layout)
                self.df = arcpy.mapping.ListDataFrames(self.mxd)[0]

                ## handle legend
                styleItem = arcpy.mapping.ListStyleItems(
                    self.layout_dir + "legend.ServerStyle", "Legend Items")
                legend = arcpy.mapping.ListLayoutElements(
                    self.mxd, "LEGEND_ELEMENT", "Legend")[0]
                legend.autoAdd = True
                ref_lyr_name = self.choose_ref_layer(str(
                    self.mxd.title[:-4]))  # cFi definition in prepare_maps
                sym_lyr = arcpy.mapping.ListLayers(self.mxd, ref_lyr_name,
                                                   self.df)[0]
                arcpy.mapping.RemoveLayer(self.df, sym_lyr)
                act_lyr = arcpy.mapping.ListLayers(self.mxd,
                                                   str(legend.items[0]),
                                                   self.df)[0]
                legend.updateItem(act_lyr, styleItem[0])

                arcpy.RefreshActiveView()
                # self.mxd.save()
                # legend.removeItem(leg.items[1])

                __tempPDFs__ = []
                __count__ = 0
                for xy in self.xy_center_points:
                    __count__ += 1
                    self.zoom2map(xy)
                    fig_name = "fig_" + str(
                        layout) + "_" + "%02d" % (__count__, )
                    __PDFpath__ = self.output_dir + fig_name + "_temp.pdf"
                    arcpy.mapping.ExportToPDF(self.mxd,
                                              __PDFpath__,
                                              image_compression="ADAPTIVE",
                                              resolution=96)
                    # arcpy.mapping.ExportToJPEG(self.mxd, __PDFpath__)
                    __outputPDF__.appendPages(str(__PDFpath__))
                    __tempPDFs__.append(
                        __PDFpath__)  # remember temp names to remove later on
                __outputPDF__.saveAndClose()

                for deletePDF in __tempPDFs__:
                    os.remove(deletePDF)

                del self.mxd, self.df

                self.logger.info(" >> Done. Map-PDF prepared in:")
                self.logger.info("    " + self.output_dir)

            except arcpy.ExecuteError:
                self.logger.info(arcpy.GetMessages(2))
                arcpy.AddError(arcpy.GetMessages(2))
                self.logger.info(" >> Mapping failed.")
                self.error = True
            except:
                self.logger.info(" >> Mapping failed.")
                self.error = True

        arcpy.CheckInExtension('Spatial')
# Date:        March 2019
# Author:      Maggie Hallerud ([email protected])
# -------------------------------------------------------------------------------

# User defined arguments:

# pf_path - path to parent folder that holds HUC8 folders
# ownership_path - path to ownership shapefile to be clipped
pf_path = r"C:\Users\ETAL\Dekstop\GYE_BRAT\wrk_Data"
ownership_path = r"C:\Users\ETAL\Dekstop\GYE_BRAT\wrk_Data\00_Projectwide\LandOwnership\BLM_National_Surface_Management_Agency\NationalSurfaceManagementAgency_ProjectArea.shp"

#  import required modules and extensions
import arcpy
import os
import re
arcpy.CheckOutExtension('Spatial')


def main():

    # change directory to the parent folder path
    os.chdir(pf_path)
    # list all folders in parent folder path - note this is not recursive
    dir_list = [x for x in os.listdir('.') if os.path.isdir(x)]
    # remove folders in the list that start with '00_' since these aren't our huc8 folders
    for dir in dir_list[:]:
        if dir.startswith('00_'):
            dir_list.remove(dir)
    # set arcpy environment settings
    arcpy.env.overwriteOutput = True  # overwrite output
    def prepare_layout(self):
        self.start_logging()
        self.logger.info(
            "----- ----- ----- ----- ----- ----- ----- ----- -----")
        self.logger.info("PREPARE MAPPING")
        self.logger.info("Map format: ANSI E landscape (w = 44in, h = 34in)")
        self.logger.info(
            "----- ----- ----- ----- ----- ----- ----- ----- -----")
        self.logger.info(
            "                                                     ")

        arcpy.CheckOutExtension("Spatial")
        arcpy.env.workspace = self.input_dir
        arcpy.env.overwriteOutput = True

        rasterlist = arcpy.ListRasters(
            "*", "GRID")  # gets all rasters in arcpy.env.workspace

        for raster in rasterlist:
            self.logger.info(" >> Preparing map layout: " + self.output_dir +
                             str(raster) + ".mxd")

            ## choose layout
            ref_layout_name = self.choose_ref_layout(str(raster))
            ref_lyr_name = self.choose_ref_layer(str(raster))
            try:
                mxd = arcpy.mapping.MapDocument(self.layout_dir +
                                                ref_layout_name)
                df = arcpy.mapping.ListDataFrames(mxd)[0]

                lf_sourceLayer = arcpy.mapping.ListLayers(
                    mxd, ref_lyr_name, df)[0]
                __ras_lyr_name__ = "temp.lyr"
                full_ras = arcpy.Raster(self.input_dir + str(raster))

                __ras_lyr__ = arcpy.MakeRasterLayer_management(
                    full_ras, __ras_lyr_name__[:-4], "#", "", "#")
                arcpy.SaveToLayerFile_management(
                    __ras_lyr__, self.layout_dir + __ras_lyr_name__)
                __lyr_file__ = arcpy.mapping.Layer(self.layout_dir +
                                                   __ras_lyr_name__)
                arcpy.mapping.InsertLayer(df, lf_sourceLayer, __lyr_file__,
                                          "BEFORE")  # Insert New

                arcpy.mapping.UpdateLayer(
                    df, __lyr_file__,
                    lf_sourceLayer)  # Update Symbology with example lyr-file
                arcpy.RefreshActiveView()
                arcpy.RefreshTOC()
                mxd.title = str(
                    raster)  # necessary for later identification of make_maps
                if os.path.isfile(
                        os.path.join(self.output_mxd_dir,
                                     str(raster) + ".mxd")):
                    self.logger.info(
                        "WARNING: overwriting existing version of " +
                        str(raster) + ".mxd")
                mxd.saveACopy(self.output_mxd_dir + str(raster) + ".mxd")
                del mxd, df, __lyr_file__, __ras_lyr__

            except arcpy.ExecuteError:
                self.logger.info(arcpy.GetMessages(2))
                self.logger.info(arcpy.AddError(arcpy.GetMessages(2)))
                self.logger.info(" >> Map layout preparation failed.")
                self.error = True
            except:
                self.logger.info(" >> Map layout preparation failed.")
                self.error = True
            try:
                # arcpy.Delete_management(__ras_lyr_name__)
                arcpy.Delete_management(self.layout_dir + __ras_lyr_name__)
                self.logger.info(
                    " >> Clearing temp.lyr (arcpy.Delete_management).")
                self.logger.info(" >> Done.")

            except:
                self.logger.info("WARNING: Could not clear temp.lyr")
                self.error = True

        arcpy.CheckInExtension('Spatial')
Example #23
def service_area_use_meters(net,
                            rdv,
                            distance,
                            loc,
                            out,
                            OVERLAP=True,
                            ONEWAY=None):
    """
    Execute service area tool using metric distances
    """

    from gasp.mng.gen import copy_feat

    if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")

    else:
        raise ValueError('Network analyst extension is not available')

    network_name = str(os.path.basename(net))
    JUNCTIONS = network_name + '_Junctions'

    oneway = "" if not ONEWAY else "Oneway"

    INTERVALS = str(distance) if type(distance) == int or \
        type(distance) == float else distance if \
        type(distance) == str or type(distance) == unicode \
        else ' '.join([str(int(x)) for x in distance]) if \
        type(distance) == list else None

    if not INTERVALS: raise ValueError('distance format is not valid')

    arcpy.MakeServiceAreaLayer_na(
        in_network_dataset=net,
        out_network_analysis_layer="servArea",
        impedance_attribute="Length",
        travel_from_to="TRAVEL_FROM",
        default_break_values=INTERVALS,
        polygon_type="DETAILED_POLYS",
        merge="NO_MERGE" if OVERLAP else "NO_OVERLAP",
        nesting_type="RINGS",
        line_type="NO_LINES",
        overlap="OVERLAP" if OVERLAP else "NON_OVERLAP",
        split="NO_SPLIT",
        excluded_source_name="",
        accumulate_attribute_name="",
        UTurn_policy="NO_UTURNS",
        restriction_attribute_name=oneway,
        polygon_trim="TRIM_POLYS",
        poly_trim_value="250 Meters",
        lines_source_fields="NO_LINES_SOURCE_FIELDS",
        hierarchy="NO_HIERARCHY",
        time_of_day="")

    # Add locations to the service area layer
    arcpy.AddLocations_na(
        "servArea", "Facilities", loc, "", "5000 Meters", "",
        "{_rdv} SHAPE;{j} NONE".format(_rdv=str(rdv), j=str(JUNCTIONS)),
        "MATCH_TO_CLOSEST", "APPEND", "NO_SNAP", "5 Meters", "INCLUDE",
        "{_rdv} #;{j} #".format(_rdv=str(rdv), j=str(JUNCTIONS)))
    # Solve
    arcpy.Solve_na("servArea", "SKIP", "TERMINATE", "")
    # Export to a shapefile
    save_servArea = copy_feat("servArea\\Polygons", out, gisApi='arcpy')

    return save_servArea
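# The chained conditional expression that builds INTERVALS above is easier to
# audit as an isinstance dispatch; an equivalent helper sketch (Python 2, where
# the unicode type exists):
def format_intervals(distance):
    # normalize break distances to the space-separated string arcpy expects
    if isinstance(distance, (int, float)):
        return str(distance)
    if isinstance(distance, (str, unicode)):
        return distance
    if isinstance(distance, list):
        return ' '.join(str(int(x)) for x in distance)
    raise ValueError('distance format is not valid')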
Example #24
def GenerateCores(pointLayer, workspace, aExtent, stinfDistance, rdinfDistance,
                  lineLayer):
    arcpy.AddMessage("Generating habitat cores...")
    #Check out necessary extensions...
    arcpy.CheckOutExtension("spatial")

    #Set workspace
    if workspace == '':
        workspace = arcpy.env.workspace

    #Create local variables
    pBuff = arcpy.env.scratchGDB + os.sep + "pointBuffer"
    lBuff = arcpy.env.scratchGDB + os.sep + "lineBuffer"
    polyLayer = arcpy.env.scratchGDB + os.sep + "polyLayer"
    #lineLayer = workspace + os.sep + "lineLayer.shp"
    eraseLayer1 = arcpy.env.scratchGDB + os.sep + "eraseLayer1"
    eraseLayer2 = arcpy.env.scratchGDB + os.sep + "eraseLayer2"
    clip = arcpy.env.scratchGDB + os.sep + "clipLayer"
    habitat = arcpy.env.scratchGDB + os.sep + "habitatPatches"
    selectDist = str(2 * float(stinfDistance))

    empty = False
    polyLayer = aExtent
    arcpy.env.extent = aExtent
    pLayer = arcpy.MakeFeatureLayer_management(
        pointLayer, "pLayer", "", "", "Input_FID Input_FID VISIBLE NONE")
    pointLayer = arcpy.SelectLayerByLocation_management(
        pLayer, "WITHIN_A_DISTANCE", aExtent, selectDist, "NEW_SELECTION")

    ## NOTE: Vector processing eliminated to improve performance.
    ##        try:
    ##
    ##                # Process: Buffer points by disturbance distance...
    ##                arcpy.AddMessage("\tBuffering structures by disturbance distance...")
    ##                arcpy.Buffer_analysis(pointLayer, pBuff, buffDistance, "FULL", "ROUND", "NONE", "")
    ##                # Process: Buffer lines (roads) by disturbance distance ...
    ##                arcpy.AddMessage("\tBuffering roads by disturbance distance...")
    ##                arcpy.Buffer_analysis(lineLayer, lBuff, buffDistance, "FULL", "ROUND", "NONE", "")
    ##
    ####                try:
    ##                # Process: Erase point (houses) disturbance buffers from thiessen polygons ...
    ##                arcpy.AddMessage("\tRemoving point buffers from analysis extent...")
    ##                arcpy.Erase_analysis(polyLayer, pBuff, eraseLayer1, "")
    ##                arcpy.Delete_management(pBuff,"")
    ##                # Process: Erase line buffers from thiessen polygons (note: polygons have now been erased for both houses and roads) ...
    ##                arcpy.AddMessage("\tRemoving road buffers from analysis extent...")
    ##                arcpy.Erase_analysis(eraseLayer1, lBuff, eraseLayer2, "")
    ##                arcpy.Delete_management(lBuff,"")
    ##
    ####                except:
    ####                        # erase fails, assume extent is empty
    ####                        empty = True
    ##        except:
    # if buffer or erase fails, try running processing in raster format.
    ##        arcpy.AddWarning("\tVector processing failed due to memory limitations... \n\tAttempting to continue processing using raster analysis...")

    ##        EucDist1 = workspace + os.sep + "EucDist1"
    ##        SetNull1 = workspace + os.sep + "SetNull1"
    ##        EucDist2 = workspace + os.sep + "EucDist2"
    ##        SetNull2 = workspace + os.sep + "SetNull2"
    arcpy.AddMessage("\tCalculating euclidian distance from structures...")
    EucDist1 = arcpy.sa.EucDistance(pointLayer, "", "30", "")

    arcpy.AddMessage("\tSetting areas within " + str(stinfDistance) +
                     " meters of structures to NULL...")
    SetNull1 = arcpy.sa.SetNull(EucDist1, "1", "Value < " + str(stinfDistance))

    arcpy.AddMessage("\tCalculating euclidian distance from roads...")
    EucDist2 = arcpy.sa.EucDistance(lineLayer, "", "30", "")
    arcpy.AddMessage("\tSetting areas within " + str(rdinfDistance) +
                     " meters of roads to NULL...")
    SetNull2 = arcpy.sa.SetNull(EucDist2, SetNull1,
                                "Value < " + str(rdinfDistance))

    del pointLayer
    del pLayer
    try:
        arcpy.AddMessage("\tConverting raster to polygons...")
        arcpy.RasterToPolygon_conversion(SetNull2, eraseLayer2, "NO_SIMPLIFY")
    except:
        empty = True

    try:
        # Process: Clip cores by analysis extent ...
        arcpy.Clip_analysis(eraseLayer2, aExtent, clip, "")
    except:
        empty = True

    if not empty:

        # Make sure features are single part for proper area calculations
        arcpy.AddMessage("\tGenerating single part features...")
        arcpy.MultipartToSinglepart_management(clip, habitat)

        return habitat
    else:
        #if extent is empty return false
        return False
Example #25
    PathShape = "NO_LINES"
    accumulate = ""
    uturns = "ALLOW_UTURNS"
    hierarchy = "NO_HIERARCHY"

    #----- Set up the run -----

    # Output file designated by user
    outDir = os.path.dirname(outFile)
    outFilename = os.path.basename(outFile)

    #Check out the Network Analyst extension license
    # (note that this does NOT check out the extension in ArcMap.
    # It has to be done manually there.)
    if arcpy.CheckExtension("Network") == "Available":
        arcpy.CheckOutExtension("Network")
    else:
        arcpy.AddError(
            "You must have a Network Analyst license to use this tool.")
        raise CustomError

    # If running in Pro, make sure an fgdb workspace is set so NA layers can be created.
    if BBB_SharedFunctions.ProductName == "ArcGISPro":
        if not arcpy.env.workspace:
            arcpy.AddError(BBB_SharedFunctions.CurrentGPWorkspaceError)
            print(BBB_SharedFunctions.CurrentGPWorkspaceError)
            raise CustomError
        else:
            workspacedesc = arcpy.Describe(arcpy.env.workspace)
            if not workspacedesc.workspaceFactoryProgID.startswith(
                    'esriDataSourcesGDB.FileGDBWorkspaceFactory'):
def extract_by_mask(inRaster, inMaskData, output_file):
    arcpy.CheckOutExtension("Spatial")
    outExtractByMask = arcpy.sa.ExtractByMask(inRaster, inMaskData)
    outExtractByMask.save(output_file)
    print("%s has been produced" % output_file)
Example #27
        precip = []
        date = []
        q = []

        for day in rrule.rrule(rrule.DAILY, dtstart=start, until=end):
            folder = "C:\\Recharge_GIS\\Precip\\800m\\Daily\\"
            yr = day.year
            if (yr <= 1991):
                arcpy.env.overwriteOutput = True  ## Ensure overwrite capability
                arcpy.env.workspace = folder + str(day.year) + "a"
                ras = folder + str(day.year) + "a\\" + "PRISM_NM_" + str(
                    day.year) + day.strftime('%m') + day.strftime(
                        '%d') + ".tif"
                if arcpy.Exists(ras):
                    try:
                        arcpy.CheckOutExtension("Spatial")
                        mask = "C:\\Recharge_GIS\\nm_gauges.gdb\\nm_wtrshds"
                        rasPart = arcpy.sa.ExtractByMask(ras, geo)
                        if day == beginPrecip:
                            rasPart.save(folder + str(day.year) + "a\\" +
                                         str(gPoly) + "_rasterClipTest.tif")
                        arr = arcpy.RasterToNumPyArray(rasPart,
                                                       nodata_to_value=0)
                        arrVal = np.multiply(arr, rasSq)
                        arrSum = arrVal.sum()
                        print("Sum of precip on " + str(day) + ":  " +
                              str(arrSum))
                        precip.append(arrSum)
                        date.append(day)
                        for rec in qRecs:
                            if (rec[0] == day):
Example #28
import os
import arcpy as ap

def processSoilData(AggLevel):
    '''Calculates average soil characteristics at AggLevel = "Woreda" or "Kebele" and outputs them to WoredaSoilData.csv or KebeleSoilData.csv'''

    #set the working directory
    workingDir = os.getcwd()

    #Download all of the soil data
    downloadSoilData(workingDir)

    #Turn on Spatial Statistics package and define field over which ZonalStatisticsAsTable will be calculated (Woreda or Kebele ID)
    ap.CheckOutExtension("Spatial")
    if AggLevel == 'Kebele':
        in_zone_data = os.path.dirname(
            workingDir
        ) + "\\Shapefiles\\Ethiopia Kebeles without Somali region.shp"
        in_template_dataset = in_zone_data
        zone_field = "KebeleID"
    elif AggLevel == 'Woreda':
        in_zone_data = os.path.dirname(
            workingDir) + "\\Shapefiles\\WoredasWGS1984.shp"
        in_template_dataset = in_zone_data
        zone_field = "WOREDANO_"

    #Define the projection and change the working directory to the directory with all of the soil data folders
    latLongRef = "Coordinate Systems\Geographic Coordinate Systems\World\WGS 1984.prj"
    os.chdir(workingDir)
    directories = [
        f for f in os.listdir(os.getcwd()) if os.path.isfile(f) == False
    ]

    for i in range(len(directories)):
        #Find all the tiffs with soil data in each soil characteristic folder
        os.chdir(workingDir + "\\" + directories[i])
        filelist = os.listdir(os.getcwd())
        tiffs = []
        clipTiffs = []
        for j in range(len(filelist)):
            name = filelist[j]
            if name[-8::] == '250m.tif':
                tiffs.append(name)
            elif name[-9::] == '_Clip.tif':
                clipTiffs.append(name)

        for j in range(len(clipTiffs)):
            clipTiffs[j] = os.getcwd() + "\\" + clipTiffs[j]

        for j in range(len(tiffs)):
            in_raster = os.getcwd() + "\\" + tiffs[j]
            out_raster = os.getcwd() + "\\" + tiffs[j][0:-4] + "_Clip.tif"
            #Clip the tiffs to Ethiopia if they haven't been already
            if out_raster not in clipTiffs:
                ap.Clip_management(in_raster, "#", out_raster,
                                   in_template_dataset, "#", 1)

            #Calculate Zonal Statistics of soil data at AggLevel
            in_value_raster = out_raster
            out_table = os.getcwd() + "\\" + tiffs[j][0:-4] + AggLevel + ".dbf"
            ap.sa.ZonalStatisticsAsTable(in_zone_data, zone_field,
                                         in_value_raster, out_table)

    #Convert the DBFs with all the AggLevel soil data to CSVs
    #Change the working directory to the directory with all the soil data folders
    os.chdir(workingDir)
    directories = [
        f for f in os.listdir(os.getcwd()) if os.path.isfile(f) == False
    ]

    for i in range(len(directories)):
        #Find all the DBFs with soil data in each soil characteristic folder
        os.chdir(workingDir + "\\" + directories[i])
        filelist = os.listdir(os.getcwd())
        DBFs = []
        for j in range(len(filelist)):
            name = filelist[j]
            if name[-10::] == AggLevel + '.dbf':
                DBFs.append(name)

        #Convert the DBF to a CSV
        for j in range(len(DBFs)):
            convertDBFtoCSV(os.getcwd() + "\\" + DBFs[j])

    #Join the CSVs with all the AggLevel soil data into one CSV titled "WoredaSoilData.csv" or "KebeleSoilData.csv" depending on the AggLevel
    joinSoilCSVs(AggLevel, workingDir)

    return None
#  centroids. This is much faster, but possibly less accurate than the
#  cost distance approach.
#
# Inputs: <Patch raster> <edge list> <maxDistance>
# Output: <Patch connected area raster> {patch connected area table}
#
# June 14, 2012
# [email protected]
#---------------------------------------------------------------------------------

# Import system modules
import sys, string, os, arcpy, math
import arcpy.sa as sa

# Check out any necessary licenses
arcpy.CheckOutExtension("spatial")
arcpy.env.overwriteOutput = True

# Input variables
patchRaster = sys.argv[1]
saveCentroids = sys.argv[3]

# Output variables
edgeListFN = sys.argv[2]
centroidFC = sys.argv[4]


##---FUNCTIONS---
def msg(txt):
    print txt
    arcpy.AddMessage(txt)
def compute_CII_scores_per_lts3():
    # Prepare everything before the loop
    arcpy.CheckOutExtension("Spatial")
    arcpy.Delete_management("lts3_unprocessed")
    arcpy.CopyFeatures_management("lts3_top30pct_buffered", "lts3_unprocessed")
    # Rename the EDGE field to its simplest form
    field_list = arcpy.ListFields("lts3_unprocessed")
    for field in field_list:
        if field.aliasName == "EDGE":
            arcpy.AlterField_management("lts3_unprocessed", field.name, "EDGE")
            break

    # Initialize the number_of_rows variable and the "i" iteration counter
    num_of_rows = int(arcpy.GetCount_management("lts3_unprocessed")[0])
    i = 1

    # Loop to perform the Zonal Statistics as Table iteratively, processing each time the
    # subset of rows that were not already processed in the last iteration. It generates
    # multiple table outputs that will be merged later on in the function
    # aggregate_all_zonalTables()
    while num_of_rows > 0:
        # At each iteration we will generate a new lts3_with_CII_scores_table
        lts3_with_CII_scores_table  = gdb_output_roads + "\\lts3_with_CII_scores_table" + str(i)
        # Compute the CII score per LTS3 segment as zonal statistics. The output is a table
        arcpy.sa.ZonalStatisticsAsTable("lts3_unprocessed", "EDGE", "cii_overall_score_ras1",
                                        lts3_with_CII_scores_table, "DATA", "MEAN")

        # Create a temporary feature class that contains the results of the Zonal Statistics tool
        lts3_table = "lts3_with_CII_scores_table"  + str(i)
        arcpy.AddJoin_management("lts3_unprocessed", "EDGE", lts3_table, "EDGE", "KEEP_ALL")
        arcpy.CopyFeatures_management("lts3_unprocessed", "lts3_temp")
        # Remove the join and delete lts3_unprocessed
        arcpy.RemoveJoin_management("lts3_unprocessed") #Not needed?
        arcpy.Delete_management("lts3_unprocessed")

        # Put any LTS3 segment that didn't get processed in a new feature class
        # lts3_unprocessed. They can be recognized because some of their attributes are NULL.
        expr = "lts3_with_CII_scores_table" + str(i) + "_MEAN IS NULL"
        #print(expr)
        arcpy.SelectLayerByAttribute_management("lts3_temp", "NEW_SELECTION", expr)
        arcpy.CopyFeatures_management("lts3_temp", "lts3_unprocessed")
        arcpy.SelectLayerByAttribute_management("lts3_temp", "CLEAR_SELECTION")

        # Delete all fields in new_lts3_unprocessed that came from lts3_with_CII_scores_table, so that
        # we start with a blank slate in the next round
        drop_fields = ["lts3_with_CII_scores_table" + str(i) + "_OBJECTID",
                       "lts3_with_CII_scores_table" + str(i) + "_EDGE",
                       "lts3_with_CII_scores_table" + str(i) + "_COUNT",
                       "lts3_with_CII_scores_table" + str(i) + "_AREA",
                       "lts3_with_CII_scores_table" + str(i) + "_MEAN"]
        arcpy.DeleteField_management("lts3_unprocessed", drop_fields)

        # Rename the EDGE field to its simplest form in lts3_unprocessed
        field_list = arcpy.ListFields("lts3_unprocessed")
        for field in field_list:
            if field.aliasName == "EDGE":
                print(field.name)
                arcpy.AlterField_management("lts3_unprocessed", field.name, "EDGE")
                break

        # Increment the counters
        num_of_rows = int(arcpy.GetCount_management("lts3_unprocessed")[0])
        i += 1
        print("num_of_rows: " + str(num_of_rows))
        print("i: "+ str(i))
    num_of_score_tables = i - 1
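    # The loop above leaves numbered tables (lts3_with_CII_scores_table1, 2, ...)
    # behind. A sketch of what the deferred merge could look like -- hypothetical,
    # since aggregate_all_zonalTables() itself is not shown in this excerpt:
    tables = [gdb_output_roads + "\\lts3_with_CII_scores_table" + str(k)
              for k in range(1, num_of_score_tables + 1)]
    arcpy.Merge_management(tables, gdb_output_roads + "\\lts3_with_CII_scores_all")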