def vegetation_fis(database: str, label: str, veg_type: str):
    """Calculate vegetation suitability for each reach in a BRAT
    SQLite database

    Arguments:
        database {str} -- Path to BRAT SQLite database
        label {str} -- Either 'historic' or 'existing'. Only used for log messages.
        veg_type {str} -- Prefix, either 'EX' for existing or 'HPE' for historic
    """

    log = Logger('Vegetation FIS')
    log.info('Processing {} vegetation'.format(label))

    streamside_field = 'iVeg_30{}'.format(veg_type)
    riparian_field = 'iVeg100{}'.format(veg_type)
    out_field = 'oVC_{}'.format(veg_type)

    feature_values = load_attributes(
        database, [streamside_field, riparian_field],
        '({} IS NOT NULL) AND ({} IS NOT NULL)'.format(streamside_field,
                                                       riparian_field))
    calculate_vegetation_fis(feature_values, streamside_field, riparian_field,
                             out_field)
    write_db_attributes(database, feature_values, [out_field])

    log.info('Process completed successfully.')
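# Usage sketch (not part of the original example; the database path below is
# a hypothetical placeholder). 'EX' and 'HPE' select the existing and
# historic vegetation fields respectively:
vegetation_fis('outputs/brat.sqlite', 'existing', 'EX')
vegetation_fis('outputs/brat.sqlite', 'historic', 'HPE')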
Example #2
def get_transform_from_epsg(inSpatialRef, epsg):
    """Transform a spatial ref using an epsg code provided

    This is done explicitly and includes a GetAxisMappingStrategy check to
    account for GDAL3's projection differences.

    Args:
        inSpatialRef (osr.SpatialReference): The input spatial reference
        epsg (int): EPSG code of the desired output spatial reference

    Returns:
        tuple: The output osr.SpatialReference and the osr.CoordinateTransformation into it
    """
    log = Logger('get_transform_from_epsg')
    outSpatialRef = osr.SpatialReference()
    outSpatialRef.ImportFromEPSG(int(epsg))

    # https://github.com/OSGeo/gdal/issues/1546
    outSpatialRef.SetAxisMappingStrategy(inSpatialRef.GetAxisMappingStrategy())

    log.info('Input spatial reference is {0}'.format(
        inSpatialRef.ExportToProj4()))
    log.info('Output spatial reference is {0}'.format(
        outSpatialRef.ExportToProj4()))
    transform = osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    return outSpatialRef, transform
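# Usage sketch (an illustrative assumption, not from the original source):
# build a WGS84 input reference and request a transform into EPSG 5070.
from osgeo import osr

src_ref = osr.SpatialReference()
src_ref.ImportFromEPSG(4326)
out_ref, transform = get_transform_from_epsg(src_ref, 5070)
# Any ogr.Geometry in the source reference can now call Transform(transform)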
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('dem', help='DEM raster', type=str)
    parser.add_argument('flowaccum', help='Flow accumulation raster', type=str)
    parser.add_argument('drainagearea', help='Drainage Area output raster', type=str)
    parser.add_argument('--cleanup', help='Deletes temporary files', action='store_true', default=False)
    parser.add_argument('--verbose', help='(optional) verbose logging mode', action='store_true', default=False)
    parser.add_argument('--dinfinity', help='(optional) Use the D-Infinity algorithm. D8 is used if omitted', action='store_true', default=False)
    parser.add_argument('--pitfill', help='(optional) Fill DEM pits before flow direction', action='store_true', default=False)
    args = dotenv.parse_args_env(parser)

    # Initiate the log file
    log = Logger("Flow Accum")
    log.setup(logPath=os.path.join(os.path.dirname(args.flowaccum), "flow_accum.log"))

    if os.path.isfile(args.flowaccum):
        log.info('Deleting existing output raster {}'.format(args.flowaccum))
        driver = gdal.GetDriverByName('GTiff')
        driver.Delete(args.flowaccum)

    try:
        flow_accumulation(args.dem, args.flowaccum, args.cleanup, args.dinfinity, args.pitfill)
        flow_accum_to_drainage_area(args.flowaccum, args.drainagearea)

    except Exception as e:
        print(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #4
def gnat(huc, output_folder):
    """[summary]

    Args:
        huc ([type]): [description]

    Raises:
        Exception: [description]
        Exception: [description]
        Exception: [description]
        Exception: [description]

    Returns:
        [type]: [description]
    """

    log = Logger("GNAT")
    log.info('GNAT v.{}'.format(cfg.version))

    try:
        int(huc)
    except ValueError:
        raise Exception(
            'Invalid HUC identifier "{}". Must be an integer'.format(huc))

    if not (len(huc) == 4 or len(huc) == 8):
        raise Exception('Invalid HUC identifier. Must be a four or eight digit integer')

    safe_makedirs(output_folder)
Example #5
def download_shapefile_collection(url,
                                  download_folder,
                                  unzip_folder,
                                  force_download=False):
    """
    Download the one and only item from Science base and unzip it.
    :param url: URL of the Science Base catalog item
    :param download_folder: Folder where the NHD zip will be downloaded
    :param unzip_folder: Folder where downloaded files will be unzipped
    :param force_download: The download will always be performed if this is true.
    Otherwise the download will be skipped if this is false and the file exists
    :return: Dictionary of all ShapeFiles contained in the NHD zip file.
    """

    log = Logger('Download Shapefile Collection')

    # download and unzip the archive. Note: leftover files are a possibility
    # so we allow one retry because unzip can clean things up
    final_unzip_folder = download_unzip(url, download_folder, unzip_folder,
                                        force_download)

    # Build a dictionary of all the ShapeFiles within the archive.
    # Keys will be the name of the ShapeFile without extension (e.g. WBDHU8)
    shapefiles = {}
    for root, subFolder, files in os.walk(final_unzip_folder):
        for item in files:
            if item.endswith('.shp'):
                shapefiles[os.path.splitext(
                    os.path.basename(item))[0]] = os.path.join(root, item)

    log.info('{} shapefiles identified.'.format(len(shapefiles)))
    return shapefiles
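# Usage sketch (the catalog URL and folders are hypothetical placeholders):
shapefiles = download_shapefile_collection(
    'https://www.sciencebase.gov/catalog/item/<item-id>',
    'scratch/downloads', 'scratch/unzipped')
huc8_boundary_path = shapefiles['WBDHU8']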
Example #6
def _get_url(params: Dict[str, str]):
    """
    Call Science Base API with the argument params and return list of download URLs
    :param params: Science Base params object
    :return: List of HTTPS download URLs for items on S3
    """

    log = Logger('Download')
    log.info('Science base query: {}'.format(params))

    sb = sciencebasepy.SbSession()
    items = sb.find_items(params)

    log.info('{} Science base item(s) identified.'.format(items['total']))

    urls = []
    while items and 'items' in items:
        for item in items['items']:
            result = sb.get_item(item['id'])
            for weblink in result['webLinks']:
                if weblink['type'] == 'download':
                    urls.append(weblink['uri'])

        # pylint flags sb.next as "not callable". This is a false positive
        # and can be safely ignored.
        items = sb.next(items)

    return urls
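# Usage sketch (the query parameters are illustrative; see the sciencebasepy
# documentation for the supported search keys):
urls = _get_url({'q': 'National Hydrography Dataset', 'max': '20'})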
Example #7
def combined_fis(database: str, label: str, veg_type: str,
                 max_drainage_area: float):
    """
    Combined beaver dam capacity FIS
    :param network: Shapefile path containing necessary FIS inputs
    :param label: Plain English label identifying vegetation type ("Existing" or "Historical")
    :param veg_type: Vegetation type suffix added to end of output ShapeFile fields
    :param max_drainage_area: Max drainage above which features are not processed.
    :return: None
    """

    log = Logger('Combined FIS')
    log.info('Processing {} vegetation'.format(label))

    veg_fis_field = 'oVC_{}'.format(veg_type)
    capacity_field = 'oCC_{}'.format(veg_type)
    dam_count_field = 'mCC_{}_CT'.format(veg_type)

    fields = [
        veg_fis_field, 'iGeo_Slope', 'iGeo_DA', 'iHyd_SP2', 'iHyd_SPLow',
        'iGeo_Len'
    ]
    reaches = load_attributes(
        database, fields,
        ' AND '.join(['({} IS NOT NULL)'.format(f) for f in fields]))

    calculate_combined_fis(reaches, veg_fis_field, capacity_field,
                           dam_count_field, max_drainage_area)
    write_db_attributes(database, reaches, [capacity_field, dam_count_field],
                        log)

    log.info('Process completed successfully.')
Example #8
def run_subprocess(cwd: str, cmd: List[str]):

    log = Logger("Subprocess")
    log.info('Running command: {}'.format(' '.join(cmd)))

    # Realtime logging from subprocess
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=cwd)
    # Print the lines in real time; they are also logged afterwards
    for output in iter(process.stdout.readline, b''):
        for line in output.decode('utf-8').split('\n'):
            if len(line) > 0:
                log.info(line)

    for errout in iter(process.stderr.readline, b''):
        for line in errout.decode('utf-8').split('\n'):
            if len(line) > 0:
                log.error(line)

    # Both streams are exhausted at this point, so wait for the process to exit
    retcode = process.wait()
    if retcode > 0:
        log.error('Process returned with code {}'.format(retcode))

    return retcode
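# Usage sketch: run an external tool and treat a non-zero code as failure.
# The command below is illustrative.
retcode = run_subprocess('.', ['gdalinfo', '--version'])
if retcode != 0:
    raise Exception('Subprocess returned error code {}'.format(retcode))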
Example #9
def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('lst_xml_folder',
                        help='Top level data folder containing LST data',
                        type=str)
    parser.add_argument('--verbose',
                        help='(optional) a little extra logging ',
                        action='store_true',
                        default=False)

    args = dotenv.parse_args_env(
        parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file
    log = Logger('Land Surface Temperature XML Generator')
    log.setup(logPath=os.path.join(os.path.dirname(args.lst_xml_folder),
                                   'lst_xml.log'),
              verbose=args.verbose)

    try:
        process_lst(args.lst_xml_folder)
        log.info('Process completed successfully')
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #10
def get_nhd_states(inpath):
    """
    Gets the list of US States that an NHD HUC encompasses

    This relies on the watershed boundary ShapeFile having a column called
    'States' that stores a comma separated list of state abbreviations
    such as 'OR,WA'. A dictionary is used to retrieve the full names.
    :param inpath: Path to the watershed boundary ShapeFile
    :return: List of full US state names that the watershed touches (e.g. Oregon)
    """
    log = Logger('RS Context')

    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(inpath, 0)
    layer = data_source.GetLayer()
    states = []
    for feature in layer:
        value = feature.GetField('States')
        for acronym in value.split(','):
            states.append(us_states[acronym])

    data_source = None

    if 'Canada' in states:
        if len(states) == 1:
            log.error(
                'HUC is entirely within Canada. No DEMs will be available.')
        else:
            log.warning(
                'HUC is partially in Canada. Certain data will only be available for US portion.'
            )

    log.info('HUC intersects {} state(s): {}'.format(len(states),
                                                     ', '.join(states)))
    return list(dict.fromkeys(states))
Example #11
def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('vpuids',
                        help='Comma separated list of VPUs to process',
                        type=str)
    parser.add_argument(
        'data_folder',
        help='Top level data folder containing riverscapes context projects',
        type=str)
    #parser.add_argument('user_name', help='Postgres user name', type=str)
    #parser.add_argument('password', help='Postgres password', type=str)
    args = dotenv.parse_args_env(
        parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file
    log = Logger('Load NHD')
    log.setup(logPath=os.path.join(args.data_folder, 'load_nhd.log'),
              verbose=True)

    try:
        load_nhd(args.vpuids,
                 args.data_folder)  # , args.user_name, args.password)
        log.info('Process completed successfully')
    except Exception as ex:
        log.error(ex)
Example #12
def load_lookup_data(db_path, csv_dir):
    """Load the database lookup data from CSV files.
    This gets called both during database creation during BRAT build,
    but also during refresh of lookup data at the start of BRAT Run so that
    the database has the latest hydrologic equations and other BRAT parameters

    Args:
        db_path (str): Full path to SQLite database
        csv_dir (str): Full path to the root folder containing CSV lookup files
    """

    conn = sqlite3.connect(db_path)
    conn.row_factory = dict_factory
    curs = conn.cursor()

    log = Logger('Database')

    # Load lookup table data into the database
    for dir_name in ['data', os.path.join('data', 'intersect')]:
        dir_search = os.path.join(csv_dir, dir_name, '*.csv')
        for file_name in glob.glob(dir_search):
            table_name = os.path.splitext(os.path.basename(file_name))[0]
            with open(file_name, mode='r') as csvfile:
                d = csv.DictReader(csvfile)
                sql = 'INSERT OR REPLACE INTO {0} ({1}) VALUES ({2})'.format(table_name, ','.join(d.fieldnames), ','.join('?' * len(d.fieldnames)))

                to_db = [[i[col] for col in d.fieldnames] for i in d]
                curs.executemany(sql, to_db)
                log.info('{:,} records loaded into {} lookup data table'.format(curs.rowcount, table_name))

    conn.commit()
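# Usage sketch (both paths are hypothetical placeholders): refresh the lookup
# tables from the CSV files that sit alongside the schema.
load_lookup_data('outputs/brat.sqlite', 'database')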
Example #13
def create_field(layer, field, field_type=ogr.OFTReal):
    """
    Remove and then re-add a field to a feature class
    :param layer: Feature class that will receive the attribute field
    :param field: Name of the attribute field to be created
    :param log:
    :return: name of the field created (same as function argument)
    """
    log = Logger('Shapefile')

    if not field or len(field) < 1 or len(field) > 10:
        raise Exception(
            'Attempting to create field with invalid field name "{}".'.format(
                field))

    # Delete output column from network ShapeFile if it exists and then recreate it
    networkDef = layer.GetLayerDefn()
    for fieldidx in range(0, networkDef.GetFieldCount()):
        if networkDef.GetFieldDefn(fieldidx).GetName() == field:
            log.info(
                'Deleting existing output field "{}" in network ShapeFile.'.
                format(field))
            layer.DeleteField(fieldidx)
            break

    log.info('Creating output field "{}" in network ShapeFile.'.format(field))
    field_def = ogr.FieldDefn(field, field_type)

    if field_type == ogr.OFTReal:
        field_def.SetPrecision(10)
        field_def.SetWidth(18)

    layer.CreateField(field_def)

    return field
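# Usage sketch (the ShapeFile path is a hypothetical placeholder): open a
# layer for editing and (re)create a floating point attribute field.
from osgeo import ogr

dataset = ogr.GetDriverByName('ESRI Shapefile').Open('network.shp', 1)
create_field(dataset.GetLayer(), 'oCC_EX', ogr.OFTReal)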
Example #14
def create_database(huc: str, db_path: str, metadata: Dict[str, str], epsg: int, schema_path: str, delete: bool = False):
    """[summary]

    Args:
        huc (str): [description]
        db_path (str): [description]
        metadata (Dict[str, str]): [description]
        epsg (int): [description]
        schema_path (str): [description]
        delete (bool, optional): [description]. Defaults to False.

    Raises:
        Exception: [description]

    Returns:
        [type]: [description]
    """

    # We need to create a projection for this DB
    db_srs = osr.SpatialReference()
    db_srs.ImportFromEPSG(int(epsg))
    metadata['gdal_srs_proj4'] = db_srs.ExportToProj4()
    metadata['gdal_srs_axis_mapping_strategy'] = osr.OAMS_TRADITIONAL_GIS_ORDER

    if not os.path.isfile(schema_path):
        raise Exception('Unable to find database schema file at {}'.format(schema_path))

    log = Logger('Database')
    if os.path.isfile(db_path) and delete is True:
        log.info('Removing existing SQLite database at {0}'.format(db_path))
        os.remove(db_path)

    log.info('Creating database schema at {0}'.format(db_path))
    with open(schema_path, 'r') as schema_file:
        qry = schema_file.read()
    sqlite3.complete_statement(qry)
    conn = sqlite3.connect(db_path)
    conn.execute('PRAGMA foreign_keys = ON;')
    curs = conn.cursor()
    curs.executescript(qry)

    load_lookup_data(db_path, os.path.dirname(schema_path))

    # Keep only the designated watershed
    curs.execute('DELETE FROM Watersheds WHERE WatershedID <> ?', [huc])

    # Retrieve the name of the watershed so it can be stored in riverscapes project
    curs.execute('SELECT Name FROM Watersheds WHERE WatershedID = ?', [huc])
    row = curs.fetchone()
    watershed_name = row[0] if row else None

    conn.commit()
    conn.execute("VACUUM")

    # Write the metadata to the database
    if metadata:
        for key, value in metadata.items():
            store_metadata(db_path, key, value)

    return watershed_name
Example #15
def set_reach_fields_null(database, fields):

    log = Logger('Database')
    log.info('Setting {} reach fields to NULL'.format(len(fields)))
    conn = sqlite3.connect(database)
    conn.execute('PRAGMA foreign_keys = ON')
    conn.execute('UPDATE ReachAttributes SET {}'.format(','.join(['{} = NULL'.format(field) for field in fields])))
    conn.commit()
    conn.close()
Example #16
def network_statistics(label: str, vector_layer_path: str):

    log = Logger('network_statistics')
    log.info('Network ShapeFile Summary: {}'.format(vector_layer_path))

    results = {}
    total_length = 0.0
    min_length = None
    max_length = None
    invalid_features = 0
    no_geometry = 0

    with get_shp_or_gpkg(vector_layer_path) as vector_layer:

        # Initialize the result counts with every field in the layer
        for fieldidx in range(0, vector_layer.ogr_layer_def.GetFieldCount()):
            results[vector_layer.ogr_layer_def.GetFieldDefn(fieldidx).GetName()] = 0

        for feature, _counter, _progbar in vector_layer.iterate_features("Calculating Stats"):
            geom = feature.GetGeometryRef()

            if geom is None:
                no_geometry += 1
                continue

            shapely_obj = VectorBase.ogr2shapely(geom)
            length = shapely_obj.length

            if shapely_obj.is_empty or shapely_obj.is_valid is False:
                invalid_features += 1

            total_length += length
            min_length = length if not min_length or min_length > length else min_length
            max_length = length if not max_length or max_length < length else max_length

            for fieldidx in range(0, vector_layer.ogr_layer_def.GetFieldCount()):
                field = vector_layer.ogr_layer_def.GetFieldDefn(fieldidx).GetName()
                if field not in results:
                    results[field] = 0

                results[field] += 0 if feature.GetField(field) else 1

        features = vector_layer.ogr_layer.GetFeatureCount()
        results['Feature Count'] = features
        results['Invalid Features'] = invalid_features
        results['Features without geometry'] = no_geometry
        results['Min Length'] = min_length
        results['Max Length'] = max_length
        results['Avg Length'] = (total_length / features) if features > 0 and total_length != 0 else 0.0
        results['Total Length'] = total_length

        for key, value in results.items():
            if value is not None and value > 0:
                log.info('{}, {} with {:,} NULL values'.format(label, key, value))

    return results
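# Usage sketch (the layer path is a hypothetical placeholder):
stats = network_statistics('Existing', 'outputs/network.shp')
print(stats['Feature Count'], stats['Total Length'])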
Example #17
def raster_vrt_stitch(inrasters,
                      outraster,
                      epsg,
                      clip=None,
                      clean=False,
                      warp_options: dict = {}):
    """[summary]
    https://gdal.org/python/osgeo.gdal-module.html#BuildVRT
    Keyword arguments are :
        options --- can be an array of strings, a string, or left empty and filled from other keywords.
        resolution --- 'highest', 'lowest', 'average', 'user'.
        outputBounds --- output bounds as (minX, minY, maxX, maxY) in target SRS.
        xRes, yRes --- output resolution in target SRS.
        targetAlignedPixels --- whether to force output bounds to be multiple of output resolution.
        separate --- whether each source file goes into a separate stacked band in the VRT band.
        bandList --- array of band numbers (index start at 1).
        addAlpha --- whether to add an alpha mask band to the VRT when the source rasters have none.
        resampleAlg --- resampling mode.
            near: nearest neighbour resampling (default, fastest algorithm, worst interpolation quality).
            bilinear: bilinear resampling.
            cubic: cubic resampling.
            cubicspline: cubic spline resampling.
            lanczos: Lanczos windowed sinc resampling.
            average: average resampling, computes the average of all non-NODATA contributing pixels.
            mode: mode resampling, selects the value which appears most often of all the sampled points.
            max: maximum resampling, selects the maximum value from all non-NODATA contributing pixels.
            min: minimum resampling, selects the minimum value from all non-NODATA contributing pixels.
            med: median resampling, selects the median value of all non-NODATA contributing pixels.
            q1: first quartile resampling, selects the first quartile value of all non-NODATA contributing pixels.
            q3: third quartile resampling, selects the third quartile value of all non-NODATA contributing pixels.
        outputSRS --- assigned output SRS.
        allowProjectionDifference --- whether to accept input datasets that do not have the same projection. Note: they will *not* be reprojected.
        srcNodata --- source nodata value(s).
        VRTNodata --- nodata values at the VRT band level.
        hideNodata --- whether to make the VRT band not report the NoData value.
        callback --- callback method.
        callback_data --- user data for callback.
    """
    log = Logger('Raster Stitch')

    # Build a virtual dataset that points to all the rasters then mosaic them together
    # clipping out the HUC boundary and reprojecting to the output spatial reference
    path_vrt = get_unique_file_path(
        os.path.dirname(outraster),
        os.path.basename(outraster).split('.')[0] + '.vrt')

    log.info('Building temporary vrt: {}'.format(path_vrt))
    vrt_options = gdal.BuildVRTOptions()
    gdal.BuildVRT(path_vrt, inrasters, options=vrt_options)

    raster_warp(path_vrt, outraster, epsg, clip, warp_options)

    if clean:
        for rpath in inrasters:
            safe_remove_file(rpath)
Example #18
def execute_query(database, sql, message='Executing database SQL query'):

    log = Logger('Database')
    log.info(message)

    conn = sqlite3.connect(database)
    conn.execute('pragma foreign_keys=ON')
    curs = conn.cursor()
    curs.execute(sql)
    conn.commit()
    log.info('{:,} records affected.'.format(curs.rowcount))
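# Usage sketch (the database path and SQL are illustrative):
execute_query('outputs/brat.sqlite',
              'UPDATE ReachAttributes SET oCC_EX = 0 WHERE iPC_Canal = 0',
              'Zeroing capacity on canal reaches')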
Example #19
def clip_ownership(boundary, ownership, out_path, output_epsg, buffer_meters):

    log = Logger('Ownership')

    log.info('Clipping ownership feature class to {}m buffer around HUC boundary.'.format(buffer_meters))

    # Rough conversion of the buffer distance into ShapeFile units
    buff_dist = _rough_convert_metres_to_shapefile_units(boundary, buffer_meters)
    huc_boundary = get_geometry_unary_union(boundary, output_epsg)
    buffered = huc_boundary.buffer(buff_dist)
    copy_feature_class(ownership, output_epsg, out_path, buffered)
    log.info('Ownership clip complete.')
Example #20
def process_lst(lst_xml_folder):
    """This is a slightly hack-y script to create some XMLS for the land_surface_temp script
        It's a bit of an afterthought so it just plunks down the XMLS all alone in a folder

    Args:
        lst_xml_folder ([type]): [description]
    """

    log = Logger("Generate XMLS for LST")
    hucs = [str(1700 + x) for x in range(1, 13)]

    for huc in hucs:
        hucdir = os.path.join(lst_xml_folder, huc)
        xml_file = os.path.join(hucdir, 'project.rs.xml')
        safe_makedirs(hucdir)
        if os.path.exists(xml_file):
            safe_remove_file(xml_file)

        project_name = f'Land Surface Temperature for HUC {huc}'
        project = RSProject(cfg, xml_file)
        project.create(project_name, 'LST')

        project.add_metadata({
            'ModelVersion': cfg.version,
            'HUC': huc,
            'dateCreated': datetime.datetime.now().isoformat(),
            'HUC{}'.format(len(huc)): huc
        })

        realizations = project.XMLBuilder.add_sub_element(
            project.XMLBuilder.root, 'Realizations')
        realization = project.XMLBuilder.add_sub_element(
            realizations, 'LST', None, {
                'id': 'LST1',
                'dateCreated': datetime.datetime.now().isoformat(),
                'guid': str(uuid.uuid4()),
                'productVersion': cfg.version
            })
        project.XMLBuilder.add_sub_element(realization, 'Name', project_name)

        output_node = project.XMLBuilder.add_sub_element(
            realization, 'Outputs')
        zipfile_node = project.add_dataset(output_node,
                                           f'{huc}.zip',
                                           RSLayer(f'LST Result for {huc}',
                                                   'LST_ZIP', 'ZipFile',
                                                   f'{huc}.zip'),
                                           'ZipFile',
                                           replace=True,
                                           rel_path=True)

        project.XMLBuilder.write()
    log.info('done')
Example #21
def finish(self):
    if self.start_time is None:
        duration = "0s"
    else:
        duration = pretty_duration(int(time.time() - self.start_time))
    if self.byteFormat:
        writestr = "Completed: {}  Total Time: {}     \n".format(
            sizeof_fmt(self.total), duration)
    else:
        writestr = "Completed {:,} operations.  Total Time: {}     \n".format(
            self.total, duration)
    log = Logger(self.text)
    log.info(writestr)
Example #22
def store_metadata(database, key, value):

    log = Logger('Database')
    log.info('Storing metadata {} = {}'.format(key, value))

    formatted_value = value
    if isinstance(value, list):
        formatted_value = ', '.join(value)

    conn = sqlite3.connect(database)
    curs = conn.cursor()
    curs.execute('INSERT OR REPLACE INTO MetaData (KeyInfo, ValueInfo) VALUES (?, ?)', [key, formatted_value])
    conn.commit()
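# Usage sketch (the database path and version string are hypothetical
# placeholders):
store_metadata('outputs/brat.sqlite', 'ModelVersion', '1.0.0')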
Example #23
def get_transform_from_wkt(inSpatialRef, to_sr_wkt):
    log = Logger('get_transform_from_wkt')
    outSpatialRef = ogr.osr.SpatialReference()
    outSpatialRef.ImportFromWkt(to_sr_wkt)

    # https://github.com/OSGeo/gdal/issues/1546
    outSpatialRef.SetAxisMappingStrategy(inSpatialRef.GetAxisMappingStrategy())

    log.info('Input spatial reference is {0}'.format(
        inSpatialRef.ExportToProj4()))
    log.info('Output spatial reference is {0}'.format(
        outSpatialRef.ExportToProj4()))
    transform = ogr.osr.CoordinateTransformation(inSpatialRef, outSpatialRef)
    return outSpatialRef, transform
Example #24
def collect_feature_class(feature_class_path: str,
                          attribute_filter: str = None,
                          clip_shape: BaseGeometry = None,
                          clip_rect: List[float] = None
                          ) -> ogr.Geometry:
    """Collect simple types into Multi types. Does not use Shapely

    Args:
        feature_class_path (str): Path to the ShapeFile or GeoPackage layer to collect
        attribute_filter (str, optional): Attribute Query like "HUC = 17060104". Defaults to None.
        clip_shape (BaseGeometry, optional): Iterate over a subset by clipping to a Shapely-ish geometry. Defaults to None.
        clip_rect (List[float], optional): Iterate over a subset by clipping to a bounding box [minx, miny, maxx, maxy]. Defaults to None.

    Raises:
        Exception: If the layer's geometry type has no corresponding Multi type

    Returns:
        ogr.Geometry: All features collected into a single Multi geometry
    """
    log = Logger('collect_feature_class')
    log.info('Collecting {} feature class.'.format(feature_class_path))

    with get_shp_or_gpkg(feature_class_path) as in_lyr:
        in_geom_type = in_lyr.ogr_layer.GetGeomType()
        output_geom_type = None
        for tp, varr in VectorBase.MULTI_TYPES.items():
            if in_geom_type in varr:
                output_geom_type = tp
                break
        if output_geom_type is None:
            raise Exception('collect_feature_class: Type "{}" not supported'.format(ogr.GeometryTypeToName(in_geom_type)))

        new_geom = ogr.Geometry(output_geom_type)
        for feat, _counter, _progbar in in_lyr.iterate_features('Collecting Geometry', attribute_filter=attribute_filter, clip_rect=clip_rect, clip_shape=clip_shape):
            geom = feat.GetGeometryRef()

            if geom.IsValid() and not geom.IsEmpty():
                if geom.IsMeasured() > 0 or geom.Is3D() > 0:
                    geom.FlattenTo2D()

                # Do the flatten first to speed up the potential transform
                if geom.GetGeometryType() in VectorBase.MULTI_TYPES.keys():
                    sub_geoms = list(geom)
                else:
                    sub_geoms = [geom]
                for subg in sub_geoms:
                    new_geom.AddGeometry(subg)

    log.info('Collect complete.')
    return new_geom
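# Usage sketch (the path and filter are hypothetical placeholders): gather
# every reach in one HUC into a single Multi geometry.
merged = collect_feature_class('outputs/network.shp',
                               attribute_filter="HUC = 17060104")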
Example #25
def calculate_conservation(database: str):
    """ Perform conservation calculations

    Args:
        database (str): path to BRAT geopackage

    Returns:
        dict: dictionary of conservation values keyed by Reach ID
    """

    log = Logger('Conservation')

    # Verify all the input fields are present and load their values
    reaches = load_attributes(
        database, [
            'oVC_HPE', 'oVC_EX', 'oCC_HPE', 'oCC_EX', 'iGeo_Slope',
            'mCC_HisDep', 'iPC_VLowLU', 'iPC_HighLU', 'iPC_LU', 'oPC_Dist',
            'iHyd_SPLow', 'iHyd_SP2', 'iPC_Canal'
        ], '(oCC_EX IS NOT NULL) AND (mCC_HisDep IS NOT NULL)')

    log.info('Calculating conservation for {:,} reaches.'.format(len(reaches)))

    risks = load_lookup(database, 'SELECT Name, RiskID AS ID FROM DamRisks')
    limitations = load_lookup(
        database, 'SELECT Name, LimitationID AS ID FROM DamLimitations')
    opportunities = load_lookup(
        database, 'SELECT Name, OpportunityID AS ID FROM DamOpportunities')

    for values in reaches.values():

        # Areas where beavers can build dams but could have undesirable impacts
        values['RiskID'] = calc_risks(risks, values['oCC_EX'],
                                      values['oPC_Dist'], values['iPC_LU'],
                                      values['iPC_Canal'])

        # Areas where beavers can't build dams, and why
        values['LimitationID'] = calc_limited(
            limitations, values['oVC_HPE'], values['oVC_EX'], values['oCC_EX'],
            values['iGeo_Slope'], values['iPC_LU'], values['iHyd_SPLow'],
            values['iHyd_SP2'])

        # Conservation and restoration opportunities
        values['OpportunityID'] = calc_opportunities(
            opportunities, risks, values['RiskID'], values['oCC_HPE'],
            values['oCC_EX'], values['mCC_HisDep'], values['iPC_VLowLU'],
            values['iPC_HighLU'])

    log.info('Conservation calculation complete')
    return reaches
def flow_accumulation(dem, flow_accum, cleanup=True, dinfinity=False, pitfill=False):
    """
    Calculate reach length, slope, drainage area, min and max elevations
    and write them as attributes to the network
    :param dem: Absolute path to a DEM raster.
    :param flow_accum: Absolute path to a flow accumulation raster (cell counts)
    :param cleanup: determines whether intermediate rasters are deleted.
    :param dinfinity: If true then the dInfinity otherwise d8 algorithm
    :param pitfill: If true then DEM is pit filled before flow accumulation
    :return: None
    """

    log = Logger('Flow Accum')

    if os.path.isfile(flow_accum):
        log.info('Skipping flow accumulation because output exists at {}'.format(flow_accum))
        return None

    tempfolder = os.path.join(os.path.dirname(flow_accum), 'temp')
    cleanup_temp_folder(tempfolder)
    if not os.path.isdir(tempfolder):
        os.mkdir(tempfolder)

    outputDir = os.path.dirname(flow_accum)
    tempPitFill = os.path.join(tempfolder, 'temp_pitfill.tif')
    tempFlowDir = os.path.join(tempfolder, 'temp_flowDir.tif')

    prepared_dem = dem
    if pitfill:
        log.info('Filling pits in DEM and writing to {}'.format(tempPitFill))
        fill_pits((dem, 1), tempPitFill, working_dir=outputDir)
        prepared_dem = tempPitFill

    log.info('Calculating flow direction in pit filled raster and writing to: {}'.format(tempFlowDir))
    if dinfinity:
        flow_dir_mfd((prepared_dem, 1), tempFlowDir)
    else:
        flow_dir_d8((prepared_dem, 1), tempFlowDir)

    log.info('Calculating flow accumulation raster and writing to: {}'.format(flow_accum))
    if dinfinity:
        flow_accumulation_mfd((tempFlowDir, 1), flow_accum)
    else:
        flow_accumulation_d8((tempFlowDir, 1), flow_accum)

    if cleanup:
        cleanup_temp_folder(tempfolder)

    log.info('Flow accumulation completed successfully.')
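# Usage sketch (raster paths are hypothetical placeholders): D8 flow
# accumulation on a pit filled DEM, keeping intermediate rasters.
flow_accumulation('dem.tif', 'flow_accum.tif', cleanup=False, pitfill=True)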
Example #27
def admin_agency(database, reaches, ownership, results):

    log = Logger('Conflict')
    log.info(
        'Calculating land ownership administrating agency for {:,} reach(es)'.
        format(len(reaches)))

    # Load the agency lookups
    with SQLiteCon(database) as db_conn:
        db_conn.curs.execute(
            'SELECT AgencyID, Name, Abbreviation FROM Agencies')
        agencies = {
            row['Abbreviation']: {
                'AgencyID': row['AgencyID'],
                'Name': row['Name'],
                'RawGeometries': [],
                'GeometryUnion': None
            }
            for row in db_conn.curs.fetchall()
        }

    with get_shp_or_gpkg(ownership) as ownership_lyr:

        progbar = ProgressBar(len(reaches), 50, "Calc administration agency")
        counter = 0

        # Loop over stream reaches and assign agency
        for reach_id, polyline in reaches.items():
            counter += 1
            progbar.update(counter)

            if reach_id not in results:
                results[reach_id] = {}

            mid_point = polyline.interpolate(0.5, normalized=True)
            results[reach_id]['AgencyID'] = None

            for feature, _counter, _progbar in ownership_lyr.iterate_features(
                    clip_shape=mid_point):
                agency = feature.GetField('ADMIN_AGEN')
                if agency not in agencies:
                    raise Exception(
                        'The ownership agency "{}" is not found in the BRAT SQLite database'
                        .format(agency))
                results[reach_id]['AgencyID'] = agencies[agency]['AgencyID']

    progbar.finish()
    log.info('Administration agency assignment complete')
Example #28
def merge_geometries(feature_classes, epsg):
    """
    Load all features from multiple feature classes into a single list of geometries
    :param feature_classes:
    :param epsg:
    :return:
    """
    log = Logger('Shapefile')

    driver = ogr.GetDriverByName("ESRI Shapefile")

    union = ogr.Geometry(ogr.wkbMultiLineString)

    fccount = 0
    for fc in feature_classes:
        fccount += 1
        log.info("Merging Geometries for feature class {}/{}".format(
            fccount, len(feature_classes)))
        data_source = driver.Open(fc, 0)
        layer = data_source.GetLayer()

        in_spatial_ref = layer.GetSpatialRef()

        out_spatial_ref, transform = get_transform_from_epsg(
            in_spatial_ref, epsg)

        progbar = ProgressBar(layer.GetFeatureCount(), 50,
                              "Merging Geometries")
        counter = 0
        for feature in layer:
            counter += 1
            progbar.update(counter)
            geom = feature.GetGeometryRef()

            if geom is None:
                progbar.erase()  # get around the progressbar
                log.warning(
                    'Feature with FID={} has no geometry. Skipping'.format(
                        feature.GetFID()))
                continue

            geom.Transform(transform)
            union.AddGeometry(geom)

        progbar.finish()
        data_source = None

    return union
def gdal_dem_geographic(dem_raster: str, output_raster: str, operation: str):
    """Perform GDAL DEM operation on raster in geographic coordinates

    Arguments:
        dem_raster {string} -- Path to DEM raster
        output_raster {string} -- Path to output raster that will get created
        operation {string} -- GDAL DEM operation: hillshade,slope,color-relief,TRI,TPI,roughness
    """
    log = Logger('GDAL DEM')

    zfactor = __get_zfactor(dem_raster)
    log.info("Creating '{}' raster from: {}".format(operation, dem_raster))
    gdal.DEMProcessing(output_raster,
                       dem_raster,
                       operation,
                       scale=zfactor,
                       creationOptions=["COMPRESS=DEFLATE"])
Example #30
def clip_vegetation(boundary_path: str, existing_veg_path: str,
                    existing_clip_path: str, historic_veg_path: str,
                    historic_clip_path: str, output_epsg: int):
    """[summary]

    Args:
        boundary_path (str): Path to layer
        existing_veg_path (str): Path to raster
        existing_clip_path (str): Path to output raster
        historic_veg_path (str): Path to raster
        historic_clip_path (str): Path to output raster
        output_epsg (int): EPSG
    """
    log = Logger('Vegetation Clip')

    with rasterio.open(existing_veg_path) as exist, rasterio.open(
            historic_veg_path) as hist:
        meta_existing = exist.meta
        meta_hist = hist.meta

        if meta_existing['transform'][0] != meta_hist['transform'][0]:
            msg = 'Vegetation raster cell widths do not match: existing {}, historic {}'.format(
                meta_existing['transform'][0], meta_hist['transform'][0])
            raise Exception(msg)

        if meta_existing['transform'][4] != meta_hist['transform'][4]:
            msg = 'Vegetation raster cell heights do not match: existing {}, historic {}'.format(
                meta_existing['transform'][4], meta_hist['transform'][4])
            raise Exception(msg)

    # https://gdal.org/python/osgeo.gdal-module.html#WarpOptions
    warp_options = {"cutlineBlend": 2}
    # Now do the raster warp
    raster_warp(existing_veg_path,
                existing_clip_path,
                output_epsg,
                clip=boundary_path,
                warp_options=warp_options)
    raster_warp(historic_veg_path,
                historic_clip_path,
                output_epsg,
                clip=boundary_path,
                warp_options=warp_options)

    log.info('Complete')