Example #1
def write_attributes(in_layer_path: str, output_values: dict, id_field: str, fields, field_type=ogr.OFTReal, null_values=None):
    """
    Write field values to a feature class
    :param feature_class: Path to feature class
    :param output_values: Dictionary of values keyed by id_field. Each feature is dictionary keyed by field names
    :param id_field: Unique key identifying each feature in both feature class and output_values dictionary
    :param fields: List of fields in output_values to write to
    :return: None
    """

    log = Logger('write_attributes')

    with get_shp_or_gpkg(in_layer_path, write=True) as in_layer:
        # Create each field and store the name and index in a list of tuples
        field_indices = [(field, in_layer.create_field(field, field_type)) for field in fields]  # TODO different field types

        for feature, _counter, _progbar in in_layer.iterate_features("Writing Attributes", write_layers=[in_layer]):
            reach = feature.GetField(id_field)  # TODO Error when id_field is same as FID field .GetFID() seems to work instead
            if reach not in output_values:
                continue

            # Set all the field values and then store the feature
            for field, _idx in field_indices:
                if field in output_values[reach]:
                    if output_values[reach][field] is None:  # explicit None check so legitimate zero values are still written
                        if null_values:
                            feature.SetField(field, null_values)
                        else:
                            log.warning('Unhandled feature class value for None type')
                            feature.SetField(field, None)
                    else:
                        feature.SetField(field, output_values[reach][field])
            in_layer.ogr_layer.SetFeature(feature)
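A minimal usage sketch (the layer path, ReachID key and field names here are hypothetical):

# Hypothetical usage: write two computed attributes back to a GeoPackage layer
values = {
    1: {'Slope': 0.02, 'Sinuosity': 1.4},
    2: {'Slope': None, 'Sinuosity': 1.1},  # None is replaced by null_values
}
write_attributes('outputs/intermediates.gpkg/ReachGeometry', values,
                 'ReachID', ['Slope', 'Sinuosity'], null_values=-9999)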
Example #2
def load_geometries(in_layer_path: str, id_field: str = None, epsg: int = None, spatial_ref: osr.SpatialReference = None) -> dict:
    """[summary]

    Args:
        in_layer_path (str): [description]
        id_field (str, optional): [description]. Defaults to None.
        epsg (int, optional): [description]. Defaults to None.
        spatial_ref (osr.SpatialReference, optional): [description]. Defaults to None.

    Raises:
        VectorBaseException: [description]

    Returns:
        dict: [description]
    """
    log = Logger('load_geometries')

    if epsg is not None and spatial_ref is not None:
        raise VectorBaseException('Specify either an EPSG or a spatial_ref. Not both')

    with get_shp_or_gpkg(in_layer_path) as in_layer:
        # Determine the transformation if user provides an EPSG
        transform = None
        if epsg is not None:
            _outref, transform = VectorBase.get_transform_from_epsg(in_layer.spatial_ref, epsg)
        elif spatial_ref is not None:
            transform = in_layer.get_transform(in_layer.spatial_ref, spatial_ref)

        features = {}

        for feature, _counter, progbar in in_layer.iterate_features("Loading features"):

            if id_field is None:
                reach = feature.GetFID()
            else:
                reach = feature.GetField(id_field)

            geom = feature.GetGeometryRef()
            geo_type = geom.GetGeometryType()

            new_geom = VectorBase.ogr2shapely(geom, transform=transform)

            if new_geom.is_empty:
                progbar.erase()  # get around the progressbar
                log.warning('Empty feature with FID={} cannot be unioned and will be ignored'.format(feature.GetFID()))
            elif not new_geom.is_valid:
                progbar.erase()  # get around the progressbar
                log.warning('Invalid feature with FID={} cannot be unioned and will be ignored'.format(feature.GetFID()))
            # Filter out zero-length lines
            elif geo_type in VectorBase.LINE_TYPES and new_geom.length == 0:
                progbar.erase()  # get around the progressbar
                log.warning('Zero Length for feature with FID={}'.format(feature.GetFID()))
            # Filter out zero-area polys
            elif geo_type in VectorBase.POLY_TYPES and new_geom.area == 0:
                progbar.erase()  # get around the progressbar
                log.warning('Zero Area for feature with FID={}'.format(feature.GetFID()))
            else:
                features[reach] = new_geom

    return features
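A short usage sketch (the layer path, key field and EPSG are hypothetical):

# Hypothetical usage: load flowline geometries reprojected to EPSG:5070
geoms = load_geometries('inputs/hydrology.gpkg/network', id_field='ReachID', epsg=5070)
total_length = sum(geom.length for geom in geoms.values())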
Example #3
def run_subprocess(cwd: str, cmd: List[str]):

    log = Logger("Subprocess")
    log.info('Running command: {}'.format(' '.join(cmd)))

    # Realtime logging from subprocess
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               cwd=cwd)
    # Print the lines in real time and capture them in the log as we go
    for output in iter(process.stdout.readline, b''):
        for line in output.decode('utf-8').split('\n'):
            if len(line) > 0:
                log.info(line)

    for errout in iter(process.stderr.readline, b''):
        for line in errout.decode('utf-8').split('\n'):
            if len(line) > 0:
                log.error(line)

    retcode = process.wait()
    if retcode != 0:
        log.error('Process returned with code {}'.format(retcode))

    return retcode
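A quick usage sketch (the command and working directory are hypothetical):

# Hypothetical usage: run gdalinfo and stream its output into the log
retcode = run_subprocess('/tmp', ['gdalinfo', 'dem.tif'])
if retcode != 0:
    raise Exception('gdalinfo failed with return code {}'.format(retcode))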
Example #4
def gnat(huc, output_folder):
    """[summary]

    Args:
        huc ([type]): [description]

    Raises:
        Exception: [description]
        Exception: [description]
        Exception: [description]
        Exception: [description]

    Returns:
        [type]: [description]
    """

    log = Logger("GNAT")
    log.info('GNAT v.{}'.format(cfg.version))

    try:
        int(huc)
    except ValueError:
        raise Exception(
            'Invalid HUC identifier "{}". Must be an integer'.format(huc))

    if not (len(huc) == 4 or len(huc) == 8):
        raise Exception('Invalid HUC identifier. Must be a 4 or 8 digit integer')

    safe_makedirs(output_folder)
Example #5
def download_shapefile_collection(url,
                                  download_folder,
                                  unzip_folder,
                                  force_download=False):
    """
    Download the one and only item from Science Base and unzip it.
    :param url: URL of the Science Base catalog item
    :param download_folder: Folder where the NHD zip will be downloaded
    :param unzip_folder: Folder where downloaded files will be unzipped
    :param force_download: If True the download is always performed. If False
    it is skipped when the file already exists
    :return: Dictionary of all ShapeFiles contained in the NHD zip file.
    """

    log = Logger('Download Shapefile Collection')

    # download and unzip the archive. Note: leftover files are a possibility
    # so we allow one retry because unzip can clean things up
    final_unzip_folder = download_unzip(url, download_folder, unzip_folder,
                                        force_download)

    # Build a dictionary of all the ShapeFiles within the archive.
    # Keys will be the name of the ShapeFile without extension (e.g. WBDHU8)
    shapefiles = {}
    for root, _sub_folders, files in os.walk(final_unzip_folder):
        for item in files:
            if item.endswith('.shp'):
                shapefiles[os.path.splitext(item)[0]] = os.path.join(root, item)

    log.info('{} shapefiles identified.'.format(len(shapefiles)))
    return shapefiles
Example #6
def load_lookup_data(db_path, csv_dir):
    """Load the database lookup data from CSV files.
    This gets called both during database creation during BRAT build,
    but also during refresh of lookup data at the start of BRAT Run so that
    the database has the latest hydrologic equations and other BRAT parameters

    Args:
        db_path (str): Full path to SQLite database
        csv_dir (str): Full path to the root folder containing CSV lookup files
    """

    conn = sqlite3.connect(db_path)
    conn.row_factory = dict_factory
    curs = conn.cursor()

    log = Logger('Database')

    # Load lookup table data into the database
    for dir_name in ['data', os.path.join('data', 'intersect')]:
        dir_search = os.path.join(csv_dir, dir_name, '*.csv')
        for file_name in glob.glob(dir_search):
            table_name = os.path.splitext(os.path.basename(file_name))[0]
            with open(file_name, mode='r') as csvfile:  # glob already returns the full path
                d = csv.DictReader(csvfile)
                sql = 'INSERT OR REPLACE INTO {0} ({1}) VALUES ({2})'.format(table_name, ','.join(d.fieldnames), ','.join('?' * len(d.fieldnames)))

                to_db = [[i[col] for col in d.fieldnames] for i in d]
                curs.executemany(sql, to_db)
                log.info('{:,} records loaded into {} lookup data table'.format(curs.rowcount, table_name))

    conn.commit()
    conn.close()
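A short usage sketch (both paths are hypothetical). Note that each CSV file name must match the name of a table in the database:

# Hypothetical usage: refresh lookup tables at the start of a BRAT run
load_lookup_data('project/outputs/brat.sqlite', '/usr/local/brat/database')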
Example #7
def get_watershed_info(gpkg_path):
    """Query a BRAT database and get information about
    the watershed being run. Assumes that all watersheds
    except the one being run have been deleted.

    Arguments:
        gpkg_path {str} -- Path to the BRAT GeoPackage database

    Returns:
        [tuple] -- WatershedID, max drainage area, EcoregionID with which
        the watershed is associated.
    """

    with SQLiteCon(gpkg_path) as database:
        database.curs.execute(
            'SELECT WatershedID, MaxDrainage, EcoregionID FROM Watersheds')
        row = database.curs.fetchone()
        watershed = row['WatershedID']
        max_drainage = row['MaxDrainage']
        ecoregion = row['EcoregionID']

    log = Logger('BRAT Run')

    if not watershed:
        raise Exception(
            'Missing watershed in BRAT database {}'.format(gpkg_path))

    if not max_drainage:
        log.warning('Missing max drainage for watershed {}'.format(watershed))

    if not ecoregion:
        raise Exception('Missing ecoregion for watershed {}'.format(watershed))

    return watershed, max_drainage, ecoregion
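A short usage sketch (the GeoPackage path is hypothetical):

# Hypothetical usage: fetch the watershed context before a BRAT run
watershed, max_drainage, ecoregion = get_watershed_info('project/outputs/brat.gpkg')
print('Running watershed {} (ecoregion {})'.format(watershed, ecoregion))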
Example #8
def main():
    """ Main BRAT Run
    """
    parser = argparse.ArgumentParser(
        description='Run brat against a pre-existing sqlite db:',
        # epilog="This is an epilog"
    )
    parser.add_argument('project',
                        help='Riverscapes project folder or project xml file',
                        type=str,
                        default=None)
    parser.add_argument(
        '--csv_dir',
        help='(optional) directory where we can find updated lookup tables',
        type=str,
        default=None)
    parser.add_argument('--verbose',
                        help='(optional) a little extra logging ',
                        action='store_true',
                        default=False)
    parser.add_argument(
        '--debug',
        help=
        '(optional) more output about things like memory usage. There is a performance cost',
        action='store_true',
        default=False)

    args = dotenv.parse_args_env(parser)

    if os.path.isfile(args.project):
        logpath = os.path.dirname(args.project)
    elif os.path.isdir(args.project):
        logpath = args.project
    else:
        raise Exception(
            'You must supply a valid path to a riverscapes project')

    log = Logger('BRAT Run')
    log.setup(logPath=os.path.join(logpath, "brat_run.log"),
              verbose=args.verbose)
    log.title('BRAT Run Tool')

    try:
        if args.debug is True:
            from rscommons.debug import ThreadRun
            memfile = os.path.join(logpath, 'brat_run_memusage.log')
            retcode, max_obj = ThreadRun(brat_run, memfile, args.project,
                                         args.csv_dir)
            log.debug('Return code: {}, [Max process usage] {}'.format(
                retcode, max_obj))
        else:
            brat_run(args.project, args.csv_dir)

    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #9
def main():
    parser = argparse.ArgumentParser(
        description='RVD',
        # epilog="This is an epilog"
    )

    parser.add_argument('huc', help='HUC identifier', type=str)
    parser.add_argument('flowlines', help='Segmented flowlines input.', type=str)
    parser.add_argument('existing', help='National existing vegetation raster', type=str)
    parser.add_argument('historic', help='National historic vegetation raster', type=str)
    parser.add_argument('valley_bottom', help='Valley bottom (.shp, .gpkg/layer_name)', type=str)
    parser.add_argument('output_folder', help='Output folder input', type=str)
    parser.add_argument('--reach_codes', help='Comma delimited reach codes (FCode) to retain when filtering features. Omitting this option retains all features.', type=str)
    parser.add_argument('--flow_areas', help='(optional) path to the flow area polygon feature class containing artificial paths', type=str)
    parser.add_argument('--waterbodies', help='(optional) waterbodies input', type=str)
    parser.add_argument('--meta', help='riverscapes project metadata as comma separated key=value pairs', type=str)
    parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)
    parser.add_argument('--debug', help="(optional) save intermediate outputs for debugging", action='store_true', default=False)

    args = dotenv.parse_args_env(parser)

    reach_codes = args.reach_codes.split(',') if args.reach_codes else None

    meta = parse_metadata(args.meta)

    # Initiate the log file
    log = Logger("RVD")
    log.setup(logPath=os.path.join(args.output_folder, "rvd.log"), verbose=args.verbose)
    log.title('RVD For HUC: {}'.format(args.huc))

    try:
        if args.debug is True:
            from rscommons.debug import ThreadRun
            memfile = os.path.join(args.output_folder, 'rvd_mem.log')
            retcode, max_obj = ThreadRun(rvd, memfile, args.huc,
                                         args.flowlines,
                                         args.existing, args.historic, args.valley_bottom,
                                         args.output_folder,
                                         reach_codes,
                                         args.flow_areas, args.waterbodies,
                                         meta=meta)
            log.debug('Return code: {}, [Max process usage] {}'.format(retcode, max_obj))

        else:
            rvd(args.huc,
                args.flowlines,
                args.existing, args.historic, args.valley_bottom,
                args.output_folder,
                reach_codes,
                args.flow_areas, args.waterbodies,
                meta=meta)

    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #10
def load_geometries(feature_class, id_field, epsg=None):
    log = Logger('Shapefile')
    # Get the input network
    driver = ogr.GetDriverByName('ESRI Shapefile')
    dataset = driver.Open(feature_class, 0)
    layer = dataset.GetLayer()
    in_spatial_ref = layer.GetSpatialRef()

    # Determine the transformation if user provides an EPSG
    transform = None
    if epsg:
        out_spatial_ref, transform = get_transform_from_epsg(
            in_spatial_ref, epsg)

    features = {}

    progbar = ProgressBar(layer.GetFeatureCount(), 50, "Loading features")
    counter = 0
    for inFeature in layer:
        counter += 1
        progbar.update(counter)

        reach = inFeature.GetField(id_field)
        geom = inFeature.GetGeometryRef()

        # Optional coordinate transformation
        if transform:
            geom.Transform(transform)

        new_geom = wkbload(geom.ExportToWkb())
        geo_type = geom.GetGeometryType()  # geometry type comes from the OGR geometry; new_geom is Shapely

        if new_geom.is_empty:
            progbar.erase()  # get around the progressbar
            log.warning(
                'Empty feature with FID={} cannot be unioned and will be ignored'
                .format(inFeature.GetFID()))
        elif not new_geom.is_valid:
            progbar.erase()  # get around the progressbar
            log.warning(
                'Invalid feature with FID={} cannot be unioned and will be ignored'
                .format(inFeature.GetFID()))
        # Filter out zero-length lines
        elif geo_type in LINE_TYPES and new_geom.length == 0:
            progbar.erase()  # get around the progressbar
            log.warning('Zero Length for feature with FID={}'.format(
                inFeature.GetFID()))
        # Filter out zero-area polys
        elif geo_type in POLY_TYPES and new_geom.area == 0:
            progbar.erase()  # get around the progressbar
            log.warning('Zero Area for feature with FID={}'.format(
                inFeature.GetFID()))
        else:
            features[reach] = new_geom

    progbar.finish()
    dataset = None
    return features
Example #11
def set_reach_fields_null(database, fields):

    log = Logger('Database')
    log.info('Setting {} reach fields to NULL'.format(len(fields)))
    conn = sqlite3.connect(database)
    conn.execute('PRAGMA foreign_keys = ON')
    conn.execute('UPDATE ReachAttributes SET {}'.format(','.join(['{} = NULL'.format(field) for field in fields])))
    conn.commit()
    conn.close()
Example #12
def raster_vrt_stitch(inrasters,
                      outraster,
                      epsg,
                      clip=None,
                      clean=False,
                      warp_options: dict = None):  # None instead of {} avoids a mutable default
    """[summary]
    https://gdal.org/python/osgeo.gdal-module.html#BuildVRT
    Keyword arguments are :
        options --- can be an array of strings, a string, or left empty and filled from other keywords.
        resolution --- 'highest', 'lowest', 'average', 'user'.
        outputBounds --- output bounds as (minX, minY, maxX, maxY) in target SRS.
        xRes, yRes --- output resolution in target SRS.
        targetAlignedPixels --- whether to force output bounds to be multiple of output resolution.
        separate --- whether each source file goes into a separate stacked band in the VRT band.
        bandList --- array of band numbers (index start at 1).
        addAlpha --- whether to add an alpha mask band to the VRT when the source rasters have none.
        resampleAlg --- resampling mode.
            near: nearest neighbour resampling (default, fastest algorithm, worst interpolation quality).
            bilinear: bilinear resampling.
            cubic: cubic resampling.
            cubicspline: cubic spline resampling.
            lanczos: Lanczos windowed sinc resampling.
            average: average resampling, computes the average of all non-NODATA contributing pixels.
            mode: mode resampling, selects the value which appears most often of all the sampled points.
            max: maximum resampling, selects the maximum value from all non-NODATA contributing pixels.
            min: minimum resampling, selects the minimum value from all non-NODATA contributing pixels.
            med: median resampling, selects the median value of all non-NODATA contributing pixels.
            q1: first quartile resampling, selects the first quartile value of all non-NODATA contributing pixels.
            q3: third quartile resampling, selects the third quartile value of all non-NODATA contributing pixels.
        outputSRS --- assigned output SRS.
        allowProjectionDifference --- whether to accept input datasets that do not share the same projection. Note: they will *not* be reprojected.
        srcNodata --- source nodata value(s).
        VRTNodata --- nodata values at the VRT band level.
        hideNodata --- whether to make the VRT band not report the NoData value.
        callback --- callback method.
        callback_data --- user data for callback.
    """
    log = Logger('Raster Stitch')

    # Build a virtual dataset that points to all the rasters then mosaic them together
    # clipping out the HUC boundary and reprojecting to the output spatial reference
    path_vrt = get_unique_file_path(
        os.path.dirname(outraster),
        os.path.basename(outraster).split('.')[0] + '.vrt')

    log.info('Building temporary vrt: {}'.format(path_vrt))
    vrt_options = gdal.BuildVRTOptions()
    gdal.BuildVRT(path_vrt, inrasters, options=vrt_options)

    raster_warp(path_vrt, outraster, epsg, clip, warp_options or {})

    if clean:
        for rpath in inrasters:
            safe_remove_file(rpath)
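A usage sketch (the tile names, EPSG and clip polygon are hypothetical; warp_options is assumed to pass through to gdal.Warp):

# Hypothetical usage: mosaic DEM tiles, clip to a HUC boundary and reproject
tiles = ['dem_a.tif', 'dem_b.tif', 'dem_c.tif']
raster_vrt_stitch(tiles, 'dem.tif', 4269, clip='wbd_huc8.shp',
                  warp_options={'cutlineBlend': 1})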
Example #13
def get_metadata(database):
    log = Logger('Database')
    log.debug('Retrieving metadata')

    conn = sqlite3.connect(database)
    curs = conn.cursor()
    curs.execute('SELECT KeyInfo, ValueInfo FROM MetaData')
    meta = {}
    for row in curs.fetchall():
        meta[row[0]] = row[1]
    conn.close()
    return meta
Example #14
def process_lst(lst_xml_folder):
    """This is a slightly hack-y script to create some XMLS for the land_surface_temp script
        It's a bit of an afterthought so it just plunks down the XMLS all alone in a folder

    Args:
        lst_xml_folder ([type]): [description]
    """

    log = Logger("Generate XMLS for LST")
    hucs = [str(1700 + x) for x in range(1, 13)]

    for huc in hucs:
        hucdir = os.path.join(lst_xml_folder, huc)
        xml_file = os.path.join(hucdir, 'project.rs.xml')
        safe_makedirs(hucdir)
        if os.path.exists(xml_file):
            safe_remove_file(xml_file)

        project_name = f'Land Surface Temperature for HUC {huc}'
        project = RSProject(cfg, xml_file)
        project.create(project_name, 'LST')

        project.add_metadata({
            'ModelVersion': cfg.version,
            'HUC': huc,
            'dateCreated': datetime.datetime.now().isoformat(),
            'HUC{}'.format(len(huc)): huc
        })

        realizations = project.XMLBuilder.add_sub_element(
            project.XMLBuilder.root, 'Realizations')
        realization = project.XMLBuilder.add_sub_element(
            realizations, 'LST', None, {
                'id': 'LST1',
                'dateCreated': datetime.datetime.now().isoformat(),
                'guid': str(uuid.uuid4()),
                'productVersion': cfg.version
            })
        project.XMLBuilder.add_sub_element(realization, 'Name', project_name)

        output_node = project.XMLBuilder.add_sub_element(
            realization, 'Outputs')
        zipfile_node = project.add_dataset(output_node,
                                           f'{huc}.zip',
                                           RSLayer(f'LST Result for {huc}',
                                                   'LST_ZIP', 'ZipFile',
                                                   '1706.zip'),
                                           'ZipFile',
                                           replace=True,
                                           rel_path=True)

        project.XMLBuilder.write()
    log.info('done')
Example #15
def centerline_points(
        in_lines: Path,
        distance: float = 0.0,
        transform: Transform = None) -> Dict[int, List[RiverPoint]]:
    """Generates points along each line feature at specified distances from the end as well as quarter and halfway

    Args:
        in_lines (Path): path of shapefile with features
        distance (float, optional): distance from ends to generate points. Defaults to 0.0.
        transform (Transform, optional): coordinate transformation. Defaults to None.

    Returns:
        Dict[int, List[RiverPoint]]: lists of RiverPoint objects keyed by feature FID
    """
    log = Logger('centerline_points')
    with get_shp_or_gpkg(in_lines) as in_lyr:
        out_group = {}
        ogr_extent = in_lyr.ogr_layer.GetExtent()
        extent = Polygon.from_bounds(ogr_extent[0], ogr_extent[2],
                                     ogr_extent[1], ogr_extent[3])

        for feat, _counter, progbar in in_lyr.iterate_features(
                "Centerline points"):

            line = VectorBase.ogr2shapely(feat, transform)

            fid = feat.GetFID()
            out_points = []
            # Attach the FID in case we need it later
            props = {'fid': fid}

            pts = [
                line.interpolate(distance),
                line.interpolate(0.5, True),
                line.interpolate(-distance)
            ]

            if line.project(line.interpolate(0.25, True)) > distance:
                pts.append(line.interpolate(0.25, True))
                pts.append(line.interpolate(-0.25, True))

            for pt in pts:
                # Throw away anything that falls outside the layer extent
                if not extent.contains(pt):
                    progbar.erase()
                    log.warning('Point {} is outside of extent: {}'.format(
                        pt.coords[0], ogr_extent))
                    continue
                out_points.append(RiverPoint(pt, properties=props))

            out_group[int(fid)] = out_points
            feat = None
        return out_group
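A short usage sketch (the line layer path and distance are hypothetical; the distance is in layer units):

# Hypothetical usage: points 50 units from each end plus quarter and halfway points
points_by_fid = centerline_points('inputs/flowlines.shp', distance=50.0)
for fid, pts in points_by_fid.items():
    print(fid, len(pts))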
Example #16
    def finish(self):
        if self.start_time is None:
            duration = "0s"
        else:
            duration = pretty_duration(int(time.time() - self.start_time))
        if self.byteFormat:
            writestr = "Completed: {}  Total Time: {}     \n".format(
                sizeof_fmt(self.total), duration)
        else:
            writestr = "Completed {:,} operations.  Total Time: {}     \n".format(
                self.total, duration)
        log = Logger(self.text)
        log.info(writestr)
Example #17
def store_metadata(database, key, value):

    log = Logger('Database')
    log.info('Storing metadata {} = {}'.format(key, value))

    formatted_value = value
    if isinstance(value, list):
        formatted_value = ', '.join(value)

    conn = sqlite3.connect(database)
    curs = conn.cursor()
    curs.execute('INSERT OR REPLACE INTO MetaData (KeyInfo, ValueInfo) VALUES (?, ?)', [key, formatted_value])
    conn.commit()
    conn.close()
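A usage sketch (the database path, key and value are hypothetical). Lists are flattened to a comma separated string before storage:

# Hypothetical usage: record the reach codes used for a run
store_metadata('outputs/brat.sqlite', 'ReachCodes', ['33400', '46003', '46006'])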
Example #18
def get_riverpoints(inpath, epsg, attribute_filter=None):
    """[summary]

    Args:
        inpath ([type]): Path to a ShapeFile
        epsg ([type]):  Desired output spatial reference
        attribute_filter ([type], optional): [description]. Defaults to None.

    Returns:
        [type]: List of RiverPoint objects
    """

    log = Logger('get_riverpoints')
    points = []

    with get_shp_or_gpkg(inpath) as in_lyr:

        _out_spatial_ref, transform = get_transform_from_epsg(
            in_lyr.spatial_ref, epsg)

        for feat, _counter, progbar in in_lyr.iterate_features(
                'Getting points for use in Thiessen',
                attribute_filter=attribute_filter):

            new_geom = feat.GetGeometryRef()

            if new_geom is None:
                progbar.erase()  # get around the progressbar
                log.warning(
                    'Feature with FID={} has no geometry. Skipping'.format(
                        feat.GetFID()))
                continue

            new_geom.Transform(transform)
            new_shape = VectorBase.ogr2shapely(new_geom)

            if new_shape.geom_type == 'Polygon':
                new_shape = MultiPolygon([new_shape])

            for poly in new_shape.geoms:
                # Exterior is the shell and there is only ever 1
                for pt in list(poly.exterior.coords):
                    points.append(RiverPoint(pt, interior=False))

                # Now we consider interiors. NB: Interiors are only qualifying islands in this case
                for idx, island in enumerate(poly.interiors):
                    for pt in list(island.coords):
                        points.append(RiverPoint(pt, interior=True,
                                                 island=idx))

    return points
Example #19
def main():
    parser = argparse.ArgumentParser(description='GNAT',
                                     # epilog="This is an epilog"
                                     )
    parser.add_argument('huc', help='HUC identifier', type=str)
    parser.add_argument('output_folder', help='Output folder', type=str)
    parser.add_argument('--verbose',
                        help='(optional) a little extra logging ',
                        action='store_true',
                        default=False)

    args = dotenv.parse_args_env(parser)

    # Initiate the log file
    log = Logger("GNAT")
    log.setup(logPath=os.path.join(args.output_folder, "gnat.log"),
              verbose=args.verbose)
    log.title('GNAT For HUC: {}'.format(args.huc))

    try:
        gnat(args.huc, args.output_folder)

    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #20
def ThreadRun(callback, memlogfile: str, *args, **kwargs):
    log = Logger('Debug')
    memmon = MemoryMonitor(memlogfile, 1)
    with ThreadPoolExecutor() as executor:
        mem_thread = executor.submit(memmon.measure_usage)
        try:
            fn_thread = executor.submit(callback, *args, **kwargs)
            result = fn_thread.result()
        finally:
            memmon.keep_measuring = False
            max_obj = mem_thread.result()
            log.debug('MaxStats: {}'.format(max_obj))
    memmon.write_plot(os.path.splitext(memlogfile)[0] + '.png')
    return result, max_obj.toString()
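A usage sketch (the callback and log path are hypothetical):

# Hypothetical usage: profile the memory of a long-running task
def heavy_task(n):
    return sum(i * i for i in range(n))

result, max_usage = ThreadRun(heavy_task, '/tmp/memusage.log', 10_000_000)
print(result, max_usage)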
Example #21
def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('lst_xml_folder',
                        help='Top level data folder containing LST data',
                        type=str)
    parser.add_argument('--verbose',
                        help='(optional) a little extra logging ',
                        action='store_true',
                        default=False)

    args = dotenv.parse_args_env(
        parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file
    log = Logger('Land Surface Temperature XML Generator')
    log.setup(logPath=os.path.join(os.path.dirname(args.lst_xml_folder),
                                   'lst_xml.log'),
              verbose=args.verbose)

    try:
        process_lst(args.lst_xml_folder)
        log.info('Process completed successfully')
    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #22
def edit_xml(projectpath):
    """Here's an example of how to edit a pre-existing project.rs.xml file

    Args:
        projectpath (str): Path to the project.rs.xml file to edit
    """
    log = Logger('edit_xml')
    log.info('Loading the XML to make edits...')
    # Load up a new RSProject class
    project = RSProject(cfg, projectpath)

    # Now, instead of creating nodes we can just find them
    r1_node = project.XMLBuilder.find_by_id('INN_CTX01')

    # Now we can add new metadata values to this node
    # Note that we specify r1_node. If you don't do this then it writes to the project metadata
    project.add_metadata({'EditedVal': 'Some Realization Value here'}, r1_node)

    # Same is true for Rasters if we want
    r1_input_raster_node = project.XMLBuilder.find_by_id('AP_01')
    project.add_metadata({'EditedVal Raster': 'Some Raster Value here'},
                         r1_input_raster_node)

    # Don't forget to write back to the file
    log.info('Writing file')
    project.XMLBuilder.write()
    log.info('Done')
Example #23
def flow_accum_to_drainage_area(flow_accum, drainage_area):

    log = Logger('Flow Accum')

    if os.path.isfile(drainage_area):
        log.info('Skipping conversion of flow accumulation to drainage area because file exists.')
        return

    log.info('Converting flow accumulation to drainage area raster.')

    with rasterio.open(flow_accum) as src:
        chl_meta = src.meta
        chl_meta['compress'] = 'deflate'
        with rasterio.open(drainage_area, 'w', **chl_meta) as dst:
            affine = src.meta['transform']
            # Cell area in square km (assumes the raster units are metres)
            cell_area = abs(affine[0] * affine[4]) / 1000000

            for ji, window in src.block_windows(1):
                array = src.read(1, window=window, masked=True)
                result = array * cell_area
                dst.write(result, window=window, indexes=1)

            # TODO: write some basic statistics of the drainage area raster to the log file.

    log.info('Drainage area raster created at {}'.format(drainage_area))
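A usage sketch (raster paths are hypothetical). Each output cell is the accumulated cell count multiplied by the cell area in square kilometres:

# Hypothetical usage: derive a drainage area raster from flow accumulation
flow_accum_to_drainage_area('intermediates/flow_accum.tif', 'outputs/drainage_area.tif')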
Example #24
def get_nhd_states(inpath):
    """
    Gets the list of US States that an NHD HUC encompasses

    This relies on the watershed boundary ShapeFile having a column called
    'States' that stores a comma separated list of state abbreviations
    such as 'OR,WA'. A dictionary is used to retrieve the full names.
    :param inpath: Path to the watershed boundary ShapeFile
    :return: List of full US state names that the watershed touches (e.g. Oregon)
    """
    log = Logger('RS Context')

    driver = ogr.GetDriverByName("ESRI Shapefile")
    data_source = driver.Open(inpath, 0)
    layer = data_source.GetLayer()
    states = []
    for feature in layer:
        value = feature.GetField('States')
        states.extend(us_states[acronym] for acronym in value.split(','))

    data_source = None

    if 'Canada' in states:
        if len(states) == 1:
            log.error(
                'HUC is entirely within Canada. No DEMs will be available.')
        else:
            log.warning(
                'HUC is partially in Canada. Certain data will only be available for US portion.'
            )

    log.info('HUC intersects {} state(s): {}'.format(len(states),
                                                     ', '.join(states)))
    return list(dict.fromkeys(states))
Example #25
def main():

    parser = argparse.ArgumentParser()
    parser.add_argument('vpuids',
                        help='Comma separated list of VPUs to process',
                        type=str)
    parser.add_argument(
        'data_folder',
        help='Top level data folder containing riverscapes context projects',
        type=str)
    #parser.add_argument('user_name', help='Postgres user name', type=str)
    #parser.add_argument('password', help='Postgres password', type=str)
    args = dotenv.parse_args_env(
        parser, os.path.join(os.path.dirname(__file__), '.env'))

    # Initiate the log file
    log = Logger('Load NHD')
    log.setup(logPath=os.path.join(args.data_folder, 'load_nhd.log'),
              verbose=True)

    try:
        load_nhd(args.vpuids,
                 args.data_folder)  # , args.user_name, args.password)
        log.info('Process completed successfully')
    except Exception as ex:
        log.error(ex)
Example #26
def main():

    parser = argparse.ArgumentParser(
        description='RVD XML Augmenter',
        # epilog="This is an epilog"
    )
    parser.add_argument('out_project_xml', help='Input XML file', type=str)
    parser.add_argument('in_xmls', help='Comma-separated list of XMLs in decreasing priority', type=str)
    parser.add_argument('--verbose', help='(optional) a little extra logging ', action='store_true', default=False)

    args = dotenv.parse_args_env(parser)

    # Initiate the log file
    log = Logger('XML Augmenter')
    log.setup(verbose=args.verbose)
    log.title('XML Augmenter: {}'.format(args.out_project_xml))

    try:
        out_prj = RSProject(None, args.out_project_xml)
        out_prj.rs_meta_augment(
            args.in_xmls.split(','),
            lyrs_in_out
        )

        out_prj.XMLBuilder.write()
        report_path = out_prj.XMLBuilder.find('.//HTMLFile[@id="RVD_REPORT"]/Path').text
        report = RVDReport(os.path.join(out_prj.project_dir, report_path), out_prj)
        report.write()

    except Exception as e:
        log.error(e)
        traceback.print_exc(file=sys.stdout)
        sys.exit(1)

    sys.exit(0)
Example #27
def build_vrt(search_dir, vrt):
    """
    Build a VRT file for images found in a directory and subdirectories.
    :param search_dir: Top level directory that will be searched for *.img and *.tif raster files
    :param vrt:  Output VRT file
    :return: None
    """

    log = Logger("Build VRT")

    if not os.path.isdir(search_dir):
        raise Exception(
            'Directory specified does not exist: {}'.format(search_dir))

    rasters = []
    for root, _sub_folders, files in os.walk(search_dir):
        for item in files:
            if item.endswith('.img') or item.endswith('.tif'):
                rasters.append(os.path.join(root, item))

    log.info('{} rasters found in {}'.format(len(rasters), search_dir))
    log.info('Generating VRT file to {}'.format(vrt))

    gdal.BuildVRT(vrt, rasters)

    log.info('Process completed successfully.')
Example #28
class TempGISFile():
    """This is just a loose mapping class to allow us to use Python's 'with' keyword.

    Raises:
        VectorBaseException: Various
    """
    log = Logger('TempGISFile')

    def __init__(self, suffix: str, prefix: str = None):
        self.suffix = suffix
        self.prefix = 'rstools_{}'.format(prefix) if prefix else 'rstools_'
        self.filepath = None
        self.file = None

    def __enter__(self) -> TempGISFile:
        """Behaviour on open when using the "with VectorBase():" Syntax
        """
        self.file, self.filepath = mkstemp(suffix=self.suffix, text=True)
        # Immediately close it. This is so windows doesn't hold onto the handle
        os.close(self.file)
        return self

    def __exit__(self, _type, _value, _traceback):
        """Behaviour on close when using the "with VectorBase():" Syntax
        """
        try:
            os.remove(self.filepath)
        except Exception as e:
            self.log.warning('Error cleaning up file {}: {}'.format(
                self.filepath, e))
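A usage sketch showing the context-manager behaviour (the suffix and prefix are hypothetical):

# Hypothetical usage: a scratch file path that is deleted on exit
with TempGISFile('.tif', prefix='scratch') as tmp:
    print('Working file:', tmp.filepath)
# tmp.filepath has been removed at this point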
Example #29
    def __init__(self, database, report_path, rs_project):
        super().__init__(rs_project, report_path)
        self.log = Logger('GNAT Report')
        self.database = database

        self.images_dir = os.path.join(os.path.dirname(report_path), 'images')
        safe_makedirs(self.images_dir)
Example #30
class SQLiteCon():
    """This is just a loose mapping class to allow us to use Python's 'with' keyword.

    Raises:
        VectorBaseException: Various
    """
    log = Logger('SQLite')

    def __init__(self, filepath: str):
        self.filepath = filepath
        self.conn = None
        self.curs = None

    def __enter__(self) -> SQLiteCon:
        """Behaviour on open when using the "with VectorBase():" Syntax
        """

        self.conn = sqlite3.connect(self.filepath)

        # turn on foreign key constraints. Does not happen by default
        self.conn.execute('PRAGMA foreign_keys = ON;')

        self.conn.row_factory = dict_factory
        self.curs = self.conn.cursor()
        return self

    def __exit__(self, _type, _value, _traceback):
        """Behaviour on close when using the "with VectorBase():" Syntax
        """
        self.curs.close()
        self.conn.close()
        self.curs = None
        self.conn = None
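A usage sketch (the GeoPackage path and query are hypothetical). Because of dict_factory, rows come back as dictionaries keyed by column name:

# Hypothetical usage: read rows as dictionaries
with SQLiteCon('project/outputs/brat.gpkg') as db:
    db.curs.execute('SELECT WatershedID, Name FROM Watersheds')
    for row in db.curs.fetchall():
        print(row['WatershedID'], row['Name'])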