Example #1
File: create.py Project: SnowEx/snowexsql
def main(overwrite=False, db='snowex', credentials='./credentials.json'):
    """
    Main function to manage creating our tables in the databases

    Args:
        overwrite: Bool indicating whether to clear the db and rebuild the tables
        db: Name of a local database to write tables to
        credentials: Path to a JSON file holding the database login credentials
    """

    log = get_logger('Create DB')

    engine, session = get_db(f"localhost/{db}", credentials=credentials)

    if overwrite:
        initialize(engine)
        log.warning('Database cleared!\n')

        for t in ['sites', 'points', 'layers', 'images']:

            sql = f'GRANT SELECT ON {t} TO snow;'
            log.info(f'Adding read only permissions for table {t}...')
            engine.execute(sql)
    else:
        log.warning('Aborted. Database has not been modified.\n')

    session.close()
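
A minimal sketch of invoking this entry point (the credentials path is simply the function's own default; overwrite=True is required for the tables to actually be rebuilt):

if __name__ == '__main__':
    main(overwrite=True, db='snowex', credentials='./credentials.json')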
Example #2
    def __init__(self, filenames, **kwargs):
        '''
        Assigns attributes from kwargs and their defaults from self.defaults
        Also opens and assigns the database connection

        Args:
            filenames: List of valid files to be uploaded to the database
            db_name: String name of database this will interact with, default=snowex

            debug: Boolean controlling error handling while uploading files;
                   when True, exceptions are raised immediately instead of
                   being caught and logged. Default=True
            n_files: Integer number of files to upload (useful for testing),
                     Default=-1 (meaning all of the files)
            kwargs: Any keywords that can be passed along to the UploadProfile
                    Class. Any kwargs not recognized will be merged into a
                    comment.
        '''
        self.filenames = filenames
        self.meta = assign_default_kwargs(self, kwargs, self.defaults)

        # Grab logger
        self.log = get_logger(__name__)

        # Performance tracking
        self.errors = []
        self.uploaded = 0

        # Grab db
        self.log.info('Accessing Database {}'.format(self.db_name))
        engine, self.session = get_db(self.db_name)
        self.log.info('Preparing to upload {} files...'.format(len(filenames)))
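
A minimal usage sketch for this constructor, assuming it belongs to the project's batch profile uploader (the class name UploadProfileBatch and its push() method are assumptions here, mirroring the UploadRasterBatch.push() call in Example #6 below; only the kwargs and the uploaded/errors attributes are confirmed by the code above):

import glob

# Hypothetical: gather pit profile CSVs and hand them to the batch uploader
filenames = glob.glob('./data/pits/*.csv')
batch = UploadProfileBatch(filenames, db_name='snowex', debug=False)
batch.push()  # assumed upload entry point
print('{} uploaded, {} errors'.format(batch.uploaded, len(batch.errors)))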
Example #3
def main():
    # Obtain a list of Grand Mesa smp files
    directory = abspath('../download/data/SNOWEX/SNEX20_SMP.001')
    all_filenames = glob.glob(join(directory, 'csv_resampled', '*.CSV'))

    # Keyword arguments.
    kwargs = {
        # Uploader kwargs
        'debug': True,

        # Constant metadata
        'site_name': 'Grand Mesa',
        'units': 'Newtons',
        'in_timezone': 'UTC',
        'out_timezone': 'US/Mountain',
        'instrument': 'snowmicropen',
        'header_sep': ':',
        'doi': 'https://doi.org/10.5067/ZYW6IHFRYDSE',
    }

    # Get logger
    log = get_logger('SMP Upload Script')

    # Form the unique pit ids to loop over
    associated_pits = list(
        set([
            '_'.join(l.split('_')[-2:]).replace('.CSV', '')
            for l in all_filenames
        ]))

    # Keep track of errors
    errors = 0
    nthreads = 6
    pits_per_threads = len(associated_pits) // nthreads
    log.info(
        f'Assigning {pits_per_threads} pits of smp profiles to each of '
        f'{nthreads} threads'
    )

    # Loop over by pit ID so we can assign it to groups of files

    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = []

        for i in range(nthreads):
            # Give the last thread everything remaining so no pits are
            # dropped when nthreads does not divide the list evenly
            end = (i + 1) * pits_per_threads if i < nthreads - 1 else None
            pits = associated_pits[i * pits_per_threads:end]
            futures.append(executor.submit(submit_smp, pits, directory,
                                           kwargs))

    # Collect the errors
    for f in futures:
        errors += f.result()

    # Return the number of errors so run.py can report them
    return errors
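
For reference, a small sketch of how the pit id used above is derived from an SMP filename (the filename here is hypothetical):

fname = 'S19M1013_5S21_20200201.CSV'
pit_id = '_'.join(fname.split('_')[-2:]).replace('.CSV', '')
print(pit_id)  # -> '5S21_20200201'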
Example #4
def main():
    log = get_logger('Create DB')
    db_name = 'snowex'

    engine, session = get_db(db_name)

    a = input('\nWARNING! You are about to overwrite the entire database! Press Y to continue, press any other key to abort: ')
    if a.lower() == 'y':
        initialize(engine)
        log.warning('Database cleared!')

    else:
        log.warning('Aborted. Database has not been modified.')

    session.close()
    log.info('')
Example #5
File: add_QSI.py Project: SnowEx/snowexsql
def reproject(filenames, out_epsg, out_dir, adjust_vdatum=False):
    """
    Reproject the data and then adjust the vertical datum
    """
    log = get_logger('reprojection')
    final = []

    if isdir(out_dir):
        shutil.rmtree(out_dir)

    os.mkdir(out_dir)
    n = len(filenames)
    log.info('Reprojecting {} files...'.format(n))

    for i, r in enumerate(filenames):
        bname = basename(r)
        log.info('Working on {} ({}/{})...'.format(bname, i + 1, n))

        # Construct a new filename
        out = join(out_dir, bname.replace('.adf', '.tif'))
        in_ras = r
        # Some files share a repeating naming convention
        if isfile(out):
            out = join(out_dir,
                       '_'.join(split(r)[-2:]).replace('.adf', '.tif'))

        if adjust_vdatum:
            # Adjust the vertical datum in bash from python
            log.info('Reprojecting the vertical datum...')
            check_output('dem_geoid -o test {}'.format(in_ras), shell=True)

            # # Move the file back
            # log.info('Moving resulting files and cleaning up...')
            # check_output('mv test-adj.tif {}'.format(''), shell=True)
            in_ras = 'test-adj.tif'

        # Reproject the raster and output to the new location in bash from
        # python
        log.info('Reprojecting data to EPSG:{}'.format(out_epsg))
        check_output('gdalwarp -r bilinear -t_srs "EPSG:{}" {} {}'.format(
            out_epsg, in_ras, out),
                     shell=True)

        # Keep the new file name
        final.append(out)

    return final
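
A minimal usage sketch for reproject (the paths are hypothetical; gdalwarp must be on the PATH, and adjust_vdatum=True additionally requires the Ames Stereo Pipeline dem_geoid tool):

tifs = reproject(['./data/qsi/w001001.adf'], 26912, './data/utm12')
print(tifs)  # e.g. ['./data/utm12/w001001.tif']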
Example #6
def main():
    '''
    Uploader script for ASO Snow off data
    '''
    epsg = 26912

    # 1. Define the files, in this case only one
    filenames = [
        '~/Downloads/ASO2016-17-20200918T212934Z-001/ASO2016-17/USCOGM20160926f1a1__lowest_vf_snowEX_extent.tif'
    ]

    # 1B. Expand paths to full absolute paths
    filenames = [abspath(expanduser(f)) for f in filenames]

    # 2. Assign any constant metadata and pass it as keyword arguments to the
    # uploader
    kwargs = {
        'instrument': 'lidar',
        'surveyors': 'ASO',
        'date': date(2016, 9, 26),
        'type': 'DEM',
        'units': 'meters',
        'description': 'Snow off DEM flown by ASO for SNOWEX 2017',
        'tiled': True,
        'epsg': epsg,
        'no_data': -9999
    }

    # 2B. Convert the image from UTM zone 13 to zone 12
    out_dir = join(dirname(filenames[0]), 'utm_12')

    log = get_logger('ASO Uploader')

    # Reproject to the correct epsg
    final = reproject(filenames, epsg, out_dir, adjust_vdatum=True)

    # 3. Pass the final files to the batch uploader
    u = UploadRasterBatch(final, **kwargs)

    # 4. Push to the database and collect the errors from push function
    u.push()

    return len(u.errors)
Example #7
def main():

    # comparison flag produces the figures to show the impact of the resampling
    making_comparison = False
    downsample = 100
    header_pos = 6
    log = get_logger('SMP Resample')
    log.info('Preparing to resample smp profiles for uploading...')

    # Obtain a list of Grand mesa smp files
    directory = abspath('../download/data/SNOWEX/SNEX20_SMP.001')
    filenames = glob.glob(join(directory, '*/*.CSV'))

    # Are we making the plot to show the comparison of the effects?
    if making_comparison:
        make_comparison(filenames[0])

    else:
        # output location
        output = join(directory, 'csv_resampled')

        if isdir(output):
            ans = input(
                '\nWARNING! You are about to overwrite {} previously resampled files '
                'located at {}!\nPress Y to continue and any other '
                'key to abort: '.format(len(filenames), output))

            if ans.lower() == 'y':
                resample_batch(filenames,
                               output,
                               downsample,
                               header_pos=header_pos)
            else:
                log.warning(
                    'Skipping resample and overwriting of resampled files...')
        else:
            mkdir(output)
            resample_batch(filenames,
                           output,
                           downsample,
                           header_pos=header_pos)
Example #8
Unzip to ~/Downloads

Otherwise see main() to redefine the location where the files are stored
'''

from os.path import join, abspath, expanduser, isdir, dirname, basename
from os import listdir, mkdir
from snowexsql.utilities import get_logger, read_n_lines
from snowexsql.conversions import INSAR_to_rasterio
from snowexsql.projection import reproject_raster_by_epsg
from snowexsql.metadata import read_InSar_annotation
import shutil
import glob
import time

log = get_logger('grd2tif')


def convert(filenames, output, epsg, clean_first=False):
    '''
    Convert all UAVSAR .grd files to GeoTIFF, then reproject the resulting
    files from lat/long to UTM and save them to the output dir

    Args:
        filenames: List of *.grd files needed to be converted
        output: directory to output files to
        epsg: epsg of the resulting file
        clean_first: Boolean indicating whether to clear out the output folder first
    '''
    # Keep track of errors, time elapsed, and number of files completed
    start = time.time()
Example #9
def resample_batch(filenames,
                   output,
                   downsample,
                   header_pos=6,
                   clean_on_start=True):
    """
    Resample all the file names and save as csv to the output dir

    Args:
        filenames: List of smp csv files needed to be subsampled
        output: directory to output files to
        downsample: Number of samples to subsample at (e.g. downsample=100 is subsampled to every 100th sample)
        header_pos: Number of header lines at the top of each file
        clean_on_start: Remove the output folder at the start when running
    """

    log = get_logger('SMP Resample')

    if clean_on_start and isdir(output):
        shutil.rmtree(output)

    if not isdir(output):
        log.info('Making output folder {}'.format(output))
        mkdir(output)

    log.info('Resampling {} SMP profiles...'.format(len(filenames)))

    # Loop over all the files, name them using the same name just using a
    # different folder
    for f in filenames:
        base_f = basename(f)

        log.info('Resampling {}'.format(base_f))

        # Open the file for the header and grab it as a list
        header = read_n_lines(f, header_pos)

        # Open the file as a dataframe excluding the header
        df = open_df(f, header_pos=header_pos)

        # Keep every nth sample, where n = downsample
        new_df = subsample(df, downsample)

        # Reduce the precision of the original data without much effect
        new_df = new_df.round({'Depth (mm)': 1, 'Force (N)': 3})
        out_f = join(output, base_f)

        # Write out the original header add some information
        with open(out_f, 'w') as fp:

            # Rename the original total samples
            original_samples = header[-1].split(":")[-1]
            header[-1] = '# Original Total Samples: {}'.format(
                original_samples)

            # Add a header for the fact this data is subsampled
            header.append(
                '# Data Subsampled To: Every {:d}th\n'.format(downsample))
            lines = ''.join(header)
            fp.write(lines)

        new_df.to_csv(out_f, index_label='Original Index', mode='a')
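
open_df and subsample above are project helpers not shown in this snippet; below is a minimal pandas sketch of the same idea, assuming subsampling means keeping every nth row:

import pandas as pd

def subsample(df, downsample):
    # Keep every nth row, a simple stand-in for the project helper
    return df.iloc[::downsample]

df = pd.DataFrame({'Depth (mm)': range(10), 'Force (N)': range(10)})
print(subsample(df, 5))  # keeps rows 0 and 5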
Example #10
def main():

    # Grab the db session
    engine, session = get_db('snowex')

    surveyors = ['aso', 'usgs']

    # Setup
    log = get_logger('Depths Script')
    # Get the distinct dates on which QSI flew
    dates = session.query(
        ImageData.date).filter(ImageData.surveyors == 'QSI').distinct().all()

    # Build an empty dataframe for storing our results in
    results = gpd.GeoDataFrame(
        columns=['geometry', 'aso', 'usgs', 'measured', 'date'])

    # Grab all depths and dates.
    q = session.query(PointData)
    q = q.filter(PointData.type == 'depth')
    df = query_to_geopandas(q, engine)
    log.info('Found {} snow depths...'.format(len(df)))

    bar = progressbar.ProgressBar(max_value=len(df.index))

    # Loop over all the points
    for i, row in df.iterrows():

        # Create an empty dict and add geometry and date for each point
        data = {}
        data['measured'] = row['value']
        data['geometry'] = row['geom']
        data['date'] = row['date']

        point = from_shape(row['geom'], srid=26912).ST_AsEWKT()

        # Get the raster value of a cell nearest center after resampling to the
        # resolution
        snow = get_raster_value(session, point, 'QSI', date=dates[0][0])

        for surveyor in surveyors:
            off = get_raster_value(session, point, surveyor.upper())

            if off is None or snow is None:
                data[surveyor] = None
            else:
                data[surveyor] = (snow - off) * 100  # cm

        results = results.append(data, ignore_index=True)
        bar.update(i)

    session.close()

    log.info('Differences:')

    # Calculate the differences
    for n in surveyors:
        name = '{} diff'.format(n)
        results[name] = results[n] - results['measured']

    results.to_csv('snow_depths.csv')

    # report the stats
    for d in ['usgs diff', 'aso diff']:
        log.info(d)
        get_stats(results[d], logger=log)

    # Make a plot
    fig, ax = plt.subplots(1, 1, figsize=(8, 8))

    # Plot the points colored by differences
    results.plot(ax=ax,
                 column='usgs diff',
                 cmap='RdBu',
                 vmin=-50,
                 vmax=50,
                 legend=True)

    # Don't use scientific notation on the axis ticks
    ax.ticklabel_format(style='plain', useOffset=False)

    # Add x/y labels, a title, a legend and avoid overlapping labels
    ax.set_xlabel('Easting [m]')
    ax.set_ylabel('Northing [m]')
    ax.set_title('USGS')

    plt.show()
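
A quick worked check of the depth arithmetic above, where snow is the snow-on surface elevation from QSI and off a snow-off ground elevation (the values are hypothetical):

snow = 3050.5  # snow-on lidar surface elevation [m]
off = 3049.0   # snow-off bare-ground elevation [m]
depth_cm = (snow - off) * 100
print(depth_cm)  # -> 150.0 cm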
Example #11
Usage:
    python run.py
"""

import time
from snowexsql.utilities import get_logger
import os
import importlib
from create import main as create

# Import preprocessor functions here.
from resample_smp import main as resample_smp
from convert_uavsar import main as convert_uavsar

start = time.time()
log = get_logger('Populate')
log.info('============= SNOWEX DATABASE BUILDER ==================')
log.info('Starting script to populate entire database...')

# dictionary for holding module names and their main functions
addition_scripts = {}

# error tracking for a final report according to each module
errors = {}

# Find all the addition scripts and import them as local modules
for f in os.listdir('.'):
    info = f.split('.')
    local_mod = info[0]
    ext = info[-1]
Example #12
import numpy as np
from snowexsql.metadata import read_InSar_annotation
from snowexsql.conversions import INSAR_to_rasterio
from snowexsql.projection import reproject_raster_by_epsg
from snowexsql.utilities import get_logger
import utm
import matplotlib.pyplot as plt
import rasterio
from rasterio.crs import CRS
from rasterio.plot import show
from rasterio.transform import Affine
import glob
from shutil import copyfile, rmtree
from snowexsql.utilities import find_kw_in_lines

log = get_logger('InSar Test Data')


def get_crop_indices(n, ratio):
    '''
    Given a number of columns or rows, return the start and end indices that
    crop the data around its middle value out to a given fraction

    Args:
        n: Total number of columns or rows
        ratio: Decimal fraction of half the count to keep, centered on the middle
    '''
    start = int(0.5 * n - 0.5 * ratio * 0.5 * n)
    end = int(0.5 * n + 0.5 * ratio * 0.5 * n)
    spread = end - start
    return start, end, spread
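
A quick worked example of the crop math above: with n=1000 and ratio=0.2 the kept window is 0.2 * 500 = 100 samples wide, centered at index 500.

start, end, spread = get_crop_indices(1000, 0.2)
print(start, end, spread)  # -> 450 550 100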