Example #1
# Assumed imports: get_db and PointDataCSV are project helpers from the
# snowexsql / snowex_db packages (exact module paths vary by version).
import time
from os.path import abspath


def main():
    start = time.time()

    # Site name
    site_name = 'Grand Mesa'

    # Read in the Grand Mesa Snow Depths Data
    f = abspath('../download/data/SnowEx2020.snowdepth.snowstakes.alldepths_clean_v10.csv')

    # Start the Database
    db_name = 'localhost/snowex'
    engine, session = get_db(db_name, credentials='./credentials.json')

    csv = PointDataCSV(
        f,
        depth_is_metadata=False,
        units='cm',
        site_name=site_name,
        surveyors='Catherine Breen, Cassie Lumbrazo',
        instrument='camera',
        in_timezone='US/Mountain',
        epsg=26912,
        doi=None)

    csv.submit(session)
    errors = len(csv.errors)

    return errors
Example #2
# Assumed imports: get_db and PointDataCSV are project helpers from the
# snowexsql / snowex_db packages (exact module paths vary by version).
import glob
import time
from os.path import abspath, join


def main():
    start = time.time()

    # Site name
    site_name = 'Grand Mesa'
    timezone = 'MST'

    # Read in the Grand Mesa Snow Depths Data
    base = abspath(join('../download/data/SNOWEX/SNEX20_SD.001/'))

    # Start the Database
    db_name = 'snowex'
    engine, session = get_db(db_name)

    csvs = glob.glob(join(base, '*/*.csv'))

    errors = 0

    for f in csvs:
        csv = PointDataCSV(f,
                           depth_is_metadata=False,
                           units='cm',
                           site_name=site_name,
                           timezone=timezone,
                           epsg=26912)
        csv.submit(session)
        errors += len(csv.errors)

    return errors
Example #3
# Assumed imports: get_logger, get_db and initialize are project helpers from
# the snowexsql / snowex_db packages (exact module paths vary by version).
def main(overwrite=False, db='snowex', credentials='./credentials.json'):
    """
    Main function to manage creating our tables in the database

    Args:
        overwrite: Bool; when True, wipe and re-initialize the database
                   without prompting
        db: Name of a local database to write tables to
        credentials: Path to a JSON file containing the database username
                     and password
    """

    log = get_logger('Create DB')

    engine, session = get_db(f"localhost/{db}", credentials=credentials)

    if overwrite:
        initialize(engine)
        log.warning('Database cleared!\n')

        for t in ['sites', 'points', 'layers', 'images']:

            sql = f'GRANT SELECT ON {t} TO snow;'
            log.info(f'Adding read only permissions for table {t}...')
            engine.execute(sql)
    else:
        log.warning('Aborted. Database has not been modified.\n')

    session.close()
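
A minimal, illustrative call of the function above, using the defaults from its own signature:

# Illustrative only: wipe and rebuild the local "snowex" database, then
# grant read-only SELECT on each table to the "snow" user.
main(overwrite=True, db='snowex', credentials='./credentials.json')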
Example #4
# Assumed imports: get_db and PointDataCSV are project helpers from the
# snowexsql / snowex_db packages (exact module paths vary by version).
from os.path import abspath, expanduser


def main():
    file = '../download/data/SNOWEX/SNEX20_BSU_GPR.001/2020.01.28/SNEX20_BSU_GPR_pE_01282020_01292020_02042020_downsampled.csv'

    kwargs = {
        # Keyword argument to upload depth measurements
        'depth_is_metadata': False,

        # Constant Metadata for the GPR data
        'site_name': 'Grand Mesa',
        'surveyor': 'Tate Meehan',
        'instrument': 'pulse EKKO Pro multi-polarization 1 GHz GPR',
        'in_timezone': 'UTC',
        'out_timezone': 'US/Mountain',
        'epsg': 26912
    }

    # Break out the path and make it an absolute path
    file = abspath(expanduser(file))

    # Grab a db connection to a local db named snowex
    db_name = 'snowex'
    engine, session = get_db(db_name)

    # Instantiate the point uploader
    csv = PointDataCSV(file, **kwargs)
    # Push it to the database
    csv.submit(session)

    # Close out the session with the DB
    session.close()

    # Return the number of errors so run.py can report it
    return len(csv.errors)
Example #5
# Assumed imports: get_db and PointDataCSV are project helpers from the
# snowexsql / snowex_db packages (exact module paths vary by version).
import glob
import time
from os.path import abspath, join


def main():
    start = time.time()

    # Site name
    site_name = 'Grand Mesa'
    timezone = 'US/Mountain'

    # Read in the Grand Mesa Snow Depths Data
    base = abspath(join('../download/data/SNOWEX/SNEX20_SD.001/'))

    # Start the Database
    db_name = 'localhost/snowex'
    engine, session = get_db(db_name, credentials='./credentials.json')

    csvs = glob.glob(join(base, '*/*.csv'))

    errors = 0

    for f in csvs:
        csv = PointDataCSV(f,
                           depth_is_metadata=False,
                           units='cm',
                           site_name=site_name,
                           timezone=timezone,
                           epsg=26912,
                           doi="https://doi.org/10.5067/9IA978JIACAR")

        csv.submit(session)
        errors += len(csv.errors)

    return errors
Example #6
    def __init__(self, filenames, **kwargs):
        '''
        Assigns attributes from kwargs and their defaults from self.defaults.
        Also opens and assigns the database connection.

        Args:
            filenames: List of valid files to be uploaded to the database
            db_name: String name of the database this will interact with,
                     default=snowex
            debug: Boolean controlling error handling during upload; when
                   True, exceptions are raised immediately instead of being
                   caught and tallied. Default=True
            n_files: Integer number of files to upload (useful for testing),
                     default=-1 (meaning all of the files)
            kwargs: Any keywords that can be passed along to the UploadProfile
                    class. Any kwargs not recognized will be merged into a
                    comment.
        '''
        self.filenames = filenames
        self.meta = assign_default_kwargs(self, kwargs, self.defaults)

        # Grab logger
        self.log = get_logger(__name__)

        # Performance tracking
        self.errors = []
        self.uploaded = 0

        # Grab db
        self.log.info('Accessing Database {}'.format(self.db_name))
        engine, self.session = get_db(self.db_name)
        self.log.info('Preparing to upload {} files...'.format(len(filenames)))
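
A minimal usage sketch for the constructor above; "UploadBatch" is a placeholder for the real class name (the snippet does not show it), and the keyword names follow the docstring:

# Hypothetical usage; "UploadBatch" stands in for the real batch-upload class
# and the db_name/debug kwargs follow the docstring above.
import glob

filenames = glob.glob('./data/*.csv')
batch = UploadBatch(filenames, db_name='snowex', debug=True)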
Example #7
    def setup_class(self):
        """
        Set up the database one time for testing
        """
        self.db = 'localhost/test'
        self.data_dir = join(dirname(__file__), 'data')
        creds = join(dirname(__file__), 'credentials.json')

        self.engine, self.session, self.metadata = get_db(self.db, credentials=creds, return_metadata=True)

        initialize(self.engine)
Example #8
    def setup_class(self):
        """
        Set up the database one time for testing
        """
        self.db = 'test'
        self.data_dir = join(dirname(__file__), 'data')

        self.engine, self.session, self.metadata = get_db(self.db,
                                                          return_metadata=True)

        initialize(self.engine)
Example #9
# Assumed imports: get_logger, get_db and initialize are project helpers from
# the snowexsql / snowex_db packages (exact module paths vary by version).
def main():
    log = get_logger('Create DB')
    db_name = 'snowex'

    engine, session = get_db(db_name)

    a = input('\nWARNING! You are about to overwrite the entire database! Press Y to continue, press any other key to abort: ')
    if a.lower() == 'y':
        initialize(engine)
        log.warning('Database cleared!')

    else:
        log.warning('Aborted. Database has not been modified.')

    session.close()
    log.info('')
Example #10
# Assumed imports: get_db and the table classes (ImageData, SiteData,
# PointData, LayerData) are project helpers from the snowexsql / snowex_db
# packages (exact module paths vary by version).
import argparse

import pandas as pd


def clear_dataset():

    # Add tool arguments
    parser = argparse.ArgumentParser(
        description='Delete sections of the database by dataset name and surveyors')
    parser.add_argument('table_class', metavar='C', type=str, choices=['ImageData', 'SiteData', 'PointData', 'LayerData'],
                        help='TableClass name to query')

    parser.add_argument(
        '--types',
        '-t',
        dest='types',
        nargs='+',
        help='Names of the data to delete')
    parser.add_argument(
        '--surveyors',
        '-s',
        dest='surveyors',
        help='Name of the surveyors to filter the data to delete')
    parser.add_argument(
        '--date',
        '-d',
        dest='date',
        help='Date of the data to filter by')
    parser.add_argument(
        '--database',
        '-db',
        dest='db',
        default='localhost/snowex',
        help='name of the postgres database to connect to')

    parser.add_argument(
        '--credentials',
        '-c',
        dest='credentials',
        default='./credentials.json',
        help='path to a json containing username and password keys')


    args = parser.parse_args()

    print('\n  SNOWEX DB Deletion Tool')
    print('==============================\n')

    # Get the DB table
    tables = {
        'ImageData': ImageData,
        'SiteData': SiteData,
        'PointData': PointData,
        'LayerData': LayerData}

    TableClass = tables[args.table_class]

    print('Using the {} table to query...'.format(
        args.table_class.replace('Data', '')))

    # Grab database session
    engine, session = get_db(args.db, credentials=args.credentials)

    # Form the query
    q = session.query(TableClass)

    # Filter by data names
    if args.types:
        print(
            'Filtering results to data types in {}...'.format(
                ', '.join(
                    args.types)))
        q = q.filter(TableClass.type.in_(args.types))

    # Filter by surveyor
    if args.surveyors is not None:
        print('Filtering results to surveyors {}...'.format(args.surveyors))
        q = q.filter(TableClass.surveyors == args.surveyors)

    # Filter by date
    if args.date is not None:
        d = pd.to_datetime(args.date).date()
        print('Filtering results to the date {}...'.format(d))
        q = q.filter(TableClass.date == d)

    # Form a count query
    count = q.count()

    if count > 0:
        question = ('\nYou are about to delete {} {} records. Continue? (Y/n) '
                    ''.format(count, args.table_class.replace('Data', ' Data')))
        ans = input(question)

        if ans == 'Y':
            print("Deleting {} records...".format(count))
            q.delete()
            session.commit()
            print('Complete!\n')

        else:
            print('Aborting.\n')
    else:
        print('No records to delete!\n')

    session.close()
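
An example invocation of the tool above; the script name is an assumption:

# Hypothetical command line; "clear_dataset.py" is an assumed script name.
#   python clear_dataset.py PointData --types depth --date 2020-02-04 \
#       --database localhost/snowex --credentials ./credentials.json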
Example #11
# Assumed imports: get_db, get_logger, get_raster_value, get_stats,
# query_to_geopandas and the table classes are project helpers from the
# snowexsql / snowex_db packages (exact module paths vary by version).
import geopandas as gpd
import matplotlib.pyplot as plt
import progressbar
from geoalchemy2.shape import from_shape


def main():

    # Grab the db session
    engine, session = get_db('snowex')

    surveyors = ['aso', 'usgs']

    # Setup
    log = get_logger('Depths Script')
    # Get the distinct dates of QSI imagery
    dates = session.query(
        ImageData.date).filter(ImageData.surveyors == 'QSI').distinct().all()

    # Build an empty dataframe for storing our results
    results = gpd.GeoDataFrame(
        columns=['geometry', 'aso', 'usgs', 'measured', 'date'])

    # Grab all depths and dates.
    q = session.query(PointData)
    q = q.filter(PointData.type == 'depth')
    df = query_to_geopandas(q, engine)
    log.info('Found {} snow depths...'.format(len(df)))

    bar = progressbar.ProgressBar(max_value=len(df.index))

    # Loop over all the points
    for i, row in df.iterrows():

        # Create an empty dict and add geometry and date for each point
        data = {}
        data['measured'] = row['value']
        data['geometry'] = row['geom']
        data['date'] = row['date']

        point = from_shape(row['geom'], srid=26912).ST_AsEWKT()

        # Get the raster value of the cell nearest the point center after
        # resampling to the target resolution
        snow = get_raster_value(session, point, 'QSI', date=dates[0][0])

        for surveyor in surveyors:
            off = get_raster_value(session, point, surveyor.upper())

            if off is None or snow is None:
                data[surveyor] = None
            else:
                data[surveyor] = (snow - off) * 100  # cm

        results = results.append(data, ignore_index=True)
        bar.update(i)

    session.close()

    log.info('Differences:')

    # Calculate the differences
    for n in surveyors:
        name = '{} diff'.format(n)
        results[name] = results[n] - results['measured']

    results.to_csv('snow_depths.csv')

    # report the stats
    for d in ['usgs diff', 'aso diff']:
        log.info(d)
        get_stats(results[d], logger=log)

    # Make a plot
    fig, ax = plt.subplots(1, 1, figsize=(8, 8))

    # Plot the points colored by differences
    results.plot(ax=ax,
                 column='usgs diff',
                 cmap='RdBu',
                 vmin=-50,
                 vmax=50,
                 legend=True)

    # Don't use scientific notation on the axis ticks
    ax.ticklabel_format(style='plain', useOffset=False)

    # Add x/y labels, a title, a legend and avoid overlapping labels
    ax.set_xlabel('Easting [m]')
    ax.set_ylabel('Northing [m]')
    ax.set_title('USGS')

    plt.show()