Example #1
# assumed imports for this example (module aliases inferred from usage)
import numpy as np
import geopandas as gpd
import shapely.ops
from shapely.geometry import box
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
import ogh

def calculateUTMbounds(mappingfile,
                       mappingfile_crs={'init': 'epsg:4326'},
                       spatial_resolution=0.06250):
    # read in the mappingfile
    map_df, nstation = ogh.mappingfileToDF(mappingfile)

    # expand each LAT/LONG_ centroid by +/- half the spatial resolution into gridded cells
    geom = []
    midpt = spatial_resolution / 2
    for ind in map_df.index:
        mid = map_df.loc[ind]
        geom.append(
            box(mid.LONG_ - midpt,
                mid.LAT - midpt,
                mid.LONG_ + midpt,
                mid.LAT + midpt,
                ccw=True))

    # generate the GeoDataFrame
    test = gpd.GeoDataFrame(map_df, crs=mappingfile_crs, geometry=geom)

    # tag every cell with the same shapeName so dissolve merges them into one shape
    test['shapeName'] = 1

    # dissolve shape into new shapefile
    newShape = test.dissolve(by='shapeName').reset_index()
    print(newShape.bounds)

    # extract the bounding-box corners and the centroid coordinates
    minx, miny, maxx, maxy = newShape.bounds.loc[0]
    lon0, lat0 = np.array(newShape.centroid[0])

    # generate the basemap raster
    fig = plt.figure(figsize=(10, 10), dpi=500)
    ax1 = plt.subplot2grid((1, 1), (0, 0))
    m = Basemap(projection='tmerc',
                resolution='h',
                ax=ax1,
                lat_0=lat0,
                lon_0=lon0,
                llcrnrlon=minx,
                llcrnrlat=miny,
                urcrnrlon=maxx,
                urcrnrlat=maxy)

    # transform each polygon to the UTM basemap projection
    newShape['g2'] = newShape.apply(
        lambda x: shapely.ops.transform(m, x['geometry']), axis=1)

    # clear the figure; the basemap was only needed for its coordinate transform
    plt.gcf().clear()

    # establish the UTM basemap bounding box dimensions
    minx2, miny2, maxx2, maxy2 = newShape['g2'].iloc[0].bounds
    return (minx2, miny2, maxx2, maxy2)
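
A minimal usage sketch, assuming the mapping file exists at the placeholder path below:

# hypothetical call; 'ogh_mappingfile.csv' is a placeholder path
minx, miny, maxx, maxy = calculateUTMbounds(mappingfile='ogh_mappingfile.csv',
                                            spatial_resolution=0.06250)
print('UTM bounding box:', minx, miny, maxx, maxy)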
Example #2

# assumed imports for this example (module aliases inferred from usage)
import os
import numpy as np
import xarray as xray
import dask as da
from dask.diagnostics import ProgressBar
import ogh

def netcdf_to_ascii_PNNL2018(homedir, subdir, netcdfs, mappingfile,
                             catalog_label, meta_file):
    # initialize a dictionary of delayed CSV-write tasks
    outfiledict = {}

    # generate destination folder
    filedir = os.path.join(homedir, subdir)
    ogh.ensure_dir(filedir)

    # connect with collection of netcdfs
    ds_mf = xray.open_mfdataset(netcdfs, engine='netcdf4')

    # flatten the netCDF collection into a pandas DataFrame (TIME moved out of the index)
    ds_pan = ds_mf.to_dataframe().reset_index('TIME')

    # generate list of variables
    ds_vars = [
        ds_var for ds_var in ds_pan.columns
        if ds_var not in ['YEAR', 'MONTH', 'DAY', 'TIME', 'LAT', 'LON']
    ]

    # read in gridded cells of interest
    maptable, nstation = ogh.mappingfileToDF(mappingfile, colvar=None)

    # at each latlong of interest
    for ind, eachrow in maptable.iterrows():

        # generate ASCII time-series
        ds_df = ds_pan.loc[eachrow['SN'], eachrow['WE'], :].reset_index(
            drop=True).loc[:, ds_vars]

        # create file name
        outfilename = os.path.join(
            filedir, catalog_label +
            '_{0}_{1}'.format(eachrow['LAT'], eachrow['LONG_']))
        # queue a delayed CSV write for ds_df
        outfiledict[outfilename] = da.delayed(ds_df.to_csv)(
            path_or_buf=outfilename, sep='\t', header=False, index=False)

    # compute ASCII time-series files
    ProgressBar().register()
    outfiledict = da.compute(outfiledict)[0]

    # update metadata file
    meta_file[catalog_label]['variable_info'].update(dict(ds_mf.attrs))
    meta_file[catalog_label]['variable_info'].update(dict(ds_mf.variables))
    meta_file[catalog_label]['variable_list'] = np.array(ds_vars)

    # catalog the output files
    ogh.addCatalogToMap(outfilepath=mappingfile,
                        maptable=maptable,
                        folderpath=filedir,
                        catalog_label=catalog_label)
    os.chdir(homedir)
    return (list(outfiledict.keys()))
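
A hedged invocation sketch; every path, netCDF name, and label below is a placeholder, and meta_file must already carry a 'variable_info' dict for the catalog label:

# hypothetical invocation; all paths and the catalog label are placeholders
meta_file = {'pnnl2018': {'variable_info': {}}}
outputs = netcdf_to_ascii_PNNL2018(homedir='/home/user/project',
                                   subdir='pnnl2018_ascii',
                                   netcdfs=['forcing_2000.nc', 'forcing_2001.nc'],
                                   mappingfile='ogh_mappingfile.csv',
                                   catalog_label='pnnl2018',
                                   meta_file=meta_file)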
Example #3
    def test_addCatalogToMap(self):
        # read in a sample mappingfile as test_map
        test_map, nstat = ogh.mappingfileToDF(
            os.path.join(data_path, 'test_mappingfile.csv'), colvar=None)
        ogh.addCatalogToMap(outfilepath=os.path.join(data_path,
                                                     'test_catalog.csv'),
                            maptable=test_map,
                            folderpath=os.path.join(data_path, 'test_files'),
                            catalog_label='test')
        assert True
Example #4
    def test_readmappingfile(self):
        test_map, nstat = ogh.mappingfileToDF(
            os.path.join(data_path, 'test_mappingfile.csv'), colvar=None)
        test_map = test_map.drop_duplicates()
        test_map.to_csv(os.path.join(data_path, 'test_mappingfile.csv'),
                        index=False,
                        columns=['FID', 'LAT', 'LONG_', 'ELEV'])
        assert True

        test_compare = ogh.compareonvar(map_df=test_map, colvar=None)
        assert True
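
The two test methods above are sliced out of a larger test class; a minimal harness sketch follows, assuming pytest-style collection and a module-level data_path (the class name and data location are assumptions, not part of the examples):

# hypothetical harness; the class name and data_path value are assumptions
import os
import ogh

data_path = os.path.join(os.path.dirname(__file__), 'data')

class TestOghCatalog(object):
    # place test_readmappingfile and test_addCatalogToMap from above here
    pass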
Example #5
# assumed imports for this example (module aliases inferred from usage)
import os
import numpy as np
import pandas as pd
import xarray as xray
import dask as da
from dask.diagnostics import ProgressBar
import ogh

def netcdf_to_ascii(homedir,
                    subdir,
                    source_directory,
                    mappingfile,
                    catalog_label,
                    meta_file,
                    temporal_resolution='D',
                    netcdfs=None,
                    variable_list=None):
    # initialize a dictionary of delayed CSV-write tasks
    outfiledict = {}

    # generate destination folder
    filedir = os.path.join(homedir, subdir)
    ogh.ensure_dir(filedir)

    # connect with collection of netcdfs
    if netcdfs is None:
        netcdfs = [
            os.path.join(source_directory, file)
            for file in os.listdir(source_directory) if file.endswith('.nc')
        ]
    ds_mf = xray.open_mfdataset(netcdfs, engine='netcdf4').sortby('TIME')

    # generate list of variables
    if variable_list is not None:
        ds_vars = variable_list.copy()
    else:
        ds_vars = [
            ds_var for ds_var in dict(ds_mf.variables).keys()
            if ds_var not in ['YEAR', 'MONTH', 'DAY', 'TIME', 'LAT', 'LON']
        ]

    # flatten the netCDF collection into a pandas DataFrame of the selected variables
    ds_pan = ds_mf.to_dataframe()[ds_vars]

    # read in gridded cells of interest
    maptable, nstation = ogh.mappingfileToDF(mappingfile,
                                             colvar=None,
                                             summary=False)

    # at each latlong of interest
    for ind, eachrow in maptable.iterrows():

        # generate ASCII time-series
        ds_df = ds_pan.loc[eachrow['LAT'],
                           eachrow['LONG_'], :].reset_index(drop=True,
                                                            level=[0, 1])

        # create file name
        outfilename = os.path.join(
            filedir, 'data_{0}_{1}'.format(eachrow['LAT'], eachrow['LONG_']))

        # queue a delayed CSV write for ds_df
        outfiledict[outfilename] = da.delayed(ds_df.to_csv)(
            path_or_buf=outfilename, sep='\t', header=False, index=False)

    # compute ASCII time-series files
    ProgressBar().register()
    outfiledict = da.compute(outfiledict)[0]

    # annotate metadata file
    meta_file[catalog_label] = dict(ds_mf.attrs)
    meta_file[catalog_label]['variable_list'] = list(np.array(ds_vars))
    meta_file[catalog_label]['delimiter'] = '\t'
    meta_file[catalog_label]['start_date'] = pd.Series(
        ds_mf.TIME).sort_values().iloc[0].strftime('%Y-%m-%d %H:%M:%S')
    meta_file[catalog_label]['end_date'] = pd.Series(
        ds_mf.TIME).sort_values().iloc[-1].strftime('%Y-%m-%d %H:%M:%S')
    meta_file[catalog_label]['temporal_resolution'] = temporal_resolution
    meta_file[catalog_label]['variable_info'] = dict(ds_mf.variables)

    # catalog the output files
    ogh.addCatalogToMap(outfilepath=mappingfile,
                        maptable=maptable,
                        folderpath=filedir,
                        catalog_label=catalog_label)
    os.chdir(homedir)
    return (list(outfiledict.keys()))
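
As with the PNNL2018 variant, a hedged invocation sketch; all paths and the catalog label are placeholders:

# hypothetical invocation; paths and the catalog label are placeholders
meta_file = {}
outputs = netcdf_to_ascii(homedir='/home/user/project',
                          subdir='daily_ascii',
                          source_directory='/home/user/project/netcdfs',
                          mappingfile='ogh_mappingfile.csv',
                          catalog_label='dailymet',
                          meta_file=meta_file,
                          temporal_resolution='D')
print(len(outputs), 'ASCII time-series files written')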