label_thin = []

    ### cycle through all files, retrieve data and plot
    print(files_path)
    for ind, ncfile in enumerate(files_path):
        print "Working on {activefile}".format(activefile=ncfile)

        #open/read netcdf files
        df = EcoFOCI_netCDF(ncfile)
        global_atts = df.get_global_atts()
        vars_dic = df.get_vars()
        ncdata = df.ncreadfile_dic()
        df.close()

        if args.timeseries_overlay:
            nctime = EPIC2Datetime(ncdata['time'], ncdata['time2'])

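            # Fold every calendar year onto a fixed two-year window (2000/2001 for
            # records starting in an even year, 1999/2000 for odd) so multi-year
            # records can be overlaid on a single axis.  Leap days always fall in
            # an even year and so map to 2000, itself a leap year, which keeps
            # datetime.replace() from raising a Feb 29 ValueError.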
            def set_year_even(x):
                if x.year % 2 == 0:
                    return x.replace(year=2000)
                elif x.year % 2 == 1:
                    return x.replace(year=2001)

            def set_year_odd(x):
                if x.year % 2 == 0:
                    return x.replace(year=2000)
                elif x.year % 2 == 1:
                    return x.replace(year=1999)

            if nctime[0].year % 2 == 0:
                nctime = [set_year_even(x) for x in nctime]
            else:
                nctime = [set_year_odd(x) for x in nctime]
parser.add_argument(
    '-timebounds',
    '--timebounds',
    nargs=3,
    type=str,
    help='fixed timebounds start: yyyy-mm-dd end: yyyy-mm-dd "month"')

args = parser.parse_args()

print "Working on file %s " % args.DataPath

nc = EcoFOCI_netCDF(args.DataPath)
ncdata = nc.ncreadfile_dic()
global_atts = nc.get_global_atts()
nc.close()

cast_time = EPIC2Datetime(ncdata['time'], ncdata['time2'])
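# (EPIC stores time as two integer words: 'time' holds the true Julian Day
#  number and 'time2' the milliseconds since midnight of that day, which
#  EPIC2Datetime combines into datetime.datetime objects)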
doy = np.array([x.timetuple().tm_yday for x in cast_time])

p1 = TimeseriesPorpertyPropertyPlot()
try:
    t1 = p1.add_title(mooringid=global_atts['MOORING'],
                      lat=ncdata['lat'][0],
                      lon=ncdata['lon'][0],
                      depth=ncdata['depth'][0],
                      instrument=args.instname)
except KeyError:
    t1 = p1.add_title(mooringid=global_atts['MOORING'],
                      lat=ncdata['latitude'][0],
                      lon=ncdata['longitude'][0],
                      depth=ncdata['depth'][0],
                      instrument=args.instname)
Example #3
parser.add_argument(
    "-fp",
    '--full_path',
    action="store_true",
    help='provides full path to program: used if run as script')

args = parser.parse_args()

#read in WPAK data file
ncfile = args.DataPath
df = EcoFOCI_netCDF(ncfile)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
ncdata = df.ncreadfile_dic()
df.close()
nctime = get_UDUNITS(EPIC2Datetime(ncdata['time'], ncdata['time2']),
                     'days since 0001-1-1') + 1.0
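# (the +1.0 presumably shifts 'days since 0001-1-1' onto the classic matplotlib
#  date-number convention, where day 1.0 corresponds to 0001-01-01 00:00 UTC)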

# filter data to convert 1e35 -> np.nan
for keynames in ncdata.keys():
    if keynames not in [
            'time', 'time2', 'lat', 'lon', 'latitude', 'longitude', 'depth',
            'dep'
    ]:
        ncdata[keynames][np.where(ncdata[keynames][:, 0, 0, 0] >= 1e30), 0, 0,
                         0] = np.nan
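# (1e35 is the EPIC missing-data flag; anything >= 1e30 is treated as a fill
#  value and masked to NaN before plotting)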

p1 = TimeseriesWPAK(stylesheet='seaborn-poster', labelsize=16)
try:
    t1 = p1.add_title(mooringid=global_atts['MOORING'],
                      lat=ncdata['lat'][0],
parser.add_argument('instname', metavar='instname', type=str,
                    help='instrument name')
parser.add_argument("-fp", '--full_path', action="store_true",
                    help='provides full path to program: used if run as script')
          
args = parser.parse_args()

#read in 1d data file
ncfile = args.DataPath
df = EcoFOCI_netCDF(args.DataPath)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
#check that variable is in data file and exit if not
is_in_dic(args.epickey, vars_dic)
ncdata = df.ncreadfile_dic()
df.close()
nctime = get_UDUNITS(EPIC2Datetime(ncdata['time'], ncdata['time2']),
                     'days since 0001-1-1') + 1.0

# filter data to convert 1e35 -> np.nan
ncdata[args.epickey[0]][np.where(ncdata[args.epickey[0]][:, 0, 0, 0] >= 1e30), 0, 0, 0] = np.nan
ncdata[args.epickey[1]][np.where(ncdata[args.epickey[1]][:, 0, 0, 0] >= 1e30), 0, 0, 0] = np.nan
ncdata[args.epickey[2]][np.where(ncdata[args.epickey[2]][:, 0, 0, 0] >= 1e30), 0, 0, 0] = np.nan

p1 = Timeseries3varPlot(plotstyle='k.-',stylesheet='ggplot')
try:
    t1 = p1.add_title(mooringid=global_atts['MOORING'],
                             lat=ncdata['lat'][0],
                             lon=ncdata['lon'][0],
                             depth=ncdata['depth'][0],
                             instrument=args.instname)
Example #5
        print("Reading file for {}".format(filename))

        ncin = Dataset(str(filename), 'r')
        try:
            data = ncin.variables[args.varname][:, 0, 0, 0]
        except KeyError:
            data = ncin.variables[altvarname][:, 0, 0, 0]
        ncdata = {
            'time': ncin.variables['time'][:],
            'time2': ncin.variables['time2'][:]
        }
        ncin.close()

        #Convert two word EPIC time to python datetime.datetime
        # representation and then format for CF standards
        dt_from_epic = np.array(EPIC2Datetime(ncdata['time'], ncdata['time2']))

        #Find relevant chunk of times to keep based on arguments
        dt_index = np.where((dt_from_epic >= start_date)
                            & (dt_from_epic <= end_date))

        if key == 0:
            eof_data = data[dt_index]
        else:
            try:
                eof_data = np.vstack((eof_data, data[dt_index]))
            except ValueError:
                sys.exit("Exiting: timeseries have different lengths")

    if args.normalize:
        eof_data_std = np.std(eof_data, axis=1)
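        # (a minimal sketch of the usual standardization step, assuming each
        #  series is divided by its own standard deviation:
        #  eof_data = eof_data / eof_data_std[:, np.newaxis])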
Example #6
if args.tave and args.isCF:
    #use argument string to set up frequency
    for ind_file in ifile:
        ds = xr.open_dataset(ind_file)
        dsr = ds.resample(args.tave,
                          dim='time',
                          how='mean',
                          closed='right',
                          label='right')
        print(dsr.to_dataframe().to_csv())
        ds.close()

elif args.tave and not args.isCF:
    for ind_file in ifile:
        ds = xr.open_dataset(ind_file, decode_times=False)
        ds['time'] = EPIC2Datetime(ds.time.data, ds.time2.data)
        ds = ds.drop('time2')
        dsr = ds.resample(args.tave,
                          dim='time',
                          how='mean',
                          closed='right',
                          label='right')
        print(dsr.to_dataframe().to_csv())
        ds.close()
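        # Note: ds.resample(freq, dim='time', how='mean', ...) above is the older
        # xarray resample signature.  A minimal sketch of the equivalent call in
        # current xarray releases (assuming args.tave is a pandas-style frequency
        # string such as '1H'):
        #   dsr = ds.resample(time=args.tave, closed='right', label='right').mean()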

if args.tdecim and not args.isCF:
    for ind_file in ifile:
        if args.tdecim in ['10T']:
            df = EcoFOCI_netCDF(ind_file)
            global_atts = df.get_global_atts()
            vars_dic = df.get_vars()
if args.featureType:
    featureType = args.featureType
else:
    featureType = ''

if args.operation in ['CF', 'CF Convert', 'CF_Convert']:
    #generates new file
    if args.is2D:

        df = EcoFOCI_netCDF(args.sourcefile)
        global_atts = df.get_global_atts()
        vars_dic = df.get_vars()
        ncdata = df.ncreadfile_dic()

        #Convert two word EPIC time to python datetime.datetime representation and then format for CF standards
        dt_from_epic = EPIC2Datetime(ncdata['time'], ncdata['time2'])
        if args.time_since_str:
            time_since_str = " ".join(args.time_since_str)
        else:
            time_since_str = 'days since 1900-01-01'
        CF_time = get_UDUNITS(dt_from_epic, time_since_str)

        try:
            History = global_atts['History']
        except KeyError:
            History = ''
        
        ###build/copy attributes and fill if empty
        try:
            data_cmnt = global_atts['DATA_CMNT']