Example #1
def pandas2netcdf(df=None, ofile="data.nc"):

    if df.empty:
        return
    else:
        df["time"] = [
            date2num(x[1], "hours since 1900-01-01T00:00:00Z")
            for x in enumerate(df.index)
        ]

        EPIC_VARS_dict = ConfigParserLocal.get_config(
            "config_files/drifters.yaml", "yaml")

        # create new netcdf file
        ncinstance = EcF_write.NetCDF_Create_Profile_Ragged1D(savefile=ofile)
        ncinstance.file_create()
        ncinstance.sbeglobal_atts(
            raw_data_file="", History="File Created from ARGSOS Drifter Data.")
        ncinstance.dimension_init(recnum_len=len(df))
        ncinstance.variable_init(EPIC_VARS_dict)
        ncinstance.add_coord_data(recnum=range(1, len(df) + 1))
        ncinstance.add_data(EPIC_VARS_dict,
                            data_dic=df,
                            missing_values=np.nan,
                            pandas=True)
        ncinstance.close()
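
The time column written above follows the "hours since 1900-01-01" units convention; a minimal sketch of that encoding with netCDF4's date2num, using an invented two-row DataFrame:

import pandas as pd
from netCDF4 import date2num

df = pd.DataFrame({"latitude": [57.1, 57.2]},
                  index=pd.to_datetime(["2016-06-01 00:00", "2016-06-01 01:00"]))
# encode the DatetimeIndex the same way the example above does
df["time"] = date2num(df.index.to_pydatetime(),
                      "hours since 1900-01-01T00:00:00Z")
print(df["time"].tolist())
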
Example #2
def PointerReader(pointer_file_path):
    """
    Get parameters from specified pointerfile: 
        An example is shown in the header description of
        this program.

    """

    if pointer_file_path.split('.')[-1] == 'pyini':
        pointer_file = ConfigParserLocal.get_config(pointer_file_path, 'json')
    elif pointer_file_path.split('.')[-1] == 'yaml':
        pointer_file = ConfigParserLocal.get_config(pointer_file_path, 'yaml')
    else:
        print("PointerFile format not recognized")
        sys.exit()

    return pointer_file
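
The extension dispatch in PointerReader can be sketched without the EcoFOCI helper, using only the standard json module and PyYAML; the function name and error handling below are illustrative, not the project's API:

import json
import yaml  # PyYAML

def load_pointer_file(path):
    """Parse a .pyini (json) or .yaml pointer file into a dict."""
    ext = path.split('.')[-1]
    with open(path) as fh:
        if ext == 'pyini':
            return json.load(fh)
        if ext == 'yaml':
            return yaml.safe_load(fh)
    raise ValueError("PointerFile format not recognized: {0}".format(path))
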
Example #3
    def save2nc(self, PointerFile=None):

        inst_type = self.pointer_dic['Ylabel']
        EPIC_VARS_dict = ConfigParserLocal.get_config(
            'EcoFOCI_config/epickeys/' + self.plot_var + '_epickeys.json',
            'json')

        epic_dt = Datetime2EPIC((num2date(self.time_array,
                                          'days since 0001-01-01')).tolist())

        data_dic = {}
        self.mesh_grid_data[np.isnan(self.mesh_grid_data)] = 1e35
        data_dic[self.plot_var] = self.mesh_grid_data
        ncinstance = GriddedNC(savefile='data/' + self.MooringID + '_' +
                               self.plot_var + '_gridded.nc')
        ncinstance.file_create()
        ncinstance.sbeglobal_atts(raw_data_file='',
                                  Station_Name=self.MooringID,
                                  Water_Depth=self.pointer_dic['depth_m'],
                                  InstType=inst_type)
        ncinstance.dimension_init(time_len=len(self.time_array),
                                  depth_len=len(self.data.keys()))
        ncinstance.variable_init(EPIC_VARS_dict)
        try:
            ncinstance.add_coord_data(depth=sorted(self.data.keys()),
                                      latitude=self.ncdata['lat'][0],
                                      longitude=self.ncdata['lon'][0],
                                      time1=epic_dt[0],
                                      time2=epic_dt[1])
        except KeyError:  # fall back when coordinates are named 'latitude'/'longitude'
            ncinstance.add_coord_data(depth=sorted(self.data.keys()),
                                      latitude=self.ncdata['latitude'][0],
                                      longitude=self.ncdata['longitude'][0],
                                      time1=epic_dt[0],
                                      time2=epic_dt[1])
        ncinstance.add_data(EPIC_VARS_dict, data_dic=data_dic)
        if self.gaps_filled:
            ncinstance.add_history(
                'Gridded using:{program}, Config file used:{file}\n Gaps filled Linearly\n'
                .format(program=__file__, file=PointerFile))
        else:
            ncinstance.add_history(
                'Gridded using:{program}, Config file used:{file}\n'.format(
                    program=__file__, file=PointerFile))
        ncinstance.close()
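
The NaN substitution near the top of save2nc follows the 1e35 missing-value convention used throughout these scripts; a minimal numpy sketch:

import numpy as np

grid = np.array([[12.1, np.nan], [np.nan, 11.8]])
grid[np.isnan(grid)] = 1e35  # flag gaps with the 1e35 fill value before writing
print(grid)
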
Example #4
                    action="store_true",
                    help='output multiple mooring data on one file')
parser.add_argument("-ctd",
                    '--ctd_calibration_plots',
                    action="store_true",
                    help='output CTD calibration point with timeseries')

args = parser.parse_args()
"""---------------------------------------------------------------------------------------
Get parameters from specified pointerfile - 
an example is shown in the header description of
this program.  It can be of the .pyini (json) form or .yaml form

"""
if args.PointerFile.split('.')[-1] == 'pyini':
    pointer_file = ConfigParserLocal.get_config(args.PointerFile,
                                                ftype='pyini')
elif args.PointerFile.split('.')[-1] == 'yaml':
    pointer_file = ConfigParserLocal.get_config(args.PointerFile, ftype='yaml')
else:
    print "PointerFile format not recognized"
    sys.exit()

MooringID = pointer_file['MooringID']
color_options = pointer_file['colors']
label = pointer_file['legend']
legend_loc = pointer_file['legend_loc']
legend_off = pointer_file['legend_off']
datatype = pointer_file['dtype']
plot_var = pointer_file['EPIC_Key']
plot_var_ctd = pointer_file['EPIC_Key_ctd']
LocatorInterval = pointer_file['Date_Ticks']
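
The keys read above imply a pointer file of roughly the following shape; the dict below is a hypothetical sketch of what get_config would return (values invented for illustration):

# Hypothetical pointer-file contents; keys match what the example above reads,
# values are invented for illustration only.
pointer_file = {
    'MooringID': '14bsm2a',
    'colors': ['r', 'b', 'g'],
    'legend': ['sbe16', 'sbe37'],
    'legend_loc': 1,
    'legend_off': False,
    'dtype': ['sbe16', 'sbe37'],
    'EPIC_Key': 'T_20',
    'EPIC_Key_ctd': 'T_28',
    'Date_Ticks': 'multi_year',
}
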
Example #5
                    '--ctd_calibration_plots',
                    action="store_true",
                    help='plot CTD calibration point on timeseries')
parser.add_argument("-overlay",
                    "--timeseries_overlay",
                    action="store_true",
                    help='plot timeseries over each other')
args = parser.parse_args()
"""---------------------------------------------------------------------------------------
Get parameters from specified pointerfile - 
an example is shown in the header description of
this program.  It can be of the .pyini (json) form or .yaml form

"""
if args.PointerFile.split('.')[-1] == 'pyini':
    pointer_file = ConfigParserLocal.get_config(args.PointerFile, 'json')
elif args.PointerFile.split('.')[-1] == 'yaml':
    pointer_file = ConfigParserLocal.get_config(args.PointerFile, 'yaml')
else:
    print "PointerFile format not recognized"
    sys.exit()

MooringID = pointer_file['MooringID']
color = pointer_file['colors']
linestyle = pointer_file['linestyle']
label = pointer_file['legend']
nominal_depth = pointer_file['nominal_depth']
legend_loc = pointer_file['legend_loc']
legend_off = pointer_file['legend_off']
datatype = pointer_file['dtype']
plot_var = pointer_file['EPIC_Key']
Example #6
parser.add_argument('OutPreFix',
                    metavar='OutPreFix',
                    type=str,
                    help='prefix for output file')
parser.add_argument('-perdive',
                    '--perdive',
                    action="store_true",
                    help='file per downcast')
args = parser.parse_args()

df = pd.read_csv(args.DataPath, parse_dates=True)
df['time_num'] = [
    date2num(datetime.datetime.strptime(x, '%Y-%m-%d %H:%M:%S'),
             'hours since 1900-01-01T00:00:00Z') for x in df.datetime
]
EPIC_VARS_dict = ConfigParserLocal.get_config(args.ConfigFile, 'yaml')

data_dic = {}

dfg = df.groupby('divenumber')
for prw_cast_id in dfg.groups.keys():
    sample, time, Depth, Temp, Cond, Salinity = [], [], [], [], [], []
    DO, DO_Temp, Chl, Turb, SigmaT, DO_Sat = [], [], [], [], [], []

    data_dic[prw_cast_id] = [{
        'sample':
        dfg.get_group(prw_cast_id).index.values,
        'time':
        dfg.get_group(prw_cast_id).time_num.values,
        'Depth':
        dfg.get_group(prw_cast_id).press.values,
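
The per-dive loop above relies on pandas groupby/get_group; a minimal sketch on an invented frame:

import pandas as pd

df = pd.DataFrame({"divenumber": [1, 1, 2], "press": [0.5, 1.0, 0.4]})
dfg = df.groupby("divenumber")
for cast_id in dfg.groups.keys():
    # each group is one downcast, keyed by dive number
    print(cast_id, dfg.get_group(cast_id).press.values)
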
Example #7
ncfile = args.sourcefile
df = EcoFOCI_netCDF(ncfile)
global_atts = df.get_global_atts()
df.close()

if args.screen:

    for k in global_atts.keys():
        print "{0}: {1}".format(k, global_atts[k])

if args.out_config:
    for k in global_atts.keys():
        global_atts[k] = str(global_atts[k])
        print "{0}: {1}".format(k, global_atts[k])

    ConfigParserLocal.write_config("header_config.yaml", global_atts, 'yaml')

if args.in_config:
    nc_meta = ConfigParserLocal.get_config('header_config.yaml', 'yaml')
    print(nc_meta)

    print("Setting attributes")
    nchandle = Dataset(args.sourcefile, 'a')
    set_global_atts(nchandle, nc_meta)
    nchandle.close()

if args.add_history:

    print "adding history attribute"
    nchandle = Dataset(args.sourcefile, 'a')
    nchandle.setncattr('history', '')
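
The set_global_atts call above amounts to copying a dict of metadata onto the file's global attributes; a minimal netCDF4 sketch (file name and attribute values are placeholders):

from netCDF4 import Dataset

def set_global_atts_sketch(nchandle, meta):
    """Write each key/value pair in meta as a global attribute."""
    for name, value in meta.items():
        nchandle.setncattr(name, str(value))

nchandle = Dataset("example.nc", "a")
set_global_atts_sketch(nchandle, {"History": "edited from header_config.yaml"})
nchandle.close()
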
Example #8
            "s11",
            "s12",
        ],
        axis=1,
        inplace=True,
    )

    # merge the three dataframes
    df = pd.concat([df0, df1, df2])

else:
    print("No recognized argos-pmel version")
    sys.exit()

if args.config:
    config_settings = ConfigParserLocal.get_config(args.config, "yaml")
    print("Constraining data to {start}-{end}".format(
        start=config_settings["Mooring"]["StartDate"],
        end=config_settings["Mooring"]["EndDate"],
    ))
    df = df.loc[config_settings["Mooring"]["StartDate"]:
                config_settings["Mooring"]["EndDate"]]

if args.interpolate:
    # hourly binned with linear interpolation to fill gaps
    df = (df.resample("1H", label="right",
                      closed="right").mean().interpolate(method="linear"))
"""------------------------ output options----------------------"""
if args.csv and (args.version
                 not in ["beacon", "buoy_3hr", "buoy", "met", "sfc_package"]):
    df["longitude"] = df.longitude.apply(lambda x: "%.3f" % x)
Example #9
if args.screen:

    for k in data.keys():
        if k == args.varname:
            data[k] = str(data[k])
            print "{0}: {1}".format(k, data[args.varname])

if args.out_config:
    for k in data.keys():
        if k == args.varname:
            data[k] = str(data[k])
            print "{0}: {1}".format(k, data[args.varname])
            data_write = {k: data[args.varname]}
            ConfigParserLocal.write_config(
                "instrument_" + args.varname + "_config.yaml", data_write,
                'yaml')

if args.in_config:
    nc_meta = ConfigParserLocal.get_config(
        "instrument_" + args.varname + "_config.yaml", 'yaml')
    editvar = [
        float(x) for x in nc_meta[args.varname].strip('[').strip(']').split()
    ]
    print(editvar)

    print("Setting {0}".format(args.varname))
    nchandle = Dataset(args.sourcefile, 'a')
    nchandle.variables[args.varname][:] = editvar
    nchandle.close()
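
The variable edit above is the standard netCDF4 append-mode pattern; a minimal sketch with placeholder file and variable names:

from netCDF4 import Dataset

nchandle = Dataset("example.nc", "a")        # open for in-place editing
nchandle.variables["depth"][:] = [0.0, 5.0]  # overwrite the stored values
nchandle.close()
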
Example #10
global_atts = df.get_global_atts()
vars_dic = df.get_vars()

if args.screen:

    for k in vars_dic.keys():
        if k == args.varname:
            atts = df.get_vars_attributes(var_name=k, var_type='long_name')
            print "{0}: {1}".format(k, atts)

if args.out_config:
    for k in vars_dic.keys():
        if k == args.varname:
            atts = df.get_vars_attributes(var_name=k, var_type='long_name')
            print "{0}: {1}".format(k, atts)
            data_write = {k: atts}
            ConfigParserLocal.write_config(
                "instrument_" + args.varname + "_config.pyini", data_write,
                'json')

if args.in_config:
    nc_meta = ConfigParserLocal.get_config(
        "instrument_" + args.varname + "_config.pyini", 'json')

    print "Setting {0}".format(args.varname)
    df.set_vars_attributes(var_name=args.varname,
                           var_type=args.attname,
                           attr_value=nc_meta[args.varname])

df.close()
Example #11
                    action="store_true",
                    help='plot multiple mooring data on one panel')
parser.add_argument("-ctd",
                    '--ctd_calibration_plots',
                    action="store_true",
                    help='plot CTD calibration point on timeseries')

args = parser.parse_args()
"""---------------------------------------------------------------------------------------
Get parameters from specified pointerfile - 
an example is shown in the header description of
this program.  It can be of the .pyini (json) form or .yaml form

"""
if args.PointerFile.split('.')[-1] == 'pyini':
    pointer_file = ConfigParserLocal.get_config(args.PointerFile)
elif args.PointerFile.split('.')[-1] == 'yaml':
    pointer_file = ConfigParserLocal.get_config_yaml(args.PointerFile)
else:
    print "PointerFile format not recognized"
    sys.exit()

MooringDataPath = pointer_file['mooring_data_path']
MooringID = pointer_file['MooringID']
color_options = pointer_file['colors']
label = pointer_file['legend']
legend_loc = pointer_file['legend_loc']
legend_off = pointer_file['legend_off']
datatype = pointer_file['dtype']
plot_var = pointer_file['EPIC_Key']
plot_var_ctd = pointer_file['EPIC_Key_ctd']
Example #12
                    type=str,
                    help='name of new epic variable')

args = parser.parse_args()

# If these variables are not defined, no data will be archived into the nc file for that parameter.

###nc readin
df = EcoFOCI_netCDF(args.sourcedir)
global_atts = df.get_global_atts()
vars_dic = df.get_vars()
nchandle = df._getnchandle_()
data = df.ncreadfile_dic()

if args.add_epic_var:
    EPIC_VARS_dict = ConfigParserLocal.get_config(
        'EcoFOCI_config/epickeys/epickey.json', 'json')
    try:
        epic_var_ind = (args.add_epic_var).split('_')[1]
        print "Adding {0} by searching for {1}".format(args.add_epic_var,
                                                       epic_var_ind)
        try:
            newvar = nchandle.createVariable(
                EPIC_VARS_dict[epic_var_ind]['EPIC_KEY'], 'f4',
                ('time', 'depth', 'lat', 'lon'))
        except Exception:  # fall back for files whose depth dimension is named 'dep'
            newvar = nchandle.createVariable(
                EPIC_VARS_dict[epic_var_ind]['EPIC_KEY'], 'f4',
                ('time', 'dep', 'lat', 'lon'))
        newvar.setncattr('name', EPIC_VARS_dict[epic_var_ind]['NAME'])
        newvar.long_name = EPIC_VARS_dict[epic_var_ind]['LONGNAME']
        newvar.generic_name = EPIC_VARS_dict[epic_var_ind]['GENERIC_NAME']