# Assumed imports for the examples below: gio and uconv are project helper
# modules (their import paths are a guess); later examples also rely on the
# older xray package and on iris.
import numpy
import xarray

import general_io as gio  # assumed module behind the gio alias
import convenient_universal as uconv  # assumed module behind the uconv alias


def main(inargs):
    """Run the program."""

    # Read the data
    dset_in = xarray.open_dataset(inargs.infile)
    gio.check_xarrayDataset(dset_in, inargs.variable)

    subset_dict = gio.get_subset_kwargs(inargs)
    darray = dset_in[inargs.variable].sel(**subset_dict)

    # Calculate the zonal anomaly
    zonal_mean = darray.mean(dim='longitude')
    zonal_anomaly = darray - zonal_mean

    # Write output file
    d = {}
    for dim in darray.dims:
        d[dim] = darray[dim]
    d[inargs.variable] = (darray.dims, zonal_anomaly.values)

    dset_out = xarray.Dataset(d)

    dset_out[inargs.variable].attrs = {
        'long_name': darray.attrs['long_name'],
        'standard_name': darray.attrs['standard_name'],
        'units': darray.attrs['units'],
        'notes': 'The zonal mean has been subtracted at each time step.'
    }

    gio.set_global_atts(dset_out, dset_in.attrs, {
        inargs.infile: dset_in.attrs['history'],
    })
    dset_out.to_netcdf(inargs.outfile, format='NETCDF3_CLASSIC')
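
The zonal-anomaly subtraction above relies on xarray broadcasting: the mean over longitude drops that dimension, and the subtraction re-expands it automatically. A minimal sketch with synthetic data (all names here are illustrative, not part of the example):

import numpy
import xarray

# Synthetic (time, latitude, longitude) field
data = xarray.DataArray(numpy.random.rand(2, 3, 4),
                        dims=['time', 'latitude', 'longitude'])

# Subtracting the zonal mean broadcasts it back across all longitudes
zonal_anomaly = data - data.mean(dim='longitude')

# Each anomaly field now averages to ~0 along longitude
assert abs(zonal_anomaly.mean(dim='longitude')).max() < 1e-10
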
def calc_zw3(ifile, var_id, ofile):
    """Calculate an index of the Southern Hemisphere ZW3 pattern.
    
    Ref: Raphael (2004). A zonal wave 3 index for the Southern Hemisphere. 
      Geophysical Research Letters, 31(23), L23212. 
      doi:10.1029/2004GL020365.

    Expected input: Raphael (2004) uses the 500hPa geopotential height,
      sea level pressure, or 500hPa zonal anomalies, the latter constructed
      by removing the zonal mean of the geopotential height from each grid
      point (preferred). The running mean (and the zonal mean, if using it)
      should be applied to the input data beforehand. Raphael (2004) uses
      a 3-month running mean.

    Design notes: This function uses cdo instead of CDAT because the
      cdutil library doesn't have routines for calculating the daily 
      climatology or stdev.
    
    """

    # Read data
    dset_in = xarray.open_dataset(ifile)
    gio.check_xarrayDataset(dset_in, var_id)

    # Calculate the index
    groupby_op = get_groupby_op(dset_in['time'].values)
    index = {}
    for region in ['zw31', 'zw32', 'zw33']:
        south_lat, north_lat, west_lon, east_lon = gio.regions[region]
        darray = dset_in[var_id].sel(
            latitude=slice(south_lat, north_lat),
            longitude=slice(west_lon, east_lon))
        darray = darray.mean(dim=['latitude', 'longitude'])

        clim = darray.groupby(groupby_op).mean(dim='time')
        anom = darray.groupby(groupby_op) - clim
        stdev = darray.groupby(groupby_op).std(dim='time')
        norm = anom.groupby(groupby_op) / stdev

        index[region] = norm.values

    zw3_timeseries = (index['zw31'] + index['zw32'] + index['zw33']) / 3.0

    # Write output file
    d = {}
    d['time'] = darray['time']
    d['zw3'] = (['time'], zw3_timeseries)
    dset_out = xarray.Dataset(d)

    dset_out['zw3'].attrs = {
        'id': 'zw3',
        'long_name': 'zonal_wave_3_index',
        'standard_name': 'zonal_wave_3_index',
        'units': '',
        'notes': 'Ref: ZW3 index of Raphael (2004)'
    }

    gio.set_global_atts(dset_out, dset_in.attrs,
                        {ifile: dset_in.attrs['history']})
    dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')
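
The docstring notes that any running mean (and the zonal mean, if used) must be applied before calling calc_zw3. A minimal sketch of that preprocessing, assuming monthly data so that a 3-step rolling window matches the 3-month running mean of Raphael (2004); the file and variable names are illustrative:

import xarray

dset = xarray.open_dataset('zg_500hPa_monthly.nc')  # illustrative path
darray = dset['zg']

# Zonal anomaly (the preferred input per Raphael 2004)
zonal_anomaly = darray - darray.mean(dim='longitude')

# 3-month running mean (assumes one time step per month)
smoothed = zonal_anomaly.rolling(time=3, center=True).mean()
smoothed.to_dataset(name='zg').to_netcdf('zw3_input.nc')
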
def calc_nino_new(index, ifile, var_id, base_period, ofile):
    """Calculate a new Nino index.

    Ref: Ren & Jin (2011). Nino indices for two types of ENSO. 
      Geophysical Research Letters, 38(4), L04704. 
      doi:10.1029/2010GL046031.

    Expected input: Sea surface temperature data.

    """

    # Calculate the traditional NINO3 and NINO4 indices
    regions = ['NINO3', 'NINO4']
    anomaly_timeseries = {}
    for reg in regions:
        dset_in, anomaly_timeseries[reg] = calc_nino(reg, ifile, var_id,
                                                     base_period, None)

    # Calculate the new Ren & Jin index
    ntime = len(anomaly_timeseries['NINO3'])

    nino_new_timeseries = numpy.ma.zeros(ntime)
    for i in range(0, ntime):
        nino3_val = anomaly_timeseries['NINO3'][i]
        nino4_val = anomaly_timeseries['NINO4'][i]
        product = nino3_val * nino4_val

        alpha = 0.4 if product > 0 else 0.0

        if index == 'NINOCT':
            nino_new_timeseries[i] = numpy.ma.subtract(
                nino3_val, (numpy.ma.multiply(nino4_val, alpha)))
        elif index == 'NINOWP':
            nino_new_timeseries[i] = numpy.ma.subtract(
                nino4_val, (numpy.ma.multiply(nino3_val, alpha)))

    # Write output
    d = {}
    d['time'] = dset_in['time']
    d['nino' + index[4:]] = (['time'], nino_new_timeseries)
    dset_out = xarray.Dataset(d)

    hx = 'Ref: Ren & Jin 2011, GRL, 38, L04704. Base period: %s to %s' % (
        base_period[0], base_period[1])
    long_name = {}
    long_name['ninoCT'] = 'nino_cold_tongue_index'
    long_name['ninoWP'] = 'nino_warm_pool_index'
    dset_out['nino' + index[4:]].attrs = {
        'long_name': long_name['nino' + index[4:]],
        'standard_name': long_name['nino' + index[4:]],
        'units': 'Celsius',
        'notes': hx
    }

    gio.set_global_atts(dset_out, dset_in.attrs,
                        {ifile: dset_in.attrs['history']})
    dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')
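
The per-timestep loop above implements the piecewise Ren & Jin (2011) definition: alpha = 0.4 when NINO3 and NINO4 have the same sign, 0 otherwise. The same logic in vectorised form, a sketch assuming nino3 and nino4 are masked anomaly arrays like those returned by calc_nino:

import numpy

def nino_ct_wp(nino3, nino4):
    """Return the (cold tongue, warm pool) indices of Ren & Jin (2011)."""
    # alpha is 0.4 where the two indices share a sign, else 0
    alpha = numpy.ma.where(nino3 * nino4 > 0, 0.4, 0.0)
    nino_ct = nino3 - alpha * nino4
    nino_wp = nino4 - alpha * nino3
    return nino_ct, nino_wp
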
def calc_sam(ifile, var_id, ofile):
    """Calculate an index of the Southern Annular Mode.

    Ref: Gong & Wang (1999). Definition of Antarctic Oscillation index. 
      Geophysical Research Letters, 26(4), 459-462.
      doi:10.1029/1999GL900003

    Expected input: Mean sea level pressure data.

    Concept: Difference between the normalised zonal mean pressure 
      at 40S and 65S.

    """

    # Read data
    dset_in = xarray.open_dataset(ifile)
    gio.check_xarrayDataset(dset_in, var_id)

    # Calculate index
    north_lat = uconv.find_nearest(dset_in['latitude'].values, -40)
    south_lat = uconv.find_nearest(dset_in['latitude'].values, -65)
    # Select the two latitude points, then take the zonal mean
    darray = dset_in[var_id].sel(
        latitude=[south_lat, north_lat]).mean(dim='longitude')

    groupby_op = get_groupby_op(darray['time'].values)
    clim = darray.groupby(groupby_op).mean(dim='time')
    anom = darray.groupby(groupby_op) - clim
    stdev = darray.groupby(groupby_op).std(dim='time')
    norm = anom.groupby(groupby_op) / stdev

    sam_timeseries = (norm.sel(latitude=north_lat).values -
                      norm.sel(latitude=south_lat).values)

    # Write output file
    d = {}
    d['time'] = darray['time']
    d['sam'] = (['time'], sam_timeseries)
    dset_out = xarray.Dataset(d)

    hx = 'Ref: Gong & Wang (1999). GRL, 26, 459-462. doi:10.1029/1999GL900003'
    dset_out['sam'].attrs = {
        'long_name': 'Southern_Annular_Mode_Index',
        'standard_name': 'Southern_Annular_Mode_Index',
        'units': '',
        'notes': hx
    }

    gio.set_global_atts(dset_out, dset_in.attrs,
                        {ifile: dset_in.attrs['history']})
    dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')
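
get_groupby_op (not shown) presumably returns a grouping key such as 'time.month' for monthly data or 'time.dayofyear' for daily data; the four groupby lines above then compute a standardised anomaly. The same pattern with the grouping hard-coded, a sketch assuming monthly data:

import xarray

def standardise(darray, group='time.month'):
    """(x - group climatology) / group climatological stdev."""
    clim = darray.groupby(group).mean(dim='time')
    stdev = darray.groupby(group).std(dim='time')
    anom = darray.groupby(group) - clim
    return anom.groupby(group) / stdev
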
Example 5
def main(inargs):
    """Run the program."""

    # Read the data
    dset_in = xarray.open_dataset(inargs.infile)
    gio.check_xarrayDataset(dset_in, inargs.var)
    darray, long_name, units = extract_data(dset_in, inargs)

    # Perform task
    outdata_dict = {}
    if inargs.outtype == 'coefficients':
        outdata_dict = _get_coefficients(darray.values,
                                         darray['longitude'].values,
                                         inargs.min_freq, inargs.max_freq,
                                         long_name, units, outdata_dict)
        if inargs.sign_change:
            outdata_dict = _get_sign_change(darray.values, outdata_dict)

        if inargs.env_max:
            env_max_min_freq, env_max_max_freq = inargs.env_max
            outdata_dict = _get_env_max(darray.values,
                                        darray['longitude'].values,
                                        env_max_min_freq, env_max_max_freq,
                                        units, outdata_dict)
        dims = darray.dims[:-1]
    else:
        outdata_dict = _filter_data(darray.values, darray['longitude'].values,
                                    inargs.min_freq, inargs.max_freq,
                                    inargs.var, long_name, units,
                                    inargs.outtype, outdata_dict)
        dims = darray.dims

    # Write the output file
    d = {}
    for dim in dims:
        d[dim] = darray[dim]

    for outvar in outdata_dict.keys():
        d[outvar] = (dims, outdata_dict[outvar][0])

    dset_out = xarray.Dataset(d)

    for outvar in outdata_dict.keys():
        dset_out[outvar].attrs = outdata_dict[outvar][1]

    gio.set_global_atts(dset_out, dset_in.attrs, {
        inargs.infile: dset_in.attrs['history'],
    })
    dset_out.to_netcdf(inargs.outfile)
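
_get_coefficients, _filter_data and friends are project helpers that are not shown; they presumably operate on the zonal (longitude) Fourier spectrum. A rough sketch of that kind of filtering, assuming regularly spaced longitudes on the last axis (this is not the helpers' actual code):

import numpy

def filter_zonal_waves(data, min_freq, max_freq):
    """Retain only zonal wavenumbers min_freq..max_freq (inclusive)."""
    coeffs = numpy.fft.rfft(data, axis=-1)
    wavenumbers = numpy.arange(coeffs.shape[-1])
    keep = (wavenumbers >= min_freq) & (wavenumbers <= max_freq)
    coeffs[..., ~keep] = 0
    return numpy.fft.irfft(coeffs, n=data.shape[-1], axis=-1)
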
def calc_nino(index, ifile, var_id, base_period, ofile):
    """Calculate a Nino index.

    Expected input: Sea surface temperature data.

    """

    index_name = 'nino' + index[4:]

    # Read the data
    dset_in = xarray.open_dataset(ifile)
    gio.check_xarrayDataset(dset_in, var_id)

    # Calculate the index
    south_lat, north_lat, west_lon, east_lon = gio.regions[index_name]
    darray = dset_in[var_id].sel(
        latitude=slice(south_lat, north_lat),
        longitude=slice(west_lon, east_lon))
    darray = darray.mean(dim=['latitude', 'longitude'])

    groupby_op = get_groupby_op(darray['time'].values)
    clim = darray.sel(
        time=slice(base_period[0], base_period[1])).groupby(groupby_op).mean()
    anom = darray.groupby(groupby_op) - clim

    # Write output
    if ofile:
        d = {}
        d['time'] = darray['time']
        d[index_name] = (['time'], anom.values)
        dset_out = xarray.Dataset(d)

        hx = 'lat: %s to %s, lon: %s to %s, base: %s to %s' % (
            south_lat, north_lat, west_lon, east_lon, base_period[0],
            base_period[1])

        dset_out[index_name].attrs = {
            'long_name': index_name + '_index',
            'standard_name': index_name + '_index',
            'units': 'Celsius',
            'notes': hx
        }

        gio.set_global_atts(dset_out, dset_in.attrs,
                            {ifile: dset_in.attrs['history']})
        dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')
    else:
        return dset_in, anom.values
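
A hypothetical call, writing a NINO3.4 anomaly file ('NINO34' must be a key in gio.regions; the paths are illustrative):

calc_nino('NINO34', 'sst_monthly.nc', 'sst',
          ('1981-01-01', '2010-12-31'), 'nino34.nc')
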
Example 7
def main(inargs):
    """Run the program."""

    # Read the data
    dset_in = xray.open_dataset(inargs.infile)
    #gio.check_xrayDataset(dset_in, inargs.var)

    subset_dict = gio.get_subset_kwargs(inargs)
    darray = dset_in[inargs.var].sel(**subset_dict)

    assert darray.dims == ('time', 'latitude', 'longitude'), \
        "Order of the data must be time, latitude, longitude"

    # Generate datetime list
    dt_list, dt_list_metadata = get_datetimes(darray, inargs.date_file)

    # Calculate the composites
    if not inargs.date_file:
        inargs.no_sig = True
    cmeans, cmean_atts, pvals, pval_atts = calc_composites(
        darray, dt_list, sig_test=not inargs.no_sig)

    # Write the output file
    d = {}
    d['latitude'] = darray['latitude']
    d['longitude'] = darray['longitude']

    for season in season_months.keys():
        d[inargs.var + '_' + season] = (['latitude', 'longitude'],
                                        cmeans[season])
        if not inargs.no_sig:
            d['p_' + season] = (['latitude', 'longitude'], pvals[season])

    dset_out = xray.Dataset(d)

    for season in season_months.keys():
        dset_out[inargs.var + '_' + season].attrs = cmean_atts[season]
        if not inargs.no_sig:
            dset_out['p_' + season].attrs = pval_atts[season]

    output_metadata = {
        inargs.infile: dset_in.attrs['history'],
    }
    if inargs.date_file:
        output_metadata[inargs.date_file] = dt_list_metadata

    gio.set_global_atts(dset_out, dset_in.attrs, output_metadata)
    dset_out.to_netcdf(inargs.outfile, format='NETCDF3_CLASSIC')


def main(inargs):
    """Run the program."""

    dset = xray.open_dataset(inargs.infile)

    try:
        dset = dset.drop('bnds')
    except ValueError:
        print("Did not delete time bounds variable")

    try:
        dset.coords['time'].attrs.pop('bounds')
    except KeyError:
        pass

    gio.set_global_atts(dset, dset.attrs,
                        {inargs.infile: dset.attrs['history']})
    dset.to_netcdf(inargs.outfile)


def main(inargs):
    """Run the program."""

    # Read the data
    dset_in_u = xray.open_dataset(inargs.infileu)
    gio.check_xrayDataset(dset_in_u, inargs.varu)

    dset_in_v = xray.open_dataset(inargs.infilev)
    gio.check_xrayDataset(dset_in_v, inargs.varv)

    subset_dict = gio.get_subset_kwargs(inargs)

    darray_u = dset_in_u[inargs.varu].sel(**subset_dict)
    darray_v = dset_in_v[inargs.varv].sel(**subset_dict)

    lat_axis = darray_u['latitude'].values
    lon_axis = darray_u['longitude'].values
    axis_order = axis_letters(darray_u.dims)

    # Calculate the desired quantity
    data_out = calc_quantity(darray_u.values, darray_v.values, inargs.quantity,
                             lat_axis, lon_axis, axis_order)

    # Write the output file
    d = {}
    for dim in darray_u.dims:
        d[dim] = darray_u[dim]

    for var in data_out.keys():
        d[var] = (darray_u.dims, data_out[var])

    dset_out = xray.Dataset(d)

    for var in data_out.keys():
        dset_out[var].attrs = var_atts[var]

    outfile_metadata = {
        inargs.infileu: dset_in_u.attrs['history'],
        inargs.infilev: dset_in_v.attrs['history']
    }
    gio.set_global_atts(dset_out, dset_in_u.attrs, outfile_metadata)
    dset_out.to_netcdf(inargs.outfile, format='NETCDF3_CLASSIC')


def calc_pwi(ifile, var_id, ofile):
    """Calculate the Planetary Wave Index.

    Ref: Irving & Simmonds (2015). A novel approach to diagnosing Southern 
      Hemisphere planetary wave activity and its influence on regional 
      climate variability. Journal of Climate. 28, 9041-9057. 
      doi:10.1175/JCLI-D-15-0287.1.
      
    Expected input: Wave envelope.   

    """

    # Read data
    dset_in = xarray.open_dataset(ifile)
    gio.check_xarrayDataset(dset_in, var_id)

    # Calculate index
    darray = dset_in[var_id].sel(latitude=slice(-70, -40))
    mermax = darray.max(dim='latitude')
    pwi_timeseries = mermax.median(dim='longitude')

    # Write output file
    d = {}
    d['time'] = darray['time']
    d['pwi'] = (['time'], pwi_timeseries.values)
    dset_out = xarray.Dataset(d)

    dset_out['pwi'].attrs = {
        'long_name': 'planetary_wave_index',
        'standard_name': 'planetary_wave_index',
        'units': darray.attrs['units'],
        'notes': 'Ref: PWI of Irving and Simmonds (2015)'
    }

    gio.set_global_atts(dset_out, dset_in.attrs,
                        {ifile: dset_in.attrs['history']})
    dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')
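
A hypothetical call, with illustrative file and variable names (the input file is expected to hold a wave envelope field):

calc_pwi('va_env_monthly.nc', 'env', 'pwi_monthly.nc')
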
def calc_mi(ifile, var_id, ofile):
    """Calculate the meridional wind index.

    Represents the average amplitude of the meridional wind
    over the 70S to 40S latitude band.
      
    Expected input: Meridional wind

    """

    # Read data
    dset_in = xarray.open_dataset(ifile)
    gio.check_xarrayDataset(dset_in, var_id)

    # Calculate index
    darray = dset_in[var_id].sel(latitude=slice(-70, -40))
    units = darray.attrs['units']

    darray = abs(darray)  # absolute value
    mi_timeseries = darray.mean(dim=['latitude', 'longitude'])

    # Write output file
    d = {}
    d['time'] = darray['time']
    d['mi'] = (['time'], mi_timeseries.values)
    dset_out = xarray.Dataset(d)

    dset_out['mi'].attrs = {
        'long_name': 'meridional_wind_index',
        'standard_name': 'meridional_wind_index',
        'units': units,
        'notes': 'Average amplitude of meridional wind over 70S to 40S'
    }

    gio.set_global_atts(dset_out, dset_in.attrs,
                        {ifile: dset_in.attrs['history']})
    dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')


def calc_asl(ifile, var_id, ofile):
    """Calculate the Amundsen Sea Low index.

    Ref: Turner et al (2013). The Amundsen Sea Low. 
      International Journal of Climatology. 33(7), 1818-1829
      doi:10.1002/joc.3558.

    Expected input: Mean sea level pressure data.

    Concept: Location and value of the minimum MSLP in the region
      bounded by 60-75S and 180-310E.

    """

    # Read data
    dset_in = xarray.open_dataset(ifile)
    gio.check_xarrayDataset(dset_in, var_id)

    south_lat, north_lat, west_lon, east_lon = gio.regions['asl']
    darray = dset_in[var_id].sel(latitude=slice(south_lat, north_lat),
                                 longitude=slice(west_lon, east_lon))

    assert darray.dims == ('time', 'latitude', 'longitude'), \
        "Order of the data must be time, latitude, longitude"

    # Get axis information
    lat_values = darray['latitude'].values
    lon_values = darray['longitude'].values
    lats, lons = uconv.coordinate_pairs(lat_values, lon_values)

    # Reshape data
    ntimes, nlats, nlons = darray.values.shape
    darray_reshaped = numpy.reshape(darray.values, (ntimes, nlats * nlons))

    # Get the ASL index info (min value for each timestep and its lat/lon)
    min_values = numpy.amin(darray_reshaped, axis=1)
    min_indexes = numpy.argmin(darray_reshaped, axis=1)
    min_lats = numpy.take(lats, min_indexes)
    min_lons = numpy.take(lons, min_indexes)

    # Write the output file
    d = {}
    d['time'] = darray['time']
    d['asl_value'] = (['time'], min_values)
    d['asl_lat'] = (['time'], min_lats)
    d['asl_lon'] = (['time'], min_lons)
    dset_out = xarray.Dataset(d)

    ref = 'Ref: Turner et al (2013). Int J Clim. 33, 1818-1829. doi:10.1002/joc.3558.'
    dset_out['asl_value'].attrs = {
        'long_name': 'asl_minimum_pressure',
        'standard_name': 'asl_minimum_pressure',
        'units': 'Pa',
        'notes': ref
    }
    dset_out['asl_lat'].attrs = {
        'long_name': 'asl_latitude',
        'standard_name': 'asl_latitude',
        'units': 'degrees_north',
        'notes': ref
    }
    dset_out['asl_lon'].attrs = {
        'long_name': 'asl_longitude',
        'standard_name': 'asl_longitude',
        'units': 'degrees_east',
        'notes': ref
    }

    gio.set_global_atts(dset_out, dset_in.attrs,
                        {ifile: dset_in.attrs['history']})
    dset_out.to_netcdf(ofile, format='NETCDF3_CLASSIC')
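
The reshape/argmin bookkeeping above can also be written with xarray's stack, which keeps the coordinate lookup attached to the data. An alternative sketch of the same minimum-location logic, assuming a reasonably recent xarray with vectorised indexing (this is not the code the example uses):

import xarray

def find_minimum(darray):
    """Value and lat/lon location of the spatial minimum per time step."""
    # Collapse latitude/longitude into a single 'point' dimension
    stacked = darray.stack(point=['latitude', 'longitude'])
    min_values = stacked.min(dim='point')
    # Pointwise selection of the minimising grid point at each time
    min_points = stacked.isel(point=stacked.argmin(dim='point'))
    return min_values, min_points['latitude'], min_points['longitude']
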
Example 13
def main(inargs):
    """Run the program."""

    # Read data
    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        u_cube = iris.load_cube(inargs.infileU,
                                inargs.longnameU & time_constraint)
        v_cube = iris.load_cube(inargs.infileV,
                                inargs.longnameV & time_constraint)

    for coords in [u_cube.coords(), v_cube.coords()]:
        coord_names = [coord.name() for coord in coords]
        assert coord_names == ['time', 'latitude', 'longitude']

    time_coord = v_cube.coord('time')
    lat_coord = v_cube.coord('latitude')
    lon_coord = v_cube.coord('longitude')

    # Rotate wind
    np_lat, np_lon = inargs.north_pole
    rotated_cs = iris.coord_systems.RotatedGeogCS(np_lat, np_lon)
    urot_cube, vrot_cube = iris.analysis.cartography.rotate_winds(
        u_cube, v_cube, rotated_cs)

    # Regrid
    target_grid_cube = make_grid(lat_coord.points, lon_coord.points, np_lat,
                                 np_lon)
    scheme = iris.analysis.Linear()
    vrot_cube.coords('latitude')[0].coord_system = iris.coord_systems.GeogCS(
        iris.fileformats.pp.EARTH_RADIUS)
    vrot_cube.coords('longitude')[0].coord_system = iris.coord_systems.GeogCS(
        iris.fileformats.pp.EARTH_RADIUS)
    vrot_regridded = vrot_cube.regrid(target_grid_cube, scheme)
    # could use clean_data here to remove spurious large values
    # that regridding produces

    # Write to file
    d = {}
    d['time'] = ('time', time_coord.points)
    d['latitude'] = ('latitude', lat_coord.points)
    d['longitude'] = ('longitude', lon_coord.points)
    d['vrot'] = (['time', 'latitude', 'longitude'], vrot_regridded.data)

    dset_out = xray.Dataset(d)
    dset_out['vrot'].attrs = {
        'standard_name': 'rotated_northward_wind',
        'long_name': 'rotated_northward_wind',
        'units': str(v_cube.units),
        'notes': ('North Pole at lat=%s, lon=%s. '
                  'Data defined on rotated grid.' % (np_lat, np_lon))
    }
    gio.set_dim_atts(dset_out, str(time_coord.units))

    outfile_metadata = {
        inargs.infileU: u_cube.attributes['history'],
        inargs.infileV: v_cube.attributes['history']
    }

    gio.set_global_atts(dset_out, v_cube.attributes, outfile_metadata)
    dset_out.to_netcdf(inargs.outfile)  # format='NETCDF3_CLASSIC' omitted here
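
make_grid is a project helper that is not shown; it presumably builds an empty iris cube defining the regridding target. A guessed, simplified sketch that ignores the pole arguments (the real helper may differ):

import numpy
import iris

def make_target_grid(lats, lons):
    """Hypothetical stand-in for make_grid: an empty lat/lon cube."""
    cs = iris.coord_systems.GeogCS(iris.fileformats.pp.EARTH_RADIUS)
    lat_coord = iris.coords.DimCoord(lats, standard_name='latitude',
                                     units='degrees', coord_system=cs)
    lon_coord = iris.coords.DimCoord(lons, standard_name='longitude',
                                     units='degrees', coord_system=cs)
    return iris.cube.Cube(numpy.zeros((len(lats), len(lons))),
                          dim_coords_and_dims=[(lat_coord, 0),
                                               (lon_coord, 1)])
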
Example 14
def main(inargs):
    """Run the program."""

    # Prepare input data
    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    try:
        maxlat = inargs.maxlat  # read eagerly so a missing option is caught here
        lat_constraint = iris.Constraint(latitude=lambda y: y <= maxlat)
    except AttributeError:
        lat_constraint = iris.Constraint()

    try:
        season_constraint = season_constraints[inargs.season]
    except AttributeError:
        season_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load_cube(
            inargs.infile, inargs.longname & time_constraint
            & season_constraint & lat_constraint)

    coord_names = [coord.name() for coord in cube.coords()]
    assert coord_names == ['time', 'latitude', 'longitude']

    time_coord = cube.coord('time')
    lat_coord = cube.coord('latitude')
    lon_coord = cube.coord('longitude')

    # Perform EOF analysis
    eof_anal = EofAnalysis(
        cube,
        **uconv.dict_filter(vars(inargs),
                            uconv.list_kwargs(EofAnalysis.__init__)))

    eof_cube, eof_atts = eof_anal.eof(
        **uconv.dict_filter(vars(inargs), uconv.list_kwargs(eof_anal.eof)))
    pc_cube, pc_atts = eof_anal.pcs(
        **uconv.dict_filter(vars(inargs), uconv.list_kwargs(eof_anal.pcs)))

    # Write output file
    d = {}
    d['time'] = ('time', time_coord.points)
    d['latitude'] = ('latitude', lat_coord.points)
    d['longitude'] = ('longitude', lon_coord.points)

    eof_dims = ['latitude', 'longitude']
    pc_dims = ['time']
    for index in range(inargs.neofs):
        eof_data = eof_cube.extract(iris.Constraint(eof_number=index)).data
        pc_data = pc_cube.extract(iris.Constraint(pc_number=index)).data
        d['eof' + str(index + 1)] = (eof_dims, eof_data)
        d['pc' + str(index + 1)] = (pc_dims, pc_data)

    dset_out = xray.Dataset(d)

    for index in range(inargs.neofs):
        eof_var = 'eof' + str(index + 1)
        pc_var = 'pc' + str(index + 1)
        dset_out[eof_var].attrs = eof_atts[eof_var]
        dset_out[pc_var].attrs = pc_atts[pc_var]

    gio.set_dim_atts(dset_out, str(time_coord.units))

    outfile_metadata = {inargs.infile: cube.attributes['history']}

    gio.set_global_atts(dset_out, cube.attributes, outfile_metadata)
    dset_out.to_netcdf(inargs.outfile)  # format='NETCDF3_CLASSIC' omitted here
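
uconv.dict_filter and uconv.list_kwargs are project helpers that are not shown; the pattern is evidently "list a function's keyword arguments, then cut the argparse namespace down to just those". A guessed minimal implementation (the real helpers may differ):

import inspect

def list_kwargs(func):
    """Names of func's defaulted (keyword) arguments."""
    spec = inspect.getfullargspec(func)
    n_defaults = len(spec.defaults) if spec.defaults else 0
    return spec.args[-n_defaults:] if n_defaults else []

def dict_filter(d, keys):
    """Subset of d restricted to the given keys."""
    return {k: v for k, v in d.items() if k in keys}
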