Example #1
def read_data(infiles,
              var,
              area_cube,
              annual=False,
              multiply_by_area=False,
              chunk_annual=False):
    """Read the input data."""

    cube, history = gio.combine_files(infiles, var)
    if annual:
        cube = timeseries.convert_to_annual(cube,
                                            days_in_month=True,
                                            chunk=chunk_annual)

    cube = uconv.flux_to_magnitude(cube)
    if multiply_by_area:
        cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

    coord_names = [coord.name() for coord in cube.coords(dim_coords=True)]
    assert cube.ndim == 3
    lats = cube.coord('latitude').points
    if lats.ndim == 1:
        lat_pos = coord_names.index('latitude')
        lats = uconv.broadcast_array(lats, lat_pos - 1, cube.shape[1:])
    else:
        assert lats.shape == cube.shape[1:]

    return cube, lats, history
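The `uconv.broadcast_array` helper is not shown in this listing. As called here with a single axis index, it presumably expands the 1D latitude points across the remaining spatial dimensions; a minimal numpy sketch of that behaviour (function name and signature assumed):

import numpy as np

def broadcast_array(array, axis_index, shape):
    """Broadcast a 1D array to `shape`, aligned on `axis_index`.

    Hypothetical stand-in for uconv.broadcast_array as used above.
    """
    dims = [1] * len(shape)
    dims[axis_index] = shape[axis_index]
    return np.broadcast_to(array.reshape(dims), shape)

lats = np.linspace(-88.75, 88.75, 72)       # 1D latitude points
grid = broadcast_array(lats, 0, (72, 144))  # (lat, lon) grid of latitudes
assert (grid[:, 0] == lats).all()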
Example #2
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             sftlf_cube=None,
             realm=None):
    """Read, merge, temporally aggregate and calculate zonal sum.
    
    Positive is defined as down.
    
    """

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            depth_constraint = iris.Constraint(depth=0)
            cube = cube.extract(depth_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = spatial_weights.multiply_by_area(cube)

        if 'up' in cube.standard_name:
            cube.data = cube.data * -1

        if sftlf_cube is not None and realm in ['ocean', 'land']:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, realm)

        zonal_sum = cube.collapsed('longitude', iris.analysis.SUM)
        zonal_sum.remove_coord('longitude')

        grid_spacing = grids.get_grid_spacing(zonal_sum)
        zonal_sum.data = zonal_sum.data / grid_spacing

    else:
        zonal_sum = None

    return zonal_sum, metadata_dict
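The last two steps above convert an area-integrated zonal sum into a per-degree-of-latitude quantity: multiply by cell area, sum along longitude, then divide by the meridional grid spacing. The same arithmetic in self-contained numpy (synthetic flux field, regular 1-degree grid assumed):

import numpy as np

nlat, nlon = 180, 360
flux = np.ones((nlat, nlon))                       # flux in W m-2, say
lat = np.linspace(-89.5, 89.5, nlat)
earth_radius = 6.371e6                             # metres
dlat = dlon = np.deg2rad(1.0)
cell_area = earth_radius**2 * dlat * dlon * np.cos(np.deg2rad(lat))  # m2

zonal_sum = (flux * cell_area[:, np.newaxis]).sum(axis=1)  # W per latitude band
per_degree = zonal_sum / 1.0                       # W per degree of latitude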
Example #3
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             area=False,
             invert_evap=False):
    """Read, merge, temporally aggregate and calculate zonal mean."""

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

        assert cube.units == 'kg m-2 s-1'
        cube.data = cube.data * 86400  # kg m-2 s-1 -> mm/day
        cube.units = 'mm/day'

        if invert_evap and (var == 'water_evaporation_flux'):
            cube.data = cube.data * -1

        if area:
            cube = spatial_weights.multiply_by_area(cube)

        zonal_mean = cube.collapsed('longitude', iris.analysis.MEAN)
        zonal_mean.remove_coord('longitude')
    else:
        zonal_mean = None

    return zonal_mean, metadata_dict
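The factor of 86400 converts a water flux from kg m-2 s-1 to mm/day: a kilogram of water spread over one square metre forms a 1 mm layer, and a day has 86400 seconds. For example:

flux = 3.0e-5              # kg m-2 s-1, a typical tropical rain rate
mm_per_day = flux * 86400  # 1 kg m-2 of water == 1 mm depth
print(mm_per_day)          # 2.592 mm/day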
Example #4
def main(inargs):
    """Run the program."""

    assert inargs.var == 'precipitation_flux'

    if inargs.area_file:
        area_cube = gio.get_ocean_weights(inargs.area_file)
    else:
        area_cube = None

    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        cube = iris.load_cube(infile,
                              inargs.var)  # kg m-2 s-1 (monthly, gridded)
        coord_names = [coord.name() for coord in cube.dim_coords]
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        days_in_year = timeseries.get_days_in_year(cube)

        cube = spatial_weights.multiply_by_area(
            cube, area_cube=area_cube)  # kg s-1 (monthly, gridded)
        cube = cube.collapsed(coord_names[1:],
                              iris.analysis.SUM)  # kg s-1 (monthly, globe)
        cube = timeseries.flux_to_total(cube)  # kg (monthly, globe)
        cube = timeseries.convert_to_annual(
            cube, aggregation='sum')  # kg (annual, globe)
        cube.data = cube.data / 5.1e14  # kg m-2 = mm (annual, globe)
        cube.data = cube.data / days_in_year.values  # mm/day (annual, globe)

        cube.units = 'mm/day'
        for coord in coord_names[1:] + aux_coord_names:
            cube.remove_coord(coord)
        output_cubelist.append(cube)
        print(infile)

    outcube = gio.combine_cubes(output_cubelist)
    outcube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
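The division by 5.1e14 turns a global total mass (kg) into a global mean water depth, since Earth's surface area is roughly 5.1e14 m2 and 1 kg m-2 of water is a 1 mm layer. A worked example with an illustrative total:

annual_total = 5.1e17                 # kg of precipitation in one year (illustrative)
earth_area = 5.1e14                   # m2
depth_mm = annual_total / earth_area  # 1000 mm for the year
print(depth_mm / 365)                 # ~2.74 mm/day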
Example #5
def main(inargs):
    """Run the program."""

    logging.basicConfig(level=logging.DEBUG)

    spatial_data = ('vol' in inargs.weights_var) or ('area' in inargs.weights_var)
    flux_data = not spatial_data

    w_cube, w_history = get_weights_data(inargs.weights_files,
                                         inargs.weights_var, inargs.area_file)
    t_cube, t_history = get_bin_data(inargs.temperature_files,
                                     inargs.temperature_var, w_cube)
    s_cube, s_history = get_bin_data(inargs.salinity_files,
                                     inargs.salinity_var, w_cube)
    b_cube = iris.load_cube(inargs.basin_file, 'region')
    if inargs.area_file:
        assert flux_data
        a_cube = gio.get_ocean_weights(inargs.area_file)
    else:
        assert spatial_data
        a_cube = None

    log = get_log(inargs, w_history, t_history, s_history, b_cube, a_cube)

    b_values, b_edges = uconv.get_basin_details(b_cube)
    if inargs.bin_percentile:
        pct_edges = np.arange(0, 1.01, 0.01)
        pct_values = (pct_edges[1:] + pct_edges[:-1]) / 2
        nt_values = ns_values = len(pct_values)
        s_bounds = (-0.2, 80)
        pct_cube = a_cube
    else:
        t_min, t_max = inargs.temperature_bounds
        t_step = inargs.tbin_size
        t_edges = np.arange(t_min, t_max + t_step, t_step)
        t_values = (t_edges[1:] + t_edges[:-1]) / 2
        s_values, s_edges = uconv.salinity_bins()
        s_bounds = (s_edges[0], s_edges[-1])
        nt_values = len(t_values)
        ns_values = len(s_values)
        pct_cube = None

    iris.coord_categorisation.add_year(t_cube, 'time')
    iris.coord_categorisation.add_year(s_cube, 'time')
    t_years = set(t_cube.coord('year').points)
    s_years = set(s_cube.coord('year').points)
    assert t_years == s_years
    if flux_data:
        iris.coord_categorisation.add_year(w_cube, 'time')
        w_years = set(w_cube.coord('year').points)
        assert w_years == t_years
    years = np.array(list(t_years))
    years.sort()

    w_tbin_outdata = np.ma.zeros([len(years), nt_values, len(b_values)])
    w_sbin_outdata = np.ma.zeros([len(years), ns_values, len(b_values)])
    w_tsbin_outdata = np.ma.zeros([len(years), ns_values, nt_values, len(b_values)])
    if spatial_data:
        ws_tbin_outdata = np.ma.zeros([len(years), nt_values, len(b_values)])
        wt_tbin_outdata = np.ma.zeros([len(years), nt_values, len(b_values)])
        ws_sbin_outdata = np.ma.zeros([len(years), ns_values, len(b_values)])
        wt_sbin_outdata = np.ma.zeros([len(years), ns_values, len(b_values)])
        ws_tsbin_outdata = np.ma.zeros([len(years), ns_values, nt_values, len(b_values)])
        wt_tsbin_outdata = np.ma.zeros([len(years), ns_values, nt_values, len(b_values)])
    if inargs.bin_percentile:
        pct_edges_t = np.ma.zeros([len(years), nt_values + 1])
        pct_edges_s = np.ma.zeros([len(years), ns_values + 1])
    if inargs.bin_clim:
        iris.coord_categorisation.add_month(s_cube, 'time')
        s_year_cube = s_cube.aggregated_by(['month'], iris.analysis.MEAN)
        s_year_cube.remove_coord('month')
        s_year_cube.replace_coord(s_cube[0:12, ::].coord('time'))
        iris.coord_categorisation.add_month(t_cube, 'time')
        t_year_cube = t_cube.aggregated_by(['month'], iris.analysis.MEAN)
        t_year_cube.remove_coord('month')
        t_year_cube.replace_coord(t_cube[0:12, ::].coord('time'))
    for year_index, year in enumerate(years):
        print(year)
        year_constraint = iris.Constraint(year=year)
        if not inargs.bin_clim:
            s_year_cube = s_cube.extract(year_constraint)
            t_year_cube = t_cube.extract(year_constraint)
        if flux_data:
            w_year_cube = w_cube.extract(year_constraint)
            w_year_cube = spatial_weights.multiply_by_area(w_year_cube,
                                                           area_cube=a_cube)
        else:
            w_year_cube = w_cube
        df, s_units, t_units = water_mass.create_df(
            w_year_cube,
            t_year_cube,
            s_year_cube,
            b_cube,
            pct_cube=pct_cube,
            multiply_weights_by_days_in_year_frac=True)
        if inargs.bin_percentile:
            weight_var = 'percentile_weights' if pct_cube else 'weight'
            t_edges = weighted_percentiles(df['temperature'].values,
                                           df[weight_var].values, pct_edges)
            s_edges = weighted_percentiles(df['salinity'].values,
                                           df[weight_var].values, pct_edges)
            pct_edges_t[year_index, :] = t_edges
            pct_edges_s[year_index, :] = s_edges
        if flux_data:
            w_tbin_outdata[year_index, ::] = bin_data(
                df, ['temperature', 'basin'], [t_edges, b_edges])
            w_sbin_outdata[year_index, ::] = bin_data(
                df, ['salinity', 'basin'], [s_edges, b_edges])
            w_tsbin_outdata[year_index, ::] = bin_data(
                df, ['salinity', 'temperature', 'basin'],
                [s_edges, t_edges, b_edges])
        else:
            tbin_list = bin_data(df, ['temperature', 'basin'],
                                 [t_edges, b_edges], mul_ts=True)
            sbin_list = bin_data(df, ['salinity', 'basin'],
                                 [s_edges, b_edges], mul_ts=True)
            tsbin_list = bin_data(df, ['salinity', 'temperature', 'basin'],
                                  [s_edges, t_edges, b_edges], mul_ts=True)
            (w_tbin_outdata[year_index, ::],
             ws_tbin_outdata[year_index, ::],
             wt_tbin_outdata[year_index, ::]) = tbin_list
            (w_sbin_outdata[year_index, ::],
             ws_sbin_outdata[year_index, ::],
             wt_sbin_outdata[year_index, ::]) = sbin_list
            (w_tsbin_outdata[year_index, ::],
             ws_tsbin_outdata[year_index, ::],
             wt_tsbin_outdata[year_index, ::]) = tsbin_list

    outdata_dict = {}
    outdata_dict['w_tbin'] = np.ma.masked_invalid(w_tbin_outdata)
    outdata_dict['w_sbin'] = np.ma.masked_invalid(w_sbin_outdata)
    outdata_dict['w_tsbin'] = np.ma.masked_invalid(w_tsbin_outdata)
    if spatial_data:
        outdata_dict['ws_tbin'] = np.ma.masked_invalid(ws_tbin_outdata)
        outdata_dict['wt_tbin'] = np.ma.masked_invalid(wt_tbin_outdata)
        outdata_dict['ws_sbin'] = np.ma.masked_invalid(ws_sbin_outdata)
        outdata_dict['wt_sbin'] = np.ma.masked_invalid(wt_sbin_outdata)
        outdata_dict['ws_tsbin'] = np.ma.masked_invalid(ws_tsbin_outdata)
        outdata_dict['wt_tsbin'] = np.ma.masked_invalid(wt_tsbin_outdata)
    if inargs.bin_percentile:
        t_values = s_values = pct_values * 100
        t_edges = s_edges = pct_edges * 100
        pct_edges_ts = [pct_edges_t, pct_edges_s]
    else:
        pct_edges_ts = []
    outcube_list = construct_cube(outdata_dict,
                                  w_year_cube,
                                  t_cube,
                                  s_cube,
                                  b_cube,
                                  years,
                                  t_values,
                                  t_edges,
                                  t_units,
                                  s_values,
                                  s_edges,
                                  s_units,
                                  log,
                                  mul_ts=spatial_data,
                                  pct_edges_ts=pct_edges_ts)

    equalise_attributes(outcube_list)
    iris.save(outcube_list, inargs.outfile)
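`weighted_percentiles` is not defined in this listing. A minimal sketch of the standard technique, interpolating against the normalised cumulative weight, consistent with how it is called above (the interpolation rule is an assumption):

import numpy as np

def weighted_percentiles(data, weights, quantiles):
    """Data values at the given quantiles (0-1), with weighted samples.

    Sketch only; the real helper may use a different interpolation rule.
    """
    order = np.argsort(data)
    data, weights = data[order], weights[order]
    cum = np.cumsum(weights) - 0.5 * weights   # midpoint convention
    cum = cum / weights.sum()
    return np.interp(quantiles, cum, data)

vals = np.array([1.0, 2.0, 3.0, 4.0])
wgts = np.array([1.0, 1.0, 1.0, 97.0])
print(weighted_percentiles(vals, wgts, np.array([0.5])))  # ~3.97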
Example #6
def process_data(t_cube, s_cube, w_cube,
                 a_cube, v_cube, b_cube,
                 t_values, s_values, b_values,
                 nt_values, ns_values,
                 s_edges, t_edges, b_edges,
                 w_dtype, log, inargs):
    """Implement binning."""

    nmonths = t_cube.coord('time').shape[0]
    
    w_tbin_outdata = np.ma.zeros([nmonths, nt_values])
    w_sbin_outdata = np.ma.zeros([nmonths, ns_values])
    w_tsbin_outdata = np.ma.zeros([nmonths, ns_values, nt_values])
    if w_dtype == 'spatial':
        ws_tbin_outdata = np.ma.zeros([nmonths, nt_values])
        wt_tbin_outdata = np.ma.zeros([nmonths, nt_values])
        ws_sbin_outdata = np.ma.zeros([nmonths, ns_values])
        wt_sbin_outdata = np.ma.zeros([nmonths, ns_values])
        ws_tsbin_outdata = np.ma.zeros([nmonths, ns_values, nt_values])
        wt_tsbin_outdata = np.ma.zeros([nmonths, ns_values, nt_values])

    for month in range(nmonths):
        print(month)         
        s_month_cube = s_cube[month, ::]
        t_month_cube = t_cube[month, ::]

        if w_dtype == 'spatial':
            w_month_cube = w_cube
        else:
            w_month_cube = w_cube[month, ::]

        if a_cube is not None:
            w_month_cube = spatial_weights.multiply_by_area(w_month_cube, area_cube=a_cube)
        elif v_cube is not None:
            w_month_cube = spatial_weights.multiply_by_volume(w_month_cube, volume_cube=v_cube)
        
        df, s_units, t_units = water_mass.create_df(w_month_cube, t_month_cube, s_month_cube, b_cube)
        if w_dtype == 'spatial':
            tbin_list = bin_data(df, ['temperature', 'basin'], [t_edges, b_edges], b_cube, mul_ts=True)
            sbin_list = bin_data(df, ['salinity', 'basin'], [s_edges, b_edges], b_cube, mul_ts=True)
            tsbin_list = bin_data(df, ['salinity', 'temperature', 'basin'], [s_edges, t_edges, b_edges], b_cube, mul_ts=True)
            w_tbin_outdata[month, ::], ws_tbin_outdata[month, ::], wt_tbin_outdata[month, ::] = tbin_list
            w_sbin_outdata[month, ::], ws_sbin_outdata[month, ::], wt_sbin_outdata[month, ::] = sbin_list
            w_tsbin_outdata[month, ::], ws_tsbin_outdata[month, ::], wt_tsbin_outdata[month, ::] = tsbin_list
        else:
            w_tbin_outdata[month, ::] = bin_data(df, ['temperature', 'basin'], [t_edges, b_edges], b_cube)
            w_sbin_outdata[month, ::] = bin_data(df, ['salinity', 'basin'], [s_edges, b_edges], b_cube)
            w_tsbin_outdata[month, ::] = bin_data(df, ['salinity', 'temperature', 'basin'], [s_edges, t_edges, b_edges], b_cube)

    outdata_dict = {}
    outdata_dict['w_tbin'] = w_tbin_outdata
    outdata_dict['w_sbin'] = w_sbin_outdata
    outdata_dict['w_tsbin'] = w_tsbin_outdata
    if w_dtype == 'spatial':
        outdata_dict['ws_tbin'] = ws_tbin_outdata
        outdata_dict['wt_tbin'] = wt_tbin_outdata
        outdata_dict['ws_sbin'] = ws_sbin_outdata
        outdata_dict['wt_sbin'] = wt_sbin_outdata
        outdata_dict['ws_tsbin'] = ws_tsbin_outdata
        outdata_dict['wt_tsbin'] = wt_tsbin_outdata
        mul_ts = True
    else:
        mul_ts = False

    outcube_list = construct_cube(outdata_dict, w_month_cube, t_cube, s_cube, b_cube,
                                  t_values, t_edges, t_units, s_values, s_edges, s_units,
                                  log, mul_ts=mul_ts)

    return outcube_list
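`bin_data` is likewise external. Given how it is called (a dataframe, matching lists of column names and bin edges), its core is plausibly a weighted N-dimensional histogram; a hypothetical sketch with numpy and pandas (the `weight` column name is an assumption):

import numpy as np
import pandas as pd

def bin_data(df, columns, edges, weight_col='weight'):
    """Weighted N-D histogram over the named dataframe columns.

    Hypothetical stand-in for the bin_data helper used above.
    """
    hist, _ = np.histogramdd(df[columns].values, bins=edges,
                             weights=df[weight_col].values)
    return hist

df = pd.DataFrame({'temperature': [1.0, 2.5, 2.6],
                   'basin': [1.0, 1.0, 2.0],
                   'weight': [10.0, 20.0, 30.0]})
print(bin_data(df, ['temperature', 'basin'],
               [np.array([0.0, 2.0, 4.0]), np.array([0.5, 1.5, 2.5])]))
# [[10.  0.]
#  [20. 30.]]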
Example #7
def process_data_by_year(t_cube, s_cube, w_cube,
                         a_cube, v_cube, b_cube,
                         t_values, s_values, b_values,
                         nt_values, ns_values,
                         s_edges, t_edges, b_edges,
                         w_dtype, log, inargs):
    """Implement annual binning."""

    iris.coord_categorisation.add_year(t_cube, 'time')
    iris.coord_categorisation.add_year(s_cube, 'time')
    t_years = set(t_cube.coord('year').points)
    s_years = set(s_cube.coord('year').points)
    assert t_years == s_years
    if w_dtype != 'spatial':
        iris.coord_categorisation.add_year(w_cube, 'time')
        w_years = set(w_cube.coord('year').points)
        assert w_years == t_years
    years = np.array(list(t_years))
    years.sort()
    
    w_tbin_outdata = np.ma.zeros([len(years), nt_values])
    w_sbin_outdata = np.ma.zeros([len(years), ns_values])
    w_tsbin_outdata = np.ma.zeros([len(years), ns_values, nt_values])
    if w_dtype == 'spatial':
        ws_tbin_outdata = np.ma.zeros([len(years), nt_values])
        wt_tbin_outdata = np.ma.zeros([len(years), nt_values])
        ws_sbin_outdata = np.ma.zeros([len(years), ns_values])
        wt_sbin_outdata = np.ma.zeros([len(years), ns_values])
        ws_tsbin_outdata = np.ma.zeros([len(years), ns_values, nt_values])
        wt_tsbin_outdata = np.ma.zeros([len(years), ns_values, nt_values])

    for year_index, year in enumerate(years):
        print(year)         
        year_constraint = iris.Constraint(year=year)
        s_year_cube = s_cube.extract(year_constraint)
        t_year_cube = t_cube.extract(year_constraint)
        if w_dtype == 'spatial':
            w_year_cube = w_cube
        else:
            w_year_cube = w_cube.extract(year_constraint)

        if a_cube is not None:
            w_year_cube = spatial_weights.multiply_by_area(w_year_cube, area_cube=a_cube)
        elif v_cube is not None:
            w_year_cube = spatial_weights.multiply_by_volume(w_year_cube, volume_cube=v_cube)

        df, s_units, t_units = water_mass.create_df(w_year_cube, t_year_cube, s_year_cube, b_cube,
                                                    multiply_weights_by_days_in_year_frac=True)
        if w_dtype == 'spatial':
            tbin_list = bin_data(df, ['temperature', 'basin'], [t_edges, b_edges], b_cube, mul_ts=True)
            sbin_list = bin_data(df, ['salinity', 'basin'], [s_edges, b_edges], b_cube, mul_ts=True)
            tsbin_list = bin_data(df, ['salinity', 'temperature', 'basin'], [s_edges, t_edges, b_edges], b_cube, mul_ts=True)
            w_tbin_outdata[year_index, ::], ws_tbin_outdata[year_index, ::], wt_tbin_outdata[year_index, ::] = tbin_list
            w_sbin_outdata[year_index, ::], ws_sbin_outdata[year_index, ::], wt_sbin_outdata[year_index, ::] = sbin_list
            w_tsbin_outdata[year_index, ::], ws_tsbin_outdata[year_index, ::], wt_tsbin_outdata[year_index, ::] = tsbin_list
        else:
            w_tbin_outdata[year_index, ::] = bin_data(df, ['temperature', 'basin'], [t_edges, b_edges], b_cube)
            w_sbin_outdata[year_index, ::] = bin_data(df, ['salinity', 'basin'], [s_edges, b_edges], b_cube)
            w_tsbin_outdata[year_index, ::] = bin_data(df, ['salinity', 'temperature', 'basin'], [s_edges, t_edges, b_edges], b_cube)
            
    outdata_dict = {}
    outdata_dict['w_tbin'] = w_tbin_outdata
    outdata_dict['w_sbin'] = w_sbin_outdata
    outdata_dict['w_tsbin'] = w_tsbin_outdata
    if w_dtype == 'spatial':
        outdata_dict['ws_tbin'] = ws_tbin_outdata
        outdata_dict['wt_tbin'] = wt_tbin_outdata
        outdata_dict['ws_sbin'] = ws_sbin_outdata
        outdata_dict['wt_sbin'] = wt_sbin_outdata
        outdata_dict['ws_tsbin'] = ws_tsbin_outdata
        outdata_dict['wt_tsbin'] = wt_tsbin_outdata
        mul_ts = True
    else:
        mul_ts = False

    outcube_list = construct_cube(outdata_dict, w_year_cube, t_cube, s_cube, b_cube,
                                  t_values, t_edges, t_units, s_values, s_edges, s_units,
                                  log, years=years, mul_ts=mul_ts)

    return outcube_list
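The `multiply_weights_by_days_in_year_frac=True` option suggests each monthly sample is weighted by the fraction of the year it covers, so that twelve monthly fields combine into a day-weighted annual value. The idea in plain numpy:

import numpy as np

days_in_month = np.array([31, 28, 31, 30, 31, 30,
                          31, 31, 30, 31, 30, 31])
year_frac = days_in_month / days_in_month.sum()  # fraction of the year

monthly_flux = np.full(12, 2.0)                  # some monthly-mean quantity
annual_value = (monthly_flux * year_frac).sum()  # day-weighted annual mean
print(annual_value)                              # 2.0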
Example #8
def main(inargs):
    """Run the program."""

    keep_coord = 'latitude' if inargs.direction == 'zonal' else 'longitude'
    collapse_coord = 'longitude' if inargs.direction == 'zonal' else 'latitude'

    #depth_constraint = gio.iris_vertical_constraint(None, inargs.max_depth)
    metadata_dict = {}

    if inargs.basin:
        basin_file, basin_name = inargs.basin
        basin_cube = iris.load_cube(basin_file, 'region') #& depth_constraint)
        metadata_dict[basin_file] = basin_cube.attributes['history']
    else:
        basin_cube = None

    if inargs.area:
        area_cube = iris.load_cube(inargs.area, 'cell_area') # & depth_constraint)
    else:
        area_cube = None

    if inargs.weights:
        weights_cube = iris.load_cube(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    if inargs.sftlf_file or inargs.realm:
        assert inargs.sftlf_file and inargs.realm, "Must give --realm and --sftlf_file"
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    output_cubelist = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        print(filename)
        cube, history = gio.combine_files(filename, inargs.var, checks=True)  #& depth_constraint)

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube)
    
        if basin_cube:
            cube = select_basin(cube, basin_cube, basin_name)
            if inargs.weights:
                weights_cube = select_basin(weights_cube, basin_cube, basin_name)        

        if inargs.multiply_by_area:
            cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube) 

        if inargs.realm:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

        if inargs.weights:
            assert cube.ndim - weights_cube.ndim == 1
            broadcast_weights_cube = cube.copy()
            broadcast_weights_array = uconv.broadcast_array(weights_cube.data, [1, weights_cube.ndim], cube.shape)
            broadcast_weights_cube.data = broadcast_weights_array
        else:
            broadcast_weights_cube = None
            broadcast_weights_array = None
            
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        if 'latitude' in aux_coord_names:
            # curvilinear grid
            if not ref_cube:
                lats = numpy.arange(-89.5, 90, 1)
                lons = numpy.arange(0.5, 360, 1)
                ref_cube = grids.make_grid(lats, lons)
            horiz_aggregate = curvilinear_agg(cube, ref_cube, keep_coord,
                                              aggregation_functions[inargs.aggregation],
                                              weights=broadcast_weights_cube)
        else:
            # rectilinear grid
            horiz_aggregate = cube.collapsed(collapse_coord, aggregation_functions[inargs.aggregation],
                                             weights=broadcast_weights_array)
            horiz_aggregate.remove_coord(collapse_coord)

        if inargs.flux_to_mag:
            horiz_aggregate = uconv.flux_to_magnitude(horiz_aggregate)
            
        #horiz_aggregate.data = horiz_aggregate.data.astype(numpy.float32)
        output_cubelist.append(horiz_aggregate)

    iris.util.equalise_attributes(output_cubelist)
    iris.util.unify_time_units(output_cubelist)
    output_cubelist = output_cubelist.concatenate_cube()

    if inargs.cumsum:
        output_cubelist = cumsum(output_cubelist)

    metadata_dict[filename] = history[0]
    output_cubelist.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(output_cubelist, inargs.outfile) 
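The `cumsum` helper called above is not shown; for a magnitude time series it is presumably a running total along the leading (time) axis. A sketch under that assumption:

import numpy as np

def cumsum(cube):
    """Cumulatively sum cube data along the leading (time) axis.

    Sketch of the cumsum helper; the real one may handle masks and
    cell methods more carefully.
    """
    cube = cube.copy()
    cube.data = np.ma.cumsum(cube.data, axis=0)
    return cube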
Example #9
def main(inargs):
    """Run the program."""

    keep_coord = 'latitude' if inargs.direction == 'zonal' else 'longitude'
    collapse_coord = 'longitude' if inargs.direction == 'zonal' else 'latitude'

    depth_constraint = gio.iris_vertical_constraint(None, inargs.max_depth)
    metadata_dict = {}

    if inargs.basin:
        basin_file, basin_name = inargs.basin
        basin_cube = iris.load_cube(basin_file, 'region' & depth_constraint)
        metadata_dict[basin_file] = basin_cube.attributes['history']
    else:
        basin_cube = None

    if inargs.area:
        area_cube = iris.load_cube(inargs.area, 'cell_area' & depth_constraint)
    else:
        area_cube = None

    if inargs.weights:
        weights_cube = iris.load_cube(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    if inargs.sftlf_file or inargs.realm:
        assert inargs.sftlf_file and inargs.realm, "Must give --realm and --sftlf_file"
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    output_cubelist = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        print(filename)
        cube = iris.load_cube(filename, gio.check_iris_var(inargs.var) & depth_constraint)

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube)
    
        if basin_cube:
            cube = select_basin(cube, basin_cube, basin_name)        

        if inargs.multiply_by_area:
            cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube) 

        if inargs.realm:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

        if inargs.weights:
            assert cube.ndim == 3
            broadcasted_weights = uconv.broadcast_array(weights_cube.data, [1, 2], cube.shape)
        else:
            broadcasted_weights = None
            
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        if 'latitude' in aux_coord_names:
            # curvilinear grid
            assert ref_cube
            horiz_aggregate = curvilinear_agg(cube, ref_cube, keep_coord, aggregation_functions[inargs.aggregation])     
            #TODO: Add weights=broadcasted_weights
        else:
            # rectilinear grid
            horiz_aggregate = cube.collapsed(collapse_coord, aggregation_functions[inargs.aggregation],
                                             weights=broadcasted_weights)
            horiz_aggregate.remove_coord(collapse_coord)

        if inargs.flux_to_mag:
            horiz_aggregate = uconv.flux_to_magnitude(horiz_aggregate)
            
        horiz_aggregate.data = horiz_aggregate.data.astype(numpy.float32)

        if inargs.outfile[-3:] == '.nc':
            output_cubelist.append(horiz_aggregate)
        elif inargs.outfile[-1] == '/': 
            if inargs.cumsum:
                horiz_aggregate = cumsum(horiz_aggregate)       
            infile = filename.split('/')[-1]
            infile = re.sub(cube.var_name + '_',
                            cube.var_name + '-' + inargs.direction + '-' + inargs.aggregation + '_',
                            infile)
            if inargs.annual:
                infile = re.sub('Omon', 'Oyr', infile)
                infile = re.sub('Amon', 'Ayr', infile)
       
            outfile = inargs.outfile + infile
            metadata_dict[filename] = cube.attributes['history'] 
            horiz_aggregate.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)

            iris.save(horiz_aggregate, outfile)
            print('output:', outfile)
            del horiz_aggregate

    if inargs.outfile[-3:] == '.nc':
        equalise_attributes(output_cubelist)
        iris.util.unify_time_units(output_cubelist)
        output_cubelist = output_cubelist.concatenate_cube()

        if inargs.cumsum:
            output_cubelist = cumsum(output_cubelist)

        metadata_dict[filename] = cube.attributes['history']
        output_cubelist.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
        iris.save(output_cubelist, inargs.outfile)
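The filename logic in Example #9 rewrites the variable and frequency components of a CMIP-style name. For instance (file name hypothetical):

import re

infile = 'pr_Omon_ACCESS1-0_historical_r1i1p1_185001-200512.nc'
var_name, direction, aggregation = 'pr', 'zonal', 'mean'

outfile = re.sub(var_name + '_',
                 var_name + '-' + direction + '-' + aggregation + '_',
                 infile)
outfile = re.sub('Omon', 'Oyr', outfile)  # monthly -> annual frequency
print(outfile)
# pr-zonal-mean_Oyr_ACCESS1-0_historical_r1i1p1_185001-200512.nc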