(These excerpts assume the imports of their surrounding scripts: iris, numpy, logging, matplotlib and cmdline_provenance as cmdprov, plus the local helper modules gio, uconv, spatial_weights, timeseries and water_mass, and a module-level repo_dir.)

Example #1
def main(inargs):
    """Run the program."""

    logging.basicConfig(level=logging.DEBUG)
    if ('vol' in inargs.weights_var) or ('area' in inargs.weights_var):
        weights_dtype = 'spatial'
    elif 'flux' in inargs.weights_var:
        weights_dtype = 'flux'
    else:
        weights_dtype = 'full field'

    w_cube, w_history = get_weights_data(inargs.weights_files, inargs.weights_var)
    t_cube, t_history = get_bin_data(inargs.temperature_files, inargs.temperature_var, w_cube)
    s_cube, s_history = get_bin_data(inargs.salinity_files, inargs.salinity_var, w_cube)
    b_cube = iris.load_cube(inargs.basin_file, 'region')
    if inargs.area_file:
        assert not inargs.volume_file, "Cannot multiply weights by area and volume"
        a_cube = gio.get_ocean_weights(inargs.area_file)
    else:
        a_cube = None
    if inargs.volume_file:
        assert not inargs.area_file, "Cannot multiply weights by area and volume"
        v_cube = gio.get_ocean_weights(inargs.volume_file)
    else:
        v_cube = None

    log = get_log(inargs, w_history, t_history, s_history, b_cube, a_cube, v_cube)

    b_values, b_edges = uconv.get_basin_details(b_cube)
    t_min, t_max = inargs.temperature_bounds
    t_step = inargs.tbin_size
    t_edges = np.arange(t_min, t_max + t_step, t_step)
    t_values = (t_edges[1:] + t_edges[:-1]) / 2 
    s_values, s_edges = uconv.salinity_bins()
    nt_values = len(t_values)
    ns_values = len(s_values)

    if inargs.bin_freq == 'yr':
        outcube_list = process_data_by_year(t_cube, s_cube, w_cube,
                                            a_cube, v_cube, b_cube,
                                            t_values, s_values, b_values,
                                            nt_values, ns_values,
                                            s_edges, t_edges, b_edges,
                                            weights_dtype,
                                            log, inargs) 
    elif inargs.bin_freq == 'mon':
        outcube_list = process_data(t_cube, s_cube, w_cube,
                                    a_cube, v_cube, b_cube,
                                    t_values, s_values, b_values,
                                    nt_values, ns_values,
                                    s_edges, t_edges, b_edges,
                                    weights_dtype,
                                    log, inargs)

    iris.util.equalise_attributes(outcube_list)
    iris.save(outcube_list, inargs.outfile)
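The temperature bins in Example #1 are defined by evenly spaced edges, with bin values taken as the midpoints. A standalone numpy sketch of the same construction (the bounds and step here are illustrative, not the script's defaults):

import numpy as np

t_min, t_max, t_step = -2.0, 32.0, 1.0              # illustrative bounds and step
t_edges = np.arange(t_min, t_max + t_step, t_step)  # edges run from t_min to t_max inclusive
t_values = (t_edges[1:] + t_edges[:-1]) / 2         # bin midpoints
assert len(t_values) == len(t_edges) - 1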
Example #2
def main(inargs):
    """Run the program."""

    standard_names = {
        'thetao': 'sea_water_potential_temperature',
        'so': 'sea_water_salinity'
    }
    volume_cube = gio.get_ocean_weights(inargs.volfile)
    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        cube = iris.load_cube(infile, standard_names[inargs.invar])
        weights = uconv.broadcast_array(volume_cube.data, [1, 3], cube.shape)
        coord_names = [coord.name() for coord in cube.dim_coords]
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        ga = cube.collapsed(coord_names[1:],
                            iris.analysis.MEAN,
                            weights=weights)
        for coord in coord_names[1:] + aux_coord_names:
            ga.remove_coord(coord)
        ga.var_name = inargs.invar + 'ga'
        output_cubelist.append(ga)
        print(infile)

    outcube = gio.combine_cubes(output_cubelist)
    metadata_dict = {}
    metadata_dict[infile] = cube.attributes['history']  # history of the last file read
    metadata_dict[inargs.volfile] = volume_cube.attributes['history']
    outcube.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
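uconv.broadcast_array is used above to expand the static volume weights to the shape of the time-varying data cube. A plain-numpy sketch of the assumed behaviour of uconv.broadcast_array(volume, [1, 3], cube.shape), namely repeating a (depth, lat, lon) array along a leading time axis:

import numpy as np

volume = np.random.rand(3, 4, 5)                 # hypothetical (depth, lat, lon) weights
target_shape = (12, 3, 4, 5)                     # hypothetical (time, depth, lat, lon) shape
weights = np.broadcast_to(volume, target_shape)  # repeat along the time axis
assert weights.shape == target_shape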
Example #3
def main(inargs):
    """Run the program."""

    standard_names = {
        'thetao': 'sea_water_potential_temperature',
        'so': 'sea_water_salinity'
    }
    volume_cube = gio.get_ocean_weights(inargs.volfile)
    output_cubelist = iris.cube.CubeList([])

    cube, history = gio.combine_files(inargs.infiles,
                                      standard_names[inargs.invar],
                                      checks=True)
    ntsteps = cube.shape[0]
    for tstep, cube_slice in enumerate(cube.slices_over('time')):
        print(f'time step {tstep + 1} of {ntsteps}')
        coord_names = [coord.name() for coord in cube.dim_coords]
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        ga = cube_slice.collapsed(coord_names[1:],
                                  iris.analysis.MEAN,
                                  weights=volume_cube.data)
        for coord in coord_names[1:] + aux_coord_names:
            ga.remove_coord(coord)
        ga.var_name = inargs.invar + 'ga'
        output_cubelist.append(ga)
    outcube = output_cubelist.merge()[0]
    metadata_dict = {}
    metadata_dict[inargs.infiles[0]] = history
    metadata_dict[inargs.volfile] = volume_cube.attributes['history']
    outcube.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
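Both versions of this script reduce to a weighted collapse over the non-time coordinates. A minimal, self-contained iris example of the same collapsed call, on a synthetic one-dimensional cube:

import numpy as np
import iris

data = np.array([1.0, 2.0, 3.0])
weights = np.array([1.0, 1.0, 2.0])
depth = iris.coords.DimCoord([10.0, 20.0, 30.0], long_name='depth', units='m')
cube = iris.cube.Cube(data, long_name='temperature', dim_coords_and_dims=[(depth, 0)])
mean = cube.collapsed('depth', iris.analysis.MEAN, weights=weights)
print(float(mean.data))  # (1*1 + 2*1 + 3*2) / 4 = 2.25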
Example #4
def get_weights_data(file_list, var):
    """Read the weights data file/s"""
    
    w_var = mom_vars.get(var, var)
    if ('vol' in w_var) or ('area' in w_var):
        assert len(file_list) == 1
        w_cube = gio.get_ocean_weights(file_list[0])
        history = w_cube.attributes['history'] 
    else:
        w_cube, history = gio.combine_files(file_list, var, checks=True)

    return w_cube, history
Example #5
def main(inargs):
    """Run the program."""

    if inargs.var == 'ocean_volume':
        data_cube = gio.get_ocean_weights(inargs.infile, sanity_check=False)
    else:
        data_cube, data_history = gio.combine_files(inargs.infile, inargs.var)
    mask_cube, mask_history = gio.combine_files(inargs.mask_file, inargs.mask_var)

    if inargs.mask_method == 'copy':    
        mask = copy_mask(mask_cube, data_cube.shape)
    else:
        mask = create_mask(mask_cube, data_cube.shape)
    data_cube.data = numpy.ma.asarray(data_cube.data)
    data_cube.data.mask = mask
            
    data_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir) 
    iris.save(data_cube, inargs.outfile)
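The masking step above promotes the data to a masked array and then attaches the mask in place; numpy.ma excludes masked cells from subsequent statistics. A small sketch of the same pattern:

import numpy as np

data = np.ma.asarray(np.array([1.0, 2.0, 3.0]))
data.mask = [False, True, False]
print(data.mean())  # 2.0 -- the masked middle value is excluded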
Example #6
def main(inargs):
    """Run the program."""

    data_cube = iris.load_cube(inargs.dummy_file, inargs.dummy_var)
    coord_names = [coord.name() for coord in data_cube.dim_coords]
    if inargs.volcello_file:
        assert data_cube.ndim == 4
        volume_cube = gio.get_ocean_weights(inargs.volcello_file)
        depth_coord = data_cube.coord(coord_names[1])
        assert depth_coord.units in ['m', 'dbar']
        surface_depth = depth_coord.bounds[0][1] - depth_coord.bounds[0][0]
        area_data = volume_cube.data[0, ::] / surface_depth
        data_cube = data_cube[0, 0, ::]
        data_cube.remove_coord(coord_names[0])
        data_cube.remove_coord(coord_names[1])
    else:
        assert coord_names[-2:] == ['latitude', 'longitude']
        if data_cube.ndim == 3:
            data_cube = data_cube[0, ::]
            data_cube.remove_coord(coord_names[0])
        else:
            data_cube = data_cube[0, 0, ::]
            data_cube.remove_coord(coord_names[0])
            data_cube.remove_coord(coord_names[1])
        area_data = spatial_weights.area_array(data_cube)
        area_data = numpy.ma.asarray(area_data)
        if inargs.outvar == 'areacello':
            area_data.mask = data_cube.data.mask

    area_cube = construct_area_cube(inargs.outvar, area_data,
                                    data_cube.attributes, data_cube.dim_coords)
    area_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)

    if inargs.outvar == 'areacello':
        gio.check_global_ocean_area(area_cube.data.sum())
    else:
        gio.check_global_surface_area(area_cube.data.sum())

    iris.save(area_cube, inargs.outfile)
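In the volcello branch above, the surface-layer thickness is the width of the first pair of depth bounds, and cell area follows as surface-layer volume divided by that thickness. A toy numpy illustration with hypothetical values:

import numpy as np

depth_bounds = np.array([[0.0, 10.0], [10.0, 30.0], [30.0, 70.0]])  # hypothetical, m
surface_depth = depth_bounds[0][1] - depth_bounds[0][0]             # 10.0 m
surface_volume = np.array([[5.0e11, 7.5e11]])                       # hypothetical, m3
area = surface_volume / surface_depth                               # 5.0e10 and 7.5e10 m2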
Example #7
def main(inargs):
    """Run the program."""

    assert inargs.var == 'precipitation_flux'

    if inargs.area_file:
        area_cube = gio.get_ocean_weights(inargs.area_file)
    else:
        area_cube = None

    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        cube = iris.load_cube(infile,
                              inargs.var)  # kg m-2 s-1 (monthly, gridded)
        coord_names = [coord.name() for coord in cube.dim_coords]
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        days_in_year = timeseries.get_days_in_year(cube)

        cube = spatial_weights.multiply_by_area(
            cube, area_cube=area_cube)  # kg s-1 (monthly, gridded)
        cube = cube.collapsed(coord_names[1:],
                              iris.analysis.SUM)  # kg s-1 (monthly, globe)
        cube = timeseries.flux_to_total(cube)  # kg (monthly, globe)
        cube = timeseries.convert_to_annual(
            cube, aggregation='sum')  # kg (annual, globe)
        cube.data = cube.data / 5.1e14  # divide by Earth's surface area (m2): kg m-2 = mm (annual, globe)
        cube.data = cube.data / days_in_year.values  # mm/day (annual, globe)

        cube.units = 'mm/day'
        for coord in coord_names[1:] + aux_coord_names:
            cube.remove_coord(coord)
        output_cubelist.append(cube)
        print(infile)

    outcube = gio.combine_cubes(output_cubelist)
    outcube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
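The inline comments above track a unit chain that ends by dividing by 5.1e14 m2 (Earth's surface area) and then by days per year, using the fact that 1 kg of water spread over 1 m2 is 1 mm of depth. A worked check with round, illustrative numbers:

kg_per_year = 5.1e17                 # hypothetical global annual total, kg
mm_per_year = kg_per_year / 5.1e14   # kg m-2 == mm of water depth
mm_per_day = mm_per_year / 365.0
print(mm_per_year, mm_per_day)       # 1000.0, ~2.74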
Example #8
def main(inargs):
    """Run the program."""

    agg_functions = {'mean': iris.analysis.MEAN, 'sum': iris.analysis.SUM}
    metadata_dict = {}
    
    basin_cube = iris.load_cube(inargs.basin_file, 'region')
    assert basin_cube.data.min() == 11
    assert basin_cube.data.max() == 18
    basin_numbers = numpy.array([1, 2, 3])
    metadata_dict[inargs.basin_file] = basin_cube.attributes['history']

    flag_values = '1 2 3'
    flag_meanings = 'atlantic indo-pacific globe'
    basin_coord = iris.coords.DimCoord(basin_numbers,
                                       standard_name=basin_cube.standard_name,
                                       long_name=basin_cube.long_name,
                                       var_name=basin_cube.var_name,
                                       units=basin_cube.units,
                                       attributes={'flag_values': flag_values,
                                                   'flag_meanings': flag_meanings})

    if inargs.weights:
        weights_cube = gio.get_ocean_weights(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']        

    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        print(infile)

        if inargs.var == 'ocean_volume':
            cube = gio.get_ocean_weights(infile)
            history = [cube.attributes['history']]
        else:
            cube, history = gio.combine_files(infile, inargs.var, checks=True)

        assert cube.ndim in [3, 4]
        coord_names = [coord.name() for coord in cube.dim_coords]

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)   

        assert basin_cube.shape == cube.shape[-2:]
        basin_array = uconv.broadcast_array(basin_cube.data, [cube.ndim - 2, cube.ndim - 1], cube.shape)
        basin_masks = {'atlantic': basin_array > 12,
                       'indo-pacific': (basin_array < 13) | (basin_array > 15),
                       'globe': basin_array > 16}

        if inargs.weights:
            assert weights_cube.data.shape == cube.shape[-3:]
            if cube.ndim == 4:
                weights_array = uconv.broadcast_array(weights_cube.data, [1, 3], cube.shape)
            else:
                weights_array = weights_cube.data
        else:
            weights_array = None

        if cube.ndim == 3:
            outdata = numpy.ma.zeros([cube.shape[0], len(basin_numbers)])
        else:
            outdata = numpy.ma.zeros([cube.shape[0], cube.shape[1], len(basin_numbers)])

        for basin_index, basin_name in enumerate(['atlantic', 'indo-pacific', 'globe']):
            temp_cube = cube.copy()
            mask = basin_masks[basin_name]
            temp_cube.data = numpy.ma.masked_where(mask, temp_cube.data)
            if len(coord_names) == cube.ndim:
                horiz_agg = temp_cube.collapsed(coord_names[-2:], agg_functions[inargs.agg], weights=weights_array).data
            elif inargs.agg == 'mean':
                horiz_agg = numpy.ma.average(temp_cube.data, axis=(-2,-1), weights=weights_array)
            elif inargs.agg == 'sum':
                horiz_agg = numpy.ma.sum(temp_cube.data, axis=(-2,-1))
            if outdata.ndim == 2:
                outdata[:, basin_index] = horiz_agg
            else:
                outdata[:, :, basin_index] = horiz_agg

        coord_list = [(cube.dim_coords[0], 0)]
        if cube.ndim == 4:
            coord_list.append((cube.dim_coords[1], 1))
            coord_list.append((basin_coord, 2))
        else:
            coord_list.append((basin_coord, 1))
        outcube = iris.cube.Cube(outdata,
                                 standard_name=cube.standard_name,
                                 long_name=cube.long_name,
                                 var_name=cube.var_name,
                                 units=cube.units,
                                 attributes=cube.attributes,
                                 dim_coords_and_dims=coord_list)
        output_cubelist.append(outcube)

    iris.util.equalise_attributes(output_cubelist)
    iris.util.unify_time_units(output_cubelist)
    outcube = output_cubelist.concatenate_cube()
    if history:
        metadata_dict[inargs.infiles[-1]] = history[0]
    outcube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
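The basin selection above works by masking every grid cell outside the target basin (codes 11-12 Atlantic, 13-15 Indo-Pacific, <= 16 globe). A toy numpy.ma sketch of the Atlantic case:

import numpy as np

basin = np.array([[11, 12], [14, 17]])           # toy basin codes
data = np.ones_like(basin, dtype=float)
atlantic = np.ma.masked_where(basin > 12, data)  # keep only codes 11-12
print(int(atlantic.count()))                     # 2 unmasked cells remain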
Example #9
def main(inargs):
    """Run the program."""

    logging.basicConfig(level=logging.DEBUG)

    spatial_data = ('vol' in inargs.weights_var) or ('area' in inargs.weights_var)
    flux_data = not spatial_data

    w_cube, w_history = get_weights_data(inargs.weights_files,
                                         inargs.weights_var, inargs.area_file)
    t_cube, t_history = get_bin_data(inargs.temperature_files,
                                     inargs.temperature_var, w_cube)
    s_cube, s_history = get_bin_data(inargs.salinity_files,
                                     inargs.salinity_var, w_cube)
    b_cube = iris.load_cube(inargs.basin_file, 'region')
    if inargs.area_file:
        assert flux_data
        a_cube = gio.get_ocean_weights(inargs.area_file)
    else:
        assert spatial_data
        a_cube = None

    log = get_log(inargs, w_history, t_history, s_history, b_cube, a_cube)

    b_values, b_edges = uconv.get_basin_details(b_cube)
    if inargs.bin_percentile:
        pct_edges = np.arange(0, 1.01, 0.01)
        pct_values = (pct_edges[1:] + pct_edges[:-1]) / 2
        nt_values = ns_values = len(pct_values)
        s_bounds = (-0.2, 80)
        pct_cube = a_cube
    else:
        t_min, t_max = inargs.temperature_bounds
        t_step = inargs.tbin_size
        t_edges = np.arange(t_min, t_max + t_step, t_step)
        t_values = (t_edges[1:] + t_edges[:-1]) / 2
        s_values, s_edges = uconv.salinity_bins()
        s_bounds = (s_edges[0], s_edges[-1])
        nt_values = len(t_values)
        ns_values = len(s_values)
        pct_cube = None

    iris.coord_categorisation.add_year(t_cube, 'time')
    iris.coord_categorisation.add_year(s_cube, 'time')
    t_years = set(t_cube.coord('year').points)
    s_years = set(s_cube.coord('year').points)
    assert t_years == s_years
    if flux_data:
        iris.coord_categorisation.add_year(w_cube, 'time')
        w_years = set(w_cube.coord('year').points)
        assert w_years == t_years
    years = np.array(list(t_years))
    years.sort()

    w_tbin_outdata = np.ma.zeros([len(years), nt_values, len(b_values)])
    w_sbin_outdata = np.ma.zeros([len(years), ns_values, len(b_values)])
    w_tsbin_outdata = np.ma.zeros(
        [len(years), ns_values, nt_values,
         len(b_values)])
    if spatial_data:
        ws_tbin_outdata = np.ma.zeros([len(years), nt_values, len(b_values)])
        wt_tbin_outdata = np.ma.zeros([len(years), nt_values, len(b_values)])
        ws_sbin_outdata = np.ma.zeros([len(years), ns_values, len(b_values)])
        wt_sbin_outdata = np.ma.zeros([len(years), ns_values, len(b_values)])
        ws_tsbin_outdata = np.ma.zeros(
            [len(years), ns_values, nt_values,
             len(b_values)])
        wt_tsbin_outdata = np.ma.zeros(
            [len(years), ns_values, nt_values,
             len(b_values)])
    if inargs.bin_percentile:
        pct_edges_t = np.ma.zeros([len(years), nt_values + 1])
        pct_edges_s = np.ma.zeros([len(years), ns_values + 1])
    if inargs.bin_clim:
        iris.coord_categorisation.add_month(s_cube, 'time')
        s_year_cube = s_cube.aggregated_by(['month'], iris.analysis.MEAN)
        s_year_cube.remove_coord('month')
        s_year_cube.replace_coord(s_cube[0:12, ::].coord('time'))
        iris.coord_categorisation.add_month(t_cube, 'time')
        t_year_cube = t_cube.aggregated_by(['month'], iris.analysis.MEAN)
        t_year_cube.remove_coord('month')
        t_year_cube.replace_coord(t_cube[0:12, ::].coord('time'))
    for year_index, year in enumerate(years):
        print(year)
        year_constraint = iris.Constraint(year=year)
        if not inargs.bin_clim:
            s_year_cube = s_cube.extract(year_constraint)
            t_year_cube = t_cube.extract(year_constraint)
        if flux_data:
            w_year_cube = w_cube.extract(year_constraint)
            w_year_cube = spatial_weights.multiply_by_area(w_year_cube,
                                                           area_cube=a_cube)
        else:
            w_year_cube = w_cube
        df, s_units, t_units = water_mass.create_df(
            w_year_cube,
            t_year_cube,
            s_year_cube,
            b_cube,
            pct_cube=pct_cube,
            multiply_weights_by_days_in_year_frac=True)
        if inargs.bin_percentile:
            weight_var = 'percentile_weights' if pct_cube else 'weight'
            t_edges = weighted_percentiles(df['temperature'].values,
                                           df[weight_var].values, pct_edges)
            s_edges = weighted_percentiles(df['salinity'].values,
                                           df[weight_var].values, pct_edges)
            pct_edges_t[year_index, :] = t_edges
            pct_edges_s[year_index, :] = s_edges
        if flux_data:
            w_tbin_outdata[year_index, ::] = bin_data(df,
                                                      ['temperature', 'basin'],
                                                      [t_edges, b_edges])
            w_sbin_outdata[year_index, ::] = bin_data(df,
                                                      ['salinity', 'basin'],
                                                      [s_edges, b_edges])
            w_tsbin_outdata[year_index, ::] = bin_data(
                df, ['salinity', 'temperature', 'basin'],
                [s_edges, t_edges, b_edges])
        else:
            tbin_list = bin_data(df, ['temperature', 'basin'],
                                 [t_edges, b_edges],
                                 mul_ts=True)
            sbin_list = bin_data(df, ['salinity', 'basin'], [s_edges, b_edges],
                                 mul_ts=True)
            tsbin_list = bin_data(df, ['salinity', 'temperature', 'basin'],
                                  [s_edges, t_edges, b_edges],
                                  mul_ts=True)
            (w_tbin_outdata[year_index, ::],
             ws_tbin_outdata[year_index, ::],
             wt_tbin_outdata[year_index, ::]) = tbin_list
            (w_sbin_outdata[year_index, ::],
             ws_sbin_outdata[year_index, ::],
             wt_sbin_outdata[year_index, ::]) = sbin_list
            (w_tsbin_outdata[year_index, ::],
             ws_tsbin_outdata[year_index, ::],
             wt_tsbin_outdata[year_index, ::]) = tsbin_list

    outdata_dict = {}
    outdata_dict['w_tbin'] = np.ma.masked_invalid(w_tbin_outdata)
    outdata_dict['w_sbin'] = np.ma.masked_invalid(w_sbin_outdata)
    outdata_dict['w_tsbin'] = np.ma.masked_invalid(w_tsbin_outdata)
    if spatial_data:
        outdata_dict['ws_tbin'] = np.ma.masked_invalid(ws_tbin_outdata)
        outdata_dict['wt_tbin'] = np.ma.masked_invalid(wt_tbin_outdata)
        outdata_dict['ws_sbin'] = np.ma.masked_invalid(ws_sbin_outdata)
        outdata_dict['wt_sbin'] = np.ma.masked_invalid(wt_sbin_outdata)
        outdata_dict['ws_tsbin'] = np.ma.masked_invalid(ws_tsbin_outdata)
        outdata_dict['wt_tsbin'] = np.ma.masked_invalid(wt_tsbin_outdata)
    if inargs.bin_percentile:
        t_values = s_values = pct_values * 100
        t_edges = s_edges = pct_edges * 100
        pct_edges_ts = [pct_edges_t, pct_edges_s]
    else:
        pct_edges_ts = []
    outcube_list = construct_cube(outdata_dict,
                                  w_year_cube,
                                  t_cube,
                                  s_cube,
                                  b_cube,
                                  years,
                                  t_values,
                                  t_edges,
                                  t_units,
                                  s_values,
                                  s_edges,
                                  s_units,
                                  log,
                                  mul_ts=spatial_data,
                                  pct_edges_ts=pct_edges_ts)

    equalise_attributes(outcube_list)
    iris.save(outcube_list, inargs.outfile)
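weighted_percentiles() above is a helper from the surrounding script. A common recipe for weighted percentiles, sketched here as an assumption about (not a copy of) that helper, interpolates the normalised cumulative weight distribution at the requested quantiles:

import numpy as np

def weighted_percentiles_sketch(values, weights, quantiles):
    """Hypothetical stand-in for the script's weighted_percentiles()."""
    order = np.argsort(values)
    values, weights = values[order], weights[order]
    cum_weights = np.cumsum(weights) - 0.5 * weights  # midpoint convention
    cum_weights /= weights.sum()
    return np.interp(quantiles, cum_weights, values)  # quantiles in [0, 1]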
Example #10
def main(inargs):
    """Run the program."""

    var = inargs.pe_files[0].split('/')[-1].split('_')[0]
    assert var in ['pe', 'wfo']
    var_name = 'precipitation minus evaporation flux' if var == 'pe' else 'water_flux_into_sea_water'

    area_cube = (gio.get_ocean_weights(inargs.area_file)
                 if inargs.area_file else None)
    pe_cube, pe_lats, pe_history = read_data(inargs.pe_files,
                                             var_name,
                                             area_cube,
                                             annual=inargs.annual,
                                             multiply_by_area=inargs.area)
    basin_cube = iris.load_cube(inargs.basin_file, 'region')

    metadata = {
        inargs.pe_files[0]: pe_history[0],
        inargs.basin_file: basin_cube.attributes['history']
    }
    if inargs.data_var == 'cell_area':
        data_cube = iris.load_cube(inargs.data_files[0], 'cell_area')
        assert data_cube.shape == pe_cube.shape[1:]
    elif inargs.data_files:
        data_cube, data_lats, data_history = read_data(
            inargs.data_files,
            inargs.data_var,
            area_cube,
            annual=inargs.annual,
            multiply_by_area=inargs.area)
        assert data_cube.shape == pe_cube.shape
        metadata[inargs.data_files[0]] = data_history[0]
    else:
        data_cube = pe_cube.copy()
        data_var = var_name

    if area_cube:
        area_data = area_cube.data
    else:
        if data_cube.ndim == 3:
            area_data = spatial_weights.area_array(data_cube[0, ::])
        else:
            assert data_cube.ndim == 2
            area_data = spatial_weights.area_array(data_cube)

    region_data = np.zeros([pe_cube.shape[0], 6, 8])
    ntimes = pe_cube.shape[0]
    for tstep in range(ntimes):
        var_data = (data_cube.data if inargs.data_var == 'cell_area'
                    else data_cube[tstep, ::].data)
        region_data[tstep, :] = get_regional_aggregates(
            inargs.agg, var_data, pe_cube[tstep, ::].data, pe_lats,
            basin_cube.data, area_data)

    if inargs.cumsum:
        region_data = np.cumsum(region_data, axis=0)

    pe_region_coord = create_pe_region_coord()
    basin_coord = create_basin_coord()
    time_coord = pe_cube.coord('time')

    if inargs.data_var:
        standard_name = data_cube.standard_name
    elif var == 'pe':
        iris.std_names.STD_NAMES['precipitation_minus_evaporation_flux'] = {
            'canonical_units': pe_cube.units
        }
        standard_name = 'precipitation_minus_evaporation_flux'
    else:
        standard_name = pe_cube.standard_name
    atts = pe_cube.attributes if inargs.data_var == 'cell_area' else data_cube.attributes
    dim_coords_list = [(time_coord, 0), (pe_region_coord, 1), (basin_coord, 2)]
    out_cube = iris.cube.Cube(region_data,
                              standard_name=standard_name,
                              long_name=data_cube.long_name,
                              var_name=data_cube.var_name,
                              units=data_cube.units,
                              attributes=atts,
                              dim_coords_and_dims=dim_coords_list)

    out_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata,
                                                     git_repo=repo_dir)
    iris.save(out_cube, inargs.outfile)
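The optional cumulative-sum step above turns the regional aggregates into running totals along the time axis. A one-line check of the axis convention:

import numpy as np

x = np.array([[1, 2], [3, 4], [5, 6]])  # (time, region)
print(np.cumsum(x, axis=0))             # [[1 2], [4 6], [9 12]]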
Example #11
def main(inargs):
    """Run program"""

    nmodels = get_nmodels(inargs)
    ensemble_ref_cube = ensemble_grid() if nmodels > 1 else None

    var_list = ['wfo', 'so', 'transport']
    exp_list = [
        'historicalGHG', 'historicalMisc', 'historical', 'GHG+AA',
        'hist-GHG+AA', '1pctCO2', 'historical-rcp85'
    ]
    time_constraint = gio.get_time_constraint(inargs.time)

    anomaly_dict = {}
    for combo in itertools.product(var_list, exp_list):
        anomaly_dict[combo] = iris.cube.CubeList([])

    # Get data for the experiments
    volcello_vzsum = gio.get_ocean_weights(inargs.volcello_file)
    masscello_orig = volcello_vzsum * inargs.density

    for exp_files in [
            inargs.ghg_files, inargs.aa_files, inargs.hist_files,
            inargs.pctCO2_files
    ]:
        for model_num, model_files in enumerate(exp_files):
            wfo_file, so_file = model_files
            metadata_dict = {}
            anomaly_dict, metadata_dict = get_anomalies(
                wfo_file, so_file, masscello_orig, time_constraint, model_num,
                ensemble_ref_cube, anomaly_dict, metadata_dict)

    # Calculate the GHG + AA variables
    if inargs.ghg_files and inargs.aa_files:
        for mod_num in range(nmodels):
            for var in var_list:
                data_sum = anomaly_dict[(var, 'historicalGHG')][mod_num] + \
                           anomaly_dict[(var, 'historicalMisc')][mod_num]
                data_diff = anomaly_dict[(var, 'historical')][mod_num] - data_sum
                anomaly_dict[(var, 'GHG+AA')].append(data_sum)
                anomaly_dict[(var, 'hist-GHG+AA')].append(data_diff)

    # Plot individual model data
    nexp = len(inargs.experiments)
    fig = plt.figure(figsize=[11 * nexp, 14])
    gs = gridspec.GridSpec(2, nexp)
    if nmodels > 1:
        for plot_index, exp in enumerate(inargs.experiments):
            for mod_num in range(nmodels):
                plot_uptake_storage(gs[plot_index],
                                    anomaly_dict[('wfo', exp)][mod_num],
                                    anomaly_dict[('so', exp)][mod_num],
                                    linewidth=0.8,
                                    linestyle='--',
                                    decorate=False,
                                    ylim=inargs.ylim_storage)
                plot_transport(gs[plot_index + nexp],
                               anomaly_dict[('transport', exp)][mod_num],
                               linewidth=0.8,
                               linestyle='--',
                               decorate=False,
                               ylim=inargs.ylim_transport)

    # Plot ensemble data
    ensemble_dict = {}
    for combo in itertools.product(var_list, exp_list):
        cube_list = iris.cube.CubeList(filter(None, anomaly_dict[combo]))
        ensemble_dict[combo] = ensemble_mean(cube_list)

    linewidth = None if nmodels == 1 else 5.0
    for plot_index, exp in enumerate(inargs.experiments):
        storage_letter = (panel_labels[plot_index]
                          if inargs.panel_letters else None)
        plot_uptake_storage(gs[plot_index],
                            ensemble_dict[('wfo', exp)],
                            ensemble_dict[('so', exp)],
                            linewidth=linewidth,
                            title=titles[exp],
                            exp_num=plot_index,
                            ylim=inargs.ylim_storage,
                            panel_label=storage_letter,
                            legloc=inargs.legloc_storage)
        transport_letter = (panel_labels[plot_index + nexp]
                            if inargs.panel_letters else None)
        plot_transport(gs[plot_index + nexp],
                       ensemble_dict[('transport', exp)],
                       linewidth=linewidth,
                       exp_num=plot_index,
                       ylim=inargs.ylim_transport,
                       panel_label=transport_letter,
                       legloc=inargs.legloc_transport)

    if not inargs.no_title:
        time_text = get_time_text(inargs.time)
        fig.suptitle('zonally integrated change in fresh water, ' + time_text,
                     fontsize='large')

    dpi = inargs.dpi if inargs.dpi else plt.rcParams['figure.dpi']
    print('dpi =', dpi)
    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=dpi)
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
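The figure above arranges panels on a 2 x nexp grid, with storage panels indexed as gs[plot_index] (top row) and transport panels as gs[plot_index + nexp] (bottom row). A minimal matplotlib sketch of that layout, with an illustrative nexp:

import matplotlib.pyplot as plt
from matplotlib import gridspec

nexp = 3  # illustrative number of experiments
fig = plt.figure(figsize=[11 * nexp, 14])
gs = gridspec.GridSpec(2, nexp)
for i in range(nexp):
    fig.add_subplot(gs[i])          # top row: uptake/storage panel
    fig.add_subplot(gs[i + nexp])   # bottom row: transport panel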