import iris
import matplotlib.pyplot as plt
import numpy

# equalise_attributes comes from iris (its module location varies with
# iris version); timeseries, uconv, gio and grids are project-local
# helper modules assumed importable.

def load_data(infile, basin_cube, basin_name):
    """Load, temporally aggregate and spatially slice the input data."""

    cube = iris.load_cube(infile, 'surface_downward_x_stress')
    cube = timeseries.convert_to_annual(cube)
    experiment = cube.attributes['experiment_id']
    if experiment == 'historicalMisc':
        experiment = 'historicalAA'

    if basin_name != 'globe':
        if basin_cube:
            ndim = cube.ndim
            basin_array = uconv.broadcast_array(basin_cube.data,
                                                [ndim - 2, ndim - 1],
                                                cube.shape)
        else:
            basin_array = uconv.create_basin_array(cube)
        # Keep only points that are currently unmasked and fall within
        # the chosen basin; mask everything else
        cube.data.mask = numpy.where(
            ~cube.data.mask & (basin_array == basins[basin_name]),
            False, True)

    sh_constraint = iris.Constraint(latitude=lambda cell: -30.0 <= cell < 0.0)
    nh_constraint = iris.Constraint(latitude=lambda cell: 0.0 < cell <= 30.0)
    scrit_constraint = iris.Constraint(
        latitude=lambda cell: -17.0 <= cell < -13.0)
    ncrit_constraint = iris.Constraint(
        latitude=lambda cell: 13.0 < cell <= 17.0)

    sh_cube = cube.extract(sh_constraint)
    nh_cube = cube.extract(nh_constraint)
    scrit_cube = cube.extract(scrit_constraint)
    ncrit_cube = cube.extract(ncrit_constraint)

    sh_mean = sh_cube.collapsed(['longitude', 'latitude'], iris.analysis.MEAN)
    nh_mean = nh_cube.collapsed(['longitude', 'latitude'], iris.analysis.MEAN)
    scrit_mean = scrit_cube.collapsed(['longitude', 'latitude'],
                                      iris.analysis.MEAN)
    ncrit_mean = ncrit_cube.collapsed(['longitude', 'latitude'],
                                      iris.analysis.MEAN)

    return sh_mean, nh_mean, scrit_mean, ncrit_mean, experiment
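
# The module-level `basins` dictionary maps basin names to the integer
# codes used in the basin array. The codes below are a plausible sketch,
# not confirmed values, and the filenames in the usage example are
# hypothetical.
basins = {'atlantic': 2, 'pacific': 3, 'indian': 5, 'globe': 100}

# basin_cube = iris.load_cube('basin.nc')
# sh, nh, scrit, ncrit, experiment = load_data('tauuo.nc', basin_cube,
#                                              'pacific')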
Example #2
def main(inargs):
    """Run the program."""

    # Basin data
    hfbasin = inargs.var == 'northward_ocean_heat_transport'
    if inargs.basin_file and not hfbasin:
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
        inargs.basin_file = None

    # Heat transport data
    data_cube = read_data(inargs.infiles, inargs.var, inargs.model, basin_cube)
    orig_standard_name = data_cube.standard_name
    orig_var_name = data_cube.var_name

    history_attribute = get_history_attribute(inargs.infiles, data_cube,
                                              inargs.basin_file, basin_cube)
    data_cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Regrid (if needed)
    if inargs.regrid:
        data_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            data_cube)

    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]

    regular_grid = not aux_coord_names

    if hfbasin:
        assert len(dim_coord_names) == 2
        assert dim_coord_names[0] == 'time'
        y_axis_name = dim_coord_names[1]
    else:
        assert len(dim_coord_names) == 3
        assert dim_coord_names[0] == 'time'
        y_axis_name, x_axis_name = dim_coord_names[1:]
        for aux_coord in aux_coord_names:
            data_cube.remove_coord(aux_coord)

    # Basin array
    if inargs.basin_file and not inargs.regrid:
        ndim = data_cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1],
                                            data_cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif regular_grid and not hfbasin:
        basin_array = uconv.create_basin_array(data_cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_list = ['globe']

    # Calculate output for each basin
    out_cubes = []
    for basin_name in basin_list:
        data_cube_copy = data_cube.copy()
        if basin_name != 'globe':
            data_cube_copy.data.mask = numpy.where(
                ~data_cube_copy.data.mask &
                (basin_array == basins[basin_name]), False, True)

        # Zonal mean
        if hfbasin:
            zonal_cube = data_cube_copy
        else:
            zonal_cube = data_cube_copy.collapsed(x_axis_name,
                                                  iris.analysis.SUM)
            zonal_cube.remove_coord(x_axis_name)

        # Convergence
        zonal_cube = convergence(zonal_cube, y_axis_name)

        # Attributes
        standard_name = 'zonal_sum_%s_convergence_%s' % (orig_standard_name,
                                                         basin_name)
        var_name = '%s_czs_%s' % (orig_var_name, basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
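
# convergence() is defined elsewhere in the source module. A minimal
# sketch of one plausible implementation, assuming convergence is the
# sign-flipped difference of the zonally integrated transport between
# adjacent latitude points (hypothetical reconstruction):
def convergence_sketch(cube, y_axis_name):
    """Hypothetical reconstruction; assumes y_axis_name is the last axis."""
    diffs = -1 * numpy.diff(cube.data, axis=-1)
    out_cube = cube[..., 1:].copy()  # drop one point so shapes match
    out_cube.data = diffs
    return out_cube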
Example #3
def main(inargs):
    """Run the program."""

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        data_cubes = iris.load(inargs.infiles, inargs.var & time_constraint, callback=save_history)
        equalise_attributes(data_cubes)

    climatology_cube = read_climatology(inargs.climatology_file, inargs.var)
    basin_cube = read_optional(inargs.basin_file)
    depth_cube = read_optional(inargs.depth_file) 

    atts = set_attributes(inargs, data_cubes[0], climatology_cube, basin_cube, depth_cube)

    out_cubes = []
    for data_cube in data_cubes:
        standard_name = data_cube.standard_name
        var_name = data_cube.var_name

        if climatology_cube:
            data_cube = data_cube - climatology_cube

        if basin_cube:
            data_cube = uconv.mask_marginal_seas(data_cube, basin_cube)

        data_cube, coord_names, regrid_status = regrid_cube(data_cube)
        if regrid_status:
            depth_cube = None
            # FIXME: Could delete depth file from atts

        assert coord_names[-3:] == ['depth', 'latitude', 'longitude']
        depth_axis = data_cube.coord('depth')
        assert depth_axis.units in ['m', 'dbar'], "Unrecognised depth axis units"

        out_list = iris.cube.CubeList([])
        start_indexes, step = uconv.get_chunks(data_cube.shape, coord_names, chunk=inargs.chunk)
        for index in start_indexes:

            cube_slice = data_cube[index:index+step, 0:1000, ...]

            # Vertical
            
            for layer in vertical_layers.keys():
                vertical_mean = calc_vertical_mean(cube_slice, layer, coord_names, atts, standard_name, var_name)
                out_list.append(vertical_mean)
                if layer in ['surface', 'argo']:
                    for basin in basins.keys():

                        if basin_cube and not regrid_status:
                            basin_array = basin_cube.data
                        else: 
                            basin_array = uconv.create_basin_array(vertical_mean)

                        out_list.append(calc_zonal_vertical_mean(vertical_mean.copy(), depth_cube, basin_array, basin, layer, atts, standard_name, var_name))

            # Zonal

            if basin_cube and not regrid_status:
                ndim = cube_slice.ndim
                basin_array = uconv.broadcast_array(basin_cube.data, [ndim - 2, ndim - 1], cube_slice.shape) 
            else: 
                basin_array = uconv.create_basin_array(cube_slice)

            for basin in basins.keys():
                out_list.append(calc_zonal_mean(cube_slice.copy(), basin_array, basin, atts, standard_name, var_name))

        out_cubes.append(out_list.concatenate())
        del out_list
        del cube_slice
        del basin_array

    cube_list = []
    # one variable per vertical layer, plus a zonal vertical mean for two
    # layers in each basin, plus a zonal mean for each basin
    nvars = len(vertical_layers) + 2 * len(basins) + len(basins)
    for var_index in range(0, nvars):
        temp_list = []
        for infile_index in range(0, len(inargs.infiles)):
            temp_list.append(out_cubes[infile_index][var_index])
        
        temp_list = iris.cube.CubeList(temp_list)     
        cube_list.append(temp_list.concatenate_cube())
    
    cube_list = iris.cube.CubeList(cube_list)

    assert cube_list[0].data.dtype == numpy.float32
    if 'time' not in coord_names:
        iris.FUTURE.netcdf_no_unlimited = True

    iris.save(cube_list, inargs.outfile)
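
# read_optional() is a small helper defined elsewhere; a minimal sketch,
# assuming it loads a cube when a file is supplied and returns None
# otherwise (hypothetical reconstruction):
def read_optional_sketch(optional_file):
    """Load an optional input file if one was given."""
    return iris.load_cube(optional_file) if optional_file else None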
Example #4
def main(inargs):
    """Run the program."""

    region = inargs.region.replace('-', '_')

    # Basin data
    hfbasin = inargs.var == 'northward_ocean_heat_transport'
    if not hfbasin:
        assert inargs.basin_file, "Must provide a basin file for hfy data"
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
        inargs.basin_file = None

    # Heat transport data
    data_cube = read_data(inargs.infiles, inargs.var, basin_cube, region)
    orig_standard_name = data_cube.standard_name
    orig_var_name = data_cube.var_name

    history_attribute = get_history_attribute(inargs.infiles, data_cube,
                                              inargs.basin_file, basin_cube)
    data_cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Regrid (if needed)
    if inargs.regrid:
        data_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            data_cube)

    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]

    regular_grid = not aux_coord_names

    if hfbasin:
        assert len(dim_coord_names) == 2
        assert dim_coord_names[0] == 'time'
        y_axis_name = dim_coord_names[1]
    else:
        assert len(dim_coord_names) == 3
        assert dim_coord_names[0] == 'time'
        y_axis_name, x_axis_name = dim_coord_names[1:]
        for aux_coord in aux_coord_names:
            data_cube.remove_coord(aux_coord)

    # Basin array
    if inargs.basin_file and not inargs.regrid:
        ndim = data_cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1],
                                            data_cube.shape)
    elif regular_grid and not hfbasin:
        basin_array = uconv.create_basin_array(data_cube)

    # Calculate the zonal sum (if required)
    data_cube_copy = data_cube.copy()

    if hfbasin:
        zonal_cube = data_cube_copy
    else:
        zonal_cube = data_cube_copy.collapsed(x_axis_name, iris.analysis.SUM)
        zonal_cube.remove_coord(x_axis_name)

    # Attributes
    try:
        zonal_cube.remove_coord('region')
    except iris.exceptions.CoordinateNotFoundError:
        pass

    standard_name = 'northward_ocean_heat_transport'
    var_name = 'hfbasin'

    zonal_cube.standard_name = standard_name
    zonal_cube.long_name = standard_name.replace('_', ' ')
    zonal_cube.var_name = var_name

    iris.save(zonal_cube, inargs.outfile)
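
# A hypothetical command-line invocation, with argument names inferred
# from the inargs attributes used above (not a confirmed interface):
#
#   python calc_hfbasin.py hfy_*.nc northward_ocean_heat_transport out.nc \
#       --basin_file basin.nc --region atlantic-arctic-ocean --regrid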
Example #5
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var),
                     callback=save_history)

    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube.attributes = atts
    orig_long_name = cube.long_name
    if cube.standard_name is None:
        orig_standard_name = orig_long_name.replace(' ', '_')
    else:
        orig_standard_name = cube.standard_name
    orig_var_name = cube.var_name

    # Temporal smoothing
    cube = timeseries.convert_to_annual(cube, full_months=True)

    # Mask marginal seas
    if inargs.basin:
        if '.nc' in inargs.basin:
            basin_cube = iris.load_cube(inargs.basin)
            cube = uconv.mask_marginal_seas(cube, basin_cube)
        else:
            basin_cube = 'create'
    else:
        basin_cube = None

    # Regrid (if needed)
    if inargs.regrid:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

    # Change units (remove m-2)
    if inargs.area:
        cube = multiply_by_area(cube, inargs.area)
        cube.attributes = atts
        cube.long_name = orig_long_name
        cube.standard_name = orig_standard_name
        cube.var_name = orig_var_name

    # History
    history_attribute = get_history_attribute(inargs.infiles[0], history[0])
    cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Calculate output for each basin
    if isinstance(basin_cube, iris.cube.Cube):
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif isinstance(basin_cube, str):
        basin_array = uconv.create_basin_array(cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_array = None
        basin_list = ['globe']

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert len(dim_coord_names) == 3
    assert dim_coord_names[0] == 'time'
    x_axis_name = dim_coord_names[2]

    for aux_coord in aux_coord_names:
        cube.remove_coord(aux_coord)

    out_cubes = []
    for basin_name in basin_list:
        data_cube = cube.copy()
        if basin_name != 'globe':
            data_cube.data.mask = numpy.where(
                ~data_cube.data.mask &
                (basin_array == basins[basin_name]), False, True)

        # Zonal statistic
        zonal_cube = data_cube.collapsed(
            x_axis_name, aggregation_functions[inargs.zonal_stat])
        zonal_cube.remove_coord(x_axis_name)

        # Attributes
        standard_name = 'zonal_%s_%s_%s' % (inargs.zonal_stat,
                                            orig_standard_name, basin_name)
        var_name = '%s_%s_%s' % (orig_var_name,
                                 aggregation_abbreviations[inargs.zonal_stat],
                                 basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
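
# The zonal aggregation lookup tables used above are module-level
# constants defined elsewhere; a plausible sketch (assumed, not
# confirmed):
aggregation_functions = {'mean': iris.analysis.MEAN, 'sum': iris.analysis.SUM}
aggregation_abbreviations = {'mean': 'zm', 'sum': 'zs'}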
Example #6
def main(inargs):
    """Run the program."""

    file_dict, tas_dict, area_dict, basin_dict = read_data(inargs)

    metadata_dict = {}
    climatology_dict = {}
    time_trend_dict = {}
    tas_scaled_trend_dict = {}
    branch_dict = {}
    for experiment in [
            'historical', 'historicalGHG', 'historicalAA', 'historicalnoAA',
            'piControl'
    ]:
        filenames = file_dict[experiment]
        if not filenames:
            climatology_dict[experiment] = None
            time_trend_dict[experiment] = None
            tas_scaled_trend_dict[experiment] = None
        else:
            print(experiment)
            try:
                time_constraint = gio.get_time_constraint(inargs.total_time)
            except (AttributeError, TypeError):
                time_constraint = iris.Constraint()

            with iris.FUTURE.context(cell_datetime_objects=True):
                cube = iris.load(filenames, gio.check_iris_var(inargs.var))

                # Merge cubes
                metadata_dict[filenames[0]] = cube[0].attributes['history']
                equalise_attributes(cube)
                iris.util.unify_time_units(cube)
                cube = cube.concatenate_cube()
                cube = gio.check_time_units(cube)

                # Time extraction and branch time info
                coord_names = [coord.name() for coord in cube.dim_coords]
                assert coord_names[0] == 'time'

                if 'historical' in experiment:
                    original_time_length = cube.shape[0]
                    cube = cube.extract(time_constraint)
                    new_time_length = cube.shape[0]
                    branch_time_index_offset = original_time_length - new_time_length

                    branch_time = cube.attributes['branch_time']
                    time_length = cube.shape[0]
                    branch_dict[experiment] = (branch_time, time_length,
                                               branch_time_index_offset)

                elif experiment == 'piControl':
                    branch_time, time_length, branch_time_index_offset = branch_dict[
                        'historical']
                    start_index, error = uconv.find_nearest(
                        cube.coord('time').points,
                        float(branch_time) + 15.5,
                        index=True)
                    if abs(error) > 15:
                        print(
                            "WARNING: Large error of %f in locating branch time"
                            % (error))
                        start_index = 0
                    start_index = start_index + branch_time_index_offset
                    cube = cube[start_index:start_index + time_length, ::]

                # Temporal smoothing
                cube = timeseries.convert_to_annual(cube, full_months=True)

                # Mask marginal seas
                if basin_dict[experiment]:
                    basin_cube = iris.load_cube(basin_dict[experiment])
                    cube = uconv.mask_marginal_seas(cube, basin_cube)

                # Regrid and select basin
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube)
                if inargs.basin != 'globe':
                    if basin_dict[experiment] and not regrid_status:
                        ndim = cube.ndim
                        basin_array = uconv.broadcast_array(
                            basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
                    else:
                        basin_array = uconv.create_basin_array(cube)
                    cube.data.mask = numpy.where(
                        ~cube.data.mask &
                        (basin_array == basins[inargs.basin]), False, True)

                # Scale
                cube, units = scale_data(cube,
                                         inargs.var,
                                         reverse_sign=inargs.reverse_sign)

                # Zonal statistic
                if inargs.area_adjust:
                    if regrid_status:
                        area_dict[experiment] = None
                    cube, units, metadata_dict = area_ajustment(
                        cube, area_dict[experiment], metadata_dict)
                    zonal_cube = cube.collapsed('longitude', iris.analysis.SUM)
                    aggregation = 'Zonally integrated'
                else:
                    zonal_cube = cube.collapsed('longitude',
                                                iris.analysis.MEAN)
                    aggregation = 'Zonal mean'
                zonal_cube.remove_coord('longitude')

                # Climatology and trends
                climatology_dict[experiment] = calculate_climatology(
                    zonal_cube, inargs.climatology_time, experiment)
                time_trend_dict[experiment] = get_trend_cube(zonal_cube)
                if tas_dict[experiment]:
                    tas_cube = iris.load_cube(
                        tas_dict[experiment],
                        'air_temperature' & time_constraint)
                    scale_factor = get_scale_factor(tas_cube)
                    print(experiment, 'warming:', scale_factor)
                    tas_scaled_trend_dict[experiment] = time_trend_dict[
                        experiment] * (1. / abs(scale_factor))
                    metadata_dict[tas_dict[experiment]
                                  [0]] = tas_cube.attributes['history']
                else:
                    tas_scaled_trend_dict[experiment] = None

    # Create the plots

    tas_scaled_trend_flag = (
        tas_scaled_trend_dict['historicalGHG'] is not None and
        tas_scaled_trend_dict['historicalAA'] is not None)

    fig = plt.figure(figsize=[15, 20])
    gs = set_plot_grid(tas_trend=tas_scaled_trend_flag)

    ax_main = plt.subplot(gs[0])
    plt.sca(ax_main)
    plot_climatology(climatology_dict, inargs.var, units, inargs.legloc,
                     aggregation)
    plt.title('%s (%s), %s' % (inargs.model, inargs.run, inargs.basin))

    ax_diff = plt.subplot(gs[1])
    plt.sca(ax_diff)
    plot_difference(climatology_dict)

    ax_time_trend = plt.subplot(gs[2])
    plt.sca(ax_time_trend)
    plot_trend(time_trend_dict, units)

    if tas_scaled_trend_flag:
        ax_tas_trend = plt.subplot(gs[3])
        plt.sca(ax_tas_trend)
        plot_trend(tas_scaled_trend_dict, units, scaled=True)

    plt.xlabel('latitude')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
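
# save_history() is passed as an iris load callback in the examples
# above. A minimal sketch, assuming it stashes each input file's history
# attribute in a module-level list (hypothetical reconstruction):
history = []

def save_history_sketch(cube, field, filename):
    """Iris load callback: record the input file's history attribute."""
    history.append(cube.attributes['history'])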