Example #1
def read_data(infile_list, var, model, basin_cube):
    """Read the data files.

    The CSIRO-Mk3-6-0 model seems to be formatted incorrectly
      and you can't select the "global_ocean" by name

    """
    cube = iris.load(infile_list,
                     gio.check_iris_var(var),
                     callback=save_history)
    # Capture the attributes before equalise_attributes removes any that
    # differ between the input files.
    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    cube.attributes = atts
    # `save_history` populates the module-level `history` list via the
    # iris load callback (both are defined elsewhere in the source module).
    cube.attributes['history'] = history[0]

    if var == 'northward_ocean_heat_transport':
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, 2, :]
        else:
            cube = cube.extract(iris.Constraint(region='global_ocean'))

    cube = timeseries.convert_to_annual(cube, full_months=True)

    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)

    return cube
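A minimal usage sketch (not part of the source): the file path and basin file below are hypothetical, and `gio`, `uconv` and `timeseries` are helper modules from the example's own repository.

import iris

basin_cube = iris.load_cube('basin.nc')  # hypothetical basin file
cube = read_data(['hfbasin_185001-200512.nc'],  # hypothetical input file
                 'northward_ocean_heat_transport',
                 'CSIRO-Mk3-6-0',
                 basin_cube)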
Example #2
def read_data(infile_list, var, basin_cube, region):
    """Read the data files.

    The CSIRO-Mk3-6-0 model seems to be formatted incorrectly
      and you can't select the regioins by name.

    """

    cube, history = gio.combine_files(infile_list, var)

    # `atts` was never defined in this snippet; assume it holds the combined
    # cube's attributes, as in Example #1.
    atts = cube.attributes
    cube.attributes['history'] = history[0]
    model = atts['model_id']

    if var == 'northward_ocean_heat_transport':
        region_index = {'atlantic_arctic_ocean': 0,
                        'indian_pacific_ocean': 1,
                        'global_ocean': 2}
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, region_index[region], :]
        else:
            cube = cube.extract(iris.Constraint(region=region))

    cube = timeseries.convert_to_annual(cube, full_months=True)
    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)
        if region != 'global_ocean':
            basin_numbers = {'atlantic_arctic_ocean': [2, 4],
                             'indian_pacific_ocean': [3, 5]}
            cube = uconv.mask_unwanted_seas(cube, basin_cube, basin_numbers[region])
            
    return cube
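Usage mirrors Example #1, with the region passed explicitly; the paths are again hypothetical.

basin_cube = iris.load_cube('basin.nc')  # hypothetical basin file
cube = read_data(['hfbasin_185001-200512.nc'],  # hypothetical input file
                 'northward_ocean_heat_transport',
                 basin_cube,
                 'atlantic_arctic_ocean')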
Example #3
def main(inargs):
    """Run the program."""

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        data_cubes = iris.load(inargs.infiles, inargs.var & time_constraint, callback=save_history)
        equalise_attributes(data_cubes)

    climatology_cube = read_climatology(inargs.climatology_file, inargs.var)
    basin_cube = read_optional(inargs.basin_file)
    depth_cube = read_optional(inargs.depth_file) 

    atts = set_attributes(inargs, data_cubes[0], climatology_cube, basin_cube, depth_cube)

    out_cubes = []
    for data_cube in data_cubes:
        standard_name = data_cube.standard_name
        var_name = data_cube.var_name

        if climatology_cube:
            data_cube = data_cube - climatology_cube

        if basin_cube:
            data_cube = uconv.mask_marginal_seas(data_cube, basin_cube)

        data_cube, coord_names, regrid_status = regrid_cube(data_cube)
        if regrid_status:
            depth_cube = None
            # FIXME: Could delete depth file from atts

        assert coord_names[-3:] == ['depth', 'latitude', 'longitude']
        depth_axis = data_cube.coord('depth')
        assert depth_axis.units in ['m', 'dbar'], "Unrecognised depth axis units"

        out_list = iris.cube.CubeList([])
        start_indexes, step = uconv.get_chunks(data_cube.shape, coord_names, chunk=inargs.chunk)
        for index in start_indexes:

            # Chunk along the time axis; the 0:1000 depth slice simply takes
            # all levels (assumes fewer than 1000 of them).
            cube_slice = data_cube[index:index+step, 0:1000, ...]

            # Vertical
            
            for layer in vertical_layers:
                vertical_mean = calc_vertical_mean(cube_slice, layer, coord_names, atts, standard_name, var_name)
                out_list.append(vertical_mean)
                if layer in ['surface', 'argo']:
                    for basin in basins:

                        if basin_cube and not regrid_status:
                            basin_array = basin_cube.data
                        else: 
                            basin_array = uconv.create_basin_array(vertical_mean)

                        out_list.append(calc_zonal_vertical_mean(
                            vertical_mean.copy(), depth_cube, basin_array,
                            basin, layer, atts, standard_name, var_name))

            # Zonal

            if basin_cube and not regrid_status:
                ndim = cube_slice.ndim
                basin_array = uconv.broadcast_array(basin_cube.data, [ndim - 2, ndim - 1], cube_slice.shape) 
            else: 
                basin_array = uconv.create_basin_array(cube_slice)

            for basin in basins:
                out_list.append(calc_zonal_mean(
                    cube_slice.copy(), basin_array, basin, atts,
                    standard_name, var_name))

        out_cubes.append(out_list.concatenate())
        del out_list
        del cube_slice
        del basin_array

    # Regroup the outputs: collect each variable across all input files,
    # then concatenate it into a single cube.
    cube_list = []
    nvars = len(vertical_layers) + len(basins) + 2 * len(basins)
    for var_index in range(nvars):
        temp_list = []
        for infile_index in range(len(inargs.infiles)):
            temp_list.append(out_cubes[infile_index][var_index])

        temp_list = iris.cube.CubeList(temp_list)
        cube_list.append(temp_list.concatenate_cube())
    
    cube_list = iris.cube.CubeList(cube_list)

    assert cube_list[0].data.dtype == numpy.float32
    if 'time' not in coord_names:
        iris.FUTURE.netcdf_no_unlimited = True

    iris.save(cube_list, inargs.outfile)
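A hedged invocation sketch: `main` expects an argparse-style namespace, and the attribute names below are exactly the ones the function body reads (`infiles`, `var`, `time`, `climatology_file`, `basin_file`, `depth_file`, `chunk`, `outfile`); every value is hypothetical.

import argparse

inargs = argparse.Namespace(
    infiles=['thetao_part1.nc', 'thetao_part2.nc'],  # hypothetical paths
    var='sea_water_potential_temperature',           # hypothetical variable
    time=['1861-01-01', '2005-12-31'],               # hypothetical period
    climatology_file='thetao_climatology.nc',
    basin_file='basin.nc',
    depth_file='depth.nc',
    chunk=True,
    outfile='thetao_means.nc')
main(inargs)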
Example #4
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var),
                     callback=save_history)

    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube.attributes = atts
    orig_long_name = cube.long_name
    if cube.standard_name is None:
        orig_standard_name = orig_long_name.replace(' ', '_')
    else:
        orig_standard_name = cube.standard_name
    orig_var_name = cube.var_name

    # Temporal smoothing
    cube = timeseries.convert_to_annual(cube, full_months=True)

    # Mask marginal seas
    if inargs.basin:
        if '.nc' in inargs.basin:
            # The snippet loaded `inargs.basin_file` here, which is set nowhere
            # else; `inargs.basin` is assumed to be the intended path.
            basin_cube = iris.load_cube(inargs.basin)
            cube = uconv.mask_marginal_seas(cube, basin_cube)
        else:
            basin_cube = 'create'
    else:
        basin_cube = None

    # Regrid (if needed)
    if inargs.regrid:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

    # Change units (remove m-2)
    if inargs.area:
        cube = multiply_by_area(cube, inargs.area)
        cube.attributes = atts
        cube.long_name = orig_long_name
        cube.standard_name = orig_standard_name
        cube.var_name = orig_var_name

    # History
    history_attribute = get_history_attribute(inargs.infiles[0], history[0])
    cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Calculate output for each basin
    if isinstance(basin_cube, iris.cube.Cube):
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif isinstance(basin_cube, str):
        basin_array = uconv.create_basin_array(cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_array = None
        basin_list = ['globe']

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert len(dim_coord_names) == 3
    assert dim_coord_names[0] == 'time'
    x_axis_name = dim_coord_names[2]

    for aux_coord in aux_coord_names:
        cube.remove_coord(aux_coord)

    out_cubes = []
    for basin_name in basin_list:
        data_cube = cube.copy()
        if basin_name != 'globe':
            data_cube.data.mask = numpy.where(
                (data_cube.data.mask == False) &
                (basin_array == basins[basin_name]), False, True)

        # Zonal statistic
        zonal_cube = data_cube.collapsed(
            x_axis_name, aggregation_functions[inargs.zonal_stat])
        zonal_cube.remove_coord(x_axis_name)

        # Attributes
        standard_name = 'zonal_%s_%s_%s' % (inargs.zonal_stat,
                                            orig_standard_name, basin_name)
        var_name = '%s_%s_%s' % (orig_var_name,
                                 aggregation_abbreviations[inargs.zonal_stat],
                                 basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
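The same namespace pattern works here; the attribute names are taken from the body above (`infiles`, `var`, `basin`, `regrid`, `area`, `zonal_stat`, `outfile`), and all values are made up.

inargs = argparse.Namespace(
    infiles=['hfds_part1.nc', 'hfds_part2.nc'],  # hypothetical paths
    var='surface_downward_heat_flux_in_sea_water',
    basin='basin.nc',      # a .nc path triggers the marginal-seas mask
    regrid=True,
    area='areacello.nc',   # hypothetical cell-area file
    zonal_stat='mean',
    outfile='hfds_zonal.nc')
main(inargs)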
Example #5
def main(inargs):
    """Run the program."""

    file_dict, tas_dict, area_dict, basin_dict = read_data(inargs)

    metadata_dict = {}
    climatology_dict = {}
    time_trend_dict = {}
    tas_scaled_trend_dict = {}
    branch_dict = {}
    for experiment in [
            'historical', 'historicalGHG', 'historicalAA', 'historicalnoAA',
            'piControl'
    ]:
        filenames = file_dict[experiment]
        if not filenames:
            climatology_dict[experiment] = None
            time_trend_dict[experiment] = None
            tas_scaled_trend_dict[experiment] = None
        else:
            print(experiment)
            try:
                time_constraint = gio.get_time_constraint(inargs.total_time)
            except (AttributeError, TypeError):
                time_constraint = iris.Constraint()

            with iris.FUTURE.context(cell_datetime_objects=True):
                cube = iris.load(filenames, gio.check_iris_var(inargs.var))

                # Merge cubes
                metadata_dict[filenames[0]] = cube[0].attributes['history']
                equalise_attributes(cube)
                iris.util.unify_time_units(cube)
                cube = cube.concatenate_cube()
                cube = gio.check_time_units(cube)

                # Time extraction and branch time info
                coord_names = [coord.name() for coord in cube.dim_coords]
                assert coord_names[0] == 'time'

                if 'historical' in experiment:
                    original_time_length = cube.shape[0]
                    cube = cube.extract(time_constraint)
                    new_time_length = cube.shape[0]
                    branch_time_index_offset = original_time_length - new_time_length

                    branch_time = cube.attributes['branch_time']
                    time_length = cube.shape[0]
                    branch_dict[experiment] = (branch_time, time_length,
                                               branch_time_index_offset)

                elif experiment == 'piControl':
                    branch_time, time_length, branch_time_index_offset = branch_dict[
                        'historical']
                    # Locate the piControl time point nearest the historical
                    # branch time, offset by 15.5 (presumably mid-month, with
                    # time units of days).
                    start_index, error = uconv.find_nearest(
                        cube.coord('time').points,
                        float(branch_time) + 15.5,
                        index=True)
                    if abs(error) > 15:
                        print(
                            "WARNING: Large error of %f in locating branch time"
                            % (error))
                        start_index = 0
                    start_index = start_index + branch_time_index_offset
                    cube = cube[start_index:start_index + time_length, ::]

                # Temporal smoothing
                cube = timeseries.convert_to_annual(cube, full_months=True)

                # Mask marginal seas
                if basin_dict[experiment]:
                    basin_cube = iris.load_cube(basin_dict[experiment])
                    cube = uconv.mask_marginal_seas(cube, basin_cube)

                # Regrid and select basin
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube)
                if inargs.basin != 'globe':
                    if basin_dict[experiment] and not regrid_status:
                        ndim = cube.ndim
                        basin_array = uconv.broadcast_array(
                            basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
                    else:
                        basin_array = uconv.create_basin_array(cube)
                    cube.data.mask = numpy.where(
                        (cube.data.mask == False) &
                        (basin_array == basins[inargs.basin]), False, True)

                # Scale
                cube, units = scale_data(cube,
                                         inargs.var,
                                         reverse_sign=inargs.reverse_sign)

                # Zonal statistic
                if inargs.area_adjust:
                    if regrid_status:
                        area_dict[experiment] = None
                    cube, units, metadata_dict = area_ajustment(
                        cube, area_dict[experiment], metadata_dict)
                    zonal_cube = cube.collapsed('longitude', iris.analysis.SUM)
                    aggregation = 'Zonally integrated'
                else:
                    zonal_cube = cube.collapsed('longitude',
                                                iris.analysis.MEAN)
                    aggregation = 'Zonal mean'
                zonal_cube.remove_coord('longitude')

                # Climatology and trends
                climatology_dict[experiment] = calculate_climatology(
                    zonal_cube, inargs.climatology_time, experiment)
                time_trend_dict[experiment] = get_trend_cube(zonal_cube)
                if tas_dict[experiment]:
                    tas_cube = iris.load_cube(
                        tas_dict[experiment],
                        'air_temperature' & time_constraint)
                    scale_factor = get_scale_factor(tas_cube)
                    print(experiment, 'warming:', scale_factor)
                    tas_scaled_trend_dict[experiment] = time_trend_dict[
                        experiment] * (1. / abs(scale_factor))
                    metadata_dict[tas_dict[experiment]
                                  [0]] = tas_cube.attributes['history']
                else:
                    tas_scaled_trend_dict[experiment] = None

    # Create the plots

    tas_scaled_trend_flag = tas_scaled_trend_dict[
        'historicalGHG'] and tas_scaled_trend_dict['historicalAA']

    fig = plt.figure(figsize=[15, 20])
    gs = set_plot_grid(tas_trend=tas_scaled_trend_flag)

    ax_main = plt.subplot(gs[0])
    plt.sca(ax_main)
    plot_climatology(climatology_dict, inargs.var, units, inargs.legloc,
                     aggregation)
    plt.title('%s (%s), %s' % (inargs.model, inargs.run, inargs.basin))

    ax_diff = plt.subplot(gs[1])
    plt.sca(ax_diff)
    plot_difference(climatology_dict)

    ax_time_trend = plt.subplot(gs[2])
    plt.sca(ax_time_trend)
    plot_trend(time_trend_dict, units)

    if tas_scaled_trend_flag:
        ax_tas_trend = plt.subplot(gs[3])
        plt.sca(ax_tas_trend)
        plot_trend(tas_scaled_trend_dict, units, scaled=True)

    plt.xlabel('latitude')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
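A final hypothetical invocation: only the attributes visible in `main` are listed, and the module's own `read_data(inargs)` presumably reads further attributes (per-experiment file lists) that are not shown in this snippet.

inargs = argparse.Namespace(
    var='northward_ocean_heat_transport',  # hypothetical variable
    model='CSIRO-Mk3-6-0', run='r1i1p1', basin='globe',
    total_time=['1861-01-01', '2005-12-31'],
    climatology_time=['1986-01-01', '2005-12-31'],
    reverse_sign=False, area_adjust=False,
    legloc=2,
    outfile='zonal_trends.png')
main(inargs)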