def main(inargs):
    """Run the program.

    Read temperature data (optionally as an anomaly relative to a
    climatology), calculate the requested metric (ocean heat content or
    integrated temperature) for each region, and save the results, one
    time-concatenated cube per region, to inargs.outfile.
    """

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    volume_cube, climatology_cube = read_optional_data(inargs, level_subset)
    temperature_cubelist = iris.load(inargs.temperature_files,
                                     inargs.temperature_var & level_subset,
                                     callback=save_history)
    equalise_attributes(temperature_cubelist)

    # Fail fast on an unsupported metric; previously an unknown value left
    # `units` unbound and caused a NameError further down.
    if inargs.metric == 'ohc':
        units = '10^%d J' % (inargs.scaling)
    elif inargs.metric == 'inttemp':
        units = '10^%d K m3' % (inargs.scaling)
    else:
        raise ValueError('Invalid metric: %s' % (inargs.metric))

    atts = set_attributes(inargs, temperature_cubelist[0], volume_cube,
                          climatology_cube)

    out_cubes = []
    for temperature_cube in temperature_cubelist:

        # Convert to an anomaly if a climatology was supplied.
        if climatology_cube:
            temperature_cube = temperature_cube - climatology_cube

        if not volume_cube:
            volume_cube = create_volume_cube(temperature_cube)
        elif not isinstance(volume_cube.data, numpy.ma.core.MaskedArray):
            # Volume data is unmasked: derive a mask from the first time
            # step of the temperature data.
            assert temperature_cube.dim_coords[0].name() == 'time'
            volume_cube = apply_volume_mask(volume_cube,
                                            temperature_cube[0, ...])

        metric_dict = calc_metrics(inargs,
                                   temperature_cube,
                                   volume_cube,
                                   ref_region=inargs.ref_region)
        metric_cubelist = create_metric_cubelist(
            inargs.metric, metric_dict, units, atts,
            temperature_cube.coord('time'))

        out_cubes.append(metric_cubelist)

    # Regroup from per-file cubelists to per-region cubelists, then
    # concatenate each region's cubes along the time axis.
    cube_list = []
    for region_index in range(len(regions.keys())):
        temp_list = iris.cube.CubeList(
            [out_cubes[infile_index][region_index]
             for infile_index in range(len(inargs.temperature_files))])
        cube_list.append(temp_list.concatenate_cube())

    cube_list = iris.cube.CubeList(cube_list)
    assert cube_list[0].data.dtype == numpy.float32
    iris.save(cube_list, inargs.outfile)
# ---------- Esempio n. 2 ----------
def get_constraints(inargs):
    """Return the time constraint, depth constraint and land mask cube.

    The land mask cube is None when no land mask file was supplied.
    """

    time_constraint = gio.get_time_constraint(inargs.time_bounds)
    depth_constraint = gio.iris_vertical_constraint(0.0, inargs.max_depth)

    sftlf_cube = (iris.load_cube(inargs.land_mask, 'land_area_fraction')
                  if inargs.land_mask else None)

    return time_constraint, depth_constraint, sftlf_cube
# ---------- Esempio n. 3 ----------
def main(inargs):
    """Run the program.

    Calculate ocean heat content (OHC) from temperature data, either on
    the native (possibly curvilinear) grid using a supplied volume file,
    or after regridding to a rectilinear grid.
    """

    temperature_cube, history = gio.combine_files(inargs.temperature_files,
                                                  inargs.var)

    temperature_atts = temperature_cube.attributes
    # Provenance: record the history string of the first input file only.
    metadata_dict = {inargs.temperature_files[0]: history[0]}

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    temperature_cube = temperature_cube.extract(level_subset)

    if inargs.annual:
        temperature_cube = timeseries.convert_to_annual(temperature_cube,
                                                        chunk=inargs.chunk)

    if inargs.regrid:
        # Regrid to rectilinear, then derive cell volumes from the
        # regridded cube's own coordinates.
        area_cube = read_area_file(inargs.regrid)
        temperature_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            temperature_cube, weights=area_cube.data)
        volume_data = spatial_weights.volume_array(temperature_cube)
        grid = 'y72x144'
    else:
        # On the native grid the cell volumes must come from a data file.
        # NOTE: get_volume returns an updated metadata_dict (adds the
        # volume file's history).
        assert inargs.volume_file, "Must provide volume file if not regridding data"
        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                temperature_cube, level_subset,
                                                metadata_dict)
        coord_names = [coord.name() for coord in temperature_cube.dim_coords]
        grid = None

    # NOTE(review): `grid` is assigned but not used below — confirm
    # whether it should be passed to add_metadata.
    ohc_cube = ohc(temperature_cube,
                   volume_data,
                   inargs.density,
                   inargs.specific_heat,
                   coord_names,
                   vertical_integral=inargs.vertical_integral,
                   chunk=inargs.chunk)

    ohc_cube = add_metadata(temperature_cube, temperature_atts, ohc_cube,
                            inargs)
    log = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    ohc_cube.attributes['history'] = log

    iris.save(ohc_cube, inargs.outfile)
# ---------- Esempio n. 4 ----------
def main(inargs):
    """Run the program.

    For each input temperature file, calculate the vertically integrated
    ocean heat content and write it to its own output file.
    """

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    for tfile in inargs.temperature_files:
        # Read the temperature data for the requested depth range.
        tcube = iris.load_cube(tfile,
                               inargs.temperature_var & level_subset)
        tcube = gio.check_time_units(tcube)
        metadata_dict = {tfile: tcube.attributes['history']}
        temperature_atts = tcube.attributes

        if inargs.annual:
            tcube = timeseries.convert_to_annual(tcube, chunk=inargs.chunk)

        # The OHC calculation assumes (time, depth, ...) dimension order.
        coord_names = [coord.name() for coord in tcube.dim_coords]
        assert coord_names[0] == 'time'
        assert coord_names[1] == 'depth'

        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                inargs.area_file,
                                                tcube,
                                                metadata_dict)

        ohc_cube = calc_ohc_vertical_integral(tcube,
                                              volume_data,
                                              inargs.density,
                                              inargs.specific_heat,
                                              coord_names,
                                              chunk=inargs.chunk)
        ohc_cube = add_metadata(tcube, temperature_atts, ohc_cube,
                                metadata_dict, inargs)
        ohc_file = get_outfile_name(tfile, annual=inargs.annual)

        iris.save(ohc_cube, ohc_file)
        print(ohc_file)
# ---------- Esempio n. 5 ----------
def calc_vertical_mean(cube, layer, coord_names, atts,
                       original_standard_name, original_var_name):
    """Calculate the vertical mean over a given depth range.

    Args:
      cube: input iris cube with a 'depth' coordinate
      layer (str): key into the module-level vertical_layers dict,
        giving the (min_depth, max_depth) bounds
      coord_names (list): dimension coordinate names of cube
      atts (dict): attributes for the output cube
      original_standard_name (str): standard_name of the input variable
      original_var_name (str): var_name of the input variable

    Returns:
      iris cube of the depth-weighted vertical mean (float32).

    Raises:
      ValueError: if the depth axis units are not 'm' or 'dbar'.
    """

    min_depth, max_depth = vertical_layers[layer]
    level_subset = gio.iris_vertical_constraint(min_depth, max_depth)
    cube_segment = cube.extract(level_subset)

    # Weights depend on the vertical coordinate type: metres give 1D
    # weights, pressure (dbar) requires latitude-dependent 2D weights.
    depth_axis = cube_segment.coord('depth')
    if depth_axis.units == 'm':
        vertical_weights = spatial_weights.calc_vertical_weights_1D(
            depth_axis, coord_names, cube_segment.shape)
    elif depth_axis.units == 'dbar':
        vertical_weights = spatial_weights.calc_vertical_weights_2D(
            depth_axis, cube_segment.coord('latitude'),
            coord_names, cube_segment.shape)
    else:
        # Previously an unrecognised unit fell through and raised a
        # confusing NameError on vertical_weights below.
        raise ValueError('Unrecognised depth axis units: %s' %
                         (depth_axis.units))

    vertical_mean_cube = cube_segment.collapsed(
        ['depth'], iris.analysis.MEAN,
        weights=vertical_weights.astype(numpy.float32))
    vertical_mean_cube.remove_coord('depth')
    vertical_mean_cube.data = vertical_mean_cube.data.astype(numpy.float32)

    units = str(cube.units)
    standard_name = 'vertical_mean_%s_%s' % (layer, original_standard_name)
    var_name = '%s_vm_%s' % (original_var_name, layer)
    vertical_mean_cube = add_metadata(atts, vertical_mean_cube,
                                      standard_name, var_name, units)

    return vertical_mean_cube
def main(inargs):
    """Run the program.

    Read the input variable over the requested time/depth bounds,
    optionally take annual means, a vertical mean, a spatial aggregate,
    regrid, subtract the tropics and/or apply a land mask, then save
    the result to inargs.outfile.
    """

    # Read data
    # No --time_bounds argument means no time constraint.
    try:
        time_constraint = gio.get_time_constraint(inargs.time_bounds)
    except AttributeError:
        time_constraint = iris.Constraint()

    depth_constraint = gio.iris_vertical_constraint(inargs.min_depth,
                                                    inargs.max_depth)

    with iris.FUTURE.context(cell_datetime_objects=True):
        # Load all input files, harmonise their metadata, then
        # concatenate into a single cube along time.
        cube = iris.load(inargs.infiles,
                         gio.check_iris_var(inargs.var) & depth_constraint)
        history = cube[0].attributes['history']
        atts = cube[0].attributes
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()

        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)

        # Drop any length-one dimensions left over from loading.
        cube = iris.util.squeeze(cube)

        if 'salinity' in inargs.var:
            cube = gio.salinity_unit_check(cube)

        infile_metadata = {inargs.infiles[0]: history}

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    # NOTE(review): vertical_mean is a sibling function not visible here;
    # presumably it averages over the constrained depth range — confirm.
    if inargs.min_depth or inargs.max_depth:
        cube = vertical_mean(cube)

    agg_cube = get_agg_cube(cube, inargs.aggregation)

    if inargs.regrid:
        # Capture sums/means before regridding so conservation of the
        # aggregate can be eyeballed in the printed output below.
        before_sum = agg_cube.data.sum()
        before_mean = agg_cube.data.mean()
        agg_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            agg_cube)
        if regrid_status:
            print('Warning: Data has been regridded')
            print('Before sum:', '%.2E' % Decimal(before_sum))
            print('After sum:', '%.2E' % Decimal(agg_cube.data.sum()))
            print('Before mean:', '%.2E' % Decimal(before_mean))
            print('After mean:', '%.2E' % Decimal(agg_cube.data.mean()))

    if inargs.subtract_tropics:
        agg_cube = subtract_tropics(agg_cube)

    if inargs.land_mask:
        sftlf_cube = iris.load_cube(inargs.land_mask, 'land_area_fraction')
        agg_cube = uconv.apply_land_ocean_mask(agg_cube, sftlf_cube, 'ocean')

    # Replace the inherited history attribute with this run's provenance.
    atts['history'] = gio.write_metadata(file_info=infile_metadata)
    agg_cube.attributes = atts

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(agg_cube, inargs.outfile)
# ---------- Esempio n. 7 ----------
def main(inargs):
    """Run the program.

    For each input file, aggregate the data along the zonal or
    meridional direction (with optional basin selection, area weighting,
    land/ocean masking and custom weights), then either collect all
    results into a single output netCDF file (outfile ends in '.nc') or
    write one file per input (outfile is a directory ending in '/').
    """

    keep_coord = 'latitude' if inargs.direction == 'zonal' else 'longitude'
    collapse_coord = 'longitude' if inargs.direction == 'zonal' else 'latitude'

    depth_constraint = gio.iris_vertical_constraint(None, inargs.max_depth)
    metadata_dict = {}

    if inargs.basin:
        basin_file, basin_name = inargs.basin
        basin_cube = iris.load_cube(basin_file, 'region' & depth_constraint)
        metadata_dict[basin_file] = basin_cube.attributes['history']
    else:
        basin_cube = None

    if inargs.area:
        area_cube = iris.load_cube(inargs.area, 'cell_area' & depth_constraint)
    else:
        area_cube = None

    if inargs.weights:
        weights_cube = iris.load_cube(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    # --realm and --sftlf_file must be given together.
    if inargs.sftlf_file or inargs.realm:
        assert inargs.sftlf_file and inargs.realm, "Must give --realm and --sftlf_file"
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    output_cubelist = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        print(filename)
        cube = iris.load_cube(filename,
                              gio.check_iris_var(inargs.var) & depth_constraint)

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube)

        if basin_cube:
            cube = select_basin(cube, basin_cube, basin_name)

        # Bug fix: this previously read `args.multiply_by_area`, an
        # undefined name, so using the option raised a NameError.
        if inargs.multiply_by_area:
            cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

        if inargs.realm:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

        if inargs.weights:
            assert cube.ndim == 3
            broadcasted_weights = uconv.broadcast_array(weights_cube.data,
                                                        [1, 2], cube.shape)
        else:
            broadcasted_weights = None

        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        if 'latitude' in aux_coord_names:
            # Curvilinear grid: aggregate via the reference cube.
            assert ref_cube
            horiz_aggregate = curvilinear_agg(
                cube, ref_cube, keep_coord,
                aggregation_functions[inargs.aggregation])
            #TODO: Add weights=broadcasted_weights
        else:
            # Rectilinear grid: collapse the unwanted axis directly.
            horiz_aggregate = cube.collapsed(
                collapse_coord, aggregation_functions[inargs.aggregation],
                weights=broadcasted_weights)
            horiz_aggregate.remove_coord(collapse_coord)

        if inargs.flux_to_mag:
            horiz_aggregate = uconv.flux_to_magnitude(horiz_aggregate)

        horiz_aggregate.data = horiz_aggregate.data.astype(numpy.float32)

        if inargs.outfile[-3:] == '.nc':
            # Single-file output: accumulate and concatenate after the loop.
            output_cubelist.append(horiz_aggregate)
        elif inargs.outfile[-1] == '/':
            # Directory output: write one file per input file, with the
            # aggregation encoded in the output file name.
            if inargs.cumsum:
                horiz_aggregate = cumsum(horiz_aggregate)
            infile = filename.split('/')[-1]
            infile = re.sub(cube.var_name + '_', cube.var_name + '-' + inargs.direction + '-' + inargs.aggregation + '_', infile)
            if inargs.annual:
                infile = re.sub('Omon', 'Oyr', infile)
                infile = re.sub('Amon', 'Ayr', infile)

            outfile = inargs.outfile + infile
            metadata_dict[filename] = cube.attributes['history']
            horiz_aggregate.attributes['history'] = cmdprov.new_log(
                infile_history=metadata_dict, git_repo=repo_dir)

            iris.save(horiz_aggregate, outfile)
            print('output:', outfile)
            del horiz_aggregate

    if inargs.outfile[-3:] == '.nc':
        equalise_attributes(output_cubelist)
        iris.util.unify_time_units(output_cubelist)
        output_cubelist = output_cubelist.concatenate_cube()

        if inargs.cumsum:
            output_cubelist = cumsum(output_cubelist)

        # Provenance uses the last file processed in the loop above.
        metadata_dict[filename] = cube.attributes['history']
        output_cubelist.attributes['history'] = cmdprov.new_log(
            infile_history=metadata_dict, git_repo=repo_dir)
        iris.save(output_cubelist, inargs.outfile)
# ---------- Esempio n. 8 ----------
def main(inargs):
    """Run the program.

    Plot excess heat storage against depth for the CMIP5 historical,
    historicalGHG and historicalMisc experiments, optionally adding an
    ensemble aggregate, and save the figure plus a metadata log file.
    """

    # NOTE(review): metadata_dict is never populated before being passed
    # to cmdprov.new_log below, so no input-file history is recorded —
    # confirm whether entries should be added inside the file loop.
    metadata_dict = {}
    ensemble_dict = {
        'historical': iris.cube.CubeList([]),
        'historicalGHG': iris.cube.CubeList([]),
        'historicalMisc': iris.cube.CubeList([])
    }
    depth_constraint = gio.iris_vertical_constraint(inargs.min_depth,
                                                    inargs.max_depth)
    # Target depth grid: one point per metre, offset to cell centres.
    new_grid = make_grid(
        numpy.arange(inargs.min_depth + 0.5, inargs.max_depth, 1))
    experiment_list = []

    for infile in inargs.infiles:
        cube = iris.load_cube(
            infile,
            gio.check_iris_var(inargs.var) & depth_constraint)
        depth_cube = collapse_dims(cube, inargs.dimagg)

        experiment = cube.attributes['experiment_id']
        experiment_list.append(experiment)

        # Number ensemble members by order of appearance per experiment.
        ensemble_number = experiment_list.count(experiment)
        new_aux_coord = iris.coords.AuxCoord(ensemble_number,
                                             long_name='ensemble_member',
                                             units='no_unit')
        depth_cube.add_aux_coord(new_aux_coord)

        # A common depth grid is required for ensemble aggregation.
        if inargs.regrid or inargs.ensagg:
            new_depth_cube = regrid(depth_cube, new_grid)
        else:
            new_depth_cube = depth_cube
        ensemble_dict[experiment].append(new_depth_cube)

    plt.figure(figsize=[10, 30])
    enswidth = 2.0
    # Thin the individual-member lines when an aggregate is also plotted.
    ilinewidth = enswidth * 0.25 if inargs.ensagg else enswidth
    for experiment in ['historical', 'historicalGHG', 'historicalMisc']:
        for num, cube in enumerate(ensemble_dict[experiment]):
            label = experiment if (num == 1) and not inargs.ensagg else False
            plot_data(cube, experiment, label=label, linewidth=ilinewidth)
        if inargs.ensagg:
            ensagg_cube = ensemble_aggregate(ensemble_dict[experiment],
                                             inargs.ensagg)
            plot_data(ensagg_cube, experiment, label=experiment, linewidth=2.0)

    # Depth increases downward on the y-axis.
    plt.gca().invert_yaxis()
    plt.ylim([inargs.max_depth, inargs.min_depth])
    plt.legend()
    if inargs.xbounds:
        xmin, xmax = inargs.xbounds
        plt.xlim([xmin, xmax])
    plt.grid(True)
    plt.xlabel(str(cube.units))
    plt.ylabel('Depth (m)')
    plt.title('Excess heat storage, 1861-2005')

    # Save output. Fall back to matplotlib's configured default dpi via
    # the public plt.rcParams (previously dug out of
    # plt.savefig.__globals__, which relies on pyplot internals).
    dpi = inargs.dpi if inargs.dpi else plt.rcParams['figure.dpi']
    print('dpi =', dpi)
    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=dpi)

    log_text = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    log_file = re.sub('.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)