Example #1
def get_data(filename, var, target_grid=None):
    """Read data.
    
    Positive is defined as down.
    
    """

    if filename:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(filename, gio.check_iris_var(var))
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            if target_grid:
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube, target_grid_cube=target_grid)

            coord_names = [coord.name() for coord in cube.dim_coords]
            if 'depth' in coord_names:
                depth_constraint = iris.Constraint(depth=0)
                cube = cube.extract(depth_constraint)

            if 'up' in cube.standard_name:
                cube.data = cube.data * -1
    else:
        cube = None

    return cube
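
A self-contained sketch of the depth-0 surface extraction used in get_data above, on a synthetic cube (assumes iris and numpy are installed; the coordinate values are made up):

import numpy as np
import iris
from iris.coords import DimCoord
from iris.cube import Cube

# Synthetic (depth, latitude) cube; the data values are placeholders.
depth = DimCoord(np.array([0.0, 10.0, 100.0]), standard_name='depth', units='m')
lat = DimCoord(np.array([-45.0, 0.0, 45.0]), standard_name='latitude',
               units='degrees')
cube = Cube(np.arange(9.0).reshape(3, 3),
            dim_coords_and_dims=[(depth, 0), (lat, 1)])

# Constrain to the surface level, as get_data does when a 'depth'
# coordinate is present.
surface = cube.extract(iris.Constraint(depth=0))
print(surface.shape)  # (3,) -- depth is now a scalar coordinate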
Example #2
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var), callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)

    if inargs.area:
        cube = multiply_by_area(cube) 

    if inargs.sftlf_file and inargs.realm:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

    zonal_aggregate = cube.collapsed('longitude', aggregation_functions[inargs.aggregation])
    zonal_aggregate.remove_coord('longitude')

    zonal_aggregate.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]}) 
    iris.save(zonal_aggregate, inargs.outfile)
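
The load / equalise_attributes / unify_time_units / concatenate_cube sequence above is the usual iris recipe for merging time chunks of one variable; a minimal self-contained sketch (assumes iris >= 3, where equalise_attributes lives in iris.util):

import numpy as np
from iris.coords import DimCoord
from iris.cube import Cube, CubeList
from iris.util import equalise_attributes, unify_time_units

def time_chunk(start):
    """Build a two-step chunk of a synthetic air_temperature series."""
    time = DimCoord(np.arange(start, start + 2, dtype=float),
                    standard_name='time', units='days since 2000-01-01')
    return Cube(np.zeros(2), standard_name='air_temperature', units='K',
                dim_coords_and_dims=[(time, 0)])

cubes = CubeList([time_chunk(0), time_chunk(2)])
equalise_attributes(cubes)  # drop attributes that differ between chunks
unify_time_units(cubes)     # put all time coords on a common epoch/calendar
cube = cubes.concatenate_cube()
print(cube.shape)  # (4,)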
Example #3
def process_cube(cube, inargs, sftlf_cube):
    """Process a data cube"""

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)
        
    if inargs.aggregation:
        cube = get_agg_cube(cube, inargs.aggregation, remove_outliers=inargs.remove_outliers)

    if 'salinity' in inargs.var:
        cube = gio.salinity_unit_check(cube)

    if inargs.regrid:
        before_sum = cube.data.sum()
        before_mean = cube.data.mean()
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)
        if regrid_status:
            print('Warning: Data has been regridded')
            print('Before sum:', '%.2E' % Decimal(before_sum))
            print('After sum:', '%.2E' % Decimal(cube.data.sum()))
            print('Before mean:', '%.2E' % Decimal(before_mean))
            print('After mean:', '%.2E' % Decimal(cube.data.mean()))
            
    if sftlf_cube:
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, 'ocean')

    return cube
Example #4
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)
    if inargs.surface:
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            cube = cube.extract(iris.Constraint(depth=0))
        else:
            print('no depth axis for surface extraction')
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)
    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]},
                          git_repo=repo_dir)

    dim_vals = {}
    dim_vals['latitude'] = get_dim_vals(inargs.lats)
    dim_vals['longitude'] = get_dim_vals(inargs.lons)
    if inargs.levs:
        dim_vals['depth'] = get_dim_vals(inargs.levs)
    else:
        dim_vals['depth'] = get_dim_vals(inargs.depth_bnds, bounds=True)

    # Regrid from curvilinear to rectilinear if necessary
    regrid_status = False
    if inargs.lats:
        horizontal_grid = grids.make_grid(dim_vals['latitude'],
                                          dim_vals['longitude'])
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube, target_grid_cube=horizontal_grid)

    # Regrid to new grid
    if dim_vals['depth'] or not regrid_status:
        sample_points = get_sample_points(cube, dim_vals)
        cube = cube.interpolate(sample_points, iris.analysis.Linear())
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'latitude' in coord_names:
            cube.coord('latitude').guess_bounds()
        if 'longitude' in coord_names:
            cube.coord('longitude').guess_bounds()
        if inargs.levs:
            cube = spatial_weights.guess_depth_bounds(cube)
        else:
            cube.coord('depth').bounds = get_depth_bounds(inargs.depth_bnds)

    if numpy.isnan(numpy.min(cube.data)):
        cube = remove_nans(cube)

    # Reinstate time dim_coord if necessary
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    if 'time' in aux_coord_names:
        cube = iris.util.new_axis(cube, 'time')

    cube.attributes['history'] = log
    iris.save(cube, inargs.outfile, fill_value=1e20)
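
The sample_points / iris.analysis.Linear() step above is iris's point-interpolation API; a self-contained sketch on a synthetic 1D cube (the latitude values are made up):

import numpy as np
import iris.analysis
from iris.coords import DimCoord
from iris.cube import Cube

lat = DimCoord(np.linspace(-90, 90, 7), standard_name='latitude',
               units='degrees')
cube = Cube(np.linspace(0, 6, 7), dim_coords_and_dims=[(lat, 0)])

# Interpolate onto a coarser latitude set, as main() does with the points
# returned by get_sample_points(cube, dim_vals).
sample_points = [('latitude', np.linspace(-60, 60, 5))]
new_cube = cube.interpolate(sample_points, iris.analysis.Linear())
print(new_cube.shape)  # (5,)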
Example #5
def main(inargs):
    """Run the program."""

    # Read data
    level_constraint, lat_constraint = get_constraints(inargs.depth,
                                                       inargs.hemisphere)
    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var) & level_constraint,
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    # Get area file (if applicable)
    if inargs.hemisphere:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = cube.extract(lat_constraint)
        area_cube = None
    else:
        area_cube = read_optional(inargs.area_file)

    # Mask ocean or atmosphere (if applicable)
    if inargs.sftlf_file:
        sftlf_file, selected_region = inargs.sftlf_file
        sftlf_cube = read_optional(sftlf_file)
        mask = create_mask(sftlf_cube, selected_region)
        cube.data = numpy.ma.asarray(cube.data)
        cube.data.mask = mask
        if area_cube:
            areas_dict = area_info(area_cube.copy(), mask, selected_region)
    else:
        areas_dict = {}
        sftlf_cube = None

    # Outfile attributes
    atts = set_attributes(inargs, cube, area_cube, sftlf_cube, areas_dict)

    # Temporal smoothing
    if inargs.smoothing:
        cube = smooth_data(cube, inargs.smoothing)

    # Calculate metric
    area_weights = get_area_weights(cube, area_cube)
    if inargs.metric == 'bulk-deviation':
        metric = calc_bulk_deviation(cube, area_weights, atts)
    elif inargs.metric == 'mean':
        metric = calc_global_mean(cube, area_weights, atts)
    elif inargs.metric == 'grid-deviation':
        metric = calc_grid_deviation(cube, inargs.var, area_weights, atts)

    iris.save(metric, inargs.outfile)
Example #6
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             sftlf_cube=None,
             realm=None):
    """Read, merge, temporally aggregate and calculate zonal sum.
    
    Positive is defined as down.
    
    """

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            depth_constraint = iris.Constraint(depth=0)
            cube = cube.extract(depth_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = multiply_by_area(cube)

        if 'up' in cube.standard_name:
            cube.data = cube.data * -1

        if sftlf_cube and realm in ['ocean', 'land']:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, realm)

        zonal_sum = cube.collapsed('longitude', iris.analysis.SUM)
        zonal_sum.remove_coord('longitude')

        grid_spacing = grids.get_grid_spacing(zonal_sum)
        zonal_sum.data = zonal_sum.data / grid_spacing

    else:
        zonal_sum = None

    return zonal_sum, metadata_dict
Example #7
def mask_regrid_area(cube, sftlf_cube, realm=None, area=False):
    """Mask, regrid and multiply data by area."""

    if realm:
        mask = create_mask(sftlf_cube, cube.shape, realm)
        cube.data = numpy.ma.asarray(cube.data)
        cube.data.mask = mask

    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)
    if area:
        cube = multiply_by_area(cube)

    return cube
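
The mask application in mask_regrid_area is plain numpy masked-array machinery; a small self-contained sketch with a made-up land/sea mask (numpy only):

import numpy as np

data = np.arange(24.0).reshape(2, 3, 4)   # hypothetical (time, y, x) field
land = np.zeros((3, 4), dtype=bool)
land[0, :] = True                         # pretend the first row is land
mask = np.broadcast_to(land, data.shape)  # same mask at every time step

masked = np.ma.asarray(data)
masked.mask = mask
print(masked[0].count())  # 8 unmasked (ocean) points per time step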
Example #8
def main(inargs):
    """Run the program."""

    temperature_cube, history = gio.combine_files(inargs.temperature_files,
                                                  inargs.var)

    temperature_atts = temperature_cube.attributes
    metadata_dict = {inargs.temperature_files[0]: history[0]}

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    temperature_cube = temperature_cube.extract(level_subset)

    if inargs.annual:
        temperature_cube = timeseries.convert_to_annual(temperature_cube,
                                                        chunk=inargs.chunk)

    if inargs.regrid:
        area_cube = read_area_file(inargs.regrid)
        temperature_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            temperature_cube, weights=area_cube.data)
        volume_data = spatial_weights.volume_array(temperature_cube)
        grid = 'y72x144'
    else:
        assert inargs.volume_file, "Must provide volume file if not regridding data"
        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                temperature_cube, level_subset,
                                                metadata_dict)
        coord_names = [coord.name() for coord in temperature_cube.dim_coords]
        grid = None

    ohc_cube = ohc(temperature_cube,
                   volume_data,
                   inargs.density,
                   inargs.specific_heat,
                   coord_names,
                   vertical_integral=inargs.vertical_integral,
                   chunk=inargs.chunk)

    ohc_cube = add_metadata(temperature_cube, temperature_atts, ohc_cube,
                            inargs)
    log = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    ohc_cube.attributes['history'] = log

    iris.save(ohc_cube, inargs.outfile)
Example #9
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             area=False,
             invert_evap=False):
    """Read, merge, temporally aggregate and calculate zonal mean."""

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

        assert cube.units == 'kg m-2 s-1'
        cube.data = cube.data * 86400
        cube.units = 'mm/day'

        if invert_evap and (var == 'water_evaporation_flux'):
            cube.data = cube.data * -1

        if area:
            cube = spatial_weights.multiply_by_area(cube)

        zonal_mean = cube.collapsed('longitude', iris.analysis.MEAN)
        zonal_mean.remove_coord('longitude')
    else:
        zonal_mean = None

    return zonal_mean, metadata_dict
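
The zonal-mean step above (collapse over longitude, then drop the now-scalar coordinate) in a self-contained form, on a synthetic cube (assumes iris and numpy):

import numpy as np
import iris.analysis
from iris.coords import DimCoord
from iris.cube import Cube

lat = DimCoord(np.array([-45.0, 45.0]), standard_name='latitude',
               units='degrees')
lon = DimCoord(np.array([0.0, 90.0, 180.0, 270.0]),
               standard_name='longitude', units='degrees')
lat.guess_bounds()
lon.guess_bounds()
cube = Cube(np.arange(8.0).reshape(2, 4),
            dim_coords_and_dims=[(lat, 0), (lon, 1)])

zonal_mean = cube.collapsed('longitude', iris.analysis.MEAN)
zonal_mean.remove_coord('longitude')  # longitude is scalar after collapsing
print(zonal_mean.shape)  # (2,)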
Example #10
def regrid_cube(cube):
    """Regrid the cube.

    For a singleton axis, curvilinear_to_rectilinear moves that axis
    from being a dimension coordinate to a scalar coordinate.

    This function only handles a singleton time axis, moving it back
    to being a dimension coordinate if need be.

    """

    singleton_flag = False
    if cube.shape[0] == 1:
        singleton_flag = True

    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)

    if singleton_flag:
        cube = iris.util.new_axis(cube, 'time')
        coord_names = [coord.name() for coord in cube.dim_coords]

    return cube, coord_names, regrid_status
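
A self-contained illustration of the singleton-time behaviour the docstring describes: iris.util.new_axis promotes a scalar time coordinate back to a length-one leading dimension (synthetic cube; assumes iris and numpy):

import numpy as np
import iris.util
from iris.coords import AuxCoord, DimCoord
from iris.cube import Cube

lat = DimCoord(np.array([-10.0, 10.0]), standard_name='latitude',
               units='degrees')
lon = DimCoord(np.array([0.0, 120.0, 240.0]), standard_name='longitude',
               units='degrees')
time = AuxCoord(np.array([15.5]), standard_name='time',
                units='days since 2000-01-01')
cube = Cube(np.zeros((2, 3)),
            dim_coords_and_dims=[(lat, 0), (lon, 1)],
            aux_coords_and_dims=[(time, None)])

cube = iris.util.new_axis(cube, 'time')
print(cube.shape)                                   # (1, 2, 3)
print([coord.name() for coord in cube.dim_coords])  # time leads again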
Example #11
def main(inargs):
    """Run the program."""

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var) & time_constraint, callback=save_history)
    equalise_attributes(cube)
    cube = cube.concatenate_cube()

    annual_climatology = cube.collapsed('time', iris.analysis.MEAN)

    if inargs.regrid:
        annual_climatology, coord_names, regrid_status = grids.curvilinear_to_rectilinear(annual_climatology)

    if inargs.scale_factor:
        annual_climatology = scale_data(annual_climatology, inargs.scale_factor)

    annual_climatology.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]}) 
    iris.save(annual_climatology, inargs.outfile)
Example #12
def main(inargs):
    """Run the program."""

    # Basin data
    hfbasin = inargs.var == 'northward_ocean_heat_transport'
    if inargs.basin_file and not hfbasin:
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
        inargs.basin_file = None

    # Heat transport data
    data_cube = read_data(inargs.infiles, inargs.var, inargs.model, basin_cube)
    orig_standard_name = data_cube.standard_name
    orig_var_name = data_cube.var_name

    history_attribute = get_history_attribute(inargs.infiles, data_cube,
                                              inargs.basin_file, basin_cube)
    data_cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Regrid (if needed)
    if inargs.regrid:
        data_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            data_cube)

    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]

    regular_grid = not aux_coord_names

    if hfbasin:
        assert len(dim_coord_names) == 2
        assert dim_coord_names[0] == 'time'
        y_axis_name = dim_coord_names[1]
    else:
        assert len(dim_coord_names) == 3
        assert dim_coord_names[0] == 'time'
        y_axis_name, x_axis_name = dim_coord_names[1:]
        for aux_coord in aux_coord_names:
            data_cube.remove_coord(aux_coord)

    # Basin array
    if inargs.basin_file and not inargs.regrid:
        ndim = data_cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1],
                                            data_cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif regular_grid and not hfbasin:
        basin_array = uconv.create_basin_array(data_cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_list = ['globe']

    # Calculate output for each basin
    out_cubes = []
    for basin_name in basin_list:
        data_cube_copy = data_cube.copy()
        if basin_name != 'globe':
            data_cube_copy.data.mask = numpy.where(
                (data_cube_copy.data.mask == False) &
                (basin_array == basins[basin_name]), False, True)

        # Zonal mean
        if hfbasin:
            zonal_cube = data_cube_copy
        else:
            zonal_cube = data_cube_copy.collapsed(x_axis_name,
                                                  iris.analysis.SUM)
            zonal_cube.remove_coord(x_axis_name)

        # Convergence
        zonal_cube = convergence(zonal_cube, y_axis_name)

        # Attributes
        standard_name = 'zonal_sum_%s_convergence_%s' % (orig_standard_name,
                                                         basin_name)
        var_name = '%s_czs_%s' % (orig_var_name, basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
Example #13
def main(inargs):
    """Run the program."""

    # Read data
    try:
        time_constraint = gio.get_time_constraint(inargs.time_bounds)
    except AttributeError:
        time_constraint = iris.Constraint()

    depth_constraint = gio.iris_vertical_constraint(inargs.min_depth,
                                                    inargs.max_depth)

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles,
                         gio.check_iris_var(inargs.var) & depth_constraint)
        history = cube[0].attributes['history']
        atts = cube[0].attributes
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()

        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)

        cube = iris.util.squeeze(cube)

        if 'salinity' in inargs.var:
            cube = gio.salinity_unit_check(cube)

        infile_metadata = {inargs.infiles[0]: history}

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    if inargs.min_depth or inargs.max_depth:
        cube = vertical_mean(cube)

    agg_cube = get_agg_cube(cube, inargs.aggregation)

    if inargs.regrid:
        before_sum = agg_cube.data.sum()
        before_mean = agg_cube.data.mean()
        agg_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            agg_cube)
        if regrid_status:
            print('Warning: Data has been regridded')
            print('Before sum:', '%.2E' % Decimal(before_sum))
            print('After sum:', '%.2E' % Decimal(agg_cube.data.sum()))
            print('Before mean:', '%.2E' % Decimal(before_mean))
            print('After mean:', '%.2E' % Decimal(agg_cube.data.mean()))

    if inargs.subtract_tropics:
        agg_cube = subtract_tropics(agg_cube)

    if inargs.land_mask:
        sftlf_cube = iris.load_cube(inargs.land_mask, 'land_area_fraction')
        agg_cube = uconv.apply_land_ocean_mask(agg_cube, sftlf_cube, 'ocean')

    atts['history'] = gio.write_metadata(file_info=infile_metadata)
    agg_cube.attributes = atts

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(agg_cube, inargs.outfile)
Example #14
def main(inargs):
    """Run the program."""

    region = inargs.region.replace('-', '_')

    # Basin data
    hfbasin = inargs.var == 'northward_ocean_heat_transport'
    if not hfbasin:
        assert inargs.basin_file, "Must provide a basin file for hfy data"
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
        inargs.basin_file = None

    # Heat transport data
    data_cube = read_data(inargs.infiles, inargs.var, basin_cube, region)
    orig_standard_name = data_cube.standard_name
    orig_var_name = data_cube.var_name

    history_attribute = get_history_attribute(inargs.infiles, data_cube,
                                              inargs.basin_file, basin_cube)
    data_cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Regrid (if needed)
    if inargs.regrid:
        data_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            data_cube)

    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]

    regular_grid = not aux_coord_names

    if hfbasin:
        assert len(dim_coord_names) == 2
        assert dim_coord_names[0] == 'time'
        y_axis_name = dim_coord_names[1]
    else:
        assert len(dim_coord_names) == 3
        assert dim_coord_names[0] == 'time'
        y_axis_name, x_axis_name = dim_coord_names[1:]
        for aux_coord in aux_coord_names:
            data_cube.remove_coord(aux_coord)

    # Basin array
    if inargs.basin_file and not inargs.regrid:
        ndim = data_cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1],
                                            data_cube.shape)
    elif regular_grid and not hfbasin:
        basin_array = uconv.create_basin_array(data_cube)

    # Calculate the zonal sum (if required)
    data_cube_copy = data_cube.copy()

    if hfbasin:
        zonal_cube = data_cube_copy
    else:
        zonal_cube = data_cube_copy.collapsed(x_axis_name, iris.analysis.SUM)
        zonal_cube.remove_coord(x_axis_name)

    # Attributes
    try:
        zonal_cube.remove_coord('region')
    except iris.exceptions.CoordinateNotFoundError:
        pass

    standard_name = 'northward_ocean_heat_transport'
    var_name = 'hfbasin'

    zonal_cube.standard_name = standard_name
    zonal_cube.long_name = standard_name.replace('_', ' ')
    zonal_cube.var_name = var_name

    iris.save(zonal_cube, inargs.outfile)
Example #15
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var),
                     callback=save_history)

    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube.attributes = atts
    orig_long_name = cube.long_name
    if cube.standard_name is None:
        orig_standard_name = orig_long_name.replace(' ', '_')
    else:
        orig_standard_name = cube.standard_name
    orig_var_name = cube.var_name

    # Temporal smoothing
    cube = timeseries.convert_to_annual(cube, full_months=True)

    # Mask marginal seas
    if inargs.basin:
        if '.nc' in inargs.basin:
            basin_cube = iris.load_cube(inargs.basin)
            cube = uconv.mask_marginal_seas(cube, basin_cube)
        else:
            basin_cube = 'create'
    else:
        basin_cube = None

    # Regrid (if needed)
    if inargs.regrid:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

    # Change units (remove m-2)
    if inargs.area:
        cube = multiply_by_area(cube, inargs.area)
        cube.attributes = atts
        cube.long_name = orig_long_name
        cube.standard_name = orig_standard_name
        cube.var_name = orig_var_name

    # History
    history_attribute = get_history_attribute(inargs.infiles[0], history[0])
    cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Calculate output for each basin
    if isinstance(basin_cube, iris.cube.Cube):
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif isinstance(basin_cube, str):
        basin_array = uconv.create_basin_array(cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_array = None
        basin_list = ['globe']

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert len(dim_coord_names) == 3
    assert dim_coord_names[0] == 'time'
    x_axis_name = dim_coord_names[2]

    for aux_coord in aux_coord_names:
        cube.remove_coord(aux_coord)

    out_cubes = []
    for basin_name in basin_list:
        data_cube = cube.copy()
        if basin_name != 'globe':
            data_cube.data.mask = numpy.where(
                (data_cube.data.mask == False) &
                (basin_array == basins[basin_name]), False, True)

        # Zonal statistic
        zonal_cube = data_cube.collapsed(
            x_axis_name, aggregation_functions[inargs.zonal_stat])
        zonal_cube.remove_coord(x_axis_name)

        # Attributes
        standard_name = 'zonal_%s_%s_%s' % (inargs.zonal_stat,
                                            orig_standard_name, basin_name)
        var_name = '%s_%s_%s' % (orig_var_name,
                                 aggregation_abbreviations[inargs.zonal_stat],
                                 basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
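
Examples #12 and #15 both register their derived names in iris's standard-name table before assigning them: iris rejects a standard_name it does not recognise, so the table entry has to come first. A minimal sketch (the name itself is made up):

import numpy as np
import iris.std_names
from iris.cube import Cube

cube = Cube(np.zeros(3), var_name='demo', units='W')
name = 'zonal_sum_demo_convergence_globe'  # hypothetical, not a CF name

iris.std_names.STD_NAMES[name] = {'canonical_units': cube.units}
cube.standard_name = name  # accepted now that the table knows the name
print(cube.standard_name)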
Example #16
def main(inargs):
    """Run the program."""

    file_dict, tas_dict, area_dict, basin_dict = read_data(inargs)

    metadata_dict = {}
    climatology_dict = {}
    time_trend_dict = {}
    tas_scaled_trend_dict = {}
    branch_dict = {}
    for experiment in [
            'historical', 'historicalGHG', 'historicalAA', 'historicalnoAA',
            'piControl'
    ]:
        filenames = file_dict[experiment]
        if not filenames:
            climatology_dict[experiment] = None
            time_trend_dict[experiment] = None
            tas_scaled_trend_dict[experiment] = None
        else:
            print(experiment)
            try:
                time_constraint = gio.get_time_constraint(inargs.total_time)
            except (AttributeError, TypeError):
                time_constraint = iris.Constraint()

            with iris.FUTURE.context(cell_datetime_objects=True):
                cube = iris.load(filenames, gio.check_iris_var(inargs.var))

                # Merge cubes
                metadata_dict[filenames[0]] = cube[0].attributes['history']
                equalise_attributes(cube)
                iris.util.unify_time_units(cube)
                cube = cube.concatenate_cube()
                cube = gio.check_time_units(cube)

                # Time extraction and branch time info
                coord_names = [coord.name() for coord in cube.dim_coords]
                assert coord_names[0] == 'time'

                if 'historical' in experiment:
                    original_time_length = cube.shape[0]
                    cube = cube.extract(time_constraint)
                    new_time_length = cube.shape[0]
                    branch_time_index_offset = original_time_length - new_time_length

                    branch_time = cube.attributes['branch_time']
                    time_length = cube.shape[0]
                    branch_dict[experiment] = (branch_time, time_length,
                                               branch_time_index_offset)

                elif experiment == 'piControl':
                    branch_time, time_length, branch_time_index_offset = branch_dict[
                        'historical']
                    start_index, error = uconv.find_nearest(
                        cube.coord('time').points,
                        float(branch_time) + 15.5,
                        index=True)
                    if abs(error) > 15:
                        print(
                            "WARNING: Large error of %f in locating branch time"
                            % (error))
                        start_index = 0
                    start_index = start_index + branch_time_index_offset
                    cube = cube[start_index:start_index + time_length, ::]

                # Temporal smoothing
                cube = timeseries.convert_to_annual(cube, full_months=True)

                # Mask marginal seas
                if basin_dict[experiment]:
                    basin_cube = iris.load_cube(basin_dict[experiment])
                    cube = uconv.mask_marginal_seas(cube, basin_cube)

                # Regrid and select basin
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube)
                if inargs.basin != 'globe':
                    if basin_dict[experiment] and not regrid_status:
                        ndim = cube.ndim
                        basin_array = uconv.broadcast_array(
                            basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
                    else:
                        basin_array = uconv.create_basin_array(cube)
                    cube.data.mask = numpy.where(
                        (cube.data.mask == False) &
                        (basin_array == basins[inargs.basin]), False, True)

                # Scale
                cube, units = scale_data(cube,
                                         inargs.var,
                                         reverse_sign=inargs.reverse_sign)

                # Zonal statistic
                if inargs.area_adjust:
                    if regrid_status:
                        area_dict[experiment] = None
                    cube, units, metadata_dict = area_ajustment(
                        cube, area_dict[experiment], metadata_dict)
                    zonal_cube = cube.collapsed('longitude', iris.analysis.SUM)
                    aggregation = 'Zonally integrated'
                else:
                    zonal_cube = cube.collapsed('longitude',
                                                iris.analysis.MEAN)
                    aggregation = 'Zonal mean'
                zonal_cube.remove_coord('longitude')

                # Climatology and trends
                climatology_dict[experiment] = calculate_climatology(
                    zonal_cube, inargs.climatology_time, experiment)
                time_trend_dict[experiment] = get_trend_cube(zonal_cube)
                if tas_dict[experiment]:
                    tas_cube = iris.load_cube(
                        tas_dict[experiment],
                        iris.Constraint('air_temperature') & time_constraint)
                    scale_factor = get_scale_factor(tas_cube)
                    print(experiment, 'warming:', scale_factor)
                    tas_scaled_trend_dict[experiment] = time_trend_dict[
                        experiment] * (1. / abs(scale_factor))
                    metadata_dict[tas_dict[experiment]
                                  [0]] = tas_cube.attributes['history']
                else:
                    tas_scaled_trend_dict[experiment] = None

    # Create the plots

    tas_scaled_trend_flag = tas_scaled_trend_dict[
        'historicalGHG'] and tas_scaled_trend_dict['historicalAA']

    fig = plt.figure(figsize=[15, 20])
    gs = set_plot_grid(tas_trend=tas_scaled_trend_flag)

    ax_main = plt.subplot(gs[0])
    plt.sca(ax_main)
    plot_climatology(climatology_dict, inargs.var, units, inargs.legloc,
                     aggregation)
    plt.title('%s (%s), %s' % (inargs.model, inargs.run, inargs.basin))

    ax_diff = plt.subplot(gs[1])
    plt.sca(ax_diff)
    plot_difference(climatology_dict)

    ax_time_trend = plt.subplot(gs[2])
    plt.sca(ax_time_trend)
    plot_trend(time_trend_dict, units)

    if tas_scaled_trend_flag:
        ax_tas_trend = plt.subplot(gs[3])
        plt.sca(ax_tas_trend)
        plot_trend(tas_scaled_trend_dict, units, scaled=True)

    plt.xlabel('latitude')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)