예제 #1
0
def calc_spatial_agg(cube,
                     coord_names,
                     aux_coord_names,
                     grid_type,
                     aggregation_method,
                     area_cube,
                     lat_bounds=None,
                     chunk=False):
    """Calculate the spatial aggregate (sum or mean) of a cube.

    Args:
      cube (iris.cube.Cube): input data cube
      coord_names (list of str): dimension coordinate names; must
        include 'time', which is excluded from the collapse
      aux_coord_names (list of str): auxiliary coordinate names
        (unused here; kept for interface compatibility)
      grid_type (str): 'curvilinear' or 'latlon'
      area_cube: iris.cube.Cube of cell areas, a str (meaning compute
        areas from the data cube itself), or None (no weighting)
      lat_bounds (optional): latitude bounds for region extraction
      chunk (bool): if True, collapse in time chunks to limit memory use

    Returns:
      iris.cube.Cube: the spatially collapsed cube
    """

    cube = cube.copy()
    coord_names = coord_names.copy()

    # Extract region
    if lat_bounds:
        if grid_type == 'curvilinear':
            cube = extract_region_curvilinear(cube, lat_bounds)
        else:
            cube = extract_region_latlon(cube, lat_bounds)

    # Get area weights (isinstance rather than type() comparison so
    # Cube subclasses are also accepted)
    if isinstance(area_cube, iris.cube.Cube):
        if grid_type == 'latlon' and lat_bounds:
            area_cube = extract_region_latlon(area_cube, lat_bounds)
        area_weights = uconv.broadcast_array(area_cube.data, [1, 2],
                                             cube.shape)
    elif isinstance(area_cube, str):
        area_weights = spatial_weights.area_array(cube)
    else:
        area_weights = None

    # Calculate spatial aggregate over all non-time dimensions
    coord_names.remove('time')
    if chunk:
        spatial_agg = uconv.chunked_collapse_by_time(cube,
                                                     coord_names,
                                                     aggregation_method,
                                                     weights=area_weights)
    else:
        spatial_agg = cube.collapsed(coord_names,
                                     aggregation_method,
                                     weights=area_weights)

    # An area-weighted sum integrates out the per-area units
    # (explicit None check instead of relying on object truthiness)
    if (area_cube is not None) and (aggregation_method == iris.analysis.SUM):
        units = str(spatial_agg.units)
        spatial_agg.units = units.replace('m-2', '')
    spatial_agg.remove_coord('latitude')
    spatial_agg.remove_coord('longitude')
    if grid_type == 'curvilinear':
        spatial_agg.remove_coord(coord_names[0])
        spatial_agg.remove_coord(coord_names[1])

    return spatial_agg
예제 #2
0
def main(inargs):
    """Run the program."""

    tas_cube, history = gio.combine_files(inargs.tas_files, inargs.var)
    if inargs.annual:
        tas_cube = timeseries.convert_to_annual(tas_cube)

    # Area-weighted global mean over the non-time dimensions
    weights = spatial_weights.area_array(tas_cube)
    dim_names = [coord.name() for coord in tas_cube.dim_coords]
    tasga_cube = tas_cube.collapsed(dim_names[1:], iris.analysis.MEAN,
                                    weights=weights)
    tasga_cube.remove_coord(dim_names[1])
    tasga_cube.remove_coord(dim_names[2])

    tasga_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(tasga_cube, inargs.outfile)
def get_area_weights(cube, area_file, lat_constraint):
    """Get area weights for averaging.

    Args:
      cube (iris.cube.Cube): data cube the weights must match in shape
      area_file (str or None): path to a cell-area file; if falsy the
        weights are computed from the cube's own grid
      lat_constraint: constraint applied when loading the area file

    Returns:
      Array of area weights broadcast to cube.shape.
    """

    if area_file:
        # Bug fix: previously called iris.load_cube(inargs.area_file, ...)
        # but `inargs` is not in scope here — use the `area_file` parameter.
        area_cube = iris.load_cube(area_file, lat_constraint)
    else:
        area_cube = None

    if area_cube:
        area_weights = uconv.broadcast_array(area_cube.data, [1, 2],
                                             cube.shape)
    else:
        area_weights = spatial_weights.area_array(cube)

    return area_weights
예제 #4
0
def calc_region_sum(cube, coord_names, aux_coord_names, grid_type, area_cube,
                    region):
    """Calculate the spatial sum over a latitude region.

    Args:
      cube (iris.cube.Cube): input data cube (per-area units, e.g. J m-2)
      coord_names (list of str): dimension coordinate names; must
        include 'time', which is excluded from the collapse
      aux_coord_names (list of str): auxiliary coordinate names
        (unused here; kept for interface compatibility)
      grid_type (str): 'curvilinear' or 'latlon'
      area_cube (iris.cube.Cube or None): cell-area cube; required for
        curvilinear data, otherwise areas can be computed from the grid
      region (str): key into the module-level region_bounds dict

    Returns:
      iris.cube.Cube: spatial sum with units of J
    """

    if grid_type == 'curvilinear':
        assert area_cube is not None, "Must provide an area cube of curvilinear data"

    cube = cube.copy()
    coord_names = coord_names.copy()
    lat_bounds = region_bounds[region]

    # Extract region
    if lat_bounds:
        if grid_type == 'curvilinear':
            cube = extract_region_curvilinear(cube, lat_bounds)
        else:
            cube = extract_region_latlon(cube, lat_bounds)

    # Convert per-area data to absolute values before summing
    if 'm-2' in str(cube.units):
        # Get area weights (explicit None check instead of relying on
        # Cube truthiness)
        if area_cube is not None:
            if grid_type == 'latlon' and lat_bounds:
                area_cube = extract_region_latlon(area_cube, lat_bounds)
            area_data = uconv.broadcast_array(area_cube.data, [1, 2],
                                              cube.shape)
        else:
            area_data = spatial_weights.area_array(cube)

        # Multiply by area; the sum then integrates out the m-2
        cube.data = cube.data * area_data
        units = str(cube.units)
        cube.units = units.replace('m-2', '')

    assert cube.units == 'J'

    # Collapse all non-time dimensions
    coord_names.remove('time')
    spatial_agg = cube.collapsed(coord_names, iris.analysis.SUM)

    spatial_agg.remove_coord('latitude')
    spatial_agg.remove_coord('longitude')
    if grid_type == 'curvilinear':
        spatial_agg.remove_coord(coord_names[0])
        spatial_agg.remove_coord(coord_names[1])

    return spatial_agg
예제 #5
0
def main(inargs):
    """Run the program."""

    prw_cube, history = gio.combine_files(inargs.prw_files, 'atmosphere_mass_content_of_water_vapor')

    # Cell-area weights: read from file if provided, otherwise derive
    # them from the data grid.
    if inargs.area_file:
        area_cube = iris.load_cube(inargs.area_file, 'cell_area')
        weights = uconv.broadcast_array(area_cube.data, [1, 2], prw_cube.shape)
    else:
        weights = spatial_weights.area_array(prw_cube)

    dim_names = [coord.name() for coord in prw_cube.dim_coords]
    massa_cube = prw_cube.collapsed(dim_names[1:], iris.analysis.SUM, weights=weights)
    # The area-weighted sum removes the per-area component of the units
    massa_cube.units = str(massa_cube.units).replace('m-2', '')
    massa_cube.remove_coord(dim_names[1])
    massa_cube.remove_coord(dim_names[2])

    massa_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(massa_cube, inargs.outfile)
예제 #6
0
def main(inargs):
    """Run the program.

    Builds a cell-volume cube from a dummy data cube (for depths),
    an optional cell-area file, and an optional sea-area-fraction
    (sftof) file, then saves it to inargs.outfile.
    """

    # Depth data: take one time slice of the dummy variable and derive
    # per-cell depth extents from its depth coordinate.
    data_cube = iris.load_cube(inargs.dummy_file, inargs.dummy_var)
    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]
    assert dim_coord_names[0] == 'time'
    depth_name = dim_coord_names[1]
    data_cube = data_cube[0, ::]
    data_cube.remove_coord('time')
    depth_data = spatial_weights.get_depth_array(data_cube, depth_name)
    # Area data: read from file (with a global-total sanity check) or
    # compute from the data grid.
    if inargs.area_file:
        area_cube = iris.load_cube(inargs.area_file, 'cell_area')
        gio.check_global_ocean_area(area_cube.data.sum())
        area_data = uconv.broadcast_array(area_cube.data, [1, 2],
                                          depth_data.shape)
    else:
        area_data = spatial_weights.area_array(data_cube)

    volume_data = depth_data * area_data

    # Optionally scale by the sea-area fraction (sftof is a percentage)
    if inargs.sftof_file:
        sftof_cube = iris.load_cube(inargs.sftof_file)
        assert sftof_cube.data.max() == 100
        sftof_data = uconv.broadcast_array(sftof_cube.data, [1, 2],
                                           depth_data.shape)
        volume_data = volume_data * (sftof_data / 100.0)

    # Mask the volume wherever the dummy data is masked/invalid so land
    # cells carry no volume
    volume_data = numpy.ma.asarray(volume_data)
    data = numpy.ma.masked_invalid(data_cube.data)
    volume_data.mask = data.mask
    # area_cube only exists when an area file was given
    global_atts = area_cube.attributes if inargs.area_file else None
    volume_cube = construct_volume_cube(volume_data, data_cube, global_atts)
    volume_cube.attributes['history'] = gio.write_metadata()

    gio.check_global_ocean_volume(volume_cube.data.sum())

    iris.save(volume_cube, inargs.outfile)
예제 #7
0
def main(inargs):
    """Run the program.

    Builds a cell-area cube either from a volcello (cell volume) file
    or from the dummy data cube's own lat/lon grid, then saves it.
    """

    data_cube = iris.load_cube(inargs.dummy_file, inargs.dummy_var)
    coord_names = [coord.name() for coord in data_cube.dim_coords]
    if inargs.volcello_file:
        # Derive area = surface-layer volume / surface-layer thickness
        assert data_cube.ndim == 4
        volume_cube = gio.get_ocean_weights(inargs.volcello_file)
        depth_coord = data_cube.coord(coord_names[1])
        assert depth_coord.units in ['m', 'dbar']
        surface_depth = depth_coord.bounds[0][1] - depth_coord.bounds[0][0]
        area_data = volume_cube.data[0, ::] / surface_depth
        # Reduce the data cube to a 2D lat/lon slice to carry coords/attrs
        data_cube = data_cube[0, 0, ::]
        data_cube.remove_coord(coord_names[0])
        data_cube.remove_coord(coord_names[1])
    else:
        # Compute areas from the grid; handles 3D (time, lat, lon) and
        # 4D (time, depth, lat, lon) inputs
        assert coord_names[-2:] == ['latitude', 'longitude']
        if data_cube.ndim == 3:
            data_cube = data_cube[0, ::]
            data_cube.remove_coord(coord_names[0])
        else:
            data_cube = data_cube[0, 0, ::]
            data_cube.remove_coord(coord_names[0])
            data_cube.remove_coord(coord_names[1])
        area_data = spatial_weights.area_array(data_cube)
        area_data = numpy.ma.asarray(area_data)
        if inargs.outvar == 'areacello':
            # Ocean cell area: mask land using the data's own mask
            # NOTE(review): assumes data_cube.data is a masked array — confirm
            area_data.mask = data_cube.data.mask

    area_cube = construct_area_cube(inargs.outvar, area_data,
                                    data_cube.attributes, data_cube.dim_coords)
    area_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)

    # Sanity-check the global total against known ocean/surface areas
    if inargs.outvar == 'areacello':
        gio.check_global_ocean_area(area_cube.data.sum())
    else:
        gio.check_global_surface_area(area_cube.data.sum())

    iris.save(area_cube, inargs.outfile)
예제 #8
0
def calc_spatial_agg(cube,
                     coord_names,
                     aux_coord_names,
                     grid_type,
                     aggregation_method,
                     weights_cube,
                     lat_bounds=None,
                     chunk=False):
    """Calculate the spatial aggregate (sum or mean) of a cube.

    Args:
      cube (iris.cube.Cube): input data cube
      coord_names (list of str): dimension coordinate names; must
        include 'time', which is excluded from the collapse
      aux_coord_names (list of str): auxiliary coordinate names
        (unused here; kept for interface compatibility)
      grid_type (str): 'curvilinear' or 'latlon'
      weights_cube: iris.cube.Cube of weights, a str (meaning compute
        area weights from the data cube), or None (no weighting)
      lat_bounds (optional): latitude bounds for region extraction
      chunk (bool): if True, collapse in time chunks to limit memory use

    Returns:
      iris.cube.Cube: the spatially collapsed cube
    """

    cube = cube.copy()
    coord_names = coord_names.copy()

    # Extract region
    if lat_bounds:
        if grid_type == 'curvilinear':
            cube = grids.extract_latregion_curvilinear(cube, lat_bounds)
        else:
            cube = grids.extract_latregion_rectilinear(cube, lat_bounds)

    # Get weights (isinstance rather than type() comparison so Cube
    # subclasses are also accepted)
    if isinstance(weights_cube, iris.cube.Cube):
        if grid_type == 'latlon' and lat_bounds:
            weights_cube = grids.extract_latregion_rectilinear(
                weights_cube, lat_bounds)
        weights_array = uconv.broadcast_array(weights_cube.data,
                                              [1, weights_cube.ndim],
                                              cube.shape)
    elif isinstance(weights_cube, str):
        weights_array = spatial_weights.area_array(cube)
    else:
        weights_array = None

    # Calculate spatial aggregate over all non-time dimensions
    coord_names.remove('time')
    if (aggregation_method == iris.analysis.SUM) and (weights_cube is not None):
        # For a weighted sum, pre-multiply by the weights and integrate
        # out the per-area units, then collapse unweighted
        units = str(cube.units)
        cube.units = units.replace('m-2', '')
        cube.data = cube.data * weights_array
        weights = None
    else:
        weights = weights_array

    if chunk:
        spatial_agg = uconv.chunked_collapse_by_time(cube,
                                                     coord_names,
                                                     aggregation_method,
                                                     weights=weights)
    else:
        spatial_agg = cube.collapsed(coord_names,
                                     aggregation_method,
                                     weights=weights)

    spatial_agg.remove_coord('latitude')
    spatial_agg.remove_coord('longitude')
    try:
        spatial_agg.remove_coord('depth')
    except iris.exceptions.CoordinateNotFoundError:
        pass
    if grid_type == 'curvilinear':
        spatial_agg.remove_coord(coord_names[-2])
        spatial_agg.remove_coord(coord_names[-1])

    return spatial_agg
예제 #9
0
def main(inargs):
    """Run the program.

    Aggregates a data variable over P-E regions and ocean basins for
    each time step and saves the result as a (time, region, basin) cube.
    """

    # Identify the input variable from the file name
    var = inargs.pe_files[0].split('/')[-1].split('_')[0]
    assert var in ['pe', 'wfo']
    var_name = 'precipitation minus evaporation flux' if var == 'pe' else 'water_flux_into_sea_water'

    area_cube = gio.get_ocean_weights(
        inargs.area_file) if inargs.area_file else None
    pe_cube, pe_lats, pe_history = read_data(inargs.pe_files,
                                             var_name,
                                             area_cube,
                                             annual=inargs.annual,
                                             multiply_by_area=inargs.area)
    basin_cube = iris.load_cube(inargs.basin_file, 'region')

    metadata = {
        inargs.pe_files[0]: pe_history[0],
        inargs.basin_file: basin_cube.attributes['history']
    }

    # Select the data variable to aggregate: cell areas, a separate
    # data file, or the P-E data itself
    if inargs.data_var == 'cell_area':
        data_cube = iris.load_cube(inargs.data_files[0], 'cell_area')
        assert data_cube.shape == pe_cube.shape[1:]
    elif inargs.data_files:
        data_cube, data_lats, data_history = read_data(
            inargs.data_files,
            inargs.data_var,
            area_cube,
            annual=inargs.annual,
            multiply_by_area=inargs.area)
        assert data_cube.shape == pe_cube.shape
        metadata[inargs.data_files[0]] = data_history[0]
    else:
        # (Removed an unused `data_var = var_name` assignment here)
        data_cube = pe_cube.copy()

    # Cell areas for the aggregation (explicit None check rather than
    # Cube truthiness)
    if area_cube is not None:
        area_data = area_cube.data
    else:
        if data_cube.ndim == 3:
            area_data = spatial_weights.area_array(data_cube[0, ::])
        else:
            assert data_cube.ndim == 2
            area_data = spatial_weights.area_array(data_cube)

    # Aggregate each time step into 6 P-E regions x 8 basins
    ntimes = pe_cube.shape[0]
    region_data = np.zeros([ntimes, 6, 8])
    for tstep in range(ntimes):
        if inargs.data_var == 'cell_area':
            var_data = data_cube.data
        else:
            var_data = data_cube[tstep, ::].data
        region_data[tstep, :] = get_regional_aggregates(
            inargs.agg, var_data, pe_cube[tstep, ::].data, pe_lats,
            basin_cube.data, area_data)

    if inargs.cumsum:
        region_data = np.cumsum(region_data, axis=0)

    pe_region_coord = create_pe_region_coord()
    basin_coord = create_basin_coord()
    time_coord = pe_cube.coord('time')

    # Choose a standard name for the output variable; pe has no
    # registered standard name, so add one on the fly
    if inargs.data_var:
        standard_name = data_cube.standard_name
    elif var == 'pe':
        iris.std_names.STD_NAMES['precipitation_minus_evaporation_flux'] = {
            'canonical_units': pe_cube.units
        }
        standard_name = 'precipitation_minus_evaporation_flux'
    else:
        standard_name = pe_cube.standard_name
    atts = pe_cube.attributes if inargs.data_var == 'cell_area' else data_cube.attributes
    dim_coords_list = [(time_coord, 0), (pe_region_coord, 1), (basin_coord, 2)]
    out_cube = iris.cube.Cube(region_data,
                              standard_name=standard_name,
                              long_name=data_cube.long_name,
                              var_name=data_cube.var_name,
                              units=data_cube.units,
                              attributes=atts,
                              dim_coords_and_dims=dim_coords_list)

    out_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata,
                                                     git_repo=repo_dir)
    iris.save(out_cube, inargs.outfile)