def read_data(infiles,
              var,
              area_cube,
              annual=False,
              multiply_by_area=False,
              chunk_annual=False):
    """Read and prepare the input data.

    Returns the processed cube, a latitude array broadcast to the cube's
    spatial (non-time) dimensions, and the input file history.
    """

    cube, history = gio.combine_files(infiles, var)
    if annual:
        cube = timeseries.convert_to_annual(cube,
                                            days_in_month=True,
                                            chunk=chunk_annual)

    cube = uconv.flux_to_magnitude(cube)
    if multiply_by_area:
        cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

    assert cube.ndim == 3
    lats = cube.coord('latitude').points
    if lats.ndim == 1:
        # 1D latitude: broadcast it across the two spatial dimensions.
        dim_names = [coord.name() for coord in cube.coords(dim_coords=True)]
        lat_index = dim_names.index('latitude')
        lats = uconv.broadcast_array(lats, lat_index - 1, cube.shape[1:])
    else:
        # 2D (curvilinear) latitude: must already match the spatial shape.
        assert lats.shape == cube.shape[1:]

    return cube, lats, history
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var)
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, aggregation='mean',
                                            days_in_month=True)
    if inargs.flux_to_mag:
        cube = uconv.flux_to_magnitude(cube)

    # The cumulative sum is taken over axis 0, so the leading dimension
    # must be temporal.
    leading_coord = cube.dim_coords[0].name()
    assert leading_coord in ['time', 'year']
    cube.data = numpy.cumsum(cube.data, axis=0)

    infile_history = {inargs.infiles[0]: history[0]}
    cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir,
                                                 infile_history=infile_history)
    iris.save(cube, inargs.outfile)
# Example #3
# 0
def main(inargs):
    """Run the program.

    Compute the zonal sum of ocean heat transport data and save it as an
    hfbasin-style (northward_ocean_heat_transport) cube.
    """

    region = inargs.region.replace('-', '_')

    # Basin data: hfbasin input is already zonally aggregated, so no basin
    # file is needed; hfy data must be masked by basin first.
    hfbasin = inargs.var == 'northward_ocean_heat_transport'
    if not hfbasin:
        assert inargs.basin_file, "Must provide a basin file for hfy data"
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None
        inargs.basin_file = None

    # Heat transport data
    data_cube = read_data(inargs.infiles, inargs.var, basin_cube, region)

    history_attribute = get_history_attribute(inargs.infiles, data_cube, inargs.basin_file, basin_cube)
    data_cube.attributes['history'] = gio.write_metadata(file_info=history_attribute)

    # Regrid (if needed)
    if inargs.regrid:
        data_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(data_cube)

    dim_coord_names = [coord.name() for coord in data_cube.dim_coords]
    aux_coord_names = [coord.name() for coord in data_cube.aux_coords]

    # Aux coords (e.g. 2D lat/lon) indicate a curvilinear grid.
    regular_grid = not aux_coord_names

    if hfbasin:
        # Already a zonal aggregate: (time, y)
        assert len(dim_coord_names) == 2
        assert dim_coord_names[0] == 'time'
        y_axis_name = dim_coord_names[1]
    else:
        # Full field: (time, y, x)
        assert len(dim_coord_names) == 3
        assert dim_coord_names[0] == 'time'
        y_axis_name, x_axis_name = dim_coord_names[1:]
        for aux_coord in aux_coord_names:
            data_cube.remove_coord(aux_coord)

    # Basin array
    # NOTE(review): basin_array is computed but not used below — presumably
    # the masking happens in read_data; confirm before removing.
    if inargs.basin_file and not inargs.regrid:
        ndim = data_cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data, [ndim - 2, ndim - 1], data_cube.shape)
    elif regular_grid and not hfbasin:
        basin_array = uconv.create_basin_array(data_cube)

    # Calculate the zonal sum (if required)
    data_cube_copy = data_cube.copy()

    if hfbasin:
        zonal_cube = data_cube_copy
    else:
        zonal_cube = data_cube_copy.collapsed(x_axis_name, iris.analysis.SUM)
        zonal_cube.remove_coord(x_axis_name)

    # Attributes
    try:
        zonal_cube.remove_coord('region')
    except iris.exceptions.CoordinateNotFoundError:
        pass

    standard_name = 'northward_ocean_heat_transport'
    var_name = 'hfbasin'

    zonal_cube.standard_name = standard_name
    zonal_cube.long_name = standard_name.replace('_', ' ')
    zonal_cube.var_name = var_name

    if inargs.cumsum:
        zonal_cube = uconv.flux_to_magnitude(zonal_cube)
        # Bug fix: the cumulative sum was previously assigned to an unused
        # variable (zonal_aggregate), so it never reached the output file.
        zonal_cube = cumsum(zonal_cube)

    iris.save(zonal_cube, inargs.outfile)
# Example #4
# 0
def main(inargs):
    """Run the program.

    Aggregate each input file along the collapse direction (zonal mode
    collapses longitude; meridional mode collapses latitude), then
    concatenate the per-file results into a single output cube.
    """

    keep_coord = 'latitude' if inargs.direction == 'zonal' else 'longitude'
    collapse_coord = 'longitude' if inargs.direction == 'zonal' else 'latitude'

    #depth_constraint = gio.iris_vertical_constraint(None, inargs.max_depth)
    metadata_dict = {}

    if inargs.basin:
        basin_file, basin_name = inargs.basin
        basin_cube = iris.load_cube(basin_file, 'region') #& depth_constraint)
        metadata_dict[basin_file] = basin_cube.attributes['history']
    else:
        basin_cube = None

    if inargs.area:
        area_cube = iris.load_cube(inargs.area, 'cell_area') # & depth_constraint)
    else:
        area_cube = None

    if inargs.weights:
        weights_cube = iris.load_cube(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    if inargs.sftlf_file or inargs.realm:
        assert inargs.sftlf_file and inargs.realm, "Must give --realm and --sftlf_file"
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    output_cubelist = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        print(filename)
        cube, history = gio.combine_files(filename, inargs.var, checks=True)  #& depth_constraint)

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube)

        if basin_cube:
            cube = select_basin(cube, basin_cube, basin_name)
            if inargs.weights:
                weights_cube = select_basin(weights_cube, basin_cube, basin_name)

        # Bug fix: was `args.multiply_by_area` — `args` is undefined in this
        # scope and raised NameError whenever the option was evaluated.
        if inargs.multiply_by_area:
            cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

        if inargs.realm:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

        if inargs.weights:
            # Weights lack the time dimension; broadcast them to match.
            assert cube.ndim - weights_cube.ndim == 1
            broadcast_weights_cube = cube.copy()
            broadcast_weights_array = uconv.broadcast_array(weights_cube.data, [1, weights_cube.ndim], cube.shape)
            broadcast_weights_cube.data = broadcast_weights_array
        else:
            broadcast_weights_cube = None
            broadcast_weights_array = None

        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        if 'latitude' in aux_coord_names:
            # Curvilinear grid: aggregate via regridding onto a reference
            # grid (default 1x1 degree if none supplied).
            if not ref_cube:
                lats = numpy.arange(-89.5, 90, 1)
                lons = numpy.arange(0.5, 360, 1)
                ref_cube = grids.make_grid(lats, lons)
            horiz_aggregate = curvilinear_agg(cube, ref_cube, keep_coord,
                                              aggregation_functions[inargs.aggregation],
                                              weights=broadcast_weights_cube)
        else:
            # Rectilinear grid: collapse the unwanted coordinate directly.
            horiz_aggregate = cube.collapsed(collapse_coord, aggregation_functions[inargs.aggregation],
                                             weights=broadcast_weights_array)
            horiz_aggregate.remove_coord(collapse_coord)

        if inargs.flux_to_mag:
            horiz_aggregate = uconv.flux_to_magnitude(horiz_aggregate)

        #horiz_aggregate.data = horiz_aggregate.data.astype(numpy.float32)
        output_cubelist.append(horiz_aggregate)

    iris.util.equalise_attributes(output_cubelist)
    iris.util.unify_time_units(output_cubelist)
    output_cube = output_cubelist.concatenate_cube()

    if inargs.cumsum:
        output_cube = cumsum(output_cube)

    # NOTE(review): only the last file's history is recorded — presumably
    # all inputs share the same history; confirm this is intended.
    metadata_dict[filename] = history[0]
    output_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(output_cube, inargs.outfile)
# Example #5
# 0
def main(inargs):
    """Run the program.

    Aggregate each input file along the collapse direction (zonal mode
    collapses longitude; meridional mode collapses latitude), restricted
    to inargs.max_depth. Output is either a single concatenated file
    (outfile ends in '.nc') or one file per input (outfile is a
    directory path ending in '/').
    """

    keep_coord = 'latitude' if inargs.direction == 'zonal' else 'longitude'
    collapse_coord = 'longitude' if inargs.direction == 'zonal' else 'latitude'

    depth_constraint = gio.iris_vertical_constraint(None, inargs.max_depth)
    metadata_dict = {}

    if inargs.basin:
        basin_file, basin_name = inargs.basin
        # iris coerces the name string into a Constraint via __rand__.
        basin_cube = iris.load_cube(basin_file, 'region' & depth_constraint)
        metadata_dict[basin_file] = basin_cube.attributes['history']
    else:
        basin_cube = None

    if inargs.area:
        area_cube = iris.load_cube(inargs.area, 'cell_area' & depth_constraint)
    else:
        area_cube = None

    if inargs.weights:
        weights_cube = iris.load_cube(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']

    if inargs.sftlf_file or inargs.realm:
        assert inargs.sftlf_file and inargs.realm, "Must give --realm and --sftlf_file"
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    output_cubelist = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        print(filename)
        cube = iris.load_cube(filename, gio.check_iris_var(inargs.var) & depth_constraint)

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube)

        if basin_cube:
            cube = select_basin(cube, basin_cube, basin_name)

        # Bug fix: was `args.multiply_by_area` — `args` is undefined in this
        # scope and raised NameError whenever the option was evaluated.
        if inargs.multiply_by_area:
            cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

        if inargs.realm:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

        if inargs.weights:
            # Weights lack the time dimension; broadcast to (time, y, x).
            assert cube.ndim == 3
            broadcasted_weights = uconv.broadcast_array(weights_cube.data, [1, 2], cube.shape)
        else:
            broadcasted_weights = None

        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        if 'latitude' in aux_coord_names:
            # Curvilinear grid: aggregate via regridding onto the
            # (mandatory) reference grid.
            assert ref_cube
            horiz_aggregate = curvilinear_agg(cube, ref_cube, keep_coord, aggregation_functions[inargs.aggregation])
            #TODO: Add weights=broadcasted_weights
        else:
            # Rectilinear grid: collapse the unwanted coordinate directly.
            horiz_aggregate = cube.collapsed(collapse_coord, aggregation_functions[inargs.aggregation],
                                             weights=broadcasted_weights)
            horiz_aggregate.remove_coord(collapse_coord)

        if inargs.flux_to_mag:
            horiz_aggregate = uconv.flux_to_magnitude(horiz_aggregate)

        horiz_aggregate.data = horiz_aggregate.data.astype(numpy.float32)

        if inargs.outfile[-3:] == '.nc':
            # Single-file mode: collect now, save after the loop.
            output_cubelist.append(horiz_aggregate)
        elif inargs.outfile[-1] == '/':
            # Per-file mode: derive the output name from the input name.
            if inargs.cumsum:
                horiz_aggregate = cumsum(horiz_aggregate)
            infile = filename.split('/')[-1]
            infile = re.sub(cube.var_name + '_', cube.var_name + '-' + inargs.direction + '-' + inargs.aggregation + '_', infile)
            if inargs.annual:
                infile = re.sub('Omon', 'Oyr', infile)
                infile = re.sub('Amon', 'Ayr', infile)

            outfile = inargs.outfile + infile
            metadata_dict[filename] = cube.attributes['history']
            horiz_aggregate.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)

            iris.save(horiz_aggregate, outfile)
            print('output:', outfile)
            del horiz_aggregate

    if inargs.outfile[-3:] == '.nc':
        equalise_attributes(output_cubelist)
        iris.util.unify_time_units(output_cubelist)
        output_cube = output_cubelist.concatenate_cube()

        if inargs.cumsum:
            output_cube = cumsum(output_cube)

        # NOTE(review): only the last file's history is recorded — presumably
        # all inputs share the same history; confirm this is intended.
        metadata_dict[filename] = cube.attributes['history']
        output_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
        iris.save(output_cube, inargs.outfile)