def _main(args):
    """Run the command line program."""

    temperature_cube, temperature_history = gio.combine_files(args.temperature_file, args.temperature_var, checks=True)
    salinity_cube, salinity_history = gio.combine_files(args.salinity_file, 'sea_water_salinity', checks=True)
   
    assert 'c' in str(temperature_cube.units).lower(), "Input temperature units must be in celsius"
    # Note: if Kelvin inputs were allowed, they would need converting
    # (subtract 273.15) and sanity checking before proceeding

    target_shape = temperature_cube.shape[1:]
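    # gsw.p_from_z expects height (negative below the sea surface),
    # hence the sign flip on depth before broadcasting to 3D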
    depth = temperature_cube.coord('depth').points * -1
    broadcast_depth = uconv.broadcast_array(depth, 0, target_shape)
    broadcast_longitude = uconv.broadcast_array(temperature_cube.coord('longitude').points, [1, 2], target_shape)
    broadcast_latitude = uconv.broadcast_array(temperature_cube.coord('latitude').points, [1, 2], target_shape)
    pressure = gsw.p_from_z(broadcast_depth, broadcast_latitude)

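    # TEOS-10 conversions: absolute salinity from practical salinity,
    # then conservative temperature if not already supplied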
    absolute_salinity = gsw.SA_from_SP(salinity_cube.data, pressure, broadcast_longitude, broadcast_latitude)
    if args.temperature_var == 'sea_water_conservative_temperature':
        conservative_temperature = temperature_cube.data
    elif args.temperature_var == 'sea_water_potential_temperature':  
        conservative_temperature = gsw.CT_from_pt(absolute_salinity, temperature_cube.data)
    else:
        raise ValueError('Invalid temperature variable')

    if args.coefficient == 'alpha':
        coefficient_data = gsw.alpha(absolute_salinity, conservative_temperature, pressure)
        var_name = 'alpha'
        standard_name = 'thermal_expansion_coefficient'
        long_name = 'thermal expansion coefficient'
        units = '1/K'
    elif args.coefficient == 'beta':
        coefficient_data = gsw.beta(absolute_salinity, conservative_temperature, pressure)
        var_name = 'beta'
        standard_name = 'saline_contraction_coefficient'
        long_name = 'saline contraction coefficient'
        units = 'kg/g'
    else:
        raise ValueError('Coefficient must be alpha or beta')

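    # Register the output standard name so iris will accept it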
    iris.std_names.STD_NAMES[standard_name] = {'canonical_units': units}
    coefficient_cube = temperature_cube.copy()
    coefficient_cube.data = coefficient_data
    coefficient_cube.standard_name = standard_name    
    coefficient_cube.long_name = long_name
    coefficient_cube.var_name = var_name
    coefficient_cube.units = units

    coefficient_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(coefficient_cube, args.outfile)
def read_data(infiles,
              var,
              area_cube,
              annual=False,
              multiply_by_area=False,
              chunk_annual=False):
    """Read the input data."""

    cube, history = gio.combine_files(infiles, var)
    if annual:
        cube = timeseries.convert_to_annual(cube,
                                            days_in_month=True,
                                            chunk=chunk_annual)

    cube = uconv.flux_to_magnitude(cube)
    if multiply_by_area:
        cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

    coord_names = [coord.name() for coord in cube.coords(dim_coords=True)]
    assert cube.ndim == 3
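    # Broadcast 1D latitudes to 2D so each grid point has a latitude value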
    lats = cube.coord('latitude').points
    if lats.ndim == 1:
        lat_pos = coord_names.index('latitude')
        lats = uconv.broadcast_array(lats, lat_pos - 1, cube.shape[1:])
    else:
        assert lats.shape == cube.shape[1:]

    return cube, lats, history
def read_data(infile_list, var, basin_cube, region):
    """Read the data files.

    The CSIRO-Mk3-6-0 model appears to be formatted incorrectly,
      so its regions cannot be selected by name.

    """

    cube, history = gio.combine_files(infile_list, var)

    atts = cube.attributes
    cube.attributes['history'] = history[0]
    model = atts['model_id']

    if var == 'northward_ocean_heat_transport':
        region_index = {}
        region_index['atlantic_arctic_ocean'] = 0
        region_index['indian_pacific_ocean'] = 1
        region_index['global_ocean'] = 2
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, region_index[region], :]
        else:
            cube = cube.extract(iris.Constraint(region=region))

    cube = timeseries.convert_to_annual(cube, full_months=True)
    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)
        if region != 'global_ocean':
            basin_numbers = {}
            basin_numbers['atlantic_arctic_ocean'] = [2, 4]
            basin_numbers['indian_pacific_ocean'] = [3, 5]
            cube = uconv.mask_unwanted_seas(cube, basin_cube, basin_numbers[region])
            
    return cube
def main(inargs):
    """Run the program."""

    standard_names = {
        'thetao': 'sea_water_potential_temperature',
        'so': 'sea_water_salinity'
    }
    volume_cube = gio.get_ocean_weights(inargs.volfile)
    output_cubelist = iris.cube.CubeList([])

    cube, history = gio.combine_files(inargs.infiles,
                                      standard_names[inargs.invar],
                                      checks=True)
    ntsteps = cube.shape[0]
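    # Compute the volume-weighted global mean one time step at a time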
    for tstep, cube_slice in enumerate(cube.slices_over('time')):
        print(f'time step {tstep + 1} of {ntsteps}')
        coord_names = [coord.name() for coord in cube.dim_coords]
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        ga = cube_slice.collapsed(coord_names[1:],
                                  iris.analysis.MEAN,
                                  weights=volume_cube.data)
        for coord in coord_names[1:] + aux_coord_names:
            ga.remove_coord(coord)
        ga.var_name = inargs.invar + 'ga'
        output_cubelist.append(ga)
    outcube = output_cubelist.merge()[0]
    metadata_dict = {}
    metadata_dict[inargs.infiles[0]] = history
    metadata_dict[inargs.volfile] = volume_cube.attributes['history']
    outcube.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
def main(inargs):
    """Run the program."""

    pe_cube, pe_history = gio.combine_files(
        inargs.pe_files, 'precipitation minus evaporation flux')
    lat_coord = pe_cube.coord('latitude').points

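    # Apply get_regional_totals along the latitude axis for each time step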
    region_data = np.apply_along_axis(get_regional_totals, 1, pe_cube.data,
                                      lat_coord)
    if inargs.cumsum:
        region_data = np.cumsum(region_data, axis=0)

    region_coord = create_region_coord()
    time_coord = pe_cube.coord('time')

    iris.std_names.STD_NAMES['precipitation_minus_evaporation_flux'] = {
        'canonical_units': pe_cube.units
    }
    dim_coords_list = [(time_coord, 0), (region_coord, 1)]
    out_cube = iris.cube.Cube(
        region_data,
        standard_name='precipitation_minus_evaporation_flux',
        long_name=pe_cube.long_name,
        var_name=pe_cube.var_name,
        units=pe_cube.units,
        attributes=pe_cube.attributes,
        dim_coords_and_dims=dim_coords_list)

    out_cube.attributes['history'] = cmdprov.new_log(
        infile_history={inargs.pe_files[0]: pe_history[0]}, git_repo=repo_dir)
    iris.save(out_cube, inargs.outfile)
def get_bin_data(files, var, w_cube):
    """Get binning variable data."""

    cube, history = gio.combine_files(files, var, checks=True)
    w_coord_names = [coord.name() for coord in w_cube.dim_coords]
    coord_names = [coord.name() for coord in cube.dim_coords]
    assert w_cube.shape[-2:] == cube.shape[-2:]
    if not w_cube.shape == cube.shape:
        if (w_cube.ndim == 3) and (cube.ndim == 4) and (w_coord_names[0] == coord_names[0]):
            #e.g. w_cube is surface flux (time, i, j),
            #cube is full depth temperature (time, depth, i, j)
            cube = cube[:, 0, ::]
            cube.remove_coord(coord_names[1])
            assert w_cube.shape == cube.shape
        elif (w_cube.ndim == 2) and (cube.ndim == 4):
            #e.g. w_cube is area (i, j),
            #cube is full depth temperature (time, depth, i, j)
            cube = cube[:, 0, ::]
            cube.remove_coord(coord_names[1])
            assert w_cube.shape == cube.shape[1:]
        else:
            #e.g. w_cube is area (i, j),
            #cube is surface temperature (time, i, j)
            #e.g. w_cube is volume (depth, i, j),
            #cube is temperature (time, depth, i, j)
            assert w_cube.shape == cube.shape[1:]

    return cube, history
def get_data(infile, var, time_constraint, operation, ref_model=None):
    """Get the data for a particular component"""
    
    assert operation in ['anomaly', 'mean']

    cube, history = gio.combine_files(infile, var, new_calendar='365_day')
    cube = cube[:, :, -1]
    cube.remove_coord('region')
    if time_constraint:
        cube = cube.extract(time_constraint)
    if operation == 'mean':
        cube = cube.collapsed('time', iris.analysis.MEAN)
    else:
        cube.data = cube.data - cube.data[0, ::]
        cube = cube[-1, ::]
    cube.remove_coord('time')

    if ref_model:
        try:
            model = cube.attributes['model_id']
        except KeyError:
            model = cube.attributes['source_id']
        assert model == ref_model, f"Model mismatch: {ref_model}, {model}"
        
    return cube, history
def main(inargs):
    """Run the program."""
    
    coefficient_a_cube = iris.load_cube(inargs.coefficient_file, 'coefficient a')
    coefficient_b_cube = iris.load_cube(inargs.coefficient_file, 'coefficient b')
    coefficient_c_cube = iris.load_cube(inargs.coefficient_file, 'coefficient c')
    coefficient_d_cube = iris.load_cube(inargs.coefficient_file, 'coefficient d')

    data_cube, data_history = gio.combine_files(inargs.data_files, inargs.var)
    coord_names = [coord.name() for coord in data_cube.coords(dim_coords=True)]
    assert coord_names[0] == 'year'
    if inargs.branch_year is not None:
        branch_year = inargs.branch_year
    else:
        branch_year = get_branch_year(data_cube, inargs.control_time_units)
    time_values = numpy.arange(branch_year, branch_year + data_cube.shape[0]) 
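    # Evaluate the fitted drift polynomial (coefficients a-d) at each year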
    drift_signal, start_polynomial = remove_drift.apply_polynomial(time_values, coefficient_a_cube.data,
                                                                   coefficient_b_cube.data, coefficient_c_cube.data,
                                                                   coefficient_d_cube.data, poly_start=None)

    new_cube = data_cube - drift_signal
    new_cube.metadata = data_cube.metadata
            
    metadata_dict = {inargs.data_files[0]: data_history[0], 
                     inargs.coefficient_file: coefficient_a_cube.attributes['history']}
    new_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(new_cube, inargs.outfile)
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=False)  

    if inargs.aggregation == 'sum':
        cube = cube.collapsed('depth', iris.analysis.SUM)
    else:
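        # For the weighted mean, weight each level by its vertical extent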
        dim_coord_names = [coord.name() for coord in cube.dim_coords]
        depth_coord = cube.coord('depth')
        assert depth_coord.units in ['m', 'dbar'], "Unrecognised depth axis units"
        if depth_coord.units == 'm':
            vert_extents = spatial_weights.calc_vertical_weights_1D(depth_coord, dim_coord_names, cube.shape)
        elif depth_coord.units == 'dbar':
            vert_extents = spatial_weights.calc_vertical_weights_2D(depth_coord, cube.coord('latitude'), dim_coord_names, cube.shape)
        cube = cube.collapsed('depth', iris.analysis.MEAN, weights=vert_extents)
    cube.remove_coord('depth')
    
    metadata_dict = {}
    metadata_dict[inargs.infiles[0]] = history
    cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(cube, inargs.outfile)  
def main(inargs):
    """Run the program."""

    assert len(inargs.infiles) > 1
    if inargs.variables:
        variable_list = inargs.variables
    else:
        cube_list = iris.load(inargs.infiles[0])
        variable_list = [cube.long_name for cube in cube_list]

    cube_list = iris.cube.CubeList([])
    for var in variable_list:
        cube, history = gio.combine_files(inargs.infiles, var)
        cube_list.append(cube)

    metadata_dict = {inargs.infiles[-1]: history[-1]}
    log_entry = cmdprov.new_log(infile_history=metadata_dict,
                                git_repo=repo_dir)
    if len(cube_list) > 1:
        iris.util.equalise_attributes(cube_list)
        for cube in cube_list:
            cube.attributes['history'] = log_entry
    else:
        cube_list = cube_list[0]
        cube_list.attributes['history'] = log_entry

    iris.save(cube_list, inargs.outfile)
def main(inargs):
    """Run the program."""

    pr_cube, pr_history = gio.combine_files(inargs.pr_files, 'precipitation_flux', checks=True)
    evap_cube, evap_history = gio.combine_files(inargs.evap_files, 'water_evapotranspiration_flux', checks=True)

    assert pr_cube.shape == evap_cube.shape
    pe_cube = pr_cube.copy()
    pe_cube.data = pr_cube.data - evap_cube.data

    iris.std_names.STD_NAMES['precipitation_minus_evaporation_flux'] = {'canonical_units': pe_cube.units}
    pe_cube.standard_name = 'precipitation_minus_evaporation_flux'
    pe_cube.long_name = 'precipitation minus evaporation flux'
    pe_cube.var_name = 'pe'
    pe_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)

    iris.save(pe_cube, inargs.outfile)
def main(inargs):
    """Run the program."""

    if inargs.var == 'ocean_volume':
        data_cube = gio.get_ocean_weights(inargs.infile, sanity_check=False)
    else:
        data_cube, data_history = gio.combine_files(inargs.infile, inargs.var)
    mask_cube, mask_history = gio.combine_files(inargs.mask_file, inargs.mask_var)

    if inargs.mask_method == 'copy':    
        mask = copy_mask(mask_cube, data_cube.shape)
    else:
        mask = create_mask(mask_cube, data_cube.shape)
    data_cube.data = numpy.ma.asarray(data_cube.data)
    data_cube.data.mask = mask
            
    data_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir) 
    iris.save(data_cube, inargs.outfile)
def read_data(file_list, var, grid_point, convert_to_annual=False):
    """Read input data."""

    cube, history = gio.combine_files(file_list, var)
    if grid_point:  
        cube = select_point(cube, grid_point, timeseries=True)
    if convert_to_annual:
        cube = timeseries.convert_to_annual(cube)
    
    return cube, history[0]
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)
    if inargs.surface:
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            cube = cube.extract(iris.Constraint(depth=0))
        else:
            print('no depth axis for surface extraction')
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)
    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]},
                          git_repo=repo_dir)

    dim_vals = {}
    dim_vals['latitude'] = get_dim_vals(inargs.lats)
    dim_vals['longitude'] = get_dim_vals(inargs.lons)
    if inargs.levs:
        dim_vals['depth'] = get_dim_vals(inargs.levs)
    else:
        dim_vals['depth'] = get_dim_vals(inargs.depth_bnds, bounds=True)

    # Regrid from curvilinear to rectilinear if necessary
    regrid_status = False
    if inargs.lats:
        horizontal_grid = grids.make_grid(dim_vals['latitude'],
                                          dim_vals['longitude'])
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube, target_grid_cube=horizontal_grid)

    # Regrid to new grid
    if dim_vals['depth'] or not regrid_status:
        sample_points = get_sample_points(cube, dim_vals)
        cube = cube.interpolate(sample_points, iris.analysis.Linear())
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'latitude' in coord_names:
            cube.coord('latitude').guess_bounds()
        if 'longitude' in coord_names:
            cube.coord('longitude').guess_bounds()
        if inargs.levs:
            cube = spatial_weights.guess_depth_bounds(cube)
        else:
            cube.coord('depth').bounds = get_depth_bounds(inargs.depth_bnds)

    if numpy.isnan(numpy.min(cube.data)):
        cube = remove_nans(cube)

    # Reinstate time dim_coord if necessary
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    if 'time' in aux_coord_names:
        cube = iris.util.new_axis(cube, 'time')

    cube.attributes['history'] = log
    iris.save(cube, inargs.outfile, fill_value=1e20)
def read_infiles(infiles, var, time_constraint, ensnum):
    """Combine multiple input files into one cube"""

    cube, history = gio.combine_files(infiles, var)
    cube = gio.check_time_units(cube)
    cube = cube.extract(time_constraint)

    new_aux_coord = iris.coords.AuxCoord(ensnum, long_name='ensemble_member', units='no_unit')
    cube.add_aux_coord(new_aux_coord)

    return cube, history[0]
def get_data(infile, variable, time_constraint):
    """Get the data for a particular model"""

    cube, history = gio.combine_files(infile, variable, new_calendar='365_day')
    cube = cube.extract(time_constraint)
    iris.coord_categorisation.add_year(cube, 'time')
    anomaly_data = cube.data - cube.data[0, :]
    start_data = cube.data[0, :]

    return cube, anomaly_data, start_data
def get_weights_data(file_list, var):
    """Read the weights data file/s"""
    
    w_var = mom_vars.get(var, var)
    if ('vol' in w_var) or ('area' in w_var):
        assert len(file_list) == 1
        w_cube = gio.get_ocean_weights(file_list[0])
        history = w_cube.attributes['history'] 
    else:
        w_cube, history = gio.combine_files(file_list, var, checks=True)

    return w_cube, history
def combine_infiles(inargs, time_constraint, depth_constraint):
    """Combine multiple input files into one cube"""

    cube, history = gio.combine_files(inargs.infiles, inargs.var)

    cube = cube.extract(time_constraint & depth_constraint)
    cube = iris.util.squeeze(cube)

    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]}, git_repo=repo_dir)
    cube.attributes['history'] = log

    return cube
def get_data(infile, var, time_constraint):
    """Get the data for a particular component"""

    cube, history = gio.combine_files(infile, var, new_calendar='365_day')
    cube = cube[:, -1, :]
    cube.remove_coord('precipitation minus evaporation region')
    if time_constraint:
        cube = cube.extract(time_constraint)
    cube.data = cube.data - cube.data[0, ::]
    cube = cube[-1, ::]
    cube.remove_coord('time')

    return cube, history
def main(inargs):
    """Run the program."""

    tas_cube, history = gio.combine_files(inargs.tas_files, inargs.var)
    if inargs.annual:
        tas_cube = timeseries.convert_to_annual(tas_cube)
    area_data = spatial_weights.area_array(tas_cube)
    coord_names = [coord.name() for coord in tas_cube.dim_coords]
    tasga_cube = tas_cube.collapsed(coord_names[1:], iris.analysis.MEAN, weights=area_data)
    tasga_cube.remove_coord(coord_names[1])
    tasga_cube.remove_coord(coord_names[2])

    tasga_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(tasga_cube, inargs.outfile)
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var)
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, aggregation='mean', days_in_month=True)
    if inargs.flux_to_mag:
        cube = uconv.flux_to_magnitude(cube)

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    assert dim_coord_names[0] in ['time', 'year']
    cube.data = numpy.cumsum(cube.data, axis=0)

    cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir, infile_history={inargs.infiles[0]: history[0]})
    iris.save(cube, inargs.outfile)
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var)
   
    start_cube = period_mean(cube.copy(), inargs.start_period)
    end_cube = period_mean(cube.copy(), inargs.end_period)

    outcube = cube[0, ::].copy()
    outcube.remove_coord('time')
    outcube.data = end_cube.data - start_cube.data

    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]}, git_repo=repo_dir) 
    outcube.attributes['history'] = log

    iris.save(outcube, inargs.outfile)
def get_data(infiles, var, data_type, time_constraint, agg_method, pct=False):
    """Get the data for a particular model"""

    cube_list = iris.cube.CubeList([])
    for ensnum, infile in enumerate(infiles):
        cube, history = gio.combine_files(infile, var, new_calendar='365_day')
        assert cube.units == 'kg', f'{infile} units not kg'
        if time_constraint:
            cube = cube.extract(time_constraint)
        if data_type == 'cumulative_anomaly':
            cube.data = cube.data - cube.data[0, ::]
            cube = cube[-1, ::]
        elif data_type == 'climatology':
            cube = cube.collapsed('time', iris.analysis.MEAN)
        cube.remove_coord('time')
        new_aux_coord = iris.coords.AuxCoord(ensnum,
                                             long_name='ensemble_member',
                                             units='no_unit')
        cube.add_aux_coord(new_aux_coord)
        cube_list.append(cube)

    if len(cube_list) > 1:
        ens_cube = ensagg.calc_ensagg(cube_list, operator=agg_method)
    else:
        ens_cube = cube_list[0]

    basins = [
        'Atlantic', 'Pacific', 'Indian', 'Arctic', 'Marginal Seas', 'Land',
        'Ocean', 'Globe'
    ]
    pe_regions = ['SH-P', 'SH-E', 'T-P', 'NH-E', 'NH-P', 'Globe']
    df = pd.DataFrame(ens_cube.data, columns=basins, index=pe_regions)
    df = df.iloc[::-1]
    if pct:
        if var in ['precipitation_minus_evaporation_flux',
                   'water_flux_into_sea_water']:
            # row labels follow the pe_regions index defined above
            total_pos = df['Globe']['NH-P'] + df['Globe']['T-P'] + df['Globe']['SH-P']
            total_neg = abs(df['Globe']['NH-E'] + df['Globe']['SH-E'])
            df = df / max(total_pos, total_neg)
        else:
            df = df / df['Globe']['Globe']

    return df, history
def get_data(infile, var, time_constraint):
    """Get the data for a particular model"""

    cube, history = gio.combine_files(infile, var, new_calendar='365_day')
    cube = cube.extract(time_constraint)
    iris.coord_categorisation.add_year(cube, 'time')
    assert cube.shape[-1] == 8
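    # Basin axis assumed ordered: atlantic, pacific, indian, arctic,
    # marginal seas, land, ocean, globe (hence 8 entries)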
    data_array = np.zeros([cube.shape[0], 4])
    data_array[:, 0] = cube.data[:, -1, 0]  # atlantic
    data_array[:, 1] = cube.data[:, -1, 2]  # indian
    data_array[:, 2] = cube.data[:, -1, 1]  # pacific
    data_array[:, 3] = cube.data[:, -1, 5]  # land

    start_data = data_array[0, :]
    anomaly_data = data_array - start_data

    return cube, anomaly_data, start_data
def main(inargs):
    """Run the program."""

    temperature_cube, history = gio.combine_files(inargs.temperature_files,
                                                  inargs.var)

    temperature_atts = temperature_cube.attributes
    metadata_dict = {inargs.temperature_files[0]: history[0]}

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    temperature_cube = temperature_cube.extract(level_subset)

    if inargs.annual:
        temperature_cube = timeseries.convert_to_annual(temperature_cube,
                                                        chunk=inargs.chunk)

    if inargs.regrid:
        area_cube = read_area_file(inargs.regrid)
        temperature_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            temperature_cube, weights=area_cube.data)
        volume_data = spatial_weights.volume_array(temperature_cube)
        grid = 'y72x144'
    else:
        assert inargs.volume_file, "Must provide volume file if not regridding data"
        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                temperature_cube, level_subset,
                                                metadata_dict)
        coord_names = [coord.name() for coord in temperature_cube.dim_coords]
        grid = None

    ohc_cube = ohc(temperature_cube,
                   volume_data,
                   inargs.density,
                   inargs.specific_heat,
                   coord_names,
                   vertical_integral=inargs.vertical_integral,
                   chunk=inargs.chunk)

    ohc_cube = add_metadata(temperature_cube, temperature_atts, ohc_cube,
                            inargs)
    log = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    ohc_cube.attributes['history'] = log

    iris.save(ohc_cube, inargs.outfile)
def read_global_variable(model, variable, ensemble, manual_files):
    """Read data for a global variable"""

    manual = file_match(manual_files, model, variable, ensemble)
    if manual or variable == 'massa':
        file_list = manual
    else:
        file_list = clef_search(model, variable, ensemble)

    if file_list:
        cube, history = gio.combine_files(file_list, names[variable])
        cube = timeseries.convert_to_annual(cube)
        cube = time_check(cube)
        extra_log.append(file_list)
    else:
        cube = None

    return cube
def main(inargs):
    """Run the program."""

    prw_cube, history = gio.combine_files(inargs.prw_files, 'atmosphere_mass_content_of_water_vapor')
    if inargs.area_file:
        area_cube = iris.load_cube(inargs.area_file, 'cell_area')
        weights = uconv.broadcast_array(area_cube.data, [1, 2], prw_cube.shape)
    else:
        weights = spatial_weights.area_array(prw_cube)

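    # Global mass of water vapour: area-weighted sum of kg m-2 over the grid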
    coord_names = [coord.name() for coord in prw_cube.dim_coords]
    massa_cube = prw_cube.collapsed(coord_names[1:], iris.analysis.SUM, weights=weights)
    units = str(massa_cube.units)
    massa_cube.units = units.replace('m-2', '')
    massa_cube.remove_coord(coord_names[1])
    massa_cube.remove_coord(coord_names[2])

    massa_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(massa_cube, inargs.outfile)
def get_cube_list(infiles, agg, time_bounds=None, quick=False):
    """Read and process data."""

    assert agg in ['clim', 'anom']

    ensemble_cube_list = iris.cube.CubeList([])
    for ensnum, ensemble_member in enumerate(infiles):
        print(ensemble_member)
        cube, history = gio.combine_files(
            ensemble_member,
            'precipitation_minus_evaporation_flux',
            new_calendar='365_day')
        cube = gio.check_time_units(cube)
        if time_bounds:
            time_constraint = gio.get_time_constraint(time_bounds)
            cube = cube.extract(time_constraint)
        elif quick:
            cube = cube[0:120, ::]
        if agg == 'clim':
            cube = timeseries.convert_to_annual(cube,
                                                aggregation='mean',
                                                days_in_month=True)
            cube = cube.collapsed('time', iris.analysis.MEAN)
        elif agg == 'anom':
            start_data = cube.data[0, ::]
            cube = cube[-1, ::]
            cube.data = cube.data - start_data
        cube.remove_coord('time')
        cube = regrid(cube)
        new_aux_coord = iris.coords.AuxCoord(ensnum,
                                             long_name='ensemble_member',
                                             units='no_unit')
        cube.add_aux_coord(new_aux_coord)
        cube.cell_methods = ()
        ensemble_cube_list.append(cube)
        print("Total number of models:", len(ensemble_cube_list))

    return ensemble_cube_list, history
def main(inargs):
    """Run the program."""

    agg_functions = {'mean': iris.analysis.MEAN, 'sum': iris.analysis.SUM}
    metadata_dict = {}
    
    basin_cube = iris.load_cube(inargs.basin_file, 'region')
    assert basin_cube.data.min() == 11
    assert basin_cube.data.max() == 18
    basin_numbers = numpy.array([1, 2, 3])
    metadata_dict[inargs.basin_file] = basin_cube.attributes['history']

    flag_values = '1 2 3'
    flag_meanings = 'atlantic indo-pacific globe'
    basin_coord = iris.coords.DimCoord(basin_numbers,
                                       standard_name=basin_cube.standard_name,
                                       long_name=basin_cube.long_name,
                                       var_name=basin_cube.var_name,
                                       units=basin_cube.units,
                                       attributes={'flag_values': flag_values,
                                                   'flag_meanings': flag_meanings})

    if inargs.weights:
        weights_cube = gio.get_ocean_weights(inargs.weights)
        metadata_dict[inargs.weights] = weights_cube.attributes['history']        

    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        print(infile)

        if inargs.var == 'ocean_volume':
            cube = gio.get_ocean_weights(infile)
            history = [cube.attributes['history']]
        else:
            cube, history = gio.combine_files(infile, inargs.var, checks=True)

        assert cube.ndim in [3, 4]
        coord_names = [coord.name() for coord in cube.dim_coords]

        if inargs.annual:
            cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)   

        assert basin_cube.shape == cube.shape[-2:]
        basin_array = uconv.broadcast_array(basin_cube.data, [cube.ndim - 2, cube.ndim - 1], cube.shape)
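        # masked_where hides points where the condition is True,
        # so each mask keeps only the cells in that basin grouping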
        basin_masks = {'atlantic': basin_array > 12,
                       'indo-pacific': (basin_array < 13) | (basin_array > 15),
                       'globe': basin_array > 16}

        if inargs.weights:
            assert weights_cube.data.shape == cube.shape[-3:]
            if cube.ndim == 4:
                weights_array = uconv.broadcast_array(weights_cube.data, [1, 3], cube.shape)
            else:
                weights_array = weights_cube.data
        else:
            weights_array = None

        if cube.ndim == 3:
            outdata = numpy.ma.zeros([cube.shape[0], len(basin_numbers)])
        else:
            outdata = numpy.ma.zeros([cube.shape[0], cube.shape[1], len(basin_numbers)])

        for basin_index, basin_name in enumerate(['atlantic', 'indo-pacific', 'globe']):
            temp_cube = cube.copy()
            mask = basin_masks[basin_name]
            temp_cube.data = numpy.ma.masked_where(mask, temp_cube.data)
            if len(coord_names) == cube.ndim:
                horiz_agg = temp_cube.collapsed(coord_names[-2:], agg_functions[inargs.agg], weights=weights_array).data
            elif inargs.agg == 'mean':
                horiz_agg = numpy.ma.average(temp_cube.data, axis=(-2,-1), weights=weights_array)
            elif inargs.agg == 'sum':
                horiz_agg = numpy.ma.sum(temp_cube.data, axis=(-2,-1))
            if outdata.ndim == 2:
                outdata[:, basin_index] = horiz_agg
            else:
                outdata[:, :, basin_index] = horiz_agg

        coord_list = [(cube.dim_coords[0], 0)]
        if cube.ndim == 4:
            coord_list.append((cube.dim_coords[1], 1))
            coord_list.append((basin_coord, 2))
        else:
            coord_list.append((basin_coord, 1))
        outcube = iris.cube.Cube(outdata,
                                 standard_name=cube.standard_name,
                                 long_name=cube.long_name,
                                 var_name=cube.var_name,
                                 units=cube.units,
                                 attributes=cube.attributes,
                                 dim_coords_and_dims=coord_list)
        output_cubelist.append(outcube)

    iris.util.equalise_attributes(output_cubelist)
    iris.util.unify_time_units(output_cubelist)
    outcube = output_cubelist.concatenate_cube()
    if history:
        metadata_dict[inargs.infiles[-1]] = history[0]
    outcube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
def main(args):
    """Run the program."""

    infiles = sorted(args.infiles)

    var_names = {
        'precipitation_flux': 'precipitation',
        'water_evapotranspiration_flux': 'evaporation',
        'precipitation_minus_evaporation_flux': 'P-E'
    }
    var_name = var_names[args.var]
    ylabel = f"annual mean {var_name} (kg)"

    fig, axes = plt.subplots(1, 2, figsize=(12, 6))

    region_data = []
    basin_data = []
    nfiles = len(infiles)
    print(f"Number models: {nfiles}")
    for modelnum in range(nfiles):
        cube, history = gio.combine_files(infiles[modelnum], args.var)
        cube = cube.collapsed('time', iris.analysis.MEAN)
        cube.remove_coord('time')
        try:
            model = cube.attributes['model_id']
        except KeyError:
            model = cube.attributes['source_id']

        # cube(time, pereg, basin)
        #  pereg: SH_precip SH_evap tropical_precip NH_evap NH_precip globe
        #  basin: atlantic pacific indian arctic marginal_seas land ocean globe

        region_data.append([model, 'SH-P', cube.data[0, -1]])
        region_data.append([model, 'SH-E', cube.data[1, -1]])
        region_data.append([model, 'T-P', cube.data[2, -1]])
        region_data.append([model, 'NH-E', cube.data[3, -1]])
        region_data.append([model, 'NH-P', cube.data[4, -1]])

        basin_data.append([model, 'Atlantic', cube.data[-1, 0]])
        basin_data.append([model, 'Indian', cube.data[-1, 2]])
        basin_data.append([model, 'Pacific', cube.data[-1, 1]])
        basin_data.append([model, 'Land', cube.data[-1, 5]])

    region_df = pd.DataFrame(region_data,
                             columns=['model', 'P-E region', ylabel])
    basin_df = pd.DataFrame(basin_data, columns=['model', 'basin', ylabel])
    plot_data(axes[0],
              region_df,
              'P-E region',
              ylabel,
              '(a) meridional climatology',
              model_dots=args.dots)
    plot_data(axes[1],
              basin_df,
              'basin',
              ylabel,
              '(b) zonal climatology',
              model_dots=args.dots)

    plt.savefig(args.outfile, bbox_inches='tight')

    metadata_dict = {infiles[-1]: history[0]}
    log_text = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    log_file = re.sub(r'\.png$', '.met', args.outfile)
    cmdprov.write_log(log_file, log_text)