Code example #1
def read_data(infile_list, var, model, basin_cube):
    """Read the data files.

    The CSIRO-Mk3-6-0 model output seems to be formatted incorrectly,
      so the "global_ocean" region can't be selected by name.

    """
    cube = iris.load(infile_list,
                     gio.check_iris_var(var),
                     callback=save_history)
    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    cube.attributes = atts
    cube.attributes['history'] = history[0]

    if var == 'northward_ocean_heat_transport':
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, 2, :]  # region index 2 = global_ocean
        else:
            cube = cube.extract(iris.Constraint(region='global_ocean'))

    cube = timeseries.convert_to_annual(cube, full_months=True)

    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)

    return cube
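Note: most examples in this listing pass a save_history callback to iris.load and later read a module-level history list, neither of which appears in the excerpts. A minimal sketch of the assumed pattern (the real helper in the source project may differ):

history = []

def save_history(cube, field, filename):
    """Stash each input file's history attribute at load time.

    equalise_attributes() removes attributes that differ between files
    (history almost always does), so it is captured here first.
    """
    history.append(cube.attributes['history'])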
Code example #2
def read_data(infiles, variable, calc_annual=False, chunk=False):
    """Load the input data."""

    cube = iris.load(infiles,
                     gio.check_iris_var(variable),
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    infile_history = {}
    infile_history[infiles[0]] = history[0]
    cube.attributes['history'] = gio.write_metadata(file_info=infile_history)

    return cube, coord_names, aux_coord_names, grid_type
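A hypothetical call, with illustrative file and variable names:

infiles = ['thetao_Omon_MODEL_historical_r1i1p1_185001-185912.nc',
           'thetao_Omon_MODEL_historical_r1i1p1_186001-186912.nc']
cube, dim_coords, aux_coords, grid_type = read_data(
    infiles, 'sea_water_potential_temperature', calc_annual=True)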
Code example #3
def main(inargs):
    """Run the program."""

    if inargs.depth:
        level_constraint = iris.Constraint(depth=inargs.depth)
    else:
        level_constraint = iris.Constraint()

    cube = iris.load(inargs.infiles,
                     inargs.var & level_constraint,
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    area_cube = read_area(inargs.area_file)
    clim_cube = iris.load_cube(inargs.climatology,
                               inargs.var & level_constraint)

    atts = set_attributes(inargs, cube, area_cube, clim_cube)

    if inargs.smoothing:
        cube = smooth_data(cube, inargs.smoothing)
    area_weights = get_area_weights(clim_cube, area_cube)

    metric = calc_amplification_metric(cube, clim_cube, area_weights, atts)
    iris.save(metric, inargs.outfile)
Code example #4
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var), callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)

    if inargs.area:
        cube = multiply_by_area(cube) 

    if inargs.sftlf_file and inargs.realm:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

    zonal_aggregate = cube.collapsed('longitude', aggregation_functions[inargs.aggregation])
    zonal_aggregate.remove_coord('longitude')

    zonal_aggregate.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]}) 
    iris.save(zonal_aggregate, inargs.outfile)
Code example #5
def read_data(infiles, variable, time_constraint):
    """Load the input data."""

    cube = iris.load(infiles,
                     gio.check_iris_var(variable),
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube = cube.extract(time_constraint)

    if 'J' not in str(cube.units):
        cube = convert_to_joules(cube)

    if variable == 'surface_downward_heat_flux_in_sea_water':
        agg_method = 'sum'
    elif variable == 'ocean_heat_content':
        agg_method = 'mean'
    else:
        raise ValueError("no annual aggregation method defined for %s" % variable)
    cube = timeseries.convert_to_annual(cube, aggregation=agg_method)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    return cube, coord_names, aux_coord_names, grid_type
Code example #6
def load_data(infile, basin):
    """Load, temporally aggregate and spatially slice input data"""

    try:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(
                infile,
                'ocean_meridional_overturning_mass_streamfunction',
                callback=save_history)
            equalise_attributes(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)

            cube = cube[:, basin_index[basin], :, :]
            cube = timeseries.convert_to_annual(cube)

        experiment = cube.attributes['experiment_id']
        if experiment == 'historicalMisc':
            experiment = 'historicalAA'

        depth_constraint = iris.Constraint(depth=lambda cell: cell <= 250)
        sh_constraint = iris.Constraint(
            latitude=lambda cell: -30.0 <= cell < 0.0)
        nh_constraint = iris.Constraint(
            latitude=lambda cell: 0.0 < cell <= 30.0)

        sh_cube = cube.extract(depth_constraint & sh_constraint)
        nh_cube = cube.extract(depth_constraint & nh_constraint)
    except OSError:
        sh_cube = nh_cube = experiment = None

    return sh_cube, nh_cube, experiment
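load_data indexes the basin dimension through a module-level basin_index dict that the excerpt omits. Judging from examples #17 and #22 (Atlantic/Arctic at index 0, Indian/Pacific at 1, global at 2 in CMIP5 overturning output), it is presumably something like:

basin_index = {'atlantic_arctic_ocean': 0, 'indian_pacific_ocean': 1, 'global_ocean': 2}  # assumed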
Code example #7
def get_data(filename, var, target_grid=None):
    """Read data.
    
    Positive is defined as down.
    
    """

    if filename:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(filename, gio.check_iris_var(var))
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            if target_grid:
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube, target_grid_cube=target_grid)

            coord_names = [coord.name() for coord in cube.dim_coords]
            if 'depth' in coord_names:
                depth_constraint = iris.Constraint(depth=0)
                cube = cube.extract(depth_constraint)

            if 'up' in cube.standard_name:
                cube.data = cube.data * -1
    else:
        cube = None

    return cube
Code example #8
def get_data(filename, var, time_constraint):
    """Read data."""

    cube = iris.load_cube(filename, gio.check_iris_var(var) & time_constraint)
    cube = gio.check_time_units(cube)
    cube = iris.util.squeeze(cube)

    return cube
Code example #9
def concat_cubes(cube_list):
    """Concatenate an iris cube list."""

    iris.util.unify_time_units(cube_list)
    cube = cube_list.concatenate_cube()
    cube = gio.check_time_units(cube)

    return cube
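Unlike combine_cubes in example #11, this helper assumes the attributes of the listed cubes already match, since there is no equalise_attributes call. Hypothetical usage:

cube_list = iris.load(['tos_1990.nc', 'tos_1991.nc'], 'sea_surface_temperature')
cube = concat_cubes(cube_list)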
Code example #10
def get_data(filename, var, target_grid=None):
    """Read data."""
    
    cube = iris.load_cube(filename, gio.check_iris_var(var))
    cube = gio.check_time_units(cube)
    cube = iris.util.squeeze(cube)

    return cube
Code example #11
def combine_cubes(cube_list):
    """Combine two like cubes"""

    equalise_attributes(cube_list)
    iris.util.unify_time_units(cube_list)
    cube = cube_list.concatenate_cube()
    cube = gio.check_time_units(cube)

    return cube
Code example #12
def main(inargs):
    """Run the program."""

    # Read data
    cube = iris.load(inargs.infiles,
                     'surface_downward_eastward_stress',
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    # Prepare data
    cube = timeseries.convert_to_annual(cube)

    sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
    mask = create_land_mask(sftlf_cube, cube.shape)
    cube.data = numpy.ma.asarray(cube.data)
    cube.data.mask = mask

    cube = cube.collapsed('longitude', iris.analysis.MEAN)

    # Calculate metrics
    xdata = cube.coord('latitude').points
    xnew = numpy.linspace(xdata[0], xdata[-1], num=1000, endpoint=True)

    hemispheres = ['sh', 'nh']
    directions = ['easterly', 'westerly']

    metric_dict = {}
    for hemisphere, direction in itertools.product(hemispheres, directions):
        metric_dict[(hemisphere, direction, 'location')] = []
        metric_dict[(hemisphere, direction, 'magnitude')] = []

    for ycube in cube.slices(['latitude']):
        func = interp1d(xdata, ycube.data, kind='cubic')
        ynew = func(xnew)
        for hemisphere, direction in itertools.product(hemispheres,
                                                       directions):
            loc, mag = wind_stress_metrics(xnew, ynew, hemisphere, direction)
            metric_dict[(hemisphere, direction, 'location')].append(loc)
            metric_dict[(hemisphere, direction, 'magnitude')].append(mag)

    # Write the output file
    atts = cube.attributes
    infile_history = {inargs.infiles[0]: history[0]}
    atts['history'] = gio.write_metadata(file_info=infile_history)
    units_dict = {
        'magnitude': cube.units,
        'location': cube.coord('latitude').units
    }
    cube_list = create_outcubes(metric_dict, cube.attributes, units_dict,
                                cube.coord('time'))

    iris.save(cube_list, inargs.outfile)
Code example #13
def read_infiles(infiles, var, time_constraint, ensnum):
    """Combine multiple input files into one cube"""

    cube, history = gio.combine_files(infiles, var)
    cube = gio.check_time_units(cube)
    cube = cube.extract(time_constraint)

    new_aux_coord = iris.coords.AuxCoord(ensnum, long_name='ensemble_member', units='no_unit')
    cube.add_aux_coord(new_aux_coord)

    return cube, history[0]
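The scalar ensemble_member coordinate added here is what lets iris build an ensemble axis downstream. A hypothetical merge of several per-member cubes, assuming they are otherwise structurally identical:

cube_list = iris.cube.CubeList([member0, member1, member2])
equalise_attributes(cube_list)
ensemble_cube = cube_list.merge_cube()  # ensemble_member becomes a new dimension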
Code example #14
def main(inargs):
    """Run the program."""

    # Read data
    level_constraint, lat_constraint = get_constraints(inargs.depth,
                                                       inargs.hemisphere)
    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var) & level_constraint,
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    # Get area file (if applicable)
    if inargs.hemisphere:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = cube.extract(lat_constraint)
        area_cube = None
    else:
        area_cube = read_optional(inargs.area_file)

    # Mask ocean or atmosphere (if applicable)
    if inargs.sftlf_file:
        sftlf_file, selected_region = inargs.sftlf_file
        sftlf_cube = read_optional(sftlf_file)
        mask = create_mask(sftlf_cube, selected_region)
        cube.data = numpy.ma.asarray(cube.data)
        cube.data.mask = mask
        if area_cube:
            areas_dict = area_info(area_cube.copy(), mask, selected_region)
    else:
        areas_dict = {}
        sftlf_cube = None

    # Outfile attributes
    atts = set_attributes(inargs, cube, area_cube, sftlf_cube, areas_dict)

    # Temporal smoothing
    if inargs.smoothing:
        cube = smooth_data(cube, inargs.smoothing)

    # Calculate metric
    area_weights = get_area_weights(cube, area_cube)
    if inargs.metric == 'bulk-deviation':
        metric = calc_bulk_deviation(cube, area_weights, atts)
    elif inargs.metric == 'mean':
        metric = calc_global_mean(cube, area_weights, atts)
    elif inargs.metric == 'grid-deviation':
        metric = calc_grid_deviation(cube, inargs.var, area_weights, atts)
    else:
        raise ValueError("unrecognised metric: %s" % inargs.metric)

    iris.save(metric, inargs.outfile)
Code example #15
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             sftlf_cube=None,
             realm=None):
    """Read, merge, temporally aggregate and calculate zonal sum.
    
    Positive is defined as down.
    
    """

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            depth_constraint = iris.Constraint(depth=0)
            cube = cube.extract(depth_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = multiply_by_area(cube)

        if 'up' in cube.standard_name:
            cube.data = cube.data * -1

        if sftlf_cube and realm in ['ocean', 'land']:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, realm)

        zonal_sum = cube.collapsed('longitude', iris.analysis.SUM)
        zonal_sum.remove_coord('longitude')

        grid_spacing = grids.get_grid_spacing(zonal_sum)
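        # normalise by the latitude grid spacing so the zonal sums are per degree of latitude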
        zonal_sum.data = zonal_sum.data / grid_spacing

    else:
        zonal_sum = None

    return zonal_sum, metadata_dict
Code example #16
def main(inargs):
    """Run the program."""

    if inargs.sftlf_file:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
    else:
        sftlf_cube = None

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var))
        history = cube[0].attributes['history']

        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = iris.util.squeeze(cube)
        cube.attributes['history'] = gio.write_metadata(
            file_info={inargs.infiles[0]: history})

        cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

    output = {}
    output['full'] = calc_fields(cube,
                                 sftlf_cube,
                                 inargs.aggregation,
                                 realm=None,
                                 area=inargs.area)
    if inargs.sftlf_file:
        for realm in ['ocean', 'land']:
            output[realm] = calc_fields(cube,
                                        sftlf_cube,
                                        inargs.aggregation,
                                        realm=realm,
                                        area=inargs.area)

    cube_list = iris.cube.CubeList()
    for realm, output_cubes in output.items():
        for out_cube in output_cubes:
            cube_list.append(out_cube)

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(cube_list, inargs.outfile, netcdf_format='NETCDF3_CLASSIC')
Code example #17
def main(inargs):
    """Run the program."""

    region_constraint = iris.Constraint(
        region='atlantic_arctic_ocean'
    )  # regions: "atlantic_arctic_ocean", "indian_pacific_ocean ", "global_ocean         " (some models pad the names with trailing spaces)
    cube = iris.load(inargs.infiles,
                     'ocean_meridional_overturning_mass_streamfunction'
                     & region_constraint,
                     callback=save_history)
    if not cube:
        cube = iris.load(inargs.infiles,
                         'ocean_meridional_overturning_mass_streamfunction',
                         callback=save_history)
        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube = cube[:, 0, :, :]  # index for Atlantic
    else:
        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube.remove_coord('region')

    cube = gio.check_time_units(cube)

    cube = timeseries.convert_to_annual(cube)

    target_lat, error = uconv.find_nearest(cube.coord('latitude').points,
                                           30,
                                           index=False)
    cube = cube.extract(iris.Constraint(latitude=target_lat))
    cube.remove_coord('latitude')

    assert str(cube.units) == 'kg s-1'
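    # kg s-1 to Sv: divide by a nominal seawater density (~1023 kg m-3)
    # to get m3 s-1, then by 1e6, since 1 Sv = 1e6 m3 s-1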
    cube.data = (cube.data / 1023) / 1e+6
    cube.units = 'Sv'

    #dim_coord_names = [coord.name() for coord in cube.dim_coords]
    #vert_extents = spatial_weights.calc_vertical_weights_1D(cube.coord('depth'), dim_coord_names, cube.shape)

    metric = cube.collapsed('depth', iris.analysis.MAX)
    metric.remove_coord('depth')

    try:
        metric.attributes['history'] = gio.write_metadata(
            file_info={inargs.infiles[0]: cube.attributes['history']})
    except KeyError:
        pass
    iris.save(metric, inargs.outfile)
Code example #18
File: regrid.py  Project: sida-wang/ocean-analysis
def main(inargs):
    """Run the program."""

    cube = iris.load_cube(inargs.infile, inargs.var)
    cube = gio.check_time_units(cube)
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)
    log = cmdprov.new_log(
        infile_history={inargs.infile: cube.attributes['history']},
        git_repo=repo_dir)

    dim_vals = {}
    dim_vals['latitude'] = get_dim_vals(inargs.lats)
    dim_vals['longitude'] = get_dim_vals(inargs.lons)
    if inargs.levs:
        dim_vals['depth'] = get_dim_vals(inargs.levs)
    else:
        dim_vals['depth'] = get_dim_vals(inargs.depth_bnds, bounds=True)

    # Regrid from curvilinear to rectilinear if necessary
    regrid_status = False
    if inargs.lats:
        horizontal_grid = grids.make_grid(dim_vals['latitude'],
                                          dim_vals['longitude'])
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube, target_grid_cube=horizontal_grid)

    # Regrid to new grid
    if dim_vals['depth'] or not regrid_status:
        sample_points = get_sample_points(cube, dim_vals)
        cube = cube.interpolate(sample_points, iris.analysis.Linear())
        cube.coord('latitude').guess_bounds()
        cube.coord('longitude').guess_bounds()
        if inargs.levs:
            cube = spatial_weights.guess_depth_bounds(cube)
        else:
            cube.coord('depth').bounds = get_depth_bounds(inargs.depth_bnds)

    if numpy.isnan(numpy.min(cube.data)):
        cube = remove_nans(cube)

    # Reinstate time dim_coord if necessary
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    if 'time' in aux_coord_names:
        cube = iris.util.new_axis(cube, 'time')

    cube.attributes['history'] = log
    iris.save(cube, inargs.outfile, fill_value=1e20)
Code example #19
def read_data(infile, variable, calc_annual=False, chunk=False):
    """Load the input data."""

    cube = iris.load_cube(infile, gio.check_iris_var(variable))
    cube = gio.check_time_units(cube)

    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    return cube, coord_names, aux_coord_names, grid_type
Code example #20
def load_data(infiles, basin):
    """Load, temporally aggregate and spatially slice input data"""
    
    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(infiles, 'ocean_meridional_overturning_mass_streamfunction', callback=save_history)
        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)

        cube = cube[:, basin_index[basin], :, :]
        cube = timeseries.convert_to_annual(cube)

    experiment = cube.attributes['experiment_id']
    
    depth_constraint = iris.Constraint(depth=lambda cell: cell <= 250)
    lat_constraint = iris.Constraint(latitude=lambda cell: -30.0 <= cell < 30.0)

    cube = cube.extract(depth_constraint & lat_constraint)
    
    return cube, experiment
Code example #21
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             area=False,
             invert_evap=False):
    """Read, merge, temporally aggregate and calculate zonal mean."""

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

        assert cube.units == 'kg m-2 s-1'
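        # for liquid water, 1 kg m-2 s-1 equals 1 mm s-1, so multiplying
        # by 86400 (seconds per day) gives mm/day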
        cube.data = cube.data * 86400
        cube.units = 'mm/day'

        if invert_evap and (var == 'water_evaporation_flux'):
            cube.data = cube.data * -1

        if area:
            cube = spatial_weights.multiply_by_area(cube)

        zonal_mean = cube.collapsed('longitude', iris.analysis.MEAN)
        zonal_mean.remove_coord('longitude')
    else:
        zonal_mean = None

    return zonal_mean, metadata_dict
Code example #22
def read_data(infile_list, var, basin_cube, region):
    """Read the data files.

    The CSIRO-Mk3-6-0 model output seems to be formatted incorrectly,
      so the regions can't be selected by name.

    """
    cube = iris.load(infile_list,
                     gio.check_iris_var(var),
                     callback=save_history)
    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    cube.attributes = atts
    cube.attributes['history'] = history[0]
    model = atts['model_id']

    if var == 'northward_ocean_heat_transport':
        region_index = {}
        region_index['atlantic_arctic_ocean'] = 0
        region_index['indian_pacific_ocean'] = 1
        region_index['global_ocean'] = 2
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, region_index[region], :]
        else:
            cube = cube.extract(iris.Constraint(region=region))

    cube = timeseries.convert_to_annual(cube, full_months=True)
    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)
        if region != 'global_ocean':
            basin_numbers = {}
            basin_numbers['atlantic_arctic_ocean'] = [2, 4]
            basin_numbers['indian_pacific_ocean'] = [3, 5]
            cube = uconv.mask_unwanted_seas(cube, basin_cube,
                                            basin_numbers[region])

    return cube
Code example #23
def main(inargs):
    """Run the program."""

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    for temperature_file in inargs.temperature_files:
        temperature_cube = iris.load_cube(
            temperature_file, inargs.temperature_var & level_subset)
        temperature_cube = gio.check_time_units(temperature_cube)
        metadata_dict = {
            temperature_file: temperature_cube.attributes['history']
        }
        temperature_atts = temperature_cube.attributes

        if inargs.annual:
            temperature_cube = timeseries.convert_to_annual(temperature_cube,
                                                            chunk=inargs.chunk)

        coord_names = [coord.name() for coord in temperature_cube.dim_coords]
        assert coord_names[0] == 'time'
        assert coord_names[1] == 'depth'

        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                inargs.area_file,
                                                temperature_cube,
                                                metadata_dict)

        ohc_cube = calc_ohc_vertical_integral(temperature_cube,
                                              volume_data,
                                              inargs.density,
                                              inargs.specific_heat,
                                              coord_names,
                                              chunk=inargs.chunk)
        ohc_cube = add_metadata(temperature_cube, temperature_atts, ohc_cube,
                                metadata_dict, inargs)
        ohc_file = get_outfile_name(temperature_file, annual=inargs.annual)

        iris.save(ohc_cube, ohc_file)
        print(ohc_file)
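calc_ohc_vertical_integral is not shown; given its arguments and the assertion that depth is the second dimension, it is presumably the standard heat-content integral. A sketch of the core computation, ignoring the chunk option:

# OHC(t, y, x) = density * specific_heat * sum over depth of T * cell_volume
ohc_data = (inargs.density * inargs.specific_heat
            * (temperature_cube.data * volume_data).sum(axis=1))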
Code example #24
def load_data(filenames, standard_name, time_constraint, metadata_dict,
              input_timescale):
    """Basic data loading and temporal smoothing"""

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(filenames, gio.check_iris_var(standard_name))
        assert cube, "files do not exist: %s" % (filenames[0])

        metadata_dict[filenames[0]] = cube[0].attributes['history']
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)
        cube = iris.util.squeeze(cube)

    attributes = cube.attributes

    if not input_timescale == 'annual':
        cube = timeseries.convert_to_annual(cube, full_months=True)

    return cube, metadata_dict, attributes
Code example #25
def get_cube_list(infiles, agg, time_bounds=None, quick=False):
    """Read and process data."""

    assert agg in ['clim', 'anom']

    ensemble_cube_list = iris.cube.CubeList([])
    for ensnum, ensemble_member in enumerate(infiles):
        print(ensemble_member)
        cube, history = gio.combine_files(
            ensemble_member,
            'precipitation_minus_evaporation_flux',
            new_calendar='365_day')
        cube = gio.check_time_units(cube)
        if time_bounds:
            time_constraint = gio.get_time_constraint(time_bounds)
            cube = cube.extract(time_constraint)
        elif quick:
            cube = cube[0:120, ::]
        if agg == 'clim':
            cube = timeseries.convert_to_annual(cube,
                                                aggregation='mean',
                                                days_in_month=True)
            cube = cube.collapsed('time', iris.analysis.MEAN)
        elif agg == 'anom':
            start_data = cube.data[0, ::]
            cube = cube[-1, ::]
            cube.data = cube.data - start_data
        cube.remove_coord('time')
        cube = regrid(cube)
        new_aux_coord = iris.coords.AuxCoord(ensnum,
                                             long_name='ensemble_member',
                                             units='no_unit')
        cube.add_aux_coord(new_aux_coord)
        cube.cell_methods = ()
        ensemble_cube_list.append(cube)
        print("Total number of models:", len(ensemble_cube_list))

    return ensemble_cube_list, history
Code example #26
def calc_mean(infiles, variable, lat_constraint, time_constraint, area_file):
    """Load the infiles and calculate the hemispheric mean values."""

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(infiles,
                         variable & lat_constraint & time_constraint,
                         callback=save_history)

        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)

        cube = timeseries.convert_to_annual(cube)

    orig_units = str(cube.units)
    orig_atts = cube.attributes

    area_weights = get_area_weights(cube, area_file, lat_constraint)
    mean = cube.collapsed(['longitude', 'latitude'],
                          iris.analysis.MEAN,
                          weights=area_weights)

    return mean, orig_units, orig_atts
Code example #27
def main(inargs):
    """Run the program."""

    # Read data
    cube = iris.load(inargs.infiles,
                     'precipitation minus evaporation flux',
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    # Prepare data
    cube = timeseries.convert_to_annual(cube)
    zonal_mean_cube = cube.collapsed('longitude', iris.analysis.MEAN)

    # Calculate metrics
    xdata = cube.coord('latitude').points
    xnew = numpy.linspace(xdata[0], xdata[-1], num=1000, endpoint=True)

    metric_dict = {'nh': [], 'sh': [], 'globe': []}
    for ycube in zonal_mean_cube.slices(['latitude']):
        func = interp1d(xdata, ycube.data, kind='cubic')
        ynew = func(xnew)
        for hemisphere in ['nh', 'sh', 'globe']:
            amp = pe_amplitude(xnew, ynew, hemisphere)
            metric_dict[hemisphere].append(amp)

    # Write the output file
    atts = cube.attributes
    infile_history = {inargs.infiles[0]: history[0]}
    atts['history'] = gio.write_metadata(file_info=infile_history)
    cube_list = create_outcubes(metric_dict, cube.attributes, cube.units,
                                cube.coord('time'))

    iris.save(cube_list, inargs.outfile)
Code example #28
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var),
                     callback=save_history)

    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube.attributes = atts
    orig_long_name = cube.long_name
    if cube.standard_name is None:
        orig_standard_name = orig_long_name.replace(' ', '_')
    else:
        orig_standard_name = cube.standard_name
    orig_var_name = cube.var_name

    # Temporal smoothing
    cube = timeseries.convert_to_annual(cube, full_months=True)

    # Mask marginal seas
    if inargs.basin:
        if '.nc' in inargs.basin:
            basin_cube = iris.load_cube(inargs.basin)
            cube = uconv.mask_marginal_seas(cube, basin_cube)
        else:
            basin_cube = 'create'
    else:
        basin_cube = None

    # Regrid (if needed)
    if inargs.regrid:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

    # Change units (remove m-2)
    if inargs.area:
        cube = multiply_by_area(cube, inargs.area)
        cube.attributes = atts
        cube.long_name = orig_long_name
        cube.standard_name = orig_standard_name
        cube.var_name = orig_var_name

    # History
    history_attribute = get_history_attribute(inargs.infiles[0], history[0])
    cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Calculate output for each basin
    if type(basin_cube) == iris.cube.Cube:
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif type(basin_cube) == str:
        basin_array = uconv.create_basin_array(cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_array = None
        basin_list = ['globe']

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert len(dim_coord_names) == 3
    assert dim_coord_names[0] == 'time'
    x_axis_name = dim_coord_names[2]

    for aux_coord in aux_coord_names:
        cube.remove_coord(aux_coord)

    out_cubes = []
    for basin_name in basin_list:
        data_cube = cube.copy()
        if not basin_name == 'globe':
            data_cube.data.mask = numpy.where(
                (data_cube.data.mask == False) &
                (basin_array == basins[basin_name]), False, True)

        # Zonal statistic
        zonal_cube = data_cube.collapsed(
            x_axis_name, aggregation_functions[inargs.zonal_stat])
        zonal_cube.remove_coord(x_axis_name)

        # Attributes
        standard_name = 'zonal_%s_%s_%s' % (inargs.zonal_stat,
                                            orig_standard_name, basin_name)
        var_name = '%s_%s_%s' % (orig_var_name,
                                 aggregation_abbreviations[inargs.zonal_stat],
                                 basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
Code example #29
def main(inargs):
    """Run the program."""

    # Read drift coefficients
    coefficient_a_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient a')
    coefficient_b_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient b')
    coefficient_c_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient c')
    coefficient_d_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient d')
    if inargs.coefficient_check and (inargs.var in [
            'sea_water_potential_temperature', 'sea_water_salinity'
    ]):
        sanity_summary = coefficient_sanity_check(coefficient_a_cube,
                                                  coefficient_b_cube,
                                                  coefficient_c_cube,
                                                  coefficient_d_cube,
                                                  inargs.var)
    else:
        sanity_summary = None

    # Read first data cube to get some information
    first_data_cube = iris.load_cube(inargs.data_files[0],
                                     gio.check_iris_var(inargs.var))
    coord_names = [
        coord.name() for coord in first_data_cube.coords(dim_coords=True)
    ]
    assert coord_names[0] == 'time'

    if inargs.annual:
        assert inargs.timescale == 'annual'
        first_data_cube = timeseries.convert_to_annual(first_data_cube,
                                                       chunk=12)

    time_diff, branch_time, new_time_unit = time_adjustment(
        first_data_cube,
        coefficient_a_cube,
        inargs.timescale,
        branch_time=inargs.branch_time)

    data_history = first_data_cube.attributes['history']
    del first_data_cube

    new_cubelist = []
    for fnum, filename in enumerate(inargs.data_files):
        # Read data
        data_cube = iris.load_cube(filename, gio.check_iris_var(inargs.var))

        # Reinstate time dim_coord if necessary
        aux_coord_names = [coord.name() for coord in data_cube.aux_coords]
        if 'time' in aux_coord_names:
            data_cube = iris.util.new_axis(data_cube, 'time')
        if inargs.annual:
            assert inargs.timescale == 'annual'
            data_cube = timeseries.convert_to_annual(data_cube, chunk=12)
        data_cube = check_data_units(data_cube, coefficient_a_cube)
        data_cube = gio.check_time_units(data_cube)
        data_cube.cell_methods = ()
        if not inargs.no_parent_check:
            check_attributes(data_cube.attributes,
                             coefficient_a_cube.attributes)

        # Sync the data time axis with the coefficient time axis
        time_coord = data_cube.coord('time')
        time_coord.convert_units(new_time_unit)

        time_values = time_coord.points.astype(numpy.float32) - time_diff
        if not inargs.no_time_check:
            check_time_adjustment(time_values, coefficient_a_cube, branch_time,
                                  fnum)

        # Remove the drift
        if fnum == 0:
            drift_signal, start_polynomial = apply_polynomial(
                time_values,
                coefficient_a_cube.data,
                coefficient_b_cube.data,
                coefficient_c_cube.data,
                coefficient_d_cube.data,
                poly_start=None,
                chunk=inargs.chunk)
        else:
            try:
                start = start_polynomial[0, ::]
            except IndexError:
                start = start_polynomial[0]
            drift_signal, scraps = apply_polynomial(time_values,
                                                    coefficient_a_cube.data,
                                                    coefficient_b_cube.data,
                                                    coefficient_c_cube.data,
                                                    coefficient_d_cube.data,
                                                    poly_start=start,
                                                    chunk=inargs.chunk)

        if not inargs.dummy:
            new_cube = data_cube - drift_signal
            #assert new_cube.data.mask.sum() == drift_signal.mask.sum()
            new_cube.data.mask = drift_signal.mask
            if not inargs.no_data_check:
                check_data(new_cube, data_cube, filename)
        else:
            print('fake run - drift signal not subtracted')
            new_cube = data_cube
        new_cube.metadata = data_cube.metadata
        if sanity_summary:
            new_cube.attributes['drift_removal'] = sanity_summary

        assert (inargs.outfile[-3:] == '.nc') or (inargs.outfile[-1] == '/')

        if inargs.outfile[-3:] == '.nc':
            new_cubelist.append(new_cube)
        elif inargs.outfile[-1] == '/':
            infile = filename.split('/')[-1]
            if inargs.annual:
                infile = re.sub('Omon', 'Oyr', infile)
            outfile = inargs.outfile + infile
            metadata_dict = {
                infile: data_cube.attributes['history'],
                inargs.coefficient_file:
                coefficient_a_cube.attributes['history']
            }
            new_cube.attributes['history'] = gio.write_metadata(
                file_info=metadata_dict)

            #assert new_cube.data.dtype == numpy.float32
            iris.save(new_cube, outfile, netcdf_format='NETCDF3_CLASSIC')
            print('output:', outfile)
            del new_cube
            del drift_signal

    if inargs.outfile[-3:] == '.nc':
        new_cubelist = iris.cube.CubeList(new_cubelist)
        equalise_attributes(new_cubelist)
        new_cubelist = new_cubelist.concatenate_cube()

        try:
            metadata_dict = {
                inargs.data_files[0]: data_history,
                inargs.coefficient_file:
                coefficient_a_cube.attributes['history']
            }
        except KeyError:
            metadata_dict = {inargs.data_files[0]: data_history}
        new_cubelist.attributes['history'] = gio.write_metadata(
            file_info=metadata_dict)

        #assert new_cubelist[0].data.dtype == numpy.float32
        iris.save(new_cubelist, inargs.outfile)
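apply_polynomial is not shown, but the four coefficient cubes (a, b, c, d) imply a cubic drift fit evaluated at the adjusted time values. A hypothetical sketch for a 2-D (lat, lon) coefficient field:

t = time_values[:, numpy.newaxis, numpy.newaxis]  # broadcast time over (lat, lon)
drift = (coefficient_a_cube.data + coefficient_b_cube.data * t
         + coefficient_c_cube.data * t**2 + coefficient_d_cube.data * t**3)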
Code example #30
def main(inargs):
    """Run the program."""

    # Read data
    try:
        time_constraint = gio.get_time_constraint(inargs.time_bounds)
    except AttributeError:
        time_constraint = iris.Constraint()

    depth_constraint = gio.iris_vertical_constraint(inargs.min_depth,
                                                    inargs.max_depth)

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles,
                         gio.check_iris_var(inargs.var) & depth_constraint)
        history = cube[0].attributes['history']
        atts = cube[0].attributes
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()

        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)

        cube = iris.util.squeeze(cube)

        if 'salinity' in inargs.var:
            cube = gio.salinity_unit_check(cube)

        infile_metadata = {inargs.infiles[0]: history}

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    if inargs.min_depth or inargs.max_depth:
        cube = vertical_mean(cube)

    agg_cube = get_agg_cube(cube, inargs.aggregation)

    if inargs.regrid:
        before_sum = agg_cube.data.sum()
        before_mean = agg_cube.data.mean()
        agg_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            agg_cube)
        if regrid_status:
            print('Warning: Data has been regridded')
            print('Before sum:', '%.2E' % Decimal(before_sum))
            print('After sum:', '%.2E' % Decimal(agg_cube.data.sum()))
            print('Before mean:', '%.2E' % Decimal(before_mean))
            print('After mean:', '%.2E' % Decimal(agg_cube.data.mean()))

    if inargs.subtract_tropics:
        agg_cube = subtract_tropics(agg_cube)

    if inargs.land_mask:
        sftlf_cube = iris.load_cube(inargs.land_mask, 'land_area_fraction')
        agg_cube = uconv.apply_land_ocean_mask(agg_cube, sftlf_cube, 'ocean')

    atts['history'] = gio.write_metadata(file_info=infile_metadata)
    agg_cube.attributes = atts

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(agg_cube, inargs.outfile)