Example #1
def main(inargs):
    """Run the program."""

    hist_time_constraint = gio.get_time_constraint(['1850-01-01', '2005-12-31'])
    outcubes = iris.cube.CubeList([])
    for var in inargs.variables:
        hist_cube = iris.load_cube(inargs.hist_file, gio.check_iris_var(var) & hist_time_constraint)
        hist_cube = clean_attributes(hist_cube)
        branch_time = hist_cube.attributes['branch_time']
        history = hist_cube.attributes['history']
        
        rcp_cube = iris.load_cube(inargs.rcp_file, gio.check_iris_var(var))
        rcp_cube = clean_attributes(rcp_cube)
        rcp_experiment = rcp_cube.attributes['experiment_id']

        if inargs.cumsum:
            rcp_cube.data = rcp_cube.data + hist_cube.data[-1]

        cube_list = iris.cube.CubeList([hist_cube, rcp_cube])
        equalise_attributes(cube_list)
        iris.util.unify_time_units(cube_list)
        cube = cube_list.concatenate_cube()
        cube.attributes['branch_time'] = branch_time
        cube.attributes['experiment_id'] = 'historical-' + rcp_experiment

        outcubes.append(cube.copy())

    for cube in outcubes:
        # 'history' comes from the last variable processed in the loop above
        cube.attributes['history'] = gio.write_metadata(file_info={inargs.hist_file: history})
    equalise_attributes(outcubes)

    iris.save(outcubes, inargs.outfile)
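A note on the recurring idiom: almost every example below merges multiple files with the same four calls before doing any analysis. Here is a minimal self-contained sketch of that pattern in plain iris; the filenames and variable name are hypothetical, and the gio.check_iris_var / save_history seen in the examples are project-specific helpers rather than iris API.

import iris
import iris.util
# in iris >= 3.0 equalise_attributes lives in iris.util; older releases
# provided it via iris.experimental.equalise_cubes
from iris.util import equalise_attributes

# hypothetical files that tile one continuous time series
cubes = iris.load(['tas_185001-194912.nc', 'tas_195001-200512.nc'],
                  'air_temperature')
equalise_attributes(cubes)         # drop attributes that differ between cubes
iris.util.unify_time_units(cubes)  # put all time coordinates on one epoch
cube = cubes.concatenate_cube()    # raises if the pieces do not join cleanly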
Example #2
def read_data(inargs, infiles, time_bounds, ref_cube=None, anomaly=False, branch_index=None, branch_time=None):
    """Read data."""

    data_dict = {}
    file_count = 0
    for infile in infiles:
        try:
            cube = iris.load_cube(infile, gio.check_iris_var(inargs.var))
        except iris.exceptions.ConstraintMismatchError:
            print('using inferred value for', infile)
            cube = iris.load_cube(infile, gio.check_iris_var('Inferred_' + inargs.var))
            cube.long_name = inargs.var.replace('_', ' ')
            cube.var_name = cube.var_name.replace('-inferred', '')
        
        if ref_cube:
            cube = timeseries.adjust_control_time(cube, ref_cube, branch_index=branch_index, branch_time=branch_time)

        if not (ref_cube and inargs.full_control):
            time_constraint = gio.get_time_constraint(time_bounds)
            cube = cube.extract(time_constraint)

        if anomaly:
            # express as an anomaly relative to the mean of the first 20 time steps
            cube.data = cube.data - cube.data[0:20].mean()

        cube.data = cube.data.astype(numpy.float64)
        cube.cell_methods = ()
        for aux_coord in ['latitude', 'longitude']:
            try:
                cube.remove_coord(aux_coord)
            except iris.exceptions.CoordinateNotFoundError:
                pass

        new_aux_coord = iris.coords.AuxCoord(file_count, long_name='ensemble_member', units='no_unit')
        cube.add_aux_coord(new_aux_coord)
         
        model = cube.attributes['model_id']
        realization = 'r' + str(cube.attributes['realization'])
        physics = 'p' + str(cube.attributes['physics_version'])
        experiment = cube.attributes['experiment_id']

        key = (model, physics, realization)
        data_dict[key] = cube
        file_count = file_count + 1
    
    ylabel = get_ylabel(cube, inargs)
    # 'cube', 'experiment' and 'infile' hold values from the last file processed
    experiment = 'historicalAA' if experiment == "historicalMisc" else experiment
    metadata_dict = {infile: cube.attributes['history']}
    
    return data_dict, experiment, ylabel, metadata_dict
Example #3
def read_data(infile_list, var, model, basin_cube):
    """Read the data files.

    The CSIRO-Mk3-6-0 model seems to be formatted incorrectly
      and you can't select the "global_ocean" by name

    """
    cube = iris.load(infile_list,
                     gio.check_iris_var(var),
                     callback=save_history)
    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    cube.attributes = atts
    cube.attributes['history'] = history[0]  # 'history' is filled by the save_history callback

    if var == 'northward_ocean_heat_transport':
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, 2, :]
        else:
            cube = cube.extract(iris.Constraint(region='global_ocean'))

    cube = timeseries.convert_to_annual(cube, full_months=True)

    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)

    return cube
Example #4
def main(inargs):
    """Run the program."""

    metadata_dict = {}

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    cube_list = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        cube = iris.load_cube(filename, gio.check_iris_var(inargs.var))
        history = cube.attributes['history']
        new_aux_coord = iris.coords.AuxCoord(fnum,
                                             long_name='ensemble_member',
                                             units='no_unit')
        cube.add_aux_coord(new_aux_coord)
        if ref_cube:
            cube = regrid_cube(cube, ref_cube)
        else:
            ref_cube = cube.copy()
        cube_list.append(cube)

    ensemble_agg = calc_ensemble(cube_list, inargs.aggregation)

    metadata_dict[filename] = history
    ensemble_agg.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(ensemble_agg, inargs.outfile)
Example #5
def read_data(infiles, variable, calc_annual=False, chunk=False):
    """Load the input data."""

    cube = iris.load(infiles,
                     gio.check_iris_var(variable),
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    infile_history = {}
    infile_history[infiles[0]] = history[0]
    cube.attributes['history'] = gio.write_metadata(file_info=infile_history)

    return cube, coord_names, aux_coord_names, grid_type
Example #6
def read_data(infiles, variable, time_constraint):
    """Load the input data."""

    cube = iris.load(infiles,
                     gio.check_iris_var(variable),
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube = cube.extract(time_constraint)

    if 'J' not in str(cube.units):
        cube = convert_to_joules(cube)

    if variable == 'surface_downward_heat_flux_in_sea_water':
        agg_method = 'sum'
    elif variable == 'ocean_heat_content':
        agg_method = 'mean'
    else:
        raise ValueError(f'No aggregation method defined for {variable}')
    cube = timeseries.convert_to_annual(cube, aggregation=agg_method)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    return cube, coord_names, aux_coord_names, grid_type
Example #7
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var), callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)

    if inargs.area:
        cube = multiply_by_area(cube) 

    if inargs.sftlf_file and inargs.realm:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

    zonal_aggregate = cube.collapsed('longitude', aggregation_functions[inargs.aggregation])
    zonal_aggregate.remove_coord('longitude')

    zonal_aggregate.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]}) 
    iris.save(zonal_aggregate, inargs.outfile)
Example #8
def get_data(filename, var, target_grid=None):
    """Read data.
    
    Positive is defined as down.
    
    """

    if filename:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(filename, gio.check_iris_var(var))
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            if target_grid:
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube, target_grid_cube=target_grid)

            coord_names = [coord.name() for coord in cube.dim_coords]
            if 'depth' in coord_names:
                depth_constraint = iris.Constraint(depth=0)
                cube = cube.extract(depth_constraint)

            if 'up' in cube.standard_name:
                cube.data = cube.data * -1  # flip the sign so that positive is down
    else:
        cube = None

    return cube
Example #9
def get_data(filename, var, target_grid=None):
    """Read data."""
    
    cube = iris.load_cube(filename, gio.check_iris_var(var))
    cube = gio.check_time_units(cube)
    cube = iris.util.squeeze(cube)

    return cube
Example #10
def get_data(filename, var, time_constraint):
    """Read data."""

    cube = iris.load_cube(filename, gio.check_iris_var(var) & time_constraint)
    cube = gio.check_time_units(cube)
    cube = iris.util.squeeze(cube)

    return cube
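A hypothetical call of the function above, reusing gio.get_time_constraint as seen in Example #1 (the filename, variable, and date bounds are invented for illustration):

time_constraint = gio.get_time_constraint(['1986-01-01', '2005-12-31'])
cube = get_data('thetao_Omon_hypothetical.nc',  # hypothetical input file
                'sea_water_potential_temperature',
                time_constraint)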
Example #11
def load_data(infile, var, time_constraint):
    """Load the data"""

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load_cube(infile,
                              gio.check_iris_var(var) & time_constraint)
        cube.data = cube.data * 100  # scale by 100 (e.g. fraction to percent)
    model, experiment, rip, physics = get_run_details(cube)
    trend = timeseries.calc_trend(cube, per_yr=True)

    return cube, trend, model, experiment, rip
Example #12
def main(args):
    """Run the program."""

    metadata_dict = {}
    cubes = []
    for filename, var in args.infile:
        cube = iris.load_cube(filename, gio.check_iris_var(var))
        cubes.append(cube)
        metadata_dict[filename] = cube.attributes['history']

    if args.ref_file:
        ref_cube = iris.load_cube(args.ref_file[0], gio.check_iris_var(args.ref_file[1]))
    else:
        ref_cube = cubes[0]
   
    if args.operation == 'division':
        assert len(cubes) == 2
        assert str(cubes[1].units) == 'm2'
        outcube = cubes[0] / cubes[1]
        cube1_units = str(cubes[0].units)
        new_units = f'{cube1_units} m-2'
    elif args.operation == 'addition':
        outcube = cubes[0]
        for cube in cubes[1:]:
            outcube = outcube + cube
        new_units = None
    else:
        raise ValueError(f'Unrecognised operation: {args.operation}')

    outcube.attributes = ref_cube.attributes
    if new_units:
        outcube.units = new_units
    outcube.var_name = ref_cube.var_name
    outcube.long_name = ref_cube.long_name
    if ref_cube.standard_name:
        outcube.standard_name = ref_cube.standard_name
    else:
        standard_name = ref_cube.long_name.replace(' ', '_')
        iris.std_names.STD_NAMES[standard_name] = {'canonical_units': outcube.units}
        outcube.standard_name = standard_name

    outcube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, args.outfile)
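The else branch above works because iris validates standard_name assignments against the iris.std_names.STD_NAMES table and raises a ValueError for unknown names. A minimal sketch of that registration trick in isolation, with an invented name and units:

import iris.cube
import iris.std_names

cube = iris.cube.Cube([1.0, 2.0], long_name='ocean heat content per unit area')

# register the invented name so the standard_name setter will accept it
name = cube.long_name.replace(' ', '_')
iris.std_names.STD_NAMES[name] = {'canonical_units': 'J m-2'}
cube.standard_name = name  # would raise ValueError without the registration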
Example #13
def get_title(infile, var, index_list):
    """Get the plot title."""

    cube = iris.load_cube(infile, gio.check_iris_var(var))
    title = ''
    coord_names = [coord.name() for coord in cube.dim_coords]
    for posnum, index in enumerate(index_list):
        point_name = coord_names[posnum + 1]  # offset by one to skip the leading (time) dimension
        point_value = cube.coord(point_name).points[index]
        title = f"{title} {point_name}: {point_value};"

    return title
Example #14
def main(inargs):
    """Run the program."""

    # Read data
    level_constraint, lat_constraint = get_constraints(inargs.depth,
                                                       inargs.hemisphere)
    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var) & level_constraint,
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    # Get area file (if applicable)
    if inargs.hemisphere:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = cube.extract(lat_constraint)
        area_cube = None
    else:
        area_cube = read_optional(inargs.area_file)

    # Mask ocean or atmosphere (if applicable)
    if inargs.sftlf_file:
        sftlf_file, selected_region = inargs.sftlf_file
        sftlf_cube = read_optional(sftlf_file)
        mask = create_mask(sftlf_cube, selected_region)
        cube.data = numpy.ma.asarray(cube.data)
        cube.data.mask = mask
        if area_cube:
            areas_dict = area_info(area_cube.copy(), mask, selected_region)
    else:
        areas_dict = {}
        sftlf_cube = None

    # Outfile attributes
    atts = set_attributes(inargs, cube, area_cube, sftlf_cube, areas_dict)

    # Temporal smoothing
    if inargs.smoothing:
        cube = smooth_data(cube, inargs.smoothing)

    # Calculate metric
    area_weights = get_area_weights(cube, area_cube)
    if inargs.metric == 'bulk-deviation':
        metric = calc_bulk_deviation(cube, area_weights, atts)
    elif inargs.metric == 'mean':
        metric = calc_global_mean(cube, area_weights, atts)
    elif inargs.metric == 'grid-deviation':
        metric = calc_grid_deviation(cube, inargs.var, area_weights, atts)

    iris.save(metric, inargs.outfile)
Example #15
def extract_data(infile_list, output_projection, scale_factors):
    """Extract data."""

    cube_dict = {}
    metadata_dict = {}
    plot_numbers = []
    max_layers = 0
    for infile, long_name, start_date, end_date, timestep, plot_type, plot_number, input_projection in infile_list:

        assert plot_type[:-1] in plot_types
        assert input_projection in list(input_projections.keys())

        # Define data constraints
        time_constraint = get_time_constraint(start_date, end_date)
        if output_projection[int(plot_number) - 1] == 'SouthPolarStereo':
            lat_constraint = iris.Constraint(latitude=lambda y: y <= 0.0)
        else:
            lat_constraint = iris.Constraint()

        # Read data
        with iris.FUTURE.context(cell_datetime_objects=True):
            new_cube = iris.load_cube(
                infile,
                gio.check_iris_var(long_name) & time_constraint
                & lat_constraint)
            new_cube = scale_data(new_cube, scale_factors, plot_type)
            new_cube = iris.util.squeeze(new_cube)

        coord_names = [coord.name() for coord in new_cube.coords()]
        if 'time' in coord_names:
            ntimes = len(new_cube.coords('time')[0].points)
            if ntimes > 1:
                try:
                    timestep = int(timestep)
                except ValueError:
                    timestep = None
                new_cube = collapse_time(new_cube, ntimes, timestep)

        new_cube.attributes['input_projection'] = input_projection

        # Define outputs
        cube_dict[(plot_type, int(plot_number))] = new_cube
        metadata_dict[infile] = new_cube.attributes['history']
        plot_numbers.append(int(plot_number))

        layer = int(plot_type[-1])
        if layer > max_layers:
            max_layers = layer

    return cube_dict, metadata_dict, set(plot_numbers), max_layers
Example #16
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             sftlf_cube=None,
             realm=None):
    """Read, merge, temporally aggregate and calculate zonal sum.
    
    Positive is defined as down.
    
    """

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            depth_constraint = iris.Constraint(depth=0)
            cube = cube.extract(depth_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = multiply_by_area(cube)

        if 'up' in cube.standard_name:
            cube.data = cube.data * -1  # flip the sign so that positive is down

        if sftlf_cube and realm in ['ocean', 'land']:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, realm)

        zonal_sum = cube.collapsed('longitude', iris.analysis.SUM)
        zonal_sum.remove_coord('longitude')

        grid_spacing = grids.get_grid_spacing(zonal_sum)
        zonal_sum.data = zonal_sum.data / grid_spacing

    else:
        zonal_sum = None

    return zonal_sum, metadata_dict
Example #17
def main(inargs):
    """Run the program."""

    if inargs.sftlf_file:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
    else:
        sftlf_cube = None

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var))
        history = cube[0].attributes['history']

        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = iris.util.squeeze(cube)
        cube.attributes['history'] = gio.write_metadata(
            file_info={inargs.infiles[0]: history})

        cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

    output = {}
    output['full'] = calc_fields(cube,
                                 sftlf_cube,
                                 inargs.aggregation,
                                 realm=None,
                                 area=inargs.area)
    if inargs.sftlf_file:
        for realm in ['ocean', 'land']:
            output[realm] = calc_fields(cube,
                                        sftlf_cube,
                                        inargs.aggregation,
                                        realm=realm,
                                        area=inargs.area)

    cube_list = iris.cube.CubeList()
    for realm, output_cubes in output.items():
        for cube in output_cubes:
            cube_list.append(cube)

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(cube_list, inargs.outfile, netcdf_format='NETCDF3_CLASSIC')
Example #18
def read_data(infile, variable, calc_annual=False, chunk=False):
    """Load the input data."""

    cube = iris.load_cube(infile, gio.check_iris_var(variable))
    cube = gio.check_time_units(cube)

    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    return cube, coord_names, aux_coord_names, grid_type
Example #19
def read_data(inargs, infiles, ref_cube=None):
    """Read data."""

    clim_dict = {}
    trend_dict = {}
    file_count = 0
    for infile in infiles:
        print(infile)
        cube = iris.load_cube(infile, gio.check_iris_var(inargs.var))
        if ref_cube:
            time_constraint = timeseries.get_control_time_constraint(
                cube, ref_cube, inargs.time, branch_time=inargs.branch_time)
            cube = cube.extract(time_constraint)
            iris.util.unify_time_units([ref_cube, cube])
            cube.replace_coord(ref_cube.coord('time'))
        else:
            time_constraint = gio.get_time_constraint(inargs.time)
            cube = cube.extract(time_constraint)

        if inargs.perlat:
            grid_spacing = grids.get_grid_spacing(cube)
            cube.data = cube.data / grid_spacing

        trend_cube = calc_trend_cube(cube.copy())

        clim_cube = cube.collapsed('time', iris.analysis.MEAN)
        clim_cube.remove_coord('time')

        model = cube.attributes['model_id']
        realization = 'r' + str(cube.attributes['realization'])
        physics = 'p' + str(cube.attributes['physics_version'])

        key = (model, physics, realization)
        trend_dict[key] = trend_cube
        clim_dict[key] = clim_cube
        file_count = file_count + 1

    experiment = cube.attributes['experiment_id']
    experiment = 'historicalAA' if experiment == "historicalMisc" else experiment
    trend_ylabel = get_ylabel(cube, 'trend', inargs)
    clim_ylabel = get_ylabel(cube, 'climatology', inargs)

    metadata_dict = {infile: cube.attributes['history']}

    return cube, trend_dict, clim_dict, experiment, trend_ylabel, clim_ylabel, metadata_dict
Example #20
def load_data(infile, var_list, time_constraint):
    """Load the data"""

    trend_list = []
    for var in var_list:
        cube = iris.load_cube(infile,
                              gio.check_iris_var(var) & time_constraint)
        model, experiment, rip, physics = get_run_details(cube)
        trend = timeseries.calc_trend(cube, per_yr=True)
        trend = check_sign(trend, var)
        trend_list.append(trend)

    if len(trend_list) == 2:
        print('dividing y vars')
        out_trend = trend_list[0] / trend_list[1]
    else:
        out_trend = trend_list[0]

    return out_trend, cube, model, experiment, rip
Example #21
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             area=False,
             invert_evap=False):
    """Read, merge, temporally aggregate and calculate zonal mean."""

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

        assert cube.units == 'kg m-2 s-1'
        cube.data = cube.data * 86400  # kg m-2 s-1 equals mm/s for water, so x 86400 gives mm/day
        units = 'mm/day'

        if invert_evap and (var == 'water_evaporation_flux'):
            cube.data = cube.data * -1

        if area:
            cube = spatial_weights.multiply_by_area(cube)

        zonal_mean = cube.collapsed('longitude', iris.analysis.MEAN)
        zonal_mean.remove_coord('longitude')
    else:
        zonal_mean = None

    return zonal_mean, metadata_dict
Example #22
def read_data(infile_list, var, basin_cube, region):
    """Read the data files.

    The CSIRO-Mk3-6-0 model seems to be formatted incorrectly
      and you can't select the regions by name.

    """
    cube = iris.load(infile_list,
                     gio.check_iris_var(var),
                     callback=save_history)
    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    cube.attributes = atts
    cube.attributes['history'] = history[0]
    model = atts['model_id']

    if var == 'northward_ocean_heat_transport':
        region_index = {}
        region_index['atlantic_arctic_ocean'] = 0
        region_index['indian_pacific_ocean'] = 1
        region_index['global_ocean'] = 2
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, region_index[region], :]
        else:
            cube = cube.extract(iris.Constraint(region=region))

    cube = timeseries.convert_to_annual(cube, full_months=True)
    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)
        if region != 'global_ocean':
            basin_numbers = {}
            basin_numbers['atlantic_arctic_ocean'] = [2, 4]
            basin_numbers['indian_pacific_ocean'] = [3, 5]
            cube = uconv.mask_unwanted_seas(cube, basin_cube,
                                            basin_numbers[region])

    return cube
Example #23
def load_data(filenames, standard_name, time_constraint, metadata_dict,
              input_timescale):
    """Basic data loading and temporal smoothing"""

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(filenames, gio.check_iris_var(standard_name))
        assert cube, "files do not exist: %s" % (filenames[0])

        metadata_dict[filenames[0]] = cube[0].attributes['history']
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)
        cube = iris.util.squeeze(cube)

    attributes = cube.attributes

    if input_timescale != 'annual':
        cube = timeseries.convert_to_annual(cube, full_months=True)

    return cube, metadata_dict, attributes
Example #24
def main(inargs):
    """Run the program."""

    if inargs.outfiles:
        assert len(inargs.infiles) == len(inargs.outfiles)

    for fnum, infile in enumerate(inargs.infiles):

        data_cube = read_data(infile, gio.check_iris_var(inargs.var))
        mask_cube = read_data(inargs.mask_file, inargs.mask_var)

        assert inargs.mask_method in ['copy', 'sftlf']
        if inargs.mask_method == 'copy':
            assert type(data_cube.data) == numpy.ndarray, \
                "It is assumed that the input data has no mask"
            mask = copy_mask(mask_cube, data_cube.shape)
        else:
            mask = create_mask(mask_cube, data_cube.shape)
        data_cube.data = numpy.ma.asarray(data_cube.data)
        data_cube.data.mask = mask

        outfile_metadata = {
            infile: data_cube.attributes['history'],
        }
        data_cube.attributes['history'] = gio.write_metadata(
            file_info=outfile_metadata)

        outfile = get_outfile(infile,
                              inargs.outfiles,
                              fnum,
                              fixed=inargs.fixed,
                              mask_label=inargs.mask_label)
        print('infile:', infile)
        print('outfile:', outfile)

        if not inargs.dry_run:
            iris.save(data_cube, outfile)
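The mask application itself is plain numpy.ma, independent of iris. A tiny self-contained sketch of the same two steps, with an invented array and mask:

import numpy

data = numpy.arange(6.0).reshape(2, 3)      # stand-in for data_cube.data
mask = numpy.array([[True, False, False],
                    [False, False, True]])  # stand-in for copy_mask/create_mask output

masked = numpy.ma.asarray(data)  # wrap the array without copying the values
masked.mask = mask               # attach the mask, as in the loop above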
Example #25
def main(inargs):
    """Run the program."""

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var) & time_constraint, callback=save_history)
    equalise_attributes(cube)
    cube = cube.concatenate_cube()

    annual_climatology = cube.collapsed('time', iris.analysis.MEAN)

    if inargs.regrid:
        annual_climatology, coord_names, regrid_status = grids.curvilinear_to_rectilinear(annual_climatology)

    if inargs.scale_factor:
        annual_climatology = scale_data(annual_climatology, inargs.scale_factor)

    annual_climatology.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]}) 
    iris.save(annual_climatology, inargs.outfile)
Example #26
def main(inargs):
    """Run the program."""

    file_dict, tas_dict, area_dict, basin_dict = read_data(inargs)

    metadata_dict = {}
    climatology_dict = {}
    time_trend_dict = {}
    tas_scaled_trend_dict = {}
    branch_dict = {}
    for experiment in [
            'historical', 'historicalGHG', 'historicalAA', 'historicalnoAA',
            'piControl'
    ]:
        filenames = file_dict[experiment]
        if not filenames:
            climatology_dict[experiment] = None
            time_trend_dict[experiment] = None
            tas_scaled_trend_dict[experiment] = None
        else:
            print(experiment)
            try:
                time_constraint = gio.get_time_constraint(inargs.total_time)
            except (AttributeError, TypeError):
                time_constraint = iris.Constraint()

            with iris.FUTURE.context(cell_datetime_objects=True):
                cube = iris.load(filenames, gio.check_iris_var(inargs.var))

                # Merge cubes
                metadata_dict[filenames[0]] = cube[0].attributes['history']
                equalise_attributes(cube)
                iris.util.unify_time_units(cube)
                cube = cube.concatenate_cube()
                cube = gio.check_time_units(cube)

                # Time extraction and branch time info
                coord_names = [coord.name() for coord in cube.dim_coords]
                assert coord_names[0] == 'time'

                if 'historical' in experiment:
                    original_time_length = cube.shape[0]
                    cube = cube.extract(time_constraint)
                    new_time_length = cube.shape[0]
                    branch_time_index_offset = original_time_length - new_time_length

                    branch_time = cube.attributes['branch_time']
                    time_length = cube.shape[0]
                    branch_dict[experiment] = (branch_time, time_length,
                                               branch_time_index_offset)

                elif experiment == 'piControl':
                    branch_time, time_length, branch_time_index_offset = \
                        branch_dict['historical']
                    start_index, error = uconv.find_nearest(
                        cube.coord('time').points,
                        float(branch_time) + 15.5,
                        index=True)
                    if abs(error) > 15:
                        print(
                            "WARNING: Large error of %f in locating branch time"
                            % (error))
                        start_index = 0
                    start_index = start_index + branch_time_index_offset
                    cube = cube[start_index:start_index + time_length, ::]

                # Temporal smoothing
                cube = timeseries.convert_to_annual(cube, full_months=True)

                # Mask marginal seas
                if basin_dict[experiment]:
                    basin_cube = iris.load_cube(basin_dict[experiment])
                    cube = uconv.mask_marginal_seas(cube, basin_cube)

                # Regrid and select basin
                cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
                    cube)
                if inargs.basin != 'globe':
                    if basin_dict[experiment] and not regrid_status:
                        ndim = cube.ndim
                        basin_array = uconv.broadcast_array(
                            basin_cube.data, [ndim - 2, ndim - 1], cube.shape)
                    else:
                        basin_array = uconv.create_basin_array(cube)
                    cube.data.mask = numpy.where(
                        (cube.data.mask == False) &
                        (basin_array == basins[inargs.basin]), False, True)

                # Scale
                cube, units = scale_data(cube,
                                         inargs.var,
                                         reverse_sign=inargs.reverse_sign)

                # Zonal statistic
                if inargs.area_adjust:
                    if regrid_status:
                        area_dict[experiment] = None
                    cube, units, metadata_dict = area_ajustment(
                        cube, area_dict[experiment], metadata_dict)
                    zonal_cube = cube.collapsed('longitude', iris.analysis.SUM)
                    aggregation = 'Zonally integrated'
                else:
                    zonal_cube = cube.collapsed('longitude',
                                                iris.analysis.MEAN)
                    aggregation = 'Zonal mean'
                zonal_cube.remove_coord('longitude')

                # Climatology and trends
                climatology_dict[experiment] = calculate_climatology(
                    zonal_cube, inargs.climatology_time, experiment)
                time_trend_dict[experiment] = get_trend_cube(zonal_cube)
                if tas_dict[experiment]:
                    # 'air_temperature' & time_constraint would fail (str has no &);
                    # wrap the name in an iris.Constraint before combining
                    tas_cube = iris.load_cube(
                        tas_dict[experiment],
                        iris.Constraint('air_temperature') & time_constraint)
                    scale_factor = get_scale_factor(tas_cube)
                    print(experiment, 'warming:', scale_factor)
                    tas_scaled_trend_dict[experiment] = \
                        time_trend_dict[experiment] * (1. / abs(scale_factor))
                    metadata_dict[tas_dict[experiment][0]] = \
                        tas_cube.attributes['history']
                else:
                    tas_scaled_trend_dict[experiment] = None

    # Create the plots

    tas_scaled_trend_flag = (tas_scaled_trend_dict['historicalGHG'] and
                             tas_scaled_trend_dict['historicalAA'])

    fig = plt.figure(figsize=[15, 20])
    gs = set_plot_grid(tas_trend=tas_scaled_trend_flag)

    ax_main = plt.subplot(gs[0])
    plt.sca(ax_main)
    plot_climatology(climatology_dict, inargs.var, units, inargs.legloc,
                     aggregation)
    plt.title('%s (%s), %s' % (inargs.model, inargs.run, inargs.basin))

    ax_diff = plt.subplot(gs[1])
    plt.sca(ax_diff)
    plot_difference(climatology_dict)

    ax_time_trend = plt.subplot(gs[2])
    plt.sca(ax_time_trend)
    plot_trend(time_trend_dict, units)

    if tas_scaled_trend_flag:
        ax_tas_trend = plt.subplot(gs[3])
        plt.sca(ax_tas_trend)
        plot_trend(tas_scaled_trend_dict, units, scaled=True)

    plt.xlabel('latitude')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
Example #27
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles,
                     gio.check_iris_var(inargs.var),
                     callback=save_history)

    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube.attributes = atts
    orig_long_name = cube.long_name
    if cube.standard_name is None:
        orig_standard_name = orig_long_name.replace(' ', '_')
    else:
        orig_standard_name = cube.standard_name
    orig_var_name = cube.var_name

    # Temporal smoothing
    cube = timeseries.convert_to_annual(cube, full_months=True)

    # Mask marginal seas
    if inargs.basin:
        if '.nc' in inargs.basin:
            # inargs.basin holds either a basin file path or a keyword
            basin_cube = iris.load_cube(inargs.basin)
            cube = uconv.mask_marginal_seas(cube, basin_cube)
        else:
            basin_cube = 'create'
    else:
        basin_cube = None

    # Regrid (if needed)
    if inargs.regrid:
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

    # Change units (remove m-2)
    if inargs.area:
        cube = multiply_by_area(cube, inargs.area)
        cube.attributes = atts
        cube.long_name = orig_long_name
        cube.standard_name = orig_standard_name
        cube.var_name = orig_var_name

    # History
    history_attribute = get_history_attribute(inargs.infiles[0], history[0])
    cube.attributes['history'] = gio.write_metadata(
        file_info=history_attribute)

    # Calculate output for each basin
    if isinstance(basin_cube, iris.cube.Cube):
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    elif isinstance(basin_cube, str):
        basin_array = uconv.create_basin_array(cube)
        basin_list = ['atlantic', 'pacific', 'indian', 'globe']
    else:
        basin_array = None
        basin_list = ['globe']

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert len(dim_coord_names) == 3
    assert dim_coord_names[0] == 'time'
    x_axis_name = dim_coord_names[2]

    for aux_coord in aux_coord_names:
        cube.remove_coord(aux_coord)

    out_cubes = []
    for basin_name in basin_list:
        data_cube = cube.copy()
        if basin_name != 'globe':
            data_cube.data.mask = numpy.where(
                (data_cube.data.mask == False) &
                (basin_array == basins[basin_name]), False, True)

        # Zonal statistic
        zonal_cube = data_cube.collapsed(
            x_axis_name, aggregation_functions[inargs.zonal_stat])
        zonal_cube.remove_coord(x_axis_name)

        # Attributes
        standard_name = 'zonal_%s_%s_%s' % (inargs.zonal_stat,
                                            orig_standard_name, basin_name)
        var_name = '%s_%s_%s' % (orig_var_name,
                                 aggregation_abbreviations[inargs.zonal_stat],
                                 basin_name)
        iris.std_names.STD_NAMES[standard_name] = {
            'canonical_units': zonal_cube.units
        }

        zonal_cube.standard_name = standard_name
        zonal_cube.long_name = standard_name.replace('_', ' ')
        zonal_cube.var_name = var_name

        out_cubes.append(zonal_cube)

    out_cubes = iris.cube.CubeList(out_cubes)
    iris.save(out_cubes, inargs.outfile)
Example #28
def main(inargs):
    """Run the program."""

    # Read data
    try:
        time_constraint = gio.get_time_constraint(inargs.time_bounds)
    except AttributeError:
        time_constraint = iris.Constraint()

    depth_constraint = gio.iris_vertical_constraint(inargs.min_depth,
                                                    inargs.max_depth)

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles,
                         gio.check_iris_var(inargs.var) & depth_constraint)
        history = cube[0].attributes['history']
        atts = cube[0].attributes
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()

        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)

        cube = iris.util.squeeze(cube)

        if 'salinity' in inargs.var:
            cube = gio.salinity_unit_check(cube)

        infile_metadata = {inargs.infiles[0]: history}

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    if inargs.min_depth or inargs.max_depth:
        cube = vertical_mean(cube)

    agg_cube = get_agg_cube(cube, inargs.aggregation)

    if inargs.regrid:
        before_sum = agg_cube.data.sum()
        before_mean = agg_cube.data.mean()
        agg_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            agg_cube)
        if regrid_status:
            print('Warning: Data has been regridded')
            print('Before sum:', '%.2E' % Decimal(before_sum))
            print('After sum:', '%.2E' % Decimal(agg_cube.data.sum()))
            print('Before mean:', '%.2E' % Decimal(before_mean))
            print('After mean:', '%.2E' % Decimal(agg_cube.data.mean()))

    if inargs.subtract_tropics:
        agg_cube = subtract_tropics(agg_cube)

    if inargs.land_mask:
        sftlf_cube = iris.load_cube(inargs.land_mask, 'land_area_fraction')
        agg_cube = uconv.apply_land_ocean_mask(agg_cube, sftlf_cube, 'ocean')

    atts['history'] = gio.write_metadata(file_info=infile_metadata)
    agg_cube.attributes = atts

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(agg_cube, inargs.outfile)
Example #29
def main(inargs):
    """Run the program."""

    metadata_dict = {}

    # Read data
    control_cube, control_history = read_data(inargs.control_files, inargs.variable,
                                              inargs.grid_point, convert_to_annual=inargs.annual)
    metadata_dict[inargs.control_files[0]] = control_history
    coord_names = [coord.name() for coord in control_cube.dim_coords]
    time_var = coord_names[0]
    assert time_var in ['time', 'year']

    experiment_cube, experiment_history = read_data(inargs.experiment_files, inargs.variable,
                                                    inargs.grid_point, convert_to_annual=inargs.annual)
    metadata_dict[inargs.experiment_files[0]] = experiment_history

    if inargs.dedrifted_files:
        dedrifted_cube, dedrifted_history = read_data(inargs.dedrifted_files, inargs.variable,
                                                      inargs.grid_point, convert_to_annual=inargs.annual)
        metadata_dict[inargs.dedrifted_files[0]] = dedrifted_history   

    if inargs.coefficient_file:
        cubic_data, a_cube = cubic_fit(inargs.coefficient_file, inargs.grid_point,
                                       control_cube.coord(time_var).points)
        #TODO: coeff metadata    

    # Time axis adjustment
    if time_var == 'time':
        first_data_cube = iris.load_cube(inargs.experiment_files[0], gio.check_iris_var(inargs.variable))
        if inargs.grid_point:
            first_data_cube = select_point(first_data_cube, inargs.grid_point, timeseries=True)
        if inargs.annual:
            first_data_cube = timeseries.convert_to_annual(first_data_cube)
        time_diff, branch_time, new_time_unit = remove_drift.time_adjustment(first_data_cube, control_cube, 'annual',
                                                                             branch_time=inargs.branch_time)
        print(f'branch time: {branch_time - 182.5}')
        time_coord = experiment_cube.coord('time')
        time_coord.convert_units(new_time_unit)
        experiment_time_values = time_coord.points.astype(numpy.float32) - time_diff
    elif time_var == 'year':
        if inargs.branch_year is not None:
            branch_year = inargs.branch_year
        else:
            if not inargs.control_time_units:
                control_time_units = gio.fix_time_descriptor(experiment_cube.attributes['parent_time_units'])
            else:
                control_time_units = inargs.control_time_units.replace("_", " ")
            branch_time = experiment_cube.attributes['branch_time_in_parent']
            branch_datetime = cf_units.num2date(branch_time, control_time_units, cf_units.CALENDAR_STANDARD)
            branch_year = branch_datetime.year
        print(f'branch year: {branch_year}')
        experiment_time_values = numpy.arange(branch_year, branch_year + experiment_cube.shape[0])

    # Plot
    fig = plt.figure(figsize=[14, 7])
    plt.plot(control_cube.coord(time_var).points, control_cube.data, label='control')
    plt.plot(experiment_time_values, experiment_cube.data, label='experiment')
    if inargs.dedrifted_files:
        plt.plot(experiment_time_values, dedrifted_cube.data, label='dedrifted')
    if inargs.coefficient_file:
        plt.plot(control_cube.coord(time_var).points, cubic_data, label='cubic fit')
    if inargs.outlier_threshold:
        data, outlier_idx = timeseries.outlier_removal(control_cube.data, inargs.outlier_threshold)
        plt.plot(control_cube.coord(time_var).points[outlier_idx], control_cube.data[outlier_idx],
                 marker='o', linestyle='none', color='r', alpha=0.3)
    if inargs.ylim:
        ymin, ymax = inargs.ylim
        plt.ylim(ymin, ymax)
    plt.ylabel(f"{inargs.variable.replace('_', ' ')} ({control_cube.units})")  # check_iris_var returns a constraint, not a label
    if time_var == 'time':
        plt.xlabel(str(new_time_unit))
    else:
        plt.xlabel('control run year')
    plt.legend()
    if inargs.grid_point:
        title = get_title(inargs.control_files[0], inargs.variable, inargs.grid_point)
        plt.title(title)

    # Save output
    plt.savefig(inargs.outfile, bbox_inches='tight')
    
    log_text = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    log_file = re.sub('.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
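The year-axis branch above converts a numeric branch time into a calendar year with cf_units. A tiny sketch of that conversion in isolation (the unit string and time value are invented for illustration):

import cf_units

unit_str = 'days since 1850-01-01'  # hypothetical parent time units
branch_datetime = cf_units.num2date(45625.0, unit_str, cf_units.CALENDAR_STANDARD)
print(branch_datetime.year)  # the control-run year the experiment branched from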
Example #30
def main(inargs):
    """Run the program."""

    # Read drift coefficients
    coefficient_a_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient a')
    coefficient_b_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient b')
    coefficient_c_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient c')
    coefficient_d_cube = iris.load_cube(inargs.coefficient_file,
                                        'coefficient d')
    if inargs.coefficient_check and inargs.var in ['sea_water_potential_temperature',
                                                   'sea_water_salinity']:
        sanity_summary = coefficient_sanity_check(coefficient_a_cube,
                                                  coefficient_b_cube,
                                                  coefficient_c_cube,
                                                  coefficient_d_cube,
                                                  inargs.var)
    else:
        sanity_summary = None

    # Read first data cube to get some information
    first_data_cube = iris.load_cube(inargs.data_files[0],
                                     gio.check_iris_var(inargs.var))
    coord_names = [coord.name() for coord in first_data_cube.coords(dim_coords=True)]
    assert coord_names[0] == 'time'

    if inargs.annual:
        assert inargs.timescale == 'annual'
        first_data_cube = timeseries.convert_to_annual(first_data_cube,
                                                       chunk=12)

    time_diff, branch_time, new_time_unit = time_adjustment(
        first_data_cube,
        coefficient_a_cube,
        inargs.timescale,
        branch_time=inargs.branch_time)

    data_history = first_data_cube.attributes['history']
    del first_data_cube

    new_cubelist = []
    for fnum, filename in enumerate(inargs.data_files):
        # Read data
        data_cube = iris.load_cube(filename, gio.check_iris_var(inargs.var))

        # Reinstate time dim_coord if necessary
        aux_coord_names = [coord.name() for coord in data_cube.aux_coords]
        if 'time' in aux_coord_names:
            data_cube = iris.util.new_axis(data_cube, 'time')
        if inargs.annual:
            assert inargs.timescale == 'annual'
            data_cube = timeseries.convert_to_annual(data_cube, chunk=12)
        data_cube = check_data_units(data_cube, coefficient_a_cube)
        data_cube = gio.check_time_units(data_cube)
        data_cube.cell_methods = ()
        if not inargs.no_parent_check:
            check_attributes(data_cube.attributes,
                             coefficient_a_cube.attributes)

        # Sync the data time axis with the coefficient time axis
        time_coord = data_cube.coord('time')
        time_coord.convert_units(new_time_unit)

        time_values = time_coord.points.astype(numpy.float32) - time_diff
        if not inargs.no_time_check:
            check_time_adjustment(time_values, coefficient_a_cube, branch_time,
                                  fnum)

        # Remove the drift
        if fnum == 0:
            drift_signal, start_polynomial = apply_polynomial(
                time_values,
                coefficient_a_cube.data,
                coefficient_b_cube.data,
                coefficient_c_cube.data,
                coefficient_d_cube.data,
                poly_start=None,
                chunk=inargs.chunk)
        else:
            try:
                start = start_polynomial[0, ::]
            except IndexError:
                start = start_polynomial[0]
            drift_signal, scraps = apply_polynomial(time_values,
                                                    coefficient_a_cube.data,
                                                    coefficient_b_cube.data,
                                                    coefficient_c_cube.data,
                                                    coefficient_d_cube.data,
                                                    poly_start=start,
                                                    chunk=inargs.chunk)

        if not inargs.dummy:
            new_cube = data_cube - drift_signal
            #assert new_cube.data.mask.sum() == drift_signal.mask.sum()
            new_cube.data.mask = drift_signal.mask
            if not inargs.no_data_check:
                check_data(new_cube, data_cube, filename)
        else:
            print('fake run - drift signal not subtracted')
            new_cube = data_cube
        new_cube.metadata = data_cube.metadata
        if sanity_summary:
            new_cube.attributes['drift_removal'] = sanity_summary

        assert (inargs.outfile[-3:] == '.nc') or (inargs.outfile[-1] == '/')

        if inargs.outfile[-3:] == '.nc':
            new_cubelist.append(new_cube)
        elif inargs.outfile[-1] == '/':
            infile = filename.split('/')[-1]
            if inargs.annual:
                infile = re.sub('Omon', 'Oyr', infile)
            outfile = inargs.outfile + infile
            metadata_dict = {
                infile: data_cube.attributes['history'],
                inargs.coefficient_file: coefficient_a_cube.attributes['history']
            }
            new_cube.attributes['history'] = gio.write_metadata(
                file_info=metadata_dict)

            #assert new_cube.data.dtype == numpy.float32
            iris.save(new_cube, outfile, netcdf_format='NETCDF3_CLASSIC')
            print('output:', outfile)
            del new_cube
            del drift_signal

    if inargs.outfile[-3:] == '.nc':
        new_cubelist = iris.cube.CubeList(new_cubelist)
        equalise_attributes(new_cubelist)
        new_cubelist = new_cubelist.concatenate_cube()  # now a single Cube

        try:
            metadata_dict = {
                inargs.data_files[0]: data_history,
                inargs.coefficient_file: coefficient_a_cube.attributes['history']
            }
        except KeyError:
            metadata_dict = {inargs.data_files[0]: data_history}
        new_cubelist.attributes['history'] = gio.write_metadata(
            file_info=metadata_dict)

        #assert new_cubelist[0].data.dtype == numpy.float32
        iris.save(new_cubelist, inargs.outfile)
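apply_polynomial is project code, but assuming the four coefficient cubes hold the terms of a cubic drift fit a + b*t + c*t**2 + d*t**3 (an assumption based only on the names 'coefficient a' through 'coefficient d'), the per-gridpoint evaluation it performs reduces to something like this sketch with invented coefficients:

import numpy

t = numpy.arange(100, dtype=numpy.float32)  # adjusted time values, as above
a, b, c, d = 1.0, 0.01, -2e-5, 3e-8         # invented drift coefficients
drift = a + b * t + c * t**2 + d * t**3     # drift signal to subtract from the data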