def calc_coefficients(cube,
                      coord_names,
                      masked_array=True,
                      convert_annual=False,
                      chunk_annual=False,
                      outlier_threshold=None):
    """Calculate the polynomial coefficients.

    Can select to convert data to annual timescale first.

    Choices are made to avoid memory errors on large arrays.

    """

    time_var = coord_names[0]
    if 'depth' in coord_names:
        assert coord_names[1] == 'depth', 'coordinate order must be time, depth, ...'
        out_shape = list(cube.shape)
        out_shape[0] = 4
        coefficients = numpy.zeros(out_shape)  #, dtype=numpy.float32)
        for d, cube_slice in enumerate(cube.slices_over('depth')):
            print('Depth:', cube_slice.coord('depth').points[0])
            if convert_annual:
                cube_slice = timeseries.convert_to_annual(cube_slice,
                                                          chunk=chunk_annual)
            time_axis = cube_slice.coord(time_var).points  #.astype(numpy.float32)
            coefficients[:, d, ...] = numpy.ma.apply_along_axis(
                polyfit, 0, cube_slice.data, time_axis, masked_array,
                outlier_threshold)
        if masked_array:
            fill_value = cube_slice.data.fill_value
            coefficients = numpy.ma.masked_values(coefficients, fill_value)
    else:
        if convert_annual:
            cube = timeseries.convert_to_annual(cube, chunk=chunk_annual)
        time_axis = cube.coord(time_var).points  # .astype(numpy.float32)
        if cube.ndim == 1:
            coefficients = polyfit(cube.data, time_axis, masked_array,
                                   outlier_threshold)
        else:
            coefficients = numpy.ma.apply_along_axis(polyfit, 0, cube.data,
                                                     time_axis, masked_array,
                                                     outlier_threshold)
            if masked_array:
                fill_value = cube.data.fill_value
                coefficients = numpy.ma.masked_values(coefficients, fill_value)

    time_start = time_axis[0]
    time_end = time_axis[-1]

    return coefficients, time_start, time_end
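# Usage sketch (not from the original source): the file name, variable and
# outlier threshold below are hypothetical; the cube must have time as its
# first dimension coordinate.
import iris
cube = iris.load_cube('thetao.nc', 'sea_water_potential_temperature')
coord_names = [coord.name() for coord in cube.dim_coords]
coefficients, time_start, time_end = calc_coefficients(
    cube, coord_names, convert_annual=True, outlier_threshold=3.0)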
Example 2
def read_data(infiles, variable, calc_annual=False, chunk=False):
    """Load the input data."""

    cube = iris.load(infiles,
                     gio.check_iris_var(variable),
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    infile_history = {}
    infile_history[infiles[0]] = history[0]
    cube.attributes['history'] = gio.write_metadata(file_info=infile_history)

    return cube, coord_names, aux_coord_names, grid_type
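# Usage sketch (hypothetical file names and variable; not from the original
# source). The asserts above mean the input must be a 3D time/lat/lon cube.
cube, coord_names, aux_coord_names, grid_type = read_data(
    ['tos_200001-200912.nc', 'tos_201001-201912.nc'],
    'sea_surface_temperature', calc_annual=True)
print(grid_type)  # 'curvilinear' or 'latlon'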
Example 3
def process_cube(cube, inargs, sftlf_cube):
    """Process a data cube"""

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)
        
    if inargs.aggregation:
        cube = get_agg_cube(cube, inargs.aggregation, remove_outliers=inargs.remove_outliers)

    if 'salinity' in inargs.var:
        cube = gio.salinity_unit_check(cube)

    if inargs.regrid:
        before_sum = cube.data.sum()
        before_mean = cube.data.mean()
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)
        if regrid_status:
            print('Warning: Data has been regridded')
            print('Before sum:', '%.2E' % Decimal(before_sum))
            print('After sum:', '%.2E' % Decimal(cube.data.sum()))
            print('Before mean:', '%.2E' % Decimal(before_mean))
            print('After mean:', '%.2E' % Decimal(cube.data.mean()))
            
    if sftlf_cube:
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, 'ocean')

    return cube
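# Usage sketch (not from the original source): assumes an iris cube `cube`
# loaded elsewhere. process_cube only reads attributes from inargs, so a
# bare namespace with hypothetical values is enough.
import argparse
inargs = argparse.Namespace(annual=True, aggregation=None,
                            remove_outliers=False, var='sea_water_salinity',
                            regrid=False)
cube = process_cube(cube, inargs, sftlf_cube=None)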
Example 4
def read_data(infile_list, var, model, basin_cube):
    """Read the data files.

    The CSIRO-Mk3-6-0 model seems to be formatted incorrectly,
    so the "global_ocean" region can't be selected by name.

    """
    cube = iris.load(infile_list,
                     gio.check_iris_var(var),
                     callback=save_history)
    atts = cube[0].attributes
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)
    cube.attributes = atts
    cube.attributes['history'] = history[0]

    if var == 'northward_ocean_heat_transport':
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, 2, :]
        else:
            cube = cube.extract(iris.Constraint(region='global_ocean'))

    cube = timeseries.convert_to_annual(cube, full_months=True)

    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)

    return cube
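# Usage sketch (hypothetical file name; not from the original source).
# Passing basin_cube=None skips the marginal-seas masking.
cube = read_data(['hfbasin_mon.nc'], 'northward_ocean_heat_transport',
                 'CSIRO-Mk3-6-0', None)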
Example 5
def read_data(infiles, variable, time_constraint):
    """Load the input data."""

    cube = iris.load(infiles,
                     gio.check_iris_var(variable),
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    cube = cube.extract(time_constraint)

    if 'J' not in str(cube.units):
        cube = convert_to_joules(cube)

    if variable == 'surface_downward_heat_flux_in_sea_water':
        agg_method = 'sum'
    elif variable == 'ocean_heat_content':
        agg_method = 'mean'
    else:
        raise ValueError('no aggregation method defined for %s' % variable)
    cube = timeseries.convert_to_annual(cube, aggregation=agg_method)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    assert len(coord_names) == 3
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    return cube, coord_names, aux_coord_names, grid_type
Example 6
def get_htc_data(htc_file, metadata_dict, rolling_window=None):
    """Read ocean heat transport convergence data and calculate mean and trend.
    
    A hfbasin-convergence or hfy-convergence file is expected.
    
    Input: units = W, timescale = monthly
    Output: units = W s-1, timescale = annual
    
    """

    if htc_file:
        if 'hfy' in htc_file:
            htc_cube = iris.load_cube(
                htc_file, 'zonal sum ocean heat y transport convergence globe')
        else:
            htc_cube = iris.load_cube(htc_file)
        metadata_dict[htc_file] = htc_cube.attributes['history']

        htc_cube = timeseries.convert_to_annual(htc_cube)
        if rolling_window:
            y_axis_name = get_y_axis_name(htc_cube)
            htc_cube = htc_cube.rolling_window(y_axis_name, iris.analysis.MEAN,
                                               rolling_window)

        htc_trend = calc_trend_cube(htc_cube)
        htc_mean = htc_cube.collapsed('time', iris.analysis.MEAN)

        htc_trend.attributes = htc_cube.attributes
        htc_mean.attributes = htc_cube.attributes
    else:
        htc_trend = None
        htc_mean = None

    return htc_trend, htc_mean, metadata_dict
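# Usage sketch (hypothetical file name; not from the original source).
# Passing htc_file=None returns (None, None, metadata_dict) unchanged.
metadata_dict = {}
htc_trend, htc_mean, metadata_dict = get_htc_data(
    'hfy-convergence_yr_model.nc', metadata_dict, rolling_window=5)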
Example 7
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=False)  

    if inargs.aggregation == 'sum':
        cube = cube.collapsed('depth', iris.analysis.SUM)
    else:
        dim_coord_names = [coord.name() for coord in cube.dim_coords]
        depth_coord = cube.coord('depth')
        assert depth_coord.units in ['m', 'dbar'], "Unrecognised depth axis units"
        if depth_coord.units == 'm':
            vert_extents = spatial_weights.calc_vertical_weights_1D(depth_coord, dim_coord_names, cube.shape)
        elif depth_coord.units == 'dbar':
            vert_extents = spatial_weights.calc_vertical_weights_2D(depth_coord, cube.coord('latitude'), dim_coord_names, cube.shape)
        cube = cube.collapsed('depth', iris.analysis.MEAN, weights=vert_extents)
    cube.remove_coord('depth')
    
    metadata_dict = {}
    metadata_dict[inargs.infiles[0]] = history
    cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(cube, inargs.outfile)  
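# Illustration of the depth-weighted mean computed above (a self-contained
# sketch with made-up numbers, independent of the helper modules): layer
# thicknesses act as the weights.
import numpy
layer_means = numpy.array([10.0, 12.0])    # e.g. degC per depth layer
thicknesses = numpy.array([10.0, 20.0])    # layer thickness in metres
depth_mean = numpy.average(layer_means, weights=thicknesses)  # 11.33...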
Example 8
def load_data(infile, basin):
    """Load, temporally aggregate and spatially slice input data"""

    try:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(
                infile,
                'ocean_meridional_overturning_mass_streamfunction',
                callback=save_history)
            equalise_attributes(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)

            cube = cube[:, basin_index[basin], :, :]
            cube = timeseries.convert_to_annual(cube)

        experiment = cube.attributes['experiment_id']
        if experiment == 'historicalMisc':
            experiment = 'historicalAA'

        depth_constraint = iris.Constraint(depth=lambda cell: cell <= 250)
        sh_constraint = iris.Constraint(
            latitude=lambda cell: -30.0 <= cell < 0.0)
        nh_constraint = iris.Constraint(
            latitude=lambda cell: 0.0 < cell <= 30.0)

        sh_cube = cube.extract(depth_constraint & sh_constraint)
        nh_cube = cube.extract(depth_constraint & nh_constraint)
    except OSError:
        sh_cube = nh_cube = experiment = None

    return sh_cube, nh_cube, experiment
Example 9
def read_data(infile_list, var, basin_cube, region):
    """Read the data files.

    The CSIRO-Mk3-6-0 model seems to be formatted incorrectly,
    so the regions can't be selected by name.

    """

    cube, history = gio.combine_files(infile_list, var)

    atts = cube.attributes
    cube.attributes['history'] = history[0]
    model = atts['model_id']

    if var == 'northward_ocean_heat_transport':
        region_index = {}
        region_index['atlantic_arctic_ocean'] = 0
        region_index['indian_pacific_ocean'] = 1
        region_index['global_ocean'] = 2
        if model == 'CSIRO-Mk3-6-0':
            cube = cube[:, region_index[region], :]
        else:
            cube = cube.extract(iris.Constraint(region=region))

    cube = timeseries.convert_to_annual(cube, full_months=True)
    if basin_cube:
        cube = uconv.mask_marginal_seas(cube, basin_cube)
        if region != 'global_ocean':
            basin_numbers = {}
            basin_numbers['atlantic_arctic_ocean'] = [2, 4]
            basin_numbers['indian_pacific_ocean'] = [3, 5]
            cube = uconv.mask_unwanted_seas(cube, basin_cube, basin_numbers[region])
            
    return cube
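# Usage sketch (hypothetical file names; not from the original source):
import iris
basin_cube = iris.load_cube('basin.nc')  # hypothetical basin mask file
cube = read_data(['hfbasin_mon.nc'], 'northward_ocean_heat_transport',
                 basin_cube, 'atlantic_arctic_ocean')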
Example 10
def main(inargs):
    """Run the program."""

    cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var), callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, full_months=True)

    cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(cube)

    if inargs.area:
        cube = multiply_by_area(cube) 

    if inargs.sftlf_file and inargs.realm:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
        cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, inargs.realm)

    zonal_aggregate = cube.collapsed('longitude', aggregation_functions[inargs.aggregation])
    zonal_aggregate.remove_coord('longitude')

    zonal_aggregate.attributes['history'] = gio.write_metadata(file_info={inargs.infiles[0]: history[0]}) 
    iris.save(zonal_aggregate, inargs.outfile)
Example 11
def read_data(infiles,
              var,
              area_cube,
              annual=False,
              multiply_by_area=False,
              chunk_annual=False):
    """Read the input data."""

    cube, history = gio.combine_files(infiles, var)
    if annual:
        cube = timeseries.convert_to_annual(cube,
                                            days_in_month=True,
                                            chunk=chunk_annual)

    cube = uconv.flux_to_magnitude(cube)
    if multiply_by_area:
        cube = spatial_weights.multiply_by_area(cube, area_cube=area_cube)

    coord_names = [coord.name() for coord in cube.coords(dim_coords=True)]
    assert cube.ndim == 3
    lats = cube.coord('latitude').points
    if lats.ndim == 1:
        lat_pos = coord_names.index('latitude')
        lats = uconv.broadcast_array(lats, lat_pos - 1, cube.shape[1:])
    else:
        assert lats.shape == cube.shape[1:]

    return cube, lats, history
Example 12
def read_data(file_list, var, grid_point, convert_to_annual=False):
    """Read input data."""

    cube, history = gio.combine_files(file_list, var)
    if grid_point:  
        cube = select_point(cube, grid_point, timeseries=True)
    if convert_to_annual:
        cube = timeseries.convert_to_annual(cube)
    
    return cube, history[0]
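# Usage sketch (hypothetical file name and grid point; the grid_point format
# is whatever select_point expects, assumed here to be index coordinates):
cube, run_history = read_data(['tos_mon.nc'], 'sea_surface_temperature',
                              (20, 30), convert_to_annual=True)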
Example 13
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)
    if inargs.surface:
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            cube = cube.extract(iris.Constraint(depth=0))
        else:
            print('no depth axis for surface extraction')
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)
    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]},
                          git_repo=repo_dir)

    dim_vals = {}
    dim_vals['latitude'] = get_dim_vals(inargs.lats)
    dim_vals['longitude'] = get_dim_vals(inargs.lons)
    if inargs.levs:
        dim_vals['depth'] = get_dim_vals(inargs.levs)
    else:
        dim_vals['depth'] = get_dim_vals(inargs.depth_bnds, bounds=True)

    # Regrid from curvilinear to rectilinear if necessary
    regrid_status = False
    if inargs.lats:
        horizontal_grid = grids.make_grid(dim_vals['latitude'],
                                          dim_vals['longitude'])
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube, target_grid_cube=horizontal_grid)

    # Regrid to new grid
    if dim_vals['depth'] or not regrid_status:
        sample_points = get_sample_points(cube, dim_vals)
        cube = cube.interpolate(sample_points, iris.analysis.Linear())
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'latitude' in coord_names:
            cube.coord('latitude').guess_bounds()
        if 'longitude' in coord_names:
            cube.coord('longitude').guess_bounds()
        if inargs.levs:
            cube = spatial_weights.guess_depth_bounds(cube)
        else:
            cube.coord('depth').bounds = get_depth_bounds(inargs.depth_bnds)

    if numpy.isnan(numpy.min(cube.data)):
        cube = remove_nans(cube)

    # Reinstate time dim_coord if necessary
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    if 'time' in aux_coord_names:
        cube = iris.util.new_axis(cube, 'time')

    cube.attributes['history'] = log
    iris.save(cube, inargs.outfile, fill_value=1e20)
Example 14
def main(inargs):
    """Run the program."""

    # Read data
    cube = iris.load(inargs.infiles,
                     'surface_downward_eastward_stress',
                     callback=save_history)
    equalise_attributes(cube)
    iris.util.unify_time_units(cube)
    cube = cube.concatenate_cube()
    cube = gio.check_time_units(cube)

    # Prepare data
    cube = timeseries.convert_to_annual(cube)

    sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
    mask = create_land_mask(sftlf_cube, cube.shape)
    cube.data = numpy.ma.asarray(cube.data)
    cube.data.mask = mask

    cube = cube.collapsed('longitude', iris.analysis.MEAN)

    # Calculate metrics
    xdata = cube.coord('latitude').points
    xnew = numpy.linspace(xdata[0], xdata[-1], num=1000, endpoint=True)

    hemispheres = ['sh', 'nh']
    directions = ['easterly', 'westerly']

    metric_dict = {}
    for hemisphere, direction in itertools.product(hemispheres, directions):
        metric_dict[(hemisphere, direction, 'location')] = []
        metric_dict[(hemisphere, direction, 'magnitude')] = []

    for ycube in cube.slices(['latitude']):
        func = interp1d(xdata, ycube.data, kind='cubic')
        ynew = func(xnew)
        for hemisphere, direction in itertools.product(hemispheres,
                                                       directions):
            loc, mag = wind_stress_metrics(xnew, ynew, hemisphere, direction)
            metric_dict[(hemisphere, direction, 'location')].append(loc)
            metric_dict[(hemisphere, direction, 'magnitude')].append(mag)

    # Write the output file
    atts = cube.attributes
    infile_history = {inargs.infiles[0]: history[0]}
    atts['history'] = gio.write_metadata(file_info=infile_history)
    units_dict = {
        'magnitude': cube.units,
        'location': cube.coord('latitude').units
    }
    cube_list = create_outcubes(metric_dict, cube.attributes, units_dict,
                                cube.coord('time'))

    iris.save(cube_list, inargs.outfile)
Example 15
def smooth_data(cube, smooth_type):
    """Apply temporal smoothing to a data cube."""

    assert smooth_type in ['annual', 'annual_running_mean']

    if smooth_type == 'annual_running_mean':
        cube = cube.rolling_window('time', iris.analysis.MEAN, 12)
    elif smooth_type == 'annual':
        cube = timeseries.convert_to_annual(cube)

    return cube
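# Usage sketch (assumes a monthly-timescale cube loaded elsewhere): the two
# options differ in output resolution; 'annual_running_mean' keeps monthly
# time steps while 'annual' resamples to one value per year.
smoothed = smooth_data(cube, 'annual_running_mean')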
Example 16
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             sftlf_cube=None,
             realm=None):
    """Read, merge, temporally aggregate and calculate zonal sum.
    
    Positive is defined as down.
    
    """

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            depth_constraint = iris.Constraint(depth=0)
            cube = cube.extract(depth_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)
        cube = multiply_by_area(cube)

        if 'up' in cube.standard_name:
            cube.data = cube.data * -1

        if sftlf_cube and realm in ['ocean', 'land']:
            cube = uconv.apply_land_ocean_mask(cube, sftlf_cube, realm)

        zonal_sum = cube.collapsed('longitude', iris.analysis.SUM)
        zonal_sum.remove_coord('longitude')

        grid_spacing = grids.get_grid_spacing(zonal_sum)
        zonal_sum.data = zonal_sum.data / grid_spacing

    else:
        zonal_sum = None

    return zonal_sum, metadata_dict
Example 17
def main(inargs):
    """Run the program."""

    tas_cube, history = gio.combine_files(inargs.tas_files, inargs.var)
    if inargs.annual:
        tas_cube = timeseries.convert_to_annual(tas_cube)
    area_data = spatial_weights.area_array(tas_cube)
    coord_names = [coord.name() for coord in tas_cube.dim_coords]
    tasga_cube = tas_cube.collapsed(coord_names[1:], iris.analysis.MEAN, weights=area_data)
    tasga_cube.remove_coord(coord_names[1])
    tasga_cube.remove_coord(coord_names[2])

    tasga_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(tasga_cube, inargs.outfile)
Example 18
def main(inargs):
    """Run the program."""

    if inargs.sftlf_file:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
    else:
        sftlf_cube = None

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var))
        history = cube[0].attributes['history']

        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = iris.util.squeeze(cube)
        cube.attributes['history'] = gio.write_metadata(
            file_info={inargs.infiles[0]: history})

        cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

    output = {}
    output['full'] = calc_fields(cube,
                                 sftlf_cube,
                                 inargs.aggregation,
                                 realm=None,
                                 area=inargs.area)
    if inargs.sftlf_file:
        for realm in ['ocean', 'land']:
            output[realm] = calc_fields(cube,
                                        sftlf_cube,
                                        inargs.aggregation,
                                        realm=realm,
                                        area=inargs.area)

    cube_list = iris.cube.CubeList()
    for realm, output_cubes in output.items():
        for cube in output_cubes:
            cube_list.append(cube)

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(cube_list, inargs.outfile, netcdf_format='NETCDF3_CLASSIC')
Example 19
def main(inargs):
    """Run the program."""

    cube = iris.load_cube(inargs.infile, inargs.var)
    cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)

    log = cmdprov.new_log(
        infile_history={inargs.infile: cube.attributes['history']},
        git_repo=repo_dir)
    cube.attributes['history'] = log

    #assert cube.data.dtype == numpy.float32
    #iris.save(cube, outfile, netcdf_format='NETCDF3_CLASSIC')
    iris.save(cube, inargs.outfile)
Example 20
def main(inargs):
    """Run the program."""

    cube, history = gio.combine_files(inargs.infiles, inargs.var)
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, aggregation='mean', days_in_month=True)
    if inargs.flux_to_mag:
        cube = uconv.flux_to_magnitude(cube)

    dim_coord_names = [coord.name() for coord in cube.dim_coords]
    assert dim_coord_names[0] in ['time', 'year']
    cube.data = numpy.cumsum(cube.data, axis=0)

    cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir, infile_history={inargs.infiles[0]: history[0]})
    iris.save(cube, inargs.outfile)
Example 21
def main(inargs):
    """Run the program."""

    region_constraint = iris.Constraint(
        region='atlantic_arctic_ocean'
    )  # "atlantic_arctic_ocean", "indian_pacific_ocean ", "global_ocean         "
    var_constraint = iris.Constraint(
        'ocean_meridional_overturning_mass_streamfunction')
    cube = iris.load(inargs.infiles,
                     var_constraint & region_constraint,
                     callback=save_history)
    if not cube:
        cube = iris.load(inargs.infiles,
                         'ocean_meridional_overturning_mass_streamfunction',
                         callback=save_history)
        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube = cube[:, 0, :, :]  # index for Atlantic
    else:
        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube.remove_coord('region')

    cube = gio.check_time_units(cube)

    cube = timeseries.convert_to_annual(cube)

    target_lat, error = uconv.find_nearest(cube.coord('latitude').points,
                                           30,
                                           index=False)
    cube = cube.extract(iris.Constraint(latitude=target_lat))
    cube.remove_coord('latitude')

    assert str(cube.units) == 'kg s-1'
    cube.data = (cube.data / 1023) / 1e+6
    cube.units = 'Sv'

    #dim_coord_names = [coord.name() for coord in cube.dim_coords]
    #vert_extents = spatial_weights.calc_vertical_weights_1D(cube.coord('depth'), dim_coord_names, cube.shape)

    metric = cube.collapsed('depth', iris.analysis.MAX)
    metric.remove_coord('depth')

    try:
        metric.attributes['history'] = gio.write_metadata(
            file_info={inargs.infiles[0]: cube.attributes['history']})
    except KeyError:
        pass
    iris.save(metric, inargs.outfile)
Example 22
def read_data(infile, variable, calc_annual=False, chunk=False):
    """Load the input data."""

    cube = iris.load_cube(infile, gio.check_iris_var(variable))
    cube = gio.check_time_units(cube)

    if calc_annual:
        cube = timeseries.convert_to_annual(cube, chunk=chunk)

    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    assert 'time' in coord_names
    grid_type = 'curvilinear' if aux_coord_names == ['latitude', 'longitude'] else 'latlon'

    return cube, coord_names, aux_coord_names, grid_type
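# Usage sketch (hypothetical file name and variable; not from the original
# source). Unlike the multi-file readers above, this version takes a single
# input file.
cube, coord_names, aux_coord_names, grid_type = read_data(
    'tos_mon.nc', 'sea_surface_temperature', calc_annual=True)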
Example 23
def main(inargs):
    """Run the program."""

    temperature_cube, history = gio.combine_files(inargs.temperature_files,
                                                  inargs.var)

    temperature_atts = temperature_cube.attributes
    metadata_dict = {inargs.temperature_files[0]: history[0]}

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    temperature_cube = temperature_cube.extract(level_subset)

    if inargs.annual:
        temperature_cube = timeseries.convert_to_annual(temperature_cube,
                                                        chunk=inargs.chunk)

    if inargs.regrid:
        area_cube = read_area_file(inargs.regrid)
        temperature_cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            temperature_cube, weights=area_cube.data)
        volume_data = spatial_weights.volume_array(temperature_cube)
        grid = 'y72x144'
    else:
        assert inargs.volume_file, "Must provide volume file if not regridding data"
        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                temperature_cube, level_subset,
                                                metadata_dict)
        coord_names = [coord.name() for coord in temperature_cube.dim_coords]
        grid = None

    ohc_cube = ohc(temperature_cube,
                   volume_data,
                   inargs.density,
                   inargs.specific_heat,
                   coord_names,
                   vertical_integral=inargs.vertical_integral,
                   chunk=inargs.chunk)

    ohc_cube = add_metadata(temperature_cube, temperature_atts, ohc_cube,
                            inargs)
    log = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    ohc_cube.attributes['history'] = log

    iris.save(ohc_cube, inargs.outfile)
Example 24
def read_global_variable(model, variable, ensemble, manual_files):
    """Read data for a global variable"""

    manual = file_match(manual_files, model, variable, ensemble)
    if manual or variable == 'massa':
        file_list = manual
    else:
        file_list = clef_search(model, variable, ensemble)

    if file_list:
        cube, history = gio.combine_files(file_list, names[variable])
        cube = timeseries.convert_to_annual(cube)
        cube = time_check(cube)
        extra_log.append(file_list)
    else:
        cube = None

    return cube
Example 25
def main(inargs):
    """Run the program."""

    for infile in inargs.infiles:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(infile, inargs.var)
            cube = timeseries.convert_to_annual(cube)

        metadata_dict = {infile: cube.attributes['history']}
        cube.attributes['history'] = gio.write_metadata(
            file_info=metadata_dict)

        outfile = infile.replace('mon', 'yr')
        outfile = outfile.replace('ua6', 'r87/dbi599')

        #assert cube.data.dtype == numpy.float32
        iris.save(cube, outfile, netcdf_format='NETCDF3_CLASSIC')

        print(outfile)
        del cube
Example 26
def load_data(infiles, basin):
    """Load, temporally aggregate and spatially slice input data"""
    
    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(infiles, 'ocean_meridional_overturning_mass_streamfunction', callback=save_history)
        equalise_attributes(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)

        cube = cube[:, basin_index[basin], :, :]
        cube = timeseries.convert_to_annual(cube)

    experiment = cube.attributes['experiment_id']
    
    depth_constraint = iris.Constraint(depth=lambda cell: cell <= 250)
    lat_constraint = iris.Constraint(latitude=lambda cell: -30.0 <= cell < 30.0)

    cube = cube.extract(depth_constraint & lat_constraint)
    
    return cube, experiment
Example 27
def load_data(infile, basin_cube, basin_name):
    """Load, temporally aggregate and spatially slice input data"""

    cube = iris.load_cube(infile, 'surface_downward_x_stress')
    cube = timeseries.convert_to_annual(cube)
    experiment = cube.attributes['experiment_id']
    if experiment == 'historicalMisc':
        experiment = 'historicalAA'

    if not basin_name == 'globe':
        if basin_cube:
            ndim = cube.ndim
            basin_array = uconv.broadcast_array(basin_cube.data,
                                                [ndim - 2, ndim - 1],
                                                cube.shape)
        else:
            basin_array = uconv.create_basin_array(cube)
        cube.data.mask = numpy.where(
            (cube.data.mask == False) & (basin_array == basins[basin_name]),
            False, True)

    sh_constraint = iris.Constraint(latitude=lambda cell: -30.0 <= cell < 0.0)
    nh_constraint = iris.Constraint(latitude=lambda cell: 0.0 < cell <= 30.0)
    scrit_constraint = iris.Constraint(
        latitude=lambda cell: -17.0 <= cell < -13.0)
    ncrit_constraint = iris.Constraint(
        latitude=lambda cell: 13.0 < cell <= 17.0)

    sh_cube = cube.extract(sh_constraint)
    nh_cube = cube.extract(nh_constraint)
    scrit_cube = cube.extract(scrit_constraint)
    ncrit_cube = cube.extract(ncrit_constraint)

    sh_mean = sh_cube.collapsed(['longitude', 'latitude'], iris.analysis.MEAN)
    nh_mean = nh_cube.collapsed(['longitude', 'latitude'], iris.analysis.MEAN)
    scrit_mean = scrit_cube.collapsed(['longitude', 'latitude'],
                                      iris.analysis.MEAN)
    ncrit_mean = ncrit_cube.collapsed(['longitude', 'latitude'],
                                      iris.analysis.MEAN)

    return sh_mean, nh_mean, scrit_mean, ncrit_mean, experiment
Example 28
def get_data(filenames,
             var,
             metadata_dict,
             time_constraint,
             area=False,
             invert_evap=False):
    """Read, merge, temporally aggregate and calculate zonal mean."""

    if filenames:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load(filenames, gio.check_iris_var(var))

            metadata_dict[filenames[0]] = cube[0].attributes['history']
            equalise_attributes(cube)
            iris.util.unify_time_units(cube)
            cube = cube.concatenate_cube()
            cube = gio.check_time_units(cube)
            cube = iris.util.squeeze(cube)

            cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube)

        assert cube.units == 'kg m-2 s-1'
        cube.data = cube.data * 86400
        cube.units = 'mm/day'

        if invert_evap and (var == 'water_evaporation_flux'):
            cube.data = cube.data * -1

        if area:
            cube = spatial_weights.multiply_by_area(cube)

        zonal_mean = cube.collapsed('longitude', iris.analysis.MEAN)
        zonal_mean.remove_coord('longitude')
    else:
        zonal_mean = None

    return zonal_mean, metadata_dict
Example 29
def main(inargs):
    """Run the program."""

    level_subset = gio.iris_vertical_constraint(inargs.min_depth,
                                                inargs.max_depth)
    for temperature_file in inargs.temperature_files:
        temperature_cube = iris.load_cube(
            temperature_file, inargs.temperature_var & level_subset)
        temperature_cube = gio.check_time_units(temperature_cube)
        metadata_dict = {
            temperature_file: temperature_cube.attributes['history']
        }
        temperature_atts = temperature_cube.attributes

        if inargs.annual:
            temperature_cube = timeseries.convert_to_annual(temperature_cube,
                                                            chunk=inargs.chunk)

        coord_names = [coord.name() for coord in temperature_cube.dim_coords]
        assert coord_names[0] == 'time'
        assert coord_names[1] == 'depth'

        volume_data, metadata_dict = get_volume(inargs.volume_file,
                                                inargs.area_file,
                                                temperature_cube,
                                                metadata_dict)

        ohc_cube = calc_ohc_vertical_integral(temperature_cube,
                                              volume_data,
                                              inargs.density,
                                              inargs.specific_heat,
                                              coord_names,
                                              chunk=inargs.chunk)
        ohc_cube = add_metadata(temperature_cube, temperature_atts, ohc_cube,
                                metadata_dict, inargs)
        ohc_file = get_outfile_name(temperature_file, annual=inargs.annual)

        iris.save(ohc_cube, ohc_file)
        print(ohc_file)
Example 30
def load_data(filenames, standard_name, time_constraint, metadata_dict,
              input_timescale):
    """Basic data loading and temporal smoothing"""

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(filenames, gio.check_iris_var(standard_name))
        assert cube, "files do not exist: %s" % (filenames[0])

        metadata_dict[filenames[0]] = cube[0].attributes['history']
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = cube.extract(time_constraint)
        cube = iris.util.squeeze(cube)

    attributes = cube.attributes

    if input_timescale != 'annual':
        cube = timeseries.convert_to_annual(cube, full_months=True)

    return cube, metadata_dict, attributes