# Example 1
def get_log_text(extra_log):
    """Build the command-line provenance log text.

    Args:
      extra_log (list of list): Extra note strings, grouped in sublists.

    Returns:
      str: Provenance log entry including the extra notes.
    """

    # Flatten the nested note lists before handing them to cmdprov.
    notes = []
    for sublist in extra_log:
        notes.extend(sublist)

    return cmdprov.new_log(git_repo=repo_dir, extra_notes=notes)
# Example 2
def main(inargs):
    """Run the program.

    Combines the input files for each requested variable into one cube,
    then writes all variables to a single output file with updated history.
    """

    # Combining only makes sense with more than one input file.
    assert len(inargs.infiles) > 1
    if inargs.variables:
        variable_list = inargs.variables
    else:
        # No variables given: process every variable in the first input file.
        cube_list = iris.load(inargs.infiles[0])
        variable_list = [cube.long_name for cube in cube_list]

    cube_list = iris.cube.CubeList([])
    for var in variable_list:
        cube, history = gio.combine_files(inargs.infiles, var)
        cube_list.append(cube)

    # Provenance keyed on the last input file and its last history entry.
    # NOTE(review): `history` comes from the final loop iteration —
    # presumably the file history is the same for every variable; confirm.
    metadata_dict = {inargs.infiles[-1]: history[-1]}
    log_entry = cmdprov.new_log(infile_history=metadata_dict,
                                git_repo=repo_dir)
    if len(cube_list) > 1:
        # Attributes must agree across cubes before saving them together.
        iris.util.equalise_attributes(cube_list)
        for cube in cube_list:
            cube.attributes['history'] = log_entry
    else:
        cube_list = cube_list[0]
        cube_list.attributes['history'] = log_entry

    iris.save(cube_list, inargs.outfile)
# Example 3
def main(inargs):
    """Run the program.

    Draws side-by-side box plots of regression coefficients for the energy
    and mass budgets, then saves the image and a provenance log file.

    Args:
      inargs (argparse.Namespace): Parsed command line arguments.
    """

    df = pd.read_csv(inargs.infile)

    fig, axes = plt.subplots(1, 2)
    sns.set(style="whitegrid")

    # One entry per panel: (realm, axes index, palette, title).
    panels = [('energy', 0, 'hot', '(a) energy budget'),
              ('mass', 1, 'GnBu_r', '(b) mass budget')]
    for realm, pnum, palette, title in panels:
        sns.boxplot(x="project", y="regression coefficient", hue="comparison",
                    data=df[df['realm'] == realm], ax=axes[pnum],
                    palette=palette)
        axes[pnum].set_title(title)
        axes[pnum].set_ylim(-0.25, 1.75)
        # Reference line at the ideal coefficient of 1.0.
        axes[pnum].axhline(y=1.0, color='0.5', linewidth=0.2)
        axes[pnum].xaxis.label.set_visible(False)
        set_legend(axes[pnum])

    for ax in axes.flat:
        ax.label_outer()

    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=400)
    # Escape the dot: a bare '.png' pattern would match any character
    # followed by 'png'.
    log_file = re.sub(r'\.png', '.met', inargs.outfile)
    log_text = cmdprov.new_log(git_repo=repo_dir)
    cmdprov.write_log(log_file, log_text)
# Example 4
def main(inargs):
    """Run the program.

    Removes a cubic drift signal (defined by pre-computed polynomial
    coefficients) from the input data and saves the de-drifted cube.
    """

    coefficient_a_cube = iris.load_cube(inargs.coefficient_file, 'coefficient a')
    coefficient_b_cube = iris.load_cube(inargs.coefficient_file, 'coefficient b')
    coefficient_c_cube = iris.load_cube(inargs.coefficient_file, 'coefficient c')
    coefficient_d_cube = iris.load_cube(inargs.coefficient_file, 'coefficient d')

    data_cube, data_history = gio.combine_files(inargs.data_files, inargs.var)
    coord_names = [coord.name() for coord in data_cube.coords(dim_coords=True)]
    assert coord_names[0] == 'year'
    if not inargs.branch_year == None:
        branch_year = inargs.branch_year
    else:
        # No branch year supplied: derive it from the data cube metadata.
        branch_year = get_branch_year(data_cube, inargs.control_time_units)
    # Time values on the control-run axis, starting at the branch year.
    time_values = numpy.arange(branch_year, branch_year + data_cube.shape[0])
    drift_signal, start_polynomial = remove_drift.apply_polynomial(time_values, coefficient_a_cube.data,
                                                                   coefficient_b_cube.data, coefficient_c_cube.data,
                                                                   coefficient_d_cube.data, poly_start=None)

    new_cube = data_cube - drift_signal
    #remove_drift.check_data(new_cube, data_cube, inargs.data_file)
    # Cube arithmetic drops metadata, so restore it from the original cube.
    new_cube.metadata = data_cube.metadata

    metadata_dict = {inargs.data_files[0]: data_history[0],
                     inargs.coefficient_file: coefficient_a_cube.attributes['history']}
    new_cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(new_cube, inargs.outfile)
def main(inargs):
    """Run the program.

    Collapses the latitude axis of a P-E cube into regional totals and
    saves a (time, region) cube.
    """

    pe_cube, pe_history = gio.combine_files(
        inargs.pe_files, 'precipitation minus evaporation flux')
    lat_coord = pe_cube.coord('latitude').points

    # Collapse the latitude axis (axis 1) into regional totals.
    region_data = np.apply_along_axis(get_regional_totals, 1, pe_cube.data,
                                      lat_coord)
    if inargs.cumsum:
        # Running total along the time axis.
        region_data = np.cumsum(region_data, axis=0)

    region_coord = create_region_coord()
    time_coord = pe_cube.coord('time')

    # Register the non-CF standard name so the cube below can use it.
    iris.std_names.STD_NAMES['precipitation_minus_evaporation_flux'] = {
        'canonical_units': pe_cube.units
    }
    dim_coords_list = [(time_coord, 0), (region_coord, 1)]
    out_cube = iris.cube.Cube(
        region_data,
        standard_name='precipitation_minus_evaporation_flux',
        long_name=pe_cube.long_name,
        var_name=pe_cube.var_name,
        units=pe_cube.units,
        attributes=pe_cube.attributes,
        dim_coords_and_dims=dim_coords_list)

    out_cube.attributes['history'] = cmdprov.new_log(
        infile_history={inargs.pe_files[0]: pe_history[0]}, git_repo=repo_dir)
    iris.save(out_cube, inargs.outfile)
def main(inargs):
    """Run the program.

    Builds an ensemble from the input files (regridding each member to a
    reference grid if needed), aggregates it, and saves the result.
    """

    metadata_dict = {}

    if inargs.ref_file:
        ref_cube = iris.load_cube(inargs.ref_file[0], inargs.ref_file[1])
    else:
        ref_cube = None

    cube_list = iris.cube.CubeList([])
    for fnum, filename in enumerate(inargs.infiles):
        cube = iris.load_cube(filename, gio.check_iris_var(inargs.var))
        history = cube.attributes['history']
        #coord_names = [coord.name() for coord in cube.dim_coords]
        # Tag each member so an ensemble dimension can be constructed.
        new_aux_coord = iris.coords.AuxCoord(fnum,
                                             long_name='ensemble_member',
                                             units='no_unit')
        cube.add_aux_coord(new_aux_coord)
        if ref_cube:
            cube = regrid_cube(cube, ref_cube)
        else:
            # With no reference given, the first file defines the target grid.
            ref_cube = cube.copy()
        cube_list.append(cube)

    ensemble_agg = calc_ensemble(cube_list, inargs.aggregation)

    # NOTE(review): only the last file's history is recorded here (loop
    # variables reused after the loop) — confirm this is intentional.
    metadata_dict[filename] = history
    ensemble_agg.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(ensemble_agg, inargs.outfile)
def main(inargs):
    """Run the program.

    Calculates the volume-weighted global mean of the input variable one
    time step at a time, then merges the steps into a timeseries.

    Args:
      inargs (argparse.Namespace): Parsed command line arguments.
    """

    standard_names = {
        'thetao': 'sea_water_potential_temperature',
        'so': 'sea_water_salinity'
    }
    volume_cube = gio.get_ocean_weights(inargs.volfile)
    output_cubelist = iris.cube.CubeList([])

    cube, history = gio.combine_files(inargs.infiles,
                                      standard_names[inargs.invar],
                                      checks=True)
    ntsteps = cube.shape[0]
    # Coordinate names do not change between slices: compute them once
    # instead of on every time step.
    coord_names = [coord.name() for coord in cube.dim_coords]
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    for tstep, cube_slice in enumerate(cube.slices_over('time')):
        print(f'time step {tstep + 1} of {ntsteps}')
        # Volume-weighted mean over all non-time dimensions.
        ga = cube_slice.collapsed(coord_names[1:],
                                  iris.analysis.MEAN,
                                  weights=volume_cube.data)
        for coord in coord_names[1:] + aux_coord_names:
            ga.remove_coord(coord)
        # Bug fix: var_name was previously (re)assigned inside the
        # coord-removal loop; set it once per slice (matches the
        # equivalent per-file script).
        ga.var_name = inargs.invar + 'ga'
        output_cubelist.append(ga)
    outcube = output_cubelist.merge()[0]
    metadata_dict = {}
    metadata_dict[inargs.infiles[0]] = history
    metadata_dict[inargs.volfile] = volume_cube.attributes['history']
    outcube.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
# Example 8
def main(inargs):
    """Run the program.

    Creates an ocean basin cube on the horizontal grid of the reference
    file, optionally applying a land/ocean mask first.
    """

    ref_cube = iris.load_cube(inargs.ref_file, inargs.ref_var)
    coord_names = [coord.name() for coord in ref_cube.dim_coords]
    # Reduce the reference cube to a single 2D horizontal slice.
    if ref_cube.ndim == 4:
        ref_cube = ref_cube[0, 0, ::]
        ref_cube.remove_coord(coord_names[0])
        ref_cube.remove_coord(coord_names[1])
    elif ref_cube.ndim == 3:
        ref_cube = ref_cube[0, ::]
        ref_cube.remove_coord(coord_names[0])
    else:
        assert ref_cube.ndim == 2

    if inargs.sftlf_file:
        # Mask out land using the land-area-fraction file.
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
        ref_cube = uconv.apply_land_ocean_mask(ref_cube,
                                               sftlf_cube,
                                               'ocean',
                                               threshold=inargs.land_threshold)

    # If the reference variable is itself a basin field, reuse its values.
    ref_is_basin = ref_cube.var_name == 'basin'
    basin_array = create_basin_array(ref_cube, ref_is_basin)
    basin_cube = construct_basin_cube(basin_array, ref_cube.attributes,
                                      ref_cube.dim_coords)
    basin_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)

    iris.save(basin_cube, inargs.out_file)
# Example 9
def main(inargs):
    """Run the program.

    Calculates the volume-weighted global mean of each input file and
    saves the combined timeseries.
    """

    standard_names = {
        'thetao': 'sea_water_potential_temperature',
        'so': 'sea_water_salinity'
    }
    volume_cube = gio.get_ocean_weights(inargs.volfile)
    output_cubelist = iris.cube.CubeList([])
    for infile in inargs.infiles:
        cube = iris.load_cube(infile, standard_names[inargs.invar])
        # Broadcast the volume weights (axes 1 and 3 of the data) to the
        # full data shape.
        weights = uconv.broadcast_array(volume_cube.data, [1, 3], cube.shape)
        coord_names = [coord.name() for coord in cube.dim_coords]
        aux_coord_names = [coord.name() for coord in cube.aux_coords]
        # Volume-weighted mean over all non-time dimensions.
        ga = cube.collapsed(coord_names[1:],
                            iris.analysis.MEAN,
                            weights=weights)
        for coord in coord_names[1:] + aux_coord_names:
            ga.remove_coord(coord)
        ga.var_name = inargs.invar + 'ga'
        output_cubelist.append(ga)
        print(infile)

    outcube = gio.combine_cubes(output_cubelist)
    metadata_dict = {}
    # NOTE(review): provenance records only the last file processed —
    # confirm this is intentional.
    metadata_dict[infile] = cube.attributes['history']
    metadata_dict[inargs.volfile] = volume_cube.attributes['history']
    outcube.attributes['history'] = cmdprov.new_log(
        infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(outcube, inargs.outfile)
def main(inargs):
    """Run the program.

    Collapses the depth axis of the input data by sum or weighted mean.
    """

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)

    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=False)

    if inargs.aggregation == 'sum':
        cube = cube.collapsed('depth', iris.analysis.SUM)
    else:
        # A mean needs layer-thickness weights, whose calculation depends
        # on the depth axis units.
        dim_coord_names = [coord.name() for coord in cube.dim_coords]
        depth_coord = cube.coord('depth')
        assert depth_coord.units in ['m', 'dbar'], "Unrecognised depth axis units"
        if depth_coord.units == 'm':
            vert_extents = spatial_weights.calc_vertical_weights_1D(depth_coord, dim_coord_names, cube.shape)
        elif depth_coord.units == 'dbar':
            # Pressure coordinates also need latitude for the weights.
            vert_extents = spatial_weights.calc_vertical_weights_2D(depth_coord, cube.coord('latitude'), dim_coord_names, cube.shape)
        cube = cube.collapsed('depth', iris.analysis.MEAN, weights=vert_extents)
    cube.remove_coord('depth')

    metadata_dict = {}
    metadata_dict[inargs.infiles[0]] = history
    cube.attributes['history'] = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    iris.save(cube, inargs.outfile)
def main(inargs):
    """Run the program.

    Sums the binned input variables for each bin type (temperature,
    salinity, temperature-and-salinity) and saves all three cubes.
    """

    assert len(inargs.infiles) == len(inargs.invars)
    cube_list = iris.cube.CubeList([])
    bin_vars = ['tbin', 'sbin', 'tsbin']
    bin_names = ['temperature', 'salinity', 'temperature and salinity']
    for bin_var, bin_name in zip(bin_vars, bin_names):
        running_sum = 0
        metadata_dict = {}
        for file_name, base_var in zip(inargs.infiles, inargs.invars):
            cube = iris.load_cube(file_name,
                                  f'{base_var} binned by {bin_name}')
            metadata_dict[file_name] = cube.attributes['history']
            running_sum = running_sum + cube.data
        # Reuse the last-loaded cube as the container for the summed data.
        cube.data = running_sum
        if inargs.ref_file:
            # Take naming and attributes from the reference variable.
            ref_cube = iris.load_cube(inargs.ref_file, inargs.new_var)
            cube.attributes = ref_cube.attributes
            cube.var_name = ref_cube.var_name + '_' + bin_var
            cube.long_name = ref_cube.long_name + ' binned by ' + bin_name
        else:
            assert inargs.new_var in long_names.keys()
            cube.var_name = inargs.new_var + '_' + bin_var
            cube.long_name = long_names[
                inargs.new_var] + ' binned by ' + bin_name
            assert cube.units == 'W'
        cube_list.append(cube)

    # NOTE(review): metadata_dict is rebuilt on each bin-type iteration, so
    # the log reflects only the final pass — the file set is identical each
    # time, so the result is the same.
    log = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    for cube in cube_list:
        cube.attributes['history'] = log

    iris.save(cube_list, inargs.new_file)
# Example 12
def get_log_and_key(pr_file, history_attr, plot_type):
    """Return the metadata key and provenance log for an output image.

    Each image format supports a different metadata key.

    Args:
      pr_file (str): Input precipitation file
      history_attr (str): History attribute from pr_file
      plot_type (str): File format for output image

    Returns:
      tuple: (metadata key, single-line log string)
    """

    valid_keys = {'png': 'History',
                  'pdf': 'Title',
                  'eps': 'Creator',
                  'ps': 'Creator'}

    assert plot_type in valid_keys.keys(
    ), f"Image format not one of: {*[*valid_keys],}"

    log = cmdprov.new_log(infile_history={pr_file: history_attr})
    # Collapse to one line: many image metadata fields reject newlines.
    log = log.replace('\n', ' END ')

    return valid_keys[plot_type], log
def main(inargs):
    """Calculate the monthly climatology and save it to netCDF."""

    dset = xr.open_mfdataset(inargs.infiles)
    if inargs.time_bounds:
        start_date, end_date = inargs.time_bounds
        dset = dset.sel(time=slice(start_date, end_date))
    # Monthly climatology over the (optionally restricted) time span.
    clim_dset = dset.groupby('time.month').mean('time', keep_attrs=True)

    if 'history' in dset.attrs:
        log = cmdprov.new_log(
            infile_history={inargs.infiles[0]: dset.attrs['history']},
            git_repo=repo_dir)
    else:
        # No input history available: record only this command.
        log = cmdprov.new_log(git_repo=repo_dir)
    clim_dset.attrs['history'] = log

    clim_dset.to_netcdf(inargs.outfile)
# Example 14
def main(inargs):
    """Run the program.

    Overwrites data at the bogus index ranges with data copied from the
    corresponding target index ranges, then saves the edited cube.
    """

    cube = iris.load_cube(inargs.infile)
    for bogus_indexes, target_indexes in zip(inargs.bogus_indexes, inargs.target_indexes):
        # SECURITY NOTE(review): exec() on user-supplied index strings will
        # run arbitrary Python. Acceptable only because the strings come
        # from a trusted command line — do not expose this to untrusted
        # input.
        exec('cube.data[%s] = cube.data[%s]' %(bogus_indexes, target_indexes))

    cube.attributes['history'] = cmdprov.new_log(infile_history={inargs.infile: cube.attributes['history']}, git_repo=repo_dir)
    iris.save(cube, inargs.outfile)
# Example 15
def main(inargs):
    """Run the program.

    Plots the monthly precipitation climatology (optionally masked to land
    or ocean) and writes a provenance log file alongside the image.

    Args:
      inargs (argparse.Namespace): Parsed command line arguments.
    """

    inlogs = {}
    cube = read_data(inargs.infile, inargs.month)
    inlogs[inargs.infile] = cube.attributes['history']  # add data history
    cube = convert_pr_units(cube)
    # Idiom fix: isinstance() instead of `type(x) is list`.
    if isinstance(inargs.mask, list):
        assert inargs.mask[1] == 'land' or inargs.mask[
            1] == 'ocean', 'mask should specify land or ocean'
        cube = mask_data(cube, inargs.mask[0], inargs.mask[1])
        inlogs[inargs.mask[0]] = cube.attributes['history']  # add mask history
    clim = cube.collapsed('time', iris.analysis.MEAN)
    plot_data(clim, inargs.month, inargs.gridlines, inargs.cbar_levels)
    plt.savefig(inargs.outfile + '.png')

    # Build the log once, after all input histories have been collected
    # (removed a dead `my_log = cmdprov.new_log()` call whose result was
    # immediately discarded).
    my_log = cmdprov.new_log(infile_history=inlogs, git_repo='.')
    cmdprov.write_log(inargs.outfile + '.log', my_log)
# Example 16
def main(inargs):
    """Run the program.

    Regrids ocean data to a rectilinear lat/lon/depth grid, optionally
    extracting the surface layer and converting to annual means first.
    """

    cube, history = gio.combine_files(inargs.infiles, inargs.var, checks=True)
    if inargs.surface:
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'depth' in coord_names:
            cube = cube.extract(iris.Constraint(depth=0))
        else:
            print('no depth axis for surface extraction')
    if inargs.annual:
        cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)
    # Capture provenance before the cube is transformed below.
    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]},
                          git_repo=repo_dir)

    # Target grid values for each dimension.
    dim_vals = {}
    dim_vals['latitude'] = get_dim_vals(inargs.lats)
    dim_vals['longitude'] = get_dim_vals(inargs.lons)
    if inargs.levs:
        dim_vals['depth'] = get_dim_vals(inargs.levs)
    else:
        dim_vals['depth'] = get_dim_vals(inargs.depth_bnds, bounds=True)

    # Regrid from curvilinear to rectilinear if necessary
    regrid_status = False
    if inargs.lats:
        horizontal_grid = grids.make_grid(dim_vals['latitude'],
                                          dim_vals['longitude'])
        cube, coord_names, regrid_status = grids.curvilinear_to_rectilinear(
            cube, target_grid_cube=horizontal_grid)

    # Regrid to new grid
    if dim_vals['depth'] or not regrid_status:
        sample_points = get_sample_points(cube, dim_vals)
        cube = cube.interpolate(sample_points, iris.analysis.Linear())
        coord_names = [coord.name() for coord in cube.dim_coords]
        if 'latitude' in coord_names:
            cube.coord('latitude').guess_bounds()
        if 'longitude' in coord_names:
            cube.coord('longitude').guess_bounds()
        if inargs.levs:
            cube = spatial_weights.guess_depth_bounds(cube)
        else:
            cube.coord('depth').bounds = get_depth_bounds(inargs.depth_bnds)

    # Interpolation can introduce NaNs outside the source domain.
    if numpy.isnan(numpy.min(cube.data)):
        cube = remove_nans(cube)

    # Reinstate time dim_coord if necessary
    aux_coord_names = [coord.name() for coord in cube.aux_coords]
    if 'time' in aux_coord_names:
        cube = iris.util.new_axis(cube, 'time')

    cube.attributes['history'] = log
    iris.save(cube, inargs.outfile, fill_value=1e20)
def _main(args):
    """Run the command line program.

    Calculates the thermal expansion (alpha) or saline contraction (beta)
    coefficient from temperature and salinity data using the gsw
    (TEOS-10) library.
    """

    temperature_cube, temperature_history = gio.combine_files(args.temperature_file, args.temperature_var, checks=True)
    salinity_cube, salinity_history = gio.combine_files(args.salinity_file, 'sea_water_salinity', checks=True)

    assert 'c' in str(temperature_cube.units).lower(), "Input temperature units must be in celsius"

    # Broadcast the coordinate arrays to the full non-time data shape.
    target_shape = temperature_cube.shape[1:]
    depth = temperature_cube.coord('depth').points * -1  # gsw wants height: negative below the surface
    broadcast_depth = uconv.broadcast_array(depth, 0, target_shape)
    broadcast_longitude = uconv.broadcast_array(temperature_cube.coord('longitude').points, [1, 2], target_shape)
    broadcast_latitude = uconv.broadcast_array(temperature_cube.coord('latitude').points, [1, 2], target_shape)
    pressure = gsw.p_from_z(broadcast_depth, broadcast_latitude)

    # Convert practical salinity to absolute salinity, and (if needed)
    # potential temperature to conservative temperature.
    absolute_salinity = gsw.SA_from_SP(salinity_cube.data, pressure, broadcast_longitude, broadcast_latitude)
    if args.temperature_var == 'sea_water_conservative_temperature':
        conservative_temperature = temperature_cube.data
    elif args.temperature_var == 'sea_water_potential_temperature':
        conservative_temperature = gsw.CT_from_pt(absolute_salinity, temperature_cube.data)
    else:
        raise ValueError('Invalid temperature variable')

    if args.coefficient == 'alpha':
        coefficient_data = gsw.alpha(absolute_salinity, conservative_temperature, pressure)
        var_name = 'alpha'
        standard_name = 'thermal_expansion_coefficient'
        long_name = 'thermal expansion coefficient'
        units = '1/K'
    elif args.coefficient == 'beta':
        coefficient_data = gsw.beta(absolute_salinity, conservative_temperature, pressure)
        var_name = 'beta'
        standard_name = 'saline_contraction_coefficient'
        long_name = 'saline contraction coefficient'
        units = 'kg/g'
    else:
        raise ValueError('Coefficient must be alpha or beta')

    # Register the non-CF standard name before assigning it to the cube.
    iris.std_names.STD_NAMES[standard_name] = {'canonical_units': units}
    coefficient_cube = temperature_cube.copy()
    coefficient_cube.data = coefficient_data
    coefficient_cube.standard_name = standard_name
    coefficient_cube.long_name = long_name
    coefficient_cube.var_name = var_name
    coefficient_cube.units = units

    coefficient_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(coefficient_cube, args.outfile)
# Example 18
def main(args):
    """Run the program.

    Plots the seasonal precipitation climatology and saves the image with
    provenance recorded in its metadata.
    """

    dset = xr.open_dataset(args.pr_file)
    clim = dset['pr'].groupby('time.season').mean('time', keep_attrs=True)
    clim = convert_pr_units(clim)
    create_plot(clim, dset.attrs['source_id'], args.season)

    # NOTE(review): this call uses the `infile_logs` keyword while sibling
    # scripts pass `infile_history` — confirm which one matches the
    # installed cmdline_provenance version.
    new_log = cmdprov.new_log(
        infile_logs={args.pr_file: dset.attrs['history']})
    plt.savefig(args.output_file, metadata={'History': new_log}, dpi=200)
def main(inargs):
    """Run the program.

    Plots hemispheric timeseries of netTOA, OHU and OHC (plus an optional
    OHC guide series), then writes the image and a provenance log file.

    Args:
      inargs (argparse.Namespace): Parsed command line arguments.
    """

    time_constraint = gio.get_time_constraint(
        [inargs.start_date, inargs.end_date])

    fig = plt.figure(figsize=[11, 10])

    if inargs.rndt_files:
        rndt_nh, rndt_sh = read_hemisphere_data(inargs.rndt_files, 'rndt',
                                                time_constraint, inargs.ensagg)
        iplt.plot(rndt_nh, label='netTOA, NH', color='red', linestyle='solid')
        iplt.plot(rndt_sh, label='netTOA, SH', color='red', linestyle='dashed')

    if inargs.hfds_files:
        hfds_nh, hfds_sh = read_hemisphere_data(inargs.hfds_files, 'hfds',
                                                time_constraint, inargs.ensagg)
        iplt.plot(hfds_nh, label='OHU, NH', color='orange', linestyle='solid')
        iplt.plot(hfds_sh, label='OHU, SH', color='orange', linestyle='dashed')

    if inargs.ohc_files:
        ohc_nh, ohc_sh = read_hemisphere_data(inargs.ohc_files, 'ohc',
                                              time_constraint, inargs.ensagg)
        iplt.plot(ohc_nh, label='OHC, NH', color='blue', linestyle='solid')
        iplt.plot(ohc_sh, label='OHC, SH', color='blue', linestyle='dashed')

    if inargs.ohc_guide_files:
        guide_nh, guide_sh = read_guide_data(inargs.ohc_guide_files, 'ohc',
                                             time_constraint, inargs.ensagg)
        iplt.plot(guide_nh,
                  label='OHC guide, NH',
                  color='0.5',
                  linestyle='solid')
        iplt.plot(guide_sh,
                  label='OHC guide, SH',
                  color='0.5',
                  linestyle='dashed')

    plt.legend()
    if inargs.ybounds:
        ymin, ymax = inargs.ybounds
        plt.ylim([ymin, ymax])

    # Fall back to matplotlib's configured default DPI via the documented
    # plt.rcParams mapping (replaces a fragile plt.savefig.__globals__ hack
    # that reached into the module namespace).
    dpi = inargs.dpi if inargs.dpi else plt.rcParams['figure.dpi']
    print('dpi =', dpi)
    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=dpi)

    log_text = cmdprov.new_log(
        git_repo=repo_dir)  # infile_history={nh_file: history}
    # Escape the dot so only a literal '.png' is replaced.
    log_file = re.sub(r'\.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
# Example 20
def main(inargs):
    """Run the program.

    Scatter-plots accumulated heat (netTOA, OHU, OHC) per model for each
    experiment, horizontally offsetting the three variables at each model.

    Args:
      inargs (argparse.Namespace): Parsed command line arguments.
    """

    df = pandas.read_csv(inargs.infile)

    colors = {'historical': 'black', 'GHG-only': 'red', 'AA-only': 'blue'}

    fig, ax = plt.subplots(figsize=(16, 10))

    # Idiom fix: a named def instead of an assigned lambda (PEP 8).
    def offset(points):
        """Return a horizontal data-space shift of `points`/72 inches."""
        return transforms.ScaledTranslation(points / 72., 0,
                                            plt.gcf().dpi_scale_trans)

    trans = plt.gca().transData

    for experiment in ['historical', 'GHG-only', 'AA-only']:
        toa_vals = numpy.array(
            df.loc[(df['variable'] == 'netTOA')
                   & (df['experiment'] == experiment)]['accumulated_heat'])
        ohu_vals = numpy.array(
            df.loc[(df['variable'] == 'OHU')
                   & (df['experiment'] == experiment)]['accumulated_heat'])
        ohc_vals = numpy.array(
            df.loc[(df['variable'] == 'OHC')
                   & (df['experiment'] == experiment)]['accumulated_heat'])
        xvals = numpy.arange(toa_vals.shape[0])

        # netTOA to the left, OHU centred, OHC to the right of each model.
        plt.scatter(xvals,
                    toa_vals,
                    c=colors[experiment],
                    marker='s',
                    transform=trans + offset(-5))
        plt.scatter(xvals, ohu_vals, c=colors[experiment])
        plt.scatter(xvals,
                    ohc_vals,
                    c=colors[experiment],
                    marker='D',
                    transform=trans + offset(5))

    plt.ticklabel_format(style='sci',
                         axis='y',
                         scilimits=(0, 0),
                         useMathText=True)
    ax.yaxis.major.formatter._useMathText = True

    plt.xticks(xvals, list(df.model.unique()), rotation=40, ha='right')
    ax.axhline(y=0, color='0.5', linestyle='--', linewidth=0.5)
    add_legends()
    plt.ylabel('J')

    plt.savefig(inargs.outfile, bbox_inches='tight')

    log_text = cmdprov.new_log(git_repo=repo_dir)
    # Escape the dot so only a literal '.png' is replaced.
    log_file = re.sub(r'\.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
def set_global_atts(inargs, cube, infile1, history):
    """Set global attributes.

    Args:
      inargs (argparse.Namespace): Input arguments (unused in this body;
        presumably kept for a consistent signature — confirm)
      cube (iris cube): Cube providing the base attributes
      infile1 (str): Input file recorded in the provenance log
      history (str): History attribute for infile1

    Returns:
      dict: Attribute dictionary with polynomial and history entries added.
    """

    atts = copy.copy(cube.attributes)
    atts['polynomial'] = 'a + bx + cx^2 + dx^3'
    try:
        atts['history'] = cmdprov.new_log(infile_history={infile1: history},
                                          git_repo=repo_dir)
    except IndexError:
        # NOTE(review): silently skips the history entry when new_log raises
        # IndexError — presumably a best-effort guard for missing input
        # history; confirm the intent before tightening.
        pass

    return atts
# Example 22
def main(inargs):
    """Run the program.

    For each input file, calculates per-basin cumulative and percentage
    change statistics, plots the ensemble, and writes the image, a
    provenance log and a CSV of the results.
    """

    time_constraint = gio.get_time_constraint(inargs.time_bounds)
    basin_names = ['atlantic', 'indian', 'pacific', 'land']
    anomaly_data = {}
    start_data = {}
    data = []
    model_list = []
    for filenum, infile in enumerate(inargs.infiles):
        cube, anomaly_data, start = get_data(infile, inargs.var,
                                             time_constraint)
        units = cube.units
        # Cumulative change = anomaly at the final time step, per basin.
        cum_change = anomaly_data[-1, :]
        # CMIP6 uses 'source_id'; fall back to the CMIP5 'model_id'.
        model = cube.attributes[
            'source_id'] if 'source_id' in cube.attributes else cube.attributes[
                'model_id']
        ntimes = anomaly_data.shape[0]
        # Mean change per time step, as a percentage of the starting value.
        pct_change = ((cum_change / ntimes) / np.absolute(start)) * 100
        total_cum_change = cum_change.sum()
        total_start = start.sum()
        total_pct_change = ((total_cum_change / ntimes) / total_start) * 100
        # Departure of each basin from the all-basin percentage change.
        pct_change_anomaly = pct_change - total_pct_change

        model_list.append(model)
        for basin in range(4):
            data.append([
                model, basin_names[basin], start[basin], cum_change[basin],
                pct_change[basin], pct_change_anomaly[basin]
            ])

    df = pd.DataFrame(data,
                      columns=[
                          'model', 'basin', 'start', 'cumulative_change',
                          'percentage_change', 'percentage_change_anomaly'
                      ])

    model_list.sort()
    # Experiment taken from the last file read — presumably shared by all
    # input files; confirm.
    experiment = cube.attributes['experiment_id']
    plot_ensemble_lines(df, inargs.var, model_list, experiment, str(units),
                        inargs.ymax)

    plt.savefig(inargs.outfile, bbox_inches='tight')

    log_file = re.sub('.png', '.met', inargs.outfile)
    log_text = cmdprov.new_log(
        infile_history={inargs.infiles[-1]: cube.attributes['history']},
        git_repo=repo_dir)
    cmdprov.write_log(log_file, log_text)

    csv_file = re.sub('.png', '.csv', inargs.outfile)
    df.to_csv(csv_file)
# Example 23
def main(inargs):
    """Mask invalid, fill and sentinel values in the input data file."""

    cube = iris.load_cube(inargs.infile, inargs.var)

    # First mask NaNs and infs.
    cube.data = numpy.ma.masked_invalid(cube.data)
    if inargs.fill_value:
        # Then mask anything at or above the array's fill value.
        cube.data = numpy.ma.masked_where(
            cube.data >= cube.data.fill_value, cube.data)
    if inargs.mask_value:
        # Finally mask the user-nominated sentinel value.
        cube.data = numpy.ma.masked_where(
            cube.data == inargs.mask_value, cube.data)

    cube.attributes['history'] = cmdprov.new_log(
        git_repo=repo_dir,
        infile_history={inargs.infile: cube.attributes['history']})
    iris.save(cube, inargs.outfile)
# Example 24
def combine_infiles(inargs, time_constraint, depth_constraint):
    """Combine multiple input files into one cube.

    Args:
      inargs (argparse.Namespace): Arguments providing infiles and var
      time_constraint (iris.Constraint): Time period to extract
      depth_constraint (iris.Constraint): Depth range to extract

    Returns:
      iris cube: Combined, constrained cube with updated history.
    """

    cube, history = gio.combine_files(inargs.infiles, inargs.var)
    # (Removed an unused `atts = cube[0].attributes` assignment.)

    cube = cube.extract(time_constraint & depth_constraint)
    # Drop any resulting length-one dimensions.
    cube = iris.util.squeeze(cube)

    log = cmdprov.new_log(infile_history={inargs.infiles[0]: history[0]}, git_repo=repo_dir)
    cube.attributes['history'] = log

    return cube
# Example 25
def main(inargs):
    """Run the program.

    Calculates the area-weighted global mean of the input variable.
    """

    tas_cube, history = gio.combine_files(inargs.tas_files, inargs.var)
    if inargs.annual:
        tas_cube = timeseries.convert_to_annual(tas_cube)
    area_data = spatial_weights.area_array(tas_cube)
    coord_names = [coord.name() for coord in tas_cube.dim_coords]
    # Area-weighted mean over the spatial (non-time) dimensions.
    tasga_cube = tas_cube.collapsed(coord_names[1:], iris.analysis.MEAN, weights=area_data)
    tasga_cube.remove_coord(coord_names[1])
    tasga_cube.remove_coord(coord_names[2])

    # NOTE(review): `history` from combine_files is never used — consider
    # passing infile_history to new_log as the sibling scripts do.
    tasga_cube.attributes['history'] = cmdprov.new_log(git_repo=repo_dir)
    iris.save(tasga_cube, inargs.outfile)
# Example 26
def main(inargs):
    """Convert the input timeseries to annual means and save it."""

    cube = iris.load_cube(inargs.infile, inargs.var)
    cube = timeseries.convert_to_annual(cube, chunk=inargs.chunk)

    # Record provenance against the original file history.
    cube.attributes['history'] = cmdprov.new_log(
        infile_history={inargs.infile: cube.attributes['history']},
        git_repo=repo_dir)

    iris.save(cube, inargs.outfile)
# Example 27
def main(inargs):
    """Run the program.

    Draws one filled-contour panel per input file (with optional contour
    overlays) sharing a single colour bar, then saves the image and a
    provenance log file.
    """

    metadata_dict = {}
    nplots = len(inargs.contourf_files)
    if inargs.contour_files:
        # Each filled-contour panel needs a matching contour overlay file.
        assert len(inargs.contour_files) == nplots

    fig, cbar_ax, gs = grid_config(nplots)

    for pnum in range(nplots):
        contourf_cube = iris.load_cube(inargs.contourf_files[pnum],
                                       inargs.variable)
        metadata_dict[
            inargs.contourf_files[pnum]] = contourf_cube.attributes['history']

        if inargs.rescale:
            # Regrid to a regular depth (1 m) by latitude (1 degree) grid.
            depth_values = numpy.arange(0.5, 5500, 1)
            lat_values = numpy.arange(-89.5, 90.5, 1)
            ref_cube = make_grid(depth_values, lat_values)
            contourf_cube = regrid(contourf_cube, ref_cube)

        if inargs.contour_files:
            contour_cube = iris.load_cube(inargs.contour_files[pnum],
                                          inargs.variable)
            metadata_dict[inargs.contour_files[
                pnum]] = contour_cube.attributes['history']
        else:
            contour_cube = None

        title = inargs.titles[pnum] if inargs.titles else None
        create_plot(gs[pnum],
                    cbar_ax,
                    contourf_cube,
                    contour_cube,
                    inargs.scale_factor,
                    inargs.nyrs,
                    title,
                    ticks=inargs.ticks,
                    rescale=inargs.rescale)

    # Save output
    dpi = inargs.dpi if inargs.dpi else plt.savefig.__globals__['rcParams'][
        'figure.dpi']
    print('dpi =', dpi)
    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=dpi)

    log_text = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    log_file = re.sub('.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
# Example 28
def main(inargs):
    """Run the program.

    Plots energy or mass conservation metrics per model/run and writes the
    image plus a provenance log file.

    Args:
      inargs (argparse.Namespace): Parsed command line arguments.
    """

    df = pd.read_csv(inargs.infile)
    # Index rows by "model (run)" labels for plotting.
    df.set_index(df['model'] + ' (' + df['run'] + ')', drop=True, inplace=True)

    if inargs.domain == 'energy':
        plot_energy_conservation(df, inargs.cmip_line)
    elif inargs.domain == 'mass':
        plot_mass_conservation(df, inargs.cmip_line)

    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=200)
    # Escape the dot: a bare '.png' pattern would match any character
    # followed by 'png'.
    log_file = re.sub(r'\.png', '.met', inargs.outfile)
    log_text = cmdprov.new_log(git_repo=repo_dir)
    cmdprov.write_log(log_file, log_text)
# Example 29
def get_log_and_key(pr_file, history_attr, plot_type):
    """Return the image metadata key and the provenance log.

    Args:
      pr_file (str): Input precipitation file
      history_attr (str): History attribute of pr_file
      plot_type (str): Output image format

    Returns:
      tuple: (metadata key, log text with newlines replaced by ' END ')
    """

    # Each image format supports a different metadata key.
    valid_keys = {'png': 'History',
                  'pdf': 'Title',
                  'eps': 'Creator',
                  'ps': 'Creator'}

    assert plot_type in valid_keys.keys(
    ), f"Image format not one of: {*[*valid_keys],}"

    log = cmdprov.new_log(infile_history={pr_file: history_attr})

    return valid_keys[plot_type], log.replace('\n', ' END ')
def main(inargs):
    """Convert a file's time axis to new units, keeping its calendar."""

    cube = iris.load_cube(inargs.infile, inargs.variable)

    # Build the target units with the same calendar as the existing axis.
    time_coord = cube.coord('time')
    target_units = cf_units.Unit(inargs.new_time_units,
                                 calendar=time_coord.units.calendar)
    time_coord.convert_units(target_units)

    cube.attributes['history'] = cmdprov.new_log(
        infile_history={inargs.infile: cube.attributes['history']})

    if inargs.infile == inargs.outfile:
        cube.data # to realise lazy data to allow file overwrite
    iris.save(cube, inargs.outfile)