Code example #1
def main(inargs):
    """Run the program."""

    time_constraints = {}
    time_constraints['historical'] = gio.get_time_constraint(inargs.hist_time)
    time_constraints['rcp'] = gio.get_time_constraint(inargs.rcp_time)

    if inargs.basin_file:
        basin_cube = iris.load_cube(inargs.basin_file)
    else:
        basin_cube = None

    width = 20
    height = 21
    fig = plt.figure(figsize=(width, height))

    ax_dict = {}
    ax_dict[('tropics', 'atlantic')] = fig.add_subplot(3, 2, 1)
    ax_dict[('critical', 'atlantic')] = fig.add_subplot(3, 2, 2)
    ax_dict[('tropics', 'pacific')] = fig.add_subplot(3, 2, 3)
    ax_dict[('critical', 'pacific')] = fig.add_subplot(3, 2, 4)
    ax_dict[('tropics', 'globe')] = fig.add_subplot(3, 2, 5)
    ax_dict[('critical', 'globe')] = fig.add_subplot(3, 2, 6)

    previous_experiments = []
    for filenum, infile in enumerate(inargs.infiles):
        for basin in ['atlantic', 'pacific', 'globe']:
            sh_mean, nh_mean, scrit_mean, ncrit_mean, experiment = load_data(
                infile, basin_cube, basin)
            plot_data(ax_dict[('tropics', basin)],
                      sh_mean,
                      nh_mean,
                      experiment,
                      previous_experiments,
                      basin,
                      crit=False,
                      plot_type=inargs.plot_type)
            plot_data(ax_dict[('critical', basin)],
                      scrit_mean,
                      ncrit_mean,
                      experiment,
                      previous_experiments,
                      basin,
                      crit=True,
                      plot_type=inargs.plot_type)
        previous_experiments.append(experiment)

    title = 'Annual Mean Surface Downward X Stress, %s' % (
        sh_mean.attributes['model_id'])
    plt.suptitle(title, size='large')
    #    plt.subplots_adjust(top=0.90)

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(
        inargs.outfile,
        file_info={inargs.infiles[-1]: sh_mean.attributes['history']})
Code example #2
def main(inargs):
    """Run the program."""

    hist_time_constraint = gio.get_time_constraint(['1850-01-01', '2005-12-31'])
    outcubes = iris.cube.CubeList([])
    for var in inargs.variables:
        metadata_dict = {}
        hist_cube = iris.load_cube(inargs.hist_file, gio.check_iris_var(var) & hist_time_constraint)
        hist_cube = clean_attributes(hist_cube)
        branch_time = hist_cube.attributes['branch_time']
        history = hist_cube.attributes['history']
        
        rcp_cube = iris.load_cube(inargs.rcp_file, gio.check_iris_var(var))
        rcp_cube = clean_attributes(rcp_cube)
        rcp_experiment = rcp_cube.attributes['experiment_id']

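        # For cumulative-sum variables, offset the RCP data by the final
        # historical value so the concatenated series presumably carries on
        # from where the historical series ends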
        if inargs.cumsum:
            rcp_cube.data = rcp_cube.data + hist_cube.data[-1]

        cube_list = iris.cube.CubeList([hist_cube, rcp_cube])
        equalise_attributes(cube_list)
        iris.util.unify_time_units(cube_list)
        cube = cube_list.concatenate_cube()
        cube.attributes['branch_time'] = branch_time
        cube.attributes['experiment_id'] = 'historical-' + rcp_experiment

        outcubes.append(cube.copy())

    for cube in outcubes:
        cube.attributes['history'] = gio.write_metadata(file_info={inargs.hist_file: history})
    equalise_attributes(outcubes)

    iris.save(outcubes, inargs.outfile)
Code example #3
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time)
    nfiles = check_inputs(inargs)
    for fnum in range(nfiles):
        cube_dict = {}
        cube_dict['rsdt'] = get_data(inargs.rsdt_files[fnum],
                                     'toa_incoming_shortwave_flux',
                                     time_constraint)
        cube_dict['rsut'] = get_data(inargs.rsut_files[fnum],
                                     'toa_outgoing_shortwave_flux',
                                     time_constraint)
        cube_dict['rlut'] = get_data(inargs.rlut_files[fnum],
                                     'toa_outgoing_longwave_flux',
                                     time_constraint)

        cube_dict = equalise_time_axes(cube_dict)
        cube_dict = calc_rndt(cube_dict)
        add_metadata(cube_dict['rndt'], cube_dict['rsdt'].attributes)

        if inargs.outfile:
            rndt_file = inargs.outfile
        else:
            assert inargs.time is None
            rndt_file = get_outfile_name(inargs.rsdt_files[fnum])
        print(rndt_file)
        iris.save(cube_dict['rndt'], rndt_file)
Code example #4
def main(inargs):
    """Run the program."""

    data_dict = {}
    for experiment in list(experiment_colors.keys()):
        data_dict[(experiment, 'x_data')] = numpy.ma.array([])
        data_dict[(experiment, 'y_data')] = numpy.ma.array([])

    metadata_dict = {}
    for data_file, basin_file in inargs.file_pair:
        try:
            time_constraint = gio.get_time_constraint(inargs.time)
        except AttributeError:
            time_constraint = iris.Constraint()

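        # Combining a variable name string with a Constraint via & works
        # because iris converts the string to a name constraint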
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(data_file,
                                  'sea_surface_salinity' & time_constraint)

        basin_cube = read_basin(basin_file)
        ndim = cube.ndim
        basin_array = uconv.broadcast_array(basin_cube.data,
                                            [ndim - 2, ndim - 1], cube.shape)

        metadata_dict[data_file] = cube.attributes['history']
        metadata_dict[basin_file] = basin_cube.attributes['history']

        model, experiment = get_experiment_details(cube)

        for basin in list(basins.keys()):
            zonal_climatology, zonal_trends = calc_zonal_stats(
                cube.copy(), basin_array, basin)
            data_dict[(experiment, 'x_data')] = numpy.ma.append(
                data_dict[(experiment, 'x_data')], zonal_climatology)
            data_dict[(experiment, 'y_data')] = numpy.ma.append(
                data_dict[(experiment, 'y_data')], zonal_trends)

    fig = plt.figure(figsize=(12, 8))
    for experiment, color in experiment_colors.items():
        x_data = data_dict[(experiment, 'x_data')]
        y_data = data_dict[(experiment, 'y_data')]

        if numpy.any(x_data):
            plt.scatter(x_data[::inargs.thin],
                        y_data[::inargs.thin],
                        facecolors='none',
                        edgecolors=color,
                        label=experiment)
            if experiment in ['AA', 'noAA']:
                x_trend, y_trend = calc_trend(x_data, y_data, experiment)
                plt.plot(x_trend, y_trend, color=color)

    plt.legend(loc=4)
    plt.xlabel('Climatological mean salinity')
    plt.ylabel('Salinity trend (per 50 years)')
    plt.title(model)

    # Write output
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
Code example #5
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time)

    nh_lower, nh_upper = inargs.nh_lat_bounds
    nh_constraint = iris.Constraint(
        latitude=lambda cell: nh_lower <= cell < nh_upper)

    sh_lower, sh_upper = inargs.sh_lat_bounds
    sh_constraint = iris.Constraint(
        latitude=lambda cell: sh_lower <= cell < sh_upper)

    data_dict = {}
    plot_details_list = []
    for infiles in inargs.experiment_files:
        cube, model, experiment, orig_units = get_data(
            infiles, inargs.variable, nh_constraint, sh_constraint,
            time_constraint, inargs.area_file)
        iplt.plot(cube, label=experiment, color=experiment_colors[experiment])

    plt.legend()
    plt.xlabel('year')
    plt.ylabel('NH / SH')

    title = '%s interhemispheric %s comparison' % (
        model, inargs.variable.replace('_', ' '))
    plt.title(title)
    #plt.subplots_adjust(top=0.90)

    plt.savefig(inargs.outfile, bbox_inches='tight')
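    # 'history' is not defined in this function; presumably it is a
    # module-level list of file history attributes populated during loading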
    gio.write_metadata(inargs.outfile,
                       file_info={inargs.experiment_files[0][0]: history[0]})
Code example #6
def main(inargs):
    """Run the program."""

    assert len(inargs.xfiles) == len(inargs.yfiles)

    time_constraint = gio.get_time_constraint(inargs.time)
    fig, ax = plt.subplots()
    plt.axhline(y=inargs.hline, color='0.5', linestyle='--')
    plt.axvline(x=0, color='0.5', linestyle='--')
    color_dict = get_colors(inargs.xfiles)

    legend_models = []
    xtrends = {'historicalGHG': [], 'historicalMisc': [], 'historical': []}
    ytrends = {'historicalGHG': [], 'historicalMisc': [], 'historical': []}
    for xfile, yfile in zip(inargs.xfiles, inargs.yfiles):
        with iris.FUTURE.context(cell_datetime_objects=True):
            ytrend, ycube, ymodel, yexperiment, yrip = load_data(
                yfile, inargs.yvar, time_constraint)
            xtrend, xcube, xmodel, xexperiment, xrip = load_data(
                xfile, [inargs.xvar], time_constraint)

        assert (xmodel, xexperiment, xrip) == (ymodel, yexperiment, yrip)

        if xmodel not in legend_models:
            label = xmodel
            legend_models.append(xmodel)
        else:
            label = None
        plt.plot(xtrend,
                 ytrend,
                 markers[xexperiment],
                 label=label,
                 color=color_dict[xmodel])
        xtrends[xexperiment].append(xtrend)
        ytrends[xexperiment].append(ytrend)

    if inargs.best_fit:
        for experiment in ['historicalGHG', 'historicalMisc', 'historical']:
            if xtrends[experiment]:
                plot_line_of_best_fit(xtrends[experiment], ytrends[experiment])

    title = 'linear trend, %s-%s' % (inargs.time[0][0:4], inargs.time[1][0:4])
    plt.title(title)
    xlabel, ylabel = set_axis_labels(inargs, xcube.units, ycube.units)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)

    handles, labels = ax.get_legend_handles_labels()
    labels, handles = zip(*sorted(zip(labels, handles), key=lambda t: t[0]))
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
    ax.legend(handles, labels, loc='center left', bbox_to_anchor=(1, 0.5))

    plt.savefig(inargs.outfile, bbox_inches='tight')

    metadata_dict = {
        xfile: xcube.attributes['history'],
        yfile: ycube.attributes['history']
    }
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
Code example #7
def main(inargs):
    """Run the program."""

    if inargs.area_file:
        area_cube = iris.load_cube(inargs.area_file, 'cell_area')
    else:
        area_cube = None

    time_constraint = gio.get_time_constraint(inargs.time)

    hfds_cube_list = calc_regional_values(
        inargs.hfds_files, 'surface_downward_heat_flux_in_sea_water',
        time_constraint, area_cube)
    ohc_cube_list = calc_regional_values(inargs.ohc_files,
                                         'ocean_heat_content', time_constraint,
                                         area_cube)

    cube_list = ohc_cube_list + hfds_cube_list

    infile_history = {}
    infile_history[inargs.ohc_files[0]] = history[0]
    infile_history[inargs.hfds_files[-1]] = history[-1]
    cube_list = update_metadata(cube_list, infile_history)

    iris.save(cube_list, inargs.outfile)
Code example #8
def main(inargs):
    """Run the program."""

    # Read data
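    # gio.get_time_constraint presumably raises AttributeError when
    # inargs.time is None, so fall back to an unrestricted constraint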
    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    diff_trends = {}
    metadata_dict = {}
    for infile in inargs.infiles:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube_sthext = iris.load_cube(
                infile,
                'ocean heat content southern extratropics60' & time_constraint)
            cube_notsthext = iris.load_cube(
                infile,
                'ocean heat content northern extratropics60' & time_constraint)

        model, experiment, run = gio.get_cmip5_file_details(cube_sthext)
        run_ri = run[:-2]
        run_p = run[-2:]
        update_lists(model, experiment, run_ri, run_p)

        cube_sthext = cube_sthext.rolling_window('time', iris.analysis.MEAN,
                                                 12)
        cube_notsthext = cube_notsthext.rolling_window('time',
                                                       iris.analysis.MEAN, 12)

        diff_trends[(model, experiment, run_ri,
                     run_p)] = calc_diff_trends(cube_sthext, cube_notsthext)
        metadata_dict[infile] = cube_sthext.attributes['history']

    # Plot
    fig = plt.figure()  # figsize=[15, 7]

    tex_units, exponent = uconv.units_info(str(cube_sthext.units))
    for model in models:
        for experiment in experiments:
            for run_p in run_ps:
                data_compilation = numpy.array([])
                for run_ri in run_ris:
                    try:
                        data = diff_trends[(model, experiment, run_ri, run_p)]
                        data_compilation = numpy.concatenate(
                            (data_compilation, data))
                    except KeyError:
                        pass
                if data_compilation.any():
                    plot_trend_distribution(data_compilation, exponent, model,
                                            experiment, run_p)

    if inargs.reference_trend:
        plt.axvline(x=inargs.reference_trend, linestyle='--', color='0.5')

    plt.title('10-year trends in hemispheric OHC difference')
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
Code example #9
def main(inargs):
    """Run the program."""

    time_constraints = {'historical-rcp85': gio.get_time_constraint(inargs.rcp_time),
                        'historical': gio.get_time_constraint(inargs.historical_time),
                        'GHG-only': gio.get_time_constraint(inargs.historical_time),
                        'AA-only': gio.get_time_constraint(inargs.historical_time),
                        '1pctCO2': gio.get_time_constraint(inargs.pctCO2_time)}

    fig = plt.figure(figsize=[20, 7])
    ax1 = fig.add_subplot(1, 2, 1)
    ax2 = fig.add_subplot(1, 2, 2)

    for experiment_num, experiment in enumerate(inargs.experiment_list):
        time_constraint = time_constraints[experiment]
        ita_cube_list = iris.cube.CubeList([])
        eei_cube_list = iris.cube.CubeList([])
        for model_num in range(0, len(inargs.toa_files[experiment_num])):
            toa_file = inargs.toa_files[experiment_num][model_num]
            thetao_sh_file = inargs.thetao_sh_files[experiment_num][model_num]
            thetao_nh_file = inargs.thetao_nh_files[experiment_num][model_num]

            ita_cube, ita_history = calc_ita(thetao_sh_file, thetao_nh_file, time_constraint, model_num)
            ita_cube_list.append(ita_cube)

            eei_cube, eei_history = calc_eei(toa_file, time_constraint, model_num)
            eei_cube_list.append(eei_cube)

        ita_ensemble_agg = ensemble_aggregation(ita_cube_list, inargs.ensagg)
        ita_ensemble_spread = ensemble_aggregation(ita_cube_list, 'percentile')    
        eei_ensemble_agg = ensemble_aggregation(eei_cube_list, inargs.ensagg)
    
        temporal_plot(ax1, ita_ensemble_agg, ita_ensemble_spread, experiment)
        eei_plot(ax2, eei_ensemble_agg, ita_ensemble_agg, experiment)   

    ylim = temporal_plot_features(ax1)
    eei_plot_features(ax2, ylim)
    
    dpi = inargs.dpi if inargs.dpi else plt.rcParams['figure.dpi']
    print('dpi =', dpi)
    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=dpi)

    infile_metadata = {thetao_nh_file: ita_history, toa_file: eei_history}
    log_text = cmdprov.new_log(infile_history=infile_metadata, git_repo=repo_dir)
    log_file = re.sub('.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
Code example #10
def main(inargs):
    """Run the program."""

    time_constraints = {}
    time_constraints['historical'] = gio.get_time_constraint(inargs.hist_time)
    time_constraints['rcp'] = gio.get_time_constraint(inargs.rcp_time)

    width = 10
    height = 20
    fig = plt.figure(figsize=(width, height))
    ax_dict = {}
    ax1 = fig.add_subplot(3, 1, 1)
    ax2 = fig.add_subplot(3, 1, 2)
    ax3 = fig.add_subplot(3, 1, 3)
    valid_files = []
    for infiles in inargs.experiment_files:
        spacific_cube, npacific_cube, experiment = load_data(
            infiles, 'pacific')
        satlantic_cube, natlantic_cube, experiment = load_data(
            infiles, 'atlantic')
        if experiment:
            spacific_metric, npacific_metric = calc_metrics(
                spacific_cube, npacific_cube)
            satlantic_metric, natlantic_metric = calc_metrics(
                satlantic_cube, natlantic_cube)
            plot_hemispheres(ax1, npacific_metric, spacific_metric, experiment,
                             'pacific')
            plot_comparison(ax2, npacific_metric, spacific_metric, experiment,
                            'pacific')
            plot_hemispheres(ax3, natlantic_metric, satlantic_metric,
                             experiment, 'atlantic')
            model = spacific_cube.attributes['model_id']
            valid_files.append(infiles)

    title = 'Annual Mean Meridional Overturning Mass Streamfunction, %s' % (
        model)
    plt.suptitle(title, size='large')
    #    plt.subplots_adjust(top=0.90)

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile,
                       file_info={valid_files[0][0]: history[0]})
Code example #11
def main(inargs):
    """Run the program."""

    if inargs.time:
        try:
            time_constraint = gio.get_time_constraint(inargs.time)
        except AttributeError:
            time_constraint = iris.Constraint()
    else:
        time_constraint = iris.Constraint()

    fig, axes = setup_plot(inargs.nregions)
    bar_width = 0.7

    hfds_values = {}
    for region in region_names[inargs.nregions]:
        plot_atmos(axes,
                   inargs.infile,
                   region,
                   bar_width,
                   inargs.aggregation,
                   time_constraint,
                   branch=inargs.branch_time)
        hfds_values[region] = plot_surface(axes,
                                           inargs.infile,
                                           region,
                                           bar_width,
                                           inargs.aggregation,
                                           time_constraint,
                                           branch=inargs.branch_time)

    ohc_values, transport_values, ohc_inferred_values, transport_inferred_values = get_ocean_values(
        inargs.infile,
        inargs.aggregation,
        time_constraint,
        hfds_values,
        inargs.nregions,
        branch=inargs.branch_time,
        infer_ohc=inargs.infer_ohc,
        infer_hfbasin=inargs.infer_hfbasin)
    for region in region_names[inargs.nregions]:
        plot_ocean(axes, region, bar_width, inargs.aggregation, hfds_values,
                   ohc_values, transport_values, ohc_inferred_values,
                   transport_inferred_values)

    set_title(inargs.infile)
    fig.tight_layout(rect=[0, 0, 1, 0.93])  # (left, bottom, right, top)

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile,
                       file_info={
                           inargs.infile:
                           iris.load(inargs.infile)[0].attributes['history']
                       })
Code example #12
def calculate_climatology(cube, time_bounds, experiment):
    """Calculate annual mean climatology"""

    if experiment != 'piControl':
        time_constraint = gio.get_time_constraint(time_bounds)
        cube = cube.extract(time_constraint)

    cube = cube.collapsed('time', iris.analysis.MEAN)
    cube.remove_coord('time')

    return cube
Code example #13
def get_constraints(inargs):
    """Get the time, depth and mask information"""

    time_constraint = gio.get_time_constraint(inargs.time_bounds)
    depth_constraint = gio.iris_vertical_constraint(0.0, inargs.max_depth)

    if inargs.land_mask:
        sftlf_cube = iris.load_cube(inargs.land_mask, 'land_area_fraction')
    else:
        sftlf_cube = None

    return time_constraint, depth_constraint, sftlf_cube
Code example #14
def get_time_constraint(time_bounds):
    """Get the iris time constraint for given time bounds."""

    if time_bounds:
        try:
            time_constraint = gio.get_time_constraint(time_bounds)
        except AttributeError:
            time_constraint = iris.Constraint()
    else:
        time_constraint = iris.Constraint()

    return time_constraint
Code example #15
def main(inargs):
    """Run the program."""

    metadata_dict = {}
    time_constraint = gio.get_time_constraint(
        [inargs.start_date, inargs.end_date])

    fig = plt.figure(figsize=[11, 10])

    if inargs.rndt_files:
        rndt_nh, rndt_sh = read_hemisphere_data(inargs.rndt_files, 'rndt',
                                                time_constraint, inargs.ensagg)
        iplt.plot(rndt_nh, label='netTOA, NH', color='red', linestyle='solid')
        iplt.plot(rndt_sh, label='netTOA, SH', color='red', linestyle='dashed')

    if inargs.hfds_files:
        hfds_nh, hfds_sh = read_hemisphere_data(inargs.hfds_files, 'hfds',
                                                time_constraint, inargs.ensagg)
        iplt.plot(hfds_nh, label='OHU, NH', color='orange', linestyle='solid')
        iplt.plot(hfds_sh, label='OHU, SH', color='orange', linestyle='dashed')

    if inargs.ohc_files:
        ohc_nh, ohc_sh = read_hemisphere_data(inargs.ohc_files, 'ohc',
                                              time_constraint, inargs.ensagg)
        iplt.plot(ohc_nh, label='OHC, NH', color='blue', linestyle='solid')
        iplt.plot(ohc_sh, label='OHC, SH', color='blue', linestyle='dashed')

    if inargs.ohc_guide_files:
        guide_nh, guide_sh = read_guide_data(inargs.ohc_guide_files, 'ohc',
                                             time_constraint, inargs.ensagg)
        iplt.plot(guide_nh,
                  label='OHC guide, NH',
                  color='0.5',
                  linestyle='solid')
        iplt.plot(guide_sh,
                  label='OHC guide, SH',
                  color='0.5',
                  linestyle='dashed')

    plt.legend()
    if inargs.ybounds:
        ymin, ymax = inargs.ybounds
        plt.ylim([ymin, ymax])

    dpi = inargs.dpi if inargs.dpi else plt.rcParams['figure.dpi']
    print('dpi =', dpi)
    plt.savefig(inargs.outfile, bbox_inches='tight', dpi=dpi)

    log_text = cmdprov.new_log(
        git_repo=repo_dir)  # infile_history={nh_file: history}
    log_file = re.sub('.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
Code example #16
def period_mean(cube, time_period):
    """Calculate the mean for a particular time period."""

    time_constraint = gio.get_time_constraint(time_period)
    cube = cube.extract(time_constraint)

    coord_names = [coord.name() for coord in cube.dim_coords]
    if 'time' in coord_names:
        cube = cube.collapsed('time', iris.analysis.MEAN)
    cube.remove_coord('time')

    return cube
Code example #17
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time_bounds)
    basin_names = ['atlantic', 'indian', 'pacific', 'land']
    anomaly_data = {}
    start_data = {}
    data = []
    model_list = []
    for filenum, infile in enumerate(inargs.infiles):
        cube, anomaly_data, start = get_data(infile, inargs.var,
                                             time_constraint)
        units = cube.units
        cum_change = anomaly_data[-1, :]
        if 'source_id' in cube.attributes:
            model = cube.attributes['source_id']
        else:
            model = cube.attributes['model_id']
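        # Average change per time step, expressed as a percentage of the
        # starting value; the anomaly is relative to the all-basin total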
        ntimes = anomaly_data.shape[0]
        pct_change = ((cum_change / ntimes) / np.absolute(start)) * 100
        total_cum_change = cum_change.sum()
        total_start = start.sum()
        total_pct_change = ((total_cum_change / ntimes) / total_start) * 100
        pct_change_anomaly = pct_change - total_pct_change

        model_list.append(model)
        for basin in range(4):
            data.append([
                model, basin_names[basin], start[basin], cum_change[basin],
                pct_change[basin], pct_change_anomaly[basin]
            ])

    df = pd.DataFrame(data,
                      columns=[
                          'model', 'basin', 'start', 'cumulative_change',
                          'percentage_change', 'percentage_change_anomaly'
                      ])

    model_list.sort()
    experiment = cube.attributes['experiment_id']
    plot_ensemble_lines(df, inargs.var, model_list, experiment, str(units),
                        inargs.ymax)

    plt.savefig(inargs.outfile, bbox_inches='tight')

    log_file = re.sub('.png', '.met', inargs.outfile)
    log_text = cmdprov.new_log(
        infile_history={inargs.infiles[-1]: cube.attributes['history']},
        git_repo=repo_dir)
    cmdprov.write_log(log_file, log_text)

    csv_file = re.sub('.png', '.csv', inargs.outfile)
    df.to_csv(csv_file)
Code example #18
def read_supporting_inputs(vfile, bfile, inargs, ref=False):
    """Read the supporting volume, basin and time bounds information."""

    if ref:
        time_bounds = inargs.ref_time_bounds if inargs.ref_time_bounds else inargs.time_bounds
    else:
        time_bounds = inargs.time_bounds

    vcube = iris.load_cube(vfile)
    bcube = iris.load_cube(bfile)
    time_constraint = gio.get_time_constraint(time_bounds)

    return vcube, bcube, time_constraint
Code example #19
def main(inargs):
    """Run the program."""

    hist_cube_list = iris.load(inargs.historical_file)
    control_cube_list = iris.load(inargs.control_file)

    total_time = (inargs.start_time[0], inargs.end_time[-1])

    hist_start_constraint = gio.get_time_constraint(inargs.start_time)
    hist_end_constraint = gio.get_time_constraint(inargs.end_time)
    hist_total_constraint = gio.get_time_constraint(total_time)

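    # Map the historical analysis periods onto the corresponding periods
    # of the control run (presumably offset from the branch time)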
    control_start_constraint = timeseries.get_control_time_constraint(control_cube_list[0], hist_cube_list[0], inargs.start_time)
    control_end_constraint = timeseries.get_control_time_constraint(control_cube_list[0], hist_cube_list[0], inargs.end_time)
    control_total_constraint = timeseries.get_control_time_constraint(control_cube_list[0], hist_cube_list[0], total_time)
    
    column_headers = ['model', 'experiment', 'rip', 'period',
                      'hfds-globe-sum', 'hfds-nh-sum', 'hfds-sh-sum', 'hfds-nhext-sum', 'hfds-tropics-sum', 'hfds-shext-sum',
                      'ohc-globe-sum', 'ohc-nh-sum', 'ohc-sh-sum', 'ohc-nhext-sum', 'ohc-tropics-sum', 'ohc-shext-sum']

    data_dict = collections.OrderedDict()
    for column in column_headers:
        data_dict[column] = []

    data_dict = generate_results(data_dict, hist_cube_list, hist_start_constraint, inargs.start_time)
    data_dict = generate_results(data_dict, hist_cube_list, hist_end_constraint, inargs.end_time)
    data_dict = generate_results(data_dict, hist_cube_list, hist_total_constraint, total_time)

    data_dict = generate_results(data_dict, control_cube_list, control_start_constraint, inargs.start_time)
    data_dict = generate_results(data_dict, control_cube_list, control_end_constraint, inargs.end_time)
    data_dict = generate_results(data_dict, control_cube_list, control_total_constraint, total_time)

    data_df = pandas.DataFrame.from_dict(data_dict)
    data_df.to_csv(inargs.outfile)

    metadata_dict = {inargs.historical_file: hist_cube_list[0].attributes['history'],
                     inargs.control_file: control_cube_list[0].attributes['history']}
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
Code example #20
def main(inargs):
    """Run the program."""

    if inargs.sftlf_file:
        sftlf_cube = iris.load_cube(inargs.sftlf_file, 'land_area_fraction')
    else:
        sftlf_cube = None

    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        cube = iris.load(inargs.infiles, gio.check_iris_var(inargs.var))
        history = cube[0].attributes['history']

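        # Harmonise attributes and time units so the per-file cubes can be
        # concatenated into a single timeseries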
        equalise_attributes(cube)
        iris.util.unify_time_units(cube)
        cube = cube.concatenate_cube()
        cube = gio.check_time_units(cube)
        cube = iris.util.squeeze(cube)
        cube.attributes['history'] = gio.write_metadata(
            file_info={inargs.infiles[0]: history})

        cube = cube.extract(time_constraint)

        cube = timeseries.convert_to_annual(cube, full_months=True)

    output = {}
    output['full'] = calc_fields(cube,
                                 sftlf_cube,
                                 inargs.aggregation,
                                 realm=None,
                                 area=inargs.area)
    if inargs.sftlf_file:
        for realm in ['ocean', 'land']:
            output[realm] = calc_fields(cube,
                                        sftlf_cube,
                                        inargs.aggregation,
                                        realm=realm,
                                        area=inargs.area)

    cube_list = iris.cube.CubeList()
    for realm, output_cubes in output.items():
        for cube in output_cubes:
            cube_list.append(cube)

    iris.FUTURE.netcdf_no_unlimited = True
    iris.save(cube_list, inargs.outfile, netcdf_format='NETCDF3_CLASSIC')
Code example #21
def get_control_time_constraint(control_cube,
                                ref_cube,
                                time_bounds,
                                branch_time=None):
    """Define the time constraint for control data.

    Args:
      control_cube (iris.cube.Cube): cube for piControl experiment
      ref_cube (iris.cube.Cube): reference cube (e.g. from historical experiment)
      time_bounds (list): selected time periods from reference cube
        (e.g. ['1861-01-01', '2005-12-31'])
      branch_time (float): Override the branch time in the ref_cube attributes

    """

    _check_attributes(ref_cube.attributes, control_cube.attributes)

    iris.coord_categorisation.add_year(control_cube, 'time')
    iris.coord_categorisation.add_year(ref_cube, 'time')

    if not branch_time:
        branch_time = ref_cube.attributes['branch_time']

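    # Find the first control-run time cell whose bounds contain the branch time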
    index = 0
    for bounds in control_cube.coord('time').bounds:
        lower, upper = bounds
        if lower <= float(branch_time) < upper:
            break
        else:
            index = index + 1

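    # Convert the requested reference years into the equivalent control-run
    # years, measured as an offset from the branch year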
    branch_year = control_cube.coord('year').points[index]
    ref_start_year = ref_cube.coord('year').points[0]
    start_gap = int(time_bounds[0].split('-')[0]) - ref_start_year
    end_gap = int(time_bounds[1].split('-')[0]) - ref_start_year

    control_start_year = branch_year + start_gap
    control_end_year = branch_year + end_gap

    control_start_date = str(control_start_year).zfill(4) + '-01-01'
    control_end_date = str(control_end_year).zfill(4) + '-12-31'

    time_constraint = gio.get_time_constraint(
        [control_start_date, control_end_date])

    control_cube.remove_coord('year')
    ref_cube.remove_coord('year')

    return time_constraint
Code example #22
def read_data(inargs, infiles, time_bounds, ref_cube=None, anomaly=False, branch_index=None, branch_time=None):
    """Read data."""

    data_dict = {}
    file_count = 0
    for infile in infiles:
        try:
            cube = iris.load_cube(infile, gio.check_iris_var(inargs.var))
        except iris.exceptions.ConstraintMismatchError:
            print('using inferred value for', infile)
            cube = iris.load_cube(infile, gio.check_iris_var('Inferred_' + inargs.var))
            cube.long_name = inargs.var.replace('_', ' ')
            cube.var_name = cube.var_name.replace('-inferred', '')
        
        if ref_cube:
            cube = timeseries.adjust_control_time(cube, ref_cube, branch_index=branch_index, branch_time=branch_time)

        if not (ref_cube and inargs.full_control):
            time_constraint = gio.get_time_constraint(time_bounds)
            cube = cube.extract(time_constraint)

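        # Optionally express the timeseries as an anomaly relative to the
        # mean of its first 20 time points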
        if anomaly:
            cube.data = cube.data - cube.data[0:20].mean()     

        cube.data = cube.data.astype(numpy.float64)
        cube.cell_methods = ()
        for aux_coord in ['latitude', 'longitude']:
            try:
                cube.remove_coord(aux_coord)
            except iris.exceptions.CoordinateNotFoundError:
                pass

        new_aux_coord = iris.coords.AuxCoord(file_count, long_name='ensemble_member', units='no_unit')
        cube.add_aux_coord(new_aux_coord)
         
        model = cube.attributes['model_id']
        realization = 'r' + str(cube.attributes['realization'])
        physics = 'p' + str(cube.attributes['physics_version'])
        experiment = cube.attributes['experiment_id']

        key = (model, physics, realization)
        data_dict[key] = cube
        file_count = file_count + 1
    
    ylabel = get_ylabel(cube, inargs)
    experiment = 'historicalAA' if experiment == "historicalMisc" else experiment
    metadata_dict = {infile: cube.attributes['history']}
    
    return data_dict, experiment, ylabel, metadata_dict
Code example #23
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time_bounds)
    ensemble_cube_list = iris.cube.CubeList([])
    for ensnum, ensemble_member in enumerate(inargs.ensemble_member):
        cube, history = read_infiles(ensemble_member, inargs.var, time_constraint, ensnum)
        ensemble_cube_list.append(cube)
    
    ensagg = calc_ensagg(ensemble_cube_list)

    log = cmdprov.new_log(infile_history={ensemble_member[0]: history}, git_repo=repo_dir)
    ensagg.attributes['history'] = log

    iris.save(ensagg, inargs.outfile)
Code example #24
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time)
    metadata_dict = {}
    results_dict = {}
    fig, ax = plt.subplots()

    metadata_dict, results_dict, ohc_cube = plot_files(inargs.ohc_file,
                                                       inargs.hfds_file,
                                                       inargs.rndt_file,
                                                       inargs.hemisphere,
                                                       metadata_dict,
                                                       results_dict,
                                                       time_constraint,
                                                       dedrifted=True)
    if inargs.orig_ohc_file and inargs.orig_hfds_file and inargs.orig_rndt_file:
        metadata_dict, results_dict, ohc_cube = plot_files(
            inargs.orig_ohc_file,
            inargs.orig_hfds_file,
            inargs.orig_rndt_file,
            inargs.hemisphere,
            metadata_dict,
            results_dict,
            time_constraint,
            dedrifted=False)

    plt.ylabel(ohc_cube.units)
    plt.ticklabel_format(style='sci',
                         axis='y',
                         scilimits=(0, 0),
                         useMathText=True,
                         useOffset=False)
    ax.yaxis.major.formatter._useMathText = True

    #plt.ylim(-5e+24, 9e+24)
    ymin, ymax = plt.ylim()
    print('ymin:', ymin)
    print('ymax:', ymax)

    title, legloc = get_title(ohc_cube, inargs.hemisphere)
    plt.title(title)
    plt.legend(loc=legloc)

    write_result(inargs.outfile, results_dict)
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info=metadata_dict)
Code example #25
def main(inargs):
    """Run the program."""

    assert inargs.var in [
        'precipitation_minus_evaporation_flux', 'water_flux_into_sea_water',
        'water_evapotranspiration_flux', 'precipitation_flux'
    ]
    cmap = 'BrBG'
    basins_to_plot = [
        'Atlantic', 'Indian', 'Pacific', 'Land', 'Ocean', 'Globe'
    ]
    if inargs.var == 'precipitation_minus_evaporation_flux':
        var_abbrev = 'P-E'
    elif inargs.var == 'water_evapotranspiration_flux':
        var_abbrev = 'evaporation'
        cmap = 'BrBG_r'
    elif inargs.var == 'precipitation_flux':
        var_abbrev = 'precipitation'
    elif inargs.var == 'water_flux_into_sea_water':
        var_abbrev = 'net moisture import/export (i.e. P-E+R)'
        basins_to_plot = ['Atlantic', 'Indian', 'Pacific', 'Arctic', 'Globe']

    time_constraint = gio.get_time_constraint(inargs.time_bounds)
    input_files = [
        inargs.control_files, inargs.ghg_files, inargs.aa_files,
        inargs.hist_files
    ]
    experiments = ['piControl', 'GHG-only', 'AA-only', 'historical']

    metadata_dict = {}
    fig, axes = plt.subplots(2, 2, figsize=(24, 12))
    axes = axes.flatten()
    for plotnum, exp_files in enumerate(input_files):
        if exp_files:
            experiment = experiments[plotnum]
            print(f"Number of {experiment} models = {len(exp_files)}")
            time_selector = None if experiment == 'piControl' else time_constraint
            file_history = plot_data(axes[plotnum], exp_files, inargs,
                                     experiment, var_abbrev, time_selector,
                                     inargs.scale_factor[plotnum],
                                     basins_to_plot, cmap)
            metadata_dict[exp_files[0]] = file_history[0]

    plt.savefig(inargs.outfile, bbox_inches='tight')
    log_text = cmdprov.new_log(infile_history=metadata_dict, git_repo=repo_dir)
    log_file = re.sub('.png', '.met', inargs.outfile)
    cmdprov.write_log(log_file, log_text)
Code example #26
def read_data(inargs, infiles, ref_cube=None):
    """Read data."""

    clim_dict = {}
    trend_dict = {}
    file_count = 0
    for infile in infiles:
        print(infile)
        cube = iris.load_cube(infile, gio.check_iris_var(inargs.var))
        if ref_cube:
            time_constraint = timeseries.get_control_time_constraint(
                cube, ref_cube, inargs.time, branch_time=inargs.branch_time)
            cube = cube.extract(time_constraint)
            iris.util.unify_time_units([ref_cube, cube])
            cube.replace_coord(ref_cube.coord('time'))
        else:
            time_constraint = gio.get_time_constraint(inargs.time)
            cube = cube.extract(time_constraint)

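        # Optionally convert to a per-degree-of-latitude quantity
        # (assuming grids.get_grid_spacing returns the latitude spacing)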
        if inargs.perlat:
            grid_spacing = grids.get_grid_spacing(cube)
            cube.data = cube.data / grid_spacing

        trend_cube = calc_trend_cube(cube.copy())

        clim_cube = cube.collapsed('time', iris.analysis.MEAN)
        clim_cube.remove_coord('time')

        model = cube.attributes['model_id']
        realization = 'r' + str(cube.attributes['realization'])
        physics = 'p' + str(cube.attributes['physics_version'])

        key = (model, physics, realization)
        trend_dict[key] = trend_cube
        clim_dict[key] = clim_cube
        file_count = file_count + 1

    experiment = cube.attributes['experiment_id']
    experiment = 'historicalAA' if experiment == "historicalMisc" else experiment
    trend_ylabel = get_ylabel(cube, 'trend', inargs)
    clim_ylabel = get_ylabel(cube, 'climatology', inargs)

    metadata_dict = {infile: cube.attributes['history']}

    return cube, trend_dict, clim_dict, experiment, trend_ylabel, clim_ylabel, metadata_dict
Code example #27
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time)
    #metadata_dict = {}
    fig, ax = plt.subplots()
    plt.axvline(x=0, color='0.5', linestyle='--')

    data_list = []
    for nh_file, sh_file in inargs.rndt_files:
        diff, model, experiment, mip = calc_interhemispheric_diff(
            nh_file, sh_file, 'netTOA', time_constraint)
        data_list.append(
            generate_data_dict(diff, model, experiment, mip, 'netTOA'))

    for nh_file, sh_file in inargs.hfds_files:
        diff, model, experiment, mip = calc_interhemispheric_diff(
            nh_file, sh_file, 'OHU', time_constraint)
        data_list.append(
            generate_data_dict(diff, model, experiment, mip, 'OHU'))

    for nh_file, sh_file in inargs.ohc_files:
        diff, model, experiment, mip = calc_interhemispheric_diff(
            nh_file, sh_file, 'OHC', time_constraint)
        data_list.append(
            generate_data_dict(diff, model, experiment, mip, 'OHC'))

    data_df = pandas.DataFrame(data_list)
    seaborn.boxplot(data=data_df[columns],
                    orient="h",
                    palette=[
                        'red', '#FFDDDD', '#FFDDDD', 'yellow', '#fdffdd',
                        '#fdffdd', 'blue', '#ddddff', '#ddddff'
                    ])

    plt.ticklabel_format(style='sci',
                         axis='x',
                         scilimits=(0, 0),
                         useMathText=True)
    ax.xaxis.major.formatter._useMathText = True
    ax.set_xlabel('Northern Hemisphere minus Southern Hemisphere (Joules)')

    plt.title('Interhemispheric difference in accumulated heat, 1861-2005')
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile)
Code example #28
def main(inargs):
    """Run the program."""

    data_files = {'historicalGHG': inargs.ghg_files,
                  'historicalAA': inargs.aa_files} 
    spatial_cubes = {'historicalGHG': read_spatial_file(inargs.ghg_spatial_file),
                     'historicalAA': read_spatial_file(inargs.aa_spatial_file)}
    time_periods = {'early': inargs.early_period,
                    'late': inargs.late_period}

    hist = {}
    for experiment in ['historicalGHG', 'historicalAA']:
        with iris.FUTURE.context(cell_datetime_objects=True):
            data_cubes = iris.load(data_files[experiment], inargs.var, callback=save_history)
            equalise_attributes(data_cubes)
            for period_name, period_dates in time_periods.items():
                time_constraint = gio.get_time_constraint(period_dates)
                data_cube_list = data_cubes.extract(time_constraint)
                data_cube = concat_cubes(data_cube_list)
                if inargs.annual_smoothing:
                    data_cube = data_cube.rolling_window('time', iris.analysis.MEAN, 12)
                    #data_cube = timeseries.convert_to_annual(data_cube)
                hist[(experiment, period_name)], bin_edges = calc_histogram(data_cube, spatial_cubes[experiment])
        hist[(experiment, 'diff')] = hist[(experiment, 'late')] - hist[(experiment, 'early')]

    fig = plt.figure(figsize=[15, 10])
    gs = gridspec.GridSpec(2, 2, height_ratios=[4,1])

    ax0 = plt.subplot(gs[0])
    create_upper_plot(ax0, hist, bin_edges[0:-1], 'historicalAA')

    ax1 = plt.subplot(gs[1])
    create_upper_plot(ax1, hist, bin_edges[0:-1], 'historicalGHG')

    ax2 = plt.subplot(gs[2])
    create_lower_plot(ax2, hist, bin_edges[0:-1], 'historicalAA')

    ax3 = plt.subplot(gs[3])
    create_lower_plot(ax3, hist, bin_edges[0:-1], 'historicalGHG')

    fig.suptitle('Salinity distribution')

    plt.savefig(inargs.outfile, bbox_inches='tight')
    write_met_file(inargs, spatial_cubes, inargs.outfile)
Code example #29
def main(inargs):
    """Run the program."""
    
    # Read data
    try:
        time_constraint = gio.get_time_constraint(inargs.time)
    except AttributeError:
        time_constraint = iris.Constraint()

    with iris.FUTURE.context(cell_datetime_objects=True):
        ohc_3D_cube = iris.load_cube(inargs.infile, 'ocean heat content 3D' & time_constraint)  
        ohc_2D_cube = iris.load_cube(inargs.infile, 'ocean heat content 2D' & time_constraint)

    lons = ohc_3D_cube.coord('longitude').points
    lats = ohc_3D_cube.coord('latitude').points
    infile_history = ohc_3D_cube.attributes['history']
    
    # Calculate seasonal cycle
    running_mean = True
    if inargs.seasonal_cycle:
        ohc_3D_cube = timeseries.calc_seasonal_cycle(ohc_3D_cube) 
        ohc_2D_cube = timeseries.calc_seasonal_cycle(ohc_2D_cube)
        running_mean = False

    # Calculate trend
    ohc_3D_trend = timeseries.calc_trend(ohc_3D_cube, running_mean=running_mean,
                                         per_yr=False, remove_scaling=True)
    ohc_2D_trend = timeseries.calc_trend(ohc_2D_cube, running_mean=running_mean,
                                         per_yr=False, remove_scaling=True)

    # Plot
    fig = plt.figure(figsize=[15, 3])
    gs = gridspec.GridSpec(1, 2, width_ratios=[4, 1]) 

    cbar_tick_max, cbar_tick_step = inargs.ticks
    yticks = set_yticks(inargs.max_lat)
    plot_3D_trend(ohc_3D_trend, lons, lats, gs,
                  cbar_tick_max, cbar_tick_step, yticks)
    plot_2D_trend(ohc_2D_trend, lats, gs,
                  yticks)

    # Write output
    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile, file_info={inargs.infile: infile_history})
Code example #30
def main(inargs):
    """Run the program."""

    time_constraint = gio.get_time_constraint(inargs.time)

    trend_dict = {}
    models = []
    for infile in inargs.infiles:
        with iris.FUTURE.context(cell_datetime_objects=True):
            cube = iris.load_cube(infile, 'air_temperature' & time_constraint)
            experiment, model, metric_name = get_file_info(infile)
            trend_dict[(model,
                        experiment)] = timeseries.calc_trend(cube, per_yr=True)
        models.append(model)

    models = sort_list(models)
    hist_data, ghg_data, aa_data = order_data(trend_dict, models)
    ant_data = numpy.array(ghg_data) + numpy.array(aa_data)

    ind = numpy.arange(len(hist_data))  # the x locations for the groups
    width = 0.2  # the width of the bars

    fig, ax = plt.subplots(figsize=(20, 8))
    rects1 = ax.bar(ind, ghg_data, width, color='red')
    rects2 = ax.bar(ind + width, aa_data, width, color='blue')
    rects3 = ax.bar(ind + 2 * width, ant_data, width, color='purple')
    rects4 = ax.bar(ind + 3 * width, hist_data, width, color='green')

    ax.set_ylabel('$K yr^{-1}$')

    start_year = inargs.time[0].split('-')[0]
    end_year = inargs.time[1].split('-')[0]
    ax.set_title('Trend in %s, %s-%s' % (metric_name, start_year, end_year))

    ax.set_xticks(ind + 1.5 * width)
    ax.set_xticklabels(models)
    ax.legend((rects1[0], rects2[0], rects3[0], rects4[0]),
              ('historicalGHG', 'historicalAA', 'GHG + AA', 'historical'),
              loc=1)

    plt.savefig(inargs.outfile, bbox_inches='tight')
    gio.write_metadata(inargs.outfile,
                       file_info={infile: cube.attributes['history']})