def main(cfg):
    """Run the diagnostic."""
    cfg = get_default_settings(cfg)
    diag = check_cfg(cfg)
    sns.set(**cfg.get('seaborn_settings', {}))

    # Get input data
    input_data = list(cfg['input_data'].values())
    input_data.extend(io.netcdf_to_metadata(cfg, pattern=cfg.get('pattern')))
    input_data = deepcopy(input_data)
    check_input_data(input_data)
    grouped_data = group_metadata(input_data, 'dataset')

    # Calculate X-axis of emergent constraint
    diag_func = globals()[diag]
    (diag_data, var_attrs, attrs) = diag_func(grouped_data, cfg)
    attrs.update(get_global_attributes(input_data, cfg))

    # Save data
    netcdf_path = get_diagnostic_filename(diag, cfg)
    io.save_scalar_data(diag_data, netcdf_path, var_attrs, attributes=attrs)
    logger.info("Found data:\n%s", pformat(diag_data))

    # Provenance
    provenance_record = ec.get_provenance_record(
        {diag: attrs}, [diag],
        caption=attrs['plot_xlabel'],
        ancestors=[d['filename'] for d in input_data])
    with ProvenanceLogger(cfg) as provenance_logger:
        provenance_logger.log(netcdf_path, provenance_record)
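All of the examples on this page feed a plain {dataset: value} mapping into io.save_scalar_data. A minimal, self-contained sketch of that pattern (the import path esmvaltool.diag_scripts.shared and the TCR-style attributes are assumptions for illustration, not taken from the examples):

import numpy as np
from esmvaltool.diag_scripts.shared import io  # assumed import path

# One scalar per dataset; NaN values are masked in the saved cube.
data = {'MODEL-A': 1.8, 'MODEL-B': 2.1, 'MODEL-C': np.nan}
var_attrs = {
    'short_name': 'tcr',
    'long_name': 'Transient Climate Response (TCR)',
    'units': 'K',
}
io.save_scalar_data(data, '/tmp/tcr.nc', var_attrs, attributes={'project': 'CMIP6'})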
Example No. 2
def test_save_scalar_data(mock_logger, mock_save, var_attrs, attrs, aux_coord):
    """Test saving of scalar data."""
    data = OrderedDict([
        ('model1', np.nan),
        ('model2', 1.0),
        ('model3', 3.14),
    ])
    dataset_dim = iris.coords.AuxCoord(list(data.keys()), long_name='dataset')
    output_data = np.ma.masked_invalid([np.nan, 1.0, 3.14])

    # Without data
    io.save_scalar_data({}, PATH, var_attrs)
    mock_logger.warning.assert_called()
    assert not mock_save.called
    mock_logger.reset_mock()
    mock_save.reset_mock()

    # With data
    io.save_scalar_data(data, PATH, var_attrs, aux_coord, attrs)
    iris_var_attrs = deepcopy(var_attrs)
    iris_var_attrs['var_name'] = iris_var_attrs.pop('short_name')
    new_cube = iris.cube.Cube(output_data,
                              aux_coords_and_dims=[(dataset_dim, 0)],
                              attributes=attrs,
                              **iris_var_attrs)
    if aux_coord is not None:
        new_cube.add_aux_coord(aux_coord, 0)
    if 'units' not in var_attrs:
        mock_logger.warning.assert_called()
        assert not mock_save.called
    else:
        mock_logger.warning.assert_not_called()
        assert mock_save.call_args_list == [mock.call(new_cube, PATH)]
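The test above also exercises the optional arguments: an aux_coord attached along the dataset dimension and an attributes dict written as global attributes, and it shows that a var_attrs dict without 'units' only logs a warning and skips the save. A short sketch of the full call (argument order inferred from the test; the coordinate and values are illustrative):

import iris
from esmvaltool.diag_scripts.shared import io  # assumed import path

data = {'model1': 1.0, 'model2': 3.14}
var_attrs = {'short_name': 'ecs',
             'long_name': 'Equilibrium Climate Sensitivity',
             'units': 'K'}
realization = iris.coords.AuxCoord([1, 2], long_name='realization')  # one value per dataset
io.save_scalar_data(data, '/tmp/ecs.nc', var_attrs, realization,
                    {'project': 'CMIP6'})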
Example No. 3
def main(cfg):
    """Run the diagnostic."""
    input_data = (
        select_metadata(cfg['input_data'].values(), short_name='tas') +
        select_metadata(cfg['input_data'].values(), short_name='tasa'))
    if not input_data:
        raise ValueError("This diagnostic needs the variable 'tas' or 'tasa'")

    # Calculate psi for every dataset
    psis = {}
    psi_attrs = {
        'short_name': 'psi',
        'long_name': 'Temperature variability metric',
        'units': 'K',
    }
    grouped_data = group_metadata(input_data, 'dataset')
    for (dataset, [data]) in grouped_data.items():
        logger.info("Processing %s", dataset)
        cube = iris.load_cube(data['filename'])
        iris.coord_categorisation.add_year(cube, 'time')
        cube = cube.aggregated_by('year', iris.analysis.MEAN)
        psi_cube = calculate_psi(cube, cfg)
        data.update(psi_attrs)
        data.pop('standard_name', '')

        # Provenance
        caption = ("Temporal evolution of temperature variability metric psi "
                   "between {start_year} and {end_year} for {dataset}.".format(
                       **data))
        provenance_record = get_provenance_record(caption, [data['filename']])
        out_path = get_diagnostic_filename('psi_' + dataset, cfg)
        with ProvenanceLogger(cfg) as provenance_logger:
            provenance_logger.log(out_path, provenance_record)

        # Save psi for every dataset
        data['filename'] = out_path
        io.metadata_to_netcdf(psi_cube, data)

        # Save averaged psi
        psis[dataset] = np.mean(psi_cube.data)

    # Save averaged psis for every dataset in one file
    out_path = get_diagnostic_filename('psi', cfg)
    io.save_scalar_data(psis,
                        out_path,
                        psi_attrs,
                        attributes=psi_cube.attributes)

    # Provenance
    caption = "{long_name} for multiple climate models.".format(**psi_attrs)
    ancestor_files = [d['filename'] for d in input_data]
    provenance_record = get_provenance_record(caption, ancestor_files)
    with ProvenanceLogger(cfg) as provenance_logger:
        provenance_logger.log(out_path, provenance_record)
Example No. 4
def write_data(cfg, all_data, metadata):
    """Write netcdf file."""
    new_data = {}
    for (label, xy_data) in all_data.items():
        for (idx, dataset_name) in enumerate(xy_data[0]):
            key = f'{label}-{dataset_name}'
            value = xy_data[1][idx]
            new_data[key] = value
    netcdf_path = get_diagnostic_filename(metadata['var_name'], cfg)
    var_attrs = metadata.copy()
    var_attrs['short_name'] = var_attrs.pop('var_name')
    io.save_scalar_data(new_data, netcdf_path, var_attrs)
    return netcdf_path
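For reference, a small illustration of the flattening done in write_data above; the shape of all_data is an assumption inferred from how it is indexed (each value holds a list of dataset names and a parallel list of scalars):

# Assumed layout of all_data, inferred from the indexing in write_data above.
all_data = {
    'OBS': (['ERA5', 'MERRA2'], [1.1, 1.3]),
    'CMIP6': (['MODEL-A'], [1.8]),
}
# The loop builds '<label>-<dataset>' keys mapped to the matching scalar:
# {'OBS-ERA5': 1.1, 'OBS-MERRA2': 1.3, 'CMIP6-MODEL-A': 1.8}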
Example No. 5
def write_data(ecs_data, feedback_parameter_data, ancestor_files, cfg):
    """Write netcdf files."""
    data = [ecs_data, feedback_parameter_data]
    var_attrs = [
        {
            'short_name': 'ecs',
            'long_name': 'Equilibrium Climate Sensitivity (Gregory method)',
            'units': cf_units.Unit('K'),
        },
        {
            'short_name': 'lambda',
            'long_name': 'Climate Feedback Parameter',
            'units': cf_units.Unit('W m-2 K-1'),
        },
    ]
    input_data = list(cfg['input_data'].values())
    if input_data:
        attrs = {
            'project': input_data[0]['project'],
        }
    else:
        attrs = {}
    if RTMT_DATASETS:
        attrs['net_toa_radiation'] = (
            f"For datasets {RTMT_DATASETS}, 'rtmt' (net top of model "
            f"radiation) instead of 'rtnt' (net top of atmosphere radiation) "
            f"is used due to lack of data. These two variables might differ.")
    attrs.update(cfg.get('output_attributes', {}))
    data_available = False
    for (idx, var_attr) in enumerate(var_attrs):
        if not data[idx]:
            logger.info(
                "Skipping writing of '%s' for all models, no data available",
                var_attr['short_name'])
            continue
        data_available = True
        path = get_diagnostic_filename(var_attr['short_name'], cfg)
        io.save_scalar_data(data[idx], path, var_attr, attributes=attrs)
        caption = "{long_name} for multiple climate models.".format(**var_attr)
        provenance_record = get_provenance_record(caption)
        provenance_record['ancestors'] = ancestor_files
        with ProvenanceLogger(cfg) as provenance_logger:
            provenance_logger.log(path, provenance_record)
    if not data_available:
        raise ValueError("No input data given")
Example No. 6
def write_data(cfg, tcr, external_file=None):
    """Write netcdf files."""
    var_attr = {
        'short_name': 'tcr',
        'long_name': 'Transient Climate Response (TCR)',
        'units': cf_units.Unit('K'),
    }
    path = get_diagnostic_filename(var_attr['short_name'], cfg)
    project = list(cfg['input_data'].values())[0]['project']
    io.save_scalar_data(tcr, path, var_attr, attributes={'project': project})
    caption = "{long_name} for multiple climate models.".format(**var_attr)
    provenance_record = get_provenance_record(caption)
    ancestor_files = []
    for dataset_name in tcr.keys():
        datasets = select_metadata(cfg['input_data'].values(),
                                   dataset=dataset_name)
        ancestor_files.extend([d['filename'] for d in datasets])
    if external_file is not None:
        ancestor_files.append(external_file)
    provenance_record['ancestors'] = ancestor_files
    with ProvenanceLogger(cfg) as provenance_logger:
        provenance_logger.log(path, provenance_record)
Example No. 7
def _write_scalar_data(data, ancestor_files, cfg, description=None):
    """Write scalar data for multiple datasets."""
    var_attrs = [
        {
            'short_name': 'ecs',
            'long_name': 'Equilibrium Climate Sensitivity (Gregory method)',
            'units': cf_units.Unit('K'),
        },
        {
            'short_name': 'lambda',
            'long_name': 'Climate Feedback Parameter',
            'units': cf_units.Unit('W m-2 K-1'),
        },
    ]
    global_attrs = {'project': list(cfg['input_data'].values())[0]['project']}
    if RTMT_DATASETS:
        global_attrs['net_toa_radiation'] = RTMT_TEXT.format(RTMT_DATASETS)
    for (idx, var_attr) in enumerate(var_attrs):
        caption = '{long_name} for multiple climate models'.format(**var_attr)
        if description is not None:
            filename = '{}_{}'.format(var_attr['short_name'],
                                      description.replace(' ', '_'))
            attributes = {'Description': description}
            caption += f' for {description}.'
        else:
            filename = var_attr['short_name']
            attributes = {}
            caption += '.'
        attributes.update(global_attrs)
        path = get_diagnostic_filename(filename, cfg)
        if not data[idx]:
            raise ValueError(f"Cannot write file {path}, no data for variable "
                             f"'{var_attr['short_name']}' given")

        # Scalar data
        if NDIMS['rad'] == 1:
            io.save_scalar_data({d: data[idx][d].data
                                 for d in data[idx]},
                                path,
                                var_attr,
                                attributes=attributes)

        # 1D data
        elif NDIMS['rad'] == 2:
            io.save_1d_data(data[idx],
                            path,
                            COORDS['rad'][0],
                            var_attr,
                            attributes=attributes)

        # Higher dimensions
        else:
            logger.info(
                "Writing netcdf summary file including ECS and feedback "
                "parameters for all datasets is not supported for %iD data "
                "yet", NDIMS['rad'])
            return

        # Provenance
        provenance_record = _get_provenance_record(caption)
        provenance_record['ancestors'] = ancestor_files
        with ProvenanceLogger(cfg) as provenance_logger:
            provenance_logger.log(path, provenance_record)
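
Each call above writes a single netCDF file holding one scalar per dataset; a quick way to read such a file back (a sketch, assuming a path produced by one of the examples above):

import iris

cube = iris.load_cube('/tmp/tcr.nc')      # file written by io.save_scalar_data
datasets = cube.coord('dataset').points   # dataset names live in an auxiliary coordinate
values = dict(zip(datasets, cube.data))   # back to the original {dataset: value} mapping
print(values)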