    def test_subset_by_height(self):
        """Ensures correct output from subset_by_height."""

        this_example_dict = example_utils.subset_by_height(
            example_dict=copy.deepcopy(FIRST_EXAMPLE_DICT),
            heights_m_agl=HEIGHTS_TO_KEEP_M_AGL)

        self.assertTrue(
            _compare_example_dicts(this_example_dict,
                                   FIRST_EXAMPLE_DICT_SELECT_HEIGHTS))

    def test_subset_by_height_with_padding(self):
        """Ensures correct output from subset_by_height.

        In this case, some of the desired heights are not there yet (and should
        be added at the top of the profile).
        """

        this_example_dict = example_utils.subset_by_height(
            example_dict=copy.deepcopy(EXAMPLE_DICT_SANS_PADDING),
            heights_m_agl=PADDED_HEIGHTS_TO_KEEP_M_AGL)

        self.assertTrue(
            _compare_example_dicts(this_example_dict,
                                   EXAMPLE_DICT_PADDED_SELECT_HEIGHTS))
Example #3
def _run(model_file_name, example_dir_name, first_time_string,
         last_time_string, exclude_summit_greenland, output_file_name):
    """Applies trained neural net in inference mode.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_dir_name: Same.
    :param first_time_string: Same.
    :param last_time_string: Same.
    :param exclude_summit_greenland: Same.
    :param output_file_name: Same.
    """

    first_time_unix_sec = time_conversion.string_to_unix_sec(
        first_time_string, TIME_FORMAT)
    last_time_unix_sec = time_conversion.string_to_unix_sec(
        last_time_string, TIME_FORMAT)

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True)

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    generator_option_dict = copy.deepcopy(
        metadata_dict[neural_net.TRAINING_OPTIONS_KEY])
    generator_option_dict[neural_net.EXAMPLE_DIRECTORY_KEY] = example_dir_name
    generator_option_dict[neural_net.FIRST_TIME_KEY] = first_time_unix_sec
    generator_option_dict[neural_net.LAST_TIME_KEY] = last_time_unix_sec

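    # Read targets in physical units (no normalization); the model's
    # predictions, which are made in normalized space, are denormalized
    # further below.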
    vector_target_norm_type_string = copy.deepcopy(
        generator_option_dict[neural_net.VECTOR_TARGET_NORM_TYPE_KEY])
    scalar_target_norm_type_string = copy.deepcopy(
        generator_option_dict[neural_net.SCALAR_TARGET_NORM_TYPE_KEY])
    generator_option_dict[neural_net.VECTOR_TARGET_NORM_TYPE_KEY] = None
    generator_option_dict[neural_net.SCALAR_TARGET_NORM_TYPE_KEY] = None

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]
    predictor_matrix, target_array, example_id_strings = neural_net.create_data(
        option_dict=generator_option_dict,
        for_inference=True,
        net_type_string=net_type_string,
        exclude_summit_greenland=exclude_summit_greenland)
    print(SEPARATOR_STRING)

    exec_start_time_unix_sec = time.time()
    prediction_array = neural_net.apply_model(
        model_object=model_object,
        predictor_matrix=predictor_matrix,
        num_examples_per_batch=NUM_EXAMPLES_PER_BATCH,
        net_type_string=net_type_string,
        verbose=True)

    print(SEPARATOR_STRING)
    print('Time to apply neural net = {0:.4f} seconds'.format(
        time.time() - exec_start_time_unix_sec))

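    # target_array and prediction_array contain the vector (profile) matrix
    # first and, if the net predicts scalar targets, the scalar matrix second.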
    vector_target_matrix = target_array[0]
    vector_prediction_matrix = prediction_array[0]

    if len(target_array) == 2:
        scalar_target_matrix = target_array[1]
        scalar_prediction_matrix = prediction_array[1]
    else:
        scalar_target_matrix = None
        scalar_prediction_matrix = None

    target_example_dict = _targets_numpy_to_dict(
        scalar_target_matrix=scalar_target_matrix,
        vector_target_matrix=vector_target_matrix,
        model_metadata_dict=metadata_dict)

    prediction_example_dict = _targets_numpy_to_dict(
        scalar_target_matrix=scalar_prediction_matrix,
        vector_target_matrix=vector_prediction_matrix,
        model_metadata_dict=metadata_dict)

    normalization_file_name = (
        generator_option_dict[neural_net.NORMALIZATION_FILE_KEY])
    print(('Reading training examples (for normalization) from: "{0:s}"...'
           ).format(normalization_file_name))
    training_example_dict = example_io.read_file(normalization_file_name)
    training_example_dict = example_utils.subset_by_height(
        example_dict=training_example_dict,
        heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY])

    num_examples = len(example_id_strings)
    num_heights = len(prediction_example_dict[example_utils.HEIGHTS_KEY])

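    # Attach empty predictor arrays so that the target and prediction
    # dictionaries satisfy the example-dict format expected by
    # normalization.denormalize_data.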
    this_dict = {
        example_utils.VECTOR_PREDICTOR_NAMES_KEY: [],
        example_utils.VECTOR_PREDICTOR_VALS_KEY:
        numpy.full((num_examples, num_heights, 0), 0.),
        example_utils.SCALAR_PREDICTOR_NAMES_KEY: [],
        example_utils.SCALAR_PREDICTOR_VALS_KEY:
        numpy.full((num_examples, 0), 0.)
    }

    target_example_dict.update(this_dict)
    prediction_example_dict.update(this_dict)

    if vector_target_norm_type_string is not None:
        print('Denormalizing predicted vectors...')

        prediction_example_dict = normalization.denormalize_data(
            new_example_dict=prediction_example_dict,
            training_example_dict=training_example_dict,
            normalization_type_string=vector_target_norm_type_string,
            min_normalized_value=generator_option_dict[
                neural_net.VECTOR_TARGET_MIN_VALUE_KEY],
            max_normalized_value=generator_option_dict[
                neural_net.VECTOR_TARGET_MAX_VALUE_KEY],
            separate_heights=True,
            apply_to_predictors=False,
            apply_to_vector_targets=True,
            apply_to_scalar_targets=False)

    if scalar_target_norm_type_string is not None:
        print('Denormalizing predicted scalars...')

        prediction_example_dict = normalization.denormalize_data(
            new_example_dict=prediction_example_dict,
            training_example_dict=training_example_dict,
            normalization_type_string=scalar_target_norm_type_string,
            min_normalized_value=generator_option_dict[
                neural_net.SCALAR_TARGET_MIN_VALUE_KEY],
            max_normalized_value=generator_option_dict[
                neural_net.SCALAR_TARGET_MAX_VALUE_KEY],
            separate_heights=True,
            apply_to_predictors=False,
            apply_to_vector_targets=False,
            apply_to_scalar_targets=True)

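    # If the heating rate was omitted from the NN's target variables during
    # training, compute it here from the predicted fluxes and pressure
    # profiles.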
    add_heating_rate = generator_option_dict[neural_net.OMIT_HEATING_RATE_KEY]

    if add_heating_rate:
        pressure_matrix_pascals = _get_unnormalized_pressure(
            model_metadata_dict=metadata_dict,
            example_id_strings=example_id_strings)

        prediction_example_dict = _get_predicted_heating_rates(
            prediction_example_dict=prediction_example_dict,
            pressure_matrix_pascals=pressure_matrix_pascals,
            model_metadata_dict=metadata_dict)

    vector_target_names = (
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY])

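    # Zero out predicted heating rates at heights at or above
    # ZERO_HEATING_HEIGHT_M_AGL, where heating is assumed to be zero.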
    if example_utils.SHORTWAVE_HEATING_RATE_NAME in vector_target_names:
        heating_rate_index = vector_target_names.index(
            example_utils.SHORTWAVE_HEATING_RATE_NAME)

        heights_m_agl = generator_option_dict[neural_net.HEIGHTS_KEY]
        height_indices = numpy.where(
            heights_m_agl >= ZERO_HEATING_HEIGHT_M_AGL)[0]

        vector_target_matrix = (
            prediction_example_dict[example_utils.VECTOR_TARGET_VALS_KEY])
        vector_target_matrix[:, height_indices, heating_rate_index] = 0.
        prediction_example_dict[example_utils.VECTOR_TARGET_VALS_KEY] = (
            vector_target_matrix)

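    # Keep only heights below MAX_HEIGHT_M_AGL in the output file.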
    all_heights_m_agl = generator_option_dict[neural_net.HEIGHTS_KEY]
    desired_heights_m_agl = (
        all_heights_m_agl[all_heights_m_agl < MAX_HEIGHT_M_AGL])

    target_example_dict = example_utils.subset_by_height(
        example_dict=target_example_dict, heights_m_agl=desired_heights_m_agl)
    prediction_example_dict = example_utils.subset_by_height(
        example_dict=prediction_example_dict,
        heights_m_agl=desired_heights_m_agl)

    print('Writing target (actual) and predicted values to: "{0:s}"...'.format(
        output_file_name))
    prediction_io.write_file(netcdf_file_name=output_file_name,
                             scalar_target_matrix=target_example_dict[
                                 example_utils.SCALAR_TARGET_VALS_KEY],
                             vector_target_matrix=target_example_dict[
                                 example_utils.VECTOR_TARGET_VALS_KEY],
                             scalar_prediction_matrix=prediction_example_dict[
                                 example_utils.SCALAR_TARGET_VALS_KEY],
                             vector_prediction_matrix=prediction_example_dict[
                                 example_utils.VECTOR_TARGET_VALS_KEY],
                             heights_m_agl=desired_heights_m_agl,
                             example_id_strings=example_id_strings,
                             model_file_name=model_file_name)

def _run(evaluation_file_names, line_styles, line_colour_strings,
         set_descriptions_verbose, confidence_level, use_log_scale,
         plot_by_height, output_dir_name):
    """Plots model evaluation.

    This is effectively the main method.

    :param evaluation_file_names: See documentation at top of file.
    :param line_styles: Same.
    :param line_colour_strings: Same.
    :param set_descriptions_verbose: Same.
    :param confidence_level: Same.
    :param use_log_scale: Same.
    :param plot_by_height: Same.
    :param output_dir_name: Same.
    """

    # Check input args.
    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    if confidence_level < 0:
        confidence_level = None

    if confidence_level is not None:
        error_checking.assert_is_geq(confidence_level, 0.9)
        error_checking.assert_is_less_than(confidence_level, 1.)

    num_evaluation_sets = len(evaluation_file_names)
    expected_dim = numpy.array([num_evaluation_sets], dtype=int)

    error_checking.assert_is_string_list(line_styles)
    error_checking.assert_is_numpy_array(numpy.array(line_styles),
                                         exact_dimensions=expected_dim)

    error_checking.assert_is_string_list(set_descriptions_verbose)
    error_checking.assert_is_numpy_array(numpy.array(set_descriptions_verbose),
                                         exact_dimensions=expected_dim)

    set_descriptions_verbose = [
        s.replace('_', ' ') for s in set_descriptions_verbose
    ]
    set_descriptions_abbrev = [
        s.lower().replace(' ', '-') for s in set_descriptions_verbose
    ]

    error_checking.assert_is_string_list(line_colour_strings)
    error_checking.assert_is_numpy_array(numpy.array(line_colour_strings),
                                         exact_dimensions=expected_dim)
    line_colours = [
        numpy.fromstring(s, dtype=float, sep='_') / 255
        for s in line_colour_strings
    ]

    for i in range(num_evaluation_sets):
        error_checking.assert_is_numpy_array(line_colours[i],
                                             exact_dimensions=numpy.array(
                                                 [3], dtype=int))
        error_checking.assert_is_geq_numpy_array(line_colours[i], 0.)
        error_checking.assert_is_leq_numpy_array(line_colours[i], 1.)

    # Read files.
    evaluation_tables_xarray = [xarray.Dataset()] * num_evaluation_sets
    prediction_dicts = [dict()] * num_evaluation_sets

    for i in range(num_evaluation_sets):
        print('Reading data from: "{0:s}"...'.format(evaluation_file_names[i]))
        evaluation_tables_xarray[i] = evaluation.read_file(
            evaluation_file_names[i])

        this_prediction_file_name = (
            evaluation_tables_xarray[i].attrs[evaluation.PREDICTION_FILE_KEY])

        print(
            'Reading data from: "{0:s}"...'.format(this_prediction_file_name))
        prediction_dicts[i] = prediction_io.read_file(
            this_prediction_file_name)

    model_file_name = (
        evaluation_tables_xarray[0].attrs[evaluation.MODEL_FILE_KEY])
    model_metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True)

    print('Reading metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = neural_net.read_metafile(model_metafile_name)
    generator_option_dict = model_metadata_dict[
        neural_net.TRAINING_OPTIONS_KEY]

    scalar_target_names = (
        generator_option_dict[neural_net.SCALAR_TARGET_NAMES_KEY])
    vector_target_names = (
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY])
    heights_m_agl = generator_option_dict[neural_net.HEIGHTS_KEY]

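    # Auxiliary target fields may be absent from the evaluation files, in
    # which case this list stays empty.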
    try:
        t = evaluation_tables_xarray[0]
        aux_target_names = t.coords[evaluation.AUX_TARGET_FIELD_DIM].values
    except KeyError:
        aux_target_names = []

    num_scalar_targets = len(scalar_target_names)
    num_vector_targets = len(vector_target_names)
    num_heights = len(heights_m_agl)
    num_aux_targets = len(aux_target_names)

    example_dict = {
        example_utils.SCALAR_TARGET_NAMES_KEY:
        scalar_target_names,
        example_utils.VECTOR_TARGET_NAMES_KEY:
        vector_target_names,
        example_utils.HEIGHTS_KEY:
        heights_m_agl,
        example_utils.SCALAR_PREDICTOR_NAMES_KEY:
        generator_option_dict[neural_net.SCALAR_PREDICTOR_NAMES_KEY],
        example_utils.VECTOR_PREDICTOR_NAMES_KEY:
        generator_option_dict[neural_net.VECTOR_PREDICTOR_NAMES_KEY]
    }

    normalization_file_name = (
        generator_option_dict[neural_net.NORMALIZATION_FILE_KEY])
    print(('Reading training examples (for climatology) from: "{0:s}"...'
           ).format(normalization_file_name))

    training_example_dict = example_io.read_file(normalization_file_name)
    training_example_dict = example_utils.subset_by_height(
        example_dict=training_example_dict, heights_m_agl=heights_m_agl)
    mean_training_example_dict = normalization.create_mean_example(
        new_example_dict=example_dict,
        training_example_dict=training_example_dict)

    print(SEPARATOR_STRING)

    # Do actual stuff.
    _plot_error_distributions(
        prediction_dicts=prediction_dicts,
        model_metadata_dict=model_metadata_dict,
        aux_target_names=aux_target_names,
        set_descriptions_abbrev=set_descriptions_abbrev,
        set_descriptions_verbose=set_descriptions_verbose,
        output_dir_name=output_dir_name)
    print(SEPARATOR_STRING)

    _plot_reliability_by_height(
        evaluation_tables_xarray=evaluation_tables_xarray,
        vector_target_names=vector_target_names,
        heights_m_agl=heights_m_agl,
        set_descriptions_abbrev=set_descriptions_abbrev,
        set_descriptions_verbose=set_descriptions_verbose,
        output_dir_name=output_dir_name)
    print(SEPARATOR_STRING)

    for k in range(num_vector_targets):
        for this_score_name in list(SCORE_NAME_TO_PROFILE_KEY.keys()):
            _plot_score_profile(
                evaluation_tables_xarray=evaluation_tables_xarray,
                line_styles=line_styles,
                line_colours=line_colours,
                set_descriptions_verbose=set_descriptions_verbose,
                confidence_level=confidence_level,
                target_name=vector_target_names[k],
                score_name=this_score_name,
                use_log_scale=use_log_scale,
                output_dir_name=output_dir_name)

    print(SEPARATOR_STRING)

    for k in range(num_scalar_targets):
        _plot_attributes_diagram(
            evaluation_tables_xarray=evaluation_tables_xarray,
            line_styles=line_styles,
            line_colours=line_colours,
            set_descriptions_abbrev=set_descriptions_abbrev,
            set_descriptions_verbose=set_descriptions_verbose,
            confidence_level=confidence_level,
            mean_training_example_dict=mean_training_example_dict,
            target_name=scalar_target_names[k],
            output_dir_name=output_dir_name)

    for k in range(num_aux_targets):
        _plot_attributes_diagram(
            evaluation_tables_xarray=evaluation_tables_xarray,
            line_styles=line_styles,
            line_colours=line_colours,
            set_descriptions_abbrev=set_descriptions_abbrev,
            set_descriptions_verbose=set_descriptions_verbose,
            confidence_level=confidence_level,
            mean_training_example_dict=mean_training_example_dict,
            target_name=aux_target_names[k],
            output_dir_name=output_dir_name)

    if not plot_by_height:
        return

    print(SEPARATOR_STRING)

    for k in range(num_vector_targets):
        for j in range(num_heights):
            _plot_attributes_diagram(
                evaluation_tables_xarray=evaluation_tables_xarray,
                line_styles=line_styles,
                line_colours=line_colours,
                set_descriptions_abbrev=set_descriptions_abbrev,
                set_descriptions_verbose=set_descriptions_verbose,
                confidence_level=confidence_level,
                mean_training_example_dict=mean_training_example_dict,
                height_m_agl=heights_m_agl[j],
                target_name=vector_target_names[k],
                output_dir_name=output_dir_name)

        if k != num_vector_targets - 1:
            print(SEPARATOR_STRING)

def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, layer_name, neuron_indices, ideal_activation,
         num_iterations, learning_rate, l2_weight, output_file_name):
    """Runs backwards optimization.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param layer_name: Same.
    :param neuron_indices: Same.
    :param ideal_activation: Same.
    :param num_iterations: Same.
    :param learning_rate: Same.
    :param l2_weight: Same.
    :param output_file_name: Same.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )
    )
    print(SEPARATOR_STRING)

    generator_option_dict = metadata_dict[neural_net.TRAINING_OPTIONS_KEY]
    normalization_file_name = (
        generator_option_dict[neural_net.NORMALIZATION_FILE_KEY]
    )

    print((
        'Reading training examples (for normalization) from: "{0:s}"...'
    ).format(
        normalization_file_name
    ))
    training_example_dict = example_io.read_file(normalization_file_name)
    training_example_dict = example_utils.subset_by_height(
        example_dict=training_example_dict,
        heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY]
    )

    num_examples = len(example_id_strings)
    bwo_dict = None

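    # Run backwards optimization separately for each example, accumulating
    # initial/final predictors and activations.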
    for i in range(num_examples):
        this_bwo_dict = bwo.optimize_input_for_neuron(
            model_object=model_object,
            init_function_or_matrix=predictor_matrix[i, ...],
            layer_name=layer_name, neuron_indices=neuron_indices,
            ideal_activation=ideal_activation, num_iterations=num_iterations,
            learning_rate=learning_rate, l2_weight=l2_weight
        )

        if i == num_examples - 1:
            print(SEPARATOR_STRING)
        else:
            print(MINOR_SEPARATOR_STRING)

        if bwo_dict is None:
            these_dim = numpy.array(
                (num_examples,) +
                this_bwo_dict[bwo.INITIAL_PREDICTORS_KEY].shape[1:],
                dtype=int
            )

            bwo_dict = {
                bwo.INITIAL_PREDICTORS_KEY: numpy.full(these_dim, numpy.nan),
                bwo.FINAL_PREDICTORS_KEY: numpy.full(these_dim, numpy.nan),
                bwo.INITIAL_ACTIVATIONS_KEY:
                    numpy.full(num_examples, numpy.nan),
                bwo.FINAL_ACTIVATIONS_KEY: numpy.full(num_examples, numpy.nan)
            }

        bwo_dict[bwo.INITIAL_PREDICTORS_KEY][i, ...] = (
            this_bwo_dict[bwo.INITIAL_PREDICTORS_KEY][0, ...]
        )
        bwo_dict[bwo.FINAL_PREDICTORS_KEY][i, ...] = (
            this_bwo_dict[bwo.FINAL_PREDICTORS_KEY][0, ...]
        )
        bwo_dict[bwo.INITIAL_ACTIVATIONS_KEY][i] = (
            this_bwo_dict[bwo.INITIAL_ACTIVATION_KEY]
        )
        bwo_dict[bwo.FINAL_ACTIVATIONS_KEY][i] = (
            this_bwo_dict[bwo.FINAL_ACTIVATION_KEY]
        )

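    # If examples were read from a directory rather than a single file, use
    # the first file in the directory as a template example dict.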
    if example_file_name == '':
        example_file_name = example_io.find_many_files(
            directory_name=example_dir_name,
            first_time_unix_sec=0, last_time_unix_sec=int(1e12),
            raise_error_if_any_missing=False, raise_error_if_all_missing=True
        )[0]

    first_example_dict = example_io.read_file(example_file_name)
    first_example_dict = example_utils.subset_by_height(
        example_dict=first_example_dict,
        heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY]
    )

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]

    init_example_dict = copy.deepcopy(first_example_dict)
    this_example_dict = neural_net.predictors_numpy_to_dict(
        predictor_matrix=bwo_dict[bwo.INITIAL_PREDICTORS_KEY],
        example_dict=init_example_dict, net_type_string=net_type_string
    )
    init_example_dict.update(this_example_dict)

    if generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY] is not None:
        init_example_dict = normalization.denormalize_data(
            new_example_dict=init_example_dict,
            training_example_dict=training_example_dict,
            normalization_type_string=
            generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY],
            min_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MIN_NORM_VALUE_KEY],
            max_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MAX_NORM_VALUE_KEY],
            separate_heights=True, apply_to_predictors=True,
            apply_to_vector_targets=False, apply_to_scalar_targets=False
        )

    init_scalar_predictor_matrix = (
        init_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
    )
    init_vector_predictor_matrix = (
        init_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
    )

    final_example_dict = copy.deepcopy(first_example_dict)
    this_example_dict = neural_net.predictors_numpy_to_dict(
        predictor_matrix=bwo_dict[bwo.FINAL_PREDICTORS_KEY],
        example_dict=final_example_dict, net_type_string=net_type_string
    )
    final_example_dict.update(this_example_dict)

    if generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY] is not None:
        final_example_dict = normalization.denormalize_data(
            new_example_dict=final_example_dict,
            training_example_dict=training_example_dict,
            normalization_type_string=
            generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY],
            min_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MIN_NORM_VALUE_KEY],
            max_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MAX_NORM_VALUE_KEY],
            separate_heights=True, apply_to_predictors=True,
            apply_to_vector_targets=False, apply_to_scalar_targets=False
        )

    final_scalar_predictor_matrix = (
        final_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
    )
    final_vector_predictor_matrix = (
        final_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
    )

    print('Writing results to file: "{0:s}"...'.format(output_file_name))
    bwo.write_file(
        netcdf_file_name=output_file_name,
        init_scalar_predictor_matrix=init_scalar_predictor_matrix,
        final_scalar_predictor_matrix=final_scalar_predictor_matrix,
        init_vector_predictor_matrix=init_vector_predictor_matrix,
        final_vector_predictor_matrix=final_vector_predictor_matrix,
        initial_activations=bwo_dict[bwo.INITIAL_ACTIVATIONS_KEY],
        final_activations=bwo_dict[bwo.FINAL_ACTIVATIONS_KEY],
        example_id_strings=example_id_strings, model_file_name=model_file_name,
        layer_name=layer_name, neuron_indices=neuron_indices,
        ideal_activation=ideal_activation, num_iterations=num_iterations,
        learning_rate=learning_rate, l2_weight=l2_weight
    )
Example #6
def _fluxes_to_heating_rate(vector_target_matrix, vector_prediction_matrix,
                            model_metadata_dict, prediction_file_name,
                            example_dir_name):
    """If necessary, converts fluxes to heating rates.

    This method is a wrapper for `example_utils.fluxes_to_heating_rate`.

    :param vector_target_matrix: See doc for `_fluxes_increments_to_actual`.
    :param vector_prediction_matrix: Same.
    :param model_metadata_dict: Same.
    :param prediction_file_name: See documentation at top of file.
    :param example_dir_name: Same.
    :return: vector_target_matrix: See doc for `_fluxes_increments_to_actual`.
    :return: vector_prediction_matrix: Same.
    :return: model_metadata_dict: Same.
    """

    generator_option_dict = model_metadata_dict[
        neural_net.TRAINING_OPTIONS_KEY]
    vector_target_names = (
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY])
    need_heating_rate = (example_utils.SHORTWAVE_HEATING_RATE_NAME
                         not in vector_target_names)
    have_fluxes = all([t in vector_target_names for t in FLUX_NAMES])

    if not (need_heating_rate and have_fluxes
            and example_dir_name is not None):
        return (vector_target_matrix, vector_prediction_matrix,
                model_metadata_dict)

    num_examples = vector_target_matrix.shape[0]

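    # Read pressure profiles, which are needed to convert flux profiles to
    # heating rates.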
    this_example_dict = misc_utils.get_raw_examples(
        example_file_name='',
        num_examples=0,
        example_dir_name=example_dir_name,
        example_id_file_name=prediction_file_name)
    this_example_dict = example_utils.subset_by_height(
        example_dict=this_example_dict,
        heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY])
    pressure_matrix_pascals = example_utils.get_field_from_dict(
        example_dict=this_example_dict, field_name=example_utils.PRESSURE_NAME)
    pressure_matrix_pascals = pressure_matrix_pascals[:num_examples, ...]

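    # Build a minimal example dict, with pressure as the only predictor, in
    # the format expected by example_utils.fluxes_to_heating_rate.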
    base_example_dict = {
        example_utils.SCALAR_PREDICTOR_NAMES_KEY: [],
        example_utils.SCALAR_PREDICTOR_VALS_KEY:
        numpy.full((num_examples, 0), 0.),
        example_utils.VECTOR_PREDICTOR_NAMES_KEY:
        [example_utils.PRESSURE_NAME],
        example_utils.VECTOR_PREDICTOR_VALS_KEY:
        numpy.expand_dims(pressure_matrix_pascals, axis=-1),
        example_utils.SCALAR_TARGET_NAMES_KEY: [],
        example_utils.SCALAR_TARGET_VALS_KEY:
        numpy.full((num_examples, 0), 0.),
        example_utils.VECTOR_TARGET_NAMES_KEY:
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY],
        example_utils.HEIGHTS_KEY:
        generator_option_dict[neural_net.HEIGHTS_KEY],
        example_utils.VALID_TIMES_KEY:
        numpy.full(num_examples, 0, dtype=int)
    }

    target_example_dict = copy.deepcopy(base_example_dict)
    target_example_dict[example_utils.VECTOR_TARGET_VALS_KEY] = (
        vector_target_matrix)
    target_example_dict = example_utils.fluxes_to_heating_rate(
        target_example_dict)
    vector_target_matrix = (
        target_example_dict[example_utils.VECTOR_TARGET_VALS_KEY])

    prediction_example_dict = copy.deepcopy(base_example_dict)
    prediction_example_dict[example_utils.VECTOR_TARGET_VALS_KEY] = (
        vector_prediction_matrix)
    prediction_example_dict = example_utils.fluxes_to_heating_rate(
        prediction_example_dict)
    vector_prediction_matrix = (
        prediction_example_dict[example_utils.VECTOR_TARGET_VALS_KEY])

    generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY] = (
        target_example_dict[example_utils.VECTOR_TARGET_NAMES_KEY])
    model_metadata_dict[neural_net.TRAINING_OPTIONS_KEY] = (
        generator_option_dict)

    return vector_target_matrix, vector_prediction_matrix, model_metadata_dict

def _run(input_prediction_file_name, average_over_height, scale_by_climo,
         num_examples_per_set, output_dir_name):
    """Finds best and worst heating-rate predictions.

    This is effectively the main method.

    :param input_prediction_file_name: See documentation at top of file.
    :param average_over_height: Same.
    :param scale_by_climo: Same.
    :param num_examples_per_set: Same.
    :param output_dir_name: Same.
    """

    # TODO(thunderhoser): Maybe allow specific height again (e.g., 15 km).

    error_checking.assert_is_greater(num_examples_per_set, 0)
    scale_by_climo = scale_by_climo and not average_over_height

    print('Reading data from: "{0:s}"...'.format(input_prediction_file_name))
    prediction_dict = prediction_io.read_file(input_prediction_file_name)

    model_file_name = prediction_dict[prediction_io.MODEL_FILE_KEY]
    model_metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0])

    print(
        'Reading model metadata from: "{0:s}"...'.format(model_metafile_name))
    model_metadata_dict = neural_net.read_metafile(model_metafile_name)
    generator_option_dict = model_metadata_dict[
        neural_net.TRAINING_OPTIONS_KEY]

    vector_target_names = (
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY])
    hr_index = (vector_target_names.index(
        example_utils.SHORTWAVE_HEATING_RATE_NAME))

    target_matrix_k_day01 = (
        prediction_dict[prediction_io.VECTOR_TARGETS_KEY][..., hr_index])
    prediction_matrix_k_day01 = (
        prediction_dict[prediction_io.VECTOR_PREDICTIONS_KEY][..., hr_index])

    bias_matrix = prediction_matrix_k_day01 - target_matrix_k_day01
    absolute_error_matrix = numpy.absolute(bias_matrix)

    if average_over_height:
        bias_matrix = numpy.mean(bias_matrix, axis=1, keepdims=True)
        absolute_error_matrix = numpy.mean(absolute_error_matrix,
                                           axis=1,
                                           keepdims=True)

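    # Optionally scale errors by the climatological (training-mean) heating
    # rate at each height.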
    if scale_by_climo:
        normalization_file_name = (
            generator_option_dict[neural_net.NORMALIZATION_FILE_KEY])

        print(('Reading training examples (for climatology) from: "{0:s}"...'
               ).format(normalization_file_name))

        training_example_dict = example_io.read_file(normalization_file_name)
        training_example_dict = example_utils.subset_by_field(
            example_dict=training_example_dict,
            field_names=[example_utils.SHORTWAVE_HEATING_RATE_NAME])
        training_example_dict = example_utils.subset_by_height(
            example_dict=training_example_dict,
            heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY])

        dummy_example_dict = {
            example_utils.SCALAR_PREDICTOR_NAMES_KEY: [],
            example_utils.VECTOR_PREDICTOR_NAMES_KEY: [],
            example_utils.SCALAR_TARGET_NAMES_KEY: [],
            example_utils.VECTOR_TARGET_NAMES_KEY:
            [example_utils.SHORTWAVE_HEATING_RATE_NAME],
            example_utils.HEIGHTS_KEY:
            generator_option_dict[neural_net.HEIGHTS_KEY]
        }

        mean_training_example_dict = normalization.create_mean_example(
            new_example_dict=dummy_example_dict,
            training_example_dict=training_example_dict)
        climo_matrix_k_day01 = mean_training_example_dict[
            example_utils.VECTOR_TARGET_VALS_KEY][..., 0]

        bias_matrix = bias_matrix / climo_matrix_k_day01
        absolute_error_matrix = absolute_error_matrix / climo_matrix_k_day01

    print(SEPARATOR_STRING)
    high_bias_indices, low_bias_indices, low_abs_error_indices = (
        misc_utils.find_best_and_worst_predictions(
            bias_matrix=bias_matrix,
            absolute_error_matrix=absolute_error_matrix,
            num_examples_per_set=num_examples_per_set))
    print(SEPARATOR_STRING)

    high_bias_prediction_dict = prediction_io.subset_by_index(
        prediction_dict=copy.deepcopy(prediction_dict),
        desired_indices=high_bias_indices)
    high_bias_file_name = (
        '{0:s}/predictions_high-bias.nc'.format(output_dir_name))

    print('Writing examples with greatest positive bias to: "{0:s}"...'.format(
        high_bias_file_name))
    prediction_io.write_file(
        netcdf_file_name=high_bias_file_name,
        scalar_target_matrix=high_bias_prediction_dict[
            prediction_io.SCALAR_TARGETS_KEY],
        vector_target_matrix=high_bias_prediction_dict[
            prediction_io.VECTOR_TARGETS_KEY],
        scalar_prediction_matrix=high_bias_prediction_dict[
            prediction_io.SCALAR_PREDICTIONS_KEY],
        vector_prediction_matrix=high_bias_prediction_dict[
            prediction_io.VECTOR_PREDICTIONS_KEY],
        heights_m_agl=high_bias_prediction_dict[prediction_io.HEIGHTS_KEY],
        example_id_strings=high_bias_prediction_dict[
            prediction_io.EXAMPLE_IDS_KEY],
        model_file_name=high_bias_prediction_dict[
            prediction_io.MODEL_FILE_KEY])

    low_bias_prediction_dict = prediction_io.subset_by_index(
        prediction_dict=copy.deepcopy(prediction_dict),
        desired_indices=low_bias_indices)
    low_bias_file_name = (
        '{0:s}/predictions_low-bias.nc'.format(output_dir_name))

    print('Writing examples with greatest negative bias to: "{0:s}"...'.format(
        low_bias_file_name))
    prediction_io.write_file(
        netcdf_file_name=low_bias_file_name,
        scalar_target_matrix=low_bias_prediction_dict[
            prediction_io.SCALAR_TARGETS_KEY],
        vector_target_matrix=low_bias_prediction_dict[
            prediction_io.VECTOR_TARGETS_KEY],
        scalar_prediction_matrix=low_bias_prediction_dict[
            prediction_io.SCALAR_PREDICTIONS_KEY],
        vector_prediction_matrix=low_bias_prediction_dict[
            prediction_io.VECTOR_PREDICTIONS_KEY],
        heights_m_agl=low_bias_prediction_dict[prediction_io.HEIGHTS_KEY],
        example_id_strings=low_bias_prediction_dict[
            prediction_io.EXAMPLE_IDS_KEY],
        model_file_name=low_bias_prediction_dict[prediction_io.MODEL_FILE_KEY])

    low_abs_error_prediction_dict = prediction_io.subset_by_index(
        prediction_dict=copy.deepcopy(prediction_dict),
        desired_indices=low_abs_error_indices)
    low_abs_error_file_name = (
        '{0:s}/predictions_low-absolute-error.nc'.format(output_dir_name))

    print(
        'Writing examples with smallest absolute error to: "{0:s}"...'.format(
            low_abs_error_file_name))
    prediction_io.write_file(
        netcdf_file_name=low_abs_error_file_name,
        scalar_target_matrix=low_abs_error_prediction_dict[
            prediction_io.SCALAR_TARGETS_KEY],
        vector_target_matrix=low_abs_error_prediction_dict[
            prediction_io.VECTOR_TARGETS_KEY],
        scalar_prediction_matrix=low_abs_error_prediction_dict[
            prediction_io.SCALAR_PREDICTIONS_KEY],
        vector_prediction_matrix=low_abs_error_prediction_dict[
            prediction_io.VECTOR_PREDICTIONS_KEY],
        heights_m_agl=low_abs_error_prediction_dict[prediction_io.HEIGHTS_KEY],
        example_id_strings=low_abs_error_prediction_dict[
            prediction_io.EXAMPLE_IDS_KEY],
        model_file_name=low_abs_error_prediction_dict[
            prediction_io.MODEL_FILE_KEY])

    if scale_by_climo:
        return

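    # Also find examples with the largest heating rates (profile mean if
    # averaging over height, otherwise profile max).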
    if average_over_height:
        mean_targets_k_day01 = numpy.mean(target_matrix_k_day01, axis=1)
        sort_indices = numpy.argsort(-1 * mean_targets_k_day01)
    else:
        max_targets_k_day01 = numpy.max(target_matrix_k_day01, axis=1)
        sort_indices = numpy.argsort(-1 * max_targets_k_day01)

    large_hr_indices = sort_indices[:num_examples_per_set]
    large_hr_prediction_dict = prediction_io.subset_by_index(
        prediction_dict=copy.deepcopy(prediction_dict),
        desired_indices=large_hr_indices)
    large_hr_file_name = (
        '{0:s}/predictions_large-heating-rate.nc'.format(output_dir_name))

    print('Writing examples with greatest heating rate to: "{0:s}"...'.format(
        large_hr_file_name))
    prediction_io.write_file(
        netcdf_file_name=large_hr_file_name,
        scalar_target_matrix=large_hr_prediction_dict[
            prediction_io.SCALAR_TARGETS_KEY],
        vector_target_matrix=large_hr_prediction_dict[
            prediction_io.VECTOR_TARGETS_KEY],
        scalar_prediction_matrix=large_hr_prediction_dict[
            prediction_io.SCALAR_PREDICTIONS_KEY],
        vector_prediction_matrix=large_hr_prediction_dict[
            prediction_io.VECTOR_PREDICTIONS_KEY],
        heights_m_agl=large_hr_prediction_dict[prediction_io.HEIGHTS_KEY],
        example_id_strings=large_hr_prediction_dict[
            prediction_io.EXAMPLE_IDS_KEY],
        model_file_name=large_hr_prediction_dict[prediction_io.MODEL_FILE_KEY])

    if not average_over_height:
        return

    mean_targets_k_day01 = numpy.mean(target_matrix_k_day01, axis=1)
    sort_indices = numpy.argsort(mean_targets_k_day01)
    small_hr_indices = sort_indices[:num_examples_per_set]

    small_hr_prediction_dict = prediction_io.subset_by_index(
        prediction_dict=copy.deepcopy(prediction_dict),
        desired_indices=small_hr_indices)
    small_hr_file_name = (
        '{0:s}/predictions_small-heating-rate.nc'.format(output_dir_name))

    print('Writing examples with smallest heating rate to: "{0:s}"...'.format(
        small_hr_file_name))
    prediction_io.write_file(
        netcdf_file_name=small_hr_file_name,
        scalar_target_matrix=small_hr_prediction_dict[
            prediction_io.SCALAR_TARGETS_KEY],
        vector_target_matrix=small_hr_prediction_dict[
            prediction_io.VECTOR_TARGETS_KEY],
        scalar_prediction_matrix=small_hr_prediction_dict[
            prediction_io.SCALAR_PREDICTIONS_KEY],
        vector_prediction_matrix=small_hr_prediction_dict[
            prediction_io.VECTOR_PREDICTIONS_KEY],
        heights_m_agl=small_hr_prediction_dict[prediction_io.HEIGHTS_KEY],
        example_id_strings=small_hr_prediction_dict[
            prediction_io.EXAMPLE_IDS_KEY],
        model_file_name=small_hr_prediction_dict[prediction_io.MODEL_FILE_KEY])
Example #8
def _run(example_file_name, num_examples, example_dir_name,
         example_id_file_name, use_log_scale, model_file_name,
         output_dir_name):
    """Plots profiles (vector predictor and target variables) for each example.

    This is effectively the main method.

    :param example_file_name: See documentation at top of file.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param use_log_scale: Same.
    :param model_file_name: Same.
    :param output_dir_name: Same.
    """

    file_system_utils.mkdir_recursive_if_necessary(
        directory_name=output_dir_name)

    example_dict = misc_utils.get_raw_examples(
        example_file_name=example_file_name,
        num_examples=num_examples,
        example_dir_name=example_dir_name,
        example_id_file_name=example_id_file_name)

    num_examples_total = len(example_dict[example_utils.VALID_TIMES_KEY])

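    # If desired, keep only the first `num_examples` examples.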
    if 0 < num_examples < num_examples_total:
        desired_indices = numpy.linspace(0,
                                         num_examples - 1,
                                         num=num_examples,
                                         dtype=int)
        example_dict = example_utils.subset_by_index(
            example_dict=example_dict, desired_indices=desired_indices)

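    # If a model file is given, restrict the plotted fields and heights to
    # those used by the model.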
    if model_file_name != '':
        model_metafile_name = neural_net.find_metafile(
            os.path.split(model_file_name)[0])

        print('Reading model metadata from: "{0:s}"...\n'.format(
            model_metafile_name))
        model_metadata_dict = neural_net.read_metafile(model_metafile_name)

        generator_option_dict = (
            model_metadata_dict[neural_net.TRAINING_OPTIONS_KEY])
        vector_predictor_names = (
            generator_option_dict[neural_net.VECTOR_PREDICTOR_NAMES_KEY])
        all_field_names = vector_predictor_names + VECTOR_TARGET_NAMES

        example_dict = example_utils.subset_by_field(
            example_dict=example_dict, field_names=all_field_names)
        example_dict = example_utils.subset_by_height(
            example_dict=example_dict,
            heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY])

    num_examples = len(example_dict[example_utils.VALID_TIMES_KEY])

    for i in range(num_examples):
        _plot_one_example(example_dict=example_dict,
                          example_index=i,
                          use_log_scale=use_log_scale,
                          output_dir_name=output_dir_name)

        print(MINOR_SEPARATOR_STRING)