Example #1
def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, heating_rate_weight, flux_weight,
         include_net_flux, do_backwards_test, shuffle_profiles_together,
         num_bootstrap_reps, output_file_name):
    """Runs permutation-based importance test.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param heating_rate_weight: Same.
    :param flux_weight: Same.
    :param include_net_flux: Same.
    :param do_backwards_test: Same.
    :param shuffle_profiles_together: Same.
    :param num_bootstrap_reps: Same.
    :param output_file_name: Same.
    """

    cost_function = permutation.make_cost_function(
        heating_rate_weight=heating_rate_weight, flux_weight=flux_weight,
        include_net_flux=include_net_flux
    )

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    predictor_matrix, target_matrices = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )[:2]
    )
    print(SEPARATOR_STRING)

    if not isinstance(target_matrices, list):
        target_matrices = [target_matrices]

    if do_backwards_test:
        result_dict = permutation.run_backwards_test(
            predictor_matrix=predictor_matrix, target_matrices=target_matrices,
            model_object=model_object, model_metadata_dict=metadata_dict,
            cost_function=cost_function,
            shuffle_profiles_together=shuffle_profiles_together,
            num_bootstrap_reps=num_bootstrap_reps
        )
    else:
        result_dict = permutation.run_forward_test(
            predictor_matrix=predictor_matrix, target_matrices=target_matrices,
            model_object=model_object, model_metadata_dict=metadata_dict,
            cost_function=cost_function,
            shuffle_profiles_together=shuffle_profiles_together,
            num_bootstrap_reps=num_bootstrap_reps
        )

    print(SEPARATOR_STRING)

    print('Writing results of permutation test to: "{0:s}"...'.format(
        output_file_name
    ))

    permutation.write_file(
        result_dict=result_dict, netcdf_file_name=output_file_name
    )
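
The actual shuffling happens inside permutation.run_forward_test and run_backwards_test. As a rough single-pass sketch of the forward direction (not the library's real implementation), with predict_fn and cost_fn as hypothetical stand-ins for the model's prediction method and the cost function built above, and with whole profiles shuffled together as when shuffle_profiles_together == True:

import numpy

def forward_permutation_step(predictor_matrix, target_values, predict_fn,
                             cost_fn, random_seed=0):
    """One pass of the forward permutation test (illustrative sketch only).

    Permutes each predictor in turn, leaving the others intact; the most
    important predictor is the one whose permutation raises the cost most.
    """

    random_state = numpy.random.RandomState(random_seed)
    num_predictors = predictor_matrix.shape[-1]
    cost_by_predictor = numpy.full(num_predictors, numpy.nan)

    for j in range(num_predictors):
        this_matrix = predictor_matrix.copy()

        # Shuffle the j-th predictor over the example axis only, so each
        # profile stays intact but is assigned to the wrong example.
        this_matrix[..., j] = random_state.permutation(this_matrix[..., j])

        cost_by_predictor[j] = cost_fn(target_values, predict_fn(this_matrix))

    return cost_by_predictor
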
Example #2
def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, layer_name, is_layer_output, neuron_indices,
         ideal_activation, output_file_name):
    """Makes saliency map for each example, according to one model.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param layer_name: Same.
    :param is_layer_output: Same.
    :param neuron_indices: Same.
    :param ideal_activation: Same.
    :param output_file_name: Same.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )
    )
    print(SEPARATOR_STRING)

    generator_option_dict = metadata_dict[neural_net.TRAINING_OPTIONS_KEY]

    if is_layer_output:
        dummy_example_dict = {
            example_utils.SCALAR_TARGET_NAMES_KEY:
                generator_option_dict[neural_net.SCALAR_TARGET_NAMES_KEY],
            example_utils.VECTOR_TARGET_NAMES_KEY:
                generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY],
            example_utils.HEIGHTS_KEY:
                generator_option_dict[neural_net.HEIGHTS_KEY]
        }

        target_field_name, target_height_m_agl = (
            neural_net.neuron_indices_to_target_var(
                neuron_indices=neuron_indices,
                example_dict=copy.deepcopy(dummy_example_dict),
                net_type_string=metadata_dict[neural_net.NET_TYPE_KEY]
            )
        )
    else:
        target_field_name = None
        target_height_m_agl = None

    print('Target field and height = {0:s}, {1:s}'.format(
        str(target_field_name), str(target_height_m_agl)
    ))

    print('Computing saliency for neuron {0:s} in layer "{1:s}"...'.format(
        str(neuron_indices), layer_name
    ))
    saliency_matrix = saliency.get_saliency_one_neuron(
        model_object=model_object, predictor_matrix=predictor_matrix,
        layer_name=layer_name, neuron_indices=neuron_indices,
        ideal_activation=ideal_activation
    )

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]

    if net_type_string == neural_net.DENSE_NET_TYPE_STRING:
        dummy_example_dict = {
            example_utils.SCALAR_PREDICTOR_NAMES_KEY:
                generator_option_dict[neural_net.SCALAR_PREDICTOR_NAMES_KEY],
            example_utils.VECTOR_PREDICTOR_NAMES_KEY:
                generator_option_dict[neural_net.VECTOR_PREDICTOR_NAMES_KEY],
            example_utils.HEIGHTS_KEY:
                generator_option_dict[neural_net.HEIGHTS_KEY]
        }

        dummy_example_dict = neural_net.predictors_numpy_to_dict(
            predictor_matrix=saliency_matrix, example_dict=dummy_example_dict,
            net_type_string=metadata_dict[neural_net.NET_TYPE_KEY]
        )
        scalar_saliency_matrix = (
            dummy_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
        )
        vector_saliency_matrix = (
            dummy_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
        )
    else:
        num_scalar_predictors = len(
            generator_option_dict[neural_net.SCALAR_PREDICTOR_NAMES_KEY]
        )
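        # For non-dense nets, the scalar predictors occupy the last channels
        # of the predictor matrix, so the saliency matrix can be split by
        # simple slicing.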
        scalar_saliency_matrix = saliency_matrix[..., -num_scalar_predictors:]
        vector_saliency_matrix = saliency_matrix[..., :-num_scalar_predictors]

    print('Writing saliency maps to: "{0:s}"...'.format(output_file_name))
    saliency.write_file(
        netcdf_file_name=output_file_name,
        scalar_saliency_matrix=scalar_saliency_matrix,
        vector_saliency_matrix=vector_saliency_matrix,
        example_id_strings=example_id_strings, model_file_name=model_file_name,
        layer_name=layer_name, neuron_indices=neuron_indices,
        ideal_activation=ideal_activation, target_field_name=target_field_name,
        target_height_m_agl=target_height_m_agl
    )
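
Under the hood, saliency.get_saliency_one_neuron presumably differentiates the chosen neuron's activation with respect to the inputs. A minimal sketch of that idea, assuming a Keras model_object and the common convention that a non-None ideal_activation turns the target into a squared difference:

import tensorflow as tf
from tensorflow import keras

def saliency_one_neuron_sketch(model_object, predictor_matrix, layer_name,
                               neuron_indices, ideal_activation=None):
    """Gradient of one neuron's activation w.r.t. the inputs (sketch only)."""

    # Sub-model mapping the inputs to the activations of the chosen layer.
    activation_model = keras.Model(
        inputs=model_object.input,
        outputs=model_object.get_layer(name=layer_name).output
    )

    predictor_tensor = tf.convert_to_tensor(predictor_matrix, dtype=tf.float32)

    with tf.GradientTape() as tape:
        tape.watch(predictor_tensor)
        activation_tensor = activation_model(predictor_tensor)

        # Pick out the chosen neuron for every example in the batch.
        index_tuple = (slice(None),) + tuple(int(k) for k in neuron_indices)
        activation_tensor = activation_tensor[index_tuple]

        if ideal_activation is None:
            loss_tensor = tf.reduce_sum(activation_tensor)
        else:
            loss_tensor = tf.reduce_sum(
                (activation_tensor - ideal_activation) ** 2
            )

    return tape.gradient(loss_tensor, predictor_tensor).numpy()
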
Example #3
def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, ideal_activation, scalar_output_layer_name,
         vector_output_layer_name, output_file_name):
    """Makes saliency map for each example and target, according to one model.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param ideal_activation: Same.
    :param scalar_output_layer_name: Same.
    :param vector_output_layer_name: Same.
    :param output_file_name: Same.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )
    )
    print(SEPARATOR_STRING)

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]
    generator_option_dict = metadata_dict[neural_net.TRAINING_OPTIONS_KEY]

    scalar_predictor_names = (
        generator_option_dict[neural_net.SCALAR_PREDICTOR_NAMES_KEY]
    )
    vector_predictor_names = (
        generator_option_dict[neural_net.VECTOR_PREDICTOR_NAMES_KEY]
    )
    scalar_target_names = (
        generator_option_dict[neural_net.SCALAR_TARGET_NAMES_KEY]
    )
    vector_target_names = (
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY]
    )
    heights_m_agl = generator_option_dict[neural_net.HEIGHTS_KEY]

    num_scalar_predictors = len(scalar_predictor_names)
    num_vector_predictors = len(vector_predictor_names)
    num_scalar_targets = len(scalar_target_names)
    num_vector_targets = len(vector_target_names)
    num_heights = len(heights_m_agl)
    num_examples = len(example_id_strings)

    if net_type_string == neural_net.DENSE_NET_TYPE_STRING:
        saliency_matrix_scalar_p_scalar_t = numpy.full(
            (num_examples, num_scalar_predictors, num_scalar_targets), numpy.nan
        )
    else:
        saliency_matrix_scalar_p_scalar_t = numpy.full(
            (num_examples, num_heights, num_scalar_predictors,
             num_scalar_targets),
            numpy.nan
        )

    saliency_matrix_vector_p_scalar_t = numpy.full(
        (num_examples, num_heights, num_vector_predictors, num_scalar_targets),
        numpy.nan
    )

    if net_type_string == neural_net.DENSE_NET_TYPE_STRING:
        saliency_matrix_scalar_p_vector_t = numpy.full(
            (num_examples, num_scalar_predictors, num_heights,
             num_vector_targets),
            numpy.nan
        )
    else:
        saliency_matrix_scalar_p_vector_t = numpy.full(
            (num_examples, num_heights, num_scalar_predictors, num_heights,
             num_vector_targets),
            numpy.nan
        )

    saliency_matrix_vector_p_vector_t = numpy.full(
        (num_examples, num_heights, num_vector_predictors, num_heights,
         num_vector_targets),
        numpy.nan
    )

    dummy_example_dict = {
        example_utils.SCALAR_PREDICTOR_NAMES_KEY: scalar_predictor_names,
        example_utils.VECTOR_PREDICTOR_NAMES_KEY: vector_predictor_names,
        example_utils.SCALAR_TARGET_NAMES_KEY: scalar_target_names,
        example_utils.VECTOR_TARGET_NAMES_KEY: vector_target_names,
        example_utils.HEIGHTS_KEY: heights_m_agl
    }

    for k in range(num_scalar_targets):
        these_neuron_indices = neural_net.target_var_to_neuron_indices(
            example_dict=copy.deepcopy(dummy_example_dict),
            net_type_string=net_type_string, target_name=scalar_target_names[k]
        )

        print('Computing saliency for "{0:s}"...'.format(
            scalar_target_names[k]
        ))

        this_saliency_matrix = saliency.get_saliency_one_neuron(
            model_object=model_object, predictor_matrix=predictor_matrix,
            layer_name=scalar_output_layer_name,
            neuron_indices=these_neuron_indices,
            ideal_activation=ideal_activation
        )

        if net_type_string == neural_net.DENSE_NET_TYPE_STRING:
            new_example_dict = neural_net.predictors_numpy_to_dict(
                predictor_matrix=this_saliency_matrix,
                example_dict=copy.deepcopy(dummy_example_dict),
                net_type_string=net_type_string
            )
            saliency_matrix_scalar_p_scalar_t[..., k] = (
                new_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
            )
            saliency_matrix_vector_p_scalar_t[..., k] = (
                new_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
            )
        else:
            saliency_matrix_scalar_p_scalar_t[..., k] = (
                this_saliency_matrix[..., -num_scalar_predictors:]
            )
            saliency_matrix_vector_p_scalar_t[..., k] = (
                this_saliency_matrix[..., :-num_scalar_predictors]
            )

    print(SEPARATOR_STRING)

    for k in range(num_vector_targets):
        for j in range(num_heights):
            these_neuron_indices = neural_net.target_var_to_neuron_indices(
                example_dict=copy.deepcopy(dummy_example_dict),
                net_type_string=net_type_string,
                target_name=vector_target_names[k],
                height_m_agl=heights_m_agl[j]
            )

            print('Computing saliency for "{0:s}" at {1:d} m AGL...'.format(
                vector_target_names[k], int(numpy.round(heights_m_agl[j]))
            ))

            this_saliency_matrix = saliency.get_saliency_one_neuron(
                model_object=model_object, predictor_matrix=predictor_matrix,
                layer_name=vector_output_layer_name,
                neuron_indices=these_neuron_indices,
                ideal_activation=ideal_activation
            )

            if net_type_string == neural_net.DENSE_NET_TYPE_STRING:
                new_example_dict = neural_net.predictors_numpy_to_dict(
                    predictor_matrix=this_saliency_matrix,
                    example_dict=copy.deepcopy(dummy_example_dict),
                    net_type_string=net_type_string
                )
                saliency_matrix_scalar_p_vector_t[..., j, k] = (
                    new_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
                )
                saliency_matrix_vector_p_vector_t[..., j, k] = (
                    new_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
                )
            else:
                saliency_matrix_scalar_p_vector_t[..., j, k] = (
                    this_saliency_matrix[..., -num_scalar_predictors:]
                )
                saliency_matrix_vector_p_vector_t[..., j, k] = (
                    this_saliency_matrix[..., :-num_scalar_predictors]
                )

        print(SEPARATOR_STRING)

    print('Writing saliency maps to: "{0:s}"...'.format(output_file_name))
    saliency.write_all_targets_file(
        netcdf_file_name=output_file_name,
        saliency_matrix_scalar_p_scalar_t=saliency_matrix_scalar_p_scalar_t,
        saliency_matrix_vector_p_scalar_t=saliency_matrix_vector_p_scalar_t,
        saliency_matrix_scalar_p_vector_t=saliency_matrix_scalar_p_vector_t,
        saliency_matrix_vector_p_vector_t=saliency_matrix_vector_p_vector_t,
        example_id_strings=example_id_strings, model_file_name=model_file_name,
        ideal_activation=ideal_activation
    )
Example #4
def _run(model_file_name, layer_names, example_file_name, num_examples,
         example_dir_name, example_id_file_name, output_dir_name):
    """Plots feature maps for each layer/example pair, for a single neural net.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param layer_names: Same.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param output_dir_name: Same.
    :raises: ValueError: if neural-net type is not CNN or U-net.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]
    valid_net_type_strings = [
        neural_net.CNN_TYPE_STRING, neural_net.U_NET_TYPE_STRING
    ]

    if net_type_string not in valid_net_type_strings:
        error_string = (
            '\nThis script does not work for net type "{0:s}".  Works only for '
            'those listed below:\n{1:s}'
        ).format(net_type_string, str(valid_net_type_strings))

        raise ValueError(error_string)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )
    )
    print(SEPARATOR_STRING)

    num_layers = len(layer_names)
    feature_matrix_by_layer = [numpy.array([])] * num_layers

    for k in range(num_layers):
        print('Creating feature maps for layer "{0:s}"...'.format(
            layer_names[k]
        ))

        feature_matrix_by_layer[k] = neural_net.get_feature_maps(
            model_object=model_object, predictor_matrix=predictor_matrix,
            num_examples_per_batch=predictor_matrix.shape[0],
            feature_layer_name=layer_names[k], verbose=False
        )

    print('\n')

    for k in range(num_layers):
        this_output_dir_name = '{0:s}/{1:s}'.format(
            output_dir_name, layer_names[k]
        )

        file_system_utils.mkdir_recursive_if_necessary(
            directory_name=this_output_dir_name
        )

        _plot_feature_maps_one_layer(
            feature_matrix=feature_matrix_by_layer[k],
            example_id_strings=example_id_strings,
            layer_name=layer_names[k],
            output_dir_name=this_output_dir_name
        )

        print(SEPARATOR_STRING)
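
neural_net.get_feature_maps presumably wraps the standard Keras pattern of building a sub-model that ends at the layer of interest. A minimal sketch, assuming a Keras model_object:

from tensorflow import keras

def get_feature_maps_sketch(model_object, predictor_matrix, layer_name):
    """Returns activations of one intermediate layer (illustrative sketch)."""

    intermediate_model = keras.Model(
        inputs=model_object.input,
        outputs=model_object.get_layer(name=layer_name).output
    )

    # One batch containing all examples, matching the call above.
    return intermediate_model.predict(
        predictor_matrix, batch_size=predictor_matrix.shape[0]
    )
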
Example #5
def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, layer_name, neuron_indices, ideal_activation,
         num_iterations, learning_rate, l2_weight, output_file_name):
    """Runs backwards optimization.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param layer_name: Same.
    :param neuron_indices: Same.
    :param ideal_activation: Same.
    :param num_iterations: Same.
    :param learning_rate: Same.
    :param l2_weight: Same.
    :param output_file_name: Same.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )
    )
    print(SEPARATOR_STRING)

    generator_option_dict = metadata_dict[neural_net.TRAINING_OPTIONS_KEY]
    normalization_file_name = (
        generator_option_dict[neural_net.NORMALIZATION_FILE_KEY]
    )

    print((
        'Reading training examples (for normalization) from: "{0:s}"...'
    ).format(
        normalization_file_name
    ))
    training_example_dict = example_io.read_file(normalization_file_name)
    training_example_dict = example_utils.subset_by_height(
        example_dict=training_example_dict,
        heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY]
    )

    num_examples = len(example_id_strings)
    bwo_dict = None

    for i in range(num_examples):
        this_bwo_dict = bwo.optimize_input_for_neuron(
            model_object=model_object,
            init_function_or_matrix=predictor_matrix[i, ...],
            layer_name=layer_name, neuron_indices=neuron_indices,
            ideal_activation=ideal_activation, num_iterations=num_iterations,
            learning_rate=learning_rate, l2_weight=l2_weight
        )

        if i == num_examples - 1:
            print(SEPARATOR_STRING)
        else:
            print(MINOR_SEPARATOR_STRING)

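        # On the first iteration, allocate the full output arrays, now that
        # the shape of one optimized example is known.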
        if bwo_dict is None:
            these_dim = numpy.array(
                (num_examples,) +
                this_bwo_dict[bwo.INITIAL_PREDICTORS_KEY].shape[1:],
                dtype=int
            )

            bwo_dict = {
                bwo.INITIAL_PREDICTORS_KEY: numpy.full(these_dim, numpy.nan),
                bwo.FINAL_PREDICTORS_KEY: numpy.full(these_dim, numpy.nan),
                bwo.INITIAL_ACTIVATIONS_KEY:
                    numpy.full(num_examples, numpy.nan),
                bwo.FINAL_ACTIVATIONS_KEY: numpy.full(num_examples, numpy.nan)
            }

        bwo_dict[bwo.INITIAL_PREDICTORS_KEY][i, ...] = (
            this_bwo_dict[bwo.INITIAL_PREDICTORS_KEY][0, ...]
        )
        bwo_dict[bwo.FINAL_PREDICTORS_KEY][i, ...] = (
            this_bwo_dict[bwo.FINAL_PREDICTORS_KEY][0, ...]
        )
        bwo_dict[bwo.INITIAL_ACTIVATIONS_KEY][i] = (
            this_bwo_dict[bwo.INITIAL_ACTIVATION_KEY]
        )
        bwo_dict[bwo.FINAL_ACTIVATIONS_KEY][i] = (
            this_bwo_dict[bwo.FINAL_ACTIVATION_KEY]
        )

    if example_file_name == '':
        example_file_name = example_io.find_many_files(
            directory_name=example_dir_name,
            first_time_unix_sec=0, last_time_unix_sec=int(1e12),
            raise_error_if_any_missing=False, raise_error_if_all_missing=True
        )[0]

    first_example_dict = example_io.read_file(example_file_name)
    first_example_dict = example_utils.subset_by_height(
        example_dict=first_example_dict,
        heights_m_agl=generator_option_dict[neural_net.HEIGHTS_KEY]
    )

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]

    init_example_dict = copy.deepcopy(first_example_dict)
    this_example_dict = neural_net.predictors_numpy_to_dict(
        predictor_matrix=bwo_dict[bwo.INITIAL_PREDICTORS_KEY],
        example_dict=init_example_dict, net_type_string=net_type_string
    )
    init_example_dict.update(this_example_dict)

    if generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY] is not None:
        init_example_dict = normalization.denormalize_data(
            new_example_dict=init_example_dict,
            training_example_dict=training_example_dict,
            normalization_type_string=
            generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY],
            min_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MIN_NORM_VALUE_KEY],
            max_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MAX_NORM_VALUE_KEY],
            separate_heights=True, apply_to_predictors=True,
            apply_to_vector_targets=False, apply_to_scalar_targets=False
        )

    init_scalar_predictor_matrix = (
        init_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
    )
    init_vector_predictor_matrix = (
        init_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
    )

    final_example_dict = copy.deepcopy(first_example_dict)
    this_example_dict = neural_net.predictors_numpy_to_dict(
        predictor_matrix=bwo_dict[bwo.FINAL_PREDICTORS_KEY],
        example_dict=final_example_dict, net_type_string=net_type_string
    )
    final_example_dict.update(this_example_dict)

    if generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY] is not None:
        final_example_dict = normalization.denormalize_data(
            new_example_dict=final_example_dict,
            training_example_dict=training_example_dict,
            normalization_type_string=
            generator_option_dict[neural_net.PREDICTOR_NORM_TYPE_KEY],
            min_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MIN_NORM_VALUE_KEY],
            max_normalized_value=
            generator_option_dict[neural_net.PREDICTOR_MAX_NORM_VALUE_KEY],
            separate_heights=True, apply_to_predictors=True,
            apply_to_vector_targets=False, apply_to_scalar_targets=False
        )

    final_scalar_predictor_matrix = (
        final_example_dict[example_utils.SCALAR_PREDICTOR_VALS_KEY]
    )
    final_vector_predictor_matrix = (
        final_example_dict[example_utils.VECTOR_PREDICTOR_VALS_KEY]
    )

    print('Writing results to file: "{0:s}"...'.format(output_file_name))
    bwo.write_file(
        netcdf_file_name=output_file_name,
        init_scalar_predictor_matrix=init_scalar_predictor_matrix,
        final_scalar_predictor_matrix=final_scalar_predictor_matrix,
        init_vector_predictor_matrix=init_vector_predictor_matrix,
        final_vector_predictor_matrix=final_vector_predictor_matrix,
        initial_activations=bwo_dict[bwo.INITIAL_ACTIVATIONS_KEY],
        final_activations=bwo_dict[bwo.FINAL_ACTIVATIONS_KEY],
        example_id_strings=example_id_strings, model_file_name=model_file_name,
        layer_name=layer_name, neuron_indices=neuron_indices,
        ideal_activation=ideal_activation, num_iterations=num_iterations,
        learning_rate=learning_rate, l2_weight=l2_weight
    )
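
bwo.optimize_input_for_neuron presumably runs gradient descent in input space: starting from the real example, it nudges the predictors so the chosen neuron's activation approaches ideal_activation, with the L2 term penalizing large departures from the starting point (one common formulation; the real loss may differ). A rough sketch, assuming a Keras model_object:

import numpy
import tensorflow as tf
from tensorflow import keras

def optimize_input_sketch(model_object, init_matrix, layer_name,
                          neuron_indices, ideal_activation, num_iterations,
                          learning_rate, l2_weight):
    """Gradient descent on the input of a neural net (illustrative sketch)."""

    activation_model = keras.Model(
        inputs=model_object.input,
        outputs=model_object.get_layer(name=layer_name).output
    )

    init_tensor = tf.convert_to_tensor(
        init_matrix[numpy.newaxis, ...], dtype=tf.float32
    )
    current_tensor = tf.Variable(init_tensor)
    index_tuple = (slice(None),) + tuple(int(k) for k in neuron_indices)

    for _ in range(num_iterations):
        with tf.GradientTape() as tape:
            activation_tensor = activation_model(current_tensor)[index_tuple]

            # Squared distance from the ideal activation, plus an L2 penalty
            # on the total perturbation of the input.
            loss_tensor = (
                (activation_tensor - ideal_activation) ** 2 +
                l2_weight * tf.reduce_sum((current_tensor - init_tensor) ** 2)
            )

        gradient_tensor = tape.gradient(loss_tensor, current_tensor)
        current_tensor.assign_sub(learning_rate * gradient_tensor)

    return current_tensor.numpy()
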
Example #6
def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, activation_layer_name, vector_output_layer_name,
         output_neuron_indices, ideal_activation, output_file_name):
    """Runs the Grad-CAM (gradient-weighted class-activation maps) algorithm.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param activation_layer_name: Same.
    :param vector_output_layer_name: Same.
    :param output_neuron_indices: Same.
    :param ideal_activation: Same.
    :param output_file_name: Same.
    :raises: ValueError: if neural-net type is not CNN or U-net.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True)

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples,
            example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name))
    print(SEPARATOR_STRING)

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]
    valid_net_type_strings = [
        neural_net.CNN_TYPE_STRING, neural_net.U_NET_TYPE_STRING
    ]

    if net_type_string not in valid_net_type_strings:
        error_string = (
            '\nThis script does not work for net type "{0:s}".  Works only for '
            'those listed below:\n{1:s}').format(net_type_string,
                                                 str(valid_net_type_strings))

        raise ValueError(error_string)

    num_examples = predictor_matrix.shape[0]
    num_heights = predictor_matrix.shape[1]
    class_activation_matrix = numpy.full((num_examples, num_heights),
                                         numpy.nan)

    for i in range(num_examples):
        if numpy.mod(i, 10) == 0:
            print('Have run Grad-CAM for {0:d} of {1:d} examples...'.format(
                i, num_examples))

        class_activation_matrix[i, :] = gradcam.run_gradcam(
            model_object=model_object,
            predictor_matrix=predictor_matrix[i, ...],
            activation_layer_name=activation_layer_name,
            vector_output_layer_name=vector_output_layer_name,
            output_neuron_indices=output_neuron_indices,
            ideal_activation=ideal_activation)

    print('Have run Grad-CAM for all {0:d} examples!\n'.format(num_examples))

    print('Writing class-activation maps to: "{0:s}"...'.format(
        output_file_name))
    gradcam.write_file(netcdf_file_name=output_file_name,
                       class_activation_matrix=class_activation_matrix,
                       example_id_strings=example_id_strings,
                       model_file_name=model_file_name,
                       activation_layer_name=activation_layer_name,
                       vector_output_layer_name=vector_output_layer_name,
                       output_neuron_indices=output_neuron_indices,
                       ideal_activation=ideal_activation)
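
The core of Grad-CAM is short: take the gradient of the target neuron with respect to the chosen activation layer, average that gradient over the height axis to get one weight per filter, and form a ReLU'd weighted sum of the feature maps. A minimal 1-D sketch for one example, assuming a Keras model_object (the real gradcam.run_gradcam presumably also maps the result back onto the full height grid):

import numpy
import tensorflow as tf
from tensorflow import keras

def run_gradcam_sketch(model_object, predictor_matrix, activation_layer_name,
                       vector_output_layer_name, output_neuron_indices):
    """Core of Grad-CAM for one example (illustrative 1-D sketch)."""

    grad_model = keras.Model(
        inputs=model_object.input,
        outputs=[
            model_object.get_layer(name=activation_layer_name).output,
            model_object.get_layer(name=vector_output_layer_name).output
        ]
    )

    predictor_tensor = tf.convert_to_tensor(
        predictor_matrix[numpy.newaxis, ...], dtype=tf.float32
    )

    with tf.GradientTape() as tape:
        feature_tensor, prediction_tensor = grad_model(predictor_tensor)
        index_tuple = (
            (slice(None),) + tuple(int(k) for k in output_neuron_indices)
        )
        target_tensor = prediction_tensor[index_tuple]

    # One weight per filter: the gradient averaged over the height axis.
    gradient_tensor = tape.gradient(target_tensor, feature_tensor)
    filter_weights = tf.reduce_mean(gradient_tensor, axis=(0, 1))

    # Class-activation profile: ReLU of the weighted sum of feature maps.
    class_activations = tf.nn.relu(
        tf.reduce_sum(feature_tensor[0, ...] * filter_weights, axis=-1)
    )

    return class_activations.numpy()
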
Example #7
def _run(model_file_name, example_file_name, num_examples, example_dir_name,
         example_id_file_name, activation_layer_name, vector_output_layer_name,
         ideal_activation, output_file_name):
    """Runs Grad-CAM for each example and target variable.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param example_file_name: Same.
    :param num_examples: Same.
    :param example_dir_name: Same.
    :param example_id_file_name: Same.
    :param activation_layer_name: Same.
    :param vector_output_layer_name: Same.
    :param ideal_activation: Same.
    :param output_file_name: Same.
    :raises: ValueError: if neural-net type is not CNN or U-net.
    """

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = neural_net.read_model(model_file_name)

    metafile_name = neural_net.find_metafile(
        model_dir_name=os.path.split(model_file_name)[0],
        raise_error_if_missing=True
    )

    print('Reading metadata from: "{0:s}"...'.format(metafile_name))
    metadata_dict = neural_net.read_metafile(metafile_name)
    generator_option_dict = metadata_dict[neural_net.TRAINING_OPTIONS_KEY]

    net_type_string = metadata_dict[neural_net.NET_TYPE_KEY]
    valid_net_type_strings = [
        neural_net.CNN_TYPE_STRING, neural_net.U_NET_TYPE_STRING
    ]

    if net_type_string not in valid_net_type_strings:
        error_string = (
            '\nThis script does not work for net type "{0:s}".  Works only for '
            'those listed below:\n{1:s}'
        ).format(net_type_string, str(valid_net_type_strings))

        raise ValueError(error_string)

    predictor_matrix, _, example_id_strings = (
        misc_utils.get_examples_for_inference(
            model_metadata_dict=metadata_dict,
            example_file_name=example_file_name,
            num_examples=num_examples, example_dir_name=example_dir_name,
            example_id_file_name=example_id_file_name
        )
    )
    print(SEPARATOR_STRING)

    vector_target_names = (
        generator_option_dict[neural_net.VECTOR_TARGET_NAMES_KEY]
    )
    heights_m_agl = generator_option_dict[neural_net.HEIGHTS_KEY]

    dummy_example_dict = {
        example_utils.SCALAR_TARGET_NAMES_KEY: [],
        example_utils.VECTOR_TARGET_NAMES_KEY: vector_target_names,
        example_utils.HEIGHTS_KEY: heights_m_agl
    }

    num_examples = len(example_id_strings)
    num_vector_targets = len(vector_target_names)
    num_heights = len(heights_m_agl)

    class_activation_matrix = numpy.full(
        (num_examples, num_heights, num_heights, num_vector_targets), numpy.nan
    )

    for i in range(num_examples):
        print((
            'Have run Grad-CAM (all target variables) for {0:d} of {1:d} '
            'examples...'
        ).format(
            i, num_examples
        ))

        for k in range(num_vector_targets):
            for j in range(num_heights):
                these_neuron_indices = neural_net.target_var_to_neuron_indices(
                    example_dict=dummy_example_dict,
                    net_type_string=net_type_string,
                    target_name=vector_target_names[k],
                    height_m_agl=heights_m_agl[j]
                )

                class_activation_matrix[i, :, j, k] = gradcam.run_gradcam(
                    model_object=model_object,
                    predictor_matrix=predictor_matrix[i, ...],
                    activation_layer_name=activation_layer_name,
                    vector_output_layer_name=vector_output_layer_name,
                    output_neuron_indices=these_neuron_indices,
                    ideal_activation=ideal_activation
                )

    print((
        'Have run Grad-CAM (all target variables) for all {0:d} examples!\n'
    ).format(
        num_examples
    ))

    print('Writing class-activation maps to: "{0:s}"...'.format(
        output_file_name
    ))
    gradcam.write_all_targets_file(
        netcdf_file_name=output_file_name,
        class_activation_matrix=class_activation_matrix,
        example_id_strings=example_id_strings,
        model_file_name=model_file_name,
        activation_layer_name=activation_layer_name,
        vector_output_layer_name=vector_output_layer_name,
        ideal_activation=ideal_activation
    )