Example #1
def viz_outs_grads(model, layer_idx=1):
    x, y = make_data(K.int_shape(model.input), model.layers[2].units)
    grads = get_layer_gradients(model, x, y, layer_idx=layer_idx)
    kws = dict(n_rows=8, title_mode='grads')

    show_features_1D(grads[0], show_borders=False, **kws)
    show_features_2D(grads, norm=(-1e-4, 1e-4), **kws)
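
These snippets are excerpted from a test module, so the helpers they call (make_data, make_model, train_model, reset_seeds) and the backend alias K are not shown. Below is a minimal, hypothetical sketch of make_data and make_model, inferred only from how the examples use them (random data matching the model's shapes; Input -> RNN returning sequences -> RNN returning the last step); the project's real helpers may differ.

import numpy as np
from tensorflow.keras import backend as K
from tensorflow.keras.layers import Input, GRU, SimpleRNN
from tensorflow.keras.models import Model

def make_data(batch_shape, units):
    # random inputs matching the model's (batch, timesteps, channels) input,
    # and random labels matching the final layer's (batch, units) output
    x = np.random.randn(*batch_shape)
    y = np.random.randn(batch_shape[0], units)
    return x, y

def make_model(rnn_layer, batch_shape, units=6, **layer_kw):
    # Input -> RNN returning sequences -> RNN returning only the last timestep
    ipt = Input(shape=batch_shape[1:], batch_size=batch_shape[0])
    seq = rnn_layer(units, return_sequences=True, **layer_kw)(ipt)
    out = rnn_layer(units, return_sequences=False, **layer_kw)(seq)
    model = Model(ipt, out)
    model.compile('adam', 'mse')
    return model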
Example #2
def _test_outputs_gradients(model):
    x, y = make_data(K.int_shape(model.input), model.layers[2].units)
    name = model.layers[1].name
    grads_all  = get_layer_gradients(model, x, y, layer_name=name, mode='outputs')
    grads_last = get_layer_gradients(model, x, y, layer_idx=2,     mode='outputs')

    kwargs1 = dict(n_rows=None, show_xy_ticks=[0, 0], show_borders=True,
                   max_timesteps=50, title_mode='grads')
    kwargs2 = dict(n_rows=2,    show_xy_ticks=[1, 1], show_borders=False,
                   max_timesteps=None)

    show_features_1D(grads_all[0],  **kwargs1)
    show_features_1D(grads_all[:1], **kwargs1)
    show_features_1D(grads_all,     **kwargs2)
    show_features_2D(grads_all[0], norm=(-.01, .01), show_colorbar=True, **kwargs1)
    show_features_2D(grads_all,    norm=None,        reflect_half=True,  **kwargs2)
    show_features_0D(grads_last,   marker='o', color=None, title_mode='grads')
    show_features_0D(grads_last,   marker='x', color='blue', ylims=(-.1, .1))
    print('\n')  # improve separation
Example #3
def test_misc():  # test miscellaneous functionalities
    units = 6
    batch_shape = (8, 100, 2*units)

    reset_seeds(reset_graph_with_backend=K)
    model = make_model(GRU, batch_shape, activation='relu', recurrent_dropout=0.3)
    x, y = make_data(batch_shape, units)
    model.train_on_batch(x, y)

    grads = get_layer_gradients(model, x, y, layer_idx=1)

    show_features_1D(grads,    subplot_samples=True, tight=True, borderwidth=2)
    show_features_1D(grads[0], subplot_samples=True)
    show_features_2D(grads.T, n_rows=1.5, tight=True, borderwidth=2)
    show_features_2D(grads.T[:, :, 0])
    rnn_histogram(model, layer_idx=1, show_xy_ticks=[0, 0], equate_axes=2)
    rnn_heatmap(model, layer_idx=1, cmap=None, normalize=True, show_borders=False)
    rnn_heatmap(model, layer_idx=1, cmap=None, absolute_value=True)
    rnn_heatmap(model, layer_idx=1, norm=None)
    rnn_heatmap(model, layer_idx=1, norm=(-.004, .004))

    from see_rnn.inspect_gen import get_layer, _detect_nans

    get_layer(model, layer_name='gru')
    get_rnn_weights(model, layer_idx=1, concat_gates=False, as_tensors=True)
    rnn_heatmap(model, layer_idx=1, input_data=x, labels=y, mode='weights')
    _test_prefetched_data(model)

    # test NaN detection
    nan_txt = _detect_nans(np.array([1]*9999 + [np.nan])).replace('\n', ' ')
    print(nan_txt)  # case: print as quantity

    K.set_value(model.optimizer.lr, 1e12)
    train_model(model, iterations=10)
    rnn_histogram(model, layer_idx=1)
    rnn_heatmap(model, layer_idx=1)

    del model
    reset_seeds(reset_graph_with_backend=K)

    # test SimpleRNN & other
    _model = make_model(SimpleRNN, batch_shape, units=128, use_bias=False)
    train_model(_model, iterations=1)  # TF2-Keras-Graph bug workaround
    rnn_histogram(_model, layer_idx=1)  # test _pretty_hist
    K.set_value(_model.optimizer.lr, 1e50)  # SimpleRNNs seem ridiculously robust
    train_model(_model, iterations=20)
    rnn_heatmap(_model, layer_idx=1)
    data = get_rnn_weights(_model, layer_idx=1)
    rnn_heatmap(_model, layer_idx=1, input_data=x, labels=y, data=data)
    os.environ["TF_KERAS"] = '0'
    get_rnn_weights(_model, layer_idx=1, concat_gates=False)
    del _model

    assert True
    cprint("\n<< MISC TESTS PASSED >>\n", 'green')
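
test_misc also calls train_model and reset_seeds, which are likewise not shown. A rough sketch, assuming train_model simply runs train_on_batch on freshly drawn random batches and reset_seeds re-seeds the RNGs and clears the Keras session via the backend passed in; the actual utilities may behave differently.

import random
import numpy as np
import tensorflow as tf

def train_model(model, iterations):
    # fit on fresh random batches, with shapes taken from the model itself
    batch_shape = model.input_shape
    units = model.layers[2].units
    for _ in range(iterations):
        x = np.random.randn(*batch_shape)
        y = np.random.randn(batch_shape[0], units)
        model.train_on_batch(x, y)

def reset_seeds(reset_graph_with_backend=None):
    # optionally clear the Keras session, then re-seed all RNGs
    if reset_graph_with_backend is not None:
        reset_graph_with_backend.clear_session()
    random.seed(1)
    np.random.seed(1)
    tf.compat.v1.set_random_seed(1)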
Example #4
def test_errors():  # test Exception cases
    units = 6
    batch_shape = (8, 100, 2*units)

    reset_seeds(reset_graph_with_backend=K)
    model = make_model(GRU, batch_shape, activation='relu', recurrent_dropout=0.3)
    x, y = make_data(batch_shape, units)
    model.train_on_batch(x, y)

    grads = get_layer_gradients(model, x, y, layer_idx=1)
    grads_4D = np.expand_dims(grads, -1)

    from see_rnn.inspect_gen import get_layer, _make_grads_fn

    _pass_on_error(show_features_0D, grads)
    _pass_on_error(show_features_0D, grads_4D)
    _pass_on_error(show_features_1D, grads_4D)
    _pass_on_error(show_features_2D, grads_4D)
    _pass_on_error(show_features_2D, grads)
    _pass_on_error(get_layer_gradients, model, x, y, layer_idx=1, mode='cactus')
    _pass_on_error(get_layer_gradients, model, x, y, layer_idx=1,
                   layer_name='gru', layer=model.layers[1])
    _pass_on_error(_make_grads_fn, model, model.layers[1], mode='banana')
    _pass_on_error(get_layer, model)
    _pass_on_error(get_layer, model, layer_name='capsule')
    _pass_on_error(rnn_heatmap, model, layer_idx=1, input_data=x, labels=y,
                   mode='coffee')
    _pass_on_error(rnn_heatmap, model, layer_idx=1, norm=(0, 1, 2))
    _pass_on_error(rnn_heatmap, model, layer_idx=1, mode='grads')
    _pass_on_error(rnn_histogram, model, layer_idx=1, norm=None)
    _pass_on_error(rnn_heatmap, model, layer_index=9001)
    _pass_on_error(show_features_0D, grads, cake='lie')
    _pass_on_error(show_features_1D, grads, pup='not just any')
    _pass_on_error(show_features_2D, grads, true=False)
    outs = get_layer_outputs(model, x, layer_idx=1)
    _pass_on_error(rnn_histogram, model, layer_idx=1, data=outs)
    _pass_on_error(rnn_histogram, model, layer_idx=1, data=[1])
    _pass_on_error(rnn_histogram, model, layer_idx=1, data=[[1]])

    cprint("\n<< EXCEPTION TESTS PASSED >>\n", 'green')
    assert True
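
_pass_on_error is another unshown test utility; from its usage it presumably just invokes the callable and suppresses the raised exception, since the failure is the expected outcome. A minimal sketch under that assumption:

def _pass_on_error(fn, *args, **kwargs):
    # the call is expected to fail; swallow the exception so the test run continues
    try:
        fn(*args, **kwargs)
    except BaseException as e:
        print("Expected exception from %s: %r" % (getattr(fn, '__name__', fn), e))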
Example #5
def viz_outs_grads_last(model, layer_idx=2):  # return_sequences=False layer
    x, y = make_data(K.int_shape(model.input), model.layers[2].units)
    grads = get_layer_gradients(model, x, y, layer_idx=layer_idx)
    show_features_0D(grads)
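
Putting the pieces together, a hypothetical driver for Examples 1 and 5 (built on the sketched make_model and train_model helpers above, so names and shapes are assumptions rather than the project's actual setup) might look like:

from tensorflow.keras.layers import GRU

units = 6
batch_shape = (8, 100, 2 * units)
model = make_model(GRU, batch_shape, units=units)  # sketched helper
train_model(model, iterations=5)                   # sketched helper

viz_outs_grads(model, layer_idx=1)       # gradients of the return_sequences=True layer
viz_outs_grads_last(model, layer_idx=2)  # gradients of the return_sequences=False layer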