def test_misc():  # test miscellaneous functionalities
    units = 6
    batch_shape = (8, 100, 2 * units)

    reset_seeds(reset_graph_with_backend=K)
    model = make_model(GRU, batch_shape, activation='relu',
                       recurrent_dropout=0.3, IMPORTS=IMPORTS)
    x, y, sw = make_data(batch_shape, units)
    model.train_on_batch(x, y, sw)

    weights_norm(model, 'gru', omit_names='bias', verbose=1)
    weights_norm(model, ['gru', 1, (1, 1)], norm_fn=np.abs)
    stats = weights_norm(model, 'gru')
    weights_norm(model, 'gru', _dict=stats)

    grads = get_gradients(model, 1, x, y)
    get_gradients(model, 1, x, y, as_dict=True)
    get_gradients(model, ['gru', 1], x, y)
    get_outputs(model, ['gru', 1], x)

    features_1D(grads, subplot_samples=True, tight=True, borderwidth=2,
                share_xy=False)
    with tempdir() as dirpath:
        features_0D(grads[0], savepath=os.path.join(dirpath, 'img.png'))
    with tempdir() as dirpath:
        features_1D(grads[0], subplot_samples=True, annotations=[1, 'pi'],
                    savepath=os.path.join(dirpath, 'img.png'))
    features_2D(grads.T, n_rows=1.5, tight=True, borderwidth=2)
    with tempdir() as dirpath:
        features_2D(grads.T[:, :, 0], norm='auto',
                    savepath=os.path.join(dirpath, 'img.png'))
    with tempdir() as dirpath:
        features_hist(grads, show_borders=False, borderwidth=1,
                      annotations=[0], show_xy_ticks=[0, 0], share_xy=(1, 1),
                      title="grads", savepath=os.path.join(dirpath, 'img.png'))
    with tempdir() as dirpath:
        features_hist_v2(list(grads[:, :4, :3]), colnames=list('abcd'),
                         show_borders=False, xlims=(-.01, .01), ylim=100,
                         borderwidth=1, show_xy_ticks=[0, 0],
                         side_annot='row', share_xy=True, title="Grads",
                         savepath=os.path.join(dirpath, 'img.png'))
    features_hist(grads, center_zero=True, xlims=(-1, 1), share_xy=0)
    features_hist_v2(list(grads[:, :4, :3]), center_zero=True,
                     xlims=(-1, 1), share_xy=(False, False))
    with tempdir() as dirpath:
        rnn_histogram(model, 1, show_xy_ticks=[0, 0], equate_axes=2,
                      savepath=os.path.join(dirpath, 'img.png'))
    rnn_histogram(model, 1, equate_axes=False,
                  configs={'tight': dict(left=0, right=1),
                           'plot': dict(color='red'),
                           'title': dict(fontsize=14)})
    rnn_heatmap(model, 1, cmap=None, normalize=True, show_borders=False)
    rnn_heatmap(model, 1, cmap=None, norm='auto', absolute_value=True)
    rnn_heatmap(model, 1, norm=None)
    with tempdir() as dirpath:
        rnn_heatmap(model, 1, norm=(-.004, .004),
                    savepath=os.path.join(dirpath, 'img.png'))

    hist_clipped(grads, peaks_to_clip=2)
    _, ax = plt.subplots(1, 1)
    hist_clipped(grads, peaks_to_clip=2, ax=ax, annot_kw=dict(fontsize=15))

    get_full_name(model, 'gru')
    get_full_name(model, 1)
    pass_on_error(get_full_name, model, 'croc')

    get_weights(model, 'gru', as_dict=False)
    get_weights(model, 'gru', as_dict=True)
    get_weights(model, 'gru/bias')
    get_weights(model, ['gru', 1, (1, 1)])
    pass_on_error(get_weights, model, 'gru/goo')

    get_weights(model, '*')
    get_gradients(model, '*', x, y)
    get_outputs(model, '*', x)

    from see_rnn.utils import _filter_duplicates_by_keys
    keys, data = _filter_duplicates_by_keys(list('abbc'), [1, 2, 3, 4])
    assert keys == ['a', 'b', 'c']
    assert data == [1, 2, 4]
    keys, data = _filter_duplicates_by_keys(list('abbc'), [1, 2, 3, 4],
                                            [5, 6, 7, 8])
    assert keys == ['a', 'b', 'c']
    assert data[0] == [1, 2, 4] and data[1] == [5, 6, 8]

    from see_rnn.inspect_gen import get_layer, detect_nans
    get_layer(model, 'gru')
    get_rnn_weights(model, 1, concat_gates=False, as_tensors=True)

    rnn_heatmap(model, 1, input_data=x, labels=y, mode='weights')
    _test_prefetched_data(model)

    # test NaN/Inf detection
    nan_txt = detect_nans(np.array([1] * 9999 + [np.nan])).replace('\n', ' ')
    print(nan_txt)  # case: print as quantity
    data = np.array([np.nan, np.inf, -np.inf, 0])
    print(detect_nans(data, include_inf=True))
    print(detect_nans(data, include_inf=False))
    data = np.array([np.inf, 0])
    print(detect_nans(data, include_inf=True))
    detect_nans(np.array([0]))

    K.set_value(model.optimizer.lr, 1e12)
    train_model(model, iterations=10)
    rnn_histogram(model, 1)
    rnn_heatmap(model, 1)

    del model
    reset_seeds(reset_graph_with_backend=K)

    # test SimpleRNN & other
    _model = make_model(SimpleRNN, batch_shape, units=128, use_bias=False,
                        IMPORTS=IMPORTS)
    train_model(_model, iterations=1)  # TF2-Keras-Graph bug workaround
    rnn_histogram(_model, 1)  # test _pretty_hist
    K.set_value(_model.optimizer.lr, 1e50)  # force NaNs
    train_model(_model, iterations=20)
    rnn_heatmap(_model, 1)
    data = get_rnn_weights(_model, 1)
    rnn_heatmap(_model, 1, input_data=x, labels=y, data=data)
    os.environ["TF_KERAS"] = '0'
    get_rnn_weights(_model, 1, concat_gates=False)
    del _model

    assert True
    cprint("\n<< MISC TESTS PASSED >>\n", 'green')
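
# The helper below is a quick-reference sketch, not part of the original suite:
# it distills the core introspection flow test_misc() exercises (build a small
# RNN, take one train step, then fetch and plot its gradients). The model
# config and shapes are illustrative assumptions, not this suite's
# make_model/make_data helpers; see_rnn's get_gradients, features_1D and
# rnn_heatmap are assumed in scope via this module's imports, as in the tests.
def _example_introspection_flow():
    import numpy as np
    from tensorflow.keras.layers import Input, GRU
    from tensorflow.keras.models import Model

    ipt = Input(batch_shape=(8, 100, 12))
    out = GRU(6, return_sequences=True)(ipt)
    model = Model(ipt, out)
    model.compile('adam', 'mse')

    x = np.random.randn(8, 100, 12)
    y = np.random.randn(8, 100, 6)
    model.train_on_batch(x, y)

    grads = get_gradients(model, 1, x, y)      # d(loss)/d(GRU output)
    features_1D(grads, subplot_samples=True)   # one line plot per channel
    rnn_heatmap(model, 1)                      # gate-wise weight heatmap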

def test_track_weight_decays():
    """This example should be able to run without error"""
    def make_model(batch_shape, layer_kw={}):
        """Conv1D autoencoder"""
        dim = batch_shape[-1]
        bdim = dim // 2

        ipt = Input(batch_shape=batch_shape)
        x   = Conv1D(dim,  8, activation='relu',   **layer_kw)(ipt)
        x   = Conv1D(bdim, 1, activation='relu',   **layer_kw)(x)  # bottleneck
        out = Conv1D(dim,  8, activation='linear', **layer_kw)(x)

        model = Model(ipt, out)
        model.compile('adam', 'mse')
        return model

    def make_data(batch_shape, n_batches):
        X = Y = np.random.randn(n_batches, *batch_shape)
        return X, Y

    ########### Train setup ###################################################
    batch_shape = (32, 15, 12)
    n_epochs = 4
    n_batches = 10
    wd = 2e-3
    layer_kw = dict(padding='same', kernel_regularizer=l2(wd))

    model = make_model(batch_shape, layer_kw)
    X, Y = make_data(batch_shape, n_batches)

    ## Train ####################
    l2_stats = {}
    for epoch in range(n_epochs):
        l2_stats[epoch] = {}
        for i, (x, y) in enumerate(zip(X, Y)):
            model.train_on_batch(x, y)
            l2_stats[epoch] = weights_norm(model, [1, 3], l2_stats[epoch],
                                           omit_names='bias', verbose=1)
        print("Epoch", epoch + 1, "finished")
    print()

    ########### Preprocess funcs ##############################################
    def _get_weight_names(model, layer_names, omit_names):
        weight_names = []
        for name in layer_names:
            layer = model.get_layer(name=name)
            for w in layer.weights:
                if not any(to_omit in w.name for to_omit in omit_names):
                    weight_names.append(w.name)
        return weight_names

    def _merge_layers_and_weights(l2_stats):
        stats_merged = []
        for stats in l2_stats.values():
            x = np.array(list(stats.values()))  # (layers, weights, stats, batches)
            x = x.reshape(-1, *x.shape[2:])     # (layers-weights, stats, batches)
            stats_merged.append(x)
        return stats_merged  # (epochs, layer-weights, stats, batches)

    ########### Plot setup ####################################################
    ylim = 5
    xlims = (.4, 1.2)
    omit_names = 'bias'
    suptitle = "wd={:.0e}".format(wd).replace('0', '')
    side_annot = "EP"
    configs = {'side_annot': dict(xy=(.9, .9))}

    layer_names = list(l2_stats[0].keys())
    weight_names = _get_weight_names(model, layer_names, omit_names)
    stats_merged = _merge_layers_and_weights(l2_stats)

    ## Plot ########
    features_hist_v2(stats_merged, colnames=weight_names, title=suptitle,
                     xlims=xlims, ylim=ylim, side_annot=side_annot,
                     pad_xticks=True, configs=configs)
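
# A minimal shape sanity-check, added as an illustrative sketch rather than
# part of the original test: it re-implements the reshape performed by
# _merge_layers_and_weights above. All dict keys and dimension sizes here
# (2 layers, 2 weights, 4 stats, 10 batches) are assumed for the demo only.
def _demo_merge_shapes():
    import numpy as np
    epoch_stats = {'conv1d':   np.zeros((2, 4, 10)),
                   'conv1d_1': np.zeros((2, 4, 10))}
    x = np.array(list(epoch_stats.values()))  # (layers, weights, stats, batches)
    merged = x.reshape(-1, *x.shape[2:])      # (layer-weights, stats, batches)
    assert merged.shape == (4, 4, 10)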