def do_train():
    """Build a one-Conv2D-layer relu model and fit it with DeadReluDetector attached."""
    # Constant all-ones training input matching the expected shape.
    train_x = np.ones(input_shape)

    net = Sequential()
    net.add(
        Conv2D(
            n_out,
            (5, 5),
            activation="relu",
            input_shape=input_shape_conv,
            use_bias=True,
            weights=weights_bias,
            name="conv",
        )
    )
    # Flatten output so Theano's categorical crossentropy gets a 2-D target.
    net.add(Flatten())
    net.compile(optimizer="sgd", loss="categorical_crossentropy")
    net.fit(
        train_x,
        np.ones(shape_out),
        batch_size=1,
        epochs=1,
        callbacks=[callbacks.DeadReluDetector(train_x, verbose=verbose)],
        verbose=False,
    )
def test_DeadDeadReluDetector_activation():
    """
    Tests that using "Activation" layer does not throw error
    """
    # Minimal single-sample input/target pair.
    x = np.array([[1]])
    y = np.array([[1]])

    inputs = Input(shape=(1, ))
    outputs = Activation('relu')(inputs)
    model = Model(inputs, outputs)
    model.compile(optimizer='adadelta', loss='binary_crossentropy')
    model.fit(
        x,
        y,
        epochs=1,
        validation_data=(x, y),
        callbacks=[callbacks.DeadReluDetector(x)],
    )
def do_train():
    """Build a one-Dense-layer relu model and fit it with DeadReluDetector attached."""
    # Constant all-ones training input matching the expected shape.
    train_x = np.ones(input_shape)

    net = Sequential()
    net.add(
        Dense(
            n_out,
            activation='relu',
            input_shape=input_shape_dense,
            use_bias=False,
            weights=[weights],
            name='dense',
        )
    )
    net.compile(optimizer='sgd', loss='categorical_crossentropy')
    net.fit(
        train_x,
        np.ones(shape_out),
        batch_size=1,
        epochs=1,
        callbacks=[callbacks.DeadReluDetector(train_x, verbose=verbose)],
        verbose=False,
    )
def do_test(weights, expected_warnings, verbose):
    """Fit a tiny relu Dense model and verify DeadReluDetector's warnings.

    Trains a 10-unit bias-free Dense model on all-ones data with the given
    ``weights``, then asserts that exactly ``expected_warnings``
    RuntimeWarnings mentioning "dead neurons" were recorded.
    """
    with warnings.catch_warnings(record=True) as caught:
        train_x = np.ones((1, 1, 1))  # data to be fed as training
        net = Sequential()
        net.add(
            Dense(
                10,
                activation='relu',
                input_shape=(1, 1),
                use_bias=False,
                weights=[weights],
            )
        )
        net.compile(optimizer='sgd', loss='categorical_crossentropy')
        net.fit(
            train_x,
            np.ones((1, 1, 10)),
            epochs=1,
            callbacks=[callbacks.DeadReluDetector(train_x, verbose=verbose)],
            verbose=False,
        )
        # Every recorded warning must be the detector's dead-neuron report.
        assert len(caught) == expected_warnings
        for item in caught:
            assert issubclass(item.category, RuntimeWarning)
            assert "dead neurons" in str(item.message)