def test_040_objective_instance_properties():
    """
    Objective:
        Verify the layer class validates the parameters have been initialized
        before accessed.
    Expected:
        Initialization detects the access to the non-initialized parameters
        and fails.
    """
    msg = "Accessing uninitialized property of the layer must fail."
    name = random_string(np.random.randint(1, 10))

    def expect_assertion_failure(action, failure_message):
        # The layer guards uninitialized state with assertions: action()
        # must raise AssertionError. Anything else (including returning
        # normally) is a test failure.
        try:
            action()
            raise RuntimeError(failure_message)
        except AssertionError:
            pass

    for _ in range(NUM_MAX_TEST_TIMES):
        M: int = 1
        layer = CrossEntropyLogLoss(
            name=name,
            num_nodes=1,
            log_loss_function=sigmoid_cross_entropy_log_loss,
            log_level=logging.DEBUG
        )

        # ----------------------------------------------------------------
        # Properties initialized by the constructor must be readable.
        # ----------------------------------------------------------------
        try:
            if not layer.name == name:
                raise RuntimeError("layer.name == name should be true")
        except AssertionError:
            raise RuntimeError(
                "Access to name should be allowed as already initialized.")

        try:
            if not layer.M == M:
                raise RuntimeError("layer.M == M should be true")
        except AssertionError:
            raise RuntimeError(
                "Access to M should be allowed as already initialized.")

        try:
            if not isinstance(layer.logger, logging.Logger):
                raise RuntimeError(
                    "isinstance(layer.logger, logging.Logger) should be true")
        except AssertionError:
            raise RuntimeError(
                "Access to logger should be allowed as already initialized.")

        # ----------------------------------------------------------------
        # Uninitialized properties must refuse access.
        # ----------------------------------------------------------------
        expect_assertion_failure(lambda: print(layer.X), msg)
        expect_assertion_failure(lambda: setattr(layer, "X", int(1)), msg)
        expect_assertion_failure(lambda: print(layer.N), msg)
        expect_assertion_failure(lambda: print(layer.dX), msg)
        expect_assertion_failure(lambda: print(layer.Y), msg)
        expect_assertion_failure(lambda: print(layer.P), msg)
        # Planting a bogus private _Y must not make the Y property readable.
        expect_assertion_failure(
            lambda: (setattr(layer, "_Y", int(1)), print(layer.Y)), msg)
        expect_assertion_failure(lambda: print(layer.dY), msg)
        # Same for the private _dY backing the dY property.
        expect_assertion_failure(
            lambda: (setattr(layer, "_dY", int(1)), print(layer.dY)), msg)
        expect_assertion_failure(lambda: print(layer.T), msg)
        expect_assertion_failure(lambda: print(layer.L), msg)
        expect_assertion_failure(lambda: print(layer.J), msg)
        # Setting T with a non-label value must be rejected.
        expect_assertion_failure(lambda: setattr(layer, "T", float(1)), msg)
        # function() must validate its input type.
        expect_assertion_failure(
            lambda: layer.function(int(1)),
            "Invoke layer.function(int(1)) must fail.")
        # gradient() must validate its input type even after function() ran.
        expect_assertion_failure(
            lambda: (layer.function(TYPE_FLOAT(1.0)), layer.gradient(int(1))),
            "Invoke layer.gradient(int(1)) must fail.")
def test_040_objective_instantiation():
    """
    Objective:
        Verify the initialized layer instance provides its properties.
    Expected:
        * name, num_nodes, M, log_level are the same as initialized.
        * X, T, dY, objective returns what is set.
        * N, M property are provided after X is set.
        * Y, P, L properties are provided after function(X).
        * gradient(dL/dY) repeats dL/dY,
        * gradient_numerical() returns 1
    """
    name = "test_040_objective_instantiation"
    for _ in range(NUM_MAX_TEST_TIMES):
        N: int = np.random.randint(1, NUM_MAX_BATCH_SIZE)
        M: int = 1
        # For the sigmoid log loss layer the feature dimension D of X equals
        # the node number M.
        D: int = M

        layer = CrossEntropyLogLoss(
            name=name,
            num_nodes=M,
            log_loss_function=sigmoid_cross_entropy_log_loss,
            log_level=logging.DEBUG
        )

        # ----------------------------------------------------------------
        # Identity properties reflect the constructor arguments.
        # ----------------------------------------------------------------
        assert layer.name == name
        assert layer.num_nodes == layer.M == M

        layer._D = D
        assert layer.D == D

        # Setting X exposes X, N (batch size) and M (feature count).
        X = np.random.randn(N, D).astype(TYPE_FLOAT)
        layer.X = X
        assert np.array_equal(layer.X, X)
        assert layer.N == N == X.shape[0]
        assert layer.M == X.shape[1]

        layer._dX = X
        assert np.array_equal(layer.dX, X)

        T = np.random.randint(0, M, N).astype(TYPE_LABEL)
        layer.T = T
        assert np.array_equal(layer.T, T)

        # layer.function() gives the total loss L in shape ().
        # log_loss function require (X, T) in X(N, M), and T(N, M) in OHE
        # label format.
        X, T = transform_X_T(X, T)
        loss = layer.function(X)
        losses, probabilities = sigmoid_cross_entropy_log_loss(X, T)
        expected_loss = (np.sum(losses) / N).astype(TYPE_FLOAT)
        assert (
            loss.shape == ()
            and np.allclose(loss, expected_loss)
            and loss == layer.Y
        ), "After setting T, layer.function(X) generates the total loss L but %s" % loss

        # layer.function(X) sets layer.P to sigmoid_cross_entropy_log_loss(X, T)
        # P is nearly equal with sigmoid(X)
        assert (
            np.array_equal(layer.P, probabilities)
            and np.all(np.abs(layer.P - sigmoid(X)) < LOSS_DIFF_ACCEPTANCE_VALUE)
        ), (
            "layer.function(X) needs to set P as sigmoid_cross_entropy_log_loss(X, T) "
            "which is close to sigmoid(X) but layer.P=\n%s\nP=\n%s\nsigmoid(X)=%s"
            % (layer.P, probabilities, sigmoid(X))
        )

        # gradient of sigmoid cross entropy log loss layer is (P-T)/N
        G = layer.gradient()
        assert np.all(
            np.abs(G - ((probabilities - T) / N)) < GRADIENT_DIFF_ACCEPTANCE_VALUE
        ), "Gradient G needs (P-T)/N but G=\n%s\n(P-T)/N=\n%s\n" % (
            G, (probabilities - T) / N
        )

        layer.logger.debug("This is a pytest")

        # The output layer's objective defaults to the identity function.
        # pylint: disable=not-callable
        assert (
            layer.objective(np.array(1.0, dtype=TYPE_FLOAT))
            == np.array(1.0, dtype=TYPE_FLOAT)
        ), "Objective function of the output/last layer is an identity function."