def __init__(self, name, n_inputs, n_outputs, activation=None, use_bias=True, weights=None, biases=None):
    """Build a dense (fully connected) layer.

    Args:
        name: layer name, forwarded to the base-class initializer.
        n_inputs: number of input units (weight matrix columns).
        n_outputs: number of output units (weight matrix rows).
        activation: one of
            - None            -> defaults to a "tanh" Activation,
            - a registered Activation object (used as-is),
            - a dict          -> passed as kwargs to Activation(**dict),
            - a str           -> treated as Activation(class_name=str).
        use_bias: whether the layer applies its bias vector.
        weights: optional preset weights, ndarray of shape (n_outputs, n_inputs).
        biases: optional preset biases, ndarray of shape (n_outputs, 1).

    Raises:
        ValueError: if `activation` is not a recognized specification, or a
            preset array has the wrong shape.
        TypeError: if a preset `weights`/`biases` is not an ndarray.
    """
    super().__init__(name)
    self.n_inputs = n_inputs
    self.n_outputs = n_outputs
    self.use_bias = use_bias

    # Normalize the activation argument into an Activation instance.
    if activation is None:
        activation = Activation.getInitialized("tanh")
    elif not Activation.isObjectRegistered(activation):
        if isinstance(activation, dict):
            activation = Activation(**activation)
        elif isinstance(activation, str):
            activation = Activation(class_name=activation)
        else:
            # ValueError (a subclass of Exception) instead of bare Exception,
            # so existing `except Exception` handlers still catch it.
            raise ValueError("{} is not a "
                             "registered activation. Use {}".format(activation, Activation.registeredClasses()))
    self.activation = activation

    if weights is None:
        # Random init, uniform in [-1, 1).
        self.weights = (np.random.random((n_outputs, n_inputs)) * 2 - 1)
    else:
        # Explicit raises instead of `assert`: asserts are stripped under -O,
        # which would let malformed presets through silently.
        if not isinstance(weights, np.ndarray):
            raise TypeError("weights must be a numpy ndarray")
        if weights.shape != (n_outputs, n_inputs):
            raise ValueError("weights must have shape {}, got {}".format((n_outputs, n_inputs), weights.shape))
        self.weights = weights

    if biases is None:
        # Random init, uniform in [-1, 1), then scaled down so the
        # effective range is (-0.001, 0.001) — small initial biases.
        self.biases = (np.random.random((n_outputs, 1)) * 2 - 1) * 0.001
    else:
        if not isinstance(biases, np.ndarray):
            raise TypeError("biases must be a numpy ndarray")
        if biases.shape != (n_outputs, 1):
            raise ValueError("biases must have shape {}, got {}".format((n_outputs, 1), biases.shape))
        self.biases = biases

    # Mutation mask ... create only once (same shape as the weights).
    self.mutation_mask = np.zeros_like(self.weights)
# NOTE(review): the lines below are the tail of a setter method whose beginning
# lies outside this chunk (presumably an if/elif dispatching between
# setWeights/setBiases). Indentation reconstructed — confirm against the full file.
        self.setBiases(arr)
    else:
        raise Exception()


# Smoke test: build a 4-layer dense network (14 -> 16 -> 16 -> 16 -> 4, all ReLU,
# first layer without bias), then dump the model state plus its weight/bias
# arrays into a compressed .npz archive.
if __name__ == "__main__":
    layer = Layer("dense", "input_lay2er", 14, 16, use_bias=False, activation=Activation.getInitialized("relu"))
    layer2 = Layer("dense", "input_lay22er", 16, 16, activation=Activation.getInitialized("relu"))
    layer3 = Layer("dense", "input_lay222er", 16, 16, activation=Activation.getInitialized("relu"))
    layer4 = Layer("dense", "output_layer", 16, 4, activation=Activation.getInitialized("relu"))
    model = SequentialModel([layer, layer2, layer3, layer4])
    # presumably random inputs drawn uniformly from [-1, 1] — verify against
    # generateRandomInputs' signature.
    inputs = model.generateRandomInputs(-1, 1)
    # Persist both the pickled-style state dict and the raw arrays side by side.
    np.savez_compressed(r"C:\tmp\sequence_test.npz", state=model.__getstate__(), **model.getArrs())
    #model2 = model.duplicate(True)
    #model3 = model.duplicate(True)
    #print (model.compute(inputs), model2.compute(inputs), model3.compute(inputs))