def __init__(self):
    """Build a fixed fully-connected classifier: 256 -> 256 -> 128 -> 128 -> 4.

    Takes no configuration arguments — layer sizes, dropout rate (0.5) and the
    loss (FocalLoss) are hard-coded below.  Each hidden stage is
    Linear -> Batchnorm -> ReLU -> Dropout(0.5); the head is Linear(128, 4)
    followed by Sigmoid, with the loss object appended as the final "layer"
    (presumably so the framework's forward/backward pass traverses it too —
    TODO confirm against the Net/MLP base class).
    """
    super(MLP, self).__init__()
    # Kept as an attribute so the same instance can sit at the end of
    # self.layers and still be reachable as self.LossFunc.
    self.LossFunc = FocalLoss()
    self.layers = [
        Linear(256, 256), Batchnorm(256), ReLU(), Dropout(0.5),
        Linear(256, 128), Batchnorm(128), ReLU(), Dropout(0.5),
        Linear(128, 128), Batchnorm(128), ReLU(), Dropout(0.5),
        Linear(128, 4), Sigmoid(),
        self.LossFunc,
    ]
    # Collect trainable parameters from every layer that exposes a
    # `parameters` attribute, keyed by the layer object's id() so distinct
    # layers of the same shape don't collide.
    self.parameters = {}
    for l in self.layers:
        if hasattr(l, 'parameters'):
            self.parameters[id(l)] = l.parameters
class SigmoidEx(Net):
    """A tiny 1 -> 5 -> 1 network with sigmoid activations.

    Trained with MSE loss at a learning rate of 0.02.
    """

    def __init__(self):
        super().__init__(MSE, learning_rate=0.02)
        # One shared activation object and two linear stages; creation order
        # is preserved in case the Net base class registers layers as they
        # are assigned.
        self.a1 = Sigmoid(self)
        self.l1 = Linear(self, 1, 5)
        self.l2 = Linear(self, 5, 1)

    def forward_pass(self, input):
        """Push `input` through both linear layers, sigmoid after each."""
        activation = input
        for linear in (self.l1, self.l2):
            activation = self.a1.call(linear.forward(activation))
        return activation
def __init__(self):
    """Set up a 784 -> 256 -> 120 -> 64 -> 10 classifier (MNIST-shaped).

    Uses CrossEntropy loss with learning rate 0.05.  ReLu is the hidden
    activation and SoftMax the output activation; this __init__ defines no
    forward pass itself, so the wiring presumably lives in the enclosing
    class — TODO confirm.
    """
    super().__init__(CrossEntropy, learning_rate=0.05)
    # Activations first, then the four linear stages.  Assignment order is
    # kept as-is in case the Net base class registers layers on assignment.
    self.a1 = ReLu(self)
    self.a2 = SoftMax(self)
    self.l1 = Linear(self, 784, 256)
    self.l2 = Linear(self, 256, 120)
    self.l3 = Linear(self, 120, 64)
    self.l4 = Linear(self, 64, 10)
class Ex(Net):
    """A 784 -> 256 -> 120 -> 64 -> 10 classifier (MNIST-shaped).

    Hidden layers use ReLu; the output layer uses SoftMax.  Trained with
    CrossEntropy loss at a learning rate of 0.05.
    """

    def __init__(self):
        super().__init__(CrossEntropy, learning_rate=0.05)
        # Assignment order is preserved in case the Net base class registers
        # layers as attributes are set.
        self.a1 = ReLu(self)
        self.a2 = SoftMax(self)
        self.l1 = Linear(self, 784, 256)
        self.l2 = Linear(self, 256, 120)
        self.l3 = Linear(self, 120, 64)
        self.l4 = Linear(self, 64, 10)

    def forward_pass(self, input):
        """Flatten the batch to (batch, 784) and run it through the stack."""
        out = input.reshape(input.shape[0], -1)
        stages = (
            (self.l1, self.a1),
            (self.l2, self.a1),
            (self.l3, self.a1),
            (self.l4, self.a2),  # softmax only on the final layer
        )
        for linear, activation in stages:
            out = activation.call(linear.forward(out))
        return out
def layer(name, dim1, dim2, ind):
    """Factory: build a layer object from its short name.

    Args:
        name: one of "lin", "sig", "relu", "ass" ("input" is rejected —
              input layers are attached at forward-pass time, not built here).
        dim1: input dimension of the layer.
        dim2: output dimension (unused by "ass", which only takes dim1).
        ind:  layer index passed through to the layer constructor.

    Returns:
        The constructed layer instance.

    Raises:
        ValueError: for "input" or any unrecognized name.  The original code
        used `assert False`, which is silently stripped under `python -O`,
        letting unknown names fall through and return None; raising makes the
        failure unconditional.
    """
    if name == "input":
        raise ValueError("input layer must be add-hot pluged to neural net at forward pass per new input!")
    if name == "lin":
        return Linear(dim1, dim2, ind)
    if name == "sig":
        return Sigmoid(dim1, dim2, ind)
    if name == "relu":
        return ReLU(dim1, dim2, ind)
    if name == "ass":
        return AntiSaturationSig(dim1, ind)
    raise ValueError("space-layer : <{}> not implemented!".format(name))
def __init__(self):
    """Set up a 1 -> 5 -> 1 network with a Passive (identity-style?) activation.

    Uses MSE loss with learning rate 0.02.  The exact semantics of Passive
    are defined elsewhere — NOTE(review): presumably a no-op/pass-through
    activation; confirm against its implementation.  No forward pass is
    defined here, so the wiring lives in the enclosing class.
    """
    super().__init__(MSE, learning_rate=0.02)
    # Assignment order kept as-is in case Net registers layers on assignment.
    self.a1 = Passive(self)
    self.l1 = Linear(self, 1, 5)
    self.l2 = Linear(self, 5, 1)