def forward_propagate(self, X):
    """Run the single-layer forward pass.

    Computes Z = W·X + b for layer 0 and applies that layer's configured
    activation, caching the result in ``self.info[0]["A"]``.

    Parameters
    ----------
    X : array-like
        Input to the network; must be shaped so ``np.dot(W, X)`` is valid
        (presumably features-by-samples — TODO confirm against caller).
    """
    weight = self.get_param(0, "param", "W")
    bias = self.get_param(0, "param", "b")
    # Affine transform, then the layer's nonlinearity.
    pre_activation = np.dot(weight, X) + bias
    self.info[0]["A"] = functions_activation.activation(
        self.info[0]["activation"], pre_activation
    )
def forward_propagate(self, X):
    """Propagate ``X`` through every layer in order.

    Layer 0 consumes ``X`` directly; each subsequent layer consumes the
    activation cached by the previous layer (via ``self.get_A``). The
    activated output of layer ``i`` is stored in ``self.info[i]["A"]``.

    Parameters
    ----------
    X : array-like
        Network input; must be shaped so ``np.dot(W, X)`` is valid for
        the first layer's weight matrix (TODO confirm orientation).
    """
    for idx in range(self.nlayer):
        # Layer input: raw X for the first layer, previous activation otherwise.
        inputs = X if idx == 0 else self.get_A(idx - 1)
        weight = self.get_param(idx, "param", "W")
        bias = self.get_param(idx, "param", "b")
        # Affine transform followed by this layer's configured activation.
        pre_activation = np.dot(weight, inputs) + bias
        self.info[idx]["A"] = functions_activation.activation(
            self.info[idx]["activation"], pre_activation
        )