def backwardada(self, D, lr, mu, nest, l1, l2):
    # Backward pass with a plain SGD update; mu, nest, l1 and l2 are
    # accepted for interface parity but unused here.
    derivterm = D * self.Act_derivative(self.Z)             # dL/dH
    Dout = np.empty((D.shape[0], self.A.shape[1]))          # dL/dA
    padrows = int(np.ceil((self.weights.shape[0] - 1) / 2))
    # Zero-pad the upstream gradient (for dL/dA) and the cached input
    # (for dL/dW) so the 'valid' correlations return full-length results.
    padder = np.vstack((np.zeros((padrows, derivterm.shape[1])),
                        derivterm,
                        np.zeros((padrows, derivterm.shape[1]))))
    padA = np.vstack((np.zeros((padrows, self.A.shape[1])),
                      self.A,
                      np.zeros((padrows, self.A.shape[1]))))
    for k in range(self.weights.shape[1]):                  # input channels
        # dL/dA: correlate the padded gradient with the tap-reversed
        # filters, summing over the filter axis.
        Dout[:, k] = SC(padder, self.weights[::-1, k, :], mode='valid').reshape(-1)
        for l in range(self.weights.shape[2]):              # output filters
            # dL/dW: the input must be padded here, otherwise this 'valid'
            # correlation collapses to a single scalar broadcast over all taps.
            self.weights[:, k, l] -= lr * SC(padA[:, k], derivterm[:, l], mode="valid")
    self.biases -= lr * np.sum(derivterm, axis=0)
    return Dout
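# For reference, the identities implemented above (a hedged sketch; indexing
# assumes odd filtersize and 'same' zero padding, with the cross-correlation
# forward pass H[n,l] = sum_{m,k} padA[n+m,k] * W[m,k,l] + b[l]):
#
#   dL/dW[m,k,l] = sum_n padA[n+m,k] * dL/dH[n,l]        (correlate padA with delta)
#   dL/db[l]     = sum_n dL/dH[n,l]                      (column sums of delta)
#   dL/dA[n,k]   = sum_{m,l} dL/dH[n+p-m,l] * W[m,k,l]   (convolve delta with W)
#
# where p = padrows and dL/dH = derivterm.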
def forwardnest(self, A, mu, p=1):
    # Forward pass that also (re)initialises the parameters with
    # Glorot-style scaling; note that every call re-draws the weights,
    # discarding any learned values. mu and p are unused here.
    self.A = A
    self.weights = (np.random.randn(self.filtersize, self.inputs, self.filters)
                    * np.sqrt(2 / (self.inputs + self.filters)))
    self.biases = (np.random.randn(1, self.filters)
                   * np.sqrt(2 / (self.inputs + self.filters)))
    padrows = int(np.ceil((self.weights.shape[0] - 1) / 2))
    padA = np.vstack((np.zeros((padrows, self.A.shape[1])),
                      self.A,
                      np.zeros((padrows, self.A.shape[1]))))
    self.H = np.empty((self.A.shape[0], self.filters))
    for k in range(self.filters):
        # 'Same'-padded cross-correlation of the input with filter k.
        self.H[:, k] = SC(padA, self.weights[:, :, k], mode="valid").reshape(-1)
    self.H += self.biases
    self.Z = ReLU(self.H)
    return self.Z
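# The scaling above is Glorot (Xavier) normal initialisation,
# std = sqrt(2 / (fan_in + fan_out)), with fan_in = inputs and fan_out = filters.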
def forward(self, A, p=1):
    self.A = A
    if not self.weights.size:       # lazy initialisation on first call
        self.weightinit()
    padrows = int((self.weights.shape[0] - 1) / 2)
    padA = np.vstack((np.zeros((padrows, self.A.shape[1])),
                      self.A,
                      np.zeros((padrows, self.A.shape[1]))))
    self.H = np.empty((self.A.shape[0], self.filters))
    for k in range(self.filters):
        # 'Same'-padded cross-correlation of the input with filter k.
        self.H[:, k] = SC(padA, self.weights[:, :, k], mode="valid").reshape(-1)
    self.H += self.biases
    self.Z = ReLU(self.H)
    return self.Z
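# --- Usage sketch ---
# A minimal, hedged example of how these methods might be exercised; the
# class name ConvLayer1D and its constructor are hypothetical (not from this
# file), and SC / ReLU are assumed to be scipy.signal.correlate and a
# standard rectifier, as the calls above suggest.
#
#   import numpy as np
#   layer = ConvLayer1D(filtersize=3, inputs=4, filters=8)  # hypothetical ctor
#   X = np.random.randn(100, 4)      # 100 samples/time steps, 4 input channels
#   Z = layer.forward(X)             # -> shape (100, 8), ReLU activations
#   D = np.random.randn(*Z.shape)    # upstream gradient from the next layer
#   Dout = layer.backwardada(D, lr=1e-3, mu=0.9, nest=False, l1=0.0, l2=0.0)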