def forward(self, *args):
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A = args[0]
    self.B = args[1]
    # Currently, A holds the batch samples, so B must match A's
    # per-sample shape and broadcast over the batch dimension.
    assert self.A.data.shape[1:] == self.B.data.shape
    C = Tensor(self.A.data * self.B.data)  # In numpy, * means element-wise multiply
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad or self.B.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    return C
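# Illustrative numpy-only sketch (separate from the forward above): the
# shape[1:] assert admits a batch A of shape (n, d) multiplied by a
# per-sample B of shape (d,), which numpy broadcasts row by row.
import numpy as np

A = np.arange(6.0).reshape(2, 3)   # batch of 2 samples, 3 features
B = np.array([10.0, 20.0, 30.0])   # one value per feature
assert A.shape[1:] == B.shape      # same check as the forward above
print(A * B)                       # B is applied to every row of A
# [[  0.  20.  60.]
#  [ 30.  80. 150.]]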
def forward(self, *args):
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A = args[0]
    self.B = args[1]
    # The operands may not have the same shape; rely on numpy
    # broadcasting instead.
    # assert self.A.data.shape == self.B.data.shape
    # If one operand does not hold an ndarray (e.g. an empty
    # placeholder), pass the other operand's data through unchanged.
    if not isinstance(self.A.data, np.ndarray):
        C = Tensor(self.B.data)
    elif not isinstance(self.B.data, np.ndarray):
        C = Tensor(self.A.data)
    else:
        C = Tensor(self.A.data + self.B.data)
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad or self.B.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    self.output_shape = C.data.shape
    return C
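# Illustrative numpy-only sketch: broadcasting handles the common
# (batch, features) + (features,) case, e.g. adding a bias vector, so
# the commented-out equal-shape assert above would be too strict.
import numpy as np

X = np.ones((2, 3))            # batch of 2 samples
b = np.array([1.0, 2.0, 3.0])  # bias broadcast over the batch
print(X + b)
# [[2. 3. 4.]
#  [2. 3. 4.]]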
def forward(self, *args):
    assert len(args) == 1
    assert isinstance(args[0], Tensor)
    self.A = args[0]
    C_data = np.sum(self.A.data, axis=self.axis)
    if isinstance(self.target_shape, tuple):
        C_data = C_data.reshape(self.target_shape)
    self.output_shape = C_data.shape
    C = Tensor(C_data)
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    # Sum is a unary op, so there is no right child.
    C.left_child = self.A
    return C
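# Illustrative numpy-only sketch: reshaping the summed result to a
# target_shape that keeps a singleton axis lets the gradient broadcast
# back to the input's shape in the backward pass.
import numpy as np

A = np.arange(6.0).reshape(2, 3)
s = np.sum(A, axis=0).reshape((1, 3))  # axis=0, target_shape=(1, 3)
print(s.shape)                         # (1, 3) rather than (3,)
grad = np.ones((1, 3))
print((grad * np.ones_like(A)).shape)  # broadcasts back to (2, 3)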
def forward(self, *args):
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A = args[0]  # Y
    self.B = args[1]  # Y_pred
    assert self.A.data.shape == self.B.data.shape
    # loss = 0.5 * sum((Y_pred - Y) ** 2) / n_samples
    n_samples = self.A.data.shape[0]
    loss_value = 0.5 * np.sum((self.B.data - self.A.data) ** 2) / n_samples
    C = Tensor(loss_value)
    C.name = self.name
    C.grad_fn = self
    # A = Y is the label, which is constant.
    self.A.requires_grad = False
    # B = Y_pred
    if self.B.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    return C
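# Illustrative numpy-only sketch: with loss = 0.5 * sum((Y_pred - Y)**2)
# / n, the gradient w.r.t. Y_pred is (Y_pred - Y) / n; the 0.5 factor
# cancels the 2 that the square brings down.
import numpy as np

Y = np.array([[1.0], [2.0]])
Y_pred = np.array([[1.5], [1.0]])
n = Y.shape[0]
loss = 0.5 * np.sum((Y_pred - Y) ** 2) / n
grad = (Y_pred - Y) / n
print(loss)  # 0.3125
print(grad)  # [[ 0.25] [-0.5 ]]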
def forward(self, *args):
    assert len(args) == 1
    assert isinstance(args[0], Tensor)
    self.A = args[0]
    # Sigmoid: f(x) = sigmoid(x)
    C = Tensor(sigmoid(self.A.data))
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    C.left_child = self.A
    # Cache the output so the backward pass can reuse it, since
    # sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)).
    self.C = C
    return C
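# Illustrative numpy-only check of the identity the cached output relies
# on: sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)); sigmoid here is
# assumed to be the usual 1 / (1 + exp(-x)).
import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

x = np.array([-2.0, 0.0, 2.0])
s = sigmoid(x)
eps = 1e-6
numeric = (sigmoid(x + eps) - sigmoid(x - eps)) / (2 * eps)
print(np.allclose(numeric, s * (1 - s)))  # True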
def forward(self, *args):
    assert len(args) == 1
    assert isinstance(args[0], Tensor)
    self.A = args[0]
    # ReLU: f(x) = max(0, x)
    # In numpy: relu(x) = x * (x > 0), relu_grad(x) = 1 * (x > 0)
    # C = Tensor(np.clip(self.A.data, a_min=0, a_max=np.Infinity))
    C = Tensor(self.A.data * (self.A.data > 0))
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    C.left_child = self.A
    return C
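# Illustrative numpy-only sketch: the (x > 0) mask is both the forward
# gate and the gradient, with the subgradient at x == 0 taken to be 0.
import numpy as np

x = np.array([-1.0, 0.0, 2.0])
print(x * (x > 0))    # [0. 0. 2.]  relu(x)
print(1.0 * (x > 0))  # [0. 0. 1.]  relu_grad(x), 0 chosen at x == 0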
def forward(self, *args):
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A = args[0]
    self.B = args[1]
    assert self.A.data.shape == self.B.data.shape
    C = Tensor(self.A.data - self.B.data)
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad or self.B.requires_grad:
        C.requires_grad = True
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    return C