def forward(self, *args):
    """Write tensor B into a sub-region of tensor A and return the result.

    The target region is identified by ``self.coordinate_tuple``; its shape
    (``self.shape``) must exactly match B's shape. The returned tensor C has
    A's full shape and is wired into the autograd graph with A and B as
    children.
    """
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A = args[0]
    self.B = args[1]
    assert self.shape == self.B.data.shape
    # BUG FIX: copy A's buffer before writing into it. set_sub_ndarray's
    # return value is unused, so it must mutate its first argument in place;
    # the original code passed self.A.data directly, silently corrupting the
    # input tensor A whenever this op ran.
    C_data = self.A.data.copy()
    set_sub_ndarray(C_data, self.B.data, self.coordinate_tuple)
    assert C_data.shape == self.A.data.shape
    C = Tensor(C_data)
    C.left_child = self.A
    C.right_child = self.B
    self.output_shape = C_data.shape
    C.grad_fn = self
    self.A.parent = C
    self.B.parent = C
    if self.A.requires_grad or self.B.requires_grad:
        C.requires_grad = True
    return C
def forward(self, *args):
    """Add two tensors, tolerating a non-ndarray operand.

    When either operand's payload is not a numpy ndarray (e.g. an
    uninitialized placeholder), the other operand's payload is passed
    through unchanged; otherwise the payloads are summed with numpy
    broadcasting, so the operand shapes need not be identical.
    """
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A, self.B = args
    lhs = self.A.data
    rhs = self.B.data
    # Pick the payload for the output: fall back to the "real" ndarray when
    # one side holds no array data, else add with broadcasting.
    if not isinstance(lhs, np.ndarray):
        C = Tensor(rhs)
    elif not isinstance(rhs, np.ndarray):
        C = Tensor(lhs)
    else:
        C = Tensor(lhs + rhs)
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad or self.B.requires_grad:
        C.requires_grad = True
    # Wire the autograd graph links in both directions.
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    self.output_shape = C.data.shape
    return C
def forward(self, *args):
    """Element-wise product of a batch tensor A and a sample-shaped tensor B.

    A's trailing dimensions must equal B's shape, so numpy broadcasts B
    across A's leading (batch) axis during the multiply.
    """
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A, self.B = args
    # Currently A carries the batch samples; each sample must match B.
    assert self.A.data.shape[1:] == self.B.data.shape
    # numpy's '*' is element-wise (Hadamard) multiplication, not matmul.
    product = self.A.data * self.B.data
    C = Tensor(product)
    C.name = self.name
    C.grad_fn = self
    if self.A.requires_grad or self.B.requires_grad:
        C.requires_grad = True
    # Wire the autograd graph links in both directions.
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    return C
def forward(self, *args):
    """Mean squared error loss between labels Y and predictions Y_pred.

    Computes ``0.5 * sum((Y_pred - Y)**2) / batch_size`` where batch_size
    is the leading dimension of Y. Returns a scalar loss tensor wired into
    the autograd graph; gradients never flow into the label tensor.
    """
    assert len(args) == 2
    assert isinstance(args[0], Tensor)
    assert isinstance(args[1], Tensor)
    self.A = args[0]  # Y: ground-truth labels
    self.B = args[1]  # Y_pred: model predictions
    assert self.A.data.shape == self.B.data.shape
    batch_size = self.A.data.shape[0]
    residual = self.B.data - self.A.data
    loss_value = 0.5 * np.sum(residual ** 2) / batch_size
    C = Tensor(loss_value)
    C.name = self.name
    C.grad_fn = self
    # The label tensor is constant data: force its grad flag off so
    # backward() never accumulates into it.
    self.A.requires_grad = False
    if self.B.requires_grad:
        C.requires_grad = True
    # Wire the autograd graph links in both directions.
    self.A.parent = C
    self.B.parent = C
    C.left_child = self.A
    C.right_child = self.B
    return C
def forward(self, *args):
    """Element-wise difference C = A - B of two equally shaped tensors."""
    assert len(args) == 2
    minuend, subtrahend = args
    assert isinstance(minuend, Tensor)
    assert isinstance(subtrahend, Tensor)
    self.A = minuend
    self.B = subtrahend
    assert minuend.data.shape == subtrahend.data.shape
    result = Tensor(minuend.data - subtrahend.data)
    result.name = self.name
    result.grad_fn = self
    if minuend.requires_grad or subtrahend.requires_grad:
        result.requires_grad = True
    # Wire the graph links so backward() can walk from result to operands.
    minuend.parent = result
    subtrahend.parent = result
    result.left_child = minuend
    result.right_child = subtrahend
    return result