def __init__(self, in_size, out_size):
    """Linear-style layer: weight and bias initialized uniformly in [-1, 1)."""
    super().__init__()
    # rand() yields values in [0, 1); shift and scale to [-1, 1).
    # Weight tensor is created first, then bias, to keep the RNG draw order.
    weight_init = 2 * (minitorch.rand((in_size, out_size)) - 0.5)
    bias_init = 2 * (minitorch.rand((out_size,)) - 0.5)
    self.weights = minitorch.Parameter(weight_init)
    self.bias = minitorch.Parameter(bias_init)
    self.out_size = out_size
def __init__(self, extra=0):
    """Module with two value parameters, one plain attribute, and `extra`
    additional named (value-less) parameters registered dynamically."""
    super().__init__()
    self.parameter_a = minitorch.Parameter(VAL_A)
    self.parameter_b = minitorch.Parameter(VAL_B)
    # Deliberately NOT a Parameter — should be invisible to parameter walks.
    self.non_parameter = 10
    # Register extra parameters by name; value None exercises the no-value path.
    for index in range(extra):
        self.add_parameter(f"extra_parameter_{index}", None)
def __init__(self, in_size, out_size, backend):
    """Linear-style layer on an explicit backend: random weights, bias fixed at 0.1."""
    super().__init__()
    self.weights = RParam(in_size, out_size, backend=backend)
    # Start the bias at a small constant (zeros + 0.1) on the requested backend.
    bias_init = minitorch.zeros((out_size,), backend=backend) + 0.1
    self.bias = minitorch.Parameter(bias_init)
    self.out_size = out_size
def test_parameter():
    """Parameter wraps a value on construction and swaps it via update()."""
    original = MockParam()
    wrapped = minitorch.Parameter(original)
    print(wrapped)
    # Construction should have flagged the wrapped mock.
    assert original.x
    replacement = MockParam()
    wrapped.update(replacement)
    # update() should flag the new value the same way.
    assert replacement.x
def RParam(*shape):
    """Random parameter of the given shape, values uniform in [-1, 1)."""
    # Map rand()'s [0, 1) range onto [-1, 1) before wrapping.
    return minitorch.Parameter(2 * (minitorch.rand(shape) - 0.5))
def RParam(*shape):
    """Random parameter on the module-level BACKEND, uniform in [-0.05, 0.05)."""
    # Small-scale init: rand() in [0, 1) shifted to [-0.5, 0.5), then scaled by 0.1.
    init = 0.1 * (minitorch.rand(shape, backend=BACKEND) - 0.5)
    return minitorch.Parameter(init)
def RParam(*shape):
    """Random parameter on the module-level BACKEND, values uniform in [-1, 1).

    Fix: removed a dead accumulator (`p = 1.0; for s in shape: p += s`) that
    summed the dimensions but was never read — it had no effect on the result.
    """
    r = 2 * (minitorch.rand(shape, backend=BACKEND) - 0.5)
    return minitorch.Parameter(r)
def __init__(self):
    """Minimal module: exactly one parameter, for parameter-enumeration tests."""
    super().__init__()
    self.parameter_a = minitorch.Parameter(VAL_A)
def __init__(self, size_a, size_b, val):
    """Composite module: two Module2 children plus one direct parameter."""
    super().__init__()
    # Child modules registered before the module's own parameter.
    self.module_a = Module2(size_a)
    self.module_b = Module2(size_b)
    self.parameter_a = minitorch.Parameter(val)
def __init__(self):
    """Leaf module holding a single constant-valued parameter."""
    super().__init__()
    self.p3 = minitorch.Parameter(15)
def __init__(self):
    """Root module: one parameter, one plain attribute, and two child modules."""
    super().__init__()
    self.p1 = minitorch.Parameter(5)
    # Plain attribute — must not show up in parameter enumeration.
    self.non_param = 10
    # Children registered after the local parameter.
    self.a = ModuleA2()
    self.b = ModuleA3()
def __init__(self):
    """Composite module with two fixed-size Module2 children and one parameter."""
    super().__init__()
    self.module_a = Module2(5)
    self.module_b = Module2(10)
    self.parameter_a = minitorch.Parameter(VAL)
def __init__(self, extra=0):
    """Module whose single parameter is offset from VAL_A by 100.

    NOTE(review): `extra` is accepted but unused here — presumably kept for
    signature parity with a sibling module's __init__; confirm that is intended.
    """
    super().__init__()
    self.parameter_a = minitorch.Parameter(VAL_A + 100)
def RParam(*shape, backend):
    """Random parameter on the given backend, values uniform in [-0.5, 0.5)."""
    # rand() produces [0, 1); subtracting 0.5 centers the draw at zero.
    return minitorch.Parameter(minitorch.rand(shape, backend=backend) - 0.5)