def __init__(
    self,
    layer_weight_dims,
    layer_output_dims,
    filter_size=7,
    hidden_dim=100,
    num_classes=10,
    weight_scale=1e-3,
    reg=0.0,
    layer_config=None,
):
    """Build a stack of conv layers followed by a hidden and an output linear layer.

    Layer Id
        0 - Conv_Rect_Pool
        1 - Conv_Rect
        2 - Conv
        3 - Linear_Rect
        4 - Linear
    Input list = [2, 0, 0, 1, 1] - 2 Conv_Rect_Pool layers, 1 Linear_Rect
    and 1 Linear.

    Args:
        layer_weight_dims: one weight shape per conv layer, passed to
            ConvLayer.generateWeightsAndBias.
        layer_output_dims: one output shape per conv layer; the last entry's
            product sizes the first fully-connected layer.
        filter_size: conv filter side length; pad is derived as
            (filter_size - 1) // 2 to preserve spatial size.
        hidden_dim: width of the hidden Linear_Rect layer.
        num_classes: width of the final LinearNet output layer.
        weight_scale: std-dev scale for weight initialization.
        reg: L2 regularization strength, stored on the instance.
        layer_config: layer-type counts keyed by the table above; defaults
            to [1, 0, 0, 1, 1]. NOTE(review): currently unused by this
            constructor — confirm whether it should drive layer creation.
    """
    # Use a None sentinel instead of a mutable default list, which would be
    # shared across every call to __init__.
    if layer_config is None:
        layer_config = [1, 0, 0, 1, 1]

    self.layer_objs = []
    self.layer_output_dims = layer_output_dims
    self.layer_weight_dims = layer_weight_dims

    # Validate explicitly: assert is stripped under `python -O`.
    if len(layer_output_dims) != len(layer_weight_dims):
        raise ValueError(
            "layer_output_dims and layer_weight_dims must have the same length"
        )
    self.num_conv_layers = len(layer_output_dims)

    # Construct the Conv layers
    for layer_weight_dim in self.layer_weight_dims:
        w, b = ConvLayer.generateWeightsAndBias(layer_weight_dim, weight_scale)
        conv_obj = Conv_Rect_Pool(w, b, pad=(filter_size - 1) // 2)
        self.layer_objs.append(conv_obj)

    # Construct the FC layer
    w2, b2 = LinearNet.generateWeightsAndBias(
        np.prod(self.layer_output_dims[-1]), hidden_dim, weight_scale
    )
    lin_obj = Linear_Rect(w2, b2)

    # Construct the output layer
    w3, b3 = LinearNet.generateWeightsAndBias(hidden_dim, num_classes, weight_scale)
    lin_obj_2 = LinearNet(w3, b3)

    self.layer_objs.append(lin_obj)
    self.layer_objs.append(lin_obj_2)
    self.reg = reg
def __init__(self, w, b):
    """Wrap the affine parameters (w, b) in an inner LinearNet."""
    # The two assignments are independent; forward cache starts empty.
    self.lin_obj = LinearNet(w, b)
    self._fwdcache = None
class Linear_Rect(BaseLayer, BaseNeuron):
    """Fully-connected layer followed by a ReLU nonlinearity.

    Composes a LinearNet (affine transform) with a RectifierLinearUnit
    (activation). Weights, bias, and their gradients are proxied through
    properties to the inner LinearNet so callers can treat this object
    like a single layer.
    """

    def __init__(self, w, b):
        self._fwdcache = None
        self.lin_obj = LinearNet(w, b)

    @property
    def cache(self):
        # Forward-pass cache slot required by the layer interface.
        return self._fwdcache

    @cache.setter
    def cache(self, newvalue):
        self._fwdcache = newvalue

    @property
    def W(self):
        return self.lin_obj.W

    @W.setter
    def W(self, newValue):
        self.lin_obj.W = newValue

    @property
    def b(self):
        return self.lin_obj.b

    @b.setter
    def b(self, newValue):
        self.lin_obj.b = newValue

    @property
    def dw(self):
        return self.lin_obj.dw

    @dw.setter
    def dw(self, newValue):
        self.lin_obj.dw = newValue

    @property
    def db(self):
        return self.lin_obj.db

    @db.setter
    def db(self, newValue):
        self.lin_obj.db = newValue

    def forward(self, x):
        """Affine transform followed by ReLU; returns the activated output."""
        out = self.lin_obj.forward(x)  # x is cached inside this object. w,b - Refer to w and b.
        self.rect_obj = RectifierLinearUnit()  # out is cached inside this object
        return self.rect_obj.forward(out)

    def backward(self, dout):
        """Backprop dout through the ReLU, then the affine layer."""
        dx = self.rect_obj.backward(dout)
        out = self.lin_obj.backward(dx)
        return out

    @staticmethod
    def generateWeightsAndBias(*args, **kwargs):
        # @staticmethod fixes instance-level calls: without it, calling
        # obj.generateWeightsAndBias(...) would pass the instance as the
        # first positional argument to LinearNet's initializer.
        return LinearNet.generateWeightsAndBias(*args, **kwargs)

    def printW(self):
        print(self.W)

    def printDimensions(self):
        print("W:{}\tb:{}".format(self.lin_obj.W.shape, self.lin_obj.b.shape))
def generateWeightsAndBias(*init_args, **init_kwargs):
    """Delegate weight/bias initialization to LinearNet unchanged."""
    return LinearNet.generateWeightsAndBias(*init_args, **init_kwargs)
class Linear_Rect(BaseLayer, BaseNeuron):
    """Fully-connected layer followed by a ReLU nonlinearity.

    Composes a LinearNet (affine transform) with a RectifierLinearUnit
    (activation). Weights, bias, and their gradients are proxied through
    properties to the inner LinearNet so callers can treat this object
    like a single layer.
    """

    def __init__(self, w, b):
        self._fwdcache = None
        self.lin_obj = LinearNet(w, b)

    @property
    def cache(self):
        # Forward-pass cache slot required by the layer interface.
        return self._fwdcache

    @cache.setter
    def cache(self, newvalue):
        self._fwdcache = newvalue

    @property
    def W(self):
        return self.lin_obj.W

    @W.setter
    def W(self, newValue):
        self.lin_obj.W = newValue

    @property
    def b(self):
        return self.lin_obj.b

    @b.setter
    def b(self, newValue):
        self.lin_obj.b = newValue

    @property
    def dw(self):
        return self.lin_obj.dw

    @dw.setter
    def dw(self, newValue):
        self.lin_obj.dw = newValue

    @property
    def db(self):
        return self.lin_obj.db

    @db.setter
    def db(self, newValue):
        self.lin_obj.db = newValue

    def forward(self, x):
        """Affine transform followed by ReLU; returns the activated output."""
        out = self.lin_obj.forward(x)  # x is cached inside this object. w,b - Refer to w and b.
        self.rect_obj = RectifierLinearUnit()  # out is cached inside this object
        return self.rect_obj.forward(out)

    def backward(self, dout):
        """Backprop dout through the ReLU, then the affine layer."""
        dx = self.rect_obj.backward(dout)
        out = self.lin_obj.backward(dx)
        return out

    @staticmethod
    def generateWeightsAndBias(*args, **kwargs):
        # @staticmethod fixes instance-level calls: without it, calling
        # obj.generateWeightsAndBias(...) would pass the instance as the
        # first positional argument to LinearNet's initializer.
        return LinearNet.generateWeightsAndBias(*args, **kwargs)

    def printW(self):
        print(self.W)

    def printDimensions(self):
        print("W:{}\tb:{}".format(self.lin_obj.W.shape, self.lin_obj.b.shape))