class Linear(Module):
    def __init__(self, input_size, output_size, reg=None, init=Init.Uniform):
        super(Linear, self).__init__()
        if isinstance(init, dict):
            # Restore pre-built parameters (e.g. from a saved model).
            self.bias = init['bias']
            self.weight = init['weight']
            self.param = (self.weight, self.bias)
            self.lazy = None
        else:
            # Defer parameter allocation until the layer is first used.
            self.lazy = (input_size, output_size, reg, init)
        self.input = T.matrix(dtype=dtype)

    def params(self):
        self.lazy_init()
        return self.param

    def lazy_init(self):
        if self.lazy:
            (input_size, output_size, reg, init) = self.lazy
            self.weight = SharedVar(self, 'weight',
                                    init((input_size, output_size)))
            self.bias = SharedVar(self, 'bias', init((output_size,)))
            self.param = (self.weight, self.bias)
            self.add_reg(self.weight, reg)
            self.lazy = None

    def get_output(self, input):
        self.lazy_init()
        return T.dot(input, self.weight) + self.bias

    def __str__(self):
        self.lazy_init()
        return "Linear({}->{})".format(
            *self.weight.get_value(borrow=True).shape)
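# A minimal usage sketch, not part of the library: it assumes Theano and
# NumPy are importable here and that SharedVar wraps a Theano shared
# variable (the methods above already rely on get_value working that way).
if __name__ == '__main__':
    import numpy as np
    import theano

    layer = Linear(784, 10)
    # Compiling the graph triggers lazy_init via get_output.
    forward = theano.function([layer.input], layer.get_output(layer.input))
    batch = np.zeros((32, 784), dtype=layer.input.dtype)
    print(layer)                 # Linear(784->10)
    print(forward(batch).shape)  # (32, 10)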
class Conv2D(Module):
    Full = "full"
    Valid = "valid"

    def __init__(self, prev_features, n_features, filter_height,
                 filter_width, border_mode=Valid, reg=None, init=Init.Uniform):
        super(Conv2D, self).__init__()
        if isinstance(init, dict):
            # Restore pre-built parameters (e.g. from a saved model).
            self.bias = init['bias']
            self.weight = init['weight']
            self.param = (self.weight, self.bias)
            self.mode = border_mode
            self.lazy = None
        else:
            # Defer parameter allocation until the layer is first used.
            self.lazy = (prev_features, n_features, filter_height,
                         filter_width, border_mode, reg, init)
        self.input = T.tensor4()

    def lazy_init(self):
        if self.lazy:
            (prev_features, n_features, filter_height,
             filter_width, border_mode, reg, init) = self.lazy
            # Filters are laid out as (out_maps, in_maps, height, width).
            self.weight = SharedVar(self, 'weight',
                                    init((n_features, prev_features,
                                          filter_height, filter_width)))
            self.bias = SharedVar(self, 'bias', init((n_features,)))
            self.param = (self.weight, self.bias)
            self.add_reg(self.weight, reg)
            self.mode = border_mode
            self.lazy = None

    def params(self):
        self.lazy_init()
        return self.param

    def get_output(self, input):
        self.lazy_init()
        # Broadcast the per-map bias over the batch and spatial axes.
        return conv2d(input, self.weight, border_mode=self.mode) + \
            self.bias.dimshuffle('x', 0, 'x', 'x')

    def __str__(self):
        self.lazy_init()
        shp = self.weight.get_value(borrow=True).shape
        return "Conv2D(inMap:{},outMap:{},filter:({},{}))".format(
            shp[1], shp[0], shp[2], shp[3])
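# A minimal usage sketch, not part of the library: it assumes `conv2d` above
# is theano.tensor.nnet.conv2d, so inputs are (batch, maps, height, width)
# and 'valid' mode shrinks each spatial axis by the filter size minus one.
if __name__ == '__main__':
    import numpy as np
    import theano

    conv = Conv2D(prev_features=1, n_features=8,
                  filter_height=5, filter_width=5)
    forward = theano.function([conv.input], conv.get_output(conv.input))
    images = np.zeros((4, 1, 28, 28), dtype=conv.input.dtype)
    print(conv)                   # Conv2D(inMap:1,outMap:8,filter:(5,5))
    print(forward(images).shape)  # (4, 8, 24, 24)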