Code example #1
File: conv_layers.py Project: jelennal/t1t2
   def __init__(self, rng, rstream, index, x, 
                params, globalParams, useRglrz, bnPhase,
                filterShape, inFilters, outFilters, stride, 
                W=None, b=None, a=None, rglrzParam=None, normParam=None, normWindow=None):
                    
        ''' Class defining a convolutional layer (bc01 layout).
            # imageShape  :: (0 batch size, 1 # in feature maps, 2 in image height, 3 in image width)
            # filterShape :: (0 # out feature maps, 1 # in feature maps, 2 filter height, 3 filter width)
        '''
        # defining filter dimensions           
        filterDim = (filterShape[0], filterShape[1])     
        filterShape = (outFilters, inFilters, filterShape[0], filterShape[1]) 
        nIn = inFilters*filterDim[0]*filterDim[1] 
        nOut = outFilters*filterDim[0]*filterDim[1]
        updateBN = []
        convNonLin = params.cnnNonlin
        
        ''' 
            Defining shared variables: T1, T2, BN
        '''
                
                
        # defining shared T1 params
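        # create the weight W, bias b and scale a as shared variables, unless they were passed in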
        if W is None:
            W, b, a = t1_shared(params=params, rng=rng, index=index, nIn=nIn, nOut=nOut, 
                                outFilters=outFilters, filterShape=filterShape) 

        self.W = W
        self.b = b
        self.a = a
        if params.batchNorm and not params.aFix:
            self.paramsT1 = [W, b, a]
        else:    
            self.paramsT1 = [W, b]
            
        # defining shared T2 params      
        self.paramsT2 = []
        if rglrzParam is None:
            rglrzParam = t2_shared(params, globalParams, index, inFilters, outFilters, filterShape) 
                                                                                      
        self.rglrzParam = rglrzParam               
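        # collect only the regularization hyperparameters tuned per layer (per-network ones
        # are handled elsewhere); noise hyperparameters are kept only for conv layers that
        # actually inject noise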
        if params.useT2:
            for rglrz in params.rglrzTrain:
                if (rglrz not in params.rglrzPerNetwork) and (rglrz not in params.rglrzPerNetwork1):
                    if (rglrz not in ['inputNoise', 'addNoise']) or params.convLayers[index].noise:
                        self.paramsT2 += [rglrzParam[rglrz]]  # if trained, put param here

        # defining shared BN params
        if params.batchNorm and params.convLayers[index].bn:
            if normParam is None:
                normParam, paramsBN = bn_shared(params, outFilters, index)
            else:
                paramsBN = []  # BN params were passed in, so no new shared variables are created here
            self.normParam = normParam
            self.paramsBN = paramsBN

        # noise      
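        # pick the per-layer noise level (input noise only for the first layer) and the
        # dropout rate, if such hyperparameters exist for this layer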
        if (index == 0 and 'inputNoise' in rglrzParam.keys()):
            noiz = self.rglrzParam['inputNoise']
        elif 'addNoise' in rglrzParam.keys():
            noiz = self.rglrzParam['addNoise']
        if ('dropOut' in rglrzParam.keys()):
            drop = self.rglrzParam['dropOut']
        elif 'dropOutB' in rglrzParam.keys():
            drop = self.rglrzParam['dropOutB']

        ''' 
            x transformations: convolution, BN, noise, nonlinearity  
        '''

        # add Gaussian noise before the affine transform
        if noise_conditions(params, index, 'type0'):
            x = noiseup(x, useRglrz, noiz, params.noiseT1, params, index, rstream)
        if dropout_conditions(params, index, 'type0'):
            x = dropout(x, useRglrz, drop, params, inFilters, rstream)

        # convolution
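        # use the cuDNN convolution when cudasConv is set, otherwise fall back to Theano's
        # conv2d; both use the layer's border mode and stride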
        if cudasConv:
            convOut = cudnn.dnn_conv(x, self.W, border_mode = params.convLayers[index].border, subsample = stride)
        else: 
            convOut = nnconv.conv2d(x, self.W, subsample = stride, border_mode = params.convLayers[index].border)


        # batch normalization & scale+shift   
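        # bn_layer returns the normalized output plus updateBN, the updates to the BN
        # statistics for the given phase; the learned scale a and shift b are applied inside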
        if params.batchNorm and params.convLayers[index].bn:
            convOut, updateBN = bn_layer(convOut, self.a, self.b, self.normParam, params, bnPhase)
            self.updateBN = updateBN 
        else:
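            # no BN: just broadcast the bias over the batch and spatial dimensions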
            convOut += self.b.dimshuffle('x', 0, 'x', 'x') 

        # add Gaussian noise before the nonlinearity
        if noise_conditions(params, index, 'type1'): 
            convOut = noiseup(convOut, useRglrz, noiz, params.noiseT1, params, index, rstream)        
        # nonlinearity
        self.output = activation(convOut, convNonLin)
Code example #2
File: mlp_layer.py Project: jelennal/t1t2
    def __init__(
        self,
        rng,
        rstream,
        index,
        x,
        params,
        globalParams,
        useRglrz,
        bnPhase,
        W=None,
        b=None,
        a=None,
        rglrzParam=None,
        normParam=None,
        normWindow=None,
    ):

        """ 
            Class defining a fully connected layer.                                        
        """
        if params.model == "convnet":
            nonLin = "softmax"
            nIn = 10
            nOut = 10
        else:
            nonLin = params.activation[index]
            nIn = params.nHidden[index]
            nOut = params.nHidden[index + 1]

        """
            Initializing shared variables.
        """

        # defining shared T1 params
        if W is None:
            W, b, a = t1_shared(params, rng, index, nIn, nOut, nOut)
        self.W = W
        self.b = b
        self.a = a
        if params.batchNorm and (not params.aFix) and (nonLin != "softmax"):
            self.paramsT1 = [W, b, a]
        else:
            self.paramsT1 = [W, b]

        # defining shared T2 params
        self.paramsT2 = []
        if rglrzParam is None:
            rglrzParam = t2_shared(params, globalParams, index, nIn, nOut)

        self.rglrzParam = rglrzParam
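        # collect only the regularization hyperparameters tuned per layer (per-network ones
        # are handled elsewhere); additive noise is never trained for the softmax output layer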
        if params.useT2:
            for rglrz in params.rglrzTrain:
                if (rglrz not in params.rglrzPerNetwork) and (rglrz not in params.rglrzPerNetwork1):
                    if rglrz != "addNoise" or nonLin != "softmax":
                        self.paramsT2 += [rglrzParam[rglrz]]  # if trained, put param here

        # defining shared BN params
        if params.batchNorm and nonLin != "softmax":
            if normParam is None:
                normParam, paramsBN = bn_shared(params, nOut, index)
            else:
                paramsBN = []  # BN params were passed in, so no new shared variables are created here
            self.normParam = normParam
            self.paramsBN = paramsBN

        # noise
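        # pick the per-layer noise level (input noise only for the first layer) and the
        # dropout rate, if such hyperparameters exist for this layer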
        if index == 0 and "inputNoise" in rglrzParam.keys():
            noiz = self.rglrzParam["inputNoise"]
        elif "addNoise" in rglrzParam.keys():
            noiz = self.rglrzParam["addNoise"]
        if "dropOut" in rglrzParam.keys():
            drop = self.rglrzParam["dropOut"]
        elif "dropOutB" in rglrzParam.keys():
            drop = self.rglrzParam["dropOutB"]

        """ 
            Input transformations: convolution, BN, noise, nonlinearity 
        """

        # add normal noise to x
        self.x = x
        if noise_conditions(params, index, "type0"):
            x = noiseup(x, useRglrz, noiz, params.noiseT1, params, index, rstream)
        if dropout_conditions(params, index, "type0"):
            x = dropout(x, useRglrz, drop, params, nIn, rstream)

        # affine transform
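        # the bias is added later: inside bn_layer when BN is active, explicitly below otherwise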
        xLin = T.dot(x, self.W)

        # batchnorm transform
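        # BN (with its statistics updates collected in updateBN) is applied everywhere
        # except the softmax output layer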
        if params.batchNorm and nonLin != "softmax":
            xLin, updateBN = bn_layer(xLin, self.a, self.b, self.normParam, params, bnPhase)
            self.updateBN = updateBN
        else:
            xLin += self.b

        # noise before nonlinearity
        if noise_conditions(params, index, "type1"):
            xLin = noiseup(xLin, useRglrz, noiz, params.noiseT1, params, index, rstream)
        # nonlinearity
        self.output = activation(xLin, nonLin)