Example #1
    def _processInputWithBnNonLinearityDropoutPooling(
            self,
            rng,
            inputToLayerTrain,
            inputToLayerVal,
            inputToLayerTest,
            inputToLayerShapeTrain,
            inputToLayerShapeVal,
            inputToLayerShapeTest,
            useBnFlag,  # Must be True to apply BN. Used to disallow BN on the first layers, straight on the image, even if rollingAvForBnOverThatManyBatches > 0.
            rollingAverageForBatchNormalizationOverThatManyBatches,  # If this is <= 0, batch normalization is not used, even if the flag above is True.
            activationFunc,
            dropoutRate):
        # ---------------- Order of what is applied -----------------
        # Input -> [BatchNorm OR biases] -> NonLinearity -> Dropout -> Pooling -> Conv
        # as in He et al., "Identity Mappings in Deep Residual Networks", 2016.
        # ------------------------------------------------------------

        #---------------------------------------------------------
        #------------------ Batch Normalization ------------------
        #---------------------------------------------------------
        if useBnFlag and rollingAverageForBatchNormalizationOverThatManyBatches > 0:
            self._appliedBnInLayer = True
            self._rollingAverageForBatchNormalizationOverThatManyBatches = rollingAverageForBatchNormalizationOverThatManyBatches
            (
                inputToNonLinearityTrain,
                inputToNonLinearityVal,
                inputToNonLinearityTest,
                self._gBn,
                self._b,
                # For rolling average :
                self._muBnsArrayForRollingAverage,
                self._varBnsArrayForRollingAverage,
                self._sharedNewMu_B,
                self._sharedNewVar_B,
                self._newMu_B,
                self._newVar_B) = applyBn(
                    rollingAverageForBatchNormalizationOverThatManyBatches,
                    inputToLayerTrain, inputToLayerVal, inputToLayerTest,
                    inputToLayerShapeTrain)
            self.params = self.params + [self._gBn, self._b]
        else:  #Not using batch normalization
            self._appliedBnInLayer = False
            # Create the bias terms and apply them, as was standard before BN introduced its own learned bias (beta) term.
            numberOfInputChannels = inputToLayerShapeTrain[1]

            (self._b, inputToNonLinearityTrain, inputToNonLinearityVal,
             inputToNonLinearityTest) = makeBiasParamsAndApplyToFms(
                 inputToLayerTrain, inputToLayerVal, inputToLayerTest,
                 numberOfInputChannels)
            self.params = self.params + [self._b]

        #--------------------------------------------------------
        #------------ Apply Activation/ non-linearity -----------
        #--------------------------------------------------------
        self._activationFunctionType = activationFunc
        if self._activationFunctionType == "linear":  # -1 stands for "no nonlinearity". Used for input layers of the pathway.
            (inputToDropoutTrain, inputToDropoutVal,
             inputToDropoutTest) = (inputToNonLinearityTrain,
                                    inputToNonLinearityVal,
                                    inputToNonLinearityTest)
        elif self._activationFunctionType == "relu":
            (inputToDropoutTrain, inputToDropoutVal,
             inputToDropoutTest) = applyRelu(inputToNonLinearityTrain,
                                             inputToNonLinearityVal,
                                             inputToNonLinearityTest)
        elif self._activationFunctionType == "prelu":
            numberOfInputChannels = inputToLayerShapeTrain[1]
            (self._aPrelu, inputToDropoutTrain, inputToDropoutVal,
             inputToDropoutTest) = applyPrelu(inputToNonLinearityTrain,
                                              inputToNonLinearityVal,
                                              inputToNonLinearityTest,
                                              numberOfInputChannels)
            self.params = self.params + [self._aPrelu]
        elif self._activationFunctionType == "elu":
            (inputToDropoutTrain, inputToDropoutVal,
             inputToDropoutTest) = applyElu(inputToNonLinearityTrain,
                                            inputToNonLinearityVal,
                                            inputToNonLinearityTest)
        elif self._activationFunctionType == "selu":
            (inputToDropoutTrain, inputToDropoutVal,
             inputToDropoutTest) = applySelu(inputToNonLinearityTrain,
                                             inputToNonLinearityVal,
                                             inputToNonLinearityTest)

        #------------------------------------
        #------------- Dropout --------------
        #------------------------------------
        (inputToPoolTrain, inputToPoolVal, inputToPoolTest) = applyDropout(
            rng, dropoutRate, inputToLayerShapeTrain, inputToDropoutTrain,
            inputToDropoutVal, inputToDropoutTest)

        #-------------------------------------------------------
        #-----------  Pooling ----------------------------------
        #-------------------------------------------------------
        if self._poolingParameters == []:  # no max-pooling before this conv
            inputToConvTrain = inputToPoolTrain
            inputToConvVal = inputToPoolVal
            inputToConvTest = inputToPoolTest

            inputToConvShapeTrain = inputToLayerShapeTrain
            inputToConvShapeVal = inputToLayerShapeVal
            inputToConvShapeTest = inputToLayerShapeTest
        else:  #Max pooling is actually happening here...
            (inputToConvTrain,
             inputToConvShapeTrain) = pool3dMirrorPad(inputToPoolTrain,
                                                      inputToLayerShapeTrain,
                                                      self._poolingParameters)
            (inputToConvVal,
             inputToConvShapeVal) = pool3dMirrorPad(inputToPoolVal,
                                                    inputToLayerShapeVal,
                                                    self._poolingParameters)
            (inputToConvTest,
             inputToConvShapeTest) = pool3dMirrorPad(inputToPoolTest,
                                                     inputToLayerShapeTest,
                                                     self._poolingParameters)

        return (inputToConvTrain, inputToConvVal, inputToConvTest,
                inputToConvShapeTrain, inputToConvShapeVal,
                inputToConvShapeTest)
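
For intuition, here is a minimal NumPy sketch of the pre-activation ordering described in the comment at the top of the method (BN -> nonlinearity -> dropout -> pooling). The function name and shapes are illustrative only, not part of the project: it stands in for applyBn/applyRelu/applyDropout/pool3dMirrorPad, uses plain ReLU for the nonlinearity, and simply crops odd spatial dims instead of the mirror-padding that pool3dMirrorPad performs.

import numpy as np

def preactivation_block(x, gamma, beta, drop_rate, rng, training=True):
    # Hypothetical helper, for illustration only.
    # x has shape (batch, channels, x, y, z), as in the method above.

    # --- Batch normalization over the batch and spatial axes ---
    axes = (0, 2, 3, 4)
    mu = x.mean(axis=axes, keepdims=True)
    var = x.var(axis=axes, keepdims=True)
    x = gamma * (x - mu) / np.sqrt(var + 1e-5) + beta

    # --- Nonlinearity (plain ReLU stands in for relu/prelu/elu/selu) ---
    x = np.maximum(x, 0.0)

    # --- Inverted dropout: scale at train time so inference needs no rescaling ---
    if training and drop_rate > 0:
        keep = 1.0 - drop_rate
        mask = rng.binomial(1, keep, size=x.shape)
        x = x * mask / keep

    # --- 2x2x2 max-pooling, stride 2, cropping odd spatial dims for brevity ---
    b, c, d, h, w = x.shape
    x = x[:, :, :d - d % 2, :h - h % 2, :w - w % 2]
    x = x.reshape(b, c, d // 2, 2, h // 2, 2, w // 2, 2).max(axis=(3, 5, 7))
    return x  # this is what would be fed into the convolution

rng = np.random.RandomState(0)
x = rng.randn(2, 4, 8, 8, 8)
gamma = np.ones((1, 4, 1, 1, 1))
beta = np.zeros((1, 4, 1, 1, 1))
print(preactivation_block(x, gamma, beta, drop_rate=0.5, rng=rng).shape)  # (2, 4, 4, 4, 4)
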
Example #2
 def _processInputWithBnNonLinearityDropoutPooling(self,
             rng,
             inputToLayerTrain,
             inputToLayerVal,
             inputToLayerTest,
             inputToLayerShapeTrain,
             inputToLayerShapeVal,
             inputToLayerShapeTest,
             useBnFlag, # Must be True to apply BN. Used to disallow BN on the first layers, straight on the image, even if movingAvForBnOverXBatches > 0.
             movingAvForBnOverXBatches, # If this is <= 0, batch normalization is not used, even if the flag above is True.
             activationFunc,
             dropoutRate) :
     # ---------------- Order of what is applied -----------------
     # Input -> [BatchNorm OR biases] -> NonLinearity -> Dropout -> Pooling -> Conv
     # as in He et al., "Identity Mappings in Deep Residual Networks", 2016.
     # ------------------------------------------------------------
     
     #---------------------------------------------------------
     #------------------ Batch Normalization ------------------
     #---------------------------------------------------------
     if useBnFlag and movingAvForBnOverXBatches > 0 :
         self._appliedBnInLayer = True
         self._movingAvForBnOverXBatches = movingAvForBnOverXBatches
         (inputToNonLinearityTrain,
         inputToNonLinearityVal,
         inputToNonLinearityTest,
         self._gBn,
         self._b,
         # For rolling average :
         self._muBnsArrayForRollingAverage,
         self._varBnsArrayForRollingAverage,
         self._sharedNewMu_B,
         self._sharedNewVar_B,
         self._newMu_B,
         self._newVar_B
         ) = applyBn( movingAvForBnOverXBatches, inputToLayerTrain, inputToLayerVal, inputToLayerTest, inputToLayerShapeTrain)
         self.params = self.params + [self._gBn, self._b]
         # Create ops that write the current batch's BN statistics into the rolling-average buffers used at inference.
         self._op_update_mtrx_bn_inf_mu = tf.assign( self._muBnsArrayForRollingAverage[self._tf_plchld_int32], self._sharedNewMu_B )
         self._op_update_mtrx_bn_inf_var = tf.assign( self._varBnsArrayForRollingAverage[self._tf_plchld_int32], self._sharedNewVar_B )
 
     else : #Not using batch normalization
         self._appliedBnInLayer = False
         # Create the bias terms and apply them, as was standard before BN introduced its own learned bias (beta) term.
         numberOfInputChannels = inputToLayerShapeTrain[1]
         
         (self._b,
         inputToNonLinearityTrain,
         inputToNonLinearityVal,
         inputToNonLinearityTest) = makeBiasParamsAndApplyToFms( inputToLayerTrain, inputToLayerVal, inputToLayerTest, numberOfInputChannels )
         self.params = self.params + [self._b]
         
     #--------------------------------------------------------
     #------------ Apply Activation/ non-linearity -----------
     #--------------------------------------------------------
     self._activationFunctionType = activationFunc
     if self._activationFunctionType == "linear" : # -1 stands for "no nonlinearity". Used for input layers of the pathway.
         ( inputToDropoutTrain, inputToDropoutVal, inputToDropoutTest ) = (inputToNonLinearityTrain, inputToNonLinearityVal, inputToNonLinearityTest)
     elif self._activationFunctionType == "relu" :
         ( inputToDropoutTrain, inputToDropoutVal, inputToDropoutTest ) = applyRelu(inputToNonLinearityTrain, inputToNonLinearityVal, inputToNonLinearityTest)
     elif self._activationFunctionType == "prelu" :
         numberOfInputChannels = inputToLayerShapeTrain[1]
         ( self._aPrelu, inputToDropoutTrain, inputToDropoutVal, inputToDropoutTest ) = applyPrelu(inputToNonLinearityTrain, inputToNonLinearityVal, inputToNonLinearityTest, numberOfInputChannels)
         self.params = self.params + [self._aPrelu]
     elif self._activationFunctionType == "elu" :
         ( inputToDropoutTrain, inputToDropoutVal, inputToDropoutTest ) = applyElu(inputToNonLinearityTrain, inputToNonLinearityVal, inputToNonLinearityTest)
     elif self._activationFunctionType == "selu" :
         ( inputToDropoutTrain, inputToDropoutVal, inputToDropoutTest ) = applySelu(inputToNonLinearityTrain, inputToNonLinearityVal, inputToNonLinearityTest)
         
     #------------------------------------
     #------------- Dropout --------------
     #------------------------------------
     (inputToPoolTrain, inputToPoolVal, inputToPoolTest) = applyDropout(rng, dropoutRate, inputToLayerShapeTrain, inputToDropoutTrain, inputToDropoutVal, inputToDropoutTest)
     
     #-------------------------------------------------------
     #-----------  Pooling ----------------------------------
     #-------------------------------------------------------
     if self._poolingParameters == [] : # no max-pooling before this conv
         inputToConvTrain = inputToPoolTrain
         inputToConvVal = inputToPoolVal
         inputToConvTest = inputToPoolTest
         
         inputToConvShapeTrain = inputToLayerShapeTrain
         inputToConvShapeVal = inputToLayerShapeVal
         inputToConvShapeTest = inputToLayerShapeTest
     else : #Max pooling is actually happening here...
         (inputToConvTrain, inputToConvShapeTrain) = pool3dMirrorPad(inputToPoolTrain, inputToLayerShapeTrain, self._poolingParameters)
         (inputToConvVal, inputToConvShapeVal) = pool3dMirrorPad(inputToPoolVal, inputToLayerShapeVal, self._poolingParameters)
         (inputToConvTest, inputToConvShapeTest) = pool3dMirrorPad(inputToPoolTest, inputToLayerShapeTest, self._poolingParameters)
         
     return (inputToConvTrain, inputToConvVal, inputToConvTest,
             inputToConvShapeTrain, inputToConvShapeVal, inputToConvShapeTest )
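
The TensorFlow-era variant above additionally keeps a buffer of per-batch BN means and variances, plus two tf.assign ops that write the newest batch statistics into one slot of that buffer, indexed by a placeholder; the validation/test branches then use the average over the buffer. Below is a minimal NumPy sketch of that bookkeeping, assuming the circular-buffer semantics implied by the comments (the class and argument names are illustrative, not the project's):

import numpy as np

class RollingBnStats:
    # Hypothetical helper, for illustration only.
    def __init__(self, n_batches, n_channels):
        self.mu_buf = np.zeros((n_batches, n_channels))   # cf. muBnsArrayForRollingAverage
        self.var_buf = np.ones((n_batches, n_channels))   # cf. varBnsArrayForRollingAverage
        self.idx = 0  # plays the role of the _tf_plchld_int32 index

    def update(self, new_mu, new_var):
        # Overwrite one slot of the circular buffer, as the
        # tf.assign(buffer[index], new_stat) ops above do after each batch.
        self.mu_buf[self.idx] = new_mu
        self.var_buf[self.idx] = new_var
        self.idx = (self.idx + 1) % self.mu_buf.shape[0]

    def inference_stats(self):
        # Average over the buffer: the statistics the BN inference
        # branches would use in place of the current batch's.
        return self.mu_buf.mean(axis=0), self.var_buf.mean(axis=0)

stats = RollingBnStats(n_batches=60, n_channels=8)
stats.update(new_mu=np.zeros(8), new_var=np.ones(8))
mu_inf, var_inf = stats.inference_stats()
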