# Example 1
    def __init__(self, rs):
        """Allocate weight/bias containers for a feed-forward regression net.

        Input:  -rs: setup object providing inputDim, outputDim,
                 hiddenLayers (list of [activationName, width] pairs) and
                 outputLayer (activation name for the output layer).
        """
        regression.__init__(self, rs)

        self.activation = []  # activation callable per connection
        self.W = []           # weight matrix per connection
        self.b = []           # bias vector per connection
        self.size = 0         # total number of scalar parameters

        if len(rs.hiddenLayers) == 0:
            # No hidden layer: direct input -> output connection.
            self.W.append(np.empty((rs.inputDim, rs.outputDim)))
            self.b.append(np.empty(rs.outputDim))
            self.activation.append(layersDict[rs.outputLayer])
            self.size = rs.inputDim * rs.outputDim + rs.outputDim
        else:
            # Hidden layers.
            precDim = rs.inputDim
            for layer in rs.hiddenLayers:
                self.W.append(np.empty((precDim, layer[1])))
                self.b.append(np.empty(layer[1]))
                self.activation.append(layersDict[layer[0]])
                # BUG FIX: accumulate the parameter count with the *incoming*
                # dimension before overwriting precDim. The original updated
                # precDim first, so every hidden layer was counted as
                # layer[1]*layer[1]+layer[1] instead of precDim*layer[1]+layer[1].
                self.size += precDim * layer[1] + layer[1]
                precDim = layer[1]

            # Output layer.
            self.W.append(np.empty((precDim, rs.outputDim)))
            self.b.append(np.empty(rs.outputDim))
            self.activation.append(layersDict[rs.outputLayer])
            self.size += precDim * rs.outputDim + rs.outputDim

        # BUG FIX: allocate theta in both branches — the original only did
        # this inside the hidden-layer branch, leaving self.theta unset for
        # a network without hidden layers.
        self.theta = np.empty(self.size)
# Example 2
    def __init__(self, rs):
        """Initialize the feature-based regression parameters.

        Input:  -rs: setup object; rs.numfeats gives the number of
                 features used to perform the regression.
        """
        # DOC FIX: the original docstring described a parameter "nbFeature"
        # that does not exist; the feature count actually comes from rs.
        regression.__init__(self, rs)
        self.nbFeat = rs.numfeats
        # theta: (nbFeat x outputDimension) weight matrix, zero-initialized.
        # outputDimension is assumed to be set by regression.__init__.
        self.theta = np.zeros((self.nbFeat, self.outputDimension))
# Example 3
    def __init__(self, rs):
        """Build a TensorFlow feed-forward regression network.

        Creates input/target placeholders, one weight/bias variable pair
        per connection, a mean-squared-error loss minimized with Adam,
        then opens a session and initializes all variables.
        (Uses the legacy TF 0.x API: tf.sub, tf.initialize_all_variables.)
        """
        regression.__init__(self, rs)
        self.learningRate = rs.learningRate
        self.momentum = rs.momentum
        self.nbW = 0
        self.nbB = 0

        # Network inputs and regression targets.
        self.x = tf.placeholder(tf.float32, shape=[None, rs.inputDim])
        self.y_ = tf.placeholder(tf.float32, shape=[None, rs.outputDim])

        if len(rs.hiddenLayers) == 0:
            # No hidden layer: a single input -> output connection.
            pair = [self.weight_variable([rs.inputDim, rs.outputDim]),
                    self.bias_variable([rs.outputDim])]
            self.y = layersDict[rs.outputLayer](tf.matmul(self.x, pair[0]) + pair[1])
            self.listTheta = [pair]
            self.listBias = [pair[1]]
            self.listWeight = [pair[0]]
            self.theta = np.empty(rs.inputDim * rs.outputDim + rs.outputDim)
        else:
            self.listTheta = []
            self.listBias = []
            self.listWeight = []
            prevDim = rs.inputDim
            signal = self.x
            nbParams = 0
            # Stack the hidden layers; each spec is [activationName, width].
            for spec in rs.hiddenLayers:
                pair = [self.weight_variable([prevDim, spec[1]]),
                        self.bias_variable([spec[1]])]
                self.listTheta.append(pair)
                self.listBias.append(pair[1])
                self.listWeight.append(pair[0])
                signal = layersDict[spec[0]](tf.matmul(signal, pair[0]) + pair[1])
                nbParams += prevDim * spec[1] + spec[1]
                prevDim = spec[1]

            # Last hidden layer -> output layer.
            pair = [self.weight_variable([prevDim, rs.outputDim]),
                    self.bias_variable([rs.outputDim])]
            self.listTheta.append(pair)
            self.listBias.append(pair[1])
            self.listWeight.append(pair[0])
            self.y = layersDict[rs.outputLayer](tf.matmul(signal, pair[0]) + pair[1])
            nbParams += prevDim * rs.outputDim + rs.outputDim
            self.theta = np.empty(nbParams)

        # Loss and training step.
        self.meanSquareError = tf.reduce_mean(tf.square(tf.sub(self.y_, self.y)))
        self.train_step = tf.train.AdamOptimizer(rs.learningRate).minimize(self.meanSquareError)

        # Session setup; the saver tracks only the trainable variables.
        self.init_op = tf.initialize_all_variables()
        self.saver = tf.train.Saver(self.listBias + self.listWeight)
        self.sess = tf.Session()
        self.sess.run(self.init_op)
        self.sess.as_default()
# Example 4
    def __init__(self, rs):
        """Build a PyBrain feed-forward network for regression.

        Input:  -rs: setup object providing inputLayer/outputLayer names,
                 inputDim/outputDim, hiddenLayers (list of
                 [layerTypeName, width]) and a bias flag.
        """
        regression.__init__(self, rs)
        self.learningRate = rs.learningRate
        self.momentum = rs.momentum

        self.net = FeedForwardNetwork()

        # Input layer.
        inLayer = layersDict[rs.inputLayer](rs.inputDim)
        self.net.addInputModule(inLayer)

        # Output layer.
        outLayer = layersDict[rs.outputLayer](rs.outputDim)
        self.net.addOutputModule(outLayer)

        if len(rs.hiddenLayers) == 0:
            # No hidden layer: direct input -> output connection.
            self.net.addConnection(FullConnection(inLayer, outLayer))
            if rs.bias == True:
                bias = BiasUnit('bias')
                self.net.addModule(bias)
                self.net.addConnection(FullConnection(bias, outLayer))
        else:
            # Instantiate each hidden-layer module.
            hiddenLayers = []
            for layer in rs.hiddenLayers:
                tmp = layersDict[layer[0]](layer[1])
                self.net.addModule(tmp)
                hiddenLayers.append(tmp)

            # Input -> first hidden layer.
            self.net.addConnection(FullConnection(inLayer, hiddenLayers[0]))

            # Chain consecutive hidden layers.
            for i in range(1, len(hiddenLayers)):
                self.net.addConnection(
                    FullConnection(hiddenLayers[i - 1], hiddenLayers[i]))

            # FIX: connect the last hidden layer explicitly via [-1] instead
            # of relying on the leftover loop variable (the original pre-seeded
            # i=0 so the single-hidden-layer case wouldn't raise NameError —
            # correct but fragile against reordering).
            self.net.addConnection(FullConnection(hiddenLayers[-1], outLayer))

            if rs.bias == True:
                # One bias unit fanning into every hidden layer and the output.
                bias = BiasUnit('bias')
                self.net.addModule(bias)
                for layer in hiddenLayers:
                    self.net.addConnection(FullConnection(bias, layer))
                self.net.addConnection(FullConnection(bias, outLayer))

        # Weight initialization: small Gaussian noise around zero.
        self.net.sortModules()
        self.shape = self.net.params.shape
        self.net._setParameters(np.random.normal(0.0, 0.1, self.shape))

        # inputDimension/outputDimension are assumed set by regression.__init__.
        self.ds = SupervisedDataSet(self.inputDimension, self.outputDimension)