Code Example #1
    def __init__(self,
                 input,
                 W,
                 b,
                 filter_shape,
                 poolsize=(2, 2, 2),
                 activation=relu):

        self.input = input
        # sharedata is a project-local helper (not part of Theano); it wraps
        # the incoming arrays as shared variables, equivalent to
        # theano.shared(value=numpy.asarray(W), borrow=True)
        self.W = sharedata(W)
        self.b = sharedata(b)

        conv = T.nnet.conv3d2d.conv3d(signals=self.input,
                                      filters=self.W,
                                      signals_shape=None,
                                      filters_shape=filter_shape,
                                      border_mode='valid')
        pooled_out = max_pool_3d(input=conv, ds=poolsize, ignore_border=True)

        # apply the non-linear activation; dimshuffle broadcasts the
        # per-channel bias over (batch, time, height, width)
        self.output = activation(pooled_out +
                                 self.b.dimshuffle('x', 'x', 0, 'x', 'x'))
        # store parameters of this layer
        self.params = [self.W, self.b]
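
A minimal usage sketch for this layer, assuming `sharedata`, `relu`, and `max_pool_3d` are the project's own helpers; the class name `ConvPoolLayer3D` and all shapes below are illustrative, not taken from the source:

import numpy
import theano
import theano.tensor as T

x = T.TensorType(theano.config.floatX, (False,) * 5)('x')  # (batch, time, in_ch, h, w)
W0 = numpy.random.randn(8, 3, 1, 3, 3).astype(theano.config.floatX)  # 8 filters of 3x3x3
b0 = numpy.zeros(8, dtype=theano.config.floatX)

layer = ConvPoolLayer3D(input=x, W=W0, b=b0,
                        filter_shape=(8, 3, 1, 3, 3),
                        poolsize=(2, 2, 2))
f = theano.function([x], layer.output)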
Code Example #2
    def __init__(self, rng, input, input_shape, filter, filter_shape,
                 base, activation, poolsize, dtype=theano.config.floatX):

        self.input = input
        self.W = filter
        self.b = base

        # conv3d performs a true convolution (the filters are flipped)
        conv_out = theano.tensor.nnet.conv3d2d.conv3d(
            signals=self.input,
            filters=self.W,
            signals_shape=input_shape,
            filters_shape=filter_shape,
            border_mode='valid')  # the convolution stride is 1

        conv = conv_out + self.b.dimshuffle('x', 'x', 0, 'x', 'x')

        if poolsize is None:
            pooled_out = conv
        else:
            pooled_out = max_pool_3d(input=conv, ds=poolsize,
                                     ignore_border=True)

        # non-linear activation (skipped when activation is None)
        self.output = (
            pooled_out if activation is None
            else activation(pooled_out)
        )

        # store parameters of this layer
        self.params = [self.W, self.b]
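
With border_mode='valid' and ignore_border=True, each pooled axis shrinks to floor((in - filter + 1) / pool), and the channel axis becomes the number of filters. A small sketch of that bookkeeping (the helper name expected_output_shape is illustrative):

def expected_output_shape(signal_shape, filter_shape, poolsize):
    # signal: (batch, time, in_ch, height, width)
    # filter: (n_filters, flt_time, in_ch, flt_h, flt_w)
    batch, t, _, h, w = signal_shape
    n_f, ft, _, fh, fw = filter_shape
    conv = (t - ft + 1, h - fh + 1, w - fw + 1)  # 'valid' convolution
    pt, ph, pw = poolsize
    return (batch, conv[0] // pt, n_f, conv[1] // ph, conv[2] // pw)

expected_output_shape((1, 9, 1, 64, 64), (8, 3, 1, 5, 5), (2, 2, 2))
# -> (1, 3, 8, 30, 30)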
Code Example #3
File: 3d_cnn.py  Project: xavipor/ProjectLaptop
    def __init__(self,
                 input,
                 W,
                 b,
                 filter_shape,
                 poolsize=(2, 2, 2),
                 activation=relu):

        self.input = input  # a theano.tensor.var.TensorVariable; shape.eval() does not work on it
        # W arrives as a CudaNdarray rather than a shared variable, so it is
        # wrapped back into a shared variable (as params_L0[0] originally was)
        self.W = theano.shared(value=W, borrow=True)
        self.b = theano.shared(value=b, borrow=True)

        # conv is also a theano.tensor.var.TensorVariable; its shape cannot be
        # evaluated symbolically here
        conv = T.nnet.conv3d2d.conv3d(
            signals=self.input,  # (batch_size, time, in_channels, height, width)
            filters=self.W,  # (num_of_filters, flt_time, in_channels, flt_height, flt_width)
            signals_shape=None,
            filters_shape=filter_shape,
            border_mode='valid')
        pooled_out = max_pool_3d(input=conv, ds=poolsize, ignore_border=True)

        # non-linear function; the original note questioned whether adding b
        # before pooling (as other variants do) is correct -- the two orders
        # are equivalent, since max pooling commutes with adding a per-channel
        # constant. As before, shape.eval() does not work on output.
        self.output = activation(pooled_out +
                                 self.b.dimshuffle('x', 'x', 0, 'x', 'x'))
        # store parameters of this layer
        self.params = [self.W, self.b]
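
The dimshuffle('x', 'x', 0, 'x', 'x') pattern used throughout lifts the 1-D bias into a 5-D tensor that broadcasts over (batch, time, height, width), adding one bias per channel. The NumPy equivalent is a reshape to (1, 1, n_channels, 1, 1):

import numpy

b = numpy.arange(4, dtype='float32')            # one bias per output channel
conv = numpy.zeros((2, 3, 4, 5, 5), 'float32')  # (batch, time, ch, h, w)
out = conv + b.reshape(1, 1, 4, 1, 1)           # same effect as b.dimshuffle('x', 'x', 0, 'x', 'x')
assert (out[:, :, 2] == b[2]).all()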
Code Example #4
    def __init__(self,
                 input,
                 filter,
                 base,
                 activation,
                 poolsize,
                 dtype=theano.config.floatX):
        """
        Allocate a Conv3dLayer with shared variable internal parameters.
      
        :type input: theano.tensor
        :param input: 5D matrix -- (batch_size, time, in_channels, height, width)
        
        :type filter: 
        :param filter: 5D matrix -- (num_of_filters, flt_time, in_channels, flt_height, flt_width)
        
        :type filters_shape: tuple or list of length 5
        :param filter_shape:(number_of_filters, flt_time,in_channels,flt_height,flt_width)
        
        :type base: tuple or list of length number_of_filters
        :param base:(number_of_filters)
        
        :param activation: non-linear activation function, typically relu or tanh 
        
        :poolsize: tuple or list of length 3
        :param poolsize: the pooling stride, typically (2,2,2)              
        """

        self.input = input
        self.W = filter
        self.b = base

        # do the 3d convolution; conv3d flips the filters (true convolution)
        conv_out = theano.tensor.nnet.conv3d2d.conv3d(
            signals=self.input,
            filters=self.W,
            signals_shape=None,
            filters_shape=None,
            border_mode='valid')  # the convolution stride is 1

        conv = conv_out + self.b.dimshuffle('x', 'x', 0, 'x', 'x')

        if poolsize is None:
            pooled_out = conv
        else:
            pooled_out = max_pool_3d(input=conv,
                                     ds=poolsize,
                                     ignore_border=True)

        # non-linear function
        self.output = (pooled_out
                       if activation is None else activation(pooled_out))

        # store parameters of this layer
        self.params = [self.W, self.b]
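
Every example calls a project-local max_pool_3d, which Theano does not ship. A common construction pools the trailing (height, width) pair with pool_2d, shuffles time into a trailing axis, and pools again. A hedged sketch, assuming a Theano version where theano.tensor.signal.pool.pool_2d takes the ws keyword (older releases call it ds):

from theano.tensor.signal.pool import pool_2d

def max_pool_3d(input, ds, ignore_border=True):
    # input: (batch, time, channels, height, width); ds: (pool_t, pool_h, pool_w)
    # 1) max-pool over the two trailing spatial axes
    out = pool_2d(input, ws=(ds[1], ds[2]), ignore_border=ignore_border)
    # 2) move time to the last axis and pool it with a (1, pool_t) window
    shuffled = out.dimshuffle(0, 2, 3, 4, 1)  # (batch, ch, h, w, time)
    pooled = pool_2d(shuffled, ws=(1, ds[0]), ignore_border=ignore_border)
    # 3) restore the (batch, time, channels, height, width) order
    return pooled.dimshuffle(0, 4, 1, 2, 3)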
Code Example #5
class myConvPool3dLayer(object):
    def __init__(self, input, W, b, filter_shape, poolsize=(1, 1, 1),
                 activation=relu):

        self.input = input
        self.W = theano.shared(numpy.asarray(W, dtype=theano.config.floatX),
                               borrow=True)
        self.b = theano.shared(numpy.asarray(b, dtype=theano.config.floatX),
                               borrow=True)
        conv = T.nnet.conv3d2d.conv3d(
            signals=self.input,
            filters=self.W,
            signals_shape=None,
            filters_shape=filter_shape,
            border_mode='valid')

        pooled_out = max_pool_3d(
            input=conv,
            ds=poolsize,
            ignore_border=True)

        # with the default poolsize (1, 1, 1) the pooling step is a no-op

        self.output = activation(pooled_out +
                                 self.b.dimshuffle('x', 'x', 0, 'x', 'x'))

        self.params = [self.W, self.b]
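
relu appears as a default argument in several of these snippets but is never defined in them; a one-line sketch of the usual definition (newer Theano releases also ship T.nnet.relu):

import theano.tensor as T

def relu(x):
    # rectified linear unit: elementwise max(0, x)
    return T.maximum(0., x)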
Code Example #6
File: wrap_test.py  Project: xavipor/ProjectLaptop
    def __init__(self,
                 input,
                 filter,
                 base,
                 activation,
                 poolsize,
                 dtype=theano.config.floatX):
        """
        Allocate a Conv3dLayer with shared variable internal parameters.
      
        :type input: theano.tensor
        :param input: 5D matrix -- (batch_size, time, in_channels, height, width)
        
        :type filter: 
        :param filter: 5D matrix -- (num_of_filters, flt_time, in_channels, flt_height, flt_width)
        
        :type filters_shape: tuple or list of length 5
        :param filter_shape:(number_of_filters, flt_time,in_channels,flt_height,flt_width)
        
        :type base: tuple or list of length number_of_filters
        :param base:(number_of_filters)
        
        :param activation: non-linear activation function, typically relu or tanh 
        
        :poolsize: tuple or list of length 3
        :param poolsize: the pooling stride, typically (2,2,2)              
        """

        self.input = input  # a shared variable only on the first pass
        self.W = filter  # a theano.tensor.var.TensorVariable
        self.b = base  # a shared variable

        # do the 3d convolution; conv3d flips the filters (true convolution)
        conv_out = theano.tensor.nnet.conv3d2d.conv3d(
            signals=self.input,
            filters=self.W,  # a TensorVariable here, no longer shared (unlike in the CNN3D version)
            signals_shape=None,
            filters_shape=None,
            border_mode='valid')  # the convolution stride is 1
        # conv_out is a TensorVariable whose shape can be evaluated here;
        # observed shapes, layer by layer:
        #   1: [  1,  76,  64, 176, 176]
        #   2: [  1,  36,  64,  86,  86]
        #   3: [  1,  36,  64,  84,  84]
        #   4: [  1,  35, 150,  83,  83]
        #   5: [  1,  35,   2,  83,  83]

        conv = conv_out + self.b.dimshuffle('x', 'x', 0, 'x', 'x')

        if poolsize is None:
            pooled_out = conv
        else:
            pooled_out = max_pool_3d(input=conv,
                                     ds=poolsize,
                                     ignore_border=True)
            # pooled_out is again a TensorVariable; observed shapes:
            #   1: [ 1, 38, 64, 88, 88]
            #   2: [ 1, 36, 64, 86, 86]

        # non-linear function
        self.output = (pooled_out
                       if activation is None else activation(pooled_out))

        # store parameters of this layer
        self.params = [self.W, self.b]
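
To sanity-check the shape bookkeeping end to end, the symbolic output can be compiled and run on a random batch. A sketch under assumed shapes; Conv3dLayer stands in for any of the classes above:

import numpy
import theano
import theano.tensor as T

x = T.TensorType(theano.config.floatX, (False,) * 5)('x')
W = theano.shared(numpy.random.randn(4, 3, 1, 3, 3).astype(theano.config.floatX))
b = theano.shared(numpy.zeros(4, dtype=theano.config.floatX))

layer = Conv3dLayer(input=x, filter=W, base=b, activation=None, poolsize=None)
f = theano.function([x], layer.output.shape)

print(f(numpy.random.randn(2, 7, 1, 16, 16).astype(theano.config.floatX)))
# expected: [ 2  5  4 14 14] -- 'valid' conv shrinks time/h/w by flt - 1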