Ejemplo n.º 1
0
def lenet(trained=True,
          weights_filename=LENET_FILENAME,
          weights_url=LENET_URL):
    """Build a LeNet network, optionally initialized with trained weights.

    :param trained: When True, pretrained weights are fetched and applied.
    :param weights_filename: File holding the LeNet weights; consulted only
                             when ``trained`` is True.
    :param weights_url: Url from which to download file with weights.
    :return: LeNet network.
    """
    weights = None
    if trained:
        weights = load_data(get_bin_path(weights_filename), weights_url)
        if weights is None:
            raise Exception("cannot load LeNet weights")

    # Classic LeNet topology: two conv/pool stages followed by two
    # fully connected layers and a softmax classifier.
    layers = [
        ConvolutionalLayer(image_shape=(28, 28, 1), filter_shape=(5, 5, 20)),
        ReLU(),
        MaxPool(poolsize=(2, 2)),
        ConvolutionalLayer(filter_shape=(5, 5, 50)),
        ReLU(),
        MaxPool(poolsize=(2, 2)),
        FullyConnectedLayer(n_out=500),
        ReLU(),
        FullyConnectedLayer(n_out=10),
        Softmax(),
    ]
    network = Network(layers)
    if weights is not None:
        network.set_params(weights)
    return network
Ejemplo n.º 2
0
 def _get_img(self, filename, reverse):
     """Load an image file as a (1, 3, height, width) float array.

     :param filename: Name of the image file to load.
     :param reverse: When True, the image is flipped along its last
                     (width) axis, i.e. mirrored horizontally.
     :return: Image data as a numpy array of theano's float type.
     """
     raw = misc.imread(get_bin_path(filename))
     # Move the channel axis to the front, then add a batch dimension.
     channels_first = np.rollaxis(raw, 2)
     batched = channels_first.reshape((1, 3, self._height, self._width))
     img = np.asarray(batched, dtype=theano.config.floatX)
     if reverse:
         img = img[..., ::-1]
     return img
Ejemplo n.º 3
0
def alexnet(trained=True, weights_filename=ALEXNET_FILENAME, weights_url=None):
    """Create and return instance of AlexNet network.

    :param trained: If True, trained weights will be loaded from file.
    :param weights_filename: Name of a file with AlexNet weights. Will be
                             used if ``trained`` argument is set to True.
    :param weights_url: Url from which to download file with weights.
    :return: AlexNet network.
    """
    if trained:
        weights = load_data(get_bin_path(weights_filename), weights_url)
        if weights is None:
            raise Exception("cannot load AlexNet weights")

    # Normalization parameters shared by all LRN layers below.
    local_range = 5
    alpha = 0.0001
    beta = 0.75
    k = 1

    net = Network([
        ConvolutionalLayer(image_shape=(227, 227, 3),
                           filter_shape=(11, 11, 96),
                           stride=(4, 4)),
        ReLU(),
        LRN(local_range=local_range, alpha=alpha, beta=beta, k=k),
        MaxPool(poolsize=(3, 3), stride=(2, 2)),
        ConvolutionalLayer(filter_shape=(5, 5, 256),
                           padding=(2, 2),
                           n_groups=2),
        ReLU(),
        LRN(local_range=local_range, alpha=alpha, beta=beta, k=k),
        MaxPool(poolsize=(3, 3), stride=(2, 2)),
        ConvolutionalLayer(filter_shape=(3, 3, 384), padding=(1, 1)),
        ReLU(),
        ConvolutionalLayer(filter_shape=(3, 3, 384),
                           padding=(1, 1),
                           n_groups=2),
        ReLU(),
        ConvolutionalLayer(filter_shape=(3, 3, 256),
                           padding=(1, 1),
                           n_groups=2),
        ReLU(),
        MaxPool(poolsize=(3, 3), stride=(2, 2)),
        FullyConnectedLayer(4096),
        ReLU(),
        Dropout(),
        FullyConnectedLayer(4096),
        ReLU(),
        Dropout(),
        FullyConnectedLayer(1000),
        Softmax()
    ])
    if trained:
        net.set_params(weights)
    return net
Ejemplo n.º 4
0
    def __init__(self, filename=MNIST_filename, url=MNIST_origin):
        """Initialize the MNIST data loader.

        :param filename: Name of a file containing MNIST data.
        :param url: Url from which MNIST data can be downloaded.
        """
        super(MNISTDataLoader, self).__init__()

        # load_data yields the standard (train, validation, test) split.
        train_set, val_set, test_set = load_data(get_bin_path(filename), url)

        (self.train_in, self.train_out,
         self.train_set_size) = self._mnist_shared_dataset(train_set)
        (self.val_in, self.val_out,
         self.val_set_size) = self._mnist_shared_dataset(val_set)
        (self.test_in, self.test_out,
         self.test_set_size) = self._mnist_shared_dataset(test_set)

        self.batch_size = 1
        self.train_data_available = True
        self.val_data_available = True
        self.test_data_available = True
Ejemplo n.º 5
0
def googlenet(trained=True, weights_filename=GOOGLENET_FILENAME,
              weights_url=None):
    """Create and return instance of GoogLeNet network.

    :param trained: If True, trained weights will be loaded from file.
    :param weights_filename: Name of a file with GoogLeNet weights. Will be
                             used if ``trained`` argument is set to True.
    :param weights_url: Url from which to download file with weights.
    :return: GoogLeNet network.
    """
    if trained:
        weights = load_data(get_bin_path(weights_filename), weights_url)
        if weights is None:
            raise Exception("cannot load GoogLeNet weights")

    # Normalization parameters shared by all LRN layers below.
    local_range = 5
    alpha = 0.0001
    beta = 0.75
    k = 1

    net = Network([
        ConvolutionalLayer(image_shape=(224, 224, 3),
                           filter_shape=(7, 7, 64),
                           stride=(2, 2),
                           padding=(3, 3)),
        ReLU(),
        MaxPool(poolsize=(3, 3),
                stride=(2, 2),
                padding=(1, 1)),
        LRN(local_range=local_range,
            alpha=alpha,
            beta=beta,
            k=k),
        ConvolutionalLayer(filter_shape=(1, 1, 64)),
        ReLU(),
        ConvolutionalLayer(filter_shape=(3, 3, 192),
                           padding=(1, 1)),
        ReLU(),
        LRN(local_range=local_range,
            alpha=alpha,
            beta=beta,
            k=k),
        MaxPool(poolsize=(3, 3),
                stride=(2, 2),
                padding=(1, 1)),
        InceptionLayer([64, 96, 128, 16, 32, 32], name='inception 3a'),
        InceptionLayer([128, 128, 192, 32, 96, 64], name='inception 3b'),
        MaxPool(poolsize=(3, 3),
                stride=(2, 2),
                padding=(1, 1)),
        InceptionLayer([192, 96, 208, 16, 48, 64], name='inception 4a'),
        InceptionLayer([160, 112, 224, 24, 64, 64], name='inception 4b'),
        InceptionLayer([128, 128, 256, 24, 64, 64], name='inception 4c'),
        InceptionLayer([112, 144, 288, 32, 64, 64], name='inception 4d'),
        InceptionLayer([256, 160, 320, 32, 128, 128], name='inception 4e'),
        MaxPool(poolsize=(3, 3),
                stride=(2, 2),
                padding=(1, 1)),
        InceptionLayer([256, 160, 320, 32, 128, 128], name='inception 5a'),
        InceptionLayer([384, 192, 384, 48, 128, 128], name='inception 5b'),
        AvgPool(poolsize=(7, 7),
                stride=(1, 1)),
        Dropout(0.4),
        FullyConnectedLayer(1000),
        Softmax(),
    ])
    if trained:
        net.set_params(weights)
    return net
Ejemplo n.º 6
0
    def __init__(self, year, image_shape, buffer_size=1, train_data=True,
                 val_data=True, val_size=None, reverse_training=True,
                 reverse_validation=True):
        """Create ImageNet data loader.

        :param year: Specifies which year's data should be loaded.
        :param image_shape: Image shape in format (height, width).
        :param buffer_size: Number of batches to be stored in memory.
        :param train_data: Specifies whether to load training data.
        :param val_data: Specifies whether to load validation data.
        :param val_size: Maximal size of validation data. If None, then all
                         validation data will be used. Otherwise, val_size
                         images will be chosen randomly from the whole set.
        :param reverse_training: When True, reversed copies of the training
                                 images are appended to the training data.
        :param reverse_validation: When True, reversed copies of the
                                   validation images are appended to the
                                   validation data.
        """
        super(ImageNetDataLoader, self).__init__()
        self.buffer_size = buffer_size
        self.shuffle_train_data = True
        self._height, self._width = image_shape

        base_name = self.name_prefix + str(year)
        self.train_name = base_name + self.train_suffix
        self.val_name = base_name + self.val_suffix

        if train_data:
            index = 0
            answers = []
            train_files = []
            train_dirs = os.listdir(get_bin_path(self.train_name))
            for d in train_dirs:
                path = os.path.join(self.train_name, d)
                files = os.listdir(get_bin_path(path))
                train_files += [(os.path.join(d, f), False) for f in files]
                answers += [index] * len(files)
                if reverse_training:
                    # Append a reversed copy of every image, labelled with
                    # the same class index.
                    train_files += [(os.path.join(d, f), True) for f in files]
                    answers += [index] * len(files)
                index += 1
            self.train_files = np.asarray(train_files)
            self.train_answers = np.asarray(answers)

            self._train_in = Buffer(self)
            self._train_out = theano.shared(self.train_answers, borrow=True)
            self.train_data_available = True
            self.train_set_size = len(answers)

        if val_data:
            answers = OrderedDict()
            with open(get_data_path(self.val_name + '.txt'), 'rb') as f:
                while True:
                    line = f.readline()
                    if not line:
                        break
                    filename, answer = line.rsplit(' ', 1)
                    answers[filename] = np.array(int(answer), dtype="int32")
            val_files = [(filename, False) for filename in answers.keys()]
            # Materialize as a list so that the in-place repeat below works
            # regardless of what .values() returns.
            val_answers = list(answers.values())
            if reverse_validation:
                # Bug fix: the reversed copies must be APPENDED (as in the
                # training branch above); previously the original file list
                # was overwritten while the answers were doubled, leaving
                # val_files and val_answers with mismatched lengths.
                val_files += [(filename, True) for filename in answers.keys()]
                val_answers *= 2
            val_answers = np.asarray(val_answers)
            self.val_files = np.asarray(val_files)
            self.val_set_size = len(self.val_files)

            # Reduce amount of validation data, if necessary
            if val_size and val_size < self.val_set_size:
                ind = np.random.permutation(self.val_set_size)[:val_size]
                self.val_files = self.val_files[ind]
                val_answers = val_answers[ind]
                self.val_set_size = val_size

            self._val_in = Buffer(self)
            self._val_out = theano.shared(val_answers, borrow=True)
            self.val_data_available = True

        self.batch_size = 1