Example No. 1
    def __init__(self, k, nvis, convergence_th=1e-6, max_iter=None,
                 verbose=False):
        """
        Parameters
        ----------
        k : int
            Number of clusters
        nvis : int
            Dimension of input
        convergence_th : float
            Threshold on the distance to clusters below which k-means
            stops iterating.
        max_iter : int, optional
            Maximum number of iterations. Defaults to infinity.
        verbose : bool
            Whether to print progress information during training.
        """

        Block.__init__(self)
        Model.__init__(self)

        self.input_space = VectorSpace(nvis)

        self.k = k
        self.convergence_th = convergence_th
        if max_iter:
            if max_iter < 0:
                raise Exception('KMeans init: max_iter should be positive.')
            self.max_iter = max_iter
        else:
            self.max_iter = float('inf')

        self.verbose = verbose
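
A minimal construction sketch for the snippet above. The class name KMeans and the import path are assumptions inferred from the error message in the code; the cluster count and input dimension are illustrative.

# Sketch only: class and import path assumed from the snippet's error message.
from pylearn2.models.kmeans import KMeans

# 10 clusters over 784-dimensional inputs; stop after 100 iterations or once
# the convergence threshold is reached, whichever comes first.
km = KMeans(k=10, nvis=784, convergence_th=1e-6, max_iter=100, verbose=True)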
Example No. 2
def test_stackedblocks_without_params():
    """
    Test StackedBlocks when not all layers have trainable params
    """

    sb = StackedBlocks([Block(), Block()])

    assert sb._params is None
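
The assertion holds because plain Block instances have no trainable parameters, so a stack built only from them ends up with _params left as None. A self-contained sketch of the same construction; the import path pylearn2.blocks is an assumption.

# Assumed import path for the names used in the test above.
from pylearn2.blocks import Block, StackedBlocks

stack = StackedBlocks([Block(), Block()])  # no layer contributes parameters
assert stack._params is None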
Example No. 3
    def __init__(self, C, kernel='rbf', gamma=1.0, coef0=1.0, degree=3):
        estimator = SVC(C=C,
                        kernel=kernel,
                        gamma=gamma,
                        coef0=coef0,
                        degree=degree)
        Block.__init__(self)
        Model.__init__(self)
        super(DenseMulticlassSVM, self).__init__(estimator)
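
A hedged usage sketch: DenseMulticlassSVM forwards its keyword arguments to scikit-learn's SVC, so construction mirrors SVC's interface. The import path and parameter values below are illustrative assumptions.

# Assumed import path for the wrapper defined above.
from pylearn2.models.svm import DenseMulticlassSVM

# Same keywords as sklearn.svm.SVC; values are placeholders.
clf = DenseMulticlassSVM(C=1.0, kernel='rbf', gamma=0.1, coef0=1.0, degree=3)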
Example No. 4
    def __init__(self, k, nvis, convergence_th=1e-6, max_iter=None, verbose=False):
        Block.__init__(self)
        Model.__init__(self)

        self.input_space = VectorSpace(nvis)

        self.k = k
        self.convergence_th = convergence_th
        if max_iter:
            if max_iter < 0:
                raise Exception("KMeans init: max_iter should be positive.")
            self.max_iter = max_iter
        else:
            self.max_iter = float("inf")

        self.verbose = verbose
Example No. 5
    def __init__(self, k, nvis, convergence_th=1e-6, max_iter=None,
                 verbose=False):
        Block.__init__(self)
        Model.__init__(self)

        self.input_space = VectorSpace(nvis)

        self.k = k
        self.convergence_th = convergence_th
        if max_iter:
            if max_iter < 0:
                raise Exception('KMeans init: max_iter should be positive.')
            self.max_iter = max_iter
        else:
            self.max_iter = float('inf')

        self.verbose = verbose
Example No. 6
def test_transformer_iterator():
    """
    Tests whether TransformerIterator is iterable
    """

    test_path = os.path.join(pylearn2.__path__[0], 'datasets', 'tests',
                             'test.csv')
    raw = CSVDataset(path=test_path, expect_headers=False)
    block = Block()
    dataset = TransformerDataset(raw, block)
    iterator = dataset.iterator('shuffled_sequential', 3)
    try:
        iter(iterator)
    except TypeError:
        assert False, "TransformerIterator isn't iterable"
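
The test only checks iterability, but the usual purpose of TransformerDataset is to push every requested minibatch through a trained Block before it is returned. A sketch under an assumed import path; a fitted block (for instance, the trained PCA in the next example) would stand in for trained_block.

# Assumed import path; the call pattern matches the test above.
from pylearn2.datasets.transformer_dataset import TransformerDataset

def iter_transformed(raw_dataset, trained_block, batch_size=3):
    # Wrap the raw dataset so each batch is transformed by the block on the fly.
    dataset = TransformerDataset(raw_dataset, trained_block)
    return dataset.iterator('shuffled_sequential', batch_size)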
Example No. 7
    # Set PCA subclass from argument.
    if args.algorithm == 'cov_eig':
        PCAImpl = CovEigPCA
    elif args.algorithm == 'svd':
        PCAImpl = SVDPCA
    elif args.algorithm == 'online':
        PCAImpl = OnlinePCA
        conf['minibatch_size'] = args.minibatch_size
    else:
        # This should never happen.
        raise NotImplementedError(args.algorithm)

    # Load precomputed PCA transformation if requested; otherwise compute it.
    if args.load_file:
        pca = Block.load(args.load_file)
    else:
        logger.info("... computing PCA")
        pca = PCAImpl(**conf)
        pca.train(train_data)
        # Save the computed transformation.
        pca.save(args.save_file)

    # Apply the transformation to test and valid subsets.
    inputs = tensor.matrix()
    pca_transform = theano.function([inputs], pca(inputs))
    valid_pca = pca_transform(valid_data)
    test_pca = pca_transform(test_data)
    logger.info("New shapes: {0}".format(map(numpy.shape,
                                         [valid_pca, test_pca])))
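
Because the fitted transformation is saved as a Block, a later session can reload it with Block.load and reapply it using the same compile-and-apply pattern shown above. The file name and new_data are placeholders in this sketch; the imports are the ones the script already uses.

# Placeholder path and data; mirrors the pattern in the script above.
pca = Block.load('pca_transform.pkl')
inputs = tensor.matrix()
pca_transform = theano.function([inputs], pca(inputs))
new_features = pca_transform(new_data)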
Example No. 8
    def __init__(self, C, kernel='rbf', gamma=1.0, coef0=1.0, degree=3):
        estimator = SVC(C=C, kernel=kernel, gamma=gamma, coef0=coef0,
                        degree=degree)
        Block.__init__(self)
        Model.__init__(self)
        super(DenseMulticlassSVM, self).__init__(estimator)
Example No. 9
    def __init__(self,
                 nmap,
                 input_space=None,
                 nvisx=None,
                 nvisy=None,
                 input_source=("featuresX", "featuresY"),
                 act_enc=None,
                 act_dec=None,
                 irange=1e-3,
                 rng=9001):
        Block.__init__(self)
        Model.__init__(self)
        assert nmap > 0, "Number of mapping units must be positive"

        if ((nvisx is not None and nvisy is not None) or
                input_space is not None):
            if nvisx is not None and nvisy is not None:
                assert nvisx > 0, "Number of visx units must be positive"
                assert nvisy > 0, "Number of visy units must be positive"
                input_space = CompositeSpace(
                    [VectorSpace(nvisx),
                     VectorSpace(nvisy)])
                self.nvisx = nvisx
                self.nvisy = nvisy
            elif isinstance(input_space.components[0], Conv2DSpace):
                rx, cx = input_space.components[0].shape
                chx = input_space.components[0].num_channels
                ry, cy = input_space.components[1].shape
                chy = input_space.components[1].num_channels
                self.nvisx = rx * cx * chx
                self.nvisy = ry * cy * chy
            else:
                raise NotImplementedError(
                    str(type(self)) + " does not support that input_space.")
        # Check whether the input_space and input_source structures match
        try:
            DataSpecsMapping((input_space, input_source))
        except ValueError:
            raise ValueError("The structures of `input_space`, %s, and "
                             "`input_source`, %s do not match. If you "
                             "specified a CompositeSpace as an input, "
                             "be sure to specify the data sources as well." %
                             (input_space, input_source))

        self.input_space = input_space
        self.input_source = input_source
        self.nmap = nmap
        self.output_space = VectorSpace(self.nmap)
        self._initialize_visbiasX(self.nvisx)  # self.visbiasX
        self._initialize_visbiasY(self.nvisy)  # self.visbiasY
        self._initialize_mapbias()  # self.mapbias
        self.irange = irange
        self.rng = make_np_rng(rng, which_method="randn")
        seed = int(self.rng.randint(2**30))
        self.s_rng = make_theano_rng(seed, which_method="uniform")

        def _resolve_callable(conf, conf_attr):
            if conf[conf_attr] is None or conf[conf_attr] == "linear":
                return None
            # If it's a callable, use it directly.
            if hasattr(conf[conf_attr], '__call__'):
                return conf[conf_attr]
            elif (conf[conf_attr] in globals()
                  and hasattr(globals()[conf[conf_attr]], '__call__')):
                return globals()[conf[conf_attr]]
            elif hasattr(tensor.nnet, conf[conf_attr]):
                return getattr(tensor.nnet, conf[conf_attr])
            elif hasattr(tensor, conf[conf_attr]):
                return getattr(tensor, conf[conf_attr])
            else:
                raise ValueError("Couldn't interpret %s value: '%s'" %
                                 (conf_attr, conf[conf_attr]))

        self.act_enc = _resolve_callable(locals(), 'act_enc')
        self.act_dec = _resolve_callable(locals(), 'act_dec')
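
The constructor accepts either explicit visible sizes (nvisx, nvisy) or a CompositeSpace holding two Conv2DSpaces, in which case the sizes are derived from each component's shape and channel count. A hedged sketch of both options: PairedModel is a stand-in for the unnamed class above, the import path is assumed, and the shapes are illustrative.

# Assumed import path for the space classes referenced in the constructor.
from pylearn2.space import CompositeSpace, Conv2DSpace

# Option 1: flat inputs with sizes given directly.
model = PairedModel(nmap=100, nvisx=784, nvisy=784)

# Option 2: a composite space of two image-shaped inputs; nvisx and nvisy
# are then computed as rows * cols * channels of each component space.
space = CompositeSpace([Conv2DSpace(shape=(28, 28), num_channels=1),
                        Conv2DSpace(shape=(28, 28), num_channels=1)])
model = PairedModel(nmap=100, input_space=space,
                    input_source=("featuresX", "featuresY"))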
Example No. 10
    def __init__(
        self,
        nmap,
        input_space=None,
        nvisx=None,
        nvisy=None,
        input_source=("featuresX", "featuresY"),
        act_enc=None,
        act_dec=None,
        irange=1e-3,
        rng=9001,
    ):
        Block.__init__(self)
        Model.__init__(self)
        assert nmap > 0, "Number of mapping units must be positive"

        if (nvisx is not None and nvisy is not None) or input_space is not None:
            if nvisx is not None and nvisy is not None:
                assert nvisx > 0, "Number of visx units must be positive"
                assert nvisy > 0, "Number of visy units must be positive"
                input_space = CompositeSpace([VectorSpace(nvisx), VectorSpace(nvisy)])
                self.nvisx = nvisx
                self.nvisy = nvisy
            elif isinstance(input_space.components[0], Conv2DSpace):
                rx, cx = input_space.components[0].shape
                chx = input_space.components[0].num_channels
                ry, cy = input_space.components[1].shape
                chy = input_space.components[1].num_channels
                self.nvisx = rx * cx * chx
                self.nvisy = ry * cy * chy
            else:
                raise NotImplementedError(str(type(self)) + " does not support that input_space.")
        # Check whether the input_space and input_source structures match
        try:
            DataSpecsMapping((input_space, input_source))
        except ValueError:
            raise ValueError(
                "The structures of `input_space`, %s, and "
                "`input_source`, %s do not match. If you "
                "specified a CompositeSpace as an input, "
                "be sure to specify the data sources as well." % (input_space, input_source)
            )

        self.input_space = input_space
        self.input_source = input_source
        self.nmap = nmap
        self.output_space = VectorSpace(self.nmap)
        self._initialize_visbiasX(self.nvisx)  # self.visbiasX
        self._initialize_visbiasY(self.nvisy)  # self.visbiasY
        self._initialize_mapbias()  # self.mapbias
        self.irange = irange
        self.rng = make_np_rng(rng, which_method="randn")
        seed = int(self.rng.randint(2 ** 30))
        self.s_rng = make_theano_rng(seed, which_method="uniform")

        def _resolve_callable(conf, conf_attr):
            if conf[conf_attr] is None or conf[conf_attr] == "linear":
                return None
            # If it's a callable, use it directly.
            if hasattr(conf[conf_attr], "__call__"):
                return conf[conf_attr]
            elif conf[conf_attr] in globals() and hasattr(globals()[conf[conf_attr]], "__call__"):
                return globals()[conf[conf_attr]]
            elif hasattr(tensor.nnet, conf[conf_attr]):
                return getattr(tensor.nnet, conf[conf_attr])
            elif hasattr(tensor, conf[conf_attr]):
                return getattr(tensor, conf[conf_attr])
            else:
                raise ValueError("Couldn't interpret %s value: '%s'" % (conf_attr, conf[conf_attr]))

        self.act_enc = _resolve_callable(locals(), "act_enc")
        self.act_dec = _resolve_callable(locals(), "act_dec")
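
The nested _resolve_callable helper means act_enc and act_dec may be passed either as callables or as strings: None or "linear" gives a linear (identity) activation, while other strings are looked up in globals(), theano.tensor.nnet, and theano.tensor, in that order. A sketch, again using PairedModel as a stand-in name for the unnamed class:

# "sigmoid" resolves to theano.tensor.nnet.sigmoid and "tanh" to
# theano.tensor.tanh through the lookup chain in _resolve_callable.
model = PairedModel(nmap=100, nvisx=784, nvisy=784,
                    act_enc="sigmoid", act_dec="tanh")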