Example #1
import logging

import numpy
import theano
from theano.tensor import TensorType
from blocks.filter import VariableFilter
from blocks.graph import ComputationGraph
from blocks.roles import PARAMETER

# Module-level context assumed by these snippets: LadderAE comes from the
# surrounding ladder codebase; `debug` toggles Theano test values.
floatX = theano.config.floatX
debug = False
logger = logging.getLogger(__name__)


def setup_model(p):
    ladder = LadderAE(p)
    # Setup inputs
    input_type = TensorType('float32',
                            [False] * (len(p.encoder_layers[0]) + 1))
    x_only = input_type('features_unlabeled')
    if debug:
        x_only.tag.test_value = numpy.random.normal(
            size=(p.batch_size, ) + p.encoder_layers[0]).astype(floatX)
    x = input_type('features_labeled')
    if debug:
        x.tag.test_value = numpy.random.normal(
            size=(p.batch_size, ) + p.encoder_layers[0]).astype(floatX)
    y = theano.tensor.lvector('targets_labeled')
    if debug:
        y.tag.test_value = numpy.random.randint(
            1, int(p.encoder_layers[-1]) + 1, (p.batch_size,))
    ladder.apply(x, y, x_only)

    # Load parameters if requested
    if p.get('load_from'):
        with open(p.load_from + '/trained_params.npz', 'rb') as f:
            loaded = numpy.load(f)
            cg = ComputationGraph([ladder.costs.total])
            current_params = VariableFilter(roles=[PARAMETER])(cg.variables)
            logger.info('Loading parameters: %s' % ', '.join(loaded.keys()))
            for param in current_params:
                assert param.get_value().shape == loaded[param.name].shape
                param.set_value(loaded[param.name])

    return ladder
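
The snippet expects p to behave like an attribute-accessible dict of
hyperparameters. A minimal usage sketch, with a hypothetical AttributeDict
and toy layer sizes (the real values come from the codebase's config parser):

class AttributeDict(dict):
    # Hypothetical helper: a dict with attribute-style access, since the
    # code needs both p.batch_size and p.get('load_from').
    __getattr__ = dict.__getitem__

p = AttributeDict(
    batch_size=100,
    # First entry is the input shape tuple; the rest are layer widths.
    encoder_layers=[(784,), 1000, 500, 250, 10],
    load_from=None,  # or a directory containing trained_params.npz
)
ladder = setup_model(p)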
Example #2
def setup_model(p):
    ladder = LadderAE(p)
    # Setup inputs
    input_type = TensorType("float32", [False] * (len(p.encoder_layers[0]) + 1))
    x_only = input_type("features_unlabeled")
    x = input_type("features_labeled")
    y = theano.tensor.lvector("targets_labeled")
    ladder.apply(x, y, x_only)

    # Load parameters if requested
    if p.get("load_from"):
        # Prefer the "best" snapshot; fall back to the final one if absent.
        trained_params = ojoin(p.load_from, "trained_params_best.npz")
        if not file_exists(trained_params):
            trained_params = ojoin(p.load_from, "trained_params.npz")

        with open(trained_params, "rb") as f:
            loaded = numpy.load(f)
            cg = ComputationGraph([ladder.costs.total])
            current_params = VariableFilter(roles=[PARAMETER])(cg.variables)
            logger.info("Loading parameters: %s" % ", ".join(loaded.keys()))
            for param in current_params:
                assert param.get_value().shape == loaded[param.name].shape
                param.set_value(loaded[param.name])

    return ladder
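
ojoin and file_exists are helper aliases from this snippet's own codebase;
they presumably amount to the following (an assumption, not a verified
import):

from os.path import join as ojoin
from os.path import isfile as file_exists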
Example #3
def setup_model():
    ladder = LadderAE()
    input_type = TensorType('float32', [False, False])
    x_lb = input_type('features_labeled')
    x_un = input_type('features_unlabeled')
    y = theano.tensor.lvector('targets_labeled')
    # Note the argument order here: (labeled, unlabeled, targets), unlike the
    # (x, y, x_only) order used by the variants above.
    ladder.apply(x_lb, x_un, y)

    return ladder
Example #4
def setup_model(p):
    ladder = LadderAE(p)
    # Setup inputs
    input_type = TensorType('float32', [False] * (len(p.encoder_layers[0]) + 1))
    x_only = input_type('features_unlabeled')
    x = input_type('features_labeled')
    y = theano.tensor.lvector('targets_labeled')
    ladder.apply(x, y, x_only)

    # Load parameters if requested
    if p.get('load_from'):
        # numpy.load on an .npz path returns an NpzFile, which works as a
        # context manager, so no explicit open() is needed.
        with numpy.load(p.load_from + '/trained_params.npz') as loaded:
            cg = ComputationGraph([ladder.costs.total])
            current_params = VariableFilter(roles=[PARAMETER])(cg.variables)
            logger.info('Loading parameters: %s' % ', '.join(loaded.keys()))
            for param in current_params:
                assert param.get_value().shape == loaded[param.name].shape
                param.set_value(loaded[param.name])

    return ladder
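
For completeness, a sketch of the counterpart that would produce such an
.npz file. It keys each array by Theano variable name, an assumption
inferred from how the loaders above index loaded[param.name]; it is not the
codebase's own saver.

def save_params(ladder, out_dir):
    # Collect the trainable parameters from the same graph the loaders
    # filter, then store each array under its variable's name so that
    # loading by name round-trips.
    cg = ComputationGraph([ladder.costs.total])
    params = VariableFilter(roles=[PARAMETER])(cg.variables)
    numpy.savez(out_dir + '/trained_params.npz',
                **{param.name: param.get_value() for param in params})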