Example #1
    def __init__(self,
                 dim_in,
                 dim_h,
                 dim_out,
                 n_layers,
                 h_act='T.nnet.sigmoid',
                 out_act='T.nnet.sigmoid',
                 name='DARN',
                 **kwargs):

        self.dim_in = dim_in
        self.dim_h = dim_h
        self.dim_out = dim_out
        self.n_layers = n_layers
        assert n_layers > 0

        if out_act is None:
            out_act = 'T.nnet.sigmoid'

        self.h_act = h_act
        self.out_act = out_act

        if out_act == 'T.nnet.sigmoid':
            self.f_sample = _binomial
            self.f_neg_log_prob = _cross_entropy
            self.f_entropy = _binary_entropy
        else:
            # Only a sigmoid (Bernoulli) output is currently supported.
            raise ValueError(out_act)

        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)

        super(DARN, self).__init__(name=name)
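For context, the constructor only binds three callables for the Bernoulli output. Their real Theano implementations live elsewhere in the codebase; a minimal numpy sketch of what helpers with these names typically compute (signatures here are assumptions) could look like:

    import numpy as np

    def _binomial(p, rng=np.random):
        # Draw x ~ Bernoulli(p) elementwise.
        return (rng.uniform(size=p.shape) < p).astype(p.dtype)

    def _cross_entropy(x, p, eps=1e-7):
        # Negative log-likelihood of binary x under Bernoulli(p).
        p = np.clip(p, eps, 1 - eps)
        return -(x * np.log(p) + (1 - x) * np.log(1 - p)).sum(axis=-1)

    def _binary_entropy(p, eps=1e-7):
        # Entropy of Bernoulli(p), summed over units.
        p = np.clip(p, eps, 1 - eps)
        return -(p * np.log(p) + (1 - p) * np.log(1 - p)).sum(axis=-1)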
Example #2
    def __init__(self,
                 dim_in,
                 graph,
                 log_prob_scale=dict(),
                 name='MLP',
                 **kwargs):
        graph = copy.deepcopy(graph)

        self.layers = OrderedDict()
        self.layers.update(**graph['layers'])
        self.edges = graph['edges']
        outs = graph['outs'].keys()
        for k in outs:
            assert k not in self.layers
        self.layers.update(**graph['outs'])

        # Linear (Gaussian) layers carry both a mean and a log-variance,
        # so their dimension is doubled.
        for l in self.layers.keys():
            if self.layers[l]['act'] == 'lambda x: x':
                self.layers[l]['dim'] *= 2

        self.outs = OrderedDict()
        for i, o in self.edges:
            if o in outs:
                assert o not in self.outs
                o_dict = OrderedDict()
                act = self.layers[o]['act']
                if act == 'T.nnet.sigmoid':
                    o_dict['f_sample'] = _binomial
                    o_dict['f_neg_log_prob'] = _cross_entropy
                    o_dict['f_entropy'] = _binary_entropy
                    o_dict['f_prob'] = lambda x: x
                elif act == 'T.nnet.softmax':
                    o_dict['f_sample'] = _sample_softmax
                    o_dict['f_neg_log_prob'] = _categorical_cross_entropy
                    o_dict['f_entropy'] = _categorical_entropy
                    o_dict['f_prob'] = lambda x: x
                    self.layers[o]['act'] = '_softmax'
                elif act == 'T.tanh':
                    o_dict['f_sample'] = _centered_binomial
                elif act == 'lambda x: x':
                    o_dict['f_sample'] = _normal
                    o_dict['f_neg_log_prob'] = _neg_normal_log_prob
                    o_dict['f_entropy'] = _normal_entropy
                    o_dict['f_prob'] = _normal_prob
                else:
                    raise ValueError(act)

                if log_prob_scale.get(o, None) is not None:
                    o_dict['log_prob_scale'] = log_prob_scale[o]

                self.outs[o] = o_dict

        # 'i' is reserved for the input layer, added below.
        assert 'i' not in self.layers
        self.layers['i'] = dict(dim=dim_in)

        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)

        #assert len(kwargs) == 0, 'Got extra args: %r' % kwargs.keys()
        super(MultiModalMLP, self).__init__(name=name)
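The constructor expects `graph` to be a dict with 'layers', 'edges', and 'outs' entries, where each layer carries 'dim' and 'act' keys and 'i' names the input. A hypothetical graph for one hidden layer feeding a sigmoid output (all concrete sizes here are made up):

    from collections import OrderedDict

    graph = dict(
        layers=OrderedDict(
            h0=dict(dim=256, act='T.nnet.sigmoid'),
        ),
        outs=OrderedDict(
            o=dict(dim=784, act='T.nnet.sigmoid'),
        ),
        # Edges are (source, target) pairs; 'i' is the input layer.
        edges=[('i', 'h0'), ('h0', 'o')],
    )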
Example #3
    def __init__(self,
                 dim_in,
                 dim_h,
                 dim_out,
                 n_layers=2,
                 posteriors=None,
                 conditionals=None,
                 z_init=None,
                 name='gbn',
                 **kwargs):

        self.dim_in = dim_in
        self.dim_out = dim_out
        self.dim_h = dim_h

        self.n_layers = n_layers

        self.posteriors = posteriors
        self.conditionals = conditionals

        self.z_init = z_init

        kwargs = init_inference_args(self, **kwargs)
        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)

        super(DeepGBN, self).__init__(name=name)
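A GBN places Gaussian units in its layers; the `_neg_normal_log_prob` helper referenced in Example #2 is the key ingredient. A minimal numpy sketch of that quantity (the signature is an assumption):

    import numpy as np

    def _neg_normal_log_prob(x, mu, log_sigma):
        # -log N(x; mu, exp(log_sigma)^2), summed over units.
        return (0.5 * np.log(2 * np.pi) + log_sigma
                + 0.5 * ((x - mu) / np.exp(log_sigma)) ** 2).sum(axis=-1)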
Example #4
    def __init__(self,
                 dim,
                 name='distribution',
                 must_sample=False,
                 scale=1,
                 **kwargs):
        self.dim = dim
        self.must_sample = must_sample
        self.scale = scale

        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)

        super(Distribution, self).__init__(name=name)
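The `scale` attribute is what Example #7 below multiplies into `dim_out`: it says how many parameters each output dimension needs. A hypothetical Gaussian subclass (the class name and `split` method are illustrative, not from the codebase) would use scale=2 so one output vector can hold a mean and a log-variance per dimension:

    class Gaussian(Distribution):
        # Hypothetical sketch: two parameters (mu, log_sigma) per dimension.
        def __init__(self, dim, name='gaussian', **kwargs):
            super(Gaussian, self).__init__(dim, name=name, scale=2, **kwargs)

        def split(self, p):
            # First half of p is mu, second half is log_sigma.
            return p[:self.dim], p[self.dim:]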
Example #5
    def __init__(self,
                 dim_in,
                 dim_hs,
                 posteriors=None,
                 conditionals=None,
                 prior=None,
                 name='sbn',
                 **kwargs):

        self.dim_in = dim_in
        self.dim_hs = dim_hs
        self.n_layers = len(self.dim_hs)
        self.posteriors = posteriors
        self.conditionals = conditionals
        self.prior = prior

        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)

        super(DeepSBN, self).__init__(name=name)
Example #6
    def __init__(self,
                 dim_in,
                 dim_h,
                 posterior=None,
                 conditional=None,
                 prior=None,
                 name='sbn',
                 **kwargs):

        self.dim_in = dim_in
        self.dim_h = dim_h

        self.posterior = posterior
        self.conditional = conditional
        self.prior = prior

        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)

        super(SBN, self).__init__(name=name)
Example #7
    def __init__(self,
                 dim_in,
                 dim_out,
                 dim_h=None,
                 n_layers=None,
                 dim_hs=None,
                 f_sample=None,
                 f_neg_log_prob=None,
                 f_entropy=None,
                 h_act='T.nnet.sigmoid',
                 distribution='binomial',
                 out_act=None,
                 distribution_args=dict(),
                 name='MLP',
                 **kwargs):

        self.dim_in = dim_in

        if out_act is not None:
            warnings.warn('out_act option going away. Use `distribution`.',
                          FutureWarning)
            if out_act == 'T.nnet.sigmoid':
                distribution = 'binomial'
            elif out_act == 'T.tanh':
                distribution = 'centered_binomial'
            elif out_act == 'T.nnet.softmax':
                distribution = 'multinomial'
            elif out_act == 'lambda x: x':
                distribution = 'gaussian'
            else:
                raise ValueError(out_act)

        if isinstance(distribution, Distribution):
            self.distribution = distribution
        elif distribution is not None:
            self.distribution = resolve_distribution(
                distribution, conditional=True)(dim_out, **distribution_args)
        else:
            self.distribution = None

        if self.distribution is not None:
            self.dim_out = dim_out * self.distribution.scale

        if dim_h is None:
            # Explicit per-layer sizes: copy so the caller's list is not mutated.
            assert n_layers is None
            dim_hs = list(dim_hs) if dim_hs is not None else []
        else:
            # Single hidden size repeated for all but the output layer.
            assert dim_hs is None
            dim_hs = [dim_h] * (n_layers - 1)
        dim_hs.append(self.dim_out)
        self.dim_hs = dim_hs
        self.n_layers = len(dim_hs)
        assert self.n_layers > 0

        self.h_act = h_act

        kwargs = init_weights(self, **kwargs)
        kwargs = init_rngs(self, **kwargs)
        super(MLP, self).__init__(name=name, **kwargs)
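The hidden-size bookkeeping supports two mutually exclusive calling conventions: an explicit list `dim_hs`, or a single `dim_h` repeated `n_layers - 1` times. A standalone sketch of just that logic (not part of the class, shown only to make the contract visible):

    def resolve_dims(dim_out, dim_h=None, n_layers=None, dim_hs=None):
        if dim_h is None:
            assert n_layers is None
            dim_hs = list(dim_hs) if dim_hs is not None else []
        else:
            assert dim_hs is None
            dim_hs = [dim_h] * (n_layers - 1)
        # The output layer is always appended last.
        dim_hs.append(dim_out)
        return dim_hs

    print(resolve_dims(10, dim_h=64, n_layers=3))  # [64, 64, 10]
    print(resolve_dims(10, dim_hs=[128, 64]))      # [128, 64, 10]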