Example #1
 def _init_params(self):
     """
     Initialize parameters for the layers in this generator module.
     """
     if self.use_rand:
         # random values will be stacked on exogenous input
         self.w1 = self.init_func(
             (self.out_chans, (self.in_chans + self.rand_chans),
              self.filt_dim, self.filt_dim), "{}_w1".format(self.mod_name))
     else:
         # random values won't be stacked on exogenous input
         self.w1 = self.init_func(
             (self.out_chans, self.in_chans, self.filt_dim, self.filt_dim),
             "{}_w1".format(self.mod_name))
     self.w2 = self.init_func(
         (self.out_chans, self.out_chans, self.filt_dim, self.filt_dim),
         "{}_w2".format(self.mod_name))
     self.params = [self.w1, self.w2]
     # make gains and biases for transforms that will get batch normed
     if self.apply_bn_1:
         gain_ifn = inits.Normal(loc=1., scale=0.02)
         bias_ifn = inits.Constant(c=0.)
         self.g1 = gain_ifn((self.out_chans), "{}_g1".format(self.mod_name))
         self.b1 = bias_ifn((self.out_chans), "{}_b1".format(self.mod_name))
         self.params.extend([self.g1, self.b1])
     if self.apply_bn_2:
         gain_ifn = inits.Normal(loc=1., scale=0.02)
         bias_ifn = inits.Constant(c=0.)
         self.g2 = gain_ifn((self.out_chans), "{}_g2".format(self.mod_name))
         self.b2 = bias_ifn((self.out_chans), "{}_b2".format(self.mod_name))
         self.params.extend([self.g2, self.b2])
     return
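
The `init_func` used throughout these modules follows a dcgan_code-style initializer interface: a callable that takes a shape and a name and returns a named Theano shared variable. A minimal sketch of that interface, assuming NumPy and Theano; the `NormalInit` name below is illustrative, not taken from the examples:

import numpy as np
import theano

class NormalInit(object):
    """Sketch of an inits.Normal-style initializer: samples from N(loc, scale)."""
    def __init__(self, loc=0., scale=0.02):
        self.loc = loc
        self.scale = scale

    def __call__(self, shape, name=None):
        vals = np.random.normal(loc=self.loc, scale=self.scale, size=shape)
        return theano.shared(vals.astype(theano.config.floatX), name=name)

# usage mirroring the calls in _init_params above (shapes are hypothetical):
# w1 = NormalInit(scale=0.02)((64, 32, 3, 3), 'gen_mod_1_w1')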
Example #2
 def _init_params(self):
     """
     Initialize parameters for the layers in this discriminator module.
     """
     self.w1 = self.init_func(
         (self.out_chans, self.in_chans, self.filt_dim, self.filt_dim),
         "{}_w1".format(self.mod_name))
     self.w2 = self.init_func(
         (self.out_chans, self.out_chans, self.filt_dim, self.filt_dim),
         "{}_w2".format(self.mod_name))
     self.wd = self.init_func(
         (1, self.out_chans, self.filt_dim, self.filt_dim),
         "{}_wd".format(self.mod_name))
     self.params = [self.w1, self.w2, self.wd]
     # make gains and biases for transforms that will get batch normed
     if self.apply_bn_1:
         gain_ifn = inits.Normal(loc=1., scale=0.02)
         bias_ifn = inits.Constant(c=0.)
         self.g1 = gain_ifn((self.out_chans), "{}_g1".format(self.mod_name))
         self.b1 = bias_ifn((self.out_chans), "{}_b1".format(self.mod_name))
         self.params.extend([self.g1, self.b1])
     if self.apply_bn_2:
         gain_ifn = inits.Normal(loc=1., scale=0.02)
         bias_ifn = inits.Constant(c=0.)
         self.g2 = gain_ifn((self.out_chans), "{}_g2".format(self.mod_name))
         self.b2 = bias_ifn((self.out_chans), "{}_b2".format(self.mod_name))
         self.params.extend([self.g2, self.b2])
     return
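
For orientation, these filter banks use Theano's conv2d weight convention of (output channels, input channels, rows, cols); `wd` yields a single-channel map that a discriminator typically reduces to a per-example score. A minimal sketch of how such weights might be applied, assuming 'half' border mode for same-size convolution and a leaky rectifier; this is illustrative usage, not the module's actual apply method:

import theano.tensor as T

def disc_conv_sketch(x, w1, w2, wd):
    # x: (batch, in_chans, rows, cols); weights shaped as in _init_params above
    h1 = T.nnet.conv2d(x, w1, border_mode='half')
    h1 = T.maximum(h1, 0.2 * h1)  # leaky rectify
    h2 = T.nnet.conv2d(h1, w2, border_mode='half')
    h2 = T.maximum(h2, 0.2 * h2)
    # single-channel "decision" map produced by wd
    return T.nnet.conv2d(h2, wd, border_mode='half')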
Example #3
 def __init__(self,
              rand_dim,
              out_dim,
              fc_dim,
              apply_bn_1=True,
              apply_bn_2=True,
              init_func=None,
              rand_type='normal',
              final_relu=True,
              mod_name='dm_fc'):
     self.rand_dim = rand_dim
     self.out_dim = out_dim
     self.fc_dim = fc_dim
     self.apply_bn_1 = apply_bn_1
     self.apply_bn_2 = apply_bn_2
     self.mod_name = mod_name
     self.rand_type = rand_type
     self.final_relu = final_relu
     self.rng = RandStream(123)
     if init_func is None:
         self.init_func = inits.Normal(scale=0.02)
     else:
         self.init_func = init_func
     self._init_params()  # initialize parameters
     return
Example #4
 def __init__(self,
              filt_shape,
              in_chans,
              out_chans,
              rand_chans,
              use_rand=True,
              apply_bn_1=True,
              apply_bn_2=True,
              us_stride=2,
              use_pooling=True,
              init_func=None,
              mod_name='gm_conv',
              rand_type='normal'):
     assert ((filt_shape[0] % 2) > 0), "filter dim should be odd (not even)"
     self.filt_dim = filt_shape[0]
     self.in_chans = in_chans
     self.out_chans = out_chans
     self.rand_chans = rand_chans
     self.use_rand = use_rand
     self.apply_bn_1 = apply_bn_1
     self.apply_bn_2 = apply_bn_2
     self.us_stride = us_stride
     self.use_pooling = use_pooling
     self.mod_name = mod_name
     self.rand_type = rand_type
     self.rng = RandStream(123)
     if init_func is None:
         self.init_func = inits.Normal(scale=0.02)
     else:
         self.init_func = init_func
     self._init_params()  # initialize parameters
     return
Example #5
 def __init__(self,
              filt_shape,
              in_chans,
              out_chans,
              apply_bn_1=True,
              apply_bn_2=True,
              ds_stride=2,
              use_pooling=True,
              init_func=None,
              mod_name='dm_conv'):
     assert ((filt_shape[0] % 2) > 0), "filter dim should be odd (not even)"
     self.filt_dim = filt_shape[0]
     self.in_chans = in_chans
     self.out_chans = out_chans
     self.apply_bn_1 = apply_bn_1
     self.apply_bn_2 = apply_bn_2
     self.ds_stride = ds_stride
     self.use_pooling = use_pooling
     self.mod_name = mod_name
     if init_func is None:
         self.init_func = inits.Normal(scale=0.02)
     else:
         self.init_func = init_func
     self._init_params()  # initialize parameters
     return
Example #6
 def _init_params(self):
     """
     Initialize parameters for the layers in this generator module.
     """
     self.w1 = self.init_func((self.rand_dim, self.out_dim),
                              "{}_w1".format(self.mod_name))
     self.params = [self.w1]
     # make gains and biases for transforms that will get batch normed
     if self.apply_bn:
         gain_ifn = inits.Normal(loc=1., scale=0.02)
         bias_ifn = inits.Constant(c=0.)
         self.g1 = gain_ifn((self.out_dim), "{}_g1".format(self.mod_name))
         self.b1 = bias_ifn((self.out_dim), "{}_b1".format(self.mod_name))
         self.params.extend([self.g1, self.b1])
     return
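
For context, a module initialized this way would typically apply `w1` as a dense transform and use `g1`/`b1` as the gain and bias of a batch-normalization step. A minimal forward-pass sketch under that assumption; the `simple_batchnorm` helper is illustrative, not the library's own implementation:

import theano.tensor as T

def simple_batchnorm(h, g=None, b=None, e=1e-8):
    # normalize each feature over the batch, then apply learned gain/bias
    h = (h - h.mean(axis=0)) / T.sqrt(h.var(axis=0) + e)
    if (g is not None) and (b is not None):
        h = (g * h) + b
    return h

def fc_forward(z, w1, g1=None, b1=None, apply_bn=True):
    # z: (batch, rand_dim) input, w1: (rand_dim, out_dim) as in _init_params above
    h = T.dot(z, w1)
    if apply_bn:
        h = simple_batchnorm(h, g=g1, b=b1)
    return h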
Example #7
 def __init__(self,
              fc_dim,
              in_dim,
              apply_bn=True,
              init_func=None,
              mod_name='dm_fc'):
     self.fc_dim = fc_dim
     self.in_dim = in_dim
     self.apply_bn = apply_bn
     self.mod_name = mod_name
     if init_func is None:
         self.init_func = inits.Normal(scale=0.02)
     else:
         self.init_func = init_func
     self._init_params()  # initialize parameters
     return
Example #8
 def __init__(self,
              filt_shape,
              in_chans,
              out_chans,
              apply_bn=True,
              act_func='lrelu',
              init_func=None,
              mod_name='dm_conv'):
     assert ((filt_shape[0] % 2) > 0), "filter dim should be odd (not even)"
     self.filt_dim = filt_shape[0]
     self.in_chans = in_chans
     self.out_chans = out_chans
     self.apply_bn = apply_bn
     self.act_func = act_func
     self.mod_name = mod_name
     if init_func is None:
         self.init_func = inits.Normal(scale=0.02)
     else:
         self.init_func = init_func
     self._init_params()  # initialize parameters
     return
desc = 'matronet_1'
model_dir = "{}/models/{}".format(EXP_DIR, desc)
sample_dir = "{}/samples/{}".format(EXP_DIR, desc)
log_dir = "{}/logs".format(EXP_DIR)
if not os.path.exists(log_dir):
    os.makedirs(log_dir)
if not os.path.exists(model_dir):
    os.makedirs(model_dir)
if not os.path.exists(sample_dir):
    os.makedirs(sample_dir)

sigmoid = activations.Sigmoid()
bce = T.nnet.binary_crossentropy

gifn = inits.Normal(scale=0.02)
difn = inits.Normal(scale=0.02)

#
# Define some modules to use in the generator
#
gen_module_1 = \
GenFCModule(
    rand_dim=nz0,
    out_dim=(ngf*2*7*7),
    fc_dim=ngfc,
    apply_bn_1=True,
    apply_bn_2=True,
    init_func=gifn,
    rand_type='normal',
    mod_name='gen_mod_1'
)

    means = labels.mean(axis=0)
    print('labels ', labels.shape, means, means[0] / means[1])

    vaY, labels = tr_data.get_data(tr_handle, slice(10000, min(ntrain, 20000)))
    vaY = transform(vaY)

    va_nnd_1k = nnd_score(vaY.reshape((len(vaY), -1)),
                          vaX.reshape((len(vaX), -1)),
                          metric='euclidean')
    print('va_nnd_1k = %.2f' % va_nnd_1k)
    means = labels.mean(axis=0)
    print('labels ', labels.shape, means, means[0] / means[1])

#####################################
# shared variables
gifn = inits.Normal(scale=0.02)
difn = inits.Normal(scale=0.02)
gain_ifn = inits.Normal(loc=1., scale=0.02)
bias_ifn = inits.Constant(c=0.)

gw = gifn((nz, ngf * 8 * 4 * 4), 'gw')
gg = gain_ifn((ngf * 8 * 4 * 4), 'gg')
gb = bias_ifn((ngf * 8 * 4 * 4), 'gb')
gw2 = gifn((ngf * 8, ngf * 4, 5, 5), 'gw2')
gg2 = gain_ifn((ngf * 4), 'gg2')
gb2 = bias_ifn((ngf * 4), 'gb2')
gw3 = gifn((ngf * 4, ngf * 2, 5, 5), 'gw3')
gg3 = gain_ifn((ngf * 2), 'gg3')
gb3 = bias_ifn((ngf * 2), 'gb3')
gw4 = gifn((ngf * 2, ngf, 5, 5), 'gw4')
gg4 = gain_ifn((ngf), 'gg4')
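
These shapes follow the usual DCGAN generator layout: `gw` projects the noise vector onto a 4x4 map with `ngf * 8` channels, and each later 5x5 filter bank halves the channel count as the spatial size grows. A sketch of how the first layer might consume `gw`, `gg`, and `gb`; this is assumed usage, with batch normalization written inline rather than via the library's helper:

import theano.tensor as T

def gen_first_layer(Z, gw, gg, gb, ngf):
    # Z: (batch, nz) noise matrix; gw: (nz, ngf * 8 * 4 * 4) as initialized above
    h = T.dot(Z, gw)
    # per-feature batch normalization with learned gain gg and bias gb
    h = (h - h.mean(axis=0)) / T.sqrt(h.var(axis=0) + 1e-8)
    h = T.maximum((gg * h) + gb, 0.)  # rectify
    # reshape to a (batch, ngf * 8, 4, 4) feature map for the upsampling stack
    return h.reshape((h.shape[0], ngf * 8, 4, 4))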
Example #11
niter_decay = 30  # number of iterations over which to linearly decay the learning rate to zero
lr = 0.0002  # initial learning rate for adam
ntrain = 25000  # number of examples to train on

relu = activations.Rectify()
sigmoid = activations.Sigmoid()
lrelu = activations.LeakyRectify()
tanh = activations.Tanh()
bce = T.nnet.binary_crossentropy


def mse(x, y):
    return T.sum(T.pow(x - y, 2), axis=1)


gifn = inits.Normal(scale=0.02)
difn = inits.Normal(scale=0.02)
sigma_ifn = inits.Normal(loc=-100., scale=0.02)
gain_ifn = inits.Normal(loc=1., scale=0.02)
bias_ifn = inits.Constant(c=0.)


def target_transform(X):
    return floatX(X).transpose(0, 3, 1, 2) / 127.5 - 1.


def input_transform(X):
    return target_transform(X)
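
`target_transform` converts a batch of uint8 images from HWC layout in [0, 255] into float NCHW tensors in [-1, 1], the range a tanh generator output produces. A matching inverse for visualization might look like this; it is a sketch, not taken from the source:

def inverse_transform(X):
    # map (batch, channels, rows, cols) values in [-1, 1] back to HWC images in [0, 1]
    return (X.transpose(0, 2, 3, 1) + 1.) / 2.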


def make_conv_layer(X,