def __init__(self, name, configuration, graph=None):
    """Build the autoencoder graph, its training ops, and a live session.

    Constructs encoder -> z -> decoder (plus a parallel embedder branch
    decoded with the SAME decoder weights via TF1 variable reuse), the
    loss/optimizer, a Saver, summary writers, and finally launches a
    tf.Session and initializes all variables.

    Args:
        name: variable-scope name under which all ops are created.
        configuration: config object supplying the encoder/embedder/decoder
            constructors and their kwarg dicts, plus train_dir,
            saver_max_to_keep, and (optionally) allow_gpu_growth and
            close_with_tanh.
        graph: optional tf.Graph, forwarded to the AutoEncoder base.

    NOTE(review): the original source was collapsed onto one line; the
    scope extent below (everything inside the variable_scope, matching the
    common point-net AE layout) is assumed — confirm against history.
    """
    c = configuration
    self.configuration = c
    AutoEncoder.__init__(self, name, graph, configuration)

    with tf.variable_scope(name):
        self.z = c.encoder(self.x, **c.encoder_args)
        self.vz = c.embedder(self.vx, **c.embedder_args)
        self.bottleneck_size = int(self.z.get_shape()[1])

        layer = c.decoder(self.z, **c.decoder_args)

        # Decode the embedder branch with the same decoder weights.
        # Save and restore the caller's 'reuse' flag rather than resetting
        # it to False unconditionally (the original clobbered any
        # pre-existing True value in decoder_args).
        prior_reuse = c.decoder_args.get('reuse', False)
        c.decoder_args['reuse'] = True
        vlayer = c.decoder(self.vz, **c.decoder_args)
        c.decoder_args['reuse'] = prior_reuse

        if c.exists_and_is_not_none('close_with_tanh'):
            layer = tf.nn.tanh(layer)
            vlayer = tf.nn.tanh(vlayer)

        self.x_reconstr = tf.reshape(
            layer, [-1, self.n_output[0], self.n_output[1]])
        self.vx_reconstr = tf.reshape(
            vlayer, [-1, self.n_output[0], self.n_output[1]])

        self.saver = tf.train.Saver(
            tf.global_variables(), max_to_keep=c.saver_max_to_keep)

        self._create_loss()
        self._setup_optimizer()

        # GPU configuration: default to on-demand memory growth unless the
        # configuration says otherwise.
        growth = getattr(c, 'allow_gpu_growth', True)
        config = tf.ConfigProto()
        config.gpu_options.allow_growth = growth

        # Summaries
        self.merged_summaries = tf.summary.merge_all()
        self.train_writer = tf.summary.FileWriter(
            osp.join(configuration.train_dir, 'summaries'), self.graph)

        # Initializing the tensor flow variables
        self.init = tf.global_variables_initializer()

        # Launch the session
        self.sess = tf.Session(config=config)
        self.sess.run(self.init)
def __init__(self, numpy_rng=None, input=None, n_visible=8, n_hidden=4,
             corrupt_level=0.0, W=None, bhid=None, bvis=None,
             theano_rng=None, sparsity=0.05, beta=0.001):
    """Denoising autoencoder: a sparse AutoEncoder plus input corruption.

    Delegates all parameter/shape setup to the AutoEncoder base and adds
    a Theano RandomStreams RNG used to corrupt the input, plus the
    corruption fraction.

    Args:
        numpy_rng: numpy RandomState, forwarded to the base and used to
            seed theano_rng when one is not supplied.
        input: symbolic input variable (or None; base decides the default).
        n_visible: number of visible (input) units.
        n_hidden: number of hidden units.
        corrupt_level: fraction of the input to corrupt (0.0 = plain AE).
        W, bhid, bvis: optional shared parameters to reuse in the base.
        theano_rng: optional Theano RandomStreams instance.
        sparsity: sparsity target for the base AE's penalty.
        beta: weight of the sparsity penalty.
    """
    AutoEncoder.__init__(self, numpy_rng=numpy_rng, input=input,
                         n_visible=n_visible, n_hidden=n_hidden,
                         sparsity=sparsity, beta=beta,
                         W=W, bhid=bhid, bvis=bvis)
    if not theano_rng:
        # Seed from the full 30-bit range. The original used 2 ** 3 —
        # only 8 distinct seeds — almost certainly a typo for the
        # conventional 2 ** 30 used in the Theano dA tutorial.
        theano_rng = RandomStreams(self.numpy_rng.randint(2 ** 30))
    self.theano_rng = theano_rng
    self.corrupt_level = corrupt_level