Example #1
 def update(self, ytrain):
   """
   Run one training step, feeding ytrain to the 'Y:0' placeholder, and
   return the training loss.
   """
   sess = get_session()
   _, loss = sess.run([self.train_op, self.loss], feed_dict={'Y:0': ytrain})
   return loss
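The feed_dict key 'Y:0' addresses the placeholder by its tensor name rather than by a Python Tensor object. A minimal standalone sketch of that pattern, assuming TensorFlow 1.x-style graph execution via tf.compat.v1 (the placeholder name Y and the toy loss are only for illustration):

import numpy as np
import tensorflow.compat.v1 as tf

tf.disable_eager_execution()

# A placeholder named 'Y' exposes the tensor 'Y:0' in the graph.
y = tf.placeholder(tf.float32, shape=[None, 3], name='Y')
loss = tf.reduce_mean(tf.square(y))

with tf.Session() as sess:
    # feed_dict keys can be tensor names instead of Tensor objects.
    print(sess.run(loss, feed_dict={'Y:0': np.ones((4, 3), dtype=np.float32)}))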
Example #2
  def sample(self, input_data, node, islot=0):
    """
    Sample from the model graph. For user-provided features, generates a
    response.
    """
    # Map user-facing keys to full tensor names, e.g. 'key' -> 'scope/key:0'.
    addcolon0 = lambda s: self.main_scope + '/' + s + ':0'
    node = self.nodes[node]
    sess = get_session()
    input_data = {addcolon0(key): value for key, value in input_data.items()}
    if self.batch_size is None:
      return sess.run(node._islot_to_itensor[islot], feed_dict=input_data)
    else:
      num_samples = len(list(input_data.values())[0])
      if num_samples % self.batch_size:
        raise ValueError("The number of samples ({}) is not divisible by "
                         "self.batch_size ({})".format(num_samples,
                                                       self.batch_size))
      # Preallocate the result array and fill it one batch at a time.
      res = np.zeros([num_samples] + node._islot_to_shape[islot][1:])
      i = 0
      for batch_data in self.batch_iterator_from_dataset(input_data,
                                                         shuffle=False):
        r = sess.run(node._islot_to_itensor[islot],
                     feed_dict=batch_data)
        res[i:i+self.batch_size] = r
        i += self.batch_size  # advance the write index by a full batch
      return res
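The batched branch above preallocates the result array and fills it one batch at a time, advancing the write index by a full batch each iteration. A minimal standalone sketch of that pattern in plain NumPy (run_in_batches and fn are hypothetical names; a real call would use sess.run in place of fn):

import numpy as np

def run_in_batches(fn, data, batch_size):
    # data: array whose length is assumed to be divisible by batch_size.
    num_samples = len(data)
    assert num_samples % batch_size == 0
    res = np.zeros_like(data)
    i = 0
    for start in range(0, num_samples, batch_size):
        batch = data[start:start + batch_size]
        res[i:i + batch_size] = fn(batch)  # e.g. a sess.run(...) call
        i += batch_size                    # advance by a full batch
    return res

x = np.arange(12.0).reshape(6, 2)
print(run_in_batches(lambda b: b * 2.0, x, batch_size=3))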
Example #3
 def generate(self, t_init_gen=None, specs=None, nsamps=1000):
   """
   """
   Yfake = self._build_fake(t_init_gen, specs, nsamps=nsamps)
   sess = get_session()
   sess.run(tf.global_variables_initializer())
   return sess.run(Yfake)
Example #4
  def train(self, dataset, num_epochs=100):
    """
    Trains the model.

    The dataset provided by the client should have the keys

    train_features, train_response
    valid_features, valid_response
    test_features, test_response

    where # is the number of the corresponding Input node (see the model
    graph).
    """
    self._check_dataset_correctness(dataset)
    train_dataset, _, _ = self.make_datasets(dataset)
    batch_size = self.builder.batch_size

    sess = get_session()
    sess.run(tf.global_variables_initializer())
    for _ in range(num_epochs):
      self.bender.update(sess,
                         tuple(zip(*train_dataset.items())),
                         batch_size=batch_size)
      # Evaluate the cost over the whole training set, batch by batch.
      cost = self.reduce_op_from_batches(sess, [self.cost], train_dataset)
      print(cost)

    sess.close()
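The helper reduce_op_from_batches is not shown in these examples; judging from its use here, it evaluates the cost op over mini-batches and averages the results. A minimal standalone sketch of that idea in plain NumPy (mean_over_batches and cost_fn are hypothetical names used only for illustration, not part of the library):

import numpy as np

def mean_over_batches(cost_fn, dataset, batch_size):
    # dataset: dict of equally long arrays; cost_fn returns a scalar per batch.
    num_samples = len(next(iter(dataset.values())))
    costs = []
    for start in range(0, num_samples, batch_size):
        batch = {k: v[start:start + batch_size] for k, v in dataset.items()}
        costs.append(cost_fn(batch))
    return float(np.mean(costs))

data = {'train_features': np.random.randn(100, 3)}
print(mean_over_batches(lambda b: np.mean(b['train_features'] ** 2), data, 10))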
Example #5
    def train(self, dataset, num_epochs=100):
        """
        Train the RNNClassifier model.

        The dataset, provided by the client, should have the keys

        train_features, train_labels
        valid_features, valid_response
        test_features, test_response
        """
        self._check_dataset_correctness(dataset)
        train_dataset, _, _ = self.make_datasets(dataset)
        batch_size = self.batch_size

        sess = get_session()
        sess.run(tf.global_variables_initializer())
        for _ in range(num_epochs):
            self.trainer.update(sess,
                                tuple(zip(*train_dataset.items())),
                                batch_size=batch_size)
            # Evaluate the cost over the whole training set, batch by batch.
            cost = self.reduce_op_from_batches(sess, [self.cost],
                                               train_dataset)
            print(cost)
Example #6
 def generate(self):
   """
   """
   sess = get_session()
   sess.run(tf.global_variables_initializer())
   return sess.run(self.Yfake)
Example #7
 def train(self, ytrain, num_epochs=5):
   """
   Run num_epochs training updates on ytrain, printing the loss after each.
   """
   sess = get_session()
   sess.run(tf.global_variables_initializer())
   for _ in range(num_epochs):
     loss = self.update(ytrain)
     print(loss)