Example #1
	def cut_batch(self, step, is_train=True, sampling=1):
		# Assumes `import numpy as np` at module level; `sampling` is
		# accepted but unused in this method.
		n = step % self.train_batch  # cycle over the available batches
		begin = self.batch_size * n
		end = self.batch_size * (n + 1)

		if is_train:
			self.x_batch = self.x[begin:end]
			self.y_batch = self.y[begin:end]
		else:
			self.x_batch = self.x_valid[begin:end]
			self.y_batch = self.y_valid[begin:end]

		# Total number of words whose loss is computed in this batch.
		count = 0.0
		# .copy() rather than [:], so the all-zero template is not
		# mutated through a view (assuming mask_zero is a NumPy array).
		self.mask = self.mask_zero.copy()
		for i in range(self.batch_size):
			# Position of the first EOS token in the i-th target row.
			j = np.where(self.y_batch[i] == self.EOS)[0][0]
			count += j + 1
			self.mask[i][:j + 1] = 1  # words up to and including EOS
		self.count_words = count

		self.x_batch = seq2seq_lib.one_hot(
			self.x_batch, self.batch_size, self.num_steps, self.vocab_size)
		self.y_batch = seq2seq_lib.one_hot(
			self.y_batch, self.batch_size, self.num_steps, self.vocab_size)
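All three examples delegate encoding to seq2seq_lib.one_hot, which is not shown here. Judging from the call sites, it takes an integer index array of shape (batch_size, num_steps) plus its dimensions and returns a one-hot tensor. A minimal sketch under that assumption (the real helper may differ):

	import numpy as np

	def one_hot(ids, batch_size, num_steps, vocab_size):
		"""Hypothetical stand-in for seq2seq_lib.one_hot.

		ids: int array of shape (batch_size, num_steps) holding
		vocabulary indices. Returns a float array of shape
		(batch_size, num_steps, vocab_size) with a 1 at each index.
		"""
		out = np.zeros((batch_size, num_steps, vocab_size), dtype=np.float32)
		rows = np.arange(batch_size)[:, None]  # (batch_size, 1)
		cols = np.arange(num_steps)[None, :]   # (1, num_steps)
		out[rows, cols, ids] = 1.0             # fancy indexing broadcasts to (batch_size, num_steps)
		return out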
Example #2
    def load_test(self):
        self.x_test = seq2seq_lib.preprocess_testing_data(self.num_steps)
        print(self.x_test)
        if self.x_test.shape[0] <= 64:
            # The whole test set fits in a single batch.
            self.x_test_vocab = seq2seq_lib.one_hot(
                self.x_test, self.x_test.shape[0], self.num_steps, self.vocab_size)
        else:
            # Split the test set into a first batch of 64 examples
            # and a second batch holding the remainder.
            self.x_test_vocab = seq2seq_lib.one_hot(
                self.x_test[:64], 64, self.num_steps, self.vocab_size)
            self.x_test_vocab_1 = seq2seq_lib.one_hot(
                self.x_test[64:], self.x_test.shape[0] - 64, self.num_steps, self.vocab_size)
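Example #2 handles at most two chunks and hardcodes a chunk size of 64 (presumably the model's maximum batch size). A sketch of a more general variant, assuming the same one_hot helper, that encodes any number of test examples chunk by chunk; the method name and x_test_vocab_chunks attribute are hypothetical:

    def load_test_chunked(self, chunk=64):
        # Hypothetical generalization of Example #2: encode the test
        # set in chunks of at most `chunk` examples instead of two.
        self.x_test = seq2seq_lib.preprocess_testing_data(self.num_steps)
        self.x_test_vocab_chunks = []
        for start in range(0, self.x_test.shape[0], chunk):
            part = self.x_test[start:start + chunk]
            self.x_test_vocab_chunks.append(seq2seq_lib.one_hot(
                part, part.shape[0], self.num_steps, self.vocab_size))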
Example #3
	def load_test(self):
		# One-hot encode the full preprocessed test set in a single call.
		self.x_test = seq2seq_lib.preprocess_testing_data(self.num_steps)
		self.x_test_vocab = seq2seq_lib.one_hot(
			self.x_test, self.x_test.shape[0], self.num_steps, self.vocab_size)
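The mask and count_words built in Example #1 are presumably consumed when averaging the loss, so that padding after EOS contributes nothing. A minimal sketch of that use, assuming a per-timestep cross-entropy array of shape (batch_size, num_steps); the helper name is hypothetical:

	def masked_batch_loss(losses, mask, count_words):
		# Hypothetical helper: `losses` and `mask` are NumPy arrays of
		# shape (batch_size, num_steps). Positions after EOS are zeroed
		# by the mask, then the sum is averaged over the real words.
		return (losses * mask).sum() / count_words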