示例#1
0
 def init_data_reading(self, train_data_spec, valid_data_spec):
     """Open the training and validation datasets named by the two specs.

     Each spec is handed to ``read_data_args`` (presumably a
     comma-separated "file,options" string — confirm against callers) and
     the parsed result to ``read_dataset``.  The four values returned per
     dataset are stored on the instance as ``*_sets``, ``*_xy``, ``*_x``
     and ``*_y``.
     """
     tr_files, tr_opts = read_data_args(train_data_spec)
     va_files, va_opts = read_data_args(valid_data_spec)
     # Training split.
     (self.train_sets, self.train_xy,
      self.train_x, self.train_y) = read_dataset(tr_files, tr_opts)
     # Validation split.
     (self.valid_sets, self.valid_xy,
      self.valid_x, self.valid_y) = read_dataset(va_files, va_opts)
示例#2
0
 def init_data_reading(self, train_data_spec):
     """Parse *train_data_spec* and open the training dataset it names.

     Results are stored on the instance: ``train_sets``, ``train_xy``,
     ``train_x`` and ``train_y``.
     """
     dataset, dataset_opts = read_data_args(train_data_spec)
     loaded = read_dataset(dataset, dataset_opts)
     self.train_sets, self.train_xy, self.train_x, self.train_y = loaded
示例#3
0
 def init_data_reading(self, train_data_spec):
     """Open the training dataset described by *train_data_spec*.

     Unlike the plain variant, this forwards ``self.multi_label`` to
     ``read_dataset`` — so the instance attribute must be set before
     calling.
     """
     files, opts = read_data_args(train_data_spec)
     (self.train_sets, self.train_xy,
      self.train_x, self.train_y) = read_dataset(files, opts, self.multi_label)
示例#4
0
 def init_data_reading_test(self, data_spec):
     """Parse *data_spec* and open the test dataset it describes.

     Stores the provider and its variables on the instance as
     ``test_sets``, ``test_xy``, ``test_x`` and ``test_y``.
     """
     files, opts = read_data_args(data_spec)
     (self.test_sets, self.test_xy,
      self.test_x, self.test_y) = read_dataset(files, opts)
示例#5
0
 def init_data_reading_test(self, data_spec):
     """Initialise test-set reading from the given data spec string."""
     spec_name, spec_args = read_data_args(data_spec)
     loaded = read_dataset(spec_name, spec_args)
     self.test_sets, self.test_xy, self.test_x, self.test_y = loaded
示例#6
0
import numpy
import theano
from theano import tensor
import cPickle
from io_func.data_io import read_dataset, read_data_args

# Data spec string: pfile archive plus reader options (partition size,
# shuffling, streaming) consumed by read_data_args.
dev_data_file = 'dev.pfile.gz,partition=1000m,random=true,stream=false'

# Reading dev dataset
dev_dataset, dev_dataset_args = read_data_args(dev_data_file)
dev, dev_xy, dev_x, dev_y, dev_set_x, dev_set_y = read_dataset(dev_dataset, dev_dataset_args)
# Cast labels to int64 so they can serve as integer class indices.
dev_set_y = dev_set_y.astype(numpy.int64)
print (numpy.shape(dev_set_x))

# Preparing neural network model
# NOTE(review): layer sizes are hard-coded to match the trained model below.
n_input = 440
n_output = 1940
n_hidden = 2000

# Symbolic input matrix for the network.
x = tensor.dmatrix('x')

# Loading trained neural network model weights.
# Fix: the original leaked the file handle returned by open(); use a
# context manager so the file is closed deterministically.
with open('trained_model_weights.pkl', 'rb') as model_file:
    a = cPickle.load(model_file)
W_h1_value = numpy.asarray(a['W_h1'])
b_h1_value = numpy.asarray(a['b_h1'])
W_h2_value = numpy.asarray(a['W_h2'])
b_h2_value = numpy.asarray(a['b_h2'])
W_h3_value = numpy.asarray(a['W_h3'])
示例#7
0
import numpy
import theano
from theano import tensor
import cPickle
from io_func.data_io import read_dataset, read_data_args
# data_file = 'cv05.pfile.gz,partition=1000m,random=true,stream=false'
# train_dataset, train_dataset_args = read_data_args(data_file)
# train_set, train_xy, train_x, train_y, x , y = read_dataset(train_dataset, train_dataset_args)
# Address of datasets
# Spec strings: pfile archive plus reader options parsed by read_data_args.
train_data_file = 'cv05.pfile.gz,partition=1000m,random=true,stream=false'
valid_data_file = 'valid.pfile.gz,partition=1000m,random=true,stream=false'
test_data_file = 'test.pfile.gz,partition=1000m,random=true,stream=false'

# Reading training dataset
train_dataset, train_dataset_args = read_data_args(train_data_file)
train, train_xy, train_x, train_y, train_set_x, train_set_y = read_dataset(
    train_dataset, train_dataset_args)

# Reading validation dataset
valid_dataset, valid_dataset_args = read_data_args(valid_data_file)
valid, valid_xy, valid_x, valid_y, valid_set_x, valid_set_y = read_dataset(
    valid_dataset, valid_dataset_args)

# Reading test dataset
# NOTE(review): variable is named test_data_args (not test_dataset_args)
# unlike the other two splits — harmless, but inconsistent.
test_dataset, test_data_args = read_data_args(test_data_file)
test, test_xy, test_x, test_y, test_set_x, test_set_y = read_dataset(
    test_dataset, test_data_args)

# Cast label arrays to int64 so they can serve as integer class indices.
train_set_y = train_set_y.astype(numpy.int64)
valid_set_y = valid_set_y.astype(numpy.int64)
test_set_y = test_set_y.astype(numpy.int64)
示例#8
0
 def init_data_reading(self, train_data_spec):
     """Open the training dataset named by *train_data_spec*.

     This variant of read_dataset returns five values; the fifth is kept
     as ``self.extra_train_x`` (an auxiliary input stream — confirm its
     meaning against read_dataset).
     """
     files, opts = read_data_args(train_data_spec)
     outputs = read_dataset(files, opts)
     (self.train_sets, self.train_xy, self.train_x,
      self.train_y, self.extra_train_x) = outputs
import numpy
import theano
from theano import tensor
import cPickle
from io_func.data_io import read_dataset, read_data_args
# data_file = 'cv05.pfile.gz,partition=1000m,random=true,stream=false'
# train_dataset, train_dataset_args = read_data_args(data_file)
# train_set, train_xy, train_x, train_y, x , y = read_dataset(train_dataset, train_dataset_args)
# Address of datasets
# Spec strings: pfile archive plus reader options parsed by read_data_args.
train_data_file = 'cv05.pfile.gz,partition=1000m,random=true,stream=false'
valid_data_file = 'valid.pfile.gz,partition=1000m,random=true,stream=false'
test_data_file = 'test.pfile.gz,partition=1000m,random=true,stream=false'

# Reading training dataset
train_dataset, train_dataset_args = read_data_args(train_data_file)
train, train_xy, train_x, train_y, train_set_x , train_set_y = read_dataset(train_dataset, train_dataset_args)

# Reading validation dataset
valid_dataset, valid_dataset_args = read_data_args(valid_data_file)
valid, valid_xy, valid_x, valid_y, valid_set_x, valid_set_y = read_dataset(valid_dataset, valid_dataset_args)

# Reading test dataset
test_dataset, test_data_args = read_data_args(test_data_file)
test, test_xy, test_x, test_y, test_set_x, test_set_y = read_dataset(test_dataset, test_data_args)

# Cast label arrays to int64 so they can serve as integer class indices.
train_set_y = train_set_y.astype(numpy.int64)
valid_set_y = valid_set_y.astype(numpy.int64)
test_set_y = test_set_y.astype(numpy.int64)

# Network layer widths; presumably matched to the dataset above — TODO confirm.
n_input = 440
n_output = 1940
示例#10
0
 def init_data_reading(self, train_data_spec, valid_data_spec):
     train_dataset, train_dataset_args = read_data_args(train_data_spec)
     valid_dataset, valid_dataset_args = read_data_args(valid_data_spec)
     #train_dataset.sort()
     #valid_dataset.sort()
     self.extra_dim = int(train_dataset_args['extra_dim'])
     print 'init_data_reading: '+str(train_dataset_args)
     print 'init_data_reading: '+str(train_dataset)
     self.train_sets, self.train_xye, self.train_x, self.train_y, self.extra_train_x = read_dataset(train_dataset, train_dataset_args)
     self.valid_sets, self.valid_xye, self.valid_x, self.valid_y, self.extra_valid_x = read_dataset(valid_dataset, valid_dataset_args)
示例#11
0
import numpy
import theano
from theano import tensor
import cPickle
from io_func.data_io import read_dataset, read_data_args

# Data spec string: pfile archive plus reader options (partition size,
# shuffling, streaming) consumed by read_data_args.
dev_data_file = 'dev.pfile.gz,partition=1000m,random=true,stream=false'

# Reading dev dataset

dev_dataset, dev_dataset_args = read_data_args(dev_data_file)
dev, dev_xy, dev_x, dev_y, dev_set_x, dev_set_y = read_dataset(
    dev_dataset, dev_dataset_args)
# Cast labels to int64 so they can serve as integer class indices.
dev_set_y = dev_set_y.astype(numpy.int64)
print(numpy.shape(dev_set_x))

# print a

# Preparing neural network model
# NOTE(review): layer sizes are hard-coded; presumably matched to the
# pickled model loaded below — confirm.
n_input = 440
n_output = 1940
n_hidden = 2000

# Symbolic input matrix for the network.
x = tensor.dmatrix('x')
# Loading trained neural network model weights
# NOTE(review): the file handle from open() is never closed; consider a
# `with` block.  This snippet appears truncated here (stops at b_h2).
a = cPickle.load(open('trained_model_weights.pkl', 'rb'))
W_h1_value = numpy.asarray(a['W_h1'])
b_h1_value = numpy.asarray(a['b_h1'])
W_h2_value = numpy.asarray(a['W_h2'])
b_h2_value = numpy.asarray(a['b_h2'])