# Example #1
# Build the 1943-index -> 48-phone lookup tables from the already-open mapping
# file `f`. Each line is tab-separated: <1943-index>\t<48-phone symbol>\t...
# (per the inline note: column 0 runs 0 ~ 1942, column 1 is aa ~ z).
i = 0  # restored: this line had degraded to a bare no-op `0`, but the loop below requires an initialized counter
for l in f:
    mapping = l.strip(' \n').split('\t')  # mapping = 0 ~ 1942, aa ~ z, aa ~ z
    phone_map_1943i_48i[i] = phone_map_48s_48i[mapping[1]]  # 1943-index -> 48-phone integer id
    phone_map_1943i_48s[i] = mapping[1]                     # 1943-index -> 48-phone string symbol
    i += 1
f.close()

###################
#   Build Model   #
###################

# Symbolic input: a float matrix of features.
x = T.matrix(dtype=theano.config.floatX)

# Instantiate the MLP with the file's architecture constants, then restore
# its trained parameters from the model file given on the command line.
classifier = MLP(
        input=x,
        n_in=INPUT_DIM,
        n_hidden=NEURONS_PER_LAYER,
        n_out=OUTPUT_DIM,
        n_layers=HIDDEN_LAYERS
)
classifier.load_model(args.model_in)

# Compiled forward pass: input matrix -> network output activations.
test_model = theano.function(
        inputs=[x],
        outputs=classifier.output
)
# NOTE(review): the triple-quoted literal below is a large chunk of disabled
# (commented-out) code from an earlier revision of this script. At runtime it
# is a harmless no-op string statement. Consider deleting it outright —
# version control already preserves the history.
'''
#####################
#   Probing Train   #
#####################

f_y = args.train_in + ".unshuffled.y"
with open(f_y, "rb") as f:
    y_out = cPickle.load(f)
f_idx = args.train_in + ".idx"
with open(f_idx, "rb") as f:
print >> sys.stderr, "After loading: %f" % (time.time() - start_time)

###############
# Build Model #
###############

# symbolic variables
index = T.lscalar()
x = T.matrix(dtype=theano.config.floatX)
y = T.ivector()

# construct MLP class
classifier = MLP(input=x,
                 n_in=INPUT_DIM,
                 n_hidden=NEURONS_PER_LAYER,
                 n_out=OUTPUT_DIM,
                 n_layers=HIDDEN_LAYERS)

# cost + regularization terms; cost is symbolic
cost = (classifier.negative_log_likelihood(y) + L1_REG * classifier.L1 +
        L2_REG * classifier.L2_sqr)

# compile "dev model" function
dev_model = theano.function(
    inputs=[index],
    outputs=classifier.errors(y),
    givens={
        x: val_x[index * BATCH_SIZE:(index + 1) * BATCH_SIZE].T,
        y: val_y[index * BATCH_SIZE:(index + 1) * BATCH_SIZE].T,
    })
    mapping = l.strip(' \n').split('\t')        # mapping = 0 ~ 1942, aa ~ z, aa ~ z
    phone_map_1943i_48i[i] = phone_map_48s_48i[mapping[1]]
    phone_map_1943i_48s[i] = mapping[1]
    i += 1
f.close()

###################
#   Build Model   #
###################

x = T.matrix(dtype=theano.config.floatX)

classifier = MLP(
        input=x,
        n_in=INPUT_DIM,
        n_hidden=NEURONS_PER_LAYER,
        n_out=OUTPUT_DIM,
        n_layers=HIDDEN_LAYERS
)
classifier.load_model(args.model_in)

test_model = theano.function(
        inputs=[x],
        outputs=(classifier.output)
)

'''
#####################
#   Probing Train   #
#####################
# Report elapsed wall-clock time for the loading phase on stderr.
# (Equivalent to the Python 2 `print >> sys.stderr, ...` statement form.)
sys.stderr.write("After loading: %f\n" % (time.time() - start_time))

###############
# Build Model #
###############

# symbolic variables
index = T.lscalar()  # minibatch index fed to the compiled functions below (see dev_model's inputs)
x = T.matrix(dtype=theano.config.floatX)  # input feature matrix (float, matches theano's configured precision)
y = T.ivector()  # integer class-label vector paired with x

# Construct the MLP on the symbolic input, using the same architecture
# constants (dims, layer width/count) defined at file scope.
classifier = MLP(input=x,
                 n_in=INPUT_DIM,
                 n_hidden=NEURONS_PER_LAYER,
                 n_out=OUTPUT_DIM,
                 n_layers=HIDDEN_LAYERS)

# Symbolic training objective: negative log-likelihood of the labels,
# plus weighted L1 and (squared) L2 regularization penalties.
cost = (classifier.negative_log_likelihood(y) + L1_REG * classifier.L1 +
        L2_REG * classifier.L2_sqr)

# compile "dev model" function
dev_model = theano.function(
        inputs=[index],
        outputs=classifier.errors(y),