# Example no. 1
# 0
# Hyper-parameters for this run.
# NOTE(review): "initiallization" is spelled this way on the shared params
# object elsewhere in the project, so the attribute name is kept as-is.
params.initiallization = 0.02
params.epochs = 30

# Load the word embeddings (dimension taken from argv[1]) and the relation
# vocabulary used by the model.
embedding_path = ('../commonsendata/embeddings/tuples/embeddings.skip.newtask.en.d'
                  + str(sys.argv[1]) + '.m1.w5.s0.it20.txt')
(words, We) = getWordmap(embedding_path)
rel = getRelation('../commonsendata/Training/rel.txt')

# Encode the full run configuration into the output model file name.
params.outfile = ''.join([
    "../models/", params.outfile,
    ".Epoch", str(params.epochs),
    ".Frac", str(params.frac),
    ".Act", str(params.activation),
    ".Batch", str(params.batchsize),
    ".LC", str(params.LC),
    ".eta", str(params.eta),
    "relSize", str(params.relsize),
    ".txt",
])

examples = getData(params.dataf)

# Keep only the requested fraction of the training examples.
params.data = examples[0:int(params.frac * len(examples))]

print("Using Training Data" + params.dataf)
print("Using Word Embeddings with Dimension " + str(sys.argv[1]))

print("Training on " + str(len(params.data)) + " examples using lambda=" + str(params.lam))
print("Saving models to: " + params.outfile)

# Uniform random initialisation of the 35 x relsize relation matrix.
# Row-major comprehension keeps the same random.uniform call order as a
# nested for-loop fill would.
Rel_init = np.array([[random.uniform(-(params.initiallization), params.initiallization)
                      for _ in range(params.relsize)]
                     for _ in range(35)])

# Build the model and train it on the selected slice of examples.
tm = theano_word_model(We, words, params.hiddensize, params.embedsize, rel,
                       params.batchsize, Rel_init, params.LC, params.LW,
                       params.eta, params.margin, params.initiallization,
                       params.relsize, params.activation)
tm.train(params.data, params, We)


# Example no. 2
# 0
        params.batchsize
    ) + "." + params.type + "." + params.activation + "." + str(
        params.frac) + ".txt"

# Examples are shuffled data.
examples = getData(params.dataf)

# Keep only the requested fraction of the training examples.
params.data = examples[0:int(params.frac * len(examples))]

print("Using Training Data" + params.dataf)
print("Using Word Embeddings with Dimension " + str(sys.argv[1]))

print("Training on " + str(len(params.data)) + " examples using lambda=" + str(params.lam))
print("Saving models to: " + params.outfile)

# Initialize the relation tensor: 35 near-identity relsize x relsize slices.
# Each entry is identity (1 on the diagonal, 0 off it) plus uniform noise in
# [-0.2, 0.2]; the comprehension visits (slice, row, col) in the same order a
# triple nested loop would, so the random.uniform call sequence is unchanged.
Rel_init = np.array([[[(1 if row == col else 0) + random.uniform(-0.2, 0.2)
                       for col in range(params.relsize)]
                      for row in range(params.relsize)]
                     for _ in range(35)])

# Build the model and train it on the selected slice of examples.
tm = theano_word_model(We, words, params.embedsize, rel, params.relsize,
                       Rel_init, params.LC, params.Lw, params.eta,
                       params.margin, params.usepeep, params.activation)
tm.train(params.data, params, We)
# Example no. 3
# 0
def _report(message):
    # Echo a progress message to stdout and mirror it (with a newline) into
    # the already-open log file handle `fin`, in that order.
    print(message)
    fin.write(message + "\n")


examples = getData(params.dataf)

# Keep only the requested fraction of the training examples.
params.data = examples[0:int(params.frac * len(examples))]

_report("Using Training Data" + params.dataf)
_report("Using Word Embeddings with Dimension " + str(sys.argv[1]))

_report("Training on " + str(len(params.data)) + " examples using lambda=" + str(params.lam))
_report("Saving models to: " + params.outfile)

# Uniform random initialisation of the 35 x relsize relation matrix; the
# comprehension preserves the (row, col) order of random.uniform calls.
# NOTE(review): "initiallization" matches the attribute name set elsewhere.
Rel_init = np.array([[random.uniform(-(params.initiallization), params.initiallization)
                      for _ in range(params.relsize)]
                     for _ in range(35)])

# Build the model, train it, and close the log file when done.
tm = theano_word_model(We, words, layersize, params.embedsize, rel,
                       params.batchsize, Rel_init, params.LC, params.eta,
                       params.margin, params.usepeep, fin,
                       params.initiallization, params.relsize,
                       params.activation, params.activation2)
tm.train(params.data, params, We, fin)
fin.close()