Example #1
from keras.optimizers import *
from keras.callbacks import *
from additional_metrics import *

import sys
import load_data
from create_model import *

# Command-line arguments:
# -data: dataset name
# -nlayers: number of highway layers
# -inpShape: input shape
# -saving: name used for the log & model saving files
# -dim: dimension of highway layers
# -shared: 1 if shared, 0 otherwise, 2 if both
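# load_data.arg_passing comes from this repo and its implementation is not
# shown here. Illustrative sketch only (an assumption, not the actual parser):
# it is expected to turn "-flag value" pairs from sys.argv into the dict used
# below, e.g. "run.py -data foo -dim 10" -> {'-data': 'foo', '-dim': 10}.
def _arg_passing_sketch(argv):
    args = {}
    # pair each flag with the value that follows it on the command line
    for flag, value in zip(argv[1::2], argv[2::2]):
        try:
            args[flag] = int(value)   # numeric flags such as -dim, -nlayers
        except ValueError:
            args[flag] = value        # string flags such as -data, -saving
    return args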

########################## LOAD DATA ###############################################
print 'Loading data...'
arg = load_data.arg_passing(sys.argv)
dataset, nlayers, inpShape, saving, dim, shared = (
    arg['-data'], arg['-nlayers'], arg['-inpShape'],
    arg['-saving'], arg['-dim'], arg['-shared'])

train, valid, test = load_data.load(dataset)
log = 'log/' + saving + '.txt'

train_x, train_y = train[0], train[1]
valid_x, valid_y = load_data.shuffle(valid[0], valid[1])
test_x, test_y = load_data.shuffle(test[0], test[1])

# Labels are assumed to be 0..K-1: max(train_y) is K-1 for multi-class data,
# so add 1 to get the class count; binary labels in {0, 1} leave it at 1.
n_classes = max(train_y)
if n_classes > 1: n_classes += 1

if n_classes == 1:
    # binary classification: optimize binary cross-entropy and report F1
    loss = 'binary_crossentropy'
    metric = f1
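# The excerpt stops at the binary branch; the multi-class settings are not
# shown. A hedged guess at their shape (an assumption, typical Keras pairing
# for integer labels; not the original author's code):
#
#   else:
#       loss = 'sparse_categorical_crossentropy'
#       metric = 'accuracy'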
Example #2
from keras.layers import *
from keras.models import Model
from keras.constraints import *
from keras.regularizers import *
import gzip
import numpy
import cPickle

import sys
import load_data
import noise_dist
from NCE import *
from pprint import pprint

arg = load_data.arg_passing(sys.argv)
# -data: dataset name, e.g. apache
dataset = '../data/' + arg['-data'] + '_pretrain.pkl.gz'
# -saving: name for the log/model files, e.g. lstm2v_apache_dim10
saving = arg['-saving']
# -dim: embedding dimension, e.g. 10
emb_dim = arg['-dim']
# -len: maximum sequence length (may be unset)
max_len = arg['-len']
log = 'log/' + saving + '.txt'
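# Illustrative invocation (the script name is hypothetical; flag values are
# the examples mentioned in the comments above):
#   python lstm2v_pretrain.py -data apache -saving lstm2v_apache_dim10 -dim 10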

n_noise = 100  # number of noise samples drawn per target for NCE
print 'Loading data...'

# data from the apache repo
train, valid, test = load_data.load(dataset)

valid = valid[-5000:]  # keep only the last 5000 validation examples
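# noise_dist and NCE are repo modules whose APIs are not shown above. As an
# illustration only (an assumption, not the repo's code): the noise
# distribution for NCE is commonly a smoothed unigram distribution over token
# ids, from which n_noise negatives are drawn per target token.
def unigram_noise_dist(sequences, vocab_size, power=0.75):
    counts = numpy.zeros(vocab_size)
    for seq in sequences:
        for token in seq:
            counts[token] += 1
    probs = counts ** power          # word2vec-style smoothing of frequencies
    return probs / probs.sum()

# e.g. sample 100 noise token ids:
#   P_noise = unigram_noise_dist(train, vocab_size)
#   noise_ids = numpy.random.choice(len(P_noise), size=n_noise, p=P_noise)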