# Continuation of a command-line training script (the matching `if` for the
# `else:` below is above this chunk). Parses hyperparameters from sys.argv,
# builds a feed-forward Keras model via m.FF_keras, trains and tests it, and
# dumps the training/testing instances under training_instances/ff-Giga/.
else:
    # Default word2vec embedding dimensionality when not supplied on the
    # command line. NOTE(review): assuming only this assignment belongs to
    # the else branch — the if-counterpart is outside this chunk; confirm.
    w2v_size = 200

# Network hyperparameters from the command line. argv[8]-argv[10] are
# space-separated, one entry per hidden layer.
hidden_layer_dims = [int(h) for h in sys.argv[8].split(" ")]
hidden_layer_activations = sys.argv[9].split(" ")
hidden_layer_dropouts = [float(d) for d in sys.argv[10].split(" ")]
window_size = int(sys.argv[11])
num_epochs = int(sys.argv[12])
loss_function = sys.argv[13]
optimizer = sys.argv[14]

# Optional trailing argument: a path for saved weights; None when omitted.
# NOTE(review): weights_location is not used within this chunk — presumably
# consumed by the "#save weights?" step below or later in the file.
if len(sys.argv) == 16:
    weights_location = sys.argv[15]
else:
    weights_location = None

#build model
# embeddingClass=None: precomputed vectors are loaded below instead of an
# embedding object. Remaining kwargs forward the parsed hyperparameters.
model = m.FF_keras(hidden_layer_dims=hidden_layer_dims, activations=hidden_layer_activations, embeddingClass=None, w2vDimension=w2v_size, window_size=window_size, hidden_dropouts=hidden_layer_dropouts, loss_function=loss_function, optimizer=optimizer, num_epochs=num_epochs)
model.buildModel()

print("loading data")
# training_vectors / training_labels / testing_vectors / testing_labels and
# number_training_points are defined earlier in the file (outside this chunk).
model.loadData(training_vectors, training_labels, testing_vectors, testing_labels, number_training_points)

print("training")
# save_data=True persists the assembled training instances to the given
# f_vec/f_lab paths. NOTE(review): semantics of the first two positional
# arguments (None, 0) and of neg_sample are defined by m.FF_keras — confirm.
model.train(None, 0, neg_sample, save_data=True, f_vec="training_instances/ff-Giga/training_X", f_lab="training_instances/ff-Giga/training_y")

print("testing")
model.test(None, 0, save_data=True, f_vec="training_instances/ff-Giga/testing_X", f_lab="training_instances/ff-Giga/testing_y")

# testing_X is populated by the test step above.
print("size of testing data", model.testing_X.shape)

#save weights?