# Example 1
import tensorflow as tf
import os
import preprocess
from time import gmtime, strftime
from sys import argv

# Run configuration for a sequence-classification experiment:
# builds the dataset via `preprocess.DataSet` and defines hyperparameters.

# Timestamp used to tag this run's outputs.
datetime = strftime("%Y-%m-%d %H:%M:%S", gmtime())

curr_dir = os.getcwd()
seqdir = curr_dir + "/seqs/"
seqfiles = os.listdir(seqdir)
props_file = "aa_propierties.csv"  # NOTE: filename spelling kept as-is (must match file on disk)
add_props = True
seq_len = int(argv[1])  # sequence length from the command line

dataset = preprocess.DataSet(seqdir, props_file, add_props, seq_len)
test_dict = dataset.test_dict
input_tensor = dataset.train_tensor  # Import train set
test_set = dataset.test_tensor
labels = dataset.labels

trainset_size = len(input_tensor)
n_labels = len(labels)
# BUGFIX: dict.values() is a non-subscriptable view in Python 3;
# grab an arbitrary first value via iter() instead of values()[0].
# Assumes every amino-acid encoding vector has the same length.
aa_vec_len = len(next(iter(dataset.aa_dict.values())))
n_epochs = 400
minibatch_size = 500
learn_step = 0.2
iters_x_epoch = int(round(trainset_size / minibatch_size, 0))
drop_prob = 1  # presumably a keep-probability: 1 disables dropout — confirm against the model code
print_progress = True
# Example 2
import os  # BUGFIX: os.getcwd()/os.listdir() were used without importing os
import preprocess
from neural_networks import *
from time import gmtime, strftime
from sys import argv
import pandas as pd

# Run configuration for a second experiment: flattened tensors and extra
# hyperparameters (dropout keep-prob, FC units) taken from the command line.

# Timestamp used to tag this run's outputs.
datetime = strftime("%Y-%m-%d %H:%M:%S", gmtime())
curr_dir = os.getcwd()

save_model = True
seqdir = curr_dir + "/seqs/"
seqfiles = os.listdir(seqdir)
props_file = "aa_propierties.csv"  # NOTE: filename spelling kept as-is (must match file on disk)
add_props = True
seq_len = int(argv[3])
dataset = preprocess.DataSet(seqdir, props_file, add_props, seq_len,
                             flatten=True)

test_dict = dataset.test_dict
input_tensor = dataset.train_tensor  # Import train set
test_set = dataset.test_tensor
labels = dataset.labels
# A fifth CLI argument (whatever its value) enables the convolutional variant.
add_conv = len(argv) == 5
n_labels = len(labels)
# BUGFIX: dict.values() is a non-subscriptable view in Python 3;
# grab an arbitrary first value via iter() instead of values()[0].
# Assumes every amino-acid encoding vector has the same length.
aa_vec_len = len(next(iter(dataset.aa_dict.values())))
n_epochs = 1000
minibatch_size = 500
learn_step = 0.02
iters_x_epoch = int(round(len(input_tensor) / minibatch_size, 0))
drop_prob = float(argv[1])  # presumably a keep-probability — confirm against the model code
n_units_fc = int(argv[2])
n_units_lstm = 60