Example #1
import utils
import tensorflow as tf
from tensorflow.contrib import rnn
import pickle
import numpy as np
import sys
import random
import os

tf.logging.set_verbosity(tf.logging.INFO)
tf.logging.info("*** Loaded Data ***")

conf = utils.get_config()
args = utils.get_args()
log = utils.get_logger(args['log_file'])
vectors, word2index_lookup = utils.get_embedding_dict(conf)
tf.logging.info("*** Loaded Data ***")
lookup = utils.lookup
# Use zero padding for the pad token instead of a random vector;
# this may work better for attention, since the pad contributes the minimum value.
if conf.padding_type == 'Zero':
    tf.logging.info("Zero Padding..")
    vectors[lookup(word2index_lookup, '<pad>')] = 0

tf.logging.info(str(vars(conf)))
tf.logging.info(str(args))

number_epoch = int(args['number_epoch'])
batch_size = int(args['batch_size'])

X = tf.placeholder(shape=(None, 48, 76), dtype=tf.float32, name='X')  # batch x 48 time steps x 76 features
y = tf.placeholder(shape=(None,), dtype=tf.float32, name='y')  # (None,) gives a 1-D label vector; bare (None) would leave the shape fully unspecified
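
A minimal sanity check for the two placeholders above (not part of the original snippet; the batch size and random inputs are illustrative only):

with tf.Session() as sess:
    fake_X = np.random.rand(4, 48, 76).astype(np.float32)        # 4 fake episodes
    fake_y = np.random.randint(0, 2, size=4).astype(np.float32)  # 4 fake binary labels
    # Placeholders hold no value until fed; echo them back through the graph.
    out_X, out_y = sess.run([X, y], feed_dict={X: fake_X, y: fake_y})
    print(out_X.shape, out_y.shape)  # (4, 48, 76) (4,)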
Example #2
# NOTE: the opening "from models.<module> import \" line was cut off in the original
# excerpt; the module path below is a guess, kept only so the snippet parses.
from models.multimodal import TabularEmbedding, MultiModalMultiTaskWrapper, FCTaskComponent, \
     MultiModalEncoder
import utils  # used below (get_config/get_args) but missing from the truncated excerpt
from models.loss import masked_weighted_cross_entropy_loss, masked_mse_loss
from dataloaders import MultiModal_Dataset, custom_collate_fn
import functools
import json
from tqdm import tqdm
from sklearn import metrics
from utils import BootStrap, BootStrapDecomp, BootStrapLos, BootStrapIhm, BootStrapPheno, BootStrapLtm
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D

#----------------------- Data locations -----------------------
conf = utils.get_config()
args = utils.get_args()
vectors, w2i_lookup = utils.get_embedding_dict(conf)
# Note: additional paths are defined in conf.
if conf.padding_type == 'Zero':
    vectors[utils.lookup(w2i_lookup, '<pad>')] = 0
train_val_ts_root_dir = '/home/luca/mutiltasking-for-mimic3/data/expanded_multitask/train'
test_ts_root_dir = '/home/luca/mutiltasking-for-mimic3/data/expanded_multitask/test'
train_val_text_root_dir = '/home/luca/mutiltasking-for-mimic3/data/root/train_text_ds/'
test_text_root_dir = '/home/luca/mutiltasking-for-mimic3/data/root/test_text_ds/'
train_val_tab_root_dir = '/home/luca/MultiModal-EHR/data/root/train/'
test_tab_root_dir = '/home/luca/MultiModal-EHR/data/root/test/'
train_listfile = '4k_train_listfile.csv'
val_listfile = '4k_val_listfile.csv'
test_listfile = 'test_listfile.csv'
train_val_starttime_path = conf.starttime_path_train_val
test_starttime_path = conf.starttime_path_test
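
A hedged sketch of how these paths might feed the imported MultiModal_Dataset and custom_collate_fn; the dataset's constructor arguments here are assumptions, not the project's actual signature:

import torch

# Hypothetical constructor call: the parameter names below are guesses.
train_ds = MultiModal_Dataset(ts_root_dir=train_val_ts_root_dir,
                              text_root_dir=train_val_text_root_dir,
                              tab_root_dir=train_val_tab_root_dir,
                              listfile=train_listfile,
                              starttime_path=train_val_starttime_path)
# DataLoader with a custom collate_fn is standard PyTorch.
train_loader = torch.utils.data.DataLoader(train_ds, batch_size=8, shuffle=True,
                                           collate_fn=custom_collate_fn)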
Example #3
# Imports needed by this excerpt (not shown in the original snippet):
import os
import utils
import tensorflow as tf
from datetime import datetime

conf = utils.get_config()
args = utils.get_args()

time_string = datetime.now().strftime(
    '%Y.%m.%d_%H-%M-%S') + "_" + args['problem_type']
log_folder = os.path.join(conf.log_folder, time_string)
os.makedirs(log_folder, exist_ok=True)

log = utils.get_logger(log_folder, args['log_file'])
results_csv_path = os.path.join(log_folder, 'results.csv')
header = "epoch;val_loss;val_AUCPR;val_AUCROC;val_Kappa\n"
with open(results_csv_path, 'w') as handle:
    handle.write(header)

vectors, word2index_lookup = utils.get_embedding_dict(conf, args['TEST'])

lookup = utils.lookup

model_name = args['model_name']
assert model_name in ['baseline', 'text_cnn', 'text_only']

problem_type = args['problem_type']
assert problem_type in ['los', 'decom']

# Use zero padding for the pad token instead of a random vector;
# this may work better for attention, since the pad contributes the minimum value.
if conf.padding_type == 'Zero':
    tf.logging.info("Zero Padding..")
    vectors[lookup(word2index_lookup, '<pad>')] = 0
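
The results CSV is opened with a semicolon-separated header, so per-epoch rows presumably follow the same layout; a small illustrative append (the metric values are placeholders, not real results):

epoch, val_loss, val_aucpr, val_aucroc, val_kappa = 1, 0.42, 0.31, 0.78, 0.25  # dummy values
with open(results_csv_path, 'a') as handle:
    handle.write("%d;%f;%f;%f;%f\n" % (epoch, val_loss, val_aucpr, val_aucroc, val_kappa))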