def __init__(self):
    # Load GloVe vectors and the word<->id mappings
    self.glove_path = os.path.join(
        config.data_dir,
        "glove.6B.{}d.txt".format(config.embedding_size))
    self.emb_matrix, self.word2id, self.id2word = get_glove(
        self.glove_path, config.embedding_size)

    # Paths to the preprocessed train/dev context, question and answer-span files
    self.train_context_path = os.path.join(config.data_dir, "train.context")
    self.train_qn_path = os.path.join(config.data_dir, "train.question")
    self.train_ans_path = os.path.join(config.data_dir, "train.span")
    self.dev_context_path = os.path.join(config.data_dir, "dev.context")
    self.dev_qn_path = os.path.join(config.data_dir, "dev.question")
    self.dev_ans_path = os.path.join(config.data_dir, "dev.span")
import os

from config import Config
# get_glove and maybe_download are assumed to live alongside get_data in the
# project's data utilities; adjust the import to wherever they are defined.
from data_utils import get_data, get_glove, maybe_download

config = Config()

# Embeddings plus word2index and index2word mappings; download GloVe vectors if needed
glove_path = os.path.join(config.vectors_cache,
                          "glove.6B.{}d.txt".format(config.embedding_dim))
if not os.path.exists(glove_path):
    print("\nDownloading wordvecs to {}".format(config.vectors_cache))
    if not os.path.exists(config.vectors_cache):
        os.makedirs(config.vectors_cache)
    # Last argument is the expected download size in bytes
    maybe_download(config.glove_base_url, config.glove_filename,
                   config.vectors_cache, 862182613)
emb_matrix, word2index, index2word = get_glove(glove_path, config.embedding_dim)

# Paths to the preprocessed train/dev context, question and answer-span files
train_context_path = os.path.join(config.data_dir, "train.context")
train_qn_path = os.path.join(config.data_dir, "train.question")
train_ans_path = os.path.join(config.data_dir, "train.span")
dev_context_path = os.path.join(config.data_dir, "dev.context")
dev_qn_path = os.path.join(config.data_dir, "dev.question")
dev_ans_path = os.path.join(config.data_dir, "dev.span")


def step(model, optimizer, batch):
    """
    One batch of training.
    :return: loss
    """
    # Here goes one batch of training (see the illustrative sketch below)
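
# ---------------------------------------------------------------------------
# Illustrative sketch only: one way step() above could be filled in, assuming a
# PyTorch model that maps (context_ids, qn_ids) to start/end span logits and a
# batch of (context_ids, qn_ids, spans) tensors. These names, the batch layout,
# and the cross-entropy loss are assumptions, not this project's confirmed API.
# ---------------------------------------------------------------------------
import torch
import torch.nn.functional as F


def example_step(model, optimizer, batch):
    """
    One batch of training (sketch).
    :return: scalar loss for this batch
    """
    context_ids, qn_ids, spans = batch  # spans: (batch_size, 2) start/end indices
    model.train()
    optimizer.zero_grad()

    # Forward pass: predict distributions over start and end positions
    start_logits, end_logits = model(context_ids, qn_ids)

    # Sum of cross-entropy losses for the start and end positions
    loss = (F.cross_entropy(start_logits, spans[:, 0]) +
            F.cross_entropy(end_logits, spans[:, 1]))

    # Backward pass and parameter update
    loss.backward()
    optimizer.step()
    return loss.item()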