Example 1
import argparse, math, time, warnings, copy, numpy as np, os.path as path
import utils.evals as evals
import utils.utils as utils
from utils.data_loader import process_data
import torch, torch.nn as nn, torch.nn.functional as F
import lamp.Constants as Constants
from lamp.Models import LAMP
from lamp.Translator import translate
from config_args import config_args, get_args
from pdb import set_trace as stop
from tqdm import tqdm
from runner import run_model
warnings.filterwarnings("ignore")

parser = argparse.ArgumentParser()
args = get_args(parser)
opt = config_args(args)


def main(opt):
    #========= Loading Dataset =========#
    data = torch.load(opt.data)
    vocab_size = len(data['dict']['tgt'])

    global_labels = None
    for i in range(len(data['train']['src'])):
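        # assumption based on the helper names: convert the i-th target index
        # sequence into a 1 x vocab_size multi-hot gold-label vector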
        labels = torch.tensor(data['train']['tgt'][i]).unsqueeze(0)
        labels = utils.get_gold_binary_full(labels, vocab_size)
        if global_labels is None:
            global_labels = labels
        else:
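            # assumed completion (the excerpt is truncated here): stack the
            # per-sample label vectors into one (num_samples, vocab_size) tensor
            global_labels = torch.cat((global_labels, labels), 0)
    # the remainder of main() (building the LAMP model, the optimizer, and the
    # call to run_model imported above) is not part of this excerpt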
Example 2
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

from tqdm import tqdm

# pylint: disable=import-outside-toplevel
import config_args
from mrpc_dataset import MRPCDataset

import megengine as mge
import megengine.functional as F
import megengine.optimizer as optim
from megengine.autodiff import GradManager

from official.nlp.bert.model import BertForSequenceClassification, create_hub_bert

args = config_args.get_args()
logger = mge.get_logger(__name__)


def net_eval(input_ids, segment_ids, input_mask, label_ids, net=None):
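    # inference-mode forward pass: the classifier returns (logits, loss),
    # which is reordered to (loss, logits) for the caller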
    net.eval()
    results = net(input_ids, segment_ids, input_mask, label_ids)
    logits, loss = results
    return loss, logits


def net_train(input_ids,
              segment_ids,
              input_mask,
              label_ids,
              gm=None,
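              opt=None,   # assumed names for the remaining, truncated parameters:
              net=None):  # the optimizer and the network
    # hedged completion sketch: the standard MegEngine GradManager training step
    net.train()
    with gm:  # record the forward pass for autodiff
        logits, loss = net(input_ids, segment_ids, input_mask, label_ids)
        gm.backward(loss)
    opt.step()
    opt.clear_grad()
    return loss, logits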
Example 3
import torch
import torch.nn as nn
import argparse, math, numpy as np
from load_data import get_data
from models import CTranModel
from models import CTranModelCub
from config_args import get_args
import utils.evaluate as evaluate
import utils.logger as logger
from pdb import set_trace as stop
from optim_schedule import WarmupLinearSchedule
from run_epoch import run_epoch

args = get_args(argparse.ArgumentParser())

print('Labels: {}'.format(args.num_labels))
print('Train Known: {}'.format(args.train_known_labels))
print('Test Known:  {}'.format(args.test_known_labels))

train_loader, valid_loader, test_loader = get_data(args)

if args.dataset == 'cub':
    model = CTranModelCub(args.num_labels, args.use_lmt, args.pos_emb,
                          args.layers, args.heads, args.dropout,
                          args.no_x_features)
    print(model.self_attn_layers)
else:
    model = CTranModel(args.num_labels, args.use_lmt, args.pos_emb,
                       args.layers, args.heads, args.dropout,
                       args.no_x_features)
    print(model.self_attn_layers)
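
# The excerpt ends here. A hedged sketch of the usual continuation; the optimizer
# hyper-parameters and the run_epoch signature below are assumptions, not the
# original script:
model = model.cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)  # args.lr assumed
scheduler = WarmupLinearSchedule(optimizer, args.warmup,      # assumed arguments
                                 args.epochs * len(train_loader))
for epoch in range(1, args.epochs + 1):                       # args.epochs assumed
    run_epoch(args, model, train_loader, optimizer, scheduler, epoch, train=True)   # assumed signature
    run_epoch(args, model, valid_loader, None, None, epoch, train=False)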