Example #1
import argparse

import pytorch_lightning as pl
import torch
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
from pytorch_lightning.loggers import TensorBoardLogger
from torch.nn import CrossEntropyLoss
from torch.utils.data import DataLoader
from transformers import AdamW, RobertaTokenizer, RobertaConfig
from transformers import RobertaForMaskedLM

from datasets.collate_functions import collate_to_max_length
from datasets.roberta_dataset import RobertaMaskedLMDataset
from metrics.classification import MaskedAccuracy
from utils.random_seed import set_random_seed

set_random_seed(0)


class SemiRoberta(pl.LightningModule):
    """"""
    def __init__(self, args: argparse.Namespace):
        """Initialize a model, tokenizer and config."""
        super().__init__()
        self.args = args
        if isinstance(args, argparse.Namespace):
            self.save_hyperparameters(args)
        self.tokenizer = RobertaTokenizer.from_pretrained(
            self.args.roberta_path)
        self.model = RobertaForMaskedLM.from_pretrained(self.args.roberta_path)

        self.robert_config = RobertaConfig.from_pretrained(
            self.args.roberta_path)
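
The snippet above is cut off inside __init__, so the following is a minimal, hypothetical sketch of how the module would be driven once completed. The Namespace fields and Trainer arguments here are assumptions, not part of the original script.

if __name__ == "__main__":
    # Hypothetical driver code; the `roberta_path` value and Trainer
    # settings are assumptions, not taken from the original script.
    args = argparse.Namespace(roberta_path="roberta-base")
    model = SemiRoberta(args)
    trainer = Trainer(max_epochs=1)
    # trainer.fit(model, train_dataloader)  # DataLoader construction omitted above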
Example #2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

# file: tasks/tnews/train.py

import os
import re
import logging
import argparse
import warnings
from collections import namedtuple
from utils.random_seed import set_random_seed
set_random_seed(2333)
# Suppress warnings here because pytorch-lightning may emit unhelpful UserWarnings when training multi-class tasks.
# https://github.com/PyTorchLightning/pytorch-lightning/issues/2757
warnings.filterwarnings('ignore')

import pytorch_lightning as pl
import torch
from tokenizers import BertWordPieceTokenizer
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks.model_checkpoint import ModelCheckpoint
from torch.nn import functional as F
from torch.nn import CrossEntropyLoss
from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
from transformers import AdamW, BertConfig, get_linear_schedule_with_warmup, get_polynomial_decay_schedule_with_warmup

from loss.dice_loss import DiceLoss
from loss.focal_loss import FocalLoss
from datasets.tnews_dataset import TNewsDataset
from utils.get_parser import get_parser
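
This snippet also ends inside the import block. As a hedged illustration of why three loss classes are imported, a training script like this typically selects one loss at runtime. The helper below is a sketch under that assumption; the flag values and default constructor calls are not taken from the repository's train.py.

def build_loss(loss_name: str):
    """Hypothetical loss factory: the flag names and the no-argument
    constructor calls are assumptions, not the repository's actual code."""
    if loss_name == "focal":
        return FocalLoss()
    if loss_name == "dice":
        return DiceLoss()
    return CrossEntropyLoss()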