Example No. 1
from typing import Dict

from simpletransformers.classification import ClassificationArgs


def hyperargs():  # type: () -> Dict[str, ClassificationArgs]
    """
    Builds different sets of arguments for the classifier.  Must be the same
    for training and predicting.

    :return: the argument sets, keyed by a descriptive label
    :rtype: Dict[str, ClassificationArgs]
    """

    retdict = {}

    # Sweep every combination of sliding-window length and stride fraction.
    for curwindow in [128, 64, 32, 256]:
        for curstride in [0.7, 0.8, 0.9]:
            accargs = ClassificationArgs()
            accargs.num_train_epochs = 5
            accargs.fp16 = False
            accargs.overwrite_output_dir = True
            accargs.evaluate_during_training = False
            accargs.sliding_window = True
            accargs.max_seq_length = curwindow
            accargs.stride = curstride
            accargs.labels_list = [1, 0]
            accargs.save_eval_checkpoints = False
            accargs.save_model_every_epoch = False
            accargs.silent = True
            accargs.manual_seed = 18
            retdict['basic5epochs' + str(curwindow) + 'win' +
                    str(int(curstride * 10.0)) + 'stride'] = accargs

    return retdict
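A minimal sweep loop over the configurations returned above might look like the sketch below; the roberta-base checkpoint, the output-directory naming scheme, and the train_df/eval_df data frames are illustrative assumptions, not part of the original example:

from simpletransformers.classification import ClassificationModel

for name, args in hyperargs().items():
    # Give each configuration its own output directory (hypothetical scheme).
    args.output_dir = "outputs/" + name
    model = ClassificationModel("roberta", "roberta-base", args=args)
    model.train_model(train_df)  # train_df assumed to be prepared elsewhere
    result, model_outputs, wrong_preds = model.eval_model(eval_df)
    print(name, result)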
Example No. 2
def buildbertargs():  # type: () -> ClassificationArgs
    """
  Builds arguments for the classifier.  Must be the same for
  training and predicting.

  :return: the arguments
  :rtype: ClassificationArgs
  """

    accargs = ClassificationArgs()
    accargs.num_train_epochs = 5
    accargs.fp16 = False
    accargs.overwrite_output_dir = True
    accargs.evaluate_during_training = False
    accargs.sliding_window = True
    accargs.max_seq_length = 256
    accargs.stride = 0.9
    accargs.labels_list = [1, 0]
    accargs.save_model_every_epoch = False
    accargs.silent = True
    accargs.manual_seed = 18

    return accargs
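Because the docstring requires identical arguments for training and prediction, a hedged sketch of both phases sharing buildbertargs() follows; the roberta-base checkpoint, the outputs/ load path, and the data are assumptions, not from the source:

from simpletransformers.classification import ClassificationModel

# Training phase: build a fresh model with the shared arguments.
model = ClassificationModel("roberta", "roberta-base", args=buildbertargs())
model.train_model(train_df)  # train_df assumed to hold "text" and "labels" columns

# Prediction phase: reload from the output directory with the same arguments.
trained = ClassificationModel("roberta", "outputs/", args=buildbertargs())
predictions, raw_outputs = trained.predict(["example document to classify"])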
Example No. 3
import logging

import pandas as pd
import wandb
from simpletransformers.classification import ClassificationArgs, ClassificationModel

from utils import load_rte_data_file  # local helper module (path assumed)

transformers_logger = logging.getLogger("transformers")
transformers_logger.setLevel(logging.WARNING)

# Preparing train data
train_df = load_rte_data_file("data/train.jsonl")
eval_df = pd.read_json("data/eval_df", lines=True, orient="records")
test_df = pd.read_json("data/test_df", lines=True, orient="records")

# Fixed (non-swept) model arguments for the RTE classifier.
model_args = ClassificationArgs()
model_args.eval_batch_size = 8
model_args.evaluate_during_training = True
model_args.evaluate_during_training_silent = False
model_args.evaluate_during_training_steps = 1000
model_args.learning_rate = 4e-4
model_args.manual_seed = 4
model_args.max_seq_length = 256
model_args.multiprocessing_chunksize = 5000
model_args.no_cache = True
model_args.no_save = True
model_args.num_train_epochs = 10
model_args.overwrite_output_dir = True
model_args.reprocess_input_data = True
model_args.train_batch_size = 16
model_args.gradient_accumulation_steps = 2
model_args.train_custom_parameters_only = False
model_args.labels_list = ["not_entailment", "entailment"]
model_args.wandb_project = "RTE - Hyperparameter Optimization"


def train():
    # Initialize a new wandb run
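    # --- The listing is truncated here in the source. What follows is a
    # hedged reconstruction of the usual simpletransformers + W&B sweep
    # pattern; the model type/name and the use_cuda flag are assumptions. ---
    wandb.init()

    # Merge the sweep-selected hyperparameters (wandb.config) into the
    # fixed model_args defined above.
    model = ClassificationModel(
        "roberta",
        "roberta-base",
        use_cuda=True,
        args=model_args,
        sweep_config=wandb.config,
    )

    # Train, evaluating on eval_df every evaluate_during_training_steps steps.
    model.train_model(train_df, eval_df=eval_df)

    # Sync and close this wandb run (newer wandb versions use wandb.finish()).
    wandb.join()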