def test_custom_separator(nested_dict_config):
    """Setting a custom separator changes how identifier parts are joined,
    and restoring the default separator brings back the default joining."""
    # With a custom "-" separator, identifier parts are "-"-joined.
    use_custom_separator("-")
    config = Config.from_dict(nested_dict_config)
    config.d = Config.from_dict({"d": False})
    assert config.identifier == "10-10-1x2x3-a-no_d"

    # After restoring the default, the identifier follows the library default.
    use_default_separator()
    config = Config.from_dict(nested_dict_config)
    config.d = Config.from_dict({"d": False})
    # Build the expectation from DEFAULT_SEPARATOR instead of hard-coding
    # "|", so the test stays valid if the library default ever changes
    # (matches the sibling version of this test).
    assert config.identifier == DEFAULT_SEPARATOR.join(
        "10 10 1x2x3 a no_d".split())
def test_custom_separator(nested_dict_config):
    """A custom separator shows up in the config identifier; restoring the
    default makes the identifier use DEFAULT_SEPARATOR again."""
    parts = "10 10 1x2x3 a no_d".split()

    # Custom "-" separator in effect.
    use_custom_separator("-")
    cfg = Config.from_dict(nested_dict_config)
    cfg.d = Config.from_dict({"d": False})
    assert cfg.identifier == "10-10-1x2x3-a-no_d"

    # Back to the library default.
    use_default_separator()
    cfg = Config.from_dict(nested_dict_config)
    cfg.d = Config.from_dict({"d": False})
    assert cfg.identifier == DEFAULT_SEPARATOR.join(parts)
"""Inference-script setup: CLI parsing and experiment restoration."""

import argparse
import logging
import os

import numpy as np
import pandas as pd
import torch
from torch.utils.data import DataLoader
from transformers import BertTokenizer, RobertaTokenizer

import mag
from mag.experiment import Experiment

from dataset import get_test_set
from loops import infer
from misc import target_columns, input_columns
from model import get_model_optimizer

# Join config values with "-" in mag identifiers (must run before the
# experiment is resumed below).
mag.use_custom_separator("-")

# Required CLI arguments: where the experiment lives, which checkpoint to
# load, the BERT variant, the input dataframe, and the output directory.
parser = argparse.ArgumentParser()
parser.add_argument("--experiment", type=str, required=True)
parser.add_argument("--checkpoint", type=str, required=True)
parser.add_argument("--bert_model", type=str, required=True)
parser.add_argument("--dataframe", type=str, required=True)
parser.add_argument("--output_dir", type=str, required=True)
args = parser.parse_args()

# Reload the saved experiment and its configuration from disk.
experiment = Experiment(resume_from=args.experiment)
config = experiment.config

# Suppress transformers log output below ERROR level.
logging.getLogger("transformers").setLevel(logging.ERROR)