def test_upgrade_to_latest(self):
    """Smoke-test that every bundled JSON test config still parses.

    Iterates over json_config/*.json next to this file; each file holds a
    list of test cases whose "original" entry must be accepted by
    pytext_config_from_json.
    """
    pattern = os.path.join(os.path.dirname(__file__), "json_config/*.json")
    for path in glob.iglob(pattern):
        print(f"Trying to upgrade file:{path}")
        with PathManager.open(path) as fp:
            for case in json.load(fp):
                # Successful instantiation is the assertion; the return
                # value itself is not needed.
                pytext_config_from_json(case["original"])
Example #2
0
def load_v1(state):
    """Restore a task and its config from a v1 snapshot dict."""
    config = pytext_config_from_json(state[CONFIG_JSON])
    task = create_task(
        config.task,
        metadata=state[DATA_STATE],
        model_state=state[MODEL_STATE],
    )
    return task, config
def load_v1(state):
    """Restore a task and its config from a v1 snapshot dict."""
    # Imported lazily: a module-level import would create a circular
    # import/dependency failure with .task (needs refactoring later).
    from .task import create_task

    config = pytext_config_from_json(state[CONFIG_JSON])
    task = create_task(
        config.task,
        metadata=state[DATA_STATE],
        model_state=state[MODEL_STATE],
    )
    return task, config
Example #4
0
def load_v3(state, overwrite_config=None, rank=0, world_size=1):
    """Restore a task, its config, and the training state from a v3 snapshot.

    Args:
        state: snapshot dict holding the config JSON, data/model/training
            state, and tensorizers.
        overwrite_config: when truthy, used instead of the config saved
            in the snapshot.
        rank: distributed rank forwarded to create_task.
        world_size: distributed world size forwarded to create_task.

    Returns:
        A (task, config, training_state) tuple.
    """
    saved_config = pytext_config_from_json(state[CONFIG_JSON])
    if overwrite_config:
        config = overwrite_config
        print("Use config from current task")
    else:
        config = saved_config
        print("Use config saved in snapshot")

    model_state = state[MODEL_STATE]
    training_state = state[TRAINING_STATE]
    # Prefer the tensorizers carried by the training state, falling back
    # to the ones stored directly in the snapshot.
    tensorizers = (
        training_state.tensorizers
        if training_state and training_state.tensorizers
        else state[TENSORIZERS]
    )

    # Imported lazily: a module-level import would create a circular
    # import/dependency failure with .task (needs refactoring later).
    from .task import create_task

    task = create_task(
        config.task,
        metadata=state[DATA_STATE],
        model_state=model_state,
        tensorizers=tensorizers,
        rank=rank,
        world_size=world_size,
    )

    # TODO: T53664090 @stevenliu save & load state_dict() of optimizer and scheduler
    if training_state:
        if training_state.model is None and task.model:
            training_state.model = task.model
        if training_state.optimizer and task.trainer.optimizer:
            # Per https://pytorch.org/tutorials/beginner/saving_loading_models.html
            # an unpickled optimizer object may hold a different parameter
            # copy than the freshly built model's parameters — especially
            # under mixed-precision training, where param_groups keep the
            # master-weights copy. The suggested mechanism is to build the
            # model and optimizer first, then load state_dicts into them.
            # So: take the task's freshly constructed optimizer and load
            # the saved optimizer state into it.
            optimizer = task.trainer.optimizer
            optimizer.load_state_dict(training_state.optimizer.state_dict())
            training_state.optimizer = optimizer

    return task, config, training_state
Example #5
0
def load_v2(state):
    """Restore a task and its config from a v2 snapshot dict."""
    config = pytext_config_from_json(state[CONFIG_JSON])
    task = create_task(
        config.task,
        metadata=state[DATA_STATE],
        model_state=state[MODEL_STATE],
        tensorizers=state[TENSORIZERS],
    )
    return task, config
Example #6
0
def load(load_path: str):
    """
    Load task, will construct the task using the saved config then load metadata
    and model state.

    Args:
        load_path: filesystem path to a snapshot produced by torch.save.

    Returns:
        A (task, config) tuple.

    Raises:
        ValueError: if load_path is empty or does not point to a file.
    """
    if not (load_path and os.path.isfile(load_path)):
        # Fixed: the original message ran the label and path together
        # ("...path/my/file"); add the missing space.
        raise ValueError(f"Invalid snapshot path {load_path}")
    print(f"Loading model from {load_path}...")
    # map_location keeps tensors on CPU so GPU-trained snapshots load
    # on machines without a GPU.
    state = torch.load(load_path, map_location=lambda storage, loc: storage)
    config = pytext_config_from_json(state[CONFIG_JSON])
    task = create_task(
        config.task,
        metadata=state[DATA_STATE],
        model_state=state[MODEL_STATE],
    )
    return task, config
Example #7
0
def load_v2(state):
    """Restore a task and its config from a v2 snapshot dict.

    Returns:
        A (task, config, None) tuple; the trailing None is a placeholder
        where no training state is stored in v2 snapshots.
    """
    # Imported lazily: a module-level import would create a circular
    # import/dependency failure with .task (needs refactoring later).
    from .task import create_task

    config = pytext_config_from_json(state[CONFIG_JSON])
    task = create_task(
        config.task,
        metadata=state[DATA_STATE],
        model_state=state[MODEL_STATE],
        tensorizers=state[TENSORIZERS],
    )
    return task, config, None