Example #1
def test_safe_set(config):
    config = copy.deepcopy(config)
    assert safitty.get(config, "numbers", transform=len) == 5
    safitty.set(config, "numbers", 8, value=42)
    assert safitty.get(config, "numbers", transform=len) == 9
    assert safitty.get(config, "numbers", 8) == 42

    assert safitty.set(config, "numbers2", "inner", value=[])
    assert safitty.get(config, "numbers2", "inner", transform=len) == 0
    assert safitty.set(config, "numbers", value=[])
    assert safitty.get(config, "numbers", transform=len) == 0
Example #2
def get_optimizer_momentum(optimizer: Optimizer) -> float:
    """
    Get momentum of the current optimizer.
    Args:
        optimizer: PyTorch optimizer
    Returns:
        float: momentum of the first param group
    """
    beta = safitty.get(optimizer.param_groups, 0, "betas", 0)
    momentum = safitty.get(optimizer.param_groups, 0, "momentum")
    return beta if beta is not None else momentum
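
A minimal usage sketch for the function above, assuming PyTorch is available: Adam stores its betas under param_groups[0]["betas"], while SGD stores param_groups[0]["momentum"], so both branches return a value.

import torch

params = [torch.nn.Parameter(torch.zeros(1))]
adam = torch.optim.Adam(params, lr=1e-3, betas=(0.9, 0.999))
sgd = torch.optim.SGD(params, lr=0.1, momentum=0.95)

assert get_optimizer_momentum(adam) == 0.9    # first Adam beta
assert get_optimizer_momentum(sgd) == 0.95    # plain SGD momentum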
Example #3
def parse_config_args(*, config, args, unknown_args):
    for arg in unknown_args:
        arg_name, value = arg.split("=")
        arg_name = arg_name.lstrip("-").strip("/")

        value_content, value_type = value.rsplit(":", 1)

        if "/" in arg_name:
            arg_names = arg_name.split("/")
            if value_type == "str":
                arg_value = value_content

                if arg_value.lower() == "none":
                    arg_value = None
            else:
                # e.g. "num_epochs=10:int" -> eval("int(10)") -> 10
                arg_value = eval("%s(%s)" % (value_type, value_content))

            config_ = config
            for arg_name in arg_names[:-1]:
                if arg_name not in config_:
                    config_[arg_name] = {}

                config_ = config_[arg_name]

            config_[arg_names[-1]] = arg_value
        else:
            if value_type == "str":
                arg_value = value_content
            else:
                arg_value = eval("%s(%s)" % (value_type, value_content))
            setattr(args, arg_name, arg_value)

    args_exists_ = config.get("args")
    if args_exists_ is None:
        config["args"] = dict()

    for key, value in args._get_kwargs():
        if value is not None:
            if key in ["logdir", "baselogdir"] and value == "":
                continue
            config["args"][key] = value

    autoresume = safitty.get(config, "args", "autoresume")
    if autoresume is not None and \
            safitty.get(config, "args", "logdir") is not None and \
            safitty.get(config, "args", "resume") is None:
        logdir = Path(safitty.get(config, "args", "logdir"))
        checkpoint_filename = logdir / "checkpoints" / f"{autoresume}_full.pth"
        if checkpoint_filename.is_file():
            config["args"]["resume"] = str(checkpoint_filename)
    return config, args
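
A hypothetical invocation sketch (the argument strings are invented for illustration), assuming the "--key/subkey=value:type" convention the parser above expects: nested config keys are separated by "/", and the value type follows the last ":".

import argparse

args = argparse.Namespace(logdir="", seed=42)
unknown_args = [
    "--stages/state_params/num_epochs=10:int",   # goes into the nested config
    "--verbose=True:bool",                       # goes onto the args namespace
]
config, args = parse_config_args(config={}, args=args, unknown_args=unknown_args)

# config["stages"]["state_params"]["num_epochs"] == 10
# args.verbose is True, and config["args"] now mirrors the non-empty args fields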
Example #4
def test_safe_set_2(transforms):
    transforms = copy.deepcopy(transforms)
    safitty.set(transforms, 2, "name", value="BatchNorm2d")
    assert safitty.get(transforms, 2, "name") == "BatchNorm2d"

    safitty.set(transforms, 1, "params", value="add", strategy="on_none")
    params1 = safitty.get(transforms, 1, "params")
    assert params1 is not None
    assert params1 != "add"

    safitty.set(transforms, 0, "params", value="subtract", strategy="on_none")
    params0 = safitty.get(transforms, 0, "params")
    assert params0 is not None
    assert params0 == "subtract"
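
The transforms fixture is not shown; the assertions imply a list of dicts where entry 1 already has "params" (so the "on_none" strategy leaves it untouched) and entry 0 does not (so "on_none" fills it in). A rough sketch with made-up transform names:

transforms = [
    {"name": "Normalize"},                         # no "params": on_none sets it
    {"name": "Resize", "params": {"size": 224}},   # has "params": on_none keeps it
    {"name": "Flip"},                              # entry 2: "name" simply overwritten
]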
Example #5
    def _get_flag(self, key, default=None):
        try:
            flag_obj = self._message_collection.find_one({"key": {"$eq": key}})
        except pymongo.errors.AutoReconnect:
            # wait out the Mongo auto-reconnect, then retry the read
            time.sleep(self._reconnect_timeout)
            return self._get_flag(key, default)
        flag = safitty.get(flag_obj, "value", default=default)
        return flag
Example #6
def test_safe_get_strategies(config):
    assert safitty.get(config,
                       "key",
                       "value",
                       "elem1",
                       strategy="last_container") == ["elem1", "elem2"]
    assert safitty.get(config, "key", "value", 1, "bad",
                       strategy="last_value") == "elem2"
    assert safitty.get(config,
                       "key",
                       "value",
                       1,
                       "bad",
                       strategy="last_value",
                       default="not_elem2") == "elem2"
    assert safitty.get(config,
                       "key",
                       "value",
                       3,
                       "bad",
                       strategy="last_value",
                       default="not_elem2") == ["elem1", "elem2"]

    assert safitty.get(config, "words", "none") is None
    assert safitty.get(
        config, "words", "none", default=42, strategy="missing_key") != 42
    assert safitty.get(
        config, "words", "none", default=42, strategy="missing_key") is None
Example #7
def test_safe_set_strategies(config):
    config = copy.deepcopy(config)
    safitty.set(config,
                "words",
                "quadre",
                value="four",
                strategy="existing_key")
    assert safitty.get(config, "words", "quadre") is None

    safitty.set(config, "words", "one", value="four", strategy="existing_key")
    assert safitty.get(config, "words", "one") is not None
    assert safitty.get(config, "words", "one") == "four"

    safitty.set(config, "words", "one", value="five", strategy="missing_key")
    assert safitty.get(config, "words", "one") != "five"

    safitty.set(config, "words", "five", value="five", strategy="missing_key")
    assert safitty.get(config, "words", "five") == "five"

    # cannot reset a reference
    assert safitty.set(config, value="hi") == "hi"
    assert config != "hi"

    safitty.set(config, "numbers", 40, "hi", "", value="привет")
    assert safitty.get(config, "numbers", 40, "hi", "") is not None
    assert safitty.get(config, "numbers", 40, "hi", "") == "привет"
Example #8
def main(args, unknown_args):
    """Run the ``catalyst-dl run`` script"""
    args, config = utils.parse_args_uargs(args, unknown_args)
    utils.set_global_seed(args.seed)
    utils.prepare_cudnn(args.deterministic, args.benchmark)

    Experiment, Runner = utils.import_experiment_and_runner(Path(args.expdir))

    runner_params = safitty.get(config, "runner_params", default={})
    experiment = Experiment(config)
    runner = Runner(**runner_params)

    if experiment.logdir is not None:
        utils.dump_environment(config, experiment.logdir, args.configs)
        utils.dump_code(args.expdir, experiment.logdir)

    check_run = safitty.get(config, "args", "check", default=False)
    runner.run_experiment(experiment, check=check_run)
Example #9
def main_worker(args, unknown_args):
    args, config = utils.parse_args_uargs(args, unknown_args)
    utils.set_global_seed(args.seed)
    utils.prepare_cudnn(args.deterministic, args.benchmark)

    config.setdefault("distributed_params", {})["apex"] = args.apex

    Experiment, Runner = utils.import_experiment_and_runner(Path(args.expdir))

    runner_params = safitty.get(config, "runner_params", default={})
    experiment = Experiment(config)
    runner = Runner(**runner_params)

    if experiment.logdir is not None and get_rank() <= 0:
        utils.dump_environment(config, experiment.logdir, args.configs)
        utils.dump_code(args.expdir, experiment.logdir)

    check_run = safitty.get(config, "args", "check", default=False)
    runner.run_experiment(experiment, check=check_run)
Example #10
def set_optimizer_momentum(optimizer: Optimizer, value: float, index: int = 0):
    """
    Set momentum of ``index``'th param group of optimizer to ``value``
    Args:
        optimizer: PyTorch optimizer
        value (float): new value of momentum
        index (int, optional): integer index of optimizer's param groups,
            default is 0
    """
    betas = safitty.get(optimizer.param_groups, index, "betas")
    momentum = safitty.get(optimizer.param_groups, index, "momentum")
    if betas is not None:
        _, beta = betas
        safitty.set(optimizer.param_groups,
                    index,
                    "betas",
                    value=(value, beta))
    elif momentum is not None:
        safitty.set(optimizer.param_groups, index, "momentum", value=value)
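
A short companion sketch to Example #2, again assuming PyTorch: for an Adam-style optimizer the setter rewrites only the first beta and keeps the second one.

import torch

params = [torch.nn.Parameter(torch.zeros(1))]
adam = torch.optim.Adam(params, lr=1e-3, betas=(0.9, 0.999))

set_optimizer_momentum(adam, 0.85)
assert adam.param_groups[0]["betas"] == (0.85, 0.999)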
Example #11
    def step(self, state: RunnerState):
        scheduler = state.get_key(key="scheduler",
                                  inner_key=self.scheduler_key)

        valid_metric = \
            safitty.get(state.metrics.valid_values, self.reduce_metric)
        lr, momentum = self._scheduler_step(scheduler=scheduler,
                                            valid_metric=valid_metric)

        state.set_key(lr, key="lr", inner_key=self.scheduler_key)
        state.set_key(momentum, key="momentum", inner_key=self.scheduler_key)
Example #12
    def __init__(self, config: Dict):
        self._config = deepcopy(config)
        self._initial_seed = self._config.get("args", {}).get("seed", 42)
        self._verbose = safitty.get(self._config,
                                    "args",
                                    "verbose",
                                    default=False)
        self.__prepare_logdir()

        self._config["stages"]["state_params"] = utils.merge_dicts(
            deepcopy(self._config["stages"].get("state_params", {})),
            deepcopy(self._config.get("args", {})), {"logdir": self._logdir})
        self.stages_config = self._get_stages_config(self._config["stages"])
Example #13
    def _scheduler_step(
        scheduler,
        valid_metric=None,
    ):
        if isinstance(scheduler, torch.optim.lr_scheduler.ReduceLROnPlateau):
            scheduler.step(valid_metric)
            lr = safitty.get(scheduler.optimizer.param_groups, 0, "lr")
        else:
            scheduler.step()
            lr = scheduler.get_lr()[0]

        momentum = get_optimizer_momentum(scheduler.optimizer)

        return lr, momentum
Example #14
def trace_model_from_checkpoint(
    logdir: Path,
    method_name: str,
    checkpoint_name: str,
    mode: str = "eval",
    requires_grad: bool = False,
):
    config_path = logdir / "configs" / "_config.json"
    checkpoint_path = logdir / "checkpoints" / f"{checkpoint_name}.pth"
    print("Load config")
    config: Dict[str, dict] = safitty.load(config_path)

    # Get expdir name
    config_expdir = safitty.get(config, "args", "expdir", apply=Path)
    # We will use a copy of expdir from logs for reproducibility
    expdir = Path(logdir) / "code" / config_expdir.name

    print("Import experiment and runner from logdir")
    ExperimentType, RunnerType = import_experiment_and_runner(expdir)
    experiment: Experiment = ExperimentType(config)

    print(f"Load model state from checkpoints/{checkpoint_name}.pth")
    model = experiment.get_model(next(iter(experiment.stages)))
    checkpoint = utils.load_checkpoint(checkpoint_path)
    utils.unpack_checkpoint(checkpoint, model=model)

    print("Tracing")
    traced = trace_model(
        model,
        experiment,
        RunnerType,
        method_name=method_name,
        mode=mode,
        requires_grad=requires_grad,
    )

    print("Done")
    return traced
Example #15
def test_safe_get_transformations(config):
    main_client: Client = safitty.get(config,
                                      "servers",
                                      "main-server",
                                      apply=Client)
    assert type(main_client) == Client
    assert main_client.address == "localhost:8888"
    assert main_client.password == "qwerty"
    assert safitty.get(config,
                       "servers",
                       "main-server",
                       transform=lambda x: Client(**x)) == main_client

    assert safitty.get(config, "servers", "broken-server",
                       apply=Client) is None

    with pytest.raises(TypeError):
        safitty.get(config,
                    "servers",
                    "broken-server",
                    apply=Client,
                    raise_on_transforms=True)

    with_default: Client = safitty.get(config,
                                       "servers",
                                       "with-default-pass",
                                       apply=Client)
    assert with_default is not None
    assert with_default.address == "https://github.com/TezRomacH/safitty"
    assert with_default.address == safitty.get(config, "servers",
                                               "with-default-pass", "address")
    assert with_default.password == "12345"
    assert safitty.get(config, "servers", "with-default-pass",
                       "password") is None

    assert safitty.get(config, "numbers", apply=sum_args) == 12
    with pytest.raises(TypeError):
        safitty.get(config,
                    "numbers",
                    transform=sum_args,
                    raise_on_transforms=True)

    assert safitty.get(config, "numbers",
                       transform=lambda x: sum_args(*x)) == 12

    status_bad_request = safitty.get(config,
                                     "status",
                                     transform=lambda x: x == 400)
    assert type(status_bad_request) == bool
    assert status_bad_request
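
Neither the config fixture nor the Client class appears in this snippet. A hedged reconstruction of the "servers" part that is consistent with the assertions: Client has a default password, "with-default-pass" omits the password key, and "broken-server" lacks a required field so Client(**...) raises TypeError.

from dataclasses import dataclass


@dataclass
class Client:
    address: str
    password: str = "12345"


servers_section = {
    "main-server": {"address": "localhost:8888", "password": "qwerty"},
    "with-default-pass": {"address": "https://github.com/TezRomacH/safitty"},
    "broken-server": {"password": "oops, no address"},   # missing "address" -> TypeError
}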
Example #16
import safitty
import torch

from src.models.cico.generic import GenericModel

logs = "/home/smirnvla/PycharmProjects/catalyst-classification/logs/cico15_256_arcface_64_5e-1_radam_plateu_1e-0_l2neck_stratified/"

yml_path = logs + "configs/cico.yml"
pth_path = logs + "checkpoints/best.pth"
pb_path = logs + "model.pb"
jit_path = logs + "traced.jit"
img_path = "/workspace/Datasets/CICO1.5/benchmarking_plan/v2/test/Pizza__/1_3_11_Camera_7_121.jpg"

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# `load_config` is a project-level helper, defined outside this snippet
config = load_config(yml_path)

model = GenericModel.get_from_params(
    backbone_params=safitty.get(config, 'model_params', 'backbone_params'),
    neck_params=safitty.get(config, 'model_params', 'neck_params'),
    heads_params=safitty.get(config, 'model_params', 'heads_params'))

checkpoint = torch.load(pth_path, map_location=device)

model.load_state_dict(checkpoint['model_state_dict'])
model.to(device)
model.eval()

# torch.save(model, pb_path)
torch.jit.save(torch.jit.trace(model, torch.rand(1, 3, 224, 224)), jit_path)
model = torch.jit.load(jit_path)
# model = torch.load(pb_path)
# model.to(device)
Example #17
def trace_model_from_checkpoint(
    logdir: Path,
    method_name: str,
    checkpoint_name: str,
    stage: str = None,
    loader: Union[str, int] = None,
    mode: str = "eval",
    requires_grad: bool = False,
    opt_level: str = None,
    device: Device = "cpu",
):
    """
    Traces a model using the created experiment and runner.

    Args:
        logdir (Union[str, Path]): Path to Catalyst logdir with model
        checkpoint_name (str): Name of model checkpoint to use
        stage (str): experiment's stage name
        loader (Union[str, int]): experiment's loader name or its index
        method_name (str): Model's method name that will be
            used as entrypoint during tracing
        mode (str): Mode for model to trace (``train`` or ``eval``)
        requires_grad (bool): Flag to use grads
        opt_level (str): AMP FP16 init level
        device (str): Torch device

    Returns:
        the traced model
    """
    config_path = logdir / "configs" / "_config.json"
    checkpoint_path = logdir / "checkpoints" / f"{checkpoint_name}.pth"
    print("Load config")
    config: Dict[str, dict] = safitty.load(config_path)
    runner_params = config.pop("runner_params", {}) or {}

    # Get expdir name
    config_expdir = safitty.get(config, "args", "expdir", apply=Path)
    # We will use a copy of expdir from logs for reproducibility
    expdir = Path(logdir) / "code" / config_expdir.name

    print("Import experiment and runner from logdir")
    ExperimentType, RunnerType = utils.import_experiment_and_runner(expdir)
    experiment: Experiment = ExperimentType(config)

    print(f"Load model state from checkpoints/{checkpoint_name}.pth")
    if stage is None:
        stage = list(experiment.stages)[0]

    model = experiment.get_model(stage)
    checkpoint = utils.load_checkpoint(checkpoint_path)
    utils.unpack_checkpoint(checkpoint, model=model)

    runner: RunnerType = RunnerType(**runner_params)
    runner.model, runner.device = model, device

    if loader is None:
        loader = 0
    batch = experiment.get_native_batch(stage, loader)

    print("Tracing")
    traced = trace.trace_model(
        model=model,
        runner=runner,
        batch=batch,
        method_name=method_name,
        mode=mode,
        requires_grad=requires_grad,
        opt_level=opt_level,
        device=device,
    )

    print("Done")
    return traced
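
A hypothetical call (the logdir path is made up), assuming the standard Catalyst logdir layout the function reads: configs/_config.json, checkpoints/<name>.pth, and a code/ copy of the experiment directory.

from pathlib import Path

traced = trace_model_from_checkpoint(
    logdir=Path("logs/my_experiment"),
    method_name="forward",
    checkpoint_name="best",
    mode="eval",
)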
Example #18
# @TODO: add metrics support 
# (catalyst expects logits, rather than sigmoid outputs)
# metrics = [
#     smp.utils.metrics.IoUMetric(eps=1.),
#     smp.utils.metrics.FscoreMetric(eps=1.),
# ]

if __name__ == '__main__':
    args = parse_args()
    config = safitty.load(args.config_path)

    runner = SupervisedWandbRunner()

    model = get_model(
        model_name=safitty.get(config, 'model', 'name', default='unet'),
        model_params=safitty.get(config, 'model', 'params', default={}))

    criterion = get_criterion(
        criterion_name=safitty.get(config, 'criterion', 'name', default='bce_dice'),
        criterion_params=safitty.get(config, 'criterion', 'params', default={}))

    optimizer = get_optimizer(
        optimizer_name=safitty.get(config, 'optimizer', 'name', default='adam'),
        optimizer_params=safitty.get(config, 'optimizer', 'params', default={}),
        model_=model)

    scheduler = get_scheduler(
        scheduler_name=safitty.get(config, 'scheduler', 'name', default='reduce_on_plateau'),
        scheduler_params=safitty.get(config, 'scheduler', 'params', default={}),
        optimizer_=optimizer)
Example #19
def test_safe_get(config):
    assert safitty.get(config) == config
    assert isinstance(safitty.get(config, "words"), dict)
    assert safitty.get(config, "words", "one") == "uno"
    assert safitty.get(config, "words", "one", "two") is None
    assert safitty.get(config, "words", "one", "two", default="hi") == "hi"
    assert safitty.get(config, "words", "none") is None
    assert safitty.get(config, "words", "none", default=42) == 42

    assert safitty.get(config, "key") is not None
    assert safitty.get(config, "key", 0) is None
    assert safitty.get(config) is not None
    assert safitty.get(config, "keyu", default="value") == "value"
    assert safitty.get(config, "key", "value", 0) is not None
    assert safitty.get(config, "key", "value", 0) == "elem1"
    assert safitty.get(config, "key", "value", 1) == "elem2"
    assert safitty.get(config, "key", "value", 2) is None
    assert safitty.get(config, "key", "value", 2, default="elem2") == "elem2"
    assert safitty.get(config,
                       "key",
                       "value",
                       2,
                       "deep",
                       1,
                       1,
                       "other",
                       default="elem2") == "elem2"
    assert safitty.get(config, None) is None
    assert safitty.get(config, None, default=True)
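
The config fixture is again external to the snippet; the assertions pin down at least this shape (the "two": "dos" entry is an invented filler):

config = {
    "words": {"one": "uno", "two": "dos"},    # no "none" key on purpose
    "key": {"value": ["elem1", "elem2"]},
}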
Example #20
    def _get_flag(self, key, default=None):
        flag_obj = self._messages_collection.find_one({"key": {"$eq": key}})
        flag = safitty.get(flag_obj, "value", default=default)
        return flag