Example 1
import logging
import time
import weakref
from typing import Dict

import numpy as np

import torch

from cvpods.utils import comm
from cvpods.utils.dump.events import EventStorage, get_event_storage
from cvpods.utils.registry import Registry

from .hooks import HookBase

RUNNERS = Registry("runners")
logger = logging.getLogger(__name__)


@RUNNERS.register()
class RunnerBase:
    """
    Base class for iterative runner with hooks.

    The only assumption we made here is: the training runs in a loop.
    A subclass can implement what the loop is.
    We made no assumptions about the existence of dataloader, optimizer, model, etc.

    Attributes:
        iter(int): the current iteration.
    """
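
The snippet above stops at the class docstring; the loop itself is not shown. To make the described pattern concrete, the following standalone sketch (not the cvpods implementation) shows a minimal hook-driven loop. The ToyRunner, run_step, before_step and after_step names are assumptions modelled on the HookBase convention.

class ToyRunner:
    """Illustrative only: a minimal iterative runner in the spirit of RunnerBase."""

    def __init__(self, max_iter, hooks=None):
        self.iter = 0                   # the current iteration
        self.max_iter = max_iter
        self.hooks = list(hooks or [])  # HookBase-style objects

    def train(self):
        # The only assumption is that training runs in a loop;
        # hooks get a callback around every step.
        for self.iter in range(self.iter, self.max_iter):
            for h in self.hooks:
                h.before_step()
            self.run_step()             # subclasses define what one step means
            for h in self.hooks:
                h.after_step()

    def run_step(self):
        raise NotImplementedError
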
Example 2
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (C) 2019-2021 Megvii Inc. All rights reserved.
from torch.optim import lr_scheduler

from cvpods.utils.registry import Registry

from .lr_scheduler import PolyLR, WarmupCosineLR, WarmupMultiStepLR

SCHEDULER_BUILDER = Registry("LRScheduler builder")


@SCHEDULER_BUILDER.register()
class BaseSchedulerBuilder:

    @staticmethod
    def build(optimizer, cfg, **kwargs):
        raise NotImplementedError


@SCHEDULER_BUILDER.register()
class WarmupMultiStepLRBuilder(BaseSchedulerBuilder):

    @staticmethod
    def build(optimizer, cfg, **kwargs):
        scheduler = WarmupMultiStepLR(
            optimizer,
            cfg.SOLVER.LR_SCHEDULER.STEPS,
            cfg.SOLVER.LR_SCHEDULER.GAMMA,
            warmup_factor=cfg.SOLVER.LR_SCHEDULER.WARMUP_FACTOR,
            warmup_iters=cfg.SOLVER.LR_SCHEDULER.WARMUP_ITERS,
        )
        return scheduler
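
A hedged usage sketch for the registry above: builders are registered under their class names, so a config-driven helper could look one up by the scheduler name plus the "Builder" suffix and delegate to it. Registry.get(name) and the cfg.SOLVER.LR_SCHEDULER.NAME key are assumptions modelled on detectron2/fvcore-style registries, not verified against this repository.

def build_lr_scheduler(cfg, optimizer):
    # Hypothetical helper: resolve the builder class by name and delegate.
    name = cfg.SOLVER.LR_SCHEDULER.NAME            # e.g. "WarmupMultiStepLR"
    builder = SCHEDULER_BUILDER.get(name + "Builder")
    return builder.build(optimizer, cfg)
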
Example 3
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) BaseDetection, Inc. and its affiliates. All Rights Reserved

from typing import Any, Dict, List, Set

import torch
from torch import optim

from cvpods.utils.registry import Registry

OPTIMIZER_BUILDER = Registry("Optimizer builder")

NORM_MODULE_TYPES = (
    torch.nn.BatchNorm1d,
    torch.nn.BatchNorm2d,
    torch.nn.BatchNorm3d,
    torch.nn.SyncBatchNorm,
    # NaiveSyncBatchNorm inherits from BatchNorm2d
    torch.nn.GroupNorm,
    torch.nn.InstanceNorm1d,
    torch.nn.InstanceNorm2d,
    torch.nn.InstanceNorm3d,
    torch.nn.LayerNorm,
    torch.nn.LocalResponseNorm,
)


@OPTIMIZER_BUILDER.register()
class OptimizerBuilder:
    @staticmethod
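    # The original file is truncated here, before the body of build. What
    # follows is only a sketch of what such an optimizer builder typically does
    # in detectron2-style codebases: give the parameters of normalization layers
    # (NORM_MODULE_TYPES) their own weight decay and hand the parameter groups
    # to a torch optimizer. The cfg keys under SOLVER.OPTIMIZER (BASE_LR,
    # MOMENTUM, WEIGHT_DECAY, WEIGHT_DECAY_NORM) are assumptions, not the
    # verified cvpods schema.
    def build(model, cfg):
        params: List[Dict[str, Any]] = []
        seen: Set[torch.nn.parameter.Parameter] = set()
        for module in model.modules():
            for p in module.parameters(recurse=False):
                if not p.requires_grad or p in seen:
                    continue
                seen.add(p)
                weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY
                if isinstance(module, NORM_MODULE_TYPES):
                    # norm layers commonly get a smaller (often zero) decay
                    weight_decay = cfg.SOLVER.OPTIMIZER.WEIGHT_DECAY_NORM
                params.append({"params": [p], "weight_decay": weight_decay})
        return optim.SGD(
            params,
            lr=cfg.SOLVER.OPTIMIZER.BASE_LR,
            momentum=cfg.SOLVER.OPTIMIZER.MOMENTUM,
        )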
Example 4
import torch
from torch.optim import Optimizer
from torch.optim.lr_scheduler import _LRScheduler

from cvpods.utils.registry import Registry

OPTIMIZERS = Registry("optimizers")
LR_SCHEDULERS = Registry("lr_schedulers")

# Auto-register every optimizer class shipped with torch.optim so it can later
# be looked up from the OPTIMIZERS registry by its class name (e.g. "SGD").
for attr in dir(torch.optim):
    optim = getattr(torch.optim, attr)
    try:
        if issubclass(optim, Optimizer):
            OPTIMIZERS.register(optim)
    except TypeError:
        # dir() also yields functions and submodules, which issubclass rejects.
        continue

# Likewise register every learning-rate scheduler shipped with torch.
for attr in dir(torch.optim.lr_scheduler):
    lrs = getattr(torch.optim.lr_scheduler, attr)
    try:
        if issubclass(lrs, _LRScheduler):
            LR_SCHEDULERS.register(lrs)
    except TypeError:
        continue
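
With both registries populated, client code could resolve optimizers and schedulers by the names of the underlying torch classes. A small usage sketch, assuming Registry.get(name) behaves like the detectron2/fvcore registries this code mirrors:

import torch.nn as nn

model = nn.Linear(8, 2)
optimizer = OPTIMIZERS.get("SGD")(model.parameters(), lr=0.1)
scheduler = LR_SCHEDULERS.get("MultiStepLR")(optimizer, milestones=[30, 60], gamma=0.1)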