def register():
    """Register custom models, loss, callback and scheduler with catalyst."""
    registry.Model(Pretrained)
    registry.Criterion(RingLoss)
    registry.Callback(InferBestCallback)
    registry.Scheduler(OneCycleCosineAnnealLR)

    # segmentation
    registry.Model(Unet)
    registry.Model(ResnetLinknet)
    registry.Model(MobileUnet)
    registry.Model(ResnetUnet)
    registry.Model(ResnetFPNUnet)
    registry.Model(ResnetPSPnet)
    registry.Model(FPNUnet)
    registry.Model(Linknet)
    registry.Model(PSPnet)
def register():
    """Register custom components with catalyst, tolerating optional deps.

    The mlcomp-provided models are registered only when mlcomp (and its
    optional submodules) can actually be imported.
    """
    registry.Criterion(RingLoss)
    registry.Callback(InferBestCallback)
    registry.Scheduler(OneCycleCosineAnnealLR)

    # classification — best-effort: skip silently if mlcomp is unavailable
    try:
        from mlcomp.contrib.model import Pretrained
        registry.Model(Pretrained)
    except Exception:
        pass

    # segmentation
    registry.Model(Unet)
    registry.Model(ResnetLinknet)
    registry.Model(MobileUnet)
    registry.Model(ResnetUnet)
    registry.Model(ResnetFPNUnet)
    registry.Model(ResnetPSPnet)
    registry.Model(FPNUnet)
    registry.Model(Linknet)
    registry.Model(PSPnet)
    registry.Model(ResNetLinknet)

    # best-effort: segmentation_models_pytorch wrapper is optional
    try:
        from mlcomp.contrib.model.segmentation_model_pytorch import \
            SegmentationModelPytorch
        registry.Model(SegmentationModelPytorch)
    except Exception:
        pass
# NOTE(review): the head of this import (and the ``registry`` import) was
# missing from the source as given; the module path below is reconstructed
# from the imported names and the sibling ``src.callbacks.*`` imports —
# confirm against the repository.
from catalyst.dl import registry
from src.callbacks.gan import (
    DiscriminatorLoss,
    DiscriminatorOptimizerCallback,
    GANLoss,
    GeneratorOptimizerCallback,
    IdenticalGANLoss,
    PrepareDiscriminatorPhase,
    PrepareGeneratorPhase,
)
from src.callbacks.visualization import LogImageCallback
from src.experiments.train.train_experiment import Experiment
from src.modules.discriminator import NLayerDiscriminator, PixelDiscriminator
from src.modules.generator import Generator
from src.modules.loss import LSGanLoss
from src.runner import CycleGANRunner as Runner

# Register CycleGAN components with the catalyst registry.
registry.Model(Generator)
registry.Model(PixelDiscriminator)
registry.Model(NLayerDiscriminator)
registry.Criterion(LSGanLoss)
# NOTE(review): CycleGANLoss is registered but never imported in the
# original — this raises NameError at import time; confirm where it lives.
registry.Callback(CycleGANLoss)
registry.Callback(GANLoss)
registry.Callback(IdenticalGANLoss)
registry.Callback(PrepareGeneratorPhase)
registry.Callback(GeneratorOptimizerCallback)
# fix: PrepareGeneratorPhase was registered twice; the duplicate
# registration (which can raise on a name collision) is removed.
registry.Callback(PrepareDiscriminatorPhase)
registry.Callback(DiscriminatorLoss)
registry.Callback(DiscriminatorOptimizerCallback)
# flake8: noqa
# isort:skip_file
from catalyst.dl import registry, SupervisedRunner as Runner
from .callbacks import PredictionCallback
from .experiment import Experiment
from .model import MultiHeadNet
from efficientnet_pytorch import EfficientNet
from catalyst.contrib.models.cv import ResnetEncoder

# Register models; the EfficientNet factory function is registered under
# an explicit name so configs can refer to it as 'EfficientNet'.
registry.Model(MultiHeadNet)
registry.Model(EfficientNet.from_pretrained, name='EfficientNet')
registry.Model(ResnetEncoder)
registry.Callback(PredictionCallback)
# NOTE(review): the head of this import (and the ``registry`` import) was
# missing from the source as given; the model import below is reconstructed
# from the names registered further down — confirm the real module path.
from catalyst.dl import registry
from .models import (
    LinearModel,
    LSTM_GRU,
    MultiInputLstm,
    MultiInputLstmGru,
    MultiInputLstmGruAttention,
    TransfModel,
    PooledTransfModel,
    PooledLstmTransfModel,
    PooledTransfModelWithCatericalFeatures,
    PTCFS,
    PTM,
    PTC,
    TwoSidedPooledTransformer,
    PCTCFS,
    patch_model_with_embedding,
    model_from_checkpoint,
    unfreezed_transf,
)
from .callbacks import (
    PrecisionCallback,
    RecallCallback,
    F1Callback,
    FBetaCallback,
    SpearmanScoreCallback,
)
from .experiment import Experiment

registry.Model(LinearModel)
registry.Model(LSTM_GRU)
registry.Model(MultiInputLstm)
registry.Model(MultiInputLstmGru)
registry.Model(MultiInputLstmGruAttention)
registry.Model(TransfModel)
registry.Model(PooledTransfModel)
registry.Model(PooledLstmTransfModel)
registry.Model(PooledTransfModelWithCatericalFeatures)
registry.Model(PTCFS)
registry.Model(PTM)
registry.Model(PTC)
registry.Model(TwoSidedPooledTransformer)
registry.Model(PCTCFS)

# functions
registry.Model(patch_model_with_embedding)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from models import *
from losses import *
from callbacks import *
from optimizers import *
from schedulers import *
from segmentation_models_pytorch import Unet as smpUnet

# --- models ---
registry.Model(UNet3D)
registry.Model(UNet3D2)
registry.Model(ResidualUNet3D)
registry.Model(VNet)
registry.Model(smpUnet)
registry.Model(DeepLab)

# HACK: clears the registry's private late-add callback list — this
# reaches into catalyst internals; presumably works around duplicate
# late registration. TODO confirm it is still needed.
registry.MODELS._late_add_callbacks = []

# --- callbacks ---
registry.Callback(MultiDiceCallback)

# --- criterions ---
registry.Criterion(MultiDiceLoss)

# --- optimizers ---
# registry.Optimizer(AdamW)
# registry.Optimizer(Nadam)
from catalyst.dl import registry
from torch_optimizer import Ranger

from .callbacks import (
    CosineLossCallback,
    KLDivLossCallback,
    MaskedLanguageModelCallback,
    MSELossCallback,
    PerplexityMetricCallbackDistillation,
    CarbontrackerCallback,
)
from .experiment import Experiment  # noqa: F401
from .models import BertForMLM, DistilbertStudentModel
from .runners import DistilMLMRunner as Runner  # noqa: F401

# Register the distillation teacher/student models plus their callbacks.
registry.Model(BertForMLM)
registry.Model(DistilbertStudentModel)
registry.Optimizer(Ranger)
registry.Callback(CosineLossCallback)
registry.Callback(MaskedLanguageModelCallback)
registry.Callback(KLDivLossCallback)
registry.Callback(MSELossCallback)
registry.Callback(PerplexityMetricCallbackDistillation)
registry.Callback(CarbontrackerCallback)
from catalyst.dl import registry

from .runner import Runner
from .experiment import Experiment
from .callbacks import CharErrorRateCallback
from .optimizers import SWA
from .models import (
    LightLSTM,
    DeepSpeech,
    DeepSpeechV2,
    LightConv,
    LookaheadLSTM,
)

# Register the speech-recognition models, CER metric callback and SWA.
registry.Callback(CharErrorRateCallback)
registry.Model(LightLSTM)
registry.Model(DeepSpeech)
registry.Model(DeepSpeechV2)
registry.Model(LightConv)
registry.Model(LookaheadLSTM)
registry.Optimizer(SWA)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from .callbacks import MyLossCallback, IterCheckpointCallback
from .models import Net, FewShotModel
from .losses import *

# Register the few-shot models, loss/checkpoint callbacks and criterion.
registry.Model(Net)
registry.Model(FewShotModel)
registry.Callback(MyLossCallback)
registry.Callback(IterCheckpointCallback)
registry.Criterion(FocalLoss)
# Register components only when catalyst is importable; otherwise this
# module degrades gracefully to the production environment.
try:
    from catalyst.dl import SupervisedRunner as Runner
    from .experiment import Experiment
    from .model import MNISTNet
    from catalyst.dl import registry
    from .callbacks.infer_callback import MNISTInferCallback

    registry.Model(MNISTNet)
except ImportError:
    print("Catalyst not found. Loading production environment")
import sys
import warnings

from catalyst.dl import registry
from .runner import Runner
from .experiment import Experiment
from .models import BertBasedMLM
from .callbacks import PerplexityCallback

# Silence warnings unless the user explicitly enabled them via -W.
if not sys.warnoptions:
    warnings.simplefilter("ignore")

registry.Model(BertBasedMLM)
registry.Callback(PerplexityCallback)
from src.callbacks import (
    DiscriminatorLossCallback,
    GeneratorLossCallback,
    GenerateAudioCallback,
    ShuffleDatasetCallback,
)
from src.models import Generator, Discriminator
from src.runner import MelGANRunner as Runner
from catalyst.dl import registry
from src.experiment import Experiment

# Register the MelGAN generator/discriminator and training callbacks.
registry.Model(Generator)
registry.Model(Discriminator)
registry.Callback(GeneratorLossCallback)
registry.Callback(DiscriminatorLossCallback)
registry.Callback(GenerateAudioCallback)
registry.Callback(ShuffleDatasetCallback)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from catalyst.dl import SupervisedRunner as Runner
from .tsn import tsn

# Register the temporal-segment-network factory.
registry.Model(tsn)
# flake8: noqa
from .experiment import Experiment
from catalyst.dl import registry
from catalyst.dl import SupervisedRunner as Runner
from src.callbacks.tensorboard import VisualizationCallback, ProjectorCallback
from src.callbacks.cico.doe import DoECallback
from src.callbacks.cico.benchmark import BenchmarkingCallback
from src.models.cico.generic import GenericModel
from src.schedulers.cosine import CosineAnnealingWarmUpRestarts
from src.losses.cico.arcface import ArcFaceLinear, ArcFaceLoss, L2Norm
from src.losses.cico.triplet import TripletSemiHardLoss

# Register model, embedding modules, metric-learning losses, callbacks
# and the warm-restart scheduler with the catalyst registry.
registry.Model(GenericModel)
registry.Module(L2Norm)
registry.Module(ArcFaceLinear)
registry.Criterion(ArcFaceLoss)
registry.Criterion(TripletSemiHardLoss)
registry.Callback(VisualizationCallback)
registry.Callback(ProjectorCallback)
registry.Callback(DoECallback)
registry.Callback(BenchmarkingCallback)
registry.Scheduler(CosineAnnealingWarmUpRestarts)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from .callbacks import *
from .models import *
from .losses import *

# Register the few-shot / finetune models and training callbacks.
registry.Model(Net)
registry.Model(FewShotModel)
registry.Model(Finetune)
registry.Callback(MixupLossCallback)
registry.Callback(IterCheckpointCallback)
registry.Criterion(FocalLoss)
from .experiment import Experiment
from .optimizers import PlainRAdam, AdamW
from .metrics import (
    MeanDiceCallback,
    AllAccuracyCallback,
    F1Callback,
    FBetaCallback,
)
from .losses import (
    JointLoss,
    CCE,
    BinaryDiceLoss,
    BinaryDiceLogLoss,
    MulticlassDiceLoss,
    TverskyLoss,
    DiceAndBCE,
    FocalLossMultiChannel,
    FocalAndBCE,
)

# fix: ``registry`` was used below without ever being imported, which
# raises NameError as soon as this module is loaded.
from catalyst.dl import registry

# NOTE(review): none of the model classes registered below (ResUnet,
# UNetResNet, ...) are imported anywhere in this module as given; the
# wildcard import is a reconstruction — confirm the real module path.
from .models import *

registry.Model(ResUnet)
registry.Model(UNetResNet)
registry.Model(LinkNet34)
registry.Model(DenseNetDetector)
registry.Model(ResnetDetector)
registry.Model(resnet34)
registry.Model(SCseUnet)
registry.Model(ResUnetScSeDecoded)
registry.Model(QUnet)
registry.Model(EfficientUnet)
registry.Model(PretrainedResnet)
registry.Model(PretrainedDensenet)
registry.Model(ModelFromCheckpoint)
registry.Callback(MeanDiceCallback)
registry.Callback(AllAccuracyCallback)
# flake8: noqa
from catalyst.dl import registry, SupervisedRunner as Runner
from .experiment import Experiment
from .net import Net

# Register the single project model.
registry.Model(Net)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from models import *
from losses import *
from callbacks import *
from optimizers import *

# --- models ---
registry.Model(CNNFinetuneModels)
registry.Model(TIMMModels)
registry.Model(MultiModals)

# --- callbacks / criterion / optimizers ---
registry.Callback(MultiTaskCriterionCallback)
registry.Criterion(LogLoss)
registry.Optimizer(Nadam)
registry.Optimizer(AdamW)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from .model import MultiHeadNet

# Register the multi-head network.
registry.Model(MultiHeadNet)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from models import *
from losses import *
from callbacks import *
from optimizers import *

# --- models ---
registry.Model(ResNet)
registry.Model(cell_senet)
registry.Model(cell_densenet)

# --- callbacks ---
registry.Callback(LabelSmoothCriterionCallback)

# --- criterions ---
registry.Criterion(LabelSmoothingCrossEntropy)

# --- optimizers ---
registry.Optimizer(AdamW)
registry.Optimizer(Nadam)
registry.Optimizer(RAdam)
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from .callbacks import (
    KappaCallback,
    KappaCriterionCallback,
    SmoothCCECallback,
    OrdinalCriterionCallback,
)
from .model import (
    resnet34,
    resnet34_pretrained,
    resnext50_pretrained,
    efficientnet_pretrained,
    cadene_model,
    ordinal_efficientnet,
)

# Register model factories and metric/criterion callbacks.
registry.Model(resnet34)
registry.Model(resnet34_pretrained)
registry.Model(resnext50_pretrained)
registry.Model(efficientnet_pretrained)
registry.Model(cadene_model)
registry.Model(ordinal_efficientnet)
registry.Callback(KappaCallback)
registry.Callback(KappaCriterionCallback)
registry.Callback(SmoothCCECallback)
registry.Callback(OrdinalCriterionCallback)
# flake8: noqa
# isort:skip_file
from catalyst.dl import registry, SupervisedRunner as Runner
from .experiment import Experiment
from .model import get_model_classification

# Register the model factory function.
registry.Model(get_model_classification)
# fix: ``os`` was used below without being imported.
import os

import experiments as exp

# Pick the runner implementation from environment flags; the first
# enabled tracking backend (wandb / neptune / alchemy) wins.
if os.environ.get("USE_WANDB", "0") == "1":
    from catalyst.dl import SupervisedWandbRunner as Runner
elif os.environ.get("USE_NEPTUNE", "0") == "1":
    from catalyst.dl import SupervisedNeptuneRunner as Runner
elif os.environ.get("USE_ALCHEMY", "0") == "1":
    from catalyst.dl import SupervisedAlchemyRunner as Runner
else:
    from catalyst.dl import SupervisedRunner as Runner

# fix: ``registry`` was used below without ever being imported.
from catalyst.dl import registry

from .models import (TIMMModels, TIMMetricLearningMModels, proxy_model, SSUnet)
from .callbacks import (MultiDiceCallback, MacroF2ScoreCallback)
from .losses import (LabelSmoothingCrossEntropy, MetricLearningLoss, MultiDiceLoss)

# NOTE(review): ``m`` was undefined in the original; ``import models as m``
# is a reconstruction inferred from the ``add_from_module`` usage and the
# sibling ``import experiments as exp`` — confirm against the repo.
import models as m

registry.MODELS.add_from_module(m)
registry.EXPERIMENTS.add_from_module(exp)
registry.Model(TIMMModels)
registry.Model(TIMMetricLearningMModels)
registry.Model(proxy_model)
registry.Model(SSUnet)
registry.Callback(MultiDiceCallback)
registry.Callback(MacroF2ScoreCallback)
registry.Criterion(LabelSmoothingCrossEntropy)
registry.Criterion(MetricLearningLoss)
registry.Criterion(MultiDiceLoss)
# flake8: noqa
# pylint: disable=unused-import
from catalyst.dl import registry
from transformers import AdamW, WarmupLinearSchedule
from .experiment import Experiment
from .catalyst_ext.runner import BertSupervisedRunner as Runner
from .model_wrapper import BertModel
from .catalyst_ext.bert_criterion import BertCrossEntropyLoss, BertCriterionCallback

# Register BERT components; the transformers AdamW is registered under a
# distinct name to avoid clashing with torch's AdamW in configs.
registry.Model(BertModel)
registry.Criterion(BertCrossEntropyLoss)
registry.Callback(BertCriterionCallback)
registry.Optimizer(AdamW, name='TransformersAdamW')
registry.Scheduler(WarmupLinearSchedule)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from models import *
from losses import *
from callbacks import *
from optimizers import *
from schedulers import *

# --- models ---
registry.Model(ResNet)
registry.Model(cell_senet)
registry.Model(cell_densenet)
registry.Model(SENetGrouplevel)
registry.Model(EfficientNet)
registry.Model(SENetTIMM)
registry.Model(InceptionV3TIMM)
registry.Model(GluonResnetTIMM)
registry.Model(DSInceptionV3)
registry.Model(DSSENet)
registry.Model(DSResnet)
registry.Model(ResNet50CutMix)
registry.Model(Fishnet)
registry.Model(SENetCellType)
registry.Model(SENetCellMultipleDropout)
registry.Model(MixNet)

# --- callbacks ---
registry.Callback(LabelSmoothCriterionCallback)
registry.Callback(SmoothMixupCallback)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from .callbacks import *
from .models import *
from .losses import *

# --- model & callbacks ---
registry.Model(Finetune)
registry.Callback(F1Callback)
registry.Callback(FbetaCallback)
registry.Callback(MixupLossCallback)
registry.Callback(IterCheckpointCallback)

# --- losses ---
registry.Criterion(FocalLoss)
registry.Criterion(FbetaLoss)
registry.Criterion(BCEAndFbeta)
registry.Criterion(BCEFbetaFocalLoss)
# flake8: noqa
# pylint: disable=unused-import
from catalyst.contrib.models.nlp import BertClassifier
from catalyst.dl import registry, SupervisedRunner as Runner
from .experiment import Experiment

# Register catalyst's contrib BERT classifier.
registry.Model(BertClassifier)
from .experiment import Experiment
# NOTE(review): this Runner import is immediately shadowed by the
# CustomRunner import below and is therefore dead; kept for safety.
from catalyst.dl import SupervisedRunner as Runner
from catalyst.dl import registry
from torch.optim import Adam
from torch.nn.functional import cross_entropy
import gensim
from .vectorizer_orig import Vectorizer
from .model import LstmCrf
# from callbacks.nll_loss import CrfNllCallback
from .runner import CustomRunner as Runner

registry.Model(LstmCrf)
registry.Optimizer(Adam)
# fix: the original called ``cross_entropy()`` with no arguments, which
# raises TypeError at import time (input/target are required); the
# registry needs the callable itself, not its result.
registry.Criterion(cross_entropy)
from catalyst.dl import registry
from catalyst.dl import SupervisedRunner as Runner
from .models import SimpleNet
from .callbacks import DoSomethingWithDataCallback
from .experiment import Experiment

# Register the demo model and its data callback.
registry.Model(SimpleNet)
registry.Callback(DoSomethingWithDataCallback)
# flake8: noqa
from catalyst.dl import registry
from .experiment import Experiment
from .runner import ModelRunner as Runner
from models import *
from losses import *
from callbacks import *
from optimizers import *
from schedulers import *
from segmentation_models_pytorch import Unet as smpUnet
from segmentation_models_pytorch import FPN
import torchvision

# --- models ---
registry.Model(UNet3D)
registry.Model(UNet3D2)
registry.Model(ResidualUNet3D)
registry.Model(VNet)
registry.Model(smpUnet)
registry.Model(FPN)
# registry.Model(DeepLab)

# HACK: clears the registry's private late-add callback list — this
# reaches into catalyst internals; presumably works around duplicate
# late registration. TODO confirm it is still needed.
registry.MODELS._late_add_callbacks = []

# --- callbacks ---
registry.Callback(MultiDiceCallback)

# --- criterions ---
registry.Criterion(MultiDiceLoss)

# --- optimizers ---