# Wire project components into the catalyst registry so experiment
# configs can reference them by class name.

# Models
for _model in (
    EfficientNet,
    SENetTIMM,
    InceptionV3TIMM,
    GluonResnetTIMM,
    DSInceptionV3,
    DSSENet,
    DSResnet,
    ResNet50CutMix,
    Fishnet,
    SENetCellType,
    SENetCellMultipleDropout,
    MixNet,
):
    registry.Model(_model)

# Register callbacks
for _callback in (
    LabelSmoothCriterionCallback,
    SmoothMixupCallback,
    DSAccuracyCallback,
    DSCriterionCallback,
    SlackLogger,
    TwoHeadsCriterionCallback,
    DSMixupCallback,
):
    registry.Callback(_callback)

# Register criterions
registry.Criterion(LabelSmoothingCrossEntropy)

# Register optimizers
for _optimizer in (AdamW, Nadam, RAdam):
    registry.Optimizer(_optimizer)

# Register schedulers
registry.Scheduler(CyclicLRFix)
# Registration module: exposes the distillation MLM models, optimizer and
# callbacks to catalyst's config-driven registry.
from catalyst.dl import registry
from torch_optimizer import Ranger

from .callbacks import (
    CarbontrackerCallback,
    CosineLossCallback,
    KLDivLossCallback,
    MaskedLanguageModelCallback,
    MSELossCallback,
    PerplexityMetricCallbackDistillation,
)
from .experiment import Experiment  # noqa: F401
from .models import BertForMLM, DistilbertStudentModel
from .runners import DistilMLMRunner as Runner  # noqa: F401

# Models
registry.Model(BertForMLM)
registry.Model(DistilbertStudentModel)

# Optimizer
registry.Optimizer(Ranger)

# Callbacks
for _callback in (
    CosineLossCallback,
    MaskedLanguageModelCallback,
    KLDivLossCallback,
    MSELossCallback,
    PerplexityMetricCallbackDistillation,
    CarbontrackerCallback,
):
    registry.Callback(_callback)
# Registration module: exposes the project's runner, experiment, metric
# callback, optimizer and model classes to the catalyst registry.
from catalyst.dl import registry

from .runner import Runner  # noqa: F401
from .experiment import Experiment  # noqa: F401
from .callbacks import CharErrorRateCallback
from .optimizers import SWA
from .models import (
    DeepSpeech,
    DeepSpeechV2,
    LightConv,
    LightLSTM,
    LookaheadLSTM,
)

# Metric callback
registry.Callback(CharErrorRateCallback)

# Models, selectable by class name from experiment configs
for _model in (LightLSTM, DeepSpeech, DeepSpeechV2, LightConv, LookaheadLSTM):
    registry.Model(_model)

# Optimizer
registry.Optimizer(SWA)
# Register models, callbacks, optimizers and criterions with the catalyst
# registry so they can be referenced by name in experiment configs.

# Models
for _model in (
    UNetResNet,
    LinkNet34,
    DenseNetDetector,
    ResnetDetector,
    resnet34,
    SCseUnet,
    ResUnetScSeDecoded,
    QUnet,
    EfficientUnet,
    PretrainedResnet,
    PretrainedDensenet,
    ModelFromCheckpoint,
):
    registry.Model(_model)

# Metric callbacks
for _callback in (
    MeanDiceCallback,
    AllAccuracyCallback,
    F1Callback,
    FBetaCallback,
):
    registry.Callback(_callback)

# Optimizers
for _optimizer in (PlainRAdam, AdamW):
    registry.Optimizer(_optimizer)

# Criterions (loss classes)
for _criterion in (
    JointLoss,
    CCE,
    BinaryDiceLoss,
    BinaryDiceLogLoss,
    MulticlassDiceLoss,
    TverskyLoss,
    DiceAndBCE,
    FocalLossMultiChannel,
    FocalAndBCE,
):
    registry.Criterion(_criterion)
# Registration module for the LSTM-CRF experiment: wires the model,
# optimizer and criterion into the catalyst registry.
from .experiment import Experiment  # noqa: F401
from catalyst.dl import SupervisedRunner as Runner  # noqa: F401
from catalyst.dl import registry
from torch.optim import Adam
from torch.nn.functional import cross_entropy
import gensim  # noqa: F401
from .vectorizer_orig import Vectorizer  # noqa: F401
from .model import LstmCrf
# from callbacks.nll_loss import CrfNllCallback
# NOTE(review): this re-binding intentionally shadows the SupervisedRunner
# alias imported above — CustomRunner is the Runner actually used.
from .runner import CustomRunner as Runner  # noqa: F811

registry.Model(LstmCrf)
registry.Optimizer(Adam)
# BUG FIX: register the loss *function object*, not the result of calling it.
# The original `registry.Criterion(cross_entropy())` invoked cross_entropy
# with no arguments, which raises TypeError the moment this module imports.
registry.Criterion(cross_entropy)
# flake8: noqa
# pylint: disable=unused-import
# Registration module: exposes the BERT model, criterion, callback,
# optimizer and scheduler to catalyst's config-driven registry.
from catalyst.dl import registry
from transformers import AdamW, WarmupLinearSchedule

from .experiment import Experiment
from .catalyst_ext.runner import BertSupervisedRunner as Runner
from .model_wrapper import BertModel
from .catalyst_ext.bert_criterion import (
    BertCriterionCallback,
    BertCrossEntropyLoss,
)

registry.Model(BertModel)
registry.Criterion(BertCrossEntropyLoss)
registry.Callback(BertCriterionCallback)
# Registered under the explicit name 'TransformersAdamW'.
registry.Optimizer(AdamW, name='TransformersAdamW')
registry.Scheduler(WarmupLinearSchedule)