import importlib
import os

from neurst.layers.search.sequence_search import SequenceSearch
from neurst.utils.registry import setup_registry

# Registry for sequence-search layers: decorated SequenceSearch subclasses
# become creatable by name via build_search_layer.
build_search_layer, register_search_layer = setup_registry(
    SequenceSearch.REGISTRY_NAME, base_class=SequenceSearch, verbose_creation=True)

# Import every public sibling module so its @register_search_layer
# decorators run and populate the registry.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips the ".py" suffix; the previous file.find('.py')
        # truncated at the FIRST ".py" occurrence, which is wrong for
        # names that contain ".py" mid-string.
        importlib.import_module('neurst.layers.search.' + file[:-3])
import importlib
import os

from neurst.layers.encoders.encoder import Encoder
from neurst.utils.registry import setup_registry

# Registry for encoder layers; subclasses of Encoder register themselves here.
build_encoder, register_encoder = setup_registry(Encoder.REGISTRY_NAME, base_class=Encoder)

# Import every public sibling module so its @register_encoder decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; file.find('.py') would mis-truncate names
        # containing ".py" before the extension.
        importlib.import_module('neurst.layers.encoders.' + file[:-3])
import importlib
import os

from neurst.data.text.tokenizer import Tokenizer
from neurst.utils.registry import setup_registry

# Registry for tokenizers; Tokenizer subclasses register themselves here.
build_tokenizer, register_tokenizer = setup_registry(Tokenizer.REGISTRY_NAME,
                                                     base_class=Tokenizer)

# Import every public sibling module so its @register_tokenizer decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; safer than file.find('.py'), which stops at
        # the first ".py" occurrence inside the name.
        importlib.import_module('neurst.data.text.' + file[:-3])
Returns: Sparsity (%) that should be applied to the weights for the step. """ raise NotImplementedError( "PruningSchedule implementation must override sparsity_in_step") @classmethod def new(cls, args: dict): if args["target_sparsity"] == 0: return None return cls(**args) build_pruning_schedule, register_pruning_schedule = setup_registry( PruningSchedule.REGISTRY_NAME, base_class=PruningSchedule, create_fn="new", verbose_creation=True) @register_pruning_schedule class ConstantSparsity(PruningSchedule): """Pruning schedule with constant sparsity(%) throughout training.""" def __init__(self, **kwargs): super(ConstantSparsity, self).__init__(**kwargs) def sparsity_in_step(self, step): return tf.constant(self.target_sparsity, dtype=tf.float32) @register_pruning_schedule
import importlib
import os

import tensorflow as tf

from neurst.utils.registry import setup_registry

LR_SCHEDULE_REGISTRY_NAME = "lr_schedule"

# Registry for learning-rate schedules (Keras LearningRateSchedule subclasses).
build_lr_schedule, register_lr_schedule = setup_registry(
    LR_SCHEDULE_REGISTRY_NAME,
    base_class=tf.keras.optimizers.schedules.LearningRateSchedule,
    verbose_creation=True)

# Import every public sibling module so its @register_lr_schedule decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; file.find('.py') mis-truncates names that
        # contain ".py" before the extension.
        importlib.import_module('neurst.optimizers.schedules.' + file[:-3])
import importlib
import os

from neurst.tasks.task import Task
from neurst.utils.registry import setup_registry

# Registry for tasks; Task subclasses register themselves here.
build_task, register_task = setup_registry(Task.REGISTRY_NAME, base_class=Task,
                                           verbose_creation=True)

# Import every public sibling module so its @register_task decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.tasks.' + file[:-3])
import importlib
import os

from neurst.data.datasets.dataset import Dataset, TFRecordDataset
from neurst.utils.registry import setup_registry

# Registry for datasets; Dataset subclasses register themselves here.
build_dataset, register_dataset = setup_registry(Dataset.REGISTRY_NAME,
                                                 base_class=Dataset,
                                                 verbose_creation=True)
# Keep the TFRecordDataset import alive (re-exported from this package).
_ = TFRecordDataset

# Import every public sibling module so its @register_dataset decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.data.datasets.' + file[:-3])

# The audio datasets live in a subpackage, which os.listdir's .py filter above
# does not pick up — import it explicitly.
importlib.import_module("neurst.data.datasets.audio")
import importlib
import os

from neurst.utils.registry import setup_registry
from neurst_pt.models.model import BaseModel

# Registry for PyTorch models; instances are created through BaseModel.new.
build_model, register_model = setup_registry(BaseModel.REGISTRY_NAME,
                                             base_class=BaseModel,
                                             create_fn="new",
                                             verbose_creation=True,
                                             backend="pt")

# Import every public sibling module so its @register_model decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst_pt.models.' + file[:-3])
import importlib
import os

from neurst.training.callbacks import (CentralizedCallback, CustomCheckpointCallback,
                                       LearningRateScheduler, MetricReductionCallback)
from neurst.training.validator import Validator
from neurst.utils.registry import setup_registry

# Registry for validators; Validator subclasses register themselves here.
build_validator, register_validator = setup_registry(Validator.REGISTRY_NAME,
                                                     base_class=Validator,
                                                     verbose_creation=True)

# Public API of this package (callbacks are re-exported alongside the registry).
__all__ = [
    "CentralizedCallback",
    "CustomCheckpointCallback",
    "LearningRateScheduler",
    "MetricReductionCallback",
    "Validator",
    "register_validator",
    "build_validator"
]

# Import every public sibling module so its @register_validator decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.training.' + file[:-3])
import torch.nn as nn

from neurst.utils.registry import setup_registry
from neurst_pt.layers.attentions.multi_head_attention import MultiHeadAttention, MultiHeadSelfAttention
from neurst_pt.layers.common_layers import PrePostProcessingWrapper, TransformerFFN

# Registry of elementary transformer sub-layers (PyTorch backend).
build_base_layer, register_base_layer = setup_registry("base_layer",
                                                       base_class=nn.Module,
                                                       verbose_creation=False,
                                                       backend="pt")

# Register the built-in sub-layer implementations.
for _layer_cls in (MultiHeadSelfAttention, MultiHeadAttention, TransformerFFN):
    register_base_layer(_layer_cls)


def build_transformer_component(layer_args, norm_shape, dropout_rate, pre_norm=True, epsilon=1e-6):
    """Creates a registered sub-layer from `layer_args` and wraps it with
    the standard pre/post processing (normalization + dropout + residual).
    """
    inner = build_base_layer(layer_args)
    return PrePostProcessingWrapper(
        layer=inner,
        norm_shape=norm_shape,
        dropout_rate=dropout_rate,
        epsilon=epsilon,
        pre_norm=pre_norm)
import tensorflow as tf

from neurst.utils.registry import setup_registry

OPTIMIZER_REGISTRY_NAME = "optimizer"

# Registry for optimizers (any tf.keras Optimizer subclass).
build_optimizer, register_optimizer = setup_registry(
    OPTIMIZER_REGISTRY_NAME,
    base_class=tf.keras.optimizers.Optimizer,
    verbose_creation=True)

# Re-export the stock Keras optimizers from this package...
Adam = tf.keras.optimizers.Adam
Adagrad = tf.keras.optimizers.Adagrad
Adadelta = tf.keras.optimizers.Adadelta
SGD = tf.keras.optimizers.SGD

# ...and make each of them creatable by name through the registry.
for _optimizer_cls in (Adam, Adagrad, Adadelta, SGD):
    register_optimizer(_optimizer_cls)
import importlib
import os

from neurst.data.data_pipelines.data_pipeline import DataPipeline
from neurst.utils.registry import setup_registry

# Registry for data pipelines; DataPipeline subclasses register themselves here.
build_data_pipeline, register_data_pipeline = setup_registry(DataPipeline.REGISTRY_NAME,
                                                             base_class=DataPipeline)

# Import every public sibling module so its @register_data_pipeline decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.data.data_pipelines.' + file[:-3])
import importlib
import os

from neurst.data.datasets.data_sampler.data_sampler import DataSampler
from neurst.utils.registry import setup_registry

# Registry for data samplers; DataSampler subclasses register themselves here.
build_data_sampler, register_data_sampler = setup_registry(
    DataSampler.REGISTRY_NAME, base_class=DataSampler, verbose_creation=True)

# Import every public sibling module so its @register_data_sampler decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.data.datasets.data_sampler.' + file[:-3])
import importlib
import os

from neurst.utils.converters.converter import Converter
from neurst.utils.registry import setup_registry

# Registry for checkpoint/model converters; instances are created via Converter.new.
build_converter, register_converter = setup_registry(Converter.REGISTRY_NAME,
                                                     base_class=Converter,
                                                     verbose_creation=False,
                                                     create_fn="new")

# Import every public sibling module so its @register_converter decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.utils.converters.' + file[:-3])
import importlib
import os

from neurst.utils.registry import setup_registry

# Registry for SimulEval agents; only registration is needed here (the build
# function is intentionally discarded — agents are created by SimulEval itself).
_, register_agent = setup_registry("simuleval_agent", verbose_creation=False)

# Import every public sibling module so its @register_agent decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.utils.simuleval_agents.' + file[:-3])
import importlib
import os

from neurst.metrics.metric import Metric
from neurst.utils.registry import setup_registry

# Registry for evaluation metrics; Metric subclasses register themselves here.
build_metric, register_metric = setup_registry(Metric.REGISTRY_NAME,
                                               base_class=Metric)

# Import every public sibling module so its @register_metric decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.metrics.' + file[:-3])
import importlib
import os

from neurst.layers.encoders.encoder import Encoder
from neurst.utils.registry import setup_registry

# Registry for encoder layers. Use Encoder.REGISTRY_NAME instead of the bare
# "encoder" literal for consistency with the other registry modules.
build_encoder, register_encoder = setup_registry(Encoder.REGISTRY_NAME, base_class=Encoder)

# Import every public sibling module so its @register_encoder decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.layers.encoders.' + file[:-3])
import importlib
import os

from neurst.exps.base_experiment import BaseExperiment
from neurst.utils.registry import setup_registry

# Registry for experiments; BaseExperiment subclasses register themselves here.
build_exp, register_exp = setup_registry(BaseExperiment.REGISTRY_NAME,
                                         base_class=BaseExperiment,
                                         verbose_creation=True)

# Import every public sibling module so its @register_exp decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.exps.' + file[:-3])
import tensorflow as tf

from neurst.layers.attentions.light_convolution_layer import LightConvolutionLayer
from neurst.layers.attentions.multi_head_attention import MultiHeadAttention, MultiHeadSelfAttention
from neurst.layers.common_layers import PrePostProcessingWrapper, TransformerFFN
from neurst.utils.registry import setup_registry

# Registry of elementary transformer sub-layers (TensorFlow backend).
build_base_layer, register_base_layer = setup_registry("base_layer",
                                                       base_class=tf.keras.layers.Layer,
                                                       verbose_creation=False)

# Register the built-in sub-layer implementations.
for _layer_cls in (MultiHeadSelfAttention, MultiHeadAttention,
                   TransformerFFN, LightConvolutionLayer):
    register_base_layer(_layer_cls)


def build_transformer_component(layer_args, dropout_rate):
    """Creates a registered sub-layer from `layer_args` and wraps it with the
    standard pre/post processing (normalization, dropout, residual).
    """
    inner = build_base_layer(layer_args)
    return PrePostProcessingWrapper(
        layer=inner,
        dropout_rate=dropout_rate,
        name=inner.name + "_prepost_wrapper")
import importlib
import os

from neurst.criterions.criterion import Criterion
from neurst.utils.registry import setup_registry

# Registry for training criterions; Criterion subclasses register themselves here.
build_criterion, register_criterion = setup_registry(Criterion.REGISTRY_NAME,
                                                     base_class=Criterion,
                                                     verbose_creation=True)

# Import every public sibling module so its @register_criterion decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.criterions.' + file[:-3])
import importlib
import os

from neurst.utils.registry import setup_registry
from neurst_pt.layers.decoders.decoder import Decoder

# Registry for PyTorch decoder layers; Decoder subclasses register themselves here.
build_decoder, register_decoder = setup_registry(Decoder.REGISTRY_NAME,
                                                 base_class=Decoder,
                                                 backend="pt")

# Import every public sibling module so its @register_decoder decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst_pt.layers.decoders.' + file[:-3])
import importlib
import os

from neurst.data.text.tokenizer import Tokenizer
from neurst.utils.registry import setup_registry

# Registry for tokenizers. Use Tokenizer.REGISTRY_NAME instead of the bare
# "tokenizer" literal for consistency with the other registry modules.
build_tokenizer, register_tokenizer = setup_registry(Tokenizer.REGISTRY_NAME,
                                                     base_class=Tokenizer)

# Import every public sibling module so its @register_tokenizer decorators run.
models_dir = os.path.dirname(__file__)
for file in os.listdir(models_dir):
    # Skip private/hidden entries and anything that is not a .py module.
    if not file.startswith(('_', '.')) and file.endswith('.py'):
        # file[:-3] strips ".py"; avoids file.find('.py')'s first-occurrence bug.
        importlib.import_module('neurst.data.text.' + file[:-3])