Example #1
    def __init__(
        self,
        input_dims,
        output_dim,
        mm_dim=1200,
        activ_input="relu",
        activ_output="relu",
        normalize=False,
        dropout_input=0.0,
        dropout_pre_lin=0.0,
        dropout_output=0.0,
    ):
        super().__init__()
        self.input_dims = input_dims
        self.output_dim = output_dim
        self.mm_dim = mm_dim
        self.activ_input = activ_input
        self.activ_output = activ_output
        self.normalize = normalize
        self.dropout_input = dropout_input
        self.dropout_pre_lin = dropout_pre_lin
        self.dropout_output = dropout_output
        # Modules
        self.linear0 = nn.Linear(input_dims[0], mm_dim)
        self.linear1 = nn.Linear(input_dims[1], mm_dim)
        self.linear_out = nn.Linear(mm_dim, output_dim)
        self.n_params = sum(p.numel() for p in self.parameters()
                            if p.requires_grad)

        log_class_usage("Fusion", self.__class__)
Example #2
    def __init__(
        self,
        input_dims,
        output_dim,
        mm_dim=1600,
        shared=False,
        normalize=False,
        dropout_input=0.0,
        dropout_pre_lin=0.0,
        dropout_output=0.0,
    ):
        super().__init__()
        self.input_dims = input_dims
        self.shared = shared
        self.mm_dim = mm_dim
        self.output_dim = output_dim
        self.normalize = normalize
        self.dropout_input = dropout_input
        self.dropout_pre_lin = dropout_pre_lin
        self.dropout_output = dropout_output
        # Modules
        self.linear0 = nn.Linear(input_dims[0], mm_dim)
        if shared:
            self.linear1 = self.linear0
        else:
            self.linear1 = nn.Linear(input_dims[1], mm_dim)
        self.bilinear = nn.Bilinear(mm_dim, mm_dim, mm_dim)
        self.linear_out = nn.Linear(mm_dim, output_dim)
        self.n_params = sum(p.numel() for p in self.parameters()
                            if p.requires_grad)

        log_class_usage("Fusion", self.__class__)
Example #3
 def __init__(self, name, *args, **kwargs):
     self.name = name
     self.required_params = ["scores", "targets"]
     # the set of datasets where this metric will be applied
     # an empty set means it will be applied on *all* datasets
     self._dataset_names = set()
     log_class_usage("Metric", self.__class__)
Example #4
    def __init__(self, config: Union[DictConfig, Config]):
        super().__init__()
        if not isinstance(config, DictConfig) and isinstance(config, self.Config):
            config = OmegaConf.structured(config)

        self.config = config

        self._logged_warning = {"losses_present": False}
        self._is_pretrained = False
        self._is_pl_enabled = False

        log_class_usage("Model", self.__class__)
Example #5
    def __init__(self, dataset_name: Optional[str] = None, *args, **kwargs):
        super().__init__(*args, **kwargs)

        if dataset_name is None:
            # In case user doesn't pass it
            dataset_name = f"dataset_{uuid.uuid4().hex[:6]}"
        self.dataset_name = dataset_name
        self._train_dataset = None
        self._val_dataset = None
        self._test_dataset = None

        log_class_usage("DatasetBuilder", self.__class__)
Example #6
    def __init__(
        self,
        input_dims,
        output_dim,
        mm_dim=1600,
        chunks=20,
        rank=15,
        shared=False,
        dropout_input=0.0,
        dropout_pre_lin=0.0,
        dropout_output=0.0,
        pos_norm="before_cat",
    ):
        super().__init__()
        self.input_dims = input_dims
        self.output_dim = output_dim
        self.mm_dim = mm_dim
        self.chunks = chunks
        self.rank = rank
        self.shared = shared
        self.dropout_input = dropout_input
        self.dropout_pre_lin = dropout_pre_lin
        self.dropout_output = dropout_output
        assert pos_norm in ["before_cat", "after_cat"]
        self.pos_norm = pos_norm
        # Modules
        self.linear0 = nn.Linear(input_dims[0], mm_dim)
        if shared:
            self.linear1 = self.linear0
        else:
            self.linear1 = nn.Linear(input_dims[1], mm_dim)
        merge_linears0, merge_linears1 = [], []
        self.sizes_list = get_sizes_list(mm_dim, chunks)
        for size in self.sizes_list:
            ml0 = nn.Linear(size, size * rank)
            merge_linears0.append(ml0)
            if self.shared:
                ml1 = ml0
            else:
                ml1 = nn.Linear(size, size * rank)
            merge_linears1.append(ml1)
        self.merge_linears0 = nn.ModuleList(merge_linears0)
        self.merge_linears1 = nn.ModuleList(merge_linears1)
        self.linear_out = nn.Linear(mm_dim, output_dim)
        self.n_params = sum(p.numel() for p in self.parameters()
                            if p.requires_grad)

        log_class_usage("Fusion", self.__class__)
Example #7
    def __init__(
        self,
        datamodules: Dict[str, pl.LightningDataModule],
        config: Config = None,
        dataset_type: str = "train",
    ):
        self.test_reporter_config = OmegaConf.merge(
            OmegaConf.structured(self.Config), config
        )
        self.datamodules = datamodules
        self.dataset_type = dataset_type
        self.config = registry.get("config")
        self.report = []
        self.timer = Timer()
        self.training_config = self.config.training
        self.num_workers = self.training_config.num_workers
        self.batch_size = self.training_config.batch_size
        self.report_folder_arg = get_mmf_env(key="report_dir")
        self.experiment_name = self.training_config.experiment_name

        self.current_datamodule_idx = -1
        self.dataset_names = list(self.datamodules.keys())
        self.current_datamodule = self.datamodules[
            self.dataset_names[self.current_datamodule_idx]
        ]
        self.current_dataloader = None

        self.save_dir = get_mmf_env(key="save_dir")
        self.report_folder = ckpt_name_from_core_args(self.config)
        self.report_folder += foldername_from_config_override(self.config)

        self.report_folder = os.path.join(self.save_dir, self.report_folder)
        self.report_folder = os.path.join(self.report_folder, "reports")

        if self.report_folder_arg:
            self.report_folder = self.report_folder_arg

        self.candidate_fields = self.test_reporter_config.candidate_fields

        PathManager.mkdirs(self.report_folder)

        log_class_usage("TestReporter", self.__class__)
Example #8
    def __init__(
        self,
        input_dims,
        output_dim,
        mm_dim=16000,
        activ_output="relu",
        dropout_output=0.0,
    ):
        super().__init__()
        self.input_dims = input_dims
        self.output_dim = output_dim
        self.mm_dim = mm_dim
        self.activ_output = activ_output
        self.dropout_output = dropout_output
        # Modules
        self.mcb = CompactBilinearPooling(input_dims[0], input_dims[1], mm_dim)
        self.linear_out = nn.Linear(mm_dim, output_dim)
        self.n_params = sum(p.numel() for p in self.parameters()
                            if p.requires_grad)

        log_class_usage("Fusion", self.__class__)
Example #9
    def __init__(
        self,
        input_dims,
        output_dim,
        mm_dim=1600,
        chunks=20,
        shared=False,
        dropout_input=0.0,
        dropout_pre_lin=0.0,
        dropout_output=0.0,
        pos_norm="before_cat",
    ):
        super().__init__()
        self.input_dims = input_dims
        self.output_dim = output_dim
        self.mm_dim = mm_dim
        self.chunks = chunks
        self.shared = shared
        self.dropout_input = dropout_input
        self.dropout_pre_lin = dropout_pre_lin
        self.dropout_output = dropout_output
        assert pos_norm in ["before_cat", "after_cat"]
        self.pos_norm = pos_norm
        # Modules
        self.linear0 = nn.Linear(input_dims[0], mm_dim)
        if self.shared:
            self.linear1 = self.linear0
        else:
            self.linear1 = nn.Linear(input_dims[1], mm_dim)

        self.sizes_list = get_sizes_list(mm_dim, chunks)
        bilinears = []
        for size in self.sizes_list:
            bilinears.append(nn.Bilinear(size, size, size))
        self.bilinears = nn.ModuleList(bilinears)
        self.linear_out = nn.Linear(self.mm_dim, self.output_dim)
        self.n_params = sum(p.numel() for p in self.parameters()
                            if p.requires_grad)

        log_class_usage("Fusion", self.__class__)
Example #10
    def __init__(self,
                 input_dims,
                 output_dim,
                 dimensions=None,
                 activation="relu",
                 dropout=0.0):
        super().__init__()
        self.input_dims = input_dims
        self.output_dim = output_dim
        self.input_dim = sum(input_dims)
        if dimensions is None:
            dimensions = [500, 500]
        self.dimensions = dimensions + [output_dim]
        self.activation = activation
        self.dropout = dropout
        # Modules
        self.mlp = MLP(self.input_dim, self.dimensions, self.activation,
                       self.dropout)
        self.n_params = sum(p.numel() for p in self.parameters()
                            if p.requires_grad)

        log_class_usage("Fusion", self.__class__)
Example #11
    def __init__(self, params=None):
        super().__init__()
        if params is None:
            params = {}

        is_mapping = isinstance(params, collections.abc.MutableMapping)

        if is_mapping:
            if "type" not in params:
                raise ValueError(
                    "Parameters to loss must have 'type' field to"
                    "specify type of loss to instantiate"
                )
            else:
                loss_name = params["type"]
        else:
            assert isinstance(
                params, str
            ), "loss must be a string or dictionary with 'type' key"
            loss_name = params

        self.name = loss_name

        loss_class = registry.get_loss_class(loss_name)

        log_class_usage("Loss", loss_class)

        if loss_class is None:
            raise ValueError(f"No loss named {loss_name} is registered to registry")
        # Special case of multi as it requires an array
        if loss_name.startswith("multi"):
            assert is_mapping
            self.loss_criterion = loss_class(params)
        else:
            if is_mapping:
                loss_params = params.get("params", {})
            else:
                loss_params = {}
            self.loss_criterion = loss_class(**loss_params)
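
The constructor accepts either a bare string or a mapping carrying a 'type' key with an optional 'params' entry. Both accepted shapes, with illustrative loss names and values:

# Mapping form: 'type' selects the registered loss, 'params' is forwarded to it.
params_as_mapping = {"type": "cross_entropy", "params": {}}

# String form: the string itself is treated as the registered loss name.
params_as_string = "cross_entropy"
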
Example #12
File: encoders.py Project: naykun/mmf
 def __init__(self):
     super().__init__()
     log_class_usage("Encoder", self.__class__)
Example #13
    def __init__(self, config: DictConfig):
        self.config = config
        self.training_config = self.config.training

        log_class_usage("Trainer", self.__class__)