Code example #1
    def __init__(self, config: Dict) -> None:
        super().__init__(config)
        self.name = "config_default"
        # Each pop() consumes one sub-tree of the raw config dict, so any
        # keys left over afterwards can be flagged as unknown/unsupported.
        self.train = BaseTrainConfig(config.pop(ConfigNamesConst.TRAIN))
        self.val = BaseValConfig(config.pop(ConfigNamesConst.VAL))
        self.dataset_train = BaseDatasetConfig(
            config.pop(ConfigNamesConst.DATASET_TRAIN))
        self.dataset_val = BaseDatasetConfig(
            config.pop(ConfigNamesConst.DATASET_VAL))
        self.logging = BaseLoggingConfig(config.pop(ConfigNamesConst.LOGGING))
        self.saving = BaseSavingConfig(config.pop(ConfigNamesConst.SAVING))
        self.optimizer = optimization.OptimizerConfig(
            config.pop(ConfigNamesConst.OPTIMIZER))
        self.lr_scheduler = lr_scheduler.SchedulerConfig(
            config.pop(ConfigNamesConst.LR_SCHEDULER))
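
The pattern above is "consume and verify": each sub-config pops its own sub-tree out of the raw dict, so whatever remains at the end must be an unknown key. Below is a minimal, self-contained sketch of that idea; the class names, keys, and the final assertion are illustrative stand-ins, not the project's actual BaseTrainConfig/ConfigNamesConst API.

    from typing import Any, Dict

    class _SubConfig:
        """Stand-in for a leaf config like BaseTrainConfig."""
        def __init__(self, config: Dict[str, Any]) -> None:
            self.lr: float = config.pop("lr", 1e-3)
            assert not config, f"Unknown keys in sub-config: {list(config)}"

    class _Config:
        """Stand-in for the default config class shown above."""
        def __init__(self, config: Dict[str, Any]) -> None:
            # Each pop() removes one sub-tree from the raw dict ...
            self.train = _SubConfig(config.pop("train"))
            # ... so anything left over at the end must be a typo or an
            # unsupported option in the input file.
            assert not config, f"Unknown top-level keys: {list(config)}"

    cfg = _Config({"train": {"lr": 0.01}})
    print(cfg.train.lr)  # 0.01
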
Code example #2
    def __init__(self, config: Dict) -> None:
        super().__init__(config)
        self.name = "config_default"
        self.train = trainer_configs.BaseTrainConfig(config.pop(Conf.TRAIN))
        self.val = trainer_configs.BaseValConfig(config.pop(Conf.VAL))
        self.dataset_train = data.BaseDatasetConfig(
            config.pop(Conf.DATASET_TRAIN))
        self.dataset_val = data.BaseDatasetConfig(config.pop(Conf.DATASET_VAL))
        self.logging = utils.BaseLoggingConfig(config.pop(Conf.LOGGING))
        self.saving = trainer_configs.BaseSavingConfig(config.pop(Conf.SAVING))
        self.optimizer = optimization.OptimizerConfig(
            config.pop(Conf.OPTIMIZER))
        self.lr_scheduler = lr_scheduler.SchedulerConfig(
            config.pop(Conf.LR_SCHEDULER))
        # Model-specific field; note "mlp" is a plain string key rather than
        # a constant like the entries above.
        self.mlp = MLPNetConfig(config.pop("mlp"))
Code example #3
    def __init__(self,
                 config: Dict[str, Any],
                 *,
                 is_train: bool = True) -> None:
        super().__init__(config)
        self.name = "config_ret"
        self.dim_feat_global: int = config.pop("dim_feat_global", 768)
        self.dim_feat_local: int = config.pop("dim_feat_local", 384)
        if not is_train:
            # Disable dataset caching
            logger = logging.getLogger(utils.LOGGER_NAME)
            logger.debug("Disable dataset caching during validation.")
            config["dataset_val"]["preload_vid_feat"] = False
            config["dataset_val"]["preload_text_feat"] = False

        try:
            self.train = RetrievalTrainConfig(config.pop(Conf.TRAIN))
            self.val = RetrievalValConfig(config.pop(Conf.VAL))
            self.dataset_train = RetrievalDatasetConfig(
                config.pop(Conf.DATASET_TRAIN))
            self.dataset_val = RetrievalDatasetConfig(
                config.pop(Conf.DATASET_VAL))
            self.logging = trainer_configs.BaseLoggingConfig(
                config.pop(Conf.LOGGING))
            self.saving = trainer_configs.BaseSavingConfig(
                config.pop(Conf.SAVING))
            self.optimizer = optimization.OptimizerConfig(
                config.pop(Conf.OPTIMIZER))
            self.lr_scheduler = lr_scheduler.SchedulerConfig(
                config.pop(Conf.LR_SCHEDULER))
            # One transformer config per network name defined in
            # RetrievalNetworksConst.
            self.model_cfgs = {}
            for key in RetrievalNetworksConst.values():
                self.model_cfgs[key] = models.TransformerConfig(
                    config.pop(key))
        except KeyError as e:
            # Print the full traceback plus a readable hint about which
            # config key was missing, then re-raise the original exception.
            print()
            print(traceback.format_exc())
            print(
                f"ERROR: {e} not defined in config {self.__class__.__name__}\n"
            )
            raise

        self.post_init()
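
Example #3 wraps the required pops in try/except KeyError so a missing key fails with a readable message instead of a bare traceback deep inside a constructor, and it finishes with post_init(), which presumably performs final checks such as verifying the dict was fully consumed. Below is a minimal sketch of the error-reporting half of that pattern; the class and key names are hypothetical.

    import traceback
    from typing import Any, Dict

    class StrictConfig:
        """Hypothetical config that requires 'train' and 'val' keys."""
        def __init__(self, config: Dict[str, Any]) -> None:
            try:
                # pop() without a default raises KeyError when absent.
                self.train = config.pop("train")
                self.val = config.pop("val")
            except KeyError as e:
                # Show the traceback plus a readable hint, then re-raise.
                print(traceback.format_exc())
                print(f"ERROR: {e} not defined in config "
                      f"{type(self).__name__}")
                raise

    StrictConfig({"train": {}, "val": {}})   # ok
    # StrictConfig({"train": {}})            # would raise KeyError: 'val'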