Example #1
    def __init__(self, config: ConfigClass, save_dir: str, log_name=''):
        self.log_name = f'{log_name}_{config.gpu_node}'
        self.save_dir = save_dir
        self.main_logger, self.main_log_handler = setup_logger(save_dir, self.log_name)
        self.main_logger.info(f'Saving to folder {save_dir}')
        self.main_writer = SummaryWriter(save_dir)

        self.config = config
        self.model_cfg = config.model
        self.train_cfg = config.training
        self.optim_cfg = config.training.optimizer
        self.loss_cfg = config.training.loss_fn
        self.resume_cfg = config.resume

        if self.train_cfg.seed is not None:
            # seed all RNGs (torch, random, numpy) for reproducible runs
            torch.manual_seed(self.train_cfg.seed)
            random.seed(self.train_cfg.seed)
            np.random.seed(self.train_cfg.seed)
            self.main_logger.info(f'Seed set to {self.train_cfg.seed}')

        self.device = torch.device(f'cuda:{config.gpu_node}' if torch.cuda.is_available() else 'cpu')
        self.eval_train_loader = config.data.run_val_on_train

        # metric that drives early stopping / model selection; falls back to validation loss
        if self.train_cfg.early_stop_fn == 'f1_score':
            self.eval_func = self.f1_score
        elif self.train_cfg.early_stop_fn == 'iou_score':
            self.eval_func = self.iou_score
        else:
            self.eval_func = self.val_loss

        self.use_ensemble = self.train_cfg.use_ensemble
        if self.train_cfg.use_ensemble:
            self.len_models = self.train_cfg.ensemble.number_models
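Example #1 (and Example #4 below) call a project-local setup_logger helper that returns both the logger and its file handler. A minimal sketch of what such a helper might look like for that call signature is shown here; the file name, format string, and level are assumptions, not the original implementation.

import logging
import os

def setup_logger(save_dir, log_name, level=logging.INFO):
    # hypothetical sketch: a named logger writing to <save_dir>/<log_name>.log
    logger = logging.getLogger(log_name)
    logger.setLevel(level)
    handler = logging.FileHandler(os.path.join(save_dir, f'{log_name}.log'))
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    # return the handler too, so callers can close or remove it per run or per step
    return logger, handler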
Example #2
def initialize_g_vars():
    global logger, args
    logger = setup_logger()
    args = setup_args()
    # get SIEM config
    args.config = config_file_to_dict(filename=args.config)
    pprint(args.config)
    # get sigma folder path
    if args.sigma is None or args.sigma == '':
        force_exit('Sigma folder path is required...', exit=1)
    logger.debug(args.sigma)
    # get sigma config file path
    if args.sigma_config is None or args.sigma_config == '':
        force_exit('Sigma Config is required...', exit=1)
    logger.debug(args.sigma_config)
    logger.debug(args.sigma_venv)
    # strip trailing path separators (backslash or slash)
    args.sigma = args.sigma.rstrip('\\/')
    args.rule = args.rule.rstrip('\\/')
    args.sigma_venv = args.sigma_venv.rstrip('\\/')
    logger.setLevel(args.verbosity)
    logger.info('initialize_g_vars() finished successfully...')
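force_exit is another project-local helper; from its use above it must report the message and terminate the process with the given exit code. A minimal sketch under that assumption (the real helper may log instead of printing):

import sys

def force_exit(message, exit=1):
    # hypothetical sketch: report a fatal configuration problem and stop the program
    print(message, file=sys.stderr)
    sys.exit(exit)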
Example #3
def initialize_g_vars():
    global logger, args
    logger = setup_logger()
    args = setup_args()
    logger.setLevel(args.verbosity)
    logger.info('Description: {}'.format(args.description))
    logger.info('initialize_g_vars() finished successfully...')
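setup_args is not shown in either example. A plausible argparse-based sketch that exposes the attributes used above (config, sigma, sigma_config, sigma_venv, rule, description, verbosity) follows; every flag name, default, and help text here is an assumption.

import argparse

def setup_args():
    # hypothetical sketch of the argument parser behind initialize_g_vars()
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', help='path to the SIEM config file')
    parser.add_argument('--sigma', help='path to the Sigma folder')
    parser.add_argument('--sigma-config', dest='sigma_config', help='path to the Sigma config file')
    parser.add_argument('--sigma-venv', dest='sigma_venv', help='path to the Sigma virtualenv')
    parser.add_argument('--rule', help='path to the rule folder or file')
    parser.add_argument('--description', help='free-text description of the run')
    parser.add_argument('--verbosity', default='INFO', help='logging level name, e.g. DEBUG or INFO')
    return parser.parse_args()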
Example #4
    def _create_train_loggers(self, value):
        # set up per-acquisition-step output dirs, logger, and TensorBoard writer
        self.acquisition_step = value
        self.save_model_dir = os.path.join(self.save_dir, f'Step {value}')
        os.makedirs(self.save_model_dir)
        self.save_data_dir = os.path.join(self.main_data_dir, f'Step {value}')
        os.makedirs(self.save_data_dir)

        self.train_logger, self.train_log_handler = setup_logger(
            self.save_model_dir, f'Train step {value}')
        self.train_writer = SummaryWriter(self.save_model_dir)
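A hedged usage sketch for Example #4: in an acquisition loop, the method would be called once per step and the per-step handler and writer released before moving on. trainer, num_steps, and the teardown calls are assumptions about the surrounding code, not part of the original.

for step in range(num_steps):                      # num_steps is assumed
    trainer._create_train_loggers(step)            # per-step dirs, logger, SummaryWriter
    trainer.train_logger.info(f'Acquisition step {step} started')
    # ... train / acquire for this step ...
    trainer.train_log_handler.close()              # release the per-step log file
    trainer.train_writer.close()                   # flush and close the TensorBoard writer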