def read_global_configuration(config_dict, ignore_undefined=False):
    """Read global-level configuration (accessible to all components and chains).

    Arguments:
        config_dict {dict} -- The configuration, keyed by component conf key
        ignore_undefined {boolean} -- Whether to ignore (True) or throw an
            error (False) on undefined / ambiguous configuration keys

    Returns:
        GlobalConfig -- the finalized global configuration object
    """
    global_config = GlobalConfig()
    for global_conf_key, comp_conf in config_dict.items():
        # chain definitions are handled elsewhere, not as a global component
        if global_conf_key == GlobalConfig.chains_key:
            continue
        # find the component class registered under this configuration key
        matches = [
            g for g in global_component_classes
            if g.conf_key_name == global_conf_key
        ]
        if len(matches) != 1:
            # zero or multiple matches: either skip silently or report
            if ignore_undefined:
                continue
            # error() is assumed to abort only when its condition argument is
            # True, so exactly one of the two calls below fires -- TODO confirm
            error(
                f"Undefined global configuration component: {global_conf_key}",
                not matches)
            error(
                f"Multiple global configuration component matches: {global_conf_key}",
                len(matches) > 1)
        component_config = matches[0](comp_conf)
        global_config.add_config_object(global_conf_key, component_config)

    global_config.finalize()
    # default the run id to a timestamp when none was configured
    if global_config.misc.run_id is None:
        global_config.misc.run_id = utils.datetime_str()
    # make sure all output directories exist
    for ddir in (global_config.folders.run, global_config.folders.raw_data,
                 global_config.folders.serialization):
        os.makedirs(ddir, exist_ok=True)
    return global_config


import argparse
import time
import os
import numpy as np
import torch
import torch.nn as nn
import sys
sys.path.append('../../')
from src.baselines.baseline_data import get_bisk_rnn_data
from src.helper import *
from config.global_config import GlobalConfig

# Shared global configuration object for this baseline script.
config = GlobalConfig()

#######################################
# Hyper-parameters
#######################################
# Padding token index and maximum number of scene objects (anchor dimension),
# both taken from the shared global configuration.
pad_idx = config.PAD_TOKEN
anc_dim = config.MAX_SCENE_OBJECT_COUNT
#######################################

#######################################
# Device configuration
#######################################
# Prefer the GPU when one is available; models/tensors should be moved to
# `device` by the code below.
device = 'cuda' if torch.cuda.is_available() else 'cpu'
#######################################

# NOTE(review): a module-level `global logger` statement was removed here --
# `global` only has an effect inside a function body, so at module scope it
# was a no-op. The logger should be assigned explicitly where it is set up.