Example #1
def add_file_handler_to_logger(logger):
    """
    Add a ``logging.FileHandler`` to the logger.
    Requires that AppState has the following variable already set:

        - AppState().log_file -- name (with path) of the file that will be used as the destination by the ``FileHandler``.

    :param logger: Logger object.

    """
    # Skip when no log file has been set.
    if AppState().log_file is None:
        return

    # Create file handler which logs even DEBUG messages.
    fh = logging.FileHandler(AppState().log_file)

    # Set logging level for this file.
    fh.setLevel(logging.DEBUG)

    # Create formatter and add it to the handlers.
    formatter = logging.Formatter(
        fmt='[%(asctime)s] - %(levelname)s - %(name)s >>> %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')
    fh.setFormatter(formatter)

    # Add the handler to the logger.
    logger.addHandler(fh)
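
A minimal usage sketch for the function above, assuming ``AppState`` is importable from ``ptp.utils.app_state`` and exposes a writable ``log_file`` attribute (both inferred from the surrounding examples):

import logging

from ptp.utils.app_state import AppState

# Point the singleton at a destination file first...
AppState().log_file = "/tmp/experiment.log"

# ...then attach the handler; the logger itself must also pass DEBUG records.
logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)
add_file_handler_to_logger(logger)
logger.debug("Persisted by the new FileHandler.")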
Example #2
    def __init__(self, *args, **kwargs):
        super(TestPipeline, self).__init__(*args, **kwargs)
        # Set required globals.
        app_state = AppState()
        app_state.__setitem__("bow_size", 10, override=True)
        # Extract absolute path to config.
        abs_config_path = os.path.realpath(__file__)
        # Save it in app_state!
        app_state.absolute_config_path = abs_config_path[:abs_config_path.find(
            "tests")] + "configs/"
Example #3
def initialize_logger(name, add_file_handler=True):
    """
    Initializes the logger with a specific configuration.
    Requires that AppState has the following variable already set:
        - AppState().args.log_level -- log level (from command line arguments)

    :param name: Name of the entity that "owns" the logger.

    :param add_file_handler: If set, also attaches a ``FileHandler`` (DEFAULT: True).

    :return: Logger object.

    """
    # Load the default logger configuration.
    logger_config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'simple': {
                'format':
                '[%(asctime)s] - %(levelname)s - %(name)s >>> %(message)s',
                'datefmt': '%Y-%m-%d %H:%M:%S'
            }
        },
        'handlers': {
            'console': {
                'class': 'logging.StreamHandler',
                'level': 'INFO',
                'formatter': 'simple',
                'stream': 'ext://sys.stdout'
            }
        },
        'root': {
            'level': 'DEBUG',
            'handlers': ['console']
        }
    }

    logging_config.dictConfig(logger_config)

    # Create the Logger, set its label and logging level.
    logger = logging.getLogger(name=name)

    # Add the file handler, if requested.
    if add_file_handler:
        add_file_handler_to_logger(logger)

    # Set logger level depending on the settings.
    if AppState().args is not None and AppState().args.log_level is not None:
        # Fall back to INFO when the name is not a valid level - passing the
        # ``None`` default through to ``setLevel()`` would raise a TypeError.
        logger.setLevel(
            getattr(logging,
                    AppState().args.log_level.upper(), logging.INFO))
    else:
        logger.setLevel('INFO')

    return logger
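
A hedged usage sketch for the function above; that ``AppState`` lives in ``ptp.utils.app_state`` is inferred from these examples, and ``args`` is left unset so the INFO fallback branch runs:

from ptp.utils.app_state import AppState

# No command-line arguments parsed yet, so the INFO fallback branch is taken.
AppState().args = None
logger = initialize_logger("MyComponent", add_file_handler=False)
logger.info("Logger ready")     # emitted by the 'console' handler
logger.debug("Hidden at INFO")  # filtered out: the level fell back to INFO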
Example #4
    def __init__(self, name, config):
        """
        Initializes the pipeline manager.

        :param name: Name of the pipeline manager.

        :param config: Parameters used to instantiate all required components.
        :type config: :py:class:`ptp.configuration.ConfigInterface`

        """
        # Store name and configuration, then initialize the logger.
        self.name = name
        self.config = config
        self.app_state = AppState()
        self.logger = logging.initialize_logger(name)

        # Set initial values of all pipeline elements.
        # Empty dictionary of all components, keyed by their priorities.
        self.__components = {}
        # Empty list of all models - it will contain only "references" to objects stored in the components dictionary.
        self.models = []
        # Empty list of all losses - it will contain only "references" to objects stored in the components dictionary.
        self.losses = []

        # Initialize the best loss as +infinity (``inf`` imported at module level, e.g. from ``math``).
        self.best_loss = inf
        self.best_status = "Unknown"
Example #5
    def test_01keys_present(self):
        """ Tests whether the original keys are present and can be retrieved/modified. """
        # Initialize object.
        app_state = AppState()
        # Add global.
        app_state["global1"] = 1
        # Check its value.
        self.assertEqual(app_state['global1'], 1)
Example #6
    def __init__(self, key_mappings):
        """
        Constructor. Initializes app state and stores key mappings.

        :param key_mappings: Dictionary of global key mappings of the parent object.
        """
        # Remember parent object global keys mappings.
        self.key_mappings = key_mappings
        self.app_state = AppState()
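
The facade's getter is not shown in these examples, so the fall-through lookup below is an illustration of how such key-mapping facades commonly behave, not ptp's confirmed API:

class DemoKeyMappingsFacade:
    """ Illustration only: resolves component-local key names to global ones. """

    def __init__(self, key_mappings):
        self.key_mappings = key_mappings

    def __getitem__(self, key):
        # Return the mapped name when one exists, else the key itself.
        return self.key_mappings.get(key, key)

mapping = DemoKeyMappingsFacade({"inputs": "questions"})
assert mapping["inputs"] == "questions"   # mapped
assert mapping["targets"] == "targets"    # unmapped keys fall through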
Example #7
    def __init__(self, name, class_type, config):
        """
        Initializes the component. This constructor:

            - sets the access to ``AppState`` (for dtypes, settings, globals etc.)
            - stores the component name and type
            - stores reference to the passed configuration registry section
            - loads default component parameters
            - initializes the logger
            - initializes mapping facilities and facades

        :param name: Name of the component.

        :param class_type: Class type of the component.

        :param config: Dictionary of parameters (read from configuration ``.yaml`` file).
        :type config: :py:class:`ptp.configuration.ConfigInterface`

        """
        self.name = name
        self.config = config

        # Get access to AppState: for command line args, globals etc.
        self.app_state = AppState()

        # Initialize logger.
        self.logger = logging.initialize_logger(self.name)

        # Load default configuration.
        if class_type is not None:
            self.config.add_default_params(
                load_class_default_config_file(class_type))

        # Initialize the "streams mapping facility".
        if "streams" not in config or config["streams"] is None:
            self.__stream_keys = {}
        else:
            self.__stream_keys = config["streams"]
        self.stream_keys = KeyMappingsFacade(self.__stream_keys)

        # Initialize the "globals mapping facility".
        if "globals" not in config or config["globals"] is None:
            self.__global_keys = {}
        else:
            self.__global_keys = config["globals"]
        self.global_keys = KeyMappingsFacade(self.__global_keys)

        # Initialize the "statistics mapping facility".
        if "statistics" not in config or config["statistics"] is None:
            self.__statistics_keys = {}
        else:
            self.__statistics_keys = config["statistics"]
        self.statistics_keys = KeyMappingsFacade(self.__statistics_keys)

        # Facade for accessing global parameters (stored still in AppState).
        self.globals = GlobalsFacade(self.__global_keys)
def load_class_default_config_file(class_type):
    """
    Function loads default configuration from the default config file associated with the given class type and adds it to parameter registry.

    :param class_type: Class type of a given object.

    :return: Loaded default configuration.
    """

    # Extract path to default config.
    module = class_type.__module__.replace(".", "/")
    rel_path = module[module.find("ptp") + 4:]
    # Build the abs path to the default config file of a given component/worker.
    abs_default_config = os.path.join(AppState().absolute_config_path,
                                      "default", rel_path) + ".yml"

    # Check if file exists.
    if not os.path.isfile(abs_default_config):
        print(
            "ERROR: The default configuration file '{}' for '{}' does not exist"
            .format(abs_default_config, class_type.__module__))
        exit(-1)

    try:
        # Open file and get parameter dictionary.
        with open(abs_default_config, 'r') as stream:
            param_dict = yaml.safe_load(stream)

        # Return default parameters so they can be added to the global registry.
        if param_dict is None:
            print("WARNING: The default configuration file '{}' is empty!".
                  format(abs_default_config))
            return {}
        else:
            return param_dict

    except yaml.YAMLError as e:
        print(
            "ERROR: Couldn't properly parse the '{}' default configuration file. YAML error:\n  {}"
            .format(abs_default_config, e))
        exit(-2)
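
A usage sketch under the assumptions visible above: ``AppState().absolute_config_path`` must already point at the ``configs/`` tree (Example #2 sets it), and ``MyComponent`` is a hypothetical class defined in ``ptp/components/my_component.py``:

from ptp.utils.app_state import AppState

# Must be set first - the loader builds the path relative to it.
AppState().absolute_config_path = "/home/user/project/configs/"

# Resolves to configs/default/components/my_component.yml and parses it.
defaults = load_class_default_config_file(MyComponent)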
Example #9
    def test_04keys_overwrite(self):
        """ Tests that overwriting an existing key without ``override`` raises a KeyError. """
        with self.assertRaises(KeyError):
            AppState()["global1"] = 2
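
Examples #2 and #13 sidestep exactly this guard by calling ``__setitem__`` with ``override=True``; a minimal sketch of the two behaviours side by side:

app_state = AppState()
app_state["bow_size"] = 10      # first write succeeds
try:
    app_state["bow_size"] = 20  # plain overwrite raises KeyError
except KeyError:
    # The explicit escape hatch used by the TestPipeline constructors.
    app_state.__setitem__("bow_size", 20, override=True)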
Example #10
    def test_03keys_absent(self):
        """ Tests whether absent keys are really absent. """
        with self.assertRaises(KeyError):
            a = AppState()["global2"]
Example #11
    def test_02keys_present_singleton(self):
        """ Tests whether the original keys are still present in a new AppState "instance". """
        # Initialize object.
        app_state = AppState()
        # Check its value.
        self.assertEqual(app_state['global1'], 1)
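
This test only passes because ``AppState`` is a process-wide singleton; ptp's exact mechanism is not shown here, but the behaviour matches the classic ``__new__``-based pattern sketched below (an illustration, not ptp's code):

class DemoSingleton:
    """ Illustration: every 'instantiation' returns the same shared object. """
    _instance = None

    def __new__(cls):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
            cls._instance.globals = {}
        return cls._instance

a, b = DemoSingleton(), DemoSingleton()
a.globals["global1"] = 1
assert b.globals["global1"] == 1  # same underlying object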
Example #12
    def __init__(self, name, add_default_parser_args=True):
        """
        Base constructor for all workers:

            - Initializes the AppState singleton:

                >>> self.app_state = AppState()

            - Initializes the Configuration Registry:

                >>> self.config = ConfigInterface()

            - Creates parser and adds default worker command line arguments.

        :param name: Name of the worker.
        :type name: str

        :param add_default_parser_args: If set, adds default parser arguments (DEFAULT: True).
        :type add_default_parser_args: bool

        """
        # Call base constructor.
        super(Worker, self).__init__()

        # Set worker name.
        self.name = name

        # Initialize the application state singleton.
        self.app_state = AppState()

        # Initialize parameter interface/registry.
        self.config = ConfigInterface()

        # Create parser with a list of runtime arguments.
        self.parser = argparse.ArgumentParser(
            formatter_class=argparse.RawTextHelpFormatter)

        # Add arguments to the specific parser.
        if add_default_parser_args:
            # These arguments will be shared by all basic workers.
            self.parser.add_argument(
                '--config',
                dest='config',
                type=str,
                default='',
                help='Name of the configuration file(s) to be loaded. '
                'If specifying more than one file, they must be separated with a comma ",".'
            )

            self.parser.add_argument(
                '--disable',
                type=str,
                default='',
                dest='disable',
                help=
                'Comma-separated list of components to be disabled (DEFAULT: empty)'
            )

            self.parser.add_argument(
                '--load',
                type=str,
                default='',
                dest='load_checkpoint',
                help=
                'Path and name of the checkpoint file containing the saved parameters'
                ' of the pipeline models to load (should end with a .pt extension)'
            )

            self.parser.add_argument(
                '--gpu',
                dest='use_gpu',
                action='store_true',
                help=
                'The current worker will move the computations to GPU devices, if available '
                'in the system. (DEFAULT: False)')

            self.parser.add_argument(
                '--expdir',
                dest='expdir',
                type=str,
                default="~/experiments",
                help=
                'Path to the directory where the experiment(s) folders are/will be stored.'
                ' (DEFAULT: ~/experiments)')

            self.parser.add_argument('--savetag',
                                     dest='savetag',
                                     type=str,
                                     default='',
                                     help='Tag for the save directory.')

            self.parser.add_argument('--logger',
                                     action='store',
                                     dest='log_level',
                                     type=str,
                                     default='INFO',
                                     choices=[
                                         'CRITICAL', 'ERROR', 'WARNING',
                                         'INFO', 'DEBUG', 'NOTSET'
                                     ],
                                     help="Log level. (DEFAULT: INFO)")

            self.parser.add_argument(
                '--interval',
                dest='logging_interval',
                default=100,
                type=int,
                help=
                'Statistics logging interval. Will impact logging to the logger and '
                'exporting to TensorBoard. Writing to the csv file is not impacted '
                '(exports at every step). (DEFAULT: 100, i.e. logs every 100 episodes).'
            )

            self.parser.add_argument(
                '--agree',
                dest='confirm',
                action='store_true',
                help=
                'Request user confirmation just after loading the settings, '
                'before starting the experiment. (DEFAULT: False)')
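
A hedged sketch of how a derived worker might consume this parser; storing the parsed namespace in ``AppState().args`` mirrors what ``initialize_logger`` (Example #3) reads, but the exact ptp flow may differ:

worker = Worker("demo_worker")

# Parse a sample command line and expose the result where the logger
# initialization expects to find it.
worker.app_state.args = worker.parser.parse_args(
    ["--config", "my_experiment.yml", "--gpu", "--logger", "DEBUG"])

print(worker.app_state.args.use_gpu)    # True
print(worker.app_state.args.log_level)  # 'DEBUG'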
Example #13
    def __init__(self, *args, **kwargs):
        super(TestPipeline, self).__init__(*args, **kwargs)
        # Set required globals.
        app_state = AppState()
        app_state.__setitem__("bow_size", 10, override=True)