Example #1
    def __init__(self, config_file=None, **kwargs):
        """

        Simulator base constructor

        :param resource_manager: Resource manager class instantiation
        :param reader: Reader class instantiation
        :param job_factory: Job Factory instantiation
        :param _dispatcher: Dispatcher instantiation
        :param config_file: Path to the config file in json format.
        :param \*\*kwargs: Dictionary of key:value parameters to be used in the simulator. It overwrites the current parameters. All parameters will be available on the constant variable

        """
        self.constants = CONSTANT()
        self.timeout = kwargs.pop('timeout', None)
        self._id = kwargs.pop('id', None)
        self._log_level = kwargs.pop('LOG_LEVEL', self.LOG_LEVEL_INFO)
        self.define_default_constants(config_file, **kwargs)

        self._logger, self._logger_listener = self.define_logger()
        self.real_init_time = datetime.now()

        # self.dispatcher = None
        self.reader = None
        self.resource_manager = None
        self.job_factory = None
        self.mapper = None
        self.additional_data = None

        if self.constants.OVERWRITE_PREVIOUS:
            self.remove_previous()
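# Hedged sketch of the parameter precedence implemented by define_default_constants
# (shown in full in Example #8): keyword arguments override the built-in defaults,
# and entries from the config file, when one is given, are applied last. The names
# DEFAULTS, resolve_parameters and user_kwargs below are illustrative, not accasim
# identifiers.
DEFAULTS = {'LOG_LEVEL': 'INFO', 'OVERWRITE_PREVIOUS': True}

def resolve_parameters(config, **user_kwargs):
    params = dict(DEFAULTS)
    params.update(user_kwargs)   # user kwargs overwrite the defaults
    params.update(config or {})  # config file entries take the final word
    return params

# resolve_parameters({'LOG_LEVEL': 'DEBUG'}, OVERWRITE_PREVIOUS=False)
# -> {'LOG_LEVEL': 'DEBUG', 'OVERWRITE_PREVIOUS': False}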
Example #2
    def __init__(self, resource_manager, dispatcher, additional_data,
                 **kwargs):
        """

        This class coordinates events submission, queueing and ending.

        :param resource_manager: Resource manager instance
        :param \*\*kwargs: nothing for the moment.

        """
        assert (isinstance(
            resource_manager,
            ResourceManager)), 'Wrong type for the resource_manager argument.'
        self.resource_manager = resource_manager
        self.dispatcher = dispatcher
        self.additional_data = additional_data
        self.constants = CONSTANT()
        # self.debug = debug
        # Stats
        self.first_time_dispatch = None
        self.last_run_time = None
        self.slowdowns = []
        self.wtimes = []

        self.current_time = None
        self.time_points = SortedSet()
        self.events = SortedDict()
        self.loaded = SortedDict()
        self.queued = []
        self.real_ending = SortedDict()
        self.running = []
        self.finished = 0

        self._logger = logging.getLogger('accasim')

        self._writers = []
        if self.constants.SCHEDULING_OUTPUT:
            _sched_fp = path.join(
                self.constants.RESULTS_FOLDER_PATH,
                self.constants.SCHED_PREFIX + self.constants.WORKLOAD_FILENAME)
            self._sched_writer = AsyncWriter(
                path=_sched_fp,
                pre_process_fun=EventManager._schd_write_preprocessor)
            self._writers.append(self._sched_writer)

        if self.constants.PPRINT_OUTPUT:
            _pprint_fp = path.join(
                self.constants.RESULTS_FOLDER_PATH,
                self.constants.PPRINT_PREFIX +
                self.constants.WORKLOAD_FILENAME)
            self._pprint_writer = AsyncWriter(
                path=_pprint_fp,
                pre_process_fun=EventManager._schd_pprint_preprocessor)
            self._writers.append(self._pprint_writer)

        for ad in self.additional_data:
            ad.set_event_manager(self)
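# Illustrative sketch (plain sortedcontainers usage, not accasim API) of why the
# event manager keeps time_points in a SortedSet and events in a SortedDict:
# advancing the simulated clock reduces to popping the smallest pending time point
# and collecting everything registered for it.
from sortedcontainers import SortedDict, SortedSet

time_points = SortedSet([10, 3, 7])
events = SortedDict({3: ['job_a'], 7: ['job_b'], 10: ['job_c']})

current_time = time_points.pop(0)             # smallest pending time point -> 3
submitted_now = events.pop(current_time, [])  # jobs registered for that time -> ['job_a']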
Example #3
    def __init__(self, event_manager=None):
        """

        Constructor.
        The event mapper (:class:`accasim.base.event_class.EventManager`) can be set at instantiation or later, but it is mandatory for this component to work.

        :param event_manager: Event manager object.

        """
        self.constant = CONSTANT()
        self.event_mapper = event_manager
Example #4
    def __init__(self, resource_manager=None, seed=0, **kwargs):
        SchedulerBase.__init__(self, seed, None)
        self.constants = CONSTANT()
        self._safe = kwargs.pop('safe', False)
        self._cur_q_length = self._q_length = kwargs.pop('q_length', 100)
        self._reduce_job_length = kwargs.pop('reduce_job_length', True)
        self._initial_timelimit = kwargs.pop('initial_timelimit', 1000)
        self._max_timelimit = kwargs.pop('timelimit', 16000)
        self._reduced_model = kwargs.pop('reduced_model', True)
        self._considered_cannot_start = kwargs.pop('considered', 1)
        self._max_retries = kwargs.pop('max_retries', 4)
        self._trace_search = kwargs.pop('trace_search', False)
        self._trace_propagation = kwargs.pop('trace_propagation', False)
        self._sched_bt = kwargs.pop('sched_bt', True)
        self._element_v2 = kwargs.pop('element_v2', False)
        self._sim_break_alloc = kwargs.pop('sim_break_alloc', True)
        self._defined_resource_types = None
        self._resources_map = self._nodes_map = self._c_capacities = None
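# Hedged sketch of the kwargs.pop option pattern used in the constructor above:
# each pop() consumes one optional keyword or falls back to its default, so any
# key still left in kwargs afterwards was not recognised. The strict check below
# is an illustration, not something accasim itself does.
def read_scheduler_options(**kwargs):
    options = {
        'safe': kwargs.pop('safe', False),
        'q_length': kwargs.pop('q_length', 100),
        'timelimit': kwargs.pop('timelimit', 16000),
    }
    if kwargs:
        raise TypeError('Unexpected options: {}'.format(sorted(kwargs)))
    return options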
Example #5
    @staticmethod
    def _schd_write_preprocessor(event):
        """
        To be used as a pre-processor for AsyncWriter objects applied to event schedules.
        Pre-processes an event object and converts it to a String representation.
        It uses the format specified in the SCHEDULE_OUTPUT constant.

        :param event: The event to be written to output
        """
        constants = CONSTANT()
        _dict = constants.SCHEDULE_OUTPUT
        _attrs = {}
        for a, av in _dict['attributes'].items():
            try:
                _attrs[a] = locate(av[-1])(*event.subattr(event, av[:-1]))
            except ValueError:
                _attrs[a] = 'NA'
        output_format = _dict['format']
        return output_format.format(**_attrs) + '\n'
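# Illustrative structure, inferred from the loop above rather than copied from an
# accasim default: each 'attributes' entry is a list whose leading elements form
# the attribute path handed to event.subattr() and whose last element is a type
# name resolved with locate() (presumably pydoc.locate, which maps the string
# 'int' to the int type); 'format' then lays out the converted values. The names
# and fields here are examples only.
EXAMPLE_SCHEDULE_OUTPUT = {
    'attributes': {
        'job_id': ['id', 'str'],
        'start_time': ['start_time', 'int'],
        'end_time': ['end_time', 'int'],
    },
    'format': '{job_id};{start_time};{end_time}'
}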
Example #6
    def __init__(self, job_id, queued_time, duration, requested_nodes,
                 requested_resources):
        """

        Constructor of the basic job event.

        :param job_id: Identification of the job.
        :param queued_time: Submission time to the system, as a unix timestamp.
        :param duration: Real duration of the job, in the same time units as the timestamps.
        :param requested_nodes: Number of requested nodes.
        :param requested_resources: Dictionary with the requested resources for a single node.

        """
        self.constants = CONSTANT()
        self.id = str(job_id)
        self.queued_time = queued_time
        self._requested_nodes = requested_nodes
        self._requested_resources = requested_resources
        self.start_time = None
        self.end_time = None
        self.duration = duration
        self.end_order = 0
        self._checked = False
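# Minimal usage sketch, assuming a concrete job event class (here called JobEvent,
# a hypothetical name) that keeps the constructor signature above. Times are unix
# timestamps and requested_resources describes what a single node must provide.
#
# job = JobEvent(job_id=1001,
#                queued_time=1500000000,      # submission time (unix timestamp)
#                duration=3600,               # real runtime of the job
#                requested_nodes=2,
#                requested_resources={'core': 4, 'mem': 4096})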
Example #7
    @staticmethod
    def _schd_pprint_preprocessor(event):
        """
        To be used as a pre-processor for AsyncWriter objects applied to pretty-print event schedules.
        Pre-processes an event object and converts it to a String representation.
        It uses the format specified in the PPRINT_SCHEDULE_OUTPUT constant.

        :param event: The event to be written to output
        """
        constants = CONSTANT()
        _dict = constants.PPRINT_SCHEDULE_OUTPUT
        _order = _dict['order']
        _attrs = {}
        for a, av in _dict['attributes'].items():
            try:
                _attrs[a] = locate(av[-1])(*event.subattr(event, av[:-1]))
            except ValueError:
                _attrs[a] = 'NA'
        output_format = _dict['format']
        values = [_attrs[k] for k in _order]
        if event.end_order == 1:
            return (output_format.format(*_order) + '\n',
                    output_format.format(*values) + '\n')
        else:
            return output_format.format(*values) + '\n'
Example #8
class SimulatorBase(ABC):

    LOG_LEVEL_INFO = 'INFO'
    LOG_LEVEL_DEBUG = 'DEBUG'
    LOG_LEVEL_TRACE = 'TRACE'

    def __init__(self, config_file=None, **kwargs):
        """

        Simulator base constructor

        :param resource_manager: Resource manager class instantiation
        :param reader: Reader class instantiation
        :param job_factory: Job Factory instantiation
        :param _dispatcher: Dispatcher instantiation
        :param config_file: Path to the config file in json format.
        :param \*\*kwargs: Dictionary of key:value parameters to be used in the simulator. It overwrites the current parameters. All parameters will be available on the constant variable

        """
        self.constants = CONSTANT()
        self.timeout = kwargs.pop('timeout', None)
        self._id = kwargs.pop('id', None)
        self._log_level = kwargs.pop('LOG_LEVEL', self.LOG_LEVEL_INFO)
        self.define_default_constants(config_file, **kwargs)

        self._logger, self._logger_listener = self.define_logger()
        self.real_init_time = datetime.now()

        # self.dispatcher = None
        self.reader = None
        self.resource_manager = None
        self.job_factory = None
        self.mapper = None
        self.additional_data = None

        if self.constants.OVERWRITE_PREVIOUS:
            self.remove_previous()

    def define_logger(self):
        self._define_trace_logger()
        FORMAT = '%(asctime)-15s %(module)s-%(levelname)s: %(message)s'

        queue = Queue(-1)
        queue_handler = handlers.QueueHandler(queue)
        handler = logging.StreamHandler()
        handler.setLevel(self._log_level)
        listener = handlers.QueueListener(queue, handler)

        logger_name = 'accasim'
        logger = logging.getLogger(logger_name)
        logger.addHandler(queue_handler)
        formatter = logging.Formatter(FORMAT)
        handler.setFormatter(formatter)
        logger.setLevel(getattr(logging, self._log_level))

        self.constants.load_constant('LOGGER_NAME', logger_name)
        return logger, listener

    def _define_trace_logger(self):
        level = logging.TRACE = logging.DEBUG - 5

        def log_logger(self, message, *args, **kwargs):
            if self.isEnabledFor(level):
                self._log(level, message, args, **kwargs)

        logging.getLoggerClass().trace = log_logger

        def log_root(msg, *args, **kwargs):
            logging.log(level, msg, *args, **kwargs)

        logging.addLevelName(level, "TRACE")
        logging.trace = log_root

    @abstractmethod
    def start_simulation(self):
        """

        Simulation initialization

        """
        raise NotImplementedError('Must be implemented!')

    @abstractmethod
    def load_events(self):
        """

        Method that loads the jobs from a data source. Check the default implementation in the Simulator class.

        """
        raise NotImplementedError('Must be implemented!')

    def additional_data_init(self, _additional_data):
        """

        Initializes the additional_data classes or sets the event manager on already-instantiated objects.

        :param _additional_data: A list of AdditionalData objects or classes

        :return: A list with all the AdditionalData objects ready to be executed

        """
        _ad = []
        for ad in _additional_data:
            if not ad:
                raise AdditionalDataError(
                    'AdditionalData class/object is None')

            if isinstance(ad, AdditionalData):
                ad.set_event_manager(self.mapper)
                _ad.append(ad)
            elif issubclass(ad, AdditionalData):
                _ad.append(ad(self.mapper))
            else:
                raise Exception(
                    'Additional data must be a subclass or an instance of the AdditionalData class. Received {}'
                    .format(type(ad)))
        return _ad

    def check_request(self, attrs_names):
        """

        Verifies that the job factory attributes can be supported by the system resources.

        :param attrs_names: Container with the attribute names defined in the job factory.

        :return: True if attributes are supported, False otherwise.

        """
        _system_resources = self.resource_manager.system_resource_types
        for _res in _system_resources:
            if not (_res in attrs_names):
                print(
                    'Resource \'{}\' is not included in the Job dict.'.format(
                        _res))
                return False
        return True

    def generate_enviroment(self, config_path):
        """

        Generates the synthetic system from the config file.

        :param config_path: Path to the config file.

        :return: Tuple with the resource manager object, the resource equivalence dictionary and the system start time.

        """
        config = load_config(config_path)
        equiv = config.pop('equivalence', {})
        start_time = config.pop('start_time', 0)
        resources = Resources(**config)
        return ResourceManager(resources), equiv, start_time

    def define_filepaths(self, **kwargs):
        """

        Adds useful filepaths to the kwargs and returns the updated dictionary.

        """
        kwargs['WORKLOAD_FILENAME'] = path_leaf(kwargs['WORKLOAD_FILEPATH'])[1]
        if 'RESULTS_FOLDER_PATH' not in kwargs:
            # New in 3.5
            filename = stack()[-1].filename
            script_path, script_name = path_leaf(filename)
            rfolder = kwargs.pop('RESULTS_FOLDER_NAME')
            kwargs['RESULTS_FOLDER_PATH'] = path.join(script_path, rfolder)
        dir_exists(kwargs['RESULTS_FOLDER_PATH'], create=True)
        return kwargs

    def set_workload_input(self, workload_path, **kwargs):
        """

        Creates a default reader object

        :param workload_path: Path to the workload
        :param \*\*kwargs: extra arguments

        :return: A reader object

        """
        return DefaultReader(workload_path, **kwargs)

    def prepare_arguments(self, possible_arguments, arguments):
        """

        Verifies the arguments for a specific instantiation and creates the corresponding dictionary.

        :Note:

            this method will be moved to misc

        :param possible_arguments: Required arguments.
        :param arguments: Available arguments.

        :return: Dictionary with the corresponding arguments.

        """
        return {k: v for k, v in arguments.items() if k in possible_arguments}

    def define_default_constants(self, config_filepath, **kwargs):
        """

        Defines the default constants of the simulator and updates them with any values provided by the user.

        :param config_filepath: Path to the config file in json format. Values from this file are applied last and override both the defaults and \*\*kwargs.
        :param \*\*kwargs: User-provided parameters that override the built-in defaults.

        """
        config = DEFAULT_SIMULATION
        for k, v in config.items():
            if k not in kwargs:
                kwargs[k] = v
        if config_filepath:
            for k, v in load_config(config_filepath).items():
                kwargs[k] = v
        kwargs = self.define_filepaths(**kwargs)
        self.constants.load_constants(kwargs)

    def show_config(self):
        """

        Shows the current simulator config

        """
        self._logger.info('Initializing the simulator')
        self._logger.info('Settings: ')
        self._logger.info('\tSystem Configuration file: {}'.format(
            self.constants.SYS_CONFIG_FILEPATH))
        self._logger.info('\tWorkload file: {}'.format(
            self.constants.WORKLOAD_FILEPATH))
        self._logger.info('\tResults folder: {}{}.'.format(
            self.constants.RESULTS_FOLDER_PATH, ', Overwrite previous files'
            if self.constants.OVERWRITE_PREVIOUS else ''))
        self._logger.info(
            '\t\t ({}) Dispatching Plan Output. Prefix: {}'.format(
                self.on_off(self.constants.SCHEDULING_OUTPUT),
                self.constants.SCHED_PREFIX))
        self._logger.info('\t\t ({}) Statistics Output. Prefix: {}'.format(
            self.on_off(self.constants.STATISTICS_OUTPUT),
            self.constants.STATISTICS_PREFIX))
        self._logger.info(
            '\t\t ({}) Dispatching Plan. Pretty Print Output. Prefix: {}'.
            format(self.on_off(self.constants.PPRINT_OUTPUT),
                   self.constants.PPRINT_PREFIX))
        self._logger.info('\t\t ({}) Benchmark Output. Prefix: {}'.format(
            self.on_off(self.constants.BENCHMARK_OUTPUT),
            self.constants.BENCHMARK_PREFIX))
        self._logger.info('Ready to Start')

    def on_off(self, state):
        """

        Maps True to 'ON' and False to 'OFF', just for visualization purposes.

        :param state: State of a constant. True or False.

        :return: 'ON' if state is True, 'OFF' otherwise.

        """
        return 'ON' if state else 'OFF'

    def remove_previous(self):
        """

        Removes result files produced by previous runs.

        """
        _wouts = [
            (self.constants.SCHEDULING_OUTPUT, self.constants.SCHED_PREFIX),
            (self.constants.STATISTICS_OUTPUT,
             self.constants.STATISTICS_PREFIX),
            (self.constants.PPRINT_OUTPUT, self.constants.PPRINT_PREFIX),
            (self.constants.BENCHMARK_OUTPUT, self.constants.BENCHMARK_PREFIX)
        ]

        _paths = [
            path.join(self.constants.RESULTS_FOLDER_PATH,
                      _prefix + self.constants.WORKLOAD_FILENAME)
            for state, _prefix in _wouts if state
        ]
        clean_results(*_paths)

    def _clean_simulator_constants(self):
        self.constants.clean_constants()

    def _save_parameters(self,
                         _parameters,
                         filename='simulator_parameters.json'):
        filename_path = path.join(self.constants.RESULTS_FOLDER_PATH, filename)
        _dict = {
            _param: getattr(self.constants, _param)
            for _param in _parameters
        }
        save_jsonfile(filename_path, _dict)

    def _generated_filepaths(self):
        possible_filepaths = [
            (self.constants.STATISTICS_OUTPUT,
             self.constants.STATISTICS_PREFIX,
             path.join(
                 self.constants.RESULTS_FOLDER_PATH,
                 self.constants.STATISTICS_PREFIX +
                 self.constants.WORKLOAD_FILENAME)),
            (self.constants.BENCHMARK_OUTPUT, self.constants.BENCHMARK_PREFIX,
             path.join(
                 self.constants.RESULTS_FOLDER_PATH,
                 self.constants.BENCHMARK_PREFIX +
                 self.constants.WORKLOAD_FILENAME)),
            (self.constants.SCHEDULING_OUTPUT, self.constants.SCHED_PREFIX,
             path.join(
                 self.constants.RESULTS_FOLDER_PATH,
                 self.constants.SCHED_PREFIX +
                 self.constants.WORKLOAD_FILENAME)),
            (self.constants.PPRINT_OUTPUT, self.constants.PPRINT_PREFIX,
             path.join(
                 self.constants.RESULTS_FOLDER_PATH,
                 self.constants.PPRINT_PREFIX +
                 self.constants.WORKLOAD_FILENAME))
        ]
        return {f[1]: f[2] for f in possible_filepaths if f[0]}
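# Hedged sketch of the smallest concrete subclass SimulatorBase allows: both
# abstract methods must be overridden before the class can be instantiated. The
# bodies are placeholders and do not reproduce the real accasim Simulator.
class MinimalSimulator(SimulatorBase):

    def start_simulation(self):
        # Placeholder main loop: a real implementation loads the events and then
        # drives the mapper/dispatcher until the workload is exhausted.
        self.load_events()

    def load_events(self):
        # Placeholder: a real implementation pulls jobs from self.reader and
        # registers them through the job factory and event manager.
        pass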
Example #9
import socket, json
import os
import argparse
from accasim.utils.misc import CONSTANT, load_config, system_status

# A simple launch script to contact the watcher daemon related to a running test session and obtain information about
# it. By default the connection is made to localhost, but the user can choose an IP if the tests are running
# on a different machine.
if __name__ == '__main__':
    CONFIG_FOLDER = 'config/'
    ESSENTIALS_FILENAME = 'essentials.config'
    const = CONSTANT()
    const.load_constants(load_config(os.path.join(CONFIG_FOLDER, ESSENTIALS_FILENAME)))

    parser = argparse.ArgumentParser(description="Client For HPC Simulator Watcher Daemon")
    parser.add_argument("-usage", action="store_true", dest="usage", help="Request current virtual resource usage.")
    parser.add_argument("-progress", action="store_true", dest="progress", help="Request current local progress.")
    parser.add_argument("-all", action="store_true", dest="all", help="Request all previous data.")
    parser.add_argument("-ip", action="store", dest="hostip", default="localhost", type=str, help="IP of server machine.")

    args = parser.parse_args()

    # Remember that commands and responses must not be longer than MAX_LENGTH!
    command = ''
    if args.usage:
        command = 'usage'
    elif args.progress:
        command = 'progress'
    elif args.all:
        command = 'all'
    else: