class SimulatorBase(ABC):
    """
    Abstract base for the simulator implementations.

    Loads the simulation constants, configures the package logger (adding a
    custom TRACE level below DEBUG), prepares result filepaths and optionally
    removes result files of previous runs. Concrete subclasses must implement
    :meth:`start_simulation` and :meth:`load_events`.
    """

    # Log level names accepted via the LOG_LEVEL kwarg.
    LOG_LEVEL_INFO = 'INFO'
    LOG_LEVEL_DEBUG = 'DEBUG'
    LOG_LEVEL_TRACE = 'TRACE'

    def __init__(self, config_file=None, **kwargs):
        """
        Simulator base constructor

        :param config_file: Path to the config file in json format.
        :param \*\*kwargs: Dictionary of key:value parameters to be used in
            the simulator. It overwrites the current parameters. All
            parameters will be available on the constant variable
        """
        self.constants = CONSTANT()
        self.timeout = kwargs.pop('timeout', None)
        self._id = kwargs.pop('id', None)
        self._log_level = kwargs.pop('LOG_LEVEL', self.LOG_LEVEL_INFO)
        # Constants must be loaded before the logger, which reads them.
        self.define_default_constants(config_file, **kwargs)
        self._logger, self._logger_listener = self.define_logger()
        self.real_init_time = datetime.now()
        # Collaborators are attached later by the concrete simulator.
        self.reader = None
        self.resource_manager = None
        self.job_factory = None
        self.mapper = None
        self.additional_data = None
        if self.constants.OVERWRITE_PREVIOUS:
            self.remove_previous()

    def define_logger(self):
        """
        Creates the 'accasim' logger with a queue-based handler chain
        (QueueHandler on the logger, StreamHandler behind a QueueListener).

        :return: Tuple (logger, queue listener). NOTE(review): the returned
            QueueListener is not started here — presumably the caller starts
            it; verify before relying on console output.
        """
        self._define_trace_logger()
        FORMAT = '%(asctime)-15s %(module)s-%(levelname)s: %(message)s'
        queue = Queue(-1)
        queue_handler = handlers.QueueHandler(queue)
        handler = logging.StreamHandler()
        handler.setLevel(self._log_level)
        listener = handlers.QueueListener(queue, handler)
        logger_name = 'accasim'
        logger = logging.getLogger(logger_name)
        logger.addHandler(queue_handler)
        formatter = logging.Formatter(FORMAT)
        handler.setFormatter(formatter)
        # TRACE was registered by _define_trace_logger, so getattr resolves
        # it on the logging module just like the builtin level names.
        logger.setLevel(getattr(logging, self._log_level))
        self.constants.load_constant('LOGGER_NAME', logger_name)
        return logger, listener

    def _define_trace_logger(self):
        """
        Registers a TRACE level (DEBUG - 5) on the logging module and attaches
        ``trace()`` helpers both to the Logger class and to the module itself.
        """
        level = logging.TRACE = logging.DEBUG - 5

        def log_logger(self, message, *args, **kwargs):
            # Bound as Logger.trace: mirrors how Logger.debug/info are written.
            if self.isEnabledFor(level):
                self._log(level, message, args, **kwargs)

        logging.getLoggerClass().trace = log_logger

        def log_root(msg, *args, **kwargs):
            # Module-level logging.trace, analogous to logging.debug.
            logging.log(level, msg, *args, **kwargs)

        logging.addLevelName(level, "TRACE")
        logging.trace = log_root

    @abstractmethod
    def start_simulation(self):
        """
        Simulation initialization
        """
        raise NotImplementedError('Must be implemented!')

    @abstractmethod
    def load_events(self):
        """
        Method that loads the job from a datasource. Check the default
        implementation in the Simulator class.
        """
        raise NotImplementedError('Must be implemented!')

    def additional_data_init(self, _additional_data):
        """
        Initializes the additional_data classes or set the event manager in
        the objects

        :param _additional_data: A list of AdditionalData objects or classes

        :return: Return a list with all the AdditionalData objects ready to
            be executed
        """
        _ad = []
        for ad in _additional_data:
            if not ad:
                raise AdditionalDataError(
                    'AdditionalData class/object is None')
            if isinstance(ad, AdditionalData):
                # Already instantiated: just wire in the event manager.
                ad.set_event_manager(self.mapper)
                _ad.append(ad)
            elif isinstance(ad, type) and issubclass(ad, AdditionalData):
                # A class: instantiate it with the event manager.
                _ad.append(ad(self.mapper))
            else:
                # Bug fix: report the offending type. The old code formatted
                # AdditionalData.__class__ (always ABCMeta); the
                # isinstance(ad, type) guard above also keeps non-class
                # values from crashing issubclass with a TypeError.
                raise Exception(
                    'Additional data class must be a subclass of the AdditionalData class. Received {}'
                    .format(type(ad)))
        return _ad

    def check_request(self, attrs_names):
        """
        Verifies that the job factory attributes can be supported by the
        system resources.

        :return: True if attributes are supported, False otherwise.
        """
        _system_resources = self.resource_manager.system_resource_types
        for _res in _system_resources:
            if _res not in attrs_names:
                # Consistency fix: report through the simulator logger
                # instead of a bare print().
                self._logger.error(
                    'Resource \'{}\' is not included in the Job dict.'.format(
                        _res))
                return False
        return True

    def generate_enviroment(self, config_path):
        """
        Generates the synthetic system from the config file

        :param config_path: Path to the config file

        :return: Tuple (resource manager object, equivalence dict, start time).
        """
        config = load_config(config_path)
        equiv = config.pop('equivalence', {})
        start_time = config.pop('start_time', 0)
        resources = Resources(**config)
        return ResourceManager(resources), equiv, start_time

    def define_filepaths(self, **kwargs):
        """
        Add to the kwargs useful filepaths.
        """
        kwargs['WORKLOAD_FILENAME'] = path_leaf(kwargs['WORKLOAD_FILEPATH'])[1]
        if 'RESULTS_FOLDER_PATH' not in kwargs:
            # stack()[-1] is the outermost frame, i.e. the user's launch
            # script: results go next to it by default. FrameInfo.filename
            # attribute access is new in Python 3.5.
            filename = stack()[-1].filename
            script_path, script_name = path_leaf(filename)
            rfolder = kwargs.pop('RESULTS_FOLDER_NAME')
            kwargs['RESULTS_FOLDER_PATH'] = path.join(script_path, rfolder)
        dir_exists(kwargs['RESULTS_FOLDER_PATH'], create=True)
        return kwargs

    def set_workload_input(self, workload_path, **kwargs):
        """
        Creates a default reader object

        :param workload_path: Path to the workload
        :param \*\*kwargs: extra arguments

        :return: A reader object
        """
        return DefaultReader(workload_path, **kwargs)

    def prepare_arguments(self, possible_arguments, arguments):
        """
        Verifies arguments for a specific instantiation and create the
        dictionary.

        :Note: this method will be moved to misc

        :param possible_arguments: Required arguments.
        :param arguments: Available arguments.

        :return: Dictionary with the corresponding arguments.
        """
        return {k: v for k, v in arguments.items() if k in possible_arguments}

    def define_default_constants(self, config_filepath, **kwargs):
        """
        Defines the default constants of the simulator, and update if the
        user gives new values.

        :param config_filepath: Path to the config file in json format
        """
        # Precedence: config file > user kwargs > DEFAULT_SIMULATION.
        for k, v in DEFAULT_SIMULATION.items():
            kwargs.setdefault(k, v)
        if config_filepath:
            for k, v in load_config(config_filepath).items():
                kwargs[k] = v
        kwargs = self.define_filepaths(**kwargs)
        self.constants.load_constants(kwargs)

    def show_config(self):
        """
        Shows the current simulator config
        """
        self._logger.info('Initializing the simulator')
        self._logger.info('Settings: ')
        self._logger.info('\tSystem Configuration file: {}'.format(
            self.constants.SYS_CONFIG_FILEPATH))
        self._logger.info('\tWorkload file: {}'.format(
            self.constants.WORKLOAD_FILEPATH))
        self._logger.info('\tResults folder: {}{}.'.format(
            self.constants.RESULTS_FOLDER_PATH,
            ', Overwrite previous files'
            if self.constants.OVERWRITE_PREVIOUS else ''))
        self._logger.info(
            '\t\t ({}) Dispatching Plan Output. Prefix: {}'.format(
                self.on_off(self.constants.SCHEDULING_OUTPUT),
                self.constants.SCHED_PREFIX))
        self._logger.info('\t\t ({}) Statistics Output. Prefix: {}'.format(
            self.on_off(self.constants.STATISTICS_OUTPUT),
            self.constants.STATISTICS_PREFIX))
        self._logger.info(
            '\t\t ({}) Dispatching Plan. Pretty Print Output. Prefix: {}'.
            format(self.on_off(self.constants.PPRINT_OUTPUT),
                   self.constants.PPRINT_PREFIX))
        self._logger.info('\t\t ({}) Benchmark Output. Prefix: {}'.format(
            self.on_off(self.constants.BENCHMARK_OUTPUT),
            self.constants.BENCHMARK_PREFIX))
        self._logger.info('Ready to Start')

    def on_off(self, state):
        """
        True: ON, False: OFF
        Just for visualization purposes.

        :param state: State of a constant. True or False
        """
        return 'ON' if state else 'OFF'

    def remove_previous(self):
        """
        To clean the previous results.
        """
        _wouts = [
            (self.constants.SCHEDULING_OUTPUT, self.constants.SCHED_PREFIX),
            (self.constants.STATISTICS_OUTPUT,
             self.constants.STATISTICS_PREFIX),
            (self.constants.PPRINT_OUTPUT, self.constants.PPRINT_PREFIX),
            (self.constants.BENCHMARK_OUTPUT, self.constants.BENCHMARK_PREFIX)
        ]
        # Only files of enabled outputs are removed.
        _paths = [
            path.join(self.constants.RESULTS_FOLDER_PATH,
                      _prefix + self.constants.WORKLOAD_FILENAME)
            for state, _prefix in _wouts if state
        ]
        clean_results(*_paths)

    def _clean_simulator_constants(self):
        # Resets the shared constant registry (CONSTANT is shared state).
        self.constants.clean_constants()

    def _save_parameters(self, _parameters,
                         filename='simulator_parameters.json'):
        """
        Saves the values of the given constant names to a json file inside
        the results folder.

        :param _parameters: List of constant names to persist.
        :param filename: Target filename inside the results folder.
        """
        filename_path = path.join(self.constants.RESULTS_FOLDER_PATH,
                                  filename)
        _dict = {
            _param: getattr(self.constants, _param)
            for _param in _parameters
        }
        save_jsonfile(filename_path, _dict)

    def _generated_filepaths(self):
        """
        :return: Dict mapping output prefix -> generated filepath, restricted
            to the outputs that are enabled.
        """
        _folder = self.constants.RESULTS_FOLDER_PATH
        _workload = self.constants.WORKLOAD_FILENAME
        # (enabled flag, prefix) for every possible output file; the path is
        # derived once below instead of repeating path.join four times.
        _outputs = [
            (self.constants.STATISTICS_OUTPUT,
             self.constants.STATISTICS_PREFIX),
            (self.constants.BENCHMARK_OUTPUT, self.constants.BENCHMARK_PREFIX),
            (self.constants.SCHEDULING_OUTPUT, self.constants.SCHED_PREFIX),
            (self.constants.PPRINT_OUTPUT, self.constants.PPRINT_PREFIX)
        ]
        return {
            _prefix: path.join(_folder, _prefix + _workload)
            for _enabled, _prefix in _outputs if _enabled
        }
import socket, json import os import argparse from accasim.utils.misc import CONSTANT, load_config, system_status # A simple launch script to contact the watcher demon related to a running test session, and obtain information about # it. By default the connection is launched on localhost, but the user can choose an IP if the tests are running # on a different machine. if __name__ == '__main__': CONFIG_FOLDER = 'config/' ESSENTIALS_FILENAME = 'essentials.config' const = CONSTANT() const.load_constants(load_config(os.path.join(CONFIG_FOLDER, ESSENTIALS_FILENAME))) parser = argparse.ArgumentParser(description="Client For HPC Simulator Watcher Daemon") parser.add_argument("-usage", action="store_true", dest="usage", help="Request current virtual resource usage.") parser.add_argument("-progress", action="store_true", dest="progress", help="Request current local progress.") parser.add_argument("-all", action="store_true", dest="all", help="Request all previous data.") parser.add_argument("-ip", action="store", dest="hostip", default="localhost", type=str, help="IP of server machine.") args = parser.parse_args() # Remember that commands and responses must not be longer than MAX_LENGTH! command = '' if args.usage: command = 'usage' elif args.progress: command = 'progress' elif args.all: command = 'all' else: