def start(self):
    """Start the workload: bring up the fio server pods, render the job
    yaml from its template, and create the client pod asynchronously
    (without waiting for it to become ready)."""
    # shortcuts for commonly used parameters
    namespace = self.params['namespace']
    run_dir = self.params['dir']
    # start the servers
    logger.info(f'{self.tag}: starting server_fio pods')
    conn_params = self.serverhandle.start(self.serverparams)
    # update self.params with connection parameters returned by server_fio
    util_functions.deep_update(self.params, conn_params)
    templates_dir = self.dirpath + '/' + self.params['templates_dir']
    template_file = self.params['template']
    yaml_file = run_dir + '/' + self.params['yaml_file']
    # create yaml for this run from the template
    util_functions.instantiate_template(templates_dir,
                                        template_file, yaml_file, self.params)
    logger.info(f'{self.tag}: creating pod')
    # create pod, and continue without waiting for ready
    k8s_wrappers.createpods_async(namespace, yaml_file)
def __init__(self, run_dir, params_dict, globals):
    """Set up a server_fio instance: assign a unique tag, load the
    module defaults, and fold in per-run and global overrides."""
    # directory containing this module (used to locate defaults/templates)
    self.dirpath = os.path.dirname(os.path.abspath(__file__))
    # unique id and tag for this instance
    self.id = server_fio.instance_counter
    server_fio.instance_counter += 1
    self.tag = 'server-fio' + str(self.id)
    # defaults come from a yaml file next to the module
    defaults_file = self.dirpath + '/defaults.yaml'
    self.params = util_functions.dict_from_file(defaults_file)
    # overlay module-specific parameters, then global ones
    overrides = util_functions.get_modparams(params_dict,
                                             ['servers', 'server_fio'])
    util_functions.deep_update(self.params, overrides)
    util_functions.update_modparams(self.params, globals)
    self.params['dir'] = run_dir + '/' + self.tag
    self.params['name'] = self.tag
    self.params['podlabel'] = "name=" + self.tag
    # parameters clients need in order to use this object;
    # serverlist is populated at start
    self.returnparams = {'datadir': self.params['datadir'],
                         'serverlist': []}
def __init__(self, run_dir, params_dict, globals):
    """Set up a fio_iops benchmark instance: assign a unique tag, load
    defaults, apply per-run and global overrides, create the run
    directory, and construct the underlying fio server handle."""
    # get directory pathname for module
    self.dirpath = os.path.dirname(os.path.abspath(__file__))
    # get a unique id and tag
    self.id = fio_iops.instance_counter
    self.tag = 'calm-fioiops-' + str(self.id)
    fio_iops.instance_counter += 1
    # load defaults from file
    yaml_file = self.dirpath + '/defaults.yaml'
    self.params = util_functions.dict_from_file(yaml_file)
    # update params; this will override some of the defaults
    labels_path = ['calm', 'fio_iops']
    new_params = util_functions.get_modparams(params_dict, labels_path)
    util_functions.deep_update(self.params, new_params)
    util_functions.update_modparams(self.params, globals)
    self.params['dir'] = run_dir + '/' + self.tag
    self.params['name'] = self.tag
    self.params['podlabel'] = "name=" + self.tag
    logger.debug(f'parameters: {self.params}')
    # create directory for self
    util_functions.create_dir(self.params['dir'])
    # create a handle for the fio server object
    self.serverhandle = server_fio.server_fio(self.params['dir'],
                                              params_dict, globals)
    # derive parameters for fio server for later use
    self.serverparams = self._derive_serverparams()

def _derive_serverparams(self):
    """Build the parameter dict handed to server_fio at start time:
    instance count, image, PVC size, and optional placement knobs."""
    # pass on basic parameters
    serverparams = {
        'nservers': self.params['ninstances'],
        'fio_image': self.params['fio_image'],
    }
    # derive space requirements: explicit pvcsize_gb wins; otherwise
    # estimate from the workload (numjobs * filesize) with headroom
    if 'pvcsize_gb' in self.params:
        serverparams['pvcsize_gb'] = self.params['pvcsize_gb']
    else:
        pvcsize_gb = self.params['numjobs'] * self.params['filesize_gb']
        serverparams['pvcsize_gb'] = int(
            pvcsize_gb * self.params['scalefactor'] +
            self.params['extraspace_gb'])
    # pass on storageclass, if specified
    if 'storageclass' in self.params:
        serverparams['storageclass'] = self.params['storageclass']
    # pass on nodeselector, if specified
    if 'server_nodeselector' in self.params:
        serverparams['nodeselector'] = self.params['server_nodeselector']
    return serverparams
def __init__(self, run_dir, params_dict, globals):
    """Initialize dummy_pause: built-in defaults updated with the
    user-supplied benchmark parameters."""
    # TODO: read from defaults file
    self.params = {'duration_sec': 5}
    overrides = util_functions.get_modparams(params_dict,
                                             ['benchmarks', 'dummy_pause'])
    util_functions.deep_update(self.params, overrides)
    logger.debug(f'dummy_pause parameters: {self.params}')
def perform_runs(batch_dir, params_dict):
    """Execute the runs described under the 'batch_control' section of
    params_dict, one subdirectory per run; when that section is absent,
    perform a single run directly in batch_dir."""
    module_label = 'batch_control'
    # input file does not have section for batch_control: single run
    if module_label not in params_dict:
        run_control.perform_singlerun(batch_dir, params_dict)
        return
    # remove my parameters from params_dict
    module_params = params_dict.pop(module_label, {})
    if module_params is None:
        module_params = {}
    # get list of runs to iterate over
    run_list = module_params.get('run_list', [])
    if run_list is None:
        run_list = []
    # enumerate instead of a hand-rolled counter (old code shadowed
    # the builtin 'iter')
    for run_index, run_dict in enumerate(run_list):
        if run_dict is None:
            run_dict = {}
        # run_tag, when given, names the run subdirectory; otherwise a
        # positional name is generated
        if 'run_tag' in run_dict:
            run_subdir = run_dict.pop('run_tag')
        else:
            run_subdir = 'run-' + str(run_index)
        # TODO: handle exception
        # set directory for this run
        run_dir = util_functions.create_subdir(batch_dir, run_subdir)
        # generate single dict: params_dict updated with run_dict
        run_params = copy.deepcopy(params_dict)
        util_functions.deep_update(run_params, run_dict)
        logger.info(f'starting run: {run_subdir}')
        # run_params is now in a form that perform_singlerun expects
        run_control.perform_singlerun(run_dir, run_params)
    logger.info('all runs completed')
def prepare(self):
    """Run the data-preparation phase: start the fio server pods, render
    the prepare yaml, run the prep pod to completion, collect its
    output, and delete it."""
    # shortcuts for commonly used parameters
    namespace = self.params['namespace']
    run_dir = self.params['dir']
    preparep_dir = run_dir + '/prepare_phase'
    podlabel = self.params['prep_podlabel']
    # create directory for prepare_phase output
    util_functions.create_dir(preparep_dir)
    # start the servers
    logger.info(f'starting server_fio pods')
    conn_params = self.serverhandle.start(self.serverparams)
    # update self.params with parameters required for server_fio
    util_functions.deep_update(self.params, conn_params)
    templates_dir = self.dirpath + '/' + self.params['templates_dir']
    template_file = self.params['prepare_template']
    yaml_file = run_dir + '/' + self.params['prepare_yaml']
    # create yaml for prepare phase
    util_functions.instantiate_template (templates_dir, \
        template_file, yaml_file, self.params)
    logger.info(f'creating prep pod')
    # create prep pod, and wait for its completion
    # expected pod count is 1, pause of 5 sec, 0 retries
    k8s_wrappers.createpods_sync (namespace, yaml_file, podlabel, \
        1, 5, 0, self.params['maxruntime_sec'])
    logger.info(f'prep pod completed')
    # copy output from pod before deleting it
    k8s_wrappers.copyfrompods (namespace, podlabel, \
        self.params['podoutdir'], preparep_dir)
    logger.info(f'copied output from prep pod')
    # delete prep pod
    k8s_wrappers.deletefrom_yaml(yaml_file, namespace)
    logger.info(f'deleted prep pod')
def start(self, passed_params):
    """Bring up the fio server pods and return the connection
    parameters (including the server hostname list) clients need."""
    logger.debug(f'{self.tag} start')
    util_functions.deep_update(self.params, passed_params)
    logger.debug(f'{self.tag} parameters: {self.params}')
    # create directory for self
    util_functions.create_dir(self.params['dir'])
    # shortcuts
    namespace = self.params['namespace']
    podlabel = self.params['podlabel']
    nservers = self.params['nservers']
    templates_dir = self.dirpath + '/' + self.params['templates_dir']
    template_file = self.params['template_file']
    yaml_file = self.params['dir'] + '/' + self.params['yaml_file']
    util_functions.instantiate_template(
        templates_dir, template_file, yaml_file, self.params)
    logger.debug(f'starting {self.tag} pods')
    # create the pods: expected count is nservers, up to nservers
    # retries with a 30 sec pause, 300 sec timeout
    # TODO: use a param here
    k8s_wrappers.createpods_sync(namespace, yaml_file, podlabel,
                                 nservers, 30, nservers, 300)
    logger.debug(f'{self.tag} pods ready')
    # get pod locations
    k8s_wrappers.get_podlocations(podlabel, namespace,
                                  self.params['dir'])
    # TODO: use list of pods as returned by k8s
    # update returnparams with server list
    self.returnparams['serverlist'].extend(
        self.tag + '-' + str(inst) + '.' + self.tag
        for inst in range(nservers))
    return self.returnparams
def __init__(self, run_dir, params_dict, globals):
    """Set up a sysstat instance: assign a unique tag, load the module
    defaults, and fold in per-run and global overrides."""
    # directory containing this module (used to locate defaults/templates)
    self.dirpath = os.path.dirname(os.path.abspath(__file__))
    # unique id and tag for this instance
    self.id = sysstat.instance_counter
    sysstat.instance_counter += 1
    self.tag = 'sysstat-' + str(self.id)
    # defaults come from a yaml file next to the module
    defaults_file = self.dirpath + '/defaults.yaml'
    self.params = util_functions.dict_from_file(defaults_file)
    # overlay module-specific parameters, then global ones
    overrides = util_functions.get_modparams(params_dict,
                                             ['statistics', 'sysstat'])
    util_functions.deep_update(self.params, overrides)
    util_functions.update_modparams(self.params, globals)
    self.params['dir'] = run_dir + '/' + self.tag
    self.params['name'] = self.tag
    self.params['podlabel'] = "name=" + self.tag
def update_params(self, passed_params):
    """Merge passed_params into this object's parameter dict (deep,
    in-place update)."""
    util_functions.deep_update(self.params, passed_params)
def update_params(self, passed_params):
    """Merge passed_params into sysstat's parameter dict (deep,
    in-place update) and log the merged result."""
    util_functions.deep_update(self.params, passed_params)
    logger.debug(f'sysstat parameters: {self.params}')
def perform_runs(run_params):
    """Top-level entry point: resolve global parameters, set up the run
    directory and logging, then hand off to batch_control to perform
    the runs described in run_params."""
    # make a copy of params so the caller's dict is not mutated
    params_dict = copy.deepcopy(run_params)
    # remove global parameters from params_dict
    passed_globals = params_dict.pop('global', {})
    if passed_globals is None:
        passed_globals = {}
    # read global defaults from file
    dirpath = os.path.dirname(os.path.abspath(__file__))
    yaml_file = dirpath + '/global_defaults.yaml'
    global_params = util_functions.dict_from_file(yaml_file)
    # update global defaults with passed values
    util_functions.deep_update(global_params, passed_globals)
    # use output_dir when given; otherwise create a timestamped run dir
    if 'output_dir' not in global_params:
        global_rundir = util_functions.createdir_ts(
            global_params['output_basedir'], 'run_')
    else:
        global_rundir = global_params['output_dir']
    # write a copy of input params as yaml, for reproducibility
    input_file_copy = global_params['input_copy']
    input_file_copy = global_rundir + '/' + input_file_copy
    util_functions.dict_to_file(run_params, input_file_copy)
    #
    # set up logging on the root logger
    #
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)
    logging_params = global_params['log_control']
    stderr_params = logging_params['stderr']
    if stderr_params['enabled']:
        ch = logging.StreamHandler()
        fmt = logging.Formatter('%(module)s - %(message)s')
        ch.setLevel(stderr_params['level'])
        ch.setFormatter(fmt)
        logger.addHandler(ch)
    file_params = logging_params['file']
    if file_params['enabled']:
        # log file goes to dirname when specified, else to the run dir
        # (renamed locals: old code shadowed builtins 'dir' and 'file')
        if 'dirname' in file_params:
            log_dir = file_params['dirname']
        else:
            log_dir = global_rundir
        log_path = log_dir + '/' + file_params['filename']
        fh = logging.FileHandler(log_path)
        fmt = logging.Formatter(
            '%(asctime)s - %(levelname)s - %(module)s - %(message)s')
        fh.setLevel(file_params['level'])
        fh.setFormatter(fmt)
        logger.addHandler(fh)
    logger.info(f'output directory: {global_rundir}')
    #
    # perform runs
    #
    batch_control.perform_runs(global_rundir, params_dict)