def as_string(self):
    """Human-readable identifier: '<proc> x <shortened file path>' or 'process'."""
    if not self.file:
        return 'process'
    short_path = Paths.path_end(Paths.without_ext(self.file), level=2)
    return '{} x {}'.format(self.proc, short_path)
def parse(self):
    """Instantiate a ConfigBase for every registered config file and link
    each yaml case file to the config object of its directory.

    Fix: iterate over a snapshot of the keys (as the sibling implementation
    does) so reassigning values cannot interact with dict iteration; the
    unused value variable is dropped.
    """
    for cfg_path in list(self.configs):
        self.configs[cfg_path] = ConfigBase(cfg_path)
    for case_file in list(self.files):
        config = Paths.join(Paths.dirname(case_file), yamlc.CONFIG_YAML)
        self.files[case_file] = self.configs[config]
def do_work(parser, args=None, debug=False): """ :type args: list :type parser: utils.argparser.ArgParser """ # parse arguments global arg_options, arg_others, arg_rest, debug_mode arg_options, arg_others, arg_rest = parser.parse(args) debug_mode = debug # configure path Paths.format = PathFormat.ABSOLUTE Paths.base_dir('' if not arg_options.root else arg_options.root) # check commands if len(arg_rest) == 0: parser.exit_usage('no MPI executable provided', exit_code=1) if len(arg_rest) == 1: parser.exit_usage('no executable provided', exit_code=2) # turn on dynamic messages if batch is not set Printer.dynamic_output = not arg_options.batch # # run local or pbs mode if arg_options.queue: return run_pbs_mode(debug) else: return run_local_mode(debug)
def parse(self, missing_policy=ConfigBase.MISSING_POLICY_CREATE_DEFAULT):
    """Materialize ConfigBase objects for all configs and bind every yaml
    case file to the config of its directory."""
    for cfg_path in list(self.configs.keys()):
        self.configs[cfg_path] = ConfigBase(cfg_path, missing_policy)
    for yaml_file in list(self.files.keys()):
        cfg_path = Paths.join(Paths.dirname(yaml_file), yamlc.CONFIG_YAML)
        self.files[yaml_file] = self.configs[cfg_path]
def compare(self, reference_filepath, other_filepath, **kwargs):
    """
    Template comparison method; an implementation may do anything as long
    as an int return-code is returned.

    :param reference_filepath:
    :param other_filepath:
    :param kwargs:
    :return: int return-code
    """
    reference_content = IO.read(Paths.abspath(reference_filepath))
    other_content = IO.read(Paths.abspath(other_filepath))

    # demonstrate how an implementation can report failure details
    self.output.write("In case of emergency,")
    self.output.write(" you can provide details on what went wrong")
    self.output.write(" using self.output.write method")
    self.output.write("")
    self.output.write("Error while comparing files \n{} \n{}"
                      .format(reference_filepath, other_filepath))

    # must return return-code!
    return 1
def __init__(self, yaml_config_file):
    """Load test configuration for the folder containing *yaml_config_file*.

    When the config file is missing, a default ("dummy") case is created for
    every yaml file found in the folder; otherwise cases come from the
    'test_cases' section, each merged over 'common_config'.
    """
    self.yaml_config_file = yaml_config_file
    self.root = Paths.dirname(self.yaml_config_file)
    self.yamls = self._get_all_yamls()
    self.cases = list()
    self.common_config = None

    # create dummy case for every yaml file in folder
    if not Paths.exists(self.yaml_config_file):
        self.common_config = deepcopy(DEFAULTS)
        for y in self.yamls:
            dummy_case = deepcopy(DEFAULTS)
            dummy_case['file'] = [y]
            self.cases.append(dummy_case)
    else:
        # setup common config values
        self.yaml_config = self._read_yaml()
        self.common_config = self.merge(DEFAULTS, self.yaml_config.get('common_config', {}))

        # first process files which are specified in test_cases
        missing = [Paths.basename(y) for y in self.yamls]
        for case in self.yaml_config.get('test_cases', []):
            case_config = self.merge(self.common_config, case)
            self.cases.append(case_config)
            # any yaml mentioned by a case is no longer "missing"
            for f in case_config['file']:
                if f in missing:
                    missing.remove(f)

        # process rest (dummy case)
        for y in missing:
            dummy_case = deepcopy(self.common_config)
            dummy_case['file'] = [y]
            self.cases.append(dummy_case)
def get_command(f1, f2, **details):
    """Build the ndiff argv comparing *f1* against *f2*.

    Fix: default tolerances were '100.01'/'100.0001', inconsistent with the
    sibling implementation which uses '0.01'/'0.0001'; the leading '100'
    reads as an accidental paste, so the defaults are aligned here.
    """
    return [
        Paths.ndiff(),
        '-r', str(details.get('r_tol', '0.01')),
        '-a', str(details.get('a_tol', '0.0001')),
        Paths.abspath(f1),
        Paths.abspath(f2)
    ]
def open(self):
    """Return the stream target for a subprocess based on the configured mode."""
    if self.mode in {self.SHOW, self.HIDE}:
        # SHOW inherits parent streams; HIDE swallows output via a pipe
        return None if self.mode is self.SHOW else subprocess.PIPE

    if self.mode in {self.WRITE, self.APPEND, self.VARIABLE}:
        if not self.fp:
            Paths.ensure_path(self.filename)
            open_flags = 'w+' if self.mode is self.WRITE else 'a+'
            self.fp = open(self.filename, open_flags)
        return self.fp
def copy(self):
    """Copy source to target (creating parent dirs); optionally delete source."""
    # create dirs for target file
    Paths.ensure_path(self.target)
    shutil.copy(self.source, self.target)
    # remove original file only when the rule asks for a move
    if self.remove_original:
        os.unlink(self.source)
def get_command(f1, f2, **details):
    """Build the ndiff argv with relative/absolute tolerances from *details*."""
    r_tol = details.get("r_tol", "0.01")
    a_tol = details.get("a_tol", "0.0001")
    return [
        Paths.ndiff(),
        "-r", str(r_tol),
        "-a", str(a_tol),
        Paths.abspath(f1),
        Paths.abspath(f2),
    ]
def __init__(self, o, config):
    """Build a single test case from dict *o* merged over yamlc.DEFAULTS.

    :param o: raw case dict from config.yaml
    :param config: parent config object, or None for a standalone (temp) case
    """
    o = ConfigBase.merge(yamlc.DEFAULTS, deepcopy(o))
    self.file = o.get(yamlc.TAG_FILES, None)
    self.proc = int(o.get(yamlc.TAG_PROC, None))
    self.time_limit = float(o.get(yamlc.TAG_TIME_LIMIT, None))
    self.memory_limit = float(o.get(yamlc.TAG_MEMORY_LIMIT, None))
    self.tags = set(o.get(yamlc.TAG_TAGS, None))
    self.check_rules = o.get(yamlc.TAG_CHECK_RULES, None)
    self.config = config

    if self.config:
        # anchor file paths and result folders under the parent config root
        self.file = Paths.join(self.config.root, Paths.basename(self.file))
        self.without_ext = Paths.basename(Paths.without_ext(self.file))
        self.shortname = '{name}.{proc}'.format(name=self.without_ext, proc=self.proc)
        self.fs = yamlc.ConfigCaseFiles(
            root=self.config.root,
            ref_output=Paths.join(self.config.root, yamlc.REF_OUTPUT_DIR, self.without_ext),
            output=Paths.join(self.config.root, yamlc.TEST_RESULTS, self.shortname))
    else:
        # create temp folder where files will be
        tmp_folder = Paths.temp_file(o.get('tmp') + '-{date}-{time}-{rnd}')
        Paths.ensure_path(tmp_folder, is_file=False)
        self.fs = yamlc.ConfigCaseFiles(root=tmp_folder, ref_output=tmp_folder, output=tmp_folder)
def get_pbs_module(hostname_hint=None):
    """
    Resolve and import the PBS module matching the current (or hinted) host.

    file host_table.yaml serves as lookup table when using python script in queue mode
    each key is hostname and each value names a module which should be loaded
    modules are located in /src/python/scripts/pbs/modules

    If no matching key for current machine exists, try pbs_<hostname>
    where all dots (.) are replaced with underscores (_).

    if hostname_hint is not set, node name will be used
    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    pbs_module_path = None
    host_file = Paths.join(Paths.flow123d_root(), 'config', 'host_table.yaml')
    host_file_exists = Paths.exists(host_file)
    hostname = hostname_hint or platform.node()
    from_host = False

    # try to get module name from the yaml lookup table
    if host_file_exists:
        with open(host_file, 'r') as fp:
            # safe_load: host_table.yaml is a plain mapping; do not allow
            # construction of arbitrary python objects from it
            hosts = yaml.safe_load(fp)
        pbs_module_path = hosts.get(hostname, None)
        from_host = pbs_module_path is not None

    # fall back to module name derived from the hostname
    if not pbs_module_path:
        hostname = hostname.replace('.', '_')
        pbs_module_path = 'pbs_{}'.format(hostname)

    # construct full path for import
    full_module_path = 'scripts.pbs.modules.{module_name}'.format(
        module_name=pbs_module_path)

    # try to import pbs_module, printing detailed diagnostics on failure
    try:
        return importlib.import_module(full_module_path)
    except ImportError:
        Printer.all.err('Could not load module "{}" ({}) for hostname "{}"',
                        pbs_module_path, full_module_path, hostname)
        with Printer.all.with_level(2):
            if host_file_exists:
                if from_host:
                    Printer.all.err(
                        'Value specified in host_table.yaml "{}" points to non-existing module',
                        pbs_module_path)
                else:
                    Printer.all.err(
                        'Config file host_table.yaml does not have entry for hostname "{}"',
                        hostname)
            else:
                Printer.all.err(
                    'Config file host_table.yaml does not exists ({}) and auto module detection failed',
                    host_file)
        raise
def _prepare(self):
    """Configure printer verbosity and absolute path handling from CLI args."""
    # configure printer: batch mode disables dynamic (in-place) output
    batch = self.arg_options.batch
    Printer.batch_output = batch
    Printer.dynamic_output = not batch
    self.progress = Printer.dynamic_output
    self.batch = Printer.batch_output

    # configure path handling
    Paths.format = PathFormat.ABSOLUTE
    if self.arg_options.root:
        Paths.init(self.arg_options.root)
def get_one(self, yaml_case_file):
    """
    Return all ConfigCase objects applicable to the single given yaml file.

    :rtype: list[ConfigCase]
    """
    wanted = Paths.basename(yaml_case_file)
    matches = list()
    for case in self.cases:
        for f in case[yamlc.TAG_FILES]:
            if Paths.basename(f) != wanted:
                continue
            # clone the case but restrict it to the requested file only
            dummy_case = deepcopy(case)
            dummy_case[yamlc.TAG_FILES] = [yaml_case_file]
            matches.extend(self._get_all_for_case(dummy_case))
    return [ConfigCase(m, self) for m in matches]
def create_comparisons(self):
    """Build a ComparisonMultiThread holding one PyPy job per (ref, out)
    file pair for every check_rule of the current case."""
    comparisons = ComparisonMultiThread(
        self.case.fs.ndiff_log,
        progress=printf.verbosity() is printf.OutputVerbosity.FULL)

    for check_rule in self.case.check_rules:
        # each rule is a one-key mapping {method: comp_data}
        method = str(list(check_rule.keys())[0])
        module = self.get_module(method)
        comp_data = check_rule[method]
        if not module:
            printf.error('Warning! No module for check_rule method "{}"', method)
            continue

        pairs = self._get_ref_output_files(comp_data)
        if pairs:
            for pair in pairs:
                # load module and determine whether we are dealing with
                # exec comparison or inplace comparison
                if issubclass(module.__class__, modules.ExecComparison):
                    command = module.get_command(*pair, **comp_data)
                    pm = PyPy(BinExecutor(command))
                    pm.executor.output = OutputMode.variable_output()
                else:
                    module = self.get_module(method)
                    module.prepare(*pair, **comp_data)
                    pm = PyPy(module)
                    pm.executor.output = OutputMode.dummy_output()
                # pm.error_monitor.deactivate()

                # if we fail, set error to 13
                pm.custom_error = 13
                # TODO: maybe some time limit would be useful
                pm.full_output = self.case.fs.ndiff_log

                path = Paths.path_end_until(pair[0], REF_OUTPUT_DIR)
                test_name = Paths.basename(
                    Paths.dirname(Paths.dirname(self.case.fs.ref_output)))
                size = Paths.filesize(pair[0], True)
                pm.name = '{}: {} ({})'.format(test_name, path, size)

                # full verbosity reports successes too; otherwise errors only
                if printf.verbosity() is printf.OutputVerbosity.FULL:
                    pm.monitor.color_complete_format = '{}: {} ({})'.format(
                        test_name, path, size)
                else:
                    pm.monitor.error_complete_format = '{}: {} ({})'.format(
                        test_name, path, size)
                comparisons.add(pm)

    return comparisons
def create_comparisons(self):
    """Build one PyPy comparison job per (ref, out) file pair per check_rule.

    Fix: dict views are not subscriptable in Python 3 — 'check_rule.keys()[0]'
    raised TypeError; use list(...)[0] as the sibling implementation does.
    """
    comparisons = ComparisonMultiThread(self.case.fs.ndiff_log)
    comparisons.thread_name_property = True

    for check_rule in self.case.check_rules:
        # each rule is a one-key mapping {method: comp_data}
        method = str(list(check_rule.keys())[0])
        module = self.get_module(method)
        comp_data = check_rule[method]
        if not module:
            Printer.all.err('Warning! No module for check_rule method "{}"', method)
            continue

        pairs = self._get_ref_output_files(comp_data)
        if pairs:
            for pair in pairs:
                # load module and determine whether we are dealing with
                # exec comparison or inplace comparison
                if issubclass(module.__class__, modules.ExecComparison):
                    command = module.get_command(*pair, **comp_data)
                    pm = PyPy(BinExecutor(command), progress=True)
                    pm.executor.output = OutputMode.variable_output()
                else:
                    module = self.get_module(method)
                    module.prepare(*pair, **comp_data)
                    pm = PyPy(module, progress=True)
                    pm.executor.output = OutputMode.dummy_output()
                pm.error_monitor.deactivate()

                # if we fail, set error to 13
                pm.custom_error = 13
                pm.start_monitor.deactivate()
                pm.end_monitor.deactivate()
                pm.progress_monitor.deactivate()
                pm.limit_monitor.deactivate()  # TODO: maybe some time limit would be useful
                pm.output_monitor.policy = pm.output_monitor.POLICY_ERROR_ONLY

                pm.error_monitor.message = 'Comparison using method {} failed!'.format(method)
                pm.error_monitor.indent = 1
                pm.full_output = self.case.fs.ndiff_log

                path = Paths.path_end_until(pair[0], REF_OUTPUT_DIR)
                test_name = Paths.basename(
                    Paths.dirname(Paths.dirname(self.case.fs.ref_output)))
                size = Paths.filesize(pair[0], True)
                pm.name = '{}: {} ({})'.format(test_name, path, size)
                comparisons.add(pm)

    return comparisons
def __init__(self, o, config):
    """Build a single test case from dict *o* merged over DEFAULTS.

    :param o: raw case dict from config.yaml
    :param config: parent config object, or None for a standalone (temp) case
    """
    o = ConfigBase.merge(DEFAULTS, deepcopy(o))
    self.file = o.get('file', None)
    self.proc = int(o.get('proc', None))
    self.time_limit = float(o.get('time_limit', None))
    self.memory_limit = float(o.get('memory_limit', None))
    self.tags = set(o.get('tags', None))
    self.check_rules = o.get('check_rules', None)
    self.config = config

    if self.config:
        # anchor file path and result folders under the parent config root
        self.file = Paths.join(self.config.root, self.file)
        self.without_ext = Paths.basename(Paths.without_ext(self.file))
        self.shortname = '{name}.{proc}'.format(name=self.without_ext, proc=self.proc)
        self.fs = ConfigCaseFiles(
            root=self.config.root,
            ref_output=Paths.join(self.config.root, 'ref_output', self.without_ext),
            output=Paths.join(
                self.config.root,
                'test_results',
                self.shortname
            ))
    else:
        # create temp folder where files will be
        tmp_folder = Paths.temp_file(o.get('tmp') + '-{date}-{time}-{rnd}')
        Paths.ensure_path(tmp_folder, is_file=False)
        self.fs = ConfigCaseFiles(
            root=tmp_folder,
            ref_output=tmp_folder,
            output=tmp_folder
        )
def list_tests():
    """Print all test yaml files grouped by their first-level test directory."""
    test_dir = Paths.join(Paths.flow123d_root(), 'tests')
    tests = Paths.walk(test_dir, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])

    # group basenames by parent dir, keeping only first-level test dirs
    grouped = dict()
    for test_path in tests:
        parent = Paths.dirname(test_path)
        if Paths.dirname(parent) != test_dir:
            continue
        grouped.setdefault(parent, list()).append(Paths.basename(test_path))

    for parent in sorted(grouped.keys()):
        Printer.all.out(Paths.relpath(parent, test_dir))
        with Printer.all.with_level(1):
            for basename in grouped[parent]:
                Printer.all.out('{: >4s} {: <40s} {}', '', basename,
                                Paths.relpath(Paths.join(parent, basename), test_dir))
        Printer.all.newline()
def open(self):
    """Open/return the stream for this output mode (file, temp file or pipe)."""
    if self.mode in {self.SHOW, self.HIDE}:
        # SHOW inherits parent streams; HIDE swallows output via a pipe
        return None if self.mode is self.SHOW else subprocess.PIPE

    if self.mode in {self.WRITE, self.APPEND, self.VARIABLE}:
        if self.mode in {self.WRITE, self.APPEND}:
            # open file manually when append or write
            Paths.ensure_path(self.filename)
            self.fp = open(self.filename, 'w+' if self.mode is self.WRITE else 'a+')
        else:
            # VARIABLE mode captures into a fresh temp file
            self.fp, self.filename = tempfile.mkstemp()
        return self.fp
def _get_ref_output_files(self, comp_data):
    """
    Resolve (reference, mirrored-output) file pairs for a check rule.

    :type comp_data: dict
    :return: list of (ref_file, output_file) tuples

    Fix: previously returned a zip iterator, which in Python 3 is always
    truthy (even when empty) and can only be consumed once; callers test
    'if pairs:' and then iterate, so materialize a list (as the sibling
    implementation already does).
    """
    # parse filters
    filters = [PathFilters.filter_wildcards(x) for x in comp_data.get('files', [])]

    # browse files and make them relative to ref output so filters work properly
    files = Paths.walk(self.case.fs.ref_output, [PathFilters.filter_type_is_file()])
    files = [Paths.relpath(f, self.case.fs.ref_output) for f in files]

    # filter files and make them absolute again
    files = Paths.match(files, filters)
    files = [Paths.join(self.case.fs.ref_output, f) for f in files]
    return list(zip(files, self._get_mirror_files(files)))
def _get_all_yamls(self):
    """Return all yaml files under root, excluding the config yaml itself."""
    yaml_filters = (
        PathFilters.filter_endswith(YAML),
        PathFilters.filter_not(PathFilters.filter_endswith(CONFIG_YAML)),
    )
    return Paths.browse(self.root, yaml_filters)
def generate_status_file(cls, target):
    """
    Will generate status file if target has option turned on

    :type target: PyPy
    """
    if not target.status_file:
        return

    IO.write(target.status_file, json.dumps(target.status(), indent=4))
    output_dir = Paths.dirname(target.status_file)
    profilers = Paths.browse(
        output_dir,
        [PathFilters.filter_wildcards('*/profiler_info_*.log.json')])

    # profiler json is missing? write an empty dummy one
    if not profilers:
        IO.write(Paths.join(output_dir, 'profiler_info_dummy.log.json'), '{}')
def create_path_dict(filename):
    """Map '+i' / '-i' keys to path components counted from front / back.

    NOTE(review): for i == 0, parts[-0] is parts[0], so '-0' duplicates
    '+0' rather than addressing the last component — confirm this is
    intended before changing it.
    """
    parts = Paths.split(filename)[1:-1]
    result = dict()
    for i, part in enumerate(parts):
        result['+' + str(i)] = part
        result['-' + str(i)] = parts[-i]
    return result
def _run(self):
    """
    Run method for this module
    """
    # prepare executor around the raw command line
    progress = not self.arg_options.batch
    pypy = PyPy(BinExecutor(self.rest), progress=progress)
    n_lines = 0 if self.arg_options.batch else 10

    # set up streams: capture output and mirror it into a temp log file
    log_file = Paths.temp_file('exec-limit-{date}-{time}-{rnd}.log')
    pypy.executor.output = OutputMode.variable_output()
    pypy.full_output = log_file

    # set limits
    pypy.limit_monitor.time_limit = self.arg_options.time_limit
    pypy.limit_monitor.memory_limit = self.arg_options.memory_limit

    # save output to file
    pypy.output_monitor.log_file = log_file

    # start process and wait for it to finish
    pypy.start()
    pypy.join()
    return pypy
def _run(self):
    """
    Run method for this module
    """
    # prepare executor around the remaining command-line arguments
    progress = not self.arg_options.batch
    pypy = PyPy(BinExecutor(self.arg_options.rest), progress=progress)
    n_lines = 0 if self.arg_options.batch else 10

    # set up streams: capture output and mirror it into a temp log file
    log_file = Paths.temp_file('exec-limit-{date}-{time}-{rnd}.log')
    pypy.executor.output = OutputMode.variable_output()
    pypy.full_output = log_file

    # set limits
    pypy.limit_monitor.time_limit = self.arg_options.time_limit
    pypy.limit_monitor.memory_limit = self.arg_options.memory_limit

    # save output to file
    pypy.output_monitor.log_file = log_file

    # start process and wait for it to finish
    pypy.start()
    pypy.join()
    return pypy
def run_local_mode_one(self, proc):
    """
    Run a single job with the specified number of CPUs.

    :param proc: requested CPU count; 0 runs the executable without the
        mpiexec wrapper
    """
    rest = self.arg_options.rest
    if int(proc) == 0:
        command = rest[1:]
    else:
        command = [rest[0], '-np', proc] + rest[1:]

    n_lines = 0 if self.arg_options.batch else 10
    pypy = PyPy(BinExecutor(command))

    # set limits
    pypy.limit_monitor.time_limit = self.time_limit
    pypy.limit_monitor.memory_limit = self.memory_limit

    # catch output to variable
    # in batched mode we will keep the files
    # otherwise we will keep logs only on error
    log_file = Paths.temp_file('exec-parallel-{date}-{time}-{rnd}.log')
    pypy.executor.output = OutputMode.variable_output()
    pypy.full_output = log_file

    # save output to file
    pypy.output_monitor.log_file = log_file

    # start and wait for exit
    pypy.start()
    pypy.join()
    return pypy
def _get_flow123d(self):
    """Return flow123d argv for this case: solve file, input dir, output dir."""
    return [
        Paths.flow123d(),
        '-s', self.case.file,
        '-i', self.case.fs.input,
        '-o', self.case.fs.output,
    ]
def run_local_mode_one(self, proc):
    """
    Run a single job with the specified number of CPUs.

    :param proc: requested CPU count; 0 runs the executable without the
        mpiexec wrapper
    """
    if int(proc) == 0:
        command = self.rest[1:]
    else:
        command = [self.rest[0], '-np', proc] + self.rest[1:]

    n_lines = 0 if self.arg_options.batch else 10
    pypy = PyPy(BinExecutor(command))

    # set limits
    pypy.limit_monitor.time_limit = self.time_limit
    pypy.limit_monitor.memory_limit = self.memory_limit

    # catch output to variable
    # in batched mode we will keep the files
    # otherwise we will keep logs only on error
    log_file = Paths.temp_file('exec-parallel-{date}-{time}-{rnd}.log')
    pypy.executor.output = OutputMode.variable_output()
    pypy.full_output = log_file

    # save output to file
    pypy.output_monitor.log_file = log_file

    # start and wait for exit
    pypy.start()
    pypy.join()
    return pypy
def get_pbs_module(hostname_hint=None):
    """
    Resolve and import the PBS module matching the current (or hinted) host.

    file host_table.yaml serves as lookup table when using python script in queue mode
    each key is hostname and each value names a module which should be loaded
    modules are located in /src/python/scripts/pbs/modules

    If no matching key for current machine exists, try pbs_<hostname>
    where all dots (.) are replaced with underscores (_).

    if hostname_hint is not set, node name will be used
    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    pbs_module_path = None
    host_file = Paths.join(Paths.flow123d_root(), 'config', 'host_table.yaml')
    host_file_exists = Paths.exists(host_file)
    hostname = hostname_hint or platform.node()
    from_host = False

    # try to get module name from the yaml lookup table
    if host_file_exists:
        with open(host_file, 'r') as fp:
            # safe_load: host_table.yaml is a plain mapping; do not allow
            # construction of arbitrary python objects from it
            hosts = yaml.safe_load(fp)
        pbs_module_path = hosts.get(hostname, None)
        from_host = pbs_module_path is not None

    # fall back to module name derived from the hostname
    if not pbs_module_path:
        hostname = hostname.replace('.', '_')
        pbs_module_path = 'pbs_{}'.format(hostname)

    # construct full path for import
    full_module_path = 'scripts.pbs.modules.{module_name}'.format(module_name=pbs_module_path)

    # try to import pbs_module, printing detailed diagnostics on failure
    try:
        return importlib.import_module(full_module_path)
    except ImportError:
        Printer.all.err('Could not load module "{}" ({}) for hostname "{}"',
                        pbs_module_path, full_module_path, hostname)
        with Printer.all.with_level(2):
            if host_file_exists:
                if from_host:
                    Printer.all.err('Value specified in host_table.yaml "{}" points to non-existing module',
                                    pbs_module_path)
                else:
                    Printer.all.err('Config file host_table.yaml does not have entry for hostname "{}"',
                                    hostname)
            else:
                Printer.all.err('Config file host_table.yaml does not exists ({}) and auto module detection failed',
                                host_file)
        raise
def _run(self):
    """Remove self.dir recursively; returncode 0 on success, 4 on OSError."""
    if not Paths.exists(self.dir):
        return
    try:
        shutil.rmtree(self.dir)
    except OSError as e:
        self.returncode = 4
        self.error = str(e)
    else:
        self.returncode = 0
def _get_flow123d(self):
    """Return flow123d argv for this case: solve file and output dir."""
    return [
        Paths.flow123d(),
        '-s', self.case.file,
        '-o', self.case.fs.output,
    ]
def _run(self):
    """
    Run method for this module: collect yaml files from the given paths,
    build configs, apply tag filters and dispatch to PBS or local execution.
    """
    if self.arg_options.random_output_dir:
        import scripts.yamlc as yamlc
        from core.base import System
        # randomize results dir name to avoid clashes between parallel runs
        yamlc.TEST_RESULTS = 'test_results-{}'.format(System.rnd8)

    self.all_yamls = list()
    for path in self.others:
        if not Paths.exists(path):
            Printer.all.err('given path does not exists, ignoring path "{}"', path)
            sys.exit(3)

        # append files to all_yamls (walk directories recursively)
        if Paths.is_dir(path):
            self.all_yamls.extend(Paths.walk(path, ConfigPool.yaml_filters))
        else:
            self.all_yamls.append(path)

    Printer.all.out("Found {} yaml file/s", len(self.all_yamls))
    if not self.all_yamls:
        Printer.all.wrn('No yaml files found in locations: \n {}',
                        '\n '.join(self.others))
        sys.exit(0)

    self.configs = self.read_configs(self.all_yamls)
    # CLI options override per-case config values
    self.configs.update(
        proc=self.arg_options.cpu,
        time_limit=self.arg_options.time_limit,
        memory_limit=self.arg_options.memory_limit,
    )

    # filter tags for includes and excludes
    self.configs.filter_tags(
        include=self.include,
        exclude=self.exclude
    )

    if self.arg_options.queue:
        Printer.all.out('Running in PBS mode')
        return self.run_pbs_mode()
    else:
        Printer.all.out('Running in LOCAL mode')
        return self.run_local_mode()
def _run(self):
    """
    Run method for this module: collect yaml files from CLI args, build
    configs, apply tag filters and dispatch to PBS or local execution.
    """
    if self.arg_options.random_output_dir:
        import scripts.yamlc as yamlc
        # suffix results dir so repeated runs do not collide
        yamlc.TEST_RESULTS = 'test_results-{}'.format(self.arg_options.random_output_dir)

    self.all_yamls = list()
    for path in self.arg_options.args:
        if not Paths.exists(path):
            Printer.all.err('given path does not exists, path "{}"', path)
            sys.exit(3)

        # append files to all_yamls (walk directories recursively)
        if Paths.is_dir(path):
            self.all_yamls.extend(Paths.walk(path, ConfigPool.yaml_filters))
        else:
            self.all_yamls.append(path)

    Printer.all.out("Found {} yaml file/s", len(self.all_yamls))
    if not self.all_yamls:
        Printer.all.wrn('No yaml files found in locations: \n {}',
                        '\n '.join(self.arg_options.args))
        sys.exit(0)

    self.configs = self.read_configs(self.all_yamls)
    # CLI options override per-case config values
    self.configs.update(
        proc=self.arg_options.cpu,
        time_limit=self.arg_options.time_limit,
        memory_limit=self.arg_options.memory_limit,
    )

    # filter tags for includes and excludes
    self.configs.filter_tags(
        include=self.include,
        exclude=self.exclude
    )

    if self.arg_options.queue:
        Printer.all.out('Running in PBS mode')
        return self.run_pbs_mode()
    else:
        Printer.all.out('Running in LOCAL mode')
        return self.run_local_mode()
def list_tests():
    """Print test yaml files grouped by directory, two columns per row."""
    test_dir = Paths.join(Paths.flow123d_root(), 'tests')
    tests = Paths.walk(test_dir, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])

    # group basenames by parent dir, keeping only first-level test dirs
    grouped = dict()
    for test_path in tests:
        parent = Paths.dirname(test_path)
        if Paths.dirname(parent) != test_dir:
            continue
        grouped.setdefault(parent, list()).append(Paths.basename(test_path))

    for parent in sorted(grouped.keys()):
        printf.warning(Paths.relpath(parent, test_dir))
        with printf:
            names = list(grouped[parent])
            columns = 2
            for start in range(0, len(names), columns):
                row = names[start:start + columns]
                printf.out(' '.join('{:<40s}'.format(name) for name in row))
        printf.sep()
def __init__(self, yaml_config_file, missing_policy=MISSING_POLICY_CREATE_DEFAULT):
    """Load folder test configuration.

    :param yaml_config_file: path to the folder's config yaml
    :param missing_policy: controls whether yaml files not mentioned in
        test_cases get a default ("dummy") case
    """
    self.yaml_config_file = yaml_config_file
    self.root = Paths.dirname(self.yaml_config_file)
    self.yamls = self._get_all_yamls()
    self.cases = list()
    self.common_config = None
    self.missing_policy = missing_policy

    # create dummy case for every yaml file in folder
    if not Paths.exists(self.yaml_config_file):
        self.common_config = deepcopy(yamlc.DEFAULTS)
        for y in self.yamls:
            dummy_case = deepcopy(yamlc.DEFAULTS)
            dummy_case['files'] = [y]
            self.cases.append(dummy_case)
    else:
        # setup common config values
        self.yaml_config = self._read_yaml()
        self.common_config = self.merge(
            yamlc.DEFAULTS, self.yaml_config.get('common_config', {}))

        # first process files which are specified in test_cases
        missing = [Paths.basename(y) for y in self.yamls]
        for case in self.yaml_config.get(yamlc.TAG_TEST_CASES, []):
            case_config = self.merge(self.common_config, case)
            # ensure that value are array
            case_config[yamlc.TAG_FILES] = ensure_iterable(
                case_config.get(yamlc.TAG_FILES, []))
            # keep correct order
            self.cases.append(case_config)
            for f in case_config[yamlc.TAG_FILES]:
                if f in missing:
                    missing.remove(f)

        # process rest (dummy case) only when the policy allows defaults
        if missing_policy == self.MISSING_POLICY_CREATE_DEFAULT:
            for y in missing:
                dummy_case = deepcopy(self.common_config)
                dummy_case[yamlc.TAG_FILES] = [y]
                self.cases.append(dummy_case)
def open(self):
    """Open/return the stream for this output mode (pipe, file or temp file)."""
    if self.mode is self.DUMMY:
        return subprocess.PIPE
    if self.mode in {self.SHOW, self.HIDE}:
        # SHOW inherits parent streams; HIDE swallows output via a pipe
        return None if self.mode is self.SHOW else subprocess.PIPE

    if self.mode in {self.WRITE, self.APPEND, self.VARIABLE}:
        if self.mode in {self.WRITE, self.APPEND}:
            # open file manually when append or write
            Paths.ensure_path(self.filename)
            self.fp = open(self.filename, 'w+' if self.mode is self.WRITE else 'a+')
        else:
            # VARIABLE mode captures into a fresh temp file
            self.fp, self.filename = tempfile.mkstemp()
        return self.fp
def _walk_files(self):
    """Collect yaml files from CLI args; set self.dir_mode when any arg is a dir."""
    # switching processing logic
    self.dir_mode = False
    all_yamls = list()

    # in this loop we are processing all given files/folders
    for path in self.arg_options.args:
        if not Paths.exists(path):
            printf.error('given path does not exists, path "{}"', path)
            sys.exit(3)

        if Paths.is_dir(path):
            self.dir_mode = True
            all_yamls.extend(Paths.walk(path, ConfigPool.yaml_filters))
        else:
            all_yamls.append(path)
    return all_yamls
def _extract_parts(filename): parts = Paths.split(filename) test = parts[-3] case = parts[-2].split('.')[0] nproc = int(parts[-2].split('.')[1]) return { "test-name": test, "case-name": case, "nproc": nproc }
def _check_arguments(self):
    """
    Arguments additional check
    """
    if self.arg_options.list:
        self.list_tests()
        sys.exit(0)

    # we need flow123d, mpiexec and ndiff to exists in LOCAL mode
    local_mode = not self.arg_options.queue
    if local_mode and not Paths.test_paths('flow123d', 'mpiexec', 'ndiff'):
        Printer.all.wrn('Missing obligatory files!')
def create_comparisons(self):
    """Build one PyPy ndiff-style comparison job per (ref, out) file pair.

    Fix: 'check_rule.keys()[0]' raises TypeError on Python 3 because dict
    views are not subscriptable; use list(...)[0] instead.
    """
    comparisons = ComparisonMultiThread(self.case.fs.ndiff_log)
    comparisons.thread_name_property = True

    for check_rule in self.case.check_rules:
        # each rule is a one-key mapping {method: comp_data}
        method = str(list(check_rule.keys())[0])
        module = getattr(file_comparison, 'Compare{}'.format(method.capitalize()), None)
        comp_data = check_rule[method]
        if not module:
            Printer.all.err('Warning! No module for check_rule method "{}"', method)
            continue

        pairs = self._get_ref_output_files(comp_data)
        if pairs:
            for pair in pairs:
                command = module.get_command(*pair, **comp_data)
                pm = PyPy(BinExecutor(command), progress=True)

                # if we fail, set error to 13
                pm.custom_error = 13
                pm.start_monitor.deactivate()
                pm.end_monitor.deactivate()
                pm.progress_monitor.deactivate()
                pm.limit_monitor.deactivate()  # TODO: maybe some time limit would be useful
                pm.output_monitor.policy = pm.output_monitor.POLICY_ERROR_ONLY

                pm.error_monitor.message = 'Comparison using method {} failed!'.format(method)
                pm.error_monitor.indent = 1

                # catch output
                pm.executor.output = OutputMode.variable_output()
                pm.full_output = self.case.fs.ndiff_log

                path = Paths.path_end_until(pair[0], REF_OUTPUT_DIR)
                test_name = Paths.basename(Paths.dirname(Paths.dirname(self.case.fs.ref_output)))
                size = Paths.filesize(pair[0], True)
                pm.name = '{}: {} ({})'.format(test_name, path, size)
                comparisons.add(pm)

    return comparisons
def __init__(self, yaml_config_file):
    """Load folder test configuration from *yaml_config_file*.

    Falls back to a default ("dummy") case per yaml file when the config
    file does not exist.
    """
    self.yaml_config_file = yaml_config_file
    self.root = Paths.dirname(self.yaml_config_file)
    self.yamls = self._get_all_yamls()
    self.cases = list()
    self.common_config = None

    # create dummy case for every yaml file in folder
    if not Paths.exists(self.yaml_config_file):
        self.common_config = deepcopy(yamlc.DEFAULTS)
        for y in self.yamls:
            dummy_case = deepcopy(yamlc.DEFAULTS)
            dummy_case['files'] = [y]
            self.cases.append(dummy_case)
    else:
        # setup common config values
        self.yaml_config = self._read_yaml()
        self.common_config = self.merge(
            yamlc.DEFAULTS, self.yaml_config.get('common_config', {}))

        # first process files which are specified in test_cases
        missing = [Paths.basename(y) for y in self.yamls]
        for case in self.yaml_config.get(yamlc.TAG_TEST_CASES, []):
            case_config = self.merge(self.common_config, case)
            # ensure that value is array
            case_config[yamlc.TAG_FILES] = ensure_iterable(
                case_config.get(yamlc.TAG_FILES, []))
            # keep correct order
            self.cases.append(case_config)
            for f in case_config[yamlc.TAG_FILES]:
                if f in missing:
                    missing.remove(f)

        # process rest (dummy case)
        for y in missing:
            dummy_case = deepcopy(self.common_config)
            dummy_case[yamlc.TAG_FILES] = [y]
            self.cases.append(dummy_case)
def get_pbs_module(hostname=None):
    """
    Import the PBS module for *hostname* (default: current node name).

    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    if not hostname:
        hostname = platform.node()

    # try to get name from json file
    pbs_module_path = None
    host_file = Paths.join(Paths.source_dir(), 'host_table.json')
    if Paths.exists(host_file):
        with open(host_file, 'r') as fp:
            hosts = json.load(fp)
        pbs_module_path = hosts.get(hostname, None)

    if not pbs_module_path:
        # derive module name from the hostname when lookup gave nothing
        pbs_module_path = 'pbs_{}'.format(hostname.replace('.', '_'))
        Printer.wrn('Warning! no host specified assuming module {}', pbs_module_path)

    # try to get pbs_module
    return importlib.import_module('scripts.pbs.modules.{}'.format(pbs_module_path))
def run(self):
    """Delete requested dirs, then copy every file yielded by this rule set."""
    # wipe out dirs on demand
    if self.wipeout_dir:
        for d in self.wipeout_dir:
            if Paths.exists(d):
                Printer.all.out('Deleting directory {}', d)
                shutil.rmtree(d)

    copied = 0
    for rule in self:
        copied += 1
        rule.copy()
    Printer.all.out("Copied out {} files", copied)
def __iter__(self):
    """Yield a CopyRule for every file matched by the formic include set."""
    fileset = formic.FileSet(self.includes, directory=self.source)
    for filename in fileset:
        base = Paths.basename(filename)
        if self.name:
            # custom target name template with path/name placeholders
            name = self.name.format(
                path=self.create_path_dict(filename),
                name=base,
            )
        else:
            name = base

        if self.flat:
            root = self.target
        else:
            # mirror the source-relative directory structure under target
            rel_path = Paths.relpath(Paths.dirname(filename), Paths.abspath(self.source))
            root = Paths.abspath(Paths.join(self.target, rel_path))

        yield CopyRule(filename, Paths.join(root, name), self.remove_original)
def get_commit(cls):
    """
    Calls git show on git root to determine hash and unix timestamp of the
    current commit (HEAD).

    :return: dict(hash=..., date=...) or None when git lookup fails
    """
    import subprocess
    try:
        root = Paths.flow123d_root()
        # get current hash(%H) and date(%ct) from git repo
        result = subprocess.check_output(
            'git show -s --format=%H,%ct HEAD'.split(),
            cwd=root).decode()
        sha, date = str(result).strip().split(',')
        return dict(hash=sha, date=int(date))
    except Exception:
        # narrowed from bare 'except:' so SystemExit/KeyboardInterrupt are
        # no longer swallowed; the best-effort lookup still returns None
        return None
def _check_arguments(self):
    """
    Arguments additional check
    """
    if self.arg_options.list:
        self.list_tests()
        sys.exit(0)

    # we need flow123d, mpiexec and ndiff to exists in LOCAL mode
    if not self.arg_options.queue:
        if not Paths.test_paths('flow123d', 'mpiexec', 'ndiff'):
            Printer.all.err('Missing obligatory files! Exiting')
            sys.exit(1)

    # test yaml args
    if not self.others:
        self.parser.exit_usage('Error: No yaml files or folder given')
        sys.exit(2)
def on_complete(self, pypy=None):
    """Print captured process output when the watched process failed.

    Shows the configured message (if any) and the last *tail* lines of
    output, with a pointer to the full log when one exists.
    """
    if self.pypy.returncode > 0:
        if self.message:
            Printer.separator()
            Printer.open()
            Printer.out(self.message)
        else:
            Printer.open()

        # if file pointer exist try to read errors and outputs
        output = self.pypy.executor.output.read()
        if output:
            if self.pypy.full_output:
                Printer.out('Output (last {} lines, rest in {}): ',
                            self.tail, Paths.abspath(self.pypy.full_output))
            else:
                Printer.out('Output (last {} lines): ', self.tail)
            Printer.err(format_n_lines(output, -self.tail, indent=Printer.indent * ' '))
        Printer.close()
def configure_arguments(self, args): """ Method will replace given arguments placeholders available placeholders are: - FLOW123D_DIR - path to repository root (such as /opt/flow123d) - CURRENT_TEST_DIR - path to current test dir (such as /opt/flow123d/tests/01_cmd_line) - CURRENT_OUTPUT_DIR - path to current test output dir (such as /opt/flow123d/tests/01_cmd_line/test_results/02_input_format.1) - CURRENT_REF_OUTPUT_DIR - path to current test dir (such as /opt/flow123d/tests/01_cmd_line/ref_out/02_input_format) - TESTS_DIR - path to tests dir (such as /opt/flow123d/tests) NOTE: This value may not be precise and works only when running tests in standard flow123d structure. This value is essentially CURRENT_TEST_DIR/.. :type args: list[str] """ # build replacements map for the current case replacements = dict( FLOW123D_DIR=Paths.flow123d_root(), CURRENT_TEST_DIR=self.case.fs.root, CURRENT_OUTPUT_DIR=self.case.fs.output, CURRENT_REF_OUTPUT_DIR=self.case.fs.ref_output, TESTS_DIR=Paths.dirname(self.case.fs.root), ) for i in range(len(args)): for repl, val in replacements.items(): args[i] = args[i].replace('$%s$' % repl, val) # works for $VALUE$ args[i] = args[i].replace('<%s>' % repl, val) # works for <VALUE> args[i] = args[i].replace('{%s}' % repl, val) # works for {VALUE} return args
def parse_yaml(self):
    """Read the artifact yaml file, expand <placeholder> values and store
    the parsed configuration dict."""
    # register yaml parser tags (imports have side effects on the yaml loader)
    from scripts.artifacts.collector import Collector
    from scripts.artifacts.command import Command
    from scripts.artifacts.modules.mongodb import DatabaseMongo
    from scripts.artifacts.modules.lscpu import CommandLSCPU

    with open(self.yaml_file, 'r') as fp:
        yaml_data = fp.read()

    # substitute <root>, <time>, <rnd...> style placeholders before parsing
    yaml_data = strings.replace_placeholders(
        yaml_data,
        _format_='<{}>',
        root=Paths.flow123d_root(),
        time=System.time,
        date=System.date,
        datetime=System.datetime,
        rnd8=System.rnd8,
        rnd16=System.rnd16,
        rnd32=System.rnd32,
        rnd=System.rnd,
    )
    # NOTE(review): yaml.load without an explicit Loader is unsafe on
    # untrusted input; the imports above presumably register custom tags,
    # so verify before switching to safe_load
    self.configuration = yaml.load(yaml_data) or {}
def in_output(self, *names):
    """
    Will return path for file located in output

    :rtype: str
    """
    segments = (self.output,) + names
    return Paths.join(*segments)
def in_root(self, *names):
    """
    Will return path for file located in root

    :rtype: str
    """
    segments = (self.root,) + names
    return Paths.join(*segments)
def _get_mpi(self):
    """Return the mpiexec command prefix for this case's CPU count.

    Fix: proc is stored as an int elsewhere in this file; convert to str so
    the argv list is safe to pass to subprocess (which rejects int args).
    """
    return [Paths.mpiexec(), '-np', str(self.case.proc)]
def __init__(self, yaml_file):
    """Store the absolute yaml path; parsing happens later in parse_yaml."""
    self.yaml_file = Paths.abspath(yaml_file)
    self.configuration = None
    # set default language as english so child tools emit parseable output
    os.environ['LC_ALL'] = 'C'
def as_string(self):
    """Short label: '<proc> x <file tail>' when a file is set, else 'process'."""
    if self.file:
        tail = Paths.path_end(Paths.without_ext(self.file), level=2)
        return '{} x {}'.format(self.proc, tail)
    return 'process'
def add_case(self, yaml_case_file):
    """Register a yaml case file plus its directory's config.yaml; fluent API."""
    config_path = Paths.join(Paths.dirname(yaml_case_file), yamlc.CONFIG_YAML)
    # values stay None until parse() materializes the ConfigBase objects
    self.configs[config_path] = None
    self.files[yaml_case_file] = None
    return self
result.extend(self._get_flow123d()) return result def _get_ref_output_files(self, comp_data): """ :type comp_data: dict """ # parse filters filters = [ PathFilters.filter_wildcards(x) for x in comp_data.get('files', []) ] # browse files and make them relative to ref output so filters works properly files = Paths.walk(self.case.fs.ref_output, [PathFilters.filter_type_is_file()]) files = [Paths.relpath(f, self.case.fs.ref_output) for f in files] # filter files and make them absolute again files = Paths.match(files, filters) files = [Paths.join(self.case.fs.ref_output, f) for f in files] return list(zip(files, self._get_mirror_files(files))) def _get_mirror_files(self, paths): return [ Paths.join(self.case.fs.output, Paths.relpath(p, self.case.fs.ref_output)) for p in paths ]