def __init__(self, o, config):
    """
    Create one test-case description from a yaml dict.

    :param o: yaml dict for this case; merged over yamlc.DEFAULTS first
    :param config: parent config object, or None for a standalone case
    """
    # fill in missing keys from the project-wide defaults
    o = ConfigBase.merge(yamlc.DEFAULTS, deepcopy(o))

    self.file = o.get(yamlc.TAG_FILES, None)
    self.proc = int(o.get(yamlc.TAG_PROC, None))
    self.time_limit = float(o.get(yamlc.TAG_TIME_LIMIT, None))
    self.memory_limit = float(o.get(yamlc.TAG_MEMORY_LIMIT, None))
    self.tags = set(o.get(yamlc.TAG_TAGS, None))
    self.check_rules = o.get(yamlc.TAG_CHECK_RULES, None)
    self.config = config

    if self.config:
        # anchor the yaml file and all result folders at the config root
        self.file = Paths.join(self.config.root, Paths.basename(self.file))
        self.without_ext = Paths.basename(Paths.without_ext(self.file))
        self.shortname = '{name}.{proc}'.format(name=self.without_ext, proc=self.proc)
        self.fs = yamlc.ConfigCaseFiles(
            root=self.config.root,
            ref_output=Paths.join(self.config.root, yamlc.REF_OUTPUT_DIR, self.without_ext),
            output=Paths.join(self.config.root, yamlc.TEST_RESULTS, self.shortname))
    else:
        # create temp folder where files will be
        tmp_folder = Paths.temp_file(o.get('tmp') + '-{date}-{time}-{rnd}')
        Paths.ensure_path(tmp_folder, is_file=False)
        self.fs = yamlc.ConfigCaseFiles(
            root=tmp_folder,
            ref_output=tmp_folder,
            output=tmp_folder)
def __init__(self, o, config):
    """
    Create one test-case description from a yaml dict (string-key variant).

    :param o: yaml dict for this case; merged over DEFAULTS first
    :param config: parent config object, or None for a standalone case
    """
    # take defaults for any missing field
    o = ConfigBase.merge(DEFAULTS, deepcopy(o))

    self.file = o.get('file', None)
    self.proc = int(o.get('proc', None))
    self.time_limit = float(o.get('time_limit', None))
    self.memory_limit = float(o.get('memory_limit', None))
    self.tags = set(o.get('tags', None))
    self.check_rules = o.get('check_rules', None)
    self.config = config

    if self.config:
        # resolve everything relative to the parent config's root folder
        self.file = Paths.join(self.config.root, self.file)
        self.without_ext = Paths.basename(Paths.without_ext(self.file))
        self.shortname = '{name}.{proc}'.format(name=self.without_ext, proc=self.proc)
        self.fs = ConfigCaseFiles(
            root=self.config.root,
            ref_output=Paths.join(self.config.root, 'ref_output', self.without_ext),
            output=Paths.join(self.config.root, 'test_results', self.shortname))
    else:
        # create temp folder where files will be
        tmp_folder = Paths.temp_file(o.get('tmp') + '-{date}-{time}-{rnd}')
        Paths.ensure_path(tmp_folder, is_file=False)
        self.fs = ConfigCaseFiles(
            root=tmp_folder,
            ref_output=tmp_folder,
            output=tmp_folder)
def __iter__(self):
    """
    Yield one CopyRule per file matched by the include patterns.

    The destination name is either the plain basename or, when self.name
    is set, a user-supplied template expanded with path/name placeholders.
    """
    fileset = formic.FileSet(self.includes, directory=self.source)
    for filename in fileset:
        # pick the destination file name
        if self.name:
            name = self.name.format(
                path=self.create_path_dict(filename),
                name=Paths.basename(filename),
            )
        else:
            name = Paths.basename(filename)

        # flat copy drops the directory layout; otherwise it is preserved
        # relative to the source root
        if self.flat:
            root = self.target
        else:
            rel_path = Paths.relpath(Paths.dirname(filename), Paths.abspath(self.source))
            root = Paths.abspath(Paths.join(self.target, rel_path))

        yield CopyRule(filename, Paths.join(root, name), self.remove_original)
def parse(self):
    """
    Materialise every registered config and link each test yaml file to
    the ConfigBase instance of the config.yaml in its directory.
    """
    # Iterate over a snapshot of the keys: the original loop unpacked
    # .items() but never used the values, and it rebinds values while
    # iterating — a key snapshot is both cleaner and safe (matches the
    # list()-based variant of parse() elsewhere in this file).
    for config_path in list(self.configs):
        self.configs[config_path] = ConfigBase(config_path)

    for yaml_file in list(self.files):
        config = Paths.join(Paths.dirname(yaml_file), yamlc.CONFIG_YAML)
        self.files[yaml_file] = self.configs[config]
def parse(self, missing_policy=ConfigBase.MISSING_POLICY_CREATE_DEFAULT):
    """
    Materialise every registered config (honouring *missing_policy*) and
    link each test yaml file to the config.yaml in its directory.

    :param missing_policy: how ConfigBase treats a missing config file
    """
    # snapshot the keys so rebinding values during iteration is safe
    for config_path in list(self.configs):
        self.configs[config_path] = ConfigBase(config_path, missing_policy)

    for yaml_file in list(self.files):
        cfg = Paths.join(Paths.dirname(yaml_file), yamlc.CONFIG_YAML)
        self.files[yaml_file] = self.configs[cfg]
def list_tests():
    """
    Print every test yaml file under <flow123d_root>/tests, grouped by
    its immediate test directory (config.yaml files are skipped).
    """
    test_dir = Paths.join(Paths.flow123d_root(), 'tests')
    tests = Paths.walk(test_dir, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])

    result = dict()
    for r in tests:
        dirname = Paths.dirname(r)
        basename = Paths.basename(r)
        # only yaml files sitting directly in a first-level test dir count
        if Paths.dirname(dirname) != test_dir:
            continue
        # setdefault replaces the manual "if key not in dict" insert dance
        result.setdefault(dirname, []).append(basename)

    # sorted(dict) iterates sorted keys directly — no .keys() needed
    for dirname in sorted(result):
        Printer.all.out(Paths.relpath(dirname, test_dir))
        with Printer.all.with_level(1):
            for basename in result[dirname]:
                Printer.all.out('{: >4s} {: <40s} {}', '', basename,
                                Paths.relpath(Paths.join(dirname, basename), test_dir))
    Printer.all.newline()
def list_tests():
    """
    Print every test yaml file under <flow123d_root>/tests, grouped by
    its immediate test directory (config.yaml files are skipped).
    """
    test_dir = Paths.join(Paths.flow123d_root(), 'tests')
    tests = Paths.walk(test_dir, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])

    result = dict()
    for r in tests:
        dirname = Paths.dirname(r)
        basename = Paths.basename(r)
        # only yaml files sitting directly in a first-level test dir count
        if Paths.dirname(dirname) != test_dir:
            continue
        # setdefault replaces the manual "if key not in dict" insert dance
        result.setdefault(dirname, []).append(basename)

    # sorted(dict) iterates sorted keys directly — no .keys() needed
    for dirname in sorted(result):
        Printer.all.out(Paths.relpath(dirname, test_dir))
        with Printer.all.with_level(1):
            for basename in result[dirname]:
                Printer.all.out('{: >4s} {: <40s} {}', '', basename,
                                Paths.relpath(Paths.join(dirname, basename), test_dir))
    Printer.all.newline()
def list_tests():
    """
    Print every test yaml file under <flow123d_root>/tests, grouped by
    its immediate test directory and wrapped two names per output line.
    """
    test_dir = Paths.join(Paths.flow123d_root(), 'tests')
    tests = Paths.walk(test_dir, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])

    result = dict()
    for r in tests:
        dirname = Paths.dirname(r)
        basename = Paths.basename(r)
        # only yaml files sitting directly in a first-level test dir count
        if Paths.dirname(dirname) != test_dir:
            continue
        # setdefault replaces the manual "if key not in dict" insert dance
        result.setdefault(dirname, []).append(basename)

    for dirname in sorted(result):
        printf.warning(Paths.relpath(dirname, test_dir))
        with printf:
            wrap = 2  # names printed per line
            # plain copy — the original built this with an append loop
            paths = list(result[dirname])
            for i in range(0, len(paths), wrap):
                printf.out(' '.join(
                    ['{:<40s}'.format(x) for x in paths[i:i + wrap]]))
        # NOTE(review): sep() placement reconstructed from flattened
        # source — assumed to separate directories; confirm against VCS
        printf.sep()
def parse(self):
    """
    Materialise every registered config and link each test yaml file to
    the ConfigBase instance of the config.yaml in its directory.
    """
    # Iterate over a snapshot of the keys: the original loop unpacked
    # .items() but never used the values, and it rebinds values while
    # iterating — a key snapshot is both cleaner and safe (matches the
    # list()-based variant of parse() elsewhere in this file).
    for config_path in list(self.configs):
        self.configs[config_path] = ConfigBase(config_path)

    for yaml_file in list(self.files):
        config = Paths.join(Paths.dirname(yaml_file), yamlc.CONFIG_YAML)
        self.files[yaml_file] = self.configs[config]
def get_pbs_module(hostname_hint=None):
    """
    file host_table.yaml serves as lookup table when using python script in queue mode
    each key is hostname and each value names a module which should be loaded
    modules are located in /src/python/scripts/pbs/modules

    If no matching key for current machine exists try to use pbs_<hostname>
    where all dots(.) are replaced with underscores(_)

    if hostname_hint is not set node name will be used

    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    pbs_module_path = None
    host_file = Paths.join(Paths.flow123d_root(), 'config', 'host_table.yaml')
    host_file_exists = Paths.exists(host_file)
    hostname = hostname_hint or platform.node()
    from_host = False

    # try to get name from yaml lookup file
    if host_file_exists:
        with open(host_file, 'r') as fp:
            # safe_load: the bare yaml.load is deprecated and can execute
            # arbitrary python tags; a plain host table needs no more.
            # `or {}` guards against an empty file parsing to None.
            hosts = yaml.safe_load(fp) or {}
        pbs_module_path = hosts.get(hostname, None)
        from_host = pbs_module_path is not None

    if not pbs_module_path:
        # fall back to convention: pbs_<hostname> with dots replaced
        hostname = hostname.replace('.', '_')
        pbs_module_path = 'pbs_{}'.format(hostname)

    # construct full path for import
    full_module_path = 'scripts.pbs.modules.{module_name}'.format(
        module_name=pbs_module_path)

    # try to get pbs_module
    try:
        return importlib.import_module(full_module_path)
    except ImportError:
        Printer.all.err('Could not load module "{}" ({}) for hostname "{}"',
                        pbs_module_path, full_module_path, hostname)
        with Printer.all.with_level(2):
            if host_file_exists:
                if from_host:
                    Printer.all.err(
                        'Value specified in host_table.yaml "{}" points to non-existing module',
                        pbs_module_path)
                else:
                    Printer.all.err(
                        'Config file host_table.yaml does not have entry for hostname "{}"',
                        hostname)
            else:
                Printer.all.err(
                    'Config file host_table.yaml does not exists ({}) and auto module detection failed',
                    host_file)
        raise
def _get_ref_output_files(self, comp_data):
    """
    Pair each reference-output file selected by comp_data['files'] with
    its mirror path in the output folder.

    :type comp_data: dict
    """
    ref_root = self.case.fs.ref_output

    # build wildcard filters from the comparison spec
    filters = [PathFilters.filter_wildcards(pattern)
               for pattern in comp_data.get('files', [])]

    # walk the ref output, match filters against *relative* paths so the
    # wildcards work regardless of where ref_output lives, then restore
    # absolute paths for the matches
    all_files = Paths.walk(ref_root, [PathFilters.filter_type_is_file()])
    relative = [Paths.relpath(f, ref_root) for f in all_files]
    matched = [Paths.join(ref_root, f) for f in Paths.match(relative, filters)]

    return zip(matched, self._get_mirror_files(matched))
def __init__(self, o, config):
    """
    Build a test-case description from its yaml dict.

    :param o: yaml dict for the case; missing fields come from yamlc.DEFAULTS
    :param config: parent config object, or None to run from a temp folder
    """
    merged = ConfigBase.merge(yamlc.DEFAULTS, deepcopy(o))

    self.file = merged.get(yamlc.TAG_FILES, None)
    self.proc = int(merged.get(yamlc.TAG_PROC, None))
    self.time_limit = float(merged.get(yamlc.TAG_TIME_LIMIT, None))
    self.memory_limit = float(merged.get(yamlc.TAG_MEMORY_LIMIT, None))
    self.tags = set(merged.get(yamlc.TAG_TAGS, None))
    self.check_rules = merged.get(yamlc.TAG_CHECK_RULES, None)
    self.config = config

    if self.config:
        # paths are anchored at the parent config's root
        self.file = Paths.join(self.config.root, Paths.basename(self.file))
        self.without_ext = Paths.basename(Paths.without_ext(self.file))
        self.shortname = '{name}.{proc}'.format(
            name=self.without_ext, proc=self.proc)
        self.fs = yamlc.ConfigCaseFiles(
            root=self.config.root,
            ref_output=Paths.join(
                self.config.root, yamlc.REF_OUTPUT_DIR, self.without_ext),
            output=Paths.join(
                self.config.root, yamlc.TEST_RESULTS, self.shortname))
    else:
        # create temp folder where files will be
        tmp_folder = Paths.temp_file(merged.get('tmp') + '-{date}-{time}-{rnd}')
        Paths.ensure_path(tmp_folder, is_file=False)
        self.fs = yamlc.ConfigCaseFiles(
            root=tmp_folder,
            ref_output=tmp_folder,
            output=tmp_folder)
def generate_status_file(cls, target):
    """
    Will generate status file if target has option turned on

    :type target: PyPy
    """
    # nothing to do when status-file output is disabled
    if not target.status_file:
        return

    IO.write(target.status_file, json.dumps(target.status(), indent=4))

    output_dir = Paths.dirname(target.status_file)
    files = Paths.browse(
        output_dir,
        [PathFilters.filter_wildcards('*/profiler_info_*.log.json')])

    # profiler json is missing? write an empty dummy so downstream tools
    # always find at least one profiler file
    if not files:
        IO.write(
            Paths.join(output_dir, 'profiler_info_dummy.log.json'), '{}')
def get_pbs_module(hostname_hint=None):
    """
    file host_table.yaml serves as lookup table when using python script in queue mode
    each key is hostname and each value names a module which should be loaded
    modules are located in /src/python/scripts/pbs/modules

    If no matching key for current machine exists try to use pbs_<hostname>
    where all dots(.) are replaced with underscores(_)

    if hostname_hint is not set node name will be used

    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    pbs_module_path = None
    host_file = Paths.join(Paths.flow123d_root(), 'config', 'host_table.yaml')
    host_file_exists = Paths.exists(host_file)
    hostname = hostname_hint or platform.node()
    from_host = False

    # try to get name from yaml lookup file
    if host_file_exists:
        with open(host_file, 'r') as fp:
            # safe_load: the bare yaml.load is deprecated and can execute
            # arbitrary python tags; a plain host table needs no more.
            # `or {}` guards against an empty file parsing to None.
            hosts = yaml.safe_load(fp) or {}
        pbs_module_path = hosts.get(hostname, None)
        from_host = pbs_module_path is not None

    if not pbs_module_path:
        # fall back to convention: pbs_<hostname> with dots replaced
        hostname = hostname.replace('.', '_')
        pbs_module_path = 'pbs_{}'.format(hostname)

    # construct full path for import
    full_module_path = 'scripts.pbs.modules.{module_name}'.format(module_name=pbs_module_path)

    # try to get pbs_module
    try:
        return importlib.import_module(full_module_path)
    except ImportError:
        Printer.all.err('Could not load module "{}" ({}) for hostname "{}"',
                        pbs_module_path, full_module_path, hostname)
        with Printer.all.with_level(2):
            if host_file_exists:
                if from_host:
                    Printer.all.err('Value specified in host_table.yaml "{}" points to non-existing module', pbs_module_path)
                else:
                    Printer.all.err('Config file host_table.yaml does not have entry for hostname "{}"', hostname)
            else:
                Printer.all.err('Config file host_table.yaml does not exists ({}) and auto module detection failed', host_file)
        raise
def get_pbs_module(hostname=None):
    """
    Resolve the PBS module for *hostname* (current node when omitted).

    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    hostname = hostname or platform.node()
    pbs_module_path = None

    # try to get name from json lookup table
    host_file = Paths.join(Paths.source_dir(), 'host_table.json')
    if Paths.exists(host_file):
        with open(host_file, 'r') as fp:
            hosts = json.load(fp)
        pbs_module_path = hosts.get(hostname, None)

    if not pbs_module_path:
        # fall back to convention: pbs_<hostname> with dots replaced
        pbs_module_path = 'pbs_{}'.format(hostname.replace('.', '_'))
        Printer.wrn('Warning! no host specified assuming module {}', pbs_module_path)

    # try to get pbs_module
    return importlib.import_module('scripts.pbs.modules.{}'.format(pbs_module_path))
def in_output(self, *names):
    """
    Will return path for file located in output

    :rtype: str
    """
    parts = (self.output,) + names
    return Paths.join(*parts)
def in_root(self, *names):
    """
    Will return path for file located in root

    :rtype: str
    """
    parts = (self.root,) + names
    return Paths.join(*parts)
def add_case(self, yaml_case_file):
    """
    Register a test yaml file and the config.yaml next to it; both are
    resolved later by parse(). Returns self so calls can be chained.
    """
    config_path = Paths.join(Paths.dirname(yaml_case_file), yamlc.CONFIG_YAML)
    # placeholders — parse() replaces them with real objects
    self.configs[config_path] = None
    self.files[yaml_case_file] = None
    return self
def _get_mirror_files(self, paths):
    """
    Map each reference-output path to its counterpart in the output
    folder, keeping the same relative location.
    """
    mirrors = []
    for path in paths:
        rel = Paths.relpath(path, self.case.fs.ref_output)
        mirrors.append(Paths.join(self.case.fs.output, rel))
    return mirrors
def add_case(self, yaml_case_file):
    """
    Register a test yaml file and the config.yaml next to it; both are
    resolved later by parse(). Returns self so calls can be chained.
    """
    config_path = Paths.join(Paths.dirname(yaml_case_file), yamlc.CONFIG_YAML)
    # placeholders — parse() replaces them with real objects
    self.configs[config_path] = None
    self.files[yaml_case_file] = None
    return self
def in_root(self, *names):
    """
    Build a path to a file located inside the root folder.

    :rtype: str
    """
    parts = (self.root,) + names
    return Paths.join(*parts)
def in_output(self, *names):
    """
    Build a path to a file located inside the output folder.

    :rtype: str
    """
    parts = (self.output,) + names
    return Paths.join(*parts)