def list_tests():
    """
    Print all test yaml files found directly below <flow123d_root>/tests,
    grouped by their parent directory, two names per output row.
    """
    test_dir = Paths.join(Paths.flow123d_root(), 'tests')
    # every *.yaml file except the per-directory config.yaml
    tests = Paths.walk(test_dir, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])
    result = dict()
    for r in tests:
        dirname = Paths.dirname(r)
        basename = Paths.basename(r)
        # keep only yaml files whose directory sits directly under test_dir
        if Paths.dirname(dirname) != test_dir:
            continue
        # setdefault replaces the verbose "if dirname not in result" dance
        result.setdefault(dirname, list()).append(basename)

    for dirname in sorted(result.keys()):
        printf.warning(Paths.relpath(dirname, test_dir))
        with printf:
            # no need for a manual element-by-element copy loop here
            paths = list(result[dirname])
            wrap = 2  # two-column layout
            for i in range(0, len(paths), wrap):
                printf.out(' '.join(
                    ['{:<40s}'.format(x) for x in paths[i:i + wrap]]))
        printf.sep()
def list_tests():
    """Print every test yaml (excluding config.yaml) grouped by test directory."""
    root = Paths.join(Paths.flow123d_root(), 'tests')
    yaml_files = Paths.walk(root, [
        PathFilters.filter_type_is_file(),
        PathFilters.filter_endswith('.yaml'),
        PathFilters.filter_not(PathFilters.filter_name('config.yaml')),
    ])

    # group basenames by their immediate parent directory
    grouped = dict()
    for path in yaml_files:
        parent = Paths.dirname(path)
        # skip yaml files nested deeper than one level below the tests dir
        if Paths.dirname(parent) != root:
            continue
        if parent not in grouped:
            grouped[parent] = list()
        grouped[parent].append(Paths.basename(path))

    for parent in sorted(grouped):
        Printer.all.out(Paths.relpath(parent, root))
        with Printer.all.with_level(1):
            for name in grouped[parent]:
                Printer.all.out(
                    '{: >4s} {: <40s} {}', '', name,
                    Paths.relpath(Paths.join(parent, name), root))
        Printer.all.newline()
def get_pbs_module(hostname_hint=None):
    """
    Resolve and import the pbs module matching the current machine.

    file host_table.yaml serves as lookup table when using python script in
    queue mode: each key is hostname and each value names a module which
    should be loaded; modules are located in /src/python/scripts/pbs/modules

    If no matching key for current machine exists try to use pbs_<hostname>
    where all dots(.) are replaced with underscores(_)

    if hostname_hint is not set node name will be used

    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    pbs_module_path = None
    host_file = Paths.join(Paths.flow123d_root(), 'config', 'host_table.yaml')
    host_file_exists = Paths.exists(host_file)
    hostname = hostname_hint or platform.node()
    from_host = False

    # try to get module name from the yaml lookup table
    if host_file_exists:
        with open(host_file, 'r') as fp:
            # safe_load: host_table.yaml is a plain hostname->module mapping,
            # no custom tags needed (yaml.load without an explicit Loader is
            # deprecated in PyYAML); "or {}" guards against an empty file,
            # where safe_load returns None and .get would crash
            hosts = yaml.safe_load(fp) or {}
        pbs_module_path = hosts.get(hostname, None)
        from_host = pbs_module_path is not None

    if not pbs_module_path:
        # fall back to a module name derived from the hostname itself
        hostname = hostname.replace('.', '_')
        pbs_module_path = 'pbs_{}'.format(hostname)

    # construct full path for import
    full_module_path = 'scripts.pbs.modules.{module_name}'.format(
        module_name=pbs_module_path)

    # try to get pbs_module
    try:
        return importlib.import_module(full_module_path)
    except ImportError:
        Printer.all.err('Could not load module "{}" ({}) for hostname "{}"',
                        pbs_module_path, full_module_path, hostname)
        with Printer.all.with_level(2):
            if host_file_exists:
                if from_host:
                    Printer.all.err(
                        'Value specified in host_table.yaml "{}" points to non-existing module',
                        pbs_module_path)
                else:
                    Printer.all.err(
                        'Config file host_table.yaml does not have entry for hostname "{}"',
                        hostname)
            else:
                Printer.all.err(
                    'Config file host_table.yaml does not exists ({}) and auto module detection failed',
                    host_file)
        raise
def get_commit(cls):
    """
    Calls git show on git root to determine hash and unix timestamp
    of the current commit (HEAD)

    :return: dict(hash=<sha>, date=<unix timestamp>) or None when git
             is unavailable, the repo is missing, or the output is malformed
    """
    import subprocess
    try:
        root = Paths.flow123d_root()
        # get current hash(%H) and date(%ct) from git repo
        result = subprocess.check_output(
            'git show -s --format=%H,%ct HEAD'.split(), cwd=root).decode()
        sha, date = result.strip().split(',')
        return dict(hash=sha, date=int(date))
    except (subprocess.CalledProcessError, OSError, ValueError):
        # best effort only: narrow except instead of a bare "except:" so
        # SystemExit/KeyboardInterrupt are not silently swallowed
        return None
def get_pbs_module(hostname_hint=None):
    """
    Resolve and import the pbs module matching the current machine.

    file host_table.yaml serves as lookup table when using python script in
    queue mode: each key is hostname and each value names a module which
    should be loaded; modules are located in /src/python/scripts/pbs/modules

    If no matching key for current machine exists try to use pbs_<hostname>
    where all dots(.) are replaced with underscores(_)

    if hostname_hint is not set node name will be used

    :rtype : scripts.pbs.modules.pbs_tarkil_cesnet_cz
    """
    pbs_module_path = None
    host_file = Paths.join(Paths.flow123d_root(), 'config', 'host_table.yaml')
    host_file_exists = Paths.exists(host_file)
    hostname = hostname_hint or platform.node()
    from_host = False

    # try to get name from the yaml lookup table
    if host_file_exists:
        with open(host_file, 'r') as fp:
            # safe_load: the table is a plain hostname->module mapping with no
            # custom tags (yaml.load without a Loader is deprecated); "or {}"
            # keeps .get working when the file is empty (safe_load -> None)
            hosts = yaml.safe_load(fp) or {}
        pbs_module_path = hosts.get(hostname, None)
        from_host = pbs_module_path is not None

    if not pbs_module_path:
        hostname = hostname.replace('.', '_')
        pbs_module_path = 'pbs_{}'.format(hostname)

    # construct full path for import
    full_module_path = 'scripts.pbs.modules.{module_name}'.format(module_name=pbs_module_path)

    # try to get pbs_module
    try:
        return importlib.import_module(full_module_path)
    except ImportError:
        Printer.all.err('Could not load module "{}" ({}) for hostname "{}"',
                        pbs_module_path, full_module_path, hostname)
        with Printer.all.with_level(2):
            if host_file_exists:
                if from_host:
                    Printer.all.err('Value specified in host_table.yaml "{}" points to non-existing module',
                                    pbs_module_path)
                else:
                    Printer.all.err('Config file host_table.yaml does not have entry for hostname "{}"',
                                    hostname)
            else:
                Printer.all.err('Config file host_table.yaml does not exists ({}) and auto module detection failed',
                                host_file)
        raise
def parse_yaml(self):
    """Read self.yaml_file, expand <placeholder> values and parse the result."""
    # importing these modules registers their yaml parser tags
    from scripts.artifacts.collector import Collector
    from scripts.artifacts.command import Command
    from scripts.artifacts.modules.mongodb import DatabaseMongo
    from scripts.artifacts.modules.lscpu import CommandLSCPU

    with open(self.yaml_file, "r") as fp:
        raw = fp.read()

    expanded = strings.replace_placeholders(
        raw,
        _format_="<{}>",
        root=Paths.flow123d_root(),
        time=System.time,
        date=System.date,
        datetime=System.datetime,
        rnd8=System.rnd8,
        rnd16=System.rnd16,
        rnd32=System.rnd32,
        rnd=System.rnd,
    )
    # NOTE(review): yaml.load (not safe_load) appears intentional here so the
    # tags registered by the imports above stay usable — confirm before changing
    self.configuration = yaml.load(expanded) or {}
def configure_arguments(self, args):
    """
    Method will replace given arguments placeholders

    available placeholders are:
        - FLOW123D_DIR          - path to repository root (such as /opt/flow123d)
        - CURRENT_TEST_DIR      - path to current test dir (such as /opt/flow123d/tests/01_cmd_line)
        - CURRENT_OUTPUT_DIR    - path to current test output dir
                                  (such as /opt/flow123d/tests/01_cmd_line/test_results/02_input_format.1)
        - CURRENT_REF_OUTPUT_DIR - path to current test dir
                                  (such as /opt/flow123d/tests/01_cmd_line/ref_out/02_input_format)
        - TESTS_DIR             - path to tests dir (such as /opt/flow123d/tests)
              NOTE: This value may not be precise and works only when running
              tests in standard flow123d structure. This value is essentially
              CURRENT_TEST_DIR/..

    :type args: list[str]
    :return: the same list, with every $X$, <X> and {X} placeholder expanded in place
    """
    # build replacements map for the current case
    replacements = dict(
        FLOW123D_DIR=Paths.flow123d_root(),
        CURRENT_TEST_DIR=self.case.fs.root,
        CURRENT_OUTPUT_DIR=self.case.fs.output,
        CURRENT_REF_OUTPUT_DIR=self.case.fs.ref_output,
        TESTS_DIR=Paths.dirname(self.case.fs.root),
    )

    # enumerate instead of range(len(...)); single write-back per argument
    for i, arg in enumerate(args):
        for repl, val in replacements.items():
            arg = arg.replace('$%s$' % repl, val)  # works for $VALUE$
            arg = arg.replace('<%s>' % repl, val)  # works for <VALUE>
            arg = arg.replace('{%s}' % repl, val)  # works for {VALUE}
        args[i] = arg
    return args
def parse_yaml(self):
    """Load self.yaml_file, substitute <placeholder> values and parse it."""
    # these imports register yaml parser tags used by the configuration file
    from scripts.artifacts.collector import Collector
    from scripts.artifacts.command import Command
    from scripts.artifacts.modules.mongodb import DatabaseMongo
    from scripts.artifacts.modules.lscpu import CommandLSCPU

    with open(self.yaml_file, 'r') as fp:
        content = fp.read()

    # gather all placeholder substitutions in one named mapping
    placeholder_values = dict(
        root=Paths.flow123d_root(),
        time=System.time,
        date=System.date,
        datetime=System.datetime,
        rnd8=System.rnd8,
        rnd16=System.rnd16,
        rnd32=System.rnd32,
        rnd=System.rnd,
    )
    content = strings.replace_placeholders(
        content, _format_='<{}>', **placeholder_values)

    # NOTE(review): yaml.load (not safe_load) appears deliberate so the tags
    # registered by the imports above keep working — confirm before changing
    self.configuration = yaml.load(content) or {}