def set_sanity(self):
    """Assemble the sanity check for the HPX hello-world output.

    Verifies that the number of greeting lines matches the task layout
    and that every reported thread/locality id is within range.
    """
    greetings = sn.findall(r'hello world from OS-thread \s*(?P<tid>\d+) on '
                           r'locality (?P<lid>\d+)', self.stdout)
    # https://stellar-group.github.io/hpx/docs/sphinx/branches/master/html/terminology.html#term-locality
    localities = self.num_tasks // self.num_tasks_per_node
    expected_lines = self.num_tasks * self.num_cpus_per_task
    count_ok = sn.assert_eq(sn.count(greetings), expected_lines)
    tid_ok = sn.map(
        lambda m: sn.assert_lt(int(m.group('tid')), self.num_cpus_per_task),
        greetings)
    lid_ok = sn.map(
        lambda m: sn.assert_lt(int(m.group('lid')), localities), greetings)
    self.sanity_patterns = sn.all(sn.chain([count_ok], tid_ok, lid_ok))
def __init__(self):
    """Probe a fixed set of environment variables over a remote SSH call."""
    super().__init__()
    self.descr = ('Check the values of a set of environment variables '
                  'when accessing remotely over SSH')
    self.valid_systems = ['daint:login', 'dom:login']
    self.valid_prog_environs = ['PrgEnv-cray']

    # Expected value (as a regex) for each probed variable.
    reference = {
        'CRAY_CPU_TARGET': 'haswell',
        'CRAYPE_NETWORK_TARGET': 'aries',
        'MODULEPATH': r'[\S+]',
        'MODULESHOME': r'/opt/cray/pe/modules/[\d+\.+]',
        'PE_PRODUCT_LIST': ('CRAYPE_HASWELL:CRAY_RCA:CRAY_ALPS:DVS:'
                            'CRAY_XPMEM:CRAY_DMAPP:CRAY_PMI:CRAY_UGNI:'
                            'CRAY_UDREG:CRAY_LIBSCI:CRAYPE:CRAY:'
                            'PERFTOOLS:CRAYPAT'),
        'SCRATCH': r'/scratch/[\S+]',
        'XDG_RUNTIME_DIR': r'/run/user/[\d+]'
    }

    # Remote command: echo each variable as NAME=$NAME, quoted so that
    # expansion happens on the remote side.
    probe = ' '.join(f'{name}=${name}' for name in reference)
    self.executable = 'ssh'
    self.executable_opts = [self.current_system.name, 'echo',
                            "'%s'" % probe]
    self.sanity_patterns = sn.all(
        sn.map(self.assert_envvar, list(reference.items())))
    self.maintainers = ['RS', 'LM']
    self.tags = {'maintenance', 'production'}
def __init__(self):
    """RRTMGP OpenACC solver check (PGI environment, GPU partitions)."""
    super().__init__()
    self.valid_systems = ['dom:gpu', 'daint:gpu']
    self.valid_prog_environs = ['PrgEnv-pgi']
    self.sourcesdir = os.path.join(self.current_system.resourcesdir,
                                   'RRTMGP')
    self.tags = {'external-resources'}
    self.prebuild_cmd = ['cp build/Makefile.conf.dom build/Makefile.conf']
    self.executable = 'python'
    self.executable_opts = [
        'util/scripts/run_tests.py',
        '--verbose',
        '--rel_diff_cut 1e-13',
        '--root ..',
        '--test ${INIFILE}_ncol-${NCOL}.ini'
    ]
    self.pre_run = [
        'pwd',
        'module load netcdf-python/1.4.1-CrayGNU-19.06-python2',
        'cd test'
    ]
    self.modules = ['craype-accel-nvidia60', 'cray-netcdf']
    self.variables = {'NCOL': '500', 'INIFILE': 'openacc-solvers-lw'}

    # Relative differences reported as '[name, value]' pairs; at least one
    # must be found and each must stay below 1e-5.
    rel_diffs = sn.extractall(r'.*\[\S+, (\S+)\]', self.stdout, 1, float)
    self.sanity_patterns = sn.all(
        sn.chain(
            [sn.assert_gt(sn.count(rel_diffs), 0, msg='regex not matched')],
            sn.map(lambda d: sn.assert_lt(d, 1e-5), rel_diffs)))
    self.maintainers = ['WS', 'VK']
def set_sanity_patterns(self):
    """Require exactly 100 random numbers, each within [90, 100]."""
    samples = sn.extractall(r'Random: (?P<number>\S+)', self.stdout,
                            'number', float)
    checks = [
        sn.assert_eq(sn.count(samples), 100),
        sn.all(sn.map(lambda v: sn.assert_bounded(v, 90, 100), samples)),
    ]
    self.sanity_patterns = sn.all(checks)
def validate_test(self):
    """Return a deferred check: exactly 100 numbers, each in [90, 100]."""
    samples = sn.extractall(r'Random: (?P<number>\S+)', self.stdout,
                            'number', float)
    return sn.all([
        sn.assert_eq(sn.count(samples), 100),
        sn.all(sn.map(lambda v: sn.assert_bounded(v, 90, 100), samples)),
    ])
def setup(self, partition, environ, **job_opts):
    """Pick the per-partition task layout and assemble the sanity checks.

    The program prints CSV rows of the form
    ``locality, threads, time, points, partitions, steps``.
    """
    result = sn.findall(
        r'(?P<lid>\d+),\s*(?P<tid>\d+),'
        r'\s*(?P<time>(\d+)?.?\d+),'
        r'\s*(?P<pts>\d+),'
        r'\s*(?P<parts>\d+),'
        r'\s*(?P<steps>\d+)', self.stdout)

    # fullname -> (num_tasks, tasks_per_node, cpus_per_task, tasks_per_socket)
    layouts = {
        'daint:gpu': (2, 1, 12, None),
        'daint:mc': (4, 2, 18, 1),
        'dom:gpu': (2, 1, 12, None),
        'dom:mc': (4, 2, 18, 1),
    }
    layout = layouts.get(partition.fullname)
    if layout is not None:
        (self.num_tasks, self.num_tasks_per_node,
         self.num_cpus_per_task, tasks_per_socket) = layout
        # Only the multicore partitions pin tasks per socket.
        if tasks_per_socket is not None:
            self.num_tasks_per_socket = tasks_per_socket

    self.executable_opts += ['--hpx:threads=%s' % self.num_cpus_per_task]
    num_threads = self.num_tasks * self.num_cpus_per_task
    self.sanity_patterns = sn.all(
        sn.chain(
            sn.map(lambda m: sn.assert_eq(int(m.group('lid')),
                                          self.num_tasks), result),
            sn.map(lambda m: sn.assert_eq(int(m.group('tid')),
                                          num_threads), result),
            sn.map(lambda m: sn.assert_eq(m.group('pts'), self.nx_opts),
                   result),
            sn.map(lambda m: sn.assert_eq(m.group('parts'), self.np_opts),
                   result),
            sn.map(lambda m: sn.assert_eq(m.group('steps'), self.nt_opts),
                   result)))
    super().setup(partition, environ, **job_opts)
def test_map():
    """`sn.map` defers the transformation until the result is iterated."""
    data = [1, 2, 3]
    mapped = sn.map(lambda v: 2*v + 1, data)
    for idx, val in sn.enumerate(mapped, start=1):
        assert 2*idx + 1 == val

    # Alternative test: force evaluation of the whole deferred expression.
    assert [3, 5, 7] == list(sn.evaluate(mapped))
def test_map(self):
    """`sn.map` defers the transformation until the result is iterated."""
    data = [1, 2, 3]
    mapped = sn.map(lambda v: 2 * v + 1, data)
    for idx, val in sn.enumerate(mapped, start=1):
        self.assertEqual(2 * idx + 1, val)

    # Alternative test: evaluate the whole deferred expression eagerly.
    self.assertEqual([3, 5, 7], list(evaluate(mapped)))
def set_sanity(self):
    """Check every CSV result row against the requested run parameters.

    Rows have the form ``threads, time, points, partitions, steps``.
    """
    rows = sn.findall(r'(?P<tid>\d+),\s*(?P<time>(\d+)?.?\d+),'
                      r'\s*(?P<pts>\d+),\s*(?P<parts>\d+),'
                      r'\s*(?P<steps>\d+)', self.stdout)
    per_row_checks = sn.chain(
        sn.map(lambda r: sn.assert_eq(int(r.group('tid')),
                                      self.num_cpus_per_task), rows),
        sn.map(lambda r: sn.assert_eq(r.group('pts'), self.nx_opts), rows),
        sn.map(lambda r: sn.assert_eq(r.group('parts'), self.np_opts), rows),
        sn.map(lambda r: sn.assert_eq(r.group('steps'), self.nt_opts), rows),
    )
    self.sanity_patterns = sn.all(per_row_checks)
def __init__(self):
    """OpenFOAM-Extend hotRoom test: global continuity errors stay tiny."""
    super().__init__()
    self.descr = ('OpenFOAM-Extend check buoyantBoussinesqSimpleFoam: '
                  'hotRoom test')
    self.executable = 'buoyantBoussinesqSimpleFoam'
    global_errors = sn.extractall(r'\sglobal\s=\s(?P<res>\S+),', self.stdout,
                                  'res', float)
    # Each |global continuity error| must be below 1e-17.
    self.sanity_patterns = sn.all(
        sn.map(lambda e: sn.assert_lt(abs(e), 1.e-17), global_errors))
def __init__(self):
    """pimpleFoam tjunction tutorial: epsilon residuals must converge."""
    super().__init__()
    self.descr = 'OpenFOAM check of pimpleFoam: tjunction tutorial'
    final_residuals = sn.extractall(
        r'Solving for epsilon, \w+\s\w+\s=\s\d.\d+.\s'
        r'Final residual\s=\s(?P<res>-?\S+),', self.stdout, 'res', float)
    checks = sn.chain(
        sn.map(lambda r: sn.assert_lt(r, 5.e-05), final_residuals),
        # The solver must also have reached its normal 'End' marker.
        [sn.assert_found(r'^\s*[Ee]nd', self.stdout)],
    )
    self.sanity_patterns = sn.all(checks)
def __init__(self):
    """buoyantBoussinesqSimpleFoam hotroom tutorial sanity setup."""
    super().__init__()
    self.descr = ('OpenFOAM check of buoyantBoussinesqSimpleFoam: '
                  'hotroom tutorial')
    self.executable = 'buoyantBoussinesqSimpleFoam'
    global_errors = sn.extractall(r'\sglobal\s=\s(?P<res>\S+),', self.stdout,
                                  'res', float)
    checks = sn.chain(
        sn.map(lambda e: sn.assert_lt(e, 1.e-17), global_errors),
        # The run must also have finished normally.
        [sn.assert_found(r'^\s*[Ee]nd', self.stdout)],
    )
    self.sanity_patterns = sn.all(checks)
def __init__(self):
    """Demonstrate applying a sanity assertion to every extracted value."""
    self.descr = 'Apply a sanity function iteratively'
    self.valid_systems = ['*']
    self.valid_prog_environs = ['*']
    self.executable = './random_numbers.sh'
    samples = sn.extractall(r'Random: (?P<number>\S+)', self.stdout,
                            'number', float)
    # Exactly 100 samples, each within [90, 100].
    self.sanity_patterns = sn.and_(
        sn.assert_eq(sn.count(samples), 100),
        sn.all(sn.map(lambda v: sn.assert_bounded(v, 90, 100), samples)))
def __init__(self):
    """simpleFoam motorbike tutorial: continuity errors must stay small."""
    super().__init__()
    self.descr = 'OpenFOAM-Extend check of simpleFoam: motorbike tutorial'
    self.executable_opts = ['-parallel']
    self.num_tasks = 6
    self.num_tasks_per_node = 6
    continuity_errors = sn.extractall(
        r'time step continuity errors : '
        r'\S+\s\S+ = \S+\sglobal = (?P<res>-?\S+),',
        self.stdout, 'res', float)
    # Each |global continuity error| must be below 5e-4.
    self.sanity_patterns = sn.all(
        sn.map(lambda e: sn.assert_lt(abs(e), 5.e-04), continuity_errors))
def __init__(self):
    """reconstructPar multiRegionHeater: last five global errors small."""
    super().__init__()
    self.descr = ('OpenFOAM-Extend check of reconstructPar: '
                  'multiRegionHeater test')
    self.executable_opts = ['-parallel']
    self.num_tasks = 4
    self.num_tasks_per_node = 4
    # Only the final five reported global errors are checked.
    last_errors = sn.extractall(r'\sglobal\s=\s(?P<res>-?\S+),', self.stdout,
                                'res', float)[-5:]
    self.sanity_patterns = sn.all(
        sn.map(lambda e: sn.assert_lt(abs(e), 1.e-04), last_errors))
def setup(self, partition, environ, **job_opts):
    """Validate the hybrid hello-world output and record compile time.

    Each line reports ``thread T out of NT from process P out of NP``;
    ids must be in range and the totals must match the task layout.
    """
    result = sn.findall(
        r'Hello World from thread \s*(\d+) out '
        r'of \s*(\d+) from process \s*(\d+) out of '
        r'\s*(\d+)', self.stdout)

    # Small accessors for the four capture groups of a match.
    def tid(m):
        return int(m.group(1))

    def num_threads(m):
        return int(m.group(2))

    def rank(m):
        return int(m.group(3))

    def num_ranks(m):
        return int(m.group(4))

    self.sanity_patterns = sn.all(
        sn.chain(
            [sn.assert_eq(sn.count(result),
                          self.num_tasks * self.num_cpus_per_task)],
            sn.map(lambda m: sn.assert_lt(tid(m), num_threads(m)), result),
            sn.map(lambda m: sn.assert_lt(rank(m), num_ranks(m)), result),
            sn.map(lambda m: sn.assert_lt(tid(m), self.num_cpus_per_task),
                   result),
            sn.map(lambda m: sn.assert_eq(num_threads(m),
                                          self.num_cpus_per_task), result),
            sn.map(lambda m: sn.assert_lt(rank(m), self.num_tasks), result),
            sn.map(lambda m: sn.assert_eq(num_ranks(m), self.num_tasks),
                   result),
        ))
    self.perf_patterns = {
        'compilation_time': sn.getattr(self, 'compilation_time_seconds')
    }
    self.reference = {'*': {'compilation_time': (60, None, 0.1)}}
    super().setup(partition, environ, **job_opts)
def setup(self, partition, environ, **job_opts):
    """Select the per-partition layout and verify HPX hello output."""
    hellos = sn.findall(
        r'hello world from OS-thread \s*(?P<tid>\d+) on '
        r'locality (?P<lid>\d+)', self.stdout)

    # Every partition runs 2 tasks, 1 per node; only the thread count
    # differs between GPU and multicore nodes.
    cpus_by_partition = {
        'daint:gpu': 12,
        'daint:mc': 36,
        'dom:gpu': 12,
        'dom:mc': 36,
    }
    if partition.fullname in cpus_by_partition:
        self.num_tasks = 2
        self.num_tasks_per_node = 1
        self.num_cpus_per_task = cpus_by_partition[partition.fullname]

    self.executable_opts = ['--hpx:threads=%s' % self.num_cpus_per_task]
    # https://stellar-group.github.io/hpx/docs/sphinx/branches/master/html/terminology.html#term-locality
    num_localities = self.num_tasks // self.num_tasks_per_node
    count_ok = sn.assert_eq(sn.count(hellos),
                            self.num_tasks * self.num_cpus_per_task)
    tid_ok = sn.map(
        lambda m: sn.assert_lt(int(m.group('tid')), self.num_cpus_per_task),
        hellos)
    lid_ok = sn.map(
        lambda m: sn.assert_lt(int(m.group('lid')), num_localities), hellos)
    self.sanity_patterns = sn.all(sn.chain([count_ok], tid_ok, lid_ok))
    super().setup(partition, environ, **job_opts)
def __init__(self):
    """Tutorial check: deferred iteration via the `map` sanity function."""
    self.descr = ('ReFrame tutorial demonstrating the use of deferred '
                  'iteration via the `map` sanity function.')
    self.valid_systems = ['*']
    self.valid_prog_environs = ['*']
    self.executable = './random_numbers.sh'
    samples = sn.extractall(r'Random: (?P<number>\S+)', self.stdout,
                            'number', float)
    # Exactly 100 samples, each within [90, 100].
    self.sanity_patterns = sn.and_(
        sn.assert_eq(sn.count(samples), 100),
        sn.all(sn.map(lambda v: sn.assert_bounded(v, 90, 100), samples)))
    self.maintainers = ['put-your-name-here']
    self.tags = {'tutorial'}
def __init__(self, variant, lang, linkage):
    """Hello World compile/run check parametrised on language and linkage."""
    self.linkage = linkage
    self.variables = {'CRAYPE_LINK_TYPE': linkage}
    self.prgenv_flags = {}
    self.lang_names = {'c': 'C', 'cpp': 'C++', 'f90': 'Fortran 90'}
    self.descr = f'{self.lang_names[lang]} Hello World'
    self.sourcepath = 'hello_world'
    self.build_system = 'SingleSource'
    self.valid_systems = ['ubelix:compute', 'ubelix:gpu']
    self.valid_prog_environs = ['foss', 'intel']
    self.compilation_time_seconds = None

    matches = sn.findall(
        r'Hello World from thread \s*(\d+) out '
        r'of \s*(\d+) from process \s*(\d+) out of '
        r'\s*(\d+)', self.stdout)
    # Deferred attribute lookups: evaluated only when sanity runs.
    task_count = sn.getattr(self, 'num_tasks')
    cpus_per_task = sn.getattr(self, 'num_cpus_per_task')

    # Accessors for the four capture groups of a match.
    def thread_id(m):
        return int(m.group(1))

    def thread_total(m):
        return int(m.group(2))

    def rank_id(m):
        return int(m.group(3))

    def rank_total(m):
        return int(m.group(4))

    self.sanity_patterns = sn.all(
        sn.chain(
            [sn.assert_eq(sn.count(matches), task_count * cpus_per_task)],
            sn.map(lambda m: sn.assert_lt(thread_id(m), thread_total(m)),
                   matches),
            sn.map(lambda m: sn.assert_lt(rank_id(m), rank_total(m)),
                   matches),
            sn.map(lambda m: sn.assert_lt(thread_id(m), cpus_per_task),
                   matches),
            sn.map(lambda m: sn.assert_eq(thread_total(m), cpus_per_task),
                   matches),
            sn.map(lambda m: sn.assert_lt(rank_id(m), task_count), matches),
            sn.map(lambda m: sn.assert_eq(rank_total(m), task_count),
                   matches),
        ))
    self.perf_patterns = {
        'compilation_time': sn.getattr(self, 'compilation_time_seconds')
    }
    self.reference = {'*': {'compilation_time': (60, None, 0.1, 's')}}
    self.maintainers = ['VH', 'EK']
    self.tags = {'production', 'prgenv'}
def __init__(self):
    """Demo of prerun_cmds/postrun_cmds around a random-number script."""
    self.descr = 'Pre- and post-run demo test'
    self.valid_systems = ['*']
    self.valid_prog_environs = ['*']
    self.prerun_cmds = ['source limits.sh']
    self.postrun_cmds = ['echo FINISHED']
    self.executable = './random_numbers.sh'
    samples = sn.extractall(r'Random: (?P<number>\S+)', self.stdout,
                            'number', float)
    self.sanity_patterns = sn.all([
        sn.assert_eq(sn.count(samples), 100),
        sn.all(sn.map(lambda v: sn.assert_bounded(v, 90, 100), samples)),
        # The post-run command must have executed.
        sn.assert_found(r'FINISHED', self.stdout)
    ])
def __init__(self):
    """potentialFoam motorbike tutorial sanity configuration."""
    super().__init__()
    self.descr = 'OpenFOAM check of potentialFoam: motorbike tutorial'
    self.executable_opts = ['-parallel']
    self.num_tasks = 6
    self.num_tasks_per_node = 6
    # Only the final five residuals are checked.
    last_residuals = sn.extractall(r'Final residual = (?P<res>-?\S+),',
                                   self.stdout, 'res', float)[-5:]
    self.sanity_patterns = sn.all(
        sn.chain(
            sn.map(lambda r: sn.assert_lt(r, 1.e-07), last_residuals),
            [
                sn.assert_eq(5, sn.count(last_residuals)),
                sn.assert_found('Finalising parallel run', self.stdout),
                sn.assert_found(r'^\s*[Ee]nd', self.stdout)
            ]))
def __init__(self):
    """chtMultiRegionSimpleFoam heatexchanger tutorial sanity setup."""
    super().__init__()
    self.descr = ('OpenFOAM check of chtMultiRegionSimpleFoam:'
                  ' heatexchanger tutorial')
    self.executable_opts = ['-parallel']
    self.num_tasks = 4
    self.num_tasks_per_node = 4
    # Only the final ten global continuity errors are checked.
    last_errors = sn.extractall(r'\sglobal\s=\s(?P<res>\S+),', self.stdout,
                                'res', float)[-10:]
    self.sanity_patterns = sn.all(
        sn.chain(
            sn.map(lambda e: sn.assert_lt(e, 1.e-03), last_errors),
            [
                sn.assert_eq(10, sn.count(last_errors)),
                sn.assert_found('Finalising parallel run', self.stdout),
                sn.assert_found(r'^\s*[Ee]nd', self.stdout)
            ]))
def __init__(self):
    """Tutorial check: pre- and post-run commands around the executable."""
    self.descr = ('ReFrame tutorial demonstrating the use of '
                  'pre- and post-run commands')
    self.valid_systems = ['*']
    self.valid_prog_environs = ['*']
    # NOTE(review): `pre_run`/`post_run` are the older ReFrame attribute
    # names (newer releases use `prerun_cmds`/`postrun_cmds`) — confirm the
    # targeted ReFrame version before renaming.
    self.pre_run = ['source scripts/limits.sh']
    self.post_run = ['echo FINISHED']
    self.executable = './random_numbers.sh'
    samples = sn.extractall(r'Random: (?P<number>\S+)', self.stdout,
                            'number', float)
    self.sanity_patterns = sn.all([
        sn.assert_eq(sn.count(samples), 100),
        sn.all(sn.map(lambda v: sn.assert_bounded(v, 50, 80), samples)),
        # The post-run command must have executed.
        sn.assert_found('FINISHED', self.stdout)
    ])
    self.maintainers = ['put-your-name-here']
    self.tags = {'tutorial'}
def assert_hello_world(self):
    """Return a deferred check of the hybrid 'Hello, World' output.

    Each output line reports ``thread T out of NT from process P out of
    NP``; all ids must be in range and the totals must match the layout.
    """
    matches = sn.findall(
        r'Hello, World from thread \s*(\d+) out '
        r'of \s*(\d+) from process \s*(\d+) out of '
        r'\s*(\d+)', self.stdout)
    # Deferred attribute lookups: evaluated only when sanity runs.
    num_tasks = sn.getattr(self, 'num_tasks')
    num_cpus_per_task = sn.getattr(self, 'num_cpus_per_task')

    # Capture groups: 1 = thread id, 2 = thread total,
    #                 3 = rank id,   4 = rank total.
    return sn.all(
        sn.chain(
            [sn.assert_eq(sn.count(matches),
                          num_tasks * num_cpus_per_task)],
            sn.map(lambda m: sn.assert_lt(int(m.group(1)),
                                          int(m.group(2))), matches),
            sn.map(lambda m: sn.assert_lt(int(m.group(3)),
                                          int(m.group(4))), matches),
            sn.map(lambda m: sn.assert_lt(int(m.group(1)),
                                          num_cpus_per_task), matches),
            sn.map(lambda m: sn.assert_eq(int(m.group(2)),
                                          num_cpus_per_task), matches),
            sn.map(lambda m: sn.assert_lt(int(m.group(3)), num_tasks),
                   matches),
            sn.map(lambda m: sn.assert_eq(int(m.group(4)), num_tasks),
                   matches),
        ))
def eval_sanity(self):
    """Eagerly validate a GREASY run.

    Checks, in order:
      1. one ``output-*`` file exists per requested greasy task;
      2. each output file contains the expected number of
         'Hello, World ...' lines with in-range thread/rank ids;
      3. the greasy log reports a fully successful summary.

    Returns ``True`` when every ``sn.evaluate`` call passes; otherwise
    the first failing assertion raises.
    """
    # NOTE(review): this initialisation is immediately overwritten by the
    # comprehension below; it is redundant.
    output_files = []
    output_files = [
        file for file in os.listdir(self.stagedir)
        if file.startswith('output-')
    ]
    num_greasy_tasks = len(output_files)
    failure_msg = (f'Requested {self.num_greasy_tasks} task(s), but '
                   f'executed only {num_greasy_tasks} tasks(s)')
    sn.evaluate(
        sn.assert_eq(num_greasy_tasks, self.num_greasy_tasks,
                     msg=failure_msg))
    # Deferred attribute lookups, evaluated each time they are used below.
    num_tasks = sn.getattr(self, 'nranks_per_worker')
    num_cpus_per_task = sn.getattr(self, 'num_cpus_per_task')

    # Accessors for the four capture groups of a match:
    # 1 = thread id, 2 = thread total, 3 = rank id, 4 = rank total.
    def tid(match):
        return int(match.group(1))

    def num_threads(match):
        return int(match.group(2))

    def rank(match):
        return int(match.group(3))

    def num_ranks(match):
        return int(match.group(4))

    for output_file in output_files:
        # NOTE(review): `output_file` is a bare file name; presumably
        # sn.findall resolves it relative to the working directory at
        # evaluation time — confirm it matches the stage directory.
        result = sn.findall(
            r'Hello, World from thread \s*(\d+) out '
            r'of \s*(\d+) from process \s*(\d+) out of '
            r'\s*(\d+)', output_file)
        # NOTE(review): the f-string interpolates deferred expressions
        # (`sn.count(result)`, `num_tasks * num_cpus_per_task`); assumes
        # their string form is the evaluated value — confirm.
        failure_msg = (f'Found {sn.count(result)} Hello, World... '
                       f'pattern(s) but expected '
                       f'{num_tasks * num_cpus_per_task} pattern(s) '
                       f'inside the output file {output_file}')
        sn.evaluate(
            sn.assert_eq(sn.count(result),
                         num_tasks * num_cpus_per_task,
                         msg=failure_msg))
        # Per-line id checks; evaluated eagerly inside the loop, so each
        # lambda sees the current `output_file` in its message.
        sn.evaluate(
            sn.all(
                sn.chain(
                    sn.map(
                        lambda x: sn.assert_lt(
                            tid(x), num_threads(x),
                            msg=(f'Found {tid(x)} threads rather than '
                                 f'{num_threads(x)}')), result),
                    sn.map(
                        lambda x: sn.assert_lt(
                            rank(x), num_ranks(x),
                            msg=(f'Rank id {rank(x)} is not lower than the '
                                 f'number of ranks {self.nranks_per_worker} '
                                 f'in output file')), result),
                    sn.map(
                        lambda x: sn.assert_lt(
                            tid(x), self.num_cpus_per_task,
                            msg=(f'Rank id {tid(x)} is not lower than the '
                                 f'number of cpus per task '
                                 f'{self.num_cpus_per_task} in output '
                                 f'file {output_file}')), result),
                    sn.map(
                        lambda x: sn.assert_eq(
                            num_threads(x), num_cpus_per_task,
                            msg=(f'Found {num_threads(x)} threads rather than '
                                 f'{self.num_cpus_per_task} in output file '
                                 f'{output_file}')), result),
                    sn.map(
                        lambda x: sn.assert_lt(
                            rank(x), num_tasks,
                            msg=(f'Found {rank(x)} threads rather than '
                                 f'{self.num_cpus_per_task} in output file '
                                 f'{output_file}')), result),
                    sn.map(
                        lambda x: sn.assert_eq(
                            num_ranks(x), num_tasks,
                            msg=(f'Number of ranks {num_ranks(x)} is not '
                                 f'equal to {self.nranks_per_worker} in '
                                 f'output file {output_file}')), result))))
    # Finally, the greasy log itself must report a clean run.
    sn.evaluate(sn.assert_found(r'Finished greasing', self.greasy_logfile))
    sn.evaluate(
        sn.assert_found((f'INFO: Summary of {self.num_greasy_tasks} '
                         f'tasks: '
                         f'{self.num_greasy_tasks} OK, '
                         f'0 FAILED, '
                         f'0 CANCELLED, '
                         fr'0 INVALID\.'), self.greasy_logfile))
    return True
def __init__(self, variant, lang, linkage):
    """Hello World check across CSCS systems and PrgEnv environments."""
    self.linkage = linkage
    self.variables = {'CRAYPE_LINK_TYPE': linkage}
    self.prgenv_flags = {}
    self.lang_names = {'c': 'C', 'cpp': 'C++', 'f90': 'Fortran 90'}
    self.descr = f'{self.lang_names[lang]} Hello World'
    self.sourcepath = 'hello_world'
    self.build_system = 'SingleSource'
    self.valid_systems = [
        'daint:gpu', 'daint:mc', 'dom:gpu', 'dom:mc', 'kesch:cn',
        'tiger:gpu', 'arolla:cn', 'arolla:pn', 'tsa:cn', 'tsa:pn'
    ]
    self.valid_prog_environs = [
        'PrgEnv-cray', 'PrgEnv-cray_classic', 'PrgEnv-intel', 'PrgEnv-gnu',
        'PrgEnv-pgi', 'PrgEnv-gnu-nocuda', 'PrgEnv-pgi-nocuda'
    ]
    if self.current_system.name in ['kesch', 'arolla', 'tsa']:
        self.exclusive_access = True

    # Removing static compilation from kesch
    if self.current_system.name in ['kesch'] and linkage == 'static':
        self.valid_prog_environs = []

    self.compilation_time_seconds = None
    matches = sn.findall(
        r'Hello World from thread \s*(\d+) out '
        r'of \s*(\d+) from process \s*(\d+) out of '
        r'\s*(\d+)', self.stdout)
    # Deferred attribute lookups: evaluated only when sanity runs.
    task_count = sn.getattr(self, 'num_tasks')
    cpus_per_task = sn.getattr(self, 'num_cpus_per_task')

    # Accessors for the four capture groups of a match.
    def thread_id(m):
        return int(m.group(1))

    def thread_total(m):
        return int(m.group(2))

    def rank_id(m):
        return int(m.group(3))

    def rank_total(m):
        return int(m.group(4))

    self.sanity_patterns = sn.all(
        sn.chain(
            [sn.assert_eq(sn.count(matches), task_count * cpus_per_task)],
            sn.map(lambda m: sn.assert_lt(thread_id(m), thread_total(m)),
                   matches),
            sn.map(lambda m: sn.assert_lt(rank_id(m), rank_total(m)),
                   matches),
            sn.map(lambda m: sn.assert_lt(thread_id(m), cpus_per_task),
                   matches),
            sn.map(lambda m: sn.assert_eq(thread_total(m), cpus_per_task),
                   matches),
            sn.map(lambda m: sn.assert_lt(rank_id(m), task_count), matches),
            sn.map(lambda m: sn.assert_eq(rank_total(m), task_count),
                   matches),
        ))
    self.perf_patterns = {
        'compilation_time': sn.getattr(self, 'compilation_time_seconds')
    }
    self.reference = {'*': {'compilation_time': (60, None, 0.1, 's')}}
    self.maintainers = ['VH', 'EK']
    self.tags = {'production', 'craype'}
def __init__(self, part, n_tasks, n_tasks_per_node):
    """Configure the OpenFOAM motorbike benchmark for one partition.

    Unpacks the benchmark template, decomposes the domain for
    ``n_tasks`` ranks, meshes the case, then runs ``simpleFoam`` in
    parallel under ``time``, checking convergence and collecting both
    OpenFOAM-reported and wall-clock timings.
    """
    self.valid_systems = [part]
    self.valid_prog_environs = ['openfoam']
    self.num_tasks_per_node = n_tasks_per_node
    self.num_tasks = n_tasks
    self.num_nodes = int(n_tasks / n_tasks_per_node)
    self.tags = {
        'num_procs=%i' % self.num_tasks,
        'num_nodes=%i' % self.num_nodes
    }
    self.sourcesdir = 'downloads'
    self.exclusive_access = True
    self.time_limit = '1h'
    self.prerun_cmds = [
        'tar --strip-components 2 -xf Motorbike_bench_template.tar.gz bench_template/basecase',
        './Allclean',  # removes logs, old timehistories etc just in case
        # set domain decomposition:
        # using 'scotch' method means simpleCoeffs is ignored so it
        # doesn't need to match num_tasks:
        'sed -i -- "s/method .*/method scotch;/g" system/decomposeParDict',
        'sed -i -- "s/numberOfSubdomains .*/numberOfSubdomains %i;/g" system/decomposeParDict' % self.num_tasks,
        # remove streamlines:
        'sed -i -- \'s/ #include "streamLines"//g\' system/controlDict',
        'sed -i -- \'s/ #include "wallBoundedStreamLines"//g\' system/controlDict',
        # fix location of mesh quality defaults (needed for v6+?)
        "sed -i -- 's|caseDicts|caseDicts/mesh/generation|' system/meshQualityDict",
        './Allmesh',  # do meshing
        'time \\',  # want to run mpi task under time
    ]
    # could also check:
    # $ ompi_info -c | grep -oE "MPI_THREAD_MULTIPLE[^,]*"
    # MPI_THREAD_MULTIPLE: yes
    self.executable = 'simpleFoam'
    self.executable_opts = ['-parallel']
    # Keep the meshing/decomposition logs for post-mortem inspection.
    self.keep_files = [
        'log.snappyHexMesh', 'log.blockMesh', 'log.decomposePar'
    ]
    result = sn.extractall(
        r'time step continuity errors : '
        r'\S+\s\S+ = \S+\sglobal = (?P<res>-?\S+),',
        self.stdout, 'res', float)
    # NB: `time` outputs to stderr so can't assume that should be empty
    self.sanity_patterns = sn.all([
        # ensure meshing finished ok:
        sn.assert_found('End', 'log.blockMesh'),
        sn.assert_found('End', 'log.decomposePar'),
        sn.assert_found('Finished meshing without any errors',
                        'log.snappyHexMesh'),
        # ensure simpleFoam finished ok:
        sn.assert_found('Finalising parallel run', self.stdout),
        sn.assert_not_found('FOAM FATAL ERROR', self.stdout),
        sn.assert_not_found('FOAM FATAL ERROR', self.stderr),
        # ensure continuity errors small enough - copied from
        # https://github.com/eth-cscs/reframe/blob/0a4dc5207b35c737861db346bd483fd4ac202846/cscs-checks/apps/openfoam/check_openfoam_extend.py#L56
        sn.all(sn.map(lambda x: sn.assert_lt(abs(x), 5.e-04), result)),
    ])
    self.perf_patterns = {
        # from openfoam output (last reported iteration):
        'ExecutionTime': sn.extractall(
            r'ExecutionTime = ([\d.]+) s ClockTime = ([\d.]+) s',
            self.stdout, 1, float)[-1],
        'ClockTime': sn.extractall(
            r'ExecutionTime = ([\d.]+) s ClockTime = ([\d.]+) s',
            self.stdout, 2, float)[-1],
        # from `time` (on stderr), parsed by the helper parse_time_cmd:
        'runtime_real': sn.extractsingle(r'^real\s+(\d+m[\d.]+s)$',
                                         self.stderr, 1, parse_time_cmd),
    }
    # Zero-threshold references: record-only performance metrics.
    self.reference = {
        '*': {
            'ExecutionTime': (0, None, None, 's'),
            'ClockTime': (0, None, None, 's'),
            'runtime_real': (0, None, None, 's'),
        }
    }