def __init__(self):
     self.descr = 'OSU benchmarks build test'
     self.valid_systems = [
         'build-node:parallel', 'beluga:cpu_parallel', 'cedar:cpu_parallel',
         'graham:cpu_parallel'
     ]
     self.valid_prog_environs = ['*']
     self.sourcesdir = None
     self.prebuild_cmd = [
         'cp /cvmfs/soft.computecanada.ca/easybuild/sources/o/OSU-Micro-Benchmarks/osu-micro-benchmarks-5.6.1.tar.gz .',
         'tar xzf osu-micro-benchmarks-5.6.1.tar.gz',
         'cd osu-micro-benchmarks-5.6.1'
     ]
     self.build_system = 'Autotools'
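     # assigning the string 'Autotools' makes ReFrame replace it with the
     # corresponding build-system object, so per-build options such as
     # max_concurrency can be set on the next line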
     self.build_system.max_concurrency = 8
     self.sanity_patterns = sn.assert_not_found('error', self.stderr)
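All of the snippets in this listing assume ReFrame's usual imports, with the
sanity DSL bound to `sn`. A minimal, self-contained sketch of such a check
(assuming ReFrame 3.x; the class name and command are illustrative, not taken
from any example here):

import reframe as rfm
import reframe.utility.sanity as sn


@rfm.simple_test
class EchoNoErrorCheck(rfm.RunOnlyRegressionTest):
    def __init__(self):
        self.descr = 'passes when stderr contains no "error"'
        self.valid_systems = ['*']
        self.valid_prog_environs = ['*']
        self.executable = 'echo'
        self.executable_opts = ['hello world']
        # assert_not_found builds a deferred expression; it is evaluated
        # only after the job finishes, against the captured stderr
        self.sanity_patterns = sn.assert_not_found(r'(?i)error', self.stderr)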
Example #2
 def __init__(self):
     super().__init__()
     self.descr = ('verifies merging of 3 standard netCDF files')
     self.executable = 'cdo'
     self.executable_opts = [
         '-O', 'merge', 'sresa1b_ncar_ccsm3-example_pr.nc',
         'sresa1b_ncar_ccsm3-example_tas.nc',
         'sresa1b_ncar_ccsm3-example_area.nc',
         'sresa1b_ncar_ccsm3-example_area_pr_tas_area.nc'
     ]
     self.sanity_patterns = sn.all([
         sn.assert_not_found(r'(?i)unsupported|error', self.stderr),
         sn.assert_found(
             r'merge: Processed( 98304 values from)? '
             r'3 variables', self.stderr)
     ])
Example #3
 def __init__(self):
     super().__init__()
     self.descr = ('verifies merging of 3 netCDF-4 files')
     self.executable = 'cdo'
     self.executable_opts = [
         '-O', 'merge', 'test_echam_spectral-deflated_wind10.nc4',
         'test_echam_spectral-deflated_wl.nc4',
         'test_echam_spectral-deflated_ws.nc4',
         'test_echam_spectral-deflated_wind10_wl_ws.nc4'
     ]
     self.sanity_patterns = sn.all([
         sn.assert_not_found(r'(?i)unsupported|error', self.stderr),
         sn.assert_found(
             r'merge: Processed( 442368 values from)? '
             r'3 variables', self.stderr)
     ])
Example #4
 def __init__(self):
     super().__init__()
     self.descr = ('verifies merging and compressing of 3 compressed '
                   'netCDF-4 files')
     self.executable = 'cdo'
     self.executable_opts = [
         '-O', '-z', 'zip', 'merge',
         'test_echam_spectral-deflated_wind10.nc4c',
         'test_echam_spectral-deflated_wl.nc4c',
         'test_echam_spectral-deflated_ws.nc4c',
         'test_echam_spectral-deflated_wind10_wl_ws.nc4c'
     ]
     self.sanity_patterns = sn.all([
         sn.assert_not_found(r'(?i)unsupported|error', self.stderr),
         sn.assert_found(
             r'cdo merge: Processed 442368 values from 3 '
             r'variables over (8|24) timesteps', self.stderr)
     ])
Example #5
    def __init__(self):
        self.descr = 'check for avx2 instructions'
        self.valid_systems = ['dom:login', 'daint:login']
        self.valid_prog_environs = ['PrgEnv-cray', 'PrgEnv-gnu',
                                    'PrgEnv-intel', 'PrgEnv-pgi']
        self.modules = ['craype-haswell']

        self.sourcesdir = 'src/haswell_fma'
        self.build_system = 'Make'
        self.build_system.cflags = ['-O3', '-S']
        self.build_system.cxxflags = ['-O3', '-S']
        self.build_system.fflags = ['-O3', '-S']
        self.sanity_patterns = sn.all([
            sn.assert_found(r'vfmadd', 'vectorize_fma_c.s'),
            sn.assert_found(r'vfmadd', 'vectorize_fma_cplusplus.s'),
            sn.assert_found(r'vfmadd', 'vectorize_fma_ftn.s'),
            sn.assert_not_found('warning|WARNING', self.stderr)
        ])

        self.maintainers = ['AJ', 'CB']
        self.tags = {'production', 'craype'}
Example #6
    def __init__(self, **kwargs):
        super().__init__('haswell_fma_check', os.path.dirname(__file__),
                         **kwargs)
        self.descr = 'check for avx2 instructions'
        self.valid_systems = ['dom:login', 'daint:login', 'kesch:login']
        if self.current_system.name == 'kesch':
            self.valid_prog_environs = ['PrgEnv-cray', 'PrgEnv-gnu']
        else:
            self.valid_prog_environs = [
                'PrgEnv-cray', 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi'
            ]
        self.sourcesdir = 'src/haswell_fma'
        self.sanity_patterns = sn.all([
            sn.assert_found(r'vfmadd', 'vectorize_fma_c.s'),
            sn.assert_found(r'vfmadd', 'vectorize_fma_cplusplus.s'),
            sn.assert_found(r'vfmadd', 'vectorize_fma_ftn.s'),
            sn.assert_not_found('warning|WARNING', self.stderr)
        ])

        self.maintainers = ['AJ', 'VK']
        self.tags = {'production'}
Example #7
    def __init__(self, variant):
        super().__init__()
        self.valid_systems = ['daint:gpu']
        self.sourcesdir = os.path.join('../', variant)
        if variant == 'cuda':
            self.valid_prog_environs = ['PrgEnv-gnu']
        elif variant == 'openacc':
            self.valid_prog_environs = ['PrgEnv-pgi']

        if self.current_system.name == 'daint':
            self.modules = ['craype-accel-nvidia60']

        self.executable = './main'
        self.executable_opts = ['256', '256', '100', '0.01']
        self.keep_files = ['output.bin', 'output.bov']
        self.time_limit = (0, 5, 0)
        self.sanity_patterns = sn.all([
            sn.assert_found(r'Goodbye\!', self.stdout),
            sn.assert_not_found(r'ERROR', self.stdout)
        ])
        self.perf_patterns = {
            'exec_time':
            sn.extractsingle(r'simulation took (\S+) seconds', self.stdout, 1,
                             float),
            'perf':
            sn.extractsingle(r'at rate of (\S+) iters/second', self.stdout, 1,
                             float)
        }
        self.reference = {
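            # reference tuples are (target, lower threshold, upper threshold,
            # unit); (0, None, None, ...) enforces no bounds and merely
            # records the metric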
            'daint:gpu': {
                'exec_time': (0, None, None, 's'),
                'perf': (0, None, None, 'iters/s')
            },
            '*': {
                'exec_time': (0, None, None, 's'),
                'perf': (0, None, None, 'iters/s')
            }
        }
        self.maintainers = ['karakasis<at>cscs.ch']
Example #8
    def __init__(self):
        # {{{ pe
        self.descr = 'Tool validation'
        self.valid_prog_environs = ['PrgEnv-gnu', 'cpeGNU']
        self.valid_systems = [
            'dom:mc', 'dom:gpu', 'daint:mc', 'daint:gpu', 'eiger:mc',
            'pilatus:mc'
        ]
        self.tool = 'stat-cl'
        self.modules = ['cray-stat', 'cray-cti']
        self.maintainers = ['JG']
        self.tags = {'sph', 'hpctools', 'cpu', 'craype', 'debugging'}
        # }}}

        # {{{ compile
        self.testname = 'sedov'
        self.executable = 'mpi+omp'
        # re_ver_1 = 'STAT_VERSION1=$'
        # re_ver_2 = 'STAT_VERSION2=$'
        version_rpt = 'version.rpt'
        which_rpt = 'which.rpt'
        cs = self.current_system.name
        if cs not in {'pilatus', 'eiger'}:
            self.prebuild_cmds += [
                # --- check tool version
                f'echo STAT_VERSION1=$STAT_VERSION > {version_rpt}',
                f'echo STAT_VERSION2=`STATbin --version` >> {version_rpt}',
            ]
        else:
            self.prebuild_cmds += [
                # --- check tool version
                f'echo STAT_VERSION1=$STAT_LEVEL > {version_rpt}',
                f'echo STAT_VERSION2=`STATbin --version` >> {version_rpt}',
            ]

        self.prebuild_cmds += [
            f'STATbin -V >> {version_rpt}',
            f'which {self.tool} > {which_rpt}',
        ]

        # {{{ run
        self.time_limit = '10m'
        # }}}

        # {{{ sanity
        # TODO: regex = (
        #     r'function="(?P<fun>.*)", source="(?P<filename>.*)",'
        #     r' line=":(?P<ll>\d+)"'
        # )
        # + cuda
        # + largescale
        self.sanity_patterns = sn.all([
            # check the job output:
            sn.assert_found(r'Starting main loop', self.stdout),
            # check the tool output:
            sn.assert_not_found('not found', which_rpt),
            sn.assert_not_found('ERROR', self.stdout),
            # <LMON FE API> (ERROR): read_lmonp_msgheader failed while
            # attempting to receive continue launch message from back end
            sn.assert_found(r'STAT started', self.stdout),
            sn.assert_found(r'Attaching to job launcher', self.stdout),
            sn.assert_found(r'launching tool daemons', self.stdout),
            sn.assert_found(r'Results written to', self.stdout),
        ])
Example #9
 def __init__(self):
     self.valid_prog_environs = ['*']
     self.valid_systems = ['*']
     self.prerun_cmds = ['echo hello']
     self.executable = 'true'
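     # NOTE: the prerun 'echo hello' lands in this job's stdout, so the
     # deferred assertion below fails on evaluation; this looks like a
     # deliberate negative test for assert_not_found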
     self.sanity_patterns = sn.assert_not_found(r'hello', self.stdout)
Example #10
def test_assert_not_found_encoding(utf16_file):
    assert sn.assert_not_found(r'Iliad', utf16_file, encoding='utf-16')
Example #11
def test_assert_not_found(tempfile):
    assert sn.assert_not_found(r'foo: \d+', tempfile)
    assert sn.assert_not_found(r'foo: \d+', sn.defer(tempfile))
    with pytest.raises(SanityError):
        sn.evaluate(sn.assert_not_found(r'Step: \d+', tempfile))
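The `tempfile` and `utf16_file` fixtures are defined elsewhere in the test
suite. A hedged reconstruction of what they must provide, inferred only from
the assertions above (paths and file contents are illustrative):

import pytest


@pytest.fixture
def tempfile(tmp_path):
    # holds 'Step: <n>' lines but nothing matching r'foo: \d+'
    path = tmp_path / 'sanity.txt'
    path.write_text('Step: 1\nStep: 2\nStep: 3\n')
    return str(path)


@pytest.fixture
def utf16_file(tmp_path):
    # UTF-16-encoded text that does not contain the word 'Iliad'
    path = tmp_path / 'utf16.txt'
    path.write_text('Sing to me of the man, Muse\n', encoding='utf-16')
    return str(path)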
Example #12
 def set_sanity_patterns(self):
     self.sanity_patterns = sn.assert_not_found(r'warning', self.stdout)
Example #13
 def sanity_check_build(self):
     return sanity.assert_not_found("error", self.stderr)
Example #14
 def validate(self):
     return sn.assert_not_found(r'(?i)error', self.stdout)
Example #15
 def validate_compilation(self):
     return sn.assert_not_found(r'warning', self.stdout)
Example #16
 def test_assert_not_found_encoding(self):
     self.assertTrue(
         sn.assert_not_found('Iliad', self.utf16_file, encoding='utf-16'))
Example #17
 def validate_build(self):
     return sn.assert_not_found('error', self.stderr)
Example #18
    def assert_config(self):
        patterns = [
            sn.assert_found(r'Added \d+ new compilers', self.stdout),
            sn.assert_not_found(r'ERROR', self.stdout),
        ]

        # Compiler assertions
        with open(self.compilers_file_path) as f:
            spack_compilers = yaml.safe_load(f)

        all_compilers = sorted(self.all_compilers)
        for compiler in all_compilers:
            patterns += [
                sn.assert_found(f'spec: {compiler}', self.compilers_file_path)
            ]

        if 'compilers' in spack_compilers:
            compilers = []
            for compiler in spack_compilers['compilers']:
                if 'compiler' in compiler:
                    spec = compiler['compiler']
                    if 'spec' in spec:
                        compilers.append(spec['spec'])
                    else:
                        raise SanityError(
                            f'spec entry missing in {spec} inside the compilers.yaml file'
                        )
                else:
                    raise SanityError(
                        f'compiler entry missing in {compiler} inside the compilers.yaml file'
                    )
            for i, a in enumerate(sorted(compilers)):
                # we should have a one-to-one mapping between the identified
                # compilers and the entries inside the newly generated
                # compilers.yaml file
                patterns += [sn.assert_eq(a, all_compilers[i])]
        else:
            raise SanityError(
                'compilers entry not found in compilers.yaml file')

        # package assertions
        with open(self.packages_file_path) as f:
            spack_packages = yaml.safe_load(f)

        if 'packages' in spack_packages:
            pkgs = set()
            for name, pkg in self.pe_independent_pkgs.items():
                if 'name' in pkg:
                    pkgs.add(pkg['name'])
                else:
                    pkgs.add(name)
            for name, pkg in self.pe_dependent_pkgs.items():
                if 'name' in pkg:
                    pkgs.add(pkg['name'])
                else:
                    pkgs.add(name)

            for pkg in pkgs:
                patterns += [
                    sn.assert_found(f'{pkg}:', self.packages_file_path)
                ]
        else:
            raise SanityError('packages entry not found in packages.yaml file')

        config = spackconfig.SPACK_CONFIG

        # config assertions
        with open(self.config_file_path) as f:
            spack_config = yaml.safe_load(f)

        if 'config' in spack_config:
            for option in [
                    'build_stage', 'source_cache', 'misc_cache',
                    'db_lock_timeout', 'build_jobs'
            ]:
                patterns += [
                    sn.assert_found(f'{option}:', self.config_file_path)
                ]
        else:
            raise SanityError('config entry not found in config.yaml file')

        return sn.all(patterns)
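After `yaml.safe_load`, the code above expects `spack_compilers` to carry
roughly this shape (a hypothetical two-entry fragment showing only the keys
the loop reads; the specs are made up):

spack_compilers = {
    'compilers': [
        {'compiler': {'spec': 'gcc@11.2.0'}},
        {'compiler': {'spec': 'nvhpc@21.9'}},
    ]
}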
Example #19
    def __init__(self):
        # {{{ pe
        self.descr = 'Tool validation'
        self.valid_prog_environs = [
            'PrgEnv-gnu', 'cpeGNU'
            # 'PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi', 'PrgEnv-cray',
            # 'PrgEnv-aocc', 'cpeGNU', 'cpeIntel', 'cpeCray', 'cpeAMD',
        ]
        self.valid_systems = [
            'dom:mc', 'dom:gpu', 'daint:mc', 'daint:gpu',
            'eiger:mc', 'pilatus:mc'
        ]
        self.tool = 'atp'
        self.modules = [self.tool, 'cray-stat']  # cray-cti
        self.maintainers = ['JG']
        self.tags = {'sph', 'hpctools', 'cpu', 'craype', 'debugging'}
        # }}}

        # {{{ compile
        self.testname = 'sedov'
        self.sourcepath = f'{self.testname}.cpp'
        # atp requires full path:
        self.executable = './mpi+omp'
        cs = self.current_system.name
        re_slm_1 = 'libAtpSigHandler.so'
        re_slm_2 = 'libAtpDispatch.so'
        re_epr_ini1 = '^slurm = True'
        re_epr_cfg1 = '.debug., .ps., None, False'
        re_epr_cfg2 = '.eproxy., .eproxy., None, True'
        re_hosts_1 = 'login'
        re_ver_1 = 'STAT_VERSION1=$'
        re_ver_2 = 'STAT_VERSION2=$'
        re_ver_3 = 'ATP_VERSION1=$'
        re_ver_4 = 'ATP_VERSION2=$'
        re_ver_5 = 'ATP_HOME=$'
        re_which_1 = 'not found'
        re_stderr_1 = 'forcing job termination|Force Terminated (job|Step)'
        re_stderr_2 = 'Producing core dumps for rank'
        re_stderr_3 = 'View application merged backtrace tree with: stat-view'
        re_dot_1 = 'MPI_Allreduce|MPID_Abort|PMPI_Abort'
        re_dot_2 = 'sphexa::sph::|cstone::'
        re_core_1 = 'core file x86-64'
        # TODO: grep sphexa::sph atpMergedBT_line.dot -> perf_patterns
        #   94 [pos="0,0", label="sphexa::sph::neighborsSum(...
        ldd_rpt = 'ldd.rpt'
        cfg_rpt = 'cfg.rpt'
        version_rpt = 'version.rpt'
        which_rpt = 'which.rpt'
        slurm_cfg_file = '/etc/opt/slurm/plugstack.conf'
        cfg_file_path = '/opt/cray/elogin/eproxy'
        eproxy_ini_cfg_file = f'{cfg_file_path}/etc/eproxy.ini'
        eproxy_cfg_file = f'{cfg_file_path}/default/bin/eproxy_config.py'
        hosts_cfg_file = '/etc/hosts'
        apt_dot_file = 'atpMergedBT_line.dot'
        # TODO: regex_rk0 = 'core.atp.*.0.0.*'
        # {{{ Needed when reporting a support case:
        if cs not in {'pilatus', 'eiger'}:
            self.prebuild_cmds += [
                # --- check slurm_cfg
                #     (optional /opt/cray/pe/atp/libAtpDispatch.so)
                f'grep "{re_slm_2}" {slurm_cfg_file} > {cfg_rpt}',
                # --- check ini_cfg_file (slurm = True)
                f'grep "{re_epr_ini1}" {eproxy_ini_cfg_file} >> {cfg_rpt}',
                # --- check eproxy_cfg_file (['debug', 'ps', None, False])
                f'grep "{re_epr_cfg1}" {eproxy_cfg_file} >> {cfg_rpt}',
                # --- check eproxy_cfg_file (['eproxy', 'eproxy', None, True])
                f'grep "{re_epr_cfg2}" {eproxy_cfg_file} >> {cfg_rpt}',
                # --- check STAT_MOM_NODE in /etc/hosts (daintgw01|domgw03)
                f'grep "{re_hosts_1}" {hosts_cfg_file} >> {cfg_rpt}',
                # --- check stat version
                f'echo STAT_VERSION1=$STAT_VERSION > {version_rpt}',
                f'echo STAT_VERSION2=`STATbin --version` >> {version_rpt}',
            ]
        else:
            self.prebuild_cmds += [
                # --- check stat version
                f'echo STAT_VERSION1=$STAT_LEVEL > {version_rpt}',
                f'echo STAT_VERSION2=`STATbin --version` >> {version_rpt}',
            ]
        # }}}

        self.prebuild_cmds += [
            # TODO: open cray case
            f'export PKG_CONFIG_PATH=$ATP_INSTALL_DIR/lib/pkgconfig:'
            f'$PKG_CONFIG_PATH',
            # --- check atp version and path
            f'echo ATP_VERSION1=$ATP_VERSION >> {version_rpt}',
            f'echo ATP_VERSION2='
            f'`pkg-config --modversion libAtpSigHandler` >> {version_rpt}',
            f'echo ATP_HOME=$ATP_HOME >> {version_rpt}',
            f'pkg-config --variable=exec_prefix libAtpSigHandler &>{which_rpt}'
        ]
        self.postbuild_cmds += [
            # --- check exe (/opt/cray/pe/atp/3.8.1/lib/libAtpSigHandler.so.1)
            f'ldd {self.executable}* |grep "{re_slm_1}" &> {ldd_rpt}',
        ]
        # }}}

        # {{{ run
        self.time_limit = '10m'
        self.variables['ATP_ENABLED'] = '1'
        self.postrun_cmds += [
            f'ldd {self.executable}* |grep atp',
            'file core*'
        ]
# {{{ TODO: gdb_command
# -        gdb_command = (r'-e %s '
# -                       r'--eval-command="set pagination off" '
# -                       r'--eval-command="bt" '
# -                       r'--eval-command="quit"' % self.executable)
# -        regex_not_rk0 = r'grep -m1 -v atp'
# -        self.postrun_cmds = [
# -            'echo stoptime=`date +%s`',
# -            # --- rank 0: MPI_Allreduce
# -            f'gdb -c {regex_rk0} {gdb_command} &> {self.rpt_rk0}',
# -
# -            # --- rank>2: MPI::Comm::Abort
# -#            f'ln -s `ls -1 core.* |{regex_not_rk0}` mycore',
# -#            f'gdb -c mycore {gdb_command} &> {self.rpt_rkn}',
# -            # can't do this because core filename is unknown at runtime:
# -            # 'gdb -c core.atp.*.%s.* -e %s' % (self.core, self.executable),
# -
# -            '# stat-view atpMergedBT_line.dot'
# }}}
        # }}}

        # {{{ sanity
        # TODO: self.sanity_patterns += ... ?
        if cs in {'pilatus', 'eiger'}:
            self.sanity_patterns = sn.all([
                # check the job output:
                sn.assert_found(r'UpdateSmoothingLength: \S+s', self.stdout),
                # check the tool output:
                sn.assert_found(re_slm_1, ldd_rpt),
                #
                sn.assert_not_found(re_ver_1, version_rpt),
                sn.assert_not_found(re_ver_2, version_rpt),
                sn.assert_not_found(re_ver_3, version_rpt),
                sn.assert_not_found(re_ver_4, version_rpt),
                sn.assert_not_found(re_ver_5, version_rpt),
                sn.assert_not_found(re_which_1, which_rpt),
                #
                sn.assert_found(re_stderr_1, self.stderr),
                sn.assert_found(re_stderr_2, self.stderr),
                sn.assert_found(re_stderr_3, self.stderr),
                #
                sn.assert_found(re_dot_1, apt_dot_file),
                # sn.assert_found(re_dot_2, apt_dot_file),
                sn.assert_found(re_core_1, self.stdout),
            ])
        else:
            self.sanity_patterns = sn.all([
                # check the job output:
                sn.assert_found(r'UpdateSmoothingLength: \S+s', self.stdout),
                # check the tool output:
                sn.assert_found(re_slm_1, ldd_rpt),
                sn.assert_found(re_slm_2, cfg_rpt),
                sn.assert_found(re_epr_ini1, cfg_rpt),
                sn.assert_found(re_epr_cfg1, cfg_rpt),
                sn.assert_found(re_epr_cfg2, cfg_rpt),
                sn.assert_found(re_hosts_1, cfg_rpt),
                #
                sn.assert_not_found(re_ver_1, version_rpt),
                sn.assert_not_found(re_ver_2, version_rpt),
                sn.assert_not_found(re_ver_3, version_rpt),
                sn.assert_not_found(re_ver_4, version_rpt),
                sn.assert_not_found(re_ver_5, version_rpt),
                sn.assert_not_found(re_which_1, which_rpt),
                #
                sn.assert_found(re_stderr_1, self.stderr),
                sn.assert_found(re_stderr_2, self.stderr),
                sn.assert_found(re_stderr_3, self.stderr),
                #
                sn.assert_found(re_dot_1, apt_dot_file),
                # sn.assert_found(re_dot_2, apt_dot_file),
                sn.assert_found(re_core_1, self.stdout),
            ])
Example #20
    def __init__(self, lang):
        super().__init__()
        self.name = 'scorep_mpi_omp_%s' % lang.replace('+', 'p')
        self.descr = 'SCORE-P %s check' % lang
        self.valid_systems = ['daint:gpu', 'daint:mc', 'dom:gpu', 'dom:mc']

        self.valid_prog_environs = ['PrgEnv-gnu', 'PrgEnv-intel', 'PrgEnv-pgi']

        self.scorep_modules = {
            'PrgEnv-gnu': ['Score-P/4.0-CrayGNU-18.08'],
            'PrgEnv-intel': ['Score-P/4.0-CrayIntel-18.08'],
            'PrgEnv-pgi': ['Score-P/4.0-CrayPGI-18.08']
        }

        self.prgenv_flags = {
            'PrgEnv-cray': ['-g', '-homp'],
            'PrgEnv-gnu': ['-g', '-fopenmp'],
            'PrgEnv-intel': ['-g', '-openmp'],
            'PrgEnv-pgi': ['-g', '-mp']
        }

        self.executable = 'jacobi'
        self.build_system = 'Make'
        self.build_system.makefile = 'Makefile_scorep_mpi_omp'
        # NOTE: Restrict concurrency to allow creation of Fortran modules
        if lang == 'F90':
            self.build_system.max_concurrency = 1

        self.sourcesdir = os.path.join('src', lang)
        self.num_tasks = 3
        self.num_tasks_per_node = 3
        self.num_cpus_per_task = 4
        self.num_iterations = 200

        self.variables = {
            'OMP_NUM_THREADS': str(self.num_cpus_per_task),
            'ITERATIONS': str(self.num_iterations),
            'SCOREP_ENABLE_PROFILING': 'false',
            'SCOREP_ENABLE_TRACING': 'true',
            'OMP_PROC_BIND': 'true',
            'SCOREP_TIMER': 'clock_gettime'
        }

        cpu_count = self.num_cpus_per_task * self.num_tasks_per_node
        self.otf2_file = 'otf2.txt'
        self.sanity_patterns = sn.all([
            sn.assert_found('SUCCESS', self.stdout),
            sn.assert_eq(
                sn.count(
                    sn.extractall(r'(?P<line>LEAVE.*omp\s+\S+\s+\@_jacobi)',
                                  self.otf2_file, 'line')),
                4 * self.num_iterations * cpu_count),
            sn.assert_not_found('warning|WARNING', self.stderr)
        ])

        self.maintainers = ['MK', 'JG']
        self.tags = {'production'}

        # additional program call in order to generate the tracing output for
        # the sanity check
        self.post_run = [
            'otf2-print scorep-*/traces.otf2 > %s' % self.otf2_file
        ]
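The sanity check above counts OTF2 LEAVE records for the instrumented OpenMP
regions. For illustration, a line of the kind the regex would match (the
field layout is a guess, not verbatim otf2-print output):

import re

line = 'LEAVE  12  3456789  Region: "!$omp do @_jacobi.F90:45"'
assert re.search(r'LEAVE.*omp\s+\S+\s+\@_jacobi', line)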
Example #21
 def validate(self):
     return sn.assert_not_found(r'hello', self.stdout)
Example #22
 def set_sanity_patterns(self):
     self.sanity_patterns = sn.assert_not_found('error', self.stderr)
Example #23
 def validate_build(self):
     return sn.assert_not_found(r'warning', self.stdout)
Example #24
    def __init__(self):
        self.descr = 'GPU specs test'
        self.maintainers = ['JG']
        self.valid_systems = [
            'daint:gpu', 'dom:gpu', 'arolla:cn', 'tsa:cn', 'ault:amdv100',
            'ault:intelv100', 'ault:amda100', 'ault:amdvega'
        ]
        self.valid_prog_environs = ['PrgEnv-gnu']
        self.sourcesdir = 'src'
        self.build_system = 'Make'
        self.executable = './exe'

        # {{{ run
        self.num_tasks = 1
        self.num_tasks_per_node = 1
        self.num_cpus_per_task = 1
        self.num_tasks_per_core = 1
        # self.use_multithreading = False
        # self.exclusive = True
        # self.exclusive_access = True
        self.time_limit = '1m'
        self.prerun_cmds = ['module list']
        # }}}

        # {{{ sanity_patterns
        self.sanity_patterns = sn.all([
            sn.assert_not_found(r'MapSMtoCores.*is undefined', self.stdout),
            sn.assert_found(r'Kernel Module', self.stdout),
            sn.assert_found(r'CUDA Driver Version / Runtime', self.stdout),
            sn.assert_found(r'CUDA Capability Major/Minor', self.stdout),
            sn.assert_found(r'Theoretical peak performance', self.stdout),
            sn.assert_found(r'^Device ', self.stdout),
            # sn.assert_found(r'', self.stdout),
        ])
        # self.sanity_patterns = self.assert_num_tasks()
        # }}}

        # {{{ performance
        regex1 = r'Kernel Module\s+(\S+)\.\d+\s+'
        regex2 = r'\s+CUDA Driver Version / Runtime Version\s+(\S+) / (\S+)'
        regex3 = r'\s+CUDA Capability Major/Minor version number:\s+(\S+)'
        regex4 = r'Theoretical peak performance per GPU:\s+(\S+) Gflop/s'
        self.perf_patterns = {
            'driver': sn.extractsingle(regex1, self.stdout, 1, float),
            'cuda_driver_version': sn.extractsingle(regex2, self.stdout, 1,
                                                    float),
            'cuda_runtime_version': sn.extractsingle(regex2, self.stdout, 2,
                                                     float),
            'cuda_capability': sn.extractsingle(regex3, self.stdout, 1, float),
            'peak_perf': sn.extractsingle(regex4, self.stdout, 1, int),
        }
        self.reference = {
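            # thresholds are fractional: -0.10 accepts results down to 10%
            # below the reference value, while None leaves that side open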
            'dom:gpu': {
                'peak_perf': (4761, -0.10, None, 'Gflop/s'),
            },
            'daint:gpu': {
                'peak_perf': (4761, -0.10, None, 'Gflop/s'),
            },
            'ault:intelv100': {
                'peak_perf': (7066, -0.10, None, 'Gflop/s'),
            },
            'ault:amda100': {
                'peak_perf': (15000, -0.10, None, 'Gflop/s'),
            },
            'ault:amdv100': {
                'peak_perf': (5500, -0.10, None, 'Gflop/s'),
            },
            'ault:amdvega': {
                'peak_perf': (3450, -0.10, None, 'Gflop/s'),
            },
            'arolla:cn': {
                'peak_perf': (5861, -0.10, None, 'Gflop/s'),
            },
            'tsa:cn': {
                'peak_perf': (5861, -0.10, None, 'Gflop/s'),
            },
            '*': {
                'driver': (0, None, None, ''),
                'cuda_driver_version': (0, None, None, ''),
                'cuda_runtime_version': (0, None, None, ''),
                'cuda_capability': (0, None, None, '')
            },
        }
Example #25
 def validate_download(self):
     return sn.assert_not_found('error', self.stderr)
Example #26
    def __init__(self, part, n_tasks, n_tasks_per_node):

        self.valid_systems = [part]
        self.valid_prog_environs = ['openfoam']

        self.num_tasks_per_node = n_tasks_per_node
        self.num_tasks = n_tasks
        self.num_nodes = n_tasks // n_tasks_per_node
        self.tags = {
            'num_procs=%i' % self.num_tasks,
            'num_nodes=%i' % self.num_nodes
        }

        self.sourcesdir = 'downloads'
        self.exclusive_access = True
        self.time_limit = '1h'

        self.prerun_cmds = [
            'tar --strip-components 2 -xf Motorbike_bench_template.tar.gz bench_template/basecase',
            './Allclean',  # remove logs, old time histories etc., just in case

            # set domain decomposition:
            # using 'scotch' method means simpleCoeffs is ignored so it doesn't need to match num_tasks:
            'sed -i -- "s/method .*/method          scotch;/g" system/decomposeParDict',
            'sed -i -- "s/numberOfSubdomains .*/numberOfSubdomains %i;/g" system/decomposeParDict'
            % self.num_tasks,

            # remove streamlines:
            'sed -i -- \'s/    #include "streamLines"//g\' system/controlDict',
            'sed -i -- \'s/    #include "wallBoundedStreamLines"//g\' system/controlDict',

            # fix location of mesh quality defaults (needed for v6+?)
            "sed -i -- 's|caseDicts|caseDicts/mesh/generation|' system/meshQualityDict",
            './Allmesh',  # do meshing
            # the trailing backslash continues onto the generated launcher
            # line, so the MPI run itself is timed:
            'time \\',
        ]
        # could also check:
        #$ ompi_info -c | grep -oE "MPI_THREAD_MULTIPLE[^,]*"
        # MPI_THREAD_MULTIPLE: yes

        self.executable = 'simpleFoam'
        self.executable_opts = ['-parallel']

        self.keep_files = [
            'log.snappyHexMesh', 'log.blockMesh', 'log.decomposePar'
        ]

        result = sn.extractall(
            r'time step continuity errors : '
            r'\S+\s\S+ = \S+\sglobal = (?P<res>-?\S+),', self.stdout, 'res',
            float)
        # NB: `time` outputs to stderr so can't assume that should be empty
        self.sanity_patterns = sn.all([
            # ensure meshing finished ok:
            sn.assert_found('End', 'log.blockMesh'),
            sn.assert_found('End', 'log.decomposePar'),
            sn.assert_found('Finished meshing without any errors',
                            'log.snappyHexMesh'),

            # ensure simpleFoam finished ok:
            sn.assert_found('Finalising parallel run', self.stdout),
            sn.assert_not_found('FOAM FATAL ERROR', self.stdout),
            sn.assert_not_found('FOAM FATAL ERROR', self.stderr),

            # ensure continuity errors small enough - copied from
            # https://github.com/eth-cscs/reframe/blob/0a4dc5207b35c737861db346bd483fd4ac202846/cscs-checks/apps/openfoam/check_openfoam_extend.py#L56
            sn.all(sn.map(lambda x: sn.assert_lt(abs(x), 5.e-04), result)),
        ])

        self.perf_patterns = {
            # from openfoam output:
            'ExecutionTime':
            sn.extractall(
                r'ExecutionTime = ([\d.]+) s  ClockTime = ([\d.]+) s',
                self.stdout, 1, float)[-1],
            'ClockTime':
            sn.extractall(
                r'ExecutionTime = ([\d.]+) s  ClockTime = ([\d.]+) s',
                self.stdout, 2, float)[-1],
            # from `time`:
            'runtime_real':
            sn.extractsingle(r'^real\s+(\d+m[\d.]+s)$', self.stderr, 1,
                             parse_time_cmd),
        }
        self.reference = {
            '*': {
                'ExecutionTime': (0, None, None, 's'),
                'ClockTime': (0, None, None, 's'),
                'runtime_real': (0, None, None, 's'),
            }
        }
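parse_time_cmd for 'runtime_real' is defined elsewhere in the original file.
A plausible sketch, assuming it converts bash `time` output such as
'1m23.456s' into seconds (the name comes from the code above; the
implementation is a guess):

import re


def parse_time_cmd(s):
    # convert e.g. '1m23.456s' into a float number of seconds
    minutes, seconds = re.match(r'(\d+)m([\d.]+)s', s.strip()).groups()
    return float(minutes) * 60 + float(seconds)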