Exemplo n.º 1
0
def test_appkeroutputparser(datadir):
    """Exercise AppKerOutputParser end to end against the bundled fixture files."""
    from akrr.parsers.akrrappkeroutputparser import AppKerOutputParser, total_seconds
    import xml.etree.ElementTree as ElementTree

    parser = AppKerOutputParser(name='test',
                                version=1,
                                description="Test the resource deployment",
                                url='http://xdmod.buffalo.edu',
                                measurement_name='test')
    parser.add_common_must_have_params_and_stats()
    assert len(parser.mustHaveParameters) == 2

    extra_parameters = ("Must have parameter 1",
                        "Must have parameter 2",
                        "Must have parameter 3")
    extra_statistics = ("Must have statistic 1",
                        "Must have statistic 2",
                        "Must have statistic 3")
    for name in extra_parameters:
        parser.add_must_have_parameter(name)
    for name in extra_statistics:
        parser.add_must_have_statistic(name)

    # every registered must-have entry is tracked by the parser
    for name in extra_parameters:
        assert name in parser.mustHaveParameters
    for name in extra_statistics:
        assert name in parser.mustHaveStatistics

    # parse common parameters and statistics from the fixture files
    parser.parse_common_params_and_stats(
        str(datadir / 'appstdout'),
        str(datadir / 'stdout'),
        str(datadir / 'stderr'),
        str(datadir / 'gen.info'),
        resource_appker_vars={'resource': {'name': 'HPC-Cluster'},
                              'app': {'name': 'test'}})

    # record wall clock time when the parser managed to extract one
    if parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.wallClockTime), "Second")

    # validate the generated XML report
    root = ElementTree.fromstring(parser.get_xml())
    params = root.find(".//parameters")
    stats = root.find(".//statistics")

    nodes_text = params.find(".//parameter[ID='RunEnv:Nodes']").find('value').text
    assert len(nodes_text) > 5
    assert stats.find(".//statistic[ID='Network scratch directory accessible']"
                      ).find('value').text == '1'
    assert stats.find(".//statistic[ID='Task working directory exists']"
                      ).find('value').text == '1'
    wall_clock_text = root.find(".//statistic[ID='Wall Clock Time']").find('value').text
    assert float(wall_clock_text) == 2.0
    assert root.find('./exitStatus/completed').text == "false"
Exemplo n.º 2
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """
    Process test appkernel output.

    Builds the appkernel XML report for the resource-deployment test and
    returns it (or None when parsing is incomplete).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='test',
                                version=1,
                                description="Test the resource deployment",
                                url='http://xdmod.buffalo.edu',
                                measurement_name='test')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_statistic('Wall Clock Time')
    parser.add_must_have_statistic('Shell is BASH')
    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # set statistics
    if parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.wallClockTime), "Second")

    # read output (guard against stdout=None, which os.path.isfile rejects)
    lines = []
    if stdout is not None and os.path.isfile(stdout):
        with open(stdout, "rt") as fin:
            lines = fin.readlines()

    # process the output: the marker line must be followed by a "bash" line
    parser.set_statistic('Shell is BASH', 0)
    # BUG FIX: iterate only up to the next-to-last line; the original indexed
    # lines[j + 1] with j reaching len(lines) - 1, raising IndexError when the
    # marker appeared on the final line of stdout.
    for j in range(len(lines) - 1):
        if lines[j].count("Checking that the shell is BASH") > 0 and \
                lines[j + 1].count("bash") > 0:
            parser.set_statistic('Shell is BASH', 1)

    if __name__ == "__main__":
        # output for testing purpose
        print(("parsing complete:", parser.parsing_complete()))
        parser.print_params_stats_as_must_have()
        print((parser.get_xml()))

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 3
0
def process_appker_output(appstdout=None, stdout=None, stderr=None, geninfo=None, proclog=None,
                          resource_appker_vars=None):
    """
    Parse the output of an IOR (Interleaved-Or-Random) benchmark run and
    return the appkernel XML report (or None when parsing is incomplete).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='ior',
        version=1,
        description="IOR (Interleaved-Or-Random) Benchmark",
        url='http://freshmeat.net/projects/ior',
        measurement_name='IOR'
    )
    app_vars = None
    if resource_appker_vars is not None and 'app' in resource_appker_vars:
        app_vars = resource_appker_vars['app']

    def _enabled(flag):
        # A sub-benchmark is expected when no app config was supplied at all,
        # or when the config explicitly sets its flag to True.
        # (Equivalent to the original "app_vars is None or ('flag' in app_vars
        # and app_vars['flag'] is True)" checks, deduplicated via dict.get.)
        return app_vars is None or app_vars.get(flag) is True

    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')

    if _enabled('testHDF5'):
        parser.add_must_have_parameter('HDF Version')
        parser.add_must_have_parameter('HDF5 Collective N-to-1 Test File System')
        parser.add_must_have_parameter('HDF5 Independent N-to-1 Test File System')
        parser.add_must_have_parameter('HDF5 N-to-N Test File System')
        parser.add_must_have_statistic('HDF5 Collective N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 Collective N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 Independent N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 Independent N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 N-to-N Read Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 N-to-N Write Aggregate Throughput')

    if _enabled('testMPIIO'):
        parser.add_must_have_parameter('MPIIO Collective N-to-1 Test File System')
        parser.add_must_have_parameter('MPIIO Independent N-to-1 Test File System')
        parser.add_must_have_parameter('MPIIO N-to-N Test File System')
        parser.add_must_have_statistic('MPIIO Collective N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO Collective N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO Independent N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO Independent N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO N-to-N Read Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO N-to-N Write Aggregate Throughput')

    if _enabled('testPOSIX'):
        parser.add_must_have_parameter('POSIX N-to-1 Test File System')
        parser.add_must_have_parameter('POSIX N-to-N Test File System')
        parser.add_must_have_statistic('POSIX N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('POSIX N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('POSIX N-to-N Read Aggregate Throughput')
        parser.add_must_have_statistic('POSIX N-to-N Write Aggregate Throughput')

    if _enabled('testNetCDF'):
        parser.add_must_have_parameter('Parallel NetCDF Collective N-to-1 Test File System')
        parser.add_must_have_parameter('Parallel NetCDF Independent N-to-1 Test File System')
        parser.add_must_have_parameter('Parallel NetCDF Version')
        parser.add_must_have_parameter('Per-Process Data Size')
        parser.add_must_have_parameter('Per-Process I/O Block Size')
        parser.add_must_have_parameter('RunEnv:Nodes')
        parser.add_must_have_parameter('Transfer Size Per I/O')
        parser.add_must_have_statistic('Parallel NetCDF Collective N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('Parallel NetCDF Collective N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('Parallel NetCDF Independent N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('Parallel NetCDF Independent N-to-1 Write Aggregate Throughput')

    parser.add_must_have_statistic('Number of Tests Passed')
    parser.add_must_have_statistic('Number of Tests Started')

    parser.add_must_have_statistic('Wall Clock Time')

    # IOR runs several independent sub-tests; accept the report even if only
    # part of the must-have statistics were found
    parser.completeOnPartialMustHaveStatistics = True
    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo, resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time", total_seconds(parser.appKerWallClockTime), "Second")

    # read output (guard against appstdout=None, which os.path.isfile rejects)
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output

    # find which version of IOR was used
    # NOTE(review): the last line is intentionally left out here, matching the
    # original "while j < len(lines) - 1" loop bound — confirm this is desired.
    ior_output_version = None
    for line in lines[:-1]:
        # e.g. "# IOR RELEASE: IOR-2.10.3"
        if re.match(r'^#\s+IOR RELEASE:\s(.+)', line):
            ior_output_version = 20
        # e.g. "IOR-3.2.0: MPI Coordinated Test of Parallel I/O"
        #      "IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O"
        m = re.match(r'^IOR-([3-9])\.([0-9])+\.[0-9]\S*: MPI Coordinated Test of Parallel I/O', line)
        if m:
            ior_major = int(m.group(1))
            ior_minor = int(m.group(2))
            if ior_major >= 3:
                if ior_minor >= 3:
                    ior_output_version = 33
                elif ior_minor >= 2:
                    ior_output_version = 32
                else:
                    ior_output_version = 30

    if ior_output_version is None:
        print("ERROR: unknown version of IOR output!!!")

    parser.successfulRun = False

    total_number_of_tests, tests_passed = process_ior_output_v33(parser, lines)

    # reads and writes are normally separate runs, so the raw count is doubled
    if app_vars is not None and 'doAllWritesFirst' in app_vars:
        if app_vars['doAllWritesFirst']:
            # i.e. separate read and write
            total_number_of_tests = total_number_of_tests // 2
    else:
        # by default separate read and write
        total_number_of_tests = total_number_of_tests // 2

    parser.set_statistic('Number of Tests Passed', tests_passed)
    parser.set_statistic('Number of Tests Started', total_number_of_tests)

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        parser.print_template_for_pytest()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 4
0
def process_appker_output(appstdout=None, stdout=None, stderr=None, geninfo=None, resource_appker_vars=None):
    """
    Parse Graph500 benchmark output and return the appkernel XML report
    (or None when parsing is incomplete).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='graph500',
        version=1,
        description="Graph500 Benchmark",
        url='http://www.Graph500.org',
        measurement_name='Graph500'
    )
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Edge Factor')
    parser.add_must_have_parameter('Input File')
    parser.add_must_have_parameter('Number of Roots to Check')
    parser.add_must_have_parameter('Number of Edges')
    parser.add_must_have_parameter('Number of Vertices')
    parser.add_must_have_parameter('Scale')

    parser.add_must_have_statistic('Harmonic Mean TEPS')
    parser.add_must_have_statistic('Harmonic Standard Deviation TEPS')
    parser.add_must_have_statistic('Median TEPS')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo, resource_appker_vars)

    # prefer the app kernel's own timing and fall back to the overall wall
    # clock. BUG FIX: use getattr so a parser without appKerWallClockTime does
    # not raise AttributeError (sibling parsers guard this with hasattr).
    app_wall_clock = getattr(parser, 'appKerWallClockTime', None)
    if app_wall_clock is not None:
        parser.set_statistic("Wall Clock Time", total_seconds(app_wall_clock), "Second")
    elif parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time", total_seconds(parser.wallClockTime), "Second")

    # read output (guard against appstdout=None, which os.path.isfile rejects)
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output line by line
    parser.successfulRun = True
    num_of_errors = 0
    for line in lines:
        m = re.match(r'^Graph500 version:\s+(.+)', line)
        if m:
            parser.set_parameter("App:Version", m.group(1).strip())

        m = re.match(r'ERROR:\s+(.+)', line)
        if m:
            num_of_errors += 1

        m = re.match(r'^Reading input from\s+(.+)', line)
        if m:
            parser.set_parameter("Input File", m.group(1))

        m = re.match(r'^SCALE:\s+(\d+)', line)
        if m:
            parser.set_parameter("Scale", m.group(1))

        m = re.match(r'^edgefactor:\s+(\d+)', line)
        if m:
            parser.set_parameter("Edge Factor", m.group(1))

        m = re.match(r'^NBFS:\s+(\d+)', line)
        if m:
            parser.set_parameter("Number of Roots to Check", m.group(1))

        m = re.match(r'^median_TEPS:\s+(\d[0-9.e+]+)', line)
        if m:
            parser.set_statistic("Median TEPS", m.group(1), "Traversed Edges Per Second")

        m = re.match(r'^harmonic_mean_TEPS:\s+(\d[0-9.e+]+)', line)
        if m:
            # the presence of the harmonic mean marks a completed run
            parser.successfulRun = True
            parser.set_statistic("Harmonic Mean TEPS", m.group(1), "Traversed Edges Per Second")

        m = re.match(r'^harmonic_stddev_TEPS:\s+(\d[0-9.e+]+)', line)
        if m:
            parser.set_statistic("Harmonic Standard Deviation TEPS", m.group(1), "Traversed Edges Per Second")

        m = re.match(r'^median_validate:\s+([\d.]+)\s+s', line)
        if m:
            parser.set_statistic("Median Validation Time", m.group(1), "Second")

        m = re.match(r'^mean_validate:\s+([\d.]+)\s+s', line)
        if m:
            parser.set_statistic("Mean Validation Time", m.group(1), "Second")

        m = re.match(r'^stddev_validate:\s+([\d.]+)\s+s', line)
        if m:
            parser.set_statistic("Standard Deviation Validation Time", m.group(1), "Second")

    # any ERROR: line marks the whole run as failed
    if num_of_errors > 0:
        parser.successfulRun = False

    # derive graph size from scale/edgefactor when both were reported
    if parser.get_parameter('Scale') is not None and parser.get_parameter('Edge Factor') is not None:
        scale = int(parser.get_parameter('Scale'))
        edgefactor = int(parser.get_parameter('Edge Factor'))
        parser.set_parameter("Number of Vertices", 2 ** scale)
        parser.set_parameter("Number of Edges", edgefactor * 2 ** scale)

    if __name__ == "__main__":
        # output for testing purpose
        parser.parsing_complete(True)
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 5
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """
    Parse mdtest benchmark output and return the appkernel XML report
    (or None when parsing is incomplete).
    """
    # initiate parser
    parser = AppKerOutputParser(name='mdtest')
    # set obligatory parameters and statistics
    # set common parameters and statistics (App:ExeBinSignature and RunEnv:Nodes)
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('RunEnv:Nodes')

    # mdtest reports every metric for four directory layouts; register the
    # full cross product (same names, same order as registering each literal)
    layouts = ('single directory per process', 'single directory',
               'single tree directory per process', 'single tree directory')

    for base in ('Arguments', 'files/directories', 'tasks'):
        for layout in layouts:
            parser.add_must_have_parameter('%s (%s)' % (base, layout))

    for operation in ('Directory creation', 'Directory removal',
                      'Directory stat', 'File creation', 'File read',
                      'File removal', 'File stat', 'Tree creation',
                      'Tree removal'):
        for layout in layouts:
            parser.add_must_have_statistic('%s (%s)' % (operation, layout))

    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # read output (guard against appstdout=None, which os.path.isfile rejects)
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output; an explicit index is kept because the SUMMARY
    # handler consumes several extra lines inside the outer loop
    testname = ""
    parser.successfulRun = False
    j = 0
    while j < len(lines):
        # "#Testing <layout>" announces which layout the following rows cover
        m = re.match(r'^#Testing (.+)', lines[j])
        if m:
            testname = " (" + m.group(1).strip() + ")"

        # SUMMARY table: header plus two lines are skipped, then one row per
        # operation with max/min/mean/stddev columns (mean is captured)
        m = re.match(r'^SUMMARY.*:', lines[j])
        if m:
            j = j + 3
            while j < len(lines):
                m = re.match(
                    r'([A-Za-z0-9 ]+):\s+[0-9.]+\s+[0-9.]+\s+([0-9.]+)\s+([0-9.]+)',
                    lines[j])
                if m:
                    parser.set_statistic(
                        m.group(1).strip() + testname, m.group(2),
                        "Operations/Second")
                else:
                    break
                j = j + 1
        m = re.search(r'finished at', lines[j])
        if m:
            parser.successfulRun = True

        m = re.match(r'^Command line used:.+mdtest\s+(.+)', lines[j])

        if m:
            parser.set_parameter("Arguments" + testname, m.group(1).strip())
        m = re.search(r'([0-9]+) tasks, ([0-9]+) files/directories', lines[j])
        if m:
            parser.set_parameter("tasks" + testname, m.group(1).strip())
            parser.set_parameter("files/directories" + testname,
                                 m.group(2).strip())
        j = j + 1

    if __name__ == "__main__":
        # output for testing purpose
        print("Parsing complete:", parser.parsing_complete(verbose=True))
        print("Following statistics and parameter can be set as obligatory:")
        parser.print_params_stats_as_must_have()
        print("\nResulting XML:")
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 6
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """
    Process test appkernel output.

    Besides the common parsing this variant extracts cloud-instance lifecycle
    timestamps (OpenStack or Google Cloud) from the process log and reports
    startup/shutdown durations as statistics.
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='test',
                                version=1,
                                description="Test the resource deployment",
                                url='http://xdmod.buffalo.edu',
                                measurement_name='test')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # set statistics
    if parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.wallClockTime), "Second")

    # read output (guard against stdout=None, which os.path.isfile rejects)
    lines = []
    if stdout is not None and os.path.isfile(stdout):
        with open(stdout, "rt") as fin:
            lines = fin.readlines()

    # process the output: the marker line must be followed by a "bash" line
    parser.set_statistic('Shell is BASH', 0)
    # BUG FIX: iterate only up to the next-to-last line; the original indexed
    # lines[j + 1] with j reaching len(lines) - 1, raising IndexError when the
    # marker appeared on the final line of stdout.
    for j in range(len(lines) - 1):
        if lines[j].count("Checking that the shell is BASH") > 0 and \
                lines[j + 1].count("bash") > 0:
            parser.set_statistic('Shell is BASH', 1)

    # process proc log: pick up cloud instance lifecycle timestamps
    if proclog is not None:
        # timestamp tail shared by all log messages: " (YYYY-MM-DD HH:MM:SS)"
        # (raw string; the original non-raw "\(" escapes trigger
        # DeprecationWarning on modern Python)
        _ts_re = r' \(([0-9]+)-([0-9]+)-([0-9]+) ([0-9]+):([0-9]+):([0-9]+)\)'

        def _event_time(prefix, line):
            # Return the datetime from "<prefix> (Y-M-D H:M:S)" or None.
            m = re.search(prefix + _ts_re, line)
            if m:
                return datetime.datetime(*map(int, m.groups()))
            return None

        # lifecycle event -> message prefixes for each supported cloud
        event_prefixes = {
            'start': ("Starting OpenStack instance",
                      "Starting Google Cloud instance"),
            'up': ("OpenStack Instance should be up and running",
                   "Google Cloud Instance should be up and running"),
            'shutdown': ("Shutting down OpenStack instance",
                         "Shutting down Google Cloud instance"),
            'terminated': ("OpenStack Instance should be down and terminated",
                           "Google Cloud Instance should be down and terminated"),
        }
        # last matching timestamp wins, as in the original line-by-line scan
        events = {key: None for key in event_prefixes}
        with open(proclog, "rt") as fin:
            for line in fin:
                for key, prefixes in event_prefixes.items():
                    for prefix in prefixes:
                        t = _event_time(prefix, line)
                        if t is not None:
                            events[key] = t

        if events['start'] is not None and events['up'] is not None:
            parser.set_statistic('Cloud Instance, Start Time to Login',
                                 total_seconds(events['up'] - events['start']))
        if events['shutdown'] is not None and events['terminated'] is not None:
            parser.set_statistic(
                'Cloud Instance, Shut Down Time',
                total_seconds(events['terminated'] - events['shutdown']))

    if __name__ == "__main__":
        # output for testing purpose
        print(("parsing complete:", parser.parsing_complete()))
        parser.print_params_stats_as_must_have()
        print((parser.get_xml()))

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 7
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    """Parse HPC Challenge (HPCC) benchmark output.

    Extracts run parameters (problem sizes, grid layout, thread counts) and
    performance statistics (LINPACK, STREAM, PTRANS, FFT, DGEMM, MPI random
    access) from the summary section of the application stdout, then derives
    the High Performance LINPACK efficiency from either a configured
    theoretical GFLOPS-per-core value or the measured CPU clock speed.

    Args:
        appstdout: path to the application stdout file (HPCC output).
        stdout: path to the batch job stdout file.
        stderr: path to the batch job stderr file.
        geninfo: path to the gen.info file.
        resource_appker_vars: resource/app-kernel configuration dict; may
            contain app.theoreticalGFlopsPerCore keyed by resource name.

    Returns:
        The result XML string if parsing is complete, otherwise None
        (decided by parser.get_xml()).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='hpcc',
                                version=1,
                                description="HPC Challenge Benchmarks",
                                url='http://icl.cs.utk.edu/hpcc/',
                                measurement_name='xdmod.benchmark.hpcc')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:DGEMM Problem Size')
    parser.add_must_have_parameter('Input:High Performance LINPACK Grid Cols')
    parser.add_must_have_parameter('Input:High Performance LINPACK Grid Rows')
    parser.add_must_have_parameter(
        'Input:High Performance LINPACK Problem Size')
    parser.add_must_have_parameter('Input:MPI Ranks')
    parser.add_must_have_parameter('Input:MPIRandom Problem Size')
    parser.add_must_have_parameter('Input:OpenMP Threads')
    parser.add_must_have_parameter('Input:PTRANS Problem Size')
    parser.add_must_have_parameter('Input:STREAM Array Size')
    parser.add_must_have_parameter('RunEnv:CPU Speed')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic(
        'Average Double-Precision General Matrix Multiplication (DGEMM) Floating-Point Performance'
    )
    parser.add_must_have_statistic("Average STREAM 'Add' Memory Bandwidth")
    parser.add_must_have_statistic("Average STREAM 'Copy' Memory Bandwidth")
    parser.add_must_have_statistic("Average STREAM 'Scale' Memory Bandwidth")
    parser.add_must_have_statistic("Average STREAM 'Triad' Memory Bandwidth")
    parser.add_must_have_statistic(
        'Fast Fourier Transform (FFTW) Floating-Point Performance')
    parser.add_must_have_statistic('High Performance LINPACK Efficiency')
    parser.add_must_have_statistic(
        'High Performance LINPACK Floating-Point Performance')
    parser.add_must_have_statistic('High Performance LINPACK Run Time')
    parser.add_must_have_statistic('MPI Random Access')
    parser.add_must_have_statistic('Parallel Matrix Transpose (PTRANS)')
    parser.add_must_have_statistic('Wall Clock Time')
    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if parser.appKerWallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # Intel MPI benchmark suite contains three classes of benchmarks:
    #
    #  Single-transfer, which needs only 2 processes
    #  Parallel-transfer, which can use as many processes that are available
    #  Collective, which can use as many processes that are available

    # The parameters mapping table:
    # HPCC summary key -> [parameter name, units, optional conversion formula]
    params = {
        "CommWorldProcs": ["MPI Ranks", "", ""],
        "HPL_N": ["High Performance LINPACK Problem Size", "", ""],
        "HPL_nprow": ["High Performance LINPACK Grid Rows", "", ""],
        "HPL_npcol": ["High Performance LINPACK Grid Cols", "", ""],
        "PTRANS_n": ["PTRANS Problem Size", "", ""],
        "MPIRandomAccess_N":
        ["MPIRandom Problem Size", "MWord", "val/1024/1024"],
        "STREAM_VectorSize": ["STREAM Array Size", "MWord", ""],
        "DGEMM_N": ["DGEMM Problem Size", "", ""],
        "omp_get_num_threads": ["OpenMP Threads", "", ""],
    }

    # The result mapping table:
    # HPCC summary key -> [statistic name, units, optional conversion formula]
    metrics = {
        "HPL_Tflops": [
            "High Performance LINPACK Floating-Point Performance",
            "MFLOP per Second", "val*1e6"
        ],
        "HPL_time": ["High Performance LINPACK Run Time", "Second", ""],
        "PTRANS_GBs":
        ["Parallel Matrix Transpose (PTRANS)", "MByte per Second", "val*1024"],
        "MPIRandomAccess_GUPs":
        ["MPI Random Access", "MUpdate per Second", "val*1000"],
        "MPIFFT_Gflops": [
            "Fast Fourier Transform (FFTW) Floating-Point Performance",
            "MFLOP per Second", "val*1000"
        ],
        "StarDGEMM_Gflops": [
            "Average Double-Precision General Matrix Multiplication (DGEMM) Floating-Point Performance",
            "MFLOP per Second", "val*1000"
        ],
        "StarSTREAM_Copy": [
            "Average STREAM 'Copy' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ],
        "StarSTREAM_Scale": [
            "Average STREAM 'Scale' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ],
        "StarSTREAM_Add": [
            "Average STREAM 'Add' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ],
        "StarSTREAM_Triad": [
            "Average STREAM 'Triad' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ]
    }

    # read output
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output
    parser.successfulRun = False
    result_begin = None
    hpl_tflops = None
    num_cores = None

    values = {}
    j = -1
    while j < len(lines) - 1:
        j += 1
        m = re.search(r'End of HPC Challenge tests', lines[j])
        if m:
            parser.successfulRun = True

        m = re.match(r'^Begin of Summary section', lines[j])
        if m:
            result_begin = 1
            continue

        m = re.match(r'^(\w+)=([\w.]+)', lines[j])
        if m and result_begin:
            metric_name = m.group(1).strip()
            values[metric_name] = m.group(2).strip()
            if metric_name == "HPL_Tflops":
                hpl_tflops = float(values[metric_name])
            if metric_name == "CommWorldProcs":
                num_cores = int(values[metric_name])
        m = re.match(r'^Running on ([0-9.]+) processors', lines[j])
        if m:
            num_cores = int(m.group(1).strip())

    # without these two values the run cannot be considered successful
    if hpl_tflops is None or num_cores is None:
        parser.successfulRun = False

    hpcc_version = None
    mhz = None
    theoretical_gflops = None

    if "VersionMajor" in values and "VersionMinor" in values and "VersionMicro" in values:
        hpcc_version = values["VersionMajor"] + "." + values[
            "VersionMinor"] + "." + values["VersionMicro"]
    # only append the release tag if a base version was assembled
    # (previously this raised TypeError when only VersionRelease was present)
    if "VersionRelease" in values and hpcc_version is not None:
        hpcc_version += values["VersionRelease"]
    if hpcc_version:
        parser.set_parameter("App:Version", hpcc_version)

    for k, v in params.items():
        if k not in values:
            continue
        val = values[k]
        if v[2].find('val') >= 0:
            # if a conversion formula is used, first set the val variable and
            # then eval the formula (formulas are internal constants from the
            # mapping table above, never external input)
            val = get_float_or_int(values[k])
            val = eval(v[2])
        # bug fix: the original tested the list literal `[1]`, which is always
        # truthy, so an empty-string unit was passed instead of None
        units = v[1] if v[1] != "" else None
        parser.set_parameter("Input:" + v[0], val, units)

    for k, v in metrics.items():
        if k not in values:
            continue
        val = values[k]
        if v[2].find('val') >= 0:
            # if a conversion formula is used, first set the val variable and
            # then eval the formula (internal constants only, see above)
            val = get_float_or_int(values[k])
            val = eval(v[2])
        # same fix as in the parameters loop: compare the unit, not `[1]`
        units = v[1] if v[1] != "" else None
        parser.set_statistic(v[0], val, units)

    if "cpu_speed" in parser.geninfo:
        # pick the maximal clock frequency reported across all CPUs
        ll = parser.geninfo["cpu_speed"].splitlines()
        cpu_speed_max = 0.0
        for l in ll:
            m = re.search(r'([\d.]+)$', l)
            if m:
                v = float(m.group(1).strip())
                if v > cpu_speed_max:
                    cpu_speed_max = v
        if cpu_speed_max > 0.0:
            parser.set_parameter("RunEnv:CPU Speed", cpu_speed_max, "MHz")
            mhz = cpu_speed_max

    if resource_appker_vars is not None:
        if 'resource' in resource_appker_vars and 'app' in resource_appker_vars:
            if 'theoreticalGFlopsPerCore' in resource_appker_vars['app']:
                resname = resource_appker_vars['resource']['name']
                if resname in resource_appker_vars['app'][
                        'theoreticalGFlopsPerCore']:
                    theoretical_gflops = resource_appker_vars['app'][
                        'theoreticalGFlopsPerCore'][resname] * num_cores
                    print("theoreticalGFlops", resname, theoretical_gflops)

    if theoretical_gflops is None and mhz is not None and num_cores is not None:
        # Most modern x86 & POWER processors are superscale and can issue 4 instructions per cycle
        theoretical_gflops = mhz * num_cores * 4 / 1000.0
    if theoretical_gflops and hpl_tflops:
        # Convert both to GFlops and derive the Efficiency
        percent = (1000.0 * hpl_tflops / theoretical_gflops) * 100.0
        parser.set_statistic("High Performance LINPACK Efficiency",
                             "%.3f" % percent, "Percent")

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 8
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """Parse HPCG benchmark output and return the result XML.

    Older HPCG versions embed their report as a YAML section in the
    application stdout; newer versions embed a summary ``.txt`` section.
    This function extracts whichever section is present and dispatches to
    process_yaml() or process_txt() accordingly.

    Args:
        appstdout: path to the application stdout file (HPCG output).
        stdout: path to the batch job stdout file.
        stderr: path to the batch job stderr file.
        geninfo: path to the gen.info file.
        proclog: path to the process log (unused here, kept for interface
            compatibility with the other parsers).
        resource_appker_vars: resource/app-kernel configuration dict.

    Returns:
        The result XML string if parsing is complete, otherwise None
        (decided by parser.get_xml()).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='hpcg',
                                version=1,
                                description="HPCG Benchmark",
                                url='http://www.hpcg-benchmark.org/index.html',
                                measurement_name='HPCG')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Distributed Processes')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Nx')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Ny')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Nz')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Nx')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Ny')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Nz')
    parser.add_must_have_parameter('Input:Number of Coarse Grid Levels')
    parser.add_must_have_parameter('Input:Threads per processes')
    parser.add_must_have_parameter('RunEnv:CPU Speed')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic('Floating-Point Performance, Raw DDOT')
    parser.add_must_have_statistic('Floating-Point Performance, Raw MG')
    parser.add_must_have_statistic('Floating-Point Performance, Raw SpMV')
    parser.add_must_have_statistic('Floating-Point Performance, Raw Total')
    parser.add_must_have_statistic('Floating-Point Performance, Raw WAXPBY')
    parser.add_must_have_statistic('Floating-Point Performance, Total')
    parser.add_must_have_statistic('Memory Bandwidth, Read')
    parser.add_must_have_statistic('Memory Bandwidth, Total')
    parser.add_must_have_statistic('Memory Bandwidth, Write')
    parser.add_must_have_statistic('Setup Time')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if getattr(parser, 'appKerWallClockTime', None) is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # read the application output
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # older version stores results in yaml
    # Parse YAML lines because YAML is often malformed
    yaml_lines = []
    # get yaml lines from appstdout (everything between the Start/End markers)
    bool_in_yaml_section = False
    for line in lines:
        if re.match(r"^====== .+\.yaml End   ======", line):
            break

        if bool_in_yaml_section:
            yaml_lines.append(line)

        if re.match(r"^====== .+\.yaml Start ======", line):
            bool_in_yaml_section = True
    # newer in summary txt
    # txt "====== HPCG-Benchmark_3.1_2020-09-23_17-54-20.txt Start ======"
    txt_lines = []
    bool_in_txt_section = False
    for line in lines:
        if re.match(r"^====== HPCG-Benchmark.*\.txt End   ======", line):
            break

        if bool_in_txt_section:
            txt_lines.append(line)

        if re.match(r"^====== HPCG-Benchmark.*\.txt Start ======", line):
            bool_in_txt_section = True

    # a handful of YAML lines means the YAML section is actually present
    if len(yaml_lines) > 5:
        process_yaml(yaml_lines, parser)
    else:
        process_txt(txt_lines, parser)

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 9
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """Parse GAMESS application output and return the result XML.

    Extracts the GAMESS version, rank/node counts, begin/end timestamps
    (used to derive Wall Clock Time and User Time from the reported CPU
    utilization), and the step CPU times of the MP2 energy and RHF
    calculations.

    Args:
        appstdout: path to the application stdout file (GAMESS log).
        stdout: path to the batch job stdout file.
        stderr: path to the batch job stderr file.
        geninfo: path to the gen.info file.
        proclog: path to the process log (unused here, kept for interface
            compatibility with the other parsers).
        resource_appker_vars: resource/app-kernel configuration dict.

    Returns:
        The result XML string if parsing is complete, otherwise None
        (decided by parser.get_xml()).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='gamess',
        version=1,
        description=
        "Gamess: General Atomic and Molecular Electronic Structure System",
        url='http://www.msg.ameslab.gov',
        measurement_name='Gamess')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')

    parser.add_must_have_statistic('Wall Clock Time')
    parser.add_must_have_statistic('User Time')
    parser.add_must_have_statistic('Time Spent in MP2 Energy Calculation')
    parser.add_must_have_statistic(
        'Time Spent in Restricted Hartree-Fock Calculation')
    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output
    start_time = None
    end_time = None
    mp2_energy_calculation_time = 0.0
    rhf_calculation_time = 0.0
    efficiency = None
    j = 0
    while j < len(lines):

        m = re.search(r'GAMESS VERSION = ([^*]+)', lines[j])
        if m:
            parser.set_parameter("App:Version", m.group(1).strip())

        m = re.search(
            r'PARALLEL VERSION RUNNING ON\s*([\d.]+) PROCESSORS IN\s*([\d.]+) NODE',
            lines[j])
        if m:
            parser.set_parameter("App:NCores", m.group(1).strip())
            parser.set_parameter("App:NNodes", m.group(2).strip())

        m = re.search(r'EXECUTION OF GAMESS BEGUN (.+)', lines[j])
        if m:
            start_time = parser.get_datetime_local(m.group(1).strip())

        m = re.search(r'EXECUTION OF GAMESS TERMINATED NORMALLY (.+)',
                      lines[j])
        if m:
            end_time = parser.get_datetime_local(m.group(1).strip())

        # the STEP CPU TIME is reported on the line following the marker;
        # guard the lookahead so a marker on the last line cannot raise
        # IndexError (previously it could)
        if re.search(r'DONE WITH MP2 ENERGY', lines[j]) and j + 1 < len(lines):
            j += 1
            m = re.search(r'STEP CPU TIME=\s*([\d.]+)', lines[j])
            if m:
                mp2_energy_calculation_time += float(m.group(1).strip())

        if re.search(r'END OF RHF CALCULATION', lines[j]) and j + 1 < len(lines):
            j += 1
            m = re.search(r'STEP CPU TIME=\s*([\d.]+)', lines[j])
            if m:
                rhf_calculation_time += float(m.group(1).strip())

        m = re.search(r'TOTAL WALL CLOCK TIME.+CPU UTILIZATION IS\s+([\d.]+)',
                      lines[j])
        if m:
            efficiency = float(m.group(1).strip())

        j += 1

    if start_time and end_time:
        walltime = total_seconds(end_time - start_time)
        if walltime >= 0.0:
            parser.set_statistic('Wall Clock Time', str(walltime), "Second")
            if efficiency:
                # User Time = CPU utilization (percent) applied to wall time
                parser.set_statistic("User Time",
                                     str((0.01 * efficiency * walltime)),
                                     "Second")

    parser.set_statistic("Time Spent in MP2 Energy Calculation",
                         str(mp2_energy_calculation_time), "Second")
    parser.set_statistic("Time Spent in Restricted Hartree-Fock Calculation",
                         str(rhf_calculation_time), "Second")

    if "attemptsToLaunch" in parser.geninfo:
        parser.set_statistic("Attempts to Launch",
                             parser.geninfo['attemptsToLaunch'])
    else:
        parser.set_statistic("Attempts to Launch", 1)

    if __name__ == "__main__":
        # output for testing purpose
        print(("parsing complete:", parser.parsing_complete()))
        parser.print_params_stats_as_must_have()
        print((parser.get_xml()))

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 10
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """Parse HPCG benchmark output (YAML-report variant) and return XML.

    Extracts the YAML section embedded between the ``.yaml Start``/``End``
    markers in the application stdout, repairs the known formatting quirks
    HPCG produces, and maps the YAML report to app-kernel parameters and
    statistics.

    Args:
        appstdout: path to the application stdout file (HPCG output).
        stdout: path to the batch job stdout file.
        stderr: path to the batch job stderr file.
        geninfo: path to the gen.info file.
        proclog: path to the process log (unused here, kept for interface
            compatibility with the other parsers).
        resource_appker_vars: resource/app-kernel configuration dict.

    Returns:
        The result XML string if parsing is complete, otherwise None
        (decided by parser.get_xml()).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='hpcg',
                                version=1,
                                description="HPCG Benchmark",
                                url='http://www.hpcg-benchmark.org/index.html',
                                measurement_name='HPCG')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Distributed Processes')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Nx')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Ny')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Nz')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Nx')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Ny')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Nz')
    parser.add_must_have_parameter('Input:Number of Coarse Grid Levels')
    parser.add_must_have_parameter('Input:Threads per processes')
    parser.add_must_have_parameter('RunEnv:CPU Speed')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic('Floating-Point Performance, Raw DDOT')
    parser.add_must_have_statistic('Floating-Point Performance, Raw MG')
    parser.add_must_have_statistic('Floating-Point Performance, Raw SpMV')
    parser.add_must_have_statistic('Floating-Point Performance, Raw Total')
    parser.add_must_have_statistic('Floating-Point Performance, Raw WAXPBY')
    parser.add_must_have_statistic('Floating-Point Performance, Total')
    parser.add_must_have_statistic('Memory Bandwidth, Read')
    parser.add_must_have_statistic('Memory Bandwidth, Total')
    parser.add_must_have_statistic('Memory Bandwidth, Write')
    parser.add_must_have_statistic('Setup Time')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # guard against the attribute being present but None (consistent with the
    # other HPCG parser variant)
    if getattr(parser, 'appKerWallClockTime', None) is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # get path to YAML file
    # read data
    lines = []
    if appstdout is not None and os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # Parse YAML lines because YAML is often malformed
    yaml_lines = []
    # get yaml lines from appstdout
    bool_in_yaml_section = False
    for line in lines:
        if re.match(r"^====== .+\.yaml End   ======", line):
            break

        if bool_in_yaml_section:
            yaml_lines.append(line)

        if re.match(r"^====== .+\.yaml Start ======", line):
            bool_in_yaml_section = True

    import yaml
    # fix some issues with yaml: drop trailing non-YAML advisory lines
    # (guard against an empty/missing YAML section to avoid IndexError)
    if yaml_lines and re.search(
            r"After confirmation please upload results from the YAML",
            yaml_lines[-1]):
        yaml_lines.pop()
    if yaml_lines and re.search(r"You have selected the QuickPath option",
                                yaml_lines[-1]):
        yaml_lines.pop()

    yaml_text = "".join(yaml_lines)

    # HPCG mis-indents this one line, which breaks YAML parsing
    yaml_text = re.sub(r"^ {6}HPCG 2\.4 Rating \(for historical value\) is:",
                       "  HPCG 2.4 Rating (for historical value) is:",
                       yaml_text,
                       flags=re.M)

    # safe_load: yaml.load without a Loader is deprecated (PyYAML 5) and a
    # TypeError on PyYAML >= 6; the report contains plain data only
    results_yaml = yaml.safe_load(yaml_text)

    # Set Parameters
    # App version
    app_version_list = []
    for ver in [x for x in results_yaml.keys() if re.search("version", x)]:
        app_version_list.append(ver + " " + str(results_yaml[ver]))
    app_version = ", ".join(app_version_list)
    parser.set_parameter('App:Version', app_version)

    # Problem size
    parser.set_parameter(
        'Input:Number of Coarse Grid Levels',
        results_yaml['Multigrid Information']['Number of coarse grid levels'])

    parser.set_parameter(
        'Input:Global Problem Dimensions Nx',
        results_yaml['Global Problem Dimensions']['Global nx'])
    parser.set_parameter(
        'Input:Global Problem Dimensions Ny',
        results_yaml['Global Problem Dimensions']['Global ny'])
    parser.set_parameter(
        'Input:Global Problem Dimensions Nz',
        results_yaml['Global Problem Dimensions']['Global nz'])

    parser.set_parameter('Input:Local Domain Dimensions Nx',
                         results_yaml['Local Domain Dimensions']['nx'])
    parser.set_parameter('Input:Local Domain Dimensions Ny',
                         results_yaml['Local Domain Dimensions']['ny'])
    parser.set_parameter('Input:Local Domain Dimensions Nz',
                         results_yaml['Local Domain Dimensions']['nz'])

    parser.set_parameter(
        'Input:Distributed Processes',
        results_yaml['Machine Summary']['Distributed Processes'])
    parser.set_parameter(
        'Input:Threads per processes',
        results_yaml['Machine Summary']['Threads per processes'])

    if "cpu_speed" in parser.geninfo:
        # pick the maximal clock frequency reported across all CPUs
        ll = parser.geninfo["cpu_speed"].splitlines()
        cpu_speed_max = 0.0
        for l in ll:
            m = re.search(r'([\d.]+)$', l)
            if m:
                v = float(m.group(1).strip())
                if v > cpu_speed_max:
                    cpu_speed_max = v
        if cpu_speed_max > 0.0:
            parser.set_parameter("RunEnv:CPU Speed", cpu_speed_max, "MHz")

    # Set Statistics
    parser.successfulRun = results_yaml['Reproducibility Information'][
        'Result'] == 'PASSED'

    parser.set_statistic('Setup Time',
                         results_yaml['Setup Information']['Setup Time'],
                         'Seconds')

    parser.set_statistic('Memory Bandwidth, Read',
                         results_yaml['GB/s Summary']['Raw Read B/W'], 'GB/s')
    parser.set_statistic('Memory Bandwidth, Write',
                         results_yaml['GB/s Summary']['Raw Write B/W'], 'GB/s')
    parser.set_statistic('Memory Bandwidth, Total',
                         results_yaml['GB/s Summary']['Raw Total B/W'], 'GB/s')

    parser.set_statistic(
        'Floating-Point Performance, Total',
        results_yaml['__________ Final Summary __________']
        ['HPCG result is VALID with a GFLOP/s rating of'], 'GFLOP/s')

    parser.set_statistic('Floating-Point Performance, Raw DDOT',
                         results_yaml['GFLOP/s Summary']['Raw DDOT'],
                         'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw WAXPBY',
                         results_yaml['GFLOP/s Summary']['Raw WAXPBY'],
                         'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw SpMV',
                         results_yaml['GFLOP/s Summary']['Raw SpMV'],
                         'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw MG',
                         results_yaml['GFLOP/s Summary']['Raw MG'], 'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw Total',
                         results_yaml['GFLOP/s Summary']['Raw Total'],
                         'GFLOP/s')

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 11
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """Parse the output of a bundled (multi-subtask) app-kernel run.

    Queries the AKRR database for the completion status of each subtask
    listed in ``resource_appker_vars['subTasksId']`` and reports the
    aggregate success rate alongside the subtask counts.

    Args:
        appstdout: path to the application stdout file.
        stdout: path to the batch job stdout file.
        stderr: path to the batch job stderr file.
        geninfo: path to the gen.info file.
        proclog: path to the process log (unused here, kept for interface
            compatibility with the other parsers).
        resource_appker_vars: configuration dict; must contain 'subTasksId'
            with the instance ids of the bundled subtasks.

    Returns:
        The result XML string if parsing is complete, otherwise None
        (decided by parser.get_xml()).
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='bundle',
                                version=1,
                                description='bundled tasks',
                                url='https://xdmod.ccr.buffalo.edu',
                                measurement_name='BUNDLE')
    parser.add_must_have_parameter('RunEnv:Nodes')
    parser.add_must_have_statistic('Wall Clock Time')
    parser.add_must_have_statistic("Success Rate")
    parser.add_must_have_statistic("Successful Subtasks")
    parser.add_must_have_statistic("Total Number of Subtasks")

    # set obligatory parameters and statistics
    # set common parameters and statistics

    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # guard against the attribute being present but None
    if getattr(parser, 'wallClockTime', None) is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.wallClockTime), "Second")

    # check the status of subtasks

    # resource_appker_vars['taskId']=self.task_id
    # resource_appker_vars['subTasksId']=self.subTasksId
    success_rate = 0.0
    total_subtasks = 0
    successful_subtasks = 0
    try:
        db, cur = akrr.db.get_akrr_db()

        for subTaskId in resource_appker_vars['subTasksId']:
            cur.execute(
                '''SELECT instance_id,status FROM akrr_xdmod_instanceinfo
                WHERE instance_id=%s ;''', (subTaskId, ))
            raw = cur.fetchall()
            instance_id, status = raw[0]
            # status is 1 for success, 0 for failure
            success_rate += status
            successful_subtasks += status

        success_rate /= len(resource_appker_vars['subTasksId'])
        total_subtasks = len(resource_appker_vars['subTasksId'])
        cur.close()
        del db
    except Exception:
        # best-effort: a DB failure leaves the zero defaults in place, but
        # is logged for diagnosis (never use a bare except here — it would
        # swallow SystemExit/KeyboardInterrupt too)
        print(traceback.format_exc())

    parser.set_statistic("Success Rate", success_rate)
    parser.set_statistic("Successful Subtasks", successful_subtasks)
    parser.set_statistic("Total Number of Subtasks", total_subtasks)
    # if successfulSubtasks==totalSubtasks:

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 12
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """Parse Intel MPI Benchmarks (IMB) output.

    Extracts per-benchmark bandwidth (max) and latency (min) statistics
    from *appstdout* together with common AKRR parameters, and returns the
    resulting XML string when all must-have items were found, otherwise
    ``None``.
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='imb',
                                version=1,
                                description="Intel MPI Benchmarks",
                                url='http://www.intel.com/software/imb',
                                measurement_name='Intel MPI Benchmarks')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:MPI Thread Environment')
    parser.add_must_have_parameter('App:MPI Version')
    parser.add_must_have_parameter('App:Max Message Size')

    parser.add_must_have_statistic('Max Exchange Bandwidth')
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Get' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Get' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Put' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Put' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Get' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Get' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Put' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Put' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic('Max PingPing Bandwidth')
    parser.add_must_have_statistic('Max PingPong Bandwidth')
    parser.add_must_have_statistic('Max SendRecv Bandwidth')
    parser.add_must_have_statistic('Min AllGather Latency')
    parser.add_must_have_statistic('Min AllGatherV Latency')
    parser.add_must_have_statistic('Min AllReduce Latency')
    parser.add_must_have_statistic('Min AllToAll Latency')
    parser.add_must_have_statistic('Min AllToAllV Latency')
    parser.add_must_have_statistic('Min Barrier Latency')
    parser.add_must_have_statistic('Min Broadcast Latency')
    parser.add_must_have_statistic('Min Gather Latency')
    parser.add_must_have_statistic('Min GatherV Latency')
    # parser.add_must_have_statistic("Min MPI-2 'Accumulate' Latency (aggregate)")
    # parser.add_must_have_statistic("Min MPI-2 'Accumulate' Latency (non-aggregate)")
    parser.add_must_have_statistic('Min MPI-2 Window Creation Latency')
    parser.add_must_have_statistic('Min Reduce Latency')
    parser.add_must_have_statistic('Min ReduceScatter Latency')
    parser.add_must_have_statistic('Min Scatter Latency')
    parser.add_must_have_statistic('Min ScatterV Latency')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # Intel MPI benchmark suite contains three classes of benchmarks:
    #
    #  Single-transfer, which needs only 2 processes
    #  Parallel-transfer, which can use as many processes that are available
    #  Collective, which can use as many processes that are available

    # Parameter mapping table: IMB header label ->
    # [AKRR parameter name, units, conversion expression]
    params = {
        "MPI Thread Environment": ["MPI Thread Environment", "", ""],
        "MPI Version": ["MPI Version", "", ""],
        "Maximum message length in bytes":
        ["Max Message Size", "MByte", "<val>/1024/1024"]
    }

    # Result mapping table: IMB benchmark name ->
    # [AKRR statistic name, units, aggregation ("min" or "max")]
    metrics = {
        "PingPing": ["PingPing Bandwidth", "MByte per Second", "max"],
        "PingPong": ["PingPong Bandwidth", "MByte per Second", "max"],
        "Multi-PingPing": ["PingPing Bandwidth", "MByte per Second", "max"],
        "Multi-PingPong": ["PingPong Bandwidth", "MByte per Second", "max"],
        "Sendrecv": ["SendRecv Bandwidth", "MByte per Second", "max"],
        "Exchange": ["Exchange Bandwidth", "MByte per Second", "max"],
        "Allreduce": ["AllReduce Latency", "us", "min"],
        "Reduce": ["Reduce Latency", "us", "min"],
        "Reduce_scatter": ["ReduceScatter Latency", "us", "min"],
        "Allgather": ["AllGather Latency", "us", "min"],
        "Allgatherv": ["AllGatherV Latency", "us", "min"],
        "Gather": ["Gather Latency", "us", "min"],
        "Gatherv": ["GatherV Latency", "us", "min"],
        "Scatter": ["Scatter Latency", "us", "min"],
        "Scatterv": ["ScatterV Latency", "us", "min"],
        "Alltoall": ["AllToAll Latency", "us", "min"],
        "Alltoallv": ["AllToAllV Latency", "us", "min"],
        "Bcast": ["Broadcast Latency", "us", "min"],
        "Barrier": ["Barrier Latency", "us", "min"],
        "Window": ["MPI-2 Window Creation Latency", "us", "min"],
        "Multi-Unidir_Get":
        ["MPI-2 Unidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Multi-Unidir_Put":
        ["MPI-2 Unidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Multi-Bidir_Get":
        ["MPI-2 Bidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Multi-Bidir_Put":
        ["MPI-2 Bidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Unidir_Get":
        ["MPI-2 Unidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Unidir_Put":
        ["MPI-2 Unidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Bidir_Get":
        ["MPI-2 Bidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Bidir_Put":
        ["MPI-2 Bidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Accumulate": ["MPI-2 'Accumulate' Latency", "us", "min"]
    }

    # explicit dispatch for the aggregation names used in the metrics table
    # (previously done via eval() on the table string)
    aggregators = {"min": min, "max": max}

    # read output
    lines = []
    if os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output
    parser.successfulRun = False
    aggregate_mode = None
    metric = None
    j = -1
    while j < len(lines) - 1:
        j += 1
        m = re.search(r'All processes entering MPI_Finalize', lines[j])
        if m:
            parser.successfulRun = True

        m = re.match(r'^# Benchmarking\s+(\S+)', lines[j])
        if m:
            if m.group(1) in metrics:
                metric = m.group(1)
                continue

        m = re.match(r'^#\s+MODE:\s+(\S+)', lines[j])
        if m and metric and aggregate_mode is None:
            aggregate_mode = m.group(1)
            continue

        m = re.match(r'^# (.+): (.+)', lines[j])
        if m:  # benchmark parameters
            param = m.group(1).strip()
            if param in params:
                val = m.group(2).strip()
                v = params[param][2]
                if v.find('<val>') >= 0:
                    # apply the conversion expression from the (local,
                    # trusted) params table, e.g. "<val>/1024/1024"
                    # converts bytes to MBytes
                    val = get_float_or_int(val)
                    val = eval(v.replace('<val>', 'val'))
                parser.set_parameter("App:" + params[param][0],
                                     str(val) + " ", params[param][1])
            continue

        m = re.match(r'^\s+([1-9]\d*)\s+\d+', lines[j])
        if m and metric:  # this effectively skips the first line of result, which has #bytes = 0
            results = []

            while m:
                # tokenize the line, and extract the last column
                numbers = lines[j].split()
                results.append(float(numbers[-1]))

                j += 1
                if j < len(lines):
                    m = re.match(r'^\s+([1-9]\d*)\s+\d+', lines[j])
                    if lines[j].count('IMB_init_buffers_iter') > 0:
                        break
                else:
                    break
            metric_name = metrics[metric][0]
            if aggregate_mode:
                metric_name += " (" + aggregate_mode.lower() + ")"
            if len(results) > 0:
                agg = metrics[metric][2]  # "min" or "max"
                statname = agg.capitalize() + " " + metric_name
                statval = aggregators[agg](results)
                if metrics[metric][1] == 'us':
                    # IMB reports latencies in microseconds; store as seconds
                    parser.set_statistic(statname, statval * 1e-6, "Second")
                else:
                    parser.set_statistic(statname, statval, metrics[metric][1])

            aggregate_mode = None
            metric = None
    if parser.get_parameter("App:MPI Thread Environment") is None:
        parser.set_parameter("App:MPI Thread Environment", "")

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())
    # Print out missing parameters for debug purpose
    parser.parsing_complete(verbose=True)
    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 13
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    """Parse AKRR network check output (ping / wget / scp benchmarks).

    Reads the JSON results block printed between the
    "AKRR Network Check Results:" header and the "Done" marker in
    *appstdout*, computes mean ping latency and transfer bandwidths, and
    returns the resulting XML string when parsing is complete, otherwise
    ``None``.
    """
    # set App Kernel Description
    parser = AppKerOutputParser(name='akrr network check',
                                version=1,
                                description="network benchmarking",
                                url='http://www.xdmod.org',
                                measurement_name='akrr network check')
    # set obligatory parameters and statistics
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_statistic('Ping, Mean')
    parser.add_must_have_statistic('Secure Copy Bandwidth (in), Mean')
    parser.add_must_have_statistic('Secure Copy Bandwidth (out), Mean')
    parser.add_must_have_statistic('WGet Bandwidth, Mean')
    # note: 'Wall Clock Time' was previously added twice; once is enough
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'wallClockTime') and parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.wallClockTime), "Second")
    if hasattr(
            parser,
            'appKerWallClockTime') and parser.appKerWallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # read output
    lines = []
    if os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # locate the JSON results block between the header line and "Done"
    successful_run = False
    start = None
    # bug fix: 'end' was previously unbound, raising NameError below
    # whenever the "Done" marker never appeared in the output
    end = None
    for j, line in enumerate(lines):
        if line.strip() == "AKRR Network Check Results:":
            start = j
        if line.strip() == "Done":
            end = j

    if start is not None and end is not None:
        r = json.loads(" ".join(lines[(start + 1):end]))
        successful_run = True
        if 'ping' in r:
            count = 0
            ping = 0.0
            for v in r['ping'].values():
                if v is None:
                    successful_run = False
                else:
                    ping += float(v['rtt_avg'])
                    count += 1
            # guard against ZeroDivisionError when every probe failed;
            # the missing must-have statistic then marks parsing incomplete
            if count > 0:
                parser.set_statistic("Ping, Mean", ping / count, "ms")
        if 'wget' in r:
            count = 0
            val = 0.0
            for v in r['wget'].values():
                if v is None:
                    successful_run = False
                else:
                    val += float(v['bandwidth'])
                    count += 1
            if count > 0:
                parser.set_statistic("WGet Bandwidth, Mean", val / count,
                                     "MB/s")
        if 'scp' in r:
            count = 0
            val_in = 0.0
            val_out = 0.0
            for v in r['scp'].values():
                if v is None:
                    successful_run = False
                else:
                    val_in += float(v['bandwidth_ingress'])
                    val_out += float(v['bandwidth_egress'])
                    count += 1
            if count > 0:
                parser.set_statistic("Secure Copy Bandwidth (in), Mean",
                                     val_in / count, "MB/s")
                parser.set_statistic("Secure Copy Bandwidth (out), Mean",
                                     val_out / count, "MB/s")

    parser.successfulRun = successful_run

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Exemplo n.º 14
0
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    """Generic app kernel output parser template.

    Sets up an ``AppKerOutputParser`` named after the app kernel from
    *resource_appker_vars* (falling back to 'unknown'), parses the common
    parameters and statistics, records the wall clock time, and returns
    the resulting XML string when parsing is complete, otherwise ``None``.
    """
    # Derive the app kernel name from the resource configuration when present.
    ak_name = 'unknown'
    if resource_appker_vars is not None:
        ak_name = resource_appker_vars.get('app', {}).get('name', 'unknown')

    # initiate parser with obligatory common parameters and statistics
    # (App:ExeBinSignature and RunEnv:Nodes)
    parser = AppKerOutputParser(name=ak_name)
    parser.add_common_must_have_params_and_stats()
    # app kernel custom sets
    # parser.add_must_have_parameter('App:Version')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # record wall clock time when the common parsing step produced one
    try:
        wall_time = parser.appKerWallClockTime
    except AttributeError:
        pass
    else:
        parser.set_statistic("Wall Clock Time", total_seconds(wall_time),
                             "Second")

    # Custom per-app-kernel parsing of appstdout (parameters, statistics,
    # success detection) would go here.

    if __name__ == "__main__":
        # output for testing purpose
        print("Parsing complete:", parser.parsing_complete(verbose=True))
        print("Following statistics and parameter can be set as obligatory:")
        parser.print_params_stats_as_must_have()
        print("\nResulting XML:")
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()