Example #1
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='gromacs_micro',
        version=1,
        description="GROMACS: micro-benchmark for testing purposes",
        url='http://www.gromacs.org/',
        measurement_name='GROMACS-Micro')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')

    parser.add_must_have_statistic('Simulation Speed')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    successful_run = False
    j = 0
    while j < len(lines):
        m = re.search(r'^GROMACS:\s+ gmx mdrun, version\s+(\S+)$', lines[j])
        if m:
            parser.set_parameter("App:Version", m.group(1))

        m = re.search(r'^Performance: \s+([0-9.]+)', lines[j])
        if m:
            parser.set_statistic("Simulation Speed", float(m.group(1)),
                                 "ns/day")

        m = re.search(r'^ \s+Time: \s+([0-9.]+) \s+([0-9.]+)', lines[j])
        if m:
            parser.set_statistic("Wall Clock Time", m.group(2), "Second")
            parser.set_statistic("Core Clock Time", m.group(1), "Second")

        m = re.match(r'^GROMACS reminds you', lines[j])
        if m:
            successful_run = True

        j += 1

    parser.successfulRun = successful_run

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
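
A minimal standalone driver for the example above (a sketch: it assumes the surrounding module already provides the os, re, and AppKerOutputParser imports the function relies on):

if __name__ == "__main__":
    import sys

    # Parse a saved GROMACS appstdout file and print the resulting XML.
    xml = process_appker_output(appstdout=sys.argv[1])
    print(xml if xml else "parsing incomplete")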
Example #2
def process_appker_output(appstdout=None, stdout=None, stderr=None, geninfo=None, proclog=None, 
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='amber',
        version=1,
        description="Amber: Assisted Model Building with Energy Refinement",
        url='http://ambermd.org',
        measurement_name='Amber'
    )
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Coordinate File')
    parser.add_must_have_parameter('Input:Number of Angles')
    parser.add_must_have_parameter('Input:Number of Atoms')
    parser.add_must_have_parameter('Input:Number of Bonds')
    parser.add_must_have_parameter('Input:Number of Dihedrals')
    parser.add_must_have_parameter('Input:Number of Steps')
    parser.add_must_have_parameter('Input:Structure File')
    parser.add_must_have_parameter('Input:Timestep')

    parser.add_must_have_statistic('Molecular Dynamics Simulation Performance')
    parser.add_must_have_statistic('Time Spent in Direct Force Calculation')
    parser.add_must_have_statistic('Time Spent in Non-Bond List Regeneration')
    parser.add_must_have_statistic('Time Spent in Reciprocal Force Calculation')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo, resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    num_steps = 0
    step_size = 0
    j = 0
    while j < len(lines):

        m = re.search(r'Amber\s+([0-9a-zA-Z]+)\s+SANDER\s+20[0-9]+', lines[j])
        if m:
            parser.set_parameter("App:Version", "SANDER " + m.group(1))

        m = re.match(r'^\|\s+PMEMD implementation of SANDER, Release\s+([0-9.]+)', lines[j])
        if m:
            parser.set_parameter("App:Version", "PMEMD " + m.group(1))

        m = re.match(r'^\|\s+INPCRD:\s+(\S+)', lines[j])
        if m:
            parser.set_parameter("Input:Coordinate File", m.group(1))

        m = re.match(r'^\|\s+PARM:\s+(\S+)', lines[j])
        if m:
            parser.set_parameter("Input:Structure File", m.group(1))

        if re.search(r'CONTROL\s+DATA\s+FOR\s+THE\s+RUN', lines[j]):
            j += 2
            for k in range(256):
                if re.match(r'^-----------------------------', lines[j]):
                    break

                m = re.search(r'nstlim\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_steps = int(m.group(1).strip())
                    parser.set_parameter("Input:Number of Steps", num_steps)

                m = re.search(r'dt\s+=\s+([0-9.]+)', lines[j])
                if m:
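                    # Amber's dt is given in picoseconds: 1000 * dt is the step
                    # in femtoseconds, and the 1e-15 factor below converts that
                    # count to seconds.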
                    step_size = 1000.0 * float(m.group(1).strip())
                    parser.set_parameter("Input:Timestep", step_size * 1e-15, "Second per Step")

                j += 1

        if re.search(r'RESOURCE\s+USE', lines[j]):
            j += 2
            num_bonds = 0
            num_angles = 0
            num_dihedrals = 0
            for k in range(256):
                if re.match(r'^-----------------------------', lines[j]):
                    break

                m = re.search(r'NATOM\s+=\s+([0-9]+)', lines[j])
                if m:
                    parser.set_parameter("Input:Number of Atoms", m.group(1).strip())

                m = re.search(r'NBONH\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_bonds += int(m.group(1).strip())

                m = re.search(r'NBONA\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_bonds += int(m.group(1).strip())

                m = re.search(r'NTHETH\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_angles += int(m.group(1).strip())

                m = re.search(r'NTHETA\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_angles += int(m.group(1).strip())

                m = re.search(r'NPHIH\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_dihedrals += int(m.group(1).strip())

                m = re.search(r'NPHIA\s+=\s+([0-9]+)', lines[j])
                if m:
                    num_dihedrals += int(m.group(1).strip())

                j += 1

            if num_bonds > 0:
                parser.set_parameter("Input:Number of Bonds", num_bonds)
            if num_angles > 0:
                parser.set_parameter("Input:Number of Angles", num_angles)
            if num_dihedrals > 0:
                parser.set_parameter("Input:Number of Dihedrals", num_dihedrals)

        if re.search(r'PME Nonbond Pairlist CPU Time', lines[j]):
            j += 2
            for k in range(20):
                m = re.search(r'Total\s+([\d.]+)', lines[j])
                if m:
                    parser.set_statistic("Time Spent in Non-Bond List Regeneration", m.group(1), "Second")
                    break
                j += 1
        if re.search(r'PME Direct Force CPU Time', lines[j]):
            j += 2
            for k in range(20):
                m = re.search(r'Total\s+([\d.]+)', lines[j])
                if m:
                    parser.set_statistic("Time Spent in Direct Force Calculation", m.group(1), "Second")
                    break
                j += 1
        if re.search(r'PME Reciprocal Force CPU Time', lines[j]):
            j += 2
            for k in range(20):
                m = re.search(r'Total\s+([\d.]+)', lines[j])
                if m:
                    parser.set_statistic("Time Spent in Reciprocal Force Calculation", m.group(1), "Second")
                    break
                j += 1
        m = re.match(r'^\|\s+Master Total wall time:\s+([0-9.]+)\s+seconds', lines[j])
        if m:
            parser.set_statistic("Wall Clock Time", m.group(1), "Second")
            parser.successfulRun = True

            # calculate the performance
            simulation_time = step_size * num_steps * 0.000001  # measured in nanoseconds
            if simulation_time > 0.0:
                parser.set_statistic("Molecular Dynamics Simulation Performance",
                                     1.e-9 * simulation_time / (float(m.group(1)) / 86400.0), "Second per Day")

        j += 1

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Example #3
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    # initiate parser
    parser = AppKerOutputParser(name='mdtest')
    # set obligatory parameters and statistics
    # set common parameters and statistics (App:ExeBinSignature and RunEnv:Nodes)
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_parameter('Arguments (single directory per process)')
    parser.add_must_have_parameter('Arguments (single directory)')
    parser.add_must_have_parameter(
        'Arguments (single tree directory per process)')
    parser.add_must_have_parameter('Arguments (single tree directory)')
    parser.add_must_have_parameter(
        'files/directories (single directory per process)')
    parser.add_must_have_parameter('files/directories (single directory)')
    parser.add_must_have_parameter(
        'files/directories (single tree directory per process)')
    parser.add_must_have_parameter('files/directories (single tree directory)')
    parser.add_must_have_parameter('tasks (single directory per process)')
    parser.add_must_have_parameter('tasks (single directory)')
    parser.add_must_have_parameter('tasks (single tree directory per process)')
    parser.add_must_have_parameter('tasks (single tree directory)')

    parser.add_must_have_statistic(
        'Directory creation (single directory per process)')
    parser.add_must_have_statistic('Directory creation (single directory)')
    parser.add_must_have_statistic(
        'Directory creation (single tree directory per process)')
    parser.add_must_have_statistic(
        'Directory creation (single tree directory)')
    parser.add_must_have_statistic(
        'Directory removal (single directory per process)')
    parser.add_must_have_statistic('Directory removal (single directory)')
    parser.add_must_have_statistic(
        'Directory removal (single tree directory per process)')
    parser.add_must_have_statistic('Directory removal (single tree directory)')
    parser.add_must_have_statistic(
        'Directory stat (single directory per process)')
    parser.add_must_have_statistic('Directory stat (single directory)')
    parser.add_must_have_statistic(
        'Directory stat (single tree directory per process)')
    parser.add_must_have_statistic('Directory stat (single tree directory)')
    parser.add_must_have_statistic(
        'File creation (single directory per process)')
    parser.add_must_have_statistic('File creation (single directory)')
    parser.add_must_have_statistic(
        'File creation (single tree directory per process)')
    parser.add_must_have_statistic('File creation (single tree directory)')
    parser.add_must_have_statistic('File read (single directory per process)')
    parser.add_must_have_statistic('File read (single directory)')
    parser.add_must_have_statistic(
        'File read (single tree directory per process)')
    parser.add_must_have_statistic('File read (single tree directory)')
    parser.add_must_have_statistic(
        'File removal (single directory per process)')
    parser.add_must_have_statistic('File removal (single directory)')
    parser.add_must_have_statistic(
        'File removal (single tree directory per process)')
    parser.add_must_have_statistic('File removal (single tree directory)')
    parser.add_must_have_statistic('File stat (single directory per process)')
    parser.add_must_have_statistic('File stat (single directory)')
    parser.add_must_have_statistic(
        'File stat (single tree directory per process)')
    parser.add_must_have_statistic('File stat (single tree directory)')
    parser.add_must_have_statistic(
        'Tree creation (single directory per process)')
    parser.add_must_have_statistic('Tree creation (single directory)')
    parser.add_must_have_statistic(
        'Tree creation (single tree directory per process)')
    parser.add_must_have_statistic('Tree creation (single tree directory)')
    parser.add_must_have_statistic(
        'Tree removal (single directory per process)')
    parser.add_must_have_statistic('Tree removal (single directory)')
    parser.add_must_have_statistic(
        'Tree removal (single tree directory per process)')
    parser.add_must_have_statistic('Tree removal (single tree directory)')

    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # Here can be custom output parsing
    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    testname = ""
    parser.successfulRun = False
    j = 0
    while j < len(lines):
        m = re.match(r'^#Testing (.+)', lines[j])
        if m:
            testname = " (" + m.group(1).strip() + ")"
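            # The captured label (e.g. "single directory") is wrapped in
            # parentheses and appended to the statistic and parameter names
            # set below, so each mdtest run mode gets distinctly named metrics.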

        m = re.match(r'^SUMMARY.*:', lines[j])
        if m:
            j = j + 3
            while j < len(lines):
                m = re.match(
                    r'([A-Za-z0-9 ]+):\s+[0-9.]+\s+[0-9.]+\s+([0-9.]+)\s+([0-9.]+)',
                    lines[j])
                if m:
                    parser.set_statistic(
                        m.group(1).strip() + testname, m.group(2),
                        "Operations/Second")
                else:
                    break
                j = j + 1
        m = re.search(r'finished at', lines[j])
        if m:
            parser.successfulRun = True

        m = re.match(r'^Command line used:.+mdtest\s+(.+)', lines[j])

        if m:
            parser.set_parameter("Arguments" + testname, m.group(1).strip())
        m = re.search(r'([0-9]+) tasks, ([0-9]+) files/directories', lines[j])
        if m:
            parser.set_parameter("tasks" + testname, m.group(1).strip())
            parser.set_parameter("files/directories" + testname,
                                 m.group(2).strip())
        j = j + 1

    if __name__ == "__main__":
        # output for testing purpose
        print("Parsing complete:", parser.parsing_complete(verbose=True))
        print("Following statistics and parameter can be set as obligatory:")
        parser.print_params_stats_as_must_have()
        print("\nResulting XML:")
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
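
Several of these examples call total_seconds, a helper from the shared AKRR parser module that is not shown here. A minimal sketch of an equivalent, assuming the argument is a datetime.timedelta (the real helper may differ):

import datetime

def total_seconds(td):
    # Return the duration in seconds; plain numbers pass through unchanged.
    if isinstance(td, datetime.timedelta):
        return td.days * 86400 + td.seconds + td.microseconds / 1e6
    return float(td)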
Example #4
def process_appker_output(appstdout=None, stdout=None, stderr=None, geninfo=None, resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='graph500',
        version=1,
        description="Graph500 Benchmark",
        url='http://www.Graph500.org',
        measurement_name='Graph500'
    )
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Edge Factor')
    parser.add_must_have_parameter('Input File')
    parser.add_must_have_parameter('Number of Roots to Check')
    parser.add_must_have_parameter('Number of Edges')
    parser.add_must_have_parameter('Number of Vertices')
    parser.add_must_have_parameter('Scale')

    parser.add_must_have_statistic('Harmonic Mean TEPS')
    parser.add_must_have_statistic('Harmonic Standard Deviation TEPS')
    parser.add_must_have_statistic('Median TEPS')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo, resource_appker_vars)

    if parser.appKerWallClockTime is not None:
        parser.set_statistic("Wall Clock Time", total_seconds(parser.appKerWallClockTime), "Second")
    elif parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time", total_seconds(parser.wallClockTime), "Second")

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    num_of_errors = 0
    j = 0
    while j < len(lines):
        m = re.match(r'^Graph500 version:\s+(.+)', lines[j])
        if m:
            parser.set_parameter("App:Version", m.group(1).strip())

        m = re.match(r'ERROR:\s+(.+)', lines[j])
        if m:
            num_of_errors += 1

        m = re.match(r'^Reading input from\s+(.+)', lines[j])
        if m:
            parser.set_parameter("Input File", m.group(1))

        m = re.match(r'^SCALE:\s+(\d+)', lines[j])
        if m:
            parser.set_parameter("Scale", m.group(1))

        m = re.match(r'^edgefactor:\s+(\d+)', lines[j])
        if m:
            parser.set_parameter("Edge Factor", m.group(1))

        m = re.match(r'^NBFS:\s+(\d+)', lines[j])
        if m:
            parser.set_parameter("Number of Roots to Check", m.group(1))

        m = re.match(r'^median_TEPS:\s+(\d[0-9.e+]+)', lines[j])
        if m:
            parser.set_statistic("Median TEPS", m.group(1), "Traversed Edges Per Second")

        m = re.match(r'^harmonic_mean_TEPS:\s+(\d[0-9.e+]+)', lines[j])
        if m:
            parser.successfulRun = True
            parser.set_statistic("Harmonic Mean TEPS", m.group(1), "Traversed Edges Per Second")

        m = re.match(r'^harmonic_stddev_TEPS:\s+(\d[0-9.e+]+)', lines[j])
        if m:
            parser.set_statistic("Harmonic Standard Deviation TEPS", m.group(1), "Traversed Edges Per Second")

        m = re.match(r'^median_validate:\s+([\d.]+)\s+s', lines[j])
        if m:
            parser.set_statistic("Median Validation Time", m.group(1), "Second")

        m = re.match(r'^mean_validate:\s+([\d.]+)\s+s', lines[j])
        if m:
            parser.set_statistic("Mean Validation Time", m.group(1), "Second")

        m = re.match(r'^stddev_validate:\s+([\d.]+)\s+s', lines[j])
        if m:
            parser.set_statistic("Standard Deviation Validation Time", m.group(1), "Second")

        j += 1

    if num_of_errors > 0:
        parser.successfulRun = False

    if parser.get_parameter('Scale') is not None and parser.get_parameter('Edge Factor') is not None:
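        # Graph500 convention: the graph has N = 2**SCALE vertices and
        # M = edgefactor * N edges, which the two parameters below encode.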
        scale = int(parser.get_parameter('Scale'))
        edgefactor = int(parser.get_parameter('Edge Factor'))
        parser.set_parameter("Number of Vertices", 2 ** scale)
        parser.set_parameter("Number of Edges", edgefactor * 2 ** scale)

    if __name__ == "__main__":
        # output for testing purpose
        parser.parsing_complete(True)
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Example #5
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='mpi-tile-io',
        version=1,
        description="MPI-Tile-IO Benchmark",
        url='http://www.mcs.anl.gov/research/projects/pio-benchmark',
        measurement_name='MPI-Tile-IO')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('2D Collective Read Test File System')
    parser.add_must_have_parameter('2D Collective Write Test File System')
    parser.add_must_have_parameter('2D HDF5 Collective Read Test File System')
    parser.add_must_have_parameter('2D HDF5 Collective Write Test File System')
    parser.add_must_have_parameter('2D Independent Read Test File System')
    parser.add_must_have_parameter('2D Independent Write Test File System')
    parser.add_must_have_parameter('2D Per-Process Data Topology')
    parser.add_must_have_parameter('2D Per-Process Ghost Zone')
    parser.add_must_have_parameter('2D Per-Process Memory')
    parser.add_must_have_parameter('2D Process Topology')
    parser.add_must_have_parameter('3D Collective Read Test File System')
    parser.add_must_have_parameter('3D Collective Write Test File System')
    parser.add_must_have_parameter('3D HDF5 Collective Read Test File System')
    parser.add_must_have_parameter('3D HDF5 Collective Write Test File System')
    parser.add_must_have_parameter('3D Independent Read Test File System')
    parser.add_must_have_parameter('3D Independent Write Test File System')
    parser.add_must_have_parameter('3D Per-Process Data Topology')
    parser.add_must_have_parameter('3D Per-Process Ghost Zone')
    parser.add_must_have_parameter('3D Per-Process Memory')
    parser.add_must_have_parameter('3D Process Topology')
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_parameter('HDF Version')
    # parser.add_must_have_parameter('MPI-IO Hints')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic(
        '2D Array Collective Read Aggregate Throughput')
    parser.add_must_have_statistic(
        '2D Array Collective Write Aggregate Throughput')
    parser.add_must_have_statistic(
        '2D Array HDF5 Collective Read Aggregate Throughput')
    parser.add_must_have_statistic(
        '2D Array HDF5 Collective Write Aggregate Throughput')
    parser.add_must_have_statistic(
        '2D Array Independent Read Aggregate Throughput')
    parser.add_must_have_statistic(
        '2D Array Independent Write Aggregate Throughput')
    parser.add_must_have_statistic(
        '3D Array Collective Read Aggregate Throughput')
    parser.add_must_have_statistic(
        '3D Array Collective Write Aggregate Throughput')
    parser.add_must_have_statistic(
        '3D Array HDF5 Collective Read Aggregate Throughput')
    parser.add_must_have_statistic(
        '3D Array HDF5 Collective Write Aggregate Throughput')
    parser.add_must_have_statistic(
        '3D Array Independent Read Aggregate Throughput')
    parser.add_must_have_statistic(
        '3D Array Independent Write Aggregate Throughput')
    parser.add_must_have_statistic('File Close Time (2D Data Collective Read)')
    parser.add_must_have_statistic(
        'File Close Time (2D Data Collective Write)')
    parser.add_must_have_statistic(
        'File Close Time (2D Data HDF5 Collective Read)')
    parser.add_must_have_statistic(
        'File Close Time (2D Data HDF5 Collective Write)')
    parser.add_must_have_statistic(
        'File Close Time (2D Data Independent Read)')
    parser.add_must_have_statistic(
        'File Close Time (2D Data Independent Write)')
    parser.add_must_have_statistic('File Close Time (3D Data Collective Read)')
    parser.add_must_have_statistic(
        'File Close Time (3D Data Collective Write)')
    parser.add_must_have_statistic(
        'File Close Time (3D Data HDF5 Collective Read)')
    parser.add_must_have_statistic(
        'File Close Time (3D Data HDF5 Collective Write)')
    parser.add_must_have_statistic(
        'File Close Time (3D Data Independent Read)')
    parser.add_must_have_statistic(
        'File Close Time (3D Data Independent Write)')
    parser.add_must_have_statistic('File Open Time (2D Data Collective Read)')
    parser.add_must_have_statistic('File Open Time (2D Data Collective Write)')
    parser.add_must_have_statistic(
        'File Open Time (2D Data HDF5 Collective Read)')
    parser.add_must_have_statistic(
        'File Open Time (2D Data HDF5 Collective Write)')
    parser.add_must_have_statistic('File Open Time (2D Data Independent Read)')
    parser.add_must_have_statistic(
        'File Open Time (2D Data Independent Write)')
    parser.add_must_have_statistic('File Open Time (3D Data Collective Read)')
    parser.add_must_have_statistic('File Open Time (3D Data Collective Write)')
    parser.add_must_have_statistic(
        'File Open Time (3D Data HDF5 Collective Read)')
    parser.add_must_have_statistic(
        'File Open Time (3D Data HDF5 Collective Write)')
    parser.add_must_have_statistic('File Open Time (3D Data Independent Read)')
    parser.add_must_have_statistic(
        'File Open Time (3D Data Independent Write)')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time",
                             parser.appKerWallClockTime.total_seconds(),
                             "Second")

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    # The parameter/result mapping table
    pm = {
        "processesTopology": {
            're': r"^# processes topology:(.+)",
            'refun': re.match,
            'val': None
        },
        "localDatasetTopology": {
            're': r"^# local dataset topology:(.+)element",
            'refun': re.match,
            'val': None
        },
        "localMemoryUsage": {
            're': r"^# local dataset memory usage:(.+)byte",
            'refun': re.match,
            'val': None
        },
        "datasetGhostZone": {
            're': r"^# local dataset ghost zone:(.+)",
            'refun': re.match,
            'val': None
        },
        "mpiIOhints": {
            're': r"^# mpiio hints:(.+)",
            'refun': re.match,
            'val': None
        },
        "maxFileOpenTime": {
            're': r"^# Open:.+?max=(\S+)",
            'refun': re.match,
            'val': None
        },
        "maxFileCloseTime": {
            're': r"^# Close:.+?max=(\S+)",
            'refun': re.match,
            'val': None
        },
        "collectiveIO": {
            're': r"^# collective IO:(.+)",
            'refun': re.match,
            'val': None
        },
        "testFileName": {
            're': r"^# filename:(.+)",
            'refun': re.match,
            'val': None
        },
        "fileSystem": {
            're': r"^# filesystem:(.+)",
            'refun': re.match,
            'val': None
        },
        "hdf5Version": {
            're': r"^# HDF5 Version:(.+)",
            'refun': re.match,
            'val': None
        },
    }
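    # Each entry above pairs a regex and the re function to apply with the most
    # recently captured value; values are collected line by line and reset after
    # each "# ... bandwidth:" summary line so the next test section starts clean.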

    parser.successfulRun = False
    j = 0
    while j < len(lines):
        for k, v in pm.items():
            m = v['refun'](v['re'], lines[j])
            if m:
                v['val'] = m.group(1).strip()

        m = re.match(r'^# (.+?)bandwidth:(.+)bytes', lines[j])
        if m:
            read_or_write = m.group(1).strip()
            io_bandwidth = m.group(2).strip()

            # can output data ?
            if pm['processesTopology']['val'] and pm['collectiveIO']['val']:
                # construct the label
                label = ''
                dim = '2D'
                m = re.search(r'\d+x\d+x\d', pm['processesTopology']['val'])
                if m:
                    dim = '3D'

                if pm['hdf5Version']['val']:
                    label += 'HDF5 '
                    parser.set_parameter("HDF Version",
                                         pm['hdf5Version']['val'])

                m = re.search(r'yes', pm['collectiveIO']['val'], re.I)
                if m:
                    label += 'Collective '
                else:
                    label += 'Independent '

                m0 = re.search(r'read', read_or_write, re.I)
                m1 = re.search(r'write', read_or_write, re.I)
                if m0:
                    label += 'Read'
                elif m1:
                    label += 'Write'
                else:
                    label += read_or_write[0].upper() + read_or_write[1:]

                parser.set_statistic(
                    "%s Array %s Aggregate Throughput" % (dim, label),
                    "%.2f" % (float(io_bandwidth) / 1024.0 / 1024.0),
                    "MByte per Second")
                if pm["maxFileOpenTime"]['val']:
                    parser.set_statistic(
                        "File Open Time (%s Data %s)" % (dim, label),
                        pm["maxFileOpenTime"]['val'], "Second")
                if pm["maxFileCloseTime"]['val']:
                    parser.set_statistic(
                        "File Close Time (%s Data %s)" % (dim, label),
                        pm["maxFileCloseTime"]['val'], "Second")

                parser.set_parameter("%s Process Topology" % (dim, ),
                                     pm["processesTopology"]['val'])
                if pm["localMemoryUsage"]['val']:
                    parser.set_parameter(
                        "%s Per-Process Memory" % (dim, ),
                        float(pm["localMemoryUsage"]['val']) / 1024.0 / 1024.0,
                        "MByte")
                if pm["localDatasetTopology"]['val']:
                    parser.set_parameter(
                        "%s Per-Process Data Topology" % (dim, ),
                        pm["localDatasetTopology"]['val'], "Element")
                if pm["datasetGhostZone"]['val']:
                    parser.set_parameter("%s Per-Process Ghost Zone" % (dim, ),
                                         pm["datasetGhostZone"]['val'])
                if pm["mpiIOhints"]['val']:
                    parser.set_parameter("MPI-IO Hints",
                                         pm["mpiIOhints"]['val'])
                # $benchmark->set_parameter( "${dim} ${label} Test File", $testFileName ) if( defined($testFileName) )
                if pm["fileSystem"]['val']:
                    parser.set_parameter(
                        "%s %s Test File System" % (dim, label),
                        pm["fileSystem"]['val'])
                parser.successfulRun = True

                pm["processesTopology"]['val'] = None
                pm["localDatasetTopology"]['val'] = None
                pm["localMemoryUsage"]['val'] = None
                pm["datasetGhostZone"]['val'] = None
                pm["mpiIOhints"]['val'] = None
                # pm["readOrWrite"]['val']=None
                pm["collectiveIO"]['val'] = None
                # pm["IObandwidth"]['val']=None
                pm["maxFileOpenTime"]['val'] = None
                pm["maxFileCloseTime"]['val'] = None
                pm["testFileName"]['val'] = None
                pm["fileSystem"]['val'] = None
                pm["hdf5Version"]['val'] = None
        j += 1

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())
    # Print out missing parameters for debugging purposes
    parser.parsing_complete(verbose=True)
    # return complete XML otherwise return None
    return parser.get_xml()
Example #6
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(name='hpcc',
                                version=1,
                                description="HPC Challenge Benchmarks",
                                url='http://icl.cs.utk.edu/hpcc/',
                                measurement_name='xdmod.benchmark.hpcc')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:DGEMM Problem Size')
    parser.add_must_have_parameter('Input:High Performance LINPACK Grid Cols')
    parser.add_must_have_parameter('Input:High Performance LINPACK Grid Rows')
    parser.add_must_have_parameter(
        'Input:High Performance LINPACK Problem Size')
    parser.add_must_have_parameter('Input:MPI Ranks')
    parser.add_must_have_parameter('Input:MPIRandom Problem Size')
    parser.add_must_have_parameter('Input:OpenMP Threads')
    parser.add_must_have_parameter('Input:PTRANS Problem Size')
    parser.add_must_have_parameter('Input:STREAM Array Size')
    parser.add_must_have_parameter('RunEnv:CPU Speed')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic(
        'Average Double-Precision General Matrix Multiplication (DGEMM) Floating-Point Performance'
    )
    parser.add_must_have_statistic("Average STREAM 'Add' Memory Bandwidth")
    parser.add_must_have_statistic("Average STREAM 'Copy' Memory Bandwidth")
    parser.add_must_have_statistic("Average STREAM 'Scale' Memory Bandwidth")
    parser.add_must_have_statistic("Average STREAM 'Triad' Memory Bandwidth")
    parser.add_must_have_statistic(
        'Fast Fourier Transform (FFTW) Floating-Point Performance')
    parser.add_must_have_statistic('High Performance LINPACK Efficiency')
    parser.add_must_have_statistic(
        'High Performance LINPACK Floating-Point Performance')
    parser.add_must_have_statistic('High Performance LINPACK Run Time')
    parser.add_must_have_statistic('MPI Random Access')
    parser.add_must_have_statistic('Parallel Matrix Transpose (PTRANS)')
    parser.add_must_have_statistic('Wall Clock Time')
    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if parser.appKerWallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # The parameters mapping table
    params = {
        "CommWorldProcs": ["MPI Ranks", "", ""],
        "HPL_N": ["High Performance LINPACK Problem Size", "", ""],
        "HPL_nprow": ["High Performance LINPACK Grid Rows", "", ""],
        "HPL_npcol": ["High Performance LINPACK Grid Cols", "", ""],
        "PTRANS_n": ["PTRANS Problem Size", "", ""],
        "MPIRandomAccess_N":
        ["MPIRandom Problem Size", "MWord", "val/1024/1024"],
        "STREAM_VectorSize": ["STREAM Array Size", "MWord", ""],
        "DGEMM_N": ["DGEMM Problem Size", "", ""],
        "omp_get_num_threads": ["OpenMP Threads", "", ""],
    }
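    # Each entry maps an HPCC summary key to [name, units, conversion]; when a
    # conversion expression is present it is eval()'ed with the raw value bound
    # to the variable val (see the loops below).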

    # The result mapping table
    metrics = {
        "HPL_Tflops": [
            "High Performance LINPACK Floating-Point Performance",
            "MFLOP per Second", "val*1e6"
        ],
        "HPL_time": ["High Performance LINPACK Run Time", "Second", ""],
        "PTRANS_GBs":
        ["Parallel Matrix Transpose (PTRANS)", "MByte per Second", "val*1024"],
        "MPIRandomAccess_GUPs":
        ["MPI Random Access", "MUpdate per Second", "val*1000"],
        "MPIFFT_Gflops": [
            "Fast Fourier Transform (FFTW) Floating-Point Performance",
            "MFLOP per Second", "val*1000"
        ],
        "StarDGEMM_Gflops": [
            "Average Double-Precision General Matrix Multiplication (DGEMM) Floating-Point Performance",
            "MFLOP per Second", "val*1000"
        ],
        "StarSTREAM_Copy": [
            "Average STREAM 'Copy' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ],
        "StarSTREAM_Scale": [
            "Average STREAM 'Scale' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ],
        "StarSTREAM_Add": [
            "Average STREAM 'Add' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ],
        "StarSTREAM_Triad": [
            "Average STREAM 'Triad' Memory Bandwidth", "MByte per Second",
            "val*1024"
        ]
    }

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    result_begin = None
    hpl_tflops = None
    num_cores = None

    values = {}
    j = -1
    while j < len(lines) - 1:
        j += 1
        m = re.search(r'End of HPC Challenge tests', lines[j])
        if m:
            parser.successfulRun = True

        m = re.match(r'^Begin of Summary section', lines[j])
        if m:
            result_begin = 1
            continue

        m = re.match(r'^(\w+)=([\w.]+)', lines[j])
        if m and result_begin:
            metric_name = m.group(1).strip()
            values[metric_name] = m.group(2).strip()
            if metric_name == "HPL_Tflops":
                hpl_tflops = float(values[metric_name])
            if metric_name == "CommWorldProcs":
                num_cores = int(values[metric_name])
        m = re.match(r'^Running on ([0-9.]+) processors', lines[j])
        if m:
            num_cores = int(m.group(1).strip())

    if hpl_tflops is None or num_cores is None:
        parser.successfulRun = False

    hpcc_version = None
    mhz = None
    theoretical_gflops = None

    if "VersionMajor" in values and "VersionMinor" in values and "VersionMicro" in values:
        hpcc_version = values["VersionMajor"] + "." + values[
            "VersionMinor"] + "." + values["VersionMicro"]
    if "VersionRelease" in values:
        hpcc_version += values["VersionRelease"]
    if hpcc_version:
        parser.set_parameter("App:Version", hpcc_version)

    for k, v in params.items():
        if k not in values:
            continue
        val = values[k]
        if v[2].find('val') >= 0:
            # if a conversion formula is given, bind the raw value to val first, then eval the formula
            val = get_float_or_int(values[k])
            val = eval(v[2])
        units = v[1] if v[1] != "" else None
        parser.set_parameter("Input:" + v[0], val, units)

    for k, v in metrics.items():
        if k not in values:
            continue
        val = values[k]
        if v[2].find('val') >= 0:
            # if a conversion formula is given, bind the raw value to val first, then eval the formula
            val = get_float_or_int(values[k])
            val = eval(v[2])
        units = v[1] if v[1] != "" else None
        parser.set_statistic(v[0], val, units)

    if "cpu_speed" in parser.geninfo:
        ll = parser.geninfo["cpu_speed"].splitlines()
        cpu_speed_max = 0.0
        for l in ll:
            m = re.search(r'([\d.]+)$', l)
            if m:
                v = float(m.group(1).strip())
                if v > cpu_speed_max:
                    cpu_speed_max = v
        if cpu_speed_max > 0.0:
            parser.set_parameter("RunEnv:CPU Speed", cpu_speed_max, "MHz")
            mhz = cpu_speed_max

    if resource_appker_vars is not None:
        if 'resource' in resource_appker_vars and 'app' in resource_appker_vars:
            if 'theoreticalGFlopsPerCore' in resource_appker_vars['app']:
                resname = resource_appker_vars['resource']['name']
                if resname in resource_appker_vars['app'][
                        'theoreticalGFlopsPerCore']:
                    theoretical_gflops = resource_appker_vars['app'][
                        'theoreticalGFlopsPerCore'][resname] * num_cores
                    print("theoreticalGFlops", resname, theoretical_gflops)

    if theoretical_gflops is None and mhz is not None and num_cores is not None:
        # Most modern x86 & POWER processors are superscalar and can issue 4 instructions per cycle,
        # so peak GFLOPS is estimated as MHz * cores * 4 / 1000
        theoretical_gflops = mhz * num_cores * 4 / 1000.0
    if theoretical_gflops and hpl_tflops:
        # Convert both to GFlops and derive the Efficiency
        percent = (1000.0 * hpl_tflops / theoretical_gflops) * 100.0
        parser.set_statistic("High Performance LINPACK Efficiency",
                             "%.3f" % percent, "Percent")

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
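
The two loops above call get_float_or_int, another helper from the shared parser module that is not shown in these examples. A minimal sketch of an equivalent (the real helper may differ):

def get_float_or_int(s):
    # Parse a numeric string as int when possible, otherwise as float.
    try:
        return int(s)
    except ValueError:
        return float(s)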
Example #7
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='namd',
        version=1,
        description="NAMD: Scalable Molecular Dynamics Package",
        url='http://www.ks.uiuc.edu/Research/namd/',
        measurement_name='NAMD')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Coordinate File')
    parser.add_must_have_parameter('Input:Number of Angles')
    parser.add_must_have_parameter('Input:Number of Atoms')
    parser.add_must_have_parameter('Input:Number of Bonds')
    parser.add_must_have_parameter('Input:Number of Dihedrals')
    parser.add_must_have_parameter('Input:Number of Steps')
    parser.add_must_have_parameter('Input:Structure File')
    parser.add_must_have_parameter('Input:Timestep')

    parser.add_must_have_statistic('Memory')
    parser.add_must_have_statistic('Molecular Dynamics Simulation Performance')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    successful_run = False
    j = 0
    while j < len(lines):

        m = re.match(r'^Info: NAMD ([0-9a-zA-Z.]+)', lines[j])
        if m:
            parser.set_parameter("App:Version", m.group(1))

        m = re.match(r'^Info: TIMESTEP\s+([0-9.]+)', lines[j])
        if m:
            parser.set_parameter("Input:Timestep",
                                 m.group(1) + "e-15", "Second per Step")

        m = re.match(r'^Info: NUMBER OF STEPS\s+([0-9.]+)', lines[j])
        if m:
            parser.set_parameter("Input:Number of Steps", m.group(1))

        m = re.match(r'^Info: COORDINATE PDB\s+(.+)', lines[j])
        if m:
            parser.set_parameter("Input:Coordinate File", m.group(1))

        m = re.match(r'^Info: STRUCTURE FILE\s+(.+)', lines[j])
        if m:
            parser.set_parameter("Input:Structure File", m.group(1))

        m = re.match(
            r'^Info: Running on ([0-9.]+) processors, ([0-9.]+) nodes, ([0-9.]+) physical nodes.',
            lines[j])
        if m:
            parser.set_parameter("App:NCores", m.group(1).strip())
            parser.set_parameter("App:NNodes", m.group(3).strip())

        if re.match(r'^Info: STRUCTURE SUMMARY', lines[j]):
            j += 1
            for k in range(25):
                if re.match(r'^Info: \*\*\*\*\*', lines[j]):
                    break

                m = re.match(r'^Info:\s+([0-9]+)\s+ATOMS\n', lines[j])
                if m:
                    parser.set_parameter("Input:Number of Atoms", m.group(1))

                m = re.match(r'^Info:\s+([0-9]+)\s+BONDS\n', lines[j])
                if m:
                    parser.set_parameter("Input:Number of Bonds", m.group(1))

                m = re.match(r'^Info:\s+([0-9]+)\s+ANGLES\n', lines[j])
                if m:
                    parser.set_parameter("Input:Number of Angles", m.group(1))

                m = re.match(r'^Info:\s+([0-9]+)\s+DIHEDRALS\n', lines[j])
                if m:
                    parser.set_parameter("Input:Number of Dihedrals",
                                         m.group(1))

                j += 1

        if re.search(r'Info: Benchmark time:', lines[j]):
            m = re.search(r' ([0-9.]+) days/ns', lines[j])
            if m:
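                # NAMD reports days/ns; 1e-9 / (days per ns) is the number of
                # simulated seconds per wall-clock day (e.g. 0.5 days/ns gives
                # 2e-9 s, i.e. 2 ns, of simulation per day).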
                parser.set_statistic(
                    "Molecular Dynamics Simulation Performance",
                    str(1.0e-9 / float(m.group(1))), "Second per Day")

        m = re.match(
            r'^WallClock:\s+([0-9.]+)\s+CPUTime:\s+([0-9.]+)\s+Memory:\s+([0-9.]+)',
            lines[j])
        if m:
            parser.set_statistic("Wall Clock Time", m.group(1), "Second")
            parser.set_statistic("Memory", m.group(3), "MByte")
            successful_run = True

        m = re.match(r'^End of program', lines[j])
        if m:
            successful_run = True

        j += 1

    parser.successfulRun = successful_run

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
Example #8
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(name='akrr network check',
                                version=1,
                                description="network benchmarking",
                                url='http://www.xdmod.org',
                                measurement_name='akrr network check')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_statistic('Ping, Mean')
    parser.add_must_have_statistic('Secure Copy Bandwidth (in), Mean')
    parser.add_must_have_statistic('Secure Copy Bandwidth (out), Mean')
    parser.add_must_have_statistic('WGet Bandwidth, Mean')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'wallClockTime') and parser.wallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.wallClockTime), "Second")
    if hasattr(
            parser,
            'appKerWallClockTime') and parser.appKerWallClockTime is not None:
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    successful_run = False
    j = 0

    start = None
    end = None
    while j < len(lines):
        if lines[j].strip() == "AKRR Network Check Results:":
            start = j
        if lines[j].strip() == "Done":
            end = j
        j += 1

    if start is not None and end is not None:
        r = json.loads(" ".join(lines[(start + 1):end]))
        successful_run = True
        if 'ping' in r:
            count = 0
            ping = 0.0
            for k, v in r['ping'].items():
                if v is None:
                    successful_run = False
                else:
                    ping += float(v['rtt_avg'])
                    count += 1
            parser.set_statistic("Ping, Mean", ping / count, "ms")
        if 'wget' in r:
            count = 0
            val = 0.0
            for k, v in r['wget'].items():
                if v is None:
                    successful_run = False
                else:
                    val += float(v['bandwidth'])
                    count += 1
            parser.set_statistic("WGet Bandwidth, Mean", val / count, "MB/s")
        if 'scp' in r:
            count = 0
            val_in = 0.0
            val_out = 0.0
            for k, v in r['scp'].items():
                if v is None:
                    successful_run = False
                else:
                    val_in += float(v['bandwidth_ingress'])
                    val_out += float(v['bandwidth_egress'])
                    count += 1
            parser.set_statistic("Secure Copy Bandwidth (in), Mean",
                                 val_in / count, "MB/s")
            parser.set_statistic("Secure Copy Bandwidth (out), Mean",
                                 val_out / count, "MB/s")

    parser.successfulRun = successful_run

    if __name__ == "__main__":
        # output for testing purpose
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
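
For reference, a hypothetical payload of the kind this parser expects between the "AKRR Network Check Results:" and "Done" markers. The field names (rtt_avg, bandwidth, bandwidth_ingress, bandwidth_egress) are the keys the code above reads; the hosts, URL, and numbers are made up:

example_payload = {
    "ping": {"remote.host.example": {"rtt_avg": 1.25}},               # ms
    "wget": {"http://mirror.example.org/file": {"bandwidth": 11.3}},  # MB/s
    "scp": {"remote.host.example": {"bandwidth_ingress": 98.1,        # MB/s in
                                    "bandwidth_egress": 87.4}},       # MB/s out
}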
Example #9
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='wrf',
        version=1,
        description="Weather Research and Forecasting Model",
        url='http://www.wrf-model.org',
        measurement_name='WRF')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Grid Resolution')
    parser.add_must_have_parameter('Input:Simulation Length')
    parser.add_must_have_parameter('Input:Simulation Start Date')
    parser.add_must_have_parameter('Input:Timestep')
    parser.add_must_have_parameter('RunEnv:Nodes')
    parser.add_must_have_parameter('WRF Dynamical Solver')

    # parser.add_must_have_statistic('Average Floating-Point Performance')
    parser.add_must_have_statistic('Average Simulation Speed')
    parser.add_must_have_statistic('Mean Time To Simulate One Timestep')
    parser.add_must_have_statistic('Output Data Size')
    # parser.add_must_have_statistic('Peak Floating-Point Performance')
    parser.add_must_have_statistic('Peak Simulation Speed')
    parser.add_must_have_statistic('Time Spent on I/O')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    io_size = None
    wall_clock_time = None
    iteration_wall_clock_time = []
    sim_time_per_iteration = None
    dx = None
    dy = None
    flops_conversion = None

    j = 0
    while j < len(lines):
        m = re.search(r'XDMOD\*\*\*SIZE OF CURRENT DIR BEFORE WRF RUN\s*(\d+)',
                      lines[j])
        if m:
            io_size = int(m.group(1).strip())

        m = re.search(r'XDMOD\*\*\*SIZE OF CURRENT DIR AFTER WRF RUN\s*(\d+)',
                      lines[j])
        if m and io_size:
            parser.set_statistic("Output Data Size",
                                 (int(m.group(1).strip()) - io_size) / 1024.0 /
                                 1024.0, "MByte")

        m = re.search(r'XDMOD\*\*\*WRF RUN BEGINS HERE --(.+)', lines[j])
        if m:
            wall_clock_time = parser.get_datetime_local(m.group(1).strip())

        m = re.search(r'XDMOD\*\*\*WRF RUN HAS FINISHED --(.+)', lines[j])
        if m and wall_clock_time:
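            # wall_clock_time is reused: it held the run start datetime and
            # becomes the run duration (a timedelta) after this subtraction.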
            wall_clock_time = parser.get_datetime_local(
                m.group(1).strip()) - wall_clock_time
            parser.set_statistic("Wall Clock Time",
                                 wall_clock_time.total_seconds(), "Second")

        if lines[j].find('XDMOD***RESULT OF rsl.out.0000 BEGINS') >= 0:
            # the output from MPI rank #0
            io_time = None
            while j < len(lines):
                if lines[j].find('XDMOD***RESULT OF rsl.out.0000 ENDS') >= 0:
                    break

                m = re.search(
                    r'Timing for processing restart file.+?:\s+(\d\S+)',
                    lines[j], re.I)
                if m:
                    if io_time is None:
                        io_time = 0.0
                    io_time += float(m.group(1).strip())

                m = re.search(r'Timing for Writing.+?:\s+(\d\S+)', lines[j],
                              re.I)
                if m:
                    if io_time is None:
                        io_time = 0.0
                    io_time += float(m.group(1).strip())

                m = re.search(
                    r'Timing for main: time.+?on domain.+?:\s+(\d\S+)',
                    lines[j], re.I)
                if m:
                    iteration_wall_clock_time.append(float(m.group(1).strip()))

                m = re.search(r'WRF NUMBER OF TILES.+?(\d+)', lines[j])
                if m:
                    omp_threads = int(m.group(1).strip())
                    if omp_threads > 1:
                        parser.set_parameter("Number of OpenMP Threads",
                                             omp_threads)

                m = re.match(r'^\s+WRF V(\S+) MODEL', lines[j])
                if m:
                    parser.set_parameter("App:Version", m.group(1).strip())
                j += 1
            parser.set_statistic("Time Spent on I/O", io_time, "Second")

        if re.search(r'XDMOD\*\*\*RESULT OF wrfout.+?BEGINS',
                     lines[j]) is not None:
            # the output file's header (netCDF dump)
            io_time = None
            while j < len(lines):
                if re.search(r'XDMOD\*\*\*RESULT OF wrfout.+?ENDS',
                             lines[j]) is not None:
                    break

                m = re.search(r':DX = (\d+)', lines[j], re.I)
                if m:
                    dx = float(m.group(1).strip()) * 0.001  # in meters

                m = re.search(r':DY = (\d+)', lines[j], re.I)
                if m:
                    dy = float(m.group(1).strip()) * 0.001  # in meters

                m = re.search(r':DT = (\d+)', lines[j], re.I)
                if m:
                    sim_time_per_iteration = float(
                        m.group(1).strip())  # in seconds
                    parser.set_parameter("Input:Timestep",
                                         sim_time_per_iteration,
                                         "Second per Step")

                m = re.search(r':SIMULATION_START_DATE = "(.+?)"', lines[j],
                              re.I)
                if m:
                    parser.set_parameter("Input:Simulation Start Date",
                                         (m.group(1).strip()))

                m = re.search(r':GRIDTYPE = "(.+?)"', lines[j], re.I)
                if m:
                    solver = m.group(1).strip()
                    if solver == 'C':
                        solver = 'Advanced Research WRF (ARW)'
                    if solver == 'E':
                        solver = 'Nonhydrostatic Mesoscale Model (NMM)'
                    parser.set_parameter("WRF Dynamical Solver", solver)

                m = re.search(r'Timing for Writing.+?:\s+(\d\S+)', lines[j],
                              re.I)
                if m:
                    if io_time is None:
                        io_time = 0.0
                    io_time += float(m.group(1).strip())

                m = re.search(
                    r'Timing for main: time.+?on domain.+?:\s+(\d\S+)',
                    lines[j], re.I)
                if m:
                    iteration_wall_clock_time.append(float(m.group(1).strip()))

                m = re.search(r'WRF NUMBER OF TILES.+?(\d+)', lines[j])
                if m:
                    omp_threads = int(m.group(1).strip())
                    if omp_threads > 1:
                        parser.set_parameter("Number of OpenMP Threads",
                                             omp_threads)

                m = re.match(r'^\s+WRF V(\S+) MODEL', lines[j])
                if m:
                    parser.set_parameter("App:Version", m.group(1).strip())
                j += 1
            if dx and dy:
                # back-compatibility with the old output format: print whole
                # kilometers without a fractional part
                if (dx - int(dx)) * 1000 < 0.1 and (dy - int(dy)) * 1000 < 0.1:
                    parser.set_parameter("Input:Grid Resolution",
                                         "%.0f x %.0f" % (dx, dy), "km^2")
                else:
                    parser.set_parameter("Input:Grid Resolution",
                                         str(dx) + " x " + str(dy), "km^2")

        m = re.search(r'XDMOD\*\*\*FLOATING-POINT PERFORMANCE CONVERSION',
                      lines[j])
        if m and j + 1 < len(lines):
            flops_conversion = lines[j + 1].strip()
        j += 1

    # the run counts as successful only if both the begin and end markers were
    # found, i.e. wall_clock_time was turned into a timedelta
    parser.successfulRun = hasattr(wall_clock_time, "total_seconds")

    if len(iteration_wall_clock_time) > 1 and sim_time_per_iteration:
        parser.set_parameter("Input:Simulation Length",
                             len(iteration_wall_clock_time) *
                             sim_time_per_iteration / 3600.0, "Hour")
        # drop the slowest iteration, an outlier typically inflated by I/O
        iteration_wall_clock_time = sorted(iteration_wall_clock_time)
        iteration_wall_clock_time.pop()

        min_t = iteration_wall_clock_time[0]
        t = sum(iteration_wall_clock_time) / len(iteration_wall_clock_time)
        parser.set_statistic("Mean Time To Simulate One Timestep", t, "Second")
        parser.set_statistic("Average Simulation Speed",
                             sim_time_per_iteration / t,
                             "Simulated Second per Second")
        parser.set_statistic("Peak Simulation Speed",
                             sim_time_per_iteration / min_t,
                             "Simulated Second per Second")

        if flops_conversion:
            # the job output supplies an expression in T (time per step);
            # strip the shell decoration and evaluate it with T bound
            flops_conversion = flops_conversion.replace("$", "").replace(
                "gflops=", "")
            gflops = eval(flops_conversion, {'T': t})
            parser.set_statistic("Average Floating-Point Performance",
                                 1000.0 * gflops, "MFLOP per Second")
            gflops = eval(flops_conversion, {'T': min_t})
            parser.set_statistic("Peak Floating-Point Performance",
                                 1000.0 * gflops, "MFLOP per Second")

    if __name__ == "__main__":
        # output for testing purposes
        parsing_complete = parser.parsing_complete(True)
        print("parsing complete:", parsing_complete)
        if hasattr(parser, 'successfulRun'):
            print("successfulRun", parser.successfulRun)
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
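
A minimal sketch of how the FLOATING-POINT PERFORMANCE CONVERSION line above is consumed. The exact expression is an assumption (only the "$" and "gflops=" stripping is taken from the parser); the job output is expected to supply a formula in T, the time per simulated step:

# hypothetical conversion line; the real one comes from the job output
flops_line = "$gflops=271.0/T$"
expr = flops_line.replace("$", "").replace("gflops=", "")

mean_time_per_step = 2.5  # seconds, made-up value
gflops = eval(expr, {'T': mean_time_per_step})  # evaluate with T bound
print(gflops)  # 108.4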
Example #10
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(name='quantum_espresso',
                                version=1,
                                description="Quantum ESPRESSO (PWSCF)",
                                url='http://www.quantum-espresso.org',
                                measurement_name='Quantum_ESPRESSO')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Number of Atoms per Cell')
    parser.add_must_have_parameter('Input:Number of Atomic Types')
    parser.add_must_have_parameter('Input:Number of Electrons')

    parser.add_must_have_statistic('Wall Clock Time')
    parser.add_must_have_statistic('User Time')
    parser.add_must_have_statistic("Per-Process Dynamical Memory")
    parser.add_must_have_statistic("Time Spent in Program Initialization")
    parser.add_must_have_statistic("Time Spent in Electron Energy Calculation")
    parser.add_must_have_statistic("Time Spent in Force Calculation")
    # These statistics were probably produced by a different set of inputs;
    # they are optional now:
    # parser.add_must_have_statistic("Time Spent in Stress Calculation")
    # parser.add_must_have_statistic("Time Spent in Potential Updates "
    #                                "(Charge Density and Wavefunctions Extrapolations)")

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    j = 0
    while j < len(lines):

        m = re.match(r'^\s+Program PWSCF\s+([\w.]+)\s+starts', lines[j])
        if m:
            parser.set_parameter("App:Version", m.group(1).strip())

        m = re.match(r'^\s+number of atoms/cell\s*=\s*([\d.]+)', lines[j])
        if m:
            parser.set_parameter("Input:Number of Atoms per Cell",
                                 m.group(1).strip())

        m = re.match(r'^\s+number of atomic types\s*=\s*([\d.]+)', lines[j])
        if m:
            parser.set_parameter("Input:Number of Atomic Types",
                                 m.group(1).strip())

        m = re.match(r'^\s+number of electrons\s*=\s*([\d.]+)', lines[j])
        if m:
            parser.set_parameter("Input:Number of Electrons",
                                 m.group(1).strip())

        m = re.match(r'^\s+per-process dynamical memory:\s*([\d.]+)\s*Mb',
                     lines[j])
        if m:
            parser.set_statistic("Per-Process Dynamical Memory",
                                 (m.group(1).strip()), "MByte")

        m = re.match(r'^\s+init_run\s+:\s*([\d.]+)s CPU', lines[j])
        if m:
            parser.set_statistic("Time Spent in Program Initialization",
                                 (m.group(1).strip()), "Second")

        m = re.match(r'^\s+electrons\s+:\s*([\d.]+)s CPU', lines[j])
        if m:
            parser.set_statistic("Time Spent in Electron Energy Calculation",
                                 (m.group(1).strip()), "Second")

        m = re.match(r'^\s+forces\s+:\s*([\d.]+)s CPU', lines[j])
        if m:
            parser.set_statistic("Time Spent in Force Calculation",
                                 (m.group(1).strip()), "Second")

        m = re.match(r'^\s+stress\s+:\s*([\d.]+)s CPU', lines[j])
        if m:
            parser.set_statistic("Time Spent in Stress Calculation",
                                 (m.group(1).strip()), "Second")

        m = re.match(r'^\s+update_pot\s+:\s*([\d.]+)s CPU', lines[j])
        if m:
            parser.set_statistic(
                "Time Spent in Potential Updates (Charge Density and Wavefunctions Extrapolations)",
                float(m.group(1).strip()), "Second")

        m = re.match(r'^\s+PWSCF\s+:(.+CPU.+)', lines[j])
        if m:
            run_times = m.group(1).strip().split(',')
            for run_time in run_times:
                v = run_time.split()
                if len(v) > 1:
                    if v[0].lower().find("m") >= 0:
                        m = re.match(r'^([0-9]+)m([0-9.]+)s', v[0])
                        sec = float(m.group(1)) * 60.0 + float(m.group(2))
                    else:
                        m = re.match(r'^([0-9.]+)s', v[0])
                        sec = float(m.group(1))
                    if v[1].upper().find("CPU") >= 0:
                        parser.set_statistic("User Time", sec, "Second")
                    if v[1].upper().find("WALL") >= 0:
                        parser.set_statistic("Wall Clock Time", sec, "Second")

        if re.match(r'^\s+JOB DONE', lines[j]):
            parser.successfulRun = True
        j += 1
    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete(True))
        if hasattr(parser, 'successfulRun'):
            print("successfulRun", parser.successfulRun)
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
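
The PWSCF total-time line mixes "12m34.56s" and "78.9s" style tokens; the branch on "m" above can be factored into a small helper. A sketch only, not part of the AppKerOutputParser API:

import re

def pwscf_time_to_seconds(token):
    # "12m34.56s" -> minutes and seconds; "78.9s" -> seconds only
    m = re.match(r'^(\d+)m([\d.]+)s$', token)
    if m:
        return float(m.group(1)) * 60.0 + float(m.group(2))
    m = re.match(r'^([\d.]+)s$', token)
    return float(m.group(1)) if m else None

print(pwscf_time_to_seconds("1m35.40s"))  # 95.4
print(pwscf_time_to_seconds("78.9s"))     # 78.9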
Example #11
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='lammps',
        version=1,
        description=
        "LAMMPS: Large-scale Atomic/Molecular Massively Parallel Simulator",
        url='http://lammps.sandia.gov',
        measurement_name='LAMMPS')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Number of Atoms')
    parser.add_must_have_parameter('Input:Number of Steps')
    parser.add_must_have_parameter('Input:Timestep')

    parser.add_must_have_statistic('Molecular Dynamics Simulation Performance')
    parser.add_must_have_statistic('Per-Process Memory')
    parser.add_must_have_statistic('Time Spent in Bond Potential Calculation')
    parser.add_must_have_statistic('Time Spent in Communication')
    parser.add_must_have_statistic(
        'Time Spent in Long-Range Coulomb Potential (K-Space) Calculation')
    parser.add_must_have_statistic('Time Spent in Neighbor List Regeneration')
    parser.add_must_have_statistic(
        'Time Spent in Pairwise Potential Calculation')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    wall_clock_time = None
    simulation_units = None
    num_steps = None
    step_size = None
    j = 0
    while j < len(lines):

        m = re.match(r'^LAMMPS\s+\(([\w ]+)\)', lines[j])
        if m:
            parser.set_parameter("App:Version", m.group(1).strip())

        m = re.match(r'^Memory usage per processor = ([\d.]+) Mbyte', lines[j])
        if m:
            parser.set_statistic("Per-Process Memory",
                                 m.group(1).strip(), "MByte")

        m = re.match(r'^Loop time of ([\d.]+) on', lines[j])
        if m:
            parser.successfulRun = True
            wall_clock_time = float(m.group(1).strip())
            parser.set_statistic("Wall Clock Time", wall_clock_time, "Second")
            m1 = re.search(r'(\d+) atoms', lines[j])
            if m1:
                parser.set_parameter("Input:Number of Atoms",
                                     m1.group(1).strip())

        m = re.match(r'^units\s+(\w+)', lines[j])
        if m:
            simulation_units = m.group(1).strip().lower()

        m = re.match(r'^run\s+(\d+)', lines[j])
        if m:
            num_steps = int(m.group(1).strip())
            parser.set_parameter("Input:Number of Steps", num_steps)

        m = re.match(r'^timestep\s+([\d.]+)', lines[j])
        if m:
            step_size = float(m.group(1).strip())

        m = re.match(r'^Pair\s+time.+= ([\d.]+)', lines[j])
        if parser.successfulRun and m:
            parser.set_statistic(
                "Time Spent in Pairwise Potential Calculation",
                m.group(1).strip(), "Second")

        m = re.match(r'^Bond\s+time.+= ([\d.]+)', lines[j])
        if parser.successfulRun and m:
            parser.set_statistic("Time Spent in Bond Potential Calculation",
                                 m.group(1).strip(), "Second")

        # "Kspce" is the spelling LAMMPS itself uses in the timing breakdown
        m = re.match(r'^Kspce\s+time.+= ([\d.]+)', lines[j])
        if parser.successfulRun and m:
            parser.set_statistic(
                "Time Spent in Long-Range Coulomb Potential (K-Space) Calculation",
                m.group(1).strip(), "Second")

        m = re.match(r'^Neigh\s+time.+= ([\d.]+)', lines[j])
        if parser.successfulRun and m:
            parser.set_statistic("Time Spent in Neighbor List Regeneration",
                                 m.group(1).strip(), "Second")

        m = re.match(r'^Comm\s+time.+= ([\d.]+)', lines[j])
        if parser.successfulRun and m:
            parser.set_statistic("Time Spent in Communication",
                                 m.group(1).strip(), "Second")

        j += 1

    if (parser.successfulRun and num_steps and simulation_units
            and simulation_units != "lj"):
        # Default step_size values (see http://lammps.sandia.gov/doc/units.html):
        #
        #   0.005 tau    for simulation_units == "lj"
        #   1e-15 second for simulation_units in ("real", "metal")
        #   1e-18 second for simulation_units == "electron"
        #   1e-8  second for simulation_units in ("si", "cgs")
        #
        # The unit of step_size itself depends on simulation_units
        # (same reference):
        #
        #   "lj":                 tau
        #   "real" or "electron": 1e-15 second
        #   "metal":              1e-12 second
        #   "si" or "cgs":        second
        #
        # The default simulation_units is "lj", which we ignore since "lj" is
        # unitless; a condensed mapping is sketched after this listing.
        if step_size is None:
            if simulation_units == "real":
                step_size = 1.0
            if simulation_units.find("electron") >= 0 or simulation_units.find(
                    "metal") >= 0:
                step_size = 0.001
            if simulation_units.find("si") >= 0 or simulation_units.find(
                    "cgs") >= 0:
                step_size = 1.0e-8

        step_size_in_sec = step_size
        if step_size:
            if simulation_units.find("electron") >= 0 or simulation_units.find(
                    "real") >= 0:
                step_size_in_sec = step_size * 1.0e-15
            if simulation_units == "metal":
                step_size_in_sec = step_size * 1.0e-12
        if step_size_in_sec:
            parser.set_parameter("Input:Timestep", step_size_in_sec,
                                 "Second per Step")
            # simulated seconds per wall-clock day
            parser.set_statistic(
                "Molecular Dynamics Simulation Performance",
                step_size_in_sec * num_steps / (wall_clock_time / 86400.0),
                "Second per Day")

    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
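
The comment block in the listing above describes the units handling in prose; condensed into data it looks roughly like this. The defaults and conversion factors are taken from those comments and http://lammps.sandia.gov/doc/units.html, not from the parser's API:

# default step sizes in each unit system's native time unit
DEFAULT_STEP = {"real": 1.0, "electron": 0.001, "metal": 0.001,
                "si": 1.0e-8, "cgs": 1.0e-8}
# conversion from the native time unit to seconds
TO_SECONDS = {"real": 1.0e-15, "electron": 1.0e-15, "metal": 1.0e-12,
              "si": 1.0, "cgs": 1.0}

def step_size_in_seconds(units, step_size=None):
    if units == "lj":  # unitless, ignored by the parser
        return None
    if step_size is None:
        step_size = DEFAULT_STEP.get(units)
    factor = TO_SECONDS.get(units)
    return None if step_size is None or factor is None else step_size * factor

print(step_size_in_seconds("metal"))      # 1e-15
print(step_size_in_seconds("real", 2.0))  # 2e-15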
Example #12
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(name='hpcg',
                                version=1,
                                description="HPCG Benchmark",
                                url='http://www.hpcg-benchmark.org/index.html',
                                measurement_name='HPCG')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Distributed Processes')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Nx')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Ny')
    parser.add_must_have_parameter('Input:Global Problem Dimensions Nz')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Nx')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Ny')
    parser.add_must_have_parameter('Input:Local Domain Dimensions Nz')
    parser.add_must_have_parameter('Input:Number of Coarse Grid Levels')
    parser.add_must_have_parameter('Input:Threads per processes')
    parser.add_must_have_parameter('RunEnv:CPU Speed')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic('Floating-Point Performance, Raw DDOT')
    parser.add_must_have_statistic('Floating-Point Performance, Raw MG')
    parser.add_must_have_statistic('Floating-Point Performance, Raw SpMV')
    parser.add_must_have_statistic('Floating-Point Performance, Raw Total')
    parser.add_must_have_statistic('Floating-Point Performance, Raw WAXPBY')
    parser.add_must_have_statistic('Floating-Point Performance, Total')
    parser.add_must_have_statistic('Memory Bandwidth, Read')
    parser.add_must_have_statistic('Memory Bandwidth, Total')
    parser.add_must_have_statistic('Memory Bandwidth, Write')
    parser.add_must_have_statistic('Setup Time')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # extract the YAML section by hand because HPCG's YAML output is often malformed
    yaml_lines = []
    # get yaml lines from appstdout
    bool_in_yaml_section = False
    for line in lines:
        if re.match(r"^====== .+\.yaml End   ======", line):
            break

        if bool_in_yaml_section:
            yaml_lines.append(line)

        if re.match(r"^====== .+\.yaml Start ======", line):
            bool_in_yaml_section = True

    import yaml

    # fix some issues with the yaml: drop trailing non-YAML hint lines
    if yaml_lines and re.search(
            r"After confirmation please upload results from the YAML",
            yaml_lines[-1]):
        yaml_lines.pop()
    if yaml_lines and re.search(r"You have selected the QuickPath option",
                                yaml_lines[-1]):
        yaml_lines.pop()

    yaml_text = "".join(yaml_lines)

    yaml_text = re.sub(r"^ {6}HPCG 2\.4 Rating \(for historical value\) is:",
                       "  HPCG 2.4 Rating (for historical value) is:",
                       yaml_text,
                       flags=re.M)

    results_yaml = yaml.safe_load(yaml_text)

    # Set Parameters
    # App version
    app_version_list = []
    for ver in [x for x in results_yaml.keys() if re.search("version", x)]:
        app_version_list.append(ver + " " + str(results_yaml[ver]))
    app_version = ", ".join(app_version_list)
    parser.set_parameter('App:Version', app_version)

    # Problem size
    parser.set_parameter(
        'Input:Number of Coarse Grid Levels',
        results_yaml['Multigrid Information']['Number of coarse grid levels'])

    parser.set_parameter(
        'Input:Global Problem Dimensions Nx',
        results_yaml['Global Problem Dimensions']['Global nx'])
    parser.set_parameter(
        'Input:Global Problem Dimensions Ny',
        results_yaml['Global Problem Dimensions']['Global ny'])
    parser.set_parameter(
        'Input:Global Problem Dimensions Nz',
        results_yaml['Global Problem Dimensions']['Global nz'])

    parser.set_parameter('Input:Local Domain Dimensions Nx',
                         results_yaml['Local Domain Dimensions']['nx'])
    parser.set_parameter('Input:Local Domain Dimensions Ny',
                         results_yaml['Local Domain Dimensions']['ny'])
    parser.set_parameter('Input:Local Domain Dimensions Nz',
                         results_yaml['Local Domain Dimensions']['nz'])

    parser.set_parameter(
        'Input:Distributed Processes',
        results_yaml['Machine Summary']['Distributed Processes'])
    parser.set_parameter(
        'Input:Threads per processes',
        results_yaml['Machine Summary']['Threads per processes'])

    if "cpu_speed" in parser.geninfo:
        ll = parser.geninfo["cpu_speed"].splitlines()
        cpu_speed_max = 0.0
        for l in ll:
            m = re.search(r'([\d.]+)$', l)
            if m:
                v = float(m.group(1).strip())
                if v > cpu_speed_max:
                    cpu_speed_max = v
        if cpu_speed_max > 0.0:
            parser.set_parameter("RunEnv:CPU Speed", cpu_speed_max, "MHz")

    # Set Statistics
    parser.successfulRun = results_yaml['Reproducibility Information'][
        'Result'] == 'PASSED'

    parser.set_statistic('Setup Time',
                         results_yaml['Setup Information']['Setup Time'],
                         'Second')

    parser.set_statistic('Memory Bandwidth, Read',
                         results_yaml['GB/s Summary']['Raw Read B/W'], 'GB/s')
    parser.set_statistic('Memory Bandwidth, Write',
                         results_yaml['GB/s Summary']['Raw Write B/W'], 'GB/s')
    parser.set_statistic('Memory Bandwidth, Total',
                         results_yaml['GB/s Summary']['Raw Total B/W'], 'GB/s')

    parser.set_statistic(
        'Floating-Point Performance, Total',
        results_yaml['__________ Final Summary __________']
        ['HPCG result is VALID with a GFLOP/s rating of'], 'GFLOP/s')

    parser.set_statistic('Floating-Point Performance, Raw DDOT',
                         results_yaml['GFLOP/s Summary']['Raw DDOT'],
                         'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw WAXPBY',
                         results_yaml['GFLOP/s Summary']['Raw WAXPBY'],
                         'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw SpMV',
                         results_yaml['GFLOP/s Summary']['Raw SpMV'],
                         'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw MG',
                         results_yaml['GFLOP/s Summary']['Raw MG'], 'GFLOP/s')
    parser.set_statistic('Floating-Point Performance, Raw Total',
                         results_yaml['GFLOP/s Summary']['Raw Total'],
                         'GFLOP/s')

    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
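
A self-contained sketch of the YAML-extraction idiom used above: keep only the lines between the Start/End markers, then hand them to a safe loader. The sample text is made up; real HPCG output usually needs the extra clean-ups shown in the parser:

import re
import yaml

text = """====== hpcg.yaml Start ======
Machine Summary:
  Distributed Processes: 64
  Threads per processes: 1
====== hpcg.yaml End   ======"""

in_yaml, yaml_lines = False, []
for line in text.splitlines(True):
    if re.match(r"^====== .+\.yaml End", line):
        break
    if in_yaml:
        yaml_lines.append(line)
    if re.match(r"^====== .+\.yaml Start ======", line):
        in_yaml = True

doc = yaml.safe_load("".join(yaml_lines))
print(doc["Machine Summary"]["Distributed Processes"])  # 64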
Example #13
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          proclog=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(name='imb',
                                version=1,
                                description="Intel MPI Benchmarks",
                                url='http://www.intel.com/software/imb',
                                measurement_name='Intel MPI Benchmarks')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:MPI Thread Environment')
    parser.add_must_have_parameter('App:MPI Version')
    parser.add_must_have_parameter('App:Max Message Size')

    parser.add_must_have_statistic('Max Exchange Bandwidth')
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Get' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Get' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Put' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Bidirectional 'Put' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Get' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Get' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Put' Bandwidth (aggregate)")
    parser.add_must_have_statistic(
        "Max MPI-2 Unidirectional 'Put' Bandwidth (non-aggregate)")
    parser.add_must_have_statistic('Max PingPing Bandwidth')
    parser.add_must_have_statistic('Max PingPong Bandwidth')
    parser.add_must_have_statistic('Max SendRecv Bandwidth')
    parser.add_must_have_statistic('Min AllGather Latency')
    parser.add_must_have_statistic('Min AllGatherV Latency')
    parser.add_must_have_statistic('Min AllReduce Latency')
    parser.add_must_have_statistic('Min AllToAll Latency')
    parser.add_must_have_statistic('Min AllToAllV Latency')
    parser.add_must_have_statistic('Min Barrier Latency')
    parser.add_must_have_statistic('Min Broadcast Latency')
    parser.add_must_have_statistic('Min Gather Latency')
    parser.add_must_have_statistic('Min GatherV Latency')
    # parser.add_must_have_statistic("Min MPI-2 'Accumulate' Latency (aggregate)")
    # parser.add_must_have_statistic("Min MPI-2 'Accumulate' Latency (non-aggregate)")
    parser.add_must_have_statistic('Min MPI-2 Window Creation Latency')
    parser.add_must_have_statistic('Min Reduce Latency')
    parser.add_must_have_statistic('Min ReduceScatter Latency')
    parser.add_must_have_statistic('Min Scatter Latency')
    parser.add_must_have_statistic('Min ScatterV Latency')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time",
                             total_seconds(parser.appKerWallClockTime),
                             "Second")

    # Intel MPI benchmark suite contains three classes of benchmarks:
    #
    #  Single-transfer, which needs only 2 processes
    #  Parallel-transfer, which can use as many processes that are available
    #  Collective, which can use as many processes that are available

    # The parameters mapping table
    params = {
        "MPI Thread Environment": ["MPI Thread Environment", "", ""],
        "MPI Version": ["MPI Version", "", ""],
        "Maximum message length in bytes":
        ["Max Message Size", "MByte", "<val>/1024/1024"]
    }

    # The result mapping table
    metrics = {
        "PingPing": ["PingPing Bandwidth", "MByte per Second", "max"],
        "PingPong": ["PingPong Bandwidth", "MByte per Second", "max"],
        "Multi-PingPing": ["PingPing Bandwidth", "MByte per Second", "max"],
        "Multi-PingPong": ["PingPong Bandwidth", "MByte per Second", "max"],
        "Sendrecv": ["SendRecv Bandwidth", "MByte per Second", "max"],
        "Exchange": ["Exchange Bandwidth", "MByte per Second", "max"],
        "Allreduce": ["AllReduce Latency", "us", "min"],
        "Reduce": ["Reduce Latency", "us", "min"],
        "Reduce_scatter": ["ReduceScatter Latency", "us", "min"],
        "Allgather": ["AllGather Latency", "us", "min"],
        "Allgatherv": ["AllGatherV Latency", "us", "min"],
        "Gather": ["Gather Latency", "us", "min"],
        "Gatherv": ["GatherV Latency", "us", "min"],
        "Scatter": ["Scatter Latency", "us", "min"],
        "Scatterv": ["ScatterV Latency", "us", "min"],
        "Alltoall": ["AllToAll Latency", "us", "min"],
        "Alltoallv": ["AllToAllV Latency", "us", "min"],
        "Bcast": ["Broadcast Latency", "us", "min"],
        "Barrier": ["Barrier Latency", "us", "min"],
        "Window": ["MPI-2 Window Creation Latency", "us", "min"],
        "Multi-Unidir_Get":
        ["MPI-2 Unidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Multi-Unidir_Put":
        ["MPI-2 Unidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Multi-Bidir_Get":
        ["MPI-2 Bidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Multi-Bidir_Put":
        ["MPI-2 Bidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Unidir_Get":
        ["MPI-2 Unidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Unidir_Put":
        ["MPI-2 Unidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Bidir_Get":
        ["MPI-2 Bidirectional 'Get' Bandwidth", "MByte per Second", "max"],
        "Bidir_Put":
        ["MPI-2 Bidirectional 'Put' Bandwidth", "MByte per Second", "max"],
        "Accumulate": ["MPI-2 'Accumulate' Latency", "us", "min"]
    }

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    aggregate_mode = None
    metric = None
    j = -1
    while j < len(lines) - 1:
        j += 1
        m = re.search(r'All processes entering MPI_Finalize', lines[j])
        if m:
            parser.successfulRun = True

        m = re.match(r'^# Benchmarking\s+(\S+)', lines[j])
        if m:
            if m.group(1) in metrics:
                metric = m.group(1)
                continue

        m = re.match(r'^#\s+MODE:\s+(\S+)', lines[j])
        if m and metric and aggregate_mode is None:
            aggregate_mode = m.group(1)
            continue

        m = re.match(r'^# (.+): (.+)', lines[j])
        if m:  # benchmark parameters
            param = m.group(1).strip()
            if param in params:
                val = m.group(2).strip()
                v = params[param][2]
                if v.find('<val>') >= 0:
                    val = get_float_or_int(val)
                    val = eval(v.replace('<val>', 'val'))
                parser.set_parameter("App:" + params[param][0],
                                     str(val) + " ", params[param][1])
            continue

        m = re.match(r'^\s+([1-9]\d*)\s+\d+', lines[j])
        if m and metric:  # this effectively skips the first line of result, which has #bytes = 0
            results = []

            while m:
                numbers = lines[j].split()
                results.append(
                    float(numbers[-1]
                          ))  # tokenize the line, and extract the last column

                j += 1
                if j < len(lines):
                    m = re.match(r'^\s+([1-9]\d*)\s+\d+', lines[j])
                    if lines[j].count('IMB_init_buffers_iter') > 0:
                        break
                else:
                    break
            metric_name = metrics[metric][0]
            if aggregate_mode:
                metric_name += " (" + aggregate_mode.lower() + ")"
            if len(results) > 0:
                agg = metrics[metric][2]  # "min" or "max"
                statname = agg.capitalize() + " " + metric_name
                statval = min(results) if agg == "min" else max(results)
                if metrics[metric][1] == 'us':
                    parser.set_statistic(statname, statval * 1e-6, "Second")
                else:
                    parser.set_statistic(statname, statval,
                                         metrics[metric][1])

            aggregate_mode = None
            metric = None
    if parser.get_parameter("App:MPI Thread Environment") is None:
        parser.set_parameter("App:MPI Thread Environment", "")

    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())
    # print out missing parameters for debugging purposes
    parser.parsing_complete(verbose=True)
    # return complete XML otherwise return None
    return parser.get_xml()
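
How one entry of the metrics table turns into a statistic name and value, with the eval-based aggregation replaced by a plain dispatch for clarity (made-up bandwidth numbers):

metrics = {"PingPong": ["PingPong Bandwidth", "MByte per Second", "max"]}

metric, results = "PingPong", [812.3, 954.7, 990.1]
name, unit, agg = metrics[metric]
statname = agg.capitalize() + " " + name          # "Max PingPong Bandwidth"
statval = {"min": min, "max": max}[agg](results)  # 990.1
print(statname, statval, unit)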
Example #14
def process_appker_output(appstdout=None, stdout=None, stderr=None, geninfo=None, proclog=None, 
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='ior',
        version=1,
        description="IOR (Interleaved-Or-Random) Benchmark",
        url='http://freshmeat.net/projects/ior',
        measurement_name='IOR'
    )
    app_vars = None
    if resource_appker_vars is not None and 'app' in resource_appker_vars:
        app_vars = resource_appker_vars['app']

    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    if app_vars is None or app_vars.get('testHDF5') is True:
        parser.add_must_have_parameter('HDF Version')
        parser.add_must_have_parameter('HDF5 Collective N-to-1 Test File System')
        parser.add_must_have_parameter('HDF5 Independent N-to-1 Test File System')
        parser.add_must_have_parameter('HDF5 N-to-N Test File System')

    if app_vars is None or app_vars.get('testMPIIO') is True:
        parser.add_must_have_parameter('MPIIO Collective N-to-1 Test File System')
        parser.add_must_have_parameter('MPIIO Independent N-to-1 Test File System')
        parser.add_must_have_parameter('MPIIO N-to-N Test File System')

    if app_vars is None or app_vars.get('testPOSIX') is True:
        parser.add_must_have_parameter('POSIX N-to-1 Test File System')
        parser.add_must_have_parameter('POSIX N-to-N Test File System')

    if app_vars is None or app_vars.get('testNetCDF') is True:
        parser.add_must_have_parameter('Parallel NetCDF Collective N-to-1 Test File System')
        parser.add_must_have_parameter('Parallel NetCDF Independent N-to-1 Test File System')
        parser.add_must_have_parameter('Parallel NetCDF Version')
        parser.add_must_have_parameter('Per-Process Data Size')
        parser.add_must_have_parameter('Per-Process I/O Block Size')
        parser.add_must_have_parameter('RunEnv:Nodes')
        parser.add_must_have_parameter('Transfer Size Per I/O')

    if app_vars is None or app_vars.get('testHDF5') is True:
        parser.add_must_have_statistic('HDF5 Collective N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 Collective N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 Independent N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 Independent N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 N-to-N Read Aggregate Throughput')
        parser.add_must_have_statistic('HDF5 N-to-N Write Aggregate Throughput')

    if app_vars is None or app_vars.get('testMPIIO') is True:
        parser.add_must_have_statistic('MPIIO Collective N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO Collective N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO Independent N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO Independent N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO N-to-N Read Aggregate Throughput')
        parser.add_must_have_statistic('MPIIO N-to-N Write Aggregate Throughput')

    if app_vars is None or app_vars.get('testPOSIX') is True:
        parser.add_must_have_statistic('POSIX N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('POSIX N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('POSIX N-to-N Read Aggregate Throughput')
        parser.add_must_have_statistic('POSIX N-to-N Write Aggregate Throughput')

    if app_vars is None or app_vars.get('testNetCDF') is True:
        parser.add_must_have_statistic('Parallel NetCDF Collective N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('Parallel NetCDF Collective N-to-1 Write Aggregate Throughput')
        parser.add_must_have_statistic('Parallel NetCDF Independent N-to-1 Read Aggregate Throughput')
        parser.add_must_have_statistic('Parallel NetCDF Independent N-to-1 Write Aggregate Throughput')

    parser.add_must_have_statistic('Number of Tests Passed')
    parser.add_must_have_statistic('Number of Tests Started')

    parser.add_must_have_statistic('Wall Clock Time')

    parser.completeOnPartialMustHaveStatistics = True
    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo, resource_appker_vars)

    if hasattr(parser, 'appKerWallClockTime'):
        parser.set_statistic("Wall Clock Time", total_seconds(parser.appKerWallClockTime), "Second")

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output

    # find which version of IOR was used
    ior_output_version = None
    j = 0
    while j < len(lines) - 1:
        # IOR RELEASE: IOR-2.10.3
        m = re.match(r'^#\s+IOR RELEASE:\s(.+)', lines[j])
        if m:
            ior_output_version = 20
        # IOR-3.2.0: MPI Coordinated Test of Parallel I/O
        # IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O
        m = re.match(r'^IOR-([3-9])\.([0-9]+)\.[0-9]\S*: MPI Coordinated Test of Parallel I/O', lines[j])
        if m:
            ior_major = int(m.group(1))
            ior_minor = int(m.group(2))
            if ior_major >= 3:
                if ior_minor >= 3:
                    ior_output_version = 33
                elif ior_minor >= 2:
                    ior_output_version = 32
                else:
                    ior_output_version = 30

        j += 1

    if ior_output_version is None:
        print("ERROR: unknown version of IOR output!!!")

    parser.successfulRun = False

    total_number_of_tests, tests_passed = process_ior_output_v33(parser, lines)

    if app_vars is not None and 'doAllWritesFirst' in app_vars:
        if app_vars['doAllWritesFirst']:
            # i.e. separate read and write
            total_number_of_tests = total_number_of_tests // 2
    else:
        # by default separate read and write
        total_number_of_tests = total_number_of_tests // 2

    parser.set_statistic('Number of Tests Passed', tests_passed)
    parser.set_statistic('Number of Tests Started', total_number_of_tests)

    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete(verbose=True))
        parser.print_params_stats_as_must_have()
        parser.print_template_for_pytest()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
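
A quick check of the version-detection regex against the sample banners quoted in the comments above:

import re

for banner in ("# IOR RELEASE: IOR-2.10.3",
               "IOR-3.2.0: MPI Coordinated Test of Parallel I/O",
               "IOR-3.3.0+dev: MPI Coordinated Test of Parallel I/O"):
    m = re.match(r'^IOR-([3-9])\.([0-9]+)\.[0-9]\S*: MPI Coordinated Test',
                 banner)
    print(banner.split(":")[0], "->", m.groups() if m else "2.x-style banner")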
Example #15
def process_appker_output(appstdout=None, stdout=None, stderr=None, geninfo=None, resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='enzo',
        version=1,
        description="Enzo: an Adaptive Mesh Refinement Code for Astrophysics",
        url='http://enzo-project.org',
        measurement_name='Enzo'
    )
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo, resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()
    parser.set_parameter("App:Version", "unknown")
    # process the output
    successful_run = False
    j = 0
    while j < len(lines):
        m = re.match(r'^Mercurial Branch\s+(\S+)', lines[j])
        if m:
            branch = m.group(1)
            revision = ""
            if j + 1 < len(lines):
                m = re.match(r'^Mercurial Revision\s+(\S+)', lines[j + 1])
                if m:
                    revision = m.group(1)
            parser.set_parameter("App:Version", "Branch:" + branch + " Revision:" + revision)

        m = re.match(r'^Time\s*=\s*([0-9.]+)\s+CycleNumber\s*=\s*([0-9]+)\s+Wallclock\s*=\s*([0-9.]+)', lines[j])
        if m:
            parser.set_statistic("Final Simulation Time", m.group(1), "Enzo Time Unit")
            parser.set_statistic("Total Cycles", m.group(2))
            parser.set_statistic("Wall Clock Time", m.group(3), "Second")
            successful_run = True

        m = re.match(r'^Successful run, exiting\.', lines[j])
        if m:
            successful_run = True

        # performance
        m = re.match(r'^Cycle_Number\s+([0-9]+)', lines[j])
        if m:
            j += 1
            performance_metrics = {}
            while j < len(lines):
                if lines[j].strip() != "":
                    v = lines[j].strip().split()
                    if v[0] not in performance_metrics:
                        performance_metrics[v[0]] = float(v[1])
                    else:
                        performance_metrics[v[0]] += float(v[1])
                else:
                    if j + 1 < len(lines):
                        m = re.match(r'^Cycle_Number\s+([0-9]+)', lines[j + 1])
                        if m:
                            pass
                        else:
                            break
                    else:
                        break
                j += 1

            metric = "CommunicationTranspose"
            if metric in performance_metrics:
                parser.set_statistic("Communication Transpose Time", performance_metrics[metric], "Second")

            metric = "ComputePotentialFieldLevelZero"
            if metric in performance_metrics:
                parser.set_statistic("Gravitational Potential Field Computing Time", performance_metrics[metric],
                                     "Second")

            metric = "EvolvePhotons"
            if metric in performance_metrics:
                parser.set_statistic("Radiative Transfer Calculation Time", performance_metrics[metric], "Second")

            metric = "Group_WriteAllData"
            if metric in performance_metrics:
                parser.set_statistic("All Data Group Write Time", performance_metrics[metric], "Second")

            metric = "Level_00"
            if metric in performance_metrics:
                parser.set_statistic("All Grid Level 00 Calculation Time", performance_metrics[metric], "Second")

            metric = "Level_01"
            if metric in performance_metrics:
                parser.set_statistic("All Grid Level 01 Calculation Time", performance_metrics[metric], "Second")

            metric = "Level_02"
            if metric in performance_metrics:
                parser.set_statistic("All Grid Level 02 Calculation Time", performance_metrics[metric], "Second")

            metric = "RebuildHierarchy"
            if metric in performance_metrics:
                parser.set_statistic("Grid Hierarchy Rebuilding Time", performance_metrics[metric], "Second")

            metric = "SetBoundaryConditions"
            if metric in performance_metrics:
                parser.set_statistic("Boundary Conditions Setting Time", performance_metrics[metric], "Second")

            metric = "SolveForPotential"
            if metric in performance_metrics:
                parser.set_statistic("Poisson Equation Solving Time", performance_metrics[metric], "Second")

            metric = "SolveHydroEquations"
            if metric in performance_metrics:
                parser.set_statistic("Hydro Equations Solving Time", performance_metrics[metric], "Second")

            metric = "Total"
            if metric in performance_metrics:
                parser.set_statistic("Total Time Spent in Cycles", performance_metrics[metric], "Second")

        j += 1
    parser.successfulRun = successful_run

    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return complete XML otherwise return None
    return parser.get_xml()
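
A sketch of the per-cycle timer accumulation above: values for the same timer name are summed across all Cycle_Number blocks (made-up data):

performance_metrics = {}
for name, value in [("Total", 12.5), ("Level_00", 7.1), ("Total", 11.9)]:
    performance_metrics[name] = performance_metrics.get(name, 0.0) + value
print(performance_metrics["Total"])  # 24.4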
Example #16
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    parser = AppKerOutputParser(
        name='charmm',
        version=1,
        description="CHARMM: Chemistry at Harvard Macromolecular Mechanics",
        url='http://www.charmm.org',
        measurement_name='CHARMM')
    # set obligatory parameters and statistics
    # set common parameters and statistics
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Input:Number of Angles')
    parser.add_must_have_parameter('Input:Number of Atoms')
    parser.add_must_have_parameter('Input:Number of Bonds')
    parser.add_must_have_parameter('Input:Number of Dihedrals')
    parser.add_must_have_parameter('Input:Number of Steps')
    parser.add_must_have_parameter('Input:Timestep')

    parser.add_must_have_statistic('Molecular Dynamics Simulation Performance')
    parser.add_must_have_statistic('Time Spent in External Energy Calculation')
    parser.add_must_have_statistic('Time Spent in Integration')
    parser.add_must_have_statistic('Time Spent in Internal Energy Calculation')
    parser.add_must_have_statistic('Time Spent in Non-Bond List Generation')
    parser.add_must_have_statistic(
        'Time Spent in Waiting (Load Unbalance-ness)')
    parser.add_must_have_statistic('User Time')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo,
                                         resource_appker_vars)

    # read output
    lines = []
    if os.path.isfile(appstdout):
        fin = open(appstdout, "rt")
        lines = fin.readlines()
        fin.close()

    # process the output
    parser.successfulRun = False
    wall_clock_time = 0.0
    num_steps = 0
    step_size = 0.0
    time_breakdown_columns = None
    num_atoms = 0
    num_bonds = 0
    num_angles = 0
    num_dihedrals = 0

    j = 0
    while j < len(lines):

        m0 = re.search(r'\s+Chemistry at HARvard Macromolecular Mechanics',
                       lines[j])
        m1 = re.search(r'\sVersion\s+([\da-zA-Z]+)',
                       lines[j + 1]) if j + 1 < len(lines) else None
        if m0 and m1:
            parser.set_parameter("App:Version", m1.group(1).strip())

        if re.search(r'Summary of the structure file counters', lines[j]):
            j += 1
            for k in range(256):
                if j >= len(lines) or re.search(r'CHARMM>', lines[j]):
                    break

                m = re.search(r'Number of atoms\s+=\s+(\d+)', lines[j])
                if m:
                    num_atoms += int(m.group(1).strip())

                m = re.search(r'Number of bonds\s+=\s+(\d+)', lines[j])
                if m:
                    num_bonds += int(m.group(1).strip())

                m = re.search(r'Number of angles\s+=\s+(\d+)', lines[j])
                if m:
                    num_angles += int(m.group(1).strip())

                m = re.search(r'Number of dihedrals\s+=\s+(\d+)', lines[j])
                if m:
                    num_dihedrals += int(m.group(1).strip())

                j += 1

        if re.search(r'<MAKGRP> found', lines[j]):
            j += 1
            for k in range(256):
                if j >= len(lines) or re.search(
                        r'NUMBER OF DEGREES OF FREEDOM', lines[j]):
                    break

                m = re.search(r'NSTEP\s+=\s+(\d+)', lines[j])
                if m:
                    num_steps = int(m.group(1).strip())
                    parser.set_parameter("Input:Number of Steps", num_steps)

                if re.search(r'TIME STEP\s+=', lines[j]):
                    m = re.search(r'([\d\-Ee.]+)\s+PS', lines[j])
                    if m:
                        # convert picoseconds to femtoseconds
                        step_size = 1000.0 * float(m.group(1).strip())
                        parser.set_parameter("Input:Timestep",
                                             step_size * 1e-15,
                                             "Second per Step")
                j += 1

        if re.search(r'NORMAL TERMINATION BY NORMAL STOP', lines[j]):
            parser.successfulRun = True

        if re.search(r'JOB ACCOUNTING INFORMATION', lines[j]):
            parser.successfulRun = True

            j += 1
            for k in range(256):
                if j > len(lines) - 1:
                    break
                m = re.search(r'ELAPSED TIME:\s*([\d.]+)\s*MINUTES', lines[j])
                if m:
                    wall_clock_time = 60.0 * float(m.group(1).strip())
                    parser.set_statistic("Wall Clock Time", wall_clock_time,
                                         "Second")

                m = re.search(r'CPU TIME:\s*([\d.]+)\s*MINUTES', lines[j])
                if m:
                    parser.set_statistic("User Time",
                                         60.0 * float(m.group(1).strip()),
                                         "Second")

                m = re.search(r'ELAPSED TIME:\s*([\d.]+)\s*SECONDS', lines[j])
                if m:
                    wall_clock_time = float(m.group(1).strip())
                    parser.set_statistic("Wall Clock Time", wall_clock_time,
                                         "Second")

                m = re.search(r'CPU TIME:\s*([\d.]+)\s*SECONDS', lines[j])
                if m:
                    parser.set_statistic("User Time",
                                         m.group(1).strip(), "Second")

                j += 1
            if j > len(lines) - 1:
                break

        if re.search(r'Parallel load balance \(sec', lines[j]):
            j += 1
            # grab the column headers from the output, e.g.
            #
            # Parallel load balance (sec.):
            # Node Eext      Eint   Wait    Comm    List   Integ   Total
            #   0   205.5     6.4     1.2    31.2    23.2     2.8   270.4
            #   1   205.2     7.3     1.1    31.2    23.3     3.2   271.2
            #   2   205.2     7.7     0.6    32.3    23.3     3.2   272.3
            #   3   205.2     7.8     0.6    32.1    23.3     3.3   272.3
            # PARALLEL> Average timing for all nodes:
            #   4   205.3     7.3     0.9    31.7    23.3     3.1   271.6
            time_breakdown_columns = lines[j].strip().split()

        if re.search(r'PARALLEL>\s*Average timing for all nodes',
                     lines[j]) and time_breakdown_columns:
            j += 1
            time_breakdown = lines[j].strip().split()
            if len(time_breakdown_columns) == len(time_breakdown):
                for k in range(len(time_breakdown)):
                    if time_breakdown_columns[k] == "Eext":
                        parser.set_statistic(
                            "Time Spent in External Energy Calculation",
                            time_breakdown[k], "Second")
                    if time_breakdown_columns[k] == "Eint":
                        parser.set_statistic(
                            "Time Spent in Internal Energy Calculation",
                            time_breakdown[k], "Second")
                    if time_breakdown_columns[k] == "Wait":
                        parser.set_statistic(
                            "Time Spent in Waiting (Load Unbalance-ness)",
                            time_breakdown[k], "Second")
                    if time_breakdown_columns[k] == "List":
                        parser.set_statistic(
                            "Time Spent in Non-Bond List Generation",
                            time_breakdown[k], "Second")
                    if time_breakdown_columns[k] == "Integ":
                        parser.set_statistic("Time Spent in Integration",
                                             time_breakdown[k], "Second")

        j += 1
    if num_atoms > 0:
        parser.set_parameter("Input:Number of Atoms", num_atoms)
    if num_bonds > 0:
        parser.set_parameter("Input:Number of Bonds", num_bonds)
    if num_angles > 0:
        parser.set_parameter("Input:Number of Angles", num_angles)
    if num_dihedrals > 0:
        parser.set_parameter("Input:Number of Dihedrals", num_dihedrals)

    if wall_clock_time > 0.0 and num_steps > 0 and step_size > 0.0:
        # step_size is in femtoseconds and wall_clock_time in seconds;
        # 1e-6 * 1e-9 = 1e-15 converts femtoseconds to seconds, so the
        # statistic is simulated seconds per wall-clock day
        parser.set_statistic("Molecular Dynamics Simulation Performance",
                             (1e-6 * step_size * num_steps) /
                             (wall_clock_time / 86400.0) * 1e-9,
                             "Second per Day")

    if __name__ == "__main__":
        # output for testing purposes
        print("parsing complete:", parser.parsing_complete())
        parser.print_params_stats_as_must_have()
        print(parser.get_xml())

    # return the complete XML, otherwise return None
    return parser.get_xml()
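
As a sanity check on the unit conversion above: step_size is in femtoseconds, so 1e-6 * step_size * num_steps is the simulated time in nanoseconds, and multiplying the resulting ns/day rate by 1e-9 gives the reported simulated seconds per day. A minimal standalone sketch with hypothetical sample values (nothing below comes from a real run):

# Hypothetical sample values: 2 fs timestep, 10,000 steps, 600 s wall time.
step_size = 2.0          # femtoseconds per step
num_steps = 10000
wall_clock_time = 600.0  # seconds

simulated_ns = 1e-6 * step_size * num_steps              # fs -> ns
ns_per_day = simulated_ns / (wall_clock_time / 86400.0)  # simulated ns per day
seconds_per_day = ns_per_day * 1e-9                      # ns -> s, as reported
print("%.1f ns/day -> %.3e simulated seconds per day" % (ns_per_day, seconds_per_day))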
Example #17
def process_appker_output(appstdout=None,
                          stdout=None,
                          stderr=None,
                          geninfo=None,
                          resource_appker_vars=None):
    # set App Kernel Description
    if (resource_appker_vars is not None and 'app' in resource_appker_vars
            and 'name' in resource_appker_vars['app']):
        akname = resource_appker_vars['app']['name']
    else:
        akname = 'unknown'

    # initiate parser
    parser = AppKerOutputParser(name=akname)
    # set obligatory parameters and statistics
    # set common parameters and statistics (App:ExeBinSignature and RunEnv:Nodes)
    parser.add_common_must_have_params_and_stats()
    # set app kernel custom sets
    parser.add_must_have_parameter('App:ExeBinSignature')
    parser.add_must_have_parameter('App:Version')
    parser.add_must_have_parameter('Number of Darts Throws')
    parser.add_must_have_parameter('Number of Rounds')
    parser.add_must_have_parameter('RunEnv:Nodes')

    parser.add_must_have_statistic('Darts Throws per Second')
    parser.add_must_have_statistic('Time for PI Calculation')
    parser.add_must_have_statistic('Wall Clock Time')

    # parse common parameters and statistics
    parser.parse_common_params_and_stats(appstdout, stdout, stderr, geninfo)

    if hasattr(parser, 'appKerWallClockTime'):
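        # appKerWallClockTime is a datetime.timedelta (see total_seconds()
        # below), presumably set by parse_common_params_and_stats when the
        # batch log carries timing information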
        parser.set_statistic("Wall Clock Time",
                             parser.appKerWallClockTime.total_seconds(),
                             "Second")

    # Here can be custom output parsing
    # read output
    lines = []
    if os.path.isfile(appstdout):
        with open(appstdout, "rt") as fin:
            lines = fin.readlines()

    # process the output
    parser.successfulRun = False
    j = 0
    while j < len(lines):
        m = re.search(r'version:\s+(.+)', lines[j])
        if m:
            parser.set_parameter('App:Version', m.group(1))

        m = re.search(r'number of throws at dartboard:\s+(\d+)', lines[j])
        if m:
            parser.set_parameter('Number of Darts Throws', m.group(1))

        # note: 'dartz' presumably mirrors the benchmark's literal output
        # spelling, so the regex keeps it as-is to match
        m = re.search(r'number of rounds for dartz throwing\s+(\d+)', lines[j])
        if m:
            parser.set_parameter('Number of Rounds', m.group(1))

        m = re.search(r'Time for PI calculation:\s+([0-9.]+)', lines[j])
        if m:
            parser.set_statistic("Time for PI Calculation", m.group(1),
                                 "Seconds")

        m = re.search(r'Giga Darts Throws per Second \(GDaPS\):\s+([0-9.]+)',
                      lines[j])
        if m:
            parser.set_statistic("Darts Throws per Second", m.group(1),
                                 "GDaPS")

        # the bare presence of this line doubles as the success marker
        m = re.search(r'Giga Darts Throws per Second', lines[j])
        if m:
            parser.successfulRun = True

        j += 1

    if __name__ == "__main__":
        # output for testing purposes
        print("Parsing complete:", parser.parsing_complete(verbose=True))
        print("The following statistics and parameters can be set as obligatory:")
        parser.print_params_stats_as_must_have()
        print("\nResulting XML:")
        print(parser.get_xml())

    # return the complete XML, otherwise return None
    return parser.get_xml()
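
To exercise this parser outside of its __main__ hook, point it at a saved app kernel stdout capture. A minimal sketch; the file name is hypothetical, and it assumes the function lives in a module that already imports os, re, and AppKerOutputParser as in the examples above:

# Hypothetical driver; "pi_throws.out" stands in for a real capture.
xml = process_appker_output(appstdout="pi_throws.out")
if xml is None:
    # per the comment above, get_xml() is expected to return None when a
    # must-have parameter or statistic was not found
    print("parsing incomplete")
else:
    print(xml)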