Example #1
0
def main(main_args):
    """Main entrypoint: run the Antigen tool and copy any issues it finds.

    Resolves the corerun and Antigen binaries, runs Antigen against a
    temporary output folder, then copies the resulting issue files to the
    upload directory.

    Args:
        main_args ([type]): Arguments to the script
    """
    coreclr_args = setup_args(main_args)

    core_root = coreclr_args.core_root
    antigen_directory = coreclr_args.antigen_directory
    output_directory = coreclr_args.output_directory
    tag_name = "{}-{}".format(coreclr_args.run_configuration, coreclr_args.partition)
    run_duration = 120  # Run for 2 hours

    corerun_binary = os.path.join(core_root, "corerun")
    tool_binary = os.path.join(antigen_directory, "Antigen")
    if is_windows:
        # Windows binaries carry an .exe suffix.
        corerun_binary = corerun_binary + ".exe"
        tool_binary = tool_binary + ".exe"

    # Run tool such that issues are placed in a temp folder
    with TempDir() as temp_location:
        tool_invocation = [tool_binary,
                           "-c", corerun_binary,
                           "-o", temp_location,
                           "-d", str(run_duration)]
        run_command(tool_invocation, _exit_on_fail=True, _long_running=True)

        # Copy issues for upload
        print("Copying issues to " + output_directory)
        copy_issues(temp_location, output_directory, tag_name)
Example #2
0
def build_and_run(coreclr_args):
    """Run perf scenarios under crank and collect data with SPMI.

    Installs dotnet and crank into a temp location, clones the aspnet
    benchmarks repo, then runs each (config, scenario) pair once per
    runtime-option set with the SuperPMI shim collector active. The
    downloaded .mc files are merged, cleaned of replay failures, and
    indexed into a single .mch collection.

    Args:
        coreclr_args (CoreClrArguments): Arguments used to drive the collection
    """
    source_directory = coreclr_args.source_directory
    target_arch = coreclr_args.arch
    target_os = coreclr_args.host_os

    # Checked build supplies the jit + shim; Release supplies coreclr + corelib.
    checked_root = path.join(source_directory, "artifacts", "bin", "coreclr", target_os + "." + coreclr_args.arch + ".Checked")
    release_root = path.join(source_directory, "artifacts", "bin", "coreclr", target_os + "." + coreclr_args.arch + ".Release")

    # We'll use the repo script to install dotnet
    dotnet_install_script_name = "dotnet-install.cmd" if is_windows else "dotnet-install.sh"
    dotnet_install_script_path = path.join(source_directory, "eng", "common", dotnet_install_script_name)

    with TempDir(skip_cleanup=True) as temp_location:

        print("Executing in " + temp_location)

        # install dotnet 6.0
        run_command([dotnet_install_script_path, "-Version", "6.0.4"], temp_location, _exit_on_fail=True)
        os.environ['DOTNET_MULTILEVEL_LOOKUP'] = '0'
        os.environ['DOTNET_SKIP_FIRST_TIME_EXPERIENCE'] = '1'
        dotnet_path = path.join(source_directory, ".dotnet")
        dotnet_exe = path.join(dotnet_path, "dotnet.exe") if is_windows else path.join(dotnet_path, "dotnet")
        run_command([dotnet_exe, "--info"], temp_location, _exit_on_fail=True)
        os.environ['DOTNET_ROOT'] = dotnet_path

        # install crank as local tool
        run_command(
            [dotnet_exe, "tool", "install", "Microsoft.Crank.Controller", "--version", "0.2.0-*", "--tool-path", temp_location], _exit_on_fail=True)

        # Ideally just do a sparse clone, but this doesn't work locally:
        #   git clone --filter=blob:none --no-checkout https://github.com/aspnet/benchmarks
        #   cd benchmarks
        #   git sparse-checkout init --cone
        #   git sparse-checkout set scenarios
        # Could probably just pass a URL and avoid this.
        run_command(
            ["git", "clone", "--quiet", "--depth", "1", "https://github.com/aspnet/benchmarks"], temp_location, _exit_on_fail=True)

        crank_app = path.join(temp_location, "crank")
        mcs_path = determine_mcs_tool_path(coreclr_args)
        superpmi_path = determine_superpmi_tool_path(coreclr_args)

        # TODO: add grpc/signalr, perhaps
        configname_scenario_list = [("platform", "plaintext"),
                                    ("json", "json"),
                                    ("plaintext", "mvc"),
                                    ("database", "fortunes_dapper"),
                                    ("database", "fortunes_ef_mvc_https"),
                                    ("database", "updates"),
                                    ("proxy", "proxy-yarp"),
                                    ("staticfiles", "static"),
                                    ("websocket", "websocket"),
                                    ("orchard", "about-sqlite")]

        # NOTE: trailing commas are required to get one-element tuples.
        runtime_options_list = [("Dummy=0",), ("TieredCompilation=0",), ("TieredPGO=1",), ("TieredPGO=1", "ReadyToRun=0"),
            ("ReadyToRun=0", "OSR_HitLimit=0", "TC_OnStackReplacement_InitialCounter=10"),
            ("TieredPGO=1", "ReadyToRun=0", "OSR_HitLimit=0", "TC_OnStackReplacement_InitialCounter=10")]

        mch_file = path.join(coreclr_args.output_mch_path, "aspnet.run." + target_os + "." + target_arch + ".checked.mch")
        benchmark_machine = determine_benchmark_machine(coreclr_args)

        jitname = determine_native_name(coreclr_args, "clrjit", target_os)
        coreclrname = determine_native_name(coreclr_args, "coreclr", target_os)
        spminame = determine_native_name(coreclr_args, "superpmi-shim-collector", target_os)
        corelibname = "System.Private.CoreLib.dll"

        # jitpath is relative ("./clrjit...") because the shim resolves it
        # in the remote benchmark working directory, not locally.
        jitpath = path.join(".", jitname)
        jitlib = path.join(checked_root, jitname)
        coreclr = path.join(release_root, coreclrname)
        corelib = path.join(release_root, corelibname)
        spmilib = path.join(checked_root, spminame)

        for (config_name, scenario) in configname_scenario_list:
            config_yml = config_name + ".benchmarks.yml"
            config_file = path.join(temp_location, "benchmarks", "scenarios", config_yml)

            crank_arguments = ["--config", config_file,
                               "--profile", benchmark_machine,
                               "--scenario", scenario,
                               "--application.framework", "net7.0",
                               "--application.channel", "edge",
                               "--application.sdkVersion", "latest",
                               # Route jit loading through the SPMI collector shim.
                               "--application.environmentVariables", "COMPlus_JitName=" + spminame,
                               "--application.environmentVariables", "SuperPMIShimLogPath=.",
                               "--application.environmentVariables", "SuperPMIShimPath=" + jitpath,
                               "--application.environmentVariables", "COMPlus_EnableExtraSuperPmiQueries=1",
                               "--application.options.downloadFiles", "*.mc",
                               "--application.options.displayOutput", "true",
                               # Enable these to fetch crash dumps when debugging:
                               # "--application.options.dumpType", "full",
                               # "--application.options.fetch", "true",
                               "--application.options.outputFiles", spmilib,
                               "--application.options.outputFiles", jitlib,
                               "--application.options.outputFiles", coreclr,
                               "--application.options.outputFiles", corelib]

            for runtime_options in runtime_options_list:
                runtime_arguments = []
                for runtime_option in runtime_options:
                    runtime_arguments.append("--application.environmentVariables")
                    runtime_arguments.append("COMPlus_" + runtime_option)

                print("")
                print("================================")
                print("Config: " + config_name + " scenario: " + scenario + " options: " + " ".join(runtime_options))
                print("================================")
                print("")

                description = ["--description", config_name + "-" + scenario + "-" + "-".join(runtime_options)]
                subprocess.run([crank_app] + crank_arguments + description + runtime_arguments, cwd=temp_location)

        # merge the downloaded .mc files into a single collection
        command = [mcs_path, "-merge", "temp.mch", "*.mc", "-dedup", "-thin"]
        run_command(command, temp_location)

        # clean: replay the collection and record failing contexts
        command = [superpmi_path, "-v", "ewmi", "-f", "fail.mcl", jitlib, "temp.mch"]
        run_command(command, temp_location)

        # strip out the failing contexts, if any
        if is_nonzero_length_file("fail.mcl"):
            print("Replay had failures, cleaning...")
            # NOTE(review): the original computed path.join(output_mch_path, "fail.mcl")
            # but never used it — presumably fail.mcl was meant to be copied to the
            # output path as well; confirm before restoring that behavior.
            command = [mcs_path, "-strip", "fail.mcl", "temp.mch", mch_file]
            run_command(command, temp_location)
        else:
            print("Replay was clean...")
            shutil.copy2("temp.mch", mch_file)

        # index: build the table of contents
        command = [mcs_path, "-toc", mch_file]
        run_command(command, temp_location)

        # overall summary
        print("Merged summary for " + mch_file)
        command = [mcs_path, "-jitflags", mch_file]
        run_command(command, temp_location)
Example #3
0
def main(main_args):
    """Main entrypoint: prepare Helix payload directories for Antigen runs.

    Copies the coreclr scripts and CORE_ROOT into the correlation payload,
    clones and publishes the Antigen tool into the payload, creates the
    work item directory, and sets the pipeline variables Helix consumes.

    Args:
        main_args ([type]): Arguments to the script
    """
    coreclr_args = setup_args(main_args)
    arch_name = coreclr_args.arch
    os_name = "win" if coreclr_args.platform.lower() == "windows" else "linux"
    run_configuration = "{}-{}".format(os_name, arch_name)
    source_directory = coreclr_args.source_directory

    # CorrelationPayload directories
    correlation_payload_directory = path.join(source_directory, "payload")
    scripts_src_directory = path.join(source_directory, "src", "coreclr", "scripts")
    coreroot_dst_directory = path.join(correlation_payload_directory, "CoreRoot")
    antigen_dst_directory = path.join(correlation_payload_directory, "exploratory")

    helix_source_prefix = "official"
    creator = ""

    # Pick the Helix queue matching the target OS/arch.
    if is_windows:
        helix_queue = "Windows.10.Arm64" if arch_name == "arm64" else "Windows.10.Amd64.X86"
    elif arch_name == "arm":
        helix_queue = "(Ubuntu.1804.Arm32)[email protected]/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7-bfcd90a-20200121150440"
    elif arch_name == "arm64":
        helix_queue = "(Ubuntu.1804.Arm64)[email protected]/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8-20210531091519-97d8652"
    else:
        helix_queue = "Ubuntu.1804.Amd64"

    # create exploratory directory (python scripts only)
    print('Copying {} -> {}'.format(scripts_src_directory, coreroot_dst_directory))
    copy_directory(scripts_src_directory, coreroot_dst_directory, match_func=lambda file_path: file_path.endswith(".py"))

    # Note: the predicate parameters were renamed so they no longer shadow
    # the `path` (os.path) module alias used elsewhere in this function.
    if is_windows:
        def acceptable_copy(file_path):
            """Accept scripts, managed/native binaries and config files."""
            return file_path.endswith((".py", ".dll", ".exe", ".json"))
    else:
        def acceptable_copy(file_path):
            """Accept scripts/binaries/config files, plus extensionless files,
            which is how executable file names look on non-Windows."""
            return ("." not in os.path.basename(file_path)) or file_path.endswith((".py", ".dll", ".so", ".json", ".a"))

    # copy CORE_ROOT
    print('Copying {} -> {}'.format(coreclr_args.core_root_directory, coreroot_dst_directory))
    copy_directory(coreclr_args.core_root_directory, coreroot_dst_directory, match_func=acceptable_copy)

    try:
        with TempDir() as tool_code_directory:
            # clone the tool
            run_command(
                ["git", "clone", "--quiet", "--depth", "1", "https://github.com/kunalspathak/Antigen.git", tool_code_directory])

            antigen_bin_directory = path.join(tool_code_directory, "bin", "Release", "net5.0")

            # build the tool with the repo-local dotnet wrapper
            with ChangeDir(tool_code_directory):
                dotnet_cmd = os.path.join(source_directory, "dotnet.cmd" if is_windows else "dotnet.sh")
                run_command([dotnet_cmd, "publish", "-c", "Release", "--self-contained", "-r", run_configuration, "-o", antigen_bin_directory], _exit_on_fail=True)

            if not os.path.exists(path.join(antigen_bin_directory, "Antigen.dll")):
                raise FileNotFoundError("Antigen.dll not present at {}".format(antigen_bin_directory))

            # copy antigen tool
            print('Copying {} -> {}'.format(antigen_bin_directory, antigen_dst_directory))
            copy_directory(antigen_bin_directory, antigen_dst_directory, match_func=acceptable_copy)
    except PermissionError as pe:
        # Fix: print() does not %-interpolate its arguments, so the original
        # "%s" form printed the message and the exception as separate values.
        print("Skipping file. Got error: {}".format(pe))

    # create foo.txt in work_item directories (Helix requires a non-empty payload)
    workitem_directory = path.join(source_directory, "workitem")
    os.mkdir(workitem_directory)
    foo_txt = os.path.join(workitem_directory, "foo.txt")
    with open(foo_txt, "w") as foo_txt_file:
        foo_txt_file.write("hello world!")

    # Set variables
    print('Setting pipeline variables:')
    set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory)
    set_pipeline_variable("WorkItemDirectory", workitem_directory)
    set_pipeline_variable("RunConfiguration", run_configuration)
    set_pipeline_variable("Creator", creator)
    set_pipeline_variable("Queue", helix_queue)
    set_pipeline_variable("HelixSourcePrefix", helix_source_prefix)
Example #4
0
def build_and_run(coreclr_args, output_mch_name):
    """Build the microbenchmarks and run them under "superpmi collect"

    Restores and builds MicroBenchmarks.csproj using the dotnet bundled in
    the performance repo, writes a per-OS wrapper script that runs the
    benchmarks with ColdStart settings, and hands that script to
    superpmi.py's "collect" command.

    Args:
        coreclr_args (CoreClrArguments): Arguments use to drive
        output_mch_name (string): Name of output mch file name
    """
    arch = coreclr_args.arch
    python_path = sys.executable
    core_root = coreclr_args.core_root
    superpmi_directory = coreclr_args.superpmi_directory
    performance_directory = coreclr_args.performance_directory
    log_file = coreclr_args.log_file
    partition_count = coreclr_args.partition_count
    partition_index = coreclr_args.partition_index
    dotnet_directory = os.path.join(performance_directory, "tools", "dotnet",
                                    arch)
    dotnet_exe = os.path.join(dotnet_directory, "dotnet")

    artifacts_directory = os.path.join(performance_directory, "artifacts")
    artifacts_packages_directory = os.path.join(artifacts_directory,
                                                "packages")
    project_file = path.join(performance_directory, "src", "benchmarks",
                             "micro", "MicroBenchmarks.csproj")
    benchmarks_dll = path.join(artifacts_directory, "MicroBenchmarks.dll")

    # Env-var expansion syntax differs per shell: %VAR% for cmd, $VAR for bash.
    # The script below re-exports COMPlus_JitName as JitName so the benchmark
    # command can reference it lazily.
    if is_windows:
        shim_name = "%JitName%"
        corerun_exe = "CoreRun.exe"
        script_name = "run_microbenchmarks.bat"
    else:
        shim_name = "$JitName"
        corerun_exe = "corerun"
        script_name = "run_microbenchmarks.sh"

    # The dotnet binary from the payload may not carry the executable bit.
    make_executable(dotnet_exe)

    run_command([
        dotnet_exe, "restore", project_file, "--packages",
        artifacts_packages_directory
    ],
                _exit_on_fail=True)

    run_command([
        dotnet_exe, "build", project_file, "--configuration", "Release",
        "--framework", "net6.0", "--no-restore", "/p:NuGetPackageRoot=" +
        artifacts_packages_directory, "-o", artifacts_directory
    ],
                _exit_on_fail=True)

    # Disable ReadyToRun so we always JIT R2R methods and collect them
    collection_command = f"{dotnet_exe} {benchmarks_dll}  --filter \"*\" --corerun {path.join(core_root, corerun_exe)} --partition-count {partition_count} " \
                         f"--partition-index {partition_index} --envVars COMPlus_JitName:{shim_name} " \
                         " COMPlus_ZapDisable:1  COMPlus_ReadyToRun:0 " \
                         "--iterationCount 1 --warmupCount 0 --invocationCount 1 --unrollFactor 1 --strategy ColdStart"

    # Generate the execution script in Temp location
    with TempDir() as temp_location:
        # Rebase the bare script file name into the temp directory.
        script_name = path.join(temp_location, script_name)

        contents = []
        # Unset the JitName so dotnet process will not fail
        if is_windows:
            contents.append("set JitName=%COMPlus_JitName%")
            contents.append("set COMPlus_JitName=")
        else:
            contents.append("#!/bin/bash")
            contents.append("export JitName=$COMPlus_JitName")
            contents.append("unset COMPlus_JitName")
        contents.append(f"pushd {performance_directory}")
        contents.append(collection_command)

        with open(script_name, "w") as collection_script:
            collection_script.write(os.linesep.join(contents))

        # Echo the generated script for CI-log debuggability.
        print()
        print(f"{script_name} contents:")
        print("******************************************")
        print(os.linesep.join(contents))
        print("******************************************")

        make_executable(script_name)

        # superpmi.py collect drives the script and produces output_mch_name.
        run_command([
            python_path,
            path.join(superpmi_directory, "superpmi.py"), "collect",
            "-core_root", core_root, "-output_mch_path", output_mch_name,
            "-log_file", log_file, "-log_level", "debug", script_name
        ],
                    _exit_on_fail=True)
Example #5
0
def copy_issues(issues_directory, upload_directory, tag_name):
    """Copy issue files into upload_directory as a single zip archive.

    From each folder under issues_directory, copies summary.txt plus the 5
    smallest *.g.cs repro files into a staging directory, writes a combined
    issues-summary text file, then zips the staging directory and copies
    both the zip and the summary into upload_directory.

    Args:
        issues_directory (string): Issues directory
        upload_directory (string): Upload directory
        tag_name (string): Tag name for zip file
    """
    # Create upload_directory
    if not os.path.isdir(upload_directory):
        os.makedirs(upload_directory)

    # Create temp directory to copy all issues to upload. We don't want to create a sub-folder
    # inside issues_directory because that will also get included twice.
    with TempDir() as prep_directory:

        def sorter_by_size(pair):
            """Sort a list of (file_name, file_size) tuples in ascending
            order of file_size, in place, and return it.

            Args:
                pair ([(string, int)]): List of tuples of file_name, file_size
            """
            pair.sort(key=lambda entry: entry[1], reverse=False)
            return pair

        summary_of_summary = []
        for file_path, dirs, files in walk(issues_directory, topdown=True):
            filename_with_size = []
            for name in files:
                # Only generated C# repro files are of interest.
                if not name.lower().endswith(".g.cs"):
                    continue

                curr_file_path = path.join(file_path, name)
                size = getsize(curr_file_path)
                filename_with_size.append((curr_file_path, size))

            # Folders with no repro files contribute nothing (not even summary.txt).
            if len(filename_with_size) == 0:
                continue
            summary_file = path.join(file_path, "summary.txt")
            summary_of_summary.append("**** " + file_path)
            with open(summary_file, 'r') as sf:
                summary_of_summary.append(sf.read())
            # Size 0 guarantees summary.txt sorts first and is always copied.
            filename_with_size.append((summary_file, 0))

            # Take summary.txt plus the 5 smallest repro files from each bucket
            # (hence the [:6] slice).
            sorted_files = [
                f[0] for f in sorter_by_size(filename_with_size)[:6]
            ]
            print('### Copying below files from {0} to {1}:'.format(
                issues_directory, prep_directory))
            print('')
            print(os.linesep.join(sorted_files))
            for src_file in sorted_files:
                # Mirror the source sub-folder layout under prep_directory.
                dst_file = src_file.replace(issues_directory, prep_directory)
                dst_directory = path.dirname(dst_file)
                if not os.path.exists(dst_directory):
                    os.makedirs(dst_directory)
                try:
                    shutil.copy2(src_file, dst_file)
                except PermissionError as pe_error:
                    print('Ignoring PermissionError: {0}'.format(pe_error))

        issues_summary_file_name = "issues-summary-{}.txt".format(tag_name)
        print("Creating {} in {}".format(issues_summary_file_name,
                                         upload_directory))
        issues_summary_file = os.path.join(upload_directory,
                                           issues_summary_file_name)
        with open(issues_summary_file, 'w') as sf:
            sf.write(os.linesep.join(summary_of_summary))

        # Also copy the issues-summary inside zip folder
        dst_issue_summary_file = os.path.join(prep_directory,
                                              issues_summary_file_name)
        try:
            shutil.copy2(issues_summary_file, dst_issue_summary_file)
        except PermissionError as pe_error:
            print('Ignoring PermissionError: {0}'.format(pe_error))

        # Zip compress the files we will upload
        zip_path = os.path.join(prep_directory, "AllIssues-" + tag_name)
        print("Creating archive: " + zip_path)
        shutil.make_archive(zip_path, 'zip', prep_directory)

        # make_archive appends the format suffix to the base name it was given.
        zip_path += ".zip"
        dst_zip_path = os.path.join(upload_directory,
                                    "AllIssues-" + tag_name + ".zip")
        print("Copying {} to {}".format(zip_path, dst_zip_path))
        try:
            shutil.copy2(zip_path, dst_zip_path)
        except PermissionError as pe_error:
            print('Ignoring PermissionError: {0}'.format(pe_error))