def main(main_args): """Main entrypoint Args: main_args ([type]): Arguments to the script """ coreclr_args = setup_args(main_args) arch = coreclr_args.arch source_directory = coreclr_args.source_directory product_directory = coreclr_args.product_directory # CorrelationPayload directories correlation_payload_directory = os.path.join(source_directory, "payload") superpmi_src_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts') helix_source_prefix = "official" creator = "" # Copy *.py to CorrelationPayload print('Copying {} -> {}'.format(superpmi_src_directory, correlation_payload_directory)) copy_directory(superpmi_src_directory, correlation_payload_directory, verbose_output=True, match_func=lambda path: any( path.endswith(extension) for extension in [".py"])) # Copy clrjit*_arch.dll binaries to CorrelationPayload print('Copying binaries {} -> {}'.format(product_directory, correlation_payload_directory)) copy_directory(product_directory, correlation_payload_directory, verbose_output=True, match_func=match_correlation_files) # Set variables print('Setting pipeline variables:') set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory) set_pipeline_variable("Architecture", arch) set_pipeline_variable("Creator", creator) set_pipeline_variable("HelixSourcePrefix", helix_source_prefix)
def main(main_args): """ Prepare the Helix data for SuperPMI asmdiffs Azure DevOps pipeline. The Helix correlation payload directory is created and populated as follows: <source_directory>\payload -- the correlation payload directory -- contains the *.py scripts from <source_directory>\src\coreclr\scripts -- contains superpmi.exe, mcs.exe from the target-specific build <source_directory>\payload\base -- contains the baseline JITs <source_directory>\payload\diff -- contains the diff JITs <source_directory>\payload\jit-analyze -- contains the self-contained jit-analyze build (from dotnet/jitutils) <source_directory>\payload\git -- contains a Portable ("xcopy installable") `git` tool, downloaded from: https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/git/Git-2.32.0-64-bit.zip This is needed by jit-analyze to do `git diff` on the generated asm. The `<source_directory>\payload\git\cmd` directory is added to the PATH. NOTE: this only runs on Windows. Then, AzDO pipeline variables are set. Args: main_args ([type]): Arguments to the script Returns: 0 on success, otherwise a failure code """ # Set up logging. logger = logging.getLogger() logger.setLevel(logging.INFO) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setLevel(logging.INFO) logger.addHandler(stream_handler) coreclr_args = setup_args(main_args) arch = coreclr_args.arch source_directory = coreclr_args.source_directory product_directory = coreclr_args.product_directory python_path = sys.executable # CorrelationPayload directories correlation_payload_directory = os.path.join(source_directory, "payload") superpmi_scripts_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts') base_jit_directory = os.path.join(correlation_payload_directory, "base") diff_jit_directory = os.path.join(correlation_payload_directory, "diff") jit_analyze_build_directory = os.path.join(correlation_payload_directory, "jit-analyze") git_directory = os.path.join(correlation_payload_directory, "git") ######## Get the portable `git` package git_url = "https://netcorenativeassets.blob.core.windows.net/resource-packages/external/windows/git/Git-2.32.0-64-bit.zip" print('Downloading {} -> {}'.format(git_url, git_directory)) urls = [ git_url ] # There are too many files to be verbose in the download and copy. download_files(urls, git_directory, verbose=False, display_progress=False) git_exe_tool = os.path.join(git_directory, "cmd", "git.exe") if not os.path.isfile(git_exe_tool): print('Error: `git` not found at {}'.format(git_exe_tool)) return 1 ######## Get SuperPMI python scripts # Copy *.py to CorrelationPayload print('Copying {} -> {}'.format(superpmi_scripts_directory, correlation_payload_directory)) copy_directory(superpmi_scripts_directory, correlation_payload_directory, verbose_copy=True, match_func=lambda path: any(path.endswith(extension) for extension in [".py"])) ######## Get baseline JIT # Figure out which baseline JIT to use, and download it. if not os.path.exists(base_jit_directory): os.makedirs(base_jit_directory) print("Fetching history of `main` branch so we can find the baseline JIT") run_command(["git", "fetch", "--depth=500", "origin", "main"], source_directory, _exit_on_fail=True) # Note: we only support downloading Windows versions of the JIT currently. To support downloading # non-Windows JITs on a Windows machine, pass `-host_os <os>` to jitrollingbuild.py. 
print("Running jitrollingbuild.py download to get baseline JIT") jit_rolling_build_script = os.path.join(superpmi_scripts_directory, "jitrollingbuild.py") _, _, return_code = run_command([ python_path, jit_rolling_build_script, "download", "-arch", arch, "-target_dir", base_jit_directory], source_directory) if return_code != 0: print('{} failed with {}'.format(jit_rolling_build_script, return_code)) return return_code ######## Get diff JIT print('Copying diff binaries {} -> {}'.format(product_directory, diff_jit_directory)) copy_directory(product_directory, diff_jit_directory, verbose_copy=True, match_func=match_jit_files) ######## Get SuperPMI tools # Put the SuperPMI tools directly in the root of the correlation payload directory. print('Copying SuperPMI tools {} -> {}'.format(product_directory, correlation_payload_directory)) copy_directory(product_directory, correlation_payload_directory, verbose_copy=True, match_func=match_superpmi_tool_files) ######## Clone and build jitutils: we only need jit-analyze try: with TempDir() as jitutils_directory: run_command( ["git", "clone", "--quiet", "--depth", "1", "https://github.com/dotnet/jitutils", jitutils_directory]) # Make sure ".dotnet" directory exists, by running the script at least once dotnet_script_name = "dotnet.cmd" if is_windows else "dotnet.sh" dotnet_script_path = os.path.join(source_directory, dotnet_script_name) run_command([dotnet_script_path, "--info"], jitutils_directory) # Build jit-analyze only, and build it as a self-contained app (not framework-dependent). # What target RID are we building? It depends on where we're going to run this code. # The RID catalog is here: https://docs.microsoft.com/en-us/dotnet/core/rid-catalog. # Windows x64 => win-x64 # Windows x86 => win-x86 # Windows arm32 => win-arm # Windows arm64 => win-arm64 # Linux x64 => linux-x64 # Linux arm32 => linux-arm # Linux arm64 => linux-arm64 # macOS x64 => osx-x64 # NOTE: we currently only support running on Windows x86/x64 (we don't pass the target OS) RID = None if arch == "x86": RID = "win-x86" if arch == "x64": RID = "win-x64" # Set dotnet path to run build os.environ["PATH"] = os.path.join(source_directory, ".dotnet") + os.pathsep + os.environ["PATH"] run_command([ "dotnet", "publish", "-c", "Release", "--runtime", RID, "--self-contained", "--output", jit_analyze_build_directory, os.path.join(jitutils_directory, "src", "jit-analyze", "jit-analyze.csproj")], jitutils_directory) except PermissionError as pe_error: # Details: https://bugs.python.org/issue26660 print('Ignoring PermissionError: {0}'.format(pe_error)) jit_analyze_tool = os.path.join(jit_analyze_build_directory, "jit-analyze.exe") if not os.path.isfile(jit_analyze_tool): print('Error: {} not found'.format(jit_analyze_tool)) return 1 ######## Set pipeline variables helix_source_prefix = "official" creator = "" print('Setting pipeline variables:') set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory) set_pipeline_variable("Architecture", arch) set_pipeline_variable("Creator", creator) set_pipeline_variable("HelixSourcePrefix", helix_source_prefix) return 0
def main(main_args): """Main entrypoint Args: main_args ([type]): Arguments to the script """ coreclr_args = setup_args(main_args) arch_name = coreclr_args.arch os_name = "win" if coreclr_args.platform.lower() == "windows" else "linux" run_configuration = "{}-{}".format(os_name, arch_name) source_directory = coreclr_args.source_directory # CorrelationPayload directories correlation_payload_directory = path.join(coreclr_args.source_directory, "payload") scripts_src_directory = path.join(source_directory, "src", "coreclr", 'scripts') coreroot_directory = path.join(correlation_payload_directory, "CoreRoot") dst_directory = path.join(correlation_payload_directory, "exploratory") helix_source_prefix = "official" creator = "" repo_urls = { "Antigen": "https://github.com/kunalspathak/Antigen.git", "Fuzzlyn": "https://github.com/jakobbotsch/Fuzzlyn.git", } # tool_name is verifed in setup_args assert coreclr_args.tool_name in repo_urls repo_url = repo_urls[coreclr_args.tool_name] # create exploratory directory print('Copying {} -> {}'.format(scripts_src_directory, coreroot_directory)) copy_directory(scripts_src_directory, coreroot_directory, verbose_output=True, match_func=lambda path: any( path.endswith(extension) for extension in [".py"])) if is_windows: acceptable_copy = lambda path: any( path.endswith(extension) for extension in [".py", ".dll", ".exe", ".json"]) else: # Need to accept files without any extension, which is how executable file's names look. acceptable_copy = lambda path: (os.path.basename(path).find( ".") == -1) or any( path.endswith(extension) for extension in [".py", ".dll", ".so", ".json", ".a"]) # copy CORE_ROOT print('Copying {} -> {}'.format(coreclr_args.core_root_directory, coreroot_directory)) copy_directory(coreclr_args.core_root_directory, coreroot_directory, verbose_output=True, match_func=acceptable_copy) try: with TempDir() as tool_code_directory: # clone the tool run_command([ "git", "clone", "--quiet", "--depth", "1", repo_url, tool_code_directory ]) publish_dir = path.join(tool_code_directory, "publish") # build the tool with ChangeDir(tool_code_directory): dotnet_cmd = os.path.join(source_directory, "dotnet.cmd") if not is_windows: dotnet_cmd = os.path.join(source_directory, "dotnet.sh") run_command([ dotnet_cmd, "publish", "-c", "Release", "--self-contained", "-r", run_configuration, "-o", publish_dir ], _exit_on_fail=True) dll_name = coreclr_args.tool_name + ".dll" if not os.path.exists(path.join(publish_dir, dll_name)): raise FileNotFoundError("{} not present at {}".format( dll_name, publish_dir)) # copy tool print('Copying {} -> {}'.format(publish_dir, dst_directory)) copy_directory(publish_dir, dst_directory, verbose_output=True, match_func=acceptable_copy) except PermissionError as pe: print("Skipping file. Got error: %s", pe) # create foo.txt in work_item directories workitem_directory = path.join(source_directory, "workitem") os.mkdir(workitem_directory) foo_txt = os.path.join(workitem_directory, "foo.txt") with open(foo_txt, "w") as foo_txt_file: foo_txt_file.write("hello world!") # Set variables print('Setting pipeline variables:') set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory) set_pipeline_variable("WorkItemDirectory", workitem_directory) set_pipeline_variable("RunConfiguration", run_configuration) set_pipeline_variable("Creator", creator) set_pipeline_variable("HelixSourcePrefix", helix_source_prefix)
def main(main_args): """Main entrypoint: Prepare the Helix data for SuperPMI asmdiffs checked release Azure DevOps pipeline. The Helix correlation payload directory is created and populated as follows: <source_directory>\payload -- the correlation payload directory -- contains the *.py scripts from <source_directory>\src\coreclr\scripts -- contains superpmi.exe, mcs.exe from the target-specific build <source_directory>\payload\base -- contains the Checked JITs <source_directory>\payload\diff -- contains the Release JITs Then, AzDO pipeline variables are set. Args: main_args ([type]): Arguments to the script """ # Set up logging. logger = logging.getLogger() logger.setLevel(logging.INFO) stream_handler = logging.StreamHandler(sys.stdout) stream_handler.setLevel(logging.INFO) logger.addHandler(stream_handler) coreclr_args = setup_args(main_args) arch = coreclr_args.arch source_directory = coreclr_args.source_directory checked_directory = coreclr_args.checked_directory release_directory = coreclr_args.release_directory python_path = sys.executable # CorrelationPayload directories correlation_payload_directory = os.path.join(source_directory, "payload") superpmi_scripts_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts') base_jit_directory = os.path.join(correlation_payload_directory, "base") diff_jit_directory = os.path.join(correlation_payload_directory, "diff") ######## Copy SuperPMI python scripts # Copy *.py to CorrelationPayload print('Copying {} -> {}'.format(superpmi_scripts_directory, correlation_payload_directory)) copy_directory(superpmi_scripts_directory, correlation_payload_directory, verbose_copy=True, match_func=lambda path: any( path.endswith(extension) for extension in [".py"])) ######## Copy baseline Checked JIT # Copy clrjit*_arch.dll binaries from Checked checked_directory to base_jit_directory print('Copying base Checked binaries {} -> {}'.format( checked_directory, base_jit_directory)) copy_directory(checked_directory, base_jit_directory, verbose_copy=True, match_func=match_jit_files) ######## Copy diff Release JIT # Copy clrjit*_arch.dll binaries from release_directory to diff_jit_directory print('Copying diff Release binaries {} -> {}'.format( release_directory, diff_jit_directory)) copy_directory(release_directory, diff_jit_directory, verbose_copy=True, match_func=match_jit_files) ######## Get SuperPMI tools # Put the SuperPMI tools directly in the root of the correlation payload directory. print('Copying SuperPMI tools {} -> {}'.format( checked_directory, correlation_payload_directory)) copy_directory(checked_directory, correlation_payload_directory, verbose_copy=True, match_func=match_superpmi_tool_files) # Set variables helix_source_prefix = "official" creator = "" print('Setting pipeline variables:') set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory) set_pipeline_variable("Architecture", arch) set_pipeline_variable("Creator", creator) set_pipeline_variable("HelixSourcePrefix", helix_source_prefix)
def main(main_args): """ Main entrypoint Args: main_args ([type]): Arguments to the script """ coreclr_args = setup_args(main_args) source_directory = coreclr_args.source_directory # CorrelationPayload directories correlation_payload_directory = os.path.join(coreclr_args.source_directory, "payload") superpmi_src_directory = os.path.join(source_directory, 'src', 'coreclr', 'scripts') superpmi_dst_directory = os.path.join(correlation_payload_directory, "superpmi") arch = coreclr_args.arch helix_source_prefix = "official" creator = "" ci = True if is_windows: helix_queue = "Windows.10.Arm64" if arch == "arm64" else "Windows.10.Amd64.X86.Rt" else: if arch == "arm": helix_queue = "(Ubuntu.1804.Arm32)[email protected]/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm32v7-bfcd90a-20200121150440" elif arch == "arm64": helix_queue = "(Ubuntu.1804.Arm64)[email protected]/dotnet-buildtools/prereqs:ubuntu-18.04-helix-arm64v8-20210531091519-97d8652" else: helix_queue = "Ubuntu.1804.Amd64" # create superpmi directory print('Copying {} -> {}'.format(superpmi_src_directory, superpmi_dst_directory)) copy_directory(superpmi_src_directory, superpmi_dst_directory, verbose_output=True, match_func=lambda path: any( path.endswith(extension) for extension in [".py"])) if is_windows: acceptable_copy = lambda path: any( path.endswith(extension) for extension in [".py", ".dll", ".exe", ".json"]) else: # Need to accept files without any extension, which is how executable file's names look. acceptable_copy = lambda path: (os.path.basename(path).find( ".") == -1) or any( path.endswith(extension) for extension in [".py", ".dll", ".so", ".json"]) print('Copying {} -> {}'.format(coreclr_args.core_root_directory, superpmi_dst_directory)) copy_directory(coreclr_args.core_root_directory, superpmi_dst_directory, verbose_output=True, match_func=acceptable_copy) # Copy all the test files to CORE_ROOT # The reason is there are lot of dependencies with *.Tests.dll and to ensure we do not get # Reflection errors, just copy everything to CORE_ROOT so for all individual partitions, the # references will be present in CORE_ROOT. 
if coreclr_args.collection_name == "libraries_tests": print('Copying {} -> {}'.format(coreclr_args.input_directory, superpmi_dst_directory)) def make_readable(folder_name): """Make file executable by changing the permission Args: folder_name (string): folder to mark with 744 """ if is_windows: return print("Inside make_readable") run_command(["ls", "-l", folder_name]) for file_path, dirs, files in os.walk(folder_name, topdown=True): for d in dirs: os.chmod( os.path.join(file_path, d), # read+write+execute for owner (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) | # read for group (stat.S_IRGRP) | # read for other (stat.S_IROTH)) for f in files: os.chmod( os.path.join(file_path, f), # read+write+execute for owner (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) | # read for group (stat.S_IRGRP) | # read for other (stat.S_IROTH)) run_command(["ls", "-l", folder_name]) make_readable(coreclr_args.input_directory) copy_directory(coreclr_args.input_directory, superpmi_dst_directory, verbose_output=True, match_func=acceptable_copy) # Workitem directories workitem_directory = os.path.join(source_directory, "workitem") input_artifacts = "" if coreclr_args.collection_name == "benchmarks": # Setup microbenchmarks setup_microbenchmark(workitem_directory, arch) else: # Setup for pmi/crossgen runs # Clone and build jitutils try: with TempDir() as jitutils_directory: run_command([ "git", "clone", "--quiet", "--depth", "1", "https://github.com/dotnet/jitutils", jitutils_directory ]) # Make sure ".dotnet" directory exists, by running the script at least once dotnet_script_name = "dotnet.cmd" if is_windows else "dotnet.sh" dotnet_script_path = os.path.join(source_directory, dotnet_script_name) run_command([dotnet_script_path, "--info"], jitutils_directory) # Set dotnet path to run build os.environ["PATH"] = os.path.join( source_directory, ".dotnet") + os.pathsep + os.environ["PATH"] build_file = "build.cmd" if is_windows else "build.sh" run_command( [os.path.join(jitutils_directory, build_file), "-p"], jitutils_directory) copy_files( os.path.join(jitutils_directory, "bin"), superpmi_dst_directory, [os.path.join(jitutils_directory, "bin", "pmi.dll")]) except PermissionError as pe_error: # Details: https://bugs.python.org/issue26660 print('Ignoring PermissionError: {0}'.format(pe_error)) # NOTE: we can't use the build machine ".dotnet" to run on all platforms. E.g., the Windows x86 build uses a # Windows x64 .dotnet\dotnet.exe that can't load a 32-bit shim. Thus, we always use corerun from Core_Root to invoke crossgen2. # The following will copy .dotnet to the correlation payload in case we change our mind, and need or want to use it for some scenarios. 
# # Copy ".dotnet" to correlation_payload_directory for crossgen2 job; it is needed to invoke crossgen2.dll # if coreclr_args.collection_type == "crossgen2": # dotnet_src_directory = os.path.join(source_directory, ".dotnet") # dotnet_dst_directory = os.path.join(correlation_payload_directory, ".dotnet") # print('Copying {} -> {}'.format(dotnet_src_directory, dotnet_dst_directory)) # copy_directory(dotnet_src_directory, dotnet_dst_directory, verbose_output=False) # payload pmiassemblies_directory = os.path.join(workitem_directory, "pmiAssembliesDirectory") input_artifacts = os.path.join(pmiassemblies_directory, coreclr_args.collection_name) exclude_directory = [ 'Core_Root' ] if coreclr_args.collection_name == "coreclr_tests" else [] exclude_files = native_binaries_to_ignore if coreclr_args.collection_type == "crossgen2": print('Adding exclusions for crossgen2') # Currently, trying to crossgen2 R2RTest\Microsoft.Build.dll causes a pop-up failure, so exclude it. exclude_files += ["Microsoft.Build.dll"] if coreclr_args.collection_name == "libraries_tests": # libraries_tests artifacts contains files from core_root folder. Exclude them. core_root_dir = coreclr_args.core_root_directory exclude_files += [ item for item in os.listdir(core_root_dir) if os.path.isfile(os.path.join(core_root_dir, item)) and ( item.endswith(".dll") or item.endswith(".exe")) ] partition_files(coreclr_args.input_directory, input_artifacts, coreclr_args.max_size, exclude_directory, exclude_files) # Set variables print('Setting pipeline variables:') set_pipeline_variable("CorrelationPayloadDirectory", correlation_payload_directory) set_pipeline_variable("WorkItemDirectory", workitem_directory) set_pipeline_variable("InputArtifacts", input_artifacts) set_pipeline_variable("Python", ' '.join(get_python_name())) set_pipeline_variable("Architecture", arch) set_pipeline_variable("Creator", creator) set_pipeline_variable("Queue", helix_queue) set_pipeline_variable("HelixSourcePrefix", helix_source_prefix) set_pipeline_variable("MchFileTag", coreclr_args.mch_file_tag)