Example #1
def build_jtharness(top_dir, tag=None):
    work_dir = join(top_dir, 'jtharness_work')
    hg_dir = join(work_dir, 'jtharness')
    build_dir = join(hg_dir, 'build')

    mkdir(work_dir)
    chdir(work_dir)

    # clone the jtharness mercurial repository
    hg_clone(jtharness_repo)
    chdir(hg_dir)

    if tag is None:
        # find the latest tag
        tag = get_latest_hg_tag('jt')

    hg_switch_tag(tag)
    print(str.format('Using jtharness tag {0}', tag))

    # download and extract dependencies
    for jtharness_dependency in jtharness_dependencies:
        utils.download_artifact(jtharness_dependency[0], jtharness_dependency[1])
        utils.extract_archive(jtharness_dependency[1], build_dir)

    move(join('build', 'jh2.0', 'javahelp', 'lib', 'jhall.jar'), build_dir)
    move(join('build', 'jh2.0', 'javahelp', 'lib', 'jh.jar'), build_dir)

    chdir(build_dir)

    # create build properties
    build_properties = 'local.properties'

    with open(build_properties, 'w') as properties:
        properties.write('jhalljar = ./build/jhall.jar\n')
        properties.write('jhjar = ./build/jh.jar\n')
        properties.write('jcommjar = ./build/comm.jar\n')
        properties.write('servletjar = ./build/servlet-api.jar\n')
        properties.write(
            'bytecodelib = ./build/asm-3.1.jar:./build/asm-commons-3.1.jar\n')
        properties.write('junitlib = ./build/junit-4.4.jar\n')
        properties.write('BUILD_DIR = ./JTHarness-build\n')

    # run the ant build
    utils.run_cmd([
        'ant', 'build', '-propertyfile', build_properties,
        '-Djvmargs="-Xdoclint:none"', '-debug'
    ])

    # copy the archive
    bundles = os.listdir(join(hg_dir, 'JTHarness-build', 'bundles'))
    bundle_pattern = re.compile(r'jtharness-([0-9]+\.[0-9]+)\.zip')
    jtharness_version = None
    for bundle in bundles:
        match = bundle_pattern.match(bundle)

        if match is not None:
            jtharness_version = match.group(1)
            copy(join(hg_dir, 'JTHarness-build', 'bundles', bundle),
                 join(top_dir, 'jtharness.zip'))

    if jtharness_version is None:
        raise Exception('No jtharness bundle found in JTHarness-build/bundles')

    return jtharness_version
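
The function above also relies on module-level names that are not part of this snippet: jtharness_repo, jtharness_dependencies and the Mercurial helpers hg_clone, hg_switch_tag and get_latest_hg_tag. A minimal sketch of the shape the two data definitions might take, with placeholder URLs rather than the real values:

# Hypothetical sketch of the module-level inputs build_jtharness() expects; URLs are placeholders.
jtharness_repo = 'https://example.org/hg/jtharness'  # Mercurial repository to clone

# each entry is (download URL, local archive name); the archive is extracted into the build directory
jtharness_dependencies = [
    ('https://example.org/deps/javahelp2.zip', 'javahelp2.zip'),
    ('https://example.org/deps/asm-3.1.zip', 'asm-3.1.zip'),
]
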
Example #2
def main(argv=None):
    parser = ArgumentParser()
    parser.add_argument('-m',
                        '--major',
                        help='The SapMachine major version to build',
                        metavar='MAJOR',
                        required=True)
    parser.add_argument('-d',
                        '--dir',
                        help='The dir to extract jtreg to',
                        metavar='DIR',
                        required=True)
    args = parser.parse_args()

    ver = int(args.major)
    if ver >= 17:
        url = 'https://github.com/SAP/SapMachine-infrastructure/releases/download/jtreg-6.1/jtreg.zip'
    else:
        url = 'https://github.com/SAP/SapMachine-infrastructure/releases/download/jtreg-5.1/jtreg.zip'

    print(
        str.format('Downloading "{0}" and extracting to "{1}"', url, args.dir))

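    # download jtreg.zip into the target directory, extract it into <dir>/jtreg and remove the archive again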
    archive_path = join(args.dir, 'jtreg.zip')
    utils.remove_if_exists(archive_path)
    utils.download_artifact(url, archive_path)
    path = join(args.dir, 'jtreg')
    utils.remove_if_exists(path)
    os.makedirs(path)
    with ZipFile(archive_path, 'r') as zipObj:
        zipObj.extractall(path)

    utils.remove_if_exists(archive_path)
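
Several of these snippets call utils.download_artifact, utils.remove_if_exists and utils.extract_archive, whose implementations are not shown. A minimal sketch of what the first two helpers might look like, assuming plain urllib and shutil; the real utils module may behave differently:

# Hypothetical stand-ins for the utils helpers used in these examples.
import os
import shutil
import urllib.request

def download_artifact(url, target):
    # stream the file at 'url' into the local path 'target'
    with urllib.request.urlopen(url) as response, open(target, 'wb') as out:
        shutil.copyfileobj(response, out)

def remove_if_exists(path):
    # remove a file or directory tree; silently do nothing if it does not exist
    if os.path.isdir(path):
        shutil.rmtree(path)
    elif os.path.exists(path):
        os.remove(path)
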
Example #3
def get_previous_run_id(repo, run_id, pr_number):
    """
    Gets the id of the workflow run directly preceding the given run; the previous run must come from the same PR.
    """

    # Get branch and repo from run:
    this_run = utils.subprocess_check_output([
        "gh", "api", "-X", "GET", f"repos/{repo}/actions/runs/{run_id}",
        "--jq",
        "{ head_branch: .head_branch, head_repository: .head_repository.full_name }"
    ])

    this_run = json.loads(this_run)
    pr_branch = this_run["head_branch"]
    pr_repo = this_run["head_repository"]

    # Get all previous runs that match branch, repo and workflow name:
    output = utils.subprocess_check_output([
        "gh", "api", "-X", "GET", f"repos/{repo}/actions/runs", "-f",
        "event=pull_request", "-f", "status=success", "-f",
        f"branch='{pr_branch}'", "--paginate", "--jq",
        f'[.workflow_runs.[] | select(.head_repository.full_name=="{pr_repo}" and .name=="{artifacts_workflow_name}")] | sort_by(.id) | reverse | [.[].id]'
    ])

    # --paginate emits one JSON array per page; flatten them into a single list of run ids
    ids = [run for page in output.splitlines() for run in json.loads(page)]

    if not ids or ids[0] != int(run_id):
        raise Exception(
            f"Expected {run_id} to be the most recent matching workflow run.")

    for previous_run_id in ids[1:]:
        utils.download_artifact(repo, "pr", "prev_run_pr", previous_run_id)

        try:
            with open("prev_run_pr/NR") as file:
                prev_pr_number = int(file.read())
                print(f"PR number: {prev_pr_number}")
        finally:
            if os.path.isdir("prev_run_pr"):
                shutil.rmtree("prev_run_pr")

        # the previous run must come from the same PR:
        if pr_number == prev_pr_number:
            return int(previous_run_id)

    raise Exception("Couldn't find previous run.")
Example #4
def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--major', help='the SapMachine major version to build', metavar='MAJOR', required=True)
    parser.add_argument('-d', '--destination', help='the download destination', metavar='DIR', required=True)
    args = parser.parse_args()

    boot_jdk_major_max = int(args.major)
    boot_jdk_major_min = boot_jdk_major_max - 1
    destination = os.path.realpath(args.destination)
    releases = utils.github_api_request('releases', per_page=100)
    platform = str.format('{0}-{1}_bin', utils.get_system(), utils.get_arch())

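    # look for a non-prerelease SapMachine release whose major version is acceptable as boot JDK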
    for release in releases:

        if release['prerelease']:
            continue

        version, version_part, major, build_number, sap_build_number, os_ext = utils.sapmachine_tag_components(release['name'])

        if major is None:
            continue

        major = int(major)

        if boot_jdk_major_min <= major <= boot_jdk_major_max:
            assets = release['assets']

            for asset in assets:
                asset_name = asset['name']
                asset_url = asset['browser_download_url']

                if 'jdk' in asset_name and platform in asset_name and not asset_name.endswith('.txt'):
                    archive_path = join(destination, asset_name)
                    utils.remove_if_exists(archive_path)
                    utils.download_artifact(asset_url, archive_path)
                    boot_jdk_exploded = join(destination, 'boot_jdk')
                    utils.remove_if_exists(boot_jdk_exploded)
                    os.makedirs(boot_jdk_exploded)
                    utils.extract_archive(archive_path, boot_jdk_exploded)

                    sapmachine_folder = glob.glob(join(boot_jdk_exploded, 'sapmachine*'))

                    # glob.glob returns a (possibly empty) list, never None
                    if sapmachine_folder:
                        sapmachine_folder = sapmachine_folder[0]
                        files = os.listdir(sapmachine_folder)

                        for f in files:
                            shutil.move(join(sapmachine_folder, f), boot_jdk_exploded)

                        utils.remove_if_exists(sapmachine_folder)

                        if utils.get_system() == 'osx':
                            files = os.listdir(join(boot_jdk_exploded, 'Contents', 'Home'))

                            for f in files:
                                shutil.move(join(boot_jdk_exploded, 'Contents', 'Home', f), boot_jdk_exploded)

                            utils.remove_if_exists(join(boot_jdk_exploded, 'Contents'))

                    return 0

    return 0
Example #5
def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-m',
                        '--major',
                        help='the SapMachine major version to build',
                        metavar='MAJOR',
                        required=True)
    parser.add_argument('-d',
                        '--destination',
                        help='the download destination',
                        metavar='DIR',
                        required=True)
    args = parser.parse_args()

    boot_jdk_major_max = int(args.major)
    boot_jdk_major_min = boot_jdk_major_max - 1
    destination = os.path.realpath(args.destination)
    releases = utils.get_github_releases()
    platform = str.format('{0}-{1}_bin', utils.get_system(), utils.get_arch())
    retries = 2

    releases = extra_bootjdks + releases

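    # up to two passes: if the first pass finds no suitable boot JDK, the accepted major version range is widened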
    while retries > 0:
        for release in releases:

            if release['prerelease']:
                continue

            tag = SapMachineTag.from_string(release['name'])

            if tag is None:
                print(
                    str.format("SapMachine release {0} not recognized",
                               release['name']))
                continue
            major = tag.get_major()

            if boot_jdk_major_min <= major <= boot_jdk_major_max:
                assets = release['assets']

                for asset in assets:
                    asset_name = asset['name']
                    asset_url = asset['browser_download_url']

                    if 'jdk' in asset_name and platform in asset_name and (
                            asset_name.endswith('.tar.gz')
                            or asset_name.endswith('.zip')
                    ) and 'symbols' not in asset_name:
                        archive_path = join(destination, asset_name)
                        utils.remove_if_exists(archive_path)
                        utils.download_artifact(asset_url, archive_path)
                        boot_jdk_exploded = join(destination, 'boot_jdk')
                        utils.remove_if_exists(boot_jdk_exploded)
                        os.makedirs(boot_jdk_exploded)
                        utils.extract_archive(archive_path, boot_jdk_exploded)

                        # look for the extracted top-level folder (sapmachine* or jdk*)
                        sapmachine_folder = [
                            f for pattern in ('sapmachine*', 'jdk*')
                            for f in glob.glob(join(boot_jdk_exploded, pattern))
                        ]

                        # glob.glob returns a (possibly empty) list, never None
                        if sapmachine_folder:
                            sapmachine_folder = sapmachine_folder[0]
                            files = os.listdir(sapmachine_folder)

                            for f in files:
                                shutil.move(join(sapmachine_folder, f),
                                            boot_jdk_exploded)

                            utils.remove_if_exists(sapmachine_folder)

                            if utils.get_system() == 'osx':
                                files = os.listdir(
                                    join(boot_jdk_exploded, 'Contents',
                                         'Home'))

                                for f in files:
                                    shutil.move(
                                        join(boot_jdk_exploded, 'Contents',
                                             'Home', f), boot_jdk_exploded)

                                utils.remove_if_exists(
                                    join(boot_jdk_exploded, 'Contents'))

                        return 0
        retries -= 1
        if retries == 1:
            boot_jdk_major_min = boot_jdk_major_max - 2

    return 0
Example #6
def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--tag', help='the tag to create the debian packages from', metavar='TAG', required=True)
    parser.add_argument('-d', '--templates-directory', help='specify the templates directory', metavar='DIR', required=True)
    args = parser.parse_args()

    templates_dir = realpath(args.templates_directory)
    tag = args.tag

    if tag.endswith('-alpine'):
        # the "-alpine" tags do not contain any assets
        tag = tag[:-len('-alpine')]

    cwd = os.getcwd()
    work_dir = join(cwd, 'deb_work')
    version, version_part, major, build_number, sap_build_number, os_ext = utils.sapmachine_tag_components(tag)
    version = version.replace('-', '.')
    jdk_name = str.format('sapmachine-{0}-jdk-{1}', major, version)
    jre_name = str.format('sapmachine-{0}-jre-{1}', major, version)

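    # resolve the download URLs of the JDK and JRE assets for this tag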
    jdk_url, jre_url = utils.fetch_tag(tag, 'linux-x64', utils.get_github_api_accesstoken())

    utils.remove_if_exists(work_dir)
    mkdir(work_dir)

    jdk_archive = join(work_dir, jdk_url.rsplit('/', 1)[-1])
    jre_archive = join(work_dir, jre_url.rsplit('/', 1)[-1])

    utils.download_artifact(jdk_url, jdk_archive)
    utils.download_artifact(jre_url, jre_archive)

    clone_sapmachine(join(work_dir, 'sapmachine_master'))
    src_dir = join(work_dir, 'sapmachine_master')

    jdk_dir = join(work_dir, jdk_name)
    jre_dir = join(work_dir, jre_name)

    mkdir(jdk_dir)
    mkdir(jre_dir)

    utils.extract_archive(jdk_archive, jdk_dir)
    utils.extract_archive(jre_archive, jre_dir)

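    # dh_make needs a maintainer name and e-mail address; provide them via the environment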
    env = os.environ.copy()
    env['DEBFULLNAME'] = 'SapMachine'
    env['DEBEMAIL'] = '*****@*****.**'
    utils.run_cmd(['dh_make', '-n', '-s', '-y'], cwd=jdk_dir, env=env)
    utils.run_cmd(['dh_make', '-n', '-s', '-y'], cwd=jre_dir, env=env)

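    # generate the debian/ packaging metadata for JRE and JDK from the templates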
    jre_exploded_image = glob.glob(join(jre_dir, 'sapmachine-*'))[0]

    generate_configuration(
        templates_dir=join(templates_dir, 'jre'),
        major=major,
        target_dir=join(jre_dir, 'debian'),
        exploded_image=jre_exploded_image,
        src_dir=src_dir,
        download_url=jre_url)

    jdk_exploded_image = glob.glob(join(jdk_dir, 'sapmachine-*'))[0]

    generate_configuration(
        templates_dir=join(templates_dir, 'jdk'),
        major=major,
        target_dir=join(jdk_dir, 'debian'),
        exploded_image=jdk_exploded_image,
        src_dir=src_dir,
        download_url=jdk_url)

    utils.run_cmd(['debuild', '-b', '-uc', '-us'], cwd=jre_dir, env=env)
    utils.run_cmd(['debuild', '-b', '-uc', '-us'], cwd=jdk_dir, env=env)

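    # move the built .deb packages to the current working directory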
    deb_files = glob.glob(join(work_dir, '*.deb'))

    for deb_file in deb_files:
        copy(deb_file, cwd)
        remove(deb_file)
Example #7
def main(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--tag', help='the tag to create the debian packages from', metavar='TAG', required=True)
    args = parser.parse_args()

    tag = args.tag

    cwd = os.getcwd()
    work_dir = join(cwd, 'rpm_work')
    version, version_part, major, update, version_sap, build_number, os_ext = utils.sapmachine_tag_components(tag)
    version = version.replace('-', '.')
    jdk_name = str.format('sapmachine-jdk-{0}', version)

    jdk_url, jre_url = utils.get_asset_url(tag, 'linux-x64')

    utils.remove_if_exists(work_dir)
    mkdir(work_dir)

    jdk_archive = join(work_dir, jdk_url.rsplit('/', 1)[-1])

    utils.download_artifact(jdk_url, jdk_archive)
    utils.extract_archive(jdk_archive, work_dir)

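    # create an update-alternatives entry for every executable in the JDK's bin directory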
    bin_dir = join(work_dir, jdk_name, 'bin')
    tools = [f for f in listdir(bin_dir) if isfile(join(bin_dir, f))]
    alternatives = []
    alternatives_t = Template(alternatives_template)

    for tool in tools:
        alternatives.append(alternatives_t.substitute(tool=tool, major=major))

    alternatives = '\n'.join(alternatives)

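    # render the RPM spec file from the template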
    specfile_t = Template(spec_template)
    specfile_content = specfile_t.substitute(
        version=version,
        major=major,
        alternatives=alternatives,
        workdir=work_dir
    )

    with open(join(work_dir, 'sapmachine.spec'), 'w') as specfile:
        specfile.write(specfile_content)

    rpmbuild_dir = join(work_dir, 'rpmbuild')
    mkdir(rpmbuild_dir)

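    # build the binary RPM; _rpmdir and _topdir are redirected into the work directory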
    rpmbuild_cmd = str.format('rpmbuild -bb -v --buildroot={0}/BUILD {0}/sapmachine.spec', work_dir)
    rpmbuild_cmd = rpmbuild_cmd.split(' ')
    rpmbuild_cmd.append('--define')
    rpmbuild_cmd.append(str.format('_rpmdir {0}', work_dir))
    rpmbuild_cmd.append('--define')
    rpmbuild_cmd.append(str.format('_topdir {0}', rpmbuild_dir))
    utils.run_cmd(rpmbuild_cmd, cwd=work_dir)

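    # move the resulting RPMs to the current working directory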
    rpm_files = glob.glob(join(work_dir, 'x86_64', '*.rpm'))

    for rpm_file in rpm_files:
        copy(rpm_file, cwd)
        remove(rpm_file)

    return 0
Example #8
def comment_pr(repo, run_id):
    """
    Generates the coverage diff produced by the changes in the current PR. If the diff is not empty, it is posted as a comment.
    If a workflow run produces the same diff as the directly preceding one, no comment is posted.
    """

    # Store diff for current run
    current_diff_folder = "current_diff"
    utils.download_artifact(repo, comparison_artifact_name,
                            current_diff_folder, run_id)

    utils.download_artifact(repo, "pr", "pr", run_id)

    try:
        with open("pr/NR") as file:
            pr_number = int(file.read())
    finally:
        if os.path.isdir("pr"):
            shutil.rmtree("pr")

    # Try storing diff for previous run:
    prev_run_id = 0
    prev_diff_exists = False
    try:
        prev_run_id = get_previous_run_id(repo, run_id, pr_number)
        prev_diff_folder = "prev_diff"
        utils.download_artifact(repo, comparison_artifact_name,
                                prev_diff_folder, prev_run_id)

        prev_diff_exists = True

        if filecmp.cmp(
                f"{current_diff_folder}/{comparison_artifact_file_name}",
                f"{prev_diff_folder}/{comparison_artifact_file_name}",
                shallow=False):
            print(
                f"Previous run {prev_run_id} resulted in the same diff, so not commenting again."
            )
            return
        else:
            print(f"Diff of previous run {prev_run_id} differs, commenting.")
    except Exception as e:
        # this is not necessarily a failure; it can also mean that there was no previous run yet.
        print("Couldn't generate diff for previous run:", e)

    comment = get_comment_text(
        f"{current_diff_folder}/{comparison_artifact_file_name}", repo, run_id)

    if comment is None:
        if prev_run_id == 0:
            print(
                "Nothing to comment. There's no previous run, and there's no coverage change."
            )
            return

        print("Previous run found, and current run removes coverage change.")

        if not prev_diff_exists:
            print(
                "Couldn't get the comparison artifact from previous run. Not commenting."
            )
            return

        comment = comment_first_line + \
            "A recent commit removed the previously reported differences."
    post_comment(comment, repo, pr_number)
Example #9
def build_jtreg(top_dir, jtharness_version, tag=None, build_number=None):
    work_dir = join(top_dir, 'jtreg_work')
    hg_dir = join(work_dir, 'jtreg')
    build_dir = join(hg_dir, 'build')
    dependencies_dir = join(hg_dir, 'dependencies')
    images_dir = join(hg_dir, 'build', 'images')

    mkdir(work_dir)
    chdir(work_dir)

    # clone the jtreg mercurial repository
    hg_clone(jtreg_repo)
    chdir(hg_dir)
    mkdir(dependencies_dir)

    if tag is None:
        # find the latest tag
        tag = get_latest_hg_tag('jtreg')

        if build_number is None:
            build_number = tag.split('-')[1]
    else:
        if build_number is None:
            build_number = 'b01'

    hg_switch_tag(tag)
    print(str.format('Using jtreg tag {0}', tag))

    # download and extract dependencies
    for jtreg_dependency in jtreg_dependencies:
        utils.download_artifact(jtreg_dependency[0], jtreg_dependency[1])
        utils.extract_archive(jtreg_dependency[1], dependencies_dir)

    # workaround for jtreg.gmk JAVAHELP_JAR rule
    with open('DUMMY.SF', 'w+') as dummy:
        dummy.write('dummy')
    with zipfile.ZipFile(
            join(dependencies_dir, 'jh2.0', 'javahelp', 'lib', 'jh.jar'),
            'a') as java_help:
        java_help.write('DUMMY.SF', join('META-INF', 'DUMMY.SF'))

    utils.extract_archive(join(top_dir, 'jtharness.zip'), dependencies_dir)
    copytree(join(top_dir, 'asmtools-release'),
             join(dependencies_dir, 'asmtools'))

    # build configuration
    javac = dirname(dirname(realpath(utils.which('javac'))))
    ant = dirname(dirname(realpath(utils.which('ant'))))
    make_build_env = os.environ.copy()
    make_build_env['JDK17HOME'] = javac
    make_build_env['JDK18HOME'] = javac
    make_build_env['JDKHOME'] = javac
    make_build_env['ANTHOME'] = ant
    make_build_env['ASMTOOLS_HOME'] = join(dependencies_dir, 'asmtools')
    make_build_env['JAVAHELP_HOME'] = join(dependencies_dir, 'jh2.0',
                                           'javahelp')
    make_build_env['JTHARNESS_HOME'] = join(dependencies_dir,
                                            'jtharness-' + jtharness_version)
    make_build_env['TESTNG_JAR'] = join(dependencies_dir, 'testng.jar')
    make_build_env['JUNIT_JAR'] = join(dependencies_dir, 'junit.jar')
    make_build_env['JCOV_JAR'] = join(dependencies_dir, 'JCOV_BUILD',
                                      'jcov_3.0', 'jcov.jar')
    make_build_env['JCOV_NETWORK_SAVER_JAR'] = join(dependencies_dir,
                                                    'JCOV_BUILD', 'jcov_3.0',
                                                    'jcov_network_saver.jar')
    make_build_env['JCOMMANDER_JAR'] = join(dependencies_dir,
                                            'jcommander-1.48.jar')

    # run make
    utils.run_cmd(['make', '-C', 'make', 'BUILD_NUMBER=' + build_number],
                  env=make_build_env)

    # add additional libraries to the archive
    # with zipfile.ZipFile(join(images_dir, 'jtreg.zip'), 'a') as jtreg_archive:
    #    jtreg_archive.write(join(dependencies_dir, 'jcommander-1.48.jar'), join('jtreg', 'lib', 'jcommander.jar'))
    #    jtreg_archive.write(join(dependencies_dir, 'testng.jar'), join('jtreg', 'lib', 'testng.jar'))

    # copy the build result
    copy(join(images_dir, 'jtreg.zip'), top_dir)
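
build_jtreg() consumes the jtharness.zip archive and the version string produced by build_jtharness(), so a driver script would typically chain the two. A minimal sketch of such a driver (hypothetical; it also assumes <top_dir>/asmtools-release has already been populated, as required by the copytree call above):

# Hypothetical driver chaining the two build steps shown in these examples.
if __name__ == '__main__':
    top_dir = os.getcwd()
    jtharness_version = build_jtharness(top_dir)  # produces <top_dir>/jtharness.zip
    build_jtreg(top_dir, jtharness_version)       # copies the built jtreg.zip to <top_dir>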