Example 1
def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
    e = ev.create('test1')
    e.add('dyninst')
    e.concretize()

    dyninst_hash = None
    hash_dict = {}

    with e as active_env:
        for s in active_env.all_specs():
            hash_dict[s.name] = s.dag_hash()
            if s.name == 'dyninst':
                dyninst_hash = s.dag_hash()

        assert (dyninst_hash)

        spec_map = ci.get_concrete_specs(active_env, dyninst_hash, 'dyninst',
                                         'NONE')
        assert 'root' in spec_map

        concrete_root = spec_map['root']
        assert (concrete_root.dag_hash() == dyninst_hash)

        s = spec.Spec('dyninst')
        print('nonconc spec name: {0}'.format(s.name))

        spec_map = ci.get_concrete_specs(active_env, s.name, s.name,
                                         'FIND_ANY')

        assert 'root' in spec_map
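
These excerpts omit their module-level imports and rely on fixtures (config, mock_packages, mutable_mock_env_path, and so on) provided by Spack's shared test conftest. The names used throughout the examples are presumably bound roughly as follows; this is a hedged sketch based on common Spack test conventions, not a verbatim header from the source file:

import json
import os

import pytest
from jsonschema import validate

import spack.ci as ci
import spack.environment as ev
import spack.hash_types as ht
import spack.paths as spack_paths
import spack.spec as spec
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
from spack.main import SpackCommand
from spack.schema.buildcache_spec import schema as spec_yaml_schema
from spack.schema.database_index import schema as db_idx_schema

# Command wrappers used by the examples below.
buildcache_cmd = SpackCommand('buildcache')
ci_cmd = SpackCommand('ci')
env_cmd = SpackCommand('env')
install_cmd = SpackCommand('install')
mirror_cmd = SpackCommand('mirror')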
Example 2
def test_relate_cdash_builds(config, mock_packages):
    root_spec = (
        'eJztkk1uwyAQhfc5BbuuYjWObSKuUlURYP5aDBjjBPv0RU7iRI6qpKuqUtnxzZvRwHud'
        'YxSt1oCMyuVoBdI5MN8paxDYZK/ZbkLYU3kqAuA0Dtz6BgGtTB8XdG87BCgzwXbwXArY'
        'CxYQiLtqXxUTpLZxSjN/mWlwwxAQlJ7v8wpFtsvK1UXSOUyTjvRKB2Um7LBPhZD0l1md'
        'xJ7VCATfszOiXGOR9np7vwDn7lCMS8SXQNf3RCtyBTVzzNTMUMXmfWrFeR+UngEAEncS'
        'ASjKwZcid7ERNldthBxjX46mMD2PsJnlYXDs2rye3l+vroOkJJ54SXgZPklLRQmx61sm'
        'cgKNVFRO0qlpf2pojq1Ro7OG56MY+Bgc1PkIo/WkaT8OVcrDYuvZkJdtBl/+XCZ+NQBJ'
        'oKg1h6X/VdXRoyE2OWeH6lCXZdHGrauUZAWFw/YJ/0/39OefN3F4Kle3cXjYsF684ZqG'
        'Tbap/uPwbRx+YPStIQ8bvgA7G6YE')

    dep_builds = 'diffutils;libiconv'
    spec_map = ci.get_concrete_specs(root_spec, 'bzip2', dep_builds, 'NONE')
    cdash_api_url = 'http://cdash.fake.org'
    job_build_id = '42'
    cdash_project = 'spack'
    cdashids_mirror_url = 'https://my.fake.mirror'

    with pytest.raises(web_util.SpackWebError):
        ci.relate_cdash_builds(spec_map, cdash_api_url, job_build_id,
                               cdash_project, cdashids_mirror_url)

    # Just make sure passing None for build id doesn't throw exceptions
    ci.relate_cdash_builds(spec_map, cdash_api_url, None, cdash_project,
                           cdashids_mirror_url)
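
The long root_spec literal above is not a plain spec string: the 'eJz' prefix is characteristic of zlib-compressed bytes that have been base64-encoded. A minimal sketch for inspecting such a blob (the encoding is an assumption inferred from the prefix, and decode_root_spec is a hypothetical helper, not part of the API shown in these examples):

import base64
import zlib


def decode_root_spec(encoded):
    # Undo the base64 layer, then decompress with zlib; the result
    # should be the spec's YAML representation as text.
    return zlib.decompress(base64.b64decode(encoded)).decode('utf-8')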
Example 3
def test_get_concrete_specs(config, mock_packages):
    root_spec = (
        'eJztkk1uwyAQhfc5BbuuYjWObSKuUlURYP5aDBjjBPv0RU7iRI6qpKuqUtnxzZvRwHud'
        'YxSt1oCMyuVoBdI5MN8paxDYZK/ZbkLYU3kqAuA0Dtz6BgGtTB8XdG87BCgzwXbwXArY'
        'CxYQiLtqXxUTpLZxSjN/mWlwwxAQlJ7v8wpFtsvK1UXSOUyTjvRKB2Um7LBPhZD0l1md'
        'xJ7VCATfszOiXGOR9np7vwDn7lCMS8SXQNf3RCtyBTVzzNTMUMXmfWrFeR+UngEAEncS'
        'ASjKwZcid7ERNldthBxjX46mMD2PsJnlYXDs2rye3l+vroOkJJ54SXgZPklLRQmx61sm'
        'cgKNVFRO0qlpf2pojq1Ro7OG56MY+Bgc1PkIo/WkaT8OVcrDYuvZkJdtBl/+XCZ+NQBJ'
        'oKg1h6X/VdXRoyE2OWeH6lCXZdHGrauUZAWFw/YJ/0/39OefN3F4Kle3cXjYsF684ZqG'
        'Tbap/uPwbRx+YPStIQ8bvgA7G6YE')

    dep_builds = 'diffutils;libiconv'
    spec_map = ci.get_concrete_specs(root_spec, 'bzip2', dep_builds, 'NONE')

    assert ('root' in spec_map and 'deps' in spec_map)

    nonconc_root_spec = 'archive-files'
    dep_builds = ''
    spec_map = ci.get_concrete_specs(nonconc_root_spec, 'archive-files',
                                     dep_builds, 'FIND_ANY')

    assert ('root' in spec_map and 'deps' in spec_map)
    assert ('archive-files' in spec_map)
Example 4
def test_ci_rebuild_index(tmpdir, mutable_mock_env_path, env_deactivate,
                          install_mockery, mock_packages, mock_fetch,
                          mock_stage):
    working_dir = tmpdir.join('working_dir')

    mirror_dir = working_dir.join('mirror')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    spack_yaml_contents = """
spack:
 specs:
   - callpath
 mirrors:
   test-mirror: {0}
 gitlab-ci:
   mappings:
     - match:
         - patchelf
       runner-attributes:
         tags:
           - donotcare
         image: donotcare
""".format(mirror_url)

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write(spack_yaml_contents)

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test'):
            spec_map = ci.get_concrete_specs('callpath', 'callpath', '',
                                             'FIND_ANY')
            concrete_spec = spec_map['callpath']
            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
            yaml_path = str(tmpdir.join('spec.yaml'))
            with open(yaml_path, 'w') as ypfd:
                ypfd.write(spec_yaml)

            install_cmd('--keep-stage', '-f', yaml_path)
            buildcache_cmd('create', '-u', '-a', '-f', '--mirror-url',
                           mirror_url, 'callpath')
            ci_cmd('rebuild-index')

            buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')
            index_path = os.path.join(buildcache_path, 'index.json')
            with open(index_path) as idx_fd:
                index_object = json.load(idx_fd)
                validate(index_object, db_idx_schema)
Example 5
def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
                              install_mockery, mock_packages, mock_fetch,
                              mock_stage, mock_gnupghome):
    working_dir = tmpdir.join('working_dir')

    mirror_dir = working_dir.join('mirror')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    signing_key_dir = spack_paths.mock_gpg_keys_path
    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
    with open(signing_key_path) as fd:
        signing_key = fd.read()

    ci.import_signing_key(signing_key)

    spack_yaml_contents = """
spack:
 definitions:
   - packages: [patchelf]
 specs:
   - $packages
 mirrors:
   test-mirror: {0}
 gitlab-ci:
   enable-artifacts-buildcache: True
   mappings:
     - match:
         - patchelf
       runner-attributes:
         tags:
           - donotcare
         image: donotcare
   service-job-attributes:
     tags:
       - nonbuildtag
     image: basicimage
""".format(mirror_url)

    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write(spack_yaml_contents)

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test') as env:
            spec_map = ci.get_concrete_specs('patchelf', 'patchelf', '',
                                             'FIND_ANY')
            concrete_spec = spec_map['patchelf']
            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
            yaml_path = str(tmpdir.join('spec.yaml'))
            with open(yaml_path, 'w') as ypfd:
                ypfd.write(spec_yaml)

            install_cmd('--keep-stage', yaml_path)

            # env, spec, yaml_path, mirror_url, build_id, sign_binaries
            ci.push_mirror_contents(env, concrete_spec, yaml_path, mirror_url,
                                    '42', True)

            buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')

            # Now test the --prune-dag (default) option of spack ci generate
            mirror_cmd('add', 'test-ci', mirror_url)

            outputfile_pruned = str(tmpdir.join('pruned_pipeline.yml'))
            ci_cmd('generate', '--output-file', outputfile_pruned)

            with open(outputfile_pruned) as f:
                contents = f.read()
                yaml_contents = syaml.load(contents)
                assert ('no-specs-to-rebuild' in yaml_contents)
                # Make sure there are no other spec jobs or rebuild-index
                assert (len(yaml_contents.keys()) == 1)
                the_elt = yaml_contents['no-specs-to-rebuild']
                assert ('tags' in the_elt)
                assert ('nonbuildtag' in the_elt['tags'])
                assert ('image' in the_elt)
                assert (the_elt['image'] == 'basicimage')

            outputfile_not_pruned = str(tmpdir.join('unpruned_pipeline.yml'))
            ci_cmd('generate', '--no-prune-dag', '--output-file',
                   outputfile_not_pruned)

            # Test the --no-prune-dag option of spack ci generate
            with open(outputfile_not_pruned) as f:
                contents = f.read()
                yaml_contents = syaml.load(contents)

                found_spec_job = False

                for ci_key in yaml_contents.keys():
                    if '(specs) patchelf' in ci_key:
                        the_elt = yaml_contents[ci_key]
                        assert ('variables' in the_elt)
                        job_vars = the_elt['variables']
                        assert ('SPACK_SPEC_NEEDS_REBUILD' in job_vars)
                        assert (
                            job_vars['SPACK_SPEC_NEEDS_REBUILD'] == 'False')
                        found_spec_job = True

                assert (found_spec_job)

            mirror_cmd('rm', 'test-ci')

            # Test generating buildcache index while we have bin mirror
            buildcache_cmd('update-index', '--mirror-url', mirror_url)
            index_path = os.path.join(buildcache_path, 'index.json')
            with open(index_path) as idx_fd:
                index_object = json.load(idx_fd)
                validate(index_object, db_idx_schema)

            # Now that index is regenerated, validate "buildcache list" output
            buildcache_list_output = buildcache_cmd('list', output=str)
            assert ('patchelf' in buildcache_list_output)

            # Also test buildcache_spec schema
            bc_files_list = os.listdir(buildcache_path)
            for file_name in bc_files_list:
                if file_name.endswith('.spec.yaml'):
                    spec_yaml_path = os.path.join(buildcache_path, file_name)
                    with open(spec_yaml_path) as yaml_fd:
                        yaml_object = syaml.load(yaml_fd)
                        validate(yaml_object, spec_yaml_schema)

            logs_dir = working_dir.join('logs_dir')
            if not os.path.exists(logs_dir.strpath):
                os.makedirs(logs_dir.strpath)

            ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath)

            logs_dir_list = os.listdir(logs_dir.strpath)

            assert ('spack-build-out.txt' in logs_dir_list)

            # Also just make sure that if something goes wrong with the
            # stage logs copy, no exception is thrown
            ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)

            dl_dir = working_dir.join('download_dir')
            if not os.path.exists(dl_dir.strpath):
                os.makedirs(dl_dir.strpath)

            buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
                           dl_dir.strpath, '--require-cdashid')

            dl_dir_list = os.listdir(dl_dir.strpath)
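            # Presumably the binary tarball, the .spec.yaml file, and the
            # .cdashid file fetched via --require-cdashid (an assumption,
            # not stated in the listing).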

            assert (len(dl_dir_list) == 3)
Example 6
def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
                              install_mockery, mock_packages, mock_fetch,
                              mock_stage, mock_gnupghome):
    working_dir = tmpdir.join('working_dir')

    mirror_dir = working_dir.join('mirror')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    signing_key_dir = spack_paths.mock_gpg_keys_path
    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
    with open(signing_key_path) as fd:
        signing_key = fd.read()

    ci.import_signing_key(signing_key)

    spack_yaml_contents = """
spack:
 definitions:
   - packages: [patchelf]
 specs:
   - $packages
 mirrors:
   test-mirror: {0}
""".format(mirror_url)

    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write(spack_yaml_contents)

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test') as env:
            spec_map = ci.get_concrete_specs(
                'patchelf', 'patchelf', '', 'FIND_ANY')
            concrete_spec = spec_map['patchelf']
            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
            yaml_path = str(tmpdir.join('spec.yaml'))
            with open(yaml_path, 'w') as ypfd:
                ypfd.write(spec_yaml)

            install_cmd('--keep-stage', yaml_path)

            # env, spec, yaml_path, mirror_url, build_id
            ci.push_mirror_contents(
                env, concrete_spec, yaml_path, mirror_url, '42')

            buildcache_list_output = buildcache_cmd('list', output=str)

            assert('patchelf' in buildcache_list_output)

            logs_dir = working_dir.join('logs_dir')
            if not os.path.exists(logs_dir.strpath):
                os.makedirs(logs_dir.strpath)

            ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath)

            logs_dir_list = os.listdir(logs_dir.strpath)

            assert('spack-build-env.txt' in logs_dir_list)
            assert('spack-build-out.txt' in logs_dir_list)

            # Also just make sure that if something goes wrong with the
            # stage logs copy, no exception is thrown
            ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)

            dl_dir = working_dir.join('download_dir')
            if not os.path.exists(dl_dir.strpath):
                os.makedirs(dl_dir.strpath)

            buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
                           dl_dir.strpath, '--require-cdashid')

            dl_dir_list = os.listdir(dl_dir.strpath)

            assert(len(dl_dir_list) == 3)
Example 7
File: ci.py Project: mmmllf/spack
def ci_rebuild(args):
    """This command represents a gitlab-ci job, corresponding to a single
       release spec.  As such it must first decide whether or not the spec it
       has been assigned to build is up to date on the remote binary mirror.
       If it is not (i.e. the full_hash of the spec as computed locally does
       not match the one stored in the metadata on the mirror), this script
       will build the package, create a binary cache for it, and then push all
       related files to the remote binary mirror.  This script also
       communicates with a remote CDash instance to share status on the package
       build process.

       The spec to be built by this job is represented by essentially two
       pieces of information: 1) a root spec (possibly already concrete, but
       maybe still needing to be concretized) and 2) a package name used to
       index that root spec (once the root is, for certain, concrete)."""
    env = ev.get_env(args, 'ci rebuild', required=True)
    yaml_root = ev.config_dict(env.yaml)

    # The following environment variables should be defined in the CI
    # infrastructure (or some other external source) in the case that the
    # remote mirror is an S3 bucket.  The AWS keys are used to upload
    # buildcache entries to S3 using the boto3 api.
    #
    # AWS_ACCESS_KEY_ID
    # AWS_SECRET_ACCESS_KEY
    # S3_ENDPOINT_URL (only needed for non-AWS S3 implementations)
    #
    # If present, we will import the SPACK_SIGNING_KEY using the
    # "spack gpg trust" command, so it can be used both for verifying
    # dependency buildcache entries and signing the buildcache entry we create
    # for our target pkg.
    #
    # SPACK_SIGNING_KEY

    ci_artifact_dir = get_env_var('CI_PROJECT_DIR')
    ci_pipeline_id = get_env_var('CI_PIPELINE_ID')
    signing_key = get_env_var('SPACK_SIGNING_KEY')
    root_spec = get_env_var('SPACK_ROOT_SPEC')
    job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
    compiler_action = get_env_var('SPACK_COMPILER_ACTION')
    cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
    related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
    pr_env_var = get_env_var('SPACK_IS_PR_PIPELINE')
    pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')

    gitlab_ci = None
    if 'gitlab-ci' in yaml_root:
        gitlab_ci = yaml_root['gitlab-ci']

    if not gitlab_ci:
        tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

    enable_cdash = False
    if 'cdash' in yaml_root:
        enable_cdash = True
        ci_cdash = yaml_root['cdash']
        job_spec_buildgroup = ci_cdash['build-group']
        cdash_base_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']
        tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
        tty.debug('cdash_project = {0}'.format(cdash_project))
        tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
        tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
        tty.debug('cdash_site = {0}'.format(cdash_site))
        tty.debug('related_builds = {0}'.format(related_builds))
        tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))

    remote_mirror_url = None
    if 'mirrors' in yaml_root:
        ci_mirrors = yaml_root['mirrors']
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]

    if not remote_mirror_url:
        tty.die('spack ci rebuild requires an env containing a mirror')

    tty.debug('ci_artifact_dir = {0}'.format(ci_artifact_dir))
    tty.debug('root_spec = {0}'.format(root_spec))
    tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
    tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
    tty.debug('compiler_action = {0}'.format(compiler_action))

    cdash_report_dir = os.path.join(ci_artifact_dir, 'cdash_report')
    temp_dir = os.path.join(ci_artifact_dir, 'jobs_scratch_dir')
    job_log_dir = os.path.join(temp_dir, 'logs')
    spec_dir = os.path.join(temp_dir, 'specs')

    local_mirror_dir = os.path.join(ci_artifact_dir, 'local_mirror')
    build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')

    spack_is_pr_pipeline = (pr_env_var == 'True')

    pipeline_mirror_url = None
    temp_storage_url_prefix = None
    if 'temporary-storage-url-prefix' in gitlab_ci:
        temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']
        pipeline_mirror_url = url_util.join(
            temp_storage_url_prefix, ci_pipeline_id)

    enable_artifacts_mirror = False
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']
        if (enable_artifacts_mirror or (spack_is_pr_pipeline and
            not enable_artifacts_mirror and not temp_storage_url_prefix)):
            # If you explicitly enabled the artifacts buildcache feature, or
            # if this is a PR pipeline but you did not enable either of the
            # per-pipeline temporary storage features, we force the use of
            # artifacts buildcache.  Otherwise jobs will not have binary
            # dependencies from previous stages available since we do not
            # allow pushing binaries to the remote mirror during PR pipelines.
            enable_artifacts_mirror = True
            pipeline_mirror_url = 'file://' + local_mirror_dir
            mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
                pipeline_mirror_url)
            tty.debug(mirror_msg)

    # Clean out scratch directory from last stage
    if os.path.exists(temp_dir):
        shutil.rmtree(temp_dir)

    if os.path.exists(cdash_report_dir):
        shutil.rmtree(cdash_report_dir)

    os.makedirs(job_log_dir)
    os.makedirs(spec_dir)

    job_spec_yaml_path = os.path.join(
        spec_dir, '{0}.yaml'.format(job_spec_pkg_name))
    job_log_file = os.path.join(job_log_dir, 'pipeline_log.txt')

    cdash_build_id = None
    cdash_build_stamp = None

    with open(job_log_file, 'w') as log_fd:
        os.dup2(log_fd.fileno(), sys.stdout.fileno())
        os.dup2(log_fd.fileno(), sys.stderr.fileno())

        current_directory = os.getcwd()
        tty.debug('Current working directory: {0}, Contents:'.format(
            current_directory))
        directory_list = os.listdir(current_directory)
        for next_entry in directory_list:
            tty.debug('  {0}'.format(next_entry))

        tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))

        if signing_key:
            spack_ci.import_signing_key(signing_key)

        can_sign = spack_ci.can_sign_binaries()
        sign_binaries = can_sign and spack_is_pr_pipeline is False

        can_verify = spack_ci.can_verify_binaries()
        verify_binaries = can_verify and spack_is_pr_pipeline is False

        spack_ci.configure_compilers(compiler_action)

        spec_map = spack_ci.get_concrete_specs(
            root_spec, job_spec_pkg_name, related_builds, compiler_action)

        job_spec = spec_map[job_spec_pkg_name]

        tty.debug('Here is the concrete spec: {0}'.format(job_spec))

        with open(job_spec_yaml_path, 'w') as fd:
            fd.write(job_spec.to_yaml(hash=ht.build_hash))

        tty.debug('Done writing concrete spec')

        # DEBUG
        with open(job_spec_yaml_path) as fd:
            tty.debug('Wrote spec file, read it back.  Contents:')
            tty.debug(fd.read())

        # DEBUG the root spec
        root_spec_yaml_path = os.path.join(spec_dir, 'root.yaml')
        with open(root_spec_yaml_path, 'w') as fd:
            fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

        # TODO: Refactor the spack install command so it's easier to use from
        # python modules.  Currently we use "exe.which('spack')" to make it
        # easier to install packages from here, but it introduces some
        # problems, e.g. if we want the spack command to have access to the
        # mirrors we're configuring, then we have to use the "spack" command
        # to add the mirrors too, which in turn means that any code here *not*
        # using the spack command does *not* have access to the mirrors.
        spack_cmd = exe.which('spack')
        mirrors_to_check = {
            'ci_remote_mirror': remote_mirror_url,
        }

        def add_mirror(mirror_name, mirror_url):
            m_args = ['mirror', 'add', mirror_name, mirror_url]
            tty.debug('Adding mirror: spack {0}'.format(m_args))
            mirror_add_output = spack_cmd(*m_args)
            # Workaround: Adding the mirrors above, using "spack_cmd" makes
            # sure they're available later when we use "spack_cmd" to install
            # the package.  But then we also need to add them to this dict
            # below, so they're available in this process (we end up having to
            # pass them to "bindist.get_mirrors_for_spec()")
            mirrors_to_check[mirror_name] = mirror_url
            tty.debug('spack mirror add output: {0}'.format(mirror_add_output))

        # Configure mirrors
        if pr_mirror_url:
            add_mirror('ci_pr_mirror', pr_mirror_url)

        if pipeline_mirror_url:
            add_mirror(spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url)

        tty.debug('listing spack mirrors:')
        spack_cmd('mirror', 'list')
        spack_cmd('config', 'blame', 'mirrors')

        # Checks all mirrors for a built spec with a matching full hash
        matches = bindist.get_mirrors_for_spec(
            job_spec, full_hash_match=True, mirrors_to_check=mirrors_to_check,
            index_only=False)

        if matches:
            # Got a full hash match on at least one configured mirror.  All
            # matches represent the fully up-to-date spec, so should all be
            # equivalent.  If artifacts mirror is enabled, we just pick one
            # of the matches and download the buildcache files from there to
            # the artifacts, so they're available to be used by dependent
            # jobs in subsequent stages.
            tty.debug('No need to rebuild {0}'.format(job_spec_pkg_name))
            if enable_artifacts_mirror:
                matching_mirror = matches[0]['mirror_url']
                tty.debug('Getting {0} buildcache from {1}'.format(
                    job_spec_pkg_name, matching_mirror))
                tty.debug('Downloading to {0}'.format(build_cache_dir))
                buildcache.download_buildcache_files(
                    job_spec, build_cache_dir, True, matching_mirror)
        else:
            # No full hash match anywhere means we need to rebuild spec

            # Build up common install arguments
            install_args = [
                '-d', '-v', '-k', 'install',
                '--keep-stage',
                '--require-full-hash-match',
            ]

            if not verify_binaries:
                install_args.append('--no-check-signature')

            # Add arguments to create + register a new build on CDash (if
            # enabled)
            if enable_cdash:
                tty.debug('Registering build with CDash')
                (cdash_build_id,
                    cdash_build_stamp) = spack_ci.register_cdash_build(
                    cdash_build_name, cdash_base_url, cdash_project,
                    cdash_site, job_spec_buildgroup)

                cdash_upload_url = '{0}/submit.php?project={1}'.format(
                    cdash_base_url, cdash_project_enc)

                install_args.extend([
                    '--cdash-upload-url', cdash_upload_url,
                    '--cdash-build', cdash_build_name,
                    '--cdash-site', cdash_site,
                    '--cdash-buildstamp', cdash_build_stamp,
                ])

            install_args.append(job_spec_yaml_path)

            tty.debug('Installing {0} from source'.format(job_spec.name))

            try:
                tty.debug('spack install arguments: {0}'.format(
                    install_args))
                spack_cmd(*install_args)
            finally:
                spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

            # Create buildcache on remote mirror, either on pr-specific
            # mirror or on mirror defined in spack environment
            if spack_is_pr_pipeline:
                buildcache_mirror_url = pr_mirror_url
            else:
                buildcache_mirror_url = remote_mirror_url

            try:
                spack_ci.push_mirror_contents(
                    env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
                    cdash_build_id, sign_binaries)
            except Exception as inst:
                # If the mirror we're pushing to is on S3 and there's some
                # permissions problem, for example, we can't just target
                # that exception type here, since users of the
                # `spack ci rebuild' may not need or want any dependency
                # on boto3.  So we use the first non-boto exception type
                # in the hierarchy:
                #     boto3.exceptions.S3UploadFailedError
                #     boto3.exceptions.Boto3Error
                #     Exception
                #     BaseException
                #     object
                err_msg = 'Error msg: {0}'.format(inst)
                if 'Access Denied' in err_msg:
                    tty.msg('Permission problem writing to mirror')
                tty.msg(err_msg)

            # Create another copy of that buildcache in the per-pipeline
            # temporary storage mirror (this is only done if either artifacts
            # buildcache is enabled or a temporary storage url prefix is set)
            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
                                          pipeline_mirror_url, cdash_build_id,
                                          sign_binaries)

            # Relate this build to its dependencies on CDash (if enabled)
            if enable_cdash:
                spack_ci.relate_cdash_builds(
                    spec_map, cdash_base_url, cdash_build_id, cdash_project,
                    pipeline_mirror_url or pr_mirror_url or remote_mirror_url)
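
To trace through ci_rebuild outside of a GitLab pipeline, the variables it reads via get_env_var can be stubbed into the process environment first. A sketch with made-up values, assuming get_env_var simply reads os.environ (the variable names come from the listing above; every value here is hypothetical):

import os

# Hypothetical values for local tracing; the keys are the ones read at
# the top of ci_rebuild, the values are placeholders.
os.environ.update({
    'CI_PROJECT_DIR': '/tmp/ci_project_dir',
    'CI_PIPELINE_ID': '12345',
    'SPACK_ROOT_SPEC': '<base64/zlib-encoded root spec>',
    'SPACK_JOB_SPEC_PKG_NAME': 'patchelf',
    'SPACK_COMPILER_ACTION': 'NONE',
    'SPACK_RELATED_BUILDS_CDASH': '',
    'SPACK_IS_PR_PIPELINE': 'False',
    'SPACK_PR_MIRROR_URL': '',
})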
Example 8
def ci_rebuild(args):
    """This command represents a gitlab-ci job, corresponding to a single
       release spec.  As such it must first decide whether or not the spec it
       has been assigned to build is up to date on the remote binary mirror.
       If it is not (i.e. the full_hash of the spec as computed locally does
       not match the one stored in the metadata on the mirror), this script
       will build the package, create a binary cache for it, and then push all
       related files to the remote binary mirror.  This script also
       communicates with a remote CDash instance to share status on the package
       build process.

       The spec to be built by this job is represented by essentially two
       pieces of information: 1) a root spec (possibly already concrete, but
       maybe still needing to be concretized) and 2) a package name used to
       index that root spec (once the root is, for certain, concrete)."""
    env = ev.get_env(args, 'ci rebuild', required=True)
    yaml_root = ev.config_dict(env.yaml)

    # The following environment variables should be defined in the CI
    # infrastructure (or some other external source) in the case that the
    # remote mirror is an S3 bucket.  The AWS keys are used to upload
    # buildcache entries to S3 using the boto3 api.
    #
    # AWS_ACCESS_KEY_ID
    # AWS_SECRET_ACCESS_KEY
    # S3_ENDPOINT_URL (only needed for non-AWS S3 implementations)
    #
    # If present, we will import the SPACK_SIGNING_KEY using the
    # "spack gpg trust" command, so it can be used both for verifying
    # dependency buildcache entries and signing the buildcache entry we create
    # for our target pkg.
    #
    # SPACK_SIGNING_KEY

    ci_artifact_dir = get_env_var('CI_PROJECT_DIR')
    signing_key = get_env_var('SPACK_SIGNING_KEY')
    root_spec = get_env_var('SPACK_ROOT_SPEC')
    job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
    compiler_action = get_env_var('SPACK_COMPILER_ACTION')
    cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
    related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
    pr_env_var = get_env_var('SPACK_IS_PR_PIPELINE')

    gitlab_ci = None
    if 'gitlab-ci' in yaml_root:
        gitlab_ci = yaml_root['gitlab-ci']

    if not gitlab_ci:
        tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

    enable_cdash = False
    if 'cdash' in yaml_root:
        enable_cdash = True
        ci_cdash = yaml_root['cdash']
        job_spec_buildgroup = ci_cdash['build-group']
        cdash_base_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']
        tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
        tty.debug('cdash_project = {0}'.format(cdash_project))
        tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
        tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
        tty.debug('cdash_site = {0}'.format(cdash_site))
        tty.debug('related_builds = {0}'.format(related_builds))
        tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))

    remote_mirror_url = None
    if 'mirrors' in yaml_root:
        ci_mirrors = yaml_root['mirrors']
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]

    if not remote_mirror_url:
        tty.die('spack ci rebuild requires an env containing a mirror')

    tty.debug('ci_artifact_dir = {0}'.format(ci_artifact_dir))
    tty.debug('root_spec = {0}'.format(root_spec))
    tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
    tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
    tty.debug('compiler_action = {0}'.format(compiler_action))

    spack_cmd = exe.which('spack')

    cdash_report_dir = os.path.join(ci_artifact_dir, 'cdash_report')
    temp_dir = os.path.join(ci_artifact_dir, 'jobs_scratch_dir')
    job_log_dir = os.path.join(temp_dir, 'logs')
    spec_dir = os.path.join(temp_dir, 'specs')

    local_mirror_dir = os.path.join(ci_artifact_dir, 'local_mirror')
    build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')

    spack_is_pr_pipeline = (pr_env_var == 'True')

    enable_artifacts_mirror = False
    artifact_mirror_url = None
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']
        if enable_artifacts_mirror or spack_is_pr_pipeline:
            # If this is a PR pipeline, we will override the setting to
            # make sure that artifacts buildcache is enabled.  Otherwise
            # jobs will not have binary deps available since we do not
            # allow pushing binaries to remote mirror during PR pipelines
            enable_artifacts_mirror = True
            artifact_mirror_url = 'file://' + local_mirror_dir
            mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
                artifact_mirror_url)
            tty.debug(mirror_msg)

    # Clean out scratch directory from last stage
    if os.path.exists(temp_dir):
        shutil.rmtree(temp_dir)

    if os.path.exists(cdash_report_dir):
        shutil.rmtree(cdash_report_dir)

    os.makedirs(job_log_dir)
    os.makedirs(spec_dir)

    job_spec_yaml_path = os.path.join(spec_dir,
                                      '{0}.yaml'.format(job_spec_pkg_name))
    job_log_file = os.path.join(job_log_dir, 'pipeline_log.txt')

    cdash_build_id = None
    cdash_build_stamp = None

    with open(job_log_file, 'w') as log_fd:
        os.dup2(log_fd.fileno(), sys.stdout.fileno())
        os.dup2(log_fd.fileno(), sys.stderr.fileno())

        current_directory = os.getcwd()
        tty.debug('Current working directory: {0}, Contents:'.format(
            current_directory))
        directory_list = os.listdir(current_directory)
        for next_entry in directory_list:
            tty.debug('  {0}'.format(next_entry))

        # Make a copy of the environment file, so we can overwrite the changed
        # version in between the two invocations of "spack install"
        env_src_path = env.manifest_path
        env_dirname = os.path.dirname(env_src_path)
        env_filename = os.path.basename(env_src_path)
        env_copyname = '{0}_BACKUP'.format(env_filename)
        env_dst_path = os.path.join(env_dirname, env_copyname)
        shutil.copyfile(env_src_path, env_dst_path)

        tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))

        if signing_key:
            spack_ci.import_signing_key(signing_key)

        spack_ci.configure_compilers(compiler_action)

        spec_map = spack_ci.get_concrete_specs(root_spec, job_spec_pkg_name,
                                               related_builds, compiler_action)

        job_spec = spec_map[job_spec_pkg_name]

        tty.debug('Here is the concrete spec: {0}'.format(job_spec))

        with open(job_spec_yaml_path, 'w') as fd:
            fd.write(job_spec.to_yaml(hash=ht.build_hash))

        tty.debug('Done writing concrete spec')

        # DEBUG
        with open(job_spec_yaml_path) as fd:
            tty.debug('Wrote spec file, read it back.  Contents:')
            tty.debug(fd.read())

        # DEBUG the root spec
        root_spec_yaml_path = os.path.join(spec_dir, 'root.yaml')
        with open(root_spec_yaml_path, 'w') as fd:
            fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

        if bindist.needs_rebuild(job_spec, remote_mirror_url, True):
            # Binary on remote mirror is not up to date, we need to rebuild
            # it.
            #
            # FIXME: ensure mirror precedence causes this local mirror to
            # be chosen ahead of the remote one when installing deps
            if enable_artifacts_mirror:
                mirror_add_output = spack_cmd('mirror', 'add', 'local_mirror',
                                              artifact_mirror_url)
                tty.debug('spack mirror add:')
                tty.debug(mirror_add_output)

            mirror_list_output = spack_cmd('mirror', 'list')
            tty.debug('listing spack mirrors:')
            tty.debug(mirror_list_output)

            # 2) build up install arguments
            install_args = ['-d', '-v', '-k', 'install', '--keep-stage']

            # 3) create/register a new build on CDash (if enabled)
            cdash_args = []
            if enable_cdash:
                tty.debug('Registering build with CDash')
                (cdash_build_id,
                 cdash_build_stamp) = spack_ci.register_cdash_build(
                     cdash_build_name, cdash_base_url, cdash_project,
                     cdash_site, job_spec_buildgroup)

                cdash_upload_url = '{0}/submit.php?project={1}'.format(
                    cdash_base_url, cdash_project_enc)

                cdash_args = [
                    '--cdash-upload-url',
                    cdash_upload_url,
                    '--cdash-build',
                    cdash_build_name,
                    '--cdash-site',
                    cdash_site,
                    '--cdash-buildstamp',
                    cdash_build_stamp,
                ]

            spec_cli_arg = [job_spec_yaml_path]

            tty.debug('Installing package')

            try:
                # Two-pass install is intended to avoid spack trying to
                # install from buildcache even though the locally computed
                # full hash is different than the one stored in the spec.yaml
                # file on the remote mirror.
                first_pass_args = install_args + [
                    '--cache-only',
                    '--only',
                    'dependencies',
                ]
                first_pass_args.extend(spec_cli_arg)
                tty.debug('First pass install arguments: {0}'.format(
                    first_pass_args))
                spack_cmd(*first_pass_args)

                # Overwrite the changed environment file so it doesn't break
                # the next install invocation.
                tty.debug('Copying {0} to {1}'.format(env_dst_path,
                                                      env_src_path))
                shutil.copyfile(env_dst_path, env_src_path)

                second_pass_args = install_args + [
                    '--no-cache',
                    '--only',
                    'package',
                ]
                second_pass_args.extend(cdash_args)
                second_pass_args.extend(spec_cli_arg)
                tty.debug('Second pass install arguments: {0}'.format(
                    second_pass_args))
                spack_cmd(*second_pass_args)
            except Exception as inst:
                tty.error('Caught exception during install:')
                tty.error(inst)

            spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

            # 4) create buildcache on remote mirror, but not if this is
            # running to test a spack PR
            if not spack_is_pr_pipeline:
                spack_ci.push_mirror_contents(env, job_spec,
                                              job_spec_yaml_path,
                                              remote_mirror_url,
                                              cdash_build_id)

            # 5) create another copy of that buildcache on "local artifact
            # mirror" (only done if the artifacts buildcache is enabled)
            spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
                                          artifact_mirror_url, cdash_build_id)

            # 6) relate this build to its dependencies on CDash (if enabled)
            if enable_cdash:
                spack_ci.relate_cdash_builds(
                    spec_map, cdash_base_url, cdash_build_id, cdash_project,
                    artifact_mirror_url or remote_mirror_url)
        else:
            # There is nothing to do here unless "local artifact mirror" is
            # enabled, in which case, we need to download the buildcache to
            # the local artifacts directory to be used by dependent jobs in
            # subsequent stages
            tty.debug('No need to rebuild {0}'.format(job_spec_pkg_name))
            if enable_artifacts_mirror:
                tty.debug('Getting {0} buildcache'.format(job_spec_pkg_name))
                tty.debug('Downloading to {0}'.format(build_cache_dir))
                buildcache.download_buildcache_files(job_spec, build_cache_dir,
                                                     True, remote_mirror_url)