def test_spec_needs_rebuild(install_mockery_mutable_config, mock_packages,
                            mock_fetch, monkeypatch, tmpdir):
    """Make sure needs_rebuild properly compares remote full_hash
    against locally computed one, avoiding unnecessary rebuilds"""

    # Create a temp mirror directory for buildcache usage
    mirror_dir = tmpdir.join('mirror_dir')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    mirror_cmd('add', 'test-mirror', mirror_url)

    s = Spec('libdwarf').concretized()

    # Install a package
    install_cmd(s.name)

    # Put installed package in the buildcache
    buildcache_cmd('create', '-u', '-a', '-d', mirror_dir.strpath, s.name)

    rebuild = bindist.needs_rebuild(s, mirror_url, rebuild_on_errors=True)

    assert not rebuild

    # Now monkey patch Spec to change the full hash on the package
    def fake_full_hash(spec):
        print('fake_full_hash')
        return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
    monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)

    rebuild = bindist.needs_rebuild(s, mirror_url, rebuild_on_errors=True)

    assert rebuild
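# For reference, a minimal sketch (an assumption for illustration, not
# Spack's actual implementation) of the comparison the test above exercises:
# read the full_hash recorded in the mirror's metadata and compare it with
# the locally computed one.  The ``read_remote_full_hash`` callable is a
# hypothetical stand-in for fetching and parsing the remote spec.yaml.
def _needs_rebuild_sketch(spec, read_remote_full_hash):
    remote_hash = read_remote_full_hash(spec)
    if remote_hash is None:
        # No metadata on the mirror at all: the spec was never pushed,
        # so it needs to be (re)built.
        return True
    # Rebuild only when the stored hash disagrees with the local one.
    return remote_hash != spec.full_hash()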
def ci_rebuild(args):
    """This command represents a gitlab-ci job, corresponding to a single
    release spec.  As such it must first decide whether or not the spec it
    has been assigned to build is up to date on the remote binary mirror.
    If it is not (i.e. the full_hash of the spec as computed locally does
    not match the one stored in the metadata on the mirror), this script
    will build the package, create a binary cache for it, and then push all
    related files to the remote binary mirror.  This script also
    communicates with a remote CDash instance to share status on the
    package build process.

    The spec to be built by this job is represented by essentially two
    pieces of information: 1) a root spec (possibly already concrete, but
    maybe still needing to be concretized) and 2) a package name used to
    index that root spec (once the root is, for certain, concrete)."""
    env = ev.get_env(args, 'ci rebuild', required=True)
    yaml_root = ev.config_dict(env.yaml)

    # The following environment variables should be defined in the CI
    # infrastructure (or some other external source) in the case that the
    # remote mirror is an S3 bucket.  The AWS keys are used to upload
    # buildcache entries to S3 using the boto3 api.
    #
    #     AWS_ACCESS_KEY_ID
    #     AWS_SECRET_ACCESS_KEY
    #     S3_ENDPOINT_URL (only needed for non-AWS S3 implementations)
    #
    # If present, we will import the SPACK_SIGNING_KEY using the
    # "spack gpg trust" command, so it can be used both for verifying
    # dependency buildcache entries and signing the buildcache entry we
    # create for our target pkg.
    #
    #     SPACK_SIGNING_KEY

    ci_artifact_dir = get_env_var('CI_PROJECT_DIR')
    signing_key = get_env_var('SPACK_SIGNING_KEY')
    root_spec = get_env_var('SPACK_ROOT_SPEC')
    job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
    compiler_action = get_env_var('SPACK_COMPILER_ACTION')
    cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
    related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
    pr_env_var = get_env_var('SPACK_IS_PR_PIPELINE')

    gitlab_ci = None
    if 'gitlab-ci' in yaml_root:
        gitlab_ci = yaml_root['gitlab-ci']

    if not gitlab_ci:
        tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')

    enable_cdash = False
    if 'cdash' in yaml_root:
        enable_cdash = True
        ci_cdash = yaml_root['cdash']
        job_spec_buildgroup = ci_cdash['build-group']
        cdash_base_url = ci_cdash['url']
        cdash_project = ci_cdash['project']
        proj_enc = urlencode({'project': cdash_project})
        eq_idx = proj_enc.find('=') + 1
        cdash_project_enc = proj_enc[eq_idx:]
        cdash_site = ci_cdash['site']
        tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
        tty.debug('cdash_project = {0}'.format(cdash_project))
        tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
        tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
        tty.debug('cdash_site = {0}'.format(cdash_site))
        tty.debug('related_builds = {0}'.format(related_builds))
        tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))

    remote_mirror_url = None
    if 'mirrors' in yaml_root:
        ci_mirrors = yaml_root['mirrors']
        mirror_urls = [url for url in ci_mirrors.values()]
        remote_mirror_url = mirror_urls[0]

    if not remote_mirror_url:
        tty.die('spack ci rebuild requires an env containing a mirror')

    tty.debug('ci_artifact_dir = {0}'.format(ci_artifact_dir))
    tty.debug('root_spec = {0}'.format(root_spec))
    tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
    tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
    tty.debug('compiler_action = {0}'.format(compiler_action))

    spack_cmd = exe.which('spack')

    cdash_report_dir = os.path.join(ci_artifact_dir, 'cdash_report')
    temp_dir = os.path.join(ci_artifact_dir, 'jobs_scratch_dir')
    job_log_dir = os.path.join(temp_dir, 'logs')
    spec_dir = os.path.join(temp_dir, 'specs')

    local_mirror_dir = os.path.join(ci_artifact_dir, 'local_mirror')
    build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')

    spack_is_pr_pipeline = (pr_env_var == 'True')

    enable_artifacts_mirror = False
    artifact_mirror_url = None
    if 'enable-artifacts-buildcache' in gitlab_ci:
        enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']

    if enable_artifacts_mirror or spack_is_pr_pipeline:
        # If this is a PR pipeline, we will override the setting to make
        # sure that the artifacts buildcache is enabled.  Otherwise jobs
        # will not have binary deps available, since we do not allow
        # pushing binaries to the remote mirror during PR pipelines.
        enable_artifacts_mirror = True
        artifact_mirror_url = 'file://' + local_mirror_dir
        mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
            artifact_mirror_url)
        tty.debug(mirror_msg)

    # Clean out scratch directory from last stage
    if os.path.exists(temp_dir):
        shutil.rmtree(temp_dir)

    if os.path.exists(cdash_report_dir):
        shutil.rmtree(cdash_report_dir)

    os.makedirs(job_log_dir)
    os.makedirs(spec_dir)

    job_spec_yaml_path = os.path.join(
        spec_dir, '{0}.yaml'.format(job_spec_pkg_name))
    job_log_file = os.path.join(job_log_dir, 'pipeline_log.txt')

    cdash_build_id = None
    cdash_build_stamp = None

    with open(job_log_file, 'w') as log_fd:
        os.dup2(log_fd.fileno(), sys.stdout.fileno())
        os.dup2(log_fd.fileno(), sys.stderr.fileno())

        current_directory = os.getcwd()
        tty.debug('Current working directory: {0}, Contents:'.format(
            current_directory))
        directory_list = os.listdir(current_directory)
        for next_entry in directory_list:
            tty.debug('  {0}'.format(next_entry))

        # Make a copy of the environment file, so we can overwrite the
        # changed version in between the two invocations of "spack install"
        env_src_path = env.manifest_path
        env_dirname = os.path.dirname(env_src_path)
        env_filename = os.path.basename(env_src_path)
        env_copyname = '{0}_BACKUP'.format(env_filename)
        env_dst_path = os.path.join(env_dirname, env_copyname)
        shutil.copyfile(env_src_path, env_dst_path)

        tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))

        if signing_key:
            spack_ci.import_signing_key(signing_key)

        spack_ci.configure_compilers(compiler_action)

        spec_map = spack_ci.get_concrete_specs(
            root_spec, job_spec_pkg_name, related_builds, compiler_action)

        job_spec = spec_map[job_spec_pkg_name]

        tty.debug('Here is the concrete spec: {0}'.format(job_spec))

        with open(job_spec_yaml_path, 'w') as fd:
            fd.write(job_spec.to_yaml(hash=ht.build_hash))

        tty.debug('Done writing concrete spec')

        # DEBUG
        with open(job_spec_yaml_path) as fd:
            tty.debug('Wrote spec file, read it back.  Contents:')
            tty.debug(fd.read())

        # DEBUG the root spec
        root_spec_yaml_path = os.path.join(spec_dir, 'root.yaml')
        with open(root_spec_yaml_path, 'w') as fd:
            fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))

        if bindist.needs_rebuild(job_spec, remote_mirror_url, True):
            # Binary on remote mirror is not up to date, we need to
            # rebuild it.
            #
            # FIXME: ensure mirror precedence causes this local mirror to
            # be chosen ahead of the remote one when installing deps

            # 1) add the "local artifact mirror" so dependency binaries can
            # be installed from the per-pipeline artifacts buildcache
            if enable_artifacts_mirror:
                mirror_add_output = spack_cmd(
                    'mirror', 'add', 'local_mirror', artifact_mirror_url)
                tty.debug('spack mirror add:')
                tty.debug(mirror_add_output)

            mirror_list_output = spack_cmd('mirror', 'list')
            tty.debug('listing spack mirrors:')
            tty.debug(mirror_list_output)

            # 2) build up install arguments
            install_args = ['-d', '-v', '-k', 'install', '--keep-stage']

            # 3) create/register a new build on CDash (if enabled)
            cdash_args = []
            if enable_cdash:
                tty.debug('Registering build with CDash')
                (cdash_build_id,
                 cdash_build_stamp) = spack_ci.register_cdash_build(
                    cdash_build_name, cdash_base_url, cdash_project,
                    cdash_site, job_spec_buildgroup)

                cdash_upload_url = '{0}/submit.php?project={1}'.format(
                    cdash_base_url, cdash_project_enc)

                cdash_args = [
                    '--cdash-upload-url', cdash_upload_url,
                    '--cdash-build', cdash_build_name,
                    '--cdash-site', cdash_site,
                    '--cdash-buildstamp', cdash_build_stamp,
                ]

            spec_cli_arg = [job_spec_yaml_path]

            tty.debug('Installing package')

            try:
                # Two-pass install is intended to avoid spack trying to
                # install from buildcache even though the locally computed
                # full hash is different than the one stored in the
                # spec.yaml file on the remote mirror.
                first_pass_args = install_args + [
                    '--cache-only',
                    '--only',
                    'dependencies',
                ]
                first_pass_args.extend(spec_cli_arg)
                tty.debug('First pass install arguments: {0}'.format(
                    first_pass_args))
                spack_cmd(*first_pass_args)

                # Overwrite the changed environment file so it doesn't
                # break the next install invocation.
                tty.debug('Copying {0} to {1}'.format(
                    env_dst_path, env_src_path))
                shutil.copyfile(env_dst_path, env_src_path)

                second_pass_args = install_args + [
                    '--no-cache',
                    '--only',
                    'package',
                ]
                second_pass_args.extend(cdash_args)
                second_pass_args.extend(spec_cli_arg)
                tty.debug('Second pass install arguments: {0}'.format(
                    second_pass_args))
                spack_cmd(*second_pass_args)
            except Exception as inst:
                tty.error('Caught exception during install:')
                tty.error(inst)

            spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)

            # 4) create buildcache on remote mirror, but not if this is
            # running to test a spack PR
            if not spack_is_pr_pipeline:
                spack_ci.push_mirror_contents(
                    env, job_spec, job_spec_yaml_path, remote_mirror_url,
                    cdash_build_id)

            # 5) create another copy of that buildcache on the "local
            # artifact mirror" (only done if the artifacts buildcache is
            # enabled; artifact_mirror_url is None otherwise)
            spack_ci.push_mirror_contents(
                env, job_spec, job_spec_yaml_path, artifact_mirror_url,
                cdash_build_id)

            # 6) relate this build to its dependencies on CDash (if enabled)
            if enable_cdash:
                spack_ci.relate_cdash_builds(
                    spec_map, cdash_base_url, cdash_build_id, cdash_project,
                    artifact_mirror_url or remote_mirror_url)
        else:
            # There is nothing to do here unless the "local artifact
            # mirror" is enabled, in which case we need to download the
            # buildcache to the local artifacts directory so it can be
            # used by dependent jobs in subsequent stages
            tty.debug('No need to rebuild {0}'.format(job_spec_pkg_name))
            if enable_artifacts_mirror:
                tty.debug('Getting {0} buildcache'.format(job_spec_pkg_name))
                tty.debug('Downloading to {0}'.format(build_cache_dir))
                buildcache.download_buildcache_files(
                    job_spec, build_cache_dir, True, remote_mirror_url)
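# Illustrative only: the environment a gitlab-ci job might provide before
# invoking this command.  All values below are hypothetical placeholders,
# not real project paths, specs, or credentials.
#
#   export CI_PROJECT_DIR=/builds/my-org/my-project
#   export SPACK_ROOT_SPEC='libdwarf%gcc@9.3.0 arch=linux-ubuntu18.04-x86_64'
#   export SPACK_JOB_SPEC_PKG_NAME=libdwarf
#   export SPACK_COMPILER_ACTION=NONE
#   export SPACK_IS_PR_PIPELINE=False
#
#   spack -e /path/to/ci/environment ci rebuild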