def write_cdashid_to_mirror(cdashid, spec, mirror_url):
    """Record a CDash build id for ``spec`` on a binary mirror.

    The id is written to a scratch file locally and then pushed to the
    mirror's build cache directory under ``<tarball name>.cdashid``.

    Args:
        cdashid (str): CDash build id to record.
        spec: concrete spec the id belongs to (must be concrete, since
            the tarball name is derived from the concrete spec).
        mirror_url (str): base URL of the target mirror.
    """
    # Tarball names are only well-defined for concrete specs.
    if not spec.concrete:
        tty.die('Can only write cdashid for concrete spec to mirror')

    with TemporaryDirectory() as scratch_dir:
        # Stage the id in a local file, then push that file to the mirror.
        cdashid_path = os.path.join(scratch_dir, 'job.cdashid')
        with open(cdashid_path, 'w') as fout:
            fout.write(cdashid)

        remote_file_name = '{0}.cdashid'.format(bindist.tarball_name(spec, ''))
        remote_url = os.path.join(
            mirror_url,
            bindist.build_cache_relative_path(),
            remote_file_name)

        tty.debug('pushing cdashid to url')
        tty.debug(' local file path: {0}'.format(cdashid_path))
        tty.debug(' remote url: {0}'.format(remote_url))

        web_util.push_to_url(cdashid_path, remote_url)
def _resolve_sync_location(directory, mirror_name, mirror_url, directory_flag):
    """Resolve one endpoint of a buildcache sync to a mirror location.

    At most one of ``directory``, ``mirror_name`` and ``mirror_url`` is
    expected to be set; the first truthy one wins.

    Args:
        directory (str or None): local filesystem path.
        mirror_name (str or None): name of a configured mirror.
        mirror_url (str or None): URL of a mirror.
        directory_flag (str): CLI flag name (e.g. ``--src-directory``),
            used in the error message when ``directory`` is actually a URL.

    Returns:
        (str or None): location string suitable for
        ``spack.mirror.MirrorCollection().lookup()``, or ``None`` when no
        option was provided (matching the previous behavior).

    Raises:
        ValueError: if the chosen option does not validate.
    """
    if directory:
        scheme = url_util.parse(directory, scheme='<missing>').scheme
        if scheme != '<missing>':
            raise ValueError(
                '"{0}" expected a local path; got a URL, instead'.format(
                    directory_flag))
        # Ensure that the mirror lookup does not mistake this for a
        # named mirror
        return 'file://' + directory

    if mirror_name:
        result = spack.mirror.MirrorCollection().lookup(mirror_name)
        if result.name == "<unnamed>":
            raise ValueError('no configured mirror named "{name}"'.format(
                name=mirror_name))
        return mirror_name

    if mirror_url:
        scheme = url_util.parse(mirror_url, scheme='<missing>').scheme
        if scheme == '<missing>':
            raise ValueError(
                '"{url}" is not a valid URL'.format(url=mirror_url))
        return mirror_url

    return None


def buildcache_sync(args):
    """
    Syncs binaries (and associated metadata) from one mirror to another.
    Requires an active environment in order to know which specs to sync.

    Args:
        src (str): Source mirror URL
        dest (str): Destination mirror URL
    """
    # Figure out the source mirror
    source_location = _resolve_sync_location(
        args.src_directory, args.src_mirror_name, args.src_mirror_url,
        '--src-directory')
    src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
    src_mirror_url = url_util.format(src_mirror.fetch_url)

    # Figure out the destination mirror
    dest_location = _resolve_sync_location(
        args.dest_directory, args.dest_mirror_name, args.dest_mirror_url,
        '--dest-directory')
    dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
    # NOTE(review): both endpoints use fetch_url; the destination
    # presumably should use push_url when they differ -- confirm before
    # changing.
    dest_mirror_url = url_util.format(dest_mirror.fetch_url)

    # Get the active environment
    env = spack.cmd.require_active_env(cmd_name='buildcache sync')

    tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
        src_mirror_url, dest_mirror_url))

    build_cache_dir = bindist.build_cache_relative_path()
    buildcache_rel_paths = []

    tty.debug('Syncing the following specs:')
    for s in env.all_specs():
        tty.debug(' {0}{1}: {2}'.format(
            '* ' if s in env.roots() else ' ', s.name, s.dag_hash()))

        # Three files per spec: tarball, spec metadata, optional cdashid.
        buildcache_rel_paths.extend([
            os.path.join(
                build_cache_dir, bindist.tarball_path_name(s, '.spack')),
            os.path.join(
                build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
            os.path.join(
                build_cache_dir, bindist.tarball_name(s, '.cdashid'))
        ])

    tmpdir = tempfile.mkdtemp()

    try:
        for rel_path in buildcache_rel_paths:
            src_url = url_util.join(src_mirror_url, rel_path)
            local_path = os.path.join(tmpdir, rel_path)
            dest_url = url_util.join(dest_mirror_url, rel_path)

            tty.debug('Copying {0} to {1} via {2}'.format(
                src_url, dest_url, local_path))

            stage = Stage(src_url, name="temporary_file",
                          path=os.path.dirname(local_path), keep=True)

            try:
                stage.create()
                stage.fetch()
                web_util.push_to_url(local_path, dest_url, keep_original=True)
            except fs.FetchError as e:
                # Missing files (e.g. an absent cdashid) are expected;
                # log and continue with the remaining files.
                tty.debug(
                    'spack buildcache unable to sync {0}'.format(rel_path))
                tty.debug(e)
            finally:
                stage.destroy()
    finally:
        shutil.rmtree(tmpdir)
def buildcache_copy(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-yaml' argument."""
    # TODO: This sub-command should go away once #11117 is merged

    # All three arguments are required; bail out early when any is missing.
    if not args.spec_yaml:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    # Only local destinations are supported by this deprecated sub-command.
    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_yaml, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        # BUG FIX: error message previously read "concrectize" (typo).
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_yaml))
        sys.exit(1)

    # Strip the file:// scheme, if present, to get a filesystem path.
    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    cdashidfile_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
    cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
    cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)

    # Make sure directory structure exists before attempting to copy.
    # BUG FIX: exist_ok avoids a FileExistsError when copying into a
    # mirror directory that already exists.
    os.makedirs(os.path.dirname(tarball_dest_path), exist_ok=True)

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    # Copy the cdashid file (if exists) to the destination mirror
    if os.path.exists(cdashid_src_path):
        tty.msg('Copying {0}'.format(cdashidfile_rel_path))
        shutil.copyfile(cdashid_src_path, cdashid_dest_path)
def copy_fn(args):
    """Copy a buildcache entry and all its files from one mirror, given as
    '--base-dir', to some other mirror, specified as '--destination-url'.
    The specific buildcache entry to be copied from one location to the
    other is identified using the '--spec-file' argument."""
    # TODO: Remove after v0.18.0 release
    msg = ('"spack buildcache copy" is deprecated and will be removed from '
           'Spack starting in v0.19.0')
    warnings.warn(msg)

    # All three arguments are required; bail out early when any is missing.
    if not args.spec_file:
        tty.msg('No spec yaml provided, exiting.')
        sys.exit(1)

    if not args.base_dir:
        tty.msg('No base directory provided, exiting.')
        sys.exit(1)

    if not args.destination_url:
        tty.msg('No destination mirror url provided, exiting.')
        sys.exit(1)

    dest_url = args.destination_url

    # Only local destinations are supported by this deprecated sub-command.
    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
                'by buildcache copy.')
        sys.exit(1)

    try:
        with open(args.spec_file, 'r') as fd:
            spec = Spec.from_yaml(fd.read())
    except Exception as e:
        tty.debug(e)
        # BUG FIX: error message previously read "concrectize" (typo).
        tty.error('Unable to concretize spec from yaml {0}'.format(
            args.spec_file))
        sys.exit(1)

    # Strip the file:// scheme, if present, to get a filesystem path.
    dest_root_path = dest_url
    if dest_url[0:7] == 'file://':
        dest_root_path = dest_url[7:]

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)

    specfile_rel_path = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.json'))
    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)

    specfile_rel_path_yaml = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    # BUG FIX: the yaml src/dest paths previously reused specfile_rel_path
    # (the .spec.json relative path), so the .spec.yaml file was never
    # actually copied -- the json file was copied twice instead.
    specfile_src_path_yaml = os.path.join(args.base_dir,
                                          specfile_rel_path_yaml)
    specfile_dest_path_yaml = os.path.join(dest_root_path,
                                           specfile_rel_path_yaml)

    # Make sure directory structure exists before attempting to copy.
    # BUG FIX: exist_ok avoids a FileExistsError when copying into a
    # mirror directory that already exists.
    os.makedirs(os.path.dirname(tarball_dest_path), exist_ok=True)

    # Now copy the specfile and tarball files to the destination mirror
    tty.msg('Copying {0}'.format(tarball_rel_path))
    shutil.copyfile(tarball_src_path, tarball_dest_path)

    tty.msg('Copying {0}'.format(specfile_rel_path))
    shutil.copyfile(specfile_src_path, specfile_dest_path)

    # The yaml spec file may be absent (newer caches carry only the json
    # metadata); copy it only when present so the corrected path above
    # does not introduce a crash.
    if os.path.exists(specfile_src_path_yaml):
        tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
        shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
def update_index(args):
    """Update the index of an s3 buildcache.

    Scans the bucket for ``.spec.yaml`` and ``.spack`` objects, renders
    an HTML index from them, and uploads it as ``index.html``.

    Args:
        args: parsed CLI namespace; uses ``args.endpoint_url``.
    """
    s3, bucket_name = get_s3_session(args.endpoint_url)

    bucket = s3.Bucket(bucket_name)
    exists = True

    try:
        s3.meta.client.head_bucket(Bucket=bucket_name)
    except botocore.exceptions.ClientError as e:
        # If a client error is thrown, then check that it was a 404 error.
        # If it was a 404 error, then the bucket does not exist.
        error_code = e.response['Error']['Code']
        if error_code == '404':
            exists = False

    if not exists:
        tty.error('S3 bucket "{0}" does not exist'.format(bucket_name))
        sys.exit(1)

    build_cache_dir = os.path.join(
        'mirror', bindist.build_cache_relative_path())

    # BUG FIX: escape the directory prefix so any regex metacharacters in
    # the build cache path cannot corrupt the patterns below.
    escaped_dir = re.escape(build_cache_dir)
    spec_yaml_regex = re.compile('{0}/(.+\\.spec\\.yaml)$'.format(
        escaped_dir))
    spack_regex = re.compile('{0}/([^/]+)/.+\\.spack$'.format(
        escaped_dir))

    top_level_keys = set()

    for key in bucket.objects.all():
        m = spec_yaml_regex.search(key.key)
        if m:
            top_level_keys.add(m.group(1))
            # Was a bare print(); routed through tty.debug for consistency
            # with the rest of this module's output handling.
            tty.debug(m.group(1))
            continue

        m = spack_regex.search(key.key)
        if m:
            top_level_keys.add(m.group(1))
            tty.debug(m.group(1))
            continue

    index_data = {
        'top_level_keys': top_level_keys,
    }

    env = template_engine.make_environment()
    template_dir = 'misc'
    index_template = os.path.join(template_dir, 'buildcache_index.html')
    t = env.get_template(index_template)
    contents = t.render(index_data)

    index_key = os.path.join(build_cache_dir, 'index.html')

    tty.debug('Generated index:')
    tty.debug(contents)
    tty.debug('Pushing it to {0} -> {1}'.format(bucket_name, index_key))

    s3_obj = s3.Object(bucket_name, index_key)
    s3_obj.put(Body=contents, ACL='public-read')
def upload_spec(args):
    """Upload a spec to s3 bucket.

    Uploads the tarball, the spec metadata file, and (when present) the
    cdashid file for the given spec to the ``mirror/`` prefix of the
    configured bucket.

    Args:
        args: parsed CLI namespace; uses ``args.spec`` or
            ``args.spec_yaml``, plus ``args.base_dir`` and
            ``args.endpoint_url``.
    """
    if not args.spec and not args.spec_yaml:
        tty.error('Cannot upload spec without spec arg or path to spec yaml')
        sys.exit(1)

    if not args.base_dir:
        tty.error('No base directory for buildcache specified')
        sys.exit(1)

    if args.spec:
        try:
            spec = Spec(args.spec)
            spec.concretize()
        except Exception as e:
            tty.debug(e)
            # BUG FIX: message previously read "concrectize" (typo).
            tty.error('Unable to concretize spec from string {0}'.format(
                args.spec))
            sys.exit(1)
    else:
        try:
            with open(args.spec_yaml, 'r') as fd:
                spec = Spec.from_yaml(fd.read())
        except Exception as e:
            tty.debug(e)
            # BUG FIX: message previously read "concrectize" (typo).
            tty.error('Unable to concretize spec from yaml {0}'.format(
                args.spec_yaml))
            sys.exit(1)

    s3, bucket_name = get_s3_session(args.endpoint_url)

    build_cache_dir = bindist.build_cache_relative_path()

    tarball_key = os.path.join(
        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
    specfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
    cdashidfile_key = os.path.join(
        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))

    # (key, required) pairs: the tarball and the spec file are always
    # uploaded; the cdashid file only when it exists locally.  Order
    # matches the original per-file upload stanzas.
    uploads = [
        (tarball_key, True),
        (specfile_key, True),
        (cdashidfile_key, False),
    ]

    for key, required in uploads:
        local_path = os.path.join(args.base_dir, key)
        if not required and not os.path.exists(local_path):
            continue
        tty.msg('Uploading {0}'.format(key))
        s3.meta.client.upload_file(
            local_path, bucket_name, os.path.join('mirror', key),
            ExtraArgs={'ACL': 'public-read'})