# Code example #1
def buildcache_update_index(args):
    """Update a buildcache index."""
    # Fall back to the current directory when no mirror url was supplied.
    target = args.mirror_url if args.mirror_url else '.'

    # Resolve through the mirror machinery so named mirrors, urls and
    # plain directories are all accepted.
    mirror = spack.mirror.MirrorCollection().lookup(target)
    push_url = url_util.format(mirror.push_url)

    cache_prefix = url_util.join(push_url, bindist.build_cache_relative_path())
    bindist.generate_package_index(cache_prefix)
# Code example #2
def update_index(mirror_url, update_keys=False):
    """Regenerate the package index for a mirror; optionally the key index too."""
    mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
    push_url = url_util.format(mirror.push_url)

    bindist.generate_package_index(
        url_util.join(push_url, bindist.build_cache_relative_path()))

    if not update_keys:
        return

    # Keys live underneath the build cache directory.
    keys_url = url_util.join(push_url,
                             bindist.build_cache_relative_path(),
                             bindist.build_cache_keys_relative_path())
    bindist.generate_key_index(keys_url)
# Code example #3
def test_generate_indices_key_error(monkeypatch, capfd):
    """Both index generators should warn rather than crash on a KeyError."""
    def raising_list_url(url, recursive=False):
        print('mocked list_url({0}, {1})'.format(url, recursive))
        raise KeyError('Test KeyError handling')

    monkeypatch.setattr(web_util, 'list_url', raising_list_url)

    url = 'file:///fake/keys/dir'

    # generate_key_index must swallow the KeyError and emit a warning.
    bindist.generate_key_index(url)
    stderr = capfd.readouterr()[1]
    assert 'Warning: No keys at {0}'.format(url) in stderr

    # generate_package_index must behave the same way.
    bindist.generate_package_index(url)
    stderr = capfd.readouterr()[1]
    assert 'Warning: No packages at {0}'.format(url) in stderr
# Code example #4
def test_generate_indices_exception(monkeypatch, capfd):
    """Both index generators should report, not propagate, a generic Exception."""
    def raising_list_url(url, recursive=False):
        print('mocked list_url({0}, {1})'.format(url, recursive))
        raise Exception('Test Exception handling')

    monkeypatch.setattr(web_util, 'list_url', raising_list_url)

    url = 'file:///fake/keys/dir'

    # generate_key_index must catch the Exception and report it on stderr.
    bindist.generate_key_index(url)
    stderr = capfd.readouterr()[1]
    assert 'Encountered problem listing keys at {0}'.format(url) in stderr

    # generate_package_index must behave the same way.
    bindist.generate_package_index(url)
    stderr = capfd.readouterr()[1]
    assert 'Encountered problem listing packages at {0}'.format(url) in stderr
# Code example #5
def createtarball(args):
    """create a binary package from an existing install"""
    # Determine the set of package specifiers: either one spec read from a
    # yaml file, or the explicit package arguments on the command line.
    if args.spec_yaml:
        packages = set()
        tty.msg('createtarball, reading spec from {0}'.format(args.spec_yaml))
        with open(args.spec_yaml, 'r') as fd:
            yaml_text = fd.read()
            tty.debug('createtarball read spec yaml:')
            tty.debug(yaml_text)
            s = Spec.from_yaml(yaml_text)
            # Refer to the spec by DAG hash so matching is unambiguous.
            packages.add('/{0}'.format(s.dag_hash()))
    elif args.packages:
        packages = args.packages
    else:
        tty.die("build cache file creation requires at least one" +
                " installed package argument or else path to a" +
                " yaml file containing a spec to install")
    pkgs = set(packages)
    specs = set()

    # Resolve the output location through the mirror machinery so named
    # mirrors, urls and plain directories are all accepted.
    outdir = '.'
    if args.directory:
        outdir = args.directory

    mirror = spack.mirror.MirrorCollection().lookup(outdir)
    outdir = url_util.format(mirror.push_url)

    signkey = None
    if args.key:
        signkey = args.key

    # restrict matching to current environment if one is active
    env = ev.get_env(args, 'buildcache create')

    matches = find_matching_specs(pkgs, env=env)

    if matches:
        tty.debug('Found at least one matching spec')

    # Collect the concrete specs to tarball, skipping external/virtual
    # specs and (optionally) walking link/run dependencies.
    for match in matches:
        tty.debug('examining match {0}'.format(match.format()))
        if match.external or match.virtual:
            tty.debug('skipping external or virtual spec %s' % match.format())
        else:
            tty.debug('adding matching spec %s' % match.format())
            if "package" in args.target_type:
                specs.add(match)
            if "dependencies" not in args.target_type:
                # if the user does not want dependencies, stop here
                continue
            tty.debug('recursing dependencies')
            for d, node in match.traverse(order='post',
                                          depth=True,
                                          deptype=('link', 'run')):
                if node.external or node.virtual:
                    tty.debug('skipping external or virtual dependency %s' %
                              node.format())
                else:
                    tty.debug('adding dependency %s' % node.format())
                    specs.add(node)

    tty.debug('writing tarballs to %s/build_cache' % outdir)

    f_create = ft.partial(create_single_tarball,
                          outdir=outdir,
                          force=args.force,
                          relative=args.rel,
                          unsigned=args.unsigned,
                          allow_root=args.allow_root,
                          signkey=signkey,
                          # with a single job the tarball creation itself can
                          # rebuild the index in-line; with multiple jobs it
                          # is rebuilt once afterwards (below)
                          rebuild_index=args.rebuild_index and args.jobs == 1,
                          catch_exceptions=args.jobs != 1)

    # default behavior (early termination) for one job
    if args.jobs == 1:
        for spec in specs:
            f_create(spec)

    else:
        # currently, specs cause an infinite recursion bug when pickled
        # -> as multiprocessing uses pickle internally, we need to transform
        #    specs prior to distributing the work via worker pool
        # TODO: check if specs can be pickled
        specs = [s.to_dict() for s in specs]

        pool = NoDaemonPool(args.jobs if args.jobs > 1 else mp.cpu_count())
        # chunksize=1 because we do not want to pre-allocate specs to workers
        # (since each package will need a different amount of time to be
        # compressed)
        retvals = pool.map(f_create, specs, chunksize=1)

        errors = [rv["error"] for rv in retvals if rv["error"] is not None]
        list(map(tty.error, errors))
        if len(errors) > 0:
            sys.exit(1)

        # perform rebuild of index unless user requested not to
        if args.rebuild_index:
            # FIX: generate_package_index expects the build cache prefix
            # (push url joined with the build cache relative path) — this
            # matches the other call sites in this file, which previously
            # disagreed with the bare mirror url passed here.
            bindist.generate_package_index(
                url_util.join(outdir, bindist.build_cache_relative_path()))