Example 1
def provide(recipe_dir, meta, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    print("Removing old work directory")
    rm_rf(WORK_DIR)
    if 'fn' in meta:
        unpack(meta)
    elif 'git_url' in meta:
        git_source(meta, recipe_dir)
    elif 'hg_url' in meta:
        hg_source(meta)
    elif 'svn_url' in meta:
        svn_source(meta)
    elif 'path' in meta:
        print("Copying %s to %s" % (abspath(join(recipe_dir, meta.get('path'))), WORK_DIR))
        copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)
    else: # no source
        os.makedirs(WORK_DIR)

    if patch:
        src_dir = get_dir()
        for patch in meta.get('patches', []):
            apply_patch(src_dir, join(recipe_dir, patch))
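A minimal, self-contained sketch of the dispatch idea provide() implements (the meta dict and the handler labels below are hypothetical, not conda-build's API): the first source key found in meta decides how the work directory gets populated.

meta = {"git_url": "https://example.com/repo.git", "patches": ["0001-fix.patch"]}

handlers = [
    ("fn", "download and unpack an archive"),
    ("git_url", "clone a git repository"),
    ("hg_url", "clone a mercurial repository"),
    ("svn_url", "check out a subversion repository"),
    ("path", "copy a local directory tree"),
]
# the first matching key wins, mirroring the if/elif chain above
action = next((label for key, label in handlers if key in meta),
              "create an empty work directory")
print(action)  # -> clone a git repository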
Example 2
def get_pkginfo(package, filename, pypiurl, md5, python_version, config, setup_options):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(config.src_cache):
        makedirs(config.src_cache)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(config.src_cache, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version, config=config, setup_options=setup_options)
        with open(join(tempdir, 'pkginfo.yaml')) as fn:
            pkginfo = yaml.safe_load(fn)
    finally:
        rm_rf(tempdir)

    return pkginfo
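The cache check above (re-download only when the file is missing or its MD5 no longer matches) can be read in isolation; here is a small runnable sketch of the same test, where needs_download is a hypothetical helper:

import hashlib
import os

def needs_download(path, expected_md5):
    # re-download when the cached file is absent or its checksum drifted
    if not os.path.isfile(path):
        return True
    h = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            h.update(chunk)
    return h.hexdigest() != expected_md5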
Example 3
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input.  If so, we can move the input on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config
        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            #    local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but we don't know whether it's a tuple
            #    or a list, so convert it to a list before inserting.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided

        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
Example 4
def execute(args):
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    if on_win:
        delete_trash(None)

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir, no_download_source=False, config=config)
            action(m, config)

            if need_cleanup:
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe, post=args.post, build_only=args.build_only,
                   notest=args.notest, keep_old_work=args.keep_old_work,
                   already_built=None, config=config)

    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
Example 5
def run_setuppy(src_dir, temp_dir, python_version, config, setup_options):
    """
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    """
    specs = ["python %s*" % python_version, "pyyaml", "setuptools"]
    with open(os.path.join(src_dir, "setup.py")) as setup:
        text = setup.read()
        if "import numpy" in text or "from numpy" in text:
            specs.append("numpy")
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.

    create_env(config.build_prefix, specs=specs, clear_cache=False, config=config)
    stdlib_dir = join(config.build_prefix, "Lib" if sys.platform == "win32" else "lib/python%s" % python_version)

    patch = join(temp_dir, "pypi-distutils.patch")
    with open(patch, "w") as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace("\\", "\\\\")))

    if exists(join(stdlib_dir, "distutils", "core.py-copy")):
        rm_rf(join(stdlib_dir, "distutils", "core.py"))
        copy2(join(stdlib_dir, "distutils", "core.py-copy"), join(stdlib_dir, "distutils", "core.py"))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, "distutils", "__pycache__", "core.cpython-%s%s.pyc" % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, "distutils", "__pycache__", "core.cpython-%s%s.pyo" % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, "distutils", "core.pyc"))
            rm_rf(join(stdlib_dir, "distutils", "core.pyo"))
    else:
        copy2(join(stdlib_dir, "distutils", "core.py"), join(stdlib_dir, "distutils", "core.py-copy"))
    apply_patch(join(stdlib_dir, "distutils"), patch, config=config)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if "PYTHONPATH" in env:
        env[str("PYTHONPATH")] = str(src_dir + ":" + env["PYTHONPATH"])
    else:
        env[str("PYTHONPATH")] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.build_python, "setup.py", "install"]
    cmdargs.extend(setup_options)
    try:
        subprocess.check_call(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print("$PYTHONPATH = %s" % env["PYTHONPATH"])
        sys.exit("Error: command failed: %s" % " ".join(cmdargs))
    finally:
        chdir(cwd)
Example 6
def hoist_single_extracted_folder(nested_folder):
    """Moves all files/folders one level up.

    This is for when your archive extracts into its own folder, so that we don't need to
    know exactly what that folder is called."""
    flist = os.listdir(nested_folder)
    parent = os.path.dirname(nested_folder)
    for thing in flist:
        shutil.move(os.path.join(nested_folder, thing), os.path.join(parent, thing))
    rm_rf(nested_folder)
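For clarity, a self-contained demo of the same hoisting idea against a throwaway directory, using only the standard library (a plain rmdir stands in for rm_rf):

import os
import shutil
import tempfile

root = tempfile.mkdtemp()
nested = os.path.join(root, "pkg-1.0")   # the single folder the archive made
os.makedirs(nested)
open(os.path.join(nested, "setup.py"), "w").close()

for entry in os.listdir(nested):
    shutil.move(os.path.join(nested, entry), os.path.join(root, entry))
os.rmdir(nested)                          # plain rmdir stands in for rm_rf

print(os.listdir(root))                   # -> ['setup.py']
shutil.rmtree(root)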
Example 7
def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, config,
                setup_options):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(config.src_cache):
        makedirs(config.src_cache)

    hash_type = digest[0]
    hash_value = digest[1]
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, hash_type) != hash_value:
            download(pypiurl, join(config.src_cache, filename))
            if hashsum_file(download_path, hash_type) != hash_value:
                raise RuntimeError('Download of {} failed: {} checksum did not'
                                   ' match expected value {}. Please'
                                   ' try again.'.format(package, hash_type, hash_value))
        else:
            print("Using cached download")
        # Calculate the preferred hash type here if necessary.
        # Needs to be done in this block because this is where we have
        # access to the source file.
        if hash_type != POSSIBLE_DIGESTS[0]:
            new_hash_value = hashsum_file(download_path, POSSIBLE_DIGESTS[0])
        else:
            new_hash_value = ''

        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version, extra_specs=extra_specs, config=config,
                    setup_options=setup_options)
        try:
            with open(join(tempdir, 'pkginfo.yaml')) as fn:
                pkg_info = yaml.safe_load(fn)
        except IOError:
            pkg_info = pkginfo.SDist(download_path).__dict__
        if new_hash_value:
            pkg_info['new_hash_value'] = (POSSIBLE_DIGESTS[0], new_hash_value)
    finally:
        rm_rf(tempdir)

    return pkg_info
Example 8
def hoist_single_extracted_folder(nested_folder):
    """Moves all files/folders one level up.

    This is for when your archive extracts into its own folder, so that we don't need to
    know exactly what that folder is called."""
    parent = os.path.dirname(nested_folder)
    flist = os.listdir(nested_folder)
    with TemporaryDirectory() as tmpdir:
        for entry in flist:
            shutil.move(os.path.join(nested_folder, entry), os.path.join(tmpdir, entry))
        rm_rf(nested_folder)
        for entry in flist:
            shutil.move(os.path.join(tmpdir, entry), os.path.join(parent, entry))
Example 9
def check_install(packages, platform=None, channel_urls=(), prepend=True,
                  minimal_hint=False):
    prefix = tempfile.mkdtemp('conda')
    try:
        specs = specs_from_args(packages)
        index = get_index(channel_urls=channel_urls, prepend=prepend,
                          platform=platform, prefix=prefix)
        actions = install_actions(prefix, index, specs, pinned=False,
                                  minimal_hint=minimal_hint)
        display_actions(actions, index)
        return actions
    finally:
        rm_rf(prefix)
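The surrounding structure is the usual temporary-prefix pattern: create the prefix, work inside it, and always remove it. A stripped-down sketch, with shutil.rmtree standing in for rm_rf:

import shutil
import tempfile

prefix = tempfile.mkdtemp('conda')
try:
    pass  # ... resolve and display install actions against `prefix` ...
finally:
    shutil.rmtree(prefix, ignore_errors=True)  # stand-in for rm_rf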
Example 10
def build(m, get_src=True, pypi=False):
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)

    print("BUILD START:", m.dist())

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        cmd = ['/bin/bash', '-x', '-e', join(m.path, 'build.sh')]
        _check_call(cmd, env=env, cwd=source.get_dir())

    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(
            m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1))
    create_info_files(m, sorted(files2 - files1))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
Example 11
def make_hardlink_copy(path, prefix):
    """Hardlinks create invalid packages.  Copy files to break the link.
    Symlinks are OK, and unaffected here."""
    if not os.path.isabs(path):
        path = os.path.normpath(os.path.join(prefix, path))
    fn = os.path.basename(path)
    if os.lstat(path).st_nlink > 1:
        with TemporaryDirectory() as dest:
            # copy file to new name
            utils.copy_into(path, dest)
            # remove old file
            utils.rm_rf(path)
            # rename copy to original filename
            #   It is essential here to use copying (as opposed to os.rename), so that
            #        crossing volume boundaries works
            utils.copy_into(os.path.join(dest, fn), path)
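A runnable illustration of the st_nlink check this function relies on; the paths are throwaway temp files, not package contents:

import os
import shutil
import tempfile

d = tempfile.mkdtemp()
orig = os.path.join(d, "a.txt")
open(orig, "w").close()
print(os.lstat(orig).st_nlink)           # 1 -> no extra links, nothing to do

os.link(orig, os.path.join(d, "b.txt"))  # create a second hardlink
print(os.lstat(orig).st_nlink)           # 2 -> copy-and-replace to break it
shutil.rmtree(d)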
Example 12
def render_recipe(recipe_path, config, no_download_source=False):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    if config.set_build_id:
        # updates a unique build id if not already computed
        config.compute_build_id(os.path.basename(recipe_dir))
    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    m, need_download, need_reparse_in_env = parse_or_try_download(m,
                                                no_download_source=no_download_source,
                                                config=config)
    if need_download and no_download_source:
        raise ValueError("no_download_source specified, but can't fully render recipe without"
                         " downloading source.  Please fix the recipe, or don't use "
                         "no_download_source.")
    config.noarch = bool(m.get_value('build/noarch'))

    if need_cleanup:
        rm_rf(recipe_dir)

    return m, need_download, need_reparse_in_env
Example 13
def make_hardlink_copy(path, prefix):
    """Hardlinks create invalid packages.  Copy files to break the link.
    Symlinks are OK, and unaffected here."""
    if not os.path.isabs(path) and not os.path.exists(path):
        path = os.path.normpath(os.path.join(prefix, path))
    nlinks = os.lstat(path).st_nlink
    dest = 'tmpfile'
    if os.path.isabs(path):
        dest = os.path.join(os.getcwd(), dest)
    if nlinks > 1:
        # copy file to new name
        utils.copy_into(path, dest)
        # remove old file
        utils.rm_rf(path)
        # rename copy to original filename
        utils.copy_into(dest, path)
        utils.rm_rf(dest)
Example 14
def remove_easy_install_pth(files, prefix, config, preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    absfiles = [os.path.join(prefix, f) for f in files]
    py_ver = '.'.join(config.variant['python'].split('.')[:2])
    sp_dir = utils.get_site_packages(prefix, py_ver)
    for egg_path in glob(os.path.join(sp_dir, '*-py*.egg')):
        if os.path.isdir(egg_path):
            if preserve_egg_dir or not any(os.path.join(egg_path, i) in absfiles for i
                    in walk_prefix(egg_path, False, windows_forward_slashes=False)):
                write_pth(egg_path, config=config)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(os.path.join(egg_path, 'EGG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(os.path.join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(os.path.join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(os.path.join(sp_dir, fn)):
                        try:
                            utils.copy_into(os.path.join(egg_path, fn),
                                            os.path.join(sp_dir, fn), config.timeout,
                                            locking=config.locking)
                            utils.rm_rf(os.path.join(egg_path, fn))
                        except IOError as e:
                            fn = os.path.basename(str(e).split()[-1])
                            raise IOError("Tried to merge folder {egg_path} into {sp_dir}, but {fn}"
                                          " exists in both locations.  Please either add "
                                          "build/preserve_egg_dir: True to meta.yaml, or manually "
                                          "remove the file during your install process to avoid "
                                          "this conflict."
                                          .format(egg_path=egg_path, sp_dir=sp_dir, fn=fn))
                    else:
                        os.rename(os.path.join(egg_path, fn), os.path.join(sp_dir, fn))

        elif os.path.isfile(egg_path):
            if egg_path not in absfiles:
                continue
            print('found egg:', egg_path)
            write_pth(egg_path, config=config)

    utils.rm_rf(os.path.join(sp_dir, 'easy-install.pth'))
Example 15
def clean_pkg_cache(dist, timeout):
    cc.pkgs_dirs = cc.pkgs_dirs[:1]
    locks = []
    for folder in cc.pkgs_dirs:
        locks.append(filelock.SoftFileLock(join(folder, ".conda_lock")))

    for lock in locks:
        lock.acquire(timeout=timeout)

    try:
        rmplan = [
            'RM_EXTRACTED {0} local::{0}'.format(dist),
            'RM_FETCHED {0} local::{0}'.format(dist),
        ]
        plan.execute_plan(rmplan)

        # Conda does not seem to do a complete cleanup sometimes.  This is supplemental.
        #   Conda's cleanup is still necessary - it keeps track of its own in-memory
        #   list of downloaded things.
        for folder in cc.pkgs_dirs:
            try:
                assert not os.path.exists(os.path.join(folder, dist))
                assert not os.path.exists(os.path.join(folder, dist + '.tar.bz2'))
                for pkg_id in [dist, 'local::' + dist]:
                    assert pkg_id not in package_cache()
            except AssertionError:
                log.debug("Conda caching error: %s package remains in cache after removal", dist)
                log.debug("Clearing package cache to compensate")
                cache = package_cache()
                keys = [key for key in cache.keys() if dist in key]
                for pkg_id in keys:
                    if pkg_id in cache:
                        del cache[pkg_id]
                for entry in glob(os.path.join(folder, dist + '*')):
                    rm_rf(entry)
    finally:
        for lock in locks:
            lock.release()
            if os.path.isfile(lock._lock_file):
                os.remove(lock._lock_file)
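The lock handling here follows a fixed discipline: acquire every lock up front, do the work, and release in a finally block. A minimal sketch with a single filelock.SoftFileLock (the lock path is illustrative):

import filelock

lock = filelock.SoftFileLock("/tmp/.demo.conda_lock")
lock.acquire(timeout=90)
try:
    pass  # ... mutate the shared package cache ...
finally:
    lock.release()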
Example 16
def create_all_test_files(m, test_dir=None):
    if test_dir:
        rm_rf(test_dir)
        os.makedirs(test_dir)
        # this happens when we're finishing the build.
        test_deps = m.meta.get('test', {}).get('requires', [])
        if test_deps:
            with open(os.path.join(test_dir, 'test_time_dependencies.json'), 'w') as f:
                json.dump(test_deps, f)
    else:
        # this happens when we're running a package's tests
        test_dir = m.config.test_dir

    files = create_files(m, test_dir)

    pl_files = create_pl_files(m, test_dir)
    py_files = create_py_files(m, test_dir)
    r_files = create_r_files(m, test_dir)
    lua_files = create_lua_files(m, test_dir)
    shell_files = create_shell_files(m, test_dir)
    return files, pl_files, py_files, r_files, lua_files, shell_files
Example 17
def sp_dir(request):
    '''
    Create a site-packages/ directory in the same place where the test is
    located. This is where tests look for the conda.pth file. It is a
    session-scoped fixture with a finalizer invoked at the end to remove
    the site-packages/ directory.
    '''
    base_dir = dirname(__file__)
    sp = join(base_dir, 'site-packages')
    if exists(sp):
        rm_rf(sp)

    os.mkdir(sp)

    def cleanup():
        # session scoped cleanup is called at end of the session
        rm_rf(sp)

    request.addfinalizer(cleanup)

    return sp
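An assumed behavior-preserving rewrite of the same fixture in pytest's yield style, shown only as a sketch (tmp_path_factory takes over the manual mkdir/rm_rf bookkeeping and end-of-session cleanup):

import pytest

@pytest.fixture(scope="session")
def sp_dir(tmp_path_factory):
    # created fresh for the session; pytest removes it afterwards
    sp = tmp_path_factory.mktemp("site-packages")
    yield str(sp)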
Example 18
def test_env_creation_with_short_prefix_does_not_deadlock(caplog):
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata, _, _ = api.render(recipe_path, config=config)
    metadata.meta['package']['name'] = 'test_env_creation_with_short_prefix'
    fn = api.get_output_file_path(metadata)
    if os.path.isfile(fn):
        os.remove(fn)
    config.prefix_length = 80
    try:
        api.build(metadata)
        pkg_name = os.path.basename(fn).replace("-1.0-0.tar.bz2", "")
        assert not api.inspect_prefix_length(fn, 255)
        config.prefix_length = 255
        build.create_env(config.build_prefix, specs=["python", pkg_name], config=config)
    finally:
        rm_rf(test_base)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text()
Example 19
def remove_easy_install_pth(preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    sp_dir = environ.SP_DIR
    for egg_path in glob(join(sp_dir, "*-py*.egg")):
        if isdir(egg_path):
            if preserve_egg_dir:
                write_pth(egg_path)
                continue

            print("found egg dir:", egg_path)
            try:
                os.rename(join(egg_path, "EGG-INFO/PKG-INFO"), egg_path + "-info")
            except OSError:
                pass
            utils.rm_rf(join(egg_path, "EGG-INFO"))
            for fn in os.listdir(egg_path):
                if fn == "__pycache__":
                    utils.rm_rf(join(egg_path, fn))
                else:
                    os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            print("found egg:", egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, "easy-install.pth"))
Example 20
def remove_easy_install_pth(preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    sp_dir = environ.get_sp_dir()
    for egg_path in glob(join(sp_dir, '*-py*.egg')):
        if isdir(egg_path):
            if preserve_egg_dir:
                write_pth(egg_path)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(join(egg_path, 'EGG-INFO/PKG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(join(egg_path, fn))
                else:
                    os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            print('found egg:', egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
Example 21
def clean_pkg_cache(dist, config):
    locks = []

    conda_log_level = logging.WARN
    if config.debug:
        conda_log_level = logging.DEBUG

    _pkgs_dirs = pkgs_dirs[:1]
    if config.locking:
        locks = [utils.get_lock(folder, timeout=config.timeout) for folder in _pkgs_dirs]
    with utils.LoggingContext(conda_log_level):
        with utils.try_acquire_locks(locks, timeout=config.timeout):
            rmplan = [
                'RM_EXTRACTED {0} local::{0}'.format(dist),
                'RM_FETCHED {0} local::{0}'.format(dist),
            ]
            execute_plan(rmplan)

            # Conda does not seem to do a complete cleanup sometimes.  This is supplemental.
            #   Conda's cleanup is still necessary - it keeps track of its own in-memory
            #   list of downloaded things.
            for folder in pkgs_dirs:
                try:
                    assert not os.path.exists(os.path.join(folder, dist))
                    assert not os.path.exists(os.path.join(folder, dist + '.tar.bz2'))
                    for pkg_id in [dist, 'local::' + dist]:
                        assert pkg_id not in package_cache()
                except AssertionError:
                    log = utils.get_logger(__name__)
                    log.debug("Conda caching error: %s package remains in cache after removal",
                              dist)
                    log.debug("manually removing to compensate")
                    cache = package_cache()
                    keys = [key for key in cache.keys() if dist in key]
                    for pkg_id in keys:
                        if pkg_id in cache:
                            del cache[pkg_id]
                    for entry in glob(os.path.join(folder, dist + '*')):
                        utils.rm_rf(entry)
Example 22
def create_files(m):
    """
    Create the test files for pkg in the directory given.  The resulting
    test files are configuration (i.e. platform, architecture, Python and
    numpy version, ...) independent.
    Return False, if the package has no tests (for any configuration), and
    True if it has.
    """
    has_files = False
    rm_rf(m.config.test_dir)
    for fn in ensure_list(m.get_value('test/files', [])):
        has_files = True
        path = join(m.path, fn)
        # disable locking to avoid locking a temporary directory (the extracted test folder)
        copy_into(path, join(m.config.test_dir, fn), m.config.timeout, locking=False,
                  clobber=True)
    # need to re-download source in order to do tests
    if m.get_value('test/source_files') and not isdir(m.config.work_dir):
        source.provide(m)
    for pattern in ensure_list(m.get_value('test/source_files', [])):
        if on_win and '\\' in pattern:
            raise RuntimeError("test/source_files paths must use / "
                                "as the path delimiter on Windows")
        has_files = True
        files = glob.glob(join(m.config.work_dir, pattern))
        if not files:
            raise RuntimeError("Did not find any source_files for test with pattern %s", pattern)
        for f in files:
            try:
                # disable locking to avoid locking a temporary directory (the extracted test folder)
                copy_into(f, f.replace(m.config.work_dir, m.config.test_dir), m.config.timeout,
                          locking=False)
            except OSError as e:
                log = logging.getLogger(__name__)
                log.warn("Failed to copy {0} into test files.  Error was: {1}".format(f, str(e)))
        for ext in '.pyc', '.pyo':
            for f in get_ext_files(m.config.test_dir, ext):
                os.remove(f)
    return has_files
Example 23
def provide(recipe_dir, meta, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """
    rm_rf(WORK_DIR)
    if "fn" in meta:
        unpack(meta)
    elif "git_url" in meta:
        git_source(meta)
    elif "hg_url" in meta:
        hg_source(meta)
    elif "svn_url" in meta:
        svn_source(meta)
    else:  # no source
        os.makedirs(WORK_DIR)

    if patch:
        src_dir = get_dir()
        for patch in meta.get("patches", []):
            apply_patch(src_dir, join(recipe_dir, patch))
Example 24
def get_pkginfo(package, filename, pypiurl, md5, python_version, config, setup_options):
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute).  Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)

    if not isdir(config.src_cache):
        makedirs(config.src_cache)

    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(config.src_cache, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version, config=config, setup_options=setup_options)
        try:
            with open(join(tempdir, 'pkginfo.yaml')) as fn:
                pkg_info = yaml.safe_load(fn)
        except IOError:
            pkg_info = pkginfo.SDist(download_path).__dict__
    finally:
        rm_rf(tempdir)

    return pkg_info
Example 25
def provide(recipe_dir, meta, verbose=False, patch=True, dirty=False):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """

    if not dirty:
        if sys.platform == 'win32':
            if isdir(WORK_DIR):
                move_to_trash(WORK_DIR, '')
        else:
            rm_rf(WORK_DIR)

    if not os.path.exists(WORK_DIR):
        if any(k in meta for k in ('fn', 'url')):
            unpack(meta, verbose=verbose)
        elif 'git_url' in meta:
            git_source(meta, recipe_dir, verbose=verbose)
        # build to make sure we have a work directory with source in it.  We want to make sure that
        #    whatever version that is does not interfere with the test we run next.
        elif 'hg_url' in meta:
            hg_source(meta, verbose=verbose)
        elif 'svn_url' in meta:
            svn_source(meta, verbose=verbose)
        elif 'path' in meta:
            if verbose:
                print("Copying %s to %s" % (abspath(join(recipe_dir, meta.get('path'))), WORK_DIR))
            copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)
        else:  # no source
            os.makedirs(WORK_DIR)

        if patch:
            src_dir = get_dir()
            for patch in meta.get('patches', []):
                apply_patch(src_dir, join(recipe_dir, patch))
Example 26
def provide(recipe_dir, meta, verbose=False, patch=True):
    """
    given a recipe_dir:
      - download (if necessary)
      - unpack
      - apply patches (if any)
    """

    if sys.platform == 'win32':
        if isdir(WORK_DIR):
            move_to_trash(WORK_DIR, '')
    else:
        rm_rf(WORK_DIR)

    if any(k in meta for k in ('fn', 'url')):
        unpack(meta, verbose=verbose)
    elif 'git_url' in meta:
        git_source(meta, recipe_dir, verbose=verbose)
    # build to make sure we have a work directory with source in it.  We want to make sure that
    #    whatever version that is does not interfere with the test we run next.
    elif 'hg_url' in meta:
        hg_source(meta, verbose=verbose)
    elif 'svn_url' in meta:
        svn_source(meta, verbose=verbose)
    elif 'path' in meta:
        if verbose:
            print("Copying %s to %s" %
                  (abspath(join(recipe_dir, meta.get('path'))), WORK_DIR))
        copytree(abspath(join(recipe_dir, meta.get('path'))), WORK_DIR)
    else:  # no source
        if not isdir(WORK_DIR):
            os.makedirs(WORK_DIR)

    if patch:
        src_dir = get_dir()
        for patch in meta.get('patches', []):
            apply_patch(src_dir, join(recipe_dir, patch))
Example 27
def test_env_creation_with_short_prefix_does_not_deadlock(caplog):
    test_base = os.path.expanduser("~/cbtmp")
    config = api.Config(croot=test_base, anaconda_upload=False, verbose=True)
    recipe_path = os.path.join(metadata_dir, "has_prefix_files")
    metadata, _, _ = api.render(recipe_path, config=config)
    metadata.meta['package']['name'] = 'test_env_creation_with_short_prefix'
    fn = api.get_output_file_path(metadata)
    if os.path.isfile(fn):
        os.remove(fn)
    config.prefix_length = 80
    try:
        api.build(metadata)
        pkg_name = os.path.basename(fn).replace("-1.0-0.tar.bz2", "")
        assert not api.inspect_prefix_length(fn, 255)
        config.prefix_length = 255
        build.create_env(config.build_prefix,
                         specs=["python", pkg_name],
                         config=config)
    finally:
        rm_rf(test_base)
    assert 'One or more of your package dependencies needs to be rebuilt' in caplog.text()
Example 28
def hoist_single_extracted_folder(nested_folder):
    """Moves all files/folders one level up.

    This is for when your archive extracts into its own folder, so that we don't need to
    know exactly what that folder is called."""
    flist = os.listdir(nested_folder)
    parent = os.path.dirname(nested_folder)
    # only hoist if the parent folder contains ONLY our nested folder
    nested_folders_to_remove = [nested_folder]
    for thing in flist:
        if not os.path.isdir(os.path.join(parent, thing)):
            shutil.move(os.path.join(nested_folder, thing),
                        os.path.join(parent, thing))
        else:
            copy_into(os.path.join(nested_folder, thing),
                      os.path.join(parent, thing))
            nested_folders_to_remove.append(os.path.join(nested_folder, thing))
    # handle nested similar folder names
    fn = os.path.basename(nested_folder)
    if (os.path.join(nested_folder, fn) in nested_folders_to_remove
            and nested_folder in nested_folders_to_remove):
        nested_folders_to_remove.remove(nested_folder)
    for folder in nested_folders_to_remove:
        rm_rf(folder)
Example 29
def remove_easy_install_pth(files, preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    absfiles = [join(config.build_prefix, f) for f in files]
    sp_dir = environ.get_sp_dir()
    for egg_path in glob(join(sp_dir, '*-py*.egg')):
        if isdir(egg_path):
            if preserve_egg_dir or not any(
                    join(egg_path, i) in absfiles
                    for i in walk_prefix(egg_path, False)):
                write_pth(egg_path)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(join(egg_path, 'EGG-INFO/PKG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(join(sp_dir, fn)):
                        utils.copy_into(join(egg_path, fn), join(sp_dir, fn))
                        utils.rm_rf(join(egg_path, fn))
                    else:
                        os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            if egg_path not in absfiles:
                continue
            print('found egg:', egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
Example 30
File: post.py Project: Vasyka/hat
def remove_easy_install_pth(files, preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    absfiles = [join(config.build_prefix, f) for f in files]
    sp_dir = environ.get_sp_dir()
    for egg_path in glob(join(sp_dir, '*-py*.egg')):
        if isdir(egg_path):
            if preserve_egg_dir or not any(join(egg_path, i) in absfiles for i
                in walk_prefix(egg_path, False, windows_forward_slashes=False)):
                write_pth(egg_path)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(join(egg_path, 'EGG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(join(sp_dir, fn)):
                        utils.copy_into(join(egg_path, fn), join(sp_dir, fn))
                        utils.rm_rf(join(egg_path, fn))
                    else:
                        os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            if egg_path not in absfiles:
                continue
            print('found egg:', egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
Example 31
def remove_easy_install_pth(preserve_egg_dir=False):
    """
    remove the need for easy-install.pth and finally remove easy-install.pth
    itself
    """
    sp_dir = environ.get_sp_dir()
    for egg_path in glob(join(sp_dir, '*-py*.egg')):
        if isdir(egg_path):
            if preserve_egg_dir:
                write_pth(egg_path)
                continue

            print('found egg dir:', egg_path)
            try:
                os.rename(join(egg_path, 'EGG-INFO/PKG-INFO'),
                          egg_path + '-info')
            except OSError:
                pass
            utils.rm_rf(join(egg_path, 'EGG-INFO'))
            for fn in os.listdir(egg_path):
                if fn == '__pycache__':
                    utils.rm_rf(join(egg_path, fn))
                else:
                    # this might be a name-space package
                    # so the package directory already exists
                    # from another installed dependency
                    if os.path.exists(join(sp_dir, fn)):
                        utils.copy_into(join(egg_path, fn), join(sp_dir, fn))
                        utils.rm_rf(join(egg_path, fn))
                    else:
                        os.rename(join(egg_path, fn), join(sp_dir, fn))

        elif isfile(egg_path):
            print('found egg:', egg_path)
            write_pth(egg_path)

    utils.rm_rf(join(sp_dir, 'easy-install.pth'))
Example 32
def test(m, pypi=False):
    # remove from package cache
    rm_pkgs_cache(m.dist())

    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    os.makedirs(tmp_dir)
    if not create_files(tmp_dir, m):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    rm_rf(prefix)
    rm_rf(config.test_prefix)
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id()),
             # as the tests are run by python, we need to specify it
             'python %s*' % environ.py_ver]
    # add packages listed in test/requires
    for spec in m.get_value('test/requires'):
        specs.append(spec)

    create_env(config.test_prefix, specs, pypi)

    env = dict(os.environ)
    # prepend bin (or Scripts) directory
    env['PATH'] = (join(config.test_prefix, bin_dirname) + os.pathsep +
                   env['PATH'])

    for varname in 'CONDA_PY', 'CONDA_NPY':
        env[varname] = str(getattr(config, varname))
    env['PREFIX'] = config.test_prefix

    try:
        subprocess.check_call([config.test_python, join(tmp_dir, 'run_test.py')],
            env=env, cwd=tmp_dir)
    except subprocess.CalledProcessError:
        if not isdir(broken_dir):
            os.makedirs(broken_dir)
        shutil.move(bldpkg_path(m), join(broken_dir, "%s.tar.bz2" % m.dist()))
        sys.exit("TESTS FAILED: " + m.dist())

    print("TEST END:", m.dist())
Example 33
def get_install_actions(prefix,
                        specs,
                        env,
                        retries=0,
                        subdir=None,
                        verbose=True,
                        debug=False,
                        locking=True,
                        bldpkgs_dirs=None,
                        timeout=900,
                        disable_pip=False,
                        max_env_retry=3,
                        output_folder=None,
                        channel_urls=None):
    global cached_actions
    global last_index_ts
    actions = {}
    log = utils.get_logger(__name__)
    conda_log_level = logging.WARN
    specs = list(specs)
    if specs:
        specs.extend(create_default_packages)
    if verbose or debug:
        capture = contextlib.contextmanager(lambda: (yield))
        if debug:
            conda_log_level = logging.DEBUG
    else:
        capture = utils.capture
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    bldpkgs_dirs = ensure_list(bldpkgs_dirs)

    index, index_ts, _ = get_build_index(subdir,
                                         list(bldpkgs_dirs)[0],
                                         output_folder=output_folder,
                                         channel_urls=channel_urls,
                                         debug=debug,
                                         verbose=verbose,
                                         locking=locking,
                                         timeout=timeout)
    specs = tuple(
        utils.ensure_valid_spec(spec) for spec in specs
        if not str(spec).endswith('@'))

    if ((specs, env, subdir, channel_urls, disable_pip) in cached_actions
            and last_index_ts >= index_ts):
        actions = cached_actions[(specs, env, subdir, channel_urls,
                                  disable_pip)].copy()
        if "PREFIX" in actions:
            actions['PREFIX'] = prefix
    elif specs:
        # this is hiding output like:
        #    Fetching package metadata ...........
        #    Solving package specifications: ..........
        with utils.LoggingContext(conda_log_level):
            with capture():
                try:
                    actions = install_actions(prefix, index, specs, force=True)
                except (NoPackagesFoundError, UnsatisfiableError) as exc:
                    raise DependencyNeedsBuildingError(exc, subdir=subdir)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError,
                        AssertionError, BuildLockError) as exc:
                    if 'lock' in str(exc):
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)
                          or isinstance(exc, AssertionError)):
                        locks = utils.get_conda_operation_locks(
                            locking, bldpkgs_dirs, timeout)
                        with utils.try_acquire_locks(locks, timeout=timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s. "
                                "Removing the folder and retrying", pkg_dir)
                            if pkg_dir in pkgs_dirs and os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retries < max_env_retry:
                        log.warn(
                            "failed to get install actions, retrying.  exception was: %s",
                            str(exc))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            retries=retries + 1,
                            subdir=subdir,
                            verbose=verbose,
                            debug=debug,
                            locking=locking,
                            bldpkgs_dirs=tuple(bldpkgs_dirs),
                            timeout=timeout,
                            disable_pip=disable_pip,
                            max_env_retry=max_env_retry,
                            output_folder=output_folder,
                            channel_urls=tuple(channel_urls))
                    else:
                        log.error(
                            "Failed to get install actions, max retries exceeded."
                        )
                        raise
        if disable_pip:
            for pkg in ('pip', 'setuptools', 'wheel'):
                # specs are the raw specifications, not the conda-derived actual specs
                #   We're testing that pip etc. are manually specified
                if not any(
                        re.match(r'^%s(?:$|[\s=].*)' % pkg, str(dep))
                        for dep in specs):
                    actions['LINK'] = [
                        spec for spec in actions['LINK'] if spec.name != pkg
                    ]
        utils.trim_empty_keys(actions)
        cached_actions[(specs, env, subdir, channel_urls,
                        disable_pip)] = actions.copy()
        last_index_ts = index_ts
    return actions
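The caching at the top and bottom of this function reduces to a small pattern: memoize on a hashable key and treat the cache as stale whenever the index timestamp advances. A minimal sketch of just that logic, with illustrative names:

cached_actions = {}
last_index_ts = 0

def memoized(key, index_ts, compute):
    global last_index_ts
    # a hit is only valid while the cached index is at least as new
    if key in cached_actions and last_index_ts >= index_ts:
        return cached_actions[key].copy()
    result = compute()
    cached_actions[key] = result.copy()
    last_index_ts = index_ts
    return result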
Example 34
def create_env(prefix,
               specs,
               config,
               subdir,
               clear_cache=True,
               retry=0,
               index=None,
               locks=None):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        utils.get_logger("conda_build").setLevel(logging.DEBUG)
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        utils.get_logger("conda_build").setLevel(logging.INFO)
        external_logger_context = utils.LoggingContext(logging.ERROR)

    with external_logger_context:
        log = utils.get_logger(__name__)

        if os.path.isdir(prefix):
            utils.rm_rf(prefix)

        specs = list(set(specs))
        for feature, value in feature_list:
            if value:
                specs.append('%s@' % feature)

        if specs:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs))

            with utils.path_prepended(prefix):
                if not locks:
                    locks = utils.get_conda_operation_locks(config)
                try:
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        if not index:
                            index = get_build_index(config=config,
                                                    subdir=subdir)
                        actions = get_install_actions(prefix, index, specs,
                                                      config)
                        plan.display_actions(actions, index)
                        if utils.on_win:
                            for k, v in os.environ.items():
                                os.environ[k] = str(v)
                        plan.execute_actions(actions,
                                             index,
                                             verbose=config.debug)
                except (SystemExit, PaddingError, LinkError,
                        DependencyNeedsBuildingError, CondaError) as exc:
                    if (("too short in" in str(exc) or re.search(
                            'post-link failed for: (?:[a-zA-Z]*::)?openssl',
                            str(exc)) or isinstance(exc, PaddingError))
                            and config.prefix_length > 80):
                        if config.prefix_length_fallback:
                            log.warn(
                                "Build prefix failed with prefix length %d",
                                config.prefix_length)
                            log.warn("Error was: ")
                            log.warn(str(exc))
                            log.warn(
                                "One or more of your package dependencies needs to be rebuilt "
                                "with a longer prefix length.")
                            log.warn(
                                "Falling back to legacy prefix length of 80 characters."
                            )
                            log.warn(
                                "Your package will not install into prefixes > 80 characters."
                            )
                            config.prefix_length = 80

                            # Set this here and use to create environ
                            #   Setting this here is important because we use it below (symlink)
                            prefix = config.build_prefix

                            create_env(prefix,
                                       specs,
                                       config=config,
                                       subdir=subdir,
                                       clear_cache=clear_cache)
                        else:
                            raise
                    elif 'lock' in str(exc):
                        if retry < config.max_env_retry:
                            log.warn(
                                "failed to create env, retrying.  exception was: %s",
                                str(exc))
                            create_env(prefix,
                                       specs,
                                       config=config,
                                       subdir=subdir,
                                       clear_cache=clear_cache,
                                       retry=retry + 1)
                    elif ('requires a minimum conda version' in str(exc)
                          or 'link a source that does not' in str(exc)):
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = str(exc)
                            folder = 0
                            while os.path.dirname(
                                    pkg_dir) not in pkgs_dirs and folder < 20:
                                pkg_dir = os.path.dirname(pkg_dir)
                                folder += 1
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                        if retry < config.max_env_retry:
                            log.warn(
                                "failed to create env, retrying.  exception was: %s",
                                str(exc))
                            create_env(prefix,
                                       specs,
                                       config=config,
                                       subdir=subdir,
                                       clear_cache=clear_cache,
                                       retry=retry + 1)
                        else:
                            log.error(
                                "Failed to create env, max retries exceeded.")
                            raise
                    else:
                        raise
                # HACK: some of the time, conda screws up somehow and incomplete packages result.
                #    Just retry.
                except (AssertionError, IOError, ValueError, RuntimeError,
                        LockError) as exc:
                    if isinstance(exc, AssertionError):
                        with utils.try_acquire_locks(locks,
                                                     timeout=config.timeout):
                            pkg_dir = os.path.dirname(os.path.dirname(
                                str(exc)))
                            log.warn(
                                "I think conda ended up with a partial extraction for %s.  "
                                "Removing the folder and retrying", pkg_dir)
                            if os.path.isdir(pkg_dir):
                                utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs,
                                   config=config,
                                   subdir=subdir,
                                   clear_cache=clear_cache,
                                   retry=retry + 1)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
    if utils.on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
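
The retry ladder above distinguishes three failure modes: padding errors (fall back to an 80-character prefix), lock contention (retry up to config.max_env_retry), and partially extracted packages (delete the offending cache folder, then retry). A minimal sketch of the shared retry-with-cleanup idea; make_env and find_partial_pkg_dir are hypothetical hooks standing in for conda-build internals:

import shutil

MAX_RETRIES = 3  # stands in for config.max_env_retry


def create_env_with_retry(prefix, specs, make_env, find_partial_pkg_dir, retry=0):
    """Retry env creation, clearing any partially extracted package first.

    `make_env` and `find_partial_pkg_dir` are caller-supplied hooks standing
    in for conda-build internals (actual env creation, and mapping an
    exception back to the offending package-cache folder).
    """
    try:
        make_env(prefix, specs)
    except (IOError, RuntimeError, AssertionError) as exc:
        pkg_dir = find_partial_pkg_dir(exc)
        if pkg_dir:
            # same remedy as above: remove the partial extraction and retry
            shutil.rmtree(pkg_dir, ignore_errors=True)
        if retry >= MAX_RETRIES:
            raise
        create_env_with_retry(prefix, specs, make_env,
                              find_partial_pkg_dir, retry=retry + 1)
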
Example no. 35
def download_to_cache(cache_folder, recipe_path, source_dict, verbose=False):
    ''' Download a source to the local cache. '''
    if verbose:
        log.info('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder) and not os.path.islink(cache_folder):
        os.makedirs(cache_folder)

    source_urls = source_dict['url']
    if not isinstance(source_urls, list):
        source_urls = [source_urls]
    unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(
        source_urls[0])
    hash_added = False
    for hash_type in ('md5', 'sha1', 'sha256'):
        if hash_type in source_dict:
            if source_dict[hash_type] in (None, ""):
                raise ValueError('Empty {} hash provided for {}'.format(
                    hash_type, fn))
            fn = append_hash_to_fn(fn, source_dict[hash_type])
            hash_added = True
            break
    else:
        log.warn(
            "No hash (md5, sha1, sha256) provided for {}.  Source download forced.  "
            "Add hash to recipe to use source cache.".format(unhashed_fn))
    path = join(cache_folder, fn)
    if isfile(path):
        if verbose:
            log.info('Found source in cache: %s' % fn)
    else:
        if verbose:
            log.info('Downloading source to cache: %s' % fn)

        for url in source_urls:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                if verbose:
                    log.info("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                log.warn("Error: %s" % str(e).strip())
                rm_rf(path)
            except RuntimeError as e:
                log.warn("Error: %s" % str(e).strip())
                rm_rf(path)
            else:
                if verbose:
                    log.info("Success")
                break
        else:  # no break
            rm_rf(path)
            raise RuntimeError("Could not download %s" % url)

    hashed = None
    for tp in ('md5', 'sha1', 'sha256'):
        if tp in source_dict:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                rm_rf(path)
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                                   (tp.upper(), hashed, expected_hash))
            break

    # this is really a fallback.  If people don't provide the hash, we still need to prevent
    #    collisions in our source cache, but the end user will get no benefit from the cache.
    if not hash_added:
        if not hashed:
            hashed = hashsum_file(path, 'sha256')
        dest_path = append_hash_to_fn(path, hashed)
        if not os.path.isfile(dest_path):
            shutil.move(path, dest_path)
        path = dest_path

    return path, unhashed_fn
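
Appending the recipe's hash to the cached filename is what keys the source cache: two archives that share a name but differ in content land in different files. A rough illustration of that naming scheme (the real append_hash_to_fn lives in conda-build; this sketch only mimics the idea):

import os

def append_hash_to_fn(fn, hash_value):
    # splice the hash in before the (possibly compound) archive extension:
    # 'pkg-1.0.tar.gz' + 'abc123' -> 'pkg-1.0_abc123.tar.gz'
    for ext in ('.tar.gz', '.tar.bz2', '.tar.xz'):
        if fn.endswith(ext):
            return fn[:-len(ext)] + '_' + hash_value + ext
    root, ext = os.path.splitext(fn)
    return root + '_' + hash_value + ext

print(append_hash_to_fn('pkg-1.0.tar.gz', 'abc123'))  # pkg-1.0_abc123.tar.gz
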
Example no. 36
def build(m,
          get_src=True,
          verbose=True,
          post=None,
          channel_urls=(),
          override_channels=False,
          include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose,
                   channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(clear_cache=False,
                                    channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit(
                    "Error: Glob %s from always_include_files does not match any files"
                    % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.  This error
usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                    (tuple(f
                           for f in files2 - files1 if config.meta_dir in join(
                               config.build_prefix, f)), )))
        post_build(m, sorted(files2 - files1))
        create_info_files(m,
                          sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Example no. 37
def skeletonize(packages,
                output_dir=".",
                version=None,
                git_tag=None,
                cran_url="https://cran.r-project.org/",
                recursive=False,
                archive=True,
                version_compare=False,
                update_outdated=False,
                config=None):

    if not config:
        config = Config()

    if len(packages) > 1 and version_compare:
        raise ValueError(
            "--version-compare only works with one package at a time")
    if not update_outdated and not packages:
        raise ValueError("At least one package must be supplied")

    package_dicts = {}

    cran_metadata = get_cran_metadata(cran_url, output_dir)

    if update_outdated:
        packages = get_outdated(output_dir, cran_metadata, packages)
        for pkg in packages:
            rm_rf(join(output_dir[0], 'r-' + pkg))

    while packages:
        package = packages.pop()

        is_github_url = 'github.com' in package
        url = package

        if is_github_url:
            rm_rf(config.work_dir)
            m = metadata.MetaData.fromdict({'source': {
                'git_url': package
            }},
                                           config=config)
            source.git_source(m.get_section('source'), m.config.git_cache,
                              m.config.work_dir)
            git_tag = git_tag[0] if git_tag else get_latest_git_tag(config)
            p = subprocess.Popen(['git', 'checkout', git_tag],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 cwd=config.work_dir)
            stdout, stderr = p.communicate()
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
            if p.returncode:
                sys.exit(
                    "Error: 'git checkout %s' failed (%s).\nInvalid tag?" %
                    (git_tag, stderr.strip()))
            if stdout:
                print(stdout, file=sys.stdout)
            if stderr:
                print(stderr, file=sys.stderr)

            DESCRIPTION = join(config.work_dir, "DESCRIPTION")
            if not isfile(DESCRIPTION):
                sub_description_pkg = join(config.work_dir, 'pkg',
                                           "DESCRIPTION")
                sub_description_name = join(config.work_dir,
                                            package.split('/')[-1],
                                            "DESCRIPTION")
                if isfile(sub_description_pkg):
                    DESCRIPTION = sub_description_pkg
                elif isfile(sub_description_name):
                    DESCRIPTION = sub_description_name
                else:
                    sys.exit(
                        "%s does not appear to be a valid R package "
                        "(no DESCRIPTION file in %s, %s)" %
                        (package, sub_description_pkg, sub_description_name))

            with open(DESCRIPTION) as f:
                description_text = clear_trailing_whitespace(f.read())

            d = dict_from_cran_lines(
                remove_package_line_continuations(
                    description_text.splitlines()))
            d['orig_description'] = description_text
            package = d['Package'].lower()
            cran_metadata[package] = d

        if package.startswith('r-'):
            package = package[2:]
        if package.endswith('/'):
            package = package[:-1]
        if package.lower() not in cran_metadata:
            sys.exit("Package %s not found" % package)

        # Make sure package always uses the CRAN capitalization
        package = cran_metadata[package.lower()]['Package']

        if not is_github_url:
            session = get_session(output_dir)
            cran_metadata[package.lower()].update(
                get_package_metadata(cran_url, package, session))

        dir_path = join(output_dir, 'r-' + package.lower())
        if exists(dir_path) and not version_compare:
            raise RuntimeError("directory already exists: %s" % dir_path)

        cran_package = cran_metadata[package.lower()]

        d = package_dicts.setdefault(
            package,
            {
                'cran_packagename': package,
                'packagename': 'r-' + package.lower(),
                'build_depends': '',
                'run_depends': '',
                # CRAN doesn't seem to have this metadata :(
                'home_comment': '#',
                'homeurl': '',
                'summary_comment': '#',
                'summary': '',
            })

        if is_github_url:
            d['url_key'] = ''
            d['fn_key'] = ''
            d['git_url_key'] = 'git_url:'
            d['git_tag_key'] = 'git_tag:'
            d['hash_entry'] = '# You can add a hash for the file here, like md5, sha1 or sha256'
            d['filename'] = ''
            d['cranurl'] = ''
            d['git_url'] = url
            d['git_tag'] = git_tag
        else:
            d['url_key'] = 'url:'
            d['fn_key'] = 'fn:'
            d['git_url_key'] = ''
            d['git_tag_key'] = ''
            d['git_url'] = ''
            d['git_tag'] = ''
            d['hash_entry'] = ''

        if version:
            d['version'] = version
            raise NotImplementedError(
                "Package versions from CRAN are not yet implemented")

        d['cran_version'] = cran_package['Version']
        # Conda versions cannot contain '-'; conda (verlib) treats '_' as a '.'
        d['conda_version'] = d['cran_version'].replace('-', '_')
        if version_compare:
            sys.exit(not version_compare(dir_path, d['conda_version']))

        if not is_github_url:
            filename = '{}_{}.tar.gz'
            contrib_url = cran_url + 'src/contrib/'
            package_url = contrib_url + filename.format(
                package, d['cran_version'])

            # calculate sha256 by downloading source
            sha256 = hashlib.sha256()
            print("Downloading source from {}".format(package_url))
            sha256.update(urlopen(package_url).read())
            d['hash_entry'] = 'sha256: {}'.format(sha256.hexdigest())

            d['filename'] = filename.format(package, '{{ version }}')
            if archive:
                d['cranurl'] = (INDENT + contrib_url + d['filename'] + INDENT +
                                contrib_url + 'Archive/{}/'.format(package) +
                                d['filename'])
            else:
                d['cranurl'] = ' ' + cran_url + 'src/contrib/' + d['filename']

        d['cran_metadata'] = '\n'.join(
            ['# %s' % l for l in cran_package['orig_lines'] if l])

        # XXX: We should maybe normalize these
        d['license'] = cran_package.get("License", "None")
        d['license_family'] = guess_license_family(d['license'],
                                                   allowed_license_families)

        if 'License_is_FOSS' in cran_package:
            d['license'] += ' (FOSS)'
        if cran_package.get('License_restricts_use') == 'yes':
            d['license'] += ' (Restricts use)'

        if "URL" in cran_package:
            d['home_comment'] = ''
            d['homeurl'] = ' ' + yaml_quote_string(cran_package['URL'])
        else:
            # use CRAN page as homepage if nothing has been specified
            d['home_comment'] = ''
            d['homeurl'] = ' https://CRAN.R-project.org/package={}'.format(
                package)

        if 'Description' in cran_package:
            d['summary_comment'] = ''
            d['summary'] = ' ' + yaml_quote_string(cran_package['Description'])

        if "Suggests" in cran_package:
            d['suggests'] = "# Suggests: %s" % cran_package['Suggests']
        else:
            d['suggests'] = ''

        # Every package depends on at least R.
        # I'm not sure what the difference between depends and imports is.
        depends = [
            s.strip() for s in cran_package.get('Depends', '').split(',')
            if s.strip()
        ]
        imports = [
            s.strip() for s in cran_package.get('Imports', '').split(',')
            if s.strip()
        ]
        links = [
            s.strip() for s in cran_package.get("LinkingTo", '').split(',')
            if s.strip()
        ]

        dep_dict = {}

        for s in set(chain(depends, imports, links)):
            match = VERSION_DEPENDENCY_REGEX.match(s)
            if not match:
                sys.exit("Could not parse version from dependency of %s: %s" %
                         (package, s))
            name = match.group('name')
            archs = match.group('archs')
            relop = match.group('relop') or ''
            version = match.group('version') or ''
            version = version.replace('-', '_')
            # If there is a relop there should be a version
            assert not relop or version

            if archs:
                sys.exit("Don't know how to handle archs from dependency of "
                         "package %s: %s" % (package, s))

            dep_dict[name] = '{relop}{version}'.format(relop=relop,
                                                       version=version)

        if 'R' not in dep_dict:
            dep_dict['R'] = ''

        for dep_type in ['build', 'run']:
            deps = []
            for name in sorted(dep_dict):
                if name in R_BASE_PACKAGE_NAMES:
                    continue
                if name == 'R':
                    # Put R first
                    # Regardless of build or run, and whether this is a recommended package or not,
                    # it can only depend on 'r-base' since anything else can and will cause cycles
                    # in the dependency graph. The CRAN metadata lists all dependencies anyway, even
                    # those packages that are in the recommended group.
                    r_name = 'r-base'
                    # We don't include any R version restrictions because we
                    # always build R packages against an exact R version
                    deps.insert(
                        0, '{indent}{r_name}'.format(indent=INDENT,
                                                     r_name=r_name))
                else:
                    conda_name = 'r-' + name.lower()

                    if dep_dict[name]:
                        deps.append('{indent}{name} {version}'.format(
                            name=conda_name,
                            version=dep_dict[name],
                            indent=INDENT))
                    else:
                        deps.append('{indent}{name}'.format(name=conda_name,
                                                            indent=INDENT))
                    if recursive:
                        if not exists(join(output_dir, conda_name)):
                            packages.append(name)

            if cran_package.get("NeedsCompilation", 'no') == 'yes':
                if dep_type == 'build':
                    deps.append('{indent}posix                # [win]'.format(
                        indent=INDENT))
                    deps.append(
                        '{indent}{{{{native}}}}toolchain  # [win]'.format(
                            indent=INDENT))
                    deps.append(
                        '{indent}gcc                  # [not win]'.format(
                            indent=INDENT))
                elif dep_type == 'run':
                    deps.append(
                        '{indent}{{{{native}}}}gcc-libs   # [win]'.format(
                            indent=INDENT))
                    deps.append(
                        '{indent}libgcc               # [not win]'.format(
                            indent=INDENT))
            d['%s_depends' % dep_type] = ''.join(deps)

    for package in package_dicts:
        d = package_dicts[package]
        name = d['packagename']

        # Normalize the metadata values
        d = {
            k: unicodedata.normalize("NFKD",
                                     text_type(v)).encode('ascii',
                                                          'ignore').decode()
            for k, v in iteritems(d)
        }

        makedirs(join(output_dir, name))
        print("Writing recipe for %s" % package.lower())
        with open(join(output_dir, name, 'meta.yaml'), 'w') as f:
            f.write(clear_trailing_whitespace(CRAN_META.format(**d)))
        with open(join(output_dir, name, 'build.sh'), 'w') as f:
            f.write(CRAN_BUILD_SH.format(**d))
        with open(join(output_dir, name, 'bld.bat'), 'w') as f:
            f.write(CRAN_BLD_BAT.format(**d))

    print("Done")
Example no. 38
def main(config=None):
    print(banner)

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba powered-build tool for conda packages."
    )
    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("recipe_dir", type=str)

    render_parser = subparsers.add_parser(
        "render", parents=[parent_parser], help="render a recipe"
    )
    convert_parser = subparsers.add_parser(
        "convert", parents=[parent_parser], help="convert recipe.yaml to old-style meta.yaml"
    )
    build_parser = subparsers.add_parser(
        "build", parents=[parent_parser], help="build a recipe"
    )
    args = parser.parse_args()

    command = args.command

    if command == 'convert':
        from boa.cli import convert
        convert.main(args.recipe_dir)
        exit()

    folder = args.recipe_dir
    cbc, config = get_config(folder)

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)
    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))
    normalize_recipe(ydoc)

    # pprint(ydoc)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve build, add weak run exports
    # - add run exports from deps!

    print('\n')
    if command == "render":
        for o in sorted_outputs:
            print(o)
        exit()


    # TODO: this should be done more cleanly
    top_name = ydoc['package']['name']
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    solver = MambaSolver(["conda-forge"], context.subdir)
    print("\n")

    download_source(MetaData(recipe_path, o0))
    cached_source = o0.sections['source']

    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        # print(o)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if isdir(o.config.build_prefix):
                utils.rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            o.transactions['build'].execute(PrefixData(o.config.build_prefix), PackageCacheData.first_writable().pkgs_dir)

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions['host'].execute(PrefixData(o.config.host_prefix), PackageCacheData.first_writable().pkgs_dir)

        meta = MetaData(recipe_path, o)
        o.final_build_id = meta.build_id()

        if cached_source != o.sections['source']:
            download_source(meta)

        build(meta, None)

    for o in sorted_outputs:
        print("\n")
        print(o)
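
Step 2 of this boa entry point renders the recipe's context block with Jinja so that later keys can reference earlier ones. The same loop, reduced to a standalone snippet:

import jinja2

context_dict = {"name": "mypkg", "version": "1.2.3",
                "url": "https://example.org/{{ name }}-{{ version }}.tar.gz"}

jenv = jinja2.Environment()
for key, value in context_dict.items():
    if isinstance(value, str):
        # render each value against the context so far, letting
        # entries refer to previously defined ones
        context_dict[key] = jenv.from_string(value).render(context_dict)

print(context_dict["url"])  # https://example.org/mypkg-1.2.3.tar.gz
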
Example no. 39
def execute(args):
    parser, args = parse_args(args)
    config = Config(**args.__dict__)
    build.check_external()

    # change globals in build module, see comment there as well
    config.channel_urls = args.channel or ()
    config.override_channels = args.override_channels
    config.verbose = not args.quiet or args.debug

    if 'purge' in args.recipe:
        build.clean_build(config)
        return

    if 'purge-all' in args.recipe:
        build.clean_build(config)
        config.clean_pkgs()
        return

    set_language_env_vars(args, parser, config=config, execute=execute)

    action = None
    if args.output:
        action = output_action
        logging.basicConfig(level=logging.ERROR)
        config.verbose = False
        config.quiet = True
    elif args.test:
        action = test_action
    elif args.source:
        action = source_action
    elif args.check:
        action = check_action

    if action:
        for recipe in args.recipe:
            recipe_dir, need_cleanup = get_recipe_abspath(recipe)

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            # this fully renders any jinja templating, throwing an error if any data is missing
            m, _, _ = render_recipe(recipe_dir,
                                    no_download_source=False,
                                    config=config)
            action(m, config)

            if need_cleanup:
                rm_rf(recipe_dir)
    else:
        api.build(args.recipe,
                  post=args.post,
                  build_only=args.build_only,
                  notest=args.notest,
                  keep_old_work=args.keep_old_work,
                  already_built=None,
                  config=config,
                  noverify=args.no_verify)

    if not args.output and len(build.get_build_folders(config.croot)) > 0:
        build.print_build_intermediate_warning(config)
Example no. 40
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package, and figure out
    # which python version we prefer. `python_age` can be used to tweak which
    # python gets used here.
    if metadata.noarch or metadata.noarch_python:
        from .conda_interface import VersionOrder
        age = int(
            metadata.get_value('build/noarch_python_build_age',
                               metadata.config.noarch_python_build_age))
        versions = []
        for variant in variants:
            if 'python' in variant:
                vo = variant['python']
                if vo not in versions:
                    versions.append(vo)
        version_indices = sorted(
            range(len(versions)),
            key=lambda k: VersionOrder(versions[k].split(' ')[0]))
        if age < 0:
            age = 0
        elif age > len(versions) - 1:
            age = len(versions) - 1
        build_ver = versions[version_indices[len(versions) - 1 - age]]
        variants = filter_by_key_value(variants, 'python', build_ver,
                                       'noarch_python_reduction')

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()

    metadata.config.variant = variants[0]
    used_variables = metadata.get_used_loop_vars(force_global=False)
    top_loop = metadata.get_reduced_variant_set(used_variables)

    for variant in top_loop:
        from conda_build.build import get_all_replacements
        get_all_replacements(variant)
        mv = metadata.copy()
        mv.config.variant = variant

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        conform_dict = {}
        for key in used_variables:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            conform_dict[key] = variant[key]

        for key, values in conform_dict.items():
            mv.config.variants = (filter_by_key_value(
                mv.config.variants, key, values,
                'distribute_variants_reduction') or mv.config.variants)
        get_all_replacements(mv.config.variants)
        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        mv.config.squished_variants = list_of_dicts_to_dict_of_lists(
            mv.config.variants)

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()

        try:
            mv.parse_until_resolved(
                allow_no_other_outputs=allow_no_other_outputs,
                bypass_env_check=bypass_env_check)
        except SystemExit:
            pass
        need_source_download = (not mv.needs_source_for_render
                                or not mv.source_provided)

        rendered_metadata[(mv.dist(),
                           mv.config.variant.get('target_platform', mv.config.subdir),
                           tuple((var, mv.config.variant.get(var))
                                 for var in mv.get_used_vars()))] = \
                                     (mv, need_source_download, None)
    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
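
The noarch_python_build_age logic above picks the python version that sits age steps behind the newest one offered by the variants, clamping out-of-range values. The selection on its own; sorting by a plain version tuple here stands in for conda's VersionOrder:

def pick_noarch_python(versions, age):
    # order oldest -> newest; a tuple split suffices for simple 'X.Y' strings
    ordered = sorted(versions, key=lambda v: tuple(int(p) for p in v.split('.')))
    age = min(max(age, 0), len(ordered) - 1)  # clamp, as the original does
    return ordered[len(ordered) - 1 - age]

print(pick_noarch_python(['3.6', '3.8', '3.7'], age=0))  # '3.8' (newest)
print(pick_noarch_python(['3.6', '3.8', '3.7'], age=1))  # '3.7' (one behind)
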
Example no. 41
def download_to_cache(cache_folder, recipe_path, source_dict):
    ''' Download a source to the local cache. '''
    print('Source cache directory is: %s' % cache_folder)
    if not isdir(cache_folder):
        os.makedirs(cache_folder)

    source_urls = source_dict['url']
    if not isinstance(source_urls, list):
        source_urls = [source_urls]
    unhashed_fn = fn = source_dict['fn'] if 'fn' in source_dict else basename(source_urls[0])
    hash_added = False
    for hash_type in ('md5', 'sha1', 'sha256'):
        if hash_type in source_dict:
            fn = append_hash_to_fn(fn, source_dict[hash_type])
            hash_added = True
            break
    else:
        log = get_logger(__name__)
        log.warn("No hash (md5, sha1, sha256) provided for {}.  Source download forced.  "
                 "Add hash to recipe to use source cache.".format(unhashed_fn))
    path = join(cache_folder, fn)
    if isfile(path):
        print('Found source in cache: %s' % fn)
    else:
        print('Downloading source to cache: %s' % fn)

        for url in source_urls:
            if "://" not in url:
                if url.startswith('~'):
                    url = expanduser(url)
                if not os.path.isabs(url):
                    url = os.path.normpath(os.path.join(recipe_path, url))
                url = url_path(url)
            else:
                if url.startswith('file:///~'):
                    url = 'file:///' + expanduser(url[8:]).replace('\\', '/')
            try:
                print("Downloading %s" % url)
                with LoggingContext():
                    download(url, path)
            except CondaHTTPError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
                rm_rf(path)
            except RuntimeError as e:
                print("Error: %s" % str(e).strip(), file=sys.stderr)
                rm_rf(path)
            else:
                print("Success")
                break
        else:  # no break
            rm_rf(path)
            raise RuntimeError("Could not download %s" % url)

    hashed = None
    for tp in ('md5', 'sha1', 'sha256'):
        if tp in source_dict:
            expected_hash = source_dict[tp]
            hashed = hashsum_file(path, tp)
            if expected_hash != hashed:
                rm_rf(path)
                raise RuntimeError("%s mismatch: '%s' != '%s'" %
                           (tp.upper(), hashed, expected_hash))
            break

    # this is really a fallback.  If people don't provide the hash, we still need to prevent
    #    collisions in our source cache, but the end user will get no benefit from the cache.
    if not hash_added:
        if not hashed:
            hashed = hashsum_file(path, 'sha256')
        dest_path = append_hash_to_fn(path, hashed)
        if not os.path.isfile(dest_path):
            shutil.move(path, dest_path)
        path = dest_path

    return path, unhashed_fn
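
Both copies of download_to_cache normalize non-URL source entries the same way: expand ~, resolve relative paths against the recipe directory, and convert to a file:// URL. That normalization in isolation, with a literal 'file://' prefix standing in for conda's url_path helper:

import os

def normalize_source_url(url, recipe_path):
    if "://" not in url:                 # a local path, not a URL
        url = os.path.expanduser(url)
        if not os.path.isabs(url):
            url = os.path.normpath(os.path.join(recipe_path, url))
        return 'file://' + url.replace('\\', '/')   # stand-in for url_path()
    if url.startswith('file:///~'):      # file URL pointing into $HOME
        return 'file:///' + os.path.expanduser(url[8:]).replace('\\', '/')
    return url

print(normalize_source_url('../src.tar.gz', '/recipes/foo'))
# file:///recipes/src.tar.gz
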
Example no. 42
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True):
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    if config.set_build_id:
        m.config.compute_build_id(m.name(), reset=reset_build_id)

    if m.needs_source_for_render and (not os.path.isdir(m.config.work_dir) or
                                      len(os.listdir(m.config.work_dir)) == 0):
        try_download(m, no_download_source=no_download_source)

    rendered_metadata = {}

    if m.final:
        rendered_metadata = [
            (m, False, False),
        ]
        index = None
    else:
        variants = dict_of_lists_to_list_of_dicts(
            variants) if variants else get_package_variants(m)
        index = get_build_index(m.config, m.config.build_subdir)
        rendered_metadata = distribute_variants(
            m,
            variants,
            index,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants)
        if not rendered_metadata:
            raise ValueError(
                "No variants were satisfiable - no valid recipes could be rendered."
            )

    if need_cleanup:
        utils.rm_rf(recipe_dir)

    return rendered_metadata, index
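
dict_of_lists_to_list_of_dicts converts the variant matrix from the config into one dict per concrete combination. A sketch of the expansion via itertools.product; the real helper also copes with zip_keys and extended keys:

from itertools import product

def dict_of_lists_to_list_of_dicts(variants):
    keys = sorted(variants)
    return [dict(zip(keys, combo))
            for combo in product(*(variants[k] for k in keys))]

print(dict_of_lists_to_list_of_dicts({'python': ['3.8', '3.9'],
                                      'numpy': ['1.16']}))
# [{'numpy': '1.16', 'python': '3.8'}, {'numpy': '1.16', 'python': '3.9'}]
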
Example no. 43
def create_env(prefix, specs, config, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        silence_loggers(show_warnings_and_errors=True)

    if os.path.isdir(prefix):
        rm_rf(prefix)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    if specs:  # Don't waste time if there is nothing to do
        with path_prepended(prefix):
            locks = []
            try:
                cc.pkgs_dirs = cc.pkgs_dirs[:1]
                locked_folders = cc.pkgs_dirs + list(config.bldpkgs_dirs)
                for folder in locked_folders:
                    if not os.path.isdir(folder):
                        os.makedirs(folder)
                    lock = filelock.SoftFileLock(join(folder, '.conda_lock'))
                    update_index(folder, config=config, lock=lock)
                    locks.append(lock)
                for lock in locks:
                    lock.acquire(timeout=config.timeout)

                index = get_build_index(config=config, clear_cache=True)

                actions = plan.install_actions(prefix, index, specs)
                if config.disable_pip:
                    actions['LINK'] = [
                        spec for spec in actions['LINK']
                        if not spec.startswith('pip-')
                    ]  # noqa
                    actions['LINK'] = [
                        spec for spec in actions['LINK']
                        if not spec.startswith('setuptools-')
                    ]  # noqa
                plan.display_actions(actions, index)
                if on_win:
                    for k, v in os.environ.items():
                        os.environ[k] = str(v)
                plan.execute_actions(actions, index, verbose=config.debug)
            except (SystemExit, PaddingError, LinkError) as exc:
                if (("too short in" in str(exc)
                     or 'post-link failed for: openssl' in str(exc)
                     or isinstance(exc, PaddingError))
                        and config.prefix_length > 80):
                    log.warn("Build prefix failed with prefix length %d",
                             config.prefix_length)
                    log.warn("Error was: ")
                    log.warn(str(exc))
                    log.warn(
                        "One or more of your package dependencies needs to be rebuilt "
                        "with a longer prefix length.")
                    log.warn(
                        "Falling back to legacy prefix length of 80 characters."
                    )
                    log.warn(
                        "Your package will not install into prefixes > 80 characters."
                    )
                    config.prefix_length = 80

                    # Set this here and use to create environ
                    #   Setting this here is important because we use it below (symlink)
                    prefix = config.build_prefix

                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    create_env(prefix,
                               specs,
                               config=config,
                               clear_cache=clear_cache)
                else:
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    raise
            finally:
                for lock in locks:
                    lock.release()
                    if os.path.isfile(lock._lock_file):
                        os.remove(lock._lock_file)
        warn_on_old_conda_build(index=index)

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
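
The lock bookkeeping above is the fragile part of this function: every exit path must release each SoftFileLock and delete its lock file so a crashed build cannot wedge later ones. The bare pattern, using the filelock package (recent releases expose the path as lock_file; the code above reaches for the private _lock_file):

import os
import filelock

def with_folder_locks(folders, timeout, work):
    locks = [filelock.SoftFileLock(os.path.join(f, '.conda_lock'))
             for f in folders]
    try:
        for lock in locks:
            lock.acquire(timeout=timeout)  # raises filelock.Timeout on contention
        return work()
    finally:
        for lock in locks:
            lock.release()
            # SoftFileLock leaves its lock file behind; remove it so
            # stale locks don't accumulate, as the original does
            if os.path.isfile(lock.lock_file):
                os.remove(lock.lock_file)
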
Example no. 44
def distribute_variants(metadata,
                        variants,
                        permit_unsatisfiable_variants=False,
                        allow_no_other_outputs=False,
                        bypass_env_check=False):
    rendered_metadata = {}
    need_source_download = True

    # don't bother distributing python if it's a noarch package
    if metadata.noarch or metadata.noarch_python:
        conform_dict = {'python': variants[0]['python']}
        variants = conform_variants_to_value(variants, conform_dict)

    # store these for reference later
    metadata.config.variants = variants
    # These are always the full set.  just 'variants' is the one that gets
    #     used mostly, and can be reduced
    metadata.config.input_variants = variants
    squished_variants = list_of_dicts_to_dict_of_lists(variants)

    recipe_requirements = metadata.extract_requirements_text()
    recipe_package_and_build_text = metadata.extract_package_and_build_text()
    recipe_text = recipe_package_and_build_text + recipe_requirements
    if PY3 and hasattr(recipe_text, 'decode'):
        recipe_text = recipe_text.decode()
    elif not PY3 and hasattr(recipe_text, 'encode'):
        recipe_text = recipe_text.encode()
    for variant in variants:
        mv = metadata.copy()

        # this determines which variants were used, and thus which ones should be locked for
        #     future rendering
        mv.final = False
        mv.config.variant = {}
        mv.parse_again(permit_undefined_jinja=True,
                       allow_no_other_outputs=True,
                       bypass_env_check=True)
        vars_in_recipe = set(mv.undefined_jinja_vars)

        mv.config.variant = variant
        conform_dict = {}
        for key in vars_in_recipe:
            # We use this variant in the top-level recipe.
            # constrain the stored variants to only this version in the output
            #     variant mapping
            if re.search(r"\s*\{\{\s*%s\s*(?:.*?)?\}\}" % key, recipe_text):
                if key in variant:
                    variant_index = squished_variants[key].index(variant[key])
                    zipped_keys = [key]
                    if 'zip_keys' in variant:
                        zip_key_groups = variant['zip_keys']
                        if zip_key_groups and not isinstance(
                                zip_key_groups[0], list):
                            zip_key_groups = [zip_key_groups]
                        for group in zip_key_groups:
                            if key in group:
                                zipped_keys = group
                                break
                    for zipped_key in zipped_keys:
                        conform_dict[zipped_key] = squished_variants[
                            zipped_key][variant_index]

        conform_dict.update({
            key: val
            for key, val in variant.items()
            if key in mv.meta.get('requirements', {}).get('build', []) +
            mv.meta.get('requirements', {}).get('host', [])
        })

        compiler_matches = re.findall(r"compiler\([\'\"](.*)[\'\"].*\)",
                                      recipe_requirements)
        if compiler_matches:
            from conda_build.jinja_context import native_compiler
            for match in compiler_matches:
                compiler_key = '{}_compiler'.format(match)
                conform_dict[compiler_key] = variant.get(
                    compiler_key, native_compiler(match, mv.config))
                conform_dict['target_platform'] = variant['target_platform']

        build_reqs = mv.meta.get('requirements', {}).get('build', [])
        host_reqs = mv.meta.get('requirements', {}).get('host', [])
        if 'python' in build_reqs or 'python' in host_reqs:
            conform_dict['python'] = variant['python']

        pin_run_as_build = variant.get('pin_run_as_build', {})
        if mv.numpy_xx and 'numpy' not in pin_run_as_build:
            pin_run_as_build['numpy'] = {'min_pin': 'x.x', 'max_pin': 'x.x'}

        mv.config.variants = conform_variants_to_value(mv.config.variants,
                                                       conform_dict)
        numpy_pinned_variants = []
        for _variant in mv.config.variants:
            _variant['pin_run_as_build'] = pin_run_as_build
            numpy_pinned_variants.append(_variant)
        mv.config.variants = numpy_pinned_variants

        if mv.needs_source_for_render and mv.variant_in_source:
            mv.parse_again()
            utils.rm_rf(mv.config.work_dir)
            source.provide(mv)
            mv.parse_again()
        mv.parse_until_resolved(allow_no_other_outputs=allow_no_other_outputs,
                                bypass_env_check=bypass_env_check)
        need_source_download = (bool(mv.meta.get('source'))
                                and not mv.needs_source_for_render
                                and not os.listdir(mv.config.work_dir))
        # if python is in the build specs, but doesn't have a specific associated
        #    version, make sure to add one to newly parsed 'requirements/build'.
        for env in ('build', 'host', 'run'):
            utils.insert_variant_versions(mv, env)
        fm = mv.copy()
        # HACK: trick conda-build into thinking this is final, and computing a hash based
        #     on the current meta.yaml.  The accuracy doesn't matter, all that matters is
        #     our ability to differentiate configurations
        fm.final = True
        rendered_metadata[fm.dist()] = (mv, need_source_download, None)

    # list of tuples.
    # each tuple item is a tuple of 3 items:
    #    metadata, need_download, need_reparse_in_env
    return list(rendered_metadata.values())
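
The zip_keys handling in this variant keeps zipped keys moving in lockstep: choosing a value for one key in a zip group pins every key in that group to the entry at the same index of the squished (dict-of-lists) variants. A toy illustration:

squished = {'python': ['3.8', '3.9'], 'numpy': ['1.16', '1.19']}
zip_keys = [['python', 'numpy']]   # python 3.8 pairs with numpy 1.16, etc.

def conform_zipped(key, value, squished, zip_keys):
    idx = squished[key].index(value)
    group = next((g for g in zip_keys if key in g), [key])
    # pin every key in the zip group to the entry at the same index
    return {k: squished[k][idx] for k in group}

print(conform_zipped('python', '3.9', squished, zip_keys))
# {'python': '3.9', 'numpy': '1.19'}
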
Example no. 45
def build(m,
          config,
          post=None,
          need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after the build env is populated
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(
                "    (actual version deferred until further download or env creation)"
            )

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix)
                or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or not any(
                        spec.startswith('python') and "3." in spec
                        for spec in specs)):
                    specs.append(vcs_source)

                    log.warn(
                        "Your recipe depends on %s at build time (for templates), "
                        "but you have not listed it as a build dependency.  Doing "
                        "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError(
                        "Your recipe uses mercurial in build, but mercurial"
                        " does not yet support Python 3.  Please handle all of "
                        "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (e.g. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #    depending on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files",
                        pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config),
                                         'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(
                                        conda_root=root_script_dir +
                                        os.path.sep,
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file,
                                          config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(
                f for f in files2 - files1
                if config.meta_dir in join(config.build_prefix, f)), )
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m,
                   sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m,
                          sorted(files2 - files1),
                          config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1),
                                    config.build_prefix)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # empty files report a size of 0; treat them as large (100000)
                #    so they sort toward the back of their group
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records sort first (0), everything else after (1)
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
Example 46
def build(m,
          get_src=True,
          verbose=True,
          post=None,
          channel_urls=(),
          override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build. True means run
        post only. False means stop just before the post.
    :type post: bool or None
    '''
    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose,
                   channel_urls=channel_urls,
                   override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for f in m.always_include_files():
            if f not in files1:
                sys.exit("Error: File %s from always_include_files not found" %
                         f)
        files1 = files1.difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m,
                          sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Example 47
def create_env(prefix,
               specs_or_actions,
               env,
               config,
               subdir,
               clear_cache=True,
               retry=0,
               locks=None,
               is_cross=False,
               is_conda=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        external_logger_context = utils.LoggingContext(logging.DEBUG)
    else:
        external_logger_context = utils.LoggingContext(logging.WARN)

    if os.path.exists(prefix):
        for entry in glob(os.path.join(prefix, "*")):
            utils.rm_rf(entry)

    with external_logger_context:
        log = utils.get_logger(__name__)

        # if os.path.isdir(prefix):
        #     utils.rm_rf(prefix)

        if specs_or_actions:  # Don't waste time if there is nothing to do
            log.debug("Creating environment in %s", prefix)
            log.debug(str(specs_or_actions))

            if not locks:
                locks = utils.get_conda_operation_locks(config)
            try:
                with utils.try_acquire_locks(locks, timeout=config.timeout):
                    # input is a list - it's specs in MatchSpec format
                    if not hasattr(specs_or_actions, 'keys'):
                        specs = list(set(specs_or_actions))
                        actions = get_install_actions(
                            prefix,
                            tuple(specs),
                            env,
                            subdir=subdir,
                            verbose=config.verbose,
                            debug=config.debug,
                            locking=config.locking,
                            bldpkgs_dirs=tuple(config.bldpkgs_dirs),
                            timeout=config.timeout,
                            disable_pip=config.disable_pip,
                            max_env_retry=config.max_env_retry,
                            output_folder=config.output_folder,
                            channel_urls=tuple(config.channel_urls))
                    else:
                        actions = specs_or_actions
                    index, _, _ = get_build_index(
                        subdir=subdir,
                        bldpkgs_dir=config.bldpkgs_dir,
                        output_folder=config.output_folder,
                        channel_urls=config.channel_urls,
                        debug=config.debug,
                        verbose=config.verbose,
                        locking=config.locking,
                        timeout=config.timeout)
                    utils.trim_empty_keys(actions)
                    display_actions(actions, index)
                    if utils.on_win:
                        for k, v in os.environ.items():
                            os.environ[k] = str(v)
                    with env_var('CONDA_QUIET', not config.verbose,
                                 reset_context):
                        with env_var('CONDA_JSON', not config.verbose,
                                     reset_context):
                            execute_actions(actions, index)
            except (SystemExit, PaddingError, LinkError,
                    DependencyNeedsBuildingError, CondaError,
                    BuildLockError) as exc:
                if (("too short in" in str(exc) or re.search(
                        'post-link failed for: (?:[a-zA-Z]*::)?openssl',
                        str(exc)) or isinstance(exc, PaddingError))
                        and config.prefix_length > 80):
                    if config.prefix_length_fallback:
                        log.warn("Build prefix failed with prefix length %d",
                                 config.prefix_length)
                        log.warn("Error was: ")
                        log.warn(str(exc))
                        log.warn(
                            "One or more of your package dependencies needs to be rebuilt "
                            "with a longer prefix length.")
                        log.warn(
                            "Falling back to legacy prefix length of 80 characters."
                        )
                        log.warn(
                            "Your package will not install into prefixes > 80 characters."
                        )
                        config.prefix_length = 80

                        host = '_h_env' in prefix
                        # Set this here and use to create environ
                        #   Setting this here is important because we use it below (symlink)
                        prefix = config.host_prefix if host else config.build_prefix
                        actions['PREFIX'] = prefix

                        create_env(prefix,
                                   actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   is_cross=is_cross)
                    else:
                        raise
                elif 'lock' in str(exc):
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                elif ('requires a minimum conda version' in str(exc)
                      or 'link a source that does not' in str(exc)):
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = str(exc)
                        folder = 0
                        while os.path.dirname(
                                pkg_dir) not in pkgs_dirs and folder < 20:
                            pkg_dir = os.path.dirname(pkg_dir)
                            folder += 1
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                    if retry < config.max_env_retry:
                        log.warn(
                            "failed to create env, retrying.  exception was: %s",
                            str(exc))
                        create_env(prefix,
                                   specs_or_actions,
                                   config=config,
                                   subdir=subdir,
                                   env=env,
                                   clear_cache=clear_cache,
                                   retry=retry + 1,
                                   is_cross=is_cross)
                    else:
                        log.error(
                            "Failed to create env, max retries exceeded.")
                        raise
                else:
                    raise
            # HACK: some of the time, conda screws up somehow and incomplete packages result.
            #    Just retry.
            except (AssertionError, IOError, ValueError, RuntimeError,
                    LockError) as exc:
                if isinstance(exc, AssertionError):
                    with utils.try_acquire_locks(locks,
                                                 timeout=config.timeout):
                        pkg_dir = os.path.dirname(os.path.dirname(str(exc)))
                        log.warn(
                            "I think conda ended up with a partial extraction for %s.  "
                            "Removing the folder and retrying", pkg_dir)
                        if os.path.isdir(pkg_dir):
                            utils.rm_rf(pkg_dir)
                if retry < config.max_env_retry:
                    log.warn(
                        "failed to create env, retrying.  exception was: %s",
                        str(exc))
                    create_env(prefix,
                               specs_or_actions,
                               config=config,
                               subdir=subdir,
                               env=env,
                               clear_cache=clear_cache,
                               retry=retry + 1,
                               is_cross=is_cross)
                else:
                    log.error("Failed to create env, max retries exceeded.")
                    raise
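Most of create_env is one pattern repeated: catch an error believed to be transient, remove whatever partial state caused it, and recurse with retry + 1 until config.max_env_retry is exhausted. A stripped-down sketch of that skeleton (make_env, cleanup, and the exception type are placeholders, not the real API):

def create_with_retry(make_env, cleanup, retry=0, max_retry=3):
    # Generic shape of the retry loop above: on a (presumed transient)
    # failure, clean up the suspect state and recurse with retry + 1.
    try:
        make_env()
    except RuntimeError as exc:  # placeholder for the transient errors caught above
        if retry < max_retry:
            print("failed to create env, retrying.  exception was: %s" % exc)
            cleanup()
            create_with_retry(make_env, cleanup, retry + 1, max_retry)
        else:
            print("Failed to create env, max retries exceeded.")
            raise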
Example 48
def test(m, verbose=True, channel_urls=(), override_channels=False):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    '''
    # remove from package cache
    rm_pkgs_cache(m.dist())

    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    os.makedirs(tmp_dir)
    create_files(tmp_dir, m)
    # Make Perl or Python-specific test files
    if m.name().startswith('perl-'):
        pl_files = create_pl_files(tmp_dir, m)
        py_files = False
    else:
        py_files = create_py_files(tmp_dir, m)
        pl_files = False
    shell_files = create_shell_files(tmp_dir, m)
    if not (py_files or shell_files or pl_files):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    rm_rf(config.build_prefix)
    rm_rf(config.test_prefix)
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

    if py_files:
        # as the tests are run by python, we need to specify it
        specs += ['python %s*' % environ.get_py_ver()]
    if pl_files:
        # as the tests are run by perl, we need to specify it
        specs += ['perl %s*' % environ.get_perl_ver()]
    # add packages listed in test/requires
    for spec in m.get_value('test/requires', []):
        specs.append(spec)

    create_env(config.test_prefix, specs, verbose=verbose,
        channel_urls=channel_urls, override_channels=override_channels)

    env = dict(os.environ)
    # TODO: Include all the same environment variables that are used in
    # building.
    env.update(environ.get_dict(m, prefix=config.test_prefix))

    # prepend bin (or Scripts) directory
    env['PATH'] = (join(config.test_prefix, bin_dirname) + os.pathsep +
                   os.getenv('PATH'))

    for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL':
        env[varname] = str(getattr(config, varname))
    env['PREFIX'] = config.test_prefix

    # Python 2 Windows requires that envs variables be string, not unicode
    env = {str(key): str(value) for key, value in env.items()}
    if py_files:
        try:
            subprocess.check_call([config.test_python, '-s',
                                   join(tmp_dir, 'run_test.py')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m)

    if pl_files:
        try:
            subprocess.check_call([config.test_perl,
                                   join(tmp_dir, 'run_test.pl')],
                                  env=env, cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m)

    if shell_files:
        if sys.platform == 'win32':
            test_file = join(tmp_dir, 'run_test.bat')
            cmd = [os.environ['COMSPEC'], '/c', 'call', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m)
        else:
            test_file = join(tmp_dir, 'run_test.sh')
            # TODO: Run the test/commands here instead of in run_test.py
            cmd = ['/bin/bash', '-x', '-e', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m)

    print("TEST END:", m.dist())
Example 49
def build_recipe(args, recipe_path, cbc, config):

    if args.features:
        assert args.features.startswith("[") and args.features.endswith("]")
        features = [f.strip() for f in args.features[1:-1].split(",")]
    else:
        features = []

    selected_features = {}
    for f in features:
        if f.startswith("~"):
            selected_features[f[1:]] = False
        else:
            selected_features[f] = True

    ydoc = render(recipe_path, config=config)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build"))
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta

            o["selected_features"] = selected_features

            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config, features
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config, features
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config, selected_features)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve build, add weak run exports to
    # - add run exports from deps!

    if args.command == "render":
        for o in sorted_outputs:
            console.print(o)
        exit()

    # TODO this should be done cleaner
    top_name = ydoc["package"]["name"]
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    console.print("\n[yellow]Initializing mamba solver[/yellow]\n")
    solver = MambaSolver([], context.subdir)

    console.print("\n[yellow]Downloading source[/yellow]\n")
    download_source(MetaData(recipe_path, o0), args.interactive)
    cached_source = o0.sections["source"]

    for o in sorted_outputs:
        console.print(
            f"\n[yellow]Preparing environment for [bold]{o.name}[/bold][/yellow]\n"
        )
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if os.path.isdir(o.config.build_prefix):
                rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            try:
                o.transactions["build"].execute(
                    PrefixData(o.config.build_prefix),
                    PackageCacheData.first_writable().pkgs_dir,
                )
            except Exception:
                # This currently enables windows-multi-build...
                print("Could not instantiate build environment")

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions["host"].execute(
                PrefixData(o.config.host_prefix),
                PackageCacheData.first_writable().pkgs_dir,
            )

        meta = MetaData(recipe_path, o)
        o.set_final_build_id(meta)

        if cached_source != o.sections["source"]:
            download_source(meta, args.interactive)

        console.print(f"\n[yellow]Starting build for [bold]{o.name}[/bold][/yellow]\n")

        build(meta, None, allow_interactive=args.interactive)

    for o in sorted_outputs:
        print("\n\n")
        console.print(o)
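The --features argument parsed at the top of build_recipe uses a bracketed, comma-separated list in which a leading ~ disables a feature. The parsing isolated into a hypothetical helper (same logic as above):

def parse_features(arg):
    # "[a, ~b]" -> {'a': True, 'b': False}
    assert arg.startswith("[") and arg.endswith("]")
    selected = {}
    for f in (part.strip() for part in arg[1:-1].split(",")):
        if f.startswith("~"):
            selected[f[1:]] = False
        else:
            selected[f] = True
    return selected

print(parse_features("[mkl, ~openssl]"))   # {'mkl': True, 'openssl': False}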
Example 50
def test(m, verbose=True):
    '''
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    '''
    # remove from package cache
    rm_pkgs_cache(m.dist())

    tmp_dir = join(config.croot, 'test-tmp_dir')
    rm_rf(tmp_dir)
    os.makedirs(tmp_dir)
    create_files(tmp_dir, m)
    # Make Perl or Python-specific test files
    if m.name().startswith('perl-'):
        pl_files = create_pl_files(tmp_dir, m)
        py_files = False
    else:
        py_files = create_py_files(tmp_dir, m)
        pl_files = False
    shell_files = create_shell_files(tmp_dir, m)
    if not (py_files or shell_files or pl_files):
        print("Nothing to test for:", m.dist())
        return

    print("TEST START:", m.dist())
    rm_rf(prefix)
    rm_rf(config.test_prefix)
    specs = ['%s %s %s' % (m.name(), m.version(), m.build_id())]

    if py_files:
        # as the tests are run by python, we need to specify it
        specs += ['python %s*' % environ.PY_VER]
    if pl_files:
        # as the tests are run by perl, we need to specify it
        specs += ['perl %s*' % environ.PERL_VER]
    # add packages listed in test/requires
    for spec in m.get_value('test/requires', []):
        specs.append(spec)

    create_env(config.test_prefix, specs, verbose=verbose)

    env = dict(os.environ)
    # TODO: Include all the same environment variables that are used in
    # building.
    env.update(environ.get_dict(m, prefix=config.test_prefix))

    # prepend bin (or Scripts) directory
    env['PATH'] = (join(config.test_prefix, bin_dirname) + os.pathsep +
                   env['PATH'])

    for varname in 'CONDA_PY', 'CONDA_NPY', 'CONDA_PERL':
        env[varname] = str(getattr(config, varname))
    env['PREFIX'] = config.test_prefix

    # Python 2 Windows requires that envs variables be string, not unicode
    env = {str(key): str(value) for key, value in env.items()}
    if py_files:
        try:
            subprocess.check_call(
                [config.test_python,
                 join(tmp_dir, 'run_test.py')],
                env=env,
                cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m)

    if pl_files:
        try:
            subprocess.check_call(
                [config.test_perl,
                 join(tmp_dir, 'run_test.pl')],
                env=env,
                cwd=tmp_dir)
        except subprocess.CalledProcessError:
            tests_failed(m)

    if shell_files:
        if sys.platform == 'win32':
            test_file = join(tmp_dir, 'run_test.bat')
            cmd = [os.environ['COMSPEC'], '/c', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m)
        else:
            test_file = join(tmp_dir, 'run_test.sh')
            # TODO: Run the test/commands here instead of in run_test.py
            cmd = ['/bin/bash', '-x', '-e', test_file]
            try:
                subprocess.check_call(cmd, env=env, cwd=tmp_dir)
            except subprocess.CalledProcessError:
                tests_failed(m)

    print("TEST END:", m.dist())
Example 51
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :param post: None means run the whole build. True means run
        post only. False means stop just before the post.
    :type post: bool or None
    '''
    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
            # We must use a long prefix here as the package will only be
            # installable into prefixes shorter than this one.
            config.use_long_build_prefix = True
        else:
            # In case there are multiple builds in the same process
            config.use_long_build_prefix = False

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
            [ms.spec for ms in m.ms_depends('build')],
            verbose=verbose, channel_urls=channel_urls,
            override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files().difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Example 52
def get_run_dists(m, config):
    prefix = join(envs_dirs[0], '_run')
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('run')], config=config)
    return sorted(linked(prefix))
Example 53
def run_test(
    recipedir_or_package_or_metadata,
    config,
    stats,
    move_broken=True,
    provision_only=False,
    solver=None,
):
    """
    Execute any test scripts for the given package.

    :param m: Package's metadata.
    :type m: Metadata
    """

    # hash_input records the variant values that went into the build hash,
    #    when the package provides them
    hash_input = {}

    # store this name to keep it consistent.  By changing files, we change the hash later.
    #    It matches the build hash now, so let's keep it around.
    test_package_name = (
        recipedir_or_package_or_metadata.dist()
        if hasattr(recipedir_or_package_or_metadata, "dist")
        else recipedir_or_package_or_metadata
    )

    if not provision_only:
        print("TEST START:", test_package_name)

    if hasattr(recipedir_or_package_or_metadata, "config"):
        metadata = recipedir_or_package_or_metadata
        utils.rm_rf(metadata.config.test_dir)
    else:
        metadata, hash_input = construct_metadata_for_test(
            recipedir_or_package_or_metadata, config
        )

    trace = "-x " if metadata.config.debug else ""

    # Must download *after* computing build id, or else computing build id will change
    #     folder destination
    _extract_test_files_from_package(metadata)

    # When testing a .tar.bz2 in the pkgs dir, clean_pkg_cache() will remove it.
    # Prevent this. When https://github.com/conda/conda/issues/5708 gets fixed
    # I think we can remove this call to clean_pkg_cache().
    in_pkg_cache = (
        not hasattr(recipedir_or_package_or_metadata, "config")
        and os.path.isfile(recipedir_or_package_or_metadata)
        and recipedir_or_package_or_metadata.endswith(CONDA_PACKAGE_EXTENSIONS)
        and os.path.dirname(recipedir_or_package_or_metadata) in pkgs_dirs
    )
    if not in_pkg_cache:
        environ.clean_pkg_cache(metadata.dist(), metadata.config)

    copy_test_source_files(metadata, metadata.config.test_dir)
    # this is also copying tests/source_files from work_dir to testing workdir

    _, pl_files, py_files, r_files, lua_files, shell_files = create_all_test_files(
        metadata
    )

    if (
        not any([py_files, shell_files, pl_files, lua_files, r_files])
        and not metadata.config.test_run_post
    ):
        print("Nothing to test for:", test_package_name)
        return True

    if metadata.config.remove_work_dir:
        for name, prefix in (
            ("host", metadata.config.host_prefix),
            ("build", metadata.config.build_prefix),
        ):
            if os.path.isdir(prefix):
                # move host folder to force hardcoded paths to host env to break during tests
                #    (so that they can be properly addressed by recipe author)
                dest = os.path.join(
                    os.path.dirname(prefix),
                    "_".join(
                        (
                            "%s_prefix_moved" % name,
                            metadata.dist(),
                            getattr(metadata.config, "%s_subdir" % name),
                        )
                    ),
                )
                # Needs to come after create_files in case there's test/source_files
                shutil_move_more_retrying(prefix, dest, "{} prefix".format(prefix))

        # nested if so that there's no warning when we just leave the empty workdir in place
        if metadata.source_provided:
            dest = os.path.join(
                os.path.dirname(metadata.config.work_dir),
                "_".join(("work_moved", metadata.dist(), metadata.config.host_subdir)),
            )
            # Needs to come after create_files in case there's test/source_files
            shutil_move_more_retrying(config.work_dir, dest, "work")
    else:
        log.warn(
            "Not moving work directory after build.  Your package may depend on files "
            "in the work directory that are not included with your package"
        )

    # looks like a dead function to me
    # get_build_metadata(metadata)

    specs = metadata.get_test_deps(py_files, pl_files, lua_files, r_files)

    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        env["CONDA_BUILD"] = "1"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if not metadata.config.activate or metadata.name() == "conda":
        # prepend bin (or Scripts) directory
        env = utils.prepend_bin_path(
            env, metadata.config.test_prefix, prepend_prefix=True
        )

    if utils.on_win:
        env["PATH"] = metadata.config.test_prefix + os.pathsep + env["PATH"]

    env["PREFIX"] = metadata.config.test_prefix
    if "BUILD_PREFIX" in env:
        del env["BUILD_PREFIX"]

    # In the future, we will need to support testing cross compiled
    #     packages on physical hardware. until then it is expected that
    #     something like QEMU or Wine will be used on the build machine,
    #     therefore, for now, we use host_subdir.

    # ensure that the test prefix isn't kept between variants
    utils.rm_rf(metadata.config.test_prefix)

    if solver is None:
        solver, pkg_cache_path = get_solver(metadata.config.host_subdir)
    else:
        pkg_cache_path = PackageCacheData.first_writable().pkgs_dir

    solver.replace_channels()
    transaction = solver.solve(specs, [pkg_cache_path])

    downloaded = transaction.fetch_extract_packages(
        pkg_cache_path, solver.repos + list(solver.local_repos.values()),
    )
    if not downloaded:
        raise RuntimeError("Did not succeed in downloading packages.")

    mkdir_p(metadata.config.test_prefix)
    transaction.execute(
        PrefixData(metadata.config.test_prefix), pkg_cache_path,
    )

    with utils.path_prepended(metadata.config.test_prefix):
        env = dict(os.environ.copy())
        env.update(environ.get_dict(m=metadata, prefix=metadata.config.test_prefix))
        env["CONDA_BUILD_STATE"] = "TEST"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    if config.test_run_post:
        from conda_build.utils import get_installed_packages

        installed = get_installed_packages(metadata.config.test_prefix)
        files = installed[metadata.meta["package"]["name"]]["files"]
        replacements = get_all_replacements(metadata.config)
        try_download(metadata, False, True)
        create_info_files(metadata, replacements, files, metadata.config.test_prefix)
        post_build(metadata, files, None, metadata.config.test_prefix, True)

    # when workdir is removed, the source files are unavailable.  There's the test/source_files
    #    entry that lets people keep these files around.  The files are copied into test_dir for
    #    intuitive relative path behavior, though, not work_dir, so we need to adjust where
    #    SRC_DIR points.  The initial CWD during tests is test_dir.
    if metadata.config.remove_work_dir:
        env["SRC_DIR"] = metadata.config.test_dir

    test_script, _ = write_test_scripts(
        metadata, env, py_files, pl_files, lua_files, r_files, shell_files, trace
    )

    if utils.on_win:
        cmd = [os.environ.get("COMSPEC", "cmd.exe"), "/d", "/c", test_script]
    else:
        cmd = (
            [shell_path]
            + (["-x"] if metadata.config.debug else [])
            + ["-o", "errexit", test_script]
        )
    try:
        test_stats = {}
        if not provision_only:
            # rewrite long paths in stdout back to their env variables
            if metadata.config.debug or metadata.config.no_rewrite_stdout_env:
                rewrite_env = None
            else:
                rewrite_env = {k: env[k] for k in ["PREFIX", "SRC_DIR"] if k in env}
                if metadata.config.verbose:
                    for k, v in rewrite_env.items():
                        print(
                            "{0} {1}={2}".format(
                                "set" if test_script.endswith(".bat") else "export",
                                k,
                                v,
                            )
                        )
            utils.check_call_env(
                cmd,
                env=env,
                cwd=metadata.config.test_dir,
                stats=test_stats,
                rewrite_stdout_env=rewrite_env,
            )
            log_stats(test_stats, "testing {}".format(metadata.name()))
            # TODO need to implement metadata.get_used_loop_vars
            # if stats is not None and metadata.config.variants:
            #     stats[
            #         stats_key(metadata, "test_{}".format(metadata.name()))
            #     ] = test_stats
            if os.path.exists(join(metadata.config.test_dir, "TEST_FAILED")):
                raise subprocess.CalledProcessError(-1, "")
            print("TEST END:", test_package_name)

    except subprocess.CalledProcessError as _:  # noqa
        tests_failed(
            metadata,
            move_broken=move_broken,
            broken_dir=metadata.config.broken_dir,
            config=metadata.config,
        )
        raise

    if config.need_cleanup and config.recipe_dir is not None and not provision_only:
        utils.rm_rf(config.recipe_dir)

    return True
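run_test builds the actual test invocation per platform: cmd.exe on Windows, otherwise the shell with errexit so the script aborts on the first failing command, plus an -x trace in debug mode. The branch reduced to a sketch (paths are made up):

import os

test_script = '/tmp/test/run_test.sh'   # hypothetical script path
shell_path = '/bin/bash'
debug = False

if os.name == 'nt':
    cmd = [os.environ.get('COMSPEC', 'cmd.exe'), '/d', '/c', test_script]
else:
    # -o errexit aborts on the first failing command; -x traces when debugging
    cmd = [shell_path] + (['-x'] if debug else []) + ['-o', 'errexit', test_script]
print(cmd)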
Example 54
def _construct_metadata_for_test_from_package(package, config):
    recipe_dir, need_cleanup = utils.get_recipe_abspath(package)
    config.need_cleanup = need_cleanup
    config.recipe_dir = recipe_dir
    hash_input = {}

    info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
    with open(os.path.join(info_dir, "index.json")) as f:
        package_data = json.load(f)

    if package_data["subdir"] != "noarch":
        config.host_subdir = package_data["subdir"]
    # We may be testing an (old) package built without filename hashing.
    hash_input = os.path.join(info_dir, "hash_input.json")
    if os.path.isfile(hash_input):
        with open(os.path.join(info_dir, "hash_input.json")) as f:
            hash_input = json.load(f)
    else:
        config.filename_hashing = False
        hash_input = {}
    # not actually used as a variant, since metadata will have been finalized.
    #    This is still necessary for computing the hash correctly though
    config.variant = hash_input
    log = utils.get_logger(__name__)

    # get absolute file location
    local_pkg_location = os.path.normpath(os.path.abspath(os.path.dirname(package)))

    # get last part of the path
    last_element = os.path.basename(local_pkg_location)
    is_channel = False
    for platform in ("win-", "linux-", "osx-", "noarch"):
        if last_element.startswith(platform):
            is_channel = True

    if not is_channel:
        log.warn(
            "Copying package to conda-build croot.  No packages otherwise alongside yours will"
            " be available unless you specify -c local.  To avoid this warning, your package "
            "must reside in a channel structure with platform-subfolders.  See more info on "
            "what a valid channel is at "
            "https://conda.io/docs/user-guide/tasks/create-custom-channels.html"
        )

        local_dir = os.path.join(config.croot, config.host_subdir)
        mkdir_p(local_dir)
        local_pkg_location = os.path.join(local_dir, os.path.basename(package))
        utils.copy_into(package, local_pkg_location)
        local_pkg_location = local_dir

    local_channel = os.path.dirname(local_pkg_location)

    # update indices in the channel
    update_index(local_channel, verbose=config.debug, threads=1)

    try:
        # raise IOError()
        # metadata = render_recipe(
        #     os.path.join(info_dir, "recipe"), config=config, reset_build_id=False
        # )[0][0]

        metadata = get_metadata(os.path.join(info_dir, "recipe", "recipe.yaml"), config)
        # with open(os.path.join(info_dir, "recipe", "recipe.yaml")) as fi:
        # metadata = yaml.load(fi)
    # no recipe in package.  Fudge metadata
    except SystemExit:
        # force the build string to line up - recomputing it would
        #    yield a different result
        metadata = MetaData.fromdict(
            {
                "package": {
                    "name": package_data["name"],
                    "version": package_data["version"],
                },
                "build": {
                    "number": int(package_data["build_number"]),
                    "string": package_data["build"],
                },
                "requirements": {"run": package_data["depends"]},
            },
            config=config,
        )
    # HACK: because the recipe is fully baked, detecting "used" variables no longer works.  The set
    #     of variables in the hash_input suffices, though.

    if metadata.noarch:
        metadata.config.variant["target_platform"] = "noarch"

    metadata.config.used_vars = list(hash_input.keys())
    urls = list(utils.ensure_list(metadata.config.channel_urls))
    local_path = url_path(local_channel)
    # replace local with the appropriate real channel.  Order is maintained.
    urls = [url if url != "local" else local_path for url in urls]
    if local_path not in urls:
        urls.insert(0, local_path)
    metadata.config.channel_urls = urls
    utils.rm_rf(metadata.config.test_dir)
    return metadata, hash_input
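The channel fixup at the end of _construct_metadata_for_test_from_package swaps the literal "local" entry for the real on-disk channel and, if no such entry exists, puts the local channel first while otherwise preserving order:

local_path = 'file:///home/user/conda-bld'   # stand-in for url_path(local_channel)
urls = ['conda-forge', 'local', 'defaults']

# replace the literal "local" entry, preserving order, and make sure
# the on-disk channel is searched at all
urls = [url if url != 'local' else local_path for url in urls]
if local_path not in urls:
    urls.insert(0, local_path)
print(urls)   # ['conda-forge', 'file:///home/user/conda-bld', 'defaults']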
Example 55
def render_recipe(recipe_path,
                  config,
                  no_download_source=False,
                  variants=None,
                  permit_unsatisfiable_variants=True,
                  reset_build_id=True,
                  bypass_env_check=False):
    """Returns a list of tuples, each consisting of

    (metadata-object, needs_download, needs_render_in_env)

    You get one tuple per variant.  Outputs are not factored in here (subpackages won't affect these
    results returned here.)
    """
    arg = recipe_path
    # Don't use byte literals for paths in Python 2
    if not PY3:
        arg = arg.decode(getpreferredencoding() or 'utf-8')
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            t = tarfile.open(arg, 'r:*')
            t.extractall(path=recipe_dir)
            t.close()
            need_cleanup = True
        elif arg.endswith('.yaml'):
            recipe_dir = os.path.dirname(arg)
            need_cleanup = False
        else:
            print("Ignoring non-recipe: %s" % arg)
            return None, None
    else:
        recipe_dir = abspath(arg)
        need_cleanup = False

    if not isdir(recipe_dir):
        sys.exit("Error: no such directory: %s" % recipe_dir)

    try:
        m = MetaData(recipe_dir, config=config)
    except exceptions.YamlParsingError as e:
        sys.stderr.write(e.error_msg())
        sys.exit(1)

    rendered_metadata = {}

    # important: set build id *before* downloading source.  Otherwise source goes into a different
    #    build folder.
    if config.set_build_id:
        m.config.compute_build_id(m.name(), m.version(), reset=reset_build_id)

    # this source may go into a folder that doesn't match the eventual build folder.
    #   There's no way around it AFAICT.  We must download the source to be able to render
    #   the recipe (from anything like GIT_FULL_HASH), but we can't know the final build
    #   folder until rendering is complete, because package names can have variant jinja2 in them.
    if m.needs_source_for_render and not m.source_provided:
        try_download(m, no_download_source=no_download_source)
    if m.final:
        if not hasattr(m.config, 'variants') or not m.config.variant:
            m.config.ignore_system_variants = True
            if os.path.isfile(os.path.join(m.path, 'conda_build_config.yaml')):
                m.config.variant_config_files = [
                    os.path.join(m.path, 'conda_build_config.yaml')
                ]
            m.config.variants = get_package_variants(m, variants=variants)
            m.config.variant = m.config.variants[0]
        rendered_metadata = [
            (m, False, False),
        ]
    else:
        # merge any passed-in variants with any files found
        variants = get_package_variants(m, variants=variants)

        # when building, we don't want to fully expand all outputs into metadata, only expand
        #    whatever variants we have (i.e. expand top-level variants, not output-only variants)
        rendered_metadata = distribute_variants(
            m,
            variants,
            permit_unsatisfiable_variants=permit_unsatisfiable_variants,
            allow_no_other_outputs=True,
            bypass_env_check=bypass_env_check)
    if need_cleanup:
        utils.rm_rf(recipe_dir)
    return rendered_metadata
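render_recipe accepts three input shapes: a recipe tarball is extracted to a temp dir that must be cleaned up afterwards, a .yaml file means "use the containing directory", and anything else is taken to be a recipe directory already. That dispatch extracted into a hypothetical helper:

import os
import tarfile
import tempfile
from os.path import abspath, isfile

def resolve_recipe_dir(arg):
    # returns (recipe_dir, need_cleanup) following the dispatch above
    if isfile(arg):
        if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
            recipe_dir = tempfile.mkdtemp()
            with tarfile.open(arg, 'r:*') as t:
                t.extractall(path=recipe_dir)
            return recipe_dir, True
        if arg.endswith('.yaml'):
            return os.path.dirname(arg), False
        raise ValueError("Ignoring non-recipe: %s" % arg)
    return abspath(arg), False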
Example 56
def run_setuppy(src_dir, temp_dir, python_version, extra_specs, config,
                setup_options):
    '''
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    '''
    # TODO: we could make everyone's lives easier if we include packaging here, because setuptools
    #    needs it in recent versions.  At time of writing, it is not a package in defaults, so this
    #    actually breaks conda-build right now.  Omit it until packaging is on defaults.
    # specs = ['python %s*' % python_version, 'pyyaml', 'setuptools', 'six', 'packaging', 'appdirs']
    specs = ['python %s*' % python_version, 'pyyaml', 'setuptools']
    with open(os.path.join(src_dir, "setup.py")) as setup:
        text = setup.read()
        if 'import numpy' in text or 'from numpy' in text:
            specs.append('numpy')

    specs.extend(extra_specs)

    rm_rf(config.host_prefix)
    create_env(config.host_prefix,
               specs_or_actions=specs,
               env='host',
               subdir=config.host_subdir,
               clear_cache=False,
               config=config)
    stdlib_dir = join(
        config.host_prefix,
        'Lib' if sys.platform == 'win32' else 'lib/python%s' % python_version)

    patch = join(temp_dir, 'pypi-distutils.patch')
    with open(patch, 'w') as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))

    if exists(join(stdlib_dir, 'distutils', 'core.py-copy')):
        rm_rf(join(stdlib_dir, 'distutils', 'core.py'))
        copy2(join(stdlib_dir, 'distutils', 'core.py-copy'),
              join(stdlib_dir, 'distutils', 'core.py'))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(
                join(stdlib_dir, 'distutils', '__pycache__',
                     'core.cpython-%s%s.pyc' % sys.version_info[:2]))
            rm_rf(
                join(stdlib_dir, 'distutils', '__pycache__',
                     'core.cpython-%s%s.pyo' % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyc'))
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyo'))
    else:
        copy2(join(stdlib_dir, 'distutils', 'core.py'),
              join(stdlib_dir, 'distutils', 'core.py-copy'))
    apply_patch(join(stdlib_dir, 'distutils'), patch, config=config)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if 'PYTHONPATH' in env:
        env[str('PYTHONPATH')] = str(src_dir + ':' + env['PYTHONPATH'])
    else:
        env[str('PYTHONPATH')] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.host_python, 'setup.py', 'install']
    cmdargs.extend(setup_options)
    try:
        check_call_env(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print('$PYTHONPATH = %s' % env['PYTHONPATH'])
        sys.exit('Error: command failed: %s' % ' '.join(cmdargs))
    finally:
        chdir(cwd)
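The core.py-copy dance in run_setuppy is a back-up-once / restore-always scheme: the pristine distutils/core.py is copied aside on the first run and restored from that copy on every later run, so the patch is always applied to a clean file and never stacks. The idea as a hypothetical helper:

import os
import shutil

def refresh_from_backup(path):
    # First run: keep a pristine copy alongside the file.  Every later
    # run: restore from that copy before re-applying the patch.
    backup = path + '-copy'
    if os.path.exists(backup):
        shutil.copy2(backup, path)
    else:
        shutil.copy2(path, backup)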
Example 57
def build(m, get_src=True, verbose=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    '''
    rm_rf(prefix)

    print("BUILD START:", m.dist())
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose)

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build.sh')
        if exists(build_file):
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                with open(build_file, 'w', encoding='utf-8') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)
            cmd = ['/bin/bash', '-x', '-e', build_file]

            _check_call(cmd, env=env, cwd=source.get_dir())

    get_build_metadata(m)
    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1),
               binary_relocation=bool(
                   m.get_value('build/binary_relocation', True)))
    create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(config.bldpkgs_dir)
Example 58
def clean_build(config, folders=None):
    if not folders:
        folders = get_build_folders(config.croot)
    for folder in folders:
        rm_rf(folder)
Example 59
def cleanup():
    # session scoped cleanup is called at end of the session
    rm_rf(sp)
Example 60
def run_setuppy(src_dir, temp_dir, python_version, config, setup_options):
    '''
    Patch distutils and then run setup.py in a subprocess.

    :param src_dir: Directory containing the source code
    :type src_dir: str
    :param temp_dir: Temporary directory for storing pkginfo.yaml
    :type temp_dir: str
    '''
    specs = ['python %s*' % python_version, 'pyyaml', 'setuptools']
    with open(os.path.join(src_dir, "setup.py")) as setup:
        text = setup.read()
        if 'import numpy' in text or 'from numpy' in text:
            specs.append('numpy')
    # Do everything in the build env in case the setup.py install goes
    # haywire.
    # TODO: Try with another version of Python if this one fails. Some
    # packages are Python 2 or Python 3 only.

    create_env(config.build_prefix, specs=specs,
               clear_cache=False,
               config=config)
    stdlib_dir = join(config.build_prefix,
                      'Lib' if sys.platform == 'win32'
                      else 'lib/python%s' % python_version)

    patch = join(temp_dir, 'pypi-distutils.patch')
    with open(patch, 'w') as f:
        f.write(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))

    if exists(join(stdlib_dir, 'distutils', 'core.py-copy')):
        rm_rf(join(stdlib_dir, 'distutils', 'core.py'))
        copy2(join(stdlib_dir, 'distutils', 'core.py-copy'),
              join(stdlib_dir, 'distutils', 'core.py'))
        # Avoid race conditions. Invalidate the cache.
        if PY3:
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                'core.cpython-%s%s.pyc' % sys.version_info[:2]))
            rm_rf(join(stdlib_dir, 'distutils', '__pycache__',
                'core.cpython-%s%s.pyo' % sys.version_info[:2]))
        else:
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyc'))
            rm_rf(join(stdlib_dir, 'distutils', 'core.pyo'))
    else:
        copy2(join(stdlib_dir, 'distutils', 'core.py'), join(stdlib_dir,
            'distutils', 'core.py-copy'))
    apply_patch(join(stdlib_dir, 'distutils'), patch, config=config)

    # Save PYTHONPATH for later
    env = os.environ.copy()
    if 'PYTHONPATH' in env:
        env[str('PYTHONPATH')] = str(src_dir + ':' + env['PYTHONPATH'])
    else:
        env[str('PYTHONPATH')] = str(src_dir)
    cwd = getcwd()
    chdir(src_dir)
    cmdargs = [config.build_python, 'setup.py', 'install']
    cmdargs.extend(setup_options)
    try:
        subprocess.check_call(cmdargs, env=env)
    except subprocess.CalledProcessError:
        print('$PYTHONPATH = %s' % env['PYTHONPATH'])
        sys.exit('Error: command failed: %s' % ' '.join(cmdargs))
    finally:
        chdir(cwd)
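Both run_setuppy variants double the backslashes in temp_dir before interpolating it into DISTUTILS_PATCH. That is because the path lands inside a Python string literal in the patched core.py, where single backslashes would be consumed as escape sequences. A tiny demonstration (the template here is a made-up stand-in for the real one):

DISTUTILS_PATCH = "pkginfo_dir = '{}'\n"   # hypothetical stand-in for the real template

temp_dir = 'C:\\Users\\me\\tmp'
print(DISTUTILS_PATCH.format(temp_dir.replace('\\', '\\\\')))
# pkginfo_dir = 'C:\\Users\\me\\tmp'
# The doubled backslashes survive into the written patch, so the string
# literal in the patched core.py evaluates back to the original path.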