Example 1
def tests_failed(package_or_metadata, move_broken, broken_dir, config):
    """
    Causes conda to exit if any of the given package's tests failed.

    :param m: Package's metadata
    :type m: Metadata
    """
    if not isdir(broken_dir):
        os.makedirs(broken_dir)

    if hasattr(package_or_metadata, "config"):
        pkg = bldpkg_path(package_or_metadata)
    else:
        pkg = package_or_metadata
    dest = join(broken_dir, os.path.basename(pkg))

    if move_broken:
        log = utils.get_logger(__name__)
        try:
            shutil.move(pkg, dest)
            log.warn(
                "Tests failed for %s - moving package to %s"
                % (os.path.basename(pkg), broken_dir)
            )
        except OSError:
            pass
        update_index(
            os.path.dirname(os.path.dirname(pkg)), verbose=config.debug, threads=1
        )
    sys.exit("TESTS FAILED: " + os.path.basename(pkg))
Example 2
def create_env(prefix, specs, clear_cache=True, verbose=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=clear_cache)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 3
def create_env(prefix, specs, clear_cache=True, verbose=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=clear_cache)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 4
def main():
    _, args = parse_args(sys.argv[1:])
    args = args.__dict__

    config = Config(**args)
    channel_urls = get_rc_urls() + get_channel_urls({})

    init_api_context()

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe = args["recipe"][0]

    global solver
    solver = MambaSolver(channel_urls, context.subdir)
    solver.replace_channels()
    cbc, _ = conda_build.variants.get_package_combined_spec(recipe,
                                                            config=config)

    if args["test"]:
        api.test(recipe, config=config)
    else:
        api.build(
            recipe,
            post=args["post"],
            build_only=args["build_only"],
            notest=args["notest"],
            config=config,
            variants=args["variants"],
        )
Example 5
def main():
    p = argparse.ArgumentParser(
        description="Update package index metadata files in given directories")

    p.add_argument('dir',
                   help='Directory that contains an index to be updated.',
                   nargs='*',
                   default=[os.getcwd()])

    p.add_argument('-f',
                   "--force",
                   action="store_true",
                   help="force reading all files")

    p.add_argument('-q', "--quiet", action="store_true")

    args = p.parse_args()

    dir_paths = [abspath(path) for path in args.dir]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    for path in dir_paths:
        update_index(path, verbose=(not args.quiet), force=args.force)
Example 6
def update_index(dir_paths,
                 config=None,
                 force=False,
                 check_md5=False,
                 remove=False,
                 channel_name=None,
                 subdir=None,
                 threads=None,
                 patch_generator=None,
                 verbose=False,
                 progress=False,
                 hotfix_source_repo=None,
                 **kwargs):
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index
    from conda_build.utils import ensure_list
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    for path in dir_paths:
        update_index(path,
                     check_md5=check_md5,
                     channel_name=channel_name,
                     patch_generator=patch_generator,
                     threads=threads,
                     verbose=verbose,
                     progress=progress,
                     hotfix_source_repo=hotfix_source_repo,
                     subdirs=ensure_list(subdir))
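
A minimal usage sketch for the wrapper above, assuming it is the update_index exposed through conda_build.api; the channel path and "linux-64" subdir below are placeholders, not values taken from these examples.

from conda_build.api import update_index

update_index(
    "/path/to/local-channel",  # directory holding <subdir>/ folders
    subdir="linux-64",         # limit indexing to one platform subdir
    threads=1,                 # serial indexing, as several examples above do
    verbose=True,
)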
Example 7
def run_build(args):
    if getattr(args, "json", False):
        global console
        console.quiet = True

    selected_features = extract_features(args.features)

    folder = args.recipe_dir or os.path.dirname(args.target)
    variant = {"target_platform": args.target_platform or context.subdir}
    cbc, config = get_config(folder, variant, args.variant_config_files)
    cbc["target_platform"] = [variant["target_platform"]]

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    console.print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    all_recipes = find_all_recipes(args.target, config)  # [noqa]

    console.print("\n[yellow]Assembling all recipes and variants[/yellow]\n")

    for recipe in all_recipes:
        build_recipe(
            args.command,
            recipe["recipe_file"],
            cbc,
            config,
            selected_features=selected_features,
            notest=getattr(args, "notest", False),
            skip_existing=getattr(args, "skip_existing", False) != "default",
            interactive=getattr(args, "interactive", False),
            skip_fast=getattr(args, "skip_existing", "default") == "fast",
        )
Example 8
def main():
    p = argparse.ArgumentParser(
        description="Update package index metadata files in given directories")

    p.add_argument('dir',
                   help='Directory that contains an index to be updated.',
                   nargs='*',
                   default=[os.getcwd()])

    p.add_argument('-f', "--force",
                   action="store_true",
                   help="force reading all files")

    p.add_argument('-q', "--quiet",
                   action="store_true")

    args = p.parse_args()

    dir_paths = [abspath(path) for path in args.dir]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    for path in dir_paths:
        update_index(path, verbose=(not args.quiet), force=args.force)
Example 9
def main():
    _, args = parse_args(sys.argv[1:])
    args = args.__dict__

    config = Config(**args)

    init_api_context()

    config.output_folder = os.path.abspath(config.output_folder)
    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe = args["recipe"][0]

    if args["test"]:
        api.test(recipe, config=config)
    else:
        api.build(
            recipe,
            post=args["post"],
            build_only=args["build_only"],
            notest=args["notest"],
            config=config,
            variants=args["variants"],
        )
Example 10
def create_env(prefix, specs, clear_cache=True, verbose=True, channel_urls=(),
    override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs: # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index(channel_urls=[url_path(config.croot)] + list(channel_urls),
            prepend=not override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 11
def update_index(dir_paths,
                 config=None,
                 force=False,
                 check_md5=False,
                 remove=False):
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    if not config:
        config = Config()

    for path in dir_paths:
        update_index(path,
                     force=force,
                     check_md5=check_md5,
                     remove=remove,
                     verbose=config.verbose,
                     locking=config.locking,
                     timeout=config.timeout)
Example 12
def create_env(prefix,
               specs,
               clear_cache=True,
               verbose=True,
               channel_urls=(),
               override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=clear_cache,
                                channel_urls=channel_urls,
                                override_channels=override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 13
def create_env(prefix,
               specs,
               clear_cache=True,
               verbose=True,
               channel_urls=(),
               override_channels=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        if clear_cache:
            # remove the cache such that a refetch is made,
            # this is necessary because we add the local build repo URL
            fetch_index.cache = {}
        index = get_index(channel_urls=[url_path(config.croot)] +
                          list(channel_urls),
                          prepend=not override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 14
def run_build(args):
    folder = args.recipe_dir or os.path.dirname(args.target)
    variant = {}
    if args.target_platform:
        variant["target_platform"] = args.target_platform

    cbc, config = get_config(folder, variant, args.variant_config_files)
    cbc["target_platform"] = [args.target_platform]

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    console.print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    all_recipes = find_all_recipes(args.target, config)  # [noqa]

    console.print("\n[yellow]Assembling all recipes and variants[/yellow]\n")

    for recipe in all_recipes:
        build_recipe(
            args,
            recipe["recipe_file"],
            cbc,
            config,
            notest=getattr(args, "notest", False),
        )
Example 15
def test(recipedir_or_package_or_metadata, move_broken=True, config=None, **kwargs):
    import os
    from conda_build.conda_interface import url_path
    from conda_build.build import test
    from conda_build.render import render_recipe
    from conda_build.utils import get_recipe_abspath, rm_rf
    from conda_build import source

    config = get_or_merge_config(config, **kwargs)

    # we want to know if we're dealing with package input.  If so, we can move the input on success.
    is_package = False

    if hasattr(recipedir_or_package_or_metadata, 'config'):
        metadata = recipedir_or_package_or_metadata
        recipe_config = metadata.config
    else:
        recipe_dir, need_cleanup = get_recipe_abspath(recipedir_or_package_or_metadata)
        config.need_cleanup = need_cleanup

        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided
        metadata, _, _ = render_recipe(recipe_dir, config=config)
        recipe_config = config
        # this recipe came from an extracted tarball.
        if need_cleanup:
            # ensure that the local location of the package is indexed, so that conda can find the
            #    local package
            local_location = os.path.dirname(recipedir_or_package_or_metadata)
            # strip off extra subdir folders
            for platform in ('win', 'linux', 'osx'):
                if os.path.basename(local_location).startswith(platform + "-"):
                    local_location = os.path.dirname(local_location)
            update_index(local_location, config=config)
            local_url = url_path(local_location)
            # channel_urls is an iterable, but we don't know if it's a tuple or list.  Don't know
            #    how to add elements.
            recipe_config.channel_urls = list(recipe_config.channel_urls)
            recipe_config.channel_urls.insert(0, local_url)
            is_package = True
            if metadata.meta.get('test') and metadata.meta['test'].get('source_files'):
                source.provide(metadata.path, metadata.get_section('source'), config=config)
            rm_rf(recipe_dir)

    with recipe_config:
        # This will create a new local build folder if and only if config doesn't already have one.
        #   What this means is that if we're running a test immediately after build, we use the one
        #   that the build already provided

        recipe_config.compute_build_id(metadata.name())
        test_result = test(metadata, config=recipe_config, move_broken=move_broken)

        if (test_result and is_package and hasattr(recipe_config, 'output_folder') and
                recipe_config.output_folder):
            os.rename(recipedir_or_package_or_metadata,
                      os.path.join(recipe_config.output_folder,
                                   os.path.basename(recipedir_or_package_or_metadata)))
    return test_result
Example 16
def main():
    p = ArgumentParser(
        description="Update package index metadata files in given directories."
    )

    p.add_argument(
        'dir',
        help='Directory that contains an index to be updated.',
        nargs='*',
        default=[os.getcwd()],
    )

    p.add_argument(
        '-c',
        "--check-md5",
        action="store_true",
        help="""Use MD5 values instead of file modification times for determining if a
        package's metadata needs to be updated.""",
    )

    p.add_argument(
        '-f',
        "--force",
        action="store_true",
        help="Force reading all files.",
    )

    p.add_argument(
        '-q',
        "--quiet",
        action="store_true",
        help="Don't show any output.",
    )
    p.add_argument(
        '--no-remove',
        action="store_false",
        dest="remove",
        default=True,
        help="Don't remove entries for files that don't exist.",
    )

    args = p.parse_args()

    dir_paths = [abspath(path) for path in args.dir]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    for path in dir_paths:
        update_index(path,
                     verbose=(not args.quiet),
                     force=args.force,
                     check_md5=args.check_md5,
                     remove=args.remove)
Example 17
def test_patch_from_tarball(testing_workdir):
    """This is how we expect external communities to provide patches to us.
    We can't let them just give us Python files for us to run, because of the
    security risk of arbitrary code execution."""
    _build_test_index(testing_workdir)

    # our hotfix metadata can be generated any way you want.  Hard-code this here, but in general,
    #    people will use some python file to generate this.

    replacement_dict = {}
    replacement_dict["track_features_test-1.0-0.tar.bz2"] = {
        "track_features": None
    }
    replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = {
        "depends": ["zlib", "dummy"],
        "features": None
    }

    patch = {
        "patch_instructions_version": 1,
        "packages": replacement_dict,
        "revoke": ["revoke_test-1.0-0.tar.bz2"],
        "remove": ["remove_test-1.0-0.tar.bz2"],
    }
    with open("patch_instructions.json", "w") as f:
        json.dump(patch, f)

    with tarfile.open("patch_archive.tar.bz2", "w:bz2") as archive:
        archive.add("patch_instructions.json",
                    "%s/patch_instructions.json" % subdir)

    update_index(testing_workdir, patch_generator="patch_archive.tar.bz2")

    with open(os.path.join(testing_workdir, subdir, 'repodata.json')) as f:
        patched_metadata = json.load(f)

    pkg_list = patched_metadata['packages']
    assert "track_features_test-1.0-0.tar.bz2" in pkg_list
    assert "track_features" not in pkg_list[
        "track_features_test-1.0-0.tar.bz2"]

    assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list
    assert "features" not in pkg_list[
        "hotfix_depends_test-1.0-dummy_0.tar.bz2"]
    assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"][
        "depends"]
    assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"][
        "depends"]

    assert "revoke_test-1.0-0.tar.bz2" in pkg_list
    assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"]
    assert "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"][
        "depends"]

    assert "remove_test-1.0-0.tar.bz2" not in pkg_list
Example 18
def is_package_built(metadata, config):
    for d in config.bldpkgs_dirs:
        if not os.path.isdir(d):
            os.makedirs(d)
        update_index(d, config, could_be_mirror=False)
    index = get_build_index(config=config, clear_cache=True)

    urls = [url_path(config.croot)] + get_rc_urls() + get_local_urls() + ['local', ]
    if config.channel_urls:
        urls.extend(config.channel_urls)

    # will be empty if none are found, and evaluates to False
    package_exists = [url for url in urls if url + '::' + metadata.pkg_fn() in index]
    return package_exists or metadata.pkg_fn() in index
Example 19
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(
            logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix,
                                              True)['PATH']

        index = get_build_index(clear_cache=clear_cache)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 20
def build(m, get_src=True, pypi=False):
    rm_rf(prefix)
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')], pypi)

    print("BUILD START:", m.dist())

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        cmd = ['/bin/bash', '-x', '-e', join(m.path, 'build.sh')]
        _check_call(cmd, env=env, cwd=source.get_dir())

    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(
            m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1))
    create_info_files(m, sorted(files2 - files1))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(bldpkgs_dir)
Example 21
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False):
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    if not config:
        config = Config()

    for path in dir_paths:
        update_index(path, config, force=force, check_md5=check_md5, remove=remove)
Example 22
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if not debug:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests.packages.urllib3.connectionpool").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']

        index = get_build_index(clear_cache=clear_cache)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 23
def test_channel_patch_instructions_json(testing_workdir):
    _build_test_index(testing_workdir)

    replacement_dict = {}
    replacement_dict["track_features_test-1.0-0.tar.bz2"] = {
        "track_features": None
    }
    replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = {
        "depends": ["zlib", "dummy"],
        "features": None
    }

    patch = {
        "patch_instructions_version": 1,
        "packages": replacement_dict,
        "revoke": ["revoke_test-1.0-0.tar.bz2"],
        "remove": ["remove_test-1.0-0.tar.bz2"],
    }

    with open(os.path.join(testing_workdir, subdir, 'patch_instructions.json'),
              'w') as f:
        json.dump(patch, f)

    update_index(testing_workdir)

    with open(os.path.join(testing_workdir, subdir, 'repodata.json')) as f:
        patched_metadata = json.load(f)

    pkg_list = patched_metadata['packages']
    assert "track_features_test-1.0-0.tar.bz2" in pkg_list
    assert "track_features" not in pkg_list[
        "track_features_test-1.0-0.tar.bz2"]

    assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list
    assert "features" not in pkg_list[
        "hotfix_depends_test-1.0-dummy_0.tar.bz2"]
    assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"][
        "depends"]
    assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"][
        "depends"]

    assert "revoke_test-1.0-0.tar.bz2" in pkg_list
    assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"]
    assert "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"][
        "depends"]

    assert "remove_test-1.0-0.tar.bz2" not in pkg_list
Example 24
def main():
    p = ArgumentParser(
        description="Update package index metadata files in given directories.")

    p.add_argument(
        'dir',
        help='Directory that contains an index to be updated.',
        nargs='*',
        default=[os.getcwd()],
    )

    p.add_argument(
        '-c', "--check-md5",
        action="store_true",
        help="""Use MD5 values instead of file modification times for determining if a
        package's metadata needs to be updated.""",
    )

    p.add_argument(
        '-f', "--force",
        action="store_true",
        help="Force reading all files.",
    )

    p.add_argument(
        '-q', "--quiet",
        action="store_true",
        help="Don't show any output.",
    )
    p.add_argument(
        '--no-remove',
        action="store_false",
        dest="remove",
        default=True,
        help="Don't remove entries for files that don't exist.",
    )

    args = p.parse_args()

    dir_paths = [abspath(path) for path in args.dir]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    for path in dir_paths:
        update_index(path, verbose=(not args.quiet), force=args.force,
            check_md5=args.check_md5, remove=args.remove)
Example 25
def test_patch_from_tarball(testing_workdir):
    """This is how we expect external communities to provide patches to us.
    We can't let them just give us Python files for us to run, because of the
    security risk of arbitrary code execution."""
    _build_test_index(testing_workdir)

    # our hotfix metadata can be generated any way you want.  Hard-code this here, but in general,
    #    people will use some python file to generate this.

    replacement_dict = {}
    replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None}
    replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = {
                             "depends": ["zlib", "dummy"],
                             "features": None}

    patch = {
        "patch_instructions_version": 1,
        "packages": replacement_dict,
        "revoke": ["revoke_test-1.0-0.tar.bz2"],
        "remove": ["remove_test-1.0-0.tar.bz2"],
    }
    with open("patch_instructions.json", "w") as f:
        json.dump(patch, f)

    with tarfile.open("patch_archive.tar.bz2", "w:bz2") as archive:
        archive.add("patch_instructions.json", "%s/patch_instructions.json" % subdir)

    update_index(testing_workdir, patch_generator="patch_archive.tar.bz2")

    with open(os.path.join(testing_workdir, subdir, 'repodata.json')) as f:
        patched_metadata = json.load(f)

    pkg_list = patched_metadata['packages']
    assert "track_features_test-1.0-0.tar.bz2" in pkg_list
    assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"]

    assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list
    assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]
    assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"]
    assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"]

    assert "revoke_test-1.0-0.tar.bz2" in pkg_list
    assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"]
    assert "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"]

    assert "remove_test-1.0-0.tar.bz2" not in pkg_list
Example 26
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False, channel_name=None,
                 subdir=None, threads=None, patch_generator=None, verbose=False, progress=False,
                 hotfix_source_repo=None, **kwargs):
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3
    from conda_build.index import update_index
    from conda_build.utils import ensure_list
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    for path in dir_paths:
        update_index(path, check_md5=check_md5, channel_name=channel_name,
                     patch_generator=patch_generator, threads=threads, verbose=verbose,
                     progress=progress, hotfix_source_repo=hotfix_source_repo,
                     subdirs=ensure_list(subdir))
Example 27
def run_build(args):

    folder = args.recipe_dir
    cbc, config = get_config(folder)

    # target = os.path.dirname(args.target)

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    console.print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    all_recipes = find_all_recipes(args.target, config)  # [noqa]

    console.print("\n[yellow]Assembling all recipes and variants[/yellow]\n")

    for recipe in all_recipes:
        build_recipe(args, recipe["recipe_file"], cbc, config)
Example 28
def create_env(prefix, specs, clear_cache=True, verbose=True, channel_urls=(), override_channels=False):
    """
    Create a conda environment for the given prefix and specs.
    """
    if not isdir(config.bldpkgs_dir):
        os.makedirs(config.bldpkgs_dir)
    update_index(config.bldpkgs_dir)
    if specs:  # Don't waste time if there is nothing to do
        index = get_build_index(clear_cache=clear_cache, channel_urls=channel_urls, override_channels=override_channels)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)
        plan.execute_actions(actions, index, verbose=verbose)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
Example 29
def update_index(dir_paths,
                 config=None,
                 force=False,
                 check_md5=False,
                 remove=False,
                 channel_name=None,
                 subdir=None,
                 threads=None,
                 patch_generator=None,
                 verbose=False,
                 progress=False,
                 hotfix_source_repo=None,
                 shared_format_cache=True,
                 current_index_versions=None,
                 **kwargs):
    import yaml
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3, string_types
    from conda_build.index import update_index
    from conda_build.utils import ensure_list
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    if isinstance(current_index_versions, string_types):
        with open(current_index_versions) as f:
            current_index_versions = yaml.safe_load(f)

    for path in dir_paths:
        update_index(path,
                     check_md5=check_md5,
                     channel_name=channel_name,
                     patch_generator=patch_generator,
                     threads=threads,
                     verbose=verbose,
                     progress=progress,
                     hotfix_source_repo=hotfix_source_repo,
                     subdirs=ensure_list(subdir),
                     shared_format_cache=shared_format_cache,
                     current_index_versions=current_index_versions)
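
A hedged sketch of the two forms current_index_versions can take here: the mapping shape (package name to a list of versions) is an assumption drawn from the YAML loading above, and the package names and paths are placeholders.

# Passed directly as a mapping (shape assumed: name -> list of versions)...
update_index("/path/to/local-channel",
             current_index_versions={"python": ["3.8.2"], "numpy": ["1.18.1"]})

# ...or as a path to a YAML file with the same shape, loaded with
# yaml.safe_load as in the wrapper above:
#   python:
#     - 3.8.2
#   numpy:
#     - 1.18.1
update_index("/path/to/local-channel",
             current_index_versions="current_index_versions.yml")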
Example 30
def prepare(**kwargs):
    """
    Prepare and configure the stage for mambabuild to run.

    The given **kwargs are passed to conda-build's Config, which
    this function returns.
    """
    config = Config(**kwargs)
    config.channel_urls = get_channel_urls(kwargs)

    init_api_context()

    config.output_folder = os.path.abspath(config.output_folder)
    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)

    print(f"Updating build index: {(config.output_folder)}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    return config
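
A short usage sketch for prepare above; the kwargs simply become conda-build Config fields, and the output_folder value is a placeholder.

# Hypothetical call: any Config field (e.g. output_folder, debug) can be
# passed through; prepare() indexes output_folder and returns the Config.
config = prepare(output_folder="./local-channel", debug=False)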
Example 31
def main():
    p = OptionParser(
        usage="usage: %prog [options] DIR [DIR ...]",
        description="display useful information about tar files")

    p.add_option('-f', "--force",
                 action = "store_true",
                 help = "force reading all files")

    p.add_option('-q', "--quiet",
                 action = "store_true")

    opts, args = p.parse_args()

    if len(args) == 0:
        dir_paths = [os.getcwd()]
    else:
        dir_paths = [abspath(path) for path in args]

    for path in dir_paths:
        update_index(path, verbose=not opts.quiet, force=opts.force)
Example 32
def create_env(pref, specs, pypi=False):
    if not isdir(bldpkgs_dir):
        os.makedirs(bldpkgs_dir)
    update_index(bldpkgs_dir)
    # remove the cache such that a refetch is made,
    # this is necessary because we add the local build repo URL
    fetch_index.cache = {}
    index = get_index([url_path(config.croot)])

    cc.pkgs_dirs = cc.pkgs_dirs[:1]

    if pypi:
        from conda.from_pypi import install_from_pypi
        specs = install_from_pypi(pref, index, specs)

    actions = plan.install_actions(pref, index, specs)
    plan.display_actions(actions, index)
    plan.execute_actions(actions, index, verbose=True)
    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(pref):
        os.makedirs(pref)
Example 33
def test_channel_patch_instructions_json(testing_workdir):
    _build_test_index(testing_workdir)

    replacement_dict = {}
    replacement_dict["track_features_test-1.0-0.tar.bz2"] = {"track_features": None}
    replacement_dict["hotfix_depends_test-1.0-dummy_0.tar.bz2"] = {
                             "depends": ["zlib", "dummy"],
                             "features": None}

    patch = {
        "patch_instructions_version": 1,
        "packages": replacement_dict,
        "revoke": ["revoke_test-1.0-0.tar.bz2"],
        "remove": ["remove_test-1.0-0.tar.bz2"],
    }

    with open(os.path.join(testing_workdir, subdir, 'patch_instructions.json'), 'w') as f:
        json.dump(patch, f)

    update_index(testing_workdir)

    with open(os.path.join(testing_workdir, subdir, 'repodata.json')) as f:
        patched_metadata = json.load(f)

    pkg_list = patched_metadata['packages']
    assert "track_features_test-1.0-0.tar.bz2" in pkg_list
    assert "track_features" not in pkg_list["track_features_test-1.0-0.tar.bz2"]

    assert "hotfix_depends_test-1.0-dummy_0.tar.bz2" in pkg_list
    assert "features" not in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]
    assert "zlib" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"]
    assert "dummy" in pkg_list["hotfix_depends_test-1.0-dummy_0.tar.bz2"]["depends"]

    assert "revoke_test-1.0-0.tar.bz2" in pkg_list
    assert "zlib" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"]
    assert "package_has_been_revoked" in pkg_list["revoke_test-1.0-0.tar.bz2"]["depends"]

    assert "remove_test-1.0-0.tar.bz2" not in pkg_list
Example 34
def update_index(dir_paths, config=None, force=False, check_md5=False, remove=False, channel_name=None,
                 subdir=None, threads=None, patch_generator=None, verbose=False, progress=False,
                 hotfix_source_repo=None, current_index_versions=None, **kwargs):
    import yaml
    from locale import getpreferredencoding
    import os
    from .conda_interface import PY3, string_types
    from conda_build.index import update_index
    from conda_build.utils import ensure_list
    dir_paths = [os.path.abspath(path) for path in _ensure_list(dir_paths)]
    # Don't use byte strings in Python 2
    if not PY3:
        dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]

    if isinstance(current_index_versions, string_types):
        with open(current_index_versions) as f:
            current_index_versions = yaml.safe_load(f)

    for path in dir_paths:
        update_index(path, check_md5=check_md5, channel_name=channel_name,
                     patch_generator=patch_generator, threads=threads, verbose=verbose,
                     progress=progress, hotfix_source_repo=hotfix_source_repo,
                     subdirs=ensure_list(subdir), current_index_versions=current_index_versions)
Example 35
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda").setLevel(logging.WARN)
        logging.getLogger("binstar").setLevel(logging.WARN)
        logging.getLogger("install").setLevel(logging.ERROR)
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']

        index = get_build_index(clear_cache=clear_cache)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)

        try:
            plan.execute_actions(actions, index, verbose=debug)
        except SystemExit as exc:
            if "too short in" in str(exc) and config.prefix_length > 80:
                log.warn("Build prefix failed with prefix length {0}."
                         .format(config.prefix_length))
                log.warn("Error was: ")
                log.warn(str(exc))
                log.warn("One or more of your package dependencies needs to be rebuilt with a "
                         "longer prefix length.")
                log.warn("Falling back to legacy prefix length of 80 characters.")
                log.warn("Your package will not install into prefixes longer than 80 characters.")
                config.prefix_length = 80
                create_env(config.build_prefix, specs, clear_cache=clear_cache, debug=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
Example 36
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
            # We must use a long prefix here as the package will only be
            # installable into prefixes shorter than this one.
            config.use_long_build_prefix = True
        else:
            # In case there are multiple builds in the same process
            config.use_long_build_prefix = False

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
            [ms.spec for ms in m.ms_depends('build')],
            verbose=verbose, channel_urls=channel_urls,
            override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files().difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Example 37
def build(m, config, post=None, need_source_download=True, need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(), "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print("    (actual version deferred until further download or env creation)")

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(external.find_executable(vcs_executable,
                                                                config.build_prefix) or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or
                        not any(spec.startswith('python') and "3." in spec
                                for spec in specs)):
                    specs.append(vcs_source)

                    log.warn("Your recipe depends on %s at build time (for templates), "
                            "but you have not listed it as a build dependency.  Doing "
                                "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                    " does not yet support Python 3.  Please handle all of "
                                    "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(m,
                                                                no_download_source=False,
                                                                force_download=True,
                                                                config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." %
                m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #    depending on the source.
            src_dir = config.work_dir
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files", pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(config.work_dir, 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(conda_root=root_script_dir + os.path.sep,
                                                         build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file, config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)

        if not is_noarch_python(m):
            create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')),
                     noarch=m.get_value('build/noarch'))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1):
            meta_files = (tuple(f for f in files2 - files1 if config.meta_dir in
                    join(config.build_prefix, f)),)
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m, sorted(files2 - files1),
                    prefix=config.build_prefix,
                    build_python=config.build_python,
                    croot=config.croot)

        entry_point_script_names = get_entry_point_script_names(get_entry_points(config, m))
        if is_noarch_python(m):
            pkg_files = [f for f in sorted(files2 - files1) if f not in entry_point_script_names]
        else:
            pkg_files = sorted(files2 - files1)

        create_info_files(m, pkg_files, config=config, prefix=config.build_prefix)

        if m.get_value('build/noarch_python'):
            noarch_python.transform(m, sorted(files2 - files1), config.build_prefix)
        elif is_noarch_python(m):
            noarch_python.populate_files(
                m, pkg_files, config.build_prefix, entry_point_script_names)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # empty files go to the back of the sort order via a large key (100000)
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config, could_be_mirror=False)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
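
The order key above puts info/* entries first and then sorts by ascending size, so small manifest and JSON files can be read from the archive without decompressing large binaries. A self-contained sketch of that key, with invented file names and sizes:

import os

def order(f, sizes):
    # empty files go to the back of the sort order via a large key (100000)
    fsize = sizes[f] or 100000
    # info/* entries sort first (0), everything else after (1)
    info_order = int(os.path.dirname(f) != 'info')
    return info_order, fsize

sizes = {'info/files': 50, 'info/index.json': 200, 'lib/libbig.so': 5000000}
print(sorted(sizes, key=lambda f: order(f, sizes)))
# ['info/files', 'info/index.json', 'lib/libbig.so']
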
Esempio n. 39
0
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert 'markupsafe' not in sys.modules
        delete_trash(None)

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if len(str(version)) != 2 and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    already_built = []
    to_build = args.recipe[:]
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls, override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels, include_recipe=args.include_recipe)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build.append(dep_pkg)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
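
The loop above implements a simple retry pattern for recursive dependency builds: when a build fails on a missing dependency for which a local recipe directory exists, the current recipe is pushed back onto the queue behind the dependency's recipe, so the dependency builds first. A stripped-down sketch of that control flow, where build_one and find_recipe_for are hypothetical stand-ins for the real build and recipe-lookup logic:

from collections import deque

def build_all(recipe_args, build_one, find_recipe_for):
    recipes = deque(recipe_args)
    attempted = []
    while recipes:
        arg = recipes.popleft()
        try:
            build_one(arg)
        except RuntimeError as e:
            dep = str(e).split(': ')[-1].split(' ')[0]
            dep_recipe = find_recipe_for(dep)
            if dep_recipe is None or dep in attempted:
                raise  # no recipe found, or we already tried: give up
            attempted.append(dep)
            # retry this recipe after the dependency has been built
            recipes.appendleft(arg)
            recipes.appendleft(dep_recipe)
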
Esempio n. 40
0
def build(m,
          get_src=True,
          verbose=True,
          post=None,
          channel_urls=(),
          override_channels=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose,
                   channel_urls=channel_urls,
                   override_channels=override_channels)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for f in m.always_include_files():
            if f not in files1:
                sys.exit("Error: File %s from always_include_files not found" %
                         f)
        files1 = files1.difference(set(m.always_include_files()))
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        post_build(m, sorted(files2 - files1))
        create_info_files(m,
                          sorted(files2 - files1),
                          include_recipe=bool(m.path))
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Esempio n. 41
0
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except Exception:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    set_language_env_vars(args, parser, execute=execute)

    if args.skip_existing:
        for d in config.bldpkgs_dirs:
            if not isdir(d):
                makedirs(d)
            update_index(d)
        index = build.get_build_index(clear_cache=True)

    already_built = set()
    to_build_recursive = []
    recipes = deque(args.recipe)
    while recipes:
        arg = recipes.popleft()
        try_again = False
        # Don't use byte literals for paths in Python 2
        if not PY3:
            arg = arg.decode(getpreferredencoding() or 'utf-8')
        if isfile(arg):
            if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                recipe_dir = tempfile.mkdtemp()
                t = tarfile.open(arg, 'r:*')
                t.extractall(path=recipe_dir)
                t.close()
                need_cleanup = True
            else:
                print("Ignoring non-recipe: %s" % arg)
                continue
        else:
            recipe_dir = abspath(arg)
            need_cleanup = False

        # recurse looking for meta.yaml that is potentially not in immediate folder
        recipe_dir = find_recipe(recipe_dir)
        if not isdir(recipe_dir):
            sys.exit("Error: no such directory: %s" % recipe_dir)

        # this fully renders any jinja templating, throwing an error if any data is missing
        m, need_source_download = render_recipe(recipe_dir, no_download_source=False,
                                                verbose=False)
        if m.get_value('build/noarch_python'):
            config.noarch = True

        if args.check and len(args.recipe) > 1:
            print(m.path)
        m.check_fields()
        if args.check:
            continue
        if m.skip():
            print("Skipped: The %s recipe defines build/skip for this "
                    "configuration." % m.dist())
            continue
        if args.skip_existing:
            # 'or m.pkg_fn() in index' is for conda <4.1 and could be removed in the future.
            if ('local::' + m.pkg_fn() in index or
                    m.pkg_fn() in index or
                    m.pkg_fn() in already_built):
                print(m.dist(), "is already built, skipping.")
                continue
        if args.output:
            print(bldpkg_path(m))
            continue
        elif args.test:
            build.test(m, move_broken=False)
        elif args.source:
            source.provide(m.path, m.get_section('source'), verbose=build.verbose)
            print('Source tree in:', source.get_dir())
        else:
            # This loop recursively builds dependencies if recipes exist
            if args.build_only:
                post = False
                args.notest = True
                args.binstar_upload = False
            elif args.post:
                post = True
                args.notest = True
                args.binstar_upload = False
            else:
                post = None
            try:
                build.build(m, post=post,
                            include_recipe=args.include_recipe,
                            keep_old_work=args.keep_old_work,
                            need_source_download=need_source_download,
                            dirty=args.dirty)
            except (NoPackagesFound, Unsatisfiable) as e:
                error_str = str(e)
                # Typically if a conflict is with one of these
                # packages, the other package needs to be rebuilt
                # (e.g., a conflict with 'python 3.5*' and 'x' means
                # 'x' isn't built for Python 3.5 and needs to be
                # rebuilt).
                skip_names = ['python', 'r']
                add_recipes = []
                for line in error_str.splitlines():
                    if not line.startswith('  - '):
                        continue
                    pkg = line.lstrip('  - ').split(' -> ')[-1]
                    pkg = pkg.strip().split(' ')[0]
                    if pkg in skip_names:
                        continue
                    recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                    if os.path.exists(pkg):
                        recipe_glob.append(pkg)
                    if recipe_glob:
                        try_again = True
                        for recipe_dir in recipe_glob:
                            if pkg in to_build_recursive:
                                sys.exit(str(e))
                            print(error_str)
                            print(("Missing dependency {0}, but found" +
                                    " recipe directory, so building " +
                                    "{0} first").format(pkg))
                            add_recipes.append(recipe_dir)
                            to_build_recursive.append(pkg)
                    else:
                        raise
                recipes.appendleft(arg)
                recipes.extendleft(reversed(add_recipes))

            if try_again:
                continue

            if not args.notest:
                build.test(m)

        if need_cleanup:
            shutil.rmtree(recipe_dir)

        # outputs message, or does upload, depending on value of args.binstar_upload
        handle_binstar_upload(build.bldpkg_path(m), args)

        already_built.add(m.pkg_fn())
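
The except block above scrapes candidate package names out of the solver's error text: each "  - " line is split on " -> ", and the first token of the last element is taken as the package name. A small sketch of that parsing on a made-up error message:

error_str = """Unsatisfiable dependencies:
  - mypkg 1.0* -> python 3.5*
  - other 2.1*"""

pkgs = []
for line in error_str.splitlines():
    if not line.startswith('  - '):
        continue
    pkg = line.lstrip('  - ').split(' -> ')[-1]
    pkgs.append(pkg.strip().split(' ')[0])

print(pkgs)  # ['python', 'other']
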
Esempio n. 42
0
def bundle_conda(metadata, initial_files, env, files_selector=None):

    files = post_process_files(metadata, initial_files)

    # first filter is so that info_files does not pick up ignored files
    files = utils.filter_files(files, prefix=metadata.config.host_prefix)
    if files_selector:
        files = select_files(files, files_selector.get("include"),
                             files_selector.get("exclude"))

    console.print(f"\n[yellow]Adding files for {metadata.name()}[/yellow]\n")
    if files:
        for f in sorted(files):
            console.print(f"- {f}")
    else:
        console.print(
            f"[red]ATTENTION: No files added in target [bold]{metadata.name()}[/bold][/red]"
        )
    console.print("\n")

    # this is also copying things like run_test.sh into info/recipe
    utils.rm_rf(os.path.join(metadata.config.info_dir, "test"))

    output = {}

    with tmp_chdir(metadata.config.host_prefix):
        output["checksums"] = create_info_files(
            metadata, files, prefix=metadata.config.host_prefix)

    # here we add the info files into the prefix, so we want to re-collect the files list
    prefix_files = set(utils.prefix_files(metadata.config.host_prefix))
    files = utils.filter_files(prefix_files - initial_files,
                               prefix=metadata.config.host_prefix)
    if files_selector:
        include_files = files_selector.get("include")
        if include_files:
            include_files += ["info/*"]
        files = select_files(files, include_files,
                             files_selector.get("exclude"))

    basename = metadata.dist()
    tmp_archives = []
    final_outputs = []
    ext = ".tar.bz2"
    if output.get("type") == "conda_v2" or metadata.config.conda_pkg_format == "2":
        ext = ".conda"

    with TemporaryDirectory() as tmp:
        conda_package_handling.api.create(metadata.config.host_prefix,
                                          files,
                                          basename + ext,
                                          out_folder=tmp)
        tmp_archives = [os.path.join(tmp, basename + ext)]

        # we're done building, perform some checks
        for tmp_path in tmp_archives:
            #     if tmp_path.endswith('.tar.bz2'):
            #         tarcheck.check_all(tmp_path, metadata.config)
            output_filename = os.path.basename(tmp_path)

            #     # we do the import here because we want to respect logger level context
            #     try:
            #         from conda_verify.verify import Verify
            #     except ImportError:
            #         Verify = None
            #         log.warn("Importing conda-verify failed.  Please be sure to test your packages.  "
            #             "conda install conda-verify to make this message go away.")
            #     if getattr(metadata.config, "verify", False) and Verify:
            #         verifier = Verify()
            #         checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) +
            #                             metadata.ignore_verify_codes())
            #         try:
            #             verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore,
            #                                     exit_on_error=metadata.config.exit_on_verify_error)
            #         except KeyError as e:
            #             log.warn("Package doesn't have necessary files.  It might be too old to inspect."
            #                      "Legacy noarch packages are known to fail.  Full message was {}".format(e))
            try:
                crossed_subdir = metadata.config.target_subdir
            except AttributeError:
                crossed_subdir = metadata.config.host_subdir
            subdir = ("noarch" if (metadata.noarch or metadata.noarch_python)
                      else crossed_subdir)
            if metadata.config.output_folder:
                output_folder = os.path.join(metadata.config.output_folder,
                                             subdir)
            else:
                output_folder = os.path.join(
                    os.path.dirname(metadata.config.bldpkgs_dir), subdir)
            final_output = os.path.join(output_folder, output_filename)
            if os.path.isfile(final_output):
                utils.rm_rf(final_output)

            # disable locking here. It's just a temp folder getting locked.
            # Having it proved a major bottleneck.
            utils.copy_into(tmp_path,
                            final_output,
                            metadata.config.timeout,
                            locking=False)
            final_outputs.append(final_output)

    update_index(os.path.dirname(output_folder),
                 verbose=metadata.config.debug,
                 threads=1)

    # clean out host prefix so that this output's files don't interfere with other outputs
    # We have a backup of how things were before any output scripts ran.  That's
    # restored elsewhere.
    if metadata.config.keep_old_work:
        prefix = metadata.config.host_prefix
        dest = os.path.join(
            os.path.dirname(prefix),
            "_".join(("_h_env_moved", metadata.dist(),
                      metadata.config.host_subdir)),
        )
        console.print("Renaming host env directory, ", prefix, " to ", dest)
        if os.path.exists(dest):
            utils.rm_rf(dest)
        shutil.move(prefix, dest)
    else:
        utils.rm_rf(metadata.config.host_prefix)

    return final_outputs
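
bundle_conda delegates archive creation to conda_package_handling, which writes either the classic .tar.bz2 format or the newer .conda format depending on the extension of the output name, as the ext logic above shows. A minimal usage sketch of the same call with placeholder paths and file names:

import conda_package_handling.api

prefix = "/tmp/my-host-prefix"                  # environment to package (placeholder)
files = ["lib/libfoo.so", "info/index.json"]    # relative paths to include

# the extension selects the format: '.tar.bz2' -> v1, '.conda' -> v2
conda_package_handling.api.create(prefix, files, "mypkg-1.0-0.conda",
                                  out_folder="/tmp/out")
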
Esempio n. 43
0
def build(m, get_src=True, verbose=True, post=None):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''
    if post in [False, None]:
        rm_rf(prefix)

        print("BUILD START:", m.dist())
        create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose)

        if get_src:
            source.provide(m.path, m.get_section('source'))
        assert isdir(source.WORK_DIR)
        if os.listdir(source.get_dir()):
            print("source tree in:", source.get_dir())
        else:
            print("no source")

        rm_rf(info_dir)
        files1 = prefix_files()
        if post is False:
            # Save this for later
            with open(join(source.WORK_DIR, 'prefix_files'), 'w') as f:
                json.dump(list(files1), f)

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')
            if exists(build_file):
                script = m.get_value('build/script', None)
                if script:
                    if isinstance(script, list):
                        script = '\n'.join(script)
                    with open(build_file, 'w', encoding='utf-8') as bf:
                        bf.write(script)
                    os.chmod(build_file, 0o766)
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=source.get_dir())

    if post in [True, None]:
        if post:
            with open(join(source.WORK_DIR, 'prefix_files')) as f:
                files1 = set(json.load(f))

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        assert not exists(info_dir)
        files2 = prefix_files()

        post_build(sorted(files2 - files1),
              binary_relocation=bool(m.get_value('build/binary_relocation', True)))
        create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Esempio n. 44
0
def build(m, get_src=True, verbose=True, post=None, channel_urls=(),
    override_channels=False, include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
        or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        sys.exit(0)

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
            [ms.spec for ms in m.ms_depends('build')],
            verbose=verbose, channel_urls=channel_urls,
            override_channels=override_channels)

        if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." %
                m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls, override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Glob %s from always_include_files does not match any files" % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f) for f in
            files2 - files1):
            sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.  This error
usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                (tuple(f for f in files2 - files1 if config.meta_dir in
                    join(config.build_prefix, f)),)))
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
Esempio n. 45
0
def _construct_metadata_for_test_from_package(package, config):
    recipe_dir, need_cleanup = utils.get_recipe_abspath(package)
    config.need_cleanup = need_cleanup
    config.recipe_dir = recipe_dir
    hash_input = {}

    info_dir = os.path.normpath(os.path.join(recipe_dir, "info"))
    with open(os.path.join(info_dir, "index.json")) as f:
        package_data = json.load(f)

    if package_data["subdir"] != "noarch":
        config.host_subdir = package_data["subdir"]
    # We may be testing an (old) package built without filename hashing.
    hash_input = os.path.join(info_dir, "hash_input.json")
    if os.path.isfile(hash_input):
        with open(os.path.join(info_dir, "hash_input.json")) as f:
            hash_input = json.load(f)
    else:
        config.filename_hashing = False
        hash_input = {}
    # not actually used as a variant, since metadata will have been finalized.
    #    This is still necessary for computing the hash correctly though
    config.variant = hash_input
    log = utils.get_logger(__name__)

    # get absolute file location
    local_pkg_location = os.path.normpath(os.path.abspath(os.path.dirname(package)))

    # get last part of the path
    last_element = os.path.basename(local_pkg_location)
    is_channel = False
    for platform in ("win-", "linux-", "osx-", "noarch"):
        if last_element.startswith(platform):
            is_channel = True

    if not is_channel:
        log.warn(
            "Copying package to conda-build croot.  No other packages alongside yours will"
            " be available unless you specify -c local.  To avoid this warning, your package "
            "must reside in a channel structure with platform-subfolders.  See more info on "
            "what a valid channel is at "
            "https://conda.io/docs/user-guide/tasks/create-custom-channels.html"
        )

        local_dir = os.path.join(config.croot, config.host_subdir)
        mkdir_p(local_dir)
        local_pkg_location = os.path.join(local_dir, os.path.basename(package))
        utils.copy_into(package, local_pkg_location)
        local_pkg_location = local_dir

    local_channel = os.path.dirname(local_pkg_location)

    # update indices in the channel
    update_index(local_channel, verbose=config.debug, threads=1)

    try:
        # raise IOError()
        # metadata = render_recipe(
        #     os.path.join(info_dir, "recipe"), config=config, reset_build_id=False
        # )[0][0]

        metadata = get_metadata(os.path.join(info_dir, "recipe", "recipe.yaml"), config)
        # with open(os.path.join(info_dir, "recipe", "recipe.yaml")) as fi:
        # metadata = yaml.load(fi)
    # no recipe in package.  Fudge metadata
    except SystemExit:
        # force the build string to line up - recomputing it would
        #    yield a different result
        metadata = MetaData.fromdict(
            {
                "package": {
                    "name": package_data["name"],
                    "version": package_data["version"],
                },
                "build": {
                    "number": int(package_data["build_number"]),
                    "string": package_data["build"],
                },
                "requirements": {"run": package_data["depends"]},
            },
            config=config,
        )
    # HACK: because the recipe is fully baked, detecting "used" variables no longer works.  The set
    #     of variables in the hash_input suffices, though.

    if metadata.noarch:
        metadata.config.variant["target_platform"] = "noarch"

    metadata.config.used_vars = list(hash_input.keys())
    urls = list(utils.ensure_list(metadata.config.channel_urls))
    local_path = url_path(local_channel)
    # replace local with the appropriate real channel.  Order is maintained.
    urls = [url if url != "local" else local_path for url in urls]
    if local_path not in urls:
        urls.insert(0, local_path)
    metadata.config.channel_urls = urls
    utils.rm_rf(metadata.config.test_dir)
    return metadata, hash_input
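
_construct_metadata_for_test_from_package decides whether a package already lives in a channel by looking at the name of its parent directory: a valid channel stores packages in platform subfolders such as linux-64 or noarch. A compact sketch of that check:

import os

def in_channel_layout(package_path):
    # the parent dir must be a platform subfolder for this to count as a channel
    last = os.path.basename(os.path.dirname(os.path.abspath(package_path)))
    return last.startswith(('win-', 'linux-', 'osx-', 'noarch'))

print(in_channel_layout('/tmp/channel/linux-64/foo-1.0-0.tar.bz2'))  # True
print(in_channel_layout('/tmp/downloads/foo-1.0-0.tar.bz2'))         # False
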
Esempio n. 46
0
def build(m,
          post=None,
          include_recipe=True,
          keep_old_work=False,
          need_source_download=True,
          verbose=True,
          dirty=False,
          activate=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):

        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [
                name for name in os.listdir(source.WORK_DIR)
                if os.path.isdir(os.path.join(source.WORK_DIR, name))
            ]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s" %
                      (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub),
                                old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix,
                       [ms.spec for ms in m.ms_depends('build')])

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                m, need_source_download = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    verbose=verbose,
                    dirty=dirty)
                assert not need_source_download, "Source download failed.  Please investigate."

            if m.name() in [
                    i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
            ]:
                print("%s is installed as a build dependency. Removing." %
                      m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()],
                                              index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            assert isdir(source.WORK_DIR)
            src_dir = source.get_dir()
            contents = os.listdir(src_dir)
            if contents:
                print("source tree in:", src_dir)
            else:
                print("no source")

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    sys.exit(
                        "Error: Glob %s from always_include_files does not match any files"
                        % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if on_win:
                build_file = join(m.path, 'bld.bat')
                if script:
                    build_file = join(source.get_dir(), 'bld.bat')
                    with open(build_file, 'w') as bf:
                        bf.write(script)
                import conda_build.windows as windows
                windows.build(m, build_file, dirty=dirty, activate=activate)
            else:
                build_file = join(m.path, 'build.sh')

                # There is no sense in trying to run an empty build script.
                if isfile(build_file) or script:
                    env = environ.get_dict(m, dirty=dirty)
                    work_file = join(source.get_dir(), 'conda_build.sh')
                    if script:
                        with open(work_file, 'w') as bf:
                            bf.write(script)
                    if activate:
                        if isfile(build_file):
                            data = open(build_file).read()
                        else:
                            data = open(work_file).read()
                        with open(work_file, 'w') as bf:
                            bf.write("source activate {build_prefix}\n".format(
                                build_prefix=config.build_prefix))
                            bf.write(data)
                    else:
                        if not isfile(work_file):
                            shutil.copy(build_file, work_file)
                    os.chmod(work_file, 0o766)

                    if isfile(work_file):
                        cmd = [shell_path, '-x', '-e', work_file]

                        _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(
                             m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f)
                   for f in files2 - files1):
                sys.exit(
                    indent(
                        """Error: Untracked file(s) %s found in conda-meta directory.
    This error usually comes from using conda in the build script.  Avoid doing this, as it
    can lead to packages that include their dependencies.""" %
                        (tuple(f for f in files2 - files1 if config.meta_dir in
                               join(config.build_prefix, f)), )))
            post_build(m, sorted(files2 - files1))
            create_info_files(m,
                              sorted(files2 - files1),
                              include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # empty files go to the back of the sort order via a large key (100000)
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s" %
                  (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print(
                        "Not restoring old source directory %s over new build's version"
                        % (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub),
                                source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
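
The --keep-old-work handling brackets the build: before it starts, the work directory's subdirectories are moved to a temporary location, and afterwards they are moved back unless the new build recreated them. A condensed sketch of that bracket, where work_dir is a placeholder path:

import os
import shutil
import tempfile

def backup_work(work_dir):
    backup = tempfile.mkdtemp()
    subs = [d for d in os.listdir(work_dir)
            if os.path.isdir(os.path.join(work_dir, d))]
    for sub in subs:
        shutil.move(os.path.join(work_dir, sub), backup)
    return backup, subs

def restore_work(work_dir, backup, subs):
    for sub in subs:
        if not os.path.exists(os.path.join(work_dir, sub)):
            # don't clobber anything the new build produced
            shutil.move(os.path.join(backup, sub), work_dir)
    shutil.rmtree(backup, ignore_errors=True)
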
Esempio n. 47
0
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    if on_win:
        # needs to happen before any c extensions are imported that might be
        # hard-linked by files in the trash. one of those is markupsafe, used
        # by jinja2. see https://github.com/conda/conda-build/pull/520
        assert "markupsafe" not in sys.modules
        delete_trash(None)

    conda_version = {"python": "CONDA_PY", "numpy": "CONDA_NPY", "perl": "CONDA_PERL", "R": "CONDA_R"}

    for lang in ["python", "numpy", "perl", "R"]:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ["all"]:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ("python", "numpy"):
                version = int(version.replace(".", ""))
            setattr(config, conda_version[lang], version)
        if len(str(version)) not in (2, 3) and lang in ["python", "numpy"]:
            if all_versions[lang]:
                raise RuntimeError(
                    "%s must be major.minor, like %s, not %s"
                    % (conda_version[lang], all_versions[lang][-1] / 10, version)
                )
            else:
                raise RuntimeError("%s must be major.minor, not %s" % (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(
            clear_cache=True, channel_urls=channel_urls, override_channels=args.override_channels
        )

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or "utf-8")
            if isfile(arg):
                if arg.endswith((".tar", ".tar.gz", ".tgz", ".tar.bz2")):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, "r:*")
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value("build/noarch_python"):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(
                    m, verbose=not args.quiet, channel_urls=channel_urls, override_channels=args.override_channels
                )
            elif args.source:
                source.provide(m.path, m.get_section("source"))
                print("Source tree in:", source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    if m.skip():
                        print("Skipped: The %s recipe defines build/skip for this configuration." % m.dist())
                        continue
                    build.build(
                        m,
                        verbose=not args.quiet,
                        post=post,
                        channel_urls=channel_urls,
                        override_channels=args.override_channels,
                        include_recipe=args.include_recipe,
                    )
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith("No packages found") or error_str.startswith("Could not find some"):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(": ")[1]
                        # Handle package names that contain version deps.
                        if " " in dep_pkg:
                            dep_pkg = dep_pkg.split(" ")[0]
                        recipe_glob = glob(dep_pkg + "-[v0-9][0-9.]*")
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(
                                    (
                                        "Missing dependency {0}, but found"
                                        + " recipe directory, so building "
                                        + "{0} first"
                                    ).format(dep_pkg)
                                )
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith("- ")]
                        pkgs = [line.lstrip("- ") for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't build for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ["python", "r"]
                        pkgs = [pkg for pkg in pkgs if pkg.split(" ")[0] not in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if " " in pkg:
                                pkg = pkg.split(" ")[0]
                            recipe_glob = glob(pkg + "-[v0-9][0-9.]*")
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(
                                        (
                                            "Missing dependency {0}, but found"
                                            + " recipe directory, so building "
                                            + "{0} first"
                                        ).format(pkg)
                                    )
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(
                        m, verbose=not args.quiet, channel_urls=channel_urls, override_channels=args.override_channels
                    )
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
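
The deque-based loop above is what makes the build recursive: when a build fails with a missing dependency for which a local recipe directory exists, the current recipe is pushed back onto the queue behind the dependency's recipe, and the `to_build_recursive` list stops the same dependency from being retried forever. A minimal sketch of that pattern, where build_one and find_recipe_for are hypothetical stand-ins for build.build() and the recipe-glob lookup:

# Minimal sketch of the retry pattern above; build_one() and
# find_recipe_for() are hypothetical stand-ins.
from collections import deque

def build_all(recipe_dirs, build_one, find_recipe_for):
    attempted = set()              # guards against dependency cycles
    queue = deque(recipe_dirs)
    while queue:
        recipe = queue.popleft()
        try:
            build_one(recipe)
        except RuntimeError as exc:
            dep = str(exc)         # assume the message names the missing package
            dep_recipe = find_recipe_for(dep)
            if dep_recipe is None or dep in attempted:
                raise              # no local recipe, or already tried once
            attempted.add(dep)
            queue.appendleft(recipe)      # retry this recipe afterwards...
            queue.appendleft(dep_recipe)  # ...but build the dependency first
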
Example 48
def main(config=None):
    print(banner)

    parser = argparse.ArgumentParser(
        description="Boa, the fast, mamba powered-build tool for conda packages."
    )
    subparsers = parser.add_subparsers(help="sub-command help", dest="command")
    parent_parser = argparse.ArgumentParser(add_help=False)
    parent_parser.add_argument("recipe_dir", type=str)

    render_parser = subparsers.add_parser(
        "render", parents=[parent_parser], help="render a recipe"
    )
    convert_parser = subparsers.add_parser(
        "convert", parents=[parent_parser], help="convert recipe.yaml to old-style meta.yaml"
    )
    build_parser = subparsers.add_parser(
        "build", parents=[parent_parser], help="build a recipe"
    )
    args = parser.parse_args()

    command = args.command

    if command == 'convert':
        from boa.cli import convert
        convert.main(args.recipe_dir)
        exit()

    folder = args.recipe_dir
    cbc, config = get_config(folder)

    if not os.path.exists(config.output_folder):
        mkdir_p(config.output_folder)
    print(f"Updating build index: {config.output_folder}\n")
    update_index(config.output_folder, verbose=config.debug, threads=1)

    recipe_path = os.path.join(folder, "recipe.yaml")

    # step 1: parse YAML
    with open(recipe_path) as fi:
        loader = YAML(typ="safe")
        ydoc = loader.load(fi)

    # step 2: fill out context dict
    context_dict = ydoc.get("context") or {}
    jenv = jinja2.Environment()
    for key, value in context_dict.items():
        if isinstance(value, str):
            tmpl = jenv.from_string(value)
            context_dict[key] = tmpl.render(context_dict)

    if ydoc.get("context"):
        del ydoc["context"]

    # step 3: recursively loop over the entire recipe and render jinja with context
    jenv.globals.update(jinja_functions(config, context_dict))
    for key in ydoc:
        render_recursive(ydoc[key], context_dict, jenv)

    flatten_selectors(ydoc, ns_cfg(config))
    normalize_recipe(ydoc)

    # pprint(ydoc)
    # We need to assemble the variants for each output
    variants = {}
    # if we have an outputs section, use it to order the outputs
    if ydoc.get("outputs"):
        for o in ydoc["outputs"]:
            # inherit from global package
            pkg_meta = {}
            pkg_meta.update(ydoc["package"])
            pkg_meta.update(o["package"])
            o["package"] = pkg_meta

            build_meta = {}
            build_meta.update(ydoc.get("build") or {})
            build_meta.update(o.get("build") or {})
            o["build"] = build_meta
            variants[o["package"]["name"]] = get_dependency_variants(
                o.get("requirements", {}), cbc, config
            )
    else:
        # we only have one output
        variants[ydoc["package"]["name"]] = get_dependency_variants(
            ydoc.get("requirements", {}), cbc, config
        )

    # this takes in all variants and outputs, builds a dependency tree and returns
    # the final metadata
    sorted_outputs = to_build_tree(ydoc, variants, config)

    # then we need to solve and build from the bottom up
    # we can't first solve all packages without finalizing everything
    #
    # FLOW:
    # =====
    # - solve the package
    #   - solve build, collect weak run exports
    # - add run exports from deps!

    print('\n')
    if command == "render":
        for o in sorted_outputs:
            print(o)
        exit()


    # TODO this should be done more cleanly
    top_name = ydoc['package']['name']
    o0 = sorted_outputs[0]
    o0.is_first = True
    o0.config.compute_build_id(top_name)

    solver = MambaSolver(["conda-forge"], context.subdir)
    print("\n")

    download_source(MetaData(recipe_path, o0))
    cached_source = o0.sections['source']

    for o in sorted_outputs:
        solver.replace_channels()
        o.finalize_solve(sorted_outputs, solver)
        # print(o)

        o.config._build_id = o0.config.build_id

        if "build" in o.transactions:
            if isdir(o.config.build_prefix):
                utils.rm_rf(o.config.build_prefix)
            mkdir_p(o.config.build_prefix)
            o.transactions['build'].execute(PrefixData(o.config.build_prefix), PackageCacheData.first_writable().pkgs_dir)

        if "host" in o.transactions:
            mkdir_p(o.config.host_prefix)
            o.transactions['host'].execute(PrefixData(o.config.host_prefix), PackageCacheData.first_writable().pkgs_dir)

        meta = MetaData(recipe_path, o)
        o.final_build_id = meta.build_id()

        if cached_source != o.sections['source']:
            download_source(meta)

        build(meta, None)

    for o in sorted_outputs:
        print("\n")
        print(o)
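
Step 2 of the pipeline above renders every string in the recipe's context section through Jinja2, using the context itself as the variable namespace, so later keys can reference earlier ones. A self-contained sketch of just that step (the tarball key here is a made-up example, not from the recipe above):

# Sketch of the context-rendering step: each string value is rendered
# with the (partially rendered) context as its variables.
import jinja2

context = {
    "name": "mypkg",
    "version": "1.2",
    "tarball": "{{ name }}-{{ version }}.tar.gz",  # hypothetical key
}
jenv = jinja2.Environment()
for key, value in context.items():
    if isinstance(value, str):
        # dict order is preserved, so later keys see earlier results
        context[key] = jenv.from_string(value).render(context)

print(context["tarball"])  # mypkg-1.2.tar.gz
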
Example 49
def create_env(prefix, specs, clear_cache=True, debug=False):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        # This squelches a ton of conda output that is not hugely relevant
        logging.getLogger("conda").setLevel(logging.WARN)
        logging.getLogger("binstar").setLevel(logging.WARN)
        logging.getLogger("install").setLevel(logging.ERROR)
        logging.getLogger("conda.install").setLevel(logging.ERROR)
        logging.getLogger("fetch").setLevel(logging.WARN)
        logging.getLogger("print").setLevel(logging.WARN)
        logging.getLogger("progress").setLevel(logging.WARN)
        logging.getLogger("dotupdate").setLevel(logging.WARN)
        logging.getLogger("stdoutlog").setLevel(logging.WARN)
        logging.getLogger("requests").setLevel(logging.WARN)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    for d in config.bldpkgs_dirs:
        if not isdir(d):
            os.makedirs(d)
        update_index(d)
    if specs:  # Don't waste time if there is nothing to do

        # FIXME: stupid hack to put test prefix on PATH so that runtime libs can be found
        old_path = os.environ['PATH']
        os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix,
                                              True)['PATH']

        index = get_build_index(clear_cache=True)

        warn_on_old_conda_build(index)

        cc.pkgs_dirs = cc.pkgs_dirs[:1]
        actions = plan.install_actions(prefix, index, specs)
        plan.display_actions(actions, index)

        try:
            plan.execute_actions(actions, index, verbose=debug)
        except SystemExit as exc:
            if "too short in" in str(exc) and config.prefix_length > 80:
                log.warn("Build prefix failed with prefix length {0}.".format(
                    config.prefix_length))
                log.warn("Error was: ")
                log.warn(str(exc))
                log.warn(
                    "One or more of your package dependencies needs to be rebuilt with a "
                    "longer prefix length.")
                log.warn(
                    "Falling back to legacy prefix length of 80 characters.")
                log.warn(
                    "Your package will not install into prefixes longer than 80 characters."
                )
                config.prefix_length = 80
                create_env(prefix, specs, clear_cache=clear_cache, debug=debug)

        os.environ['PATH'] = old_path

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
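
The FIXME hack above saves PATH, prepends the prefix's bin directory, and restores the old value at the end of the block. Wrapped as a context manager, the restore cannot be skipped by an early exception; this is presumably what the path_prepended helper used in the later examples does. A sketch, assuming prepend_bin_path has the same contract as in the code above:

import os
from contextlib import contextmanager

@contextmanager
def path_prepended(prefix, prepend_bin_path):
    old_path = os.environ['PATH']
    os.environ['PATH'] = prepend_bin_path(os.environ.copy(), prefix, True)['PATH']
    try:
        yield
    finally:
        os.environ['PATH'] = old_path   # always restored, even on error
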
Example 50
def test_index_noarch_osx64_1():
    with tempdir() as base_location:
        test_package_path = join(base_location, 'osx-64', 'conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2')
        test_package_url = 'https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2'
        download(test_package_url, test_package_path)

        test_package_path = join(base_location, 'noarch', 'conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2')
        test_package_url = 'https://conda.anaconda.org/conda-test/noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2'
        download(test_package_url, test_package_path)

        update_index(base_location, channel_name='test-channel')

        # #######################################
        # tests for osx-64 subdir
        # #######################################
        assert isfile(join(base_location, 'osx-64', 'index.html'))
        assert isfile(join(base_location, 'osx-64', 'repodata.json'))  # repodata is tested in test_index_on_single_subdir_1
        assert isfile(join(base_location, 'osx-64', 'repodata.json.bz2'))

        # #######################################
        # tests for noarch subdir
        # #######################################
        assert isfile(join(base_location, 'noarch', 'index.html'))
        assert isfile(join(base_location, 'noarch', 'repodata.json'))
        assert isfile(join(base_location, 'noarch', 'repodata.json.bz2'))

        with open(join(base_location, 'noarch', 'repodata.json')) as fh:
            actual_repodata_json = json.loads(fh.read())
        expected_repodata_json = {
            "info": {
                'subdir': 'noarch',
            },
            "packages": {
                "conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2": {
                    "build": "pyhed9eced_1",
                    "build_number": 1,
                    "depends": [
                        "python"
                    ],
                    "license": "BSD",
                    "md5": "56b5f6b7fb5583bccfc4489e7c657484",
                    "name": "conda-index-pkg-a",
                    "noarch": "python",
                    "sha256": "7430743bffd4ac63aa063ae8518e668eac269c783374b589d8078bee5ed4cbc6",
                    "size": 7882,
                    "subdir": "noarch",
                    "timestamp": 1508520204768,
                    "version": "1.0",
                },
            },
        }
        assert actual_repodata_json == expected_repodata_json

        # #######################################
        # tests for full channel
        # #######################################

        with open(join(base_location, 'channeldata.json')) as fh:
            actual_channeldata_json = json.loads(fh.read())
        expected_channeldata_json = {
            "channeldata_version": 1,
            "packages": {
                "conda-index-pkg-a": {
                    "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. "
                                   "Python is a widely used high-level, general-purpose, interpreted, dynamic "
                                   "programming language. Its design philosophy emphasizes code "
                                   "readability, and its syntax allows programmers to express concepts in "
                                   "fewer lines of code than would be possible in languages such as C++ or "
                                   "Java. The language provides constructs intended to enable clear programs "
                                   "on both a small and large scale.",
                    "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml",
                    "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md",
                    "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a",
                    "home": "https://anaconda.org/conda-test/conda-index-pkg-a",
                    "license": "BSD",
                    "reference_package": "noarch/conda-index-pkg-a-1.0-pyhed9eced_1.tar.bz2",
                    "source_git_rev": "master",
                    "source_git_url": "https://github.com/kalefranz/conda-test-packages.git",
                    "subdirs": [
                        "noarch",
                        "osx-64",
                    ],
                    "summary": "Summary field for conda-index-pkg-a. This is the python noarch version.",  # <- tests that the higher noarch build number is the data collected
                    "version": "1.0"
                }
            },
            "subdirs": [
                "noarch",
                "osx-64",
            ]
        }
        assert actual_channeldata_json == expected_channeldata_json
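
The assertions above compare whole JSON documents. For quick manual checks of a freshly indexed channel, a smaller invariant is that every filename in repodata.json matches its record's name/version/build fields. A rough sketch (check_repodata is a hypothetical helper; it assumes the classic <name>-<version>-<build>.tar.bz2 naming scheme):

import json
from os.path import join

def check_repodata(channel_root, subdir):
    with open(join(channel_root, subdir, 'repodata.json')) as fh:
        repodata = json.load(fh)
    assert repodata['info']['subdir'] == subdir
    for fn, rec in repodata['packages'].items():
        # filenames follow <name>-<version>-<build>.tar.bz2
        assert fn == '%s-%s-%s.tar.bz2' % (rec['name'], rec['version'], rec['build'])
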
Example 51
def build(m,
          get_src=True,
          verbose=True,
          post=None,
          channel_urls=(),
          override_channels=False,
          include_recipe=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    '''

    if (m.get_value('build/detect_binary_files_with_prefix')
            or m.binary_has_prefix_files()):
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build environment")
        if on_win:
            if isdir(config.short_build_prefix):
                move_to_trash(config.short_build_prefix, '')
            if isdir(config.long_build_prefix):
                move_to_trash(config.long_build_prefix, '')
        else:
            rm_rf(config.short_build_prefix)
            rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        if on_win:
            if isdir(source.WORK_DIR):
                move_to_trash(source.WORK_DIR, '')
        else:
            rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(config.build_prefix,
                   [ms.spec for ms in m.ms_depends('build')],
                   verbose=verbose,
                   channel_urls=channel_urls,
                   override_channels=override_channels)

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(clear_cache=False,
                                    channel_urls=channel_urls,
                                    override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section('source'))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for pat in m.always_include_files():
            has_matches = False
            for f in set(files1):
                if fnmatch.fnmatch(f, pat):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit(
                    "Error: Glob %s from always_include_files does not match any files"
                    % pat)
        # Save this for later
        with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
            f.write(u'\n'.join(sorted(list(files1))))
            f.write(u'\n')

        if sys.platform == 'win32':
            import conda_build.windows as windows
            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, 'build.sh')

            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                build_file = join(source.get_dir(), 'conda_build.sh')
                with open(build_file, 'w') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if isfile(build_file):
                cmd = ['/bin/bash', '-x', '-e', build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value('build/entry_points'))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1),
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.  This error
usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" %
                    (tuple(f
                           for f in files2 - files1 if config.meta_dir in join(
                               config.build_prefix, f)), )))
        post_build(m, sorted(files2 - files1))
        create_info_files(m,
                          sorted(files2 - files1),
                          include_recipe=bool(m.path) and include_recipe)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, 'w:bz2')
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
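
Everything build() packages is determined by set arithmetic on prefix snapshots: files1 is taken before the build script runs, files2/files3 afterwards, and the differences are exactly what the new package owns. The same idea in isolation, with snapshot() as a simplified stand-in for conda-build's prefix_files():

import os

def snapshot(prefix):
    """Return the set of prefix-relative file paths under prefix."""
    files = set()
    for root, _, names in os.walk(prefix):
        for name in names:
            files.add(os.path.relpath(os.path.join(root, name), prefix))
    return files

# before = snapshot(build_prefix)
# ... run the build script ...
# new_files = snapshot(build_prefix) - before   # what the package owns
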
Example 52
def test_index_on_single_subdir_1():
    with tempdir() as base_location:
        test_package_path = join(base_location, 'osx-64', 'conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2')
        test_package_url = 'https://conda.anaconda.org/conda-test/osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2'
        download(test_package_url, test_package_path)

        update_index(base_location, channel_name='test-channel')

        # #######################################
        # tests for osx-64 subdir
        # #######################################
        assert isfile(join(base_location, 'osx-64', 'index.html'))
        assert isfile(join(base_location, 'osx-64', 'repodata.json.bz2'))

        with open(join(base_location, 'osx-64', 'repodata.json')) as fh:
            actual_repodata_json = json.loads(fh.read())
        expected_repodata_json = {
            "info": {
                'subdir': 'osx-64',
            },
            "packages": {
                "conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2": {
                    "build": "py27h5e241af_0",
                    "build_number": 0,
                    "depends": [
                        "python >=2.7,<2.8.0a0"
                    ],
                    "license": "BSD",
                    "md5": "37861df8111170f5eed4bff27868df59",
                    "name": "conda-index-pkg-a",
                    "sha256": "459f3e9b2178fa33bdc4e6267326405329d1c1ab982273d9a1c0a5084a1ddc30",
                    "size": 8733,
                    "subdir": "osx-64",
                    "timestamp": 1508520039632,
                    "version": "1.0",
                },
            },
        }
        assert actual_repodata_json == expected_repodata_json

        # #######################################
        # tests for full channel
        # #######################################

        with open(join(base_location, 'channeldata.json')) as fh:
            actual_channeldata_json = json.loads(fh.read())
        expected_channeldata_json = {
            "channeldata_version": 1,
            "packages": {
                "conda-index-pkg-a": {
                    "description": "Description field for conda-index-pkg-a. Actually, this is just the python description. "
                                   "Python is a widely used high-level, general-purpose, interpreted, dynamic "
                                   "programming language. Its design philosophy emphasizes code "
                                   "readability, and its syntax allows programmers to express concepts in "
                                   "fewer lines of code than would be possible in languages such as C++ or "
                                   "Java. The language provides constructs intended to enable clear programs "
                                   "on both a small and large scale.",
                    "dev_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/meta.yaml",
                    "doc_source_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a/README.md",
                    "doc_url": "https://github.com/kalefranz/conda-test-packages/blob/master/conda-index-pkg-a",
                    "home": "https://anaconda.org/conda-test/conda-index-pkg-a",
                    "license": "BSD",
                    "reference_package": "osx-64/conda-index-pkg-a-1.0-py27h5e241af_0.tar.bz2",
                    "source_git_rev": "master",
                    "source_git_url": "https://github.com/kalefranz/conda-test-packages.git",
                    "subdirs": [
                        "osx-64",
                    ],
                    "summary": "Summary field for conda-index-pkg-a",
                    "version": "1.0"
                }
            },
            "subdirs": [
                "osx-64"
            ]
        }
        assert actual_channeldata_json == expected_channeldata_json
Example 53
def build(m, get_src=True, verbose=True, post=None, channel_urls=(), override_channels=False):
    """
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    """

    if m.get_value("build/detect_binary_files_with_prefix") or m.binary_has_prefix_files():
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if post in [False, None]:
        print("Removing old build directory")
        rm_rf(config.short_build_prefix)
        rm_rf(config.long_build_prefix)
        print("Removing old work directory")
        rm_rf(source.WORK_DIR)

        # Display the name only
        # Version number could be missing due to dependency on source info.
        print("BUILD START:", m.dist())
        create_env(
            config.build_prefix,
            [ms.spec for ms in m.ms_depends("build")],
            verbose=verbose,
            channel_urls=channel_urls,
            override_channels=override_channels,
        )

        if m.name() in [i.rsplit("-", 2)[0] for i in linked(config.build_prefix)]:
            print("%s is installed as a build dependency. Removing." % m.name())
            index = get_build_index(clear_cache=False, channel_urls=channel_urls, override_channels=override_channels)
            actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        if get_src:
            source.provide(m.path, m.get_section("source"))
            # Parse our metadata again because we did not initialize the source
            # information before.
            m.parse_again()

        print("Package:", m.dist())

        assert isdir(source.WORK_DIR)
        src_dir = source.get_dir()
        contents = os.listdir(src_dir)
        if contents:
            print("source tree in:", src_dir)
        else:
            print("no source")

        rm_rf(config.info_dir)
        files1 = prefix_files()
        for rx in m.always_include_files():
            pat = re.compile(rx)
            has_matches = False
            for f in set(files1):
                if pat.match(f):
                    print("Including in package existing file", f)
                    files1.discard(f)
                    has_matches = True
            if not has_matches:
                sys.exit("Error: Regex %s from always_include_files does not match any files" % rx)
        # Save this for later
        with open(join(config.croot, "prefix_files.txt"), "w") as f:
            f.write(u"\n".join(sorted(list(files1))))
            f.write(u"\n")

        if sys.platform == "win32":
            import conda_build.windows as windows

            windows.build(m)
        else:
            env = environ.get_dict(m)
            build_file = join(m.path, "build.sh")

            script = m.get_value("build/script", None)
            if script:
                if isinstance(script, list):
                    script = "\n".join(script)
                build_file = join(source.get_dir(), "conda_build.sh")
                with open(build_file, "w") as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)

            if exists(build_file):
                cmd = ["/bin/bash", "-x", "-e", build_file]

                _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, "prefix_files.txt"), "r") as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m)
        create_post_scripts(m)
        create_entry_points(m.get_value("build/entry_points"))
        assert not exists(config.info_dir)
        files2 = prefix_files()

        post_process(sorted(files2 - files1), preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files()
        assert not any(config.meta_dir in join(config.build_prefix, f) for f in files2 - files1)
        post_build(m, sorted(files2 - files1))
        create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
        if m.get_value("build/noarch_python"):
            import conda_build.noarch_python as noarch_python

            noarch_python.transform(m, sorted(files2 - files1))

        files3 = prefix_files()
        fix_permissions(files3 - files1)

        path = bldpkg_path(m)
        t = tarfile.open(path, "w:bz2")
        for f in sorted(files3 - files1):
            t.add(join(config.build_prefix, f), f)
        t.close()

        print("BUILD END:", m.dist())

        # we're done building, perform some checks
        tarcheck.check_all(path)
        update_index(config.bldpkgs_dir)
    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())
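
The final packaging step above reduces to writing the newly added files into a bzip2-compressed tarball under their prefix-relative names. As a standalone helper (write_package is a hypothetical name for illustration):

import tarfile
from os.path import join

def write_package(path, prefix, new_files):
    with tarfile.open(path, 'w:bz2') as t:
        for f in sorted(new_files):
            # store each file under its prefix-relative path
            t.add(join(prefix, f), arcname=f)
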
Example 54
def build(m, post=None, include_recipe=True, keep_old_work=False,
          need_source_download=True, need_reparse_in_env=False,
          verbose=True, dirty=False, activate=True, debug=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :type post: bool or None. None means run the whole build. True means run
    post only. False means stop just before the post.
    :type keep_old_work: bool: Keep any previous work directory.
    :type need_source_download: bool: if rendering failed to download source
    (due to missing tools), retry here after build env is populated
    '''

    if (m.get_value('build/detect_binary_files_with_prefix') or
            m.binary_has_prefix_files()) and not on_win:
        # We must use a long prefix here as the package will only be
        # installable into prefixes shorter than this one.
        config.use_long_build_prefix = True
    else:
        # In case there are multiple builds in the same process
        config.use_long_build_prefix = False

    if m.skip():
        print("Skipped: The %s recipe defines build/skip for this "
              "configuration." % m.dist())
        return

    with Locked(cc.root_dir):

        # If --keep-old-work, then move the contents of source.WORK_DIR to a
        # temporary directory for the duration of the build.
        # The source unpacking procedure is too varied and complex
        # to allow this to be written cleanly (see source.get_dir() for example)
        if keep_old_work:
            old_WORK_DIR = tempfile.mkdtemp()
            old_sub_dirs = [name for name in os.listdir(source.WORK_DIR)
                            if os.path.isdir(os.path.join(source.WORK_DIR, name))]
            if len(old_sub_dirs):
                print("Keeping old work directory backup: %s => %s"
                    % (old_sub_dirs, old_WORK_DIR))
                for old_sub in old_sub_dirs:
                    shutil.move(os.path.join(source.WORK_DIR, old_sub), old_WORK_DIR)

        if post in [False, None]:
            print("Removing old build environment")
            print("BUILD START:", m.dist())
            if need_source_download or need_reparse_in_env:
                print("    (actual version deferred until further download or env creation)")
            if on_win:
                if isdir(config.short_build_prefix):
                    move_to_trash(config.short_build_prefix, '')
                if isdir(config.long_build_prefix):
                    move_to_trash(config.long_build_prefix, '')
            else:
                rm_rf(config.short_build_prefix)
                rm_rf(config.long_build_prefix)

            specs = [ms.spec for ms in m.ms_depends('build')]
            if activate:
                # If we activate the build environment, we need to be sure that we
                #    have the appropriate VCS available in the environment.  People
                #    are not used to explicitly listing it in recipes, though.
                #    We add it for them here, but warn them about it.
                vcs_source = m.uses_vcs_in_build()
                if vcs_source and vcs_source not in specs:
                    vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
                    has_vcs_available = os.path.isfile(external.find_executable(vcs_executable) or
                                                       "")
                    if not has_vcs_available:
                        if (vcs_source != "mercurial" or
                                not any(spec.startswith('python') and "3." in spec
                                        for spec in specs)):
                            specs.append(vcs_source)

                            log.warn("Your recipe depends on {} at build time (for templates), "
                                     "but you have not listed it as a build dependency.  Doing "
                                     "so for this build.".format(vcs_source))
                        else:
                            raise ValueError("Your recipe uses mercurial in build, but mercurial"
                                            " does not yet support Python 3.  Please handle all of "
                                            "your mercurial actions outside of your build script.")
            # Display the name only
            # Version number could be missing due to dependency on source info.
            create_env(config.build_prefix, specs, debug=debug)

            if need_source_download:
                # Execute any commands fetching the source (e.g., git) in the _build environment.
                # This makes it possible to provide source fetchers (eg. git, hg, svn) as build
                # dependencies.
                if not activate:
                    _old_path = os.environ['PATH']
                    os.environ['PATH'] = prepend_bin_path({'PATH': _old_path},
                                                          config.build_prefix)['PATH']
                try:
                    m, need_source_download, need_reparse_in_env = parse_or_try_download(m,
                                                                    no_download_source=False,
                                                                    force_download=True,
                                                                    verbose=verbose,
                                                                    dirty=dirty)
                    assert not need_source_download, "Source download failed.  Please investigate."
                finally:
                    if not activate:
                        os.environ['PATH'] = _old_path
                print("BUILD START:", m.dist())

            if need_reparse_in_env:
                reparse(m)
                print("BUILD START:", m.dist())

            if m.name() in [i.rsplit('-', 2)[0] for i in linked(config.build_prefix)]:
                print("%s is installed as a build dependency. Removing." %
                    m.name())
                index = get_build_index(clear_cache=False)
                actions = plan.remove_actions(config.build_prefix, [m.name()], index=index)
                assert not plan.nothing_to_do(actions), actions
                plan.display_actions(actions, index)
                plan.execute_actions(actions, index)

            print("Package:", m.dist())

            src_dir = source.get_dir()
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files()
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn("Glob %s from always_include_files does not match any files" % pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(source.WORK_DIR):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(source.get_dir(), 'bld.bat')
                        with open(join(source.get_dir(), 'bld.bat'), 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, dirty=dirty, activate=activate)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        env = environ.get_dict(m, dirty=dirty)
                        work_file = join(source.get_dir(), 'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write("source activate {build_prefix}\n".format(
                                    build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                shutil.copy(build_file, work_file)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]

                            _check_call(cmd, env=env, cwd=src_dir)

        if post in [True, None]:
            if post:
                with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                    files1 = set(f.read().splitlines())

            get_build_metadata(m)
            create_post_scripts(m)
            create_entry_points(m.get_value('build/entry_points'))
            assert not exists(config.info_dir)
            files2 = prefix_files()

            post_process(sorted(files2 - files1),
                         preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

            # The post processing may have deleted some files (like easy-install.pth)
            files2 = prefix_files()
            if any(config.meta_dir in join(config.build_prefix, f) for f in
                    files2 - files1):
                sys.exit(indent("""Error: Untracked file(s) %s found in conda-meta directory.
    This error usually comes from using conda in the build script.  Avoid doing this, as it
    can lead to packages that include their dependencies.""" %
                    (tuple(f for f in files2 - files1 if config.meta_dir in
                        join(config.build_prefix, f)),)))
            post_build(m, sorted(files2 - files1))
            create_info_files(m, sorted(files2 - files1),
                            include_recipe=bool(m.path) and include_recipe)
            if m.get_value('build/noarch_python'):
                import conda_build.noarch_python as noarch_python
                noarch_python.transform(m, sorted(files2 - files1))

            files3 = prefix_files()
            fix_permissions(files3 - files1)

            path = bldpkg_path(m)
            t = tarfile.open(path, 'w:bz2')

            def order(f):
                # empty files sort as if they were large, pushing them to the back
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records sort first (0); everything else after (1)
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            print("BUILD END:", m.dist())

            # we're done building, perform some checks
            tarcheck.check_all(path)
            update_index(config.bldpkgs_dir)
        else:
            print("STOPPING BUILD BEFORE POST:", m.dist())

        if keep_old_work and len(old_sub_dirs):
            print("Restoring old work directory backup: %s :: %s => %s"
                % (old_WORK_DIR, old_sub_dirs, source.WORK_DIR))
            for old_sub in old_sub_dirs:
                if os.path.exists(os.path.join(source.WORK_DIR, old_sub)):
                    print("Not restoring old source directory %s over new build's version" %
                          (old_sub))
                else:
                    shutil.move(os.path.join(old_WORK_DIR, old_sub), source.WORK_DIR)
            shutil.rmtree(old_WORK_DIR, ignore_errors=True)
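
The order() key in this example is worth exercising on its own: info/* entries sort first, everything else by increasing size, so the small metadata files sit at the front of the archive and can be read without decompressing the large payloads behind them. A toy run with made-up (path, size) pairs, using a simplified startswith() test in place of the os.path.dirname() check above:

files = [('lib/big.so', 5000000), ('info/index.json', 200), ('bin/tool', 900)]

def order(entry):
    path, size = entry
    info_order = int(not path.startswith('info/'))  # info/* -> 0, rest -> 1
    return info_order, size or 100000               # empty files sort late

print([p for p, _ in sorted(files, key=order)])
# ['info/index.json', 'bin/tool', 'lib/big.so']
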
Example 55
def create_env(prefix, specs, config, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        silence_loggers(show_warnings_and_errors=True)

    if os.path.isdir(prefix):
        rm_rf(prefix)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    if specs:  # Don't waste time if there is nothing to do
        with path_prepended(prefix):
            locks = []
            try:
                cc.pkgs_dirs = cc.pkgs_dirs[:1]
                locked_folders = cc.pkgs_dirs + list(config.bldpkgs_dirs)
                for folder in locked_folders:
                    if not os.path.isdir(folder):
                        os.makedirs(folder)
                    lock = filelock.SoftFileLock(join(folder, '.conda_lock'))
                    if not folder.endswith('pkgs'):
                        update_index(folder, config=config, lock=lock, could_be_mirror=False)
                    lock.acquire(timeout=config.timeout)
                    locks.append(lock)

                index = get_build_index(config=config, clear_cache=True)

                actions = plan.install_actions(prefix, index, specs)
                if config.disable_pip:
                    actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('pip-')]  # noqa
                    actions['LINK'] = [spec for spec in actions['LINK'] if not spec.startswith('setuptools-')]  # noqa
                plan.display_actions(actions, index)
                if on_win:
                    for k, v in os.environ.items():
                        os.environ[k] = str(v)
                plan.execute_actions(actions, index, verbose=config.debug)
            except (SystemExit, PaddingError, LinkError) as exc:
                if (("too short in" in str(exc) or
                        'post-link failed for: openssl' in str(exc) or
                        isinstance(exc, PaddingError)) and
                        config.prefix_length > 80):
                    log.warn("Build prefix failed with prefix length %d", config.prefix_length)
                    log.warn("Error was: ")
                    log.warn(str(exc))
                    log.warn("One or more of your package dependencies needs to be rebuilt "
                            "with a longer prefix length.")
                    log.warn("Falling back to legacy prefix length of 80 characters.")
                    log.warn("Your package will not install into prefixes > 80 characters.")
                    config.prefix_length = 80

                    # Set this here and use to create environ
                    #   Setting this here is important because we use it below (symlink)
                    prefix = config.build_prefix

                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    create_env(prefix, specs, config=config,
                                clear_cache=clear_cache)
                else:
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    raise
            finally:
                for lock in locks:
                    lock.release()
                    if os.path.isfile(lock._lock_file):
                        os.remove(lock._lock_file)
        warn_on_old_conda_build(index=index)

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
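
The lock handling above follows a fixed discipline: one SoftFileLock per directory, acquired with a timeout, and released on every path, success or failure. Reduced to its shape (with_locked_dirs is a hypothetical helper; the filelock package is the same one the example uses):

import os
import filelock

def with_locked_dirs(folders, work, timeout=90):
    locks = []
    try:
        for folder in folders:
            os.makedirs(folder, exist_ok=True)
            lock = filelock.SoftFileLock(os.path.join(folder, '.conda_lock'))
            lock.acquire(timeout=timeout)
            locks.append(lock)
        work()                      # do the indexing/solving while locked
    finally:
        for lock in locks:          # always release, even on error
            lock.release()
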
Example 56
def create_env(prefix, specs, config, clear_cache=True):
    '''
    Create a conda environment for the given prefix and specs.
    '''
    if config.debug:
        logging.getLogger("conda").setLevel(logging.DEBUG)
        logging.getLogger("binstar").setLevel(logging.DEBUG)
        logging.getLogger("install").setLevel(logging.DEBUG)
        logging.getLogger("conda.install").setLevel(logging.DEBUG)
        logging.getLogger("fetch").setLevel(logging.DEBUG)
        logging.getLogger("print").setLevel(logging.DEBUG)
        logging.getLogger("progress").setLevel(logging.DEBUG)
        logging.getLogger("dotupdate").setLevel(logging.DEBUG)
        logging.getLogger("stdoutlog").setLevel(logging.DEBUG)
        logging.getLogger("requests").setLevel(logging.DEBUG)
    else:
        silence_loggers(show_warnings_and_errors=True)

    if os.path.isdir(prefix):
        rm_rf(prefix)

    specs = list(specs)
    for feature, value in feature_list:
        if value:
            specs.append('%s@' % feature)

    if specs:  # Don't waste time if there is nothing to do
        with path_prepended(prefix):
            locks = []
            try:
                cc.pkgs_dirs = cc.pkgs_dirs[:1]
                locked_folders = cc.pkgs_dirs + list(config.bldpkgs_dirs)
                for folder in locked_folders:
                    if not os.path.isdir(folder):
                        os.makedirs(folder)
                    lock = filelock.SoftFileLock(join(folder, '.conda_lock'))
                    update_index(folder, config=config, lock=lock)
                    locks.append(lock)
                for lock in locks:
                    lock.acquire(timeout=config.timeout)

                index = get_build_index(config=config, clear_cache=True)

                actions = plan.install_actions(prefix, index, specs)
                if config.disable_pip:
                    actions['LINK'] = [
                        spec for spec in actions['LINK']
                        if not spec.startswith('pip-')
                    ]  # noqa
                    actions['LINK'] = [
                        spec for spec in actions['LINK']
                        if not spec.startswith('setuptools-')
                    ]  # noqa
                plan.display_actions(actions, index)
                if on_win:
                    for k, v in os.environ.items():
                        os.environ[k] = str(v)
                plan.execute_actions(actions, index, verbose=config.debug)
            except (SystemExit, PaddingError, LinkError) as exc:
                if (("too short in" in str(exc)
                     or 'post-link failed for: openssl' in str(exc)
                     or isinstance(exc, PaddingError))
                        and config.prefix_length > 80):
                    log.warn("Build prefix failed with prefix length %d",
                             config.prefix_length)
                    log.warn("Error was: ")
                    log.warn(str(exc))
                    log.warn(
                        "One or more of your package dependencies needs to be rebuilt "
                        "with a longer prefix length.")
                    log.warn(
                        "Falling back to legacy prefix length of 80 characters."
                    )
                    log.warn(
                        "Your package will not install into prefixes > 80 characters."
                    )
                    config.prefix_length = 80

                    # Set this here and use to create environ
                    #   Setting this here is important because we use it below (symlink)
                    prefix = config.build_prefix

                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    create_env(prefix,
                               specs,
                               config=config,
                               clear_cache=clear_cache)
                else:
                    for lock in locks:
                        lock.release()
                        if os.path.isfile(lock._lock_file):
                            os.remove(lock._lock_file)
                    raise
            finally:
                for lock in locks:
                    lock.release()
                    if os.path.isfile(lock._lock_file):
                        os.remove(lock._lock_file)
        warn_on_old_conda_build(index=index)

    # ensure prefix exists, even if empty, i.e. when specs are empty
    if not isdir(prefix):
        os.makedirs(prefix)
    if on_win:
        shell = "cmd.exe"
    else:
        shell = "bash"
    symlink_conda(prefix, sys.prefix, shell)
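
Both variants of create_env share the same recovery strategy: if linking fails because a dependency was built with too short a placeholder prefix, fall back to the legacy 80-character prefix length and retry once. Schematically, with make_env as a hypothetical stand-in for the solve/link step:

def create_with_fallback(config, make_env):
    try:
        make_env(config.prefix_length)
    except Exception as exc:
        if 'too short in' in str(exc) and config.prefix_length > 80:
            # legacy length; the resulting package will not install into
            # prefixes longer than 80 characters
            config.prefix_length = 80
            make_env(config.prefix_length)
        else:
            raise
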
Example 57
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()
    channel_urls = args.channel or ()

    all_versions = {
        'python': [26, 27, 33, 34],
        'numpy': [16, 17, 18, 19],
        'perl': None,
        'R': None,
        }
    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = int(versions[0].replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) == 2:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    if args.skip_existing:
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True,
            channel_urls=channel_urls,
            override_channels=args.override_channels)

    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if args.output:
                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, verbose=not args.quiet,
                    channel_urls=channel_urls, override_channels=args.override_channels)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, verbose=not args.quiet, post=post,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                except RuntimeError as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                        else:
                            raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m, verbose=not args.quiet,
                        channel_urls=channel_urls, override_channels=args.override_channels)
                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)
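
The retry loop above is the core of recursive dependency building: when a build fails because a package cannot be found, the current recipe is pushed back onto the deque and the dependency's recipe is queued in front of it. A minimal sketch of the pattern, with hypothetical build_one and find_dep_recipe callables and a hypothetical MissingDep exception:

from collections import deque

class MissingDep(Exception):
    # Hypothetical error carrying the name of the missing dependency.
    def __init__(self, name):
        super(MissingDep, self).__init__(name)
        self.name = name

def build_all(recipes, build_one, find_dep_recipe):
    queue = deque(recipes)
    while queue:
        recipe = queue.popleft()
        try:
            build_one(recipe)
        except MissingDep as e:
            dep_recipe = find_dep_recipe(e.name)
            if dep_recipe is None:
                raise
            queue.appendleft(recipe)      # retry this recipe afterwards...
            queue.appendleft(dep_recipe)  # ...but build its dependency first
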
Example n. 58
def build(m,
          config,
          post=None,
          need_source_download=True,
          need_reparse_in_env=False):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param post: None means run the whole build; True means run only the
        post-build steps; False means stop just before the post-build steps.
    :type post: bool or None
    :param need_source_download: if rendering failed to download the source
        (due to missing tools), retry the download here after the build
        environment is populated
    :type need_source_download: bool
    '''

    if m.skip():
        print_skip_message(m)
        return False

    if config.skip_existing:
        package_exists = is_package_built(m, config)
        if package_exists:
            print(m.dist(),
                  "is already built in {0}, skipping.".format(package_exists))
            return False

    if post in [False, None]:
        print("BUILD START:", m.dist())
        if m.uses_jinja and (need_source_download or need_reparse_in_env):
            print(
                "    (actual version deferred until further download or env creation)"
            )

        specs = [ms.spec for ms in m.ms_depends('build')]
        create_env(config.build_prefix, specs, config=config)
        vcs_source = m.uses_vcs_in_build
        if vcs_source and vcs_source not in specs:
            vcs_executable = "hg" if vcs_source == "mercurial" else vcs_source
            has_vcs_available = os.path.isfile(
                external.find_executable(vcs_executable, config.build_prefix)
                or "")
            if not has_vcs_available:
                if (vcs_source != "mercurial" or not any(
                        spec.startswith('python') and "3." in spec
                        for spec in specs)):
                    specs.append(vcs_source)

                    log.warn(
                        "Your recipe depends on %s at build time (for templates), "
                        "but you have not listed it as a build dependency.  Doing "
                        "so for this build.", vcs_source)

                    # Display the name only
                    # Version number could be missing due to dependency on source info.
                    create_env(config.build_prefix, specs, config=config)
                else:
                    raise ValueError(
                        "Your recipe uses mercurial in build, but mercurial"
                        " does not yet support Python 3.  Please handle all of "
                        "your mercurial actions outside of your build script.")

        if need_source_download:
            # Execute any commands fetching the source (e.g., git) in the _build environment.
            # This makes it possible to provide source fetchers (e.g., git, hg, svn) as build
            # dependencies.
            with path_prepended(config.build_prefix):
                m, need_source_download, need_reparse_in_env = parse_or_try_download(
                    m,
                    no_download_source=False,
                    force_download=True,
                    config=config)
            assert not need_source_download, "Source download failed.  Please investigate."
            if m.uses_jinja:
                print("BUILD START (revised):", m.dist())

        if need_reparse_in_env:
            reparse(m, config=config)
            print("BUILD START (revised):", m.dist())

        if m.name() in [
                i.rsplit('-', 2)[0] for i in linked(config.build_prefix)
        ]:
            print("%s is installed as a build dependency. Removing." %
                  m.name())
            index = get_build_index(config=config, clear_cache=False)
            actions = plan.remove_actions(config.build_prefix, [m.name()],
                                          index=index)
            assert not plan.nothing_to_do(actions), actions
            plan.display_actions(actions, index)
            plan.execute_actions(actions, index)

        print("Package:", m.dist())

        with filelock.SoftFileLock(join(config.build_folder, ".conda_lock"),
                                   timeout=config.timeout):
            # get_dir here might be just work, or it might be one level deeper,
            #    depending on the source.
            src_dir = source.get_dir(config)
            if isdir(src_dir):
                print("source tree in:", src_dir)
            else:
                print("no source - creating empty work folder")
                os.makedirs(src_dir)

            rm_rf(config.info_dir)
            files1 = prefix_files(prefix=config.build_prefix)
            for pat in m.always_include_files():
                has_matches = False
                for f in set(files1):
                    if fnmatch.fnmatch(f, pat):
                        print("Including in package existing file", f)
                        files1.discard(f)
                        has_matches = True
                if not has_matches:
                    log.warn(
                        "Glob %s from always_include_files does not match any files",
                        pat)
            # Save this for later
            with open(join(config.croot, 'prefix_files.txt'), 'w') as f:
                f.write(u'\n'.join(sorted(list(files1))))
                f.write(u'\n')

            # Use script from recipe?
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)

            if isdir(src_dir):
                if on_win:
                    build_file = join(m.path, 'bld.bat')
                    if script:
                        build_file = join(src_dir, 'bld.bat')
                        with open(build_file, 'w') as bf:
                            bf.write(script)
                    import conda_build.windows as windows
                    windows.build(m, build_file, config=config)
                else:
                    build_file = join(m.path, 'build.sh')

                    # There is no sense in trying to run an empty build script.
                    if isfile(build_file) or script:
                        with path_prepended(config.build_prefix):
                            env = environ.get_dict(config=config, m=m)
                        env["CONDA_BUILD_STATE"] = "BUILD"
                        work_file = join(source.get_dir(config),
                                         'conda_build.sh')
                        if script:
                            with open(work_file, 'w') as bf:
                                bf.write(script)
                        if config.activate:
                            if isfile(build_file):
                                data = open(build_file).read()
                            else:
                                data = open(work_file).read()
                            with open(work_file, 'w') as bf:
                                bf.write(
                                    "source {conda_root}activate {build_prefix} &> "
                                    "/dev/null\n".format(
                                        conda_root=root_script_dir +
                                        os.path.sep,
                                        build_prefix=config.build_prefix))
                                bf.write(data)
                        else:
                            if not isfile(work_file):
                                copy_into(build_file, work_file,
                                          config.timeout)
                        os.chmod(work_file, 0o766)

                        if isfile(work_file):
                            cmd = [shell_path, '-x', '-e', work_file]
                            # this should raise if any problems occur while building
                            _check_call(cmd, env=env, cwd=src_dir)

    if post in [True, None]:
        if post:
            with open(join(config.croot, 'prefix_files.txt'), 'r') as f:
                files1 = set(f.read().splitlines())

        get_build_metadata(m, config=config)
        create_post_scripts(m, config=config)
        create_entry_points(m.get_value('build/entry_points'), config=config)
        files2 = prefix_files(prefix=config.build_prefix)

        post_process(sorted(files2 - files1),
                     prefix=config.build_prefix,
                     config=config,
                     preserve_egg_dir=bool(
                         m.get_value('build/preserve_egg_dir')))

        # The post processing may have deleted some files (like easy-install.pth)
        files2 = prefix_files(prefix=config.build_prefix)
        if any(config.meta_dir in join(config.build_prefix, f)
               for f in files2 - files1):
            meta_files = (tuple(
                f for f in files2 - files1
                if config.meta_dir in join(config.build_prefix, f)), )
            sys.exit(
                indent(
                    """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
        post_build(m,
                   sorted(files2 - files1),
                   prefix=config.build_prefix,
                   build_python=config.build_python,
                   croot=config.croot)
        create_info_files(m,
                          sorted(files2 - files1),
                          config=config,
                          prefix=config.build_prefix)
        if m.get_value('build/noarch_python'):
            import conda_build.noarch_python as noarch_python
            noarch_python.transform(m, sorted(files2 - files1),
                                    config.build_prefix)

        files3 = prefix_files(prefix=config.build_prefix)
        fix_permissions(files3 - files1, config.build_prefix)

        path = bldpkg_path(m, config)

        # lock the output directory while we build this file
        # create the tarball in a temporary directory to minimize lock time
        with TemporaryDirectory() as tmp:
            tmp_path = os.path.join(tmp, os.path.basename(path))
            t = tarfile.open(tmp_path, 'w:bz2')

            def order(f):
                # we don't care about empty files so send them back via 100000
                fsize = os.stat(join(config.build_prefix, f)).st_size or 100000
                # info/* records will be False == 0, others will be 1.
                info_order = int(os.path.dirname(f) != 'info')
                return info_order, fsize

            # add files in order of a) in info directory, b) increasing size so
            # we can access small manifest or json files without decompressing
            # possible large binary or data files
            for f in sorted(files3 - files1, key=order):
                t.add(join(config.build_prefix, f), f)
            t.close()

            # we're done building, perform some checks
            tarcheck.check_all(tmp_path)

            copy_into(tmp_path, path, config.timeout)
        update_index(config.bldpkgs_dir, config)

    else:
        print("STOPPING BUILD BEFORE POST:", m.dist())

    # returning true here says package is OK to test
    return True
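
The order() closure above is worth isolating: entries under info/ are written to the tarball first, followed by the remaining files in increasing size, so the small metadata files can be read from the front of the archive without decompressing large payloads. A standalone sketch of the same sort key:

import os
from os.path import dirname, join

def tar_sort_key(prefix):
    def key(f):
        # empty files are pushed toward the back via a large placeholder size
        fsize = os.stat(join(prefix, f)).st_size or 100000
        # info/* entries sort first (0), everything else afterwards (1)
        return int(dirname(f) != 'info'), fsize
    return key

# usage: for f in sorted(files, key=tar_sort_key(build_prefix)): t.add(...)
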
Example n. 59
def build(m, get_src=True, verbose=True):
    '''
    Build the package with the specified metadata.

    :param m: Package metadata
    :type m: Metadata
    :param get_src: Should we download the source?
    :type get_src: bool
    '''
    rm_rf(prefix)

    print("BUILD START:", m.dist())
    create_env(prefix, [ms.spec for ms in m.ms_depends('build')],
               verbose=verbose)

    if get_src:
        source.provide(m.path, m.get_section('source'))
    assert isdir(source.WORK_DIR)
    if os.listdir(source.get_dir()):
        print("source tree in:", source.get_dir())
    else:
        print("no source")

    rm_rf(info_dir)
    files1 = prefix_files()

    if sys.platform == 'win32':
        import conda_build.windows as windows
        windows.build(m)
    else:
        env = environ.get_dict(m)
        build_file = join(m.path, 'build.sh')
        if exists(build_file):
            script = m.get_value('build/script', None)
            if script:
                if isinstance(script, list):
                    script = '\n'.join(script)
                with open(build_file, 'w', encoding='utf-8') as bf:
                    bf.write(script)
                os.chmod(build_file, 0o766)
            cmd = ['/bin/bash', '-x', '-e', build_file]

            _check_call(cmd, env=env, cwd=source.get_dir())

    get_build_metadata(m)
    create_post_scripts(m)
    create_entry_points(m.get_value('build/entry_points'))
    post_process(preserve_egg_dir=bool(m.get_value('build/preserve_egg_dir')))

    assert not exists(info_dir)
    files2 = prefix_files()

    post_build(sorted(files2 - files1),
               binary_relocation=bool(
                   m.get_value('build/binary_relocation', True)))
    create_info_files(m, sorted(files2 - files1), include_recipe=bool(m.path))
    files3 = prefix_files()
    fix_permissions(files3 - files1)

    path = bldpkg_path(m)
    t = tarfile.open(path, 'w:bz2')
    for f in sorted(files3 - files1):
        t.add(join(prefix, f), f)
    t.close()

    print("BUILD END:", m.dist())

    # we're done building, perform some checks
    tarcheck.check_all(path)
    update_index(config.bldpkgs_dir)
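
All of these build() variants rely on the same snapshot-diff idea: record the set of files in the prefix before the build, run the build script, and package only the files that appeared afterwards (the files2 - files1 sets above). A minimal, self-contained sketch of that pattern:

import os

def snapshot(prefix):
    # Set of file paths currently in prefix, relative to prefix.
    found = set()
    for root, _, files in os.walk(prefix):
        for fn in files:
            found.add(os.path.relpath(os.path.join(root, fn), prefix))
    return found

# before = snapshot(build_prefix)
# ... run the build script ...
# new_files = snapshot(build_prefix) - before   # contents of the package
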
Example n. 60
def execute(args, parser):
    import sys
    import shutil
    import tarfile
    import tempfile
    from os import makedirs
    from os.path import abspath, isdir, isfile

    from conda.lock import Locked
    import conda_build.build as build
    import conda_build.source as source
    from conda_build.config import config
    from conda_build.metadata import MetaData

    check_external()

    # change globals in build module, see comment there as well
    build.channel_urls = args.channel or ()
    build.override_channels = args.override_channels
    build.verbose = not args.quiet

    if on_win:
        try:
            # needs to happen before any c extensions are imported that might be
            # hard-linked by files in the trash. one of those is markupsafe,
            # used by jinja2. see https://github.com/conda/conda-build/pull/520
            delete_trash(None)
        except:
            # when we can't delete the trash, don't crash on AssertionError,
            # instead inform the user and try again next time.
            # see https://github.com/conda/conda-build/pull/744
            warnings.warn("Cannot delete trash; some c extension has been "
                          "imported that is hard-linked by files in the trash. "
                          "Will try again on next run.")

    conda_version = {
        'python': 'CONDA_PY',
        'numpy': 'CONDA_NPY',
        'perl': 'CONDA_PERL',
        'R': 'CONDA_R',
        }

    for lang in ['python', 'numpy', 'perl', 'R']:
        versions = getattr(args, lang)
        if not versions:
            continue
        if versions == ['all']:
            if all_versions[lang]:
                versions = all_versions[lang]
            else:
                parser.error("'all' is not supported for --%s" % lang)
        if len(versions) > 1:
            for ver in versions[:]:
                setattr(args, lang, [str(ver)])
                execute(args, parser)
                # This is necessary to make all combinations build.
                setattr(args, lang, versions)
            return
        else:
            version = versions[0]
            if lang in ('python', 'numpy'):
                version = int(version.replace('.', ''))
            setattr(config, conda_version[lang], version)
        if not len(str(version)) in (2, 3) and lang in ['python', 'numpy']:
            if all_versions[lang]:
                raise RuntimeError("%s must be major.minor, like %s, not %s" %
                    (conda_version[lang], all_versions[lang][-1]/10, version))
            else:
                raise RuntimeError("%s must be major.minor, not %s" %
                    (conda_version[lang], version))

    # Using --python, --numpy etc. is equivalent to using CONDA_PY, CONDA_NPY, etc.
    # Auto-set those env variables
    for var in conda_version.values():
        if getattr(config, var):
            # Set the env variable.
            os_environ[var] = str(getattr(config, var))

    if args.skip_existing:
        if not isdir(config.bldpkgs_dir):
            makedirs(config.bldpkgs_dir)
        update_index(config.bldpkgs_dir)
        index = build.get_build_index(clear_cache=True)

    already_built = []
    to_build_recursive = []
    with Locked(config.croot):
        recipes = deque(args.recipe)
        while recipes:
            arg = recipes.popleft()
            try_again = False
            # Don't use byte literals for paths in Python 2
            if not PY3:
                arg = arg.decode(getpreferredencoding() or 'utf-8')
            if isfile(arg):
                if arg.endswith(('.tar', '.tar.gz', '.tgz', '.tar.bz2')):
                    recipe_dir = tempfile.mkdtemp()
                    t = tarfile.open(arg, 'r:*')
                    t.extractall(path=recipe_dir)
                    t.close()
                    need_cleanup = True
                else:
                    print("Ignoring non-recipe: %s" % arg)
                    continue
            else:
                recipe_dir = abspath(arg)
                need_cleanup = False

            if not isdir(recipe_dir):
                sys.exit("Error: no such directory: %s" % recipe_dir)

            try:
                m = MetaData(recipe_dir)
                if m.get_value('build/noarch_python'):
                    config.noarch = True
            except exceptions.YamlParsingError as e:
                sys.stderr.write(e.error_msg())
                sys.exit(1)
            binstar_upload = False
            if args.check and len(args.recipe) > 1:
                print(m.path)
            m.check_fields()
            if args.check:
                continue
            if args.skip_existing:
                if m.pkg_fn() in index or m.pkg_fn() in already_built:
                    print("%s is already built, skipping." % m.dist())
                    continue
            if m.skip():
                print("Skipped: The %s recipe defines build/skip for this "
                      "configuration." % m.dist())
                continue
            if args.output:
                try:
                    m.parse_again(permit_undefined_jinja=False)
                except SystemExit:
                    # Something went wrong; possibly due to undefined GIT_ jinja variables.
                    # Maybe we need to actually download the source in order to resolve the build_id.
                    source.provide(m.path, m.get_section('source'))
                    
                    # Parse our metadata again because we did not initialize the source
                    # information before.
                    m.parse_again(permit_undefined_jinja=False)

                print(build.bldpkg_path(m))
                continue
            elif args.test:
                build.test(m, move_broken=False)
            elif args.source:
                source.provide(m.path, m.get_section('source'))
                print('Source tree in:', source.get_dir())
            else:
                # This loop recursively builds dependencies if recipes exist
                if args.build_only:
                    post = False
                    args.notest = True
                    args.binstar_upload = False
                elif args.post:
                    post = True
                    args.notest = True
                    args.binstar_upload = False
                else:
                    post = None
                try:
                    build.build(m, post=post,
                                include_recipe=args.include_recipe)
                except (RuntimeError, SystemExit) as e:
                    error_str = str(e)
                    if error_str.startswith('No packages found') or error_str.startswith('Could not find some'):
                        # Build dependency if recipe exists
                        dep_pkg = error_str.split(': ')[1]
                        # Handle package names that contain version deps.
                        if ' ' in dep_pkg:
                            dep_pkg = dep_pkg.split(' ')[0]
                        recipe_glob = glob(dep_pkg + '-[v0-9][0-9.]*')
                        if exists(dep_pkg):
                            recipe_glob.append(dep_pkg)
                        if recipe_glob:
                            recipes.appendleft(arg)
                            try_again = True
                            for recipe_dir in recipe_glob:
                                if dep_pkg in to_build_recursive:
                                    sys.exit(str(e))
                                print(("Missing dependency {0}, but found" +
                                       " recipe directory, so building " +
                                       "{0} first").format(dep_pkg))
                                recipes.appendleft(recipe_dir)
                                to_build_recursive.append(dep_pkg)
                        else:
                            raise
                    elif error_str.strip().startswith("Hint:"):
                        lines = [line for line in error_str.splitlines() if line.strip().startswith('- ')]
                        pkgs = [line.lstrip('- ') for line in lines]
                        # Typically if a conflict is with one of these
                        # packages, the other package needs to be rebuilt
                        # (e.g., a conflict with 'python 3.5*' and 'x' means
                        # 'x' isn't built for Python 3.5 and needs to be
                        # rebuilt).
                        skip_names = ['python', 'r']
                        pkgs = [pkg for pkg in pkgs if pkg.split(' ')[0] not
                            in skip_names]
                        for pkg in pkgs:
                            # Handle package names that contain version deps.
                            if ' ' in pkg:
                                pkg = pkg.split(' ')[0]
                            recipe_glob = glob(pkg + '-[v0-9][0-9.]*')
                            if exists(pkg):
                                recipe_glob.append(pkg)
                            if recipe_glob:
                                recipes.appendleft(arg)
                                try_again = True
                                for recipe_dir in recipe_glob:
                                    if pkg in to_build_recursive:
                                        sys.exit(str(e))
                                    print(error_str)
                                    print(("Missing dependency {0}, but found" +
                                           " recipe directory, so building " +
                                           "{0} first").format(pkg))
                                    recipes.appendleft(recipe_dir)
                                    to_build_recursive.append(pkg)
                            else:
                                raise
                    else:
                        raise
                if try_again:
                    continue

                if not args.notest:
                    build.test(m)

                binstar_upload = True

            if need_cleanup:
                shutil.rmtree(recipe_dir)

            if binstar_upload:
                handle_binstar_upload(build.bldpkg_path(m), args)

            already_built.append(m.pkg_fn())
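
Compared with Example n. 57, this version adds two guards: already_built skips packages produced earlier in the same run, and to_build_recursive stops the loop from re-queuing a dependency whose recipe keeps failing. A minimal sketch of the second guard, with hypothetical names:

import sys

def queue_dependency(recipes, to_build_recursive, dep_name, dep_recipe, err):
    # Give up if this dependency was already queued once and failed again.
    if dep_name in to_build_recursive:
        sys.exit(str(err))
    recipes.appendleft(dep_recipe)   # build the dependency first
    to_build_recursive.append(dep_name)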