Code Example #1
def post_build(m, files, build_python):
    print('number of files:', len(files))

    for f in files:
        make_hardlink_copy(f, m.config.host_prefix)

    if sys.platform == 'win32':
        return

    binary_relocation = m.binary_relocation()
    if not binary_relocation:
        print("Skipping binary relocation logic")
    osx_is_app = bool(m.get_value('build/osx_is_app', False)) and sys.platform == 'darwin'

    check_symlinks(files, m.config.host_prefix, m.config.croot)
    prefix_files = utils.prefix_files(m.config.host_prefix)

    for f in files:
        if f.startswith('bin/'):
            fix_shebang(f, prefix=m.config.host_prefix, build_python=build_python,
                        osx_is_app=osx_is_app)
        if binary_relocation is True or (isinstance(binary_relocation, list) and
                                         f in binary_relocation):
            post_process_shared_lib(m, f, prefix_files)
    check_overlinking(m, files)
Code Example #2
def post_build(m, files, build_python):
    print('number of files:', len(files))

    for f in files:
        make_hardlink_copy(f, m.config.host_prefix)

    if not m.config.target_subdir.startswith('win'):
        binary_relocation = m.binary_relocation()
        if not binary_relocation:
            print("Skipping binary relocation logic")
        osx_is_app = (m.config.target_subdir == 'osx-64' and
                      bool(m.get_value('build/osx_is_app', False)))
        check_symlinks(files, m.config.host_prefix, m.config.croot)
        prefix_files = utils.prefix_files(m.config.host_prefix)

        for f in files:
            if f.startswith('bin/'):
                fix_shebang(f, prefix=m.config.host_prefix, build_python=build_python,
                            osx_is_app=osx_is_app)
            if binary_relocation is True or (isinstance(binary_relocation, list) and
                                             f in binary_relocation):
                post_process_shared_lib(m, f, prefix_files)
    # disable overlinking check on win right now, until Ray has time for it.
    if not utils.on_win:
        check_overlinking(m, files)
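
Both variants gate per-file relocation on the same value: binary_relocation may be True (relocate every file), False (skip relocation entirely), or a list of specific files. A minimal illustrative helper capturing that check (not part of conda-build; the name should_relocate is hypothetical):

def should_relocate(path, binary_relocation):
    # True relocates everything; a list relocates only the listed files; anything
    # else (False/None) skips relocation, matching the check inside the loop above.
    if binary_relocation is True:
        return True
    if isinstance(binary_relocation, list):
        return path in binary_relocation
    return False
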
Code Example #3
File: build.py    Project: hbcarlos/boa
def build(m, stats={}):

    if m.skip():
        print(utils.get_skip_message(m))
        return {}

    log = utils.get_logger(__name__)

    with utils.path_prepended(m.config.build_prefix):
        env = environ.get_dict(m=m)

    env["CONDA_BUILD_STATE"] = "BUILD"
    if env_path_backup_var_exists:
        env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

    m.output.sections["package"]["name"] = m.output.name
    env["PKG_NAME"] = m.get_value('package/name')

    src_dir = m.config.work_dir
    if isdir(src_dir):
        if m.config.verbose:
            print("source tree in:", src_dir)
    else:
        if m.config.verbose:
            print("no source - creating empty work folder")
        os.makedirs(src_dir)

    utils.rm_rf(m.config.info_dir)
    files_before_script = utils.prefix_files(prefix=m.config.host_prefix)

    with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f:
        f.write("\n".join(sorted(list(files_before_script))))
        f.write("\n")

    execute_build_script(m, src_dir, env)

    files_after_script = utils.prefix_files(prefix=m.config.host_prefix)

    files_difference = files_after_script - files_before_script

    if m.output.sections['build'].get('intermediate') == True:
        utils.rm_rf(m.config.host_prefix)
        return

    bundle_conda(m, files_before_script, env, m.output.sections['files'])
Code Example #4
File: build.py    Project: ocefpaf/boa
def build(m, stats=None, from_interactive=False, allow_interactive=False):
    try:
        if not stats:
            stats = {}

        if m.skip():
            console.print(utils.get_skip_message(m))
            return {}

        with utils.path_prepended(m.config.build_prefix):
            env = environ.get_dict(m=m)

        env["CONDA_BUILD_STATE"] = "BUILD"
        if env_path_backup_var_exists:
            env["CONDA_PATH_BACKUP"] = os.environ["CONDA_PATH_BACKUP"]

        m.output.sections["package"]["name"] = m.output.name
        env["PKG_NAME"] = m.get_value("package/name")

        src_dir = m.config.work_dir
        if isdir(src_dir):
            if m.config.verbose:
                console.print("source tree in:", src_dir)
        else:
            if m.config.verbose:
                console.print("no source - creating empty work folder")
            os.makedirs(src_dir)

        utils.rm_rf(m.config.info_dir)
        files_before_script = utils.prefix_files(prefix=m.config.host_prefix)

        with open(join(m.config.build_folder, "prefix_files.txt"), "w") as f:
            f.write("\n".join(sorted(list(files_before_script))))
            f.write("\n")

        execute_build_script(m, src_dir, env)

        if m.output.sections["build"].get("intermediate"):
            utils.rm_rf(m.config.host_prefix)
            return

        bundle_conda(m, files_before_script, env, m.output.sections["files"])
    except subprocess.CalledProcessError:
        ext = "bat" if utils.on_win else "sh"
        work_dir = pathlib.Path(m.config.build_prefix).parent / "work"
        build_cmd = work_dir / f"conda_build.{ext}"

        console.print("\n")
        console.print(f"Work directory: {work_dir}")
        console.print(f"Try building again with {build_cmd}")

        if not from_interactive and allow_interactive:
            console.print("[red]Build went wrong, entering interactive mode![/red]")
            from boa.tui import tui
            import asyncio

            asyncio.run(tui.enter_tui(m))
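
The build() variants above share one pattern: snapshot the host prefix with utils.prefix_files before running the build script, snapshot it again afterwards, and treat the set difference as the files the build produced. A minimal, self-contained sketch of that pattern, assuming snapshot_prefix_files as a hypothetical stand-in for conda_build.utils.prefix_files:

import os

def snapshot_prefix_files(prefix):
    # Hypothetical stand-in for utils.prefix_files: the set of file paths under
    # prefix, expressed relative to prefix.
    found = set()
    for root, _dirs, names in os.walk(prefix):
        for name in names:
            found.add(os.path.relpath(os.path.join(root, name), prefix))
    return found

def new_files_from_build(prefix, run_build_script):
    # Snapshot, run the build step, snapshot again; the diff is what the script added.
    before = snapshot_prefix_files(prefix)
    run_build_script()
    after = snapshot_prefix_files(prefix)
    return sorted(after - before)
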
Code Example #5
def find_lib(link, prefix, path=None):
    files = utils.prefix_files(prefix)
    if link.startswith(prefix):
        link = os.path.normpath(link[len(prefix) + 1:])
        if link not in files:
            sys.exit("Error: Could not find %s" % link)
        return link
    if link.startswith('/'):  # but doesn't start with the build prefix
        return
    if link.startswith('@rpath/'):
        # Assume the rpath already points to lib, so there is no need to
        # change it.
        return
    if '/' not in link or link.startswith('@executable_path/'):
        link = os.path.basename(link)
        file_names = defaultdict(list)
        for f in files:
            file_names[os.path.basename(f)].append(f)
        if link not in file_names:
            sys.exit("Error: Could not find %s" % link)
        if len(file_names[link]) > 1:
            if path and os.path.basename(path) == link:
                # The link is for the file itself, just use it
                return path
            # Allow for the possibility of the same library appearing in
            # multiple places.
            md5s = set()
            for f in file_names[link]:
                md5s.add(md5_file(os.path.join(prefix, f)))
            if len(md5s) > 1:
                sys.exit("Error: Found multiple instances of %s: %s" %
                         (link, file_names[link]))
            else:
                file_names[link].sort()
                print("Found multiple instances of %s (%s).  "
                      "Choosing the first one." % (link, file_names[link]))
        return file_names[link][0]
    print("Don't know how to find %s, skipping" % link)
Code Example #6
File: post.py    Project: jjhelmus/conda-build
def find_lib(link, prefix, path=None):
    files = utils.prefix_files(prefix)
    if link.startswith(prefix):
        link = os.path.normpath(link[len(prefix) + 1:])
        if link not in files:
            sys.exit("Error: Could not find %s" % link)
        return link
    if link.startswith('/'):  # but doesn't start with the build prefix
        return
    if link.startswith('@rpath/'):
        # Assume the rpath already points to lib, so there is no need to
        # change it.
        return
    if '/' not in link or link.startswith('@executable_path/'):
        link = os.path.basename(link)
        file_names = defaultdict(list)
        for f in files:
            file_names[os.path.basename(f)].append(f)
        if link not in file_names:
            sys.exit("Error: Could not find %s" % link)
        if len(file_names[link]) > 1:
            if path and os.path.basename(path) == link:
                # The link is for the file itself, just use it
                return path
            # Allow for the possibility of the same library appearing in
            # multiple places.
            md5s = set()
            for f in file_names[link]:
                md5s.add(md5_file(os.path.join(prefix, f)))
            if len(md5s) > 1:
                sys.exit("Error: Found multiple instances of %s: %s" % (link, file_names[link]))
            else:
                file_names[link].sort()
                print("Found multiple instances of %s (%s).  "
                    "Choosing the first one." % (link, file_names[link]))
        return file_names[link][0]
    print("Don't know how to find %s, skipping" % link)
Code Example #7
File: build.py    Project: stjordanis/boa
def bundle_conda(metadata, initial_files, env, files_selector=None):

    files = post_process_files(metadata, initial_files)

    # first filter is so that info_files does not pick up ignored files
    files = utils.filter_files(files, prefix=metadata.config.host_prefix)
    if files_selector:
        files = select_files(files, files_selector.get("include"),
                             files_selector.get("exclude"))

    console.print(f"\n[yellow]Adding files for {metadata.name()}[/yellow]\n")
    if files:
        for f in sorted(files):
            console.print(f"- {f}")
    else:
        console.print(
            f"[red]ATTENTION: No files added in target [bold]{metadata.name()}[/bold][/red]"
        )
    console.print("\n")

    # this is also copying things like run_test.sh into info/recipe
    utils.rm_rf(os.path.join(metadata.config.info_dir, "test"))

    output = {}

    with tmp_chdir(metadata.config.host_prefix):
        output["checksums"] = create_info_files(
            metadata, files, prefix=metadata.config.host_prefix)

    # here we add the info files into the prefix, so we want to re-collect the files list
    prefix_files = set(utils.prefix_files(metadata.config.host_prefix))
    files = utils.filter_files(prefix_files - initial_files,
                               prefix=metadata.config.host_prefix)
    if files_selector:
        include_files = files_selector.get("include")
        if include_files:
            include_files += ["info/*"]
        files = select_files(files, include_files,
                             files_selector.get("exclude"))

    basename = metadata.dist()
    tmp_archives = []
    final_outputs = []
    ext = ".tar.bz2"
    if output.get(
            "type") == "conda_v2" or metadata.config.conda_pkg_format == "2":
        ext = ".conda"

    with TemporaryDirectory() as tmp:
        conda_package_handling.api.create(metadata.config.host_prefix,
                                          files,
                                          basename + ext,
                                          out_folder=tmp)
        tmp_archives = [os.path.join(tmp, basename + ext)]

        # we're done building, perform some checks
        for tmp_path in tmp_archives:
            #     if tmp_path.endswith('.tar.bz2'):
            #         tarcheck.check_all(tmp_path, metadata.config)
            output_filename = os.path.basename(tmp_path)

            #     # we do the import here because we want to respect logger level context
            #     try:
            #         from conda_verify.verify import Verify
            #     except ImportError:
            #         Verify = None
            #         log.warn("Importing conda-verify failed.  Please be sure to test your packages.  "
            #             "conda install conda-verify to make this message go away.")
            #     if getattr(metadata.config, "verify", False) and Verify:
            #         verifier = Verify()
            #         checks_to_ignore = (utils.ensure_list(metadata.config.ignore_verify_codes) +
            #                             metadata.ignore_verify_codes())
            #         try:
            #             verifier.verify_package(path_to_package=tmp_path, checks_to_ignore=checks_to_ignore,
            #                                     exit_on_error=metadata.config.exit_on_verify_error)
            #         except KeyError as e:
            #             log.warn("Package doesn't have necessary files.  It might be too old to inspect."
            #                      "Legacy noarch packages are known to fail.  Full message was {}".format(e))
            try:
                crossed_subdir = metadata.config.target_subdir
            except AttributeError:
                crossed_subdir = metadata.config.host_subdir
            subdir = ("noarch" if (metadata.noarch or metadata.noarch_python)
                      else crossed_subdir)
            if metadata.config.output_folder:
                output_folder = os.path.join(metadata.config.output_folder,
                                             subdir)
            else:
                output_folder = os.path.join(
                    os.path.dirname(metadata.config.bldpkgs_dir), subdir)
            final_output = os.path.join(output_folder, output_filename)
            if os.path.isfile(final_output):
                utils.rm_rf(final_output)

            # disable locking here. It's just a temp folder getting locked.
            # Having it proved a major bottleneck.
            utils.copy_into(tmp_path,
                            final_output,
                            metadata.config.timeout,
                            locking=False)
            final_outputs.append(final_output)

    update_index(os.path.dirname(output_folder),
                 verbose=metadata.config.debug,
                 threads=1)

    # clean out host prefix so that this output's files don't interfere with other outputs
    # We have a backup of how things were before any output scripts ran.  That's
    # restored elsewhere.
    if metadata.config.keep_old_work:
        prefix = metadata.config.host_prefix
        dest = os.path.join(
            os.path.dirname(prefix),
            "_".join(("_h_env_moved", metadata.dist(),
                      metadata.config.host_subdir)),
        )
        console.print("Renaming host env directory, ", prefix, " to ", dest)
        if os.path.exists(dest):
            utils.rm_rf(dest)
        shutil.move(prefix, dest)
    else:
        utils.rm_rf(metadata.config.host_prefix)

    return final_outputs
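
The packaging step in bundle_conda hands the prefix, the selected file list, and the archive name to conda_package_handling.api.create. A usage sketch mirroring that call, with hypothetical paths and file names:

import conda_package_handling.api

prefix = "/opt/conda/conda-bld/mytool_1600000000/_h_env"        # hypothetical host prefix
files = ["bin/mytool", "lib/libmytool.so", "info/index.json"]   # files relative to prefix
conda_package_handling.api.create(prefix, files, "mytool-1.0-0.tar.bz2",
                                  out_folder="/tmp/pkg-out")
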
Code Example #8
File: build.py    Project: stjordanis/boa
def post_process_files(m, initial_prefix_files):
    get_build_metadata(m)
    create_post_scripts(m)

    # this is new-style noarch, with a value of 'python'
    if m.noarch != "python":
        utils.create_entry_points(m.get_value("build/entry_points"),
                                  config=m.config)
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)

    python = (m.config.build_python if os.path.isfile(m.config.build_python)
              else m.config.host_python)
    post_process(
        m.get_value("package/name"),
        m.get_value("package/version"),
        sorted(current_prefix_files - initial_prefix_files),
        prefix=m.config.host_prefix,
        config=m.config,
        preserve_egg_dir=bool(m.get_value("build/preserve_egg_dir")),
        noarch=m.get_value("build/noarch"),
        skip_compile_pyc=m.get_value("build/skip_compile_pyc"),
    )

    # The post processing may have deleted some files (like easy-install.pth)
    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)
    new_files = sorted(current_prefix_files - initial_prefix_files)
    new_files = utils.filter_files(new_files, prefix=m.config.host_prefix)

    host_prefix = m.config.host_prefix
    meta_dir = m.config.meta_dir
    if any(meta_dir in join(host_prefix, f) for f in new_files):
        meta_files = (tuple(
            f for f in new_files
            if m.config.meta_dir in join(m.config.host_prefix, f)), )
        sys.exit(
            indent(
                """Error: Untracked file(s) %s found in conda-meta directory.
This error usually comes from using conda in the build script.  Avoid doing this, as it
can lead to packages that include their dependencies.""" % meta_files))
    post_build(m, new_files, build_python=python)

    entry_point_script_names = get_entry_point_script_names(
        m.get_value("build/entry_points"))
    if m.noarch == "python":
        pkg_files = [
            fi for fi in new_files if fi not in entry_point_script_names
        ]
    else:
        pkg_files = new_files

    # the legacy noarch
    if m.get_value("build/noarch_python"):
        noarch_python.transform(m, new_files, m.config.host_prefix)
    # new way: build/noarch: python
    elif m.noarch == "python":
        noarch_python.populate_files(m, pkg_files, m.config.host_prefix,
                                     entry_point_script_names)

    current_prefix_files = utils.prefix_files(prefix=m.config.host_prefix)
    new_files = current_prefix_files - initial_prefix_files
    fix_permissions(new_files, m.config.host_prefix)

    return new_files
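
For noarch: python outputs, post_process_files keeps the entry-point scripts out of the generic file list, since those launchers are generated per platform at install time rather than shipped in the archive. A small illustration of that filter with hypothetical file names:

entry_point_script_names = ["bin/mytool"]
new_files = ["bin/mytool",
             "lib/python3.9/site-packages/mytool/__init__.py"]
pkg_files = [f for f in new_files if f not in entry_point_script_names]
# -> ['lib/python3.9/site-packages/mytool/__init__.py']
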