Ejemplo n.º 1
0
def config_source(export_folder, export_source_folder, scm_sources_folder,
                  src_folder, conanfile, output, conanfile_path, reference,
                  hook_manager, cache):
    """ Implements the sources configuration when a package is going to be built in the
    local cache.

    Removes a corrupted (dirty) source folder, or any existing one when the
    recipe declares build_policy "always"; then, if no sources are present,
    recreates the folder by merging the exported files and running source().
    """

    def remove_source(raise_error=True):
        output.warn("This can take a while for big packages")
        try:
            rmdir(src_folder)
        except BaseException as exc:
            # Removal failed or was interrupted: flag the folder as corrupted
            set_dirty(src_folder)
            reason = str(exc)
            if six.PY2:
                # Windows prints some chars in latin1
                reason = str(exc).decode("latin1")
            output.error("Unable to remove source folder %s\n%s"
                         % (src_folder, reason))
            output.warn("**** Please delete it manually ****")
            if isinstance(exc, KeyboardInterrupt) or raise_error:
                raise ConanException("Unable to remove source folder")

    if is_dirty(src_folder):
        output.warn("Trying to remove corrupted source folder")
        remove_source()
    elif conanfile.build_policy_always:
        output.warn(
            "Detected build_policy 'always', trying to remove source folder")
        remove_source()

    if os.path.exists(src_folder):
        return  # Sources already in place, nothing to configure

    # No source folder: mark it dirty while it is being populated, so a
    # crash mid-way is detected on the next run
    with set_dirty_context_manager(src_folder):
        mkdir(src_folder)

        def get_sources_from_exports():
            # First of all get the exported scm sources (if auto) or clone (if fixed)
            _run_cache_scm(conanfile, scm_sources_folder, src_folder, output)
            # so self exported files have precedence over python_requires ones
            merge_directories(export_folder, src_folder)
            # Now move the export-sources to the right location
            merge_directories(export_source_folder, src_folder)

        _run_source(conanfile, conanfile_path, src_folder, hook_manager,
                    reference, cache,
                    get_sources_from_exports=get_sources_from_exports)
Ejemplo n.º 2
0
def config_source(export_folder, export_source_folder, scm_sources_folder, conanfile, output,
                  conanfile_path, reference, hook_manager, cache):
    """ Implements the sources configuration when a package is going to be built in the
    local cache:
    - remove old sources if dirty or build_policy=always
    - execute SCM logic
    - do a copy of the export and exports_sources folders to the source folder in the cache
    - run the source() recipe method
    """

    def remove_source():
        output.warn("This can take a while for big packages")
        try:
            rmdir(conanfile.folders.base_source)
        except BaseException as exc:
            reason = str(exc)
            if six.PY2:
                # Windows prints some chars in latin1
                reason = str(exc).decode("latin1")
            output.error("Unable to remove source folder %s\n%s"
                         % (conanfile.folders.base_source, reason))
            output.warn("**** Please delete it manually ****")
            raise ConanException("Unable to remove source folder")

    if is_dirty(conanfile.folders.base_source):
        output.warn("Trying to remove corrupted source folder")
        remove_source()
        # Removal succeeded (remove_source raises otherwise): clear the marker
        clean_dirty(conanfile.folders.base_source)
    elif conanfile.build_policy_always:
        output.warn("Detected build_policy 'always', trying to remove source folder")
        remove_source()

    if os.path.exists(conanfile.folders.base_source):
        return  # Sources already present, nothing else to do

    # Keep the folder flagged dirty while it is being populated
    with set_dirty_context_manager(conanfile.folders.base_source):
        mkdir(conanfile.source_folder)

        def get_sources_from_exports():
            # First of all get the exported scm sources (if auto) or clone (if fixed)
            _run_cache_scm(conanfile, scm_sources_folder, output)
            if not hasattr(conanfile, "layout"):
                # so self exported files have precedence over python_requires ones
                merge_directories(export_folder, conanfile.folders.base_source)
            # Now move the export-sources to the right location
            merge_directories(export_source_folder, conanfile.folders.base_source)

        _run_source(conanfile, conanfile_path, hook_manager, reference, cache,
                    get_sources_from_exports=get_sources_from_exports)
Ejemplo n.º 3
0
    def _handle_node_cache(self, node, keep_build,
                           processed_package_references, remotes):
        """Make the binary package for ``node`` available in the local cache.

        Depending on ``node.binary`` the package is built, downloaded, or the
        already-cached binary is reused; afterwards ``package_info()`` is
        always invoked (via ``_call_package_info``) and the cpp_info recorded.

        :param node: dependency-graph node being installed.
        :param keep_build: forwarded to ``_build_package`` when building.
        :param processed_package_references: mutable set of prefs already
               handled in this install, to avoid repeating work for the same
               binary; updated in place.
        :param remotes: remotes, forwarded to build/download helpers.
        """
        pref = node.pref
        assert pref.id, "Package-ID without value"
        assert pref.id != PACKAGE_ID_UNKNOWN, "Package-ID error: %s" % str(
            pref)
        conanfile = node.conanfile
        output = conanfile.output

        layout = self._cache.package_layout(pref.ref, conanfile.short_paths)
        package_folder = layout.package(pref)

        # Hold the package lock for the whole operation, including the
        # package_info() call at the end
        with layout.package_lock(pref):
            if pref not in processed_package_references:
                processed_package_references.add(pref)
                if node.binary == BINARY_BUILD:
                    assert node.prev is None, "PREV for %s to be built should be None" % str(
                        pref)
                    # Flag the folder dirty during the build; the marker is
                    # cleared only if _build_package returns without raising
                    with set_dirty_context_manager(package_folder):
                        pref = self._build_package(node, output, keep_build,
                                                   remotes)
                    assert node.prev, "Node PREV shouldn't be empty"
                    assert node.pref.revision, "Node PREF revision shouldn't be empty"
                    assert pref.revision is not None, "PREV for %s to be built is None" % str(
                        pref)
                elif node.binary in (BINARY_UPDATE, BINARY_DOWNLOAD):
                    # this can happen after a re-evaluation of packageID with Package_ID_unknown
                    self._download_pkg(layout, node.pref, node)
                elif node.binary == BINARY_CACHE:
                    assert node.prev, "PREV for %s is None" % str(pref)
                    output.success('Already installed!')
                    log_package_got_from_local_cache(pref)
                    self._recorder.package_fetched_from_cache(pref)

            # Call the info method
            self._call_package_info(conanfile, package_folder, ref=pref.ref)
            self._recorder.package_cpp_info(pref, conanfile.cpp_info)
Ejemplo n.º 4
0
def compress_files(files, symlinks, name, dest_dir, output=None):
    """Create a gzip-compressed tarball ``dest_dir/name`` from the given files.

    :param files: dict mapping archive entry name -> absolute path on disk.
    :param symlinks: dict mapping archive entry name -> link target.
    :param name: file name of the resulting tarball.
    :param dest_dir: directory where the tarball is written.
    :param output: optional output used for progress reporting.
    :return: path of the created tarball.
    """
    start = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    # The dirty marker detects a partially written tarball after a crash
    with set_dirty_context_manager(tgz_path), \
            open(tgz_path, "wb") as tgz_handle:
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)

        # Symlinks go first, as zero-size SYMTYPE entries
        for link_name, target in sorted(symlinks.items()):
            entry = tarfile.TarInfo(name=link_name)
            entry.type = tarfile.SYMTYPE
            entry.linkname = target
            entry.size = 0  # A symlink shouldn't have size
            tgz.addfile(tarinfo=entry)

        # Strip group/other write permission bits from archived files
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        with progress_bar.iterate_list_with_progress(
                sorted(files.items()), output,
                "Compressing %s" % name) as pg_file_list:
            for entry_name, abs_path in pg_file_list:
                entry = tarfile.TarInfo(name=entry_name)
                entry.size = os.stat(abs_path).st_size
                entry.mode = os.stat(abs_path).st_mode & mask
                if os.path.islink(abs_path):
                    entry.type = tarfile.SYMTYPE
                    entry.size = 0  # A symlink shouldn't have size
                    entry.linkname = os.readlink(abs_path)  # @UndefinedVariable
                    tgz.addfile(tarinfo=entry)
                else:
                    with open(abs_path, 'rb') as file_handler:
                        tgz.addfile(tarinfo=entry, fileobj=file_handler)
        tgz.close()

    duration = time.time() - start
    log_compressed_files(files, duration, tgz_path)

    return tgz_path
Ejemplo n.º 5
0
def compress_files(files, symlinks, name, dest_dir, output=None):
    """Create a gzip-compressed tarball ``dest_dir/name`` from the given files,
    reporting progress either as a tqdm bar (terminal) or a textual
    ``[====]`` bar (non-terminal).

    :param files: dict mapping archive entry name -> absolute path on disk.
    :param symlinks: dict mapping archive entry name -> link target.
    :param name: file name of the resulting tarball.
    :param dest_dir: directory where the tarball is written.
    :param output: optional output stream; progress is shown only when it is
           truthy and there is more than one file.
    :return: path of the created tarball.
    """
    t1 = time.time()
    # FIXME, better write to disk sequentially and not keep tgz contents in memory
    tgz_path = os.path.join(dest_dir, name)
    # The dirty marker detects a partially written tarball after a crash
    with set_dirty_context_manager(tgz_path), open(tgz_path,
                                                   "wb") as tgz_handle:
        tgz = gzopen_without_timestamps(name, mode="w", fileobj=tgz_handle)

        # Symlinks go first, as zero-size SYMTYPE entries
        for filename, dest in sorted(symlinks.items()):
            info = tarfile.TarInfo(name=filename)
            info.type = tarfile.SYMTYPE
            info.linkname = dest
            info.size = 0  # A symlink shouldn't have size
            tgz.addfile(tarinfo=info)

        # Strip group/other write permission bits from archived files
        mask = ~(stat.S_IWOTH | stat.S_IWGRP)
        # Progress state: 'last_progress' tracks the units already printed so
        # the non-terminal bar only writes when something changed
        i_file = 0
        n_files = len(files)
        last_progress = None
        if output and n_files > 1 and not output.is_terminal:
            output.write("[")
        elif output and n_files > 1 and output.is_terminal:
            # NOTE: this local name shadows any module-level "progress_bar"
            progress_bar = tqdm(total=len(files),
                                desc="Compressing %s" % name,
                                unit="files",
                                leave=True,
                                dynamic_ncols=False,
                                ascii=True,
                                file=output)

        for filename, abs_path in sorted(files.items()):
            info = tarfile.TarInfo(name=filename)
            info.size = os.stat(abs_path).st_size
            info.mode = os.stat(abs_path).st_mode & mask
            if os.path.islink(abs_path):
                info.type = tarfile.SYMTYPE
                info.size = 0  # A symlink shouldn't have size
                info.linkname = os.readlink(abs_path)  # @UndefinedVariable
                tgz.addfile(tarinfo=info)
            else:
                with open(abs_path, 'rb') as file_handler:
                    tgz.addfile(tarinfo=info, fileobj=file_handler)
            if output and n_files > 1:
                i_file = i_file + 1
                # Progress scaled to a 50-unit wide bar
                units = min(50, int(50 * i_file / n_files))
                if last_progress != units:  # Avoid screen refresh if nothing has change
                    if not output.is_terminal:
                        output.write('=' * (units - (last_progress or 0)))
                    last_progress = units
                if output.is_terminal:
                    progress_bar.update()

        if output and n_files > 1:
            if output.is_terminal:
                progress_bar.close()
            else:
                output.writeln("]")
        tgz.close()

    duration = time.time() - t1
    log_compressed_files(files, duration, tgz_path)

    return tgz_path
Ejemplo n.º 6
0
def export_pkg(app, recorder, full_ref, source_folder, build_folder,
               package_folder, install_folder, graph_info, force, remotes):
    """Create a binary package in the cache from locally available artifacts
    (the ``conan export-pkg`` flow), without building it in the cache.

    Loads a dependency graph for the recipe, computes the package-id, then
    either copies ``package_folder`` directly or runs the recipe ``package()``
    method over the local source/build folders into the cache package folder.

    :param app: application object providing cache, output, hooks and graph manager.
    :param recorder: action recorder, notified of the exported package.
    :param full_ref: reference (possibly with revision) of the recipe.
    :param source_folder: local source folder, used when packaging via package().
    :param build_folder: local build folder, used when packaging via package().
    :param package_folder: pre-packaged local folder; if set, it is exported as-is.
    :param install_folder: folder with deps info files to load, if existing.
    :param graph_info: graph configuration (profiles, options, lockfile).
    :param force: overwrite an existing binary package instead of failing.
    :param remotes: remotes used for graph resolution.
    :raises ConanException: if the recipe does not exist, or the package
            already exists and ``force`` is False.
    """
    ref = full_ref.copy_clear_rev()
    cache, output, hook_manager = app.cache, app.out, app.hook_manager
    graph_manager = app.graph_manager
    conan_file_path = cache.package_layout(ref).conanfile()
    if not os.path.exists(conan_file_path):
        raise ConanException("Package recipe '%s' does not exist" % str(ref))

    # The graph has to be loaded with build_mode=[ref.name], so that node is not tried
    # to be downloaded from remotes
    # passing here the create_reference=ref argument is useful so the recipe is in "develop",
    # because the "package()" method is in develop=True already
    deps_graph = graph_manager.load_graph(ref,
                                          ref,
                                          graph_info=graph_info,
                                          build_mode=[ref.name],
                                          check_updates=False,
                                          update=False,
                                          remotes=remotes,
                                          recorder=recorder,
                                          apply_build_requires=False)
    # this is a bit tricky, but works. The root (virtual), has only 1 neighbor,
    # which is the exported pkg
    nodes = deps_graph.root.neighbors()
    pkg_node = nodes[0]
    conanfile = pkg_node.conanfile

    def _init_conanfile_infos():
        # Propagate the names of the non-skipped dependencies into the
        # conanfile env info
        node_order = [
            n for n in pkg_node.public_closure if n.binary != BINARY_SKIP
        ]
        subtree_libnames = [node.ref.name for node in node_order]
        add_env_conaninfo(conanfile, subtree_libnames)

    _init_conanfile_infos()
    from conans.client.conan_api import existing_info_files
    if install_folder and existing_info_files(install_folder):
        load_deps_info(install_folder, conanfile, required=True)
    package_id = pkg_node.package_id
    output.info("Packaging to %s" % package_id)
    pref = PackageReference(ref, package_id)
    layout = cache.package_layout(ref, short_paths=conanfile.short_paths)
    dest_package_folder = layout.package(pref)

    if os.path.exists(dest_package_folder):
        if force:
            rmdir(dest_package_folder)
        else:
            raise ConanException(
                "Package already exists. Please use --force, -f to "
                "overwrite it")

    recipe_hash = layout.recipe_manifest().summary_hash
    conanfile.info.recipe_hash = recipe_hash
    conanfile.develop = True
    # Keep the destination flagged dirty while packaging, so a crash leaves a
    # detectable corrupted package instead of a silently incomplete one
    with set_dirty_context_manager(dest_package_folder):
        if package_folder:
            # Export the user's pre-built package folder as-is
            prev = packager.export_pkg(conanfile, package_id, package_folder,
                                       dest_package_folder, hook_manager,
                                       conan_file_path, ref)
        else:
            # Run the recipe package() method over the local folders
            prev = run_package_method(conanfile,
                                      package_id,
                                      source_folder,
                                      build_folder,
                                      dest_package_folder,
                                      install_folder,
                                      hook_manager,
                                      conan_file_path,
                                      ref,
                                      local=True)

    packager.update_package_metadata(prev, layout, package_id,
                                     full_ref.revision)
    pref = PackageReference(pref.ref, pref.id, prev)
    if graph_info.graph_lock:
        # after the package has been created we need to update the node PREV
        pkg_node.prev = pref.revision
        graph_info.graph_lock.update_check_graph(deps_graph, output)
    recorder.package_exported(pref)