Example #1
File: elf.py Project: raven-os/nbuild
def _find_elfs(package, search_patterns, recursive_patterns) -> [str]:
    files = []

    with stdlib.pushd(package.wrap_cache):
        for search_pattern in search_patterns:
            for rglob in braceexpand.braceexpand(
                    search_pattern):  # Expand braces
                for rpath in glob.glob(
                        rglob,
                        recursive=recursive_patterns):  # Expand globbing

                    # We want to retain ELFs only
                    #
                    # Unfortunately, I couldn't find a better way than to try to open the file
                    # and catch any exception.
                    #
                    # "Better ask for forgiveness than permission", they say... :°

                    try:
                        with open(rpath, 'rb') as file:
                            ELFFile(
                                file
                            )  # This throws if the file isn't a valid ELF
                            files += [rpath]
                    except Exception:
                        pass  # Ignore invalid ELFs (dangling/text files in bin/ or lib/)
    return files
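For reference, a lighter-weight check is also possible by reading the 4-byte ELF magic instead of fully parsing the file. This is only a sketch of an alternative, not the approach used by nbuild:

def _is_elf(path: str) -> bool:
    # Read the first four bytes and compare them against the ELF magic number.
    try:
        with open(path, 'rb') as file:
            return file.read(4) == b'\x7fELF'
    except OSError:
        return False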
Example #2
def build(build):

    # TODO:
    #   - Include the configuration in /boot/
    #   - Symlink or rename vmlinuz to vmlinuz-X.Y.Z.

    packages = basic.build(
        configure=lambda: make('olddefconfig'),
        compile=make,
        install=install_linux,
        deplinker=None,
    )

    packages['kernel/linux'].drain(
        'usr/lib/*',
        'boot/*',
    )

    # Remove useless files
    with stdlib.pushd(packages['kernel/linux'].wrap_cache):
        stdlib.cmd(
            'find \\( -name .install -o -name ..install.cmd \\) -delete')

    # Drain documentation
    packages['kernel/linux-doc'].drain_build_cache('Documentation/*',
                                                   'usr/doc/linux/')

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['kernel/linux'].requires('raven-os/corefs')

    return packages
Example #3
def build(build):
    packages = autotools.build(
        configure=lambda: configure(
            '--with-system-expat',
            '--with-system-ffi',
            '--without-ensurepip',
            '--enable-optimizations',
        ),
        split=drain_all_with_doc,
        install=lambda: make('altinstall', f'DESTDIR={build.install_cache}'),
    )

    # Make the shared libraries executable
    with stdlib.pushd(packages['dev-lang/python'].wrap_cache):
        os.chmod(f'usr/lib64/libpython{build.major}.{build.minor}m.so', 0o0755)
        os.chmod(f'usr/lib64/libpython{build.major}.so', 0o0755)

    # Make the python -> python3 and python3 -> python3.X symlinks
    packages['dev-lang/python'].make_symlink(f'python{build.major}',
                                             'usr/bin/python')
    packages['dev-lang/python'].make_symlink(
        f'python{build.major}.{build.minor}', f'usr/bin/python{build.major}')

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['dev-lang/python'].requires('raven-os/corefs')

    return packages
Example #4
def build(build):
    packages = autotools.build(
        patch=patch_vim,
        check=check_vim,
        split=drain_all_with_doc,
    )

    # Make a symlink from vi to vim, both for the binaries and man pages
    packages['editor/vim'].make_symlink('vim', 'usr/bin/vi')
    packages['editor/vim'].make_symlink('vim.1', 'usr/share/man/man1/vi.1')

    with stdlib.pushd(packages['editor/vim'].wrap_cache):
        for path in glob.glob('usr/share/man/*/man1/vim.1'):
            packages['editor/vim'].make_symlink(
                'vim.1', f'{os.path.dirname(path)}/vi.1')

    # Symlink the documentation to a more standard place.
    packages['editor/vim-doc'].make_symlink(
        f'../vim/vim{build.major}{build.minor}/doc/', 'usr/share/doc/vim')
    packages['editor/vim-doc'].requires('editor/vim')

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['editor/vim'].requires('raven-os/corefs')

    return packages
Example #5
def patch_m4():
    build = stdlib.build.current_build()

    # Fix a bug introduced by glibc 2.28
    # http://www.linuxfromscratch.org/lfs/view/stable/chapter06/m4.html
    with stdlib.pushd(build.build_cache):
        stdlib.cmd("sed -i 's/IO_ftrylockfile/IO_EOF_SEEN/' lib/*.c")
        stdlib.cmd('echo "#define _IO_IN_BACKUP 0x100" >> lib/stdio-impl.h')
Example #6
def install_rust():
    build = stdlib.build.current_build()

    stdlib.cmd(f'''./install.sh             \
        --destdir={build.install_cache}/    \
        --prefix=/usr/                      \
    ''')

    with stdlib.pushd(build.install_cache):
        shutil.move('usr/etc/', 'etc/')
Example #7
def build(build):
    packages = autotools.build()

    # TODO FIXME Remove this when Raven is self-hosted
    with stdlib.pushd(packages['dev-libs/check'].wrap_cache):
        stdlib.cmd("sed -i '1 s/tools/usr/' usr/bin/checkmk")

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['dev-libs/check'].requires('raven-os/corefs')

    return packages
Example #8
def build(build):
    packages = autotools.build(split=drain_all_with_doc, )

    # Make it executable
    with stdlib.pushd(packages['dev-apps/gettext'].wrap_cache):
        os.chmod('usr/lib64/preloadable_libintl.so', 0o0755)

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['dev-apps/gettext'].requires('raven-os/corefs')

    return packages
Example #9
def install_gcc():
    build = stdlib.build.current_build()
    target = os.environ['TARGET']

    # Install the main components of gcc and g++
    make('install', f'DESTDIR={build.install_cache}')

    # Compatibility symlink to enable building programs with Link Time Optimization
    #stdlib.cmd(f'install -v -dm755 {build.install_cache}/usr/lib64/bfd-plugins')
    with stdlib.pushd(build.install_cache):
        os.makedirs('usr/lib64/bfd-plugins/', exist_ok=True)
        os.symlink(f'../gcc/{target}/8.2.0/liblto_plugin.so',
                   'usr/lib64/bfd-plugins/liblto_plugin.so')
Example #10
File: fetch.py Project: raven-os/nbuild
def fetch_git(
    git: str,
    tag: str = None,
    commit: str = None,
    branch: str = None,
    folder: str = '.',
    recursive: bool = True,
):
    """Download a file from an URL and ensure its integrity

    The downloaded file is put in the build cache of the current build, but a copy
    is also stored in the download cache. If :py:func:`.fetch_url` is called again
    with the same ``url`` argument, the already-downloaded file will be copied
    instead, avoiding any extra download.

    :note: Only HTTP, HTTPS and FTP protocols are supported.

    :param url: The URL pointing to the file to download.
    :param sha256: The SHA256 used to ensure the integrity of the file.
    """
    build = stdlib.build.current_build()

    if (tag is not None) + (branch is not None) + (commit is not None) > 1:
        raise ValueError(
            f"More than one parameter between tag, commit and branch were provided. Please only pick one."
        )

    if os.path.isabs(folder):
        raise ValueError(
            "The folder to operate is given as an absolute path. A relative one is expected."
        )

    if tag is None and commit is None:
        stdlib.log.elog(
            "No specific commit or tag specified -- The manifest will not produce a deterministic and reliable result."
        )

    # TODO FIXME: Use libgit instead of using shell commands.

    with stdlib.pushd(build.build_cache):
        stdlib.log.ilog(f"Cloning {git}...")
        stdlib.cmd(
            f"git clone {'--recursive' if recursive else ''} {git} {folder}")

        checkout = tag or branch or commit
        if checkout is not None:
            stdlib.log.ilog(f"Checking {checkout}...")
            stdlib.cmd(f'git checkout {checkout}')
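A minimal usage sketch, with a hypothetical repository URL and tag, and assuming the function is exposed as ``stdlib.fetch.fetch_git``:

def fetch():
    # Pin a tag so the clone produces a deterministic manifest.
    stdlib.fetch.fetch_git(
        git='https://github.com/raven-os/nest.git',  # hypothetical repository URL
        tag='v1.0.0',                                # hypothetical tag
    )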
Example #11
File: elf.py Project: raven-os/nbuild
def _fetch_elf_dependencies(package, elf_path) -> [str]:
    deps = []

    with stdlib.pushd(package.wrap_cache):
        try:
            with open(elf_path, 'rb') as file:
                elf = ELFFile(file)
                dyn = elf.get_section_by_name(".dynamic")
                if dyn is not None:
                    for tag in dyn.iter_tags():
                        if tag.entry.d_tag == 'DT_NEEDED':
                            deps += [tag.needed]
        except Exception:
            pass  # Ignore invalid ELFs (dangling/text files in bin/ or lib/)

    return deps
Example #12
def build(build):
    packages = autotools.build(
        configure=None,
        install=lambda: make('RAISE_SETFCAP=no', 'prefix=/usr',
                             f'DESTDIR={build.install_cache}/', 'install'),
    )

    # Fix some permissions
    with stdlib.pushd(packages['sys-libs/libcap'].wrap_cache):
        for lib in glob.glob('usr/lib/libcap.so.*'):
            os.chmod(lib, 0o755)

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['sys-libs/libcap'].requires('raven-os/corefs')

    return packages
Example #13
def build(build):
    packages = cargo.build(
        build=None,  # Let cargo install do the build
        check=None,  # We do not support Nest's unit test for now. (TODO FIXME)
        install=lambda: cargo.cargo_install(path='./nest-cli', ),
        split=drain_all_with_doc,
    )

    # Move all binaries to usr/bin/
    packages['raven-os/nest'].move('bin/*', 'usr/bin/')

    with stdlib.pushd(packages['raven-os/nest'].wrap_cache):
        os.makedirs('etc/nest/')
        with open('etc/nest/config.toml', 'w') as config:
            config.write(
                dedent('''\
            #
            # Raven-OS - /etc/nest/config.toml
            # Default configuration file for Nest.
            #

            repositories_order = ["beta"]

            # Stable repository (uncomment to enable)
            # [repositories.stable]
            # mirrors = ["https://stable.raven-os.org"]

            # Beta repository
            [repositories.beta]
            mirrors = ["https://beta.raven-os.org"]

            # Unstable repository (uncomment to enable)
            # [repositories.unstable]
            # mirrors = ["https://unstable.raven-os.org"]
            '''))

    # Packages that are members of `raven-os/essentials` should explicitly state
    # all of their dependencies, including indirect ones.
    packages['raven-os/nest'].requires('raven-os/corefs')

    return packages
Example #14
def split_perl():
    build = stdlib.build.current_build()
    packages = stdlib.split.system.system()

    # Drain the perl5 folder which contains most of the language's necessary files.
    packages['dev-lang/perl'].drain('usr/lib/perl5')

    # Make a couple of symlinks to put libperl.so into a more standard location
    with stdlib.pushd(f"{packages['dev-lang/perl'].wrap_cache}/usr/"):
        libperl = glob.glob(
            f'lib/perl{build.major}/{build.semver}/*/CORE/libperl.so')[0]

        packages['dev-lang/perl'].make_symlink(f'../{libperl}',
                                               'usr/lib64/libperl.so')
        packages['dev-lang/perl'].make_symlink(
            f'../{libperl}',
            f'usr/lib64/libperl.so.{build.major}.{build.minor}')
        packages['dev-lang/perl'].make_symlink(
            f'../{libperl}',
            f'usr/lib64/libperl.so.{build.major}.{build.minor}.{build.patch}')

    return packages
Example #15
def build(build):
    packages = autotools.build(
        configure=lambda: configure(
            '--with-system-expat',
            '--with-system-ffi',
            '--without-ensurepip',
            '--enable-unicode=ucs4',
            '--enable-optimizations'),
        split=drain_all_with_doc,
    )

    with stdlib.pushd(packages['dev-lang/python2'].wrap_cache):
        # Fix library permissions to be consistent with other libraries
        for lib in glob.glob(f'usr/lib64/libpython{build.major}.{build.minor}.so.*.*'):
            os.chmod(lib, 0o755)
        # Rename binaries to avoid conflicts with what was installed by Python 3
        os.rename('usr/bin/2to3', 'usr/bin/2to3-2')
        os.rename('usr/bin/idle', 'usr/bin/idle2')
        os.rename('usr/bin/pydoc', 'usr/bin/pydoc2')
        os.unlink('usr/bin/python')

    return packages
Example #16
def build(build):
    packages = distutils.build()

    with stdlib.pushd(packages['dev-python/pip'].wrap_cache):
        # Remove the default binaries
        os.remove('usr/bin/pip')
        os.remove(f'usr/bin/pip{PYTHON_MAJOR}')
        os.remove(f'usr/bin/pip{PYTHON_MAJOR}.{PYTHON_MINOR}')

        # Make the pip binary
        with open(f'usr/bin/pip{PYTHON_MAJOR}.{PYTHON_MINOR}', 'w+') as resolv:
            resolv.write(
                textwrap.dedent(f'''\
                #!/usr/bin/env python{PYTHON_MAJOR}.{PYTHON_MINOR}
                # -*- coding: utf-8 -*-

                import re
                import sys

                from pip._internal import main

                if __name__ == '__main__':
                    sys.argv[0] = re.sub(r'(-script\\.pyw?|\\.exe)?$', '', sys.argv[0])
                    sys.exit(main())
            '''))

        os.chmod(f'usr/bin/pip{PYTHON_MAJOR}.{PYTHON_MINOR}', 0o755)

        # Make the pip -> pip3 and pip3 -> pip3.X symlinks
        packages['dev-python/pip'].make_symlink(f'pip{PYTHON_MAJOR}',
                                                'usr/bin/pip')
        packages['dev-python/pip'].make_symlink(
            f'pip{PYTHON_MAJOR}.{PYTHON_MINOR}', f'usr/bin/pip{PYTHON_MAJOR}')

    packages['dev-python/pip'].requires('dev-lang/python#~3.7')
    packages['dev-python/pip'].requires('dev-python/setuptools')

    return packages
Example #17
File: basic.py Project: raven-os/nbuild
def build(
    build_folder='.',
    fetch=stdlib.fetch.fetch,
    extract=stdlib.extract.flat_extract_all,
    patch=stdlib.patch.patch_all,
    configure=None,
    compile=None,
    check=None,
    install=None,
    split=stdlib.split.system.system,
    deplinker=stdlib.deplinker.elf.elf_deplinker,
):
    """Download, build and wrap a software based on any build system.
    Use this template when there is no more appropriate exhaustive template for your use-case.

    This exhaustive template is made of 9 steps:
        * ``fetch``
        * ``extract``
        * ``patch``
        * ``configure``
        * ``compile``
        * ``check``
        * ``install``
        * ``split``
        * ``dependency linking``

    For each one of these steps, a function is called. This template simply calls each of them in the above order.
    All of these functions can be given as arguments, but each one of them has a default value that is explained below.
    If any of those functions is ``None``, the step is skipped.

    **Fetch**

        This step is used to download the source code. The default value is :py:func:`.fetch` with no argument.

    **Extract**

        This step is used to extract the downloaded source code. The default value is :py:func:`.flat_extract_all` with no argument.

    **Patch**

        This step is used to patch the downloaded source code. The default value is :py:func:`.patch_all` with no argument.

    From now on, the current working directory is changed in favor of ``build_folder`` (which defaults to ``.``).
    If the directory pointed by ``build_folder`` doesn't exist, it is created.

    This is useful for the few ``configure`` scripts that don't work if they aren't executed in a standalone directory.

    **Configure**

        This step is used to configure the source code. The default value is ``None``.

    **Compile**

        This step is used to build the source code. The default value is ``None``.

    **Check**

        This step is used to test the built binaries. The default value is ``None``.

    **Install**

        This step is used to install the software in the install cache. The default value is ``None``.

    **Split**

        This step automatically splits the output of the build into multiple packages. The default value is :py:func:`~stdlib.split.system.system`.
        Alternative splitters can be found in the :py:mod:`~stdlib.split` module.

    **Dependency Linking**

        This step automatically finds requirements for the generated packages. The default value is :py:func:`~stdlib.deplinker.elf.elf_deplinker`.
        Alternative dependency linkers can be found in the :py:mod:`~stdlib.deplinker` module.

    """
    build = stdlib.build.current_build()

    # Initialized here so the function still returns cleanly when `split` is skipped
    packages = dict()

    stdlib.log.ilog("Step 1/9: Fetch")
    if fetch is not None:
        with stdlib.log.pushlog():
            fetch()

    stdlib.log.ilog("Step 2/9: Extract")
    if extract is not None:
        with stdlib.log.pushlog():
            extract()

    stdlib.log.ilog("Step 3/9: Patch")
    if patch is not None:
        with stdlib.log.pushlog():
            patch()

    os.makedirs(build_folder, exist_ok=True)
    with stdlib.pushd(build_folder):
        stdlib.log.ilog("Step 4/9: Configure")
        if configure is not None:
            with stdlib.log.pushlog():
                configure()

        stdlib.log.ilog("Step 5/9: Compile")
        if compile is not None:
            with stdlib.log.pushlog():
                compile()

        stdlib.log.ilog("Step 6/9: Check")
        if check is not None:
            with stdlib.log.pushlog():
                check()

        stdlib.log.ilog("Step 7/9: Install")
        if install is not None:
            with stdlib.log.pushlog(), stdlib.pushenv():
                os.environ['DESTDIR'] = build.install_cache
                install()

        stdlib.log.ilog("Step 8/9: Split")
        if split is not None:
            with stdlib.log.pushlog():
                packages = split()

                if len(packages) > 0:
                    stdlib.log.ilog("The following packages were generated:")

                    with stdlib.log.pushlog():
                        for package in packages.values():
                            stdlib.log.ilog(str(package))

        stdlib.log.ilog("Step 9/9: Dependency Linking")
        if deplinker is not None:
            with stdlib.log.pushlog():
                deplinker(packages)

    return packages
Example #18
def build(build):
    corefs = stdlib.package.Package(stdlib.package.PackageID('corefs'))

    # / hierarchy
    corefs.make_keepers(
        'boot',
        'dev',
        'etc',
        'home',
        'media',
        'mnt',
        'opt',
        'proc',
        'root',
        'run',
        'srv',
        'sys',
        'tmp',
        'usr',
        'var',
    )
    corefs.make_symlink('usr/lib64', 'lib')
    corefs.make_symlink('usr/lib64', 'lib64')
    corefs.make_symlink('usr/lib32', 'lib32')
    corefs.make_symlink('usr/bin', 'bin')
    corefs.make_symlink('usr/bin', 'sbin')

    # /media hierarchy
    corefs.make_keepers(
        'media/floppy',
        'media/cdrom',
        'media/cdrecorder',
        'media/zip',
    )

    # /usr hierarchy
    corefs.make_keepers(
        'usr/bin',
        'usr/games',
        'usr/include',
        'usr/lib64',
        'usr/lib32',
        'usr/libexec',
        'usr/local',
        'usr/share',
        'usr/src',
    )
    corefs.make_symlink('lib64', 'usr/lib')
    corefs.make_symlink('bin', 'usr/sbin')
    corefs.make_symlink('../var/tmp', 'usr/tmp')

    # /usr/lib hierarchy
    corefs.make_keepers('usr/lib64/modules')

    # /usr/local hierarchy
    corefs.make_keepers(
        'usr/local/bin',
        'usr/local/etc',
        'usr/local/games',
        'usr/local/include',
        'usr/local/lib64',
        'usr/local/lib32',
        'usr/local/man',
        'usr/local/share',
        'usr/local/src',
    )
    corefs.make_symlink('lib64', 'usr/local/lib')
    corefs.make_symlink('bin', 'usr/local/sbin')

    # /usr/share hierarchy
    corefs.make_keepers(
        'usr/share',
        'usr/src',
    )

    # /var hierarchy
    corefs.make_keepers('var/tmp')
    corefs.make_symlink('../run', 'var/run')

    with stdlib.pushd(corefs.wrap_cache):

        # Change permission of /root, /tmp and /var/tmp
        os.chmod('root', 0o700)
        os.chmod('tmp', 0o1777)
        os.chmod('var/tmp', 0o1777)

        # Write a default etc/passwd
        with open('etc/passwd', 'w+') as passwd:
            passwd.write(
                dedent('''\
            root:x:0:0:root:/root:/bin/bash
            nobody:x:65534:65534:nobody:/var/empty:/bin/false
            '''))

        # Write a default etc/group
        with open('etc/group', 'w+') as group:
            group.write(
                dedent('''\
            root:x:0:
            bin:x:1:
            sys:x:2:
            kmem:x:3:
            tty:x:4:
            tape:x:5:
            daemon:x:6:
            floppy:x:7:
            disk:x:8:
            lp:x:9:
            dialout:x:10:
            audio:x:11:
            video:x:12:
            utmp:x:13:
            usb:x:14:
            wheel:x:15:
            users:x:999:
            '''))

        # Write a default etc/shells
        with open('etc/shells', 'w+') as shells:
            shells.write(
                dedent('''\
            #
            # Raven-OS - /etc/shells
            #   - Valid login shells -
            #

            /bin/bash
            /bin/csh
            /bin/dash
            /bin/esh
            /bin/fish
            /bin/ksh
            /bin/mksh
            /bin/sash
            /bin/sh
            /bin/tcsh
            /bin/zsh
            '''))

        # Write a default etc/hosts
        with open('etc/hosts', 'w+') as hosts:
            hosts.write(
                dedent('''\
            #
            # Raven-OS - /etc/hosts
            #

            # IPv4 and IPv6 localhost aliases
            127.0.0.1	localhost.localdomain localhost
            ::1		localhost.localdomain localhost

            '''))

        # Write a default etc/hostname
        with open('etc/hostname', 'w+') as hostname:
            hostname.write(dedent('''\
            raven-os
            '''))

        # Write a default etc/resolv.conf
        with open('etc/resolv.conf', 'w+') as resolv:
            resolv.write(
                dedent('''\
            #
            # Raven-OS - /etc/resolv.conf
            #

            nameserver 8.8.8.8
            nameserver 8.8.4.4
            '''))

        # Write a default etc/os-release
        with open('etc/os-release', 'w+') as resolv:
            resolv.write(
                dedent('''\
            #
            # Raven-OS - /etc/os-release
            #

            ID="raven-os"
            NAME="Raven-OS"
            HOME_URL="https://raven-os.org/"
            SUPPORT_URL="https://github.com/raven-os/iso/issues/"
            BUG_REPORT_URL="https://github.com/raven-os/iso/issues/"
            '''))

        # Write a default etc/raven-os-release
        with open('etc/raven-os-release', 'w+') as resolv:
            resolv.write(
                dedent('''\
            #
            # Raven-OS - /etc/raven-os-release
            #
            Raven-OS release 0.1.0 (Beta)
            '''))

        # Write a default etc/issue
        with open('etc/issue', 'w+') as resolv:
            resolv.write(
                dedent('''\
            #
            # Raven-OS - /etc/issue
            #
            Welcome to Raven-OS!
            '''))

    return {corefs.id.short_name(): corefs}
Example #19
File: autotools.py Project: doom/nbuild
def build(
    build_folder='.',
    fetch=stdlib.fetch.fetch,
    extract=stdlib.extract.flat_extract_all,
    patch=stdlib.patch.patch_all,
    configure=configure,
    compile=make,
    check=lambda: make('check', fail_ok=True),
    install=lambda: make('install', f'DESTDIR={stdlib.build.current_build().install_cache}'),
    split=stdlib.split.system.system,
    deplinker=stdlib.deplinker.elf.elf_deplinker,
):
    """Download, build and wrap a library based on ``autoconf`` and ``make``.

    This exhaustive template is made of 9 steps:
        * ``fetch``
        * ``extract``
        * ``patch``
        * ``configure``
        * ``compile``
        * ``check``
        * ``install``
        * ``split``
        * ``dependency linking``

    For each one of these steps, a function is called. This template simply calls each of them in the above order.
    All of these functions can be given as arguments, but each one of them has a default value that is explained below.
    If any of those functions is ``None``, the step is skipped.

    **Fetch**

        This step is used to download the source code. The default value is :py:func:`.fetch` with no argument.

    **Extract**

        This step is used to extract the downloaded source code. The default value is :py:func:`.flat_extract_all` with no argument.

    **Patch**

        This step is used to patch the downloaded source code. The default value is :py:func:`.patch_all` with no argument.

    From now on, the current working directory is changed in favor of ``build_folder`` (which defaults to ``.``).
    If the directory pointed by ``build_folder`` doesn't exist, it is created.

    This is useful for the few ``configure`` scripts that don't work if they aren't executed in a standalone directory.

    **Configure**

        This step uses the ``configure`` script to configure the source code. The default value is :py:func:`.configure` with no argument.

        When ``build_folder`` isn't ``.``, it is usually necessary to override this step with another call to :py:func:`.configure` with
        the argument ``binary='../configure'``.

    **Compile**

        This step compiles the source code. The default value is :py:func:`.make` with no argument.

    **Check**

        This step runs the unit and integration tests. The default value is :py:func:`.make` with the arguments ``'check'`` and ``fail_ok=True``.

    **Install**

        This step installs the software in the install cache. The default value is :py:func:`.make` with the arguments ``'install'`` and
        ``f'DESTDIR={stdlib.build.current_build().install_cache}'``

        If this step is overridden, the ``DESTDIR`` variable is crucial and should be reused. Otherwise, and unless the ``configure`` script has been
        configured to work without it, the installation can damage and overwrite parts of the host system.

    **Split**

        This step automatically splits the output of the build into multiple packages. The default value is :py:func:`~stdlib.split.system.system`.
        Alternative splitters can be found in the :py:mod:`~stdlib.split` module.

    **Dependency Linking**

        This step automatically finds requirements for the generated packages. The default value is :py:func:`~stdlib.deplinker.elf.elf_deplinker`.
        Alternative dependency linkers can be found in the :py:mod:`~stdlib.deplinker` module.

    """
    build = stdlib.build.current_build()

    stdlib.log.ilog("Step 1/9: Fetch")
    if fetch is not None:
        with stdlib.log.pushlog():
            fetch()

    stdlib.log.ilog("Step 2/9: Extract")
    if extract is not None:
        with stdlib.log.pushlog():
            extract()

    stdlib.log.ilog("Step 3/9: Patch")
    if patch is not None:
        with stdlib.log.pushlog():
            patch()

    packages = dict()

    os.makedirs(build_folder, exist_ok=True)
    with stdlib.pushd(build_folder):

        os.environ['DESTDIR'] = build.install_cache

        stdlib.log.ilog("Step 4/9: Configure")
        if configure is not None:
            with stdlib.log.pushlog():
                configure()

        stdlib.log.ilog("Step 5/9: Compile")
        if compile is not None:
            with stdlib.log.pushlog():
                compile()

        stdlib.log.ilog("Step 6/9: Check")
        if check is not None:
            with stdlib.log.pushlog():
                check()

        stdlib.log.ilog("Step 7/9: Install")
        if install is not None:
            with stdlib.log.pushlog():
                install()

        stdlib.log.ilog("Step 8/9: Split")
        if split is not None:
            with stdlib.log.pushlog():
                packages = split()

                if len(packages) > 0:
                    stdlib.log.ilog("The following packages were generated:")

                    with stdlib.log.pushlog():
                        for package in packages.values():
                            stdlib.log.ilog(str(package))

        stdlib.log.ilog("Step 9/9: Dependency Linking")
        if deplinker is not None:
            with stdlib.log.pushlog():
                deplinker(packages)

    return packages
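The Configure step above mentions out-of-tree builds; a minimal sketch of that case, with a hypothetical ``--disable-static`` flag, could look like this:

def build(build):
    # Configure from a separate 'build/' directory, pointing back at the
    # configure script in the source root, as described in the Configure step.
    packages = autotools.build(
        build_folder='build',
        configure=lambda: configure('--disable-static', binary='../configure'),
    )
    return packages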
Example #20
def install_glibc():
    build = stdlib.build.current_build()

    # TODO FIXME Temporary fix to avoid a harmless failure while installing glibc
    stdlib.cmd(
        "sed '/test-installation/s@$(PERL)@echo not running@' -i ../Makefile")

    # Install glibc
    make('install')

    # Install locales
    make('localedata/install-locales')

    # Compile all timezone data
    with stdlib.pushd(build.install_cache):
        tzdata = f'{build.build_cache}/tzdata/'
        zic = f'{build.install_cache}/usr/sbin/zic'

        os.makedirs('usr/share/zoneinfo/posix', exist_ok=True)
        os.makedirs('usr/share/zoneinfo/right', exist_ok=True)

        for tz in [
                'etcetera', 'southamerica', 'northamerica', 'europe', 'africa',
                'antarctica', 'asia', 'australasia', 'backward', 'pacificnew',
                'systemv'
        ]:
            stdlib.cmd(
                f'{zic} -L /dev/null -d usr/share/zoneinfo {tzdata}/{tz}')
            stdlib.cmd(
                f'{zic} -L /dev/null -d usr/share/zoneinfo/posix {tzdata}/{tz}'
            )
            stdlib.cmd(
                f'{zic} -L {tzdata}/leapseconds -d usr/share/zoneinfo/right {tzdata}/{tz}'
            )

        stdlib.cmd(f'{zic} -d usr/share/zoneinfo -p America/New_York')

        shutil.copy(f'{tzdata}/zone.tab', 'usr/share/zoneinfo/')
        shutil.copy(f'{tzdata}/zone1970.tab', 'usr/share/zoneinfo/')
        shutil.copy(f'{tzdata}/iso3166.tab', 'usr/share/zoneinfo/')

    # Setup default configuration files
    with stdlib.pushd(build.install_cache):
        # /etc/nsswitch.conf
        with open('etc/nsswitch.conf', 'w+') as conf:
            conf.write(
                dedent('''\
            #
            # Raven-OS - /etc/nsswitch.conf
            #

            passwd:         compat files
            group:          compat files
            shadow:         compat files

            hosts:          files dns
            networks:       files dns

            services:       db files
            protocols:      db files
            rpc:            db files
            ethers:         db files
            netmasks:       files
            netgroup:       files
            bootparams:     files

            automount:      files
            aliases:        files
            '''))

        # /etc/ld.so.conf
        with open('etc/ld.so.conf', 'w+') as ld_conf:
            ld_conf.write(
                dedent('''
            #
            # Raven-OS - /etc/ld.so.conf
            #

            include /etc/ld.so.conf.d/*.conf

            /usr/local/lib
            /opt/lib
            '''))
Example #21
def extract_glibc():
    stdlib.extract.flat_extract(glob.glob('glibc-*')[0])

    os.makedirs('tzdata')
    with stdlib.pushd('tzdata'):
        stdlib.extract.flat_extract(glob.glob('../tzdata*.tar*')[0])
Example #22
def build_all(
    build_folder='.',
    fetch=stdlib.fetch.fetch,
    extract=stdlib.extract.flat_extract_all,
    patch=stdlib.patch.patch_all,
    compilations=[],
    split=stdlib.split.system.system,
    deplinker=stdlib.deplinker.elf.elf_deplinker,
):
    """Download, build and wrap multiple configurations of the same software based on ``autoconf`` and ``make``.

    This exhaustive template is made of 9 steps, where some of them are repeatable:
        * ``fetch``
        * ``extract``
        * ``patch``
        * ``compilation``, which is an array containing multiple iterations of the following steps:
            * ``clean_before``
            * ``configure``
            * ``compile``
            * ``check``
            * ``install``
            * ``clean_after``
        * ``split``
        * ``dependency linking``

    For each one of these steps, a function is called. This template simply calls each of them in the above order.
    All of these functions can be given as arguments, but each one of them has a default value that is explained below.
    If any of those functions is ``None``, the step is skipped.

    **Fetch**

        This step is used to download the source code. The default value is :py:func:`.fetch` with no argument.

    **Extract**

        This step is used to extract the downloaded source code. The default value is :py:func:`.flat_extract_all` with no argument.

    **Patch**

        This step is used to patch the downloaded source code. The default value is :py:func:`.patch_all` with no argument.

    From now on, the following six steps are repeated for each item in ``compilations``. Each item must be a dictionary that may contain
    a ``clean_before``, ``configure``, ``compile``, ``check``, ``install`` and ``clean_after`` key, where each value must be a function.
    If a key is not present, the default value is taken instead.

    All of the build's caches persist between iterations. Therefore, the build folder is left dirty by any previous iteration, as are the install cache
    and any other cache. This means that it is usually wise to run ``make clean`` before compiling the package again, hence the
    ``clean_before`` and ``clean_after`` steps.

    For each iteration, the current working directory is changed in favor of ``build_folder`` (which defaults to ``.``).
    If the directory pointed by ``build_folder`` doesn't exist, it is created.

    This is useful for the few ``configure`` scripts that don't work if they aren't executed in a standalone directory.

    **Clean Before**

        This step is used to clean any cache from impurities left by the previous iteration, before configuring and compiling the source code.

    **Configure**

        This step uses the ``configure`` script to configure the source code. The default value is :py:func:`.configure` with no argument.

        When ``build_folder`` isn't ``.``, it is usually necessary to override this step with another call to :py:func:`.configure` with
        the argument ``binary='../configure'``.

    **Compile**

        This step compiles the source code. The default value is :py:func:`.make` with no argument.

    **Check**

        This step runs the unit and integration tests. The default value is :py:func:`.make` with the arguments ``'check'`` and ``fail_ok=True``.

    **Install**

        This step installs the software in the install cache. The default value is :py:func:`.make` with the arguments ``'install'`` and
        ``f'DESTDIR={stdlib.build.current_build().install_cache}'``

        If this step is overridden, the ``DESTDIR`` variable is crucial and should be reused. Otherwise, and unless the ``configure`` script has been
        configured to work without it, the installation can damage and overwrite parts of the host system.

    **Clean After**

        This step is used to clean any cache from impurities left by the current iteration, after configuring and compiling the source code.

    **Split**

        This step automatically splits the output of the build into multiple packages. The default value is :py:func:`~stdlib.split.system.system`.
        Alternative splitters can be found in the :py:mod:`~stdlib.split` module.

    **Dependency Linking**

        This step automatically finds requirements for the generated packages. The default value is :py:func:`~stdlib.deplinker.elf.elf_deplinker`.
        Alternative dependency linkers can be found in the :py:mod:`~stdlib.deplinker` module.

    """
    build = stdlib.build.current_build()

    nb_steps = 5 + len(compilations) * 6

    stdlib.log.ilog(f"Step 1/{nb_steps}: Fetch")
    if fetch is not None:
        with stdlib.log.pushlog():
            fetch()

    stdlib.log.ilog(f"Step 2/{nb_steps}: Extract")
    if extract is not None:
        with stdlib.log.pushlog():
            extract()

    stdlib.log.ilog(f"Step 3/{nb_steps}: Patch")
    if patch is not None:
        with stdlib.log.pushlog():
            patch()

    packages = dict()

    os.makedirs(build_folder, exist_ok=True)
    with stdlib.pushd(build_folder):
        for idx, compilation in enumerate(compilations):

            relative_step = 3 + idx * 6

            stdlib.log.ilog(
                f"Step {relative_step + 1}/{nb_steps}: Clean before")
            if compilation.get('clean_before') is not None:
                with stdlib.log.pushlog():
                    compilation['clean_before']()

            stdlib.log.ilog(f"Step {relative_step + 2}/{nb_steps}: Configure")
            if compilation.get('configure') is None:
                compilation['configure'] = configure
            with stdlib.log.pushlog():
                compilation['configure']()

            stdlib.log.ilog(
                f"Step {relative_step + 3}/{nb_steps}: Compilation")
            if compilation.get('compile') is None:
                compilation['compile'] = make
            with stdlib.log.pushlog():
                compilation['compile']()

            stdlib.log.ilog(f"Step {relative_step + 4}/{nb_steps}: Check")
            if compilation.get('check') is None:
                compilation['check'] = lambda: make('check', fail_ok=True)
            with stdlib.log.pushlog():
                compilation['check']()

            stdlib.log.ilog(f"Step {relative_step + 5}/{nb_steps}: Install")
            if compilation.get('install') is None:
                compilation['install'] = lambda: make(
                    'install',
                    f'DESTDIR={stdlib.build.current_build().install_cache}')
            with stdlib.log.pushlog(), stdlib.pushenv():
                os.environ['DESTDIR'] = build.install_cache
                compilation['install']()

            stdlib.log.ilog(
                f"Step {relative_step + 6}/{nb_steps}: Clean after")
            if compilation.get('clean_after') is not None:
                with stdlib.log.pushlog():
                    compilation['clean_after']()

        stdlib.log.ilog(f"Step {nb_steps-1}/{nb_steps}: Split")
        if split is not None:
            with stdlib.log.pushlog():
                packages = split()

                if len(packages) > 0:
                    stdlib.log.ilog("The following packages were generated:")

                    with stdlib.log.pushlog():
                        for package in packages.values():
                            stdlib.log.ilog(str(package))

        stdlib.log.ilog(f"Step {nb_steps}/{nb_steps}: Dependency Linking")
        if deplinker is not None:
            with stdlib.log.pushlog():
                deplinker(packages)

    return packages
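None of the manifests above call ``build_all`` directly, so here is a hedged sketch of how the ``compilations`` array might be filled, with hypothetical configure flags and assuming the template is exposed as ``autotools.build_all``; any key left out falls back to the defaults described above:

def build(build):
    packages = autotools.build_all(
        compilations=[
            {
                # First pass: hypothetical shared build, relying on the default
                # compile, check and install steps.
                'configure': lambda: configure('--enable-shared'),
                'clean_after': lambda: make('clean'),
            },
            {
                # Second pass: hypothetical static-only build.
                'configure': lambda: configure('--disable-shared', '--enable-static'),
            },
        ],
    )
    return packages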