Example #1
def set_up_lxc_net(worker, subnet):
    with TemporaryDirectory(prefix='vectis-lxc-') as tmp:
        with AtomicWriter(os.path.join(tmp, 'lxc-net')) as writer:
            writer.write(
                textwrap.dedent('''\
            USE_LXC_BRIDGE="true"
            LXC_BRIDGE="lxcbr0"
            LXC_ADDR="{subnet}.1"
            LXC_NETMASK="255.255.255.0"
            LXC_NETWORK="{subnet}.0/24"
            LXC_DHCP_RANGE="{subnet}.2,{subnet}.254"
            LXC_DHCP_MAX="253"
            LXC_DHCP_CONFILE=""
            LXC_DOMAIN=""
            ''').format(subnet=subnet))
        worker.copy_to_guest(os.path.join(tmp, 'lxc-net'),
                             '/etc/default/lxc-net')

        with AtomicWriter(os.path.join(tmp, 'default.conf')) as writer:
            writer.write(
                textwrap.dedent('''\
            lxc.network.type = veth
            lxc.network.link = lxcbr0
            lxc.network.flags = up
            lxc.network.hwaddr = 00:16:3e:xx:xx:xx
            '''))
        worker.copy_to_guest(os.path.join(tmp, 'default.conf'),
                             '/etc/lxc/default.conf')

    worker.check_call(['systemctl', 'enable', 'lxc-net'])
    worker.check_call(['systemctl', 'stop', 'lxc-net'])
    worker.check_call(['systemctl', 'start', 'lxc-net'])
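
Every example on this page funnels its generated configuration through AtomicWriter before copying it to the guest. Its implementation is not shown here; a minimal sketch of the pattern it presumably follows (write to a temporary file next to the destination, then atomically replace it, so a crash never leaves a half-written file) might look like this. The class name matches the examples, but the details below are assumptions for illustration, not the project's actual code:

import os
import tempfile


class AtomicWriter:
    """Illustrative sketch only: write to a temp file, rename into place."""

    def __init__(self, path, mode=0o644):
        self.path = path
        self.mode = mode
        self._file = None
        self._tmp_path = None

    def __enter__(self):
        # Create the temporary file alongside the destination so that
        # os.replace() stays on the same filesystem and remains atomic.
        fd, self._tmp_path = tempfile.mkstemp(
            dir=os.path.dirname(self.path) or '.', prefix='.tmp-')
        os.chmod(self._tmp_path, self.mode)
        self._file = os.fdopen(fd, 'w')
        return self._file

    def __exit__(self, exc_type, exc_value, traceback):
        self._file.close()
        if exc_type is None:
            os.replace(self._tmp_path, self.path)  # atomic on POSIX
        else:
            os.unlink(self._tmp_path)              # discard partial output
        return False
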
Example #2
    def set_up_apt(self):
        tarball_in_guest = self.worker.make_file_available(self.tarball,
                                                           cache=True)

        tmp = TemporaryDirectory(prefix='vectis-worker-')
        tmp = self.stack.enter_context(tmp)
        sources_list = os.path.join(tmp, 'sources.list')

        with AtomicWriter(sources_list) as writer:
            self.write_sources_list(writer)
        self.worker.check_call(['mkdir', '-p', '/etc/schroot/sources.list.d'])
        self.worker.copy_to_guest(
            sources_list, '/etc/schroot/sources.list.d/{}'.format(self.chroot))

        with AtomicWriter(os.path.join(tmp, 'sbuild.conf')) as writer:
            writer.write(
                textwrap.dedent('''
            [{chroot}]
            type=file
            description=An autobuilder
            file={tarball_in_guest}
            groups=root,sbuild
            root-groups=root,sbuild
            profile=sbuild
            ''').format(chroot=self.chroot, tarball_in_guest=tarball_in_guest))
        self.worker.copy_to_guest(
            os.path.join(tmp, 'sbuild.conf'),
            '/etc/schroot/chroot.d/{}'.format(self.chroot))

        with AtomicWriter(os.path.join(tmp, '60vectis-sources')) as writer:
            writer.write(
                textwrap.dedent('''\
            #!/bin/sh
            set -e
            set -u
            if [ $1 = setup-start ] || [ $1 = setup-recover ]; then
                echo "$0: Setting up ${CHROOT_ALIAS}" >&2

                if [ -f /etc/schroot/sources.list.d/${CHROOT_ALIAS} ]; then
                    echo "$0: Copying /etc/schroot/sources.list.d/${CHROOT_ALIAS} into ${CHROOT_PATH}" >&2
                    cp /etc/schroot/sources.list.d/${CHROOT_ALIAS} ${CHROOT_PATH}/etc/apt/sources.list
                fi
                if [ -d /etc/schroot/apt-keys.d/${CHROOT_ALIAS} ]; then
                    echo "$0: Copying /etc/schroot/apt-keys.d/${CHROOT_ALIAS}/ into ${CHROOT_PATH}" >&2
                    cp /etc/schroot/apt-keys.d/${CHROOT_ALIAS}/* ${CHROOT_PATH}/etc/apt/trusted.gpg.d/
                fi
            fi
            '''))
        self.worker.copy_to_guest(os.path.join(tmp, '60vectis-sources'),
                                  '/etc/schroot/setup.d/60vectis-sources')
        self.worker.check_call(
            ['chmod', '0755', '/etc/schroot/setup.d/60vectis-sources'])
        self.install_apt_keys()
Example #3
    def set_up_apt(self, suite, components=()):
        with TemporaryDirectory() as tmp:
            with AtomicWriter(os.path.join(tmp, 'sources.list')) as writer:
                for ancestor in suite.hierarchy:
                    if components:
                        filtered_components = (set(components)
                                               & set(ancestor.all_components))
                    else:
                        filtered_components = ancestor.components

                    writer.write(
                        textwrap.dedent('''
                    deb {mirror} {suite} {components}
                    deb-src {mirror} {suite} {components}
                    ''').format(
                            components=' '.join(filtered_components),
                            mirror=ancestor.mirror,
                            suite=ancestor.apt_suite,
                        ))

                    if ancestor.apt_key is not None:
                        self.copy_to_guest(
                            ancestor.apt_key, '/etc/apt/trusted.gpg.d/' +
                            os.path.basename(ancestor.apt_key))

            self.copy_to_guest(os.path.join(tmp, 'sources.list'),
                               '/etc/apt/sources.list')
            self.check_call([
                'env',
                'DEBIAN_FRONTEND=noninteractive',
                'apt-get',
                '-y',
                'update',
            ])
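
For reference, the stanza that each level of the suite hierarchy contributes can be rendered straight from the template above. The mirror, suite and component values in this snippet are made-up placeholders, used only to show the shape of the generated sources.list:

import textwrap

# Placeholder values, for illustration only
stanza = textwrap.dedent('''
deb {mirror} {suite} {components}
deb-src {mirror} {suite} {components}
''').format(
    components='main contrib',
    mirror='http://deb.debian.org/debian',
    suite='sid',
)
print(stanza, end='')
# After a leading blank line, this prints:
#   deb http://deb.debian.org/debian sid main contrib
#   deb-src http://deb.debian.org/debian sid main contrib
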
Example #4
    def set_up_apt(self):
        tmp = TemporaryDirectory(prefix='vectis-worker-')
        tmp = self.stack.enter_context(tmp)
        self.sources_list = os.path.join(tmp, 'sources.list')

        with AtomicWriter(self.sources_list) as writer:
            self.write_sources_list(writer)

        sources_list = self.worker.make_file_available(self.sources_list)
        self.argv.append('--copy={}:{}'.format(sources_list,
                                               '/etc/apt/sources.list'))
        self.install_apt_keys()
Example #5
    def set_up_apt(self):
        logger.info('Configuring apt in %r for %s', self, self.suite)

        with TemporaryDirectory(prefix='vectis-worker-') as tmp:
            sources_list = os.path.join(tmp, 'sources.list')

            with AtomicWriter(sources_list) as writer:
                self.write_sources_list(writer)

            self.copy_to_guest(sources_list, '/etc/apt/sources.list')

        self.install_apt_keys()

        if self.apt_update:
            self.check_call([
                'env', 'DEBIAN_FRONTEND=noninteractive',
                'apt-get', '-y', 'update',
            ])
Example #6
def _run(args, buildables, worker):
    logger.info('Installing sbuild')
    worker.set_up_apt(args.worker_suite)
    worker.check_call([
        'apt-get',
        '-y',
        '--no-install-recommends',
        'install',
        'python3',
        'sbuild',
        'schroot',
    ])

    for buildable in buildables:
        logger.info('Processing: %s', buildable)

        buildable.copy_source_to(worker)

        if buildable.suite == 'UNRELEASED':
            suite = args.vendor.get_suite(args.vendor.default_suite)
        else:
            suite = args.vendor.get_suite(buildable.suite)

        dpkg_buildpackage_options = get_dpkg_buildpackage_options(args, suite)
        dpkg_source_options = get_dpkg_source_options(args)

        def new_build(arch, output_builds=args.output_builds):
            return Build(buildable,
                         arch,
                         worker,
                         components=args.components,
                         extra_repositories=args._extra_repository,
                         dpkg_buildpackage_options=dpkg_buildpackage_options,
                         dpkg_source_options=dpkg_source_options,
                         output_builds=output_builds,
                         storage=args.storage,
                         suite=suite)

        if args._rebuild_source:
            new_build('source').sbuild()
        elif buildable.source_from_archive:
            # We need to get some information from the .dsc, which we do by
            # building one and throwing it away.
            new_build('source', output_builds=None).sbuild()
        elif buildable.dsc is None:
            # We're building from a directory; get a .dsc
            new_build('source').sbuild()

        if not args._source_only:
            buildable.select_archs(worker.dpkg_architecture, args._archs,
                                   args._indep, args.sbuild_together)

            for arch in buildable.archs:
                new_build(arch).sbuild()

        if buildable.sourceful_changes_name:
            c = os.path.join(
                args.output_builds,
                '{}_source.changes'.format(buildable.product_prefix))
            if 'source' not in buildable.changes_produced:
                with AtomicWriter(c) as writer:
                    subprocess.check_call([
                        'mergechanges',
                        '--source',
                        buildable.sourceful_changes_name,
                        buildable.sourceful_changes_name,
                    ],
                                          stdout=writer)

            buildable.merged_changes['source'] = c

        if ('all' in buildable.changes_produced
                and 'source' in buildable.merged_changes):
            c = os.path.join(
                args.output_builds,
                '{}_source+all.changes'.format(buildable.product_prefix))
            buildable.merged_changes['source+all'] = c
            with AtomicWriter(c) as writer:
                subprocess.check_call([
                    'mergechanges',
                    buildable.changes_produced['all'],
                    buildable.merged_changes['source'],
                ],
                                      stdout=writer)

        c = os.path.join(args.output_builds,
                         '{}_binary.changes'.format(buildable.product_prefix))

        binary_changes = []
        for k, v in buildable.changes_produced.items():
            if k != 'source':
                binary_changes.append(v)

        if len(binary_changes) > 1:
            with AtomicWriter(c) as writer:
                subprocess.check_call(['mergechanges'] + binary_changes,
                                      stdout=writer)
            buildable.merged_changes['binary'] = c
        elif len(binary_changes) == 1:
            shutil.copy(binary_changes[0], c)
            buildable.merged_changes['binary'] = c
        # else it was source-only: no binary changes

        if ('source' in buildable.merged_changes
                and 'binary' in buildable.merged_changes):
            c = os.path.join(
                args.output_builds,
                '{}_source+binary.changes'.format(buildable.product_prefix))
            buildable.merged_changes['source+binary'] = c

            with AtomicWriter(c) as writer:
                subprocess.check_call([
                    'mergechanges',
                    buildable.merged_changes['source'],
                    buildable.merged_changes['binary'],
                ],
                                      stdout=writer)

    for buildable in buildables:
        if 'source' in buildable.merged_changes:
            source = buildable.merged_changes['source']
        elif buildable.source_from_archive:
            source = buildable.source_package
        else:
            logger.warning('Unable to run autopkgtest on %s',
                           buildable.buildable)
            continue

        # TODO: backends other than qemu
        image = args.autopkgtest_qemu_image

        if args.autopkgtest and image and os.path.exists(image):
            # Run this in the host system, to avoid nested virtualization.
            status = subprocess.call([
                'autopkgtest',
                '--apt-upgrade',
                '--no-built-binaries',
                # TODO: --output-dir
                # TODO: --setup-commands
                buildable.merged_changes['binary'],
                source,
                '--',
                'qemu',
                args.autopkgtest_qemu_image
            ])

            if (status & ~2) != 0:
                logger.error('autopkgtest failed: status %d', status)

    for buildable in buildables:
        logger.info(
            'Built changes files from %s:\n\t%s',
            buildable,
            '\n\t'.join(sorted(buildable.changes_produced.values())),
        )

        logger.info(
            'Build logs from %s:\n\t%s',
            buildable,
            '\n\t'.join(sorted(buildable.logs.values())),
        )

        # Run lintian near the end for better visibility
        for x in 'source+binary', 'binary', 'source':
            if x in buildable.merged_changes:
                subprocess.call(
                    ['lintian', '-I', '-i', buildable.merged_changes[x]])

                reprepro_suite = args._reprepro_suite

                if reprepro_suite is None:
                    reprepro_suite = buildable.nominal_suite

                if args._reprepro_dir:
                    subprocess.call([
                        'reprepro', '-b', args._reprepro_dir, 'removesrc',
                        str(reprepro_suite), buildable.source_package
                    ])
                    subprocess.call([
                        'reprepro', '--ignore=wrongdistribution',
                        '--ignore=missingfile', '-b', args._reprepro_dir,
                        'include',
                        str(reprepro_suite),
                        os.path.join(args.output_builds,
                                     buildable.merged_changes[x])
                    ])

                break
Example #7
    def sbuild(self):
        self.worker.check_call([
            'install', '-d', '-m755', '-osbuild', '-gsbuild',
            '{}/out'.format(self.worker.scratch)
        ])

        sbuild_version = Version(
            self.worker.check_output(
                ['dpkg-query', '-W', '-f${Version}', 'sbuild'],
                universal_newlines=True).rstrip('\n'))

        logger.info('Building architecture: %s', self.arch)

        if self.arch in ('all', 'source'):
            logger.info('(on %s)', self.worker.dpkg_architecture)
            use_arch = self.worker.dpkg_architecture
        else:
            use_arch = self.arch

        hierarchy = self.suite.hierarchy

        sbuild_tarball = ('sbuild-{vendor}-{base}-{arch}.tar.gz'.format(
            arch=use_arch,
            vendor=self.buildable.vendor,
            base=hierarchy[-1],
        ))

        self.worker.copy_to_guest(os.path.join(self.storage, sbuild_tarball),
                                  '{}/in/{}'.format(self.worker.scratch,
                                                    sbuild_tarball),
                                  cache=True)

        chroot = '{base}-{arch}-sbuild'.format(base=hierarchy[-1],
                                               arch=use_arch)

        with TemporaryDirectory() as tmp:
            with AtomicWriter(os.path.join(tmp, 'sbuild.conf')) as writer:
                writer.write(
                    textwrap.dedent('''
                [{chroot}]
                type=file
                description=An autobuilder
                file={scratch}/in/{sbuild_tarball}
                groups=root,sbuild
                root-groups=root,sbuild
                profile=sbuild
                ''').format(chroot=chroot,
                            sbuild_tarball=sbuild_tarball,
                            scratch=self.worker.scratch))
            self.worker.copy_to_guest(
                os.path.join(tmp, 'sbuild.conf'),
                '/etc/schroot/chroot.d/{}'.format(chroot))

        # Backwards compatibility goo for Debian jessie buildd backport:
        # it can't do "sbuild hello", only "sbuild hello_2.10-1"
        if (self.buildable.source_from_archive
                and self.buildable.version is None
                and sbuild_version < Version('0.69.0')):
            lines = self.worker.check_output(
                [
                    'schroot',
                    '-c',
                    chroot,
                    '--',
                    'sh',
                    '-c',
                    'apt-get update >&2 && '
                    'apt-cache showsrc --only-source "$1" | '
                    'sed -ne "s/^Version: *//p"',
                    'sh',  # argv[0]
                    self.buildable.source_package
                ],
                universal_newlines=True).strip().splitlines()
            self.buildable.version = sorted(map(Version, lines))[-1]
            self.buildable.buildable = '{}_{}'.format(
                self.buildable.source_package,
                self.buildable.version,
            )

        argv = [
            self.worker.command_wrapper,
            '--chdir',
            '{}/out'.format(self.worker.scratch),
            '--',
            'runuser',
            '-u',
            'sbuild',
            '--',
            'sbuild',
            '-c',
            chroot,
            '-d',
            str(self.buildable.nominal_suite),
            '--no-run-lintian',
        ]

        for x in self.dpkg_buildpackage_options:
            argv.append('--debbuildopt=' + x)

        for x in self.dpkg_source_options:
            argv.append('--dpkg-source-opt=' + x)

        for child in hierarchy[:-1]:
            argv.append('--extra-repository')
            argv.append('deb {} {} {}'.format(
                child.mirror, child.apt_suite, ' '.join(
                    set(self.components or child.components)
                    & child.all_components)))

            if child.sbuild_resolver:
                argv.extend(child.sbuild_resolver)

        for x in self.extra_repositories:
            argv.append('--extra-repository')
            argv.append(x)

        if self.arch == 'all':
            logger.info('Architecture: all')
            argv.append('-A')

            # Backwards compatibility goo for Debian jessie buildd backport
            if sbuild_version < Version('0.69.0'):
                argv.append('--arch-all-only')
            else:
                argv.append('--no-arch-any')
        elif self.arch == self.buildable.together_with:
            logger.info('Architecture: %s + all', self.arch)
            argv.append('-A')
            argv.append('--arch')
            argv.append(self.arch)
        elif self.arch == 'source':
            logger.info('Source-only')

            # Backwards compatibility goo for Debian jessie buildd backport
            if sbuild_version < Version('0.69.0'):
                # If we only build 'all', and we don't build 'all',
                # then logically we build nothing (except source).
                argv.append('--arch-all-only')
                argv.append('--no-arch-all')
                # Urgh. This sbuild expects to find foo_1_amd64.changes
                # even for a source-only build (because it doesn't really
                # support source-only builds), so we have to cheat.
                # sbuild splits the command on spaces so we need to have
                # a one-liner that doesn't contain embedded whitespace.
                # Luckily, Perl can be written as line-noise.
                argv.append('--finished-build-commands=perl -e ' +
                            '$arch=qx(dpkg\\x20--print-architecture);' +
                            'chomp($arch);' + 'chdir(shift);' +
                            'foreach(glob("../*_source.changes")){' +
                            '$orig=$_;' +
                            's/_source\\.changes$/_${arch}.changes/;' +
                            'print("Renaming\\x20$orig\\x20to\\x20$_\\n");' +
                            'rename($orig,$_)||die("$!");' + '}' + ' %p')
            else:
                argv.append('--no-arch-any')

            argv.append('--source')
        else:
            logger.info('Architecture: %s only', self.arch)
            argv.append('--arch')
            argv.append(self.arch)

        if self.buildable.dsc_name is not None:
            if 'source' in self.buildable.changes_produced:
                argv.append('{}/out/{}'.format(
                    self.worker.scratch,
                    os.path.basename(self.buildable.dsc_name)))
            else:
                argv.append('{}/in/{}'.format(
                    self.worker.scratch,
                    os.path.basename(self.buildable.dsc_name)))
        elif self.buildable.source_from_archive:
            argv.append(self.buildable.buildable)
        else:
            # Build a clean source package as a side-effect of the first
            # build (in practice this will be the 'source' build).
            if '--source' not in argv:
                argv.append('--source')

            # jessie sbuild doesn't support --no-clean-source so build
            # the temporary source package ourselves.
            self.worker.check_call([
                self.worker.command_wrapper, '--chdir',
                '{}/in/{}_source'.format(self.worker.scratch,
                                         self.buildable.product_prefix), '--',
                'dpkg-source', '-b', '.'
            ])

            argv.append('{}/in/{}.dsc'.format(self.worker.scratch,
                                              self.buildable.product_prefix))

        logger.info('Running %r', argv)
        try:
            self.worker.check_call(argv)
        finally:
            # Note that we mix use_arch and arch here: an Architecture: all
            # build produces foo_1.2_amd64.build, which we rename.
            # We also check for foo_amd64.build because
            # that's what comes out if we do "vectis sbuild --suite=sid hello".
            for prefix in (self.buildable.source_package,
                           self.buildable.product_prefix):
                product = '{}/out/{}_{}.build'.format(self.worker.scratch,
                                                      prefix, use_arch)
                product = self.worker.check_output(
                    ['readlink', '-f', product],
                    universal_newlines=True).rstrip('\n')

                if (self.worker.call(['test', '-e', product]) == 0
                        and self.output_builds is not None):
                    logger.info('Copying %s back to host as %s_%s.build...',
                                product, self.buildable.product_prefix,
                                self.arch)
                    copied_back = os.path.join(
                        self.output_builds, '{}_{}_{}.build'.format(
                            self.buildable.product_prefix, self.arch,
                            time.strftime('%Y%m%dt%H%M%S', time.gmtime())))
                    self.worker.copy_to_host(product, copied_back)
                    self.buildable.logs[self.arch] = copied_back

                    symlink = os.path.join(
                        self.output_builds,
                        '{}_{}.build'.format(self.buildable.product_prefix,
                                             self.arch))
                    try:
                        os.remove(symlink)
                    except FileNotFoundError:
                        pass

                    os.symlink(os.path.abspath(copied_back), symlink)
                    break
            else:
                logger.warning('Did not find build log at %s', product)
                logger.warning(
                    'Possible build logs:\n%s',
                    self.worker.check_call([
                        'sh',
                        '-c',
                        'cd "$1"; ls -l *.build || :',
                        'sh',  # argv[0]
                        self.worker.scratch
                    ]))

        if self.arch == 'source' and self.buildable.source_from_archive:
            dscs = self.worker.check_output(
                [
                    'sh',
                    '-c',
                    'exec ls "$1"/out/*.dsc',
                    'sh',  # argv[0]
                    self.worker.scratch
                ],
                universal_newlines=True)

            dscs = dscs.splitlines()
            if len(dscs) != 1:
                raise CannotHappen('sbuild --source produced more than one '
                                   '.dsc file from {!r}'.format(
                                       self.buildable))

            product = dscs[0]

            with TemporaryDirectory() as tmp:
                copied_back = os.path.join(
                    tmp, '{}.dsc'.format(self.buildable.buildable))
                self.worker.copy_to_host(product, copied_back)

                self.buildable.dsc = Dsc(open(copied_back))
                self.buildable.source_package = self.buildable.dsc['source']
                self.buildable.version = Version(self.buildable.dsc['version'])
                self.buildable.arch_wildcards = set(
                    self.buildable.dsc['architecture'].split())
                self.buildable.binary_packages = [
                    p.strip() for p in self.buildable.dsc['binary'].split(',')
                ]

        if self.arch == 'source' and self.output_builds is not None:
            # Make sure the orig.tar.* are in the out directory, because
            # we will be building from the rebuilt source in future
            self.worker.check_call([
                'sh',
                '-c',
                'ln -nsf "$1"/in/*.orig.tar.* "$1"/out/',
                'sh',  # argv[0]
                self.worker.scratch
            ])

        if self.output_builds is None:
            return

        for product_arch in (self.arch, self.worker.dpkg_architecture):
            product = '{}/out/{}_{}.changes'.format(
                self.worker.scratch, self.buildable.product_prefix,
                product_arch)
            if self.worker.call(['test', '-e', product]) == 0:
                break
        else:
            raise CannotHappen('sbuild produced no .changes file from '
                               '{!r}'.format(self.buildable))

        logger.info('Copying %s back to host...', product)
        copied_back = os.path.join(
            self.output_builds,
            '{}_{}.changes'.format(self.buildable.product_prefix, self.arch))
        self.worker.copy_to_host(product, copied_back)
        self.buildable.changes_produced[self.arch] = copied_back

        changes_out = Changes(open(copied_back))

        if self.arch == 'source':
            self.buildable.dsc_name = None
            self.buildable.sourceful_changes_name = copied_back

            for f in changes_out['files']:
                if f['name'].endswith('.dsc'):
                    # expect to find exactly one .dsc file
                    assert self.buildable.dsc_name is None
                    self.buildable.dsc_name = os.path.join(
                        self.output_builds, f['name'])

            assert self.buildable.dsc_name is not None
            # Save some space
            self.worker.check_call([
                'rm', '-fr',
                '{}/in/{}_source/'.format(self.worker.scratch,
                                          self.buildable.product_prefix)
            ])

        for f in changes_out['files']:
            assert '/' not in f['name']
            assert not f['name'].startswith('.')

            logger.info('Additionally copying %s back to host...', f['name'])
            product = '{}/out/{}'.format(self.worker.scratch, f['name'])
            copied_back = os.path.join(self.output_builds, f['name'])
            self.worker.copy_to_host(product, copied_back)
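
To make the argv construction in this method easier to follow, the finished command line for an 'Architecture: all' build with a recent sbuild (>= 0.69.0) and no extra options would look roughly like the list below. The command-wrapper path, scratch directory, chroot name and .dsc name are made-up placeholders:

# Purely illustrative; every path and name here is a placeholder.
argv = [
    '/usr/bin/vectis-command-wrapper',   # hypothetical command_wrapper
    '--chdir', '/scratch/out',
    '--',
    'runuser', '-u', 'sbuild', '--',
    'sbuild',
    '-c', 'sid-amd64-sbuild',
    '-d', 'sid',
    '--no-run-lintian',
    '-A',                # build Architecture: all
    '--no-arch-any',     # ...and nothing architecture-dependent
    '/scratch/in/hello_2.10-1.dsc',
]
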
Example #8
def run(args):
    if args.suite is None:
        if args.default_suite is not None:
            args.suite = args.default_suite
        else:
            raise ArgumentError('--suite must be specified')

    # From argv or configuration
    architecture = args.architecture
    components = args.components
    keep = args._keep
    mirrors = args.get_mirrors()
    storage = args.storage
    suite = args.suite
    test_package = args._test_package
    uri = args._uri
    vendor = args.vendor
    worker_argv = args.worker
    worker_suite = args.worker_suite

    # From configuration
    apt_key = args.apt_key
    apt_key_package = args.apt_key_package

    os.makedirs(storage, exist_ok=True)

    for suite in (worker_suite, suite):
        for ancestor in suite.hierarchy:
            mirror = mirrors.lookup_suite(ancestor)
            if mirror is None:
                raise ArgumentError(
                    'No mirror configured for {}'.format(ancestor))

    if uri is None:
        uri = mirrors.lookup_suite(suite)

    tarball = '{arch}/{vendor}/{suite}/pbuilder.tar.gz'.format(
        arch=architecture,
        vendor=vendor,
        suite=suite,
    )
    logger.info('Creating tarball %s...', tarball)

    with VirtWorker(
            worker_argv,
            mirrors=mirrors,
            storage=storage,
            suite=worker_suite,
    ) as worker:
        logger.info('Installing debootstrap and pbuilder')
        worker.check_call([
            'env',
            'DEBIAN_FRONTEND=noninteractive',
            'apt-get',
            '-y',
            '--no-install-recommends',
            '-t',
            worker_suite.apt_suite,
            'install',
            'debootstrap',
            'python3',
            'pbuilder',
        ])

        keyring = apt_key_package

        if keyring is not None:
            worker.call([
                'env',
                'DEBIAN_FRONTEND=noninteractive',
                'apt-get',
                '-y',
                '-t',
                worker_suite.apt_suite,
                '--no-install-recommends',
                'install',
                keyring,
            ])

        pbuilder_args = [
            'create',
            '--aptcache',
            '',
            '--architecture',
            architecture,
            '--components',
            ' '.join(components),
            '--basetgz',
            '{}/output.tar.gz'.format(worker.scratch),
            '--mirror',
            uri,
            '--distribution',
            str(suite),
        ]
        debootstrap_args = []

        if worker.call(['test', '-f', apt_key]) == 0:
            logger.info('Found apt key worker:{}'.format(apt_key))
            pbuilder_args.append('--keyring')
            pbuilder_args.append(apt_key)
            debootstrap_args.append('--keyring={}'.format(apt_key))
        elif os.path.exists(apt_key):
            logger.info('Found apt key host:{}, copying to worker:{}'.format(
                apt_key, '{}/apt-key.gpg'.format(worker.scratch)))
            worker.copy_to_guest(apt_key,
                                 '{}/apt-key.gpg'.format(worker.scratch))
            pbuilder_args.append('--keyring')
            pbuilder_args.append('{}/apt-key.gpg'.format(worker.scratch))
            debootstrap_args.append('--keyring={}/apt-key.gpg'.format(
                worker.scratch))
        else:
            logger.warning(
                'Apt key host:{} not found; leaving it out and hoping '
                'for the best'.format(apt_key))

        for arg in debootstrap_args:
            pbuilder_args.append('--debootstrapopts')
            pbuilder_args.append(arg)

        worker.check_call([
            'touch',
            '/root/.pbuilderrc',
        ])
        logger.info('pbuilder %r', pbuilder_args)
        worker.check_call([
            'env',
            'DEBIAN_FRONTEND=noninteractive',
            worker.command_wrapper,
            '--',
            'pbuilder',
        ] + pbuilder_args)

        out = os.path.join(storage, tarball)
        os.makedirs(os.path.dirname(out) or os.curdir, exist_ok=True)

        # Smoke-test the new tarball before we commit to using it.
        if test_package:
            with TemporaryDirectory(prefix='vectis-pbuilder-') as tmp:
                with AtomicWriter(os.path.join(tmp, 'script')) as writer:
                    writer.write(
                        textwrap.dedent('''\
                    #!/bin/sh
                    set -e
                    set -u
                    cd "$2"
                    # Duplicate each "deb" line as a "deb-src" line so that
                    # apt-get source can download the test package
                    perl -ne 'print $_; if (s/^deb\\s/deb-src /) { print $_ }' \\
                            < /etc/apt/sources.list \\
                            > /etc/apt/sources.list.new
                    mv /etc/apt/sources.list.new /etc/apt/sources.list
                    apt-get update >&2
                    apt-get --download-only source "$1" >&2
                    mv *.dsc "$1.dsc"
                    '''))
                worker.copy_to_guest(os.path.join(tmp, 'script'),
                                     '{}/script'.format(worker.scratch))

            worker.check_call([
                'chmod',
                '0755',
                '{}/script'.format(worker.scratch),
            ])

            try:
                lines = worker.check_output(
                    [
                        'pbuilder',
                        'execute',
                        '--aptcache',
                        '',
                        '--basetgz',
                        '{}/output.tar.gz'.format(worker.scratch),
                        '--bindmounts',
                        '{}'.format(worker.scratch),
                        '--',
                        '{}/script'.format(worker.scratch),
                        test_package,
                        worker.scratch,
                    ],
                    universal_newlines=True).strip().splitlines()
                logger.info('%r', lines)

                worker.check_call([
                    worker.command_wrapper,
                    '--chdir',
                    worker.scratch,
                    '--',
                    'pbuilder',
                    'build',
                    '--aptcache',
                    '',
                    '--basetgz',
                    '{}/output.tar.gz'.format(worker.scratch),
                    '{}.dsc'.format(test_package),
                ])
            except Exception:
                if keep:
                    worker.copy_to_host(
                        '{}/output.tar.gz'.format(worker.scratch),
                        out + '.new')

                raise

        worker.copy_to_host('{}/output.tar.gz'.format(worker.scratch),
                            out + '.new')
        os.rename(out + '.new', out)

    logger.info('Created tarball %s', tarball)
Example #9
def run_autopkgtest(*,
                    components,
                    mirrors,
                    modes,
                    storage,
                    suite,
                    vendor,
                    worker,
                    architecture=None,
                    binaries=(),
                    built_binaries=None,
                    extra_repositories=(),
                    lxc_24bit_subnet=None,
                    lxc_worker=None,
                    lxd_worker=None,
                    output_logs=None,
                    qemu_ram_size=None,
                    schroot_worker=None,
                    source_dir=None,
                    source_dsc=None,
                    source_package=None):
    failures = []

    if lxc_worker is None:
        lxc_worker = worker

    if lxd_worker is None:
        lxd_worker = worker

    if schroot_worker is None:
        schroot_worker = worker

    logger.info('Testing in modes: %r', modes)

    for test in modes:
        logger.info('Testing in mode: %s', test)
        with ExitStack() as stack:
            run_as = None
            worker = None

            if output_logs is None:
                output_dir = None
            else:
                output_dir = os.path.join(
                    output_logs,
                    'autopkgtest_{}_{}'.format(test, architecture))

            if test == 'qemu':
                test = 'qemu:autopkgtest.qcow2'

            if test.startswith('qemu:'):
                image = os.path.join(storage, architecture, str(vendor),
                                     str(suite.hierarchy[-1]),
                                     test[len('qemu:'):])

                if not image or not os.path.exists(image):
                    logger.info('Required image %s does not exist', image)
                    continue

                output_on_worker = output_dir
                virt = ['qemu']

                if qemu_ram_size is not None:
                    virt.append('--ram-size={}'.format(qemu_ram_size // _1M))

                virt.append(image)

            elif test == 'schroot':
                tarball = os.path.join(storage, architecture, str(vendor),
                                       str(suite.hierarchy[-1]),
                                       'minbase.tar.gz')

                if not os.path.exists(tarball):
                    logger.info('Required tarball %s does not exist', tarball)
                    continue

                worker = stack.enter_context(schroot_worker)
                worker.check_call([
                    'env',
                    'DEBIAN_FRONTEND=noninteractive',
                    'apt-get',
                    '-y',
                    '-t',
                    worker.suite.apt_suite,
                    'install',
                    'autopkgtest',
                    'python3',
                    'schroot',
                ])

                with TemporaryDirectory(prefix='vectis-sbuild-') as tmp:
                    with AtomicWriter(os.path.join(tmp,
                                                   'sbuild.conf')) as writer:
                        writer.write(
                            textwrap.dedent('''
                        [autopkgtest]
                        type=file
                        description=Test
                        file={tarball}
                        groups=root,{user}
                        root-groups=root,{user}
                        profile=default
                        ''').format(
                                tarball=worker.make_file_available(tarball,
                                                                   cache=True),
                                user=worker.user,
                            ))
                    worker.copy_to_guest(os.path.join(tmp, 'sbuild.conf'),
                                         '/etc/schroot/chroot.d/autopkgtest')

                output_on_worker = worker.new_directory()
                worker.check_call(['chown', worker.user, output_on_worker])
                run_as = worker.user
                virt = ['schroot', 'autopkgtest']

            elif test == 'lxc':
                container = '{}-{}-{}'.format(
                    vendor,
                    suite.hierarchy[-1],
                    architecture,
                )
                rootfs = os.path.join(storage, architecture, str(vendor),
                                      str(suite.hierarchy[-1]),
                                      'lxc-rootfs.tar.gz')
                meta = os.path.join(storage, architecture, str(vendor),
                                    str(suite.hierarchy[-1]),
                                    'lxc-meta.tar.gz')

                if not os.path.exists(rootfs) or not os.path.exists(meta):
                    logger.info('Required tarball %s or %s does not exist',
                                rootfs, meta)
                    continue

                worker = stack.enter_context(lxc_worker)
                worker.check_call([
                    'env',
                    'DEBIAN_FRONTEND=noninteractive',
                    'apt-get',
                    '-y',
                    '-t',
                    lxc_worker.suite.apt_suite,
                    'install',
                    'autopkgtest',
                    'lxc',
                    'python3',
                ])
                set_up_lxc_net(worker, lxc_24bit_subnet)
                worker.check_call(
                    ['mkdir', '-p', '/var/lib/lxc/vectis-new/rootfs'])
                with open(rootfs, 'rb') as reader:
                    worker.check_call([
                        'tar', '-x', '-z', '-C',
                        '/var/lib/lxc/vectis-new/rootfs', '-f', '-'
                    ],
                                      stdin=reader)
                with open(meta, 'rb') as reader:
                    worker.check_call([
                        'tar', '-x', '-z', '-C', '/var/lib/lxc/vectis-new',
                        '-f', '-'
                    ],
                                      stdin=reader)
                worker.check_call([
                    'mv', '/var/lib/lxc/vectis-new',
                    '/var/lib/lxc/{}'.format(container)
                ])

                # Make sure the container has an ordinary user to run tests;
                # autopkgtest auto-detects 'nobody' which doesn't have a
                # real home directory
                worker.check_call([
                    'chroot', '/var/lib/lxc/{}/rootfs'.format(container), 'sh',
                    '-c', 'if ! getent passwd user >/dev/null; then '
                    '    apt-get -y install adduser && '
                    '    adduser --disabled-password --shell=/bin/sh user '
                    '    </dev/null; '
                    'fi'
                ])

                output_on_worker = worker.new_directory()
                virt = ['lxc', container]

            elif test == 'lxd':
                container = 'autopkgtest/{}/{}/{}'.format(
                    vendor,
                    suite.hierarchy[-1],
                    architecture,
                )
                tarball = os.path.join(storage, architecture, str(vendor),
                                       str(suite.hierarchy[-1]),
                                       'lxd-autopkgtest.tar.gz')

                if not os.path.exists(tarball):
                    logger.info('Required tarball %s does not exist', tarball)
                    continue

                worker = stack.enter_context(lxd_worker)
                worker.check_call([
                    'env',
                    'DEBIAN_FRONTEND=noninteractive',
                    'apt-get',
                    '-y',
                    '-t',
                    lxd_worker.suite.apt_suite,
                    'install',
                    'autopkgtest',
                    'lxd',
                    'lxd-client',
                    'python3',
                ])
                worker.check_call([
                    'lxd',
                    'init',
                    '--auto',
                    '--debug',
                    '--verbose',
                ])
                set_up_lxd_net(worker, lxc_24bit_subnet)
                worker.check_call([
                    'lxc',
                    'image',
                    'import',
                    '--alias={}'.format(container),
                    worker.make_file_available(tarball, cache=True),
                ])

                output_on_worker = worker.new_directory()
                virt = ['lxd', container]

            else:
                logger.warning('Unknown autopkgtest setup: {}'.format(test))
                continue

            if worker is None:
                worker = stack.enter_context(HostWorker())

            autopkgtest = stack.enter_context(
                AutopkgtestWorker(
                    components=components,
                    extra_repositories=extra_repositories,
                    mirrors=mirrors,
                    suite=suite,
                    virt=virt,
                    worker=worker,
                ))

            if not autopkgtest.call_autopkgtest(
                    binaries=binaries,
                    built_binaries=built_binaries,
                    output_dir=output_on_worker,
                    run_as=run_as,
                    source_dir=source_dir,
                    source_dsc=source_dsc,
                    source_package=source_package,
            ):
                if output_dir is None:
                    failures.append(test)
                else:
                    failures.append(output_dir)

            if output_dir is not None and output_dir != output_on_worker:
                worker.copy_to_host(os.path.join(output_on_worker, ''),
                                    os.path.join(output_dir, ''))

    return failures
Example #10
def _sbuild(buildables,
            *,
            archs,
            components,
            indep,
            mirrors,
            profiles,
            build_source,
            sbuild_options=(),
            source_only,
            storage,
            vendor,
            worker,
            deb_build_options=(),
            dpkg_buildpackage_options=(),
            dpkg_source_options=(),
            extra_repositories=(),
            indep_together=False,
            source_together=False):

    logger.info('Installing sbuild')
    worker.check_call([
        'env',
        'DEBIAN_FRONTEND=noninteractive',
        'apt-get',
        '-y',
        '-t',
        worker.suite.apt_suite,
        '--no-install-recommends',
        'install',
        'python3',
        'sbuild',
        'schroot',
    ])
    # Be like the real Debian build infrastructure: give sbuild a
    # nonexistent home directory.
    worker.check_call([
        'usermod',
        '-d',
        '/nonexistent',
        'sbuild',
    ])

    for buildable in buildables:
        logger.info('Processing: %s', buildable)

        buildable.copy_source_to(worker)

        def new_build(arch, output_dir=buildable.output_dir):
            return Build(
                buildable,
                arch,
                worker,
                components=components,
                deb_build_options=deb_build_options,
                dpkg_buildpackage_options=dpkg_buildpackage_options,
                dpkg_source_options=dpkg_source_options,
                extra_repositories=extra_repositories,
                mirrors=mirrors,
                output_dir=output_dir,
                profiles=profiles,
                storage=storage,
            )

        if buildable.source_from_archive:
            # We need to get some information from the .dsc, which we do by
            # building one and (usually) throwing it away.
            # TODO: With jessie's sbuild, this doesn't work for
            # sources that only build Architecture: all binaries.
            # TODO: This won't work if the sbuild_options are a binNMU.
            if build_source:
                logger.info('Rebuilding source as requested')
                new_build('source').sbuild(sbuild_options=sbuild_options)
            else:
                logger.info(
                    'Rebuilding and discarding source to discover supported '
                    'architectures')
                new_build(
                    'source',
                    output_dir=None,
                ).sbuild(sbuild_options=sbuild_options)

        buildable.select_archs(
            worker_arch=worker.dpkg_architecture,
            archs=archs,
            indep=indep,
            indep_together=indep_together,
            build_source=build_source,
            source_only=source_only,
            source_together=source_together,
        )

        logger.info('Builds required: %r', list(buildable.archs))

        for arch in buildable.archs:
            new_build(arch).sbuild(sbuild_options=sbuild_options)

        if buildable.sourceful_changes_name:
            base = '{}_source.changes'.format(buildable.product_prefix)
            c = os.path.join(buildable.output_dir, base)
            c = os.path.abspath(c)
            if 'source' not in buildable.changes_produced:
                with AtomicWriter(c) as writer:
                    subprocess.check_call([
                        'mergechanges',
                        '--source',
                        buildable.sourceful_changes_name,
                        buildable.sourceful_changes_name,
                    ],
                                          stdout=writer)

            buildable.merged_changes['source'] = c

        if ('all' in buildable.changes_produced
                and 'source' in buildable.merged_changes):
            base = '{}_source+all.changes'.format(buildable.product_prefix)
            c = os.path.join(buildable.output_dir, base)
            c = os.path.abspath(c)
            buildable.merged_changes['source+all'] = c
            with AtomicWriter(c) as writer:
                subprocess.check_call([
                    'mergechanges',
                    buildable.changes_produced['all'],
                    buildable.merged_changes['source'],
                ],
                                      stdout=writer)

        binary_group = 'binary'

        binary_changes = []
        for k, v in buildable.changes_produced.items():
            if k != 'source':
                binary_changes.append(v)

                if v == buildable.sourceful_changes_name:
                    binary_group = 'source+binary'

        base = '{}_{}.changes'.format(buildable.product_prefix, binary_group)
        c = os.path.join(buildable.output_dir, base)
        c = os.path.abspath(c)

        if len(binary_changes) > 1:
            with AtomicWriter(c) as writer:
                subprocess.check_call(['mergechanges'] + binary_changes,
                                      stdout=writer)
            buildable.merged_changes[binary_group] = c
        elif len(binary_changes) == 1:
            shutil.copy(binary_changes[0], c)
            buildable.merged_changes[binary_group] = c
        # else it was source-only: no binary changes

        if ('source' in buildable.merged_changes
                and 'binary' in buildable.merged_changes):
            base = '{}_source+binary.changes'.format(buildable.product_prefix)
            c = os.path.join(buildable.output_dir, base)
            c = os.path.abspath(c)
            buildable.merged_changes['source+binary'] = c

            with AtomicWriter(c) as writer:
                subprocess.check_call([
                    'mergechanges',
                    buildable.merged_changes['source'],
                    buildable.merged_changes['binary'],
                ],
                                      stdout=writer)

        for ident, linkable in (list(buildable.merged_changes.items()) +
                                list(buildable.changes_produced.items())):
            base = os.path.basename(linkable)

            for l in buildable.link_builds:
                symlink = os.path.join(l, base)

                with suppress(FileNotFoundError):
                    os.unlink(symlink)

                os.symlink(linkable, symlink)