Example #1
0
    def __init__(self, logfile, name):
        """Open the ASCIIDoc report log and emit the top-level heading."""

        self.outf = ASCIIDocLog(logfile)

        # Name the project in the heading when one was supplied.
        heading = "ELBE Report"
        if name:
            heading = "ELBE Report for Project " + name
        self.outf.h1(heading)
Example #2
0
    def build_cdroms(self,
                     build_bin=True,
                     build_sources=False,
                     cdrom_size=None):
        """Build binary and/or source CD-ROM images for this project.

        :param build_bin: when True, build the binary CD image.
        :param build_sources: when True, build the source CD image.
        :param cdrom_size: size limit forwarded to the mk_*_cdrom helpers
            (units defined there -- TODO confirm), or None for no limit.
        """
        # Start each run with a fresh list of generated repo images.
        self.repo_images = []

        # Validation log; second argument presumably selects append mode --
        # verify against ASCIIDocLog's signature.
        elog = ASCIIDocLog(self.validationpath, True)

        env = None
        sysrootstr = ""
        # Prefer the sysroot environment when it exists, so the CDs also
        # carry the sysroot packages.
        if os.path.exists(self.sysrootpath):
            sysrootstr = "(including sysroot packages)"
            env = BuildEnv(self.xml,
                           self.log,
                           self.sysrootpath,
                           build_sources=build_sources,
                           clean=False)
        else:
            env = BuildEnv(self.xml,
                           self.log,
                           self.chrootpath,
                           build_sources=build_sources,
                           clean=False)

        # ensure the /etc/apt/sources.list is created according to
        # build_sources / build_bin flags; ensure to reopen it with
        # the new 'sources.list'
        with env:
            env.seed_etc()

        # Drop the cached apt cache so it is re-created against the freshly
        # seeded environment (presumed -- see drop_rpcaptcache).
        self.drop_rpcaptcache(env=env)

        with env:
            init_codename = self.xml.get_initvm_codename()

            if build_bin:
                elog.h1("Binary CD %s" % sysrootstr)

                self.repo_images += mk_binary_cdrom(env.rfs,
                                                    self.arch,
                                                    self.codename,
                                                    init_codename,
                                                    self.xml,
                                                    self.builddir,
                                                    self.log,
                                                    cdrom_size=cdrom_size)
            if build_sources:
                elog.h1("Source CD %s" % sysrootstr)
                try:
                    self.repo_images += mk_source_cdrom(env.rfs,
                                                        self.arch,
                                                        self.codename,
                                                        init_codename,
                                                        self.builddir,
                                                        self.log,
                                                        cdrom_size=cdrom_size,
                                                        xml=self.xml)
                except SystemError as e:
                    # e.g. no deb-src urls specified
                    elog.printo(str(e))
Example #3
0
    def __init__(self, logfile, name):
        """Create the report writer; the heading names the project if given."""

        self.outf = ASCIIDocLog(logfile)

        if not name:
            self.outf.h1("ELBE Report")
        else:
            self.outf.h1("ELBE Report for Project " + name)
Example #4
0
class adjpkg(object):
    """Adjust the installed package set to exactly match a package list."""

    def __init__(self, logfile, name):
        # All progress and error output goes to an ASCIIDoc report log.
        self.outf = ASCIIDocLog(logfile)

        if name:
            self.outf.h1("ELBE Report for Project " + name)
        else:
            self.outf.h1("ELBE Report")

    def set_pkgs(self, pkglist):
        """Install exactly the packages named in *pkglist*.

        Marks every non-essential, manually installed package that is not
        listed for removal, installs the listed packages, commits, then
        purges whatever became auto-removable.

        :param pkglist: iterable of package names to keep installed.
        :returns: number of listed packages that do not exist in the cache.
        """
        cache = apt.Cache()
        cache.update()
        cache.open(None)

        errors = 0

        with cache.actiongroup():

            for p in cache:
                if not p.is_installed:
                    continue
                # Keep essential, auto-installed, explicitly requested and
                # priority important/required packages; drop the rest.
                if p.essential or p.is_auto_installed or (
                        p.name in pkglist
                ) or p.installed.priority in ("important", "required"):
                    continue
                # BUGFIX: Python-2 print statement replaced by print()
                # so the module parses on Python 3 as well.
                print("MARK REMOVE %s" % p.name)
                p.mark_delete(auto_fix=False, purge=True)

            for name in pkglist:

                if name not in cache:
                    self.outf.printo("- package %s does not exist" % name)
                    errors += 1
                    continue

                cp = cache[name]

                cp.mark_install()
                print("MARK INSTALL %s" % cp.name)

            cache.commit(apt.progress.base.AcquireProgress(),
                         apt.progress.base.InstallProgress())

            cache.update()
            cache.open(None)

            # Second pass: purge packages that became auto-removable after
            # the commit above.
            for p in cache:
                if not p.is_installed:
                    continue
                if p.is_auto_removable:
                    p.mark_delete(purge=True)
                    print("MARKED AS AUTOREMOVE %s" % p.name)

        cache.commit(apt.progress.base.AcquireProgress(),
                     apt.progress.base.InstallProgress())

        return errors
Example #5
0
class adjpkg(object):
    """Adjust the installed package set to exactly match a package list."""

    def __init__(self, logfile, name):
        # Report output goes to an ASCIIDoc log file.
        self.outf = ASCIIDocLog(logfile)

        if name:
            self.outf.h1("ELBE Report for Project " + name)
        else:
            self.outf.h1("ELBE Report")

    def set_pkgs(self, pkglist):
        """Install exactly the packages named in *pkglist*.

        Removes non-essential, manually installed packages that are not
        listed, installs the listed ones, commits, then purges packages
        that became auto-removable.

        :param pkglist: iterable of package names to keep installed.
        :returns: number of listed packages missing from the cache.
        """
        cache = apt.Cache()
        cache.update()
        cache.open(None)

        errors = 0

        with cache.actiongroup():

            for p in cache:
                if not p.is_installed:
                    continue
                # Keep essential, auto-installed, requested and
                # priority important/required packages.
                if (p.essential or
                        p.is_auto_installed or
                        p.name in pkglist or
                        p.installed.priority in ("important", "required")):
                    continue
                # BUGFIX: Python-2 print statements converted to print()
                # calls so the module parses on Python 3 as well.
                print("MARK REMOVE %s" % p.name)
                p.mark_delete(auto_fix=False, purge=True)

            for name in pkglist:

                if name not in cache:
                    self.outf.printo("- package %s does not exist" % name)
                    errors += 1
                    continue

                cp = cache[name]

                cp.mark_install()
                print("MARK INSTALL %s" % cp.name)

            cache.commit(apt.progress.base.AcquireProgress(),
                         apt.progress.base.InstallProgress())

            cache.update()
            cache.open(None)

            # Second pass: purge packages that became auto-removable.
            for p in cache:
                if not p.is_installed:
                    continue
                if p.is_auto_removable:
                    p.mark_delete(purge=True)
                    print("MARKED AS AUTOREMOVE %s" % p.name)

        cache.commit(apt.progress.base.AcquireProgress(),
                     apt.progress.base.InstallProgress())

        return errors
Example #6
0
    def build_cdroms(self, build_bin=True,
                     build_sources=False, cdrom_size=None):
        """Build binary and/or source CD-ROM images for this project.

        :param build_bin: when True, build the binary CD image.
        :param build_sources: when True, build the source CD image.
        :param cdrom_size: size limit forwarded to the mk_*_cdrom helpers
            (units defined there -- TODO confirm), or None for no limit.
        """
        # Start each run with a fresh list of generated repo images.
        self.repo_images = []

        # Validation log; second argument presumably selects append mode.
        elog = ASCIIDocLog(self.validationpath, True)

        env = None
        sysrootstr = ""
        # Prefer the sysroot environment when it exists, so the CDs also
        # carry the sysroot packages.
        if os.path.exists(self.sysrootpath):
            sysrootstr = "(including sysroot packages)"
            env = BuildEnv(self.xml, self.log, self.sysrootpath,
                           build_sources=build_sources, clean=False)
        else:
            env = BuildEnv(self.xml, self.log, self.chrootpath,
                           build_sources=build_sources, clean=False)

        # ensure the /etc/apt/sources.list is created according to
        # build_sources / build_bin flags; ensure to reopen it with
        # the new 'sources.list'
        with env:
            env.seed_etc()

        # Drop the cached apt cache so it is re-created against the freshly
        # seeded environment (presumed -- see drop_rpcaptcache).
        self.drop_rpcaptcache(env=env)

        with env:
            init_codename = self.xml.get_initvm_codename()

            if build_bin:
                elog.h1("Binary CD %s" % sysrootstr)

                self.repo_images += mk_binary_cdrom(env.rfs,
                                                    self.arch,
                                                    self.codename,
                                                    init_codename,
                                                    self.xml,
                                                    self.builddir,
                                                    self.log,
                                                    cdrom_size=cdrom_size)
            if build_sources:
                elog.h1("Source CD %s" % sysrootstr)
                try:
                    self.repo_images += mk_source_cdrom(env.rfs,
                                                        self.arch,
                                                        self.codename,
                                                        init_codename,
                                                        self.builddir,
                                                        self.log,
                                                        cdrom_size=cdrom_size,
                                                        xml=self.xml)
                except SystemError as e:
                    # e.g. no deb-src urls specified
                    elog.printo(str(e))
Example #7
0
    def __init__(self, rfs, logpath, arch, notifier=None,
                 norecommend=False, noauth=True):
        """Configure apt for the chroot and open the package cache.

        :param rfs: root filesystem the cache operates in.
        :param logpath: path of the ASCIIDoc log file.
        :param arch: dpkg architecture written to APT::Architecture.
        :param notifier: optional progress notifier, stored as-is.
        :param norecommend: when True, do NOT install recommended packages.
        :param noauth: when True, allow unauthenticated repositories.
        """
        self.log = ASCIIDocLog(logpath)
        self.notifier = notifier
        InChRootObject.__init__(self, rfs)

        config.set("APT::Architecture", arch)

        # BUGFIX: the two values were swapped -- norecommend=True must turn
        # APT::Install-Recommends *off* ("0"), not on.
        if norecommend:
            config.set("APT::Install-Recommends", "0")
        else:
            config.set("APT::Install-Recommends", "1")

        if noauth:
            config.set("APT::Get::AllowUnauthenticated", "1")
        else:
            config.set("APT::Get::AllowUnauthenticated", "0")

        self.cache = Cache()
        self.cache.open()
Example #8
0
    def __init__(self,
                 builddir,
                 xmlpath=None,
                 logpath=None,
                 name=None,
                 override_buildtype=None,
                 skip_validate=False,
                 url_validation=ValidationMode.CHECK_ALL,
                 rpcaptcache_notifier=None,
                 private_data=None,
                 postbuild_file=None,
                 presh_file=None,
                 postsh_file=None,
                 savesh_file=None):
        """Set up an ELBE project rooted at *builddir*.

        :param builddir: project base directory; chroot, target, sysroot,
            sdk and validation paths are all derived from it.
        :param xmlpath: explicit project XML file; when None, the
            source.xml inside builddir is used.
        :param logpath: when given, log to an ASCIIDocLog file, otherwise
            log to stdout.
        :param name: project name; falls back to project/name from the XML.
        :param override_buildtype: forwarded to ElbeXML.
        :param skip_validate: forwarded to ElbeXML.
        :param url_validation: forwarded to ElbeXML.
        :param rpcaptcache_notifier: notifier used when the apt cache is
            created on demand by get_rpcaptcache.
        :param private_data: opaque caller data, stored as-is.
        :param postbuild_file: hook script path, stored as-is.
        :param presh_file: hook script path, stored as-is.
        :param postsh_file: hook script path, stored as-is.
        :param savesh_file: hook script path, stored as-is.
        """

        # pylint: disable=too-many-arguments

        # Derive all project sub-paths from the build directory.
        self.builddir = os.path.abspath(str(builddir))
        self.chrootpath = os.path.join(self.builddir, "chroot")
        self.targetpath = os.path.join(self.builddir, "target")
        self.sysrootpath = os.path.join(self.builddir, "sysroot")
        self.sdkpath = os.path.join(self.builddir, "sdk")
        self.validationpath = os.path.join(self.builddir, "validation.txt")

        self.name = name
        self.override_buildtype = override_buildtype
        self.skip_validate = skip_validate
        self.url_validation = url_validation
        self.postbuild_file = postbuild_file
        self.presh_file = presh_file
        self.postsh_file = postsh_file
        self.savesh_file = savesh_file

        self.private_data = private_data

        # Apt-Cache will be created on demand with the specified notifier by
        # the get_rpcaptcache method
        self._rpcaptcache = None
        self.rpcaptcache_notifier = rpcaptcache_notifier

        # Initialise Repo Images to Empty list.
        self.repo_images = []

        self.orig_fname = None
        self.orig_files = []

        # Use supplied XML file, if given, otherwise use the source.xml
        # file of the project
        if xmlpath:
            self.xml = ElbeXML(xmlpath,
                               buildtype=override_buildtype,
                               skip_validate=skip_validate,
                               url_validation=url_validation)
        else:
            sourcexmlpath = os.path.join(self.builddir, "source.xml")
            self.xml = ElbeXML(sourcexmlpath,
                               buildtype=override_buildtype,
                               skip_validate=skip_validate,
                               url_validation=url_validation)

        self.arch = self.xml.text("project/arch", key="arch")
        self.codename = self.xml.text("project/suite")

        # Fall back to the project name declared in the XML.
        if not self.name:
            self.name = self.xml.text("project/name")

        # If logpath is given, use an AsciiDocLog instance, otherwise log
        # to stdout
        if logpath:
            self.log = ASCIIDocLog(logpath)
        else:
            self.log = StdoutLog()

        self.repo = ProjectRepo(self.arch, self.codename,
                                os.path.join(self.builddir, "repo"), self.log)

        # Create BuildEnv instance, if the chroot directory exists and
        # has an etc/elbe_version
        if os.path.exists(self.chrootpath):
            self.buildenv = BuildEnv(self.xml,
                                     self.log,
                                     self.chrootpath,
                                     clean=False)
        else:
            self.buildenv = None

        # Create TargetFs instance, if the target directory exists
        if os.path.exists(self.targetpath) and self.buildenv:
            self.targetfs = TargetFs(self.targetpath,
                                     self.log,
                                     self.buildenv.xml,
                                     clean=False)
        else:
            self.targetfs = None

        # dont create sysroot instance, it should be build from scratch
        # each time, because the pkglist including the -dev packages is
        # tracked nowhere.
        self.sysrootenv = None
        self.log.do('rm -rf %s' % self.sysrootpath)

        # same for host_sysroot instance recreate it in any case
        self.host_sysrootenv = None
Example #9
0
    def __init__(self,
                 builddir,
                 xmlpath=None,
                 logpath=None,
                 name=None,
                 override_buildtype=None,
                 skip_validate=False,
                 skip_urlcheck=False,
                 rpcaptcache_notifier=None,
                 private_data=None,
                 postbuild_file=None,
                 presh_file=None,
                 postsh_file=None,
                 savesh_file=None):
        """Set up an ELBE project rooted at *builddir*.

        :param builddir: project base directory; chroot and target paths
            are derived from it.
        :param xmlpath: explicit project XML file; when None, the
            source.xml inside builddir is used.
        :param logpath: when given, log to an ASCIIDocLog file, otherwise
            log to stdout.
        :param name: project name, stored as-is.
        :param override_buildtype: forwarded to ElbeXML.
        :param skip_validate: forwarded to ElbeXML.
        :param skip_urlcheck: forwarded to ElbeXML.
        :param rpcaptcache_notifier: notifier used when the apt cache is
            created on demand by get_rpcaptcache.
        :param private_data: opaque caller data, stored as-is.
        :param postbuild_file: hook script path, stored as-is.
        :param presh_file: hook script path, stored as-is.
        :param postsh_file: hook script path, stored as-is.
        :param savesh_file: hook script path, stored as-is.
        """
        self.builddir = os.path.abspath(str(builddir))
        self.chrootpath = os.path.join(self.builddir, "chroot")
        self.targetpath = os.path.join(self.builddir, "target")

        self.name = name
        self.override_buildtype = override_buildtype
        self.skip_validate = skip_validate
        self.skip_urlcheck = skip_urlcheck
        self.postbuild_file = postbuild_file
        self.presh_file = presh_file
        self.postsh_file = postsh_file
        self.savesh_file = savesh_file

        self.private_data = private_data

        # Apt-Cache will be created on demand with the specified notifier by
        # the get_rpcaptcache method
        self._rpcaptcache = None
        self.rpcaptcache_notifier = rpcaptcache_notifier

        # Initialise Repo Images to Empty list.
        self.repo_images = []

        # Use supplied XML file, if given, otherwise use the source.xml
        # file of the project
        if xmlpath:
            self.xml = ElbeXML(xmlpath,
                               buildtype=override_buildtype,
                               skip_validate=skip_validate,
                               skip_urlcheck=skip_urlcheck)
        else:
            sourcexmlpath = os.path.join(self.builddir, "source.xml")
            self.xml = ElbeXML(sourcexmlpath,
                               buildtype=override_buildtype,
                               skip_validate=skip_validate,
                               skip_urlcheck=skip_urlcheck)

        # If logpath is given, use an AsciiDocLog instance, otherwise log
        # to stdout
        if logpath:
            self.log = ASCIIDocLog(logpath)
        else:
            self.log = StdoutLog()

        # Create BuildEnv instance, if the chroot directory exists and
        # has an etc/elbe_version
        if self.has_full_buildenv():
            self.buildenv = BuildEnv(self.xml, self.log, self.chrootpath)
        else:
            # Without a full build environment there is nothing more to
            # set up -- leave both environments unset and stop early.
            self.buildenv = None
            self.targetfs = None
            return

        # Create TargetFs instance, if the target directory exists
        if os.path.exists(self.targetpath):
            self.targetfs = TargetFs(self.targetpath,
                                     self.log,
                                     self.buildenv.xml,
                                     clean=False)
        else:
            self.targetfs = None
Example #10
0
def check_full_pkgs(pkgs, fullpkgs, errorname, cache):
    """Validate the requested and full package lists against the cache.

    Logs every missing, uninstalled or mismatching package, plus any
    installed package that was never requested, to the *errorname* log.
    """
    elog = ASCIIDocLog(errorname)

    elog.h1("ELBE Package validation")
    elog.h2("Package List validation")

    errors = 0

    for name in [p.et.text for p in pkgs]:

        # Strip a multiarch suffix like ":armhf" before the lookup.
        nomulti_name = name.split(":")[0]
        if not cache.has_pkg(nomulti_name):
            elog.printo("- package %s does not exist" % nomulti_name)
            errors += 1
            continue

        if not cache.is_installed(nomulti_name):
            elog.printo("- package %s is not installed" % nomulti_name)
            errors += 1
            continue

    if errors == 0:
        elog.printo("No Errors found")

    if not fullpkgs:
        return

    elog.h2("Full Packagelist validation")
    errors = 0

    pindex = {}
    for p in fullpkgs:
        name = p.et.text
        ver = p.et.get('version')
        md5 = p.et.get('md5')

        pindex[name] = p

        if not cache.has_pkg(name):
            elog.printo("- package %s does not exist" % name)
            errors += 1
            continue

        if not cache.is_installed(name):
            elog.printo("- package %s is not installed" % name)
            errors += 1
            continue

        pkg = cache.get_pkg(name)

        if pkg.installed_version != ver:
            elog.printo(
                "- package %s version %s does not match installed version %s" %
                (name, ver, pkg.installed_version))
            errors += 1
            continue

        if pkg.installed_md5 != md5:
            elog.printo("- package %s md5 %s does not match installed md5 %s" %
                        (name, md5, pkg.installed_md5))
            errors += 1

    for cp in cache.get_installed_pkgs():
        # BUGFIX: dict.has_key() was removed in Python 3; use "in".
        if cp.name not in pindex:
            elog.printo(
                "additional package %s installed, that was not requested" %
                cp.name)
            errors += 1

    if errors == 0:
        elog.printo("No Errors found")
Example #11
0
def elbe_report(xml, buildenv, cache, reportname, targetfs):
    """Write the ELBE build report for a project to *reportname*.

    Dumps the apt configuration and installed package list, extracts the
    archive before and after finetuning, and classifies every target
    file by its origin (package, archive, finetuning, postinst).
    """
    outf = ASCIIDocLog(reportname)
    rfs = buildenv.rfs

    outf.h1("ELBE Report for Project " + xml.text("project/name"))

    outf.printo("report timestamp: " +
                datetime.now().strftime("%Y%m%d-%H%M%S"))

    slist = rfs.read_file('etc/apt/sources.list')
    outf.h2("Apt Sources dump")
    outf.verbatim_start()
    outf.print_raw(slist)
    outf.verbatim_end()

    # /etc/apt/preferences is optional; treat a missing file as empty.
    try:
        prefs = rfs.read_file("etc/apt/preferences")
    except IOError:
        prefs = ""

    outf.h2("Apt Preferences dump")
    outf.verbatim_start()
    outf.print_raw(prefs)
    outf.verbatim_end()

    outf.h2("Installed Packages List")
    outf.table()

    instpkgs = cache.get_installed_pkgs()
    for p in instpkgs:
        outf.printo("|%s|%s|%s" % (p.name, p.installed_version, p.origin))
    outf.table()

    # archive extraction is done before and after finetuning; the first
    # extraction is needed so the files can be used (copied/moved to the
    # buildenv) in finetuning; the second extraction is done to ensure
    # that files from the archive can't be modified/removed in finetuning

    outf.h2("archive extract before finetuning")

    if xml.has("archive"):
        with xml.archive_tmpfile() as fp:
            outf.do('tar xvfj "%s" -C "%s"' % (fp.name, targetfs.path))

    outf.h2("finetuning log")
    outf.verbatim_start()

    index = cache.get_fileindex()
    mt_index = targetfs.mtime_snap()
    if xml.has("target/finetuning"):
        do_finetuning(xml, outf, buildenv, targetfs)
        mt_index_post_fine = targetfs.mtime_snap()
    else:
        mt_index_post_fine = mt_index

    outf.verbatim_end()

    outf.h2("archive extract after finetuning")

    if xml.has("archive"):
        with xml.archive_tmpfile() as fp:
            outf.do('tar xvfj "%s" -C "%s"' % (fp.name, targetfs.path))
        mt_index_post_arch = targetfs.mtime_snap()
    else:
        mt_index_post_arch = mt_index_post_fine

    outf.h2("fileslist")
    outf.table()

    tgt_pkg_list = set()

    for fpath, _ in targetfs.walk_files():
        # BUGFIX throughout this loop: dict.has_key() was removed in
        # Python 3; replaced with the "in" operator (same semantics).
        if fpath in index:
            pkg = index[fpath]
            tgt_pkg_list.add(pkg)
        else:
            pkg = "postinst generated"

        if fpath in mt_index_post_fine and fpath in mt_index:
            if mt_index_post_fine[fpath] > mt_index[fpath]:
                pkg = "modified finetuning"
        # NOTE(review): this second block can overwrite the label set
        # above -- kept as-is to preserve the original behaviour.
        if fpath in mt_index_post_fine:
            if mt_index_post_arch[fpath] > mt_index_post_fine[fpath]:
                pkg = "from archive"
            elif fpath not in mt_index:
                pkg = "added in finetuning"
        else:
            pkg = "added in archive"

        outf.printo("|+%s+|%s" % (fpath, pkg))

    outf.table()

    outf.h2("Deleted Files")
    outf.table()
    for fpath in mt_index.keys():
        if fpath not in mt_index_post_arch:
            if fpath in index:
                pkg = index[fpath]
            else:
                pkg = "postinst generated"
            outf.printo("|+%s+|%s" % (fpath, pkg))
    outf.table()

    outf.h2("Target Package List")
    outf.table()
    instpkgs = cache.get_installed_pkgs()
    pkgindex = {}
    for p in instpkgs:
        pkgindex[p.name] = p

    # Optionally record the target package list inside the image itself.
    if xml.has("target/pkgversionlist"):
        targetfs.remove('etc/elbe_pkglist')
        f = targetfs.open('etc/elbe_pkglist', 'w')
    for pkg in tgt_pkg_list:
        p = pkgindex[pkg]
        outf.printo("|%s|%s|%s|%s" % (p.name, p.installed_version,
                                      p.is_auto_installed, p.installed_md5))
        if xml.has("target/pkgversionlist"):
            f.write("%s %s %s\n" %
                    (p.name, p.installed_version, p.installed_md5))
    outf.table()

    if xml.has("target/pkgversionlist"):
        f.close()
Example #12
0
def run_command(argv):
    """Entry point for 'elbe mkcdrom': build source and/or binary CD-ROM
    images from an ELBE build directory, or from a plain root filesystem
    when --rfs-only is given.  Exits with code 20 on usage or validation
    errors.
    """

    # pylint: disable=too-many-statements

    oparser = OptionParser(usage="usage: %prog mkcdrom [options] <builddir>")
    oparser.add_option("--skip-validation",
                       action="store_true",
                       dest="skip_validation",
                       default=False,
                       help="Skip xml schema validation")
    oparser.add_option("--buildtype",
                       dest="buildtype",
                       help="Override the buildtype")
    oparser.add_option("--arch", dest="arch", help="Override the architecture")
    oparser.add_option("--codename",
                       dest="codename",
                       help="Override the codename")
    oparser.add_option("--init_codename",
                       dest="init_codename",
                       help="Override the initvm codename")
    oparser.add_option("--rfs-only",
                       action="store_true",
                       dest="rfs_only",
                       default=False,
                       help="builddir points to RFS")
    oparser.add_option("--log", dest="log", help="Log to filename")
    oparser.add_option("--binary",
                       action="store_true",
                       dest="binary",
                       default=False,
                       help="build binary cdrom")
    oparser.add_option("--source",
                       action="store_true",
                       dest="source",
                       default=False,
                       help="build source cdrom")
    oparser.add_option("--cdrom-size",
                       action="store",
                       dest="cdrom_size",
                       default=CDROM_SIZE,
                       help="ISO CD size in MB")

    (opt, args) = oparser.parse_args(argv)

    # Exactly one positional argument (the builddir / RFS path) is required.
    if len(args) != 1:
        print("wrong number of arguments", file=sys.stderr)
        oparser.print_help()
        sys.exit(20)

    if not opt.rfs_only:
        # Normal mode: load the full ELBE project and derive everything
        # (rfs, xml, arch, codename, log) from it.
        try:
            project = ElbeProject(args[0],
                                  logpath=opt.log,
                                  override_buildtype=opt.buildtype,
                                  skip_validate=opt.skip_validation)
        except ValidationError as e:
            print(str(e), file=sys.stderr)
            print("xml validation failed. Bailing out", file=sys.stderr)
            sys.exit(20)

        builddir = project.builddir
        rfs = project.buildenv.rfs
        xml = project.xml
        arch = xml.text("project/arch", key="arch")
        codename = xml.text("project/suite")
        log = project.log
        init_codename = xml.get_initvm_codename()
    else:
        # --rfs-only: treat args[0] as a ready root filesystem and take
        # arch/codename from the command line instead of a project XML.
        builddir = os.path.abspath(os.path.curdir)
        rfs = ChRootFilesystem(args[0])
        arch = opt.arch
        codename = opt.codename
        init_codename = opt.init_codename
        xml = None
        if opt.log:
            log = ASCIIDocLog(opt.log)
        else:
            log = StdoutLog()

    generated_files = []
    if opt.source:
        with rfs:
            generated_files += mk_source_cdrom(rfs, arch, codename,
                                               init_codename, builddir, log,
                                               opt.cdrom_size)

    if opt.binary:
        with rfs:
            generated_files += mk_binary_cdrom(rfs, arch, codename,
                                               init_codename, xml, builddir,
                                               log, opt.cdrom_size)

    print("")
    print("Image Build finished !")
    print("")
    print("Files generated:")
    for f in generated_files:
        print(" %s" % f)
Example #13
0
def elbe_report(xml, buildenv, cache, reportname, errorname, targetfs):
    """Write the ELBE build report to *reportname* and validate that
    archive-provided files survived finetuning, logging any problems
    to *errorname*.
    """

    # pylint: disable=too-many-arguments
    # pylint: disable=too-many-locals
    # pylint: disable=too-many-statements
    # pylint: disable=too-many-branches

    outf = ASCIIDocLog(reportname)
    rfs = buildenv.rfs

    outf.h1("ELBE Report for Project " + xml.text("project/name"))

    outf.printo("report timestamp: " +
                datetime.now().strftime("%Y%m%d-%H%M%S"))
    outf.printo("elbe: %s" % str(elbe_version))

    slist = rfs.read_file('etc/apt/sources.list')
    outf.h2("Apt Sources dump")
    outf.verbatim_start()
    outf.print_raw(slist)
    outf.verbatim_end()

    # /etc/apt/preferences is optional; treat a missing file as empty.
    try:
        prefs = rfs.read_file("etc/apt/preferences")
    except IOError:
        prefs = ""

    outf.h2("Apt Preferences dump")
    outf.verbatim_start()
    outf.print_raw(prefs)
    outf.verbatim_end()

    outf.h2("Installed Packages List")
    outf.table()

    instpkgs = cache.get_installed_pkgs()
    for p in instpkgs:
        outf.printo("|%s|%s|%s" % (p.name, p.installed_version, p.origin))
    outf.table()

    # Snapshot file mtimes before the archive extraction and after each
    # stage, so every file can later be attributed to its origin.
    index = cache.get_fileindex()
    mt_index = targetfs.mtime_snap()

    outf.h2("archive extract")

    if xml.has("archive") and not xml.text("archive") is None:
        with xml.archive_tmpfile() as fp:
            outf.do('tar xvfj "%s" -h -C "%s"' % (fp.name, targetfs.path))
        mt_index_postarch = targetfs.mtime_snap()
    else:
        mt_index_postarch = mt_index

    outf.h2("finetuning log")
    outf.verbatim_start()

    if xml.has("target/finetuning"):
        do_finetuning(xml, outf, buildenv, targetfs)
        mt_index_post_fine = targetfs.mtime_snap()
    else:
        mt_index_post_fine = mt_index_postarch

    outf.verbatim_end()

    outf.h2("fileslist")
    outf.table()

    tgt_pkg_list = set()

    # Classify each target file by comparing its mtime across the three
    # snapshots (pre-archive, post-archive, post-finetuning).
    for fpath, _ in targetfs.walk_files():
        if fpath in index:
            pkg = index[fpath]
            tgt_pkg_list.add(pkg)
        else:
            pkg = "postinst generated"

        if fpath in mt_index_post_fine:
            if fpath in mt_index_postarch:
                if mt_index_post_fine[fpath] != mt_index_postarch[fpath]:
                    pkg = "modified finetuning"
                elif fpath in mt_index:
                    if mt_index_postarch[fpath] != mt_index[fpath]:
                        pkg = "from archive"
                    # else leave pkg as is
                else:
                    pkg = "added in archive"
            else:
                pkg = "added in finetuning"
        # else leave pkg as is

        outf.printo("|+%s+|%s" % (fpath, pkg))

    outf.table()

    outf.h2("Deleted Files")
    outf.table()
    for fpath in list(mt_index.keys()):
        if fpath not in mt_index_post_fine:
            if fpath in index:
                pkg = index[fpath]
            else:
                pkg = "postinst generated"
            outf.printo("|+%s+|%s" % (fpath, pkg))
    outf.table()

    outf.h2("Target Package List")
    outf.table()
    instpkgs = cache.get_installed_pkgs()
    pkgindex = {}
    for p in instpkgs:
        pkgindex[p.name] = p

    # Optionally record the target package list inside the image itself.
    if xml.has("target/pkgversionlist"):
        targetfs.remove('etc/elbe_pkglist')
        f = targetfs.open('etc/elbe_pkglist', 'w')
    for pkg in tgt_pkg_list:
        p = pkgindex[pkg]
        outf.printo("|%s|%s|%s|%s" % (p.name, p.installed_version,
                                      p.is_auto_installed, p.installed_md5))
        if xml.has("target/pkgversionlist"):
            f.write("%s %s %s\n" %
                    (p.name, p.installed_version, p.installed_md5))
    outf.table()

    if xml.has("target/pkgversionlist"):
        f.close()

    # Without an archive there is nothing to validate below.
    if not xml.has("archive") or xml.text("archive") is None:
        return

    elog = ASCIIDocLog(errorname, True)

    elog.h1("Archive validation")

    errors = 0

    # Any archive-provided file that was deleted or touched during
    # finetuning is reported as an error.
    for fpath in list(mt_index_postarch.keys()):
        if fpath not in mt_index or \
                mt_index_postarch[fpath] != mt_index[fpath]:
            if fpath not in mt_index_post_fine:
                elog.printo("- archive file %s deleted in finetuning" % fpath)
                errors += 1
            elif mt_index_post_fine[fpath] != mt_index_postarch[fpath]:
                elog.printo("- archive file %s modified in finetuning" % fpath)
                errors += 1

    if errors == 0:
        elog.printo("No Errors found")
Example #14
0
def check_full_pkgs(pkgs, fullpkgs, errorname, cache):
    """Validate the requested and full package lists against the cache.

    Logs every missing, uninstalled or version/md5-mismatching package,
    plus any installed package that was never requested, to the
    *errorname* log.
    """

    # pylint: disable=too-many-statements
    # pylint: disable=too-many-branches

    elog = ASCIIDocLog(errorname, True)

    elog.h1("ELBE Package validation")
    elog.h2("Package List validation")

    errors = 0

    # First pass: the user-requested package list (may be empty/None).
    for entry in pkgs or []:
        name = entry.et.text
        # Strip a multiarch suffix like ":armhf" before the lookup.
        nomulti_name = name.split(":")[0]

        if not cache.has_pkg(nomulti_name):
            elog.printo("- package %s does not exist" % nomulti_name)
            errors += 1
            continue

        if not cache.is_installed(nomulti_name):
            elog.printo("- package %s is not installed" % nomulti_name)
            errors += 1
            continue

        ver = entry.et.get('version')
        pkg = cache.get_pkg(nomulti_name)
        if ver and pkg.installed_version != ver:
            elog.printo(
                "- package %s version %s does not match installed version %s" %
                (name, ver, pkg.installed_version))
            errors += 1

    if not errors:
        elog.printo("No Errors found")

    if not fullpkgs:
        return

    elog.h2("Full Packagelist validation")
    errors = 0

    # Second pass: the full package list with exact version and md5.
    pindex = {}
    for entry in fullpkgs:
        name = entry.et.text
        ver = entry.et.get('version')
        md5 = entry.et.get('md5')

        pindex[name] = entry

        if not cache.has_pkg(name):
            elog.printo("- package %s does not exist" % name)
            errors += 1
            continue

        if not cache.is_installed(name):
            elog.printo("- package %s is not installed" % name)
            errors += 1
            continue

        pkg = cache.get_pkg(name)

        if pkg.installed_version != ver:
            elog.printo(
                "- package %s version %s does not match installed version %s" %
                (name, ver, pkg.installed_version))
            errors += 1
            continue

        if pkg.installed_md5 != md5:
            elog.printo("- package %s md5 %s does not match installed md5 %s" %
                        (name, md5, pkg.installed_md5))
            errors += 1

    # Anything installed but absent from the full list is unexpected.
    for cp in cache.get_installed_pkgs():
        if cp.name not in pindex:
            elog.printo(
                "additional package %s installed, that was not requested" %
                cp.name)
            errors += 1

    if not errors:
        elog.printo("No Errors found")
Example #15
0
def elbe_report(xml, buildenv, cache, reportname, errorname, targetfs):
    """Write the ELBE build report and validate the extracted archive.

    The report (written to *reportname* as AsciiDoc) contains: apt
    sources and preferences dumps, the installed package list, the
    archive-extraction and finetuning logs, a per-file origin table,
    the list of deleted files, and the target package list (optionally
    mirrored into etc/elbe_pkglist inside the target).

    If the XML describes an archive, a validation log is written to
    *errorname* listing archive files that finetuning deleted or
    modified.

    :param xml: project XML accessor (has()/text()/archive_tmpfile())
    :param buildenv: build environment; its .rfs is read for apt config
    :param cache: package cache (installed packages, file index)
    :param reportname: output path of the AsciiDoc report
    :param errorname: output path of the AsciiDoc validation log
    :param targetfs: target filesystem (walked and mtime-snapshotted)
    """

    # pylint: disable=too-many-arguments
    # pylint: disable=too-many-locals
    # pylint: disable=too-many-statements
    # pylint: disable=too-many-branches

    outf = ASCIIDocLog(reportname)
    rfs = buildenv.rfs

    outf.h1("ELBE Report for Project " + xml.text("project/name"))

    outf.printo(
        "report timestamp: " +
        datetime.now().strftime("%Y%m%d-%H%M%S"))
    outf.printo("elbe: %s" % str(elbe_version))

    slist = rfs.read_file('etc/apt/sources.list')
    outf.h2("Apt Sources dump")
    outf.verbatim_start()
    outf.print_raw(slist)
    outf.verbatim_end()

    # The apt preferences file is optional; treat a missing one as empty.
    try:
        prefs = rfs.read_file("etc/apt/preferences")
    except IOError:
        prefs = ""

    outf.h2("Apt Preferences dump")
    outf.verbatim_start()
    outf.print_raw(prefs)
    outf.verbatim_end()

    outf.h2("Installed Packages List")
    outf.table()

    instpkgs = cache.get_installed_pkgs()
    for p in instpkgs:
        outf.printo("|%s|%s|%s" % (p.name, p.installed_version, p.origin))
    outf.table()

    # Snapshot the file->package index and the target mtimes before
    # touching the target, so later snapshots can attribute changes to
    # the archive extraction vs. finetuning.
    index = cache.get_fileindex()
    mt_index = targetfs.mtime_snap()

    outf.h2("archive extract")

    if xml.has("archive") and xml.text("archive") is not None:
        with xml.archive_tmpfile() as fp:
            outf.do('tar xvfj "%s" -C "%s"' % (fp.name, targetfs.path))
        mt_index_postarch = targetfs.mtime_snap()
    else:
        mt_index_postarch = mt_index

    outf.h2("finetuning log")
    outf.verbatim_start()

    if xml.has("target/finetuning"):
        do_finetuning(xml, outf, buildenv, targetfs)
        mt_index_post_fine = targetfs.mtime_snap()
    else:
        mt_index_post_fine = mt_index_postarch

    outf.verbatim_end()

    outf.h2("fileslist")
    outf.table()

    tgt_pkg_list = set()

    for fpath, _ in targetfs.walk_files():
        if fpath in index:
            pkg = index[fpath]
            tgt_pkg_list.add(pkg)
        else:
            pkg = "postinst generated"

        # Classify the file's origin by comparing the three mtime
        # snapshots (initial, post-archive, post-finetuning).
        if fpath in mt_index_post_fine:
            if fpath in mt_index_postarch:
                if mt_index_post_fine[fpath] != mt_index_postarch[fpath]:
                    pkg = "modified finetuning"
                elif fpath in mt_index:
                    if mt_index_postarch[fpath] != mt_index[fpath]:
                        pkg = "from archive"
                    # else leave pkg as is
                else:
                    pkg = "added in archive"
            else:
                pkg = "added in finetuning"
        # else leave pkg as is

        outf.printo("|+%s+|%s" % (fpath, pkg))

    outf.table()

    outf.h2("Deleted Files")
    outf.table()
    # Files present in the initial snapshot but absent after
    # finetuning were deleted along the way.
    for fpath in list(mt_index.keys()):
        if fpath not in mt_index_post_fine:
            if fpath in index:
                pkg = index[fpath]
            else:
                pkg = "postinst generated"
            outf.printo("|+%s+|%s" % (fpath, pkg))
    outf.table()

    outf.h2("Target Package List")
    outf.table()
    instpkgs = cache.get_installed_pkgs()
    pkgindex = {}
    for p in instpkgs:
        pkgindex[p.name] = p

    # Optionally mirror the package list into the target itself.  Close
    # the handle in a finally block so it is not leaked when emitting a
    # table row raises (the original code left it open on error).
    pkglist_f = None
    if xml.has("target/pkgversionlist"):
        targetfs.remove('etc/elbe_pkglist')
        pkglist_f = targetfs.open('etc/elbe_pkglist', 'w')
    try:
        for pkg in tgt_pkg_list:
            p = pkgindex[pkg]
            outf.printo(
                "|%s|%s|%s|%s" %
                (p.name,
                 p.installed_version,
                 p.is_auto_installed,
                 p.installed_md5))
            if pkglist_f is not None:
                pkglist_f.write(
                    "%s %s %s\n" %
                    (p.name,
                     p.installed_version,
                     p.installed_md5))
    finally:
        if pkglist_f is not None:
            pkglist_f.close()
    outf.table()

    # Without an archive there is nothing left to validate.
    if not xml.has("archive") or xml.text("archive") is None:
        return

    elog = ASCIIDocLog(errorname, True)

    elog.h1("Archive validation")

    errors = 0

    # Every file the archive added or changed must survive finetuning
    # untouched; report deletions and modifications.
    for fpath in list(mt_index_postarch.keys()):
        if fpath not in mt_index or \
                mt_index_postarch[fpath] != mt_index[fpath]:
            if fpath not in mt_index_post_fine:
                elog.printo(
                        "- archive file %s deleted in finetuning" %
                        fpath)
                errors += 1
            elif mt_index_post_fine[fpath] != mt_index_postarch[fpath]:
                elog.printo(
                        "- archive file %s modified in finetuning" %
                        fpath)
                errors += 1

    if errors == 0:
        elog.printo("No Errors found")
Exemple #16
0
def elbe_report(xml, buildenv, cache, reportname, targetfs):
    """Write the ELBE build report for a project (legacy variant).

    Produces an AsciiDoc report at *reportname* with apt sources and
    preferences dumps, the installed package list, archive-extraction
    logs (before and after finetuning), a per-file origin table, the
    deleted-files list, and the target package list (optionally also
    written to etc/elbe_pkglist on the target).

    :param xml: project XML accessor (has()/text()/archive_tmpfile())
    :param buildenv: build environment; its .rfs is read for apt config
    :param cache: package cache (installed packages, file index)
    :param reportname: output path of the AsciiDoc report
    :param targetfs: target filesystem (walked and mtime-snapshotted)
    """
    outf = ASCIIDocLog(reportname)
    rfs = buildenv.rfs

    outf.h1("ELBE Report for Project " + xml.text("project/name"))

    outf.printo("report timestamp: " + datetime.now().strftime("%Y%m%d-%H%M%S"))

    slist = rfs.read_file('etc/apt/sources.list')
    outf.h2("Apt Sources dump")
    outf.verbatim_start()
    outf.print_raw(slist)
    outf.verbatim_end()

    # The apt preferences file is optional; treat a missing one as empty.
    try:
        prefs = rfs.read_file("etc/apt/preferences")
    except IOError:
        prefs = ""

    outf.h2("Apt Preferences dump")
    outf.verbatim_start()
    outf.print_raw(prefs)
    outf.verbatim_end()

    outf.h2("Installed Packages List")
    outf.table()

    instpkgs = cache.get_installed_pkgs()
    for p in instpkgs:
        outf.printo("|%s|%s|%s" % (p.name, p.installed_version, p.origin))
    outf.table()

    # archive extraction is done before and after finetuning the first
    # extraction is needed that the files can be used (copied/moved to the
    # buildenv in finetuning
    # the second extraction is done to ensure that files from the archive
    # can't be modified/removed in finetuning

    outf.h2("archive extract before finetuning")

    if xml.has("archive"):
        with xml.archive_tmpfile() as fp:
            outf.do('tar xvfj "%s" -C "%s"' % (fp.name, targetfs.path))

    outf.h2("finetuning log")
    outf.verbatim_start()

    # Snapshot the file->package index and mtimes so changes can be
    # attributed to finetuning vs. the second archive extraction.
    index = cache.get_fileindex()
    mt_index = targetfs.mtime_snap()
    if xml.has("target/finetuning"):
        do_finetuning(xml, outf, buildenv, targetfs)
        mt_index_post_fine = targetfs.mtime_snap()
    else:
        mt_index_post_fine = mt_index

    outf.verbatim_end()

    outf.h2("archive extract after finetuning")

    if xml.has("archive"):
        with xml.archive_tmpfile() as fp:
            outf.do('tar xvfj "%s" -C "%s"' % (fp.name, targetfs.path))
        mt_index_post_arch = targetfs.mtime_snap()
    else:
        mt_index_post_arch = mt_index_post_fine

    outf.h2("fileslist")
    outf.table()

    tgt_pkg_list = set()

    # NOTE: dict.has_key() was removed in Python 3; the equivalent
    # `in` operator is used below and behaves identically on Python 2.
    for fpath, realpath in targetfs.walk_files():
        if fpath in index:
            pkg = index[fpath]
            tgt_pkg_list.add(pkg)
        else:
            pkg = "postinst generated"

        # Classify the file's origin from the mtime snapshots.
        if fpath in mt_index_post_fine and fpath in mt_index:
            if mt_index_post_fine[fpath] > mt_index[fpath]:
                pkg = "modified finetuning"
        if fpath in mt_index_post_fine:
            if mt_index_post_arch[fpath] > mt_index_post_fine[fpath]:
                pkg = "from archive"
            elif fpath not in mt_index:
                pkg = "added in finetuning"
        else:
            pkg = "added in archive"

        outf.printo("|+%s+|%s" % (fpath, pkg))

    outf.table()

    outf.h2("Deleted Files")
    outf.table()
    # Files present initially but absent after the final archive
    # extraction were deleted along the way.
    for fpath in mt_index.keys():
        if fpath not in mt_index_post_arch:
            if fpath in index:
                pkg = index[fpath]
            else:
                pkg = "postinst generated"
            outf.printo("|+%s+|%s" % (fpath, pkg))
    outf.table()

    outf.h2("Target Package List")
    outf.table()
    instpkgs = cache.get_installed_pkgs()
    pkgindex = {}
    for p in instpkgs:
        pkgindex[p.name] = p

    # Optionally mirror the package list into the target itself.  Close
    # the handle in a finally block so it is not leaked when emitting a
    # table row raises (the original code left it open on error).
    pkglist_f = None
    if xml.has("target/pkgversionlist"):
        targetfs.remove('etc/elbe_pkglist')
        pkglist_f = targetfs.open('etc/elbe_pkglist', 'w')
    try:
        for pkg in tgt_pkg_list:
            p = pkgindex[pkg]
            outf.printo("|%s|%s|%s|%s" % (p.name, p.installed_version, p.is_auto_installed, p.installed_md5))
            if pkglist_f is not None:
                pkglist_f.write("%s %s %s\n" % (p.name, p.installed_version, p.installed_md5))
    finally:
        if pkglist_f is not None:
            pkglist_f.close()
    outf.table()