Example #1
def main(args, config):
    abspath = os.path.abspath(args.directory)
    for fp in os.listdir(abspath):
        path = os.path.join(abspath, fp)
        if args.dud and fnmatch.fnmatch(path, "*.dud"):
            with session() as s:
                process_dud(config, s, path)
        if args.changes and fnmatch.fnmatch(path, "*.changes"):
            with session() as s:
                process_changes(args.group, config, s, path)
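Every example on this page funnels its database work through the same session() context manager, which commits on success and rolls back on error. The sketch below is a plausible implementation assuming SQLAlchemy's sessionmaker; the engine URL and the _engine/_Session names are illustrative placeholders, not the project's actual code.

from contextlib import contextmanager

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Illustrative engine and session factory; the real project builds these
# from its own configuration, and the URL here is only a placeholder.
_engine = create_engine("postgresql:///debile")
_Session = sessionmaker(bind=_engine)


@contextmanager
def session():
    s = _Session()
    try:
        yield s
        s.commit()    # persist everything done inside the `with` block
    except Exception:
        s.rollback()  # discard partial work on any error
        raise
    finally:
        s.close()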
Example #2
def check_shutdown():
    with session() as s:
        shutdown = not s.query(exists().where(
            (Job.assigned_at != None) & (Job.finished_at == None))
        ).scalar()
        if shutdown:
            raise SystemExit(0)
Example #3
    def unblock_jobs(self, suite):
        bcheck_data = self._create_depwait_report(suite)

        with session() as s:
            jobs = (
                s.query(Job)
                .join(Job.check).join(Job.source).join(Source.group_suite)
                .join(GroupSuite.group).join(GroupSuite.suite)
                .filter(
                    Group.name == "default",
                    Suite.name == suite,
                    Check.build == True,
                    (Job.dose_report != None) | ~Job.built_binaries.any(),
                )
            )

            for job in jobs:
                try:
                    report = self._get_package_depwait_report(bcheck_data, job)
                    if report and report['status'] != "ok":
                        dose_report = "Unknown problem"
                        for reason in report["reasons"]:
                            if "missing" in reason:
                                dose_report = "Unsat dependency %s" % (
                                    reason["missing"]["pkg"]["unsat-dependency"])
                                break
                            elif "conflict" in reason:
                                dose_report = "Conflict between %s and %s" % (
                                    reason["conflict"]["pkg1"]["package"],
                                    reason["conflict"]["pkg2"]["package"])
                                break
                        if job.dose_report != dose_report:
                            job.dose_report = dose_report
                    elif job.dose_report != None:
                        job.dose_report = None
                        print("Unblocked job %s (%s) %s" % (job.source.name, job.source.version, job.name))
                except Exception as ex:
                    print("Skipping %s (%s) %s due to error: %s" % (job.source.name, job.source.version, job.name, str(ex)))
Example #4
    def import_pkgs(self, suite):
        pkg_dict = self._pkginfo.get_packages_dict(suite)

        for pkg in pkg_dict.values():
            if pkg.suite.startswith("buildq"):
                _, _, main_suite = pkg.suite.partition("-")
                pkg.queue_name = pkg.suite
                pkg.suite = main_suite
            try:
                with session() as s:
                    source = (
                        s.query(Source)
                        .join(Source.group_suite)
                        .join(GroupSuite.group)
                        .join(GroupSuite.suite)
                        .filter(
                            Source.name == pkg.pkgname,
                            Source.version == pkg.version,
                            Group.name == "default",
                            Suite.name == pkg.suite,
                        )
                        .first()
                    )

                    if not source:
                        self._create_debile_source(s, pkg)
                    elif pkg.installed_archs:
                        self._create_debile_binaries(s, source, pkg)

            except Exception as ex:
                print("Skipping %s (%s) in %s due to error: %s" % (pkg.pkgname, pkg.version, pkg.suite, str(ex)))
Example #5
    def reschedule_jobs(self):
        with session() as s:

            cutoff = datetime.utcnow() - timedelta(days=1)
            jobs = s.query(Job).filter(
                Job.failed.is_(None),
                Job.finished_at != None,
                Job.finished_at < cutoff,
            )

            for job in jobs:
                # Still missing the .dud a day after the builder told debile-master it had finished the job
                print("Rescheduling %s in %s due to missing *.dud upload" % (str(job), str(job.group_suite)))
                job.failed = None
                job.builder = None
                job.assigned_at = None
                job.finished_at = None

            cutoff = datetime.utcnow() - timedelta(days=7)
            jobs = (
                s.query(Job)
                .join(Job.check)
                .filter(
                    Check.build == True,
                    Job.failed.is_(False),
                    ~Job.built_binaries.any(),
                    Job.finished_at != None,
                    Job.finished_at < cutoff,
                )
            )

            for job in jobs:
                # Still missing the .changes a week after the builder told debile-master it had finished the build job
                print("Rescheduling %s in %s due to missing *.changes upload" % (str(job), str(job.group_suite)))
                job.failed = None
                job.builder = None
                job.assigned_at = None
                job.finished_at = None
Example #6
    def prune_pkgs(self, suite):
        base_suite = self._conf.get_base_suite(suite)
        suites = [suite, base_suite] if suite != base_suite else [suite]
        components = self._conf.get_supported_components(base_suite).split(" ")

        pkg_list = []
        for s in suites:
            for c in components:
                pkg_list += self._pkginfo._get_package_list(s, c)

        pkgs = set()
        pkgs.update(pkg.pkgname + " " + pkg.version for pkg in pkg_list)

        with session() as s:
            sources = (
                s.query(Source)
                .join(Source.group_suite)
                .join(GroupSuite.group)
                .join(GroupSuite.suite)
                .filter(Group.name == "default", Suite.name == suite)
            )

            for source in sources:
                if ((source.name + " " + source.version) not in pkgs
                        and not os.path.exists(source.dsc_path)):
                    print("Removed obsolete source %s %s" % (source.name, source.version))
                    # Package no longer in the archive (neither in the index nor the pool)
                    s.delete(source)
Example #7
    def clean_results(self):
        path = None
        dirs = set()

        with session() as s:
            group = s.query(Group).filter_by(name="default").one()
            path = group.files_path

            dirs.update(
                x.directory
                for x in s.query(Result)
                .join(Result.job)
                .join(Job.source)
                .join(Source.group_suite)
                .filter(GroupSuite.group == group)
            )

        old_cwd = os.getcwd()
        try:
            os.chdir(path)
            for dir in glob.iglob("*/*/*"):
                if os.path.isdir(dir) and dir not in dirs:
                    # An orphaned results path, remove it
                    shutil.rmtree(dir)
                    print("Removed orphaned result dir %s" % dir)
        finally:
            os.chdir(old_cwd)
Example #8
    def prune_pkgs(self, suite):
        base_suite = self._conf.get_base_suite(suite)
        suites = [suite, base_suite] if suite != base_suite else [suite]
        components = self._conf.get_supported_components(base_suite).split(" ")

        pkg_list = []
        for s in suites:
            for c in components:
                pkg_list += self._pkginfo._get_package_list(s, c)

        pkgs = set()
        pkgs.update(pkg.pkgname + " " + pkg.version for pkg in pkg_list)

        with session() as s:
            sources = s.query(Source).join(Source.group_suite).join(
                GroupSuite.group).join(GroupSuite.suite).filter(
                    Group.name == "default",
                    Suite.name == suite,
                )

            for source in sources:
                if ((source.name + " " + source.version) not in pkgs
                        and not os.path.exists(source.dsc_path)):
                    print("Removed obsolete source %s %s" %
                          (source.name, source.version))
                    # Package no longer in the archive (neither in the index nor the pool)
                    s.delete(source)
Example #9
def check_shutdown():
    with session() as s:
        shutdown = not s.query(
            exists().where((Job.assigned_at != None)
                           & (Job.finished_at == None))).scalar()
        if shutdown:
            raise SystemExit(0)
Example #10
def process_directory(path):
    with session() as s:
        abspath = os.path.abspath(path)
        for fp in os.listdir(abspath):
            path = os.path.join(abspath, fp)
            for glob, handler in DELEGATE.items():
                if fnmatch.fnmatch(path, glob):
                    handler(s, path)
                    break
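process_directory() dispatches each incoming file to a handler looked up by glob pattern in the DELEGATE table, where each handler takes (session, path). The project's real table is not shown in these examples; the sketch below is hypothetical, with handler names invented purely for illustration.

# Hypothetical DELEGATE table: glob pattern -> handler(session, path).
# The handler names below are placeholders, not the project's own.
def handle_dud(s, path):
    """Import a *.dud result upload into the database (placeholder)."""

def handle_changes(s, path):
    """Import a *.changes build upload into the database (placeholder)."""

DELEGATE = {
    "*.dud": handle_dud,
    "*.changes": handle_changes,
}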
Example #12
    def handle_one_request(self):
        try:
            with session() as s:
                NAMESPACE.session = s
                SimpleXMLRPCRequestHandler.handle_one_request(self)
        finally:
            NAMESPACE.session = None
            NAMESPACE.machine = None
            NAMESPACE.user = None

        if DebileMasterInterface.shutdown_request:
            check_shutdown()
Example #14
    def clean_results(self):
        path = None
        dirs = set()

        with session() as s:
            group = s.query(Group).filter_by(name="default").one()
            path = group.files_path

            dirs.update(x.directory for x in s.query(Result).join(
                Result.job).join(Job.source).join(Source.group_suite).filter(
                    GroupSuite.group == group))

        old_cwd = os.getcwd()
        try:
            os.chdir(path)
            for dir in glob.iglob("*/*/*"):
                if os.path.isdir(dir) and dir not in dirs:
                    # An orphaned results path, remove it
                    shutil.rmtree(dir)
                    print("Removed orphaned result dir %s" % dir)
        finally:
            os.chdir(old_cwd)
Example #15
    def import_pkgs(self, suite):
        pkg_dict = self._pkginfo.get_packages_dict(suite)

        for pkg in pkg_dict.values():
            try:
                with session() as s:
                    source = s.query(Source).join(Source.group_suite).join(
                        GroupSuite.group).join(GroupSuite.suite).filter(
                            Source.name == pkg.pkgname,
                            Source.version == pkg.version,
                            Group.name == "default",
                            Suite.name == pkg.suite,
                        ).first()

                    if not source:
                        self._create_debile_source(s, pkg)
                    elif pkg.installed_archs:
                        self._create_debile_binaries(s, source, pkg)

            except Exception as ex:
                print("Skipping %s (%s) in %s due to error: %s" %
                      (pkg.pkgname, pkg.version, pkg.suite, str(ex)))
Example #16
    def unblock_jobs(self, suite):
        bcheck_data = self._create_depwait_report(suite)

        with session() as s:
            jobs = s.query(Job).join(Job.check).join(Job.source).join(
                Source.group_suite).join(
                    GroupSuite.group).join(GroupSuite.suite).filter(
                        Group.name == "default", Suite.name == suite,
                        Check.build == True,
                        (Job.dose_report != None) | ~Job.built_binaries.any())

            for job in jobs:
                try:
                    report = self._get_package_depwait_report(bcheck_data, job)
                    if report and report['status'] != "ok":
                        dose_report = "Unknown problem"
                        for reason in report["reasons"]:
                            if "missing" in reason:
                                dose_report = ("Unsat dependency %s" %
                                               (reason["missing"]["pkg"]
                                                ["unsat-dependency"]))
                                break
                            elif "conflict" in reason:
                                dose_report = (
                                    "Conflict between %s and %s" %
                                    (reason["conflict"]["pkg1"]["package"],
                                     reason["conflict"]["pkg2"]["package"]))
                                break
                        if job.dose_report != dose_report:
                            job.dose_report = dose_report
                    elif job.dose_report != None:
                        job.dose_report = None
                        print("Unblocked job %s (%s) %s" %
                              (job.source.name, job.source.version, job.name))
                except Exception as ex:
                    print("Skipping %s (%s) %s due to error: %s" %
                          (job.source.name, job.source.version, job.name,
                           str(ex)))
Example #17
    def reschedule_jobs(self):
        with session() as s:

            cutoff = datetime.utcnow() - timedelta(days=1)
            jobs = s.query(Job).filter(
                Job.failed.is_(None),
                Job.finished_at != None,
                Job.finished_at < cutoff,
            )

            for job in jobs:
                # Still missing the .dud a day after the builder told debile-master it had finished the job
                print("Rescheduling %s in %s due to missing *.dud upload" %
                      (str(job), str(job.group_suite)))
                job.failed = None
                job.builder = None
                job.assigned_at = None
                job.finished_at = None

            cutoff = datetime.utcnow() - timedelta(days=7)
            jobs = s.query(Job).join(Job.check).filter(
                Check.build == True,
                Job.failed.is_(False),
                ~Job.built_binaries.any(),
                Job.finished_at != None,
                Job.finished_at < cutoff,
            )

            for job in jobs:
                # Still missing the .changes a week after the builder told debile-master it had finished the build job
                print("Rescheduling %s in %s due to missing *.changes upload" %
                      (str(job), str(job.group_suite)))
                job.failed = None
                job.builder = None
                job.assigned_at = None
                job.finished_at = None
Example #18
def import_dict(obj):
    maintainer = obj.pop("Maintainer")
    users = obj.pop("Users")
    builders = obj.pop("Builders")
    suites = obj.pop("Suites")
    groups = obj.pop("Groups")
    checks = obj.pop("Checks")
    arches = obj.pop("Arches")

    if obj != {}:
        for key in obj:
            print("Ignoring key %s" % key)

    with session() as s:
        for user in users:
            existing = None
            try:
                existing = s.query(Person).filter_by(
                    username=user['username']
                ).one()
            except NoResultFound:
                pass

            p = Person(**user)

            if existing:
                p.id = existing.id
                s.merge(p)
            else:
                s.add(p)

        for builder in builders:
            username = builder.pop('maintainer')
            who = s.query(Person).filter_by(username=username).one()
            builder['maintainer'] = who
            builder['last_ping'] = dt.datetime.utcnow()
            s.add(Builder(**builder))

        for suite in suites:
            s.add(Suite(**suite))

        for arch in arches:
            s.add(Arch(name=arch['name']))

        for group in groups:
            arches = group.pop('arches')
            suites = group.pop('suites')

            who = s.query(Person).filter_by(username=group['maintainer']).one()
            group['maintainer'] = who
            group = Group(**group)
            s.add(group)

            for arch in arches:
                arch = s.query(Arch).filter_by(name=arch).one()
                ga = GroupArch(group=group, arch=arch)
                s.add(ga)

            for suite in suites:
                suite = s.query(Suite).filter_by(name=suite).one()
                ga = GroupSuite(group=group, suite=suite)
                s.add(ga)

        for check in checks:
            group = s.query(Group).filter_by(name=check['group']).one()
            check['group'] = group
            s.add(Check(**check))
Example #19
def main(args, config):
    obj = yaml.safe_load(open(args.file, 'r'))

    users = obj.pop("Users", [])
    builders = obj.pop("Builders", [])
    suites = obj.pop("Suites", [])
    components = obj.pop("Components", [])
    arches = obj.pop("Arches", [])
    checks = obj.pop("Checks", [])
    groups = obj.pop("Groups", [])

    with session() as s:
        Base.metadata.create_all(s.bind)

        for user in users:
            s.add(Person(**user))

        for builder in builders:
            who = s.query(Person).filter_by(email=builder['maintainer']).one()
            builder['maintainer'] = who
            builder['last_ping'] = datetime.utcnow()
            s.add(Builder(**builder))

        for suite in suites:
            s.add(Suite(**suite))

        for component in components:
            s.add(Component(**component))

        for arch in ["source", "all"]:
            s.add(Arch(name=arch))

        for arch in arches:
            s.add(Arch(name=arch['name']))

        for check in checks:
            s.add(Check(**check))

        for group in groups:
            suites = group.pop('suites')

            who = s.query(Person).filter_by(email=group['maintainer']).one()
            group['maintainer'] = who
            group = Group(**group)
            s.add(group)

            for suite in suites:
                gs = GroupSuite(group=group, suite=s.query(Suite).filter_by(
                    name=suite['suite']).one())

                for component in suite.pop('components'):
                    component = s.query(Component).filter_by(
                        name=component
                    ).one()
                    gs.components.append(component)

                for arch in ["source", "all"] + suite.pop('arches'):
                    arch = s.query(Arch).filter_by(name=arch).one()
                    gs.arches.append(arch)

                for check in suite.pop('checks'):
                    check = s.query(Check).filter_by(name=check).one()
                    gs.checks.append(check)

                s.add(gs)

        sane = True
        for key in obj:
            print("Unknown key '%s' in yaml file '%s'" % (key, args.file))
            sane = False

        if not s.query(exists().where(Person.id == Person.id)).scalar():
            print("No users in yaml file '%s'" % args.file)
            sane = False
        elif not s.query(exists().where(
                (Person.ssl != None) & (Person.ssl != DEADBEEF))).scalar():
            print("No enabled users in yaml file '%s' (user 'ssl' key missing"
                  " or dummy 'DEADBEEF' string)" % args.file)
            sane = False

        if not s.query(exists().where(GroupSuite.id == GroupSuite.id)).scalar():
            print("No group in yaml file '%s'" % args.file)
            sane = False

        for group in s.query(Group).filter(~Group.group_suites.any()):
            print("No suites in group '%s' " % group.name)
            sane = False

        for gs in s.query(GroupSuite).filter(~GroupSuite.arches.any(
                (Arch.name != 'source') & (Arch.name != 'all'))):
            print("No arches in group '%s' suite '%s'" % (gs.group.name, gs.suite.name))
            sane = False

        for gs in s.query(GroupSuite).filter(~GroupSuite.components.any()):
            print("No components in group '%s' suite '%s'" % (gs.group.name, gs.suite.name))
            sane = False

        for gs in s.query(GroupSuite).filter(~GroupSuite.checks.any()):
            print("No checks in group '%s' suite '%s'" % (gs.group.name, gs.suite.name))
            sane = False

        if not sane and not args.force:
            raise Exception("Sanity checks failed, use --force to override")
Example #20
def import_dict(obj):
    users = obj.pop("Users", [])
    builders = obj.pop("Builders", [])
    suites = obj.pop("Suites", [])
    components = obj.pop("Components", [])
    arches = obj.pop("Arches", [])
    checks = obj.pop("Checks", [])
    groups = obj.pop("Groups", [])

    if obj != {}:
        for key in obj:
            print("Ignoring key %s" % key)

    with session() as s:
        for user in users:
            existing = None
            try:
                existing = s.query(Person).filter_by(
                    username=user['username']
                ).one()
            except NoResultFound:
                pass

            p = Person(**user)

            if existing:
                p.id = existing.id
                s.merge(p)
            else:
                s.add(p)

        for builder in builders:
            username = builder.pop('maintainer')
            who = s.query(Person).filter_by(username=username).one()
            builder['maintainer'] = who
            builder['last_ping'] = datetime.utcnow()
            s.add(Builder(**builder))

        for suite in suites:
            s.add(Suite(**suite))

        for component in components:
            s.add(Component(**component))

        for arch in arches:
            s.add(Arch(name=arch['name']))

        for check in checks:
            s.add(Check(**check))

        for group in groups:
            suites = group.pop('suites')

            who = s.query(Person).filter_by(username=group['maintainer']).one()
            group['maintainer'] = who
            group = Group(**group)
            s.add(group)

            for suite in suites:
                gs = GroupSuite(
                    group=group,
                    suite=s.query(Suite).filter_by(name=suite['suite']).one()
                )

                for component in suite.pop('components'):
                    component = s.query(Component).filter_by(name=component).one()
                    gs.components.append(component)
                for arch in suite.pop('arches'):
                    arch = s.query(Arch).filter_by(name=arch).one()
                    gs.arches.append(arch)
                for check in suite.pop('checks'):
                    check = s.query(Check).filter_by(name=check).one()
                    gs.checks.append(check)

                s.add(gs)
Example #21
def import_dict(obj):
    maintainer = obj.pop("Maintainer", None)
    users = obj.pop("Users", [])
    builders = obj.pop("Builders", [])
    suites = obj.pop("Suites", [])
    groups = obj.pop("Groups", [])
    checks = obj.pop("Checks", [])
    arches = obj.pop("Arches", [])

    if obj != {}:
        for key in obj:
            print("Ignoring key %s" % key)

    with session() as s:
        for user in users:
            existing = None
            try:
                existing = s.query(Person).filter_by(
                    username=user['username']).one()
            except NoResultFound:
                pass

            p = Person(**user)

            if existing:
                p.id = existing.id
                s.merge(p)
            else:
                s.add(p)

        for builder in builders:
            username = builder.pop('maintainer')
            who = s.query(Person).filter_by(username=username).one()
            builder['maintainer'] = who
            builder['last_ping'] = dt.datetime.utcnow()
            s.add(Builder(**builder))

        for suite in suites:
            s.add(Suite(**suite))

        for arch in arches:
            s.add(Arch(name=arch['name']))

        for group in groups:
            arches = group.pop('arches')
            suites = group.pop('suites')

            who = s.query(Person).filter_by(username=group['maintainer']).one()
            group['maintainer'] = who
            group = Group(**group)
            s.add(group)

            for arch in arches:
                arch = s.query(Arch).filter_by(name=arch).one()
                ga = GroupArch(group=group, arch=arch)
                s.add(ga)

            for suite in suites:
                suite = s.query(Suite).filter_by(name=suite).one()
                ga = GroupSuite(group=group, suite=suite)
                s.add(ga)

        for check in checks:
            group = s.query(Group).filter_by(name=check['group']).one()
            check['group'] = group
            s.add(Check(**check))
Example #22
def main(args, config):
    with session() as s:
        dimport(args, s)