def create_builder(self, name, pgp, ssl=None, ip=None):
    """Register a new build slave, keyed either by SSL cert or by IP.

    :param str name: name of the builder
    :param str pgp: path to the pgp public keyfile
    :param str ssl: path to the ssl certificate (optional)
    :param str ip: ip address of the builder (optional)
    :returns: the serialized (debilized) Builder record
    :raises ValueError: if the name is taken, or neither ssl nor ip given
    """
    duplicate = NAMESPACE.session.query(Builder).filter_by(name=name).first()
    if duplicate:
        raise ValueError("Slave already exists.")

    # Import the PGP key first; its fingerprint is stored on the record.
    pgp = import_pgp(self.pgp_keyring, pgp)

    if ssl is not None:
        cert = import_ssl(self.ssl_keyring, ssl, name)
        builder = Builder(name=name, maintainer=NAMESPACE.user, pgp=pgp,
                          ssl=cert, last_ping=datetime.utcnow())
    elif ip is not None:
        builder = Builder(name=name, maintainer=NAMESPACE.user, pgp=pgp,
                          ip=ip, last_ping=datetime.utcnow())
    else:
        raise ValueError("Need either ssl certificate or ip address")

    NAMESPACE.session.add(builder)
    emit('create', 'slave', builder.debilize())
    return builder.debilize()
def get_next_job(self, suites, components, arches, checks):
    """Hand the calling builder the highest-priority runnable job.

    Updates the builder's last_ping, then picks the oldest, least-assigned
    unfinished job matching the builder's suites/components/arches/checks,
    marks it assigned to this builder, and returns its serialized form.
    Returns None if shutting down or no job matches.
    """
    NAMESPACE.machine.last_ping = datetime.utcnow()
    if self.__class__.shutdown_request:
        # Master is draining; stop handing out work.
        return None
    # "source"/"all" are pseudo-arches; keep only real build arches here.
    arches = [x for x in arches if x not in ["source", "all"]]
    # NOTE(review): "depedencies" looks misspelled but presumably matches
    # the attribute name on the Job model — confirm before renaming.
    # `== None` is intentional: SQLAlchemy translates it to IS NULL.
    job = NAMESPACE.session.query(Job).join(Job.source).join(Source.group_suite).filter(
        ~Job.depedencies.any(),
        Job.dose_report == None,
        Job.assigned_at == None,
        Job.finished_at == None,
        Job.failed.is_(None),
        GroupSuite.suite.has(Suite.name.in_(suites)),
        Source.component.has(Component.name.in_(components)),
        # Either the job's arch is one the builder handles, or it is a
        # "source"/"all" job whose source affinity matches the builder.
        (Job.arch.has(Arch.name.in_(arches)) |
         (Job.arch.has(Arch.name.in_(["source", "all"])) &
          Source.affinity.has(Arch.name.in_(arches)))),
        Job.check.has(Check.name.in_(checks)),
    ).order_by(
        # Least-retried first, then oldest upload first.
        Job.assigned_count.asc(),
        Source.uploaded_at.asc(),
    ).first()
    if job is None:
        return None
    job.assigned_count += 1
    job.assigned_at = datetime.utcnow()
    job.builder = NAMESPACE.machine
    emit('start', 'job', job.debilize())
    return job.debilize()
def close_job(self, job_id, failed):
    """Mark job `job_id` as finished and emit a 'complete' event.

    :param int job_id: primary key of the Job to close.
    :param bool failed: failure flag from the builder (not used here;
        presumably recorded elsewhere — TODO confirm).
    :returns: True
    """
    job = NAMESPACE.session.query(Job).get(job_id)
    try:
        job.finished_at = datetime.utcnow()
        emit('complete', 'job', job.debilize())
    except AttributeError:
        # query.get() returns None for an unknown id — the job row may
        # have been removed while the builder was still running it.
        # Guarding here keeps this consistent with the other close/forfeit
        # handlers in this file.
        logger = logging.getLogger('debile')
        logger.warning("Job %d went missing during execution!", job_id,
                       exc_info=True)
    return True
def forfeit_job(self, job_id):
    """Return job `job_id` to the pool and emit an 'abort' event.

    :param int job_id: primary key of the Job being given up.
    :returns: True
    """
    job = NAMESPACE.session.query(Job).get(job_id)
    try:
        job.assigned_at = None
        job.builder = None
        emit('abort', 'job', job.debilize())
    except AttributeError:
        # query.get() returns None for an unknown id — the job row may
        # have been removed while the builder was still running it.
        # Guarding here keeps this consistent with the other close/forfeit
        # handlers in this file.
        logger = logging.getLogger('debile')
        logger.warning("Job %d went missing during execution!", job_id,
                       exc_info=True)
    return True
def _create_debile_binaries(self, session, source, pkg):
    """Create Binary/Deb rows for binaries of `pkg` already built in dak.

    For each installed arch without a Binary row, attach the binary to the
    matching build job (or create a standalone Binary if debile never
    asked for that build).
    """
    arch_all = session.query(Arch).filter(Arch.name == "all").one()
    arches = session.query(Arch).filter(Arch.name.in_(pkg.installed_archs)).all()

    if arch_all in source.arches and arch_all not in arches and source.affinity in arches:
        if not session.query(
            exists().where((Job.source == source) & (Job.arch == arch_all) & Job.check.has(Check.build == True))
        ).scalar():
            # We have the arch:affinity binary but is still lacking the arch:all binary
            # Make sure debile builds the arch:all binary separately
            check = session.query(Check).filter(Check.build == True).one()
            job = Job(check=check, arch=arch_all, source=source, binary=None)
            session.add(job)

    for arch in arches:
        # Skip arches that already have a Binary row for this source.
        if session.query(exists().where((Binary.source == source) & (Binary.arch == arch))).scalar():
            continue

        # Find the job for this binary
        job = (
            session.query(Job)
            .join(Job.check)
            .filter(Job.source == source, Job.arch == arch, Check.build == True)
            .first()
        )
        if not job and arch == arch_all and source.affinity in arches:
            # The arch:all binary might have been created by the arch:affinity build job.
            job = (
                session.query(Job)
                .join(Job.check)
                .filter(Job.source == source, Job.arch == source.affinity, Check.build == True)
                .first()
            )

        if job and (not job.finished_at or job.failed is True):
            # Dak accepted a binary upload that debile-master didn't ask for
            if arch != arch_all and not any(job.built_binaries):
                session.delete(job)
            job = None

        if job:
            binary = job.new_binary(arch)
        else:
            binary = Binary(source=source, arch=arch, uploaded_at=datetime.utcnow())
        session.add(binary)

        # NOTE(review): this inner loop rebinds `arch` and `filename` from
        # the outer scope; harmless today (nothing below reads them before
        # the outer loop reassigns) but fragile.
        for name, arch, filename in pkg.binaries:
            if arch == binary.arch.name:
                directory, _, filename = filename.rpartition("/")
                deb = Deb(binary=binary, directory=directory, filename=filename)
                session.add(deb)

        print("Created binary for %s %s on %s" % (binary.name, binary.version, binary.arch))
        emit("accept", "binary", binary.debilize())
def close_job(self, job_id, failed):
    """Mark job `job_id` as finished and emit a 'complete' event.

    :param int job_id: primary key of the Job to close.
    :param bool failed: failure flag from the builder (not used here;
        presumably recorded elsewhere — TODO confirm).
    :returns: True
    """
    job = NAMESPACE.session.query(Job).get(job_id)
    try:
        job.finished_at = datetime.utcnow()
        emit('complete', 'job', job.debilize())
    except AttributeError:
        # query.get() returned None: the job row vanished mid-build.
        logger = logging.getLogger('debile')
        # logger.warning (logger.warn is a deprecated alias); lazy %-args.
        logger.warning("Job %d went missing during execution!", job_id,
                       exc_info=True)
    return True
def forfeit_job(self, job_id):
    """Return job `job_id` to the pool and emit an 'abort' event.

    :param int job_id: primary key of the Job being given up.
    :returns: True
    """
    job = NAMESPACE.session.query(Job).get(job_id)
    try:
        job.assigned_at = None
        job.builder = None
        emit('abort', 'job', job.debilize())
    except AttributeError:
        # query.get() returned None: the job row vanished mid-build.
        logger = logging.getLogger('debile')
        # logger.warning (logger.warn is a deprecated alias); lazy %-args.
        logger.warning("Job %d went missing during execution!", job_id,
                       exc_info=True)
    return True
def create_user(self, name, email, pgp, ssl):
    """Register a new user, importing their PGP and SSL credentials.

    :raises ValueError: if a user with this email already exists.
    :returns: the serialized (debilized) Person record.
    """
    existing = NAMESPACE.session.query(Person).filter_by(email=email).first()
    if existing:
        raise ValueError("User already exists.")

    pgp_key = import_pgp(self.pgp_keyring, pgp)
    ssl_cert = import_ssl(self.ssl_keyring, ssl, name, email)

    person = Person(name=name, email=email, pgp=pgp_key, ssl=ssl_cert)
    NAMESPACE.session.add(person)
    emit('create', 'user', person.debilize())
    return person.debilize()
def reject_dud(session, dud, tag):
    """Reject an uploaded .dud result: roll back the DB session, emit a
    'reject' event tagged with the reason, and delete the uploaded files.

    :param session: SQLAlchemy session to roll back.
    :param dud: the uploaded dud object.
    :param str tag: machine-readable rejection reason.
    """
    session.rollback()
    # print() call form works on both Python 2 and 3 (was a py2-only
    # print statement).
    print("REJECT: {source} because {tag}".format(tag=tag, source=dud['Source']))
    emit('reject', 'result', {
        "tag": tag,
        "source": dud['Source'],
    })
    for fp in [dud.get_dud_file()] + dud.get_files():
        os.unlink(fp)
def reject_dud(session, dud, tag):
    """Reject an uploaded .dud result: roll back the DB session, emit a
    'reject' event tagged with the reason, and delete the uploaded files.

    :param session: SQLAlchemy session to roll back.
    :param dud: the uploaded dud object.
    :param str tag: machine-readable rejection reason.
    """
    session.rollback()
    # print() call form works on both Python 2 and 3 (was a py2-only
    # print statement).
    print("REJECT: {source} because {tag}".format(
        tag=tag, source=dud['Source']))
    emit('reject', 'result', {
        "tag": tag,
        "source": dud['Source'],
    })
    for fp in [dud.get_dud_file()] + dud.get_files():
        os.unlink(fp)
def create_builder(self, name, pgp, ssl):
    """Register a new build slave with its PGP key and SSL certificate.

    :raises ValueError: if a builder with this name already exists.
    :returns: the serialized (debilized) Builder record.
    """
    duplicate = NAMESPACE.session.query(Builder).filter_by(name=name).first()
    if duplicate:
        raise ValueError("Slave already exists.")

    pgp_key = import_pgp(self.pgp_keyring, pgp)
    ssl_cert = import_ssl(self.ssl_keyring, ssl, name)

    builder = Builder(name=name, maintainer=NAMESPACE.user, pgp=pgp_key,
                      ssl=ssl_cert, last_ping=datetime.utcnow())
    NAMESPACE.session.add(builder)
    emit('create', 'slave', builder.debilize())
    return builder.debilize()
def reject_changes(session, changes, tag):
    """Reject an uploaded .changes: roll back the DB session, emit a
    'reject' event tagged with the reason, and delete the uploaded files.

    :param session: SQLAlchemy session to roll back.
    :param changes: the uploaded changes object.
    :param str tag: machine-readable rejection reason.
    """
    session.rollback()
    # print() call form works on both Python 2 and 3 (was a py2-only
    # print statement).
    print("REJECT: {source} because {tag}".format(
        tag=tag, source=changes.get_package_name()))
    emit('reject', 'source', {
        "tag": tag,
        "source": changes.get_package_name(),
    })
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)
def create_user(self, name, email, pgp, ssl=None, ip=None):
    """Register a new user, importing PGP and (optionally) SSL credentials.

    :raises ValueError: if a user with this email already exists.
    :returns: the serialized (debilized) Person record.
    """
    existing = NAMESPACE.session.query(Person).filter_by(email=email).first()
    if existing:
        raise ValueError("User already exists.")

    pgp_key = import_pgp(self.pgp_keyring, pgp)
    # SSL is optional here; the record may carry an IP instead.
    ssl_cert = ssl
    if ssl_cert is not None:
        ssl_cert = import_ssl(self.ssl_keyring, ssl_cert, name, email)

    person = Person(name=name, email=email, pgp=pgp_key, ssl=ssl_cert, ip=ip)
    NAMESPACE.session.add(person)
    emit('create', 'user', person.debilize())
    return person.debilize()
def reject_dud(session, dud, tag):
    """Reject an uploaded .dud result: roll back the DB session, report
    any validation error, emit a 'reject' event, and delete the files.

    :param session: SQLAlchemy session to roll back.
    :param dud: the uploaded dud object.
    :param str tag: machine-readable rejection reason.
    """
    session.rollback()
    # print() call form works on both Python 2 and 3 (was a py2-only
    # print statement).
    print("REJECT: {source} because {tag}".format(
        tag=tag, source=dud['Source']))
    e = None
    try:
        # Best effort: surface the underlying validation problem, if any.
        dud.validate()
    except DudFileException as e:
        print(e)
    emit('reject', 'result', {
        "tag": tag,
        "source": dud['Source'],
    })
    for fp in [dud.get_filename()] + dud.get_files():
        os.unlink(fp)
def accept_dud(config, session, dud, builder):
    """Accept an uploaded .dud result: store its firehose report as a
    Result on the originating job, file it in the result repo, emit a
    'receive' event, and remove the uploaded files.

    Rejects the dud instead if its files are already registered.
    """
    fire = dud.get_firehose()
    # Direct boolean expression instead of `True if ... else False`.
    failed = dud.get('X-Debile-Failed', None) == "Yes"

    job = session.query(Job).get(dud['X-Debile-Job'])

    fire, _ = idify(fire)
    fire = uniquify(session.bind, fire)

    result = job.new_result(fire, failed)
    session.add(result)

    try:
        repo = FileRepo()
        repo.add_dud(result.path, dud, config['filerepo_chmod_mode'])
    except FilesAlreadyRegistered:
        return reject_dud(session, dud, "dud-files-already-registered")

    emit('receive', 'result', result.debilize())
    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [dud.get_dud_file()] + dud.get_files():
        os.unlink(fp)
def accept_source_changes(default_group, config, session, changes, user):
    """Validate and import an uploaded source .changes into the DB/repo.

    Rejects (via reject_changes) on unknown group/suite, dsc/changes
    mismatch, duplicate source in the group, or an older version than the
    suite already carries. On success: retires stale old-version jobs,
    creates the Source plus its jobs, adds the upload to the repo, emits
    an 'accept' event, and deletes the uploaded files.
    """
    group = changes.get('X-Debile-Group', default_group)
    suite = changes['Distribution']
    try:
        group_suite = session.query(GroupSuite).join(GroupSuite.group).join(GroupSuite.suite).filter(
            Group.name == group,
            Suite.name == suite,
        ).one()
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        return reject_changes(session, changes, "invalid-suite-for-group")

    dsc = changes.get_dsc_obj()
    # NOTE(review): tag reads "march", likely meant "match"; changing it
    # alters the emitted tag, so coordinate with consumers before fixing.
    if dsc['Source'] != changes['Source']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")
    if dsc['Version'] != changes['Version']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")

    try:
        # Reject if this exact source/version already exists in the group.
        source = session.query(Source).filter(
            Source.name == dsc['Source'],
            Source.version == dsc['Version'],
            GroupSuite.group == group_suite.group,
        ).one()
        return reject_changes(session, changes, "source-already-in-group")
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        pass

    oldsources = session.query(Source).filter(
        Source.group_suite == group_suite,
        Source.name == dsc['Source'],
    )
    for oldsource in oldsources:
        if version_compare(oldsource.version, dsc['Version']) > 0:
            return reject_changes(session, changes, "newer-source-already-in-suite")

    # Drop any old jobs that are still pending.
    for oldsource in oldsources:
        for job in oldsource.jobs:
            if (not any(job.results) and not any(job.built_binaries)):
                session.delete(job)
            elif job.failed is None:
                job.failed = True
        if not any(oldsource.jobs):
            session.delete(oldsource)

    component = session.query(Component).filter_by(name="main").one()

    # The build-arch-indep affinity field name varied historically.
    if 'Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['Build-Architecture-Indep']
    elif 'X-Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['X-Build-Architecture-Indep']
    elif 'X-Arch-Indep-Build-Arch' in dsc:
        valid_affinities = dsc['X-Arch-Indep-Build-Arch']
    else:
        valid_affinities = "any"

    # no_autoflush: the half-built Source must not be flushed by queries
    # issued inside create_source/create_jobs.
    with session.no_autoflush:
        source = create_source(dsc, group_suite, component, user,
                               config["affinity_preference"], valid_affinities)
        create_jobs(source)
    session.add(source)  # We have a changes in order. Let's roll.

    repo = Repo(group_suite.group.repo_path)
    repo.add_changes(changes)
    (source.directory, source.dsc_filename) = repo.find_dsc(source)

    emit('accept', 'source', source.debilize())

    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)
def _create_debile_source(self, session, pkg):
    """Import a source package already accepted by dak into debile's DB.

    Creates the Source row (with Binary/Deb rows for binaries dak already
    has), schedules its jobs, and retires older versions of the same
    source in the same group-suite.
    """
    user = session.query(Person).filter_by(email="*****@*****.**").one()
    group_suite = session.query(GroupSuite).join(GroupSuite.group).join(GroupSuite.suite).filter(
        Group.name == "default",
        Suite.name == pkg.suite,
    ).one()
    component = session.query(Component).filter(
        Component.name == pkg.component
    ).one()

    # BUGFIX: the template previously held a literal placeholder instead
    # of "{filename}", so the filename=pkg.dsc kwarg was silently ignored
    # and the path never pointed at the .dsc file.
    dsc_fname = "{root}/{directory}/{filename}".format(
        root=self._archive_path,
        directory=pkg.directory,
        filename=pkg.dsc,
    )
    dsc = Dsc(open(dsc_fname))

    # The build-arch-indep affinity field name varied historically.
    if 'Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['Build-Architecture-Indep']
    elif 'X-Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['X-Build-Architecture-Indep']
    elif 'X-Arch-Indep-Build-Arch' in dsc:
        valid_affinities = dsc['X-Arch-Indep-Build-Arch']
    else:
        valid_affinities = "any"

    source = create_source(dsc, group_suite, component, user,
                           self._affinity_preference, valid_affinities)
    source.directory = pkg.directory
    source.dsc_filename = pkg.dsc
    session.add(source)

    for aname in pkg.installed_archs:
        arch = session.query(Arch).filter_by(name=aname).one()
        binary = Binary(source=source, arch=arch, uploaded_at=source.uploaded_at)
        session.add(binary)
        # NOTE(review): this inner loop rebinds `arch`/`filename`; harmless
        # today (the outer loop reassigns them) but fragile.
        for name, arch, filename in pkg.binaries:
            if arch == binary.arch.name:
                directory, _, filename = filename.rpartition('/')
                deb = Deb(binary=binary, directory=directory, filename=filename)
                session.add(deb)

    create_jobs(source, dose_report="No dose-builddebcheck report available yet.")

    # Drop any old jobs that are still pending.
    oldsources = session.query(Source).filter(
        Source.group_suite == source.group_suite,
        Source.name == source.name,
    )
    for oldsource in oldsources:
        if version_compare(oldsource.version, source.version) >= 0:
            continue
        for job in oldsource.jobs:
            if (not any(job.results) and not any(job.built_binaries)):
                session.delete(job)
            elif job.failed is None:
                job.failed = True
        # If nothing buildable remains, drop the old source entirely.
        if not any(job.check.build for job in oldsource.jobs):
            session.delete(oldsource)

    print("Created source for %s %s" % (source.name, source.version))
    emit('accept', 'source', source.debilize())
def _create_debile_source(self, session, pkg):
    """Import a source package already accepted by dak into debile's DB.

    Handles packages sitting in a build-queue as well as the main archive.
    Creates the Source row (with Binary/Deb rows for binaries dak already
    has), schedules its jobs, and retires older versions of the same
    source in the same group-suite.
    """
    user = session.query(Person).filter_by(email="*****@*****.**").one()
    group_suite = (
        session.query(GroupSuite)
        .join(GroupSuite.group)
        .join(GroupSuite.suite)
        .filter(Group.name == "default", Suite.name == pkg.suite)
        .one()
    )
    component = session.query(Component).filter(Component.name == pkg.component).one()

    if pkg.queue_name:
        # package is in a build-queue
        aroot = self._buildq_path
    else:
        aroot = self._archive_path
    # BUGFIX: the template previously held a literal placeholder instead
    # of "{filename}", so the filename=pkg.dsc kwarg was silently ignored
    # and the path never pointed at the .dsc file.
    dsc_fname = "{root}/{directory}/{filename}".format(
        root=aroot, directory=pkg.directory, filename=pkg.dsc)
    dsc = Dsc(open(dsc_fname))

    # The build-arch-indep affinity field name varied historically.
    if "Build-Architecture-Indep" in dsc:
        valid_affinities = dsc["Build-Architecture-Indep"]
    elif "X-Build-Architecture-Indep" in dsc:
        valid_affinities = dsc["X-Build-Architecture-Indep"]
    elif "X-Arch-Indep-Build-Arch" in dsc:
        valid_affinities = dsc["X-Arch-Indep-Build-Arch"]
    else:
        valid_affinities = "any"

    source = create_source(dsc, group_suite, component, user,
                           self._affinity_preference, valid_affinities)
    source.directory = pkg.directory
    source.dsc_filename = pkg.dsc
    session.add(source)

    for aname in pkg.installed_archs:
        arch = session.query(Arch).filter_by(name=aname).one()
        binary = Binary(source=source, arch=arch, uploaded_at=source.uploaded_at)
        session.add(binary)
        # NOTE(review): this inner loop rebinds `arch`/`filename`; harmless
        # today (the outer loop reassigns them) but fragile.
        for name, arch, filename in pkg.binaries:
            if arch == binary.arch.name:
                directory, _, filename = filename.rpartition("/")
                deb = Deb(binary=binary, directory=directory, filename=filename)
                session.add(deb)

    create_jobs(source, dose_report="No dose-builddebcheck report available yet.")

    oldsources = session.query(Source).filter(Source.group_suite == source.group_suite,
                                              Source.name == source.name)
    for oldsource in oldsources:
        if version_compare(oldsource.version, source.version) >= 0:
            continue
        # Drop any old jobs that are still pending.
        for job in oldsource.jobs:
            if (job.check.build and not any(job.built_binaries)) or not any(job.results):
                session.delete(job)
            elif job.failed is None:
                job.failed = any(result.failed for result in job.results)
                job.builder = None
                job.assigned_at = None
                job.finished_at = None
        # Actually remove jobs marked for deletion above.
        session.commit()
        # If after cleanup there is no build jobs left, remove the source completely
        if not any(job.check.build for job in oldsource.jobs):
            session.delete(oldsource)

    print("Created source for %s %s" % (source.name, source.version))
    emit("accept", "source", source.debilize())
def accept_binary_changes(default_group, config, session, changes, builder):
    """Validate and import an uploaded binary .changes into the DB/repo.

    The upload must reference the build job that produced it
    (X-Debile-Job) and agree with that job's source, group, suite,
    builder, and architecture; any mismatch rejects the upload via
    reject_changes. On success Binary/Deb rows are created, the changes
    go into the repo, 'accept' events are emitted, and files are removed.
    """
    # OK. We'll relate this back to a build job.
    job = changes.get('X-Debile-Job', None)
    if job is None:
        return reject_changes(session, changes, "no-job")
    job = session.query(Job).get(job)
    source = job.source

    if changes.get('Source') != source.name:
        return reject_changes(session, changes, "binary-source-name-mismatch")
    if changes.get("Version") != source.version:
        return reject_changes(session, changes, "binary-source-version-mismatch")
    if changes.get('X-Debile-Group', default_group) != source.group.name:
        return reject_changes(session, changes, "binary-source-group-mismatch")
    if changes.get('Distribution') != source.suite.name:
        return reject_changes(session, changes, "binary-source-suite-mismatch")
    if builder != job.builder:
        return reject_changes(session, changes, "wrong-builder")

    anames = changes.get("Architecture").split(None)
    arches = session.query(Arch).filter(Arch.name.in_(anames)).all()
    binaries = {}
    for arch in arches:
        # Only the job's own arch and "all" are acceptable in this upload.
        if arch.name not in [job.arch.name, "all"]:
            return reject_changes(session, changes, "wrong-architecture")
        binaries[arch.name] = job.new_binary(arch)
    if not binaries:
        return reject_changes(session, changes, "no-architecture")
    session.add_all(binaries.values())

    PATH = re.compile("^/pool/.*/")
    # NOTE(review): non-raw pattern; "\." works but a raw string would be
    # safer against invalid-escape warnings on newer Pythons.
    ARCH = re.compile(".+_(?P<arch>[^_]+)\.u?deb$")
    for entry in changes.get('Files'):
        directory = source.directory
        if '/' in entry['section']:
            # Non-main component: rewrite the pool path accordingly.
            component, section = entry['section'].split('/', 1)
            directory = PATH.sub("/pool/%s/" % component, directory)
        arch = ARCH.match(entry['name']).groupdict().get('arch')
        if arch not in binaries:
            return reject_changes(session, changes, "bad-architecture-of-file")
        deb = Deb(binary=binaries[arch], directory=directory, filename=entry['name'])
        session.add(deb)

    # OK. Let's make sure we can add this.
    try:
        repo = Repo(job.group.repo_path)
        repo.add_changes(changes)
    except RepoSourceAlreadyRegistered:
        return reject_changes(session, changes, 'stupid-source-thing')

    for binary in binaries.values():
        emit('accept', 'binary', binary.debilize())

    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)
def _create_debile_binaries(self, session, source, pkg):
    """Create Binary/Deb rows for binaries of `pkg` already built in dak.

    For each installed arch without a Binary row, attach the binary to the
    matching build job (or create a standalone Binary if debile never
    asked for that build).
    """
    arch_all = session.query(Arch).filter(Arch.name == "all").one()
    arches = session.query(Arch).filter(Arch.name.in_(
        pkg.installed_archs)).all()

    if arch_all in source.arches and arch_all not in arches and source.affinity in arches:
        if not session.query(exists().where(
                (Job.source == source) & (Job.arch == arch_all) & Job.check.has(Check.build == True))).scalar():
            # We have the arch:affinity binary but is still lacking the arch:all binary
            # Make sure debile builds the arch:all binary separately
            check = session.query(Check).filter(Check.build == True).one()
            job = Job(check=check, arch=arch_all, source=source, binary=None)
            session.add(job)

    for arch in arches:
        # Skip arches that already have a Binary row for this source.
        if session.query(exists().where((Binary.source == source) & (Binary.arch == arch))).scalar():
            continue

        # Find the job for this binary
        job = session.query(Job).join(Job.check).filter(
            Job.source == source,
            Job.arch == arch,
            Check.build == True,
        ).first()
        if not job and arch == arch_all and source.affinity in arches:
            # The arch:all binary might have been created by the arch:affinity build job.
            job = session.query(Job).join(Job.check).filter(
                Job.source == source,
                Job.arch == source.affinity,
                Check.build == True,
            ).first()

        if job and (not job.finished_at or job.failed is True):
            # Dak accepted a binary upload that debile-master didn't ask for
            if arch != arch_all and not any(job.built_binaries):
                session.delete(job)
            job = None

        if job:
            binary = job.new_binary(arch)
        else:
            binary = Binary(source=source, arch=arch, uploaded_at=datetime.utcnow())
        session.add(binary)

        # NOTE(review): this inner loop rebinds `arch` and `filename` from
        # the outer scope; harmless today (the outer loop reassigns them)
        # but fragile.
        for name, arch, filename in pkg.binaries:
            if arch == binary.arch.name:
                directory, _, filename = filename.rpartition('/')
                deb = Deb(binary=binary, directory=directory, filename=filename)
                session.add(deb)

        print("Created binary for %s %s on %s" % (binary.name, binary.version, binary.arch))
        emit('accept', 'binary', binary.debilize())
def _create_debile_source(self, session, pkg):
    """Import a source package already accepted by dak into debile's DB.

    Creates the Source row (with Binary/Deb rows for binaries dak already
    has), schedules its jobs, and retires older versions of the same
    source in the same group-suite.
    """
    user = session.query(Person).filter_by(
        email="*****@*****.**").one()
    group_suite = session.query(GroupSuite).join(GroupSuite.group).join(
        GroupSuite.suite).filter(
        Group.name == "default",
        Suite.name == pkg.suite,
    ).one()
    component = session.query(Component).filter(
        Component.name == pkg.component).one()

    # BUGFIX: the template previously held a literal placeholder instead
    # of "{filename}", so the filename=pkg.dsc kwarg was silently ignored
    # and the path never pointed at the .dsc file.
    dsc_fname = "{root}/{directory}/{filename}".format(
        root=self._archive_path,
        directory=pkg.directory,
        filename=pkg.dsc,
    )
    dsc = Dsc(open(dsc_fname))

    # The build-arch-indep affinity field name varied historically.
    if 'Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['Build-Architecture-Indep']
    elif 'X-Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['X-Build-Architecture-Indep']
    elif 'X-Arch-Indep-Build-Arch' in dsc:
        valid_affinities = dsc['X-Arch-Indep-Build-Arch']
    else:
        valid_affinities = "any"

    source = create_source(dsc, group_suite, component, user,
                           self._affinity_preference, valid_affinities)
    source.directory = pkg.directory
    source.dsc_filename = pkg.dsc
    session.add(source)

    for aname in pkg.installed_archs:
        arch = session.query(Arch).filter_by(name=aname).one()
        binary = Binary(source=source, arch=arch, uploaded_at=source.uploaded_at)
        session.add(binary)
        # NOTE(review): this inner loop rebinds `arch`/`filename`; harmless
        # today (the outer loop reassigns them) but fragile.
        for name, arch, filename in pkg.binaries:
            if arch == binary.arch.name:
                directory, _, filename = filename.rpartition('/')
                deb = Deb(binary=binary, directory=directory, filename=filename)
                session.add(deb)

    create_jobs(source, dose_report="No dose-builddebcheck report available yet.")

    oldsources = session.query(Source).filter(
        Source.group_suite == source.group_suite,
        Source.name == source.name,
    )
    for oldsource in oldsources:
        if version_compare(oldsource.version, source.version) >= 0:
            continue
        # Drop any old jobs that are still pending.
        for job in oldsource.jobs:
            if (job.check.build and not any(job.built_binaries)) or not any(job.results):
                session.delete(job)
            elif job.failed is None:
                job.failed = any(result.failed for result in job.results)
                job.builder = None
                job.assigned_at = None
                job.finished_at = None
        # Actually remove jobs marked for deletion above.
        session.commit()
        # If after cleanup there is no build jobs left, remove the source completely
        if not any(job.check.build for job in oldsource.jobs):
            session.delete(oldsource)

    print("Created source for %s %s" % (source.name, source.version))
    emit('accept', 'source', source.debilize())
def accept_source_changes(default_group, config, session, changes, user):
    """Validate and import an uploaded source .changes into the DB/repo.

    Rejects (via reject_changes) on unknown group/suite, dsc/changes
    mismatch, duplicate source in the group, an older version than the
    suite already carries, or a dsc the repo cannot locate afterwards.
    On success: retires stale old-version jobs, creates the Source plus
    its jobs, adds the upload to the repo, emits an 'accept' event, and
    deletes the uploaded files.
    """
    group = changes.get('X-Debile-Group', default_group)
    suite = changes['Distribution']
    try:
        group_suite = session.query(GroupSuite).join(GroupSuite.group).join(
            GroupSuite.suite).filter(
            Group.name == group,
            Suite.name == suite,
        ).one()
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        return reject_changes(session, changes, "invalid-suite-for-group")

    dsc = changes.get_dsc_obj()
    # NOTE(review): tag reads "march", likely meant "match"; changing it
    # alters the emitted tag, so coordinate with consumers before fixing.
    if dsc['Source'] != changes['Source']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")
    if dsc['Version'] != changes['Version']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")

    try:
        # Reject if this exact source/version already exists in the group.
        source = session.query(Source).filter(
            Source.name == dsc['Source'],
            Source.version == dsc['Version'],
            GroupSuite.group == group_suite.group,
        ).one()
        return reject_changes(session, changes, "source-already-in-group")
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        pass

    oldsources = session.query(Source).filter(
        Source.group_suite == group_suite,
        Source.name == dsc['Source'],
    )
    for oldsource in oldsources:
        if version_compare(oldsource.version, dsc['Version']) > 0:
            return reject_changes(session, changes, "newer-source-already-in-suite")

    # Drop any old jobs that are still pending.
    for oldsource in oldsources:
        for job in oldsource.jobs:
            if (not any(job.results) and not any(job.built_binaries)):
                session.delete(job)
            elif job.failed is None:
                job.failed = True
        if not any(oldsource.jobs):
            session.delete(oldsource)

    component = session.query(Component).filter_by(name="main").one()

    # The build-arch-indep affinity field name varied historically.
    if 'Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['Build-Architecture-Indep']
    elif 'X-Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['X-Build-Architecture-Indep']
    elif 'X-Arch-Indep-Build-Arch' in dsc:
        valid_affinities = dsc['X-Arch-Indep-Build-Arch']
    else:
        valid_affinities = "any"

    # no_autoflush: the half-built Source must not be flushed by queries
    # issued inside create_source/create_jobs.
    with session.no_autoflush:
        source = create_source(dsc, group_suite, component, user,
                               config["affinity_preference"], valid_affinities)
        create_jobs(source)
    session.add(source)  # We have a changes in order. Let's roll.

    repo = Repo(group_suite.group.repo_path)
    repo.add_changes(changes)
    try:
        (source.directory, source.dsc_filename) = repo.find_dsc(source)
    except RepoPackageNotFound:
        return reject_changes(session, changes, "reprepo-package-not-found")

    emit('accept', 'source', source.debilize())

    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)
def accept_binary_changes(default_group, config, session, changes, builder):
    """Validate and import an uploaded binary .changes into the DB/repo.

    The upload must reference the build job that produced it
    (X-Debile-Job) and agree with that job's source, group, suite,
    builder, and architecture; any mismatch rejects the upload via
    reject_changes. On success Binary/Deb rows are created, the changes
    go into the repo, 'accept' events are emitted, and files are removed.
    """
    # OK. We'll relate this back to a build job.
    job = changes.get('X-Debile-Job', None)
    if job is None:
        return reject_changes(session, changes, "no-job")
    job = session.query(Job).get(job)
    source = job.source

    if changes.get('Source') != source.name:
        return reject_changes(session, changes, "binary-source-name-mismatch")
    if changes.get("Version") != source.version:
        return reject_changes(
            session, changes, "binary-source-version-mismatch")
    if changes.get('X-Debile-Group', default_group) != source.group.name:
        return reject_changes(session, changes, "binary-source-group-mismatch")
    if changes.get('Distribution') != source.suite.name:
        return reject_changes(session, changes, "binary-source-suite-mismatch")
    if builder != job.builder:
        return reject_changes(session, changes, "wrong-builder")

    anames = changes.get("Architecture").split(None)
    arches = session.query(Arch).filter(Arch.name.in_(anames)).all()
    binaries = {}
    for arch in arches:
        # Only the job's own arch and "all" are acceptable in this upload.
        if arch.name not in [job.arch.name, "all"]:
            return reject_changes(session, changes, "wrong-architecture")
        binaries[arch.name] = job.new_binary(arch)
    if not binaries:
        return reject_changes(session, changes, "no-architecture")
    session.add_all(binaries.values())

    PATH = re.compile("^/pool/.*/")
    # NOTE(review): non-raw pattern; "\." works but a raw string would be
    # safer against invalid-escape warnings on newer Pythons.
    ARCH = re.compile(".+_(?P<arch>[^_]+)\.u?deb$")
    for entry in changes.get('Files'):
        directory = source.directory
        if '/' in entry['section']:
            # Non-main component: rewrite the pool path accordingly.
            component, section = entry['section'].split('/', 1)
            directory = PATH.sub("/pool/%s/" % component, directory)
        arch = ARCH.match(entry['name']).groupdict().get('arch')
        if arch not in binaries:
            return reject_changes(session, changes, "bad-architecture-of-file")
        deb = Deb(binary=binaries[arch], directory=directory,
                  filename=entry['name'])
        session.add(deb)

    ## OK. Let's make sure we can add this.
    try:
        repo = Repo(job.group.repo_path)
        repo.add_changes(changes)
    except RepoSourceAlreadyRegistered:
        return reject_changes(session, changes, 'stupid-source-thing')

    for binary in binaries.values():
        emit('accept', 'binary', binary.debilize())

    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)