def categorize_version_comparison(a, b):
    """Classify how version a differs from version b.

    The returned identifier can be looked up in
    CATEGORIES_VERSION_COMPARISON to obtain a long description.
    """
    if a == b:
        return 'equal'
    if a is None:
        return 'missing_pkg'
    if b is None:
        return 'new_pkg'

    d_epoch, d_upstream, d_revision = split_version(a)
    p_epoch, p_upstream, p_revision = split_version(b)

    # Compare component by component, most significant first.
    if d_epoch != p_epoch:
        older = version_compare(d_epoch, p_epoch) < 0
        return 'older_version' if older else 'newer_version'
    if d_upstream != p_upstream:
        older = version_compare(d_upstream, p_upstream) < 0
        return 'older_version' if older else 'newer_version'
    older = version_compare(d_revision, p_revision) < 0
    return 'older_revision' if older else 'newer_revision'
def readyaml(db, stream):
    """Import one package description (a YAML stream) into the database.

    The stream consists of a metadata document followed by one document
    per file entry, terminated by a plain "commit" document.  Older
    versions of the same package are pruned, keeping at most one.

    Raises ValueError if the stream ends without a "commit" document.
    """
    from functools import cmp_to_key

    cur = db.cursor()
    cur.execute("PRAGMA foreign_keys = ON;")
    gen = yaml.safe_load_all(stream)
    metadata = next(gen)
    package = metadata["package"]
    cur.execute("SELECT id, version FROM package WHERE name = ?;", (package,))
    rows = cur.fetchall()
    if rows:
        # BUG FIX: the original used the removed Python 2 ``cmp=``
        # keyword with a one-argument lambda.  Sort newest-first with a
        # real two-argument comparison so rows[0] is the newest and the
        # pruning below deletes genuinely older rows.
        rows.sort(key=cmp_to_key(lambda x, y: version_compare(x[1], y[1])),
                  reverse=True)
        row = rows[0]
    else:
        row = None
    if row:
        pid, version = row
        if version_compare(version, metadata["version"]) > 0:
            # The database already carries a newer version; skip import.
            return
    else:
        pid = None
    cur.execute("BEGIN;")
    cur.execute("SELECT name, id FROM function;")
    funcmapping = dict(cur.fetchall())
    # First, delete all the old ones that we want to remove from the DB.
    MAX_OLD_TO_KEEP = 1
    if len(rows) > MAX_OLD_TO_KEEP:
        for old in rows[MAX_OLD_TO_KEEP:]:
            cur.execute("DELETE FROM package WHERE id = ?;", (old[0],))
            # BUG FIX: ``print >> sys.stderr`` is Python-2-only syntax.
            print("EEK", file=sys.stderr)
    # If last one == this one, delete that too.
    if row and (version == metadata['version']):
        cur.execute("DELETE FROM package WHERE id = ?;", (pid,))
    # Then store the new data about our new package version.
    cur.execute(
        "INSERT INTO package (name, version, architecture, source) VALUES (?, ?, ?, ?);",
        (package, metadata["version"], metadata["architecture"],
         metadata["source"]))
    pid = cur.lastrowid
    cur.executemany("INSERT INTO dependency (pid, required) VALUES (?, ?);",
                    ((pid, dep) for dep in metadata["depends"]))
    for entry in gen:
        if entry == "commit":
            db.commit()
            return
        cur.execute(
            "INSERT INTO content (pid, filename, size) VALUES (?, ?, ?);",
            (pid, entry["name"], entry["size"]))
        cid = cur.lastrowid
        cur.executemany(
            "INSERT INTO hash (cid, fid, hash) VALUES (?, ?, ?);",
            ((cid, funcmapping[func], hexhash)
             for func, hexhash in entry["hashes"].items()))
    raise ValueError("missing commit block")
def readyaml(db, stream):
    """Import one package description (a YAML stream) into the database.

    NOTE(review): this definition duplicates an earlier ``readyaml`` in
    this file and shadows it at import time — confirm which copy is
    intended to survive.

    The stream consists of a metadata document followed by one document
    per file entry, terminated by a plain "commit" document.  Older
    versions of the same package are pruned, keeping at most one.

    Raises ValueError if the stream ends without a "commit" document.
    """
    from functools import cmp_to_key

    cur = db.cursor()
    cur.execute("PRAGMA foreign_keys = ON;")
    gen = yaml.safe_load_all(stream)
    metadata = next(gen)
    package = metadata["package"]
    cur.execute("SELECT id, version FROM package WHERE name = ?;", (package,))
    rows = cur.fetchall()
    if rows:
        # BUG FIX: the original used the removed Python 2 ``cmp=``
        # keyword with a one-argument lambda.  Sort newest-first with a
        # real two-argument comparison so rows[0] is the newest and the
        # pruning below deletes genuinely older rows.
        rows.sort(key=cmp_to_key(lambda x, y: version_compare(x[1], y[1])),
                  reverse=True)
        row = rows[0]
    else:
        row = None
    if row:
        pid, version = row
        if version_compare(version, metadata["version"]) > 0:
            # The database already carries a newer version; skip import.
            return
    else:
        pid = None
    cur.execute("BEGIN;")
    cur.execute("SELECT name, id FROM function;")
    funcmapping = dict(cur.fetchall())
    # First, delete all the old ones that we want to remove from the DB.
    MAX_OLD_TO_KEEP = 1
    if len(rows) > MAX_OLD_TO_KEEP:
        for old in rows[MAX_OLD_TO_KEEP:]:
            cur.execute("DELETE FROM package WHERE id = ?;", (old[0],))
            # BUG FIX: ``print >> sys.stderr`` is Python-2-only syntax.
            print("EEK", file=sys.stderr)
    # If last one == this one, delete that too.
    if row and (version == metadata['version']):
        cur.execute("DELETE FROM package WHERE id = ?;", (pid,))
    # Then store the new data about our new package version.
    cur.execute(
        "INSERT INTO package (name, version, architecture, source) VALUES (?, ?, ?, ?);",
        (package, metadata["version"], metadata["architecture"],
         metadata["source"]))
    pid = cur.lastrowid
    cur.executemany("INSERT INTO dependency (pid, required) VALUES (?, ?);",
                    ((pid, dep) for dep in metadata["depends"]))
    for entry in gen:
        if entry == "commit":
            db.commit()
            return
        cur.execute(
            "INSERT INTO content (pid, filename, size) VALUES (?, ?, ?);",
            (pid, entry["name"], entry["size"]))
        cid = cur.lastrowid
        cur.executemany(
            "INSERT INTO hash (cid, fid, hash) VALUES (?, ?, ?);",
            ((cid, funcmapping[func], hexhash)
             for func, hexhash in entry["hashes"].items()))
    raise ValueError("missing commit block")
def compare_version(self, other):
    """cmp-style version comparison for two same-type packages.

    'R' (rpm) packages go through labelCompare; 'D' and 'A' packages
    are compared via version_compare on Version objects (the two
    branches are intentionally identical).  Returns None when the
    package types differ.
    """
    kinds = (self.packagetype, other.packagetype)
    if kinds == ('R', 'R'):
        return labelCompare(self.get_version_string(),
                            other.get_version_string())
    if kinds in (('D', 'D'), ('A', 'A')):
        return version_compare(Version(self.get_version_string()),
                               Version(other.get_version_string()))
def register_entry(entry):
    """Record *entry* in main_entries, keeping the best candidate per name.

    A higher source-package version wins; on an exact version tie, the
    entry whose repository has the greater position wins.
    """
    name = entry.source_package.name
    version = entry.source_package.version
    if name not in main_entries:
        main_entries[name] = entry
        return
    current = main_entries[name]
    outcome = version_compare(current.source_package.version, version)
    if outcome < 0:
        main_entries[name] = entry
    elif outcome == 0:
        # If both versions are equal, we use the repository with the
        # biggest position
        if entry.repository.position > current.repository.position:
            main_entries[name] = entry
def compare_version(self, other):
    """cmp-style version comparison for two same-type packages.

    'R' packages are compared with labelCompare on rpm version strings;
    'D' packages with version_compare on deb Version objects.  Returns
    None when the package types differ or are unknown.
    """
    if self.packagetype == 'R' and other.packagetype == 'R':
        return labelCompare(self._version_string_rpm(),
                            other._version_string_rpm())
    if self.packagetype == 'D' and other.packagetype == 'D':
        mine = Version(self._version_string_deb())
        theirs = Version(other._version_string_deb())
        return version_compare(mine, theirs)
def package_version(self):
    """Build the next version string for this package.

    Appends the build counter to the native version (or uses the bare
    counter when there is none), then bumps the epoch whenever the
    resulting version would not sort after the last built version.
    """
    native = self.native_version
    counter = self.build_record.build_counter
    version = '%s+%d' % (native, counter) if native else '%d' % (counter,)
    epoch = 0
    previous = self.package_source.last_built_version
    if previous:
        if ':' in previous:
            epoch_str, bare_previous = previous.split(':', 1)
            epoch = int(epoch_str)
        else:
            bare_previous = previous
        if version_compare(version, bare_previous) < 0:
            epoch += 1
    if epoch:
        version = '%s:%s' % (epoch, version)
    return version
def find_newest_publication(method, version_attr, **kwargs):
    """Hack around being unable to pass status=("Published", "Pending")."""
    if kwargs["version"] is not None:
        # An explicit version pins the search; just take the first hit.
        try:
            return method(**kwargs)[0]
        except IndexError:
            return None
    published = method(status="Published", **kwargs)
    pending = method(status="Pending", **kwargs)
    try:
        best_published = published[0]
        published_ver = getattr(best_published, version_attr)
    except IndexError:
        # Nothing published; fall back to the newest pending, if any.
        try:
            return pending[0]
        except IndexError:
            return None
    try:
        best_pending = pending[0]
        pending_ver = getattr(best_pending, version_attr)
    except IndexError:
        return best_published
    if debian_support.version_compare(published_ver, pending_ver) > 0:
        return best_published
    return best_pending
def __cmp__(self, other):
    """comparison based on <package, version> pairs only """
    by_package = cmp(self['package'], other['package'])
    if by_package:
        # Different package names already decide the ordering.
        return by_package
    # Same package: fall back to Debian version ordering.
    return version_compare(self['version'], other['version'])
def find_newest_publication(method, version_attr, **kwargs):
    """Hack around being unable to pass status=("Published", "Pending")."""
    published_pubs = method(status="Published", **kwargs)
    pending_pubs = method(status="Pending", **kwargs)
    try:
        newest_published = published_pubs[0]
        newest_published_ver = getattr(newest_published, version_attr)
    except IndexError:
        # No published candidate: fall back to pending, then — when a
        # specific version was requested — to an unrestricted query.
        try:
            return pending_pubs[0]
        except IndexError:
            if kwargs["version"] is None:
                return None
            try:
                return method(**kwargs)[0]
            except IndexError:
                return None
    try:
        newest_pending = pending_pubs[0]
        newest_pending_ver = getattr(newest_pending, version_attr)
    except IndexError:
        return newest_published
    if debian_support.version_compare(newest_published_ver,
                                      newest_pending_ver) > 0:
        return newest_published
    return newest_pending
def does_series_already_have_this_or_newer_version(cls, series, sp, version):
    """Return True when *series* already carries *sp* at *version* or newer."""
    existing = series.source_package_versions.filter(source_package=sp)
    if not existing:
        return False
    return version_compare(version, existing[0].version) >= 0
def compare_version(self, other):
    """cmp-style version comparison for two same-type packages.

    'R' packages use labelCompare; 'D' and 'A' packages use
    version_compare on Version objects, with an extra equality
    short-circuit for 'A' on the (epoch, version, release) triple.
    Returns None when the package types differ.
    """
    if self.packagetype == 'R' and other.packagetype == 'R':
        return labelCompare(self.get_version_string(),
                            other.get_version_string())
    if self.packagetype == 'D' and other.packagetype == 'D':
        return version_compare(Version(self.get_version_string()),
                               Version(other.get_version_string()))
    if self.packagetype == 'A' and other.packagetype == 'A':
        identical = (self.epoch == other.epoch
                     and self.version == other.version
                     and self.release == other.release)
        if identical:
            return 0
        return version_compare(Version(self.get_version_string()),
                               Version(other.get_version_string()))
def compare_nvrs(a_nvr, b_nvr):
    """cmp-style comparison of two name-version strings.

    Orders first by package name, then by Debian version.  Uses the
    builtin ``cmp`` on Python 2 and the sign trick on Python 3.
    """
    a_name, a_version = chacra.name_version(a_nvr)
    b_name, b_version = chacra.name_version(b_nvr)
    if sys.version_info[0] < 3:
        name_order = cmp(a_name, b_name)  # NOQA: F821
    else:
        name_order = (a_name > b_name) - (a_name < b_name)
    if name_order:
        return name_order
    return debian_support.version_compare(a_version, b_version)
def __cmp__(self, other):
    """Order by version, falling back to content identity.

    Identical objects compare equal.  Equal versions compare equal only
    when the (_c, _h, _md5) triples also match; otherwise the pair is
    deemed unequal (-1).  Different versions are ordered by
    version_compare on their full_version strings.
    """
    if self is other:
        return 0
    if self.version == other.version:
        if (self._c, self._h, self._md5) == (other._c, other._h, other._md5):
            return 0
        return -1
    # BUG FIX: the original compared against ``other.full_version``,
    # while every other access here goes through ``other.version`` —
    # the intended operand is ``other.version.full_version``.
    return version_compare(self.version.full_version,
                           other.version.full_version)
def process_file(pkgs, filename):
    """Register the .deb at *filename* in *pkgs*, keyed by package name.

    Keeps only the newest version per name.  Raises ValueError when the
    basename is not of the form name_version_arch.deb.
    """
    base = os.path.basename(filename)
    if not base.endswith(".deb"):
        raise ValueError("filename does not end in .deb")
    parts = base.split("_")
    if len(parts) != 3:
        raise ValueError("filename not in form name_version_arch.deb")
    name, version, _arch = parts
    version = urllib.unquote(version)
    already = pkgs.get(name)
    if already is not None and version_compare(already["version"], version) > 0:
        # A newer version of this package is already recorded.
        return
    pkgs[name] = dict(version=version, filename=filename)
def process_http(pkgs, url):
    """Merge the sid/main/amd64 Packages index at *url* into *pkgs*.

    Keeps only the newest version per package name, recording version,
    download filename and SHA256 hash for each.
    """
    raw = urllib.urlopen(
        url + "/dists/sid/main/binary-amd64/Packages.gz").read()
    decompressed = gzip.GzipFile(fileobj=io.BytesIO(raw)).read()
    paragraphs = deb822.Packages.iter_paragraphs(io.BytesIO(decompressed))
    for pkg in paragraphs:
        name = pkg["Package"]
        if name in pkgs and \
                version_compare(pkgs[name]["version"], pkg["Version"]) > 0:
            # Keep the newer version already recorded.
            continue
        pkgs[name] = dict(version=pkg["Version"],
                          filename="%s/%s" % (url, pkg["Filename"]),
                          sha256hash=pkg["SHA256"])
def compare_with_suite_order(a, b):
    """cmp-style ordering: newest suite first, then Debian version."""
    def newest_suite_rank(item):
        # Rank is the highest suite_order index among the item's suites;
        # -1 when none of them appear in suite_order (max() on an empty
        # generator raises ValueError).
        try:
            return max(suite_order.index(s.lower()) for s in item.suites)
        except ValueError:
            return -1

    rank_a = newest_suite_rank(a)
    rank_b = newest_suite_rank(b)
    if rank_a == rank_b:
        return version_compare(a.version, b.version)
    return 1 if rank_a > rank_b else -1
def _check_apt_package(self, package_name: str, version: str = None) -> CheckResult:
    """Check that an apt package is installed at the expected version.

    Queries dpkg for *package_name* and compares the installed version
    against ``self.requirements[package_name]``.  The *version*
    parameter is currently unused (kept for interface compatibility).
    """
    # TODO: check versions
    dpkg_cmd_result = run_cmd(['dpkg', '-s', package_name], check_code=False)
    output = dpkg_cmd_result.stdout.decode('utf-8').strip()
    if dpkg_cmd_result.returncode != 0:
        # dpkg does not know the package at all.
        return self._failed(name=package_name, info=output)
    actual_version = self._version_from_dpkg_output(output)
    expected_version = self.requirements[package_name]
    info = {
        'expected_version': expected_version,
        'actual_version': actual_version
    }
    compare_result = debian_support.version_compare(
        actual_version, expected_version)
    # BUG FIX: version_compare is only guaranteed to return a negative,
    # zero or positive value (it may delegate to apt_pkg), not exactly
    # -1/0/1 — test the sign instead of equality with -1.
    if compare_result < 0:
        return self._failed(name=package_name, info=info)
    else:
        return self._ok(name=package_name, info=info)
def package_version(self):
    """Build the next version string for this package.

    Appends the build counter to the native version (or uses the bare
    counter when there is none), bumping the epoch whenever the result
    would not sort after the previously built version.
    """
    native = self.native_version
    counter = self.build_record['build_counter']
    version = '%s+%d' % (native, counter) if native else '%d' % (counter,)
    epoch = 0
    previous = self.build_record['source']['last_built_version']
    if previous:
        if ':' in previous:
            epoch_str, bare_previous = previous.split(':', 1)
            epoch = int(epoch_str)
        else:
            bare_previous = previous
        if version_compare(version, bare_previous) < 0:
            epoch += 1
    if epoch:
        version = '%s:%s' % (epoch, version)
    return version
def compare_versions(current, upstream):
    """Return 1 if upstream is newer than current, -1 if current is newer
    than upstream, and 0 if the same."""
    # Treat a missing version on either side as "no difference".
    if current and upstream:
        return debian_support.version_compare(upstream, current)
    return 0
def main():
    """Import Debian packages from mirrors/dirs/files into test.sqlite3.

    Each positional argument may be an http mirror, a directory of .deb
    files, or a single .deb.  Packages are processed in parallel
    (process_pkg) and the resulting YAML is loaded via readyaml.
    With --new, versions already in the database are skipped; with
    --prune, packages no longer present in the inputs are deleted.
    """
    from functools import cmp_to_key

    parser = optparse.OptionParser()
    parser.add_option("-n", "--new", action="store_true",
                      help="avoid reimporting same versions")
    parser.add_option("-p", "--prune", action="store_true",
                      help="prune packages old packages")
    options, args = parser.parse_args()
    subprocess.check_call(["mkdir", "-p", "tmp"])
    db = sqlite3.connect("test.sqlite3")
    cur = db.cursor()
    cur.execute("PRAGMA foreign_keys = ON;")
    e = concurrent.futures.ThreadPoolExecutor(multiprocessing.cpu_count())
    pkgs = {}
    for d in args:
        print("processing %s" % d)
        if d.startswith("http://"):
            process_http(pkgs, d)
        elif os.path.isdir(d):
            process_dir(pkgs, d)
        else:
            process_file(pkgs, d)
    print("reading database")
    cur.execute("SELECT name, version FROM package;")
    # Best fix: Do the version compare in SQL.
    #
    # Asheesh hack: do the version compare from Python on the line
    # below.  This will cause some sizeable memory blow-up for now,
    # but it will do.
    #
    # In the future it'd be nice to move this to use the Postgres
    # debversion extension.
    #
    # BUG FIX: ``sorted(..., cmp=...)`` is Python-2-only (this function
    # already uses Python 3 ``print()``); use functools.cmp_to_key.
    # Sorting ascending by version means dict() keeps the highest
    # version per name (later pairs overwrite earlier ones).
    knownpkgs = dict(sorted(
        ((row[0], row[1]) for row in cur.fetchall()),
        key=cmp_to_key(lambda x, y: version_compare(x[1], y[1]))))
    distpkgs = set(pkgs.keys())
    if options.new:
        for name in distpkgs:
            if name in knownpkgs and version_compare(
                    pkgs[name]["version"], knownpkgs[name]) <= 0:
                del pkgs[name]
    knownpkgs = set(knownpkgs)
    with e:
        fs = {}
        for name, pkg in pkgs.items():
            fs[e.submit(process_pkg, name, pkg)] = name
        for f in concurrent.futures.as_completed(fs.keys()):
            name = fs[f]
            if f.exception():
                print("%s failed to import: %r" % (name, f.exception()))
                continue
            inf = os.path.join("tmp", name)
            print("sqlimporting %s" % name)
            with open(inf) as inp:
                try:
                    readyaml(db, inp)
                except Exception as exc:
                    print("%s failed sql with exception %r" % (name, exc))
                else:
                    os.unlink(inf)
    if options.prune:
        delpkgs = knownpkgs - distpkgs
        print("clearing packages %s" % " ".join(delpkgs))
        cur.executemany("DELETE FROM package WHERE name = ?;",
                        ((pkg,) for pkg in delpkgs))
        # Tables content, dependency and sharing will also be pruned
        # due to ON DELETE CASCADE clauses.
    db.commit()
def watch_packages(package_config, initial=False, notify_on_startup=False, package_architectures=["amd64"]):
    """Poll the archive for every watched package and notify on changes.

    Compares a fresh poll against the PACKAGE_STATUS global and sends a
    notification when a pocket gains its first version or a strictly
    newer one.  On the first call (initial=True) the global state is
    seeded from the poll; startup notifications are then limited to the
    Proposed pocket and gated on notify_on_startup.
    """
    # NOTE(review): package_architectures is a mutable default argument,
    # shared across calls — safe only if never mutated; confirm.
    global PACKAGE_STATUS
    global LAST_POLL
    updated_package_status = ubuntu_package_status.get_status_for_all_packages(
        package_config, package_architectures)
    LAST_POLL = format_datetime(datetime.utcnow())
    # This is the first time we have polled/watched these packages so set the global variable PACKAGE_STATUS
    # to the current polled stats
    if initial:
        PACKAGE_STATUS = updated_package_status
    for ubuntu_version, packages in PACKAGE_STATUS.items():
        for package, pockets in packages.items():
            for pocket, architectures in pockets.items():
                for architecture, package_stats in architectures.items():
                    logging.info("Getting stats for {} {} {} {}".format(
                        ubuntu_version, pocket.lower(), architecture,
                        package))
                    current_package_stats = updated_package_status[
                        ubuntu_version][package][pocket][architecture]
                    current_package_version = current_package_stats["version"]
                    previous_package_version = package_stats.get(
                        "version", None)
                    message = None
                    newer_package = False
                    new_package = False
                    if current_package_version:
                        message = "{} {} {} for {} is in {} pocket (published @ {} - {})" \
                            .format(package, architecture,
                                    current_package_version, ubuntu_version,
                                    pocket.lower(),
                                    current_package_stats["date_published_formatted"],
                                    current_package_stats["published_age"])
                    # Is the version in archive greater than
                    # that in our database?
                    if current_package_version and previous_package_version:
                        vc = debian_support.version_compare(
                            current_package_version, previous_package_version)
                        logging.info(message)
                        if vc > 0:
                            """
                            > 0 The version current_package_version is greater than version previous_package_version.
                            = 0 Both versions are equal.
                            < 0 The version current_package_version is less than version previous_package_version.
                            """
                            newer_package = True
                            is_newer_message = "{} is newer than {}".format(
                                current_package_version,
                                previous_package_version)
                            message = '** NEW VERSION ** {}. {}'.format(
                                message, is_newer_message)
                    # Is the version in archive the first time we've seen a
                    # version in this pocket?
                    if not previous_package_version and current_package_version:
                        new_package = True
                        is_new_message = "{} is new to the {} pocket".format(
                            current_package_version, pocket.lower())
                        message = '** NEW TO POCKET ** {}. {}'.format(
                            message, is_new_message)
                    # If the package is newer or it's a new package send
                    # a notification
                    if newer_package or new_package:
                        # NOTE(review): this writes the *previous*
                        # package_stats back into PACKAGE_STATUS rather
                        # than current_package_stats — confirm intended.
                        PACKAGE_STATUS[ubuntu_version][package][pocket][architecture] \
                            = package_stats
                        if message and message not in NOTIFICATIONS_SENT:
                            NOTIFICATIONS_SENT.append(message)
                            if not initial or (initial and pocket == "Proposed" and notify_on_startup):
                                send_notification_message(message)
                    if message:
                        logging.info(message)
def find_updates(self):
    """Recompute the pending package updates for this host.

    Non-kernel packages are compared against the repositories visible
    to the host (host repos only, or OS-group repos otherwise) and the
    highest available candidate per package is passed to
    process_update().  Kernel packages are handled separately
    (rpm-style comparison only) and also drive reboot_required.
    Saves the host at the end.
    """
    self.updates.clear()
    # Kernel-family package names, excluded from the generic pass below.
    kernels = Q(name__name='kernel') | Q(name__name='kernel-xen') | Q(name__name='kernel-pae') | Q(name__name='kernel-devel') | Q(name__name='kernel-pae-devel') | Q(name__name='kernel-xen-devel') | Q(name__name='kernel-headers')
    kernelpackages = Package.objects.select_related().filter(host=self).filter(kernels).values('name__name').annotate(Count('name'))
    repopackages = self.get_host_repo_packages()
    if self.host_repos_only:
        #find_host_repo_updates(host)
        for package in self.packages.exclude(kernels):
            # (epoch, version, release) sentinel lower than any real version
            highest = ('', '0', '')
            highestpackage = None
            bestrepo = None
            # find out what hostrepo it belongs to
            repos_q = Q(mirror__repo__in=self.repos.all(), mirror__enabled=True, mirror__repo__enabled=True, mirror__packages__name=package.name)
            repos = Repository.objects.filter(repos_q).distinct()
            hostrepos = HostRepo.objects.filter(repo__in=repos, host=self)
            if hostrepos:
                bestrepo = hostrepos[0]
                if hostrepos.count() > 1:
                    # favour security repos, then higher priority repos
                    for repo in hostrepos:
                        if repo.repo.security:
                            bestrepo = repo
                        else:
                            if repo.priority > bestrepo.priority:
                                bestrepo = repo
            # find the packages that are potential updates
            matchingpackages = repopackages.filter(name=package.name, arch=package.arch, packagetype=package.packagetype)
            for repopackage in matchingpackages:
                if package.compare_version(repopackage) == -1:
                    rp_bestrepo = None
                    # find the repos the potential update belongs to
                    rp_repos_q = Q(mirror__repo__in=self.repos.all(), mirror__enabled=True, mirror__repo__enabled=True, mirror__packages=repopackage)
                    rp_repos = Repository.objects.filter(rp_repos_q).distinct()
                    rp_hostrepos = HostRepo.objects.filter(repo__in=rp_repos, host=self)
                    # if it belongs to more than one, find the best one (favour security repos, then higher priority repos)
                    if rp_hostrepos:
                        rp_bestrepo = rp_hostrepos[0]
                        if rp_hostrepos.count() > 1:
                            for repo in rp_hostrepos:
                                if repo.repo.security:
                                    rp_bestrepo = repo
                                else:
                                    if repo.priority > rp_bestrepo.priority:
                                        rp_bestrepo = repo
                    # proceed if that repo has a higher priority
                    # NOTE(review): rp_bestrepo (and bestrepo) may still be
                    # None here when the querysets are empty, which would
                    # raise AttributeError — confirm that cannot happen.
                    if rp_bestrepo.priority >= bestrepo.priority:
                        if package.packagetype == 'R':
                            if labelCompare(highest, repopackage._version_string_rpm()) == -1:
                                highest = repopackage._version_string_rpm()
                                highestpackage = repopackage
                        elif package.packagetype == 'D':
                            vr = Version(repopackage._version_string_deb())
                            if highest == ('', '0', ''):
                                vh = Version('0')
                            else:
                                vh = Version('%s:%s-%s' % (str(highest[0]), str(highest[1]), str(highest[2])))
                            if version_compare(vh, vr) == -1:
                                highest = repopackage._version_string_deb()
                                highestpackage = repopackage
            self.process_update(package, highest, highestpackage)
    else:
        #find_osgroup_repo_updates(host)
        for package in self.packages.exclude(kernels):
            highest = ('', '0', '')
            highestpackage = None
            matchingpackages = repopackages.filter(name=package.name, arch=package.arch, packagetype=package.packagetype)
            for repopackage in matchingpackages:
                if package.compare_version(repopackage) == -1:
                    if package.packagetype == 'R':
                        if labelCompare(highest, repopackage._version_string_rpm()) == -1:
                            highest = repopackage._version_string_rpm()
                            highestpackage = repopackage
                    elif package.packagetype == 'D':
                        vr = Version(repopackage._version_string_deb())
                        if highest == ('', '0', ''):
                            vh = Version('0')
                        else:
                            vh = Version('%s:%s-%s' % (str(highest[0]), str(highest[1]), str(highest[2])))
                        if version_compare(vh, vr) == -1:
                            highest = repopackage._version_string_deb()
                            highestpackage = repopackage
            self.process_update(package, highest, highestpackage)
    #find_kernel_updates(host)
    try:
        # rsplit with no maxsplit raises ValueError unless there is
        # exactly one '-' in self.kernel; caught below for debian kernels.
        ver, rel = self.kernel.rsplit('-')
        rel = rel.rstrip('xen')
        rel = rel.rstrip('PAE')
        running_kernel = ('', str(ver), str(rel))
        for package in kernelpackages:
            host_highest = ('', '', '')
            repo_highest = ('', '', '')
            host_highest_package = None
            repo_highest_package = None
            matchingpackages = repopackages.filter(Q(name__name=package['name__name']))
            for repopackage in matchingpackages:
                repokernel = repopackage._version_string_rpm()
                if labelCompare(repo_highest, repokernel) == -1:
                    repo_highest = repokernel
                    repo_highest_package = repopackage
            matchingpackages = self.packages.filter(Q(name__name=package['name__name']))
            for hostpackage in matchingpackages:
                hostkernel = hostpackage._version_string_rpm()
                if labelCompare(host_highest, hostkernel) == -1:
                    host_highest = hostkernel
                    host_highest_package = hostpackage
            if labelCompare(host_highest, repo_highest) == -1:
                matchingrepos = repo_highest_package.mirror_set.filter(repo__arch=self.arch)
                security = False
                # If any of the containing repos are security, mark the update as security
                for mirror in matchingrepos:
                    if mirror.repo.security:
                        security = True
                update, c = PackageUpdate.objects.get_or_create(oldpackage=host_highest_package, newpackage=repo_highest_package, security=security)
                self.updates.add(update)
                info_message.send(sender=None, text="%s\n" % update)
            if labelCompare(running_kernel, host_highest) == -1:
                self.reboot_required = True
            else:
                self.reboot_required = False
    except ValueError:
        # debian kernel
        pass
    self.save()
def execute_main(self):
    """Flag packages whose Standards-Version lags the current policy.

    Creates or updates one ActionItem per outdated package — severity
    HIGH when even the major policy version differs, WISHLIST otherwise
    — and deletes items for packages that are up to date or that have
    disappeared from the default repository.  Only the newest version
    of each package name is considered.
    """
    # Get the current policy version
    policy_version = self.get_policy_version()
    if policy_version is None:
        # Nothing to do if there is no ``debian-policy``
        return
    self.check_if_full_update_is_required(policy_version)
    # Maps package name -> newest source version processed so far.
    seen_packages = {}
    for entry in self.items_to_process():
        try:
            package = entry.source_package.source_package_name
            standards_version = entry.source_package.standards_version
            try:
                if package.name in seen_packages:
                    seen_version = seen_packages[package.name]
                    version = entry.source_package.version
                    if version_compare(version, seen_version) < 0:
                        # This version is older, skip it
                        continue
                    # If already seen, then the cached action item
                    # is no longer reliable, retrieve it from the db
                    action_item = get_or_none(ActionItem, package=package, item_type=self.action_type)
                else:
                    # IndexError here means "no existing item".
                    action_item = package.stdver_action_items[0]
            except IndexError:
                action_item = None
            seen_packages[package.name] = entry.source_package.version
            if standards_version.startswith(policy_version):
                # The std-ver of the package is up to date.
                # Remove any possibly existing action item.
                if action_item is not None:
                    action_item.delete()
                continue
            major_policy_version_number, _ = policy_version.split('.', 1)
            severely_outdated = not standards_version.startswith(
                major_policy_version_number)
            if action_item is None:
                action_item = ActionItem(
                    package=package, item_type=self.action_type)
            if severely_outdated:
                action_item.severity = ActionItem.SEVERITY_HIGH
            else:
                action_item.severity = ActionItem.SEVERITY_WISHLIST
            action_item.short_description = self.ITEM_DESCRIPTION
            action_item.extra_data = {
                'lastsv': policy_version,
                'lastsv_dashes': policy_version.replace('.', '-'),
                'standards_version': standards_version,
                'standards_version_dashes': standards_version.replace('.', '-'),
                'severely_outdated': severely_outdated,
            }
            action_item.save()
        finally:
            # Mark processed even when we continue early or raise.
            self.item_mark_processed(entry)
    # Remove action items for packages that disappeared from the default
    # repository
    ActionItem.objects.delete_obsolete_items(
        [self.action_type],
        self.items_all().values_list(
            'source_package__source_package_name__name', flat=True)
    )
def accept_source_changes(default_group, config, session, changes, user):
    """Validate an uploaded source .changes and register it with a group suite.

    Rejects on unknown group/suite, dsc/changes mismatch, a duplicate in
    the group, or a newer version already in the suite.  Otherwise old
    pending jobs are retired, the new Source and its jobs are stored,
    the upload is added to the repository, an 'accept' event is emitted
    and the uploaded files are removed.
    """
    group = changes.get('X-Debile-Group', default_group)
    suite = changes['Distribution']
    try:
        group_suite = session.query(GroupSuite).join(GroupSuite.group).join(GroupSuite.suite).filter(
            Group.name == group,
            Suite.name == suite,
        ).one()
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        return reject_changes(session, changes, "invalid-suite-for-group")
    dsc = changes.get_dsc_obj()
    # NOTE(review): "march" below looks like a typo for "match", but the
    # string may be a protocol constant matched elsewhere — confirm
    # before changing it.
    if dsc['Source'] != changes['Source']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")
    if dsc['Version'] != changes['Version']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")
    try:
        # Existence probe only; the fetched Source itself is unused.
        source = session.query(Source).filter(
            Source.name == dsc['Source'],
            Source.version == dsc['Version'],
            GroupSuite.group == group_suite.group,
        ).one()
        return reject_changes(session, changes, "source-already-in-group")
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        pass
    oldsources = session.query(Source).filter(
        Source.group_suite == group_suite,
        Source.name == dsc['Source'],
    )
    for oldsource in oldsources:
        if version_compare(oldsource.version, dsc['Version']) > 0:
            return reject_changes(session, changes, "newer-source-already-in-suite")
    # Drop any old jobs that are still pending.
    for oldsource in oldsources:
        for job in oldsource.jobs:
            if (not any(job.results) and not any(job.built_binaries)):
                session.delete(job)
            elif job.failed is None:
                # Job had output but never finished; mark it failed.
                job.failed = True
        if not any(oldsource.jobs):
            session.delete(oldsource)
    component = session.query(Component).filter_by(name="main").one()
    # Pick the arch-indep build-architecture hint, trying the official
    # field first, then older experimental spellings.
    if 'Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['Build-Architecture-Indep']
    elif 'X-Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['X-Build-Architecture-Indep']
    elif 'X-Arch-Indep-Build-Arch' in dsc:
        valid_affinities = dsc['X-Arch-Indep-Build-Arch']
    else:
        valid_affinities = "any"
    with session.no_autoflush:
        source = create_source(dsc, group_suite, component, user,
                               config["affinity_preference"], valid_affinities)
        create_jobs(source)
        session.add(source)
    # We have a changes in order. Let's roll.
    repo = Repo(group_suite.group.repo_path)
    repo.add_changes(changes)
    (source.directory, source.dsc_filename) = repo.find_dsc(source)
    emit('accept', 'source', source.debilize())
    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)
def accept_source_changes(default_group, config, session, changes, user):
    """Validate an uploaded source .changes and register it with a group suite.

    Variant that additionally handles RepoPackageNotFound from
    repo.find_dsc().  Rejects on unknown group/suite, dsc/changes
    mismatch, a duplicate in the group, or a newer version already in
    the suite; otherwise stores the new Source and its jobs, adds the
    upload to the repository, emits an 'accept' event and removes the
    uploaded files.
    """
    group = changes.get('X-Debile-Group', default_group)
    suite = changes['Distribution']
    try:
        group_suite = session.query(GroupSuite).join(GroupSuite.group).join(
            GroupSuite.suite).filter(
            Group.name == group,
            Suite.name == suite,
        ).one()
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        return reject_changes(session, changes, "invalid-suite-for-group")
    dsc = changes.get_dsc_obj()
    # NOTE(review): "march" below looks like a typo for "match", but the
    # string may be a protocol constant matched elsewhere — confirm
    # before changing it.
    if dsc['Source'] != changes['Source']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")
    if dsc['Version'] != changes['Version']:
        return reject_changes(session, changes, "dsc-does-not-march-changes")
    try:
        # Existence probe only; the fetched Source itself is unused.
        source = session.query(Source).filter(
            Source.name == dsc['Source'],
            Source.version == dsc['Version'],
            GroupSuite.group == group_suite.group,
        ).one()
        return reject_changes(session, changes, "source-already-in-group")
    except MultipleResultsFound:
        return reject_changes(session, changes, "internal-error")
    except NoResultFound:
        pass
    oldsources = session.query(Source).filter(
        Source.group_suite == group_suite,
        Source.name == dsc['Source'],
    )
    for oldsource in oldsources:
        if version_compare(oldsource.version, dsc['Version']) > 0:
            return reject_changes(session, changes, "newer-source-already-in-suite")
    # Drop any old jobs that are still pending.
    for oldsource in oldsources:
        for job in oldsource.jobs:
            if (not any(job.results) and not any(job.built_binaries)):
                session.delete(job)
            elif job.failed is None:
                # Job had output but never finished; mark it failed.
                job.failed = True
        if not any(oldsource.jobs):
            session.delete(oldsource)
    component = session.query(Component).filter_by(name="main").one()
    # Pick the arch-indep build-architecture hint, trying the official
    # field first, then older experimental spellings.
    if 'Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['Build-Architecture-Indep']
    elif 'X-Build-Architecture-Indep' in dsc:
        valid_affinities = dsc['X-Build-Architecture-Indep']
    elif 'X-Arch-Indep-Build-Arch' in dsc:
        valid_affinities = dsc['X-Arch-Indep-Build-Arch']
    else:
        valid_affinities = "any"
    with session.no_autoflush:
        source = create_source(dsc, group_suite, component, user,
                               config["affinity_preference"], valid_affinities)
        create_jobs(source)
        session.add(source)
    # We have a changes in order. Let's roll.
    repo = Repo(group_suite.group.repo_path)
    repo.add_changes(changes)
    try:
        (source.directory, source.dsc_filename) = repo.find_dsc(source)
    except RepoPackageNotFound:
        return reject_changes(session, changes, "reprepo-package-not-found")
    emit('accept', 'source', source.debilize())
    # OK. It's safely in the database and repo. Let's cleanup.
    for fp in [changes.get_changes_file()] + changes.get_files():
        os.unlink(fp)