def generate_diff(last, this):
    """Generate the differences."""
    changes_filename = changes_file(this.package.distro, this.getSources())

    if last is None:
        return

    if not os.path.isfile(changes_filename) \
            and not os.path.isfile(changes_filename + ".bz2"):
        try:
            unpack_source(this)
        except ValueError:
            logger.exception("Couldn't unpack %s.", this)
            return
        try:
            save_changes_file(changes_filename, this.getSources(),
                              last.getSources())
            logger.info("Saved changes file: %s",
                        tree.subdir(ROOT, changes_filename))
        except (ValueError, OSError):
            logger.error("dpkg-genchanges for %s failed",
                         tree.subdir(ROOT, changes_filename))

    logger.debug("Producing diff from %s to %s", this, last)
    diff_filename = diff_file(this.package.distro.name, this.getSources())
    if not os.path.isfile(diff_filename) \
            and not os.path.isfile(diff_filename + ".bz2"):
        unpack_source(this)
        unpack_source(last)
        save_patch_file(diff_filename, last.getSources(), this.getSources())
        save_basis(diff_filename, last.getSources()["Version"])
        logger.info("Saved diff file: %s", tree.subdir(ROOT, diff_filename))

def generate_diff(last, this):
    """Generate the differences."""
    changes_filename = changes_file(this.package.distro, this)

    if last is None:
        return

    if not os.path.isfile(changes_filename) \
            and not os.path.isfile(changes_filename + ".bz2"):
        try:
            unpack_source(this)
        except ValueError:
            logger.exception("Couldn't unpack %s.", this)
            return
        try:
            save_changes_file(changes_filename, this, last)
            logger.info("Saved changes file: %s",
                        tree.subdir(config.get('ROOT'), changes_filename))
        except (ValueError, OSError):
            logger.error("dpkg-genchanges for %s failed",
                         tree.subdir(config.get('ROOT'), changes_filename))

    logger.debug("Producing diff from %s to %s", this, last)
    diff_filename = diff_file(this.package.distro.name, this)
    if not os.path.isfile(diff_filename) \
            and not os.path.isfile(diff_filename + ".bz2"):
        unpack_source(this)
        unpack_source(last)
        save_patch_file(diff_filename, last, this)
        save_basis(diff_filename, last.version)
        logger.info("Saved diff file: %s",
                    tree.subdir(config.get('ROOT'), diff_filename))

def generate_diff(distro, last, this):
    """Generate the differences."""
    logging.debug("%s: %s %s", distro, this["Package"], this["Version"])

    changes_filename = changes_file(distro, this)
    if not os.path.isfile(changes_filename) \
            and not os.path.isfile(changes_filename + ".bz2"):
        unpack_source(distro, this)
        try:
            save_changes_file(changes_filename, this, last)
            logging.info("Saved changes file: %s",
                         tree.subdir(ROOT, changes_filename))
        except (ValueError, OSError):
            logging.error("dpkg-genchanges for %s failed",
                          tree.subdir(ROOT, changes_filename))

    if last is None:
        return

    diff_filename = diff_file(distro, this)
    if not os.path.isfile(diff_filename) \
            and not os.path.isfile(diff_filename + ".bz2"):
        unpack_source(distro, this)
        unpack_source(distro, last)
        save_patch_file(diff_filename, last, this)
        save_basis(diff_filename, last["Version"])
        logging.info("Saved diff file: %s", tree.subdir(ROOT, diff_filename))

def save_patch_file(filename, last, this):
    """Save a diff or patch file for the difference between two versions."""
    lastdir = unpack_directory(last)
    thisdir = unpack_directory(this)

    diffdir = os.path.commonprefix((lastdir, thisdir))
    diffdir = diffdir[:diffdir.rindex("/")]

    lastdir = tree.subdir(diffdir, lastdir)
    thisdir = tree.subdir(diffdir, thisdir)

    tree.ensure(filename)
    with open(filename, "w") as diff:
        # diff exits 0 (no differences), 1 (differences found) or 2 (trouble);
        # accept all three so a non-empty patch isn't treated as a failure
        shell.run(("diff", "-pruN", lastdir, thisdir), chdir=diffdir,
                  stdout=diff, okstatus=(0, 1, 2))

def create_tarball(package, version, output_dir, merged_dir):
    """Create a tarball of a merge with conflicts."""
    quilt_format = False
    try:
        with open(merged_dir + '/debian/source/format', 'r') as fd:
            quilt_format = fd.read().strip() == '3.0 (quilt)'
    except IOError:
        pass

    if quilt_format:
        filename = '%s/%s_%s.debian.tar.gz' % (output_dir, package,
                                               version.without_epoch)
        contained = 'debian'
        source = merged_dir + '/debian'
    else:
        filename = "%s/%s_%s.src.tar.gz" % (output_dir, package,
                                            version.without_epoch)
        contained = "%s-%s" % (package, version.without_epoch)
        source = merged_dir

    tree.ensure("%s/tmp/" % config.get('ROOT'))
    parent = tempfile.mkdtemp(dir="%s/tmp/" % config.get('ROOT'))
    try:
        tree.copytree(source, "%s/%s" % (parent, contained))
        shell.run(("tar", "czf", filename, contained), chdir=parent)
        logger.info("Created %s", tree.subdir(config.get('ROOT'), filename))
        return os.path.basename(filename)
    finally:
        tree.remove(parent)

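# Illustration (not from the original code; the package name and version below
# are hypothetical, and version.without_epoch is assumed to drop any "N:"
# epoch prefix): for a package "hello" at version 2.10-1co1, create_tarball()
# above would produce either
#     <output_dir>/hello_2.10-1co1.debian.tar.gz   containing  debian/
# when debian/source/format reads "3.0 (quilt)", or otherwise
#     <output_dir>/hello_2.10-1co1.src.tar.gz      containing  hello-2.10-1co1/
# with the whole merged tree inside.
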
def downloadPackage(self, dist, component, package=None, version=None):
    """Populate the 'pool' directory by downloading Debian source packages
    from the given release and component.

    :param str dist: a release codename such as "wheezy" or "precise"
    :param str component: a component (archive area) such as "main"
        or "contrib"
    :param package: a source package name, or None to download all of them
    :type package: str or None
    :return: True if anything actually changed, False otherwise
    :rtype: bool
    """
    if package is None:
        logger.debug('Downloading all packages from %s/%s/%s into %s pool',
                     self, dist, component, self)
    else:
        logger.debug('Downloading package "%s" from %s/%s/%s into %s pool',
                     package, self, dist, component, self)

    mirror = self.mirrorURL()
    sources = self.getSources(dist, component)
    changed = False
    for source in sources:
        if package is not None and package != source["Package"]:
            continue
        if package is not None and version is not None \
                and source["Version"] != str(version):
            continue

        sourcedir = source["Directory"]
        pkg = self.package(dist, component, source['Package'])

        for md5sum, size, name in files(source):
            url = "%s/%s/%s" % (mirror, sourcedir, name)
            filename = "%s/%s" % (pkg.poolPath, name)

            if os.path.isfile(filename):
                if os.path.getsize(filename) == int(size):
                    logger.debug("Skipping %s, already downloaded.", filename)
                    continue

            logger.debug("Downloading %s", url)
            changed = True
            tree.ensure(filename)
            with open(filename, 'w') as fd:
                try:
                    dl = urllib2.urlopen(url)
                    fd.write(dl.read())
                except IOError:
                    logger.error("Downloading %s failed", url)
                    raise
            logger.debug("Saved %s", tree.subdir(config.get('ROOT'), filename))
    return changed

def update_pool_sources(distro, package):
    """Update the Sources files in the pool."""
    pooldir = pool_directory(distro, package)
    filename = pool_sources_file(distro, package)

    if pool_sources_already_updated(pooldir, filename):
        return

    logging.info("Updating %s", tree.subdir(ROOT, filename))
    with open("%s.new" % filename, "w") as sources:
        shell.run(("apt-ftparchive", "sources", pooldir), chdir=ROOT,
                  stdout=sources)
    os.rename("%s.new" % filename, filename)

def publish_patch(distro, pv, filename, list_file):
    """Publish the latest version of the patch for all to see."""
    publish_filename = published_file(distro, pv)

    tree.ensure(publish_filename)
    if os.path.isfile(publish_filename):
        os.unlink(publish_filename)
    os.link(filename, publish_filename)

    logger.info("Published %s",
                tree.subdir(config.get('ROOT'), publish_filename))
    print >> list_file, "%s %s" % (
        pv.package,
        tree.subdir("%s/published" % config.get('ROOT'), publish_filename))

    # Remove older patches
    for junk in os.listdir(os.path.dirname(publish_filename)):
        junkpath = "%s/%s" % (os.path.dirname(publish_filename), junk)
        if os.path.isfile(junkpath) \
                and junk != os.path.basename(publish_filename):
            os.unlink(junkpath)

    # Publish extracted patches
    output = "%s/extracted" % os.path.dirname(publish_filename)
    if os.path.isdir(output):
        tree.remove(output)

    dpatch_dir = dpatch_directory(distro, pv)
    if os.path.isdir(dpatch_dir):
        for dpatch in tree.walk(dpatch_dir):
            if not len(dpatch):
                continue

            src_filename = "%s/%s" % (dpatch_dir, dpatch)
            dest_filename = "%s/%s" % (output, dpatch)

            logger.info("Published %s",
                        tree.subdir(config.get('ROOT'), dest_filename))
            tree.ensure(dest_filename)
            tree.copyfile(src_filename, dest_filename)

def publish_patch(distro, source, filename, list_file):
    """Publish the latest version of the patch for all to see."""
    publish_filename = published_file(distro, source)

    ensure(publish_filename)
    if os.path.isfile(publish_filename):
        os.unlink(publish_filename)
    os.link(filename, publish_filename)

    logging.info("Published %s", tree.subdir(ROOT, publish_filename))
    print("%s %s" % (source["Package"],
                     tree.subdir("%s/published" % ROOT, publish_filename)),
          file=list_file)

    # Remove older patches
    for junk in os.listdir(os.path.dirname(publish_filename)):
        junkpath = "%s/%s" % (os.path.dirname(publish_filename), junk)
        if os.path.isfile(junkpath) \
                and junk != os.path.basename(publish_filename):
            os.unlink(junkpath)

    # Publish extracted patches
    output = "%s/extracted" % os.path.dirname(publish_filename)
    if os.path.isdir(output):
        tree.remove(output)

    dpatch_dir = dpatch_directory(distro, source)
    if os.path.isdir(dpatch_dir):
        for dpatch in tree.walk(dpatch_dir):
            if not len(dpatch):
                continue

            src_filename = "%s/%s" % (dpatch_dir, dpatch)
            dest_filename = "%s/%s" % (output, dpatch)

            logging.info("Published %s", tree.subdir(ROOT, dest_filename))
            ensure(dest_filename)
            tree.copyfile(src_filename, dest_filename)

def updatePool(self, dist, component, package=None):
    """Populate the 'pool' directory by downloading Debian source packages
    from the given release and component.

    :param str dist: a release codename such as "wheezy" or "precise"
    :param str component: a component (archive area) such as "main"
        or "contrib"
    :param package: a source package name, or None to download all of them
    :type package: str or None
    :return: True if anything actually changed, False otherwise
    :rtype: bool
    """
    if package is None:
        logger.debug('Downloading all packages from %s/%s/%s into %s pool',
                     self, dist, component, self)
    else:
        logger.debug('Downloading package "%s" from %s/%s/%s into %s pool',
                     package, self, dist, component, self)

    mirror = self.mirrorURL(dist, component)
    sources = self.getSources(dist, component)
    changed = False
    for source in sources:
        if package is not None and package != source["Package"]:
            continue

        sourcedir = source["Directory"]
        pooldir = PoolDirectory(self, component, source["Package"]).path

        for md5sum, size, name in files(source):
            url = "%s/%s/%s" % (mirror, sourcedir, name)
            filename = "%s/%s/%s" % (config.get('ROOT'), pooldir, name)

            if os.path.isfile(filename):
                if os.path.getsize(filename) == int(size):
                    logger.debug("Skipping %s, already downloaded.", filename)
                    continue

            logger.debug("Downloading %s", url)
            changed = True
            tree.ensure(filename)
            try:
                urllib.URLopener().retrieve(url, filename)
            except IOError:
                logger.error("Downloading %s failed", url)
                raise
            logger.debug("Saved %s", tree.subdir(config.get('ROOT'), filename))
    return changed

def generate_dpatch(distro, source):
    """Generate the extracted patches."""
    logging.debug("%s: %s %s", distro, source["Package"], source["Version"])

    stamp = "%s/%s/dpatch-stamp-%s" \
        % (ROOT, source["Directory"], source["Version"])

    if not os.path.isfile(stamp):
        open(stamp, "w").close()

        unpack_source(distro, source)
        try:
            dirname = dpatch_directory(distro, source)
            extract_dpatches(dirname, source)
            logging.info("Saved dpatches: %s", tree.subdir(ROOT, dirname))
        finally:
            cleanup_source(source)

def create_patch(version, filename, merged_dir, basis_source, basis_dir):
    """Create the merged patch."""
    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, version))
        tree.copytree(basis_dir, "%s/%s" % (parent, basis_source["Version"]))

        with open(filename, "w") as diff:
            shell.run(("diff", "-pruN", basis_source["Version"],
                       "%s" % version),
                      chdir=parent, stdout=diff, okstatus=(0, 1, 2))

        logger.info("Created %s", tree.subdir(ROOT, filename))
        return os.path.basename(filename)
    finally:
        tree.remove(parent)

def create_patch(version, filename, merged_dir, basis, basis_dir):
    """Create the merged patch."""
    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, version))
        tree.copytree(basis_dir, "%s/%s" % (parent, basis.version))

        with open(filename, "w") as diff:
            shell.run(("diff", "-pruN", str(basis.version), str(version)),
                      chdir=parent, stdout=diff, okstatus=(0, 1, 2))

        logger.info("Created %s", tree.subdir(config.get('ROOT'), filename))
        return os.path.basename(filename)
    finally:
        tree.remove(parent)

def updateSources(self, dist, component):
    """Update a Sources file."""
    url = self.sourcesURL(dist, component)
    filename = self.sourcesFile(dist, component)

    logger.debug("Downloading %s", url)
    try:
        if not os.path.isdir(os.path.dirname(filename)):
            os.makedirs(os.path.dirname(filename))
        urllib.URLopener().retrieve(url, filename)
    except IOError:
        logger.error("Downloading %s failed", url)
        raise
    logger.debug("Saved %s", tree.subdir(config.get('ROOT'), filename))

    with gzip.open(self.sourcesFile(dist, component)) as gzf:
        with open(self.sourcesFile(dist, component, False), "wb") as f:
            f.write(gzf.read())

def create_patch(package, version, output_dir, merged_dir,
                 right_source, right_dir):
    """Create the merged patch."""
    filename = "%s/%s_%s.patch" % (output_dir, package, version)

    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, version))
        tree.copytree(right_dir, "%s/%s" % (parent, right_source["Version"]))

        with open(filename, "w") as diff:
            shell.run(("diff", "-pruN", right_source["Version"],
                       "%s" % version),
                      chdir=parent, stdout=diff, okstatus=(0, 1, 2))

        logging.info("Created %s", tree.subdir(ROOT, filename))
        return os.path.basename(filename)
    finally:
        tree.remove(parent)

def generate_patch(base_distro, base_source, distro, our_source,
                   slipped=False, force=False):
    """Generate a patch file for the given comparison."""
    our_version = Version(our_source["Version"])
    base_version = Version(base_source["Version"])

    if base_version > our_version:
        # Allow comparison of source -1 against our -0ubuntuX (slipped)
        if not slipped:
            return
        elif our_version.revision is None:
            return
        elif not our_version.revision.startswith("0ubuntu"):
            return
        elif base_version.revision != "1":
            return
        elif base_version.upstream != our_version.upstream:
            return
        elif base_version.epoch != our_version.epoch:
            return
        logging.debug("Allowing comparison of -1 against -0ubuntuX")
    elif base_version == our_version:
        return

    filename = patch_file(distro, our_source, slipped)
    if not force:
        basis = read_basis(filename)
        if basis is not None and basis == base_version:
            return

    unpack_source(base_distro, base_source)
    unpack_source(distro, our_source)

    ensure(filename)
    save_patch_file(filename, base_source, our_source)
    save_basis(filename, base_version)
    logging.info("Saved patch file: %s", tree.subdir(ROOT, filename))

def generate_patch(base, distro, ours, slipped=False, force=False,
                   unpacked=False):
    """Generate a patch file for the given comparison."""
    base_source = base.getSources()
    our_source = ours.getSources()
    our_version = Version(our_source["Version"])
    base_version = Version(base_source["Version"])

    if base_version > our_version:
        # Allow comparison of source -1 against our -0coX (slipped)
        if not slipped:
            return
        elif our_version.revision is None:
            return
        elif not our_version.revision.startswith("0co"):
            return
        elif base_version.revision != "1":
            return
        elif base_version.upstream != our_version.upstream:
            return
        elif base_version.epoch != our_version.epoch:
            return
        logging.debug("Allowing comparison of -1 against -0coX")
    elif base_version == our_version:
        return

    filename = patch_file(distro, our_source, slipped)
    if not force:
        basis = read_basis(filename)
        if basis is not None and basis == base_version:
            return

    if not os.path.exists(filename):
        if not unpacked:
            unpack_source(base)
            unpack_source(ours)

        tree.ensure(filename)
        save_patch_file(filename, base_source, our_source)
        save_basis(filename, base_version)
        logging.info("Saved patch file: %s", tree.subdir(ROOT, filename))

def generate_patch(base, distro, ours, slipped=False, force=False,
                   unpacked=False):
    """Generate a patch file for the given comparison."""
    if base.version > ours.version:
        # Allow comparison of source -1 against our -0coX (slipped)
        if not slipped:
            return
        elif ours.version.revision is None:
            return
        elif not ours.version.revision.startswith("0co"):
            return
        elif base.version.revision != "1":
            return
        elif base.version.upstream != ours.version.upstream:
            return
        elif base.version.epoch != ours.version.epoch:
            return
        logging.debug("Allowing comparison of -1 against -0coX")
    elif base.version == ours.version:
        return

    filename = patch_file(distro, ours, slipped)
    if not force:
        basis = read_basis(filename)
        if basis is not None and basis == base.version:
            return

    if not os.path.exists(filename):
        if not unpacked:
            unpack_source(base)
            unpack_source(ours)

        tree.ensure(filename)
        save_patch_file(filename, base, ours)
        save_basis(filename, base.version)
        logging.info("Saved patch file: %s",
                     tree.subdir(config.get('ROOT'), filename))

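# Worked example (illustrative only; the version strings below are
# hypothetical, and Debian-style version comparison is assumed): the "slipped"
# guard in generate_patch() above only matters when the base version sorts
# higher than ours, and then it only allows a base "-1" revision against our
# "-0co*" rebuild of the same upstream:
#
#   base 2.4-1    vs ours 2.4-0co2  -> allowed (slipped comparison)
#   base 2.5-1    vs ours 2.4-0co2  -> skipped (upstream versions differ)
#   base 1:2.4-1  vs ours 2.4-0co2  -> skipped (epochs differ)
#   base 2.4-2    vs ours 2.4-0co1  -> skipped (base revision is not "1")
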
def create_tarball(package, version, output_dir, merged_dir):
    """Create a tarball of a merge with conflicts."""
    filename = "%s/%s_%s.src.tar.gz" % (output_dir, package,
                                        version.without_epoch)
    contained = "%s-%s" % (package, version.without_epoch)

    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, contained))

        debian_rules = "%s/%s/debian/rules" % (parent, contained)
        if os.path.isfile(debian_rules):
            # Make debian/rules executable (adds u+x, g+x and o+x)
            os.chmod(debian_rules, os.stat(debian_rules).st_mode | 0111)

        shell.run(("tar", "czf", filename, contained), chdir=parent)
        logging.info("Created %s", tree.subdir(ROOT, filename))
        return os.path.basename(filename)
    finally:
        tree.remove(parent)

def generate_dpatch(distro, pv):
    """Generate the extracted patches."""
    logger.debug("%s: %s", distro, pv)

    stamp = "%s/dpatch-stamp-%s" % (pv.package.poolPath, pv.version)

    if not os.path.isfile(stamp):
        open(stamp, "w").close()

        try:
            unpack_source(pv)
        except ValueError:
            logger.exception("Could not unpack %s!", pv)
        try:
            dirname = dpatch_directory(distro, pv)
            extract_dpatches(dirname, pv)
            logger.info("Saved dpatches: %s",
                        tree.subdir(config.get('ROOT'), dirname))
        finally:
            cleanup_source(pv)

def generate_dpatch(distro, source, pkg):
    """Generate the extracted patches."""
    logger.debug("%s: %s %s", distro, pkg, source["Version"])

    stamp = "%s/%s/dpatch-stamp-%s" \
        % (ROOT, pkg.poolDirectory().path, source["Version"])

    if not os.path.isfile(stamp):
        open(stamp, "w").close()

        try:
            unpack_source(pkg)
        except ValueError:
            logger.exception("Could not unpack %s!", pkg)
        try:
            dirname = dpatch_directory(distro, source)
            extract_dpatches(dirname, source)
            logger.info("Saved dpatches: %s", tree.subdir(ROOT, dirname))
        finally:
            cleanup_source(source)

def create_source(package, version, since, output_dir, merged_dir):
    """Create a source package without conflicts."""
    contained = "%s-%s" % (package, version.upstream)
    filename = "%s_%s.dsc" % (package, version.without_epoch)

    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, contained))

        orig_filename = "%s_%s.orig.tar.gz" % (package, version.upstream)
        if os.path.isfile("%s/%s" % (output_dir, orig_filename)):
            os.link("%s/%s" % (output_dir, orig_filename),
                    "%s/%s" % (parent, orig_filename))

        cmd = ("dpkg-source",)
        if version.revision is not None and since.upstream != version.upstream:
            cmd += ("-sa",)
        cmd += ("-b", contained)

        try:
            shell.run(cmd, chdir=parent)
        except (ValueError, OSError):
            logging.error("dpkg-source failed")
            return create_tarball(package, version, output_dir, merged_dir)

        if os.path.isfile("%s/%s" % (parent, filename)):
            logging.info("Created %s", filename)
            for name in os.listdir(parent):
                src = "%s/%s" % (parent, name)
                dest = "%s/%s" % (output_dir, name)
                if os.path.isfile(src) and not os.path.isfile(dest):
                    os.link(src, dest)
            return os.path.basename(filename)
        else:
            logging.warning("Dropped dsc %s", tree.subdir(ROOT, filename))
            return create_tarball(package, version, output_dir, merged_dir)
    finally:
        tree.remove(parent)

def update_pool(distro, source):
    """Download a source package into our pool."""
    mirror = DISTROS[distro]["mirror"]
    sourcedir = source["Directory"]
    pooldir = pool_directory(distro, source["Package"])

    for md5sum, size, name in files(source):
        url = "%s/%s/%s" % (mirror, sourcedir, name)
        filename = "%s/%s/%s" % (ROOT, pooldir, name)

        if os.path.isfile(filename):
            if os.path.getsize(filename) == int(size):
                continue

        logging.debug("Downloading %s", url)
        ensure(filename)
        try:
            urllib.URLopener().retrieve(url, filename)
        except IOError:
            logging.error("Downloading %s failed", url)
            raise
        logging.info("Saved %s", tree.subdir(ROOT, filename))

def update_sources(distro, dist, component):
    """Update a Sources file."""
    url = sources_url(distro, dist, component)
    filename = sources_file(distro, dist, component)

    logging.debug("Downloading %s", url)

    gzfilename = tempfile.mktemp()
    try:
        urllib.URLopener().retrieve(url, gzfilename)
    except IOError:
        logging.error("Downloading %s failed", url)
        raise
    try:
        with closing(gzip.GzipFile(gzfilename)) as gzfile:
            ensure(filename)
            with open(filename, "w") as local:
                local.write(gzfile.read())
    finally:
        os.unlink(gzfilename)

    logging.info("Saved %s", tree.subdir(ROOT, filename))
    return filename

def do_merge(left_dir, left_name, left_distro, base_dir,
             right_dir, right_name, right_distro, merged_dir):
    """Do the heavy lifting of comparing and merging."""
    logging.debug("Producing merge in %s", tree.subdir(ROOT, merged_dir))

    conflicts = []
    po_files = []

    # Look for files in the base and merge them if they're in both new
    # files (removed files get removed)
    for filename in tree.walk(base_dir):
        if tree.under(".pc", filename):
            # Not interested in merging quilt metadata
            continue

        base_stat = os.lstat("%s/%s" % (base_dir, filename))

        try:
            left_stat = os.lstat("%s/%s" % (left_dir, filename))
        except OSError:
            left_stat = None

        try:
            right_stat = os.lstat("%s/%s" % (right_dir, filename))
        except OSError:
            right_stat = None

        if left_stat is None and right_stat is None:
            # Removed on both sides
            pass

        elif left_stat is None:
            logging.debug("removed from %s: %s", left_distro, filename)
            if not same_file(base_stat, base_dir, right_stat, right_dir,
                             filename):
                # Changed on RHS
                conflict_file(left_dir, left_distro, right_dir, right_distro,
                              merged_dir, filename)
                conflicts.append(filename)

        elif right_stat is None:
            # Removed on RHS only
            logging.debug("removed from %s: %s", right_distro, filename)
            if not same_file(base_stat, base_dir, left_stat, left_dir,
                             filename):
                # Changed on LHS
                conflict_file(left_dir, left_distro, right_dir, right_distro,
                              merged_dir, filename)
                conflicts.append(filename)

        elif S_ISREG(left_stat.st_mode) and S_ISREG(right_stat.st_mode):
            # Common case: left and right are both files
            if handle_file(left_stat, left_dir, left_name, left_distro,
                           right_dir, right_stat, right_name, right_distro,
                           base_stat, base_dir, merged_dir, filename,
                           po_files):
                conflicts.append(filename)

        elif same_file(left_stat, left_dir, right_stat, right_dir, filename):
            # left and right are the same, doesn't matter which we keep
            tree.copyfile("%s/%s" % (right_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        elif same_file(base_stat, base_dir, left_stat, left_dir, filename):
            # right has changed in some way, keep that one
            logging.debug("preserving non-file change in %s: %s",
                          right_distro, filename)
            tree.copyfile("%s/%s" % (right_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        elif same_file(base_stat, base_dir, right_stat, right_dir, filename):
            # left has changed in some way, keep that one
            logging.debug("preserving non-file change in %s: %s",
                          left_distro, filename)
            tree.copyfile("%s/%s" % (left_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        else:
            # all three differ, mark a conflict
            conflict_file(left_dir, left_distro, right_dir, right_distro,
                          merged_dir, filename)
            conflicts.append(filename)

    # Look for files in the left hand side that aren't in the base,
    # conflict if new on both sides or copy into the tree
    for filename in tree.walk(left_dir):
        if tree.under(".pc", filename):
            # Not interested in merging quilt metadata
            continue

        if tree.exists("%s/%s" % (base_dir, filename)):
            continue

        if not tree.exists("%s/%s" % (right_dir, filename)):
            logging.debug("new in %s: %s", left_distro, filename)
            tree.copyfile("%s/%s" % (left_dir, filename),
                          "%s/%s" % (merged_dir, filename))
            continue

        left_stat = os.lstat("%s/%s" % (left_dir, filename))
        right_stat = os.lstat("%s/%s" % (right_dir, filename))

        if S_ISREG(left_stat.st_mode) and S_ISREG(right_stat.st_mode):
            # Common case: left and right are both files
            if handle_file(left_stat, left_dir, left_name, left_distro,
                           right_dir, right_stat, right_name, right_distro,
                           None, None, merged_dir, filename, po_files):
                conflicts.append(filename)

        elif same_file(left_stat, left_dir, right_stat, right_dir, filename):
            # left and right are the same, doesn't matter which we keep
            tree.copyfile("%s/%s" % (right_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        else:
            # they differ, mark a conflict
            conflict_file(left_dir, left_distro, right_dir, right_distro,
                          merged_dir, filename)
            conflicts.append(filename)

    # Copy new files on the right hand side only into the tree
    for filename in tree.walk(right_dir):
        if tree.under(".pc", filename):
            # Not interested in merging quilt metadata
            continue

        if tree.exists("%s/%s" % (base_dir, filename)):
            continue

        if tree.exists("%s/%s" % (left_dir, filename)):
            continue

        logging.debug("new in %s: %s", right_distro, filename)
        tree.copyfile("%s/%s" % (right_dir, filename),
                      "%s/%s" % (merged_dir, filename))

    # Handle po files separately as they need special merging
    for filename in po_files:
        if merge_po(left_dir, right_dir, merged_dir, filename):
            conflict_file(left_dir, left_distro, right_dir, right_distro,
                          merged_dir, filename)
            conflicts.append(filename)
            continue

        merge_attr(base_dir, left_dir, right_dir, merged_dir, filename)

    return conflicts

def do_merge(left_dir, left, base_dir, right_dir, right, merged_dir):
    """Do the heavy lifting of comparing and merging."""
    logger.debug("Producing merge in %s", tree.subdir(ROOT, merged_dir))

    conflicts = []
    po_files = []

    left_name = left.package.name
    left_distro = left.package.distro.name
    right_name = right.package.name
    right_distro = right.package.distro.name

    # See what format each is and whether they're both quilt
    left_format = left.getSources()["Format"]
    right_format = right.getSources()["Format"]
    both_formats_quilt = left_format == right_format == "3.0 (quilt)"
    if both_formats_quilt:
        logger.debug("Only merging debian directory since both "
                     "formats 3.0 (quilt)")

    # Look for files in the base and merge them if they're in both new
    # files (removed files get removed)
    for filename in tree.walk(base_dir):
        # If both packages are 3.0 (quilt), ignore everything except the
        # debian directory
        if both_formats_quilt and not tree.under("debian", filename):
            continue

        if tree.under(".pc", filename):
            # Not interested in merging quilt metadata
            continue

        base_stat = os.lstat("%s/%s" % (base_dir, filename))

        try:
            left_stat = os.lstat("%s/%s" % (left_dir, filename))
        except OSError:
            left_stat = None

        try:
            right_stat = os.lstat("%s/%s" % (right_dir, filename))
        except OSError:
            right_stat = None

        if left_stat is None and right_stat is None:
            # Removed on both sides
            pass

        elif left_stat is None:
            logger.debug("removed from %s: %s", left_distro, filename)
            if not same_file(base_stat, base_dir, right_stat, right_dir,
                             filename):
                # Changed on RHS
                conflict_file(left_dir, left_distro, right_dir, right_distro,
                              merged_dir, filename)
                conflicts.append(filename)

        elif right_stat is None:
            # Removed on RHS only
            logger.debug("removed from %s: %s", right_distro, filename)
            if not same_file(base_stat, base_dir, left_stat, left_dir,
                             filename):
                # Changed on LHS
                conflict_file(left_dir, left_distro, right_dir, right_distro,
                              merged_dir, filename)
                conflicts.append(filename)

        elif S_ISREG(left_stat.st_mode) and S_ISREG(right_stat.st_mode):
            # Common case: left and right are both files
            if handle_file(left_stat, left_dir, left_name, left_distro,
                           right_dir, right_stat, right_name, right_distro,
                           base_stat, base_dir, merged_dir, filename,
                           po_files):
                conflicts.append(filename)

        elif same_file(left_stat, left_dir, right_stat, right_dir, filename):
            # left and right are the same, doesn't matter which we keep
            tree.copyfile("%s/%s" % (right_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        elif same_file(base_stat, base_dir, left_stat, left_dir, filename):
            # right has changed in some way, keep that one
            logger.debug("preserving non-file change in %s: %s",
                         right_distro, filename)
            tree.copyfile("%s/%s" % (right_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        elif same_file(base_stat, base_dir, right_stat, right_dir, filename):
            # left has changed in some way, keep that one
            logger.debug("preserving non-file change in %s: %s",
                         left_distro, filename)
            tree.copyfile("%s/%s" % (left_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        else:
            # all three differ, mark a conflict
            conflict_file(left_dir, left_distro, right_dir, right_distro,
                          merged_dir, filename)
            conflicts.append(filename)

    # Look for files in the left hand side that aren't in the base,
    # conflict if new on both sides or copy into the tree
    for filename in tree.walk(left_dir):
        # If both packages are 3.0 (quilt), ignore everything except the
        # debian directory
        if both_formats_quilt and not tree.under("debian", filename):
            continue

        if tree.under(".pc", filename):
            # Not interested in merging quilt metadata
            continue

        if tree.exists("%s/%s" % (base_dir, filename)):
            continue

        if not tree.exists("%s/%s" % (right_dir, filename)):
            logger.debug("new in %s: %s", left_distro, filename)
            tree.copyfile("%s/%s" % (left_dir, filename),
                          "%s/%s" % (merged_dir, filename))
            continue

        left_stat = os.lstat("%s/%s" % (left_dir, filename))
        right_stat = os.lstat("%s/%s" % (right_dir, filename))

        if S_ISREG(left_stat.st_mode) and S_ISREG(right_stat.st_mode):
            # Common case: left and right are both files
            if handle_file(left_stat, left_dir, left_name, left_distro,
                           right_dir, right_stat, right_name, right_distro,
                           None, None, merged_dir, filename, po_files):
                conflicts.append(filename)

        elif same_file(left_stat, left_dir, right_stat, right_dir, filename):
            # left and right are the same, doesn't matter which we keep
            tree.copyfile("%s/%s" % (right_dir, filename),
                          "%s/%s" % (merged_dir, filename))

        else:
            # they differ, mark a conflict
            conflict_file(left_dir, left_distro, right_dir, right_distro,
                          merged_dir, filename)
            conflicts.append(filename)

    # Copy new files on the right hand side only into the tree
    for filename in tree.walk(right_dir):
        if tree.under(".pc", filename):
            # Not interested in merging quilt metadata
            continue

        if both_formats_quilt and not tree.under("debian", filename):
            # Always copy right version for quilt non-debian files
            if not tree.exists("%s/%s" % (left_dir, filename)):
                logger.debug("new in %s: %s", right_distro, filename)
        else:
            if tree.exists("%s/%s" % (base_dir, filename)):
                continue

            if tree.exists("%s/%s" % (left_dir, filename)):
                continue

            logger.debug("new in %s: %s", right_distro, filename)

        tree.copyfile("%s/%s" % (right_dir, filename),
                      "%s/%s" % (merged_dir, filename))

    # Handle po files separately as they need special merging
    for filename in po_files:
        if merge_po(left_dir, right_dir, merged_dir, filename):
            conflict_file(left_dir, left_distro, right_dir, right_distro,
                          merged_dir, filename)
            conflicts.append(filename)
            continue

        merge_attr(base_dir, left_dir, right_dir, merged_dir, filename)

    return conflicts

            create_tarball(package, version, output_dir, merged_dir))
        else:
            logger.debug("dpkg-source succeeded:\n%s\n", dpkg_source_output)

        if os.path.isfile("%s/%s" % (parent, filename)):
            logger.info("Created dpkg-source %s", filename)
            for name in os.listdir(parent):
                src = "%s/%s" % (parent, name)
                dest = "%s/%s" % (output_dir, name)
                if os.path.isfile(src) and not os.path.isfile(dest):
                    os.link(src, dest)
            return (MergeResult.MERGED, None, os.path.basename(filename))
        else:
            message = ("dpkg-source did not produce expected filename %s"
                       % tree.subdir(ROOT, filename))
            logger.warning("%s", message)
            return (MergeResult.FAILED,
                    "unable to build merged source package (%s)" % message,
                    create_tarball(package, version, output_dir, merged_dir))
    finally:
        tree.remove(parent)


def create_patch(version, filename, merged_dir, basis_source, basis_dir):
    """Create the merged patch."""
    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, version))
        tree.copytree(basis_dir, "%s/%s" % (parent, basis_source["Version"]))

def main(options, args):
    logger.debug('Committing merges...')

    for target in config.targets(args):
        d = target.distro

        if not isinstance(d, OBSDistro):
            logger.debug('Skipping %r distro %r: not an OBSDistro', target, d)
            continue

        for package in d.packages(target.dist, target.component):
            if options.package and package.name not in options.package:
                logger.debug('Skipping package %s: not selected', package.name)
                continue

            if package.name in target.blacklist:
                logger.debug('Skipping package %s: blacklisted', package.name)
                continue

            try:
                output_dir = result_dir(target.name, package.name)
                report = read_report(output_dir)
            except ValueError:
                logger.debug('Skipping package %s: unable to read report',
                             package.name)
                continue

            if report['committed']:
                if options.force:
                    logger.info("Forcing commit of %s", package)
                else:
                    logger.debug("%s already committed, skipping!", package)
                    continue

            if report['result'] not in (MergeResult.MERGED,
                                        MergeResult.SYNC_THEIRS):
                logger.debug("%s has nothing to commit: result=%s",
                             package, report['result'])
                continue

            filepaths = report['merged_files']
            if filepaths == []:
                logger.warning("Empty merged file list in %s/REPORT"
                               % output_dir)
                continue

            if target.committable:
                # we can commit directly to the target distribution
                # FIXME: is this still a supported configuration? I wouldn't
                # want to commit automated merges without some sort of manual
                # check on the debdiff...
                logger.info("Committing changes to %s", package)
                if not options.dry_run:
                    try:
                        package.commit('Automatic update by Merge-O-Matic')
                    except urllib2.HTTPError as e:
                        logger.exception('Failed to commit %s: HTTP error %s '
                                         'at <%s>:',
                                         package, e.code, e.geturl())
                        update_report(report, output_dir, False,
                                      "HTTP error %s" % e.code)
                    except Exception as e:
                        logger.exception('Failed to commit %s:', package)
                        # deliberately rather vague, as below
                        update_report(report, output_dir, False,
                                      "%s" % e.__class__.__name__)
                    else:
                        update_report(report, output_dir, True,
                                      committed_to=d.obsProject(
                                          target.dist, target.component))
                continue

            # else we need to branch it and commit to the branch
            try:
                logger.debug("Branching %s", package)
                branchPkg = package.branch("home:%s:branches" % (d.obsUser))

                branch = branchPkg.distro
                branch.sync(target.dist, target.component, [branchPkg])
                logger.info("Committing changes to %s, and submitting merge "
                            "request to %s", branchPkg, package)

                if report['result'] == MergeResult.SYNC_THEIRS:
                    srcDistro = Distro.get(report['right_distro'])
                    version = Version(report['right_version'])

                    logger.debug('Copying updated upstream version %s from '
                                 '%r into %r', version, srcDistro, target)
                    for upstream in target.getSourceLists(package.name):
                        for src in upstream:
                            srcDistro = src.distro
                            try:
                                pkg = srcDistro.findPackage(
                                    package.name, searchDist=src.dist,
                                    version=version)[0]
                                pfx = pkg.poolPath
                                break
                            except model.error.PackageNotFound:
                                pass
                else:
                    logger.debug('Copying merged version from %r into %r',
                                 branch, target)
                    pfx = result_dir(target.name, package.name)

                # this might raise an error
                obsFiles = branchPkg.getOBSFiles()

                # Get the linked target files since the checkout is expanded
                # and may contain them
                linkedFiles = package.getOBSFiles()

                for f in obsFiles:
                    if f.endswith(".dsc"):
                        oldDsc = '%s/%s' % (branchPkg.obsDir(), f)
                        break
                for f in filepaths:
                    if f.endswith(".dsc"):
                        newDsc = '%s/%s' % (pfx, f)
                        break

                # logger.debug("Running debdiff on %s and %s", oldDsc, newDsc)
                # comment = shell.get(("debdiff", oldDsc, newDsc),
                #                     okstatus=(0, 1))
                # FIXME: Debdiff needs to be implemented in OBS, as large
                # merge descriptions break clucene.
                comment = ''
                if report['result'] == MergeResult.SYNC_THEIRS:
                    comment += 'Sync to '
                elif report['result'] == MergeResult.MERGED:
                    comment += 'Merge with '
                comment += 'version %s from %s %s' % (
                    report['right_version'], report['right_distro'],
                    report['right_suite'])
                comment += "\n\nMerge report is available at %s" % (
                    '/'.join((config.get('MOM_URL'),
                              subdir(config.get('ROOT'), output_dir),
                              'REPORT.html')))

                # The newlines seem to cause create_submit_request to send
                # UTF-32 over the wire, which OBS promptly chokes on. Encode
                # the message to UTF-8 first.
                comment = comment.encode('utf-8')

                if not options.dry_run:
                    filesUpdated = False
                    for f in obsFiles + linkedFiles:
                        if f == "_link":
                            continue
                        try:
                            logger.debug('deleting %s/%s',
                                         branchPkg.obsDir(), f)
                            os.unlink('%s/%s' % (branchPkg.obsDir(), f))
                            filesUpdated = True
                        except OSError:
                            pass
                    for f in filepaths:
                        if f == "_link":
                            continue
                        logger.debug('copying %s/%s -> %s', pfx, f,
                                     branchPkg.obsDir())
                        shutil.copy2("%s/%s" % (pfx, f), branchPkg.obsDir())
                        filesUpdated = True

                    if filesUpdated:
                        logger.debug('Submitting request to merge %r from %r '
                                     'into %r', branchPkg, branch, target)
                        try:
                            branchPkg.commit(
                                'Automatic update by Merge-O-Matic')
                            obs_project = d.obsProject(target.dist,
                                                       target.component)
                            reqid = branchPkg.submitMergeRequest(obs_project,
                                                                 comment)
                            update_report(
                                report, output_dir, True,
                                committed_to=obs_project,
                                request_url=branchPkg.webMergeRequest(reqid))
                        except xml.etree.cElementTree.ParseError:
                            logger.exception("Failed to commit %s", branchPkg)
                            update_report(report, output_dir, False,
                                          "OBS API Error")
                        except urllib2.HTTPError as e:
                            logger.exception("Failed to commit %s: HTTP error "
                                             "%s at <%s>:",
                                             branchPkg, e.code, e.geturl())
                            update_report(report, output_dir, False,
                                          "HTTP error %s" % e.code)
                        except Exception as e:
                            logger.exception("Failed to commit %s", branchPkg)
                            # deliberately being a bit vague here in case the
                            # exact exception leaks internal info
                            update_report(report, output_dir, False,
                                          "%s" % e.__class__.__name__)
                else:
                    logger.info("Not committing, due to --dry-run")

            except urllib2.HTTPError as e:
                logger.exception('Failed to branch %s: HTTP error %s at <%s>:',
                                 package, e.code, e.geturl())
                update_report(report, output_dir, False,
                              "Failed to branch: HTTP error %s" % e.code)
            except Exception as e:
                logger.exception('Failed to branch %s:', package)
                # deliberately being a bit vague here in case the exact
                # exception leaks internal info
                update_report(report, output_dir, False,
                              "Failed to branch: %s" % e.__class__.__name__)

def main(options, args):
    logger.debug('Committing merges...')

    for target in config.targets(args):
        d = target.distro

        if not isinstance(d, OBSDistro):
            logger.debug('Skipping %r distro %r: not an OBSDistro', target, d)
            continue

        for source in d.newestSources(target.dist, target.component):
            if options.package and source['Package'] not in options.package:
                logger.debug('Skipping package %s: not selected',
                             source['Package'])
                continue

            if source['Package'] in target.blacklist:
                logger.debug('Skipping package %s: blacklisted',
                             source['Package'])
                continue

            try:
                output_dir = result_dir(target.name, source['Package'])
                report = read_report(output_dir)
            except ValueError:
                logger.debug('Skipping package %s: unable to read report',
                             source['Package'])
                continue

            package = d.package(target.dist, target.component,
                                report.source_package)

            if report['committed']:
                if options.force:
                    logger.info("Forcing commit of %s", package)
                else:
                    logger.debug("%s already committed, skipping!", package)
                    continue

            if report['result'] not in (MergeResult.MERGED,
                                        MergeResult.SYNC_THEIRS):
                logger.debug("%s has nothing to commit: result=%s",
                             package, report['result'])
                continue

            filepaths = report['merged_files']
            if filepaths == []:
                logger.warning("Empty merged file list in %s/REPORT"
                               % output_dir)
                continue

            if target.committable:
                # we can commit directly to the target distribution
                # FIXME: is this still a supported configuration? I wouldn't
                # want to commit automated merges without some sort of manual
                # check on the debdiff...
                logger.info("Committing changes to %s", package)
                if not options.dry_run:
                    try:
                        package.commit('Automatic update by Merge-O-Matic')
                    except urllib2.HTTPError as e:
                        logger.exception('Failed to commit %s: HTTP error %s '
                                         'at <%s>:',
                                         package, e.code, e.geturl())
                        update_report(report, output_dir, False,
                                      "HTTP error %s" % e.code)
                    except Exception as e:
                        logger.exception('Failed to commit %s:', package)
                        # deliberately rather vague, as below
                        update_report(report, output_dir, False,
                                      "%s" % e.__class__.__name__)
                    else:
                        update_report(report, output_dir, True,
                                      committed_to=d.obsProject(
                                          target.dist, target.component))
                continue

            # else we need to branch it and commit to the branch
            try:
                logger.debug("Branching %s", package)
                branchPkg = package.branch("home:%s:branches" % (d.obsUser))

                branch = branchPkg.distro
                branch.sync(target.dist, target.component, [branchPkg])
                logger.info("Committing changes to %s, and submitting merge "
                            "request to %s", branchPkg, package)

                if report['result'] == MergeResult.SYNC_THEIRS:
                    srcDistro = Distro.get(report['right_distro'])
                    version = Version(report['right_version'])

                    logger.debug('Copying updated upstream version %s from '
                                 '%r into %r', version, srcDistro, target)
                    for upstream in target.getSourceLists(package.name):
                        for src in upstream:
                            srcDistro = src.distro
                            try:
                                pkg = srcDistro.findPackage(
                                    package.name, searchDist=src.dist,
                                    version=version)[0]
                                pfx = pkg.poolDirectory().path
                                break
                            except model.error.PackageNotFound:
                                pass
                else:
                    logger.debug('Copying merged version from %r into %r',
                                 branch, target)
                    pfx = result_dir(target.name, package.name)

                # this might raise an error
                obsFiles = branchPkg.getOBSFiles()

                # Get the linked target files since the checkout is expanded
                # and may contain them
                linkedFiles = package.getOBSFiles()

                for f in obsFiles:
                    if f.endswith(".dsc"):
                        oldDsc = '%s/%s' % (branchPkg.obsDir(), f)
                        break
                for f in filepaths:
                    if f.endswith(".dsc"):
                        newDsc = '%s/%s' % (pfx, f)
                        break

                # logger.debug("Running debdiff on %s and %s", oldDsc, newDsc)
                # comment = shell.get(("debdiff", oldDsc, newDsc),
                #                     okstatus=(0, 1))
                # FIXME: Debdiff needs to be implemented in OBS, as large
                # merge descriptions break clucene.
                comment = ''
                if report['result'] == MergeResult.SYNC_THEIRS:
                    comment += 'Sync to '
                elif report['result'] == MergeResult.MERGED:
                    comment += 'Merge with '
                comment += 'version %s from %s %s' % (
                    report['right_version'], report['right_distro'],
                    report['right_suite'])
                comment += "\n\nMerge report is available at %s" % (
                    '/'.join((config.get('MOM_URL'),
                              subdir(config.get('ROOT'), output_dir),
                              'REPORT.html')))

                # The newlines seem to cause create_submit_request to send
                # UTF-32 over the wire, which OBS promptly chokes on. Encode
                # the message to UTF-8 first.
                comment = comment.encode('utf-8')

                if not options.dry_run:
                    filesUpdated = False
                    for f in obsFiles + linkedFiles:
                        if f == "_link":
                            continue
                        try:
                            logger.debug('deleting %s/%s',
                                         branchPkg.obsDir(), f)
                            os.unlink('%s/%s' % (branchPkg.obsDir(), f))
                            filesUpdated = True
                        except OSError:
                            pass
                    for f in filepaths:
                        if f == "_link":
                            continue
                        logger.debug('copying %s/%s -> %s', pfx, f,
                                     branchPkg.obsDir())
                        shutil.copy2("%s/%s" % (pfx, f), branchPkg.obsDir())
                        filesUpdated = True

                    if filesUpdated:
                        logger.debug('Submitting request to merge %r from %r '
                                     'into %r', branchPkg, branch, target)
                        try:
                            branchPkg.commit(
                                'Automatic update by Merge-O-Matic')
                            obs_project = d.obsProject(target.dist,
                                                       target.component)
                            reqid = branchPkg.submitMergeRequest(obs_project,
                                                                 comment)
                            update_report(
                                report, output_dir, True,
                                committed_to=obs_project,
                                request_url=branchPkg.webMergeRequest(reqid))
                        except xml.etree.cElementTree.ParseError:
                            logger.exception("Failed to commit %s", branchPkg)
                            update_report(report, output_dir, False,
                                          "OBS API Error")
                        except urllib2.HTTPError as e:
                            logger.exception("Failed to commit %s: HTTP error "
                                             "%s at <%s>:",
                                             branchPkg, e.code, e.geturl())
                            update_report(report, output_dir, False,
                                          "HTTP error %s" % e.code)
                        except Exception as e:
                            logger.exception("Failed to commit %s", branchPkg)
                            # deliberately being a bit vague here in case the
                            # exact exception leaks internal info
                            update_report(report, output_dir, False,
                                          "%s" % e.__class__.__name__)
                else:
                    logger.info("Not committing, due to --dry-run")

            except urllib2.HTTPError as e:
                logger.exception('Failed to branch %s: HTTP error %s at <%s>:',
                                 package, e.code, e.geturl())
                update_report(report, output_dir, False,
                              "Failed to branch: HTTP error %s" % e.code)
            except Exception as e:
                logger.exception('Failed to branch %s:', package)
                # deliberately being a bit vague here in case the exact
                # exception leaks internal info
                update_report(report, output_dir, False,
                              "Failed to branch: %s" % e.__class__.__name__)

            create_tarball(package, version, output_dir, merged_dir))
        else:
            logger.debug("dpkg-source succeeded:\n%s\n", dpkg_source_output)

        if os.path.isfile("%s/%s" % (parent, dsc_filename)):
            logger.info("Created dpkg-source %s", dsc_filename)
            for name in os.listdir(parent):
                src = "%s/%s" % (parent, name)
                dest = "%s/%s" % (output_dir, name)
                if os.path.isfile(src) and not os.path.isfile(dest):
                    os.link(src, dest)
            return (MergeResult.MERGED, None, os.path.basename(dsc_filename))
        else:
            message = ("dpkg-source did not produce expected filename %s"
                       % tree.subdir(config.get('ROOT'), dsc_filename))
            logger.warning("%s", message)
            return (MergeResult.FAILED,
                    "unable to build merged source package (%s)" % message,
                    create_tarball(package, version, output_dir, merged_dir))
    finally:
        tree.remove(parent)


def create_patch(version, filename, merged_dir, basis, basis_dir):
    """Create the merged patch."""
    parent = tempfile.mkdtemp()
    try:
        tree.copytree(merged_dir, "%s/%s" % (parent, version))
        tree.copytree(basis_dir, "%s/%s" % (parent, basis.version))