def test_onlyDiffIsChangelog(self):
    package = testhelper.build_and_import_simple_package(
        'foo', '1.0-1', self.source1_repo)

    forked = copy(package)
    forked.changelog_entry(version='1.0-1mom1')
    forked.build()
    self.target_repo.importPackage(forked)

    package.changelog_entry(version='1.2-1')
    package.create_orig()
    package.build()
    self.source2_repo.importPackage(package)

    target = config.targets()[0]
    testhelper.update_all_distro_sources()
    testhelper.update_all_distro_source_pools()

    our_version = target.distro.findPackage(package.name,
                                            version='1.0-1mom1')[0]
    upstream = target.findSourcePackage(package.name, version='1.2-1')[0]
    base = target.findSourcePackage(package.name, version='1.0-1')[0]

    output_dir = result_dir(target.name, package.name)
    report = produce_merges.produce_merge(target, base, our_version,
                                          upstream, output_dir)
    self.assertEqual(report.result, MergeResult.SYNC_THEIRS)
    self.assertEqual(report.merged_version, upstream.version)
def main(options, args):
    logger.info('Updating source packages in target and source distros...')

    upstreamSources = []
    packages = []
    for target in config.targets(args):
        logger.info("Updating sources for %s", target)
        d = target.distro
        d.updateSources(target.dist)

        for upstreamList in target.getAllSourceLists():
            for source in upstreamList:
                if source not in upstreamSources:
                    logger.info("Updating upstream sources for %s", source)
                    source.distro.updateSources(source.dist)
                    # Remember this source so we only update it once
                    upstreamSources.append(source)

        for package in target.distro.packages(target.dist, target.component):
            if options.package and package.name not in options.package:
                continue
            try:
                handle_package(target, package, options.force,
                               options.use_upstream)
            except urllib2.HTTPError, e:
                logger.warning('Caught HTTPError while handling %s: %s',
                               package, e)
def main(options, args):
    logger.info('Extracting debian/patches from packages...')

    for target in config.targets(args):
        d = target.distro
        for source in d.newestSources(target.dist, target.component):
            if options.package and source['Package'] not in options.package:
                continue
            if source['Package'] in target.blacklist:
                logger.debug("%s is blacklisted, skipping", source['Package'])
                continue

            try:
                pkg = d.package(target.dist, target.component,
                                source['Package'])
            except model.error.PackageNotFound, e:
                logger.exception("FIXME: Spooky stuff going on with %s.", d)
                continue

            sources = pkg.poolDirectory().getSourceStanzas()
            version_sort(sources)

            for source in sources:
                try:
                    generate_dpatch(d.name, source, pkg.newestVersion())
                except model.error.PackageNotFound:
                    logger.exception("Could not find %s/%s for unpacking. "
                                     "How odd.", pkg, source['Version'])
def test_mergeNewFile(self):
    package = testhelper.build_and_import_simple_package(
        'foo', '1.0-1', self.source1_repo)

    forked = copy(package)
    forked.changelog_entry(version='1.0-1mom1')
    open(forked.pkg_path + '/debian/new.file', 'w').write('hello')
    forked.build()
    self.target_repo.importPackage(forked)

    package.changelog_entry(version='1.2-1')
    package.create_orig()
    package.build()
    self.source2_repo.importPackage(package)

    target = config.targets()[0]
    testhelper.update_all_distro_sources()
    testhelper.update_all_distro_source_pools()

    our_version = target.distro.findPackage(package.name,
                                            version='1.0-1mom1')[0]
    upstream = target.findSourcePackage(package.name, version='1.2-1')[0]
    base = target.findSourcePackage(package.name, version='1.0-1')[0]

    output_dir = result_dir(target.name, package.name)
    report = produce_merges.produce_merge(target, base, our_version,
                                          upstream, output_dir)
    self.assertEqual(report.result, MergeResult.MERGED)
    self.assertTrue(report.merged_version > upstream.version)
def test_simpleSync(self):
    foo = th.build_and_import_simple_package('foo', '1.0', self.target_repo)

    foo.changelog_entry('2.0')
    foo.build()
    self.source1_repo.importPackage(foo)

    target = config.targets()[0]
    th.update_all_distro_sources()
    th.update_all_distro_source_pools()

    our_version = target.distro.findPackage(foo.name, version='1.0')[0]
    upstream = target.findSourcePackage(foo.name, '2.0')[0]

    output_dir = result_dir(target.name, foo.name)
    report = produce_merge(target, our_version, our_version, upstream,
                           output_dir)
    self.assertEqual(report.result, MergeResult.SYNC_THEIRS)
    self.assertEqual(report.merged_version, upstream.version)

    self.assertEqual(len(report.merged_files), 2)
    tarfiles = [x for x in report.merged_files
                if x.startswith('foo_2.0.tar.')]
    self.assertEqual(len(tarfiles), 1)
    self.assertIn('foo_2.0.dsc', report.merged_files)
def update_all_distro_sources():
    for target in config.targets():
        target.distro.updateSources(target.dist)

        for upstreamList in target.getAllSourceLists():
            for source in upstreamList:
                source.distro.updateSources(source.dist)
def test_multipleOrig(self):
    package = testhelper.TestPackage('foo', '2.0-1')
    os.makedirs(package.pkg_path + '/mydir')
    open(package.pkg_path + '/mydir/mainfile', 'w').write('hello')
    package.create_orig()
    package.create_orig(subdir='mydir')
    package.build()
    self.source1_repo.importPackage(package)

    forked = copy(package)
    forked.changelog_entry(version='2.0-1mom1')
    open(forked.pkg_path + '/debian/new.file', 'w').write('hello')
    forked.build()
    self.target_repo.importPackage(forked)

    package.changelog_entry(version='3.0-1')
    open(package.pkg_path + '/debian/new.file2', 'w').write('another')
    package.create_orig()
    package.create_orig(subdir='mydir')
    package.build()
    self.source2_repo.importPackage(package)

    target = config.targets()[0]
    testhelper.update_all_distro_sources()
    testhelper.update_all_distro_source_pools()

    our_version = target.distro.findPackage(package.name,
                                            version='2.0-1mom1')[0]
    upstream = target.findSourcePackage(package.name, version='3.0-1')[0]
    base = target.findSourcePackage(package.name, version='2.0-1')[0]

    output_dir = result_dir(target.name, package.name)
    report = produce_merges.produce_merge(target, base, our_version,
                                          upstream, output_dir)
    self.assertEqual(report.result, MergeResult.MERGED)
def update_all_distro_source_pools():
    for target in config.targets():
        target.distro.downloadPackage(target.dist, target.component)

        for upstreamList in target.getAllSourceLists():
            for source in upstreamList:
                for component in source.distro.components():
                    source.distro.downloadPackage(source.dist, component)
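# The two helpers above are the usual first step of the merge tests: refresh
# every distro's Sources index, then pull the pool contents, so that
# findPackage()/findSourcePackage() can see versions that were just imported.
# A minimal sketch of that flow follows. It is illustrative only: the test
# name is hypothetical, and it assumes the same fixtures and module imports
# the surrounding tests already use (testhelper, config, produce_merges,
# result_dir, MergeResult).
def test_helperFlow(self):
    # Build a package in the target repo, then a newer one upstream.
    foo = testhelper.build_and_import_simple_package(
        'foo', '1.0', self.target_repo)
    foo.changelog_entry('2.0')
    foo.build()
    self.source1_repo.importPackage(foo)

    # Order matters: update the Sources indexes first, then download the
    # pool contents, and only then look the packages up.
    testhelper.update_all_distro_sources()
    testhelper.update_all_distro_source_pools()

    target = config.targets()[0]
    ours = target.distro.findPackage(foo.name, version='1.0')[0]
    theirs = target.findSourcePackage(foo.name, version='2.0')[0]

    report = produce_merges.produce_merge(
        target, ours, ours, theirs, result_dir(target.name, foo.name))
    self.assertEqual(report.result, MergeResult.SYNC_THEIRS)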
def main(options, args):
    # For latest version of each package in the destination distribution,
    # locate the latest in the source distribution; calculate the base from
    # the destination
    if options.package:
        logger.info("Skipping stats since -p was specified.")
        return

    logger.info('Collecting stats...')

    for target in config.targets(args):
        stats = {}
        stats["total"] = 0
        stats["local"] = 0
        stats["unmodified"] = 0
        stats["needs-sync"] = 0
        stats["needs-merge"] = 0
        stats["repackaged"] = 0
        stats["modified"] = 0

        for pkg in target.distro.packages(target.dist, target.component):
            update_info = UpdateInfo(pkg)
            upstream = update_info.upstream_version
            base = update_info.base_version
            our_version = pkg.newestVersion()

            if our_version.version != update_info.version:
                logger.debug("Skip %s, no UpdateInfo", pkg.name)
                continue

            stats['total'] += 1
            logger.debug("%s: %s, upstream: %s", target.distro, our_version,
                         upstream)

            if upstream is None:
                logger.debug("%s: locally packaged", pkg)
                stats["local"] += 1
                continue

            if our_version.version == upstream:
                logger.debug("%s: unmodified", pkg)
                stats["unmodified"] += 1
            elif base > upstream:
                logger.debug("%s: locally repackaged", pkg)
                stats["repackaged"] += 1
            elif our_version.version == base:
                logger.debug("%s: needs sync", pkg)
                stats["needs-sync"] += 1
            elif our_version.version < upstream:
                logger.debug("%s: needs merge", pkg)
                stats["needs-merge"] += 1
            elif "-0co" in str(our_version.version):
                logger.debug("%s: locally repackaged", pkg)
                stats["repackaged"] += 1
            else:
                logger.debug("%s: modified", pkg)
                stats["modified"] += 1

        write_stats(target.name, stats)
def test_noUpstream(self):
    th.build_and_import_simple_package('foo', '1.0', self.target_repo)
    th.update_all_distro_sources()
    target = config.targets()[0]
    pkg_version = target.distro.findPackage('foo', version='1.0')[0]
    upstream = update_sources.find_upstream(target, pkg_version)
    self.assertIsNone(upstream)
def main(options, args):
    logger.info('Trying to download missing base versions for 3-way merge...')

    for target in config.targets(args):
        distro = target.distro
        for pkg in distro.packages(target.dist, target.component):
            if options.package is not None and pkg.name not in options.package:
                continue

            base = pkg.newestVersion().version.base()

            # See if the base version is already in the target distro
            try:
                target.distro.findPackage(pkg.name, searchDist=target.dist,
                                          version=base)
                # already have the base
                continue
            except model.error.PackageNotFound:
                pass

            # Now look for the base version in the source distros
            if package_version_present_in_sources(target, pkg, base):
                continue

            logger.debug("Attempting to fetch missing base %s for %s",
                         base, pkg.newestVersion())

            # For lack of a better place, we save the missing base version
            # under the very last source distro in the list.
            source_list = target.getSourceLists(pkg.name)[-1]
            source = source_list[-1]
            component = source.distro.components()[-1]
            logger.debug("Saving it into last source %s component %s",
                         source.distro, component)
            poolDir = PoolDirectory(source.distro, component, pkg.name)

            tmpdir = mkdtemp()
            try:
                rc = subprocess.call(['debsnap', '-d', tmpdir, '-f', '-v',
                                      pkg.name, str(base)])
                if rc != 0:
                    logger.warning("debsnap failed with code %d", rc)
                    continue

                if not os.path.exists(poolDir.path):
                    os.makedirs(poolDir.path)

                updated = False
                for filename in os.listdir(tmpdir):
                    if not os.path.exists(os.path.join(poolDir.path,
                                                       filename)):
                        shutil.move(os.path.join(tmpdir, filename),
                                    poolDir.path)
                        updated = True
            finally:
                shutil.rmtree(tmpdir)

            if updated:
                poolDir.updateSources()
def test_ourVersionNewer(self):
    th.build_and_import_simple_package('foo', '2.0', self.target_repo)
    th.build_and_import_simple_package('foo', '1.0', self.source1_repo)
    target = config.targets()[0]
    th.update_all_distro_sources()
    pv = target.distro.findPackage('foo', version='2.0')[0]
    update_sources.handle_package(target, pv.package)
    update_info = UpdateInfo(pv.package)
    self.assertEqual(update_info.upstream_version, '1.0')
def test_noBase(self):
    th.build_and_import_simple_package('foo', '1.0-1mom1', self.target_repo)
    target = config.targets()[0]
    th.update_all_distro_sources()
    pv = target.distro.findPackage('foo', version='1.0-1mom1')[0]
    update_sources.handle_package(target, pv.package)
    update_info = UpdateInfo(pv.package)
    self.assertIsNone(update_info.base_version)
def main(options, args):
    logger.info('Producing merges...')

    excludes = []
    if options.exclude is not None:
        for filename in options.exclude:
            logger.info('excluding packages from %s', filename)
            excludes.extend(read_package_list(filename))

    includes = []
    if options.include is not None:
        for filename in options.include:
            logger.info('including packages from %s', filename)
            includes.extend(read_package_list(filename))

    # For each package in the destination distribution, locate the latest in
    # the source distribution; calculate the base from the destination and
    # produce a merge combining both sets of changes
    for target in config.targets(args):
        logger.info('considering target %s', target)
        our_dist = target.dist
        our_component = target.component
        d = target.distro
        for pkg in d.packages(target.dist, target.component):
            if options.package is not None and pkg.name not in options.package:
                continue

            if len(includes) and pkg.name not in includes:
                logger.info('skipping package %s: not in include list',
                            pkg.name)
                continue

            if len(excludes) and pkg.name in excludes:
                logger.info('skipping package %s: in exclude list', pkg.name)
                continue

            if pkg.name in target.blacklist:
                logger.info("%s is blacklisted, skipping", pkg.name)
                continue

            logger.info('considering package %s', pkg.name)

            if options.version:
                our_version = PackageVersion(pkg, Version(options.version))
                logger.debug('our version: %s (from command line)',
                             our_version)
            else:
                our_version = pkg.newestVersion()
                logger.debug('our version: %s', our_version)

            output_dir = result_dir(target.name, pkg.name)

            try:
                report = handle_package(options, output_dir, target, pkg,
                                        our_version)
                if report is not None:
                    report.write_report(output_dir)
            except Exception:
                logging.exception('Failed handling merge for %s', pkg)
def test_targetIsNewer(self):
    th.build_and_import_simple_package('foo', '4.0', self.target_repo)
    th.build_and_import_simple_package('foo', '3.0', self.source_repo)
    th.update_all_distro_sources()
    target = config.targets()[0]
    pkg_version = target.distro.findPackage('foo', version='4.0')[0]
    upstream = update_sources.find_upstream(target, pkg_version)
    self.assertEqual(upstream.package.name, 'foo')
    self.assertEqual(upstream.version, '3.0')
    self.assertTrue(pkg_version > upstream)
def main(options, args):
    if len(args):
        distros = [Distro.get(a) for a in args]
    else:
        distros = Distro.all()

    # Run through our default distribution and use that for the base
    # package names. Expire from all distributions.
    for target in config.targets(args):
        d = target.distro

        for pkg in d.packages(target.dist, target.component):
            if options.package and pkg.name not in options.package:
                continue

            try:
                output_dir = result_dir(target.name, pkg.name)
                report = read_report(output_dir)
                base = report["base_version"]
            except ValueError:
                logger.exception('Skipping package %s: unable to read merge '
                                 'report', pkg.name)
                continue

            if report['result'] not in (MergeResult.SYNC_THEIRS,
                                        MergeResult.KEEP_OURS,
                                        MergeResult.MERGED,
                                        MergeResult.CONFLICTS):
                logger.debug('Skipping expiry for package %s: result=%s',
                             pkg.name, report['result'])
                continue

            if base is None:
                # If there's no suitable base for merges, we don't
                # automatically expire any versions.
                logger.debug('Skipping expiry for package %s: '
                             'no base version found (result=%s)',
                             pkg.name, report['result'])
                continue

            base = Version(base)
            logger.debug("%s base is %s", pkg.name, base)

            for distro in distros:
                if distro.shouldExpire():
                    for component in distro.components():
                        try:
                            distro_pkg = distro.package(target.dist,
                                                        component, pkg.name)
                            expire_pool_sources(distro_pkg, base)
                        except PackageNotFound:
                            continue
def test_unmodifiedUpgrade(self):
    testhelper.build_and_import_simple_package('foo', '1.0',
                                               self.target_repo)
    testhelper.build_and_import_simple_package('foo', '2.0',
                                               self.source1_repo)
    target = config.targets()[0]
    testhelper.update_all_distro_sources()
    pv = target.distro.findPackage('foo', version='1.0')[0]
    update_sources.handle_package(target, pv.package)
    update_info = UpdateInfo(pv.package)
    self.assertEqual(update_info.upstream_version, '2.0')
    self.assertEqual(update_info.base_version, '1.0')
def test_poolDirectory(self):
    foo = testhelper.build_and_import_simple_package('foo', '1.0',
                                                     self.target_repo)
    testhelper.update_all_distro_sources()
    target = config.targets()[0]

    pkgs = target.distro.findPackage(foo.name, searchDist=target.dist)
    self.assertEqual(len(pkgs), 1)
    pv = pkgs[0]
    pv.download()
    self.assertTrue(os.path.isdir(pv.package.poolPath))

    versions = pv.package.getPoolVersions()
    self.assertEqual(len(versions), 1)
    self.assertEqual(versions[0].version, foo.version)
def test_distroSources(self):
    testhelper.build_and_import_simple_package('foo', '1.0',
                                               self.source_repo)

    target = config.targets()[0]
    self.assertEqual(target.name, 'testtarget')

    sourceLists = target.getAllSourceLists()
    upstreamList = sourceLists[0]
    source = upstreamList[0]
    component = source.distro.components()[0]

    source.distro.updateSources(source.dist)
    sources = source.distro.getSources(source.dist, component)
    self.assertEqual(len(sources), 1)
    self.assertEqual(sources[0].get('Package'), 'foo')
    self.assertEqual(sources[0].get('Version'), '1.0')
def test_upstreamCorrectUnstableSource(self):
    th.build_and_import_simple_package('foo', '2.0', self.target_repo)
    th.build_and_import_simple_package('foo', '2.2',
                                       self.stable_source_repo)
    th.build_and_import_simple_package('foo', '2.4',
                                       self.unstable1_source_repo)
    th.update_all_distro_sources()
    target = config.targets()[0]
    pkg_version = target.distro.findPackage('foo', version='2.0')[0]
    upstream = update_sources.find_upstream(
        target, pkg_version, specific_upstream='unstable0distro_source')
    self.assertEqual(upstream.package.name, 'foo')
    self.assertEqual(upstream.version, '2.4')
def test_upstreamFromStable(self):
    th.build_and_import_simple_package('foo', '2.0', self.target_repo)
    th.build_and_import_simple_package('foo', '2.1',
                                       self.stable_source_repo)
    th.build_and_import_simple_package('foo', '2.4',
                                       self.unstable1_source_repo)
    th.update_all_distro_sources()
    target = config.targets()[0]
    pkg_version = target.distro.findPackage('foo', version='2.0')[0]
    upstream = update_sources.find_upstream(target, pkg_version)
    self.assertEqual(upstream.package.name, 'foo')
    self.assertEqual(upstream.version, '2.1')
    self.assertTrue(upstream > pkg_version)
def test_findPackage(self):
    foo = testhelper.build_and_import_simple_package('foo', '1.0',
                                                     self.target_repo)
    testhelper.update_all_distro_sources()
    target = config.targets()[0]

    pkgs = target.distro.findPackage(foo.name, searchDist=target.dist)
    self.assertEqual(len(pkgs), 1)

    pkgs = target.distro.findPackage(foo.name, searchDist=target.dist,
                                     version=foo.version)
    self.assertEqual(len(pkgs), 1)

    with self.assertRaises(PackageNotFound):
        target.distro.findPackage(foo.name, searchDist=target.dist,
                                  version="9")
def test_baseFromChangelog(self):
    foo = th.build_and_import_simple_package('foo', '3.0-1',
                                             self.source1_repo)

    foo.changelog_entry('4.0-1mom1')
    foo.create_orig()
    foo.build()
    self.target_repo.importPackage(foo)

    target = config.targets()[0]
    th.update_all_distro_sources()
    pv = target.distro.findPackage('foo', version='4.0-1mom1')[0]
    update_sources.handle_package(target, pv.package)
    update_info = UpdateInfo(pv.package)
    self.assertEqual(update_info.base_version, '3.0-1')
def main(options, args):
    logger.debug('Sending email if actions are needed...')

    for target in config.targets(args):
        logger.debug('%r', target)
        d = target.distro

        if not isinstance(d, OBSDistro):
            logger.debug('Skipping %r distro %r: not an OBSDistro',
                         target, d)
            continue

        for pkg in d.packages(target.dist, target.component):
            if options.package and pkg.name not in options.package:
                logger.debug('Skipping package %s: not selected', pkg.name)
                continue

            if pkg.name in target.blacklist:
                logger.debug('Skipping package %s: blacklisted', pkg.name)
                continue

            try:
                output_dir = result_dir(target.name, pkg.name)
                report = read_report(output_dir)
            except ValueError:
                logger.debug('Skipping package %s: unable to read report',
                             pkg.name)
                continue

            if report.result == MergeResult.KEEP_OURS:
                logger.debug('Skipping package %s: result=%s',
                             pkg.name, report.result)
                continue

            if (target.committable and
                    report.result in (MergeResult.MERGED,
                                      MergeResult.SYNC_THEIRS)):
                logger.debug('Skipping package %s: result=%s, would already '
                             'have been committed', pkg.name, report.result)
                continue

            try:
                notify_action_needed(target, output_dir, report)
            except Exception:
                logger.exception('Error processing %s:', pkg.name)
def test_simpleSync(self):
    foo = testhelper.build_and_import_simple_package(
        'foo', '1.0', self.target_repo)

    foo.changelog_entry('2.0')
    foo.build()
    self.source1_repo.importPackage(foo)

    target = config.targets()[0]
    testhelper.update_all_distro_sources()
    testhelper.update_all_distro_source_pools()

    our_version = target.distro.findPackage(foo.name, version='1.0')[0]
    upstream = target.findSourcePackage(foo.name, '2.0')[0]

    output_dir = result_dir(target.name, foo.name)
    report = produce_merges.produce_merge(target, our_version, our_version,
                                          upstream, output_dir)
    self.assertEqual(report.result, MergeResult.SYNC_THEIRS)
    self.assertEqual(report.merged_version, upstream.version)
def main(options, args):
    logger.info('Extracting debian/patches from packages...')

    for target in config.targets(args):
        d = target.distro
        for pkg in d.packages(target.dist, target.component):
            if options.package and pkg.name not in options.package:
                continue
            if pkg.name in target.blacklist:
                logger.debug("%s is blacklisted, skipping", pkg.name)
                continue

            pvs = pkg.getPoolVersions()
            pvs.sort()

            for pv in pvs:
                try:
                    generate_dpatch(d.name, pv)
                except model.error.PackageNotFound:
                    logger.exception("Could not find %s/%s for unpacking.",
                                     pkg, pv.version)
def main(options, args):
    logger.info('Comparing current and previous versions in source distros...')

    # For latest version of each package in the given distributions, iterate
    # the pool in order and generate a diff from the previous version and a
    # changes file
    for target in config.targets(args):
        d = target.distro
        for source in d.newestSources(target.dist, target.component):
            if options.package and source['Package'] not in options.package:
                continue
            if source['Package'] in target.blacklist:
                logger.debug("%s is blacklisted, skipping",
                             source['Package'])
                continue

            try:
                pkg = d.package(target.dist, target.component,
                                source['Package'])
            except model.error.PackageNotFound, e:
                logger.exception("Spooky stuff going on with %s.", d)
                continue

            sources = pkg.poolDirectory().getSourceStanzas()
            version_sort(sources)

            last = None
            try:
                for version in pkg.poolDirectory().getVersions():
                    pv = PackageVersion(pkg, version)
                    try:
                        generate_diff(last, pv)
                    except model.error.PackageNotFound:
                        logger.exception("Could not find a package to diff "
                                         "against.")
                    except ValueError:
                        logger.exception("Could not find a .dsc file, "
                                         "perhaps it moved components?")
                    finally:
                        if last is not None:
                            cleanup_source(last.getSources())

                    last = pv
            finally:
                if last is not None:
                    cleanup_source(last.getSources())
def main(options, args):
    logger.info('Comparing current and previous versions in source distros...')

    # For latest version of each package in the given distributions, iterate
    # the pool in order and generate a diff from the previous version and a
    # changes file
    for target in config.targets(args):
        d = target.distro
        for pkg in d.packages(target.dist, target.component):
            if options.package and pkg.name not in options.package:
                continue
            if pkg.name in target.blacklist:
                logger.debug("%s is blacklisted, skipping", pkg.name)
                continue

            pvs = pkg.getPoolVersions()
            pvs.sort()

            last = None
            try:
                for pv in pvs:
                    try:
                        generate_diff(last, pv)
                    except model.error.PackageNotFound:
                        logger.exception("Could not find a package to diff "
                                         "against.")
                    except ValueError:
                        logger.exception("Could not find a .dsc file, "
                                         "perhaps it moved components?")
                    finally:
                        # The previous version has been diffed against, so
                        # its unpacked source is no longer needed.
                        if last is not None:
                            cleanup_source(last)

                    last = pv
            finally:
                if last is not None:
                    cleanup_source(last)
def main(options, args):
    # For latest version of each package in the destination distribution,
    # locate the latest in the source distribution; calculate the base from
    # the destination
    if options.package:
        logger.info("Skipping stats since -p was specified.")
        return

    logger.info('Collecting stats...')

    for target in config.targets(args):
        stats = {}
        stats["total"] = 0
        stats["local"] = 0
        stats["unmodified"] = 0
        stats["needs-sync"] = 0
        stats["needs-merge"] = 0
        stats["repackaged"] = 0
        stats["modified"] = 0

        for pkg in target.distro.packages(target.dist, target.component):
            stats['total'] += 1

            upstream = None
            for srclist in target.getSourceLists(pkg.name,
                                                 include_unstable=False):
                for src in srclist:
                    try:
                        for possible in src.distro.findPackage(
                                pkg.name, searchDist=src.dist):
                            if upstream is None or possible > upstream:
                                upstream = possible
                    except model.error.PackageNotFound:
                        pass

            our_version = pkg.newestVersion()
            logger.debug("%s: %s, upstream: %s", target.distro, our_version,
                         upstream)

            if upstream is None:
                logger.debug("%s: locally packaged", pkg)
                stats["local"] += 1
                continue

            base = target.findNearestVersion(our_version)

            if our_version.version == upstream.version:
                logger.debug("%s: unmodified", pkg)
                stats["unmodified"] += 1
            elif base > upstream:
                logger.debug("%s: locally repackaged", pkg)
                stats["repackaged"] += 1
            elif our_version.version == base.version:
                logger.debug("%s: needs sync", pkg)
                stats["needs-sync"] += 1
            elif our_version.version < upstream.version:
                logger.debug("%s: needs merge", pkg)
                stats["needs-merge"] += 1
            elif "-0co" in str(our_version.version):
                logger.debug("%s: locally repackaged", pkg)
                stats["repackaged"] += 1
            else:
                logger.debug("%s: modified", pkg)
                stats["modified"] += 1

        write_stats(target.name, stats)
def do_table(status, merges, comments, our_distro, target, obsProject):
    """Output a table."""
    target_object = config.targets([target])[0]
    web_ui = target_object.distro.config('obs', 'web')
    if web_ui is None:
        # not really human-usable but it's the best we can do
        web_ui = target_object.distro.config('obs', 'url')

    default_src_distro = config.get('DISTRO_SOURCES')[config.get(
        'DISTRO_TARGETS')[target]["sources"][0]][0]["distro"]

    print >>status, "<table cellspacing=0>"
    print >>status, "<tr bgcolor=#d0d0d0>"
    print >>status, "<td rowspan=2><b>Package</b></td>"
    print >>status, "<td rowspan=2><b>Comment</b></td>"
    print >>status, "</tr>"
    print >>status, "<tr bgcolor=#d0d0d0>"
    print >>status, "<td><b>%s Version</b></td>" % our_distro.title()
    print >>status, "<td><b>%s Version</b></td>" % default_src_distro.title()
    print >>status, "<td><b>Base Version</b></td>"
    print >>status, "<td><b>Result</b></td>"
    print >>status, "</tr>"

    for uploaded, priority, package, source, \
            base_version, left_version, right_version, right_distro, \
            output_dir, report in merges:
        escaped_root = re.escape(config.get('ROOT'))
        print >>status, "<tr bgcolor=%s class=first>" % COLOURS[priority]

        if os.path.exists(output_dir + '/REPORT.html'):
            print >>status, "<td><tt><a href=\"%s/REPORT.html\">" \
                "%s</a></tt>" % (re.sub('^' + escaped_root, '../',
                                        output_dir, 1), package)
        else:
            print >>status, "<td><tt><a href=\"%s/REPORT\">" \
                "%s</a></tt>" % (re.sub('^' + escaped_root, '../',
                                        output_dir, 1), package)

        if os.path.exists(output_dir + '/REPORT.json'):
            print >>status, " <sup><a href=\"%s/REPORT.json\">" \
                "JSON</a></sup>" % (re.sub('^' + escaped_root, '../',
                                           output_dir, 1))

        print >>status, " <sup><a href=\"https://launchpad.net/ubuntu/" \
            "+source/%s\">LP</a></sup>" % package
        print >>status, " <sup><a href=\"http://packages.qa.debian.org/" \
            "%s\">PTS</a></sup>" % package
        print >>status, " <sup><a href=\"%s/package/show?package=%s" \
            "&project=%s\">OBS</a></sup></td>" % (web_ui, package, obsProject)
        print >>status, "<td rowspan=2>%s</td>" % (
            comments[package] if package in comments else "")
        print >>status, "</tr>"
        print >>status, "<tr bgcolor=%s>" % COLOURS[priority]
        print >>status, "<td><small>%s</small></td>" % source["Binary"]
        print >>status, "<td>%s</td>" % left_version
        print >>status, "<td>%s" % right_version
        if right_distro != default_src_distro:
            print >>status, "<br/>(%s)" % right_distro
        print >>status, "</td>"

        if base_version is None:
            print >>status, "<td style='text-align:center'><em>???</em></td>"
        else:
            print >>status, "<td>%s</td>" % base_version

        print >>status, "<td>%s</td>" % report.result
        print >>status, "</tr>"

    print >>status, "</table>"
def main(options, args):
    logger.debug('Committing merges...')

    for target in config.targets(args):
        d = target.distro
        if not isinstance(d, OBSDistro):
            logger.debug('Skipping %r distro %r: not an OBSDistro',
                         target, d)
            continue

        for package in d.packages(target.dist, target.component):
            if options.package and package.name not in options.package:
                logger.debug('Skipping package %s: not selected',
                             package.name)
                continue

            if package.name in target.blacklist:
                logger.debug('Skipping package %s: blacklisted',
                             package.name)
                continue

            try:
                output_dir = result_dir(target.name, package.name)
                report = read_report(output_dir)
            except ValueError:
                logger.debug('Skipping package %s: unable to read report',
                             package.name)
                continue

            if report['committed']:
                if options.force:
                    logger.info("Forcing commit of %s", package)
                else:
                    logger.debug("%s already committed, skipping!", package)
                    continue

            if report['result'] not in (MergeResult.MERGED,
                                        MergeResult.SYNC_THEIRS):
                logger.debug("%s has nothing to commit: result=%s",
                             package, report['result'])
                continue

            filepaths = report['merged_files']
            if filepaths == []:
                logger.warning("Empty merged file list in %s/REPORT"
                               % output_dir)
                continue

            if target.committable:
                # we can commit directly to the target distribution
                # FIXME: is this still a supported configuration? I wouldn't
                # want to commit automated merges without some sort of manual
                # check on the debdiff...
                logger.info("Committing changes to %s", package)
                if not options.dry_run:
                    try:
                        package.commit('Automatic update by Merge-O-Matic')
                    except urllib2.HTTPError as e:
                        logger.exception('Failed to commit %s: HTTP error '
                                         '%s at <%s>:',
                                         package, e.code, e.geturl())
                        update_report(report, output_dir, False,
                                      "HTTP error %s" % e.code)
                    except Exception as e:
                        logger.exception('Failed to commit %s:', package)
                        # deliberately rather vague, as below
                        update_report(report, output_dir, False,
                                      "%s" % e.__class__.__name__)
                    else:
                        update_report(report, output_dir, True,
                                      committed_to=d.obsProject(
                                          target.dist, target.component))
                continue

            # else we need to branch it and commit to the branch
            try:
                logger.debug("Branching %s", package)
                branchPkg = package.branch("home:%s:branches" % (d.obsUser))

                branch = branchPkg.distro
                branch.sync(target.dist, target.component, [branchPkg,])
                logger.info("Committing changes to %s, and submitting merge "
                            "request to %s", branchPkg, package)

                if report['result'] == MergeResult.SYNC_THEIRS:
                    srcDistro = Distro.get(report['right_distro'])
                    version = Version(report['right_version'])

                    logger.debug('Copying updated upstream version %s from '
                                 '%r into %r', version, srcDistro, target)
                    for upstream in target.getSourceLists(package.name):
                        for src in upstream:
                            srcDistro = src.distro
                            try:
                                pkg = srcDistro.findPackage(
                                    package.name, searchDist=src.dist,
                                    version=version)[0]
                                pfx = pkg.poolPath
                                break
                            except model.error.PackageNotFound:
                                pass
                else:
                    logger.debug('Copying merged version from %r into %r',
                                 branch, target)
                    pfx = result_dir(target.name, package.name)

                # this might raise an error
                obsFiles = branchPkg.getOBSFiles()

                # Get the linked target files since the checkout is expanded
                # and may contain them
                linkedFiles = package.getOBSFiles()

                for f in obsFiles:
                    if f.endswith(".dsc"):
                        oldDsc = '%s/%s' % (branchPkg.obsDir(), f)
                        break
                for f in filepaths:
                    if f.endswith(".dsc"):
                        newDsc = '%s/%s' % (pfx, f)
                        break

                #logger.debug("Running debdiff on %s and %s", oldDsc, newDsc)
                #comment = shell.get(("debdiff", oldDsc, newDsc), okstatus=(0,1))
                # FIXME: Debdiff needs to be implemented in OBS, as large
                # merge descriptions break clucene.
                comment = ''
                if report['result'] == MergeResult.SYNC_THEIRS:
                    comment += 'Sync to '
                elif report['result'] == MergeResult.MERGED:
                    comment += 'Merge with '
                comment += 'version %s from %s %s' % (
                    report['right_version'], report['right_distro'],
                    report['right_suite'])
                comment += "\n\nMerge report is available at %s" % (
                    '/'.join((config.get('MOM_URL'),
                              subdir(config.get('ROOT'), output_dir),
                              'REPORT.html')))

                # The newlines seem to cause create_submit_request to send
                # UTF-32 over the wire, which OBS promptly chokes on. Encode
                # the message to UTF-8 first.
                comment = comment.encode('utf-8')

                if not options.dry_run:
                    filesUpdated = False
                    for f in obsFiles + linkedFiles:
                        if f == "_link":
                            continue
                        try:
                            logger.debug('deleting %s/%s',
                                         branchPkg.obsDir(), f)
                            os.unlink('%s/%s' % (branchPkg.obsDir(), f))
                            filesUpdated = True
                        except OSError:
                            pass
                    for f in filepaths:
                        if f == "_link":
                            continue
                        logger.debug('copying %s/%s -> %s', pfx, f,
                                     branchPkg.obsDir())
                        shutil.copy2("%s/%s" % (pfx, f), branchPkg.obsDir())
                        filesUpdated = True

                    if filesUpdated:
                        logger.debug('Submitting request to merge %r from '
                                     '%r into %r', branchPkg, branch, target)
                        try:
                            branchPkg.commit(
                                'Automatic update by Merge-O-Matic')
                            obs_project = d.obsProject(target.dist,
                                                       target.component)
                            reqid = branchPkg.submitMergeRequest(obs_project,
                                                                 comment)
                            update_report(
                                report, output_dir, True,
                                committed_to=obs_project,
                                request_url=branchPkg.webMergeRequest(reqid))
                        except xml.etree.cElementTree.ParseError:
                            logger.exception("Failed to commit %s", branchPkg)
                            update_report(report, output_dir, False,
                                          "OBS API Error")
                        except urllib2.HTTPError as e:
                            logger.exception("Failed to commit %s: HTTP "
                                             "error %s at <%s>:",
                                             branchPkg, e.code, e.geturl())
                            update_report(report, output_dir, False,
                                          "HTTP error %s" % e.code)
                        except Exception as e:
                            logger.exception("Failed to commit %s", branchPkg)
                            # deliberately being a bit vague here in case the
                            # exact exception leaks internal info
                            update_report(report, output_dir, False,
                                          "%s" % e.__class__.__name__)
                else:
                    logger.info("Not committing, due to --dry-run")
            except urllib2.HTTPError as e:
                logger.exception('Failed to branch %s: HTTP error %s at '
                                 '<%s>:', package, e.code, e.geturl())
                update_report(report, output_dir, False,
                              "Failed to branch: HTTP error %s" % e.code)
            except Exception as e:
                logger.exception('Failed to branch %s:', package)
                # deliberately being a bit vague here in case the exact
                # exception leaks internal info
                update_report(report, output_dir, False,
                              "Failed to branch: %s" % e.__class__.__name__)
def main(options, args):
    logger.info('Updating source packages in target and source distros...')

    upstreamSources = []
    packages = []
    for target in config.targets(args):
        logger.info("Updating sources for %s", target)
        d = target.distro
        d.updateSources(target.dist, target.component)

        pairs = []
        for stanza in d.getSources(target.dist, target.component):
            pairs.append((stanza.get('Package'),
                          Version(stanza.get('Version'))))
        logger.debug('Packages in %s:', target)
        for pair in sorted(pairs):
            logger.debug('- %s/%s', pair[0], pair[1])

        for upstreamList in target.getAllSourceLists():
            for source in upstreamList:
                if source not in upstreamSources:
                    for component in source.distro.components():
                        logger.info("Updating upstream sources for %s/%s",
                                    source, component)
                        source.distro.updateSources(source.dist, component)

                        pairs = []
                        for stanza in source.distro.getSources(source.dist,
                                                               component):
                            pairs.append((stanza.get('Package'),
                                          Version(stanza.get('Version'))))
                        logger.debug('Packages in %s/%s:', source, component)
                        for pair in sorted(pairs):
                            logger.debug('- %s/%s', pair[0], pair[1])

                    upstreamSources.append(source)

        package_names = set()

        for package in target.distro.packages(target.dist, target.component):
            package_names.add(package.name)

            if options.package and package.name not in options.package:
                continue

            packages.append(package)

            for upstreamList in target.getSourceLists(package.name):
                for source in upstreamList:
                    try:
                        upstreamPkgs = source.distro.findPackage(
                            package.name, searchDist=source.dist)
                        for upstreamPkg in upstreamPkgs:
                            if upstreamPkg.package not in packages:
                                packages.append(upstreamPkg.package)
                    except model.error.PackageNotFound:
                        logger.debug("%s not found in %s, skipping.",
                                     package, source)
                        pass

        if isinstance(d, OBSDistro):
            try:
                project = d.obsProject(target.dist, target.component)
                logger.debug('Checking packages in %s', project)
                obs_packages = set(osc.core.meta_get_packagelist(
                    d.config('obs', 'url'), project))

                for p in package_names:
                    if p not in obs_packages:
                        logger.warning('Debian source package "%s" does not '
                                       'seem to correspond to an OBS '
                                       'package. Please rename the OBS '
                                       'package to match "Source" in the '
                                       '.dsc file', p)
            except:
                logger.warning('Unable to check packages in %s', project,
                               exc_info=1)

    logger.info("%d packages considered for updating", len(packages))

    for pkg in packages:
        # FIXME: if we track two suites, say raring and saucy, we could
        # have both ubuntu/raring/main/hello and ubuntu/saucy/main/hello
        # in @packages, resulting in us updating the ubuntu/*/main/hello
        # pool directory twice. For the moment, we just live with it.
        pd = pkg.poolDirectory()
        logger.info("Updating %r from suite %s", pd, pkg.dist)
        if pkg.updatePool():
            pd.updateSources()

        logger.info("Available versions in %r:", pd)
        for v in sorted(pd.getVersions()):
            logger.info('- %s', v)
def do_table(status, merges, comments, our_distro, target, obsProject):
    """Output a table."""
    target_object = config.targets([target])[0]
    web_ui = target_object.distro.config('obs', 'web')
    if web_ui is None:
        # not really human-usable but it's the best we can do
        web_ui = target_object.distro.config('obs', 'url')

    default_src_distro = \
        DISTRO_SOURCES[DISTRO_TARGETS[target]["sources"][0]][0]["distro"]

    print >>status, "<table cellspacing=0>"
    print >>status, "<tr bgcolor=#d0d0d0>"
    print >>status, "<td rowspan=2><b>Package</b></td>"
    print >>status, "<td rowspan=2><b>Comment</b></td>"
    print >>status, "</tr>"
    print >>status, "<tr bgcolor=#d0d0d0>"
    print >>status, "<td><b>%s Version</b></td>" % our_distro.title()
    print >>status, "<td><b>%s Version</b></td>" % default_src_distro.title()
    print >>status, "<td><b>Base Version</b></td>"
    print >>status, "<td><b>Result</b></td>"
    print >>status, "</tr>"

    for uploaded, priority, package, source, \
            base_version, left_version, right_version, right_distro, \
            output_dir, report in merges:
        print >>status, "<tr bgcolor=%s class=first>" % COLOURS[priority]

        if os.path.exists(output_dir + '/REPORT.html'):
            print >>status, "<td><tt><a href=\"%s/REPORT.html\">" \
                "%s</a></tt>" % (re.sub('^' + re.escape(ROOT), '../',
                                        output_dir, 1), package)
        else:
            print >>status, "<td><tt><a href=\"%s/REPORT\">" \
                "%s</a></tt>" % (re.sub('^' + re.escape(ROOT), '../',
                                        output_dir, 1), package)

        if os.path.exists(output_dir + '/REPORT.json'):
            print >>status, " <sup><a href=\"%s/REPORT.json\">" \
                "JSON</a></sup>" % (re.sub('^' + re.escape(ROOT), '../',
                                           output_dir, 1))

        print >>status, " <sup><a href=\"https://launchpad.net/ubuntu/" \
            "+source/%s\">LP</a></sup>" % package
        print >>status, " <sup><a href=\"http://packages.qa.debian.org/" \
            "%s\">PTS</a></sup>" % package
        print >>status, " <sup><a href=\"%s/package/show?package=%s" \
            "&project=%s\">OBS</a></sup></td>" % (web_ui, package, obsProject)
        print >>status, "<td rowspan=2>%s</td>" % (
            comments[package] if package in comments else "")
        print >>status, "</tr>"
        print >>status, "<tr bgcolor=%s>" % COLOURS[priority]
        print >>status, "<td><small>%s</small></td>" % source["Binary"]
        print >>status, "<td>%s</td>" % left_version
        print >>status, "<td>%s" % right_version
        if right_distro != default_src_distro:
            print >>status, "<br/>(%s)" % right_distro
        print >>status, "</td>"

        if base_version is None:
            print >>status, "<td style='text-align:center'><em>???</em></td>"
        else:
            print >>status, "<td>%s</td>" % base_version

        print >>status, "<td>%s</td>" % report.result
        print >>status, "</tr>"

    print >>status, "</table>"
def main(options, args):
    logger.debug('Committing merges...')

    for target in config.targets(args):
        d = target.distro
        if not isinstance(d, OBSDistro):
            logger.debug('Skipping %r distro %r: not an OBSDistro',
                         target, d)
            continue

        for source in d.newestSources(target.dist, target.component):
            if options.package and source['Package'] not in options.package:
                logger.debug('Skipping package %s: not selected',
                             source['Package'])
                continue

            if source['Package'] in target.blacklist:
                logger.debug('Skipping package %s: blacklisted',
                             source['Package'])
                continue

            try:
                output_dir = result_dir(target.name, source['Package'])
                report = read_report(output_dir)
            except ValueError:
                logger.debug('Skipping package %s: unable to read report',
                             source['Package'])
                continue

            package = d.package(target.dist, target.component,
                                report.source_package)

            if report['committed']:
                if options.force:
                    logger.info("Forcing commit of %s", package)
                else:
                    logger.debug("%s already committed, skipping!", package)
                    continue

            if report['result'] not in (MergeResult.MERGED,
                                        MergeResult.SYNC_THEIRS):
                logger.debug("%s has nothing to commit: result=%s",
                             package, report['result'])
                continue

            filepaths = report['merged_files']
            if filepaths == []:
                logger.warning("Empty merged file list in %s/REPORT"
                               % output_dir)
                continue

            if target.committable:
                # we can commit directly to the target distribution
                # FIXME: is this still a supported configuration? I wouldn't
                # want to commit automated merges without some sort of manual
                # check on the debdiff...
                logger.info("Committing changes to %s", package)
                if not options.dry_run:
                    try:
                        package.commit('Automatic update by Merge-O-Matic')
                    except urllib2.HTTPError as e:
                        logger.exception('Failed to commit %s: HTTP error '
                                         '%s at <%s>:',
                                         package, e.code, e.geturl())
                        update_report(report, output_dir, False,
                                      "HTTP error %s" % e.code)
                    except Exception as e:
                        logger.exception('Failed to commit %s:', package)
                        # deliberately rather vague, as below
                        update_report(report, output_dir, False,
                                      "%s" % e.__class__.__name__)
                    else:
                        update_report(report, output_dir, True,
                                      committed_to=d.obsProject(
                                          target.dist, target.component))
                continue

            # else we need to branch it and commit to the branch
            try:
                logger.debug("Branching %s", package)
                branchPkg = package.branch("home:%s:branches" % (d.obsUser))

                branch = branchPkg.distro
                branch.sync(target.dist, target.component, [branchPkg,])
                logger.info("Committing changes to %s, and submitting merge "
                            "request to %s", branchPkg, package)

                if report['result'] == MergeResult.SYNC_THEIRS:
                    srcDistro = Distro.get(report['right_distro'])
                    version = Version(report['right_version'])

                    logger.debug('Copying updated upstream version %s from '
                                 '%r into %r', version, srcDistro, target)
                    for upstream in target.getSourceLists(package.name):
                        for src in upstream:
                            srcDistro = src.distro
                            try:
                                pkg = srcDistro.findPackage(
                                    package.name, searchDist=src.dist,
                                    version=version)[0]
                                pfx = pkg.poolDirectory().path
                                break
                            except model.error.PackageNotFound:
                                pass
                else:
                    logger.debug('Copying merged version from %r into %r',
                                 branch, target)
                    pfx = result_dir(target.name, package.name)

                # this might raise an error
                obsFiles = branchPkg.getOBSFiles()

                # Get the linked target files since the checkout is expanded
                # and may contain them
                linkedFiles = package.getOBSFiles()

                for f in obsFiles:
                    if f.endswith(".dsc"):
                        oldDsc = '%s/%s' % (branchPkg.obsDir(), f)
                        break
                for f in filepaths:
                    if f.endswith(".dsc"):
                        newDsc = '%s/%s' % (pfx, f)
                        break

                #logger.debug("Running debdiff on %s and %s", oldDsc, newDsc)
                #comment = shell.get(("debdiff", oldDsc, newDsc), okstatus=(0,1))
                # FIXME: Debdiff needs to be implemented in OBS, as large
                # merge descriptions break clucene.
                comment = ''
                if report['result'] == MergeResult.SYNC_THEIRS:
                    comment += 'Sync to '
                elif report['result'] == MergeResult.MERGED:
                    comment += 'Merge with '
                comment += 'version %s from %s %s' % (
                    report['right_version'], report['right_distro'],
                    report['right_suite'])
                comment += "\n\nMerge report is available at %s" % (
                    '/'.join((config.get('MOM_URL'),
                              subdir(config.get('ROOT'), output_dir),
                              'REPORT.html')))

                # The newlines seem to cause create_submit_request to send
                # UTF-32 over the wire, which OBS promptly chokes on. Encode
                # the message to UTF-8 first.
                comment = comment.encode('utf-8')

                if not options.dry_run:
                    filesUpdated = False
                    for f in obsFiles + linkedFiles:
                        if f == "_link":
                            continue
                        try:
                            logger.debug('deleting %s/%s',
                                         branchPkg.obsDir(), f)
                            os.unlink('%s/%s' % (branchPkg.obsDir(), f))
                            filesUpdated = True
                        except OSError:
                            pass
                    for f in filepaths:
                        if f == "_link":
                            continue
                        logger.debug('copying %s/%s -> %s', pfx, f,
                                     branchPkg.obsDir())
                        shutil.copy2("%s/%s" % (pfx, f), branchPkg.obsDir())
                        filesUpdated = True

                    if filesUpdated:
                        logger.debug('Submitting request to merge %r from '
                                     '%r into %r', branchPkg, branch, target)
                        try:
                            branchPkg.commit(
                                'Automatic update by Merge-O-Matic')
                            obs_project = d.obsProject(target.dist,
                                                       target.component)
                            reqid = branchPkg.submitMergeRequest(obs_project,
                                                                 comment)
                            update_report(
                                report, output_dir, True,
                                committed_to=obs_project,
                                request_url=branchPkg.webMergeRequest(reqid))
                        except xml.etree.cElementTree.ParseError:
                            logger.exception("Failed to commit %s", branchPkg)
                            update_report(report, output_dir, False,
                                          "OBS API Error")
                        except urllib2.HTTPError as e:
                            logger.exception("Failed to commit %s: HTTP "
                                             "error %s at <%s>:",
                                             branchPkg, e.code, e.geturl())
                            update_report(report, output_dir, False,
                                          "HTTP error %s" % e.code)
                        except Exception as e:
                            logger.exception("Failed to commit %s", branchPkg)
                            # deliberately being a bit vague here in case the
                            # exact exception leaks internal info
                            update_report(report, output_dir, False,
                                          "%s" % e.__class__.__name__)
                else:
                    logger.info("Not committing, due to --dry-run")
            except urllib2.HTTPError as e:
                logger.exception('Failed to branch %s: HTTP error %s at '
                                 '<%s>:', package, e.code, e.geturl())
                update_report(report, output_dir, False,
                              "Failed to branch: HTTP error %s" % e.code)
            except Exception as e:
                logger.exception('Failed to branch %s:', package)
                # deliberately being a bit vague here in case the exact
                # exception leaks internal info
                update_report(report, output_dir, False,
                              "Failed to branch: %s" % e.__class__.__name__)
def main(options, args):
    logger.info('Producing merges...')

    excludes = []
    if options.exclude is not None:
        for filename in options.exclude:
            logger.info('excluding packages from %s', filename)
            excludes.extend(read_package_list(filename))

    includes = []
    if options.include is not None:
        for filename in options.include:
            logger.info('including packages from %s', filename)
            includes.extend(read_package_list(filename))

    # For each package in the destination distribution, locate the latest in
    # the source distribution; calculate the base from the destination and
    # produce a merge combining both sets of changes
    for target in config.targets(args):
        logger.info('considering target %s', target)
        our_dist = target.dist
        our_component = target.component
        d = target.distro
        for pkg in d.packages(target.dist, target.component):
            if options.package is not None and pkg.name not in options.package:
                logger.debug('skipping package %s: not the selected package',
                             pkg.name)
                continue

            if len(includes) and pkg.name not in includes:
                logger.info('skipping package %s: not in include list',
                            pkg.name)
                continue

            if len(excludes) and pkg.name in excludes:
                logger.info('skipping package %s: in exclude list', pkg.name)
                continue

            if pkg.name in target.blacklist:
                logger.info("%s is blacklisted, skipping", pkg.name)
                continue

            logger.info('considering package %s', pkg.name)

            if options.version:
                our_version = Version(options.version)
                logger.debug('our version: %s (from command line)',
                             our_version)
            else:
                our_version = pkg.newestVersion()
                logger.debug('our version: %s', our_version)

            upstream = None

            for srclist in target.getSourceLists(pkg.name,
                                                 include_unstable=False):
                for src in srclist:
                    logger.debug('considering source %s', src)
                    try:
                        for possible in src.distro.findPackage(
                                pkg.name, searchDist=src.dist):
                            logger.debug('- contains version %s', possible)
                            if upstream is None or possible > upstream:
                                logger.debug('  - that version is the best '
                                             'yet seen')
                                upstream = possible
                    except model.error.PackageNotFound:
                        pass

            output_dir = result_dir(target.name, pkg.name)

            # There are two situations in which we will look in unstable
            # distros for a better version:
            try_unstable = False

            # 1. If our version is newer than the stable upstream version,
            #    we assume that our version was sourced from unstable, so
            #    let's check for an update there.
            #    However we must use the base version for the comparison
            #    here, otherwise we would consider our version 1.0-1endless1
            #    newer than the stable 1.0-1 and look in unstable for an
            #    update.
            if upstream is not None and our_version >= upstream:
                our_base_version = our_version.version.base()
                logger.info("our version %s >= their version %s, checking "
                            "base version %s",
                            our_version, upstream, our_base_version)

                if our_base_version > upstream.version:
                    logger.info("base version still newer than their "
                                "version, checking in unstable")
                    try_unstable = True

            # 2. If we didn't find any upstream version at all, it's
            #    possible that it's a brand new package where our version
            #    was imported from unstable, so let's see if we can find a
            #    better version there.
            if upstream is None:
                try_unstable = True

            # However, if this package has been assigned a specific source,
            # we'll honour that.
            if target.packageHasSpecificSource(pkg.name):
                try_unstable = False

            if try_unstable:
                for srclist in target.unstable_sources:
                    for src in srclist:
                        logger.debug('considering unstable source %s', src)
                        try:
                            for possible in src.distro.findPackage(
                                    pkg.name, searchDist=src.dist):
                                logger.debug('- contains version %s',
                                             possible)
                                if upstream is None or possible > upstream:
                                    logger.debug('  - that version is the '
                                                 'best yet seen')
                                    upstream = possible
                        except model.error.PackageNotFound:
                            pass

            if upstream is None:
                logger.info("%s not available upstream, skipping",
                            our_version)
                cleanup(output_dir)
                report = MergeReport(left=our_version)
                report.target = target.name
                report.result = MergeResult.KEEP_OURS
                report.merged_version = our_version.version
                report.write_report(output_dir)
                continue

            try:
                report = read_report(output_dir)

                # See if sync_upstream_packages already set
                if not options.force and \
                        pkg.name in target.sync_upstream_packages and \
                        Version(report['right_version']) == upstream.version and \
                        Version(report['left_version']) == our_version.version and \
                        Version(report['merged_version']) == upstream.version and \
                        report['result'] == MergeResult.SYNC_THEIRS:
                    logger.info("sync to upstream for %s [ours=%s, "
                                "theirs=%s] already produced, skipping run",
                                pkg, our_version.version, upstream.version)
                    continue
                elif (not options.force and
                        Version(report['right_version']) == upstream.version and
                        Version(report['left_version']) == our_version.version and
                        # we'll retry the merge if there was an unexpected
                        # failure, a missing base or an unknown result last
                        # time
                        report['result'] in (MergeResult.KEEP_OURS,
                                             MergeResult.SYNC_THEIRS,
                                             MergeResult.MERGED,
                                             MergeResult.CONFLICTS)):
                    logger.info("merge for %s [ours=%s, theirs=%s] already "
                                "produced, skipping run",
                                pkg, our_version.version, upstream.version)
                    continue
            except (AttributeError, ValueError, KeyError):
                pass

            if our_version >= upstream:
                logger.info("our version %s >= their version %s, skipping",
                            our_version, upstream)
                cleanup(output_dir)
                report = MergeReport(left=our_version, right=upstream)
                report.target = target.name
                report.result = MergeResult.KEEP_OURS
                report.merged_version = our_version.version
                report.write_report(output_dir)
                continue
            elif our_version < upstream and \
                    pkg.name in target.sync_upstream_packages:
                logger.info("Syncing to %s per sync_upstream_packages",
                            upstream)
                cleanup(output_dir)
                report = MergeReport(left=our_version, right=upstream)
                report.target = target.name
                report.result = MergeResult.SYNC_THEIRS
                report.merged_version = upstream.version
                report.message = "Using version in upstream distro per " \
                    "sync_upstream_packages configuration"
                report.write_report(output_dir)
                continue

            logger.info("local: %s, upstream: %s", our_version, upstream)

            try:
                produce_merge(target, our_version, upstream, output_dir)
            except ValueError as e:
                logger.exception("Could not produce merge, perhaps %s "
                                 "changed components upstream?", pkg)
                report = MergeReport(left=our_version, right=upstream)
                report.target = target.name
                report.result = MergeResult.FAILED
                report.message = 'Could not produce merge: %s' % e
                report.write_report(output_dir)
                continue