def DataPointTest(self, in_catalog, pkg_osrel, osrel_spec, expected_rels):
  """Parametrized driver for Srv4Uploader._MatchSrv4ToCatalogs scenarios.

  Args:
    in_catalog: Sequence of OS release minor numbers (8..11) or None,
      describing what each probed catalog already contains.  Entries are
      right-aligned against the newest probed releases (see the enumerate
      comment below).
    pkg_osrel: Minor number of the OS release the uploaded package was
      built for ("SunOS5.<pkg_osrel>").
    osrel_spec: Minor number to pin via os_release=..., or a falsy value
      for no pinning.
    expected_rels: Minor numbers of the releases the package is expected
      to be matched to.
  """
  # Canned REST answers, keyed by OS release minor number.
  pkg_struct_map = {
      None: None,
      8: GDB_STRUCT_8,
      9: GDB_STRUCT_9,
      10: GDB_STRUCT_10,
      11: GDB_STRUCT_11,
  }
  rest_client_mock = self.mox.CreateMock(rest.RestClient)
  # Stub the RestClient constructor so the uploader gets our mock.
  self.mox.StubOutWithMock(rest, "RestClient")
  rest.RestClient(None, username=None, password=None).AndReturn(rest_client_mock)
  # Three catalogs (SunOS5.9..5.11) can be probed; starting enumerate at
  # 3 - len(in_catalog) right-aligns in_catalog against the newest
  # releases, so i + 9 yields the probed release's minor number.
  for i, os_n in enumerate(in_catalog, 3 - len(in_catalog)):
    pkg_struct = pkg_struct_map[os_n]
    rest_client_mock.Srv4ByCatalogAndCatalogname(
        'unstable', 'sparc', u'SunOS5.%s' % (i + 9),
        'gdb').AndReturn(pkg_struct)
  self.mox.ReplayAll()
  os_release_to_specify = "SunOS5.%s" % osrel_spec if osrel_spec else None
  su = csw_upload_pkg.Srv4Uploader(None, None,
                                   os_release=os_release_to_specify)
  result = su._MatchSrv4ToCatalogs(
      self.BASENAME, "unstable", "sparc",
      "SunOS5.%s" % pkg_osrel, self.MD5_SUM)
  expected = []
  for n in expected_rels:
    expected.append(("unstable", "sparc", "SunOS5.%s" % n))
  expected = tuple(expected)
  self.assertEquals(expected, result)
  # Undo the stubbing so later data points / tests see a clean rest module.
  self.mox.ResetAll()
  self.mox.UnsetStubs()
def scanPackage(self):
  """Reports needed sonames that no package in the catalog provides.

  For every non-obsolete OS release and physical architecture this
  collects each package's binaries, needed sonames and links, then checks
  every needed soname against (1) all packages' binaries, (2) all
  packages' links, and (3) the standard Solaris libs loaded from
  fn_stdlibs.  Sonames found nowhere are written to fn_report and echoed
  to stdout.

  NOTE(review): relies on names not defined in this function -- `catrel`,
  `fn_stdlibs`, `fn_report`, `FindBins` -- confirm they are module
  globals at the call site.
  """
  rest_client = rest.RestClient()
  rd = FindBins()
  with open(fn_stdlibs, "r") as fd:
    stdlibs = cjson.decode(fd.read())
  # `with` guarantees the report file is closed even if a scan fails.
  with open(fn_report, "w") as fl:
    for osrel in common_constants.OS_RELS:
      if osrel in common_constants.OBSOLETE_OS_RELS:
        logging.debug("scanPackage: %s is obsoleted" % osrel)
        continue
      for arch in common_constants.PHYSICAL_ARCHITECTURES:
        # get the list of binaries of a package
        bins = rd.getBins(catrel, arch, osrel)
        # get the list of libs which a package needs
        needed_bins = rd.getNeededBins(catrel, arch, osrel)
        # get the list of links in a package
        links = rd.getLinks(catrel, arch, osrel)
        # A set gives O(1) membership tests; the original list was O(n).
        checked = set()
        for i, pkg in enumerate(needed_bins, 1):
          for nb in needed_bins[pkg]:
            if nb in checked:
              continue
            checked.add(nb)
            # 1) is the lib carried in any package's binaries?
            # (substring match: paths are compared against the soname)
            found = any(nb in b for npkg in bins for b in bins[npkg])
            # 2) is there a link with this name?
            if not found:
              found = any(nb in l for lpkg in links for l in links[lpkg])
            # 3) is the needed lib a standard Solaris lib?
            if not found:
              found = nb in stdlibs
            # (A last-resort search in earlier OS releases is not
            # implemented; anything still unfound is reported.)
            if not found:
              fl.write("%s:%s:%s:%s\n" % (nb, pkg, arch, osrel))
              print("\nNOT FOUND: %s, needed in pkg %s %s %s" % (
                  nb, pkg, arch, osrel))
          # Per-package progress indicator on a single rewritten line.
          sys.stdout.write("\rscanPackage %4d %s" % (i, pkg))
          sys.stdout.flush()
def __init__(self, filenames, os_release=None, debug=False,
             output_to_screen=True, username=None, password=None,
             catrel=DEFAULT_CATREL):
  """Initializes the uploader and its REST client.

  REST endpoints come from the application configuration; the given
  credentials are both passed to the client and kept on the instance.
  """
  super(Srv4Uploader, self).__init__()
  # Keep uploads in a deterministic order; None/empty passes through.
  self.filenames = self.SortFilenames(filenames) if filenames else filenames
  self.md5_by_filename = {}
  self.os_release = os_release
  self.debug = debug
  self.output_to_screen = output_to_screen
  self.username = username
  self.password = password
  self.catrel = catrel
  app_config = configuration.GetConfig()
  self._rest_client = rest.RestClient(
      pkgdb_url=app_config.get('rest', 'pkgdb'),
      releases_url=app_config.get('rest', 'releases'),
      username=username,
      password=password)
def GetDiffsByCatalogname(catrel_from, catrel_to, include_downgrades,
                          include_version_changes):
  """Fetches both catalog releases for every arch/osrel pair and diffs them.

  Returns:
    A (catalogs, diffs_by_catalogname) tuple; catalogs maps
    (catrel, arch, osrel) keys to the fetched catalog data.
  """
  client = rest.RestClient()
  # TODO(maciej): Enable this once the multiprocessing module is fixed.
  # https://www.opencsw.org/mantis/view.php?id=4894
  # proc_pool = multiprocessing.Pool(20)
  fetch_keys = [
      (catrel, arch, osrel)
      for arch in common_constants.PHYSICAL_ARCHITECTURES
      for osrel in common_constants.OS_RELS
      for catrel in (catrel_from, catrel_to)]
  # Sequential for now; convert to pool.map once multiprocessing is fixed.
  catalogs = dict((key, client.GetCatalog(*key)) for key in fetch_keys)
  diffs_by_catalogname = ComposeDiffsByCatalogname(
      catalogs, catrel_from, catrel_to, include_version_changes,
      include_downgrades)
  return catalogs, diffs_by_catalogname
def __init__(self, catrel, arch, osrel, pkgcache=None, rest_client=None):
  """Stores the catalog identity and backend clients.

  Default clients are only constructed when none are injected, so tests
  can pass their own pkgcache / rest_client.
  """
  self.catrel = catrel
  self.arch = arch
  self.osrel = osrel
  if pkgcache:
    self.pkgcache = pkgcache
  else:
    self.pkgcache = rest.CachedPkgstats("/home/web/pkgstats")
  if rest_client:
    self.rest_client = rest_client
  else:
    self.rest_client = rest.RestClient()
  # Lazily-populated catalog cache.
  self._catalog = None
def __init__(self, filenames, rest_url, os_release=None, debug=False,
             output_to_screen=True):
  """Initializes the uploader with a REST client bound to rest_url."""
  super(Srv4Uploader, self).__init__()
  # Uploads are processed in sorted order.
  self.filenames = self.SortFilenames(filenames)
  self.md5_by_filename = {}
  self.os_release = os_release
  self.debug = debug
  self.rest_url = rest_url
  self.output_to_screen = output_to_screen
  self._rest_client = rest.RestClient(self.rest_url)
def test_MatchSrv4ToCatalogsSameSpecificOsrelAlreadyPresent(self):
  """A pinned os_release restricts the match to that one catalog.

  A 5.9-built gdb is uploaded while packages already exist in the 5.9 and
  newer catalogs, but because the uploader is created with
  os_release="SunOS5.10", only (unstable, sparc, SunOS5.10) is expected.
  """
  rest_client_mock = self.mox.CreateMock(rest.RestClient)
  # Stub the RestClient constructor so the uploader gets our mock.
  self.mox.StubOutWithMock(rest, "RestClient")
  rest.RestClient(None).AndReturn(rest_client_mock)
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.9', 'gdb').AndReturn(GDB_STRUCT_9)
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.10', 'gdb').AndReturn(GDB_STRUCT_10)
  # NOTE(review): the 5.11 lookup answers with GDB_STRUCT_10, not
  # GDB_STRUCT_11 -- possibly intentional (the 5.10 package propagated
  # into the 5.11 catalog), possibly a copy-paste slip; confirm against
  # the fixtures.
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.11', 'gdb').AndReturn(GDB_STRUCT_10)
  self.mox.ReplayAll()
  su = csw_upload_pkg.Srv4Uploader(None, None, os_release="SunOS5.10")
  result = su._MatchSrv4ToCatalogs(
      "gdb-7.2,REV=2011.01.21-SunOS5.9-sparc-CSW.pkg.gz",
      "unstable", "sparc", "SunOS5.9",
      "deadbeef61b53638d7813407fab4765b")
  expected = (("unstable", "sparc", "SunOS5.10"), )
  self.assertEquals(expected, result)
def RemovePackage(self, catalogname, execute=False, os_releases=None):
  """Collects a package's removal candidates across catalogs.

  Looks `catalogname` up (falling back to treating it as a pkgname) in
  the unstable catalog for every requested OS release and architecture,
  recording the md5 sums to remove and any reverse dependencies found.

  Args:
    catalogname: Catalog name (or pkgname, as a fallback) to remove.
    execute: Not used in the visible portion of this function --
      presumably gates the actual removal further down; confirm.
    os_releases: OS releases to operate on; defaults to all known ones.

  NOTE(review): this chunk ends right after the collection loop;
  `rev_deps`, `to_remove` and `found_anywhere` are presumably consumed
  by code beyond this view.
  """
  if not os_releases:
    os_releases = common_constants.OS_RELS
  username, password = rest.GetUsernameAndPassword()
  rest_client = rest.RestClient(username=username, password=password)
  rd = RevDeps()
  # (catrel, arch, osrel) -> reverse dependencies of the package there.
  rev_deps = {}
  # md5 sums to remove
  to_remove = []
  found_anywhere = False
  for osrel in os_releases:
    # Tolerate (but log) unknown and obsolete release names.
    if osrel not in common_constants.OS_RELS:
      logging.warning(
          "%s not found in common_constants.OS_RELS (%s). Skipping.",
          osrel, common_constants.OS_RELS)
      continue
    if osrel in common_constants.OBSOLETE_OS_RELS:
      logging.info("%s is an obsolete OS release. Skipping.", osrel)
      continue
    for arch in common_constants.PHYSICAL_ARCHITECTURES:
      try:
        pkg_simple = rest_client.Srv4ByCatalogAndCatalogname(
            UNSTABLE, arch, osrel, catalogname)
      except urllib2.HTTPError, e:
        # A failed lookup is treated the same as "not found".
        logging.warning("could not fetch %r from %s/%s: %s",
                        catalogname, arch, osrel, e)
        pkg_simple = None
      if not pkg_simple:
        # Maybe we were given a pkgname instead of a catalogname? We can try
        # that before failing.
        pkg_simple = rest_client.Srv4ByCatalogAndPkgname(
            UNSTABLE, arch, osrel, catalogname)
        if not pkg_simple:
          msg = "{0} was not in the unstable {1} {2} catalog."
          logging.debug(
              msg.format(repr(catalogname), arch, osrel))
          continue
      if pkg_simple:
        found_anywhere = True
        md5 = pkg_simple["md5_sum"]
        pkg = rd.cp.GetPkgstats(md5)
        key = UNSTABLE, arch, osrel
        cat_rev_deps = rd.RevDeps(UNSTABLE, arch, osrel, md5)
        if cat_rev_deps:
          rev_deps[key] = cat_rev_deps
        to_remove.append((UNSTABLE, arch, osrel, md5))
def GetDiffsByCatalogname(catrel_from, catrel_to, include_downgrades):
  """Computes per-catalogname diffs between two catalog releases.

  Args:
    catrel_from: Source catalog release name, e.g. "unstable".
    catrel_to: Target catalog release name, e.g. "testing".
    include_downgrades: If True, version downgrades are reported too.

  Returns:
    A dict mapping catalogname to a dict with optional "new_pkgs",
    "removed_pkgs" and "updated_pkgs" lists of (arch, osrel, ...) tuples.
  """
  rest_client = rest.RestClient()
  # The comparator carries no loop state, so build it once instead of
  # once per (arch, osrel) pair as before.
  comparator = catalog.CatalogComparator()
  diffs_by_catalogname = {}
  for arch in common_constants.PHYSICAL_ARCHITECTURES:
    logging.debug("Architecture: %s", arch)
    for osrel in common_constants.OS_RELS:
      logging.debug("OS release: %s", osrel)
      cat_from = rest_client.GetCatalog(catrel_from, arch, osrel)
      cat_to = rest_client.GetCatalog(catrel_to, arch, osrel)
      # Should use catalog comparator, but the data format is different.
      # Missing catalogs are treated as empty.
      if cat_from is None:
        cat_from = []
      if cat_to is None:
        cat_to = []
      cat_from_by_c = IndexByCatalogname(cat_from)
      cat_to_by_c = IndexByCatalogname(cat_to)
      # By passing the catalogs (as arguments) in reverse order, we get
      # packages to be updated in new_pkgs, and so forth.
      new_pkgs, removed_pkgs, updated_pkgs = comparator.GetCatalogDiff(
          cat_to_by_c, cat_from_by_c)
      for pkg in new_pkgs:
        catalogname_d = diffs_by_catalogname.setdefault(
            pkg["catalogname"], {})
        catalogname_d.setdefault("new_pkgs", []).append((arch, osrel, pkg))
      for pkg in removed_pkgs:
        catalogname_d = diffs_by_catalogname.setdefault(
            pkg["catalogname"], {})
        catalogname_d.setdefault("removed_pkgs", []).append(
            (arch, osrel, pkg))
      for pkg_pair in updated_pkgs:
        # Upgrade or downgrade?
        cmp_result = opencsw.CompareVersions(
            pkg_pair["from"]["version"],
            pkg_pair["to"]["version"])
        direction = "upgrade" if cmp_result < 0 else "downgrade"
        pkg_pair["direction"] = direction
        pkg = pkg_pair["from"]
        if direction == "upgrade" or include_downgrades:
          catalogname_d = diffs_by_catalogname.setdefault(
              pkg["catalogname"], {})
          catalogname_d.setdefault("updated_pkgs", []).append(
              (arch, osrel, pkg_pair))
  return diffs_by_catalogname
def test_MatchSrv4ToCatalogsNewerPackage(self):
  """Uploading a 5.10 package matches the 5.10 and 5.11 catalogs.

  A scenario in which a 5.9 package exists in the catalog, and we're
  uploading a 5.10 package.  The older 5.9 catalog is never probed; the
  package matches its own release and every newer one.
  """
  rest_client_mock = self.mox.CreateMock(rest.RestClient)
  # Stub the RestClient constructor so the uploader gets our mock.
  self.mox.StubOutWithMock(rest, "RestClient")
  rest.RestClient(None).AndReturn(rest_client_mock)
  # Both probed catalogs currently carry the older, 5.9-built package.
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.10', 'gdb').AndReturn(GDB_STRUCT_9)
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.11', 'gdb').AndReturn(GDB_STRUCT_9)
  self.mox.ReplayAll()
  su = csw_upload_pkg.Srv4Uploader(None, None)
  result = su._MatchSrv4ToCatalogs(
      "gdb-7.2,REV=2011.01.21-SunOS5.10-sparc-CSW.pkg.gz",
      "unstable", "sparc", "SunOS5.10",
      "deadbeef61b53638d7813407fab4765b")
  expected = (
      ("unstable", "sparc", "SunOS5.10"),
      ("unstable", "sparc", "SunOS5.11"),
  )
  self.assertEquals(expected, result)
def test_MatchSrv4ToCatalogsAbsentFromAll(self):
  """A package absent from every catalog matches 5.9 and all newer ones."""
  rest_client_mock = self.mox.CreateMock(rest.RestClient)
  # Stub the RestClient constructor so the uploader gets our mock.
  self.mox.StubOutWithMock(rest, "RestClient")
  rest.RestClient(None, username=None, password=None).AndReturn(rest_client_mock)
  # No catalog has the package yet.
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.9', 'gdb').AndReturn(None)
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.10', 'gdb').AndReturn(None)
  rest_client_mock.Srv4ByCatalogAndCatalogname(
      'unstable', 'sparc', u'SunOS5.11', 'gdb').AndReturn(None)
  self.mox.ReplayAll()
  su = csw_upload_pkg.Srv4Uploader(None, None)
  result = su._MatchSrv4ToCatalogs(
      "gdb-7.2,REV=2011.01.21-SunOS5.9-sparc-CSW.pkg.gz",
      "unstable", "sparc", "SunOS5.9",
      "deadbeef61b53638d7813407fab4765b")
  expected = (
      ("unstable", "sparc", "SunOS5.9"),
      ("unstable", "sparc", "SunOS5.10"),
      ("unstable", "sparc", "SunOS5.11"),
  )
  self.assertEquals(expected, result)
def __init__(self, filenames, rest_url, os_release=None, debug=False,
             output_to_screen=True, username=None, password=None,
             catrel=DEFAULT_CATREL):
  """Stores upload settings and builds a REST client for rest_url."""
  super(Srv4Uploader, self).__init__()
  # Keep uploads in a deterministic order; None/empty passes through.
  self.filenames = self.SortFilenames(filenames) if filenames else filenames
  self.md5_by_filename = {}
  self.os_release = os_release
  self.debug = debug
  self.rest_url = rest_url
  self.output_to_screen = output_to_screen
  self.username = username
  self.password = password
  self.catrel = catrel
  self._rest_client = rest.RestClient(self.rest_url,
                                      username=username,
                                      password=password)
previous_catalogs_by_triad = cPickle.load(fd) except (IOError, EOFError), e: logging.warning(e) previous_catalogs_by_triad = {} # Merge the two data structures here catalogs = [] for key in catalogs_by_triad: if key in previous_catalogs_by_triad: catalogs.append( # ("fossil", "amd65", "SolarOS5.12", cat_a, cat_b), key + (previous_catalogs_by_triad[key], catalogs_by_triad[key])) else: logging.debug("%s not found in previous_catalogs_by_triad", key) formatter = NotificationFormatter() rest_client = rest.RestClient() notifications = formatter.FormatNotifications(cat_tree_url, catalogs, rest_client) whitelist = frozenset() if options.whitelist: whitelist = frozenset(options.whitelist.split(",")) logging.debug("Email whitelist: %s", whitelist) for email in notifications: if options.send_notifications: logging.debug("email: %s", repr(email)) if whitelist and email not in whitelist: continue logging.debug("Sending.") msg = MIMEText(notifications[email]) msg["Subject"] = "OpenCSW catalog update report" from_address = "Catalog update notifier <*****@*****.**>"
def __init__(self):
  """Creates empty per-catalog caches plus the REST/pkgstats clients."""
  # Per-catalog caches, filled lazily.
  self.cached_catalogs_bins = {}
  self.cached_catalogs_links = {}
  self.cached_catalogs_needed_bins = {}
  # Backend clients.
  self.rest_client = rest.RestClient()
  self.cp = rest.CachedPkgstats("pkgstats.db")
def main():
  """Command-line entry point: reports package diffs between two catalogs.

  Fetches (or loads from JSON) the from/to catalogs, computes per-package
  diffs and bundle membership, and renders the CATALOG_MOD_TMPL report to
  the file given with -o/--output-file.
  """
  parser = optparse.OptionParser()
  parser.add_option(
      "--from-catalog", dest="catrel_from",
      default="unstable",
      help="Catalog release to integrate from, e.g. 'unstable'.")
  parser.add_option(
      "--to-catalog", dest="catrel_to",
      default="testing",
      help="Catalog release to integrate to, e.g. 'testing'.")
  parser.add_option(
      "--from-json", dest="from_json",
      help=("If specified, loads data from a JSON file instead of polling "
            "the database."))
  parser.add_option(
      "--save-json", dest="save_json",
      help="If specified, saves JSON data to a file.")
  parser.add_option(
      "-o", "--output-file", dest="output_file",
      help="Filename to save output to.")
  parser.add_option(
      "--no-include-downgrades", dest="include_downgrades",
      default=True, action="store_false",
      help="Skip package downgrades.")
  parser.add_option(
      "--no-include-version-changes", dest="include_version_changes",
      default=True, action="store_false",
      help="Skip version upgrades (only accept revision upgrades).")
  parser.add_option(
      "--debug", dest="debug",
      default=False, action="store_true")
  options, args = parser.parse_args()
  logging_level = logging.INFO
  if options.debug:
    logging_level = logging.DEBUG
  fmt = '%(levelname)s %(asctime)s %(filename)s:%(lineno)d %(message)s'
  logging.basicConfig(format=fmt, level=logging_level)
  config = configuration.GetConfig()
  username, password = rest.GetUsernameAndPassword()
  rest_client = rest.RestClient(
      pkgdb_url=config.get('rest', 'pkgdb'),
      releases_url=config.get('rest', 'releases'),
      username=username,
      password=password)
  # Fail fast: the template output needs a destination file.
  if not options.output_file:
    raise UsageError("Please specify the output file. "
                     "See --help.")
  catrel_from = options.catrel_from
  catrel_to = options.catrel_to
  if options.from_json:
    with open(options.from_json, "rb") as fd:
      logging.info("Loading %s", options.from_json)
      (bundles_by_md5, jsonable_catalogs,
       diffs_by_catalogname) = cjson.decode(fd.read())
      # JSON object keys are strings; decode them back into
      # (catrel, arch, osrel) tuples.
      catalogs = dict((tuple(cjson.decode(x)), jsonable_catalogs[x])
                      for x in jsonable_catalogs)
  else:
    catalogs = GetCatalogs(
        catrel_from, catrel_to,
        options.include_version_changes,
        options.include_downgrades,
        rest_client)
    diffs_by_catalogname = ComposeDiffsByCatalogname(
        catalogs, catrel_from, catrel_to,
        options.include_version_changes,
        options.include_downgrades)
    # md5 -> bundle name, for every package seen in either catalog.
    bundles_by_md5 = {}
    # md5 sums known to lack a bundle, so their stats aren't re-fetched.
    bundles_missing = set()
    cp = rest.CachedPkgstats("pkgstats", rest_client)
    for key in catalogs:
      if catalogs[key]:  # could be None
        for pkg in catalogs[key]:
          md5 = pkg["md5_sum"]
          if md5 not in bundles_by_md5 and md5 not in bundles_missing:
            stats = cp.GetPkgstats(md5)
            bundle_key = "OPENCSW_BUNDLE"
            if stats:
              if bundle_key in stats["pkginfo"]:
                bundles_by_md5[md5] = stats["pkginfo"][
                    bundle_key]
              else:
                logging.debug(
                    "%r (%r) does not have the bundle set",
                    stats["basic_stats"]["pkg_basename"], md5)
                bundles_missing.add(md5)
  # Here's a good place to calculate the mapping between catalognames and
  # bundle names.
  change_types = "new_pkgs", "removed_pkgs", "updated_pkgs"
  bundles_by_catalogname = {}
  for catalogname in diffs_by_catalogname:
    l = bundles_by_catalogname.setdefault(catalogname, set())
    for change_type in change_types:
      if change_type in diffs_by_catalogname[catalogname]:
        for change_info in diffs_by_catalogname[catalogname][
            change_type]:
          pkg = change_info[2]
          # updated_pkgs entries are {"from": ..., "to": ...} pairs;
          # collect md5 sums from both sides.
          if "to" in pkg:
            md5s = [x["md5_sum"] for x in (pkg["from"], pkg["to"])]
          else:
            md5s = [pkg["md5_sum"]]
          for md5 in md5s:
            if md5 in bundles_by_md5:
              l.add(bundles_by_md5[md5])
  # Variables exposed to the Cheetah template.
  namespace = {
      "bundles_by_catalogname": bundles_by_catalogname,
      "bundles_by_md5": bundles_by_md5,
      "diffs_by_catalogname": diffs_by_catalogname,
      "catrel_to": catrel_to,
      "catrel_from": catrel_from,
      "prog": sys.argv[0],
  }
  if options.save_json:
    with open(options.save_json, "wb") as fd:
      # Tuple keys can't be JSON object keys; encode them as strings.
      jsonable_catalogs = dict(
          (cjson.encode(x), catalogs[x]) for x in catalogs)
      fd.write(
          cjson.encode(
              (bundles_by_md5, jsonable_catalogs, diffs_by_catalogname)))
  t = Template.Template(CATALOG_MOD_TMPL, searchList=[namespace])
  if options.output_file:
    logging.info("Saving output to %s", options.output_file)
    with open(options.output_file, "wb") as fd:
      fd.write(unicode(t))
  else:
    # NOTE(review): unreachable -- a missing --output-file already raised
    # UsageError above; kept for safety / in case that check is relaxed.
    sys.stdout.write(unicode(t))
def __init__(self):
  """Creates the catalog cache and the REST/pkgstats clients."""
  # Lazily-populated catalog cache.
  self.cached_catalogs = {}
  self.cp = rest.CachedPkgstats("pkgstats")
  self.rest_client = rest.RestClient()