import json

import pytest

# Module imports inferred from the mocker.patch targets used below.
from sps import cache, patchproducts, request


def test_patchproducts_update_cache(mocker, data, patchproducts_data):
    mocker.patch("sps.cache.save", autospec=True)
    mocker.patch("sps.request.fetch", autospec=True)
    request.fetch.return_value = (
        f"aöskdföskdföproductsData={json.dumps(data)};\nslkdfjsal"
    )
    patchproducts.get(None, "fake-file-name", False, True)
    cache.save.assert_called_with(
        "patchproducts", "fake-file-name", patchproducts_data
    )


def test_patchproducts_get(mocker, data, patchproducts_data):
    mocker.patch("sps.request.fetch", autospec=True)
    request.fetch.return_value = (
        f"aöskdföskdföproductsData={json.dumps(data)};\nslkdfjsal"
    )
    assert (
        patchproducts.get(None, "fake-file-name", False, False) == patchproducts_data
    )
    request.fetch.assert_called_with("https://scc.suse.com/patches", "html")
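

# A minimal sketch (not the actual sps.patchproducts implementation) of the
# extraction behavior these tests encode: the patch product list is embedded in
# the fetched HTML as "productsData=<json>;". A missing marker or an unparsable
# payload leads to SystemExit, as the error tests further below expect.
# The helper name _extract_products_data is hypothetical.
import re
import sys


def _extract_products_data(html):
    """Illustrative helper: pull the productsData JSON blob out of raw HTML."""
    match = re.search(r"productsData=(.*?);", html)
    if match is None:
        sys.exit(1)  # no productsData marker found in the page
    try:
        return json.loads(match.group(1))
    except json.JSONDecodeError:
        sys.exit(1)  # marker found, but the payload is not valid JSON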


def test_patchproducts_get_pattern_multiple_match(mocker, data, patchproducts_data):
    mocker.patch("sps.request.fetch", autospec=True)
    request.fetch.return_value = (
        f"aöskdföskdföproductsData={json.dumps(data)};\nslkdfjsal"
    )
    expected_return = []
    for product in patchproducts_data:
        for k in product.keys():
            if "SUSE" in str(product[k]):
                expected_return.append(product)
                break
    assert patchproducts.get("SUSE", "fake-file-name", False, False) == expected_return


def test_patchproducts_get_pattern_single_match(mocker, data, patchproducts_data):
    mocker.patch("sps.request.fetch", autospec=True)
    request.fetch.return_value = (
        f"aöskdföskdföproductsData={json.dumps(data)};\nslkdfjsal"
    )
    expected_return = []
    for product in patchproducts_data:
        for k in product.keys():
            if "Legacy Module" in str(product[k]):
                expected_return.append(product)
                break
    assert (
        patchproducts.get("Legacy Module", "fake-file-name", False, False)
        == expected_return
    )


def test_patchproducts_get_cache(mocker, patchproducts_data):
    mocker.patch("sps.cache.load", autospec=True)
    cache.load.return_value = {"patchproducts": patchproducts_data}
    assert patchproducts.get(None, __file__, False, False) == patchproducts_data


def test_patchproducts_get_pattern_no_match(mocker, data):
    mocker.patch("sps.request.fetch", autospec=True)
    request.fetch.return_value = (
        f"aöskdföskdföproductsData={json.dumps(data)};\nslkdfjsal"
    )
    assert patchproducts.get("no-hit-", "fake-file-name", False, False) == []


def test_patchproducts_json_parse_error(mocker, data):
    mocker.patch("sps.cache.save", autospec=True)
    mocker.patch("sps.request.fetch", autospec=True)
    # `data` is interpolated without json.dumps, so the embedded payload is not valid JSON.
    request.fetch.return_value = f"aöskdföskdföproductsData={data};\nslkdfjsal"
    with pytest.raises(SystemExit):
        patchproducts.get(None, "fake-file-name", False, False)


def test_patchproducts_no_matches_from_scc(mocker, data):
    mocker.patch("sps.cache.save", autospec=True)
    mocker.patch("sps.request.fetch", autospec=True)
    # The response contains no "productsData=" marker, so nothing can be extracted.
    request.fetch.return_value = (
        f"aöskdföskdföprodsData={json.dumps(data)};\nslkdfjsal"
        f"aöskdföskdföproduata={json.dumps(data)};\nslkdfjsal"
    )
    with pytest.raises(SystemExit):
        patchproducts.get(None, "fake-file-name", False, False)


def test_patchproducts_cache_no_key(mocker):
    mocker.patch("sps.cache.load", autospec=True)
    cache.load.return_value = {"data": [1, 2, 3]}
    with pytest.raises(SystemExit):
        patchproducts.get(None, __file__, False, False)
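

# A minimal sketch of the `data` and `patchproducts_data` fixtures used above,
# which the real project provides via conftest.py. The field names and sample
# values are assumptions based on how main() renders patch products; the actual
# fixture data may differ.
@pytest.fixture
def patchproducts_data():
    # Hypothetical parsed patch products, i.e. what patchproducts.get() returns.
    return [
        {"name": "SUSE Linux Enterprise Server 15 SP4", "version": "15.4", "architecture": "x86_64"},
        {"name": "SUSE Manager Server 4.3", "version": "4.3", "architecture": "x86_64"},
        {"name": "Legacy Module", "version": "15", "architecture": "x86_64"},
    ]


@pytest.fixture
def data(patchproducts_data):
    # The structure embedded as "productsData=..." in the scraped HTML; assumed
    # here to be identical to the parsed result.
    return patchproducts_data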


import sys

from prettytable import ALL, PrettyTable

# Module imports inferred from usage below; create_parser, print_warn and
# PATCH_WARN_NUMBER are assumed to be defined elsewhere in this module/package.
from sps import cache, completion, packages, patch, patchproducts, products


def main():
    """The main program logic"""
    parser = create_parser()
    args = parser.parse_args()
    if args.command in ["product", "package", "patchproduct", "patch"]:
        table = PrettyTable()
        if args.command == "product":
            products_data = products.get(
                args.pattern, args.cache_file, args.no_cache, args.update_cache
            )
            table.field_names = ["id", "Name", "Edition", "Identifier", "Arch"]
            for product in products_data:
                table.add_row(
                    [
                        product["id"],
                        product["name"],
                        product["edition"],
                        product["identifier"],
                        product["architecture"],
                    ]
                )
        if args.command == "package":
            package_data = packages.get(args.product, args.pattern, args.cache_file)
            table.field_names = ["Name", "Version", "Release", "Arch", "Module"]
            for package in package_data:
                # Collect the names of all products shipping this package.
                module_line = ""
                for product in package["products"]:
                    module_line = "{},{}".format(module_line, product["name"])
                if args.exact_match and package["name"] == args.pattern:
                    table.add_row(
                        [
                            package["name"],
                            package["version"],
                            package["release"],
                            package["arch"],
                            module_line[1:],
                        ]
                    )
                elif not args.exact_match:
                    table.add_row(
                        [
                            package["name"],
                            package["version"],
                            package["release"],
                            package["arch"],
                            module_line[1:],
                        ]
                    )
        if args.command == "patchproduct":
            products_data = patchproducts.get(
                args.pattern, args.cache_file, args.no_cache, args.update_cache
            )
            table.field_names = ["Name", "Version", "Arch"]
            for product in products_data:
                table.add_row(
                    [
                        product["name"],
                        product["version"],
                        product["architecture"],
                    ]
                )
        if args.command == "patch":
            patches = patch.get(args)
            if args.detail:
                for p in patches["hits"]:
                    print(patch.format_detail(p))
                if patches["meta"]["total_hits"] > PATCH_WARN_NUMBER:
                    print_warn(
                        f"Your query has {patches['meta']['total_hits']} hits, you might want to refine your search criteria"
                    )
                print(
                    f"Page {patches['meta']['current_page']}/{patches['meta']['total_pages']}\t Hits: {patches['meta']['total_hits']}"
                )
                sys.exit(0)
            else:
                table.hrules = ALL
                table.field_names = [
                    "Severity",
                    "Name",
                    "Product",
                    "Arch",
                    "id",
                    "Released",
                ]
                for p in patches["hits"]:
                    table.add_row(
                        [
                            p["severity"],
                            p["title"],
                            "\n".join(p["product_friendly_names"]),
                            "\n".join(p["product_architectures"]),
                            p["ibs_id"],
                            p["issued_at"][: p["issued_at"].find("T")],
                        ]
                    )
                if patches["meta"]["total_hits"] > PATCH_WARN_NUMBER:
                    print("\n")
                    print_warn(
                        f"Your query has {patches['meta']['total_hits']} hits, you might want to refine your search criteria"
                    )
                print(
                    f"\nPage {patches['meta']['current_page']}/{patches['meta']['total_pages']}\t Hits: {patches['meta']['total_hits']}"
                )
        # Common table formatting for all table-producing commands.
        for name in table.field_names:
            table.align[name] = "l"
        table.border = not args.no_borders
        table.header = not args.no_header
        table.sortby = args.sort_table
        print(table)
    if args.command == "completion":
        print(completion.get(args.cache_file, args.shell))
    # Warn if any cache file is older than the configured maximum age.
    cacheages = cache.age(args.cache_file, args.cache_age)
    for key, value in cacheages.items():
        print_warn(f"The {key} cache is old, last updated {value}")
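

# Conventional entry-point guard, added as an assumption; the real project may
# instead expose main() through a packaging entry point.
if __name__ == "__main__":
    main()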