def test_homepage_dashboard_flagged(user: User, user2: User, package: Package):
    pkgbase = package.PackageBase

    now = time.utcnow()
    with db.begin():
        db.create(PackageComaintainer, User=user2, PackageBase=pkgbase,
                  Priority=1)
        pkgbase.OutOfDateTS = now - 5
        pkgbase.Flagger = user

    # Test that a comaintainer viewing the dashboard is shown their
    # flagged co-maintained packages.
    comaint_cookies = {"AURSID": user2.login(Request(), "testPassword")}
    with client as request:
        resp = request.get("/", cookies=comaint_cookies)
    assert resp.status_code == int(HTTPStatus.OK)
    root = parse_root(resp.text)
    flagged = root.xpath('//table[@id="flagged-packages"]//tr/td/a')[0]
    assert flagged.text.strip() == package.Name

    # Test that a maintainer viewing the dashboard is shown their
    # flagged maintained packages.
    cookies = {"AURSID": user.login(Request(), "testPassword")}
    with client as request:
        resp = request.get("/", cookies=cookies)
    assert resp.status_code == int(HTTPStatus.OK)
    root = parse_root(resp.text)
    flagged = root.xpath('//table[@id="flagged-packages"]//tr/td/a')[0]
    assert flagged.text.strip() == package.Name


def test_homepage_dashboard(redis, packages, user):
    # Create Comaintainer records for all of the packages.
    with db.begin():
        for pkg in packages:
            db.create(PackageComaintainer,
                      PackageBase=pkg.PackageBase,
                      User=user,
                      Priority=1)

    cookies = {"AURSID": user.login(Request(), "testPassword")}
    with client as request:
        response = request.get("/", cookies=cookies)
    assert response.status_code == int(HTTPStatus.OK)

    root = parse_root(response.text)

    # Expect the packages to be listed in the "My Packages" table in
    # descending order: pkg_49 down to pkg_1.
    expectations = [f"pkg_{i}" for i in range(50 - 1, 0, -1)]
    my_packages = root.xpath('//table[@id="my-packages"]/tbody/tr')
    for i, expected in enumerate(expectations):
        name, version, votes, pop, voted, notify, desc, maint \
            = my_packages[i].xpath('./td')
        assert name.xpath('./a').pop(0).text.strip() == expected

    # Do the same for the Comaintained Packages table.
    my_packages = root.xpath('//table[@id="comaintained-packages"]/tbody/tr')
    for i, expected in enumerate(expectations):
        name, version, votes, pop, voted, notify, desc, maint \
            = my_packages[i].xpath('./td')
        assert name.xpath('./a').pop(0).text.strip() == expected


def test_archdev_navbar(client: TestClient):
    expected = ["AUR Home", "Packages", "Register", "Login"]
    with client as request:
        resp = request.get("/")
    assert resp.status_code == int(HTTPStatus.OK)

    root = parse_root(resp.text)
    items = root.xpath('//div[@id="archdev-navbar"]/ul/li/a')
    for i, item in enumerate(items):
        assert item.text.strip() == expected[i]


def test_pager_no_results():
    """ Test the pager partial with no results. """
    num_packages = 0
    context = pager_context(num_packages)
    body = base_template("partials/pager.html").render(context)

    root = parse_root(body)
    stats = root.xpath('//div[@class="pkglist-stats"]/p')
    expected = "0 packages found."
    assert stats[0].text.strip() == expected


def test_archdev_navbar_authenticated(client: TestClient, user: User):
    expected = ["Dashboard", "Packages", "Requests", "My Account", "Logout"]
    cookies = {"AURSID": user.login(Request(), "testPassword")}
    with client as request:
        resp = request.get("/", cookies=cookies)
    assert resp.status_code == int(HTTPStatus.OK)

    root = parse_root(resp.text)
    items = root.xpath('//div[@id="archdev-navbar"]/ul/li/a')
    for i, item in enumerate(items):
        assert item.text.strip() == expected[i]


def test_pager():
    """ Test the pager partial with two pages of results. """
    num_packages = 100
    context = pager_context(num_packages)
    body = base_template("partials/pager.html").render(context)

    root = parse_root(body)
    stats = root.xpath('//div[@class="pkglist-stats"]/p')
    stats = re.sub(r"\s{2,}", " ", stats[0].text.strip())
    expected = f"{num_packages} packages found. Page 1 of 2."
    assert stats == expected


def test_rtl(client: TestClient):
    responses = {}
    expected = [[], [], ['rtl'], ['rtl']]
    with client as request:
        responses["default"] = request.get("/")
        responses["de"] = request.get("/", cookies={"AURLANG": "de"})
        responses["he"] = request.get("/", cookies={"AURLANG": "he"})
        responses["ar"] = request.get("/", cookies={"AURLANG": "ar"})
    for i, (lang, resp) in enumerate(responses.items()):
        assert resp.status_code == int(HTTPStatus.OK)
        t = parse_root(resp.text)
        assert t.xpath('//html/@dir') == expected[i]


def test_requests(client: TestClient,
                  tu_user: User,
                  packages: List[Package],
                  requests: List[PackageRequest]):
    cookies = {"AURSID": tu_user.login(Request(), "testPassword")}
    with client as request:
        resp = request.get("/requests", params={
            # Pass in URL query parameters O, SeB and SB to exercise
            # their paths inside of the pager_nav used in this request.
            "O": 0,  # Page 1
            "SeB": "nd",
            "SB": "n"
        }, cookies=cookies)
    assert resp.status_code == int(HTTPStatus.OK)

    assert "Next ›" in resp.text
    assert "Last »" in resp.text

    root = parse_root(resp.text)
    # We have 55 requests and defaults.PP is 50, so expect 50 rows on page 1.
    rows = root.xpath('//table[@class="results"]/tbody/tr')
    assert len(rows) == defaults.PP

    # Request page 2 of the requests page.
    with client as request:
        resp = request.get("/requests", params={
            "O": 50  # Page 2
        }, cookies=cookies)
    assert resp.status_code == int(HTTPStatus.OK)

    assert "‹ Previous" in resp.text
    assert "« First" in resp.text

    root = parse_root(resp.text)
    rows = root.xpath('//table[@class="results"]/tbody/tr')
    assert len(rows) == 5  # Five records remain on the second page.


def test_homepage_dashboard_flagged_packages(redis, packages, user):
    # Flag the first Package by setting its OutOfDateTS column.
    pkg = packages[0]
    with db.begin():
        pkg.PackageBase.OutOfDateTS = time.utcnow()

    cookies = {"AURSID": user.login(Request(), "testPassword")}
    with client as request:
        response = request.get("/", cookies=cookies)
    assert response.status_code == int(HTTPStatus.OK)

    # Check that the package shows up in the Flagged Packages table.
    root = parse_root(response.text)
    flagged_pkg = root.xpath('//table[@id="flagged-packages"]/tbody/tr').pop(0)
    flagged_name = flagged_pkg.xpath('./td/a').pop(0)
    assert flagged_name.text.strip() == pkg.Name


def test_requests_selfmade(client: TestClient, user: User,
                           requests: List[PackageRequest]):
    cookies = {"AURSID": user.login(Request(), "testPassword")}
    with client as request:
        resp = request.get("/requests", cookies=cookies)
    assert resp.status_code == int(HTTPStatus.OK)

    # As the user who created all of the requests, we should see all of them.
    # However, we are not allowed to accept any of them ourselves.
    root = parse_root(resp.text)
    rows = root.xpath('//table[@class="results"]/tbody/tr')
    assert len(rows) == defaults.PP

    # The first (and only) link in each row's last column should be "Close".
    for row in rows:
        last_row = row.xpath('./td')[-1].xpath('./a')[0]
        assert last_row.text.strip() == "Close"


def test_homepage_updates(redis, packages):
    with client as request:
        response = request.get("/")
        assert response.status_code == int(HTTPStatus.OK)
        # Run the request a second time to exercise the Redis path.
        response = request.get("/")
        assert response.status_code == int(HTTPStatus.OK)

    root = parse_root(response.text)
    # We expect to see the latest 15 packages, which happen to be
    # pkg_49 .. pkg_35. Build the expectations from a range starting
    # at 49 and stepping down by 1 for 15 packages.
    expectations = [f"pkg_{i}" for i in range(50 - 1, 50 - 1 - 15, -1)]
    updates = root.xpath('//div[@id="pkg-updates"]/table/tbody/tr')
    for i, expected in enumerate(expectations):
        pkgname = updates[i].xpath('./td/a').pop(0)
        assert pkgname.text.strip() == expected


def test_homepage_stats(redis, packages):
    with client as request:
        response = request.get("/")
    assert response.status_code == int(HTTPStatus.OK)

    root = parse_root(response.text)

    expectations = [
        ("Packages", r'\d+'),
        ("Orphan Packages", r'\d+'),
        ("Packages added in the past 7 days", r'\d+'),
        ("Packages updated in the past 7 days", r'\d+'),
        ("Packages updated in the past year", r'\d+'),
        ("Packages never updated", r'\d+'),
        ("Registered Users", r'\d+'),
        ("Trusted Users", r'\d+')
    ]

    stats = root.xpath('//div[@id="pkg-stats"]//tr')
    for i, expected in enumerate(expectations):
        expected_key, expected_regex = expected
        key, value = stats[i].xpath('./td')
        assert key.text.strip() == expected_key
        assert re.match(expected_regex, value.text.strip())


def test_homepage_dashboard_requests(redis, packages, user):
    now = time.utcnow()

    pkg = packages[0]
    reqtype = db.query(RequestType, RequestType.ID == DELETION_ID).first()
    with db.begin():
        pkgreq = db.create(PackageRequest, PackageBase=pkg.PackageBase,
                           PackageBaseName=pkg.PackageBase.Name,
                           User=user, Comments=str(),
                           ClosureComment=str(), RequestTS=now,
                           RequestType=reqtype)

    cookies = {"AURSID": user.login(Request(), "testPassword")}
    with client as request:
        response = request.get("/", cookies=cookies)
    assert response.status_code == int(HTTPStatus.OK)

    root = parse_root(response.text)
    # Check the first row of the dashboard's pending requests table
    # (named `row` to avoid shadowing the `request` client above).
    row = root.xpath('//table[@id="pkgreq-results"]/tbody/tr').pop(0)
    pkgname = row.xpath('./td/a').pop(0)
    assert pkgname.text.strip() == pkgreq.PackageBaseName


def check_package_details(content: str, pkg: Package) -> None:
    """ Perform assertion checks against package details. """
    pkgbase = pkg.PackageBase

    root = parse_root(content)
    pkginfo = root.xpath('//table[@id="pkginfo"]')[0]
    rows = pkginfo.xpath("./tr")

    # Check Git Clone URL.
    git_clone_uris = rows[0].xpath("./td/a")
    anon_uri, priv_uri = git_clone_uris
    pkgbasename = pkgbase.Name
    assert anon_uri.text.strip() == GIT_CLONE_URI_ANON % pkgbasename
    assert priv_uri.text.strip() == GIT_CLONE_URI_PRIV % pkgbasename

    # Check Package Base.
    pkgbase_markup = rows[1].xpath("./td/a")[0]
    assert pkgbase_markup.text.strip() == pkgbasename

    # Check Description.
    desc = rows[2].xpath("./td")[0]
    assert desc.text.strip() == str(pkg.Description)

    # Check URL, for which we have none. In this case, no <a> should
    # be used since we have nothing to link.
    url = rows[3].xpath("./td")[0]
    assert url.text.strip() == str(pkg.URL)

    # Check Keywords, which should be empty.
    keywords = rows[4].xpath("./td/form/div/input")[0]
    assert keywords.attrib["value"] == str()

    i = 4
    licenses = pkg.package_licenses.all()
    if licenses:
        i += 1
        expected = ", ".join([p.License.Name for p in licenses])
        license_markup = rows[i].xpath("./td")[0]
        assert license_markup.text.strip() == expected
    else:
        assert "Licenses" not in content

    provides = pkg.package_relations.filter(
        PackageRelation.RelTypeID == PROVIDES_ID).all()
    if provides:
        i += 1
        expected = ", ".join([p.RelName for p in provides])
        provides_markup = rows[i].xpath("./td")[0]
        assert provides_markup.text.strip() == expected
    else:
        assert "Provides" not in content

    replaces = pkg.package_relations.filter(
        PackageRelation.RelTypeID == REPLACES_ID).all()
    if replaces:
        i += 1
        expected = ", ".join([r.RelName for r in replaces])
        replaces_markup = rows[i].xpath("./td")[0]
        assert replaces_markup.text.strip() == expected
    else:
        assert "Replaces" not in content

    # Check Submitter.
    selector = "./td" if not pkg.PackageBase.Submitter else "./td/a"
    i += 1
    submitter = rows[i].xpath(selector)[0]
    assert submitter.text.strip() == str(pkg.PackageBase.Submitter)

    # Check Maintainer.
    selector = "./td" if not pkg.PackageBase.Maintainer else "./td/a"
    i += 1
    maintainer = rows[i].xpath(selector)[0]
    assert maintainer.text.strip() == str(pkg.PackageBase.Maintainer)

    # Check Packager.
    selector = "./td" if not pkg.PackageBase.Packager else "./td/a"
    i += 1
    packager = rows[i].xpath(selector)[0]
    assert packager.text.strip() == str(pkg.PackageBase.Packager)

    # Check Votes.
    i += 1
    votes = rows[i].xpath("./td")[0]
    assert votes.text.strip() == str(pkg.PackageBase.NumVotes)

    # Check Popularity; for this package, a number_format of 6 places is used.
    i += 1
    pop = rows[i].xpath("./td")[0]
    assert pop.text.strip() == number_format(0, 6)

    # Check First Submitted.
    date_fmt = "%Y-%m-%d %H:%M (%Z)"
    i += 1
    first_submitted = rows[i].xpath("./td")[0]
    converted_dt = as_timezone(to_dt(pkg.PackageBase.SubmittedTS), "UTC")
    expected = converted_dt.strftime(date_fmt)
    assert first_submitted.text.strip() == expected

    # Check Last Updated.
    i += 1
    last_updated = rows[i].xpath("./td")[0]
    converted_dt = as_timezone(to_dt(pkg.PackageBase.ModifiedTS), "UTC")
    expected = converted_dt.strftime(date_fmt)
    assert last_updated.text.strip() == expected
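

# A minimal usage sketch for check_package_details (hypothetical, not part of
# the suite above): it assumes a `package` fixture like the one used in the
# dashboard tests and that the package details page is served at
# /packages/{name}.
#
#     def test_package_details(client: TestClient, package: Package):
#         with client as request:
#             resp = request.get(f"/packages/{package.Name}")
#         assert resp.status_code == int(HTTPStatus.OK)
#         check_package_details(resp.text, package)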