def invalid_username(request: Request = None, U: str = str(),
                     _: l10n.Translator = None, **kwargs) -> None:
    """Raise a ValidationError when `U` is not a valid username.

    :param request: Incoming request (unused here)
    :param U: Username candidate to validate
    :param _: Translator used to localize the length message
    :raises ValidationError: When util.valid_username(U) is False
    """
    # Valid usernames need no further work; bail out early.
    if util.valid_username(U):
        return

    min_len = config.getint("options", "username_min_len")
    max_len = config.getint("options", "username_max_len")
    raise ValidationError([
        "The username is invalid.",
        [
            _("It must be between %s and %s characters long")
            % (min_len, max_len),
            "Start and end with a letter or number",
            "Can contain only one period, underscore or hyphen.",
        ]
    ])
def test_request_post_orphan_autogenerated_closure(
        client: TestClient, tu_user: User, pkgbase: PackageBase,
        pkgreq: PackageRequest):
    """An orphan request past its idle time is auto-closed on disown."""
    now = time.utcnow()
    idle_time = config.getint("options", "request_idle_time")
    with db.begin():
        pkgreq.ReqTypeID = ORPHAN_ID
        # Set the request time so it's seen as due (idle_time has passed).
        pkgreq.RequestTS = now - idle_time - 10

    endpoint = f"/pkgbase/{pkgbase.Name}/disown"
    payload = {"confirm": True}
    with client as request:
        response = request.post(endpoint, data=payload,
                                cookies=tu_user.cookies)
    assert response.status_code == int(HTTPStatus.SEE_OTHER)
    assert response.headers.get("location") == f"/pkgbase/{pkgbase.Name}"

    # Exactly one email: the autogenerated acceptance notification.
    assert Email.count() == 1
    email = Email(1)

    subject_pattern = r"^\[PRQ#\d+\] Orphan Request for .+ Accepted$"
    assert re.match(subject_pattern, email.headers.get("Subject"))

    body_pattern = r"\[Autogenerated\] Accepted orphan for .+\."
    assert re.search(body_pattern, email.body)
def timeout(extended: bool) -> int:
    """
    Produce a session timeout based on `remember_me`.

    This method returns one of AUR_CONFIG's
    options.persistent_cookie_timeout and options.login_timeout
    based on the `extended` argument.

    The `extended` argument is typically the value of the AURREMEMBER
    cookie, defaulted to False.

    If `extended` is False, options.login_timeout is returned. Otherwise,
    if `extended` is True, options.persistent_cookie_timeout is returned.

    :param extended: Flag which generates an extended timeout when True
    :returns: Cookie timeout based on configuration options
    """
    if bool(extended):
        return config.getint("options", "persistent_cookie_timeout")
    return config.getint("options", "login_timeout")
def voteinfo(user: User) -> TUVoteInfo:
    """Yield a TUVoteInfo whose End lands just past the reminder range start."""
    start = config.getint("tuvotereminder", "range_start")
    now = time.utcnow()
    with db.begin():
        record = db.create(TUVoteInfo, Agenda="Lorem ipsum.",
                           User=user.Username, End=(now + start + 1),
                           Quorum=0.00, Submitter=user, Submitted=0)
    yield record
def verify_orphan_request(user: User, pkgbase: PackageBase):
    """Return True when `pkgbase` has a due orphan request.

    A request is due when it is still PENDING and more than
    options.request_idle_time seconds have elapsed since RequestTS.
    """
    orphan_requests = pkgbase.requests.filter(
        PackageRequest.ReqTypeID == ORPHAN_ID)
    for req in orphan_requests:
        idle_time = config.getint("options", "request_idle_time")
        elapsed = time.utcnow() - req.RequestTS
        if req.Status == PENDING_ID and elapsed > idle_time:
            # Found a pending request past its idle window: it is due.
            return True
    return False
def test_config_main_set_real(tmpdir: py.path.local): """ Test a real set_option path. """ # Copy AUR_CONFIG to {tmpdir}/aur.config. aur_config = os.environ.get("AUR_CONFIG") tmp_aur_config = os.path.join(str(tmpdir), "aur.config") with open(aur_config) as f: with open(tmp_aur_config, "w") as o: o.write(f.read()) # Force reset the parser. This should NOT be done publicly. config._parser = None value = 666 args = ["aurweb-config", "set", "options", "fake-key", str(value)] with mock.patch.dict("os.environ", {"AUR_CONFIG": tmp_aur_config}): with mock.patch("sys.argv", args): # Run aurweb.config.main(). main() # Update the config; fake-key should be set. config.rehash() assert config.getint("options", "fake-key") == 666 # Restore config back to normal. args = ["aurweb-config", "unset", "options", "fake-key"] with mock.patch("sys.argv", args): main() # Return the config back to normal. config.rehash() # fake-key should no longer exist. assert config.getint("options", "fake-key") is None
def invalid_password(P: str = str(), C: str = str(),
                     _: l10n.Translator = None, **kwargs) -> None:
    """Validate a new password and its confirmation copy.

    :param P: The new password
    :param C: Confirmation copy of the password
    :param _: Translator used to localize the length message
    :raises ValidationError: When P is invalid, C is missing, or P != C
    """
    if P:
        if not util.valid_password(P):
            # BUGFIX: the length reported to the user must come from
            # options.passwd_min_len (the key the password check is
            # configured by), not options.username_min_len.
            passwd_min_len = config.getint("options", "passwd_min_len")
            raise ValidationError([
                _("Your password must be at least %s characters.")
                % (passwd_min_len)
            ])
        elif not C:
            raise ValidationError(["Please confirm your new password."])
        elif P != C:
            raise ValidationError(["Password fields do not match."])
async def test_expired_session(backend: BasicAuthBackend, user: User):
    """ Login, expire the session manually, then authenticate. """
    # Build a Request carrying a logged-in user and its session cookie.
    request = Request()
    request.user = user
    session_id = request.user.login(Request(), "testPassword")
    request.cookies["AURSID"] = session_id

    # Push Session.LastUpdateTS 20 seconds past the login timeout.
    login_timeout = config.getint("options", "login_timeout")
    now_ts = time.utcnow()
    with db.begin():
        request.user.session.LastUpdateTS = now_ts - login_timeout - 20

    # Authenticating against an expired session deletes it.
    await backend.authenticate(request)
    remaining = db.query(Session).filter(
        Session.SessionID == session_id).first()
    assert remaining is None
def test_request_post_deletion_autoaccept(
        client: TestClient, auser: User, pkgbase: PackageBase,
        caplog: pytest.LogCaptureFixture):
    """ Test the request route for deletion as maintainer. """
    caplog.set_level(DEBUG)

    auto_delete_age = config.getint("options", "auto_delete_age")
    now = time.utcnow()
    with db.begin():
        pkgbase.ModifiedTS = now - auto_delete_age + 100

    endpoint = f"/pkgbase/{pkgbase.Name}/request"
    payload = {"comments": "Test request.", "type": "deletion"}
    with client as request:
        response = request.post(endpoint, data=payload,
                                cookies=auser.cookies)
    assert response.status_code == int(HTTPStatus.SEE_OTHER)

    pkgreq = db.query(PackageRequest).filter(
        PackageRequest.PackageBaseName == pkgbase.Name).first()
    assert pkgreq is not None
    assert pkgreq.ReqTypeID == DELETION_ID
    assert pkgreq.Status == ACCEPTED_ID

    # A RequestOpenNotification should've been sent out.
    assert Email.count() == 2
    Email.dump()

    # Check the content of the open notification.
    open_email = Email(1)
    pattern = r"^\[PRQ#%d\] Deletion Request for [^ ]+$" % pkgreq.ID
    assert re.match(pattern, open_email.headers.get("Subject"))

    # Check the content of the close notification.
    close_email = Email(2)
    pattern = r"^\[PRQ#%d\] Deletion Request for [^ ]+ Accepted$" % pkgreq.ID
    assert re.match(pattern, close_email.headers.get("Subject"))

    # Check logs.
    pattern = r"New request #\d+ is marked for auto-deletion."
    assert re.search(pattern, caplog.text)
def test_request_post_orphan_autoaccept(
        client: TestClient, auser: User, pkgbase: PackageBase,
        caplog: pytest.LogCaptureFixture):
    """ Test the standard pkgbase request route GET method. """
    caplog.set_level(DEBUG)

    auto_orphan_age = config.getint("options", "auto_orphan_age")
    now = time.utcnow()
    with db.begin():
        pkgbase.OutOfDateTS = now - auto_orphan_age - 100

    endpoint = f"/pkgbase/{pkgbase.Name}/request"
    payload = {
        "type": "orphan",
        "comments": "Test request.",
    }
    with client as request:
        response = request.post(endpoint, data=payload,
                                cookies=auser.cookies)
    assert response.status_code == int(HTTPStatus.SEE_OTHER)

    pkgreq = pkgbase.requests.first()
    assert pkgreq is not None
    assert pkgreq.ReqTypeID == ORPHAN_ID

    # A Request(Open|Close)Notification should've been sent out.
    assert Email.count() == 2

    # Check the first email; should be our open request.
    open_email = Email(1)
    pattern = r"^\[PRQ#%d\] Orphan Request for [^ ]+$" % pkgreq.ID
    assert re.match(pattern, open_email.headers.get("Subject"))

    # And the second should be the automated closure.
    close_email = Email(2)
    pattern = r"^\[PRQ#%d\] Orphan Request for [^ ]+ Accepted$" % pkgreq.ID
    assert re.match(pattern, close_email.headers.get("Subject"))

    # Check logs.
    pattern = r"New request #\d+ is marked for auto-orphan."
    assert re.search(pattern, caplog.text)
def _handle_search_type(self, by: str = defaults.RPC_SEARCH_BY,
                        args: List[str] = None) -> List[Dict[str, Any]]:
    """Produce JSON data for a search-type RPC request.

    :param by: Search-by keyword (defaults to defaults.RPC_SEARCH_BY)
    :param args: Search arguments; only the first one is used
    :returns: List of JSON-serializable package data dicts
    :raises RPCError: On missing args, a too-short arg, or too many results
    """
    # FIX: the default used to be a shared mutable list (`args=[]`);
    # use a None sentinel instead. Behavior is unchanged for callers.
    if args is None:
        args = []

    # If `by` isn't maintainer and we don't have any args, raise an error.
    # In maintainer's case, return all orphans if there are no args,
    # so we need args to pass through to the handler without errors.
    if by != "m" and not len(args):
        raise RPCError("No request type/data specified.")

    arg = args[0] if args else str()
    if by != "m" and len(arg) < 2:
        raise RPCError("Query arg too small.")

    search = RPCSearch()
    search.search_by(by, arg)

    # Fetch one row beyond the cap so we can detect overflow.
    max_results = config.getint("options", "max_rpc_results")
    results = self._entities(search.results()).limit(max_results + 1).all()

    if len(results) > max_results:
        raise RPCError("Too many package results.")

    return self._assemble_json_data(results, self._get_json_data)
def test_orphan_request(client: TestClient, user: User, tu_user: User,
                        pkgbase: PackageBase, pkgreq: PackageRequest):
    """ Test the standard orphan request route. """
    comaintainer = create_user("user2", "*****@*****.**")
    with db.begin():
        db.create(PackageComaintainer, User=comaintainer,
                  PackageBase=pkgbase, Priority=1)

    now = time.utcnow()
    idle_time = config.getint("options", "request_idle_time")
    with db.begin():
        pkgreq.ReqTypeID = ORPHAN_ID
        # Set the request time so it's seen as due (idle_time has passed).
        pkgreq.RequestTS = now - idle_time - 10

    endpoint = f"/pkgbase/{pkgbase.Name}/disown"
    closure_comment = "Test orphan closure."
    payload = {"comments": closure_comment, "confirm": True}
    with client as request:
        response = request.post(endpoint, data=payload,
                                cookies=tu_user.cookies)
    assert response.status_code == int(HTTPStatus.SEE_OTHER)
    assert response.headers.get("location") == f"/pkgbase/{pkgbase.Name}"

    # We should have unset the maintainer.
    assert pkgbase.Maintainer is None

    # We should have removed the comaintainers.
    assert not pkgbase.comaintainers.all()

    # Ensure that `pkgreq`.ClosureComment was left alone when specified.
    assert pkgreq.ClosureComment == closure_comment

    # Check the email we expect.
    assert Email.count() == 1
    email = Email(1).parse()
    subject = r"^\[PRQ#%d\] Orphan Request for [^ ]+ Accepted$" % pkgreq.ID
    assert re.match(subject, email.headers.get("Subject"))
def _handle_multiinfo_type(self, args: List[str] = [], **kwargs) \
        -> List[Dict[str, Any]]:
    """Produce JSON data for a multiinfo RPC request.

    Resolves the named packages (left-joined with their maintainer),
    then collects dependencies, relations, groups, licenses and keywords
    in one unioned query, storing them in `self.extra_info` keyed by
    package ID for use by the info serializer.

    :param args: Package names to look up
    :raises RPCError: When more than options.max_rpc_results match
    :returns: List of JSON-serializable package info dicts
    """
    self._enforce_args(args)
    # NOTE(review): `args` has a mutable default list, but it is only
    # rebound (never mutated), so the shared default is harmless here.
    args = set(args)

    packages = db.query(models.Package).join(models.PackageBase).join(
        models.User,
        models.User.ID == models.PackageBase.MaintainerUID,
        isouter=True
    ).filter(models.Package.Name.in_(args))

    # Fetch one row beyond the cap so overflow can be detected.
    max_results = config.getint("options", "max_rpc_results")
    packages = self._entities(packages).limit(max_results + 1)

    if packages.count() > max_results:
        raise RPCError("Too many package results.")

    ids = {pkg.ID for pkg in packages}

    # Aliases for 80-width.
    Package = models.Package
    PackageKeyword = models.PackageKeyword

    # Each subquery yields (ID, Type, Name, Cond) rows so they can be
    # unioned into one result set below.
    subqueries = [
        # PackageDependency
        db.query(
            models.PackageDependency
        ).join(models.DependencyType).filter(
            models.PackageDependency.PackageID.in_(ids)
        ).with_entities(
            models.PackageDependency.PackageID.label("ID"),
            models.DependencyType.Name.label("Type"),
            models.PackageDependency.DepName.label("Name"),
            models.PackageDependency.DepCondition.label("Cond")
        ).distinct().order_by("Name"),

        # PackageRelation
        db.query(
            models.PackageRelation
        ).join(models.RelationType).filter(
            models.PackageRelation.PackageID.in_(ids)
        ).with_entities(
            models.PackageRelation.PackageID.label("ID"),
            models.RelationType.Name.label("Type"),
            models.PackageRelation.RelName.label("Name"),
            models.PackageRelation.RelCondition.label("Cond")
        ).distinct().order_by("Name"),

        # Groups
        db.query(models.PackageGroup).join(
            models.Group,
            and_(models.PackageGroup.GroupID == models.Group.ID,
                 models.PackageGroup.PackageID.in_(ids))
        ).with_entities(
            models.PackageGroup.PackageID.label("ID"),
            literal("Groups").label("Type"),
            models.Group.Name.label("Name"),
            literal(str()).label("Cond")
        ).distinct().order_by("Name"),

        # Licenses
        db.query(models.PackageLicense).join(
            models.License,
            models.PackageLicense.LicenseID == models.License.ID
        ).filter(
            models.PackageLicense.PackageID.in_(ids)
        ).with_entities(
            models.PackageLicense.PackageID.label("ID"),
            literal("License").label("Type"),
            models.License.Name.label("Name"),
            literal(str()).label("Cond")
        ).distinct().order_by("Name"),

        # Keywords
        db.query(models.PackageKeyword).join(
            models.Package,
            and_(Package.PackageBaseID == PackageKeyword.PackageBaseID,
                 Package.ID.in_(ids))
        ).with_entities(
            models.Package.ID.label("ID"),
            literal("Keywords").label("Type"),
            models.PackageKeyword.Keyword.label("Name"),
            literal(str()).label("Cond")
        ).distinct().order_by("Name")
    ]

    # Union all subqueries together.
    query = subqueries[0].union_all(*subqueries[1:]).all()

    # Store our extra information in a class-wise dictionary,
    # which contains package id -> extra info dict mappings.
    self.extra_info = defaultdict(lambda: defaultdict(list))
    for record in query:
        type_ = TYPE_MAPPING.get(record.Type, record.Type)

        name = record.Name
        # Append the version condition (e.g. ">=1.0") when present.
        if record.Cond:
            name += record.Cond

        self.extra_info[record.ID][type_].append(name)

    return self._assemble_json_data(packages, self._get_info_json_data)
def config_getint(section: str, key: str) -> int:
    """Return the configured integer at (`section`, `key`)."""
    value = config.getint(section, key)
    return value
def mock_config_getint(section: str, key: str):
    """config.getint stand-in: fixed values for the ratelimit keys,
    deferring every other lookup to the real config."""
    overrides = {
        "request_limit": 4,
        "window_length": 100,
    }
    if key in overrides:
        return overrides[key]
    return config.getint(section, key)
async def pkgbase_request_post(request: Request, name: str,
                               type: str = Form(...),
                               merge_into: str = Form(default=None),
                               comments: str = Form(default=str()),
                               next: str = Form(default=str())):
    """Handle POST of a new package request (deletion, merge or orphan).

    Validates the form, creates the PackageRequest, sends an open
    notification, then auto-accepts the request when the auto-orphan or
    auto-delete conditions are met.
    """
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Create our render context.
    context = await make_variable_context(request, "Submit Request")
    context["pkgbase"] = pkgbase

    types = {"deletion": DELETION_ID, "merge": MERGE_ID, "orphan": ORPHAN_ID}

    if type not in types:
        # In the case that someone crafted a POST request with an invalid
        # type, just return them to the request form with BAD_REQUEST status.
        return render_template(request, "pkgbase/request.html", context,
                               status_code=HTTPStatus.BAD_REQUEST)

    try:
        validate.request(pkgbase, type, comments, merge_into, context)
    except ValidationError as exc:
        logger.error(f"Request Validation Error: {str(exc.data)}")
        context["errors"] = exc.data
        return render_template(request, "pkgbase/request.html", context)

    # All good. Create a new PackageRequest based on the given type.
    now = time.utcnow()
    with db.begin():
        pkgreq = db.create(PackageRequest, ReqTypeID=types.get(type),
                           User=request.user, RequestTS=now,
                           PackageBase=pkgbase,
                           PackageBaseName=pkgbase.Name,
                           MergeBaseName=merge_into,
                           Comments=comments, ClosureComment=str())

    # Prepare notification object.
    notif = notify.RequestOpenNotification(
        request.user.ID, pkgreq.ID, type,
        pkgreq.PackageBase.ID, merge_into=merge_into or None)

    # Send the notification now that we're out of the DB scope.
    notif.send()

    auto_orphan_age = config.getint("options", "auto_orphan_age")
    auto_delete_age = config.getint("options", "auto_delete_age")

    # Auto-orphan: the package has been flagged out-of-date for at least
    # auto_orphan_age seconds.
    ood_ts = pkgbase.OutOfDateTS or 0
    flagged = ood_ts and (now - ood_ts) >= auto_orphan_age
    is_maintainer = pkgbase.Maintainer == request.user
    # Auto-delete: maintainer-submitted deletion of a recently submitted
    # package (within auto_delete_age seconds of SubmittedTS).
    outdated = (now - pkgbase.SubmittedTS) <= auto_delete_age

    if type == "orphan" and flagged:
        # This request should be auto-accepted.
        with db.begin():
            pkgbase.Maintainer = None
            pkgreq.Status = ACCEPTED_ID
        notif = notify.RequestCloseNotification(
            request.user.ID, pkgreq.ID, pkgreq.status_display())
        notif.send()
        logger.debug(f"New request #{pkgreq.ID} is marked for auto-orphan.")
    elif type == "deletion" and is_maintainer and outdated:
        # This request should be auto-accepted.
        notifs = actions.pkgbase_delete_instance(
            request, pkgbase, comments=comments)
        util.apply_all(notifs, lambda n: n.send())
        logger.debug(f"New request #{pkgreq.ID} is marked for auto-deletion.")

    # Redirect the submitting user to /packages.
    return RedirectResponse("/packages", status_code=HTTPStatus.SEE_OTHER)
def test_getint():
    """config.getint returns the integer stored for a known option."""
    value = config.getint("options", "disable_http_login")
    assert value == 0
async def packages_get(request: Request, context: Dict[str, Any],
                       status_code: HTTPStatus = HTTPStatus.OK):
    """Render the package search/index page from request query params.

    Builds a PackageSearch from the K/SeB/SB/SO/outdated/submit query
    parameters, paginates with O/PP, and renders packages/index.html.
    """
    # Query parameters used in this request.
    context["q"] = dict(request.query_params)

    # Per page and offset.
    offset, per_page = util.sanitize_params(
        request.query_params.get("O", defaults.O),
        request.query_params.get("PP", defaults.PP))
    context["O"] = offset

    # Limit PP to options.max_search_results
    max_search_results = config.getint("options", "max_search_results")
    context["PP"] = per_page = min(per_page, max_search_results)

    # Query search by.
    search_by = context["SeB"] = request.query_params.get("SeB", "nd")

    # Query sort by.
    sort_by = request.query_params.get("SB", None)

    # Query sort order.
    sort_order = request.query_params.get("SO", None)

    # Apply ordering, limit and offset.
    search = PackageSearch(request.user)

    # For each keyword found in K, apply a search_by filter.
    # This means that for any sentences separated by spaces,
    # they are used as if they were ANDed.
    keywords = context["K"] = request.query_params.get("K", str())
    keywords = keywords.split(" ")
    if search_by == "k":
        # If we're searching by keywords, supply a set of keywords.
        search.search_by(search_by, set(keywords))
    else:
        for keyword in keywords:
            search.search_by(search_by, keyword)

    flagged = request.query_params.get("outdated", None)
    if flagged:
        # If outdated was given, set it up in the context.
        context["outdated"] = flagged

        # When outdated is set to "on," we filter records which do have
        # an OutOfDateTS. When it's set to "off," we filter out any which
        # do **not** have OutOfDateTS.
        criteria = None
        if flagged == "on":
            criteria = models.PackageBase.OutOfDateTS.isnot
        else:
            criteria = models.PackageBase.OutOfDateTS.is_

        # Apply the flag criteria to our PackageSearch.query.
        search.query = search.query.filter(criteria(None))

    submit = request.query_params.get("submit", "Go")
    if submit == "Orphans":
        # If the user clicked the "Orphans" button, we only want
        # orphaned packages.
        search.query = search.query.filter(
            models.PackageBase.MaintainerUID.is_(None))

    # Collect search result count here; we've applied our keywords.
    # Including more query operations below, like ordering, will
    # increase the amount of time required to collect a count.
    num_packages = search.count()

    # Apply user-specified sort column and ordering.
    search.sort_by(sort_by, sort_order)

    # Insert search results into the context.
    results = search.results().with_entities(
        models.Package.ID,
        models.Package.Name,
        models.Package.PackageBaseID,
        models.Package.Version,
        models.Package.Description,
        models.PackageBase.Popularity,
        models.PackageBase.NumVotes,
        models.PackageBase.OutOfDateTS,
        models.User.Username.label("Maintainer"),
        models.PackageVote.PackageBaseID.label("Voted"),
        models.PackageNotification.PackageBaseID.label("Notify")
    ).group_by(models.Package.Name)

    # Apply pagination last so the count above covers all matches.
    packages = results.limit(per_page).offset(offset)
    context["packages"] = packages
    context["packages_count"] = num_packages

    return render_template(request, "packages/index.html", context,
                           status_code=status_code)