Code example #1
async def pkgbase_delete_post(request: Request,
                              name: str,
                              confirm: bool = Form(default=False),
                              comments: str = Form(default=str()),
                              next: str = Form(default="/packages")):
    pkgbase = get_pkg_or_base(name, PackageBase)

    if not request.user.has_credential(creds.PKGBASE_DELETE):
        return RedirectResponse(f"/pkgbase/{name}",
                                status_code=HTTPStatus.SEE_OTHER)

    if not confirm:
        context = templates.make_context(request, "Package Deletion")
        context["pkgbase"] = pkgbase
        context["errors"] = [("The selected packages have not been deleted, "
                              "check the confirmation checkbox.")]
        return render_template(request,
                               "pkgbase/delete.html",
                               context,
                               status_code=HTTPStatus.BAD_REQUEST)

    if comments:
        # Update any existing deletion requests' ClosureComment.
        with db.begin():
            requests = pkgbase.requests.filter(
                and_(PackageRequest.Status == PENDING_ID,
                     PackageRequest.ReqTypeID == DELETION_ID))
            for pkgreq in requests:
                pkgreq.ClosureComment = comments

    notifs = actions.pkgbase_delete_instance(request,
                                             pkgbase,
                                             comments=comments)
    util.apply_all(notifs, lambda n: n.send())
    return RedirectResponse(next, status_code=HTTPStatus.SEE_OTHER)
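
Every example collected on this page routes its per-item work through util.apply_all, but the helper itself never appears here. The following is only a minimal sketch of what it plausibly does, inferred from how the examples call it: it applies a callable to each item and, judging by code examples #9 and #11 (which use its return value), hands the iterable back to the caller. The real definition in aurweb's util.py may differ.

def apply_all(iterable, fn):
    # Hypothetical sketch, not the verbatim aurweb implementation:
    # call `fn` once per item, then return the iterable so calls such
    # as `util.apply_all(users, db.refresh)` can be used inline.
    for item in iterable:
        fn(item)
    return iterable
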
Code example #2
File: util.py Project: Acidburn0zzz/aurweb
def add_comaintainers(request: Request, pkgbase: PackageBase,
                      usernames: List[str]) -> Optional[str]:
    """
    Add comaintainers to `pkgbase`.

    :param request: FastAPI request
    :param pkgbase: PackageBase instance
    :param usernames: Iterable of username strings
    :return: Error string on failure else None
    """
    # For each username in usernames, perform validation of the username
    # and append the User record to `users` if no errors occur.
    users = []
    for username in usernames:
        user = db.query(User).filter(User.Username == username).first()
        if not user:
            _ = l10n.get_translator_for_request(request)
            return _("Invalid user name: %s") % username
        users.append(user)

    notifications = []

    def add_comaint(user: User):
        nonlocal notifications
        # Populate `notifications` with add_comaintainer's return value,
        # which is a ComaintainerAddNotification.
        notifications.append(add_comaintainer(pkgbase, user))

    # Add all `users` as new `pkgbase` comaintainers.
    util.apply_all(users, add_comaint)

    # Send out notifications.
    util.apply_all(notifications, lambda n: n.send())
Code example #3
File: actions.py Project: Acidburn0zzz/aurweb
def pkgbase_disown_instance(request: Request, pkgbase: PackageBase) -> None:
    disowner = request.user
    notifs = [notify.DisownNotification(disowner.ID, pkgbase.ID)]

    is_maint = disowner == pkgbase.Maintainer
    if is_maint:
        with db.begin():
            # Comaintainer with the lowest Priority value; next-in-line.
            prio_comaint = pkgbase.comaintainers.order_by(
                PackageComaintainer.Priority.asc()
            ).first()
            if prio_comaint:
                # If there is such a comaintainer, promote them to maint.
                pkgbase.Maintainer = prio_comaint.User
                notifs.append(pkgbaseutil.remove_comaintainer(prio_comaint))
            else:
                # Otherwise, just orphan the package completely.
                pkgbase.Maintainer = None
    elif request.user.has_credential(creds.PKGBASE_DISOWN):
        # Otherwise, the user performing this disown is a Trusted User,
        # so we treat it like a standard orphan request.
        notifs += handle_request(request, ORPHAN_ID, pkgbase)
        with db.begin():
            pkgbase.Maintainer = None
            db.delete_all(pkgbase.comaintainers)

    util.apply_all(notifs, lambda n: n.send())
Code example #4
File: asgi.py Project: Acidburn0zzz/aurweb
async def app_startup():
    # https://stackoverflow.com/questions/67054759/about-the-maximum-recursion-error-in-fastapi
    # Test failures have been observed by internal starlette code when
    # using starlette.testclient.TestClient. Research into the recursion
    # error has not turned up any course of action other than increasing
    # the recursion limit. For now, that is how
    # we handle the issue: an optional TEST_RECURSION_LIMIT env var
    # provided by the user. Docker uses .env's TEST_RECURSION_LIMIT
    # when running test suites.
    # TODO: Find a proper fix to this issue.
    recursion_limit = int(
        os.environ.get("TEST_RECURSION_LIMIT",
                       sys.getrecursionlimit() + 1000))
    sys.setrecursionlimit(recursion_limit)

    backend = aurweb.config.get("database", "backend")
    if backend not in aurweb.db.DRIVERS:
        raise ValueError(
            f"The configured database backend ({backend}) is unsupported. "
            f"Supported backends: {str(aurweb.db.DRIVERS.keys())}")

    session_secret = aurweb.config.get("fastapi", "session_secret")
    if not session_secret:
        raise Exception("[fastapi] session_secret must not be empty")

    if not os.environ.get("PROMETHEUS_MULTIPROC_DIR", None):
        logger.warning("$PROMETHEUS_MULTIPROC_DIR is not set, the /metrics "
                       "endpoint is disabled.")

    app.mount("/static/css",
              StaticFiles(directory="web/html/css"),
              name="static_css")
    app.mount("/static/js",
              StaticFiles(directory="web/html/js"),
              name="static_js")
    app.mount("/static/images",
              StaticFiles(directory="web/html/images"),
              name="static_images")

    # Add application middlewares.
    app.add_middleware(AuthenticationMiddleware, backend=BasicAuthBackend())
    app.add_middleware(SessionMiddleware, secret_key=session_secret)

    # Add application routes.
    def add_router(module):
        app.include_router(module.router)

    util.apply_all(APP_ROUTES, add_router)

    # Initialize the database engine and ORM.
    get_engine()
Code example #5
def _main(force: bool = False):
    blacklist = set()
    providers = set()
    repomap = dict()

    db_path = aurweb.config.get("aurblup", "db-path")
    sync_dbs = aurweb.config.get('aurblup', 'sync-dbs').split(' ')
    server = aurweb.config.get('aurblup', 'server')

    h = pyalpm.Handle("/", db_path)
    for sync_db in sync_dbs:
        repo = h.register_syncdb(sync_db, pyalpm.SIG_DATABASE_OPTIONAL)
        repo.servers = [server.replace("%s", sync_db)]
        t = h.init_transaction()
        repo.update(force)
        t.release()

        for pkg in repo.pkgcache:
            blacklist.add(pkg.name)
            util.apply_all(pkg.replaces, blacklist.add)
            providers.add((pkg.name, pkg.name))
            repomap[(pkg.name, pkg.name)] = repo.name
            for provision in pkg.provides:
                provisionname = re.sub(r'(<|=|>).*', '', provision)
                providers.add((pkg.name, provisionname))
                repomap[(pkg.name, provisionname)] = repo.name

    with db.begin():
        old_providers = set(
            db.query(OfficialProvider).with_entities(
                OfficialProvider.Name.label("Name"),
                OfficialProvider.Provides.label("Provides")
            ).distinct().order_by("Name").all()
        )

        for name, provides in old_providers.difference(providers):
            db.delete_all(db.query(OfficialProvider).filter(
                and_(OfficialProvider.Name == name,
                     OfficialProvider.Provides == provides)
            ))

        for name, provides in providers.difference(old_providers):
            repo = repomap.get((name, provides))
            db.create(OfficialProvider, Name=name,
                      Repo=repo, Provides=provides)
Code example #6
File: actions.py Project: Acidburn0zzz/aurweb
def pkgbase_merge_instance(request: Request, pkgbase: PackageBase,
                           target: PackageBase, comments: str = str()) -> None:
    pkgbasename = str(pkgbase.Name)

    # Create notifications.
    notifs = handle_request(request, MERGE_ID, pkgbase, target)

    # Collect the target's existing vote and notification user IDs so
    # that duplicates can be skipped during the migration below.
    target_votes = set(v.UsersID for v in target.package_votes)
    target_notifs = set(n.UserID for n in target.notifications)

    with db.begin():
        # Merge pkgbase's comments.
        for comment in pkgbase.comments:
            comment.PackageBase = target

        # Merge notifications that don't yet exist in the target.
        for notif in pkgbase.notifications:
            if notif.UserID not in target_notifs:
                notif.PackageBase = target

        # Merge votes that don't yet exist in the target.
        for vote in pkgbase.package_votes:
            if vote.UsersID not in target_votes:
                vote.PackageBase = target

    # Run popupdate.
    popupdate.run_single(target)

    with db.begin():
        # Delete pkgbase and its packages now that everything's merged.
        for pkg in pkgbase.packages:
            db.delete(pkg)
        db.delete(pkgbase)

    # Log this out for accountability purposes.
    logger.info(f"Trusted User '{request.user.Username}' merged "
                f"'{pkgbasename}' into '{target.Name}'.")

    # Send notifications.
    util.apply_all(notifs, lambda n: n.send())
Code example #7
File: packages.py Project: Acidburn0zzz/aurweb
async def packages_delete(request: Request,
                          package_ids: List[int] = [],
                          confirm: bool = False,
                          merge_into: str = str(),
                          **kwargs):
    if not package_ids:
        return (False, ["You did not select any packages to delete."])

    if not confirm:
        return (False, [
            "The selected packages have not been deleted, "
            "check the confirmation checkbox."
        ])

    if not request.user.has_credential(creds.PKGBASE_DELETE):
        return (False, ["You do not have permission to delete packages."])

    # set-ify package_ids and query the database for related records.
    package_ids = set(package_ids)
    packages = db.query(models.Package).filter(
        models.Package.ID.in_(package_ids)).all()

    if len(packages) != len(package_ids):
        # Let the user know there was an issue with their input: they have
        # provided at least one package_id which does not exist in the DB.
        # TODO: This error has not yet been translated.
        return (False, ["One of the packages you selected does not exist."])

    # Make a set out of all package bases related to `packages`.
    bases = {pkg.PackageBase for pkg in packages}
    deleted_bases, notifs = [], []
    for pkgbase in bases:
        deleted_bases.append(pkgbase.Name)
        notifs += pkgbase_actions.pkgbase_delete_instance(request, pkgbase)

    # Log out the fact that this happened for accountability.
    logger.info(f"Privileged user '{request.user.Username}' deleted the "
                f"following package bases: {str(deleted_bases)}.")

    util.apply_all(notifs, lambda n: n.send())
    return (True, ["The selected packages have been deleted."])
Code example #8
File: util.py Project: Acidburn0zzz/aurweb
def remove_comaintainers(pkgbase: PackageBase, usernames: List[str]) -> None:
    """
    Remove comaintainers from `pkgbase`.

    :param pkgbase: PackageBase instance
    :param usernames: Iterable of username strings
    """
    notifications = []
    with db.begin():
        comaintainers = pkgbase.comaintainers.join(User).filter(
            User.Username.in_(usernames)).all()
        notifications = [
            notify.ComaintainerRemoveNotification(co.User.ID, pkgbase.ID)
            for co in comaintainers
        ]
        db.delete_all(comaintainers)

    # Rotate comaintainer priority values.
    with db.begin():
        rotate_comaintainers(pkgbase)

    # Send out notifications.
    util.apply_all(notifications, lambda n: n.send())
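
remove_comaintainers then calls rotate_comaintainers to renumber the remaining Priority values, but that helper is not included on this page. A plausible sketch, assuming Priority is simply reassigned in ascending order (the column and its ordering come from code example #3; the actual aurweb implementation may differ):

def rotate_comaintainers(pkgbase: PackageBase) -> None:
    # Hypothetical sketch: walk the remaining comaintainers in their
    # current Priority order and renumber them 1..n so no gaps remain.
    comaintainers = pkgbase.comaintainers.order_by(
        PackageComaintainer.Priority.asc())
    for i, comaintainer in enumerate(comaintainers, start=1):
        comaintainer.Priority = i
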
Code example #9
File: accounts.py Project: Acidburn0zzz/aurweb
async def terms_of_service_post(request: Request,
                                accept: bool = Form(default=False)):
    # Query the database for terms that were previously accepted,
    # but now have a bumped Revision that needs to be accepted.
    diffs = db.query(models.Term).join(models.AcceptedTerm).filter(
        models.AcceptedTerm.Revision < models.Term.Revision).all()

    # Query the database for any terms that have not yet been accepted.
    unaccepted = db.query(models.Term).filter(
        ~models.Term.ID.in_(db.query(models.AcceptedTerm.TermsID))).all()

    if not accept:
        # Translate the 'Terms of Service' part of our page title.
        _ = l10n.get_translator_for_request(request)
        title = f"AUR {_('Terms of Service')}"
        context = await make_variable_context(request, title)

        # We already did the database filters here, so let's just use
        # them instead of reiterating the process in terms_of_service.
        accept_needed = sorted(unaccepted + diffs)
        return render_terms_of_service(
            request, context, util.apply_all(accept_needed, db.refresh))

    with db.begin():
        # For each term we found, query for the matching accepted term
        # and update its Revision to the term's current Revision.
        for term in diffs:
            db.refresh(term)
            accepted_term = request.user.accepted_terms.filter(
                models.AcceptedTerm.TermsID == term.ID).first()
            accepted_term.Revision = term.Revision

        # For each term that was never accepted, accept it!
        for term in unaccepted:
            db.refresh(term)
            db.create(models.AcceptedTerm, User=request.user,
                      Term=term, Revision=term.Revision)

    return RedirectResponse("/", status_code=HTTPStatus.SEE_OTHER)
Code example #10
def _main():
    archivedir = aurweb.config.get("mkpkglists", "archivedir")
    os.makedirs(archivedir, exist_ok=True)

    PACKAGES = aurweb.config.get('mkpkglists', 'packagesfile')
    META = aurweb.config.get('mkpkglists', 'packagesmetafile')
    META_EXT = aurweb.config.get('mkpkglists', 'packagesmetaextfile')
    PKGBASE = aurweb.config.get('mkpkglists', 'pkgbasefile')
    USERS = aurweb.config.get('mkpkglists', 'userfile')

    bench = Benchmark()
    logger.info("Started re-creating archives, wait a while...")

    query = db.query(Package).join(
        PackageBase, PackageBase.ID == Package.PackageBaseID).join(
            User, PackageBase.MaintainerUID == User.ID, isouter=True).filter(
                PackageBase.PackagerUID.isnot(None)).with_entities(
                    Package.ID, Package.Name,
                    PackageBase.ID.label("PackageBaseID"),
                    PackageBase.Name.label("PackageBase"), Package.Version,
                    Package.Description, Package.URL, PackageBase.NumVotes,
                    PackageBase.Popularity,
                    PackageBase.OutOfDateTS.label("OutOfDate"),
                    User.Username.label("Maintainer"),
                    PackageBase.SubmittedTS.label("FirstSubmitted"),
                    PackageBase.ModifiedTS.label(
                        "LastModified")).distinct().order_by("Name")

    # Produce packages-meta-v1.json.gz
    output = list()
    snapshot_uri = aurweb.config.get("options", "snapshot_uri")

    tmpdir = tempfile.mkdtemp()
    tmp_packages = os.path.join(tmpdir, os.path.basename(PACKAGES))
    tmp_meta = os.path.join(tmpdir, os.path.basename(META))
    tmp_metaext = os.path.join(tmpdir, os.path.basename(META_EXT))
    gzips = {
        "packages": gzip.open(tmp_packages, "wt"),
        "meta": gzip.open(tmp_meta, "wb"),
    }

    # Append list opening to the metafile.
    gzips["meta"].write(b"[\n")

    # Produce packages.gz + packages-meta-ext-v1.json.gz
    extended = False
    if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
        gzips["meta_ext"] = gzip.open(tmp_metaext, "wb")
        # Append list opening to the meta_ext file.
        gzips.get("meta_ext").write(b"[\n")
        f = EXTENDED_FIELD_HANDLERS.get(sys.argv[1])
        data = f()
        extended = True

    results = query.all()
    n = len(results) - 1
    for i, result in enumerate(results):
        # Append to packages.gz.
        gzips.get("packages").write(f"{result.Name}\n")

        # Construct our result JSON dictionary.
        item = as_dict(result)
        item["URLPath"] = snapshot_uri % result.Name

        # We stream out package json objects line by line, so
        # we also need to include the ',' character at the end
        # of package lines (excluding the last package).
        suffix = b",\n" if i < n else b'\n'

        # Write out to packagesmetafile
        output.append(item)
        gzips.get("meta").write(orjson.dumps(output[-1]) + suffix)

        if extended:
            # Write out to packagesmetaextfile.
            data_ = data.get(result.ID, {})
            output[-1].update(data_)
            gzips.get("meta_ext").write(orjson.dumps(output[-1]) + suffix)

    # Append the list closing to meta/meta_ext.
    gzips.get("meta").write(b"]")
    if extended:
        gzips.get("meta_ext").write(b"]")

    # Close gzip files.
    util.apply_all(gzips.values(), lambda gz: gz.close())

    # Produce pkgbase.gz
    query = db.query(PackageBase.Name).filter(
        PackageBase.PackagerUID.isnot(None)).all()
    tmp_pkgbase = os.path.join(tmpdir, os.path.basename(PKGBASE))
    with gzip.open(tmp_pkgbase, "wt") as f:
        f.writelines([f"{base.Name}\n" for i, base in enumerate(query)])

    # Produce users.gz
    query = db.query(User.Username).all()
    tmp_users = os.path.join(tmpdir, os.path.basename(USERS))
    with gzip.open(tmp_users, "wt") as f:
        f.writelines([f"{user.Username}\n" for i, user in enumerate(query)])

    files = [
        (tmp_packages, PACKAGES),
        (tmp_meta, META),
        (tmp_pkgbase, PKGBASE),
        (tmp_users, USERS),
    ]
    if len(sys.argv) > 1 and sys.argv[1] in EXTENDED_FIELD_HANDLERS:
        files.append((tmp_metaext, META_EXT))

    for src, dst in files:
        checksum = sha256sum(src)
        base = os.path.basename(src)
        checksum_formatted = f"SHA256 ({base}) = {checksum}"

        checksum_file = f"{dst}.sha256"
        with open(checksum_file, "w") as f:
            f.write(checksum_formatted)

        # Move the new archive into its rightful place.
        shutil.move(src, dst)

    os.removedirs(tmpdir)
    seconds = filters.number_format(bench.end(), 4)
    logger.info(f"Completed in {seconds} seconds.")
Code example #11
File: accounts.py Project: Acidburn0zzz/aurweb
async def accounts_post(request: Request,
                        O: int = Form(default=0),  # Offset
                        SB: str = Form(default=str()),  # Sort By
                        U: str = Form(default=str()),  # Username
                        T: str = Form(default=str()),  # Account Type
                        S: bool = Form(default=False),  # Suspended
                        E: str = Form(default=str()),  # Email
                        R: str = Form(default=str()),  # Real Name
                        I: str = Form(default=str()),  # IRC Nick
                        K: str = Form(default=str())):  # PGP Key
    context = await make_variable_context(request, "Accounts")
    context["pp"] = pp = 50  # Hits per page.

    offset = max(O, 0)  # Minimize offset at 0.
    context["offset"] = offset  # Offset.

    context["params"] = dict(await request.form())
    if "O" in context["params"]:
        context["params"].pop("O")

    # Set up order-by criteria based on SB.
    order_by_columns = {
        "t": (models.AccountType.ID.asc(), models.User.Username.asc()),
        "r": (models.User.RealName.asc(), models.AccountType.ID.asc()),
        "i": (models.User.IRCNick.asc(), models.AccountType.ID.asc()),
    }
    default_order = (models.User.Username.asc(), models.AccountType.ID.asc())
    order_by = order_by_columns.get(SB, default_order)

    # Convert parameter T to an AccountType ID.
    account_types = {
        "u": at.USER_ID,
        "t": at.TRUSTED_USER_ID,
        "d": at.DEVELOPER_ID,
        "td": at.TRUSTED_USER_AND_DEV_ID
    }
    account_type_id = account_types.get(T, None)

    # Get a query handle to users, populate the total user
    # count into a jinja2 context variable.
    query = db.query(models.User).join(models.AccountType)

    # Populate this list with any additional statements to
    # be ANDed together.
    statements = [
        v for k, v in [
            (account_type_id is not None, models.AccountType.ID == account_type_id),
            (bool(U), models.User.Username.like(f"%{U}%")),
            (bool(S), models.User.Suspended == S),
            (bool(E), models.User.Email.like(f"%{E}%")),
            (bool(R), models.User.RealName.like(f"%{R}%")),
            (bool(I), models.User.IRCNick.like(f"%{I}%")),
            (bool(K), models.User.PGPKey.like(f"%{K}%")),
        ] if k
    ]

    # Filter the query by combining all statements added above into
    # an AND statement, unless there's just one statement, which
    # we pass on to filter() as args.
    if statements:
        query = query.filter(and_(*statements))

    context["total_users"] = query.count()

    # Finally, order and truncate our users for the current page.
    users = query.order_by(*order_by).limit(pp).offset(offset).all()
    context["users"] = util.apply_all(users, db.refresh)

    return render_template(request, "account/index.html", context)
Code example #12
async def pkgbase_request_post(request: Request,
                               name: str,
                               type: str = Form(...),
                               merge_into: str = Form(default=None),
                               comments: str = Form(default=str()),
                               next: str = Form(default=str())):
    pkgbase = get_pkg_or_base(name, PackageBase)

    # Create our render context.
    context = await make_variable_context(request, "Submit Request")
    context["pkgbase"] = pkgbase

    types = {"deletion": DELETION_ID, "merge": MERGE_ID, "orphan": ORPHAN_ID}

    if type not in types:
        # In the case that someone crafted a POST request with an invalid
        # type, just return them to the request form with BAD_REQUEST status.
        return render_template(request,
                               "pkgbase/request.html",
                               context,
                               status_code=HTTPStatus.BAD_REQUEST)

    try:
        validate.request(pkgbase, type, comments, merge_into, context)
    except ValidationError as exc:
        logger.error(f"Request Validation Error: {str(exc.data)}")
        context["errors"] = exc.data
        return render_template(request, "pkgbase/request.html", context)

    # All good. Create a new PackageRequest based on the given type.
    now = time.utcnow()
    with db.begin():
        pkgreq = db.create(PackageRequest,
                           ReqTypeID=types.get(type),
                           User=request.user,
                           RequestTS=now,
                           PackageBase=pkgbase,
                           PackageBaseName=pkgbase.Name,
                           MergeBaseName=merge_into,
                           Comments=comments,
                           ClosureComment=str())

    # Prepare notification object.
    notif = notify.RequestOpenNotification(request.user.ID,
                                           pkgreq.ID,
                                           type,
                                           pkgreq.PackageBase.ID,
                                           merge_into=merge_into or None)

    # Send the notification now that we're out of the DB scope.
    notif.send()

    auto_orphan_age = config.getint("options", "auto_orphan_age")
    auto_delete_age = config.getint("options", "auto_delete_age")

    ood_ts = pkgbase.OutOfDateTS or 0
    flagged = ood_ts and (now - ood_ts) >= auto_orphan_age
    is_maintainer = pkgbase.Maintainer == request.user
    outdated = (now - pkgbase.SubmittedTS) <= auto_delete_age

    if type == "orphan" and flagged:
        # This request should be auto-accepted.
        with db.begin():
            pkgbase.Maintainer = None
            pkgreq.Status = ACCEPTED_ID
        notif = notify.RequestCloseNotification(request.user.ID, pkgreq.ID,
                                                pkgreq.status_display())
        notif.send()
        logger.debug(f"New request #{pkgreq.ID} is marked for auto-orphan.")
    elif type == "deletion" and is_maintainer and outdated:
        # This request should be auto-accepted.
        notifs = actions.pkgbase_delete_instance(request,
                                                 pkgbase,
                                                 comments=comments)
        util.apply_all(notifs, lambda n: n.send())
        logger.debug(f"New request #{pkgreq.ID} is marked for auto-deletion.")

    # Redirect the submitting user to /packages.
    return RedirectResponse("/packages", status_code=HTTPStatus.SEE_OTHER)
Code example #13
File: requests.py Project: Acidburn0zzz/aurweb
def handle_request(request: Request,
                   reqtype_id: int,
                   pkgbase: PackageBase,
                   target: PackageBase = None) -> List[notify.Notification]:
    """
    Handle package requests before performing an action.

    The actions we're interested in are disown (orphan), delete and
    merge. When a privileged user performs one of these actions without
    a pre-existing request, a request is now generated and closed
    automatically, with a closure notification sent. All of these actions
    commit changes to the database, so callers should verify state
    beforehand to avoid leaking database records for these requests.

    Otherwise, we accept and reject requests based on their state
    and send out the relevant notifications.

    :param request: FastAPI request
    :param reqtype_id: RequestType.ID
    :param pkgbase: PackageBase which the request is about
    :param target: Optional target to merge into
    """
    notifs: List[notify.Notification] = []

    # If it's an orphan request, perform further verification
    # regarding existing requests.
    if reqtype_id == ORPHAN_ID:
        if not verify_orphan_request(request.user, pkgbase):
            _ = l10n.get_translator_for_request(request)
            raise InvariantError(
                _("No due existing orphan requests to accept for %s.") %
                pkgbase.Name)

    # Produce a base query for requests related to `pkgbase`, based
    # on ReqTypeID matching `reqtype_id`, pending status and a correct
    # PackageBaseName column.
    query: orm.Query = pkgbase.requests.filter(
        and_(PackageRequest.ReqTypeID == reqtype_id,
             PackageRequest.Status == PENDING_ID,
             PackageRequest.PackageBaseName == pkgbase.Name))

    # Build a query for records we should accept. For merge requests,
    # this is specific to a matching MergeBaseName. For others, this
    # just ends up becoming `query`.
    accept_query: orm.Query = query
    if target:
        # If a `target` was supplied, filter by MergeBaseName
        accept_query = query.filter(
            PackageRequest.MergeBaseName == target.Name)

    # Build an accept list out of `accept_query`.
    to_accept: List[PackageRequest] = accept_query.all()
    accepted_ids: Set[int] = set(p.ID for p in to_accept)

    # Build a reject list out of `query` filtered by IDs not found
    # in `to_accept`. That is, unmatched records of the same base
    # query properties.
    to_reject: List[PackageRequest] = query.filter(
        ~PackageRequest.ID.in_(accepted_ids)).all()

    # If we have no requests to accept, create a new one.
    # This is done to increase tracking of actions occurring
    # through the website.
    if not to_accept:
        utcnow = time.utcnow()
        with db.begin():
            pkgreq = db.create(PackageRequest,
                               ReqTypeID=reqtype_id,
                               RequestTS=utcnow,
                               User=request.user,
                               PackageBase=pkgbase,
                               PackageBaseName=pkgbase.Name,
                               Comments="Autogenerated by aurweb.",
                               ClosureComment=str())

            # If it's a merge request, set MergeBaseName to `target`.Name.
            if pkgreq.ReqTypeID == MERGE_ID:
                pkgreq.MergeBaseName = target.Name

            # Add the new request to `to_accept` and allow standard
            # flow to continue afterward.
            to_accept.append(pkgreq)

    # Update requests with their new status and closures.
    with db.begin():
        util.apply_all(
            to_accept, lambda p: close_pkgreq(p, request.user, pkgbase, target,
                                              ACCEPTED_ID))
        util.apply_all(
            to_reject, lambda p: close_pkgreq(p, request.user, pkgbase, target,
                                              REJECTED_ID))

    # Create RequestCloseNotifications for all requests involved.
    for pkgreq in (to_accept + to_reject):
        notif = notify.RequestCloseNotification(request.user.ID, pkgreq.ID,
                                                pkgreq.status_display())
        notifs.append(notif)

    # Return notifications to the caller for sending.
    return notifs
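
handle_request closes each request through close_pkgreq, which is also not shown here. Based only on how it is called above (pkgreq, closer, pkgbase, target, status ID) and on the PackageRequest columns visible in the other examples (Status, PackageBaseName, MergeBaseName, ClosureComment), a hedged sketch could look like the following; the Closer and ClosedTS attribute names are assumptions, not something this page confirms:

def close_pkgreq(pkgreq: PackageRequest, closer: User,
                 pkgbase: PackageBase, target: PackageBase,
                 status: int) -> None:
    # Hypothetical sketch: mark the request closed with the given status.
    # Attribute names other than Status/PackageBaseName/MergeBaseName are
    # assumptions; adjust them to the real PackageRequest model.
    pkgreq.Status = status
    pkgreq.Closer = closer            # assumed relationship
    pkgreq.ClosedTS = time.utcnow()   # assumed timestamp column
    pkgreq.PackageBaseName = pkgbase.Name
    pkgreq.MergeBaseName = target.Name if target else str()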