Example #1
def _send_mail(from_addr, to_addr, body):
    """
    Send emails with smtplib. This is a lower-level function than send_mail().

    Args:
        from_addr (str): The e-mail address to use in the envelope from field.
        to_addr (str): The e-mail address to use in the envelope to field.
        body (str): The body of the e-mail.
    """
    smtp_server = config.get('smtp_server')
    if not smtp_server:
        log.info('Not sending email: No smtp_server defined')
        return
    smtp = None
    try:
        log.debug('Connecting to %s', smtp_server)
        smtp = smtplib.SMTP(smtp_server)
        smtp.sendmail(from_addr, [to_addr], body)
    except smtplib.SMTPRecipientsRefused as e:
        log.warning('"recipient refused" for %r, %r' % (to_addr, e))
    except Exception:
        log.exception('Unable to send mail')
    finally:
        if smtp:
            smtp.quit()
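A hypothetical usage sketch (the addresses are made up): callers hand _send_mail an envelope from/to pair plus a fully formatted message body, as the higher-level send_mail() in Example #3 does.

# Hypothetical usage: the caller supplies envelope addresses and a fully
# formatted message body; the SMTP host comes from the 'smtp_server' config.
body = '\r\n'.join([
    'From: updates@example.com',          # illustrative addresses
    'To: packager@example.com',
    'Subject: update pushed to testing',
    '',
    'Your update has been pushed to testing.',
])
_send_mail('updates@example.com', 'packager@example.com', body)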
Example #2
def waive_test_results(request):
    """
    Waive all blocking test results on a given update when gating is on.

    Args:
        request (pyramid.request): The current request.
    Returns:
        dict: A dictionary mapping the key "update" to the update.
    """
    update = request.validated['update']
    comment = request.validated.pop('comment', None)
    tests = request.validated.pop('tests', None)

    try:
        update.waive_test_results(request.user.name, comment, tests)
    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'request', str(e))
    except BodhiException as e:
        log.error("Failed to waive the test results: %s", e)
        request.errors.add('body', 'request', str(e))
    except Exception as e:
        log.exception("Unhandled exception in waive_test_results")
        request.errors.add('body', 'request', str(e))

    return dict(update=update)
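A hedged sketch of exercising this view with a mocked request (assuming the view lives in bodhi.server.services.updates; in production it is invoked through cornice rather than called directly):

# Hedged sketch: drive waive_test_results with a mocked request.
from unittest import mock

from bodhi.server.services.updates import waive_test_results  # assumed location

request = mock.Mock()
request.validated = {'update': mock.Mock(), 'comment': 'waiving', 'tests': None}
request.user.name = 'packager'

result = waive_test_results(request)
# The view delegates to the model and hands the update back in a dict.
result['update'].waive_test_results.assert_called_once_with('packager', 'waiving', None)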
Example #3
def send_mail(from_addr: str,
              to_addr: str,
              subject: str,
              body_text: str,
              headers: typing.Optional[dict] = None) -> None:
    """
    Send an e-mail.

    Args:
        from_addr: The address to use in the From: header.
        to_addr: The address to send the e-mail to.
        subject: The subject of the e-mail.
        body_text: The body of the e-mail to be sent.
        headers: A mapping of header fields to values to be included in the e-mail,
            if not None.
    """
    if not from_addr:
        from_addr = config.get('bodhi_email')
    if not from_addr:
        log.warning(
            'Unable to send mail: bodhi_email not defined in the config')
        return
    if to_addr in config.get('exclude_mail'):
        return

    msg = [f'From: {from_addr}', f'To: {to_addr}']
    if headers:
        for key, value in headers.items():
            msg.append(f'{key}: {value}')
    msg.append(f"X-Bodhi: {config.get('default_email_domain')}")
    msg += [f'Subject: {subject}', '', body_text]
    body = '\r\n'.join(msg)

    log.info('Sending mail to %s: %s', to_addr, subject)
    _send_mail(from_addr, to_addr, body)
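A hypothetical call (addresses and header values are made up), showing that the headers mapping becomes additional 'Key: value' lines ahead of the X-Bodhi header, the subject, and the body text:

# Hypothetical call: the headers mapping is folded into the message verbatim.
send_mail(
    from_addr='updates@example.com',
    to_addr='packager@example.com',
    subject='update pushed to testing',
    body_text='Your update has been pushed to testing.',
    headers={'X-Bodhi-Update-Type': 'bugfix'},   # illustrative header
)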
Example #4
def get_rpm_header(nvr, tries=0):
    """
    Get the rpm header for a given build.

    Args:
        nvr (basestring): The name-version-release string of the build you want headers for.
        tries (int): The number of attempts that have been made to retrieve the nvr so far. Defaults
            to 0.
    Returns:
        dict: A dictionary mapping RPM header names to their values, as returned by the Koji client.
    """
    tries += 1
    headers = [
        'name', 'summary', 'version', 'release', 'url', 'description',
        'changelogtime', 'changelogname', 'changelogtext',
    ]
    rpmID = nvr + '.src'
    koji_session = buildsys.get_session()
    try:
        result = koji_session.getRPMHeaders(rpmID=rpmID, headers=headers)
    except Exception as e:
        msg = "Failed %i times to get rpm header data from koji for %s:  %s"
        log.warning(msg % (tries, nvr, str(e)))
        if tries < 3:
            # Try again...
            return get_rpm_header(nvr, tries=tries)
        else:
            # Give up for good and re-raise the failure...
            raise

    if result:
        return result

    raise ValueError("No rpm headers found in koji for %r" % nvr)
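A hypothetical caller (the NVR is made up), illustrating the retry contract: up to three attempts against Koji, after which the Koji error is re-raised; ValueError if Koji simply has no headers for the NVR.

# Hypothetical caller: get_rpm_header retries failed Koji calls up to three
# times and raises ValueError when Koji returns nothing for the NVR.
try:
    headers = get_rpm_header('example-pkg-1.0-1.fc39')   # made-up NVR
except ValueError:
    log.warning('No such source RPM in Koji')
else:
    log.info('%s: %s', headers['name'], headers['summary'])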
Example #5
def exception_json_view(exc, request):
    """
    Return a JSON error response upon generic errors (404s, 403s, 500s, etc.).

    This is here to catch everything that isn't caught by our cornice error
    handlers.  When we do catch something, we transform it into a cornice
    Errors object and pass it to our nice cornice error handler.  That way, we
    can keep all the exception presentation and rendering in one place.

    Args:
        exc (Exception): The unhandled exception.
        request (pyramid.request.Request): The current request.
    Returns:
        bodhi.server.services.errors.json_handler: A pyramid.httpexceptions.HTTPError to be rendered
            to the user for the given exception.
    """
    errors = getattr(request, 'errors', [])
    status = getattr(exc, 'status_code', 500)

    if status not in (404, 403):
        log.exception("Error caught.  Handling JSON response.")
    else:
        log.warning(str(exc))

    if not len(errors):
        description = getattr(exc, 'explanation', None) or str(exc)

        errors = cornice.errors.Errors(status=status)
        errors.add('body',
                   description=description,
                   name=exc.__class__.__name__)
        request.errors = errors

    return bodhi.server.services.errors.json_handler(request)
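A hedged sketch of registering the view above as a catch-all Pyramid exception view; Bodhi's real wiring may differ, but Configurator.add_exception_view() has existed since Pyramid 1.8.

# Hedged sketch: register exception_json_view for any otherwise unhandled
# exception when the client accepts JSON. Bodhi's actual registration may differ.
from pyramid.config import Configurator


def includeme(config: Configurator) -> None:
    config.add_exception_view(exception_json_view, context=Exception,
                              accept='application/json')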
Example #6
def get_critpath_components(collection='master', component_type='rpm', components=None):
    """
    Return a list of critical path packages for a given collection, filtered by components.

    Args:
        collection (str): The collection/branch to search. Defaults to 'master'.
        component_type (str): The component type to search for. This only affects PDC
            queries. Defaults to 'rpm'.
        components (frozenset or None): The list of components we are interested in. If None (the
            default), all components for the given collection and type are returned.
    Returns:
        list: The critpath components for the given collection and type.
    Raises:
        RuntimeError: If the PDC did not give us a 200 code.
    """
    critpath_components = []
    critpath_type = config.get('critpath.type')
    if critpath_type != 'pdc' and component_type != 'rpm':
        log.warning('The critpath.type of "{0}" does not support searching for'
                    ' non-RPM components'.format(critpath_type))

    if critpath_type == 'pdc':
        critpath_components = get_critpath_components_from_pdc(
            collection, component_type, components)
    else:
        critpath_components = config.get('critpath_pkgs')

    # Filter the list of components down to what was requested, in case the specific path did
    # not take our request into account.
    if components is not None:
        critpath_components = [c for c in critpath_components if c in components]

    return critpath_components
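A hypothetical call (branch and package names are made up): whichever backend supplies the list, only components also present in the components filter are returned.

# Hypothetical call: the result is filtered down to the requested components.
interesting = frozenset(['kernel', 'glibc', 'systemd'])
critpath = get_critpath_components(
    collection='f39', component_type='rpm', components=interesting)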
Example #7
def get_critpath_components(collection='master', component_type='rpm'):
    """
    Return a list of critical path packages for a given collection.

    Args:
        collection (basestring): The collection/branch to search. Defaults to 'master'.
        component_type (basestring): The component type to search for. This only affects PDC
            queries. Defaults to 'rpm'.
    Returns:
        list: The critpath components for the given collection and type.
    """
    critpath_components = []
    critpath_type = config.get('critpath.type')
    if critpath_type != 'pdc' and component_type != 'rpm':
        log.warning('The critpath.type of "{0}" does not support searching for'
                    ' non-RPM components'.format(critpath_type))

    if critpath_type == 'pkgdb':
        from pkgdb2client import PkgDB
        pkgdb = PkgDB(config.get('pkgdb_url'))
        results = pkgdb.get_critpath_packages(branches=collection)
        if collection in results['pkgs']:
            critpath_components = results['pkgs'][collection]
    elif critpath_type == 'pdc':
        critpath_components = get_critpath_components_from_pdc(
            collection, component_type)
    else:
        critpath_components = config.get('critpath_pkgs')
    return critpath_components
Example #8
def remember_me(context: 'mako.runtime.Context', request: 'pyramid.request.Request',
                info: dict, *args, **kw) -> HTTPFound:
    """
    Remember information about a newly logged in user given by the OpenID provider.

    This is configured via the openid.success_callback configuration, and is called upon successful
    login.

    Args:
        context: The current template rendering context. Unused.
        request: The current request.
        info: The information passed to Bodhi from the OpenID provider about the
            authenticated user. This includes things like the user's username, e-mail address and
            groups.
        args: A list of additional positional parameters. Unused.
        kw: A dictionary of additional keyword parameters. Unused.
    Returns:
        A 302 redirect to the URL the user was visiting before
            they clicked login, or home if they have not used a valid OpenID provider.
    """
    log.debug('remember_me(%s)' % locals())
    log.debug('remember_me: request.params = %r' % request.params)
    endpoint = request.params['openid.op_endpoint']
    if endpoint != request.registry.settings['openid.provider']:
        log.warning('Invalid OpenID provider: %s' % endpoint)
        raise HTTPUnauthorized(
            'Invalid OpenID provider. You can only use: %s' %
            request.registry.settings['openid.provider']
        )

    username = info['sreg']['nickname']
    email = info['sreg']['email']
    log.debug('remember_me: groups = %s' % info['groups'])
    log.info('%s successfully logged in' % username)

    create_or_update_user(request.db, username, email, info["groups"])

    headers = remember(request, username)

    response = get_final_redirect(request)
    response.headerlist.extend(headers)
    return response
Example #9
def authorize_oidc(request: 'pyramid.request.Request'):
    """Verify the response from the OpenID Connect provider and log the user in.

    Args:
        request (pyramid.request.Request): The Pyramid request.

    Returns:
        pyramid.response.Response: A redirection to the previously visited page.
    """
    # After user confirmed on Fedora authorization page, it will redirect back to Bodhi to
    # authorize. In this route, we get the user's profile information, store the user
    # information in the database, mark the user as logged in, etc.
    try:
        token = request.registry.oidc.fedora.authorize_access_token(request)
    except OAuthError as e:
        log.warning(f"OIDC authorization failed: {e}")
        raise HTTPUnauthorized(f'Authentication failed: {e.description}')
    response = get_final_redirect(request)
    get_and_store_user(request, token["access_token"], response)
    return response
Example #10
def send_mail(from_addr, to_addr, subject, body_text, headers=None):
    """
    Send an e-mail.

    Args:
        from_addr (basestring): The address to use in the From: header.
        to_addr (basestring): The address to send the e-mail to.
        subject (basestring): The subject of the e-mail.
        body_text (basestring): The body of the e-mail to be sent.
        headers (dict or None): A mapping of header fields to values to be included in the e-mail,
            if not None.
    """
    if not from_addr:
        from_addr = config.get('bodhi_email')
    if not from_addr:
        log.warning(
            'Unable to send mail: bodhi_email not defined in the config')
        return
    if to_addr in config.get('exclude_mail'):
        return

    from_addr = to_bytes(from_addr)
    to_addr = to_bytes(to_addr)
    subject = to_bytes(subject)
    body_text = to_bytes(body_text)

    msg = [b'From: %s' % from_addr, b'To: %s' % to_addr]
    if headers:
        for key, value in headers.items():
            msg.append(b'%s: %s' % (to_bytes(key), to_bytes(value)))
    msg.append(b'X-Bodhi: %s' % to_bytes(config.get('default_email_domain')))
    msg += [b'Subject: %s' % subject, b'', body_text]
    body = b'\r\n'.join(msg)

    log.info('Sending mail to %s: %s', to_addr, subject)
    _send_mail(from_addr, to_addr, body)
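Example #10 is the older Python 2-era variant of Example #3 and leans on a to_bytes() helper that is not shown here (historically kitchen.text.converters.to_bytes filled this role). A minimal, hypothetical stand-in:

# Hypothetical minimal stand-in for the to_bytes() helper assumed above.
def to_bytes(value, encoding='utf-8'):
    """Return value encoded as bytes, leaving byte strings untouched."""
    if isinstance(value, bytes):
        return value
    return str(value).encode(encoding)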
Example #11
def new_update(request):
    """
    Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's alias must be specified in
    the ``edited`` parameter.

    If the ``from_tag`` parameter is specified and ``builds`` is missing or
    empty, the list of builds will be filled with the latest builds in this
    Koji tag. This is done by validate_from_tag() because the list of builds
    needs to be available in validate_acls().

    If the release is composed by Bodhi (i.e. a branched or stable release
    after the Bodhi activation point), ensure that the related tags
    ``from_tag``-pending-signing and ``from_tag``-testing exist, creating them
    in Koji if they do not. If the state of the release is not `pending`, add
    its pending-signing tag and remove it if it's a side tag.

    Args:
        request (pyramid.request): The current request.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    # Same here, but it can be missing.
    data.pop('builds_from_tag', None)
    data.pop('sidetag_owner', None)

    build_nvrs = data.get('builds', [])
    from_tag = data.get('from_tag')

    caveats = []
    try:

        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in build_nvrs:
            name, version, release = request.buildinfo[nvr]['nvr']

            package = Package.get_or_create(request.db, request.buildinfo[nvr])

            # Also figure out the build type and create the build if absent.
            build_class = ContentType.infer_content_class(
                base=Build, build=request.buildinfo[nvr]['info'])
            build = build_class.get(nvr)

            if build is None:
                log.debug("Adding nvr %s, type %r", nvr, build_class)
                build = build_class(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        # Disable manual updates for releases not composed by Bodhi
        # see #4058
        if not from_tag:
            for release in releases:
                if not release.composed_by_bodhi:
                    request.errors.add(
                        'body', 'builds',
                        "Cannot manually create updates for a Release which is not "
                        "composed by Bodhi.\nRead the 'Automatic updates' page in "
                        "Bodhi docs about this error.")
                    request.db.rollback()
                    return

        # We want to go ahead and commit the transaction now so that the Builds are in the database.
        # Otherwise, there will be a race condition between robosignatory signing the Builds and the
        # signed handler attempting to mark the builds as signed. When we lose that race, the signed
        # handler doesn't see the Builds in the database and gives up. After that, nothing will mark
        # the builds as signed.
        request.db.commit()

        # After we commit the transaction, we need to get the builds and releases again,
        # since they were tied to the previous session that has now been terminated.
        builds = []
        releases = set()
        for nvr in build_nvrs:
            # At this moment, we are sure the builds are in the database (that is what the commit
            # was for actually).
            build = Build.get(nvr)
            builds.append(build)
            releases.add(build.release)

        if data.get('edited'):

            log.info('Editing update: %s' % data['edited'])

            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            data['from_tag'] = from_tag
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases),
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release
                _data['from_tag'] = from_tag

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.alias)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)

            if from_tag:
                for u in updates:
                    builds = [b.nvr for b in u.builds]
                    if not u.release.composed_by_bodhi:
                        # Before the Bodhi activation point of a release, keep builds tagged
                        # with the side-tag and its associate tags.
                        side_tag_signing_pending = u.release.get_pending_signing_side_tag(
                            from_tag)
                        side_tag_testing_pending = u.release.get_pending_testing_side_tag(
                            from_tag)
                        handle_side_and_related_tags_task.delay(
                            builds=builds,
                            pending_signing_tag=side_tag_signing_pending,
                            from_tag=from_tag,
                            pending_testing_tag=side_tag_testing_pending)
                    else:
                        # After the Bodhi activation point of a release, add the pending-signing tag
                        # of the release to funnel the builds back into a normal workflow for a
                        # stable release.
                        pending_signing_tag = u.release.pending_signing_tag
                        candidate_tag = u.release.candidate_tag
                        handle_side_and_related_tags_task.delay(
                            builds=builds,
                            pending_signing_tag=pending_signing_tag,
                            from_tag=from_tag,
                            candidate_tag=candidate_tag)

    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds',
                           'Unable to create update.  %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result
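A standalone sketch of the per-release split this view performs when the submitted builds span multiple releases; Build here is a hypothetical stand-in for the SQLAlchemy model.

# Standalone sketch: group builds by release; each group would become its own
# update, matching the caveat emitted when len(releases) > 1.
from collections import namedtuple

Build = namedtuple('Build', ['nvr', 'release'])

builds = [
    Build('pkg-1.0-1.fc39', 'F39'),           # made-up NVRs and releases
    Build('pkg-1.0-1.fc40', 'F40'),
    Build('other-2.3-1.fc40', 'F40'),
]

by_release = {}
for build in builds:
    by_release.setdefault(build.release, []).append(build.nvr)

for release, nvrs in sorted(by_release.items()):
    print(release, nvrs)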
Example #12
def save_stack(request):
    """
    Save a stack.

    Args:
        request (pyramid.request): The current web request.
    Returns:
        dict: A dictionary with key "stack" that indexes the newly created Stack.
    """
    data = request.validated
    db = request.db
    user = User.get(request.user.name)

    # Fetch or create the stack
    stack = Stack.get(data['name'])
    if not stack:
        stack = Stack(name=data['name'], users=[user])
        db.add(stack)
        db.flush()

    if stack.users or stack.groups:
        if user in stack.users:
            log.info('%s is an owner of the %s stack', user.name, stack.name)
        else:
            for group in user.groups:
                if group in stack.groups:
                    log.info('%s is a member of the %s group', user.name, stack.name)
                    break
            else:
                log.warning('%s is not an owner of the %s stack',
                            user.name, stack.name)
                log.debug('owners = %s; groups = %s', stack.users, stack.groups)
                request.errors.add('body', 'name', '%s does not have privileges'
                                   ' to modify the %s stack' % (user.name, stack.name))
                request.errors.status = HTTPForbidden.code
                return

    # Update the stack description
    desc = data['description']
    if desc:
        stack.description = desc

    # Update the stack requirements
    # If the user passed in no value at all for requirements, then use
    # the site defaults.  If, however, the user passed in the empty string, we
    # assume they *really* mean no requirements, so we leave the value null.
    reqs = data['requirements']
    if reqs is None:
        stack.requirements = config.get('site_requirements')
    elif reqs:
        stack.requirements = reqs

    stack.update_relationship('users', User, data, db)
    stack.update_relationship('groups', Group, data, db)

    # We make a special case out of packages here, since when a package is
    # added to a stack, we want to give it the same requirements as the stack
    # has. See https://github.com/fedora-infra/bodhi/issues/101
    new, same, rem = stack.update_relationship('packages', Package, data, db)
    if stack.requirements:
        additional = list(tokenize(stack.requirements))

        for name in new:
            package = Package.get(name)
            original = package.requirements
            original = [] if not original else list(tokenize(original))
            package.requirements = " ".join(list(set(original + additional)))

    log.info('Saved %s stack', data['name'])
    notifications.publish(topic='stack.save', msg=dict(
        stack=stack, agent=user.name))

    return dict(stack=stack)
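A standalone sketch of the requirements merge applied to packages newly added to a stack; a plain split() stands in for bodhi's tokenize() helper, and the requirement names are illustrative.

# Standalone sketch: a package newly added to a stack keeps its own
# requirements plus the stack's (a set union, joined back into a string).
stack_requirements = 'dist.rpmdeplint dist.upgradepath'     # illustrative
package_requirements = 'dist.rpmdeplint'

original = package_requirements.split()
additional = stack_requirements.split()
print(' '.join(sorted(set(original + additional))))
# -> dist.rpmdeplint dist.upgradepath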
Example #13
def new_update(request):
    """
    Save an update.

    This entails either creating a new update, or editing an existing one. To
    edit an existing update, the update's alias must be specified in
    the ``edited`` parameter.

    Args:
        request (pyramid.request): The current request.
    """
    data = request.validated
    log.debug('validated = %s' % data)

    # This has already been validated at this point, but we need to ditch
    # it since the models don't care about a csrf argument.
    data.pop('csrf_token')

    caveats = []
    try:

        releases = set()
        builds = []

        # Create the Package and Build entities
        for nvr in data['builds']:
            name, version, release = request.buildinfo[nvr]['nvr']

            package = Package.get_or_create(request.buildinfo[nvr])

            # Also figure out the build type and create the build if absent.
            build_class = ContentType.infer_content_class(
                base=Build, build=request.buildinfo[nvr]['info'])
            build = build_class.get(nvr)

            if build is None:
                log.debug("Adding nvr %s, type %r", nvr, build_class)
                build = build_class(nvr=nvr, package=package)
                request.db.add(build)
                request.db.flush()

            build.package = package
            build.release = request.buildinfo[build.nvr]['release']
            builds.append(build)
            releases.add(request.buildinfo[build.nvr]['release'])

        # We want to go ahead and commit the transaction now so that the Builds are in the database.
        # Otherwise, there will be a race condition between robosignatory signing the Builds and the
        # signed handler attempting to mark the builds as signed. When we lose that race, the signed
        # handler doesn't see the Builds in the database and gives up. After that, nothing will mark
        # the builds as signed.
        request.db.commit()

        # After we commit the transaction, we need to get the builds and releases again, since they
        # were tied to the previous session that has now been terminated.
        builds = []
        releases = set()
        for nvr in data['builds']:
            # At this moment, we are sure the builds are in the database (that is what the commit
            # was for actually).
            build = Build.get(nvr)
            builds.append(build)
            releases.add(build.release)

        if data.get('edited'):

            log.info('Editing update: %s' % data['edited'])

            data['release'] = list(releases)[0]
            data['builds'] = [b.nvr for b in builds]
            result, _caveats = Update.edit(request, data)
            caveats.extend(_caveats)
        else:
            if len(releases) > 1:
                caveats.append({
                    'name': 'releases',
                    'description': 'Your update is being split '
                                   'into %i, one for each release.' % len(releases),
                })
            updates = []
            for release in releases:
                _data = copy.copy(data)  # Copy it because .new(..) mutates it
                _data['builds'] = [b for b in builds if b.release == release]
                _data['release'] = release

                log.info('Creating new update: %r' % _data['builds'])
                result, _caveats = Update.new(request, _data)
                log.debug('%s update created', result.alias)

                updates.append(result)
                caveats.extend(_caveats)

            if len(releases) > 1:
                result = dict(updates=updates)
    except LockedUpdateException as e:
        log.warning(str(e))
        request.errors.add('body', 'builds', "%s" % str(e))
        return
    except Exception as e:
        log.exception('Failed to create update')
        request.errors.add('body', 'builds',
                           'Unable to create update.  %s' % str(e))
        return

    # Obsolete older updates for three different cases...
    # editing an update, submitting a new single update, submitting multiple.

    if isinstance(result, dict):
        updates = result['updates']
    else:
        updates = [result]

    for update in updates:
        try:
            caveats.extend(update.obsolete_older_updates(request.db))
        except Exception as e:
            caveats.append({
                'name': 'update',
                'description': 'Problem obsoleting older updates: %s' % str(e),
            })

    if not isinstance(result, dict):
        result = result.__json__()

    result['caveats'] = caveats

    return result