Example #1
0
def test_exists_for_forbidden(accounts, s3_client, mocker):
    forbidden_error = ClientError({"Error": {"Code": "403"}}, None)
    mock_aws_api = mocker.patch("reconcile.utils.state.AWSApi", autospec=True)
    mock_aws_api.return_value \
        .get_session.return_value \
        .client.return_value \
        .head_object.side_effect = forbidden_error

    state = State("integration-name", accounts)

    with pytest.raises(StateInaccessibleException, match=r".*403.*"):
        state.exists("some-key")
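
The State tests in this set (Examples #1, #5 and #7) rely on accounts and s3_client pytest fixtures that are defined outside these snippets, typically in a conftest.py. Below is a minimal sketch of what they could look like, assuming moto provides the fake S3 backend; the moto import, the environment variable names and the account shape are assumptions, not taken from the examples.

import os

import boto3
import pytest
from moto import mock_s3  # assumption: moto < 5.x, which still exposes mock_s3


@pytest.fixture
def accounts():
    # assumed minimal account shape; only the name matters once AWSApi is mocked
    return [{"name": "some-account", "resourcesDefaultRegion": "us-east-1"}]


@pytest.fixture
def s3_client():
    # assumption: State resolves its bucket from these environment variables
    os.environ["APP_INTERFACE_STATE_BUCKET"] = "some-bucket"
    os.environ["APP_INTERFACE_STATE_BUCKET_ACCOUNT"] = "some-account"
    with mock_s3():
        yield boto3.client("s3", region_name="us-east-1")

With AWSApi patched as in the tests, State only ever sees the moto-backed some-bucket, which is why the key-existence tests create that bucket explicitly.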
def run(dry_run):
    accounts = queries.get_state_aws_accounts(reset_passwords=True)
    settings = queries.get_app_interface_settings()
    state = State(integration=QONTRACT_INTEGRATION,
                  accounts=accounts,
                  settings=settings)

    for a in accounts:
        aws_api = None
        account_name = a['name']
        reset_passwords = a.get('resetPasswords')
        if not reset_passwords:
            continue
        for r in reset_passwords:
            user_name = r['user']['org_username']
            request_id = r['requestId']
            state_key = f"{account_name}/{user_name}/{request_id}"
            if state.exists(state_key):
                continue

            logging.info(['reset_password', account_name, user_name])
            if dry_run:
                continue

            if aws_api is None:
                aws_api = AWSApi(1, [a], settings=settings)

            aws_api.reset_password(account_name, user_name)
            aws_api.reset_mfa(account_name, user_name)
            state.add(state_key)
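
The run() examples in this set all follow the same idempotency pattern around State: derive a deterministic key per item, skip items whose key is already recorded, honor dry_run, perform the side effect, and only then record the key. Here is a condensed sketch of that pattern, with the per-integration pieces passed in as callables; the helper itself is illustrative and not part of the codebase.

import logging


def reconcile_once(state, items, build_key, do_work, dry_run):
    # illustrative condensed form of the run() pattern; build_key and do_work
    # stand in for the per-integration key derivation and side effect
    for item in items:
        key = build_key(item)  # deterministic key, e.g. f"{account}/{user}/{request_id}"
        if state.exists(key):  # already handled on a previous run
            continue
        logging.info(["handle", key])
        if dry_run:  # report only; perform no side effects
            continue
        do_work(item)  # the actual side effect: mail, Slack message, AWS call
        state.add(key)  # record success so future runs skip this item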
Example #3
0
def run(dry_run):
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    users = queries.get_users()
    state = State(
        integration=QONTRACT_INTEGRATION,
        accounts=accounts,
        settings=settings
    )
    smtp_client = SmtpClient(settings=settings)
    mails = smtp_client.get_mails(
        criteria='SUBJECT "Sentry Access Request"',
        folder='[Gmail]/Sent Mail'
    )
    user_names = get_sentry_users_from_mails(mails)
    if not dry_run:
        slack = init_slack_workspace(QONTRACT_INTEGRATION,
                                     init_usergroups=False)
    for user_name in user_names:
        guesses = guess_user(user_name, users)
        if not guesses:
            logging.debug(f'no users guessed for {user_name}')
            continue
        slack_username = \
            guesses[0].get('slack_username') or guesses[0]['org_username']
        if state.exists(slack_username):
            continue
        logging.info(['help_user', slack_username])
        if not dry_run:
            state.add(slack_username)
            slack.chat_post_message(
                f'yo <@{slack_username}>! it appears that you have ' +
                'requested access to a project in Sentry. ' +
                'access is managed automatically via app-interface. '
                'checkout https://url.corp.redhat.com/sentry-help')
Example #4
0
def run(dry_run,
        thread_pool_size=10,
        internal=None,
        use_jump_host=True,
        defer=None):
    settings = queries.get_app_interface_settings()
    accounts = queries.get_state_aws_accounts()
    clusters = [c for c in queries.get_clusters(minimal=True) if c.get("ocm")]
    oc_map = OC_Map(
        clusters=clusters,
        integration=QONTRACT_INTEGRATION,
        settings=settings,
        internal=internal,
        use_jump_host=use_jump_host,
        thread_pool_size=thread_pool_size,
    )
    defer(oc_map.cleanup)
    state = State(integration=QONTRACT_INTEGRATION,
                  accounts=accounts,
                  settings=settings)

    if not dry_run:
        slack = slackapi_from_queries(QONTRACT_INTEGRATION)

    now = datetime.utcnow()
    for cluster in oc_map.clusters(include_errors=True):
        oc = oc_map.get(cluster)
        if not oc:
            logging.log(level=oc.log_level, msg=oc.message)
            continue
        upgrade_config = oc.get(
            namespace="openshift-managed-upgrade-operator",
            kind="UpgradeConfig",
            allow_not_found=True,
        )["items"]
        if not upgrade_config:
            logging.debug(f"[{cluster}] UpgradeConfig not found.")
            continue
        [upgrade_config] = upgrade_config

        upgrade_spec = upgrade_config["spec"]
        upgrade_at = upgrade_spec["upgradeAt"]
        version = upgrade_spec["desired"]["version"]
        upgrade_at_obj = datetime.strptime(upgrade_at, "%Y-%m-%dT%H:%M:%SZ")
        state_key = f"{cluster}-{upgrade_at}"
        # if this is the first iteration in which 'now' has passed
        # the upgradeAt datetime, send a notification
        if upgrade_at_obj < now:
            if state.exists(state_key):
                # already notified
                continue
            logging.info(["cluster_upgrade", cluster])
            if not dry_run:
                state.add(state_key)
                usergroup = f"{cluster}-cluster"
                usergroup_id = slack.get_usergroup_id(usergroup)
                slack.chat_post_message(
                    f"Heads up <!subteam^{usergroup_id}>! " +
                    f"cluster `{cluster}` is currently " +
                    f"being upgraded to version `{version}`")
Example #5
0
def test_exists_for_missing_key(accounts, s3_client, mocker):
    s3_client.create_bucket(Bucket="some-bucket")

    mock_aws_api = mocker.patch("reconcile.utils.state.AWSApi", autospec=True)
    mock_aws_api.return_value.get_session.return_value.client.return_value = s3_client

    state = State("integration-name", accounts)

    assert not state.exists("some-key")
Example #6
0
def run(dry_run,
        thread_pool_size=10,
        internal=None,
        use_jump_host=True,
        defer=None):
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    clusters = [c for c in queries.get_clusters(minimal=True) if c.get('ocm')]
    oc_map = OC_Map(clusters=clusters,
                    integration=QONTRACT_INTEGRATION,
                    settings=settings,
                    internal=internal,
                    use_jump_host=use_jump_host,
                    thread_pool_size=thread_pool_size)
    defer(oc_map.cleanup)
    state = State(integration=QONTRACT_INTEGRATION,
                  accounts=accounts,
                  settings=settings)

    if not dry_run:
        slack = init_slack_workspace(QONTRACT_INTEGRATION)

    now = datetime.utcnow()
    for cluster in oc_map.clusters(include_errors=True):
        oc = oc_map.get(cluster)
        if not oc:
            logging.log(level=oc.log_level, msg=oc.message)
            continue
        upgrade_config = oc.get(namespace='openshift-managed-upgrade-operator',
                                kind='UpgradeConfig',
                                allow_not_found=True)['items']
        if not upgrade_config:
            logging.debug(f'[{cluster}] UpgradeConfig not found.')
            continue
        [upgrade_config] = upgrade_config

        upgrade_spec = upgrade_config['spec']
        upgrade_at = upgrade_spec['upgradeAt']
        version = upgrade_spec['desired']['version']
        upgrade_at_obj = datetime.strptime(upgrade_at, '%Y-%m-%dT%H:%M:%SZ')
        state_key = f'{cluster}-{upgrade_at}'
        # if this is the first iteration in which 'now' has passed
        # the upgradeAt datetime, send a notification
        if upgrade_at_obj < now:
            if state.exists(state_key):
                # already notified
                continue
            logging.info(['cluster_upgrade', cluster])
            if not dry_run:
                state.add(state_key)
                usergroup = f'{cluster}-cluster'
                usergroup_id = slack.get_usergroup_id(usergroup)
                slack.chat_post_message(
                    f'Heads up <!subteam^{usergroup_id}>! ' +
                    f'cluster `{cluster}` is currently ' +
                    f'being upgraded to version `{version}`')
Example #7
0
def test_exists_for_existing_key(accounts, s3_client, mocker):
    key = "some-key"

    s3_client.create_bucket(Bucket="some-bucket")
    s3_client.put_object(Bucket="some-bucket",
                         Key=f"state/integration-name/{key}",
                         Body="test")

    mock_aws_api = mocker.patch("reconcile.utils.state.AWSApi", autospec=True)
    mock_aws_api.return_value.get_session.return_value.client.return_value = s3_client

    state = State("integration-name", accounts)

    assert state.exists(key)
Example #8
0
def run(dry_run):
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    smtp_client = SmtpClient(settings=settings)
    state = State(integration=QONTRACT_INTEGRATION,
                  accounts=accounts,
                  settings=settings)
    credentials_requests = queries.get_credentials_requests()

    # validate no 2 requests have the same name
    credentials_requests_names = {r['name'] for r in credentials_requests}
    if len(credentials_requests) != len(credentials_requests_names):
        logging.error('request names must be unique.')
        sys.exit(1)

    error = False

    credentials_requests_to_send = \
        [r for r in credentials_requests if not state.exists(r['name'])]
    for credentials_request_to_send in credentials_requests_to_send:
        user = credentials_request_to_send['user']
        org_username = user['org_username']
        public_gpg_key = user.get('public_gpg_key')
        credentials_name = credentials_request_to_send['credentials']
        if not public_gpg_key:
            error = True
            logging.error(
                f"user {org_username} does not have a public gpg key")
            continue
        logging.info(['send_credentials', org_username, credentials_name])

        if not dry_run:
            request_name = credentials_request_to_send['name']
            names = [org_username]
            subject = request_name
            encrypted_credentials = get_encrypted_credentials(
                credentials_name, user, settings, smtp_client)
            if not encrypted_credentials:
                error = True
                logging.error(
                    f"could not get encrypted credentials {credentials_name}")
                continue
            body = MESSAGE_TEMPLATE.format(request_name, credentials_name,
                                           encrypted_credentials)
            smtp_client.send_mail(names, subject, body)
            state.add(request_name)

    if error:
        sys.exit(1)
Example #9
0
def run(dry_run):
    settings = queries.get_app_interface_settings()
    accounts = queries.get_state_aws_accounts()
    smtp_client = SmtpClient(settings=settings)
    state = State(integration=QONTRACT_INTEGRATION,
                  accounts=accounts,
                  settings=settings)
    credentials_requests = queries.get_credentials_requests()

    # validate no 2 requests have the same name
    credentials_requests_names = {r["name"] for r in credentials_requests}
    if len(credentials_requests) != len(credentials_requests_names):
        logging.error("request names must be unique.")
        sys.exit(1)

    error = False

    credentials_requests_to_send = [
        r for r in credentials_requests if not state.exists(r["name"])
    ]
    for credentials_request_to_send in credentials_requests_to_send:
        try:
            user = credentials_request_to_send["user"]
            credentials_name = credentials_request_to_send["credentials"]
            org_username = user["org_username"]
            logging.info(["send_credentials", org_username, credentials_name])

            request_name = credentials_request_to_send["name"]
            names = [org_username]
            subject = request_name
            encrypted_credentials = get_encrypted_credentials(
                credentials_name, user, settings)
            if not dry_run:
                body = MESSAGE_TEMPLATE.format(request_name, credentials_name,
                                               encrypted_credentials)
                smtp_client.send_mail(names, subject, body)
                state.add(request_name)
        except KeyError:
            logging.exception(
                f"Bad user details for {org_username} - {credentials_name}")
            error = True
        except CalledProcessError as e:
            logging.exception(f"Failed to handle GPG key for {org_username} "
                              f"({credentials_name}): {e.stdout}")
            error = True

    if error:
        sys.exit(1)
def test_exists_for_existing_key(accounts, s3_client, mocker):
    key = "some-key"

    s3_client.create_bucket(Bucket='some-bucket')
    s3_client.put_object(Bucket='some-bucket',
                         Key=f'state/integration-name/{key}',
                         Body='test')

    mock_aws_api = mocker.patch('reconcile.utils.state.AWSApi', autospec=True)
    mock_aws_api.return_value \
        .get_session.return_value \
        .client.return_value = s3_client

    state = State('integration-name', accounts)

    assert state.exists(key)
def run(dry_run):
    settings = queries.get_app_interface_settings()
    accounts = queries.get_aws_accounts()
    state = State(integration=QONTRACT_INTEGRATION,
                  accounts=accounts,
                  settings=settings)
    emails = queries.get_app_interface_emails()
    smtp_client = SmtpClient(settings=settings)
    # validate no 2 emails have the same name
    email_names = {e['name'] for e in emails}
    if len(emails) != len(email_names):
        logging.error('email names must be unique.')
        sys.exit(1)

    emails_to_send = [e for e in emails if not state.exists(e['name'])]
    for email in emails_to_send:
        logging.info(['send_email', email['name'], email['subject']])

        if not dry_run:
            names = collect_to(email['to'])
            subject = email['subject']
            body = email['body']
            smtp_client.send_mail(names, subject, body)
            state.add(email['name'])