Example #1
            result.append((role, attached_name))

    # Eliminate dupes
    result = sorted(list(set(result)))

    if do_headers:
        # Add headers
        result = [("Role", "Policy")] + result

    return result


# ---
# dss-ops.py command line utility
# ---
iam = dispatch.target("iam", arguments={}, help=__doc__)


@iam.action(
    "list",
    arguments={
        "cloud_provider":
        dict(choices=["aws", "gcp", "fusillade"],
             help="The cloud provider whose policies are being listed"),
        "--group-by":
        dict(
            required=False,
            choices=["users", "groups", "roles"],
            help=
            "Group the listed policies by asset type (user, group, or role)",
        ),
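All of these examples register subcommands through dss.operations.dispatch: dispatch.target() declares a top-level target and target.action() binds an action name and its argparse options to a handler. The dispatcher itself is not shown in this listing, so the following is only a minimal, self-contained sketch of how such a target/action registry could be built on argparse sub-parsers; every name and behavior in it is an assumption made for illustration, not the real dss implementation.

# Illustrative sketch only -- NOT the dss implementation. A tiny target/action
# registry in the spirit of dss.operations.dispatch, built on argparse.
import argparse
import typing


class _Target:
    def __init__(self, subparsers, name, help=None):
        self._parser = subparsers.add_parser(name, help=help)
        self._actions = self._parser.add_subparsers(dest="action")

    def action(self, name, arguments=None):
        action_parser = self._actions.add_parser(name)
        for flag, options in (arguments or {}).items():
            action_parser.add_argument(flag, **options)

        def register(handler):
            # Remember which handler to call when this action is selected.
            action_parser.set_defaults(_handler=handler)
            return handler

        return register


class Dispatch:
    def __init__(self):
        self._parser = argparse.ArgumentParser()
        self._targets = self._parser.add_subparsers(dest="target")

    def target(self, name, arguments=None, help=None):
        return _Target(self._targets, name, help=help)

    def __call__(self, argv: typing.List[str]):
        args = self._parser.parse_args(argv)
        args._handler(argv, args)


dispatch = Dispatch()
demo = dispatch.target("demo", help="demo target")


@demo.action("greet", arguments={"--name": dict(required=True, help="who to greet")})
def greet(argv: typing.List[str], args: argparse.Namespace):
    print(f"hello {args.name}")

# dispatch(["demo", "greet", "--name", "world"])  # would print "hello world"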
Example #2
from concurrent.futures import ThreadPoolExecutor, as_completed
from threading import Lock
from string import hexdigits

import boto3
from elasticsearch_dsl import Search
from dcplib.aws.sqs import SQSMessenger, get_queue_url

from dss import Config, Replica
from dss.operations import dispatch
from dss.api.subscriptions_v1 import _delete_subscription
from dss.index.es import ElasticsearchClient

logger = logging.getLogger(__name__)

elasticsearch = dispatch.target("elasticsearch", help=__doc__)


@elasticsearch.action("index-keys",
                      arguments={
                          "--replica":
                          dict(choices=[r.name for r in Replica],
                               required=True),
                          "--keys":
                          dict(default=None, nargs="*", help="keys to index.")
                      })
def index_keys(argv: typing.List[str], args: argparse.Namespace):
    """
    Queue an SQS message to the indexer lambda for each key in `keys`.
    """
    index_queue_url = get_queue_url("dss-index-operation-" +
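The snippet above is cut off before the queueing loop. As a rough illustration of the docstring's intent (one SQS message per key), here is a minimal sketch using plain boto3 rather than dcplib's SQSMessenger, whose interface is not shown in this listing; the queue name and message payload are assumptions.

# Illustrative sketch only: queue one message per key with plain boto3.
# The queue name and payload shape are assumptions, not the dss wire format.
import json
import boto3


def queue_keys_for_indexing(queue_name: str, replica: str, keys):
    sqs = boto3.client("sqs")
    queue_url = sqs.get_queue_url(QueueName=queue_name)["QueueUrl"]
    for key in keys:
        sqs.send_message(QueueUrl=queue_url,
                         MessageBody=json.dumps({"replica": replica, "key": key}))

# queue_keys_for_indexing("dss-index-operation-dev", "aws", ["bundles/..."])  # hypothetical names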
Example #3
from dcplib.aws.sqs import SQSMessenger
from dcplib.aws.clients import logs

from dss.config import Config, Replica
from dss.util.aws import resources
from dss.operations import dispatch
from dss.events import (get_bundle_metadata_document, record_event_for_bundle, journal_flashflood,
                        update_flashflood, list_new_flashflood_journals)
from dss.storage.bundles import Living
from dss.operations.util import monitor_logs, command_queue_url


logger = logging.getLogger(__name__)


events = dispatch.target("events", help=__doc__)


@events.action("bundle-metadata-document",
               arguments={"--replica": dict(choices=[r.name for r in Replica], required=True),
                          "--keys": dict(required=True,
                                         nargs="*",
                                         help="bundle keys to generate documents.")})
def bundle_metadata_document(argv: typing.List[str], args: argparse.Namespace):
    replica = Replica[args.replica]
    for key in args.keys:
        md = get_bundle_metadata_document(replica, key)
        if md is not None:
            print(md)

@events.action("record",
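The bundle-metadata-document handler above can also be exercised outside the dispatcher by handing it a pre-built namespace; a minimal sketch follows, where the replica name and bundle key are hypothetical placeholders and the call only prints documents that actually resolve.

# Illustrative sketch only: call the handler above directly with a hand-built
# argparse.Namespace. The replica and key are hypothetical placeholders.
import argparse

args = argparse.Namespace(replica="aws", keys=["bundles/<uuid>.<version>"])
bundle_metadata_document(argv=[], args=args)  # prints each metadata document that is not None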
Example #4
    try:
        response = sm_client.get_secret_value(SecretId=secret_name)
    except ClientError as e:
        if "Error" in e.response:
            errtype = e.response["Error"]["Code"]
            if errtype == "ResourceNotFoundException":
                raise RuntimeError(
                    f"Error: secret {secret_name} was not found!")
        raise RuntimeError(
            f"Error: could not fetch secret {secret_name} from secrets manager"
        )
    else:
        return response


secrets = dispatch.target("secrets", arguments={}, help=__doc__)

json_flag_options = dict(
    default=False,
    action="store_true",
    help="format the output as JSON if this flag is present")
dryrun_flag_options = dict(default=False,
                           action="store_true",
                           help="do a dry run of the actual operation")
quiet_flag_options = dict(default=False,
                          action="store_true",
                          help="suppress output")


@secrets.action(
    "list",
Example #5
import json
import typing
import logging
import argparse

from collections import namedtuple
from cloud_blobstore import BlobNotFoundError

from dss import Config, Replica
from dcplib.aws.sqs import SQSMessenger, get_queue_url
from dss.storage.hcablobstore import compose_blob_key
from dss.operations import dispatch
from dss.storage.identifiers import BLOB_PREFIX, FILE_PREFIX, BUNDLE_PREFIX

logger = logging.getLogger(__name__)
sync = dispatch.target("sync", help=__doc__)
ReplicationAnomaly = namedtuple("ReplicationAnomaly", "key anomaly")


def _log_warning(**kwargs):
    logger.warning(json.dumps(kwargs))


@sync.action("verify-sync",
             arguments={
                 "--source-replica":
                 dict(choices=[r.name for r in Replica], required=True),
                 "--destination-replica":
                 dict(choices=[r.name for r in Replica], required=True),
                 "--keys":
                 dict(default=None, nargs="*", help="keys to check.")
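The verify-sync handler itself is cut off above. As a small illustration of how the ReplicationAnomaly tuple and the _log_warning helper defined in this example fit together, an anomaly for a missing blob could be reported as below; the key and message are hypothetical, and the real verification logic is not shown.

# Illustrative sketch only: report one anomaly with the helpers defined above.
# The key and anomaly text are hypothetical.
anomaly = ReplicationAnomaly(key="blobs/0123abcd...", anomaly="missing from destination replica")
_log_warning(ReplicationAnomaly=dict(key=anomaly.key, anomaly=anomaly.anomaly))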
Example #6
    def _parse_key(key):
        try:
            version = VERSION_REGEX.search(key).group(0)
            uuid = UUID_REGEX.search(key).group(0)
        except (AttributeError, IndexError):  # re.search() returns None for a non-matching key, raising AttributeError
            raise RuntimeError(f'Unable to parse the key: {key}')
        return uuid, version

    def process_keys(self):
        raise NotImplementedError()

    def __call__(self, argv: typing.List[str], args: argparse.Namespace):
        self.process_keys()


checkout = dispatch.target("checkout", help=__doc__)


@checkout.action("remove",
                 arguments={
                     "--replica":
                     dict(choices=[r.name for r in Replica], required=True),
                     "--keys":
                     dict(nargs="+",
                          help="Keys to remove from checkout.",
                          required=True)
                 })
class Remove(CheckoutHandler):
    def process_keys(self):
        """Remove keys from the checkout bucket."""
        for _key in self.keys:
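The checkout handlers above all rely on _parse_key to split a storage key into a UUID and a version. The actual regexes are defined elsewhere in the codebase, so the following self-contained sketch uses hypothetical patterns and a hypothetical key layout purely to show the parsing step, with an explicit check for non-matching keys.

# Illustrative sketch only: the key-parsing step used by the checkout handlers.
# UUID_REGEX, VERSION_REGEX and the key layout below are hypothetical; the real
# patterns live elsewhere in the dss codebase.
import re

UUID_REGEX = re.compile(r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}")
VERSION_REGEX = re.compile(r"\d{4}-\d{2}-\d{2}T\d{6}\.\d{6}Z")


def parse_key(key: str):
    """Return (uuid, version) extracted from a storage key, or raise if either is missing."""
    uuid_match = UUID_REGEX.search(key)
    version_match = VERSION_REGEX.search(key)
    if uuid_match is None or version_match is None:
        raise RuntimeError(f"Unable to parse the key: {key}")
    return uuid_match.group(0), version_match.group(0)

# parse_key("bundles/01234567-89ab-cdef-0123-456789abcdef.2019-07-01T120000.000000Z")
# -> ("01234567-89ab-cdef-0123-456789abcdef", "2019-07-01T120000.000000Z")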
Example #7
            quiet,
            f"Nothing to unset for variable {env_var} in deployed lambda function {lambda_name}"
        )


def print_lambda_env(lambda_name, lambda_env):
    """Print environment variables set in a specified lambda function"""
    print(f"\n{lambda_name}:")
    for name, val in lambda_env.items():
        print(f"{name}={val}")


# ---
# Command line utility functions
# ---
lambda_params = dispatch.target("lambda", arguments={}, help=__doc__)
ssm_params = dispatch.target("params", arguments={}, help=__doc__)

json_flag_options = dict(
    default=False,
    action="store_true",
    help="format the output as JSON if this flag is present")
dryrun_flag_options = dict(default=False,
                           action="store_true",
                           help="do a dry run of the actual operation")
quiet_flag_options = dict(default=False,
                          action="store_true",
                          help="suppress output")


@ssm_params.action(
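print_lambda_env only formats output; a minimal sketch of fetching the environment of one deployed function with boto3 and passing it through follows. The function name is hypothetical, and the real listing logic in this module is not shown.

# Illustrative sketch only: fetch a deployed function's environment via boto3
# and print it with print_lambda_env defined above. The function name is hypothetical.
import boto3

lambda_client = boto3.client("lambda")
config = lambda_client.get_function_configuration(FunctionName="dss-example-dev")
print_lambda_env("dss-example-dev", config.get("Environment", {}).get("Variables", {}))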