Ejemplo n.º 1
0
class Downtime(CommandBase):
    command = 'downtime'
    # Fixed typo in help text: "so that is is recorded" -> "so that it is recorded".
    help = """
    Manage downtime for the selected environment.

    This notifies Datadog of the planned downtime so that it is recorded
    in the history, and so that during it service alerts are silenced.
    """
    arguments = (
        Argument('action', choices=('start', 'end')),
        Argument('-m', '--message', help="""
            Optional message to set on Datadog.
        """),
        Argument('-d', '--duration', default=24, help="""
            Max duration in hours for the Datadog downtime after which it will be auto-cancelled.
            This is a safeguard against downtime remaining active and preventing future
            alerts.
            Default: 24 hours
        """),
    )

    def run(self, args, unknown_args):
        """Start or end a Datadog downtime window for the environment."""
        environment = get_environment(args.env_name)
        environment.create_generated_yml()
        ansible_context = AnsibleContext(args)

        if args.action == 'start':
            start_downtime(environment, ansible_context, args)

        if args.action == 'end':
            end_downtime(environment, ansible_context)
class Tmux(_Ssh):
    command = 'tmux'
    help = """
    Connect to a remote host with ssh and open a tmux session.

    Example:

    Rejoin last open tmux window.

    ```
    commcare-cloud <env> tmux -
    ```
    """
    arguments = (
        Argument('server',
                 help="""
            Server to run tmux session on.
            Use '-' for default (django_manage:0)
        """),
        Argument('remote_command',
                 nargs='?',
                 help="""
            Command to run in the tmux.
            If a command specified, then it will always run in a new window.
            If a command is *not* specified, then a it will rejoin the most
            recently visited tmux window; only if there are no currently open
            tmux windows will a new one be opened.
        """),
    )

    def run(self, args, ssh_args):
        """SSH to the chosen server and attach or create a tmux session.

        Builds tmux-specific ssh arguments, then delegates the actual
        connection to ``Ssh.run``.
        """
        environment = get_environment(args.env_name)
        public_vars = environment.public_vars
        if args.server == '-':
            args.server = 'django_manage:0'
        # the default 'cchq' is redundant with ansible/group_vars/all.yml
        cchq_user = public_vars.get('cchq_user', 'cchq')
        # Name tabs like "droberts (2018-04-13)"
        window_name_expression = '"`whoami` (`date +%Y-%m-%d`)"'
        if args.remote_command:
            # add bash as second command to keep tmux open after command exits
            remote_command = shlex_quote('{} ; bash'.format(
                args.remote_command))
            # Attach to an existing tmux server and run the command in a new
            # window; if no server is running, start one instead. The escaped
            # "\;" separates tmux commands on the remote side. Extra ssh_args
            # supplied by the caller are preserved after the tmux invocation.
            ssh_args = [
                '-t',
                r'tmux attach \; new-window -n {window_name} {remote_command} '
                r'|| tmux new -n {window_name} {remote_command}'.format(
                    remote_command="sudo -iu {} -- sh -c {}".format(
                        cchq_user, remote_command),
                    window_name=window_name_expression,
                )
            ] + ssh_args
        else:
            # No command given: rejoin the most recently visited window when a
            # tmux server exists, otherwise open a fresh login shell as
            # cchq_user. NOTE(review): caller-supplied ssh_args are replaced
            # here (not appended as in the branch above) — presumably
            # intentional; confirm before relying on passthrough args.
            ssh_args = [
                '-t',
                'tmux attach || tmux new -n {window_name} sudo -iu {cchq_user} '
                .format(cchq_user=cchq_user,
                        window_name=window_name_expression)
            ]
        return Ssh(self.parser).run(args, ssh_args)
Ejemplo n.º 3
0
class Service(CommandBase):
    command = 'service'
    # todo: auto-generate tables of service => subservices
    help = """
    Manage services.

    Example:

    ```
    cchq <env> service postgresql status
    cchq <env> service celery help
    cchq <env> service celery logs
    cchq <env> service celery restart --limit <host>
    cchq <env> service celery restart --only <queue-name>,<queue-name>:<queue_num>
    cchq <env> service pillowtop restart --limit <host> --only <pillow-name>
    ```

    Services are grouped together to form conceptual service groups.
    Thus the `postgresql` service group applies to both the `postgresql`
    service and the `pgbouncer` service. We'll call the actual services
    "subservices" here.
    """

    arguments = (
        Argument('services', nargs="+", choices=SERVICE_NAMES, help="""
        The name of the service group(s) to apply the action to.
        There is a preset list of service groups that are supported.
        More than one service may be supplied as separate arguments in a row.
        """),
        Argument('action', choices=ACTIONS, help="""
        Action can be `status`, `start`, `stop`, `restart`, or `logs`.
        This action is applied to every matching service.
        """),
        Argument('--limit', help=(
            "Restrict the hosts to run the command on."
            "\nUse 'help' action to list all options."
        ), include_in_docs=False),
        Argument('--only', type=validate_pattern, dest='process_pattern', help=(
            "Sub-service name to limit action to."
            "\nFormat as 'name' or 'name:number'."
            "\nUse 'help' action to list all options."
        )),
    )

    def run(self, args, unknown_args):
        """Apply the requested action to each selected service group.

        Returns the first non-zero exit code encountered, or 0 when every
        service action succeeded.
        """
        environment = get_environment(args.env_name)

        # Resolve all names up front so an unknown name fails before any
        # action is run.
        service_classes = [SERVICES_BY_NAME[name] for name in args.services]

        ansible_context = AnsibleContext(args)
        failures = []
        for service_class in service_classes:
            service = service_class(environment, ansible_context)
            exit_code = service.run(args.action, args.limit, args.process_pattern)
            if exit_code != 0:
                failures.append(exit_code)
        return failures[0] if failures else 0
Ejemplo n.º 4
0
class PillowTopicAssignments(CommandBase):
    command = 'pillow-topic-assignments'
    help = """
    Print out the list of Kafka partitions assigned to each pillow process.
    """

    run_setup_on_control_by_default = False

    arguments = (
        Argument('pillow_name', help=(
            "Name of the pillow."
        )),
        Argument('--csv', action='store_true', help=(
            "Output as CSV"
        )),
    )

    def run(self, args, unknown_args):
        """Delegate to the `pillow_topic_assignments` Django management command."""
        environment = get_environment(args.env_name)
        process_counts = _get_pillow_resources_by_name(environment)
        num_processes = process_counts[args.pillow_name]
        manage_args = [
            'pillow_topic_assignments',
            args.pillow_name,
            str(num_processes),
        ]
        if args.csv:
            manage_args.append('--csv')
        # DjangoManage.run expects these attributes to exist on args.
        args.release = None
        args.tmux = None
        args.tee_file = None
        args.server = "django_manage[0]"
        return DjangoManage(self.parser).run(args, manage_args)
Ejemplo n.º 5
0
class Downtime(CommandBase):
    command = 'downtime'
    # Fixed typo in help text: "so that is is recorded" -> "so that it is recorded".
    help = """
    Manage downtime for the selected environment.

    This notifies Datadog of the planned downtime so that it is recorded
    in the history, and so that during it service alerts are silenced.
    """
    arguments = (
        Argument('action', choices=('start', 'end')),
        Argument('-m',
                 '--message',
                 help="""
            Optional message to set on Datadog.
        """),
    )

    def run(self, args, unknown_args):
        """Start or end a Datadog downtime window for the environment."""
        environment = get_environment(args.env_name)
        environment.create_generated_yml()
        ansible_context = AnsibleContext(args)

        if args.action == 'start':
            start_downtime(environment, ansible_context, args)

        if args.action == 'end':
            end_downtime(environment, ansible_context)
Ejemplo n.º 6
0
class Secrets(CommandBase):
    command = 'secrets'
    help = (
        "View and edit secrets through the CLI"
    )

    arguments = (
        Argument(dest='subcommand', choices=['view', 'edit', 'list-append', 'list-remove']),
        Argument(dest='secret_name'),
    )

    def run(self, args, unknown_args):
        """Dispatch the requested subcommand against the named secret."""
        environment = get_environment(args.env_name)
        handlers = {
            'view': self._secrets_view,
            'edit': self._secrets_edit,
            'list-append': self._secrets_append_to_list,
            'list-remove': self._secrets_remove_from_list,
        }
        handler = handlers.get(args.subcommand)
        if handler is not None:
            return handler(environment, args.secret_name)

    def _secrets_view(self, environment, secret_name):
        """Print the secret: raw for strings, YAML for structured values."""
        secret = environment.get_secret(secret_name)
        if isinstance(secret, six.string_types):
            output = secret
        else:
            output = yaml.safe_dump(secret)
        print(output)

    def _secrets_edit(self, environment, secret_name):
        """Prompt for a new value and store it (JSON-decoded when possible)."""
        environment.secrets_backend.prompt_user_input()
        prompt = "New value for '{}' secret '{}': ".format(environment.name, secret_name)
        secret_value = getpass.getpass(prompt)
        try:
            # Allow structured values to be entered as JSON; anything that
            # fails to parse is kept as the raw string.
            secret_value = json.loads(secret_value)
        except ValueError:
            pass
        environment.secrets_backend.set_secret(secret_name, secret_value)

    def _secrets_append_to_list(self, environment, secret_name):
        """Append a prompted value to a list-valued secret."""
        secret = environment.get_secret(secret_name)
        if not isinstance(secret, list):
            print("Cannot append. '{}' is not a list.".format(secret_name))
            exit(-1)
        prompt = "Value for '{}' to append to '{}': ".format(environment.name, secret_name)
        secret.append(getpass.getpass(prompt))
        environment.secrets_backend.set_secret(secret_name, secret)

    def _secrets_remove_from_list(self, environment, secret_name):
        """Remove a prompted value from a list-valued secret."""
        secret = environment.get_secret(secret_name)
        if not isinstance(secret, list):
            print("Cannot remove. '{}' is not a list.".format(secret_name))
            exit(-1)
        prompt = "Value for '{}' to remove from '{}': ".format(environment.name, secret_name)
        value_to_remove = getpass.getpass(prompt)
        try:
            secret.remove(value_to_remove)
        except ValueError:
            print("Value not found in list.")
            exit(-1)
        environment.secrets_backend.set_secret(secret_name, secret)
Ejemplo n.º 7
0
class CeleryResourceReport(CommandBase):
    command = 'celery-resource-report'
    help = """
    Report of celery resources by queue.
    """

    arguments = (
        Argument('--show-workers',
                 action='store_true',
                 help=("Includes the list of worker nodes for each queue")),
        Argument('--csv', action='store_true', help=("Output table as CSV")),
    )

    def run(self, args, manage_args):
        """Print a per-queue table of celery worker counts and concurrency."""
        environment = get_environment(args.env_name)
        celery_processes = environment.app_processes_config.celery_processes

        def _fresh_stats():
            # Accumulator for one queue across all hosts.
            return {
                'num_workers': 0,
                'concurrency': 0,
                'pooling': set(),
                'worker_hosts': set(),
            }

        by_queue = defaultdict(_fresh_stats)
        for host, queues in celery_processes.items():
            for queue_name, options in queues.items():
                stats = by_queue[queue_name]
                stats['num_workers'] += options.num_workers
                stats['concurrency'] += options.concurrency * options.num_workers
                stats['pooling'].add(options.pooling)
                stats['worker_hosts'].add(host)

        headers = [
            'Pooling', 'Worker Queues', 'Processes', 'Concurrency',
            'Avg Concurrency per worker'
        ]
        if args.show_workers:
            headers.append('Worker Hosts')

        rows = []
        for queue_name, stats in sorted(by_queue.items(), key=itemgetter(0)):
            num_workers = stats['num_workers']
            total_concurrency = stats['concurrency']
            row = [
                next(iter(stats['pooling'])),
                '`{}`'.format(queue_name),
                num_workers,
                total_concurrency,
                total_concurrency // num_workers,
            ]
            if args.show_workers:
                aliases = sorted(
                    get_machine_alias(environment, worker_host)
                    for worker_host in stats['worker_hosts']
                )
                row.append(','.join(aliases))
            rows.append(row)

        print_table(headers, rows, args.csv)
Ejemplo n.º 8
0
class ListDatadogMonitors(CommandBase):
    command = 'list-datadog-monitors'
    # Fixed typo: "Lost Datadog Monitor definitions" -> "List ..."
    help = """List Datadog Monitor definitions"""

    arguments = (
        Argument('config'),
        Argument('-f',
                 '--filenames',
                 action='store_true',
                 help="Show filenames instead of monitor names."),
        Argument('-l',
                 '--local',
                 action='store_true',
                 help="Only list what's local. Don't query Datadog."),
        Argument('--sort',
                 choices=('name', 'id'),
                 default='id',
                 help="Sort order."),
    )

    def run(self, args, unknown_args):
        """List monitors, optionally diffing local definitions against Datadog."""
        config = get_config(args.config)
        local_monitor_api = LocalMonitorAPI(config)

        show_filenames = args.filenames
        # Map the --sort choice onto the sort-key index used by _print_monitors.
        sort_index = {'id': 0, 'name': 1}[args.sort]

        def _print(title, monitors):
            _print_monitors(local_monitor_api, title, monitors, show_filenames,
                            sort_index)

        local_monitors = local_monitor_api.get_all()
        if args.local:
            _print("\nMonitors", local_monitors)
        else:
            initialize_datadog(config)
            remote_monitor_api = RemoteMonitorAPI()
            remote_monitors = remote_monitor_api.get_all()

            # Partition by monitor id: remote-only, local-only, and shared.
            only_remote = {
                id: remote_monitors[id]
                for id in set(remote_monitors) - set(local_monitors)
            }
            only_local = {
                id: local_monitors[id]
                for id in set(local_monitors) - set(remote_monitors)
            }
            shared_local_remote_monitors = {
                id: local_monitors[id]
                for id in set(local_monitors) & set(remote_monitors)
            }

            _print("\nMonitors", shared_local_remote_monitors)
            _print("\nMonitors only on Datadog", only_remote)
            _print("\nMonitors only on local", only_local)
Ejemplo n.º 9
0
class RunShellCommand(CommandBase):
    command = 'run-shell-command'
    help = """
    Run an arbitrary command via the Ansible shell module.

    Example:

    ```
    commcare-cloud <env> run-shell-command all 'df -h | grep /opt/data'
    ```

    to get disk usage stats for `/opt/data` on every machine.
    """

    arguments = (
        shared_args.INVENTORY_GROUP_ARG,
        # Fixed typo: "Cannot being with" -> "Cannot begin with".
        Argument('shell_command',
                 help="""
            Command to run remotely.
            (Tip: put quotes around it, as it will likely contain spaces.)
            Cannot begin with `sudo`; to do that use the ansible `--become` option.
        """),
        Argument(
            '--silence-warnings',
            action='store_true',
            help=
            "Silence shell warnings (such as to use another module instead)."),
    ) + NON_POSITIONAL_ARGUMENTS

    def modify_parser(self):
        RunAnsibleModule(self.parser).modify_parser()

    def run(self, args, unknown_args):
        """Run the shell command via RunAnsibleModule's 'shell' module.

        Warns (and asks for confirmation) when the command starts with
        'sudo', since `--become` is the supported way to escalate.
        """
        if args.shell_command.strip().startswith('sudo '):
            puts(
                color_notice(
                    "To run as another user use `--become` (for root) or `--become-user <user>`.\n"
                    "Using 'sudo' directly in the command is non-standard practice."
                ))
            if not ask(
                    "Do you know what you're doing and want to run this anyway?",
                    quiet=args.quiet):
                return 0  # exit code

        args.module = 'shell'
        if args.silence_warnings:
            args.module_args = 'warn=false ' + args.shell_command
        else:
            args.module_args = args.shell_command
        args.skip_check = True
        args.quiet = True
        # RunAnsibleModule takes the command via module_args instead.
        del args.shell_command
        return RunAnsibleModule(self.parser).run(args, unknown_args)
Ejemplo n.º 10
0
class MigrateCouchdb(CommandBase):
    command = 'migrate-couchdb'
    aliases = ('migrate_couchdb', )  # deprecated
    help = """
    Perform a CouchDB migration

    This is a recent and advanced addition to the capabilities,
    and is not yet ready for widespread use. At such a time as it is
    ready, it will be more thoroughly documented.
    """

    arguments = (
        Argument(dest='migration_plan', help="Path to migration plan file"),
        Argument(dest='action',
                 choices=['describe', 'plan', 'migrate', 'commit', 'clean'],
                 help="""
            Action to perform

            - describe: Print out cluster info
            - plan: generate plan details from migration plan
            - migrate: stop nodes and copy shard data according to plan
            - commit: update database docs with new shard allocation
            - clean: remove shard files from hosts where they aren't needed
        """),
        shared_args.SKIP_CHECK_ARG,
    )

    def run(self, args, unknown_args):
        """Dispatch the chosen CouchDB migration action.

        Verifies connectivity to the control node(s) before doing anything.
        """
        environment = get_environment(args.env_name)
        environment.create_generated_yml()

        migration = CouchMigration(environment, args.migration_plan)
        check_connection(migration.target_couch_config.get_control_node())
        if migration.separate_source_and_target:
            check_connection(migration.source_couch_config.get_control_node())

        ansible_context = AnsibleContext(args)

        if args.action == 'describe':
            return describe(migration)

        if args.action == 'plan':
            return plan(migration)

        if args.action == 'migrate':
            return migrate(migration, ansible_context, args.skip_check)

        if args.action == 'commit':
            return commit(migration)

        # BUG FIX: was `args.actoin`, which raised AttributeError instead of
        # ever running the 'clean' action.
        if args.action == 'clean':
            return clean(migration, ansible_context, args.skip_check)
Ejemplo n.º 11
0
class SendDatadogEvent(CommandBase):
    command = 'send-datadog-event'
    # Fixed typo: "maintainance" -> "maintenance".
    help = "Track an infrastructure maintenance event in Datadog"

    arguments = (
        Argument('event_title',
                 help="""
            Title of the datadog event.
        """),
        Argument('event_text',
                 help="""
            Text content of the datadog event.
        """),
        Argument('--tags',
                 nargs="*",
                 help="""
            Additional tags e.g. host:web2
        """),
        Argument('--alert_type',
                 choices=["error", "warning", "info", "success"],
                 default="info",
                 help="""
            Alert type.
        """),
    )

    def run(self, args, unknown_args):
        """Send a datadog_event via the Ansible module against localhost."""
        args.module = 'datadog_event'
        environment = get_environment(args.env_name)
        datadog_api_key = environment.get_secret('DATADOG_API_KEY')
        datadog_app_key = environment.get_secret('DATADOG_APP_KEY')
        tags = args.tags or []
        tags.append("environment:{}".format(args.env_name))
        # NOTE(review): title/text are interpolated into a key=value string
        # with only single quotes around them; a quote character in the
        # user-supplied title or text would break the module args. Consider
        # passing structured module args instead.
        args.module_args = "api_key={api_key} app_key={app_key} " \
            "tags='{tags}' text='{text}' title='{title}' aggregation_key={agg}".format(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                tags=",".join(tags),
                text=args.event_text,
                title=args.event_title,
                agg='commcare-cloud'
            )
        return run_ansible_module(environment,
                                  AnsibleContext(args),
                                  '127.0.0.1',
                                  args.module,
                                  args.module_args,
                                  become=False,
                                  quiet=True)
Ejemplo n.º 12
0
class AwsSignIn(CommandBase):
    command = 'aws-sign-in'
    help = """
        Use your MFA device to "sign in" to AWS for <duration> minutes (default {})

        This will store the temporary session credentials in ~/.aws/credentials
        under a profile named with the pattern "<aws_profile>:profile".
        After this you can use other AWS-related commands for up to <duration> minutes
        before having to sign in again.
    """.format(DEFAULT_SIGN_IN_DURATION_MINUTES)

    arguments = [
        Argument('--duration-minutes',
                 type=int,
                 default=DEFAULT_SIGN_IN_DURATION_MINUTES,
                 help="""
            Stay signed in for this many minutes
        """)
    ]

    def run(self, args, unknown_args):
        """Obtain fresh temporary AWS session credentials via MFA."""
        environment = get_environment(args.env_name)
        profile = environment.terraform_config.aws_profile
        aws_sign_in(profile, args.duration_minutes, force_new=True)
Ejemplo n.º 13
0
class TerraformMigrateState(CommandBase):
    command = 'terraform-migrate-state'
    help = """
    Apply unapplied state migrations in commcare_cloud/commands/terraform/migrations

    This migration tool should exist as a generic tool for terraform,
    but terraform is still not that mature, and it doesn't seem to exist yet.

    Terraform assigns each resource an address so that it can map it back to the code.
    However, often when you change the code, the addresses no longer map to the same place.
    For this, terraform offers the terraform state mv <address> <new_address> command,
    so you can tell it how existing resources map to your new code.

    This is a tedious task, and often follows a very predictable renaming pattern.
    This command helps fill this gap.
    """

    arguments = [
        Argument('--replay-from', type=int, default=None,
                 help="Set the last applied migration value to this number before running."
                      " Will begin running migrations after this number, not including it.")
    ]

    def run(self, args, unknown_args):
        """Apply terraform state migrations not yet recorded as applied.

        Fetches the remote checkpoint (last applied migration), validates it
        against the migrations in code, then plans and (after confirmation)
        applies the remaining migrations.
        """
        environment = get_environment(args.env_name)
        remote_migration_state_manager = RemoteMigrationStateManager(environment.terraform_config)
        remote_migration_state = remote_migration_state_manager.fetch()
        migrations = get_migrations()
        if args.replay_from is not None:
            # Rewind the checkpoint to the given migration number so later
            # migrations get re-run. Assumes migrations is ordered so that
            # migrations[n - 1].number == n (verified by the assert).
            migration = migrations[args.replay_from - 1]
            assert (migration.number == args.replay_from), migration.number
            remote_migration_state.number = migration.number
            remote_migration_state.slug = migration.slug

        # Same numbering assumption: the first `number` entries are applied.
        applied_migrations = migrations[:remote_migration_state.number]
        unapplied_migrations = migrations[remote_migration_state.number:]

        # make sure remote checkpoint is consistent with migrations in code
        if applied_migrations:
            assert (applied_migrations[-1].number, applied_migrations[-1].slug) == \
                   (remote_migration_state.number, remote_migration_state.slug), \
                (remote_migration_state, applied_migrations[-1])
        else:
            # Nothing applied yet: checkpoint must be the (0, None) sentinel.
            assert (0, None) == (remote_migration_state.number, remote_migration_state.slug), \
                remote_migration_state
        if not unapplied_migrations:
            print("No migrations to apply")
            return
        state = terraform_list_state(args.env_name, unknown_args)
        print("Applying the following changes:{}".format(
            ''.join('\n  - {:0>4} {}'.format(migration.number, migration.slug)
                    for migration in unapplied_migrations)
        ))
        print("which will result in the following moves being made:")
        # Planning happens before the prompt so the user sees the moves first.
        migration_plans = make_migration_plans(environment, state, unapplied_migrations, log=print)
        if ask("Do you want to apply this migration?"):
            apply_migration_plans(
                environment, migration_plans,
                remote_migration_state_manager=remote_migration_state_manager, log=print)
Ejemplo n.º 14
0
class Lookup(CommandBase):
    command = 'lookup'
    help = """
    Lookup remote hostname or IP address
    """
    arguments = (Argument("server",
                          nargs="?",
                          help="""
            Server name/group: postgresql, proxy, webworkers, ... The server
            name/group may be prefixed with 'username@' to login as a
            specific user and may be terminated with ':<n>' to choose one of
            multiple servers if there is more than one in the group. For
            example: webworkers:0 will pick the first webworker. May also be
            omitted for environments with only a single server.
        """), )

    def lookup_server_address(self, args):
        """Resolve the server argument to an address, erroring via the parser."""
        def exit(message):
            self.parser.error("\n" + message)

        if args.server:
            return get_server_address(args.env_name, args.server, exit)
        # No server given: only valid for single-server (monolith) envs.
        return get_monolith_address(args.env_name, exit)

    def run(self, args, unknown_args):
        """Print the resolved address; extra args are ignored with a warning."""
        if unknown_args:
            sys.stderr.write(
                "Ignoring extra argument(s): {}\n".format(unknown_args))
        print(self.lookup_server_address(args))
Ejemplo n.º 15
0
class OpenvpnActivateUser(_AnsiblePlaybookAlias):
    command = 'openvpn-activate-user'
    help = """
    Give a OpenVPN user a temporary password (the ansible user password)

    to allow the user to connect to the VPN, log in, and change their password using

    ```
    cchq <env> openvpn-claim-user
    ```
    """

    arguments = _AnsiblePlaybookAlias.arguments + (Argument('vpn_user',
                                                            help="""
            The user to activate.

            Must be one of the defined ssh users defined for the environment.
        """), )

    def run(self, args, unknown_args):
        """Run the activate-VPN-user playbook for the given user."""
        args.playbook = 'openvpn_playbooks/activate_vpn_user.yml'
        extra_vars = ('-e', 'vpn_user={}'.format(args.vpn_user))
        return AnsiblePlaybook(self.parser).run(
            args, unknown_args + extra_vars, always_skip_check=True)
class Lookup(CommandBase):
    command = 'lookup'
    help = """
    Lookup remote hostname or IP address
    """
    arguments = (Argument("server",
                          nargs="?",
                          help="""
            Server name/group: postgresql, proxy, webworkers, ... The server
            name/group may be prefixed with 'username@' to login as a
            specific user and may be terminated with '[<n>]' to choose one of
            multiple servers if there is more than one in the group. For
            example: webworkers[0] will pick the first webworker. May also be
            omitted for environments with only a single server.

            Use '-' for default (django_manage[0])
        """), )

    def lookup_server_address(self, args):
        """Resolve the server argument, converting failures into parser errors."""
        try:
            return lookup_server_address(args.env_name, args.server)
        except Exception as err:
            self.parser.error("\n{}".format(err))

    def run(self, args, unknown_args):
        """Print the resolved address; extra args are ignored with a warning."""
        if unknown_args:
            sys.stderr.write(
                "Ignoring extra argument(s): {}\n".format(unknown_args))
        print(self.lookup_server_address(args))
Ejemplo n.º 17
0
class MigrateSecrets(CommandBase):
    command = 'migrate-secrets'
    help = (
        "Migrate secrets from one backend to another"
    )

    arguments = (
        Argument(dest='from_backend'),
    )

    def run(self, args, unknown_args):
        """Copy every known secret from the named backend into the env's backend."""
        environment = get_environment(args.env_name)
        from_backend = all_secrets_backends_by_name[args.from_backend].from_environment(environment)
        to_backend = environment.secrets_backend

        # Guard: copying a backend onto itself would be a no-op at best.
        if from_backend.name == to_backend.name:
            puts(color_error(
                'Refusing to copy from {from_backend.name} to {to_backend.name}: backends must differ'
                .format(from_backend=from_backend, to_backend=to_backend)
            ))
            exit(-1)

        print("Copying data from {from_backend.name} to {to_backend.name}:".format(
            from_backend=from_backend, to_backend=to_backend))
        for secret_spec in get_known_secret_specs():
            try:
                secret_value = from_backend.get_secret(secret_spec.name)
            except KeyError:
                # Secret not present in the source backend; nothing to copy.
                print("No value for {secret_spec.name}... Skipping".format(secret_spec=secret_spec))
            else:
                to_backend.set_secret(secret_spec.name, secret_value)
                print("Copied value for {secret_spec.name}".format(secret_spec=secret_spec))
class _Ssh(Lookup):

    arguments = Lookup.arguments + (Argument("--quiet",
                                             action='store_true',
                                             default=False,
                                             help="""
            Don't output the command to be run.
        """), )

    def run(self, args, ssh_args, env_vars=None):
        """Resolve the target server and invoke the ssh-family command on it.

        Returns the subprocess exit code.
        """
        if args.server == '-':
            args.server = 'django_manage[0]'
        address = self.lookup_server_address(args)
        if ':' in address:
            # Address may carry a non-default ssh port as "host:port".
            address, port = address.split(':')
            ssh_args = ['-p', port] + ssh_args
        if '@' in address:
            # Honor an explicit "user@host", normalizing the username.
            username, address = address.split('@', 1)
            username = get_ssh_username(address,
                                        args.env_name,
                                        requested_username=username)
        else:
            # Simplified from `elif '@' not in address`, which was always
            # true when the `if` above failed.
            username = get_ssh_username(address, args.env_name)
        address = f"{username}@{address}"
        cmd_parts = [self.command, address, '-t'] + ssh_args
        # The joined form is only for display; the subprocess gets the list.
        cmd = ' '.join(shlex_quote(arg) for arg in cmd_parts)
        if not args.quiet:
            print_command(cmd)
        return subprocess.call(cmd_parts,
                               **({
                                   'env': env_vars
                               } if env_vars else {}))
Ejemplo n.º 19
0
def make_command_parser(available_envs,
                        formatter_class=RawTextHelpFormatter,
                        subparser_formatter_class=None,
                        prog=None,
                        add_help=True,
                        for_docs=False):
    """Build the top-level argument parser plus one subparser per command.

    Returns ``(parser, subparsers, commands)`` where ``commands`` maps each
    command name (and each alias) to its instantiated command object.
    """
    if subparser_formatter_class is None:
        subparser_formatter_class = formatter_class
    parser = ArgumentParser(formatter_class=formatter_class,
                            prog=prog,
                            add_help=add_help)
    # With a known env list, argparse validates the choice; otherwise just
    # show a generic <env> placeholder.
    if available_envs:
        env_name_kwargs = dict(choices=available_envs)
    else:
        env_name_kwargs = dict(metavar='<env>')
    parser.add_argument('env_name',
                        help=("server environment to run against"),
                        **env_name_kwargs)
    Argument('--control',
             action='store_true',
             help="""
        Run command remotely on the control machine.

        You can add `--control` _directly after_ `commcare-cloud` to any command
        in order to run the command not from the local machine
        using the local code,
        but from from the control machine for that environment,
        using the latest version of `commcare-cloud` available.

        It works by issuing a command to ssh into the control machine,
        update the code, and run the same command entered locally but with
        `--control` removed. For long-running commands,
        you will have to remain connected to the the control machine
        for the entirety of the run.
    """).add_to_parser(parser)
    subparsers = parser.add_subparsers(dest='command')

    commands = {}

    for command_type in COMMAND_TYPES:
        assert issubclass(command_type, CommandBase), command_type
        # The first line of the cleaned help string is the short summary
        # shown in the command listing; the whole string is the description.
        cmd = command_type(
            subparsers.add_parser(
                command_type.command,
                help=inspect.cleandoc(command_type.help).splitlines()[0],
                aliases=command_type.aliases,
                description=inspect.cleandoc(command_type.help),
                formatter_class=subparser_formatter_class,
                add_help=add_help))
        cmd.make_parser(for_docs=for_docs)
        commands[cmd.command] = cmd
        # Aliases resolve to the same command object.
        for alias in cmd.aliases:
            commands[alias] = cmd
    return parser, subparsers, commands
Ejemplo n.º 20
0
class AwsFillInventory(CommandBase):
    command = 'aws-fill-inventory'
    help = """
        Fill inventory.ini.j2 using AWS resource values cached in aws-resources.yml

        If --cached is not specified, also refresh aws-resources.yml
        to match what is actually in AWS.
    """

    arguments = [
        Argument('--cached',
                 action='store_true',
                 help="""
            Use the values set in aws-resources.yml rather than fetching from AWS.

            This runs much more quickly and gives the same result, provided no changes
            have been made to our actual resources in AWS.
        """)
    ]

    def run(self, args, unknown_args):
        """Render inventory.ini from inventory.ini.j2 using AWS resource data.

        Resource data comes either from the cached aws-resources.yml
        (--cached) or is fetched fresh from AWS and written back to the cache.
        """
        environment = get_environment(args.env_name)
        if not os.path.exists(environment.paths.inventory_ini_j2):
            print(
                "Env {} not using templated inventory (inventory.ini.j2). Skipping"
                .format(args.env_name))
            return 0

        if args.cached:
            with open(environment.paths.aws_resources_yml,
                      'r',
                      encoding='utf-8') as f:
                # PY2: yaml.safe_load will return bytes when the content is ASCII-only bytes
                resources = yaml.safe_load(f.read())
        else:
            # Refresh the cache file from live AWS state before rendering.
            resources = get_aws_resources(environment)
            with open(environment.paths.aws_resources_yml,
                      "w",
                      encoding="utf-8") as f:
                f.write(yaml.safe_dump(resources, default_flow_style=False))

        with open(environment.paths.inventory_ini_j2, 'r',
                  encoding='utf-8') as f:
            template_text = f.read()

        with open(environment.paths.inventory_ini, 'w', encoding='utf-8') as f:
            # Render inside the `with` block on purpose: if rendering fails,
            # inventory.ini is left empty, reflecting that we could not
            # create it.
            rendered = AwsFillInventoryHelper(environment, template_text,
                                              resources).render()
            # PY2: rendered is unicode based on Jinja2's render method
            f.write(rendered)
class UpdateUserPublicKey(_AnsiblePlaybookAlias):
    command = 'update-user-key'
    help = "Update a single user's public key (because update-users takes forever)."
    arguments = _AnsiblePlaybookAlias.arguments + (Argument(
        "username", help="username who owns the public key"), )

    def run(self, args, unknown_args):
        """Retired command: tell the operator what to use instead and exit 0."""
        removal_notice = "The 'update-user-key' command has been removed. Please use 'update-users' instead."
        puts(color_notice(removal_notice))
        return 0  # exit code
Ejemplo n.º 22
0
class AnsiblePlaybook(CommandBase):
    command = 'ansible-playbook'
    help = """
    Run a playbook as you would with ansible-playbook

    By default, you will see --check output and then asked whether to apply.
    
    Example:

    ```
    commcare-cloud <env> ansible-playbook deploy_proxy.yml --limit=proxy
    ```
    """
    aliases = ('ap', )
    arguments = (shared_args.SKIP_CHECK_ARG, shared_args.QUIET_ARG,
                 shared_args.BRANCH_ARG, shared_args.STDOUT_CALLBACK_ARG,
                 shared_args.FACTORY_AUTH_ARG, shared_args.LIMIT_ARG,
                 Argument('playbook',
                          help="""
            The ansible playbook .yml file to run.
            Options are the `*.yml` files located under `commcare_cloud/ansible`
            which is under `src` for an egg install and under
            `<virtualenv>/lib/python2.7/site-packages` for a wheel install.
        """))

    def modify_parser(self):
        """Append ansible-playbook's own options to this command's --help,
        minus options that are deprecated or managed by commcare-cloud."""
        add_to_help_text(
            self.parser, "\n{}\n{}".format(
                "The ansible-playbook options below are available as well:",
                filtered_help_message(
                    "ansible-playbook -h",
                    below_line='Options:',
                    above_line=None,
                    # These would conflict with values commcare-cloud sets
                    # itself (inventory, vault, limit, check/diff mode).
                    exclude_args=DEPRECATED_ANSIBLE_ARGS + [
                        '--help',
                        '--diff',
                        '--check',
                        '-i',
                        '--ask-vault-pass',
                        '--vault-password-file',
                        '--limit',
                    ],
                )))

    def run(self, args, unknown_args, always_skip_check=False):
        """Run the requested playbook for the selected environment.

        :param always_skip_check: force-skip the --check dry run (set by
            wrappers such as deploy-stack --first-time).
        :returns: the exit code from run_ansible_playbook.
        """
        environment = get_environment(args.env_name)
        environment.create_generated_yml()
        ansible_context = AnsibleContext(args)
        check_branch(args)
        # Propagate the playbook's exit code: callers (DeployStack,
        # UpdateUserPublicKey, OpenvpnClaimUser) do
        # `return AnsiblePlaybook(...).run(...)`, so without this `return`
        # a failed playbook reported success (None -> exit code 0).
        return run_ansible_playbook(environment, args.playbook, ansible_context,
                                    args.skip_check, args.quiet, always_skip_check,
                                    args.limit, args.use_factory_auth, unknown_args)
Ejemplo n.º 23
0
class SendDatadogEvent(CommandBase):
    command = 'send-datadog-event'
    # Fixed typo: "maintainance" -> "maintenance"
    help = "Track an infrastructure maintenance event in Datadog"

    arguments = (
        Argument('event_title',
                 help="""
            Title of the datadog event.
        """),
        Argument('event_text',
                 help="""
            Text content of the datadog event.
        """),
    )

    def run(self, args, unknown_args):
        """Post an event to Datadog via ansible's `datadog_event` module,
        run against localhost and tagged with the current environment.

        :returns: the exit code of run_ansible_module.
        """
        args.module = 'datadog_event'
        environment = get_environment(args.env_name)
        # Datadog credentials come from the environment's ansible vault.
        vault = environment.get_vault_variables()['secrets']
        tags = "environment:{}".format(args.env_name)
        # NOTE(review): title/text are spliced into a key=value string inside
        # single quotes; a title or text containing a quote will break module
        # arg parsing. Acceptable for trusted operator input, but worth
        # hardening (e.g. shlex.quote) if ever fed untrusted text.
        args.module_args = "api_key={api_key} app_key={app_key} " \
            "tags='{tags}' text='{text}' title='{title}' aggregation_key={agg}".format(
                api_key=vault['DATADOG_API_KEY'],
                app_key=vault['DATADOG_APP_KEY'],
                tags=tags,
                text=args.event_text,
                title=args.event_title,
                agg='commcare-cloud'
            )
        return run_ansible_module(
            environment,
            AnsibleContext(args),
            '127.0.0.1',
            args.module,
            args.module_args,
            False,
            False,
            False,
        )
Ejemplo n.º 24
0
    def modify_parser(self):
        """Add a `--<repo>-rev` option for each git repository configured
        in the selected environment's meta config.

        NOTE(review): reads the environment name directly from sys.argv[1]
        because arguments have not been parsed yet when the parser is being
        built; this assumes the env name is always the first CLI argument
        (it would pick up the wrong token if a global flag preceded it) —
        verify against how commcare-cloud invokes modify_parser.
        """
        env_name = sys.argv[1]
        # Bail out silently for unknown envs so `--help` etc. still work.
        if env_name not in get_available_envs():
            return

        environment = get_environment(env_name)
        if environment.meta_config.git_repositories:
            for repo in environment.meta_config.git_repositories:
                Argument('--{}-rev'.format(repo.name),
                         help="""
                    The name of the git branch, tag, or SHA-1 commit hash to deploy for the
                    '{}' ({}) repository
                """.format(repo.name, repo.url)).add_to_parser(self.parser)
Ejemplo n.º 25
0
class DeployStack(_AnsiblePlaybookAlias):
    command = 'deploy-stack'
    aliases = ('aps', )
    help = """
        Run the ansible playbook for deploying the entire stack.

        Often used in conjunction with --limit and/or --tag
        for a more specific update.
    """

    arguments = _AnsiblePlaybookAlias.arguments + (Argument(
        '--first-time',
        action='store_true',
        help="""
        Use this flag for running against a newly-created machine.

        It will first use factory auth to set up users,
        and then will do the rest of deploy-stack normally,
        but skipping check mode.

        Running with this flag is equivalent to

        ```
        commcare-cloud <env> bootstrap-users <...args>
        commcare-cloud <env> deploy-stack --skip-check --skip-tags=users <...args>
        ```

        If you run and it fails half way, when you're ready to retry, you're probably
        better off running
        ```
        commcare-cloud <env> deploy-stack --skip-check --skip-tags=users <...args>
        ```
        since if it made it through bootstrap-users
        you won't be able to run bootstrap-users again.
        """), )

    def run(self, args, unknown_args):
        """Deploy the full stack, optionally bootstrapping users first."""
        skip_check = False
        if args.first_time:
            # Bootstrap first, on copies so the real args stay untouched
            # if bootstrapping fails.
            bootstrap_rc = BootstrapUsers(self.parser).run(
                deepcopy(args), deepcopy(unknown_args))
            if bootstrap_rc != 0:
                return bootstrap_rc
            # Users were just created above; no need to run that tag again.
            unknown_args += ('--skip-tags=users', )
            args.quiet = True
            skip_check = True
        args.playbook = 'deploy_stack.yml'
        return AnsiblePlaybook(self.parser).run(
            args, unknown_args, always_skip_check=skip_check)
Ejemplo n.º 26
0
class UpdateUserPublicKey(_AnsiblePlaybookAlias):
    command = 'update-user-key'
    help = "Update a single user's public key (because update-users takes forever)."
    arguments = _AnsiblePlaybookAlias.arguments + (Argument(
        "username", help="username who owns the public key"), )

    def run(self, args, unknown_args):
        """Push a single user's public key by running only the `pubkey` tag
        of deploy_stack.yml with dev_users limited to that user.

        :returns: the exit code from AnsiblePlaybook.run.
        """
        args.playbook = 'deploy_stack.yml'
        unknown_args += (
            '--tags=pubkey',
            # Quote the username so the --extra-vars payload is valid JSON;
            # previously the bare word only parsed by accident (as YAML) and
            # broke for usernames with characters special to YAML.
            '--extra-vars={{"dev_users": {{"present": ["{}"]}}}}'.format(
                args.username),
        )
        return AnsiblePlaybook(self.parser).run(args, unknown_args)
Ejemplo n.º 27
0
class MakeChangelog(CommandBase):
    command = 'make-changelog'
    help = "Build the commcare-cloud CLI tool's individual changelog files"
    arguments = (
        Argument(dest='changelog_yml', help="""Path to the yaml changelog file"""),
    )

    def run(self, args, unknown_args):
        """Render one changelog yaml file through changelog.md.j2 and print it.

        The ordinal is parsed from the filename's leading number
        (e.g. ``0042-something.yml`` -> 42).
        """
        changelog_yml = args.changelog_yml
        ordinal = int(changelog_yml.split('/')[-1].split('-')[0])
        j2 = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), keep_trailing_newline=True)

        changelog_entry = load_changelog_entry(changelog_yml)
        template = j2.get_template('changelog.md.j2')

        text = template.render(changelog_entry=changelog_entry, ordinal=ordinal)
        # Print the text itself: the old `.encode("utf-8")` was a PY2
        # leftover that, on Python 3, printed a bytes repr (b'...')
        # instead of the rendered markdown.
        print(text.rstrip())
Ejemplo n.º 28
0
class NewChangelog(CommandBase):
    command = 'new-changelog'
    help = "Create a blank changelog"
    arguments = (Argument(dest="name",
                          nargs="?",
                          help="""Name of the changelog"""), )

    def run(self, args, unknown_args):
        """Create the next numbered changelog yaml file from the template.

        Finds the highest-numbered existing entry in the changelog directory,
        then renders changelog-template.yml.j2 into ``NNNN-<key>.yml``.

        :returns: 1 if no existing changelog file could be found.
        """
        j2 = jinja2.Environment(loader=jinja2.FileSystemLoader(
            os.path.dirname(__file__)),
                                keep_trailing_newline=True)

        changelog_dir = 'changelog'
        # _sort_files presumably yields highest-numbered entries first,
        # so the first .yml hit is the latest changelog — TODO confirm.
        for filename in _sort_files(changelog_dir):
            if filename.endswith(".yml"):
                last_log = filename
                break
        else:
            puts(
                color_error(
                    "Unable to find last changelog file. Please create a changelog manually."
                ))
            return 1

        last_index = int(re.search(r"^(\d+)", last_log).group())

        name = args.name
        date = datetime.utcnow()
        if not name:
            name = "auto {}".format(date.strftime("%Y%m%d_%H%M"))

        # Newlines/tabs would corrupt both the filename and the yaml content.
        name = re.sub(r"[\n\r\t]", " ", name)
        key = name.replace(" ", "_")
        file_name = "{:04d}-{}.yml".format(last_index + 1, key)

        template = j2.get_template('changelog-template.yml.j2')
        path = os.path.join(changelog_dir, file_name)
        # Write as UTF-8 explicitly, consistent with the other file writes
        # in this module (previously relied on the platform default encoding).
        with open(path, 'w', encoding='utf-8') as f:
            f.write(
                template.render(name=name,
                                key=key,
                                date=date.strftime("%Y-%m-%d")).rstrip())

        print("Changelog created at {}".format(path))
Ejemplo n.º 29
0
class OpenvpnClaimUser(_AnsiblePlaybookAlias):
    command = 'openvpn-claim-user'
    help = """
    Claim an OpenVPN user as your own, setting its password
    """

    arguments = _AnsiblePlaybookAlias.arguments + (Argument('vpn_user',
                                                            help="""
            The user to claim.

            Must be one of the defined ssh users defined for the environment.
        """), )

    def run(self, args, unknown_args):
        """Run the playbook that marks the given VPN user as claimed."""
        args.playbook = 'openvpn_playbooks/mark_vpn_user_claimed.yml'
        unknown_args += ('-e', 'vpn_user={}'.format(args.vpn_user))
        playbook_command = AnsiblePlaybook(self.parser)
        # always_skip_check: claiming a user has no meaningful dry-run.
        return playbook_command.run(args,
                                    unknown_args,
                                    always_skip_check=True)
Ejemplo n.º 30
0
class PillowResourceReport(CommandBase):
    command = 'pillow-resource-report'
    help = """
    Report of pillow resources.
    """

    arguments = (Argument('--csv',
                          action='store_true',
                          help=("Output table as CSV")), )

    def run(self, args, manage_args):
        """Print a table (or CSV with --csv) of process counts per pillow."""
        environment = get_environment(args.env_name)
        by_process = _get_pillow_resources_by_name(environment)

        headers = ['Pillow', 'Processes']
        rows = []
        # Sort by pillow/queue name for stable, readable output.
        for queue_name, stats in sorted(by_process.items(), key=itemgetter(0)):
            rows.append([queue_name, stats['num_processes']])

        print_table(headers, rows, args.csv)