def _setup_app_list_subparser(subparsers):
    """Register the app list subcommand: manage.py app list

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    # The list command takes no extra arguments; registering it is sufficient
    generate_subparser(
        subparsers,
        'list',
        description='List all configured app functions, grouped by cluster',
        subcommand=True,
    )
def _setup_athena_drop_all_subparser(subparsers):
    """Register the athena drop-all-tables subcommand: manage.py athena drop-all-tables

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    # No options are needed for this destructive command; it is registered as-is
    generate_subparser(
        subparsers,
        'drop-all-tables',
        description='Drop all tables from an Athena database',
        subcommand=True,
    )
def _setup_threat_intel_auth_subparser(subparsers):
    """Add threat intel downloader update-auth subparser

    manage.py threat-intel-downloader update-auth

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    generate_subparser(
        subparsers,
        'update-auth',
        # FIX: the previous description ('Enable, disable, or configure the
        # threat intel downloader function') was copy-pasted from the
        # `configure` subcommand and did not describe this command
        description='Update the authentication information for the threat intel downloader',
        subcommand=True)
def _setup_athena_rebuild_subparser(cls, subparsers):
    """Register the athena rebuild-partitions subcommand.

    $ manage.py athena rebuild-partitions [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    rebuild_parser = generate_subparser(
        subparsers,
        'rebuild-partitions',
        description='Rebuild the partitions for an Athena table',
        subcommand=True,
    )

    # Show a worked example in the help epilog
    set_parser_epilog(
        rebuild_parser,
        epilog=(
            '''\
            Examples:

                manage.py athena rebuild-partitions \\
                    --bucket s3.bucket.name \\
                    --table-name my_athena_table
            '''
        )
    )

    # Shared --bucket/--table-name style arguments for all athena subcommands
    cls._add_default_athena_args(rebuild_parser)
def setup_subparser(cls, subparser):
    """Register the lookup-tables get subcommand and its arguments."""
    fetch_parser = generate_subparser(
        subparser,
        'get',
        description='Retrieves a key from the requested LookupTable',
        subcommand=True
    )

    set_parser_epilog(
        fetch_parser,
        epilog=(
            '''\
            Examples:

                manage.py lookup-tables get -t [table] -k [key]
            '''
        )
    )

    # Both the table and the key are mandatory; there are no sensible defaults
    fetch_parser.add_argument(
        '-t', '--table', help='Name of the LookupTable', required=True
    )
    fetch_parser.add_argument(
        '-k', '--key', help='Key to fetch on the LookupTable', required=True
    )
def setup_subparser(cls, subparser):
    """Setup: manage.py output set [options]

    Registers the 'set' subcommand with a required SERVICE positional and an
    optional --update flag.
    """
    # Sorted so help output lists services deterministically
    outputs = sorted(StreamAlertOutput.get_all_outputs().keys())

    service_parser = generate_subparser(
        subparser,
        'set',
        description=cls.description,
        help=cls.description,
        subcommand=True,
    )

    # Required positional: which output service is being configured
    service_parser.add_argument(
        'service',
        choices=outputs,
        metavar='SERVICE',
        help='Create a new StreamAlert output for one of the available services: {}'.format(
            ', '.join(outputs)
        ),
    )

    # Optional flag permitting an existing output to be overwritten
    service_parser.add_argument(
        '--update',
        '-u',
        action='store_true',
        default=False,
        help='If the output already exists, overwrite it',
    )
def setup_subparser(cls, subparser):
    """Setup: manage.py output set-from-file [options]

    Registers the 'set-from-file' subcommand with optional --file and
    --update flags.
    """
    file_parser = generate_subparser(
        subparser,
        'set-from-file',
        description=cls.description,
        help=cls.description,
        subcommand=True,
    )

    # Optional: path of the json file to read outputs from (default OUTPUTS_FILE)
    file_parser.add_argument(
        '--file',
        '-f',
        default=OUTPUTS_FILE,
        help='Path to the json file, relative to the current working directory',
    )

    # Optional: permit overwriting outputs that already exist
    file_parser.add_argument(
        '--update',
        '-u',
        action='store_true',
        default=False,
        help='Allow existing outputs to be overwritten',
    )
def setup_subparser(cls, subparser):
    """Add generate-skeleton subparser to the output subparser.

    Registers the 'generate-skeleton' subcommand with optional --services
    and --file flags.
    """
    # Sorted so help output lists services deterministically
    outputs = sorted(StreamAlertOutput.get_all_outputs().keys())

    skeleton_parser = generate_subparser(
        subparser,
        'generate-skeleton',
        description=cls.description,
        help=cls.description,
        subcommand=True,
    )

    # Optional: restrict skeleton generation to a subset of services
    # (all services by default)
    skeleton_parser.add_argument(
        '--services',
        choices=outputs,
        nargs='+',
        metavar='SERVICE',
        default=outputs,
        help='Pass the services to generate the skeleton for from services: {}'.format(
            ', '.join(outputs)
        ),
    )

    # Optional: destination file for the generated skeleton
    skeleton_parser.add_argument(
        '--file',
        '-f',
        default=OUTPUTS_FILE,
        help='File to write to, relative to the current working directory',
    )
def setup_subparser(cls, subparser):
    """Add the output get subparser: manage.py output get [options]"""
    # Sorted so help output lists services deterministically
    outputs = sorted(StreamAlertOutput.get_all_outputs().keys())

    output_get_parser = generate_subparser(
        subparser,
        'get',
        description=cls.description,
        help=cls.description,
        subcommand=True,
    )

    # Required positional: which service's outputs/secrets to retrieve
    output_get_parser.add_argument(
        'service',
        choices=outputs,
        metavar='SERVICE',
        help='Service to pull configured outputs and their secrets, select from: {}'.format(
            ', '.join(outputs)
        ),
    )

    # Optional: narrow the retrieval down to one or more descriptors
    output_get_parser.add_argument(
        '--descriptors',
        '-d',
        nargs="+",
        default=False,
        help='Pass descriptor and service to pull back the relevant configuration',
    )
def build_parser(): """Build the argument parser.""" # Map of top-level commands and their setup functions/description # New top-level commands should be added to this dictionary commands = StreamAlertCLICommandRepository.command_parsers() description_template = """ StreamAlert v{} Configure, test, build, and deploy StreamAlert Available Commands: {} For additional help with any command above, try: {} [command] --help """ parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, prog=__file__) parser.add_argument('-v', '--version', action='version', version=version) parser.add_argument( '-d', '--debug', help= 'enable debugging logger output for all of the StreamAlert loggers', action='store_true') # Dynamically generate subparsers, and create a 'commands' block for the prog description command_block = [] subparsers = parser.add_subparsers(dest='command', required=True) command_col_size = max([len(command) for command in commands]) + 10 for command in sorted(commands): setup_subparser_func, description = commands[command] subparser = generate_subparser(subparsers, command, description=description) # If there are additional arguments to set for this command, call its setup function if setup_subparser_func: setup_subparser_func(subparser) command_block.append('\t{command: <{pad}}{description}'.format( command=command, pad=command_col_size, description=description)) # Update the description on the top level parser parser.description = description_template.format(version, '\n'.join(command_block), __file__) parser.epilog = 'Issues? Please report here: https://github.com/airbnb/streamalert/issues' return parser
def _setup_rule_staging_unstage_subparser(cls, subparsers):
    """Register the rule staging unstage subcommand: manage.py rule-staging unstage

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    unstage_parser = generate_subparser(
        subparsers,
        'unstage',
        description='Unstage the provided rules',
        subcommand=True,
    )

    # Shared rule-selection arguments used by all rule-staging subcommands
    cls._add_default_rule_staging_args(unstage_parser)
def _setup_test_classifier_subparser(cls, subparsers):
    """Register the test classifier subcommand: manage.py test classifier [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    classifier_parser = generate_subparser(
        subparsers,
        'classifier',
        description='Validate defined log schemas using integration test files',
        subcommand=True,
    )

    # Shared test-selection arguments used by all test subcommands
    cls._add_default_test_args(classifier_parser)
def _setup_test_live_subparser(cls, subparsers):
    """Register the test live subcommand: manage.py test live [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    live_parser = generate_subparser(
        subparsers,
        'live',
        description=(
            'Run end-to-end tests that will attempt to send alerts to each rule\'s outputs'
        ),
        subcommand=True,
    )

    # Shared test-selection arguments used by all test subcommands
    cls._add_default_test_args(live_parser)
def setup_subparser(cls, subparser):
    """Register the lookup-tables list-add subcommand and its arguments."""
    list_add_parser = generate_subparser(
        subparser,
        'list-add',
        description='Sets a key on the requested LookupTable',
        subcommand=True
    )

    set_parser_epilog(
        list_add_parser,
        epilog=(
            '''\
            Examples:

                manage.py lookup-tables list-add -t [table] -k [key] -v [value]
            '''
        )
    )

    # Table, key, and value are all required for a list-add
    list_add_parser.add_argument(
        '-t', '--table', help='Name of the LookupTable', required=True
    )
    list_add_parser.add_argument(
        '-k', '--key', help='Key to modify on the LookupTable', required=True
    )
    list_add_parser.add_argument(
        '-v', '--value', help='Value to add to the key', required=True
    )

    # Optional post-processing of the resulting list
    list_add_parser.add_argument(
        '-u', '--unique',
        help='Remove duplicate values from the final list',
        action='store_true'
    )
    list_add_parser.add_argument(
        '-s', '--sort',
        help='Sort the final list',
        action='store_true'
    )
def _setup_rule_staging_status_subparser(subparsers):
    """Register the rule staging status subcommand: manage.py rule-staging status

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    status_parser = generate_subparser(
        subparsers,
        'status',
        description='List all rules within the rule database and their staging status',
        subcommand=True,
    )

    # Optional verbosity flag for extra per-rule details
    status_parser.add_argument(
        '-v',
        '--verbose',
        action='store_true',
        help='Output additional information for rules in the database',
    )
def _setup_athena_create_table_subparser(cls, subparsers):
    """Add the athena create-table subparser: manage.py athena create-table [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    athena_create_table_parser = generate_subparser(
        subparsers,
        'create-table',
        description='Create an Athena table',
        subcommand=True
    )

    set_parser_epilog(
        athena_create_table_parser,
        epilog=(
            '''\
            Examples:

                manage.py athena create-table \\
                    --bucket s3.bucket.name \\
                    --table-name my_athena_table
            '''
        )
    )

    cls._add_default_athena_args(athena_create_table_parser)

    # Validate the provided schema-override options
    def _validate_override(val):
        """Make sure the input is in the format column_name=type"""
        err = ('Invalid override expression [{}]. The proper format is '
               '"column_name=value_type"').format(val)
        # A single split check covers both "no '=' present" (1 part) and
        # "more than one '='" (3+ parts)
        if len(val.split('=')) != 2:
            # FIX: was `raise athena_create_table_parser.error(err)` —
            # parser.error() raises SystemExit itself and returns nothing,
            # so the `raise` would have been `raise None` (a TypeError)
            athena_create_table_parser.error(err)
        # FIX: argparse `type=` callables must return the converted value;
        # the original returned None, so every override became None
        return val

    athena_create_table_parser.add_argument(
        '--schema-override',
        nargs='+',
        help=(
            'Value types to override with new types in the log schema. '
            'The provided input should be space-separated '
            'directives like "column_name=value_type"'
        ),
        action=UniqueSortedListAction,
        default=[],
        type=_validate_override
    )
def setup_subparser(cls, subparser):
    """Register the lookup-tables set subcommand and its arguments."""
    kv_parser = generate_subparser(
        subparser,
        'set',
        description='Sets a key on the requested LookupTable',
        subcommand=True
    )

    set_parser_epilog(
        kv_parser,
        epilog=(
            '''\
            Examples:

                manage.py lookup-tables set -t [table] -k [key] -v [value]
            '''
        )
    )

    # Table, key, and value are all required for a set operation
    kv_parser.add_argument(
        '-t', '--table', help='Name of the LookupTable', required=True
    )
    kv_parser.add_argument(
        '-k', '--key', help='Key to set on the LookupTable', required=True
    )
    kv_parser.add_argument(
        '-v', '--value', help='Value to save into LookupTable', required=True
    )

    # Optional: treat the value as JSON rather than a raw string
    kv_parser.add_argument(
        '-j', '--json',
        help='Interpret the value as a JSON-encoded string',
        action='store_true'
    )
def setup_subparser(cls, subparser):
    """Register the lookup-tables describe-tables subcommand."""
    describe_parser = generate_subparser(
        subparser,
        'describe-tables',
        description='Shows metadata about all currently configured LookupTables',
        subcommand=True
    )

    # No arguments beyond the subcommand itself; only the epilog is added
    set_parser_epilog(
        describe_parser,
        epilog=(
            '''\
            Examples:

                manage.py lookup-tables describe-tables
            '''
        )
    )
def _setup_app_update_auth_subparser(cls, subparsers):
    """Register the app update-auth subcommand: manage.py app update-auth [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    update_auth_parser = generate_subparser(
        subparsers,
        'update-auth',
        description='Update the authentication information for an existing app',
        subcommand=True,
    )

    # Show a worked example in the help epilog
    set_parser_epilog(
        update_auth_parser,
        epilog=(
            '''\
            Example:

                manage.py app update-auth \\
                  --cluster prod \\
                  --name duo_prod_collector
            '''
        )
    )

    # Shared --cluster/--name style arguments used by all app subcommands
    cls._add_default_app_args(update_auth_parser)
def _setup_test_rules_subparser(cls, subparsers):
    """Add the test rules subparser: manage.py test rules [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    test_rules_parser = generate_subparser(
        subparsers,
        'rules',
        description='Test rules using integration test files',
        subcommand=True)

    # Flag to run additional stats during testing
    test_rules_parser.add_argument(
        '-s',
        '--stats',
        action='store_true',
        help='Enable outputing of statistical information on rules that run'
    )

    # Validate the provided repetition value
    def _validate_repetition(val):
        """Make sure the input is between 1 and 1000"""
        err = (
            'Invalid repitition value [{}]. Must be an integer between 1 '
            'and 1000').format(val)
        try:
            count = int(val)
        except ValueError:
            # FIX: int() raises ValueError (not TypeError) on non-numeric
            # strings, and argparse always passes strings to `type=` callables,
            # so the original except clause never fired and bad input crashed
            # with a traceback. Also, parser.error() raises SystemExit itself,
            # so the original `raise parser.error(err)` was incorrect.
            test_rules_parser.error(err)

        if not 1 <= count <= 1000:
            test_rules_parser.error(err)

        return count

    # flag to run these tests a given number of times
    test_rules_parser.add_argument(
        '-n',
        '--repeat',
        default=1,
        type=_validate_repetition,
        help='Number of times to repeat the tests, to be used as a form performance testing'
    )

    # Shared test-selection arguments used by all test subcommands
    cls._add_default_test_args(test_rules_parser)
def _setup_app_new_subparser(cls, subparsers):
    """Register the app new subcommand: manage.py app new [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    new_app_parser = generate_subparser(
        subparsers,
        'new',
        description='Create a new StreamAlert app to poll logs from various services',
        subcommand=True,
    )

    # Show a full worked example in the help epilog
    set_parser_epilog(
        new_app_parser,
        epilog=(
            '''\
            Example:

                manage.py app new \\
                  duo_auth \\
                  --cluster prod \\
                  --name duo_prod_collector \\
                  --schedule-expression 'rate(2 hours)' \\
                  --timeout 60 \\
                  --memory 256
            '''
        )
    )

    # Shared --cluster/--name style arguments used by all app subcommands
    cls._add_default_app_args(new_app_parser)

    # Sorted so help output lists app types deterministically
    app_types = sorted(StreamAlertApp.get_all_apps())

    # App type options
    new_app_parser.add_argument(
        'type',
        choices=app_types,
        metavar='APP_TYPE',
        help='Type of app being configured: {}'.format(', '.join(app_types)),
    )

    # Function schedule expression (rate) arg
    add_schedule_expression_arg(new_app_parser)

    # Function timeout arg
    add_timeout_arg(new_app_parser)

    # Function memory arg
    add_memory_arg(new_app_parser)
def _setup_rule_staging_enable_subparser(subparsers):
    """Register the rule staging enable subcommand: manage.py rule-staging enable

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    enable_parser = generate_subparser(
        subparsers,
        'enable',
        description='Enable or disable the rule staging feature',
        subcommand=True,
    )

    # Exactly one of --true/--false must be supplied; both write to the
    # same `enable` destination with opposite values
    toggle_group = enable_parser.add_mutually_exclusive_group(required=True)

    toggle_group.add_argument(
        '-t',
        '--true',
        dest='enable',
        help='Enable the rule staging feature',
        action='store_true',
    )

    toggle_group.add_argument(
        '-f',
        '--false',
        dest='enable',
        help='Disable the rule staging feature',
        action='store_false',
    )
def setup_subparser(cls, subparser):
    """Register the lookup-tables set-from-json-file subcommand and its arguments."""
    json_file_parser = generate_subparser(
        subparser,
        'set-from-json-file',
        description='Pushes the contents of a given json file into the LookupTable key',
        subcommand=True
    )

    set_parser_epilog(
        json_file_parser,
        epilog=(
            '''\
            Examples:

                manage.py lookup-tables set-from-json-file -t [table] -k [key] -f \
[path/to/file.json]
            '''
        )
    )

    # Table, key, and source file are all required
    json_file_parser.add_argument(
        '-t', '--table', help='Name of the LookupTable', required=True
    )
    json_file_parser.add_argument(
        '-k', '--key', help='Key to modify on the LookupTable', required=True
    )
    json_file_parser.add_argument(
        '-f', '--file',
        help='Path to the json file, relative to the current working directory',
        required=True
    )
def setup_subparser(cls, subparser):
    """Add the output list subparser: manage.py output list [options]"""
    # Sorted so help output lists services deterministically
    outputs = sorted(StreamAlertOutput.get_all_outputs().keys())

    output_list_parser = generate_subparser(
        subparser,
        'list',
        description=cls.description,
        help=cls.description,
        subcommand=True,
    )

    # Optional: restrict the listing to one or more services
    # (all services by default; nargs='*' allows a bare --service as well)
    output_list_parser.add_argument(
        '--service',
        '-s',
        choices=outputs,
        default=outputs,
        nargs='*',
        metavar='SERVICE',
        help='Pass Services to list configured output descriptors, select from: {}'.format(
            ', '.join(outputs)
        ),
    )
def build_parser(): """Build the argument parser.""" # Map of top-level commands and their setup functions/description # New top-level commands should be added to this dictionary commands = StreamAlertCLICommandRepository.command_parsers() description_template = """ StreamAlert v{} Configure, test, build, and deploy StreamAlert Available Commands: {} For additional help with any command above, try: {} [command] --help """ parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter, prog=__file__) parser.add_argument('-v', '--version', action='version', version=version) parser.add_argument( '-d', '--debug', help= 'enable debugging logger output for all of the StreamAlert loggers', action='store_true') parser.add_argument( '-c', '--config-dir', default=DEFAULT_CONFIG_PATH, help='Path to directory containing configuration files', type=DirectoryType()) parser.add_argument( '-t', '--terraform-file', dest='terraform_files', help=('Path to one or more additional Terraform configuration ' 'files to include in this deployment'), action=UniqueSortedFileListAppendAction, type=FileType('r'), default=[]) parser.add_argument( '-b', '--build-directory', help= ('Path to directory to use for building StreamAlert and its infrastructure. 
' 'If no path is provided, a temporary directory will be used.'), type=str) # Dynamically generate subparsers, and create a 'commands' block for the prog description command_block = [] subparsers = parser.add_subparsers(dest='command', required=True) command_col_size = max([len(command) for command in commands]) + 10 for command in sorted(commands): setup_subparser_func, description = commands[command] subparser = generate_subparser(subparsers, command, description=description) # If there are additional arguments to set for this command, call its setup function if setup_subparser_func: setup_subparser_func(subparser) command_block.append('\t{command: <{pad}}{description}'.format( command=command, pad=command_col_size, description=description)) # Update the description on the top level parser parser.description = description_template.format(version, '\n'.join(command_block), __file__) parser.epilog = 'Issues? Please report here: https://github.com/airbnb/streamalert/issues' return parser
def _setup_threat_intel_configure_subparser(subparsers):
    """Register the threat intel downloader configure subcommand.

    manage.py threat-intel-downloader configure [options]

    Args:
        subparsers: argparse subparsers object to attach the command to
    """
    configure_parser = generate_subparser(
        subparsers,
        'configure',
        description='Enable, disable, or configure the threat intel downloader function',
        subcommand=True)

    # Enable/Disable toggle group: at most one of -e/-d, both writing
    # opposite values into `enable_threat_intel_downloader`
    toggle_group = configure_parser.add_mutually_exclusive_group(required=False)

    toggle_group.add_argument(
        '-e',
        '--enable',
        dest='enable_threat_intel_downloader',
        help='Enable the threat intel downloader function',
        action='store_true')

    toggle_group.add_argument(
        '-d',
        '--disable',
        dest='enable_threat_intel_downloader',
        help='Disable the threat intel downloader function',
        action='store_false')

    # Function schedule expression (rate) arg
    add_schedule_expression_arg(configure_parser)

    # Function timeout arg
    add_timeout_arg(configure_parser)

    # Function memory arg
    add_memory_arg(configure_parser)

    # DynamoDB provisioned capacity settings
    configure_parser.add_argument(
        '-r',
        '--table-rcu',
        help='Read capacity units to use for the DynamoDB table',
        type=int,
        default=10)

    configure_parser.add_argument(
        '-w',
        '--table-wcu',
        help='Write capacity units to use for the DynamoDB table',
        type=int,
        default=10)

    # Which IOC attributes to persist; deduplicated/sorted by the custom action
    configure_parser.add_argument(
        '-k',
        '--ioc-keys',
        help='One or more IOC keys to store in DynamoDB table',
        nargs='+',
        action=UniqueSortedListAction,
        default=['expiration_ts', 'itype', 'source', 'type', 'value'])

    configure_parser.add_argument(
        '-f',
        '--ioc-filters',
        help='One or more filters to apply when retrieving IOCs from Threat Feed',
        nargs='+',
        action=UniqueSortedListAction,
        default=['crowdstrike', '@airbnb.com'])

    configure_parser.add_argument(
        '-i',
        '--ioc-types',
        help='One or more IOC type defined by the Threat Feed. IOC types can vary by feed',
        nargs='+',
        action=UniqueSortedListAction,
        default=['domain', 'ip', 'md5'])

    configure_parser.add_argument(
        '-x',
        '--excluded-sub-types',
        help='IOC subtypes to be excluded',
        action=UniqueSortedListAction,
        default=['bot_ip', 'brute_ip', 'scan_ip', 'spam_ip', 'tor_ip'])

    # Auto-scaling settings for the threat intel DynamoDB table
    configure_parser.add_argument(
        '-a',
        '--autoscale',
        help='Enable auto scaling for the threat intel DynamoDB table',
        default=False,
        action='store_true')

    configure_parser.add_argument(
        '--max-read-capacity',
        help='Maximum read capacity to use when auto scaling is enabled',
        type=int,
        default=5)

    configure_parser.add_argument(
        '--min-read-capacity',
        help='Minimum read capacity to use when auto scaling is enabled',
        type=int,
        default=5)

    configure_parser.add_argument(
        '-u',
        '--target-utilization',
        help=('Target percentage of consumed provisioned throughput at a point in time '
              'to use for auto-scaling the read capacity units'),
        type=int,
        default=70)