Example 1
def cli_runner(options):
    """Main Stream Alert CLI handler

    Args:
        options (dict): command line arguments passed from the argparser.
            Contains the following keys for terraform commands:
                (command, subcommand, target)
            Contains the following keys for lambda commands:
                (command, subcommand, env, func, source)
    """
    cli_load_message = ('Issues? Report here: '
                        'https://github.com/airbnb/streamalert/issues')
    LOGGER_CLI.info(cli_load_message)

    if options.command == 'output':
        configure_output(options)

    elif options.command == 'lambda':
        lambda_runner(options)

    elif options.command == 'terraform':
        terraform_runner(options)
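A minimal usage sketch follows, assuming an argparse parser whose subparsers populate options.command and options.subcommand; the subcommand choices shown are illustrative, not the project's full CLI surface.

import argparse

# Illustrative only: build the `options` namespace that cli_runner() dispatches on.
parser = argparse.ArgumentParser(description='StreamAlert CLI (sketch)')
subparsers = parser.add_subparsers(dest='command')

lambda_parser = subparsers.add_parser('lambda')
lambda_parser.add_argument('subcommand', choices=['deploy', 'rollback', 'test'])

terraform_parser = subparsers.add_parser('terraform')
terraform_parser.add_argument('subcommand', choices=['init', 'build', 'destroy'])

options = parser.parse_args(['terraform', 'build'])
cli_runner(options)  # dispatches to terraform_runner(options)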
Example 2
def run_command(runner_args, **kwargs):
    """Helper function to run commands with error handling.

    Args:
        runner_args (list): Commands to run via subprocess
        kwargs:
            cwd (str): A path to execute commands from
            error_message (str): Message to show if command fails
            quiet (bool): Whether to show command output or hide it

    Returns:
        bool: True if the command completed successfully, False otherwise
    """
    default_error_message = "An error occurred while running: {}".format(
        ' '.join(runner_args)
    )
    error_message = kwargs.get('error_message', default_error_message)

    default_cwd = 'terraform'
    cwd = kwargs.get('cwd', default_cwd)

    # Add the -force-copy flag for s3 state copying to suppress dialogs that
    # the user must type 'yes' into.
    if runner_args[0] == 'terraform':
        if runner_args[1] == 'init':
            runner_args.append('-force-copy')

    stdout_option = None
    if kwargs.get('quiet'):
        stdout_option = open(os.devnull, 'w')

    try:
        subprocess.check_call(runner_args, stdout=stdout_option, cwd=cwd)
    except subprocess.CalledProcessError as err:
        LOGGER_CLI.error('%s\n%s', error_message, err.cmd)
        return False
    except OSError as err:
        LOGGER_CLI.error('%s\n%s (%s)', error_message, err.strerror, runner_args[0])
        return False

    return True
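A usage sketch, assuming run_command is importable alongside sys; the error message text is illustrative.

import sys

# Initialize Terraform from the terraform/ directory, hiding command output
# and exiting if the call fails.
if not run_command(['terraform', 'init'],
                   cwd='terraform',
                   error_message='Unable to initialize Terraform',
                   quiet=True):
    sys.exit(1)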
Example 3
def generate_cloudwatch_metric_alarms(cluster_name, cluster_dict, config):
    """Add the CloudWatch Metric Alarms information to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for a given cluster.
        config (dict): The loaded config from the 'conf/' directory
    """
    infrastructure_config = config['global'].get('infrastructure')

    if not (infrastructure_config and 'monitoring' in infrastructure_config):
        LOGGER_CLI.error(
            'Invalid config: Make sure you declare global infrastructure options!'
        )
        return

    sns_topic_arn = monitoring_topic_arn(config)

    cluster_dict['module']['stream_alert_{}'.format(
        cluster_name)]['sns_topic_arn'] = sns_topic_arn

    stream_alert_config = config['clusters'][cluster_name]['modules'][
        'stream_alert']

    # Add cluster metric alarms for the rule and alert processors
    formatted_alarms = []
    for func_config in stream_alert_config.values():
        if 'metric_alarms' not in func_config:
            continue

        # TODO: update this logic to simply use a list of maps once Terraform fixes
        # their support for this, instead of the comma-separated string this creates
        metric_alarms = func_config['metric_alarms']
        for name, alarm_info in metric_alarms.iteritems():
            formatted_alarms.append(_format_metric_alarm(name, alarm_info))

    cluster_dict['module']['stream_alert_{}'.format(
        cluster_name)]['metric_alarms'] = formatted_alarms
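For reference, a hypothetical shape of the per-function configuration this loop walks; the alarm name and settings keys are illustrative, since the function only requires a dict mapping alarm name to alarm settings.

# Hypothetical cluster 'stream_alert' module config (illustrative keys and values)
stream_alert_config = {
    'rule_processor': {
        'metric_alarms': {
            'Rule Processor Failed Parses': {
                'metric_name': 'FailedParses',
                'threshold': 1,
                'evaluation_periods': 1
            }
        }
    },
    'alert_processor': {}  # no 'metric_alarms' key, so the loop skips it
}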
Example 4
def format_record(test_record):
    """Create a properly formatted Kinesis, S3, or SNS record.

    Supports a dictionary or string based data record.  Reads in
    event templates from the test/integration/templates folder.

    Args:
        test_record: Test record metadata dict with the following structure:
            data - string or dict of the raw data
            description - a string describing the test that is being performed
            trigger - bool of if the record should produce an alert
            source - which stream/s3 bucket originated the data
            service - which aws service originated the data

    Returns:
        dict in the format of the specific service
    """
    service = test_record['service']
    source = test_record['source']

    data_type = type(test_record['data'])
    if data_type == dict:
        data = json.dumps(test_record['data'])
    elif data_type in (unicode, str):
        data = test_record['data']
    else:
        LOGGER_CLI.info('Invalid data type: %s', type(test_record['data']))
        return

    # Get the template file for this particular service
    template_path = os.path.join(DIR_TEMPLATES, '{}.json'.format(service))
    with open(template_path, 'r') as service_template:
        try:
            template = json.load(service_template)
        except ValueError as err:
            LOGGER_CLI.error('Error loading %s.json: %s', service, err)
            return
    if service == 's3':
        # Set the S3 object key to a random value for testing
        test_record['key'] = ('{:032X}'.format(random.randrange(16**32)))
        template['s3']['object']['key'] = test_record['key']
        template['s3']['bucket']['arn'] = 'arn:aws:s3:::{}'.format(source)
        template['s3']['bucket']['name'] = source

        # Create the mocked s3 object in the designated bucket with the random key
        put_mocked_s3_object(source, test_record['key'], data)
    elif service == 'kinesis':
        template['kinesis']['data'] = base64.b64encode(data)
        template[
            'eventSourceARN'] = 'arn:aws:kinesis:us-east-1:111222333:stream/{}'.format(
                source)
    elif service == 'sns':
        # TODO implement sns testing
        raise NotImplementedError
    else:
        LOGGER_CLI.info('Invalid service %s', service)

    return template
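A usage sketch for the Kinesis path, assuming a kinesis.json template exists under the templates directory; the record values are made up, but the keys match what format_record() reads.

test_record = {
    'data': {'field': 'value', 'source_ip': '10.0.0.1'},
    'description': 'Example inbound connection event',
    'trigger': True,
    'source': 'example_stream',
    'service': 'kinesis'
}
kinesis_event = format_record(test_record)
# kinesis_event['kinesis']['data'] now holds the base64-encoded payload and
# kinesis_event['eventSourceARN'] points at the 'example_stream' stream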
Example 5
    def create_and_upload(self):
        """Create a Lambda deployment package, hash it, and upload it to S3.

        Reference:
            package_name: Generated name based on date/time/version/name
            temp_package_path: Temp package to store deployment package files
            package_path: Full path to zipped deployment package
            package_sha256: Checksum of package_path
            package_sha256_path: Full path to package_path checksum file
        """
        LOGGER_CLI.info('Creating package for %s', self.package_name)
        # get tmp dir and copy files
        temp_package_path = self._get_tmpdir()
        self._copy_files(temp_package_path)
        # download third-party libs
        if not self._resolve_third_party(temp_package_path):
            LOGGER_CLI.exception(
                'Failed to install necessary third-party libraries')
            exit(1)

        # zip up files
        package_path = self.zip(temp_package_path)
        generated_package_name = package_path.split('/')[-1]
        # checksum files
        package_sha256, package_sha256_path = self._sha256sum(package_path)
        # upload to s3
        if self._upload(package_path):
            # remove generated deployment files
            self._cleanup(package_path, package_sha256_path)
            # set new config values and update
            full_package_name = os.path.join(self.package_name,
                                             generated_package_name)
            # make all config changes here
            self.config['lambda'][
                self.config_key]['source_object_key'] = full_package_name
            self.config['lambda'][
                self.config_key]['source_current_hash'] = package_sha256
            self.config.write()
Example 6
def generate_s3_events(cluster_name, cluster_dict, config):
    """Add the S3 Events module to the Terraform cluster dict.

    Args:
        cluster_name (str): The name of the currently generating cluster
        cluster_dict (defaultdict): The dict containing all Terraform config for a given cluster.
        config (dict): The loaded config from the 'conf/' directory

    Returns:
        bool: Result of applying the s3_events module
    """
    modules = config['clusters'][cluster_name]['modules']
    s3_event_buckets = modules['s3_events']

    # Detect legacy and convert
    if isinstance(s3_event_buckets, dict) and 's3_bucket_id' in s3_event_buckets:
        del config['clusters'][cluster_name]['modules']['s3_events']
        s3_event_buckets = [{'bucket_id': s3_event_buckets['s3_bucket_id']}]
        config['clusters'][cluster_name]['modules']['s3_events'] = s3_event_buckets
        LOGGER_CLI.info('Converting legacy S3 Events config')
        config.write()

    for bucket_info in s3_event_buckets:
        if 'bucket_id' not in bucket_info:
            LOGGER_CLI.error('Config Error: Missing bucket_id key from s3_event configuration')
            return False

        cluster_dict['module']['s3_events_{}'.format(bucket_info['bucket_id'].replace(
            '.', '_'))] = {
                'source': 'modules/tf_stream_alert_s3_events',
                'lambda_function_arn':
                '${{module.stream_alert_{}.lambda_arn}}'.format(cluster_name),
                'bucket_id': bucket_info['bucket_id'],
                'enable_events': bucket_info.get('enable_events', True),
                'lambda_role_id': '${{module.stream_alert_{}.lambda_role_id}}'.format(cluster_name)
            }

    return True
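The two configuration shapes this function accepts, with illustrative bucket names: the legacy single-dict form (rewritten in place) and the current list form.

# Legacy form, detected and converted by generate_s3_events():
modules['s3_events'] = {'s3_bucket_id': 'example-data-bucket'}

# Current form, a list of bucket configurations:
modules['s3_events'] = [
    {'bucket_id': 'example-data-bucket'},
    {'bucket_id': 'example.other.bucket', 'enable_events': False}
]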
Example 7
def terraform_generate(config, init=False):
    """Generate all Terraform plans for the configured clusters.

    Args:
        config (dict): The loaded config from the 'conf/' directory
        init (bool): Indicates if main.tf.json is generated for `terraform init`

    Returns:
        bool: Result of cluster generating
    """
    cleanup_old_tf_files(config)

    # Setup the main.tf.json file
    LOGGER_CLI.debug('Generating cluster file: main.tf.json')
    with open('terraform/main.tf.json', 'w') as tf_file:
        json.dump(generate_main(init=init, config=config),
                  tf_file,
                  indent=2,
                  sort_keys=True)

    # Return early during the init process, clusters are not needed yet
    if init:
        return True

    # Setup cluster files
    for cluster in config.clusters():
        if cluster in RESTRICTED_CLUSTER_NAMES:
            raise InvalidClusterName(
                'Rename cluster "main" or "athena" to something else!')

        LOGGER_CLI.debug('Generating cluster file: %s.tf.json', cluster)
        cluster_dict = generate_cluster(cluster_name=cluster, config=config)
        if not cluster_dict:
            LOGGER_CLI.error(
                'An error was generated while creating the %s cluster',
                cluster)
            return False

        with open('terraform/{}.tf.json'.format(cluster), 'w') as tf_file:
            json.dump(cluster_dict, tf_file, indent=2, sort_keys=True)

    # Setup Athena if it is enabled
    generate_global_lambda_settings(
        config,
        config_name='athena_partition_refresh_config',
        config_generate_func=generate_athena,
        tf_tmp_file='terraform/athena.tf.json',
        message='Removing old Athena Terraform file')

    # Setup Threat Intel Downloader Lambda function if it is enabled
    generate_global_lambda_settings(
        config,
        config_name='threat_intel_downloader_config',
        config_generate_func=generate_threat_intel_downloader,
        tf_tmp_file='terraform/ti_downloader.tf.json',
        message='Removing old Threat Intel Downloader Terraform file')

    return True
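A sketch of how the init flag might be used in a bootstrap flow, assuming a loaded config object: first emit a minimal main.tf.json so `terraform init` can run, then regenerate the full set of cluster files. The surrounding orchestration is illustrative.

import sys

if not terraform_generate(config=config, init=True):
    sys.exit(1)
if not run_command(['terraform', 'init']):
    sys.exit(1)
if not terraform_generate(config=config):
    sys.exit(1)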
Example 8
def save_app_auth_info(app, info, overwrite=False):
    """Function to add app auth information to parameter store

    Args:
        app: The app integration being configured; used to look up its
            required authentication values
        info (dict): Required values needed to save the requested authentication
            information to AWS Parameter Store
        overwrite (bool): Whether to overwrite an existing parameter of the
            same name in Parameter Store

    Returns:
        bool: True if the authentication info was saved successfully
    """
    # Get all of the required authentication values from the user for this app integration
    auth_dict = {auth_key: user_input(info['description'], False, info['format'])
                 for auth_key, info in app.required_auth_info().iteritems()}

    description = ('Required authentication information for the \'{}\' service for '
                   'use in the \'{}\' app'.format(info['type'], info['app_name']))

    # Save these to the parameter store
    param_name = '{}_{}'.format(info['function_name'], AppConfig.AUTH_CONFIG_SUFFIX)
    saved = save_parameter(info['region'], param_name, auth_dict, description, overwrite)
    if saved:
        LOGGER_CLI.info('App authentication info successfully saved to parameter store.')
    else:
        LOGGER_CLI.error('App authentication info was not saved to parameter store.')

    return saved
Example 9
def generate_global_lambda_settings(config, config_name, generate_func,
                                    tf_tmp_file, message):
    """Generate settings for global Lambda functions

    Args:
        config (dict): lambda function settings read from 'conf/' directory
        config_name (str): keyname of lambda function settings in config.
        generate_func (func): method to generate lambda function settings.
        tf_tmp_file (str): filename of terraform file, generated by CLI.
        message (str): Message will be logged by LOGGER.
    """
    if not config['lambda'].get(config_name):
        LOGGER_CLI.info('Config for \'%s\' not in lambda.json', config_name)
        remove_temp_terraform_file(tf_tmp_file, message)
        return

    if config['lambda'][config_name].get('enabled', True):
        generated_config = generate_func(config=config)
        if generated_config:
            with open(tf_tmp_file, 'w') as tf_file:
                json.dump(generated_config, tf_file, indent=2, sort_keys=True)
    else:
        remove_temp_terraform_file(tf_tmp_file, message)
Example 10
    def zip(temp_package_path):
        """Create the StreamAlert Lambda deployment package archive.

        Zips all dependency files to run the function,
        and names this zipfile based on the current date/time,
        along with the Lambda function module version.

            example filename: stream_alert_1.0.0_20161010_00:11:22.zip

            Only package in the `.py` files per AWS's instructions
            for creation of lambda functions.

        Args:
            temp_package_path (str): the temporary file path to store the zip.

        Returns:
            str: Deployment package full path
        """
        LOGGER_CLI.debug('Creating Lambda package: %s', temp_package_path + '.zip')
        package_path = shutil.make_archive(temp_package_path, 'zip', temp_package_path)
        LOGGER_CLI.info('Package successfully created')

        return package_path
Example 11
    def toggle_metrics(self, enabled, clusters, lambda_functions):
        """Toggle CloudWatch metric logging and filter creation

        Args:
            enabled (bool): True to enable metrics, False to disable them
            clusters (list): Clusters to enable or disable metrics on
            lambda_functions (list): Which lambda functions to enable or disable
                metrics on (rule, alert, or athena)
        """
        for function in lambda_functions:
            if function == metrics.ATHENA_PARTITION_REFRESH_NAME:
                if 'athena_partition_refresh_config' in self.config['lambda']:
                    self.config['lambda']['athena_partition_refresh_config'] \
                        ['enable_metrics'] = enabled
                else:
                    LOGGER_CLI.error('No Athena configuration found; please initialize first.')
                continue

            for cluster in clusters:
                self.config['clusters'][cluster]['modules']['stream_alert'] \
                    [function]['enable_metrics'] = enabled

        self.write()
Example 12
def generate_global_lambda_settings(config, **kwargs):
    """Generate settings for global Lambda functions (Athena and Threat Intel Downloader)

    Args:
        config (dict): lambda function settings read from 'conf/' directory

    Keyword Args:
        config_name (str): keyname of lambda function settings in config.
        config_generate_func (func): method to generate lambda function settings.
        tf_tmp_file (str): filename of terraform file, generated by CLI.
        message (str): Message will be logged by LOGGER.
    """
    config_name = kwargs.get('config_name')
    tf_tmp_file = kwargs.get('tf_tmp_file')
    if config_name and config['lambda'].get(config_name) and tf_tmp_file:
        if config['lambda'].get(config_name)['enabled']:
            generated_config = kwargs.get('config_generate_func')(config=config)
            if generated_config:
                with open(tf_tmp_file, 'w') as tf_file:
                    json.dump(generated_config, tf_file, indent=2, sort_keys=True)
        else:
            if os.path.isfile(tf_tmp_file):
                LOGGER_CLI.info(kwargs.get('message'))
                os.remove(tf_tmp_file)
Example 13
def send_creds_to_s3(region, bucket, key, blob_data):
    """Put the encrypted credential blob for this service and destination in s3

    Args:
        region (str): AWS region to use for boto3 client
        bucket (str): The name of the s3 bucket to write the encrypted credentials to
        key (str): ID for the s3 object to write the encrypted credentials to
        blob_data (bytes): Cipher text blob from the kms encryption

    Returns:
        bool: True if the credentials were uploaded successfully, False otherwise
    """
    try:
        client = boto3.client('s3', region_name=region)
        client.put_object(Body=blob_data,
                          Bucket=bucket,
                          Key=key,
                          ServerSideEncryption='AES256')

        return True
    except ClientError as err:
        LOGGER_CLI.error(
            'An error occurred while sending credentials to S3 for key \'%s\' '
            'in bucket \'%s\': %s', key, bucket,
            err.response['Error']['Message'])
        return False
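A usage sketch with placeholder names, encrypting a secret with KMS before handing the ciphertext blob to send_creds_to_s3(); the key alias and bucket name are illustrative.

import boto3

kms = boto3.client('kms', region_name='us-east-1')
blob = kms.encrypt(KeyId='alias/example_streamalert_secrets',
                   Plaintext=b'secret-api-token')['CiphertextBlob']

sent = send_creds_to_s3(region='us-east-1',
                        bucket='example.streamalert.secrets',
                        key='example_output_credentials',
                        blob_data=blob)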
Example 14
def user_input(requested_info, mask, input_restrictions):
    """Prompt user for requested information

    Args:
        requested_info (str): Description of the information needed
        mask (bool): Decides whether to mask input or not
        input_restrictions: Either a compiled regular expression the response
            must match, or an iterable of characters the response must not contain

    Returns:
        str: response provided by the user
    """
    # pylint: disable=protected-access
    response = ''
    prompt = '\nPlease supply {}: '.format(requested_info)

    if not mask:
        while not response:
            response = raw_input(prompt)

        # Restrict having spaces or colons in items (applies to things like
        # descriptors, etc)
        if isinstance(input_restrictions, re._pattern_type):
            if not input_restrictions.match(response):
                LOGGER_CLI.error(
                    'The supplied input should match the following '
                    'regular expression: %s', input_restrictions.pattern)
                return user_input(requested_info, mask, input_restrictions)
        else:
            if any(x in input_restrictions for x in response):
                LOGGER_CLI.error(
                    'The supplied input should not contain any of the following: %s',
                    '"{}"'.format('", "'.join(input_restrictions)))
                return user_input(requested_info, mask, input_restrictions)
    else:
        while not response:
            response = getpass(prompt=prompt)

    return response
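Usage sketches with illustrative prompts and restrictions: a character blacklist, a compiled regex the response must match, and a masked secret.

import re

descriptor = user_input('a descriptor for this output', False, (' ', ':'))
bucket = user_input('an S3 bucket name', False, re.compile(r'[a-z0-9.\-]+$'))
api_key = user_input('the service API key', True, (' ',))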
Example 15
def terraform_generate(**kwargs):
    """Generate all Terraform plans for the configured clusters.

    Keyword Args:
        config [dict]: The loaded config from the 'conf/' directory
        init [bool]: Indicates if main.tf is generated for `terraform init`
    """
    config = kwargs.get('config')
    init = kwargs.get('init', False)

    # Setup main
    LOGGER_CLI.info('Generating cluster file: main.tf')
    main_json = json.dumps(generate_main(init=init, config=config),
                           indent=2,
                           sort_keys=True)
    with open('terraform/main.tf', 'w') as tf_file:
        tf_file.write(main_json)

    # Break out early during the init process, clusters aren't needed yet
    if init:
        return True

    # Setup clusters
    for cluster in config.clusters():
        if cluster == 'main':
            raise InvalidClusterName(
                'Rename cluster "main" to something else!')

        LOGGER_CLI.info('Generating cluster file: %s.tf', cluster)
        cluster_json = json.dumps(generate_cluster(cluster_name=cluster,
                                                   config=config),
                                  indent=2,
                                  sort_keys=True)
        with open('terraform/{}.tf'.format(cluster), 'w') as tf_file:
            tf_file.write(cluster_json)

    return True
Example 16
    def _resolve_third_party(self, temp_package_path):
        """Install all third-party packages into the deployment package folder

        Args:
            temp_package_path (str): Full path to temp package path

        Returns:
            bool: False if the pip command failed to install requirements, True otherwise
        """
        third_party_libs = self.config['lambda'][self.config_key]['third_party_libraries']
        # Return a default of True here if no libraries to install
        if not third_party_libs:
            LOGGER_CLI.info('No third-party libraries to install.')
            return True

        LOGGER_CLI.info(
            'Installing third-party libraries: %s',
            ', '.join(third_party_libs))
        pip_command = ['pip', 'install']
        pip_command.extend(third_party_libs)
        pip_command.extend(['--upgrade', '--target', temp_package_path])

        # Return True if the pip command is successfully run
        return run_command(pip_command, cwd=temp_package_path, quiet=True)
Example 17
    def _extract_precompiled_libs(self, temp_package_path):
        """Extract any precompiled third-party packages into the deployment package folder

        Args:
            temp_package_path (str): Full path to temp package path

        Returns:
            bool: True if precompiled libs were extracted successfully, False if some are missing
        """
        dependency_files = {
        }  # Map library name to location of its precompiled .zip file
        for path in self.package_files:
            if path.endswith('_dependencies.zip'):
                dependency_files[os.path.basename(path)] = path
            elif os.path.isdir(path):
                # Traverse directory looking for .zip files
                for root, _, package_files in os.walk(path):
                    dependency_files.update({
                        package_file: os.path.join(root, package_file)
                        for package_file in package_files
                        if package_file.endswith('_dependencies.zip')
                    })

        for lib in self.precompiled_libs:
            libs_name = '_'.join([lib, 'dependencies.zip'])
            if libs_name not in dependency_files:
                LOGGER_CLI.error('Missing precompiled libs for package: %s',
                                 libs_name)
                return False

            # Copy the contents of the dependency zip to the package directory
            with zipfile.ZipFile(dependency_files[libs_name],
                                 'r') as libs_file:
                libs_file.extractall(temp_package_path)

        return True
Example 18
    def _extract_precompiled_libs(self, temp_package_path):
        """Extract any precompiled third-party packages into the deployment package folder

        Args:
            temp_package_path (str): Full path to temp package path

        Returns:
            bool: False if the required libs were not found, True if otherwise
        """
        # Return true immediately if there are no precompiled requirements for this package
        if not self.precompiled_libs:
            return True

        # Get any dependency files throughout the package folders that have
        # the _dependencies.zip suffix
        dependency_files = {
            package_file: os.path.join(root, package_file)
            for folder in self.package_folders
            for root, _, package_files in os.walk(folder)
            for package_file in package_files
            if package_file.endswith('_dependencies.zip')
        }

        for lib in self.precompiled_libs:
            libs_name = '_'.join([lib, 'dependencies.zip'])
            if libs_name not in dependency_files:
                LOGGER_CLI.error('Missing precompiled libs for package: %s',
                                 libs_name)
                return False

            # Copy the contents of the dependency zip to the package directory
            with zipfile.ZipFile(dependency_files[libs_name],
                                 'r') as libs_file:
                libs_file.extractall(temp_package_path)

        return True
Example 19
def generate_flow_logs(cluster_name, cluster_dict, config):
    """Add the VPC Flow Logs module to the Terraform cluster dict.

    Args:
        cluster_name [string]: The name of the currently generating cluster
        cluster_dict [defaultdict]: The dict containing all Terraform config for
                                    a given cluster.
        config [dict]: The loaded config from the 'conf/' directory

    Returns:
        [bool] Result of applying the flow_logs module
    """
    modules = config['clusters'][cluster_name]['modules']
    flow_log_group_name_default = '{}_{}_streamalert_flow_logs'.format(
        config['global']['account']['prefix'], cluster_name)
    flow_log_group_name = modules['flow_logs'].get(
        'log_group_name', flow_log_group_name_default)

    if modules['flow_logs']['enabled']:
        cluster_dict['module']['flow_logs_{}'.format(cluster_name)] = {
            'source':
            'modules/tf_stream_alert_flow_logs',
            'destination_stream_arn':
            '${{module.kinesis_{}.arn}}'.format(cluster_name),
            'flow_log_group_name':
            flow_log_group_name
        }
        for flow_log_input in ('vpcs', 'subnets', 'enis'):
            input_data = modules['flow_logs'].get(flow_log_input)
            if input_data:
                cluster_dict['module']['flow_logs_{}'.format(
                    cluster_name)][flow_log_input] = input_data
        return True
    else:
        LOGGER_CLI.info('Flow logs disabled, nothing to do')
        return False
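A hypothetical 'flow_logs' module configuration consumed by this function; the resource IDs and log group name are placeholders.

modules['flow_logs'] = {
    'enabled': True,
    'log_group_name': 'example_prod_streamalert_flow_logs',  # optional override
    'vpcs': ['vpc-0123456789abcdef0'],
    'subnets': ['subnet-0123456789abcdef0'],
    'enis': []
}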
Example 20
def _terraform_clean(config):
    """Remove leftover Terraform statefiles and main/cluster files

    Args:
        config (CLIConfig): Loaded StreamAlert CLI
    """
    LOGGER_CLI.info('Cleaning Terraform files')

    cleanup_files = [
        '{}.tf.json'.format(cluster) for cluster in config.clusters()
    ]
    cleanup_files.extend([
        'athena.tf.json', 'main.tf.json', 'terraform.tfstate',
        'terraform.tfstate.backup'
    ])
    for tf_file in cleanup_files:
        file_to_remove = 'terraform/{}'.format(tf_file)
        if not os.path.isfile(file_to_remove):
            continue
        os.remove(file_to_remove)

    # Finally, delete the Terraform directory
    if os.path.isdir('terraform/.terraform/'):
        shutil.rmtree('terraform/.terraform/')
Example 21
    def set_prefix(self, prefix):
        """Set the Org Prefix in Global settings"""
        if not isinstance(prefix, (unicode, str)):
            LOGGER_CLI.error('Invalid prefix type, must be string')
            return

        self.config['global']['account']['prefix'] = prefix
        self.config['global']['terraform']['tfstate_bucket'] = self.config[
            'global']['terraform']['tfstate_bucket'].replace(
                'PREFIX_GOES_HERE', prefix)

        self.config['lambda']['alert_processor_config'][
            'source_bucket'] = self.config['lambda']['alert_processor_config'][
                'source_bucket'].replace('PREFIX_GOES_HERE', prefix)
        self.config['lambda']['rule_processor_config'][
            'source_bucket'] = self.config['lambda']['rule_processor_config'][
                'source_bucket'].replace('PREFIX_GOES_HERE', prefix)
        self.config['lambda']['stream_alert_apps_config'][
            'source_bucket'] = self.config['lambda'][
                'stream_alert_apps_config']['source_bucket'].replace(
                    'PREFIX_GOES_HERE', prefix)
        self.write()

        LOGGER_CLI.info('Prefix successfully configured')
Example 22
    def _resolve_third_party(self, temp_package_path):
        """Install all third-party packages into the deployment package folder

        Args:
            temp_package_path [string]: Full path to temp package path

        Returns:
            [boolean] False if the pip command failed to install requirements, True otherwise
        """
        third_party_libs = self.config['lambda'][
            self.config_key]['third_party_libraries']
        if third_party_libs:
            LOGGER_CLI.info('Installing third-party libraries: %s',
                            ', '.join(third_party_libs))
            pip_command = ['install']
            pip_command.extend(third_party_libs)
            pip_command.extend(['--upgrade', '--target', temp_package_path])
            # Return True if the pip result code is 0
            return pip.main(pip_command) == 0
        else:
            LOGGER_CLI.info('No third-party libraries to install.')

        # Return a default of True here if pip is not called
        return True
Example 23
    def _publish_helper(self, **kwargs):
        """Handle clustered or single Lambda function publishing

        Keyword Arguments:
            cluster [string]: The cluster to deploy to, this is optional

        Returns:
            [Boolean]: Result of the function publishes
        """
        cluster = kwargs.get('cluster')
        date = datetime.utcnow().strftime("%Y%m%d_T%H%M%S")

        # Clustered Lambda functions have a different naming pattern
        if cluster:
            region = self.config['clusters'][cluster]['region']
            function_name = '{}_{}_streamalert_{}'.format(
                self.config['global']['account']['prefix'],
                cluster,
                self.package.package_name
            )
        else:
            region = self.config['global']['account']['region']
            function_name = '{}_streamalert_{}'.format(
                self.config['global']['account']['prefix'],
                self.package.package_name
            )

        # Configure the Lambda client
        client = boto3.client('lambda', region_name=region)
        code_sha_256 = self.config['lambda'][self.package.config_key]['source_current_hash']

        # Publish the function
        LOGGER_CLI.debug('Publishing %s', function_name)
        new_version = self._version_helper(
            client=client,
            function_name=function_name,
            code_sha_256=code_sha_256,
            date=date)

        if not new_version:
            return False

        # Update the config
        if cluster:
            LOGGER_CLI.info('Published version %s for %s:%s',
                            new_version, cluster, function_name)
            self.config['clusters'][cluster]['modules']['stream_alert'][self.package.package_name]['current_version'] = new_version
        else:
            LOGGER_CLI.info('Published version %s for %s',
                            new_version, function_name)
            self.config['lambda'][self.package.config_key]['current_version'] = new_version
        self.config.write()

        return True
Example 24
def user_input(requested_info, mask, input_restrictions):
    """Prompt user for requested information

    Args:
        requested_info (str): Description of the information needed
        mask (bool): Decides whether to mask input or not
        input_restrictions: A compiled regular expression the response must match,
            a callable that validates and transforms the response, or an iterable
            of characters the response must not contain

    Returns:
        str: response provided by the user
    """
    # pylint: disable=protected-access
    response = ''
    prompt = '\nPlease supply {}: '.format(requested_info)

    if not mask:
        while not response:
            response = raw_input(prompt)

        # Restrict having spaces or colons in items (applies to things like
        # descriptors, etc)
        valid_response = False
        if isinstance(input_restrictions, re._pattern_type):
            valid_response = input_restrictions.match(response)
            if not valid_response:
                LOGGER_CLI.error(
                    'The supplied input should match the following '
                    'regular expression: %s', input_restrictions.pattern)
        elif callable(input_restrictions):
            # Functions can be passed here to perform complex validation of input
            # Transform the response with the validating function
            response = input_restrictions(response)
            valid_response = response is not None and response is not False
            if not valid_response:
                LOGGER_CLI.error(
                    'The supplied input failed to pass the validation '
                    'function: %s', input_restrictions.__doc__)
        else:
            valid_response = not any(x in input_restrictions for x in response)
            if not valid_response:
                restrictions = ', '.join('\'{}\''.format(restriction)
                                         for restriction in input_restrictions)
                LOGGER_CLI.error(
                    'The supplied input should not contain any of the following: %s',
                    restrictions)

        if not valid_response:
            return user_input(requested_info, mask, input_restrictions)
    else:
        while not response:
            response = getpass(prompt=prompt)

    return response
Example 25
    def _validate_options(options):
        if not options.interval:
            LOGGER_CLI.error('Missing command line argument --interval')
            return False

        if not options.timeout:
            LOGGER_CLI.error('Missing command line argument --timeout')
            return False

        if not options.memory:
            LOGGER_CLI.error('Missing command line argument --memory')
            return False

        return True
Example 26
    def create_and_upload(self):
        """Create a Lambda deployment package, hash it, and upload it to S3.

        Reference:
            package_name: Generated name based on date/time/version/name
            temp_package_path: Temp package to store deployment package files
            package_path: Full path to zipped deployment package
            package_sha256: Checksum of package_path
            package_sha256_path: Full path to package_path checksum file
        """
        LOGGER_CLI.info('Creating package for %s', self.package_name)

        temp_package_path = self._get_tmpdir()
        self._copy_files(temp_package_path)

        if not self._resolve_third_party(temp_package_path):
            LOGGER_CLI.exception(
                'Failed to install necessary third-party libraries')
            exit(1)

        # Extract any precompiled third-party libs for this package
        if not self._extract_precompiled_libs(temp_package_path):
            LOGGER_CLI.exception(
                'Failed to extract precompiled third-party libraries')
            exit(1)

        # Zip up files
        package_path = self.zip(temp_package_path)
        generated_package_name = package_path.split('/')[-1]
        # SHA256 checksum files
        package_sha256, package_sha256_path = self._sha256sum(package_path)
        # Upload to s3
        if not self._upload(package_path):
            return False

        self._cleanup(package_path, package_sha256_path)

        # Set new config values and update
        full_package_name = os.path.join(self.package_name,
                                         generated_package_name)
        self.config['lambda'][
            self.config_key]['source_object_key'] = full_package_name
        self.config['lambda'][
            self.config_key]['source_current_hash'] = package_sha256
        self.config.write()

        return True
Example 27
def create_database(athena_client):
    """Create the 'streamalert' Athena database

    Args:
        athena_client (boto3.client): Instantiated CLI AthenaClient
    """
    if athena_client.check_database_exists():
        LOGGER_CLI.info(
            'The \'streamalert\' database already exists, nothing to do')
        return

    create_db_success, create_db_result = athena_client.run_athena_query(
        query='CREATE DATABASE streamalert')

    if create_db_success and create_db_result['ResultSet'].get('Rows'):
        LOGGER_CLI.info('streamalert database successfully created!')
        LOGGER_CLI.info('results: %s', create_db_result['ResultSet']['Rows'])
Example 28
def check_credentials():
    """Check for valid AWS credentials in environment variables

    Returns:
        bool: True if valid AWS credentials are found, False otherwise
    """
    try:
        response = boto3.client('sts').get_caller_identity()
    except NoCredentialsError:
        LOGGER_CLI.error('No valid AWS Credentials found in your environment!')
        LOGGER_CLI.error('Please follow the setup instructions here: '
                         'https://www.streamalert.io/getting-started.html'
                         '#configure-aws-credentials')
        return False

    LOGGER_CLI.debug(
        'Using credentials for user \'%s\' with user ID \'%s\' in account '
        '\'%s\'', response['Arn'], response['UserId'], response['Account'])

    return True
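A typical guard before any command that touches AWS; the placement is illustrative.

import sys

if not check_credentials():
    sys.exit(1)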
Example 29
    def set_prefix(self, prefix):
        """Set the Org Prefix in Global settings"""
        if not isinstance(prefix, (unicode, str)):
            LOGGER_CLI.error('Invalid prefix type, must be string')
            return

        if '_' in prefix:
            LOGGER_CLI.error('Prefix cannot contain underscores')
            return

        tf_state_bucket = '{}.streamalert.terraform.state'.format(prefix)
        self.config['global']['account']['prefix'] = prefix
        self.config['global']['account'][
            'kms_key_alias'] = '{}_streamalert_secrets'.format(prefix)
        self.config['global']['terraform']['tfstate_bucket'] = tf_state_bucket
        self.config['lambda']['athena_partition_refresh_config'][
            'buckets'].clear()
        self.config['lambda']['athena_partition_refresh_config']['buckets'] \
            ['{}.streamalerts'.format(prefix)] = 'alerts'

        self.write()

        LOGGER_CLI.info('Prefix successfully configured')
Example 30
def tf_runner(action='apply', refresh=True, auto_approve=False, targets=None):
    """Terraform wrapper to build StreamAlert infrastructure.

    Resolves modules with `terraform get` before continuing.

    Args:
        action (str): Terraform action ('apply' or 'destroy').
        refresh (bool): If True, Terraform will refresh its state before applying the change.
        auto_approve (bool): If True, Terraform will *not* prompt the user for approval.
        targets (list): Optional list of affected targets.
            If not specified, Terraform will run against all of its resources.

    Returns:
        bool: True if the terraform command was successful
    """
    LOGGER_CLI.debug('Resolving Terraform modules')
    if not run_command(['terraform', 'get'], quiet=True):
        return False

    tf_command = [
        'terraform', action, '-var-file=../conf/lambda.json',
        '-refresh={}'.format(str(refresh).lower())
    ]

    if action == 'destroy':
        # Terraform destroy has a '-force' flag instead of '-auto-approve'
        LOGGER_CLI.info('Destroying infrastructure')
        tf_command.append('-force={}'.format(str(auto_approve).lower()))
    else:
        LOGGER_CLI.info('%s changes',
                        'Applying' if auto_approve else 'Planning')
        tf_command.append('-auto-approve={}'.format(str(auto_approve).lower()))

    if targets:
        tf_command.extend('-target={}'.format(x) for x in targets)

    return run_command(tf_command)
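Usage sketches; the target name is an illustrative Terraform module address, not taken from the project.

tf_runner()                                      # plan/apply all resources interactively
tf_runner(targets=['module.stream_alert_prod'])  # restrict the run to one module
tf_runner(action='destroy', auto_approve=True)   # tear everything down without prompting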