Example #1
    def _upload(self, package_path):
        """Upload the StreamAlert package and sha256 sum to S3.

        Args:
            package_path (str): Full path to the zipped deployment package

        Returns:
            bool: True if the S3 upload succeeded
        """
        LOGGER_CLI.info('Uploading StreamAlert package to S3')
        client = boto3.client(
            's3', region_name=self.config['global']['account']['region'])
        for package_file in (package_path, '{}.sha256'.format(package_path)):
            package_name = package_file.split('/')[-1]

            # Open in binary mode since the package is a zip archive; the
            # context manager also closes the handle if the upload fails
            with open(package_file, 'rb') as package_fh:
                try:
                    client.put_object(Bucket=self.config['lambda'][self.config_key]
                                      ['source_bucket'],
                                      Key=os.path.join(self.package_name,
                                                       package_name),
                                      Body=package_fh,
                                      ServerSideEncryption='AES256')
                except ClientError:
                    LOGGER_CLI.exception('An error occurred while uploading %s',
                                         package_name)
                    return False

            LOGGER_CLI.debug('Uploaded %s to S3', package_name)
        return True
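
The loop above pushes both the archive and its .sha256 sidecar in one pass. A minimal standalone sketch of the same pattern, assuming a hypothetical bucket and key prefix rather than StreamAlert's configuration:

import os

import boto3
from botocore.exceptions import ClientError

def upload_with_checksum(bucket, prefix, package_path):
    """Upload a package and its .sha256 sidecar with server-side encryption."""
    client = boto3.client('s3')
    for package_file in (package_path, '{}.sha256'.format(package_path)):
        key = os.path.join(prefix, os.path.basename(package_file))
        try:
            # Binary mode, since the package itself is a zip archive
            with open(package_file, 'rb') as package_fh:
                client.put_object(Bucket=bucket, Key=key, Body=package_fh,
                                  ServerSideEncryption='AES256')
        except ClientError:
            return False
    return True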
Example #2
def terraform_generate(config, init=False):
    """Generate all Terraform plans for the configured clusters.

    Keyword Args:
        config (dict): The loaded config from the 'conf/' directory
        init (bool): Indicates if main.tf.json is generated for `terraform init`

    Returns:
        bool: Result of cluster generation
    """
    cleanup_old_tf_files(config)

    # Setup the main.tf.json file
    LOGGER_CLI.debug('Generating cluster file: main.tf.json')
    with open('terraform/main.tf.json', 'w') as tf_file:
        json.dump(generate_main(init=init, config=config),
                  tf_file,
                  indent=2,
                  sort_keys=True)

    # Return early during the init process; clusters are not needed yet
    if init:
        return True

    # Setup cluster files
    for cluster in config.clusters():
        if cluster in RESTRICTED_CLUSTER_NAMES:
            raise InvalidClusterName(
                'Rename cluster "main" or "athena" to something else!')

        LOGGER_CLI.debug('Generating cluster file: %s.tf.json', cluster)
        cluster_dict = generate_cluster(cluster_name=cluster, config=config)
        if not cluster_dict:
            LOGGER_CLI.error(
                'An error occurred while generating the %s cluster',
                cluster)
            return False

        with open('terraform/{}.tf.json'.format(cluster), 'w') as tf_file:
            json.dump(cluster_dict, tf_file, indent=2, sort_keys=True)

    # Setup Athena if it is enabled
    athena_config = config['lambda'].get('athena_partition_refresh_config')
    if athena_config:
        athena_file = 'terraform/athena.tf.json'
        if athena_config['enabled']:
            athena_generated_config = generate_athena(config=config)
            if athena_generated_config:
                with open(athena_file, 'w') as tf_file:
                    json.dump(athena_generated_config,
                              tf_file,
                              indent=2,
                              sort_keys=True)
        # Remove Athena file if it's disabled
        else:
            if os.path.isfile(athena_file):
                LOGGER_CLI.info('Removing old Athena Terraform file')
                os.remove(athena_file)

    return True
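
The *.tf.json files written here are plain Terraform JSON documents, which Terraform reads alongside any HCL files in the same directory. A hedged illustration of the write step with a made-up document; the real content comes from generate_main and generate_cluster:

import json

# Hypothetical minimal Terraform JSON, not StreamAlert's actual output;
# assumes the terraform/ directory already exists
main_config = {
    'provider': {'aws': {'region': 'us-east-1'}},
    'terraform': {'required_version': '~> 0.11'}
}

with open('terraform/main.tf.json', 'w') as tf_file:
    json.dump(main_config, tf_file, indent=2, sort_keys=True)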
Example #3
    @staticmethod
    def zip(temp_package_path):
        """Create the StreamAlert Lambda deployment package archive.

        Zips all dependency files needed to run the function and names
        the archive based on the current date/time and the Lambda
        function module version.

            Example filename: stream_alert_1.0.0_20161010_00:11:22.zip

        Only the `.py` files are packaged, per AWS's instructions for
        creating Lambda deployment packages.

        Args:
            temp_package_path (str): The temporary file path to store the zip.

        Returns:
            str: Deployment package full path
        """
        LOGGER_CLI.debug('Creating Lambda package: %s',
                         temp_package_path + '.zip')
        package_path = shutil.make_archive(temp_package_path, 'zip',
                                           temp_package_path)
        LOGGER_CLI.info('Package successfully created')

        return package_path
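
shutil.make_archive takes a base name without an extension, an archive format, and a root directory to zip, and returns the full path of the archive it created. A self-contained sketch with throwaway paths:

import os
import shutil
import tempfile

staging = tempfile.mkdtemp()  # stands in for temp_package_path
with open(os.path.join(staging, 'main.py'), 'w') as handle:
    handle.write('print("hello")\n')

# Creates <staging>.zip next to the staging directory and returns its path
archive_path = shutil.make_archive(staging, 'zip', staging)
print(archive_path)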
Example #4
def terraform_generate(config, init=False):
    """Generate all Terraform plans for the configured clusters.

    Keyword Args:
        config (dict): The loaded config from the 'conf/' directory
        init (bool): Indicates if main.tf.json is generated for `terraform init`

    Returns:
        bool: Result of cluster generation
    """
    cleanup_old_tf_files(config)

    # Setup the main.tf.json file
    LOGGER_CLI.debug('Generating cluster file: main.tf.json')
    with open('terraform/main.tf.json', 'w') as tf_file:
        json.dump(generate_main(init=init, config=config),
                  tf_file,
                  indent=2,
                  sort_keys=True)

    # Return early during the init process; clusters are not needed yet
    if init:
        return True

    # Setup cluster files
    for cluster in config.clusters():
        if cluster in RESTRICTED_CLUSTER_NAMES:
            raise InvalidClusterName(
                'Rename cluster "main" or "athena" to something else!')

        LOGGER_CLI.debug('Generating cluster file: %s.tf.json', cluster)
        cluster_dict = generate_cluster(cluster_name=cluster, config=config)
        if not cluster_dict:
            LOGGER_CLI.error(
                'An error occurred while generating the %s cluster',
                cluster)
            return False

        with open('terraform/{}.tf.json'.format(cluster), 'w') as tf_file:
            json.dump(cluster_dict, tf_file, indent=2, sort_keys=True)

    # Setup Athena if it is enabled
    generate_global_lambda_settings(
        config,
        config_name='athena_partition_refresh_config',
        config_generate_func=generate_athena,
        tf_tmp_file='terraform/athena.tf.json',
        message='Removing old Athena Terraform file')

    # Setup Threat Intel Downloader Lambda function if it is enabled
    generate_global_lambda_settings(
        config,
        config_name='threat_intel_downloader_config',
        config_generate_func=generate_threat_intel_downloader,
        tf_tmp_file='terraform/ti_downloader.tf.json',
        message='Removing old Threat Intel Downloader Terraform file')

    return True
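
generate_global_lambda_settings is not shown on this page. Inferring only from the two call sites above and the inline Athena block in the earlier version of this function, a helper along these lines would fit; this is an assumption, not StreamAlert's actual implementation:

import json
import logging
import os

LOGGER = logging.getLogger(__name__)

def generate_global_lambda_settings(config, config_name, config_generate_func,
                                    tf_tmp_file, message):
    """Write, or remove, the Terraform file for an optional global Lambda."""
    lambda_config = config['lambda'].get(config_name)
    if not lambda_config:
        return

    if lambda_config.get('enabled'):
        generated_config = config_generate_func(config=config)
        if generated_config:
            with open(tf_tmp_file, 'w') as tf_file:
                json.dump(generated_config, tf_file, indent=2, sort_keys=True)
    # Remove the stale file if the feature is disabled
    elif os.path.isfile(tf_tmp_file):
        LOGGER.info(message)
        os.remove(tf_tmp_file)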
Example #5
    @staticmethod
    def _cleanup(*files):
        """Removes the temporary StreamAlert package and checksum.

        Args:
            files (str): File paths to remove after uploading to S3.
        """
        LOGGER_CLI.debug('Removing local files')
        for obj in files:
            os.remove(obj)
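
A hypothetical call site from inside the same class, pairing this with the upload step above; the path is illustrative:

# Remove the package and its checksum once both are safely in S3
package_path = 'dist/stream_alert_1.0.0_20161010.zip'
self._cleanup(package_path, '{}.sha256'.format(package_path))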
Example #6
def tf_runner(**kwargs):
    """Terraform wrapper to build StreamAlert infrastructure.

    Steps:
        - resolve modules with `terraform get`
        - run `terraform plan` for the given targets
        - if plan is successful and user confirms prompt,
          then the infrastructure is applied

    Keyword Args:
        targets (list): Terraform targets to run against
        action (str): 'apply' or 'destroy'

    Returns:
        bool: True if the terraform command was successful
    """
    targets = kwargs.get('targets', [])
    action = kwargs.get('action', None)
    tf_action_index = 1  # The index to the terraform 'action'

    var_files = {'conf/lambda.json'}
    tf_opts = ['-var-file=../{}'.format(x) for x in var_files]
    tf_targets = ['-target={}'.format(x) for x in targets]
    tf_command = ['terraform', 'plan'] + tf_opts + tf_targets
    if action == 'destroy':
        tf_command.append('-destroy')

    LOGGER_CLI.debug('Resolving Terraform modules')
    if not run_command(['terraform', 'get'], quiet=True):
        return False

    LOGGER_CLI.info('Planning infrastructure')
    if not run_command(tf_command):
        return False

    if not continue_prompt():
        sys.exit(1)

    if action == 'destroy':
        LOGGER_CLI.info('Destroying infrastructure')
        tf_command[tf_action_index] = action
        tf_command.remove('-destroy')
        tf_command.append('-force')

    elif action:
        tf_command[tf_action_index] = action

    else:
        LOGGER_CLI.info('Creating infrastructure')
        tf_command[tf_action_index] = 'apply'
        tf_command.append('-refresh=false')

    if not run_command(tf_command):
        return False

    return True
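
Hedged call examples for this kwargs-based interface; the target name is illustrative, not a real StreamAlert module:

tf_runner(targets=['module.stream_alert_prod'])  # plan the targets, prompt, then apply
tf_runner(action='destroy')                      # plan -destroy, prompt, then destroy -force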
Example #7
    def _publish(self, client, function_name, code_sha_256):
        """Publish the function"""
        date = datetime.utcnow().strftime("%Y%m%d_T%H%M%S")
        LOGGER_CLI.debug('Publishing %s', function_name)
        new_version = self._version_helper(client=client,
                                           function_name=function_name,
                                           code_sha_256=code_sha_256,
                                           date=date)

        return new_version
Example #8
    def _publish_helper(self, **kwargs):
        """Handle clustered or single Lambda function publishing

        Keyword Args:
            cluster (str): The cluster to deploy to; optional

        Returns:
            bool: True if publishing succeeded
        """
        cluster = kwargs.get('cluster')
        date = datetime.utcnow().strftime("%Y%m%d_T%H%M%S")

        # Clustered Lambda functions have a different naming pattern
        if cluster:
            region = self.config['clusters'][cluster]['region']
            function_name = '{}_{}_streamalert_{}'.format(
                self.config['global']['account']['prefix'],
                cluster,
                self.package.package_name
            )
        else:
            region = self.config['global']['account']['region']
            function_name = '{}_streamalert_{}'.format(
                self.config['global']['account']['prefix'],
                self.package.package_name
            )

        # Configure the Lambda client
        client = boto3.client('lambda', region_name=region)
        code_sha_256 = self.config['lambda'][self.package.config_key]['source_current_hash']

        # Publish the function
        LOGGER_CLI.debug('Publishing %s', function_name)
        new_version = self._version_helper(
            client=client,
            function_name=function_name,
            code_sha_256=code_sha_256,
            date=date)

        if not new_version:
            return False

        # Update the config
        if cluster:
            LOGGER_CLI.info('Published version %s for %s:%s',
                            new_version, cluster, function_name)
            module_config = self.config['clusters'][cluster]['modules']['stream_alert']
            module_config[self.package.package_name]['current_version'] = new_version
        else:
            LOGGER_CLI.info('Published version %s for %s',
                            new_version, function_name)
            self.config['lambda'][self.package.config_key]['current_version'] = new_version
        self.config.write()

        return True
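
The naming split above, distilled into a runnable illustration (the prefix and cluster values are made up):

prefix, cluster, name = 'acme', 'prod', 'rule_processor'
print('{}_{}_streamalert_{}'.format(prefix, cluster, name))
# -> acme_prod_streamalert_rule_processor
print('{}_streamalert_{}'.format(prefix, name))
# -> acme_streamalert_rule_processor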
Example #9
    @staticmethod
    def _publish(client, function_name, code_sha_256):
        """Publish the function"""
        date = datetime.utcnow().strftime("%Y%m%d_T%H%M%S")
        LOGGER_CLI.debug('Publishing %s', function_name)
        try:
            version = client.publish_version(
                FunctionName=function_name,
                CodeSha256=code_sha_256,
                Description='Publish Lambda {} on {}'.format(
                    function_name, date))['Version']
        except ClientError as err:
            LOGGER_CLI.error(err)
            return

        return int(version)
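
The Lambda API returns the new Version as a string (for example '3'), which is why the helper casts to int before returning; the bare return hands back None so callers can detect failure. A hypothetical call, shown unbound for brevity:

import sys

import boto3

client = boto3.client('lambda', region_name='us-east-1')
# code_sha_256 is hypothetical; it must match the hash of the deployed code
code_sha_256 = 'abc123def456'
new_version = _publish(client, 'acme_streamalert_rule_processor', code_sha_256)
if new_version is None:
    sys.exit(1)  # abort the deploy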
Example #10
def check_credentials():
    """Check for valid AWS credentials in environment variables

    Returns:
        bool: True any of the AWS env variables exist
    """
    try:
        response = boto3.client('sts').get_caller_identity()
    except NoCredentialsError:
        LOGGER_CLI.error('No valid AWS Credentials found in your environment!')
        LOGGER_CLI.error('Please follow the setup instructions here: '
                         'https://www.streamalert.io/getting-started.html'
                         '#configure-aws-credentials')
        return False

    LOGGER_CLI.debug(
        'Using credentials for user \'%s\' with user ID \'%s\' in account '
        '\'%s\'', response['Arn'], response['UserId'], response['Account'])

    return True
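
A typical gate at the start of a CLI entry point might look like this (the caller is hypothetical):

import sys

if not check_credentials():
    sys.exit(1)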
Example #11
def tf_runner(action='apply', refresh=True, auto_approve=False, targets=None):
    """Terraform wrapper to build StreamAlert infrastructure.

    Resolves modules with `terraform get` before continuing.

    Args:
        action (str): Terraform action ('apply' or 'destroy').
        refresh (bool): If True, Terraform will refresh its state before applying the change.
        auto_approve (bool): If True, Terraform will *not* prompt the user for approval.
        targets (list): Optional list of affected targets.
            If not specified, Terraform will run against all of its resources.

    Returns:
        bool: True if the terraform command was successful
    """
    LOGGER_CLI.debug('Resolving Terraform modules')
    if not run_command(['terraform', 'get'], quiet=True):
        return False

    tf_command = [
        'terraform', action, '-var-file=../conf/lambda.json',
        '-refresh={}'.format(str(refresh).lower())
    ]

    if action == 'destroy':
        # Terraform destroy has a '-force' flag instead of '-auto-approve'
        LOGGER_CLI.info('Destroying infrastructure')
        tf_command.append('-force={}'.format(str(auto_approve).lower()))
    else:
        LOGGER_CLI.info('%s changes',
                        'Applying' if auto_approve else 'Planning')
        tf_command.append('-auto-approve={}'.format(str(auto_approve).lower()))

    if targets:
        tf_command.extend('-target={}'.format(x) for x in targets)

    return run_command(tf_command)
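
Hedged call examples for the refactored signature; the target name is illustrative:

tf_runner()                                      # apply with refresh and an approval prompt
tf_runner(action='destroy', auto_approve=True)   # tear down without prompting
tf_runner(targets=['module.kinesis'])            # limit the run to specific resources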