def _upload(self, package_path):
    """Upload the StreamAlert package and sha256 sum to S3.

    Args:
        package_path (str): Full path to the zipped deployment package

    Returns:
        bool: True if both the package and its checksum uploaded, False otherwise
    """
    LOGGER_CLI.info('Uploading StreamAlert package to S3')
    client = boto3.client(
        's3', region_name=self.config['global']['account']['region'])

    # Upload both the zip archive and its companion sha256 checksum file
    for package_file in (package_path, '{}.sha256'.format(package_path)):
        package_name = package_file.split('/')[-1]
        # Open in binary mode: the zip archive is binary data, so text mode
        # would corrupt the upload. The 'with' block also guarantees the
        # handle is closed even when put_object raises (the original leaked
        # the handle on the error-return path).
        with open(package_file, 'rb') as package_fh:
            try:
                client.put_object(
                    Bucket=self.config['lambda'][self.config_key]['source_bucket'],
                    Key=os.path.join(self.package_name, package_name),
                    Body=package_fh,
                    ServerSideEncryption='AES256')
            except ClientError:
                LOGGER_CLI.exception('An error occurred while uploading %s', package_name)
                return False

        LOGGER_CLI.debug('Uploaded %s to S3', package_name)

    return True
def create(self):
    """Create a Lambda deployment package .zip file.

    Returns:
        bool: True once the archive has been created

    Side effects:
        Writes the zip archive under BUILD_DIRECTORY; exits the process
        if third-party library resolution or extraction fails.
    """
    LOGGER_CLI.info('Creating package for %s', self.package_name)
    temp_package_path = os.path.join(tempfile.gettempdir(), self.package_name)
    # Start from a clean staging directory
    if os.path.exists(temp_package_path):
        shutil.rmtree(temp_package_path)

    self._copy_files(temp_package_path)

    if not self._resolve_third_party(temp_package_path):
        # error(), not exception(): there is no exception in flight here,
        # so exception() would log a spurious 'NoneType: None' traceback
        LOGGER_CLI.error('Failed to install necessary third-party libraries')
        exit(1)

    # Extract any precompiled third-party libs for this package
    if self.precompiled_libs and not self._extract_precompiled_libs(temp_package_path):
        LOGGER_CLI.error('Failed to extract precompiled third-party libraries')
        exit(1)

    # Zip up files
    result = shutil.make_archive(
        os.path.join(BUILD_DIRECTORY, self.package_name), 'zip', temp_package_path)
    LOGGER_CLI.info('Successfully created %s', os.path.basename(result))

    # Remove temp files
    shutil.rmtree(temp_package_path)

    return True
def create_and_upload(self):
    """Create a Lambda deployment package, hash it, and upload it to S3.

    Reference:
        package_name: Generated name based on date/time/version/name
        temp_package_path: Temp package to store deployment package files
        package_path: Full path to zipped deployment package
        package_sha256: Checksum of package_path
        package_sha256_path: Full path to package_path checksum file

    Returns:
        bool: True if the package was built and uploaded, False if the
            S3 upload failed (the original returned None in both cases,
            hiding upload failures from callers)
    """
    # Get tmp dir and copy files
    temp_package_path = self._get_tmpdir()
    self._copy_files(temp_package_path)

    # Download third-party libs
    if not self._resolve_third_party(temp_package_path):
        # error(), not exception(): no exception is being handled here
        LOGGER_CLI.error('Failed to install necessary third-party libraries')
        exit(1)

    # Zip up files
    package_path = self.zip(temp_package_path)
    generated_package_name = package_path.split('/')[-1]

    # Checksum files
    package_sha256, package_sha256_path = self._sha256sum(package_path)

    # Upload to S3; bail out on failure instead of silently succeeding
    if not self._upload(package_path):
        return False

    # Remove generated deployment files
    self._cleanup(package_path, package_sha256_path)

    # Set new config values and update
    full_package_name = os.path.join(self.package_name, generated_package_name)
    self.config[self.config_key]['source_object_key'] = full_package_name
    self.config[self.config_key]['source_current_hash'] = package_sha256
    self.config.write()

    return True
def _rollback_production(lambda_client, function_name):
    """Rollback the production alias for the given function name.

    Args:
        lambda_client (boto3.client): Lambda client used for the API calls
        function_name (str): Name of the Lambda function whose 'production'
            alias should be moved back one published version
    """
    version = lambda_client.get_alias(
        FunctionName=function_name, Name='production')['FunctionVersion']

    if version == '$LATEST':
        # This won't happen with Terraform, but the alias could have been manually changed.
        LOGGER_CLI.error('%s:production is pointing to $LATEST instead of a published version',
                         function_name)
        return

    current_version = int(version)
    if current_version == 1:
        # warning() replaces the deprecated warn() alias
        LOGGER_CLI.warning('%s:production is already at version 1', function_name)
        return

    LOGGER_CLI.info('Rolling back %s:production from version %d => %d',
                    function_name, current_version, current_version - 1)
    try:
        lambda_client.update_alias(
            FunctionName=function_name,
            Name='production',
            FunctionVersion=str(current_version - 1))
    except ClientError:
        LOGGER_CLI.exception('version not updated')
def load_outputs_config(conf_dir='conf'):
    """Load the outputs configuration file from disk

    Args:
        conf_dir (str): Directory to read outputs config from. Defaults to 'conf'

    Returns:
        dict: The output configuration settings

    Raises:
        ValueError: If the outputs config file is not valid JSON
    """
    with open(os.path.join(conf_dir, OUTPUTS_CONFIG)) as outputs:
        try:
            values = json.load(outputs)
        except ValueError:
            LOGGER_CLI.exception('the %s file could not be loaded into json',
                                 OUTPUTS_CONFIG)
            # Re-raise: the original fell through to 'return values' with
            # 'values' unbound, raising a confusing UnboundLocalError
            raise

    return values
def create_and_upload(self):
    """Create a Lambda deployment package, hash it, and upload it to S3.

    Reference:
        package_name: Generated name based on date/time/version/name
        temp_package_path: Temp package to store deployment package files
        package_path: Full path to zipped deployment package
        package_sha256: Checksum of package_path
        package_sha256_path: Full path to package_path checksum file

    Returns:
        bool: True if the package was built and uploaded, False if the
            S3 upload failed
    """
    LOGGER_CLI.info('Creating package for %s', self.package_name)
    temp_package_path = self._get_tmpdir()
    self._copy_files(temp_package_path)

    if not self._resolve_third_party(temp_package_path):
        # error(), not exception(): no exception is in flight here, so
        # exception() would log a spurious 'NoneType: None' traceback
        LOGGER_CLI.error('Failed to install necessary third-party libraries')
        exit(1)

    # Extract any precompiled third-party libs for this package
    if not self._extract_precompiled_libs(temp_package_path):
        LOGGER_CLI.error('Failed to extract precompiled third-party libraries')
        exit(1)

    # Zip up files
    package_path = self.zip(temp_package_path)
    generated_package_name = package_path.split('/')[-1]

    # SHA256 checksum files
    package_sha256, package_sha256_path = self._sha256sum(package_path)

    # Upload to S3
    if not self._upload(package_path):
        return False

    self._cleanup(package_path, package_sha256_path)

    # Set new config values and update
    full_package_name = os.path.join(self.package_name, generated_package_name)
    self.config['lambda'][self.config_key]['source_object_key'] = full_package_name
    self.config['lambda'][self.config_key]['source_current_hash'] = package_sha256
    self.config.write()

    return True
def kms_encrypt(region, data):
    """Encrypt data with AWS KMS.

    Uses the 'alias/stream_alert_secrets' key alias. On a KMS client error,
    the failure is logged and None is returned (best-effort behavior).

    Args:
        region (str): AWS region to use for the boto3 client
        data (str): JSON string to be encrypted

    Returns:
        str: Encrypted ciphertext data blob, or None on failure
    """
    try:
        kms = boto3.client('kms', region_name=region)
        response = kms.encrypt(KeyId='alias/stream_alert_secrets',
                               Plaintext=data)
    except ClientError:
        LOGGER_CLI.exception('an error occurred during credential encryption')
    else:
        return response['CiphertextBlob']