Example #1
    def fetch_all(self,
                  credentials,
                  regions=[],
                  partition_name='aws',
                  targets=None):
        """
        Fetch all the SNS configuration supported by Scout2

        :param credentials:             AWS credentials to use for the API calls
        :param service:                 Name of the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.

        """
        global status, formatted_string
        # Initialize targets
        if not targets:
            targets = type(self).targets
        printInfo('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None
        api_service = self.service.lower()
        # Connect to the service
        if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
            api_clients = {}
            for region in build_region_list(self.service, regions,
                                            partition_name):
                api_clients[region] = connect_service('s3', credentials,
                                                      region)
            api_client = api_clients[list(api_clients.keys())[0]]
        elif self.service == 'route53domains':
            api_client = connect_service(
                self.service, credentials,
                'us-east-1')  # TODO: use partition's default region
        else:
            api_client = connect_service(self.service, credentials)
        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}
        if self.service in ['s3']:
            params['api_clients'] = api_clients
        q = self._init_threading(self.__fetch_target, params, 20)
        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': q}
        if self.service in ['s3']:
            params['api_clients'] = api_clients
        qt = self._init_threading(self.__fetch_service, params, 10)
        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)
        # Go
        for target in targets:
            qt.put(target)
        # Join
        qt.join()
        q.join()
        # Show completion and force newline
        if self.service != 'iam':
            self.fetchstatuslogger.show(True)
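
All of these snippets build boto3 clients through the connect_service helper used throughout the examples. For orientation, here is a minimal sketch of what such a helper typically boils down to; the function name and the simplified error handling are assumptions, not opinel's actual implementation.

# Minimal sketch of a connect_service-style helper (assumption: the real opinel
# helper adds configuration, retries and richer error reporting).
import boto3

def connect_service_sketch(service, credentials, region_name=None, silent=False):
    try:
        session = boto3.session.Session(
            aws_access_key_id=credentials.get('AccessKeyId'),
            aws_secret_access_key=credentials.get('SecretAccessKey'),
            aws_session_token=credentials.get('SessionToken'))
        return session.client(service, region_name=region_name)
    except Exception as e:
        if not silent:
            print('Failed to create a %s client: %s' % (service, e))
        return None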
Example #2
 def setup(self):
     configPrintException(True)
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] == None:
         self.creds = read_creds('travislike')
     self.org_creds = assume_role(
         'OpinelUnitTest', self.creds,
         'arn:aws:iam::990492604467:role/OpinelUnitTest',
         'opinelunittesting')
     self.badapi_client = connect_service('organizations', self.creds,
                                          'us-east-1')
     self.api_client = connect_service('organizations', self.org_creds,
                                       'us-east-1')
Example #3
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('user-name', help = 'Name of the user(s) to be deleted.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Require at least one user name
    if not len(args.user_name):
        printError("Error, you need to provide at least one user name.")
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Delete users
    for user in args.user_name:
        delete_user(iam_client, user)
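
opinel's delete_user wraps more than a single API call, because IAM refuses to delete a user that still has credentials attached. A rough boto3-only sketch of the kind of cleanup such a helper has to perform (assumption: the real helper also removes MFA devices, group memberships and policies):

# Rough sketch of the cleanup a delete_user-style helper must perform
# (assumption: opinel's real helper covers more resource types).
import boto3

def delete_user_sketch(iam_client, user_name):
    # Access keys must be deleted first, otherwise DeleteUser fails
    for key in iam_client.list_access_keys(UserName=user_name)['AccessKeyMetadata']:
        iam_client.delete_access_key(UserName=user_name, AccessKeyId=key['AccessKeyId'])
    # Remove the console password, if any
    try:
        iam_client.delete_login_profile(UserName=user_name)
    except iam_client.exceptions.NoSuchEntityException:
        pass
    iam_client.delete_user(UserName=user_name)

# Usage: delete_user_sketch(boto3.client('iam'), 'example-user')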
Example #4
def assume_role(role_name,
                credentials,
                role_arn,
                role_session_name,
                silent=False):
    # Connect to STS
    sts_client = connect_service('sts', credentials, silent=silent)
    # Set required arguments for assume role call
    sts_args = {'RoleArn': role_arn, 'RoleSessionName': role_session_name}
    # MFA used ?
    if 'mfa_serial' in credentials and 'mfa_code' in credentials:
        sts_args['TokenCode'] = credentials['mfa_code']
        sts_args['SerialNumber'] = credentials['mfa_serial']
    # External ID used ?
    if 'ExternalId' in credentials and credentials['ExternalId']:
        sts_args['ExternalId'] = credentials['ExternalId']
    # Assume the role
    sts_response = sts_client.assume_role(**sts_args)
    credentials = sts_response['Credentials']
    cached_credentials_filename = get_cached_credentials_filename(
        role_name, role_arn)
    #with open(cached_credentials_filename, 'wt+') as f:
    #   write_data_to_file(f, sts_response, True, False)
    cached_credentials_path = os.path.dirname(cached_credentials_filename)
    if not os.path.isdir(cached_credentials_path):
        os.makedirs(cached_credentials_path)
    save_blob_as_json(cached_credentials_filename, sts_response, True,
                      False)  # blob, force_write, debug):
    return credentials
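
For comparison, the STS call that assume_role wraps can also be made directly with boto3; the returned Credentials dictionary carries the temporary AccessKeyId, SecretAccessKey and SessionToken used by the other recipes. A minimal sketch with a placeholder role ARN and session name, and without the caching shown above:

# Direct boto3 equivalent of the core of assume_role()
# (placeholder ARN and session name; credential caching omitted).
import boto3

sts_client = boto3.client('sts')
response = sts_client.assume_role(
    RoleArn='arn:aws:iam::123456789012:role/ExampleRole',  # placeholder
    RoleSessionName='example-session')
temporary_credentials = response['Credentials']
# temporary_credentials contains AccessKeyId, SecretAccessKey, SessionToken and Expiration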
Example #5
File: iam.py Project: nccgroup/Scout2
    def fetch_credential_report(self, credentials, ignore_exception = False):
        """
        Fetch the credential report

        :param: api_client
        :type: FOO
        :param: ignore_exception : initiate credential report creation as not  always ready
        :type: Boolean
        """
        iam_report = {}
        try:
            api_client = connect_service('iam', credentials, silent = True)
            response = api_client.generate_credential_report()
            if response['State'] != 'COMPLETE':
                if not ignore_exception:
                    printError('Failed to generate a credential report.')
                return
            report = api_client.get_credential_report()['Content']
            lines = report.splitlines()
            keys = lines[0].decode('utf-8').split(',')
            for line in lines[1:]:
                values = line.decode('utf-8').split(',')
                manage_dictionary(iam_report, values[0], {})
                for key, value in zip(keys, values):
                    iam_report[values[0]][key] = value
            self.credential_report = iam_report
            self.fetchstatuslogger.counts['credential_report']['fetched'] = 1
        except Exception as e:
            if ignore_exception:
                return
            printError('Failed to download a credential report.')
            printException(e)
Example #6
    def fetch_credential_report(self, credentials, ignore_exception=False):
        """
        Fetch the credential report

        :param: api_client
        :type: FOO
        :param: ignore_exception : initiate credential report creation as not  always ready
        :type: Boolean
        """
        iam_report = {}
        try:
            api_client = connect_service('iam', credentials, silent=True)
            response = api_client.generate_credential_report()
            if response['State'] != 'COMPLETE':
                if not ignore_exception:
                    printError('Failed to generate a credential report.')
                return
            report = api_client.get_credential_report()['Content']
            lines = report.splitlines()
            keys = lines[0].decode('utf-8').split(',')
            for line in lines[1:]:
                values = line.decode('utf-8').split(',')
                manage_dictionary(iam_report, values[0], {})
                for key, value in zip(keys, values):
                    iam_report[values[0]][key] = value
            self.credential_report = iam_report
            self.fetchstatuslogger.counts['credential_report']['fetched'] = 1
        except Exception as e:
            if ignore_exception:
                return
            printError('Failed to download a credential report.')
            printException(e)
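
The Content field returned by get_credential_report is a CSV document delivered as bytes, which is why the snippets above split and decode the lines by hand. A short sketch of the same parsing with csv.DictReader (assumption: boto3 can find credentials in the environment and the report has already been generated):

# Parsing the IAM credential report with csv.DictReader
# (assumes default boto3 credentials and a report in COMPLETE state).
import csv
import io
import boto3

iam = boto3.client('iam')
iam.generate_credential_report()  # may need to be retried until State == 'COMPLETE'
content = iam.get_credential_report()['Content'].decode('utf-8')
report = {row['user']: row for row in csv.DictReader(io.StringIO(content))}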
Example #7
File: iam.py Project: nccgroup/Scout2
    def fetch_password_policy(self, credentials):
        """
        Fetch the password policy that applies to all IAM users within the AWS account
        """
        self.fetchstatuslogger.counts['password_policy']['discovered'] = 0
        self.fetchstatuslogger.counts['password_policy']['fetched'] = 0
        try:
            api_client = connect_service('iam', credentials, silent = True)
            self.password_policy = api_client.get_account_password_policy()['PasswordPolicy']
            if 'PasswordReusePrevention' not in self.password_policy:
                self.password_policy['PasswordReusePrevention'] = False
            else:
                self.password_policy['PreviousPasswordPrevented'] = self.password_policy['PasswordReusePrevention']
                self.password_policy['PasswordReusePrevention'] = True
            # There is a bug in the API: ExpirePasswords always returns false
            if 'MaxPasswordAge' in self.password_policy:
                self.password_policy['ExpirePasswords'] = True
            self.fetchstatuslogger.counts['password_policy']['discovered'] = 1
            self.fetchstatuslogger.counts['password_policy']['fetched'] = 1

        except ClientError as e:
            if e.response['Error']['Code'] == 'NoSuchEntity':
                self.password_policy = {}
                self.password_policy['MinimumPasswordLength'] = '1' # As of 10/10/2016, 1-character passwords were authorized when no policy exists, even though the console displays 6
                self.password_policy['RequireUppercaseCharacters'] = False
                self.password_policy['RequireLowercaseCharacters'] = False
                self.password_policy['RequireNumbers'] = False
                self.password_policy['RequireSymbols'] = False
                self.password_policy['PasswordReusePrevention'] = False
                self.password_policy['ExpirePasswords'] = False
            else:
                raise e
        except Exception as e:
            printError(str(e))
Example #8
    def fetch_password_policy(self, credentials):
        """
        Fetch the password policy that applies to all IAM users within the AWS account
        """
        self.fetchstatuslogger.counts['password_policy']['discovered'] = 0
        self.fetchstatuslogger.counts['password_policy']['fetched'] = 0
        try:
            api_client = connect_service('iam', credentials, silent=True)
            self.password_policy = api_client.get_account_password_policy()['PasswordPolicy']
            if 'PasswordReusePrevention' not in self.password_policy:
                self.password_policy['PasswordReusePrevention'] = False
            else:
                self.password_policy['PreviousPasswordPrevented'] = self.password_policy['PasswordReusePrevention']
                self.password_policy['PasswordReusePrevention'] = True
            # There is a bug in the API: ExpirePasswords always returns false
            if 'MaxPasswordAge' in self.password_policy:
                self.password_policy['ExpirePasswords'] = True
            self.fetchstatuslogger.counts['password_policy']['discovered'] = 1
            self.fetchstatuslogger.counts['password_policy']['fetched'] = 1

        except ClientError as e:
            if e.response['Error']['Code'] == 'NoSuchEntity':
                self.password_policy = {}
                self.password_policy[
                    'MinimumPasswordLength'] = '1'  # As of 10/10/2016, 1-character passwords were authorized when no policy exists, even though the console displays 6
                self.password_policy['RequireUppercaseCharacters'] = False
                self.password_policy['RequireLowercaseCharacters'] = False
                self.password_policy['RequireNumbers'] = False
                self.password_policy['RequireSymbols'] = False
                self.password_policy['PasswordReusePrevention'] = False
                self.password_policy['ExpirePasswords'] = False
            else:
                raise e
        except Exception as e:
            printError(str(e))
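
When no password policy has been defined for the account, get_account_password_policy raises a NoSuchEntity error rather than returning an empty document, which is what the except branch above emulates with default values. A minimal boto3 sketch of that pattern:

# Handling the NoSuchEntity case of get_account_password_policy
# (assumes default boto3 credentials).
import boto3
from botocore.exceptions import ClientError

iam = boto3.client('iam')
try:
    password_policy = iam.get_account_password_policy()['PasswordPolicy']
except ClientError as e:
    if e.response['Error']['Code'] == 'NoSuchEntity':
        password_policy = {}  # no policy configured for this account
    else:
        raise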
Example #9
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.parser.add_argument('--user-name',
                               dest='user_name',
                               default=None,
                               nargs='+',
                               required=True,
                               help='Name of the user(s) to be deleted.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)

    # Delete users
    for user in args.user_name:
        delete_user(iam_client, user)
Example #10
def get_stackset_ready_accounts(credentials, account_ids, quiet=True):
    """
    Verify which AWS accounts have been configured for CloudFormation stack set by attempting to assume the stack set execution role

    :param credentials:                 AWS credentials to use when calling sts:AssumeRole
    :param account_ids:                 List of AWS accounts to check for stack set configuration
    :param quiet:                       Do not print partial results

    :return:                            List of account IDs in which assuming the stack set execution role worked
    """
    api_client = connect_service('sts', credentials, silent=True)
    configured_account_ids = []
    for account_id in account_ids:
        try:
            role_arn = 'arn:aws:iam::%s:role/AWSCloudFormationStackSetExecutionRole' % account_id
            api_client.assume_role(
                RoleArn=role_arn,
                RoleSessionName='opinel-get_stackset_ready_accounts')
            configured_account_ids.append(account_id)
        except Exception as e:
            pass

    if len(configured_account_ids) != len(account_ids) and not quiet:
        printInfo(
            'Only %d of these accounts have the necessary stack set execution role:'
            % len(configured_account_ids))
        printDebug(str(configured_account_ids))
    return configured_account_ids
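
A possible way to call get_stackset_ready_accounts, reusing opinel's credential helper as the other scripts do; the import path, profile name and account IDs below are assumptions/placeholders, not values from this project:

# Hypothetical usage of get_stackset_ready_accounts()
# (assumed import path, placeholder profile name and account IDs).
from opinel.utils.credentials import read_creds  # assumed import path

credentials = read_creds('default')
account_ids = ['111111111111', '222222222222']
ready_accounts = get_stackset_ready_accounts(credentials, account_ids, quiet=False)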
Example #11
 def _fetch_region(self, q, params):
     global api_clients
     try:
         while True:
             try:
                 region, targets = q.get()
                 #print('Targets for region %s : %s' % (region, str(targets)))
                 self.init_region_config(region)
                 api_client = connect_service(params['api_service'],
                                              params['credentials'],
                                              region,
                                              silent=True)
                 api_clients[region] = api_client
                 # TODO : something here for single_region stuff
                 self.regions[region].fetch_all(
                     api_client, self.fetchstatuslogger, params['q'],
                     targets)  # params['targets'])
                 self.fetchstatuslogger.counts['regions']['fetched'] += 1
             except Exception as e:
                 printException(e)
             finally:
                 q.task_done()
     except Exception as e:
         printException(e)
         pass
Example #12
 def setup(self):
     configPrintException(True)
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] == None:
         self.creds = read_creds('travislike')
     self.api_client = connect_service('iam', self.creds)
     self.python = re.sub(r'\W+', '', sys.version)
     self.cleanup = {'groups': [], 'users': []}
Example #13
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument(
        'user-name',
        help_string='Your username (automatically fetched using iam:GetUser if not provided).')

    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]
    user_name = args.user_name[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        printError('Error: failed to create IAM API client.')
        return 42

    # Set the user name
    if not user_name:
        try:
            printInfo('Searching for username...')
            user_name = iam_client.get_user()['User']['UserName']
            if not user_name:
                printInfo('Error: could not find user name to enable MFA for.')
                return 42
        except Exception as e:
            printException(e)

    # Create and activate the MFA device
    credentials['SerialNumber'] = enable_mfa(iam_client, user_name)

    # Update the credentials file
    write_creds_to_aws_credentials_file(profile_name, credentials)
    sample_command = 'awsrecipes_init_sts_session.py %s' % (
        ('--profile %s' % profile_name) if profile_name != 'default' else '')
    printInfo('Your credentials file has been updated.\n' \
              'You may now initiate STS sessions to access the AWS APIs with the following command:\n' \
              '\n    %s\n' % sample_command)
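
The enable_mfa helper used above is not shown on this page; at its core it needs the two IAM calls sketched below to create a virtual MFA device and bind it to the user. The real helper presumably also displays the QR code or seed and prompts for the two authentication codes.

# Rough sketch of the IAM calls behind an enable_mfa-style helper
# (placeholder user name and authentication codes).
import boto3

iam = boto3.client('iam')
user_name = 'example-user'  # placeholder

device = iam.create_virtual_mfa_device(VirtualMFADeviceName=user_name)['VirtualMFADevice']
# device['Base32StringSeed'] / device['QRCodePNG'] must be shown to the user at this point
iam.enable_mfa_device(UserName=user_name,
                      SerialNumber=device['SerialNumber'],
                      AuthenticationCode1='123456',  # placeholder codes from the MFA app
                      AuthenticationCode2='654321')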
Example #14
 def setup(self):
     configPrintException(True)
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] == None:
         self.creds = read_creds('travislike')
     self.api_client = connect_service('cloudformation', self.creds,
                                       'us-east-1')
     self.python = re.sub(r'\W+', '', sys.version)
     self.cleanup = {'stacks': [], 'stacksets': []}
Example #15
    def fetch_credential_reports(self, credentials, ignore_exception=False):
        """
        Fetch the credential report

        :param: api_client
        :type: FOO
        :param: ignore_exception : initiate credential report creation as not  always ready
        :type: Boolean
        """
        credential_reports = {}
        try:
            api_client = connect_service('iam', credentials, silent=True)
            response = api_client.generate_credential_report()
            if response['State'] != 'COMPLETE':
                if not ignore_exception:
                    printError('Failed to generate a credential report.')
                return

            report = api_client.get_credential_report()['Content']
            lines = report.splitlines()
            keys = lines[0].decode('utf-8').split(',')
            self.fetchstatuslogger.counts['credential_reports'][
                'discovered'] = len(lines) - 1

            for line in lines[1:]:
                credential_report = {}
                values = line.decode('utf-8').split(',')
                user_id = values[0]
                for key, value in zip(keys, values):
                    credential_report[key] = value

                credential_report['password_last_used'] = self._sanitize_date(
                    credential_report['password_last_used'])
                credential_report[
                    'access_key_1_last_used_date'] = self._sanitize_date(
                        credential_report['access_key_1_last_used_date'])
                credential_report[
                    'access_key_2_last_used_date'] = self._sanitize_date(
                        credential_report['access_key_2_last_used_date'])
                credential_report['last_used'] = self._compute_last_used(
                    credential_report)
                credential_report['name'] = user_id
                credential_report['id'] = user_id
                manage_dictionary(credential_reports, user_id,
                                  credential_report)
                self.fetchstatuslogger.counts['credential_reports'][
                    'fetched'] = len(credential_reports)

            self.credential_reports = credential_reports

        except Exception as e:
            if ignore_exception:
                return
            printError('Failed to download a credential report.')
            printException(e)
Example #16
def main():

    # Parse arguments
    parser = OpinelArgumentParser(os.path.basename(__file__))
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('common-groups',
                        default=[],
                        nargs='+',
                        help='List of groups each IAM user should belong to.')
    parser.add_argument(
        'category-groups',
        default=[],
        nargs='+',
        help='List of category groups; each IAM user must belong to one.')
    parser.add_argument(
        'category-regex',
        default=[],
        nargs='+',
        help='List of regex enabling auto-assignment of category groups.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Create groups
    for group in args.category_groups + args.common_groups:
        try:
            printInfo('Creating group %s...' % group)
            iam_client.create_group(GroupName=group)
        except Exception as e:
            if e.response['Error']['Code'] != 'EntityAlreadyExists':
                printException(e)
Example #17
 def _fetch_region(self, q, params):
     global api_clients
     try:
         while True:
             try:
                 region, targets = q.get()
                 #print('Targets for region %s : %s' % (region, str(targets)))
                 self.init_region_config(region)
                 api_client = connect_service(params['api_service'], params['credentials'], region, silent = True)
                 api_clients[region] = api_client
                 # TODO : something here for single_region stuff
                 self.regions[region].fetch_all(api_client, self.fetchstatuslogger, params['q'], targets)  # params['targets'])
                 self.fetchstatuslogger.counts['regions']['fetched'] += 1
             except Exception as e:
                 printException(e)
             finally:
                 q.task_done()
     except Exception as e:
         printException(e)
         pass
Example #18
 def _fetch_region(self, q, params):
     global api_clients
     try:
         while True:
             try:
                 region = q.get()
                 self.init_region_config(region)
                 api_client = connect_service(params['api_service'],
                                              params['credentials'], region)
                 api_clients[region] = api_client
                 self.regions[region].fetch_all(api_client,
                                                self.fetchstatuslogger,
                                                params['q'],
                                                params['targets'])
                 self.fetchstatuslogger.counts['regions']['fetched'] += 1
             except Exception as e:
                 printException(e)
             finally:
                 q.task_done()
     except Exception as e:
         printException(e)
         pass
Example #19
    def fetch_all(self, credentials, regions=[], partition_name='aws', targets=None):
        """
        :param credentials:             AWS credentials to use for the API calls
        :param service:                 Name of the service
        :param regions:                 Name of regions to fetch data from
        :param partition_name:          AWS partition to connect to
        :param targets:                 Type of resources to be fetched; defaults to all.
        :return:
        """
        global status, formatted_string

        # Initialize targets
        if not targets:
            targets = type(self).targets
        printInfo('Fetching %s config...' % format_service_name(self.service))
        formatted_string = None

        # FIXME: the below should be moved to each provider's code

        # Connect to the service
        if self._is_provider('aws'):
            if self.service in ['s3']:  # S3 namespace is global but APIs aren't....
                api_clients = {}
                for region in build_region_list(self.service, regions, partition_name):
                    api_clients[region] = connect_service('s3', credentials, region, silent=True)
                api_client = api_clients[list(api_clients.keys())[0]]
            elif self.service == 'route53domains':
                api_client = connect_service(self.service, credentials, 'us-east-1',
                                             silent=True)  # TODO: use partition's default region
            else:
                api_client = connect_service(self.service, credentials, silent=True)

        elif self._is_provider('gcp'):
            api_client = gcp_connect_service(service=self.service, credentials=credentials)

        elif self._is_provider('azure'):
            api_client = azure_connect_service(service=self.service, credentials=credentials)

        # Threading to fetch & parse resources (queue consumer)
        params = {'api_client': api_client}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        # Threading to fetch & parse resources (queue consumer)
        target_queue = self._init_threading(self.__fetch_target, params, self.thread_config['parse'])

        # Threading to list resources (queue feeder)
        params = {'api_client': api_client, 'q': target_queue}

        if self._is_provider('aws'):
            if self.service in ['s3']:
                params['api_clients'] = api_clients

        service_queue = self._init_threading(self.__fetch_service, params, self.thread_config['list'])

        # Init display
        self.fetchstatuslogger = FetchStatusLogger(targets)

        # Go
        for target in targets:
            service_queue.put(target)

        # Join
        service_queue.join()
        target_queue.join()

        if self._is_provider('aws'):
            # Show completion and force newline
            if self.service != 'iam':
                self.fetchstatuslogger.show(True)
        else:
            self.fetchstatuslogger.show(True)
Example #20
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Fetch the long-lived key ID if STS credentials are used
    if credentials['SessionToken']:
        akia_creds = read_creds(profile_name + '-nomfa')
    else:
        akia_creds = credentials
    aws_key_id = akia_creds['AccessKeyId']
    aws_secret = akia_creds['SecretAccessKey']

    # Fetch username
    printInfo('Fetching username...')
    user_name = get_username(credentials)

    # Create the new key
    try:
        # Create a new IAM key
        printInfo('Creating a new access key for \'%s\'...' % user_name)
        new_credentials = iam_client.create_access_key(
            UserName=user_name)['AccessKey']
        show_access_keys(iam_client, user_name)
    except Exception as e:
        printException(e)
        return 42

    # Save the new key
    if credentials['SessionToken']:
        write_creds_to_aws_credentials_file(profile_name + '-nomfa',
                                            new_credentials)
    else:
        write_creds_to_aws_credentials_file(profile_name, new_credentials)
    printInfo('Credentials file updated with new access key.')

    printInfo('Verifying access with the new key...')
    # Sleep because the access key may not be active server-side...
    time.sleep(5)
    if credentials['SessionToken']:
        new_credentials = read_creds(profile_name + '-nomfa')
        new_credentials = init_sts_session(profile_name, new_credentials)
    else:
        new_credentials = read_creds(profile_name)
    # Confirm that it works...
    try:
        new_iam_client = connect_service('iam', new_credentials)
        printInfo('Deleting the old access key...')
        new_iam_client.delete_access_key(AccessKeyId=aws_key_id,
                                         UserName=user_name)
    except Exception as e:
        printException(e)
        printInfo('Restoring your old credentials...')
        # Restore the old key here
        if credentials['SessionToken']:
            write_creds_to_aws_credentials_file(profile_name + '-nomfa',
                                                akia_creds)
        else:
            write_creds_to_aws_credentials_file(profile_name, akia_creds)
        return 42

    # Summary of existing access keys
    try:
        show_access_keys(new_iam_client, user_name)
        printInfo('Success !')
    except Exception as e:
        printException(e)
        return 42
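
The rotation script above relies on show_access_keys and opinel's handle_truncated_response; with plain boto3, a user's keys can be listed with a paginator, as sketched below (the user name is a placeholder):

# Listing a user's access keys with a boto3 paginator (placeholder user name).
import boto3

iam = boto3.client('iam')
paginator = iam.get_paginator('list_access_keys')
for page in paginator.paginate(UserName='example-user'):
    for key in page['AccessKeyMetadata']:
        print(key['AccessKeyId'], key['Status'], key['CreateDate'])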
Example #21
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('common-groups',
                        default=[],
                        nargs='+',
                        help='List of groups each IAM user should belong to.')
    parser.add_argument(
        'category-groups',
        default=[],
        nargs='+',
        help='List of category groups; each IAM user must belong to one.')
    parser.add_argument(
        'category-regex',
        default=[],
        nargs='+',
        help='List of regex enabling auto-assignment of category groups.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Initialize and compile the list of regular expression for category groups
    category_regex = init_group_category_regex(args.category_groups,
                                               args.category_regex)

    # Ensure all default groups exist
    create_groups(iam_client, args.category_groups + args.common_groups)

    # Download IAM users and their group memberships
    printInfo('Downloading group membership information...')
    user_info = {}
    users = handle_truncated_response(iam_client.list_users, {},
                                      ['Users'])['Users']
    show_status(user_info, total=len(users), newline=False)
    thread_work(users,
                get_group_membership, {
                    'iam_client': iam_client,
                    'user_info': user_info
                },
                num_threads=30)
    show_status(user_info)

    # Iterate through users
    for user in user_info:
        printInfo('Checking configuration of \'%s\'...' % user)
        for group in args.common_groups:
            if group not in user_info[user]['groups']:
                printInfo(' - Adding to common group: %s' % group)
                iam_client.add_user_to_group(UserName=user, GroupName=group)
        category_found = False
        for i, regex in enumerate(category_regex):
            if regex and regex.match(user):
                category_found = True
                group = args.category_groups[i]
                if group not in user_info[user]['groups']:
                    printInfo(' - Adding to category group: %s' % group)
                    iam_client.add_user_to_group(UserName=user,
                                                 GroupName=group)
            elif not regex:
                default_group = args.category_groups[i]
        if not category_found and default_group not in user_info[user][
                'groups']:
            printInfo(' - Adding to default category group: %s' %
                      default_group)
            iam_client.add_user_to_group(UserName=user,
                                         GroupName=default_group)
Example #22
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('csv-credentials')
    parser.add_argument('mfa-serial')
    parser.add_argument('mfa-code')
    parser.parser.add_argument('--role-arn',
                                dest='role_arn',
                                default=None,
                                help='ARN of the assumed role.')
    parser.parser.add_argument('--external-id',
                                dest='external_id',
                                default=None,
                                help='External ID to use when assuming the role.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]

    if args.csv_credentials:
        # Read credentials from a CSV file
        credentials = {}
        credentials['AccessKeyId'], credentials['SecretAccessKey'], credentials['SerialNumber'] = read_creds_from_csv(args.csv_credentials)
        if not credentials['AccessKeyId'] or not credentials['SecretAccessKey']:
            printError('Failed to read credentials from %s' % args.csv_credentials)
            return 42
        use_found_credentials = True
    else:
        # Check for migration from existing profile to no-mfa profile
        use_found_credentials = False
        credentials = read_creds_from_aws_credentials_file(profile_name)
        if 'AccessKeyId' in credentials and credentials['AccessKeyId'] != None and credentials['SecretAccessKey'] != None and credentials['SerialNumber'] == None and credentials['SessionToken'] == None:
            if prompt_4_yes_no('Found long-lived credentials for the profile \'%s\'. Do you want to use those when configuring MFA' % profile_name):
               use_found_credentials = True
               iam_client = connect_service('iam', credentials)
               try:
                   printInfo('Trying to read the MFA serial number associated with this IAM user...')
                   user_name = iam_client.get_user()['User']['UserName']
                   mfa_devices = iam_client.list_mfa_devices(UserName = user_name)['MFADevices']
                   credentials['SerialNumber'] = mfa_devices[0]['SerialNumber']
               except Exception as e:
                   printException(e)
                   pass

    if not use_found_credentials:
       # Get values
        credentials['AccessKeyId'] = prompt_4_value('AWS Access Key ID: ', no_confirm = True)
        credentials['SecretAccessKey'] = prompt_4_value('AWS Secret Access Key: ', no_confirm = True)
    if 'SerialNumber' not in credentials or not credentials['SerialNumber']:
        credentials['SerialNumber'] = prompt_4_mfa_serial()

    # Check for overwrite
    while True:
        c = read_creds_from_aws_credentials_file(profile_name)
        if 'AccessKeyId' in c and c['AccessKeyId']:
            if not prompt_4_yes_no('The profile \'%s\' already exists. Do you want to overwrite the existing values' % profile_name):
                if not prompt_4_yes_no('Do you want to create a new profile with these credentials'):
                    printError('Configuration aborted.')
                    return
                profile_name = prompt_4_value('Profile name: ')
            else:
                break
        else:
            break

    # Write values to credentials file
    write_creds_to_aws_credentials_file(profile_name, credentials)

    # Delete CSV file?
    if args.csv_credentials and prompt_4_yes_no('Do you want to delete the CSV file that contains your long-lived credentials?'):
        os.remove(args.csv_credentials)
Example #23
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('user-name', help='Name of the IAM user(s).')
    parser.parser.add_argument('--all-users',
                               dest='all_users',
                               default=False,
                               action='store_true',
                               help='Go through all IAM users')
    parser.parser.add_argument(
        '--arn',
        dest='arn',
        default=[],
        nargs='+',
        help='ARN of the target group(s), role(s), or user(s)')
    parser.parser.add_argument('--group-name',
                               dest='group_name',
                               default=[],
                               nargs='+',
                               help='Name of the IAM group(s)')
    parser.parser.add_argument('--all-groups',
                               dest='all_groups',
                               default=False,
                               action='store_true',
                               help='Go through all IAM groups')
    parser.parser.add_argument('--role-name',
                               dest='role_name',
                               default=[],
                               nargs='+',
                               help='Name of the IAM role(s)')
    parser.parser.add_argument('--all-roles',
                               dest='all_roles',
                               default=False,
                               action='store_true',
                               help='Go through all IAM roles')
    parser.parser.add_argument('--policy-arn',
                               dest='policy_arn',
                               default=[],
                               nargs='+',
                               help='ARN of the IAM policy/ies')
    parser.parser.add_argument('--all',
                               dest='all',
                               default=False,
                               action='store_true',
                               help='Go through all IAM resources')

    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Normalize targets
    targets = []
    for arn in args.arn:
        arn_match = re_arn.match(arn)
        if arn_match:
            resource = arn_match.groups()[4].split('/')
            targets.append((resource[0], resource[-1]))
    for group_name in args.group_name:
        if group_name:
            targets.append(('group', group_name))
    for role_name in args.role_name:
        if role_name:
            targets.append(('role', role_name))
    for user_name in args.user_name:
        if user_name:
            targets.append(('user', user_name))
    if args.all or args.all_groups:
        printInfo('Fetching all IAM groups...')
        for group in handle_truncated_response(iam_client.list_groups, {},
                                               ['Groups'])['Groups']:
            targets.append(('group', group['GroupName']))
    if args.all or args.all_roles:
        printInfo('Fetching all IAM roles...')
        for role in handle_truncated_response(iam_client.list_roles, {},
                                              ['Roles'])['Roles']:
            targets.append(('role', role['RoleName']))
    if args.all or args.all_users:
        printInfo('Fetching all IAM users...')
        for user in handle_truncated_response(iam_client.list_users, {},
                                              ['Users'])['Users']:
            targets.append(('user', user['UserName']))

    # Get all policies that apply to the targets and aggregate them into a single file
    printInfo('Fetching all inline and managed policies in use...')
    managed_policies = {}
    for resource_type, resource_name in targets:
        policy_documents = get_policies(iam_client, managed_policies,
                                        resource_type, resource_name)
        write_permissions(merge_policies(policy_documents), resource_type,
                          resource_name)

    # Get requested managed policies
    for policy_arn in args.policy_arn:
        policy_documents = [
            get_managed_policy_document(iam_client, policy_arn,
                                        managed_policies)
        ]
        write_permissions(merge_policies(policy_documents), 'policy',
                          policy_arn)
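
get_managed_policy_document, called above, ultimately resolves a policy ARN to its default version document; the underlying IAM calls look roughly like this (assumption: the opinel helper also caches results in managed_policies):

# Rough sketch of resolving a managed policy ARN to its document
# (assumption: the helper used above adds caching).
import boto3

iam = boto3.client('iam')
policy_arn = 'arn:aws:iam::aws:policy/ReadOnlyAccess'  # example AWS managed policy
default_version = iam.get_policy(PolicyArn=policy_arn)['Policy']['DefaultVersionId']
document = iam.get_policy_version(PolicyArn=policy_arn,
                                  VersionId=default_version)['PolicyVersion']['Document']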
Example #24
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.parser.add_argument('--role-name',
                                dest='role_name',
                                default=[],
                                nargs='+',
                                required=True,
                                help='Name of the role to be assumed in each child account.')
    parser.parser.add_argument('--ou',
                                dest='org_unit',
                                default=[],
                                nargs='+',
                                help='')
    parser.parser.add_argument('--profile-prefix',
                                dest='profile_prefix',
                                default=None,
                                help='')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    source_profile = AWSProfiles.get(args.profile)[0]
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Get all accounts to setup
    api_client = connect_service('organizations', credentials)
    if len(args.org_unit) == 0:
        if prompt_4_yes_no('Do you want to specify a particular organizational unit'):
            ous = get_organizational_units(api_client)
            choice = prompt_4_value('Which OU do you want to configure IAM for', choices = [ou['Name'] for ou in ous], display_indices = True, no_confirm = True, return_index = True)
            account_list = list_accounts_for_parent(api_client, ous[choice]) 
        else:
            account_list = get_organization_accounts(api_client)

    # Setup the accounts
    organization_profiles = {'ready': [], 'notready': []}
    
    for account in account_list:
        printInfo('Validating role name in %s...' % account['Name'], newLine = False)
        profile_name = account['Name'].lower().replace(' ', '_')
        if args.profile_prefix:
            profile_name = '%s-%s' % (args.profile_prefix, profile_name)
        profile = AWSProfile(filename = aws_config_file, name = profile_name, account_id = account['Id'])
        profile.set_attribute('source_profile', source_profile.name)
        success = False
        for role_name in args.role_name:
            try:
                role_arn = 'arn:aws:iam::%s:role/%s' % (account['Id'], role_name)
                role_credentials = assume_role(role_name, credentials, role_arn, 'aws-recipes', silent = True )
                profile.set_attribute('role_arn', 'arn:aws:iam::%s:role/%s' % (account['Id'], role_name))
                profile.set_attribute('source_profile', source_profile.name)
                organization_profiles['ready'].append(profile)
                printInfo(' success')
                success = True
                break
            except Exception as e:
                pass
        if not success:
            printInfo(' failure')
            organization_profiles['notready'].append(profile)

    for profile in organization_profiles['ready']:
        profile.write()

    for profile in organization_profiles['notready']:
        printError('Failed to determine a valid role in %s (%s)' % (profile.name, profile.account_id))
Example #25
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('user-name', help='Name of user(s) to be created.')
    parser.add_argument('group-name',
                        help='Name of group(s) the user(s) will belong to.')
    parser.add_argument(
        'force-common-group',
        default=False,
        action='store_true',
        help='Automatically add user(s) to the common group(s)')
    parser.add_argument('no-mfa',
                        default=False,
                        action='store_true',
                        help='Do not configure and enable MFA.')
    parser.add_argument('no-password',
                        default=False,
                        action='store_true',
                        help='Do not create a password and login')
    parser.add_argument('no-access-key',
                        default=False,
                        action='store_true',
                        help='Do not generate an access key')
    parser.add_argument('always-trust',
                        default=False,
                        action='store_true',
                        help='Always trust the GPG key when encrypting credentials.')
    parser.add_argument('allow-plaintext',
                        default=False,
                        action='store_true',
                        help='')
    parser.add_argument('no-prompt-before-plaintext',
                        dest='prompt_before_plaintext',
                        default=True,
                        action='store_false',
                        help='')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]
    if not len(args.user_name):
        printError("Error, you need to provide at least one user name")
        return 42

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Initialize and compile the list of regular expression for category groups
    #if 'category_groups' in default_args and 'category_regex' in default_args:
    #    category_regex = init_iam_group_category_regex(default_args['category_groups'], default_args['category_regex'])

    # Iterate over users
    for user in args.user_name:

        # Search for the GPG key
        abort = False
        gpg_key = get_gpg_key(user)
        if not gpg_key:
            printInfo('No PGP key found for user matching %s' % user)
            if args.allow_plaintext:
                if args.prompt_before_plaintext and not prompt_4_yes_no(
                        'Save unencrypted value'):
                    abort = True
            else:
                abort = True
        if abort:
            printError(
                'Will not create user %s as credentials cannot be saved. Use --allow-plaintext to enable storage of unencrypted credentials.'
                % user)
            continue

        # Prepare the output folder
        try:
            user_dir = 'users/%s' % user
            os.makedirs(user_dir)
        except Exception as e:
            printError(
                'Error, failed to create a temporary folder for user %s.' %
                user)
            continue

        # Determine the groups
        groups = args.group_name
        if args.force_common_group:
            groups += args.common_groups
        # Add user to a category group


        # if 'category_groups' in default_args and len(default_args['category_groups']) > 0:
        #     add_user_to_category_group(iam_client, args.group_name, default_args['category_groups'], category_regex, user)

        # Create the user
        user_data = create_user(iam_client, user, groups, not args.no_password,
                                not args.no_mfa, not args.no_access_key)
        if 'errors' in user_data and len(user_data['errors']) > 0:
            printError('Error doing the following actions:\n%s' %
                       '\n'.join(' - %s' % action
                                 for action in user_data['errors']))

        # Save data
        if 'password' in user_data:
            gpg_and_write('%s/password.txt' % user_dir, user_data['password'],
                          gpg_key, args.always_trust)
        if 'AccessKeyId' in user_data:
            credentials = '[%s]\naws_access_key_id = %s\naws_secret_access_key = %s\n' % (
                profile_name, user_data['AccessKeyId'],
                user_data['SecretAccessKey'])
            # TODO: mfa
            gpg_and_write('%s/credentials' % user_dir, credentials, gpg_key,
                          args.always_trust)

        # Create a zip archive
        f = zipfile.ZipFile('users/%s.zip' % user, 'w')
        for root, dirs, files in os.walk(user_dir):
            for file in files:
                f.write(os.path.join(root, file))
        f.close()
        shutil.rmtree(user_dir)
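
create_user above bundles several IAM operations; a stripped-down sketch of the main boto3 calls such a helper has to make (assumption: opinel's real helper also generates the password, configures MFA and collects errors):

# Stripped-down sketch of the IAM calls behind a create_user-style helper
# (assumption: the real helper generates the password and collects errors).
def create_user_sketch(iam_client, user_name, groups, password=None, with_access_key=True):
    user_data = {}
    iam_client.create_user(UserName=user_name)
    for group in groups:
        iam_client.add_user_to_group(UserName=user_name, GroupName=group)
    if password:
        iam_client.create_login_profile(UserName=user_name,
                                        Password=password,
                                        PasswordResetRequired=True)
        user_data['password'] = password
    if with_access_key:
        key = iam_client.create_access_key(UserName=user_name)['AccessKey']
        user_data['AccessKeyId'] = key['AccessKeyId']
        user_data['SecretAccessKey'] = key['SecretAccessKey']
    return user_data

# Usage: create_user_sketch(boto3.client('iam'), 'example-user', ['Developers'], password='ChangeMe123!')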
Example #26
 def setup(self):
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] == None:
         self.creds = read_creds('travislike')
     self.api_client = connect_service('cloudtrail', self.creds,
                                       'us-east-1')
Example #27
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--filters',
                               dest='filters',
                               default=None,
                               help='')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Build list of region
    regions = build_region_list('ec2', args.regions, args.partition_name)
    printInfo(str(regions))

    # Build filters
    filters = json.loads(args.filters) if args.filters else None

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # List all EC2 instances
    instances = []
    for region in regions:
        printInfo('Fetching instances in %s...' % region)
        ec2_client = connect_service('ec2', credentials, region_name=region)
        describe_args = {'Filters': filters} if filters else {}
        reservations = handle_truncated_response(
            ec2_client.describe_instances, describe_args,
            ['Reservations'])['Reservations']
        for r in reservations:
            instances += r['Instances']
    printInfo(' Found %d instances' % len(instances))

    # Build list of private and public IPs
    prvips = {}
    pubips = {}
    for i in instances:
        security_groups = i['SecurityGroups']
        for eni in i['NetworkInterfaces']:
            for prvip in eni['PrivateIpAddresses']:
                prvips[prvip['PrivateIpAddress']] = {
                    'security_groups': security_groups
                }
                if 'Association' in prvip:
                    pubips[prvip['Association']['PublicIp']] = {
                        'security_groups': security_groups
                    }

    # Create target files
    with open('targets-%s-prv.txt' % profile_name, 'wt') as f:
        for prvip in prvips:
            f.write('%s\n' % prvip)
    with open('targets-%s-pub.txt' % profile_name, 'wt') as f:
        for pubip in pubips:
            f.write('%s\n' % pubip)
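
handle_truncated_response is opinel's generic pagination helper; with plain boto3 the same enumeration can be written with a paginator, as sketched below (region and filter values are placeholders):

# Enumerating EC2 instances with a boto3 paginator instead of
# handle_truncated_response (placeholder region and filter values).
import boto3

ec2 = boto3.client('ec2', region_name='us-east-1')
paginator = ec2.get_paginator('describe_instances')
instances = []
for page in paginator.paginate(Filters=[{'Name': 'instance-state-name', 'Values': ['running']}]):
    for reservation in page['Reservations']:
        instances += reservation['Instances']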
Example #28
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions', help = 'Regions where stack instances will be created.')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--stack-set-region',
                               dest='stack_set_region',
                               default=None,
                               required=True,
                               help='Region where the stack set will be created.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Validate the stack set region
    regions = build_region_list('events', args.regions, args.partition_name)
    if args.stack_set_region not in regions:
        printError('Error, the stack set region \'%s\' is not valid. Acceptable values are:' % args.stack_set_region)
        printError(', '.join(regions))
        return 42

    # Determine the master account id to exclude it from the list of accounts to be configured for event forwarding
    monitoring_account_id = get_aws_account_id(credentials)

    # Connect to the AWS Organizations API
    api_client = connect_service('organizations', credentials)

    # List all accounts in the organization
    org_account_ids = []
    org_accounts = handle_truncated_response(api_client.list_accounts, {}, ['Accounts'])['Accounts']
    org_account_ids = [ account['Id'] for account in org_accounts if account['Status'] == 'ACTIVE' and account['Id'] != monitoring_account_id ]
    printInfo('Found %d accounts in the organization.' % len(org_account_ids))
    printDebug(str(org_account_ids))

    # Verify that the account has been configured for stack sets by attempting to assume the stack set execution role
    api_client = connect_service('sts', credentials, silent = True)
    configured_org_account_ids = []
    for account_id in org_account_ids:
        try:
            role_arn = 'arn:aws:iam::%s:role/AWSCloudFormationStackSetExecutionRole' % account_id
            api_client.assume_role(RoleArn = role_arn, RoleSessionName = 'foobar')
            configured_org_account_ids.append(account_id)
        except Exception as e:
            pass
    if len(configured_org_account_ids) != len(org_account_ids):
        printInfo('Only %d of these accounts have the necessary stack set execution role:' % len(configured_org_account_ids))
        printInfo(str(configured_org_account_ids))

    # For each region with cloudwatch events, put a permission for each account
    printInfo('Adding permissions on the default event buses...')
    for region in regions:
        api_client = connect_service('events', credentials, region)
        for account in org_accounts:
            account_id = account['Id']
            if account_id not in configured_org_account_ids:
                continue
            account_name = account['Name']
            api_client.put_permission(Action = 'events:PutEvents', Principal = account_id, StatementId = 'AWSRecipesAllow%s' % account_id)

    # Create the stack set
    try:
        stack_set_name = 'CloudwatchEventsForwarding'
        api_client = connect_service('cloudformation', credentials, args.stack_set_region)
        # TBD: distinguish between the region where the stack set is created and the regions where the stack instances are created...
        template_path = os.path.join((os.path.dirname(os.path.realpath(__file__))), '../CloudFormationTemplates/CloudwatchEventsForwarding.region.yml')
        with open(template_path, 'rt') as f:
            template_body = f.read()
        template_parameters = [ {'ParameterKey': 'EventsMonitoringAccountID', 'ParameterValue': get_aws_account_id(credentials) } ]
        printInfo('Creating the stack set...')
        response = api_client.create_stack_set(StackSetName = stack_set_name, TemplateBody = template_body, Parameters = template_parameters)
    except Exception as e:
        if e.response['Error']['Code'] != 'NameAlreadyExistsException':
            printException(e)
            printError('Failed to create the stack set.')
            return 42

    # Create the stack instances: one per region in every account
    operation_preferences = {
        'FailureTolerancePercentage': 100,
        'MaxConcurrentPercentage': 100
    }
    response = api_client.create_stack_instances(StackSetName = stack_set_name, Accounts = configured_org_account_ids, Regions = regions, OperationPreferences = operation_preferences)
    printInfo('Successfully started operation Id %s' % response['OperationId'])
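
create_stack_instances only starts an asynchronous operation; the OperationId printed above can be polled to find out whether the instances were actually created. A minimal follow-up sketch under that assumption (wait_for_stack_set_operation is a hypothetical helper, not part of the original recipe), built on the standard CloudFormation describe_stack_set_operation API:

import time

def wait_for_stack_set_operation(api_client, stack_set_name, operation_id, delay=15):
    # Poll the stack set operation until it leaves the RUNNING/QUEUED states.
    while True:
        operation = api_client.describe_stack_set_operation(
            StackSetName=stack_set_name,
            OperationId=operation_id)['StackSetOperation']
        if operation['Status'] not in ('RUNNING', 'QUEUED'):
            return operation['Status']  # e.g. SUCCEEDED, FAILED, STOPPED
        time.sleep(delay)
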
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('force')
    parser.add_argument('dry-run')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--interactive',
                        dest='interactive',
                        default=False,
                        action='store_true',
                        help='Interactive prompt to manually enter CIDRs.')
    parser.parser.add_argument('--csv-ip-ranges',
                        dest='csv_ip_ranges',
                        default=[],
                        nargs='+',
                        help='CSV file(s) containing CIDRs information.')
    parser.parser.add_argument('--skip-first-line',
                        dest='skip_first_line',
                        default=False,
                        action='store_true',
                        help='Skip first line when parsing CSV file.')
    parser.parser.add_argument('--attributes',
                        dest='attributes',
                        default=[],
                        nargs='+',
                        help='Name of the attributes to enter for each CIDR.')
    parser.parser.add_argument('--mappings',
                        dest='mappings',
                        default=[],
                        nargs='+',
                        help='Column number matching attributes when headers differ.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Initialize the list of regions to work with
    regions = build_region_list('ec2', args.regions, args.partition_name)

    # For each profile/environment...
    for profile_name in args.profile:

        # Interactive mode
        if args.interactive:

            # Initialize prefixes
            attributes = args.attributes
            filename = 'ip-ranges-%s.json' % profile_name
            if os.path.isfile(filename):
                printInfo('Loading existing IP ranges from %s' % filename)
                prefixes = read_ip_ranges(filename)
                # Initialize attributes from existing values
                if attributes == []:
                    for prefix in prefixes:
                        for key in prefix:
                            if key not in attributes:
                                attributes.append(key)
            else:
                prefixes = []

            # IP prefix does not need to be specified as an attribute
            attributes = [a for a in attributes if a != 'ip_prefix']

            # Prompt for new entries
            while prompt_4_yes_no('Add a new IP prefix to the ip ranges'):
                ip_prefix = prompt_4_value('Enter the new IP prefix:')
                obj = {}
                for a in attributes:
                    obj[a] = prompt_4_value('Enter the \'%s\' value:' % a)
                prefixes.append(new_prefix(ip_prefix, obj))

        # Support loading from CSV file
        elif len(args.csv_ip_ranges) > 0:

            # Initialize prefixes
            prefixes = []

            # Load CSV file contents
            for filename in args.csv_ip_ranges:
                with open(filename, 'rt') as f:
                    csv_contents = f.readlines()

                # Initialize mappings
                attributes = args.attributes
                mappings = {}
                if attributes == []:
                    # Follow structure of first line
                    headers = csv_contents.pop(0).strip().split(',')
                    for index, attribute in enumerate(headers):
                        mappings[attribute] = index
                elif attributes and args.mappings == []:
                    # Follow structure of first line but only map a subset of fields
                    headers = csv_contents.pop(0).strip().split(',')
                    attributes.append('ip_prefix')
                    for attribute in set(attributes):
                        mappings[attribute] = headers.index(attribute)
                else:
                    # Indices of columns are provided as an argument
                    for index, attribute in enumerate(attributes):
                        mappings[attribute] = int(args.mappings[index])
                    if args.skip_first_line:
                        csv_contents.pop(0)

                # For each line...
                for line in csv_contents:
                    ip_prefix = {}
                    values = line.strip().split(',')
                    if len(values) < len(mappings):
                        continue
                    for attribute in mappings:
                        ip_prefix[attribute] = values[mappings[attribute]]
                    if 'ip_prefix' in mappings and 'mask' in mappings:
                        ip = ip_prefix.pop('ip_prefix')
                        mask = ip_prefix.pop('mask')
                        ip_prefix['ip_prefix'] = '%s/%s' % (ip, mask.replace('/',''))
                    prefixes.append(ip_prefix)
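                    # Illustrative example (values are assumptions, not taken from the original):
                    # the line "203.0.113.0,/24,prod" parsed with mappings
                    # {'ip_prefix': 0, 'mask': 1, 'environment': 2} yields
                    # {'environment': 'prod', 'ip_prefix': '203.0.113.0/24'}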

        # AWS mode
        else:

            # Initialize IP addresses
            printInfo('Fetching public IP information for the \'%s\' environment...' % profile_name)
            ip_addresses = {}

            # Search for AWS credentials
            credentials = read_creds(profile_name)
            if not credentials['AccessKeyId']:
                return 42

            # For each region...
            for region in regions:

                # Connect to EC2
                ec2_client = connect_service('ec2', credentials, region)
                if not ec2_client:
                    continue

                # Get public IP addresses associated with EC2 instances
                printInfo('...in %s: EC2 instances' % region)
                reservations = handle_truncated_response(ec2_client.describe_instances, {}, ['Reservations'])
                for reservation in reservations['Reservations']:
                    for i in reservation['Instances']:
                        if 'PublicIpAddress' in i:
                            ip_addresses[i['PublicIpAddress']] = new_ip_info(region, i['InstanceId'], False)
                            get_name(i, ip_addresses[i['PublicIpAddress']], 'InstanceId')
                        if 'NetworkInterfaces' in i:
                            for eni in i['NetworkInterfaces']:
                                if 'Association' in eni:
                                    ip_addresses[eni['Association']['PublicIp']] = new_ip_info(region, i['InstanceId'], False) # At that point, we don't know whether it's an EIP or not...
                                    get_name(i, ip_addresses[eni['Association']['PublicIp']], 'InstanceId')

                # Get all EIPs (to handle unassigned cases)
                printInfo('...in %s: Elastic IP addresses' % region)
                eips = handle_truncated_response(ec2_client.describe_addresses, {}, ['Addresses'])
                for eip in eips['Addresses']:
                    instance_id = eip['InstanceId'] if 'InstanceId' in eip else None
                    # Non-associated EC2-Classic EIPs have an empty string for the instance ID (in a VPC, the attribute is simply absent)
                    if instance_id == '':
                        instance_id = None
                    ip_addresses[eip['PublicIp']] = new_ip_info(region, instance_id, True)
                    ip_addresses[eip['PublicIp']]['name'] = instance_id

                # Format
                prefixes = []
                for ip in ip_addresses:
                    prefixes.append(new_prefix(ip, ip_addresses[ip]))

        # Generate an ip-ranges-<profile>.json file
        save_ip_ranges(profile_name, prefixes, args.force_write, args.debug)
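
The AWS-mode branch above relies on two helpers, new_ip_info and new_prefix, that are defined elsewhere in the original script. A minimal sketch of what they are assumed to do (the exact field names are assumptions):

def new_ip_info(region, instance_id, is_elastic):
    # Assumed shape: record where the address was found and whether it is an EIP.
    return {'region': region, 'instance_id': instance_id, 'is_elastic': is_elastic}

def new_prefix(ip_prefix, obj):
    # Assumed behavior: attach the address itself to the attribute dictionary.
    obj['ip_prefix'] = ip_prefix
    return obj
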
示例#30
0
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.add_argument('bucket-name')
    parser.parser.add_argument('--aws-account-id',
                                dest='aws_account_id',
                                default=[ None ],
                                nargs='+',
                                help='AWS account ID owning the CloudTrail logs (defaults to the account of the credentials in use).')
    parser.parser.add_argument('--from',
                                dest='from_date',
                                default=[ None ],
                                nargs='+',
                                help='Start date of the logs to download, formatted YYYY/MM/DD.')
    parser.parser.add_argument('--to',
                                dest='to_date',
                                default=[ None ],
                                nargs='+',
                                help='End date of the logs to download, formatted YYYY/MM/DD.')

    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42



    # Arguments
    profile_name = args.profile[0]
    try:
        from_date = datetime.datetime.strptime(args.from_date[0], "%Y/%m/%d").date()
        to_date = datetime.datetime.strptime(args.to_date[0], "%Y/%m/%d").date()
        delta = to_date - from_date
    except Exception as e:
        printException(e)
        printError('Error: dates must use the format YYYY/MM/DD')
        return 42
    if delta.days < 0:
        printError('Error: your \'to\' date is earlier than your \'from\' date')
        return 42

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Fetch AWS account ID
    if not args.aws_account_id[0]:
        printInfo('Fetching the AWS account ID...')
        aws_account_id = get_aws_account_id(credentials)
    else:
        aws_account_id = args.aws_account_id[0]
    global cloudtrail_log_path
    cloudtrail_log_path = cloudtrail_log_path.replace('AWS_ACCOUNT_ID', aws_account_id)

    # Create download dir
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Iterate through regions
    s3_clients = {}
    for region in build_region_list('cloudtrail', args.regions, args.partition_name):

        # Connect to CloudTrail
        cloudtrail_client = connect_service('cloudtrail', credentials, region)
        if not cloudtrail_client:
            continue

        # Get information about the S3 bucket that receives CloudTrail logs
        trails = cloudtrail_client.describe_trails()
        for trail in trails['trailList']:
            bucket_name = trail['S3BucketName']
            prefix = trail['S3KeyPrefix'] if 'S3KeyPrefix' in trail else ''

        # Connect to S3
        manage_dictionary(s3_clients, region, connect_service('s3', credentials, region))
        target_bucket_region = get_s3_bucket_location(s3_clients[region], bucket_name)
        manage_dictionary(s3_clients, target_bucket_region, connect_service('s3', credentials, target_bucket_region))
        s3_client = s3_clients[target_bucket_region]

        # Generate base path for files
        log_path = os.path.join(prefix, cloudtrail_log_path.replace('REGION', region))

        # Download files
        printInfo('Downloading log files in %s... ' % region, False)
        keys = []
        for i in range(delta.days + 1):
            day = from_date + timedelta(days=i)
            folder_path = os.path.join(log_path, day.strftime("%Y/%m/%d"))
            try:
                objects = handle_truncated_response(s3_client.list_objects, {'Bucket': bucket_name, 'Prefix': folder_path}, ['Contents'])
                for o in objects['Contents']:
                    keys.append([o['Key'], 0])
            except Exception as e:
                printException(e)
        thread_work(keys, download_object, params = {'Bucket': bucket_name, 'S3Client': s3_client}, num_threads = 100)
        printInfo('Done')

    # Iterate through files and gunzip 'em
    printInfo('Decompressing files...')
    gzlogs = []
    for root, dirnames, filenames in os.walk(download_folder):
        for filename in filenames:
            gzlogs.append(filename)
    thread_work(gzlogs, gunzip_file, num_threads = 30)
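
The snippet above also references two module-level globals, download_folder and cloudtrail_log_path, that are defined outside the function. A minimal sketch of plausible definitions, assuming the standard CloudTrail S3 key layout (AWSLogs/<account-id>/CloudTrail/<region>/<yyyy>/<mm>/<dd>/...); the actual values in the original script may differ:

# Assumed globals (not shown above); AWS_ACCOUNT_ID and REGION are the
# placeholders substituted in main() before listing S3 objects.
download_folder = 'trails'
cloudtrail_log_path = 'AWSLogs/AWS_ACCOUNT_ID/CloudTrail/REGION'
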
示例#31
0
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions',
                        help='Regions where the stack(s) will be created.')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--template',
                               dest='template',
                               default=None,
                               required=True,
                               help='Path to the CloudFormation template.')
    parser.parser.add_argument('--parameters',
                               dest='parameters',
                               default=None,
                               nargs='+',
                               help='Optional parameters for the stack.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if __name__ == '__main__':
        if not credentials['AccessKeyId']:
            return 42

    # Validate the regions
    regions = build_region_list('cloudformation', args.regions,
                                args.partition_name)
    if len(args.regions) == 0 and not prompt_4_yes_no(
            'You didn\'t specify a region for this stack, do you want to create it in all regions?'
    ):
        return 42

    for region in regions:
        try:
            # Create stack
            api_client = connect_service('cloudformation', credentials, region)
            params = {}
            params['api_client'] = api_client
            if not args.template.startswith('/'):
                params['template_path'] = os.path.join(
                    (os.path.dirname(os.path.realpath(__file__))),
                    args.template)
            else:
                params['template_path'] = args.template
            if args.parameters:
                params['template_parameters'] = args.parameters
            params['stack_name'] = make_awsrecipes_stack_name(
                params['template_path'])
            create_or_update_stack(**params)
        except Exception as e:
            printException(e)
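
make_awsrecipes_stack_name is a helper defined elsewhere; the sketch below is only an assumption about the kind of derivation it performs (turning the template file name into a CloudFormation-safe stack name), with a hypothetical prefix:

import os
import re

def make_awsrecipes_stack_name(template_path):
    # Assumed behavior: e.g. '../CloudFormationTemplates/Foo-Bar.yml' -> 'AWSRecipes-Foo-Bar'.
    base = os.path.basename(template_path).split('.')[0]
    return 'AWSRecipes-%s' % re.sub(r'[^A-Za-z0-9-]', '-', base)
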
示例#32
0
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions',
                        help='Regions where stack instances will be created.')
    parser.add_argument('partition-name')
    parser.parser.add_argument(
        '--master-region',
        dest='master_region',
        default=None,
        required=True,
        help='Region where the global stacks and stack sets will be created.')
    parser.parser.add_argument(
        '--stack-prefix',
        dest='stack_prefix',
        default=None,
        required=True,
        help=
        'Prefix of the CF Templates to be used when creating/updating stacks.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Get the master AWS account ID
    master_account_id = get_aws_account_id(credentials)

    # Get list of accounts ready for Stack sets
    api_client = connect_service('organizations', credentials, silent=True)
    try:
        org_account_ids = get_organization_account_ids(api_client, quiet=False)
    except:
        org_account_ids = [master_account_id]
    configured_org_account_ids = get_stackset_ready_accounts(credentials,
                                                             org_account_ids,
                                                             quiet=False)

    # Validate the stack set region
    regions = build_region_list('cloudformation', args.regions,
                                args.partition_name)
    if args.master_region not in regions:
        printError(
            'Error, the stack set region \'%s\' is not valid. Acceptable values are:'
            % args.master_region)
        printError(', '.join(regions))
        return 42

    # Connect
    printInfo('')
    api_client = connect_service('cloudformation',
                                 credentials,
                                 args.master_region,
                                 silent=True)

    # Establish the list of existing stacks and stack sets
    deployed_resources = {'stacks': {}, 'stack_sets': {}}
    printInfo('Fetching existing stacks and stack sets in %s in %s...' %
              (args.master_region, master_account_id))
    for stack in handle_truncated_response(
            api_client.list_stacks, {}, ['StackSummaries'])['StackSummaries']:
        if stack['StackStatus'] not in ['CREATE_FAILED', 'DELETE_COMPLETE']:
            deployed_resources['stacks'][stack['StackName']] = stack
    for stack_set in handle_truncated_response(api_client.list_stack_sets,
                                               {'Status': 'ACTIVE'},
                                               ['Summaries'])['Summaries']:
        stack_set = api_client.describe_stack_set(
            StackSetName=stack_set['StackSetName'])['StackSet']
        deployed_resources['stack_sets'][stack_set['StackSetName']] = stack_set
    printInfo(' - Found %d stacks.' % len(deployed_resources['stacks']))
    for stack_name in deployed_resources['stacks']:
        printInfo('   - %s' % stack_name)
    printInfo(' - Found %d stack sets.' %
              len(deployed_resources['stack_sets']))
    for stack_set_name in deployed_resources['stack_sets']:
        printInfo('   - %s' % stack_set_name)

    # Create the list of stacks to deploy
    templates = get_cloudformation_templates(args.stack_prefix)

    # Master stacks
    for stack_name in sorted(templates['master_stacks'].keys()):
        if stack_name not in deployed_resources['stacks']:
            create_stack(api_client,
                         stack_name,
                         templates['master_stacks'][stack_name]['file_path'],
                         wait_for_completion=templates['master_stacks']
                         [stack_name]['wait_for_completion'])
        elif resource_older_than_template(
                'stack', deployed_resources['stacks'][stack_name],
                templates['master_stacks'][stack_name]['file_path']):
            update_stack(api_client,
                         stack_name,
                         templates['master_stacks'][stack_name]['file_path'],
                         wait_for_completion=templates['master_stacks']
                         [stack_name]['wait_for_completion'])

    if len(configured_org_account_ids) == 0:
        printInfo(
            '\nNo account IDs that support stack sets were found, skipping stack set configuration.'
        )
        return

    # Stack sets
    for stack_set_name in sorted(templates['master_stack_sets'].keys()):
        if stack_set_name not in deployed_resources['stack_sets']:
            create_stack_set(
                api_client,
                stack_set_name,
                templates['master_stack_sets'][stack_set_name]['file_path'],
                wait_for_completion=True)
        elif resource_older_than_template(
                'stack_set', deployed_resources['stack_sets'][stack_set_name],
                templates['master_stack_sets'][stack_set_name]['file_path']):
            update_stack_set(
                api_client,
                stack_set_name,
                templates['master_stack_sets'][stack_set_name]['file_path'],
                wait_for_completion=True)
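
resource_older_than_template decides whether an existing stack or stack set should be updated. A hedged sketch of the comparison it is assumed to make, shown for the plain-stack case only (local template mtime vs. the stack's creation/last-update timestamp); the original helper may use different fields:

import os
from datetime import datetime, timezone

def resource_older_than_template(resource_type, resource, template_path):
    # resource_type is accepted to match the caller but unused in this sketch.
    # Assumed behavior: the deployed resource is stale when the template file
    # was modified after the stack was created or last updated.
    resource_time = resource.get('LastUpdatedTime', resource.get('CreationTime'))
    template_time = datetime.fromtimestamp(os.path.getmtime(template_path), tz=timezone.utc)
    return resource_time is None or template_time > resource_time
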
def main():

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.parser.add_argument('--managed',
                               dest='is_managed',
                               default=False,
                               action='store_true',
                               help='Create a managed policy.')
    parser.parser.add_argument(
        '--type',
        default=[None],
        nargs='+',
        choices=['group', 'managed', 'role', 'user'],
        help='Type of target that the policy will apply or be attached to.')
    parser.parser.add_argument(
        '--targets',
        default=[],
        nargs='+',
        help='Name of the IAM entity the policy will be added to (required for inline policies).'
    )
    parser.parser.add_argument(
        '--templates',
        default=[],
        nargs='+',
        help='Path to the template IAM policies that will be created.')
    parser.parser.add_argument('--save',
                               dest='save_locally',
                               default=False,
                               action='store_true',
                               help='Generate the policies and store them locally.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]
    target_type = args.type[0]
    if len(args.templates) == 0:
        printError(
            'Error: you must specify the path to the template IAM policies.')
        return 42
    if not args.is_managed and target_type is None:
        printError(
            'Error: you must either create a managed policy or specify the type of IAM entity the policy will be attached to.'
        )
        return 42
    if not args.is_managed and len(args.targets) < 1:
        printError(
            'Error: you must provide the name of at least one IAM %s you will attach this inline policy to.'
            % target_type)
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Get AWS account ID
    aws_account_id = get_aws_account_id(credentials)

    # Create the policies
    for template in args.templates:
        if not os.path.isfile(template):
            printError('Error: file \'%s\' does not exist.' % template)
            continue
        with open(template, 'rt') as f:
            policy = f.read()
        policy = re_aws_account_id.sub(aws_account_id, policy)
        policy_name = os.path.basename(template).split('.')[0]
        if not args.is_managed:
            callback = getattr(iam_client, 'put_' + target_type + '_policy')
            params = {}
            params['PolicyName'] = policy_name
            params['PolicyDocument'] = policy
            for target in args.targets:
                params[target_type.title() + 'Name'] = target
                try:
                    printInfo(
                        'Creating policy \'%s\' for the \'%s\' IAM %s...' %
                        (policy_name, target, target_type))
                    callback(**params)
                except Exception as e:
                    printException(e)
                    pass
        else:
            params = {}
            params['PolicyDocument'] = policy
            params['PolicyName'] = policy_name
            description = ''
            # Search for a description file
            descriptions_dir = os.path.join(os.path.dirname(template),
                                            'descriptions')
            if os.path.exists(descriptions_dir):
                description_file = os.path.join(
                    descriptions_dir,
                    os.path.basename(template).replace('.json', '.txt'))
                if os.path.isfile(description_file):
                    with open(description_file, 'rt') as f:
                        params['Description'] = f.read()
            elif prompt_4_yes_no(
                    'Do you want to add a description to the \'%s\' policy' %
                    policy_name):
                params['Description'] = prompt_4_value(
                    'Enter the policy description:')
            if 'Description' in params:
                params['Description'] = params['Description'].strip()
            printInfo('Creating policy \'%s\'...' % (policy_name))
            new_policy = iam_client.create_policy(**params)
            if len(args.targets):
                callback = getattr(iam_client,
                                   'attach_' + target_type + '_policy')
                for target in args.targets:
                    printInfo('Attaching policy to the \'%s\' IAM %s...' %
                              (target, target_type))
                    params = {}
                    params['PolicyArn'] = new_policy['Policy']['Arn']
                    params[target_type.title() + 'Name'] = target
                    callback(**params)

        if args.save_locally:
            with open('%s-%s.json' % (policy_name, profile_name), 'wt') as f:
                f.write(policy)
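
re_aws_account_id is a module-level regular expression that is not shown in this snippet; a minimal sketch of how it is assumed to be declared so that the substitution on the policy templates works (the placeholder token itself is an assumption):

import re

# Assumed placeholder: policy templates contain the literal token AWS_ACCOUNT_ID,
# which the script replaces with the real account ID before creating the policy.
re_aws_account_id = re.compile('AWS_ACCOUNT_ID')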