Example #1
0
def main():
    """Delete one or more IAM users.

    Returns 42 on any error so shell callers can detect failure.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    # OpinelArgumentParser.add_argument takes help_string=, not help=
    # (see the '--user-name' / 'user-name' usage in the other recipes here).
    parser.add_argument('user-name',
                        help_string='Name of the user(s) to be deleted.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Require at least one user name; 'not args.user_name' also guards
    # against a None default, where len() would raise TypeError.
    if not args.user_name:
        printError("Error, you need to provide at least one user name.")
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Delete users
    for user in args.user_name:
        delete_user(iam_client, user)
 def setup(self):
     """Prepare fixtures: verbose exceptions and paths to the test rulesets."""
     configPrintException(True)
     here = os.path.realpath(__file__)
     self.test_dir = os.path.dirname(here)
     self.test_ruleset_001 = os.path.join(self.test_dir, 'data/test-ruleset.json')
     self.test_ruleset_002 = os.path.join(self.test_dir,
                                          'data/test-ruleset-absolute-path.json')
Example #3
0
def main():
    """Build the HTML ruleset generator page and open it in a browser."""

    # Command-line arguments
    parser = RulesArgumentParser()
    args = parser.parse_args()

    # Debug level
    configPrintException(args.debug)

    # Make sure the installed opinel version is compatible
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Load the ruleset; individual rules are loaded later by the generator
    ruleset = Ruleset(filename=args.base_ruleset,
                      name=args.ruleset_name,
                      load_rules=False,
                      rules_dir=args.rules_dir)

    # Build the HTML generator and attach Scout2 metadata to the ruleset
    generator = RulesetGenerator(args.ruleset_name, args.generator_dir)
    ruleset.ruleset_generator_metadata = Scout2Config('default', None, None,
                                                      [], []).metadata
    output_path = generator.save(ruleset, args.force_write, args.debug)

    # Open the generated page in the default browser
    printInfo('Starting the HTML ruleset generator...')
    webbrowser.open('file://%s' % os.path.abspath(output_path), new=2)
Example #4
0
def main():
    """Delete one or more IAM users (argparse-level --user-name variant).

    Returns 42 on any error so shell callers can detect failure.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.parser.add_argument('--user-name',
                               dest='user_name',
                               default=None,
                               nargs='+',
                               required=True,  # was the string 'True'
                               help='Name of the user(s) to be deleted.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs; bail out on failure, consistent with the other recipes
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Delete users
    for user in args.user_name:
        delete_user(iam_client, user)
 def setup(self):
     """Prepare IAM test fixtures: credentials, API client, and cleanup lists."""
     configPrintException(True)
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] is None:  # identity check per PEP 8, was '== None'
         self.creds = read_creds('travislike')
     self.api_client = connect_service('iam', self.creds)
     # Python version stripped to word characters; presumably used to build
     # unique test resource names -- confirm against the test bodies.
     self.python = re.sub(r'\W+', '', sys.version)
     self.cleanup = {'groups': [], 'users': []}
Example #6
0
 def setup(self):
     """Swap the real AWS config directory for a scratch copy seeded with test data."""
     configPrintException(True)
     self.tmp_aws_dir = '%s-opineltest' % aws_dir
     # Move any existing AWS config directory out of the way first
     if os.path.isdir(aws_dir):
         os.rename(aws_dir, self.tmp_aws_dir)
     os.mkdir(aws_dir)
     for name in ('config', 'credentials'):
         shutil.copyfile('tests/data/%s' % name, os.path.join(aws_dir, name))
Example #7
0
def main():
    """Enable an MFA device for an IAM user and record its serial number in
    the AWS credentials file.

    Returns 42 on any error so shell callers can detect failure.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument(
        'user-name',
        help_string=
        'Your username (automatically fetched using iam:GetUser if not provided).'
    )

    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]
    user_name = args.user_name[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        printError('Error: failed to create IAM API client.')
        return 42

    # Set the user name via iam:GetUser when it was not provided
    if not user_name:
        try:
            printInfo('Searching for username...')
            user_name = iam_client.get_user()['User']['UserName']
            if not user_name:
                # Error messages go through printError, matching the rest of the file
                printError('Error: could not find user name to enable MFA for.')
                return 42
        except Exception as e:
            # Report the exception; configPrintException() only configures the
            # debug level and was being misused here.
            printException(e)

    # Create and activate the MFA device
    credentials['SerialNumber'] = enable_mfa(iam_client, user_name)

    # Update the credentials file
    write_creds_to_aws_credentials_file(profile_name, credentials)
    sample_command = 'awsrecipes_init_sts_session.py %s' % (
        ('--profile %s' % profile_name) if profile_name != 'default' else '')
    printInfo('Your credentials file has been updated.\n' \
              'You may now initiate STS sessions to access the AWS APIs with the following command:\n' \
              '\n    %s\n' % sample_command)
Example #8
0
 def setup(self):
     """Prepare CloudFormation test fixtures in us-east-1."""
     configPrintException(True)
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] is None:  # identity check per PEP 8, was '== None'
         self.creds = read_creds('travislike')
     self.api_client = connect_service('cloudformation', self.creds,
                                       'us-east-1')
     # Python version stripped to word characters; presumably used to build
     # unique test resource names -- confirm against the test bodies.
     self.python = re.sub(r'\W+', '', sys.version)
     self.cleanup = {'stacks': [], 'stacksets': []}
 def setup(self):
     """Prepare Organizations test fixtures, including an assumed-role client."""
     configPrintException(True)
     self.creds = read_creds_from_environment_variables()
     if self.creds['AccessKeyId'] is None:  # identity check per PEP 8, was '== None'
         self.creds = read_creds('travislike')
     self.org_creds = assume_role(
         'OpinelUnitTest', self.creds,
         'arn:aws:iam::990492604467:role/OpinelUnitTest',
         'opinelunittesting')
     # Client built with the base (non-assumed) creds; the name suggests it is
     # used to exercise failure paths -- confirm against the test bodies.
     self.badapi_client = connect_service('organizations', self.creds,
                                          'us-east-1')
     self.api_client = connect_service('organizations', self.org_creds,
                                       'us-east-1')
def main():
    """Create the common and category IAM groups used to organize users.

    Groups that already exist are skipped silently; other errors are printed.
    Returns 42 on any setup error.
    """

    # Parse arguments
    parser = OpinelArgumentParser(os.path.basename(__file__))
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('common-groups',
                        default=[],
                        nargs='+',
                        help='List of groups each IAM user should belong to.')
    parser.add_argument(
        'category-groups',
        default=[],
        nargs='+',
        help='List of category groups; each IAM user must belong to one.')
    parser.add_argument(
        'category-regex',
        default=[],
        nargs='+',
        help='List of regex enabling auto-assigment of category groups.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Create groups, ignoring those that already exist
    for group in args.category_groups + args.common_groups:
        try:
            printInfo('Creating group %s...' % group)
            iam_client.create_group(GroupName=group)
        except Exception as e:
            # Only botocore ClientErrors carry .response; guard the lookup so
            # that other exception types are reported instead of raising
            # AttributeError inside the handler.
            error_code = getattr(e, 'response', {}).get('Error', {}).get('Code')
            if error_code != 'EntityAlreadyExists':
                printException(e)
def main():
    """Read credentials for a profile (prompting for MFA as needed) and
    optionally assume a role identified by --role-arn.

    Returns 42 when requirements are unmet, credentials cannot be read,
    or --role-arn is given without --role-name.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('mfa-serial')
    parser.add_argument('mfa-code')
    # Role options are registered on the underlying argparse parser directly
    parser.parser.add_argument('--role-session-name',
                                dest='role_session_name',
                                default=None,
                                help='Identifier for the assumed role\'s session.')
    parser.parser.add_argument('--role-name',
                                dest='role_name',
                                default=None,
                                help='Identifier for the assumed role.')
    parser.parser.add_argument('--role-arn',
                                dest='role_arn',
                                default=None,
                                help='ARN of the assumed role.')
    parser.parser.add_argument('--external-id',
                                dest='external_id',
                                default=None,
                                help='External ID to use when assuming the role.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Read creds automatically prompts for MFA code and assumes a role if role is already configured
    try:
        credentials = read_creds(args.profile[0], mfa_code = args.mfa_code, mfa_serial_arg = args.mfa_serial)
    except Exception as e:
        printException(e)
        return 42

    # If the role's ARN was provided...
    if args.role_arn:
        if not args.role_name:
            printError('Error: you must specify a name for this role.')
            return 42
        # Default session name embeds a filesystem-safe UTC timestamp
        role_session_name = args.role_session_name if args.role_session_name else 'aws-recipes-%s' % str(datetime.datetime.utcnow()).replace(' ', '_').replace(':','-')
        assume_role(args.role_name, credentials, args.role_arn, role_session_name)
def main():
    """Force-initialize an STS session for the given profile, prompting for MFA."""

    # Command-line arguments
    parser = OpinelArgumentParser()
    for argument in ('debug', 'profile', 'mfa-serial', 'mfa-code'):
        parser.add_argument(argument)
    args = parser.parse_args()

    # Debug level
    configPrintException(args.debug)

    # Make sure the installed opinel version is compatible
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # read_creds() prompts for the MFA code and initiates a session when
    # expired; force_init=True always starts a fresh one.
    credentials = read_creds(args.profile[0],
                             mfa_code=args.mfa_code,
                             mfa_serial_arg=args.mfa_serial,
                             force_init=True)
    if not credentials['AccessKeyId']:
        return 42
Example #13
0
class TestScout2CloudWatchUtilsClass:
    """Tests for the Scout2 CloudWatch utility functions."""

    # Runs once at class-definition time: turn on verbose exception output
    configPrintException(True)

    def test_get_cloudwatch_region(self):
        """Exercise get_cloudwatch_region for a single region."""
        # TODO: change to us-east-1
        credentials = read_creds('default')
        service_config = {'regions': {'us-east-1': {}}}
        get_cloudwatch_region(params={'region': 'us-east-1',
                                      'creds': credentials,
                                      'cloudwatch_config': service_config})

    def test_get_cloudwatch_info(self):
        """Exercise get_cloudwatch_info (multi-threaded get_cloudwatch_region)
        for us-east-1/us-west-1, then with an empty region intersection."""
        credentials = read_creds('default')
        service_config = {'regions': {'us-east-1': {}, 'us-west-1': {}}}  # , 'cn-north-1': {}}}
        for partition in ('aws', 'aws-us-gov'):
            get_cloudwatch_info(credentials, service_config,
                                ['us-east-1', 'us-west-1'], partition)

    #        get_cloudwatch_info(credentials, service_config, ['us-gov-west-1'], 'aws-us-gov')

    def test_cloudwatch_status_init(self):
        """Smoke test for cloudwatch_status_init."""
        cloudwatch_status_init()

    def test_cloudwatch_status(self):
        """Smoke test for cloudwatch_status with each argument variant."""
        cloudwatch_status(True)
        cloudwatch_status(False)
        cloudwatch_status()

    def test_formatted_status(self):
        """Smoke test for formatted_status."""
        formatted_status(1, 42, True)
        formatted_status(42, 1, False)
Example #14
0
def main():
    """Build the HTML ruleset generator (Scout Suite variant) and open it."""

    # Command-line arguments
    parser = RulesArgumentParser()
    args = parser.parse_args()

    # Debug level
    configPrintException(args.debug)

    # Make sure the installed opinel version is compatible
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Load the ruleset in generator mode
    ruleset = Ruleset(filename=args.base_ruleset,
                      name=args.ruleset_name,
                      rules_dir=args.rules_dir,
                      ruleset_generator=True)

    # Build the HTML generator
    generator = RulesetGenerator(args.ruleset_name, args.generator_dir)

    #FIXME this is broken in Scout Suite
    # Create a cloud provider object
    cloud_provider = get_provider(provider='aws', profile='default')

    ruleset.ruleset_generator_metadata = Scout2Config('default', None, None,
                                                      [], []).metadata

    generator_path = generator.save(ruleset, args.force_write, args.debug)

    # Open the generated page in a browser unless suppressed on the command line
    if not args.no_browser:
        printInfo('Starting the HTML ruleset generator...')
        webbrowser.open('file://%s' % os.path.abspath(generator_path), new=2)
Example #15
0
def main(args):
    """Inventory AWS resources by invoking the selected list/describe
    operations across services, regions, and profiles.

    Exits early for --version / --list-svcs; raises EnvironmentError when the
    requested services or regions are invalid or yield nothing to do.
    """
    setup_logging(args.debug)

    if args.version:
        print(aws_inventory.__version__)
        return

    api_model = build_api_model()

    if args.list_svcs:
        print('\n'.join(sorted(filter_services(api_model))))
        return

    # configure the debug level for opinel
    configPrintException(args.debug)

    # validate services against API model #

    available_services = api_model.keys()

    if args.services:
        invalid_included_services = [
            svc for svc in args.services if svc not in available_services
        ]
        if invalid_included_services:
            raise EnvironmentError('Invalid service(s) specified: {}'.format(
                ', '.join(invalid_included_services)))

    if args.excluded_services:
        invalid_excluded_services = [
            svc for svc in args.excluded_services
            if svc not in available_services
        ]
        if invalid_excluded_services:
            raise EnvironmentError('Invalid service(s) to exclude: {}'.format(
                ', '.join(invalid_excluded_services)))

    # validate regions against API model #

    if args.regions:
        available_regions = set()
        for svc in available_services:
            available_regions.update(api_model[svc]['regions'])
        invalid_regions = [
            region for region in args.regions
            if region not in available_regions
        ]
        if invalid_regions:
            raise EnvironmentError('Invalid region(s) specified: {}'.format(
                ', '.join(invalid_regions)))

    # create the list of services to analyze
    services = filter_services(api_model, frozenset(args.services),
                               frozenset(args.excluded_services))
    if not services:
        raise EnvironmentError('List of AWS services to be analyzed is empty.')
    LOGGER.debug('%d AWS service(s) to inspect: %s.', len(services),
                 ', '.join(services))

    # Drop blacklisted operations and restrict to the requested regions
    op_blacklist_parser = aws_inventory.blacklist.OpBlacklistParser(
        args.op_blacklist, api_model)
    service_descriptors = filter_operations(api_model, op_blacklist_parser,
                                            args.regions, services)
    if not service_descriptors:
        raise EnvironmentError(
            'No operations to invoke for specifed AWS services and regions.')

    # Total operation count is ops x regions, summed over services
    ops_count = 0
    for svc_name in service_descriptors:
        ops_count += (len(service_descriptors[svc_name]['ops']) *
                      len(service_descriptors[svc_name]['regions']))
        if args.list_operations:
            print('[{}]\n{}\n'.format(
                svc_name, '\n'.join(service_descriptors[svc_name]['ops'])
                or '# NONE'))

    profiles = [args.profile]
    if args.regex:
        # find unique profiles that match the regex
        profiles = list(
            set(
                map(lambda profile: profile.split()[-1],
                    get_profiles_from_aws_credentials_file())))
        profiles = list(
            filter(lambda profile: re.search(args.profile, profile), profiles))

    # Presumably disables tqdm's monitor thread to avoid progress-bar
    # glitches with many bars -- confirm against tqdm docs.
    tqdm.tqdm.monitor_interval = 0

    if args.list_operations:
        print('Total operations to invoke: {}'.format(ops_count *
                                                      len(profiles)))
    else:
        LOGGER.debug('Total operations to invoke: %d.',
                     ops_count * len(profiles))
        for profile in profiles:
            args.profile = profile
            # Only produce the data files requested via --output
            gui_data_file = data_file_name(
                args.gui_file_template,
                profile) if 'gui' in args.output else None
            response_data_file = data_file_name(
                args.response_file_template,
                profile) if 'response' in args.output else None
            exception_data_file = data_file_name(
                args.exception_file_template,
                profile) if 'exception' in args.output else None
            aws_inventory.invoker.ApiInvoker(
                args, service_descriptors, ops_count, gui_data_file,
                response_data_file, exception_data_file).probe_services()
def main():
    """Configure an AWS credentials profile with MFA, sourcing the long-lived
    keys from a CSV export, an existing profile, or interactive prompts.

    Returns 42 on error and None when the user aborts the overwrite prompt.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('csv-credentials')
    parser.add_argument('mfa-serial')
    parser.add_argument('mfa-code')
    parser.parser.add_argument('--role-arn',
                                dest='role_arn',
                                default=None,
                                help='ARN of the assumed role.')
    parser.parser.add_argument('--external-id',
                                dest='external_id',
                                default=None,
                                help='External ID to use when assuming the role.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]

    if args.csv_credentials:
        # Read credentials from a CSV file
        credentials = {}
        credentials['AccessKeyId'], credentials['SecretAccessKey'], credentials['SerialNumber'] = read_creds_from_csv(args.csv_credentials)
        if not credentials['AccessKeyId'] or not credentials['SecretAccessKey']:
            printError('Failed to read credentials from %s' % args.csv_credentials)
            return 42
        use_found_credentials = True
    else:
        # Check for migration from existing profile to no-mfa profile
        use_found_credentials = False
        credentials = read_creds_from_aws_credentials_file(profile_name)
        # NOTE(review): PEP 8 prefers 'is None' / 'is not None' over ==/!= below
        if 'AccessKeyId' in credentials and credentials['AccessKeyId'] != None and credentials['SecretAccessKey'] != None and credentials['SerialNumber'] == None and credentials['SessionToken'] == None:
            if prompt_4_yes_no('Found long-lived credentials for the profile \'%s\'. Do you want to use those when configuring MFA' % profile_name):
               use_found_credentials = True
               iam_client = connect_service('iam', credentials)
               try:
                   # Best effort: auto-discover the MFA serial for this user
                   printInfo('Trying to read the MFA serial number associated with this IAM user...')
                   user_name = iam_client.get_user()['User']['UserName']
                   mfa_devices = iam_client.list_mfa_devices(UserName = user_name)['MFADevices']
                   credentials['SerialNumber'] = mfa_devices[0]['SerialNumber']
               except Exception as e:
                   printException(e)
                   pass

    if not use_found_credentials:
       # Get values
        credentials['AccessKeyId'] = prompt_4_value('AWS Access Key ID: ', no_confirm = True)
        credentials['SecretAccessKey'] = prompt_4_value('AWS Secret Access Key: ', no_confirm = True)
    if 'SerialNumber' not in credentials or not credentials['SerialNumber']:
        credentials['SerialNumber'] = prompt_4_mfa_serial()

    # Check for overwrite; loop until a free profile name is chosen or the
    # user agrees to overwrite/aborts
    while True:
        c = read_creds_from_aws_credentials_file(profile_name)
        if 'AccessKeyId' in c and c['AccessKeyId']:
            if not prompt_4_yes_no('The profile \'%s\' already exists. Do you want to overwrite the existing values' % profile_name):
                if not prompt_4_yes_no('Do you want to create a new profile with these credentials'):
                    printError('Configuration aborted.')
                    return
                profile_name = prompt_4_value('Profile name: ')
            else:
                break
        else:
            break

    # Write values to credentials file
    write_creds_to_aws_credentials_file(profile_name, credentials)

    # Delete CSV file?
    if args.csv_credentials and prompt_4_yes_no('Do you want to delete the CSV file that contains your long-lived credentials?'):
        os.remove(args.csv_credentials)
def main():
    """Create IAM policies from JSON templates, either as managed policies or
    as inline policies attached to groups/roles/users, optionally saving the
    rendered documents locally.

    Returns 42 on argument or credential errors.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('managed',
                        dest='is_managed',
                        default=False,
                        action='store_true',
                        help='Create a managed policy.')
    parser.add_argument(
        'type',
        default=[None],
        nargs='+',
        choices=['group', 'managed', 'role', 'user'],
        help='Type of target that the policy will apply or be attached to.')
    parser.add_argument(
        'targets',
        default=[],
        nargs='+',
        help=
        'Name of the IAM entity the policy will be added to (required for inline policies).'
    )
    parser.add_argument(
        'templates',
        default=[],
        nargs='+',
        help='Path to the template IAM policies that will be created.')
    parser.add_argument('save',
                        dest='save_locally',
                        default=False,
                        action='store_true',
                        help='Generates the policies and store them locally.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]
    target_type = args.type[0]
    if len(args.templates) == 0:
        printError(
            'Error: you must specify the path the template IAM policies.')
        return 42
    if not args.is_managed and target_type == None:
        printError(
            'Error: you must either create a managed policy or specify the type of IAM entity the policy will be attached to.'
        )
        return 42
    # NOTE(review): this condition is unreachable -- the previous check
    # already returned when target_type is None, so it can never be True
    # here.  The intent was probably to require at least one target for
    # inline (non-managed) policies; confirm and fix.
    if not args.is_managed and target_type == None and len(args.targets) < 1:
        printError(
            'Error: you must provide the name of at least one IAM %s you will attach this inline policy to.'
            % target_type)
        return 42

    # Read creds
    credentials = read_creds(args.profile[0])
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM APIs
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Get AWS account ID
    aws_account_id = get_aws_account_id(credentials)

    # Create the policies
    for template in args.templates:
        if not os.path.isfile(template):
            printError('Error: file \'%s\' does not exist.' % template)
            continue
        with open(template, 'rt') as f:
            policy = f.read()
        # Substitute the real account ID into the template document
        policy = re_aws_account_id.sub(aws_account_id, policy)
        policy_name = os.path.basename(template).split('.')[0]
        if not args.is_managed:
            # Inline policy: call put_<type>_policy for each named target
            callback = getattr(iam_client, 'put_' + target_type + '_policy')
            params = {}
            params['PolicyName'] = policy_name
            params['PolicyDocument'] = policy
            for target in args.targets:
                params[target_type.title() + 'Name'] = target
                try:
                    printInfo(
                        'Creating policy \'%s\' for the \'%s\' IAM %s...' %
                        (policy_name, target, target_type))
                    callback(**params)
                except Exception as e:
                    printException(e)
                    pass
        else:
            # Managed policy: create it once, then attach it to each target
            params = {}
            params['PolicyDocument'] = policy
            params['PolicyName'] = policy_name
            description = ''
            # Search for a description file
            descriptions_dir = os.path.join(os.path.dirname(template),
                                            'descriptions')
            if os.path.exists(descriptions_dir):
                description_file = os.path.join(
                    descriptions_dir,
                    os.path.basename(template).replace('.json', '.txt'))
                if os.path.isfile(description_file):
                    with open(description_file, 'rt') as f:
                        params['Description'] = f.read()
            elif prompt_4_yes_no(
                    'Do you want to add a description to the \'%s\' policy' %
                    policy_name):
                params['Description'] = prompt_4_value(
                    'Enter the policy description:')
            # NOTE(review): raises KeyError when no description file was found
            # and the user declined the prompt -- params['Description'] may be
            # unset here.  Confirm and guard.
            params['Description'] = params['Description'].strip()
            printInfo('Creating policy \'%s\'...' % (policy_name))
            new_policy = iam_client.create_policy(**params)
            if len(args.targets):
                callback = getattr(iam_client,
                                   'attach_' + target_type + '_policy')
                for target in args.targets:
                    printInfo('Attaching policy to the \'%s\' IAM %s...' %
                              (target, target_type))
                    params = {}
                    params['PolicyArn'] = new_policy['Policy']['Arn']
                    params[target_type.title() + 'Name'] = target
                    callback(**params)

        if args.save_locally:
            # Write the rendered policy document to a local file
            with open('%s-%s.json' % (policy_name, profile_name), 'wt') as f:
                f.write(policy)
                f.close()
Example #18
0
def main():
    """Fetch and merge all inline and managed IAM policies that apply to the
    requested groups, roles, users, and/or managed policy ARNs, writing the
    aggregated permissions out per resource.

    Returns 42 on any setup error.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    # NOTE(review): other recipes in this file pass help_string= to
    # OpinelArgumentParser.add_argument -- confirm 'help' is a valid keyword.
    parser.add_argument('user-name', help='Name of the IAM user(s).')
    parser.parser.add_argument('--all-users',
                               dest='all_users',
                               default=False,
                               action='store_true',
                               help='Go through all IAM users')
    parser.parser.add_argument(
        '--arn',
        dest='arn',
        default=[],
        nargs='+',
        help='ARN of the target group(s), role(s), or user(s)')
    parser.parser.add_argument('--group-name',
                               dest='group_name',
                               default=[],
                               nargs='+',
                               help='Name of the IAM group(s)')
    parser.parser.add_argument('--all-groups',
                               dest='all_groups',
                               default=False,
                               action='store_true',
                               help='Go through all IAM groups')
    parser.parser.add_argument('--role-name',
                               dest='role_name',
                               default=[],
                               nargs='+',
                               help='Name of the IAM role(s)')
    parser.parser.add_argument('--all-roles',
                               dest='all_roles',
                               default=False,
                               action='store_true',
                               help='Go through all IAM roles')
    parser.parser.add_argument('--policy-arn',
                               dest='policy_arn',
                               default=[],
                               nargs='+',
                               help='ARN of the IAM policy/ies')
    parser.parser.add_argument('--all',
                               dest='all',
                               default=False,
                               action='store_true',
                               help='Go through all IAM resources')

    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Normalize targets into (resource_type, resource_name) tuples
    targets = []
    for arn in args.arn:
        arn_match = re_arn.match(arn)
        if arn_match:
            # Resource portion of the ARN, e.g. 'user/path/name'
            resource = arn_match.groups()[4].split('/')
            targets.append((resource[0], resource[-1]))
    for group_name in args.group_name:
        if group_name:
            targets.append(('group', group_name))
    for role_name in args.role_name:
        if role_name:
            targets.append(('role', role_name))
    for user_name in args.user_name:
        if user_name:
            targets.append(('user', user_name))
    if args.all or args.all_groups:
        printInfo('Fetching all IAM groups...')
        for group in handle_truncated_response(iam_client.list_groups, {},
                                               ['Groups'])['Groups']:
            targets.append(('group', group['GroupName']))
    if args.all or args.all_roles:
        printInfo('Fetching all IAM roles...')
        for role in handle_truncated_response(iam_client.list_roles, {},
                                              ['Roles'])['Roles']:
            targets.append(('role', role['RoleName']))
    if args.all or args.all_users:
        printInfo('Fetching all IAM users...')
        for user in handle_truncated_response(iam_client.list_users, {},
                                              ['Users'])['Users']:
            targets.append(('user', user['UserName']))

    # Get all policies that apply to the targets and aggregate them into a single file
    printInfo('Fetching all inline and managed policies in use...')
    managed_policies = {}
    for resource_type, resource_name in targets:
        policy_documents = get_policies(iam_client, managed_policies,
                                        resource_type, resource_name)
        write_permissions(merge_policies(policy_documents), resource_type,
                          resource_name)

    # Get requested managed policies
    for policy_arn in args.policy_arn:
        policy_documents = [
            get_managed_policy_document(iam_client, policy_arn,
                                        managed_policies)
        ]
        write_permissions(merge_policies(policy_documents), 'policy',
                          policy_arn)
Example #19
0
 def setup(self):
     """Enable verbose exception printing for this test class."""
     configPrintException(True)
def main():
    """
    List the private and public IP addresses of EC2 instances across regions
    and write them to targets-<profile>-prv.txt / targets-<profile>-pub.txt.

    :return: 42 on error, None on success
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--filters',
                               dest='filters',
                               default=None,
                               help='')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Build list of region
    regions = build_region_list('ec2', args.regions, args.partition_name)
    printInfo(str(regions))

    # Build filters
    filters = json.loads(args.filters) if args.filters else None

    # List all EC2 instances
    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Build the API call arguments once, outside the loop; the original code
    # rebound the name ``args`` here, shadowing the parsed CLI namespace.
    describe_args = {'Filters': filters} if filters else {}
    instances = []
    for region in regions:
        printInfo('Fetching instances in %s...' % region)
        ec2_client = connect_service('ec2', credentials, region_name=region)
        reservations = handle_truncated_response(
            ec2_client.describe_instances, describe_args,
            ['Reservations'])['Reservations']
        for r in reservations:
            instances += r['Instances']
    printInfo(' Found %d instances' % len(instances))

    # Build list of private and public IPs
    prvips = {}
    pubips = {}
    for i in instances:
        security_groups = i['SecurityGroups']
        for eni in i['NetworkInterfaces']:
            for prvip in eni['PrivateIpAddresses']:
                prvips[prvip['PrivateIpAddress']] = {
                    'security_groups': security_groups
                }
                # A private IP with an association also carries a public IP
                if 'Association' in prvip:
                    pubips[prvip['Association']['PublicIp']] = {
                        'security_groups': security_groups
                    }

    # Create target files
    with open('targets-%s-prv.txt' % profile_name, 'wt') as f:
        for prvip in prvips:
            f.write('%s\n' % prvip)
    with open('targets-%s-pub.txt' % profile_name, 'wt') as f:
        for pubip in pubips:
            f.write('%s\n' % pubip)
def main():
    """
    Generate an ip-ranges-<profile>.json file for each profile, with CIDRs
    entered interactively, loaded from CSV files, or discovered from public
    IP addresses in use in the AWS account.

    :return: 42 on error, None on success
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('force')
    parser.add_argument('dry-run')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.parser.add_argument('--interactive',
                        dest='interactive',
                        default=False,
                        action='store_true',
                        help='Interactive prompt to manually enter CIDRs.')
    parser.parser.add_argument('--csv-ip-ranges',
                        dest='csv_ip_ranges',
                        default=[],
                        nargs='+',
                        help='CSV file(s) containing CIDRs information.')
    parser.parser.add_argument('--skip-first-line',
                        dest='skip_first_line',
                        default=False,
                        action='store_true',
                        help='Skip first line when parsing CSV file.')
    parser.parser.add_argument('--attributes',
                        dest='attributes',
                        default=[],
                        nargs='+',
                        help='Name of the attributes to enter for each CIDR.')
    parser.parser.add_argument('--mappings',
                        dest='mappings',
                        default=[],
                        nargs='+',
                        help='Column number matching attributes when headers differ.')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Initialize the list of regions to work with
    regions = build_region_list('ec2', args.regions, args.partition_name)

    # For each profile/environment...
    for profile_name in args.profile:

        # Interactive mode
        if args.interactive:

            # Initialize prefixes
            # Copy the CLI list: the appends below must not mutate
            # args.attributes and leak into the next profile iteration
            attributes = list(args.attributes)
            filename = 'ip-ranges-%s.json' % profile_name
            if os.path.isfile(filename):
                printInfo('Loading existing IP ranges from %s' % filename)
                prefixes = read_ip_ranges(filename)
                # Initialize attributes from existing values
                if attributes == []:
                    for prefix in prefixes:
                        for key in prefix:
                            if key not in attributes:
                                attributes.append(key)
            else:
                prefixes = []

            # IP prefix does not need to be specified as an attribute
            attributes = [a for a in attributes if a != 'ip_prefix']

            # Prompt for new entries
            while prompt_4_yes_no('Add a new IP prefix to the ip ranges'):
                ip_prefix = prompt_4_value('Enter the new IP prefix:')
                obj = {}
                for a in attributes:
                    obj[a] = prompt_4_value('Enter the \'%s\' value:' % a)
                prefixes.append(new_prefix(ip_prefix, obj))

        # Support loading from CSV file
        elif len(args.csv_ip_ranges) > 0:

            # Initialize prefixes
            prefixes = []

            # Load CSV file contents
            for filename in args.csv_ip_ranges:
                with open(filename, 'rt') as f:
                    csv_contents = f.readlines()

                # Initialize mappings
                # Copy again so appending 'ip_prefix' below does not grow
                # args.attributes once per CSV file
                attributes = list(args.attributes)
                mappings = {}
                if attributes == []:
                    # Follow structure of first line
                    headers = csv_contents.pop(0).strip().split(',')
                    for index, attribute in enumerate(headers):
                        mappings[attribute] = index
                elif attributes and args.mappings == []:
                    # Follow structure of first line but only map a subset of fields
                    headers = csv_contents.pop(0).strip().split(',')
                    attributes.append('ip_prefix')
                    for attribute in set(attributes):
                        mappings[attribute] = headers.index(attribute)
                else:
                    # Indices of columns are provided as an argument
                    for index, attribute in enumerate(attributes):
                        mappings[attribute] = int(args.mappings[index])
                    if args.skip_first_line:
                        csv_contents.pop(0)

                # For each line...
                for line in csv_contents:
                    ip_prefix = {}
                    values = line.strip().split(',')
                    if len(values) < len(mappings):
                        continue
                    for attribute in mappings:
                        ip_prefix[attribute] = values[mappings[attribute]]
                    # Combine separate ip + mask columns into a single CIDR
                    if 'ip_prefix' in mappings and 'mask' in mappings:
                        ip = ip_prefix.pop('ip_prefix')
                        mask = ip_prefix.pop('mask')
                        ip_prefix['ip_prefix'] = '%s/%s' % (ip, mask.replace('/',''))
                    prefixes.append(ip_prefix)

        # AWS mode
        else:

            # Initialize IP addresses
            printInfo('Fetching public IP information for the \'%s\' environment...' % profile_name)
            ip_addresses = {}

            # Search for AWS credentials
            credentials = read_creds(profile_name)
            if not credentials['AccessKeyId']:
                return 42

            # For each region...
            for region in regions:

                # Connect to EC2
                ec2_client = connect_service('ec2', credentials, region)
                if not ec2_client:
                    continue

                # Get public IP addresses associated with EC2 instances
                printInfo('...in %s: EC2 instances' % region)
                reservations = handle_truncated_response(ec2_client.describe_instances, {}, ['Reservations'])
                for reservation in reservations['Reservations']:
                    for i in reservation['Instances']:
                        if 'PublicIpAddress' in i:
                            ip_addresses[i['PublicIpAddress']] = new_ip_info(region, i['InstanceId'], False)
                            get_name(i, ip_addresses[i['PublicIpAddress']], 'InstanceId')
                        if 'NetworkInterfaces' in i:
                            for eni in i['NetworkInterfaces']:
                                if 'Association' in eni:
                                    ip_addresses[eni['Association']['PublicIp']] = new_ip_info(region, i['InstanceId'], False) # At that point, we don't know whether it's an EIP or not...
                                    get_name(i, ip_addresses[eni['Association']['PublicIp']], 'InstanceId')

                # Get all EIPs (to handle unassigned cases)
                printInfo('...in %s: Elastic IP addresses' % region)
                eips = handle_truncated_response(ec2_client.describe_addresses, {}, ['Addresses'])
                for eip in eips['Addresses']:
                    instance_id = eip['InstanceId'] if 'InstanceId' in eip else None
                    # EC2-Classic non associated EIPs have an empty string for instance ID (instead of lacking the attribute in VPC)
                    if instance_id == '':
                        instance_id = None
                    ip_addresses[eip['PublicIp']] = new_ip_info(region, instance_id, True)
                    ip_addresses[eip['PublicIp']]['name'] = instance_id

            # Format once, after collecting IP addresses from every region;
            # the original rebuilt this list inside the region loop
            prefixes = []
            for ip in ip_addresses:
                prefixes.append(new_prefix(ip, ip_addresses[ip]))

        # Generate an ip-ranges-<profile>.json file
        save_ip_ranges(profile_name, prefixes, args.force_write, args.debug)
# Example #22
# 0
def main():
    """
    List resources that match a rule configuration (or a raw path) against a
    previously generated Scout2 report, for one or more profiles, and print
    the formatted output.

    :return: 42 if the opinel requirements are not met, None otherwise
    """

    # Parse arguments
    parser = ListallArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Support multiple environments
    for profile_name in args.profile:

        # Load the config
        try:
            report = Scout2Report(profile_name, args.report_dir, args.timestamp)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            services = aws_config['service_list']
        except Exception as e:
            printException(e)
            printError('Error, failed to load the configuration for profile %s' % profile_name)
            continue


        # Create a ruleset with only whatever rules were specified...
        if args.config:
            rule_filename = args.config
            ruleset = TmpRuleset(rule_dirs = [os.getcwd()], rule_filename = args.config, rule_args = args.config_args)
        elif len(args.path) > 0:
            # Create a local tmp rule targeting the requested resource path
            rule_dict = {'description': 'artifact'}
            rule_dict['path'] = args.path[0]
            rule_dict['conditions'] = []
            rule_filename = 'listall-artifact.json'
            with open(os.path.join(os.getcwd(), rule_filename), 'wt') as f:
                f.write(json.dumps(rule_dict))
            ruleset = TmpRuleset(rule_dirs = [os.getcwd()], rule_filename = rule_filename, rule_args = [])
        else:
            printError('Error, you must provide either a rule configuration file or the path to the resources targeted.')
            continue


        # Process the rule
        pe = ProcessingEngine(ruleset)
        pe.run(aws_config, skip_dashboard = True)

        # Retrieve items matched by the rule
        rule = ruleset.rules[rule_filename][0]
        rule_service = rule.service.lower()
        rule_key = rule.key
        rule_type = rule.rule_type
        resources = aws_config['services'][rule_service][rule_type][rule_key]['items']

        # Set the keys to output
        if len(args.keys):
            # 1. Explicitly provided on the CLI
            rule.keys = args.keys
        elif len(args.keys_file):
            # 2. Explicitly provided files that contain the list of keys
            rule.keys = []
            for filename in args.keys_file:
                with open(filename, 'rt') as f:
                    rule.keys += json.load(f)['keys']
        else:
            try:
                # 3. Load default set of keys based on path
                target_path = rule.display_path if hasattr(rule, 'display_path') else rule.path
                listall_configs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'output/data/listall-configs')
                target_file = os.path.join(listall_configs_dir, '%s.json' % target_path)
                if os.path.isfile(target_file):
                    with open(target_file, 'rt') as f:
                        rule.keys = json.load(f)['keys']
            # Narrowed from a bare ``except:`` which would also swallow
            # SystemExit / KeyboardInterrupt
            except Exception:
                # 4. Print the object name
                rule.keys = ['name']

        # Prepare the output format
        (lines, template) = format_listall_output(args.format_file[0], None, args.format, rule)

        # Print the output
        printInfo(generate_listall_output(lines, resources, aws_config, template, []))
# Example #23
# 0
def main():
    """
    Run Scout2: fetch AWS configuration data (or reload a local copy),
    analyze it against a ruleset, and generate an HTML report.

    :return: 42 on error, 130 if cancelled by the user, None otherwise
    """

    # Parse arguments
    parser = Scout2ArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Set the profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    if not args.fetch_local:
        credentials = read_creds(args.profile[0], args.csv_credentials, args.mfa_serial, args.mfa_code)
        if credentials['AccessKeyId'] is None:
            return 42

    # Create a new Scout2 config
    report = Scout2Report(profile_name, args.report_dir, args.timestamp)
    aws_config = Scout2Config(profile_name, args.report_dir, args.timestamp, args.services, args.skipped_services)

    if not args.fetch_local:

        # Fetch data from AWS APIs if not running a local analysis
        try:
            aws_config.fetch(credentials, regions=args.regions, partition_name=args.partition_name)
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130
        aws_config = report.jsrw.to_dict(aws_config)

        # Update means we reload the whole config and overwrite part of it
        if args.update:
            new_aws_config = copy.deepcopy(aws_config)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            for service in new_aws_config['service_list']:
                # Per service only for now, may add per region & per VPC later...
                aws_config['services'][service] = new_aws_config['services'][service]

    else:

        # Reload to flatten everything into a python dictionary
        aws_config = report.jsrw.load_from_file(AWSCONFIG)

    # Pre processing
    preprocessing(aws_config, args.ip_ranges, args.ip_ranges_name_key)

    # Analyze config
    ruleset = Ruleset(profile_name, filename = args.ruleset, ip_ranges = args.ip_ranges)
    ruleset.analyze(aws_config)

    # Create display filters
    filters = Ruleset(filename = 'filters.json', rule_type = 'filters')
    filters.analyze(aws_config)

    # Handle exceptions
    process_exceptions(aws_config, args.exceptions[0])

    # Finalize
    postprocessing(aws_config, report.current_time, ruleset)

    # Save config and create HTML report
    html_report_path = report.save(aws_config, {}, args.force_write, args.debug)

    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)
# Example #24
# 0
 def setup(self):
     configPrintException(True)
     self.test_dir = os.path.dirname(os.path.realpath(__file__))
     self.test_ruleset_001 = os.path.join(self.test_dir, 'data/test-ruleset.json')
     self.test_ruleset_002 = os.path.join(self.test_dir, 'data/test-ruleset-absolute-path.json')
# Example #25
# 0
def main(args=None):
    """
    Main method that runs a scan

    :param args: pre-parsed arguments namespace; parsed from the CLI if None
    :return: 0 on success, 42 on authentication failure, 130 if cancelled
    """
    if not args:
        parser = ScoutSuiteArgumentParser()
        args = parser.parse_args()

    # Convert to a dictionary so missing keys yield None instead of a crash
    args = args.__dict__

    # Configure the debug level
    configPrintException(args.get('debug'))

    # Create a cloud provider object
    cloud_provider = get_provider(
        provider=args.get('provider'),
        profile=args.get('profile'),
        project_id=args.get('project_id'),
        folder_id=args.get('folder_id'),
        organization_id=args.get('organization_id'),
        all_projects=args.get('all_projects'),
        report_dir=args.get('report_dir'),
        timestamp=args.get('timestamp'),
        services=args.get('services'),
        skipped_services=args.get('skipped_services'),
        thread_config=args.get('thread_config'),
    )

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Authenticate to the cloud provider
        authenticated = cloud_provider.authenticate(
            profile=args.get('profile'),
            csv_credentials=args.get('csv_credentials'),
            mfa_serial=args.get('mfa_serial'),
            mfa_code=args.get('mfa_code'),
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'))

        if not authenticated:
            return 42

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.get('update'):
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[
                    service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions; a missing/invalid exceptions file is best-effort
    try:
        exceptions = RuleExceptions(args.get('profile'),
                                    args.get('exceptions')[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception:
        printDebug(
            'Warning, failed to load exceptions. The file may not exist or may have an invalid format.'
        )
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO this is AWS-specific - move to postprocessing?
    # Get organization data if it exists
    try:
        profile = AWSProfiles.get(args.get('profile'))[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(
                os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                   profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception:
        # Best-effort: missing organization data is not an error
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
# Example #26
# 0
def main():
    """
    Run Scout2: fetch AWS configuration data (or reload a local copy),
    analyze it against rulesets and exceptions, then write an HTML report
    (or results.json when --json is used).

    :return: 42 on error, 130 if cancelled by the user, None otherwise
    """

    # Parse arguments
    parser = Scout2ArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Set the profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    if not args.fetch_local:
        credentials = read_creds(args.profile[0], args.csv_credentials,
                                 args.mfa_serial, args.mfa_code)
        if credentials['AccessKeyId'] is None:
            return 42

    # Create a new Scout2 config
    report = Scout2Report(profile_name, args.report_dir, args.timestamp)
    aws_config = Scout2Config(profile_name, args.report_dir, args.timestamp,
                              args.services, args.skipped_services,
                              args.thread_config)

    if not args.fetch_local:

        # Fetch data from AWS APIs if not running a local analysis
        try:
            aws_config.fetch(credentials,
                             regions=args.regions,
                             partition_name=get_partition_name(credentials))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130
        aws_config = report.jsrw.to_dict(aws_config)

        # Set the account ID
        aws_config['aws_account_id'] = get_aws_account_id(credentials)

        # Update means we reload the whole config and overwrite part of it
        if args.update:
            new_aws_config = copy.deepcopy(aws_config)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            for service in new_aws_config['service_list']:
                # Per service only for now, may add per region & per VPC later...
                aws_config['services'][service] = new_aws_config['services'][
                    service]
            # Update the metadata too
            aws_config['metadata'] = Scout2Config('default', None, None, [],
                                                  []).metadata

    else:

        # Reload to flatten everything into a python dictionary
        aws_config = report.jsrw.load_from_file(AWSCONFIG)

    # Pre processing
    preprocessing(aws_config, args.ip_ranges, args.ip_ranges_name_key)

    # Analyze config
    finding_rules = Ruleset(profile_name,
                            filename=args.ruleset,
                            ip_ranges=args.ip_ranges,
                            aws_account_id=aws_config['aws_account_id'])
    pe = ProcessingEngine(finding_rules)
    pe.run(aws_config)

    # Create display filters
    filter_rules = Ruleset(filename='filters.json',
                           rule_type='filters',
                           aws_account_id=aws_config['aws_account_id'])
    pe = ProcessingEngine(filter_rules)
    pe.run(aws_config)

    # Handle exceptions; a missing/invalid exceptions file is best-effort
    try:
        exceptions = RuleExceptions(profile_name, args.exceptions[0])
        exceptions.process(aws_config)
        exceptions = exceptions.exceptions
    except Exception:
        printDebug(
            'Warning, failed to load exceptions. The file may not exist or may have an invalid format.'
        )
        exceptions = {}

    # Finalize
    postprocessing(aws_config, report.current_time, finding_rules)

    # Get organization data if it exists
    try:
        profile = AWSProfiles.get(profile_name)[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(
                os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                   profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    aws_config['organization'] = org
    # Narrowed from a bare ``except:`` which would also swallow
    # SystemExit / KeyboardInterrupt
    except Exception:
        pass

    if args.json:
        printInfo('Writing to results.json')
        # Context manager guarantees the file is closed even if dump fails
        with open('results.json', 'w') as fp:
            json.dump(aws_config, fp, default=json_helper)
        sys.exit()

    # Save config and create HTML report
    html_report_path = report.save(aws_config, exceptions, args.force_write,
                                   args.debug)

    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)
def main():
    """
    Rotate the IAM access key of the current user: create a new key, save it
    to the credentials file, verify it works, then delete the old key
    (restoring the old credentials on failure).

    :return: 42 on error, None on success
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Get profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Connect to IAM
    iam_client = connect_service('iam', credentials)
    if not iam_client:
        return 42

    # Fetch the long-lived key ID if STS credentials are used
    if credentials['SessionToken']:
        akia_creds = read_creds(profile_name + '-nomfa')
    else:
        akia_creds = credentials
    # Old key ID to delete once the new key is confirmed working
    # (the original also bound the unused secret key here; dropped)
    aws_key_id = akia_creds['AccessKeyId']

    # Fetch username
    printInfo('Fetching username...')
    user_name = get_username(credentials)

    # Create the new key
    try:
        # Create a new IAM key
        printInfo('Creating a new access key for \'%s\'...' % user_name)
        new_credentials = iam_client.create_access_key(
            UserName=user_name)['AccessKey']
        show_access_keys(iam_client, user_name)
    except Exception as e:
        printException(e)
        return 42

    # Save the new key
    if credentials['SessionToken']:
        write_creds_to_aws_credentials_file(profile_name + '-nomfa',
                                            new_credentials)
    else:
        write_creds_to_aws_credentials_file(profile_name, new_credentials)
    printInfo('Credentials file updated with new access key.')

    printInfo('Verifying access with the new key...')
    # Sleep because the access key may not be active server-side...
    time.sleep(5)
    if credentials['SessionToken']:
        new_credentials = read_creds(profile_name + '-nomfa')
        new_credentials = init_sts_session(profile_name, new_credentials)
    else:
        new_credentials = read_creds(profile_name)
    # Confirm that it works...
    try:
        new_iam_client = connect_service('iam', new_credentials)
        printInfo('Deleting the old access key...')
        new_iam_client.delete_access_key(AccessKeyId=aws_key_id,
                                         UserName=user_name)
    except Exception as e:
        printException(e)
        printInfo('Restoring your old credentials...')
        # Restore the old key here
        if credentials['SessionToken']:
            write_creds_to_aws_credentials_file(profile_name + '-nomfa',
                                                akia_creds)
        else:
            write_creds_to_aws_credentials_file(profile_name, akia_creds)
        return 42

    # Summary of existing access keys
    try:
        show_access_keys(new_iam_client, user_name)
        printInfo('Success !')
    except Exception as e:
        printException(e)
        return 42
# Example #28
# 0
def main():
    """
    List all resources matched by a rule across one or more AWS profiles.

    Loads a previously-generated Scout2 report for each profile, builds a
    temporary ruleset (from --config, or from a raw resource path), runs the
    processing engine, and prints the matched items using a configurable
    output format.

    Returns 42 on requirement-check failure; per-profile errors are reported
    and skipped via `continue`.
    """

    # Parse arguments
    parser = ListallArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Support multiple environments
    for profile_name in args.profile:

        # Load the config
        try:
            report = Scout2Report(args.provider, profile_name, args.report_dir,
                                  args.timestamp)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            services = aws_config['service_list']
        except Exception as e:
            printException(e)
            printError(
                'Error, failed to load the configuration for profile %s' %
                profile_name)
            continue

        # Create a ruleset with only whatever rules were specified...
        if args.config:
            rule_filename = args.config
            ruleset = TmpRuleset(rule_dirs=[os.getcwd()],
                                 rule_filename=args.config,
                                 rule_args=args.config_args)
        elif len(args.path) > 0:
            # Create a local tmp rule
            # NOTE: this writes 'listall-artifact.json' into the current
            # working directory and does not clean it up afterwards.
            rule_dict = {'description': 'artifact'}
            rule_dict['path'] = args.path[0]
            rule_dict['conditions'] = []
            rule_filename = 'listall-artifact.json'
            with open(os.path.join(os.getcwd(), rule_filename), 'wt') as f:
                f.write(json.dumps(rule_dict))
            ruleset = TmpRuleset(rule_dirs=[os.getcwd()],
                                 rule_filename=rule_filename,
                                 rule_args=[])
        else:
            printError(
                'Error, you must provide either a rule configuration file or the path to the resources targeted.'
            )
            continue

        # Process the rule
        pe = ProcessingEngine(ruleset)
        pe.run(aws_config, skip_dashboard=True)

        # Retrieve items
        # Only the first rule of the ruleset is used to locate results.
        rule = ruleset.rules[rule_filename][0]
        rule_service = rule.service.lower()
        rule_key = rule.key
        rule_type = rule.rule_type
        resources = aws_config['services'][rule_service][rule_type][rule_key][
            'items']

        # Set the keys to output
        if len(args.keys):
            # 1. Explicitly provided on the CLI
            rule.keys = args.keys
        elif len(args.keys_file):
            # 2. Explicitly provided files that contain the list of keys
            rule.keys = []
            for filename in args.keys_file:
                with open(filename, 'rt') as f:
                    rule.keys += json.load(f)['keys']
        else:
            try:
                # 3. Load default set of keys based on path
                target_path = rule.display_path if hasattr(
                    rule, 'display_path') else rule.path
                listall_configs_dir = os.path.join(
                    os.path.dirname(os.path.realpath(__file__)),
                    'output/data/listall-configs')
                target_file = os.path.join(listall_configs_dir,
                                           '%s.json' % target_path)
                if os.path.isfile(target_file):
                    with open(target_file, 'rt') as f:
                        rule.keys = json.load(f)['keys']
            except:
                # 4. Print the object name
                rule.keys = ['name']

        # Prepare the output format
        (lines, template) = format_listall_output(args.format_file[0], None,
                                                  args.format, rule)

        # Print the output
        printInfo(
            generate_listall_output(lines, resources, aws_config, template,
                                    []))
Example #29
0
 def setUpClass(cls):
     """
     One-time fixture: enable verbose exception printing and select the
     profile to use. Falls back to the 'travislike' profile when no
     credentials are found in the environment variables.
     """
     configPrintException(True)
     creds = read_creds_from_environment_variables()
     # PEP 8: compare to None with 'is', not '==' (was '== None')
     cls.profile_name = 'travislike' if creds['AccessKeyId'] is None else None
     cls.has_run_scout_suite = False
 def setup(self):
     """Per-test setup: enable verbose exception printing for debugging."""
     configPrintException(True)
Example #31
0
 def setUpClass(cls):
     """
     One-time fixture: enable verbose exception printing and select the
     profile to use. Falls back to the 'travislike' profile when no
     credentials are found in the environment variables.
     """
     configPrintException(True)
     creds = read_creds_from_environment_variables()
     # PEP 8: compare to None with 'is', not '==' (was '== None')
     cls.profile_name = 'travislike' if creds['AccessKeyId'] is None else None
     cls.has_run_scout2 = False
def main(passed_args=None):
    """
    Main method that runs a scan.

    Authenticates to the configured cloud provider, fetches (or reloads)
    its configuration, runs the finding and filter rulesets, applies
    exceptions, and saves/opens the HTML report.

    :param passed_args: optional list of CLI arguments (for programmatic
                        invocation); when None, sys.argv is parsed.
    :return: 0 on success, 42 on error, 130 when cancelled by the user
    """

    # FIXME check that all requirements are installed
    # # Check version of opinel
    # requirements_file_path = '%s/requirements.txt' % os.path.dirname(sys.modules['__main__'].__file__)
    # if not check_requirements(requirements_file_path):
    #     return 42

    # Parse arguments
    parser = ScoutSuiteArgumentParser()

    if passed_args:
        args = parser.parse_args(passed_args)
    else:
        args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Create a cloud provider object
    cloud_provider = get_provider(provider=args.provider,
                                  profile=args.profile[0],
                                  project_id=args.project_id,
                                  folder_id=args.folder_id,
                                  organization_id=args.organization_id,
                                  report_dir=args.report_dir,
                                  timestamp=args.timestamp,
                                  services=args.services,
                                  skipped_services=args.skipped_services,
                                  thread_config=args.thread_config)

    # Build the report file name.
    # Bug fix: the original used three independent 'if' statements with no
    # fallback, so any provider code other than aws/gcp/azure left
    # report_file_name unbound and raised NameError below.
    if cloud_provider.provider_code == 'aws':
        if args.profile:
            report_file_name = 'aws-%s' % args.profile[0]
        else:
            report_file_name = 'aws'
    elif cloud_provider.provider_code == 'gcp':
        if args.project_id:
            report_file_name = 'gcp-%s' % args.project_id
        elif args.organization_id:
            report_file_name = 'gcp-%s' % args.organization_id
        elif args.folder_id:
            report_file_name = 'gcp-%s' % args.folder_id
        else:
            report_file_name = 'gcp'
    elif cloud_provider.provider_code == 'azure':
        report_file_name = 'azure'
    else:
        # Fallback so report_file_name is always defined
        report_file_name = cloud_provider.provider_code

    # Create a new report
    report = Scout2Report(args.provider, report_file_name, args.report_dir, args.timestamp)

    # Complete run, including pulling data from provider
    if not args.fetch_local:
        # Authenticate to the cloud provider
        authenticated = cloud_provider.authenticate(profile=args.profile[0],
                                                    csv_credentials=args.csv_credentials,
                                                    mfa_serial=args.mfa_serial,
                                                    mfa_code=args.mfa_code,
                                                    key_file=args.key_file,
                                                    user_account=args.user_account,
                                                    service_account=args.service_account,
                                                    azure_cli=args.azure_cli,
                                                    azure_msi=args.azure_msi,
                                                    azure_service_principal=args.azure_service_principal,
                                                    azure_file_auth=args.azure_file_auth,
                                                    azure_user_credentials=args.azure_user_credentials)

        if not authenticated:
            return 42

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.regions)
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.update:
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.ip_ranges, args.ip_ranges_name_key)

    # Analyze config
    finding_rules = Ruleset(environment_name=args.profile[0],
                            cloud_provider=args.provider,
                            filename=args.ruleset,
                            ip_ranges=args.ip_ranges,
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    filter_rules = Ruleset(cloud_provider=args.provider,
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions (best-effort: missing/invalid file is not fatal)
    try:
        exceptions = RuleExceptions(args.profile[0], args.exceptions[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception:
        printDebug('Warning, failed to load exceptions. The file may not exist or may have an invalid format.')
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO this is AWS-specific - move to postprocessing?
    # Get organization data if it exists (best-effort)
    try:
        profile = AWSProfiles.get(args.profile[0])[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                                                     profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception:
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions, args.force_write, args.debug)

    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
Example #33
0
def main():
    """
    Download CloudTrail log files for a date range and decompress them.

    For each selected region, finds the S3 bucket receiving CloudTrail logs,
    downloads all log objects between --from and --to (YYYY/MM/DD), then
    gunzips everything under the download folder.

    Returns 42 on argument/credential errors.
    """

    # Parse arguments
    parser = OpinelArgumentParser()
    parser.add_argument('debug')
    parser.add_argument('profile')
    parser.add_argument('regions')
    parser.add_argument('partition-name')
    parser.add_argument('bucket-name')
    parser.parser.add_argument('--aws-account-id',
                                dest='aws_account_id',
                                default=[ None ],
                                nargs='+',
                                help='Bleh.')
    parser.parser.add_argument('--from',
                                dest='from_date',
                                default=[ None ],
                                nargs='+',
                                help='Bleh.')
    parser.parser.add_argument('--to',
                                dest='to_date',
                                default=[ None ],
                                nargs='+',
                                help='Bleh.')

    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Arguments
    profile_name = args.profile[0]
    try:
        from_date = datetime.datetime.strptime(args.from_date[0], "%Y/%m/%d").date()
        to_date = datetime.datetime.strptime(args.to_date[0], "%Y/%m/%d").date()
        delta = to_date - from_date
    except Exception as e:
        printException(e)
        printError('Error: dates must be formatted of the following format YYYY/MM/DD')
        return 42
    if delta.days < 0:
        printError('Error: your \'to\' date is earlier than your \'from\' date')
        return 42

    # Search for AWS credentials
    credentials = read_creds(profile_name)
    if not credentials['AccessKeyId']:
        return 42

    # Fetch AWS account ID
    if not args.aws_account_id[0]:
        printInfo('Fetching the AWS account ID...')
        aws_account_id = get_aws_account_id(credentials)
    else:
        aws_account_id = args.aws_account_id[0]
    global cloudtrail_log_path
    cloudtrail_log_path = cloudtrail_log_path.replace('AWS_ACCOUNT_ID', aws_account_id)

    # Create download dir
    if not os.path.exists(download_folder):
        os.makedirs(download_folder)

    # Iterate through regions
    s3_clients = {}
    for region in build_region_list('cloudtrail', args.regions, args.partition_name):

        # Connect to CloudTrail
        cloudtrail_client = connect_service('cloudtrail', credentials, region)
        if not cloudtrail_client:
            continue

        # Get information about the S3 bucket that receives CloudTrail logs
        trails = cloudtrail_client.describe_trails()
        # Bug fix: skip regions with no trail; previously bucket_name/prefix
        # would be unbound (first region) or stale from the previous region.
        if not trails['trailList']:
            continue
        # NOTE: when several trails exist in a region, only the last one's
        # bucket is used (pre-existing behavior kept)
        for trail in trails['trailList']:
            bucket_name = trail['S3BucketName']
            prefix = trail.get('S3KeyPrefix', '')

        # Connect to S3 (in the region, then in the bucket's home region)
        manage_dictionary(s3_clients, region, connect_service('s3', credentials, region))
        target_bucket_region = get_s3_bucket_location(s3_clients[region], bucket_name)
        manage_dictionary(s3_clients, target_bucket_region, connect_service('s3', credentials, target_bucket_region))
        s3_client = s3_clients[target_bucket_region]

        # Generate base path for files
        log_path = os.path.join(prefix, cloudtrail_log_path.replace('REGION', region))

        # Download files
        printInfo('Downloading log files in %s... ' % region, False)
        keys = []
        for i in range(delta.days + 1):
            day = from_date + timedelta(days=i)
            folder_path = os.path.join(log_path, day.strftime("%Y/%m/%d"))
            try:
                objects = handle_truncated_response(s3_client.list_objects, {'Bucket': bucket_name, 'Prefix': folder_path}, ['Contents'])
                for o in objects['Contents']:
                    keys.append([o['Key'], 0])
            except Exception as e:
                # Best-effort: a missing day's folder is not fatal
                printException(e)
        thread_work(keys, download_object, params = {'Bucket': bucket_name, 'S3Client': s3_client}, num_threads = 100)
        printInfo('Done')

    # Iterate through files and gunzip 'em
    printInfo('Decompressing files...')
    gzlogs = []
    for root, dirnames, filenames in os.walk(download_folder):
        for filename in filenames:
            gzlogs.append(filename)
    thread_work(gzlogs, gunzip_file, num_threads = 30)
Example #34
0
def main(args):
    """
    Run an AWS API inventory: validate the requested services/regions against
    the API model, filter operations through the blacklist, then either list
    the operations or invoke them.

    NOTE: this snippet is Python 2 (print statements).
    """
    setup_logging(args.debug)

    if args.version:
        print aws_inventory.__version__
        return

    api_model = build_api_model()

    if args.list_svcs:
        print '\n'.join(sorted(filter_services(api_model)))
        return

    # configure the debug level for opinel
    configPrintException(args.debug)

    # validate services against API mode #

    available_services = api_model.keys()

    if args.services:
        invalid_included_services = [
            svc for svc in args.services if svc not in available_services
        ]
        if invalid_included_services:
            raise EnvironmentError('Invalid service(s) specified: {}'.format(
                ', '.join(invalid_included_services)))

    if args.excluded_services:
        invalid_excluded_services = [
            svc for svc in args.excluded_services
            if svc not in available_services
        ]
        if invalid_excluded_services:
            raise EnvironmentError('Invalid service(s) to exclude: {}'.format(
                ', '.join(invalid_excluded_services)))

    # validate regions against API model #

    if args.regions:
        available_regions = set()
        for svc in available_services:
            available_regions.update(api_model[svc]['regions'])
        invalid_regions = [
            region for region in args.regions
            if region not in available_regions
        ]
        if invalid_regions:
            raise EnvironmentError('Invalid region(s) specified: {}'.format(
                ', '.join(invalid_regions)))

    # create the list of services to analyze
    services = filter_services(api_model, frozenset(args.services),
                               frozenset(args.excluded_services))
    if not services:
        raise EnvironmentError('List of AWS services to be analyzed is empty.')
    LOGGER.debug('%d AWS service(s) to inspect: %s.', len(services),
                 ', '.join(services))

    op_blacklist_parser = aws_inventory.blacklist.OpBlacklistParser(
        args.op_blacklist, api_model)
    service_descriptors = filter_operations(api_model, op_blacklist_parser,
                                            args.regions, services)
    if not service_descriptors:
        raise EnvironmentError(
            'No operations to invoke for specifed AWS services and regions.')

    # Count operations (ops x regions per service), optionally listing them
    ops_count = 0
    for svc_name in service_descriptors:
        ops_count += (len(service_descriptors[svc_name]['ops']) *
                      len(service_descriptors[svc_name]['regions']))
        if args.list_operations:
            print '[{}]\n{}\n'.format(
                svc_name, '\n'.join(service_descriptors[svc_name]['ops'])
                or '# NONE')

    if args.list_operations:
        print 'Total operations to invoke: {}'.format(ops_count)
    else:
        LOGGER.debug('Total operations to invoke: %d.', ops_count)
        aws_inventory.invoker.ApiInvoker(args, service_descriptors,
                                         ops_count).start()
Example #35
0
def main():
    """
    Run a Scout2 scan: read credentials, fetch (or reload) the AWS
    configuration, run the finding and filter rulesets, apply exceptions,
    and save/open the HTML report.

    :return: 0 on success, 42 on error, 130 when cancelled by the user
    """

    # Parse arguments
    parser = Scout2ArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Set the profile name
    profile_name = args.profile[0]

    # Search for AWS credentials
    if not args.fetch_local:
        credentials = read_creds(args.profile[0], args.csv_credentials, args.mfa_serial, args.mfa_code)
        if credentials['AccessKeyId'] is None:
            return 42

    # Create a new Scout2 config
    report = Scout2Report(profile_name, args.report_dir, args.timestamp)
    aws_config = Scout2Config(profile_name, args.report_dir, args.timestamp, args.services, args.skipped_services, args.thread_config)

    if not args.fetch_local:

        # Fetch data from AWS APIs if not running a local analysis
        try:
            aws_config.fetch(credentials, regions=args.regions, partition_name = get_partition_name(credentials))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130
        aws_config = report.jsrw.to_dict(aws_config)

        # Set the account ID
        aws_config['aws_account_id'] = get_aws_account_id(credentials)

        # Update means we reload the whole config and overwrite part of it
        # (idiom fix: was 'if args.update == True:')
        if args.update:
            new_aws_config = copy.deepcopy(aws_config)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            for service in new_aws_config['service_list']:
                # Per service only for now, may add per region & per VPC later...
                aws_config['services'][service] = new_aws_config['services'][service]
            # Update the metadata too
            aws_config['metadata'] = Scout2Config('default', None, None, [], []).metadata

    else:

        # Reload to flatten everything into a python dictionary
        aws_config = report.jsrw.load_from_file(AWSCONFIG)

    # Pre processing
    preprocessing(aws_config, args.ip_ranges, args.ip_ranges_name_key)

    # Analyze config
    finding_rules = Ruleset(profile_name, filename = args.ruleset, ip_ranges = args.ip_ranges, aws_account_id = aws_config['aws_account_id'])
    pe = ProcessingEngine(finding_rules)
    pe.run(aws_config)

    # Create display filters
    filter_rules = Ruleset(filename = 'filters.json', rule_type = 'filters', aws_account_id = aws_config['aws_account_id'])
    pe = ProcessingEngine(filter_rules)
    pe.run(aws_config)

    # Handle exceptions (best-effort: missing/invalid file is not fatal)
    try:
        exceptions = RuleExceptions(profile_name, args.exceptions[0])
        exceptions.process(aws_config)
        exceptions = exceptions.exceptions
    except Exception:
        printDebug('Warning, failed to load exceptions. The file may not exist or may have an invalid format.')
        exceptions = {}

    # Finalize
    postprocessing(aws_config, report.current_time, finding_rules)

    # Get organization data if it exists (best-effort)
    try:
        profile = AWSProfiles.get(profile_name)[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(os.path.expanduser('~/.aws/recipes/%s/organization.json' %  profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    aws_config['organization'] = org
    # Bug fix: bare 'except:' would also swallow KeyboardInterrupt/SystemExit
    except Exception:
        pass

    # Save config and create HTML report
    html_report_path = report.save(aws_config, exceptions, args.force_write, args.debug)

    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0