Example #1
    def test_get_report_name(
            self,
            mock_get_partition_name,
            mock_get_aws_account_id,
            mock_facade_aws_account_id,
            mock_facade_aws_partition_name,
    ):
        # no account_id, no profile
        mock_get_aws_account_id.return_value = None
        mock_get_partition_name.return_value = None
        aws_provider = get_provider(
            provider="aws", credentials=mock.MagicMock(session="123"),
        )
        assert aws_provider.get_report_name() == "aws"

        # profile and account_id
        mock_get_aws_account_id.return_value = "12345"
        aws_provider = get_provider(
            provider="aws", profile="9999", credentials=mock.MagicMock(session="123"),
        )
        assert aws_provider.get_report_name() == "aws-9999"

        # account_id
        aws_provider = get_provider(
            provider="aws", credentials=mock.MagicMock(session="123"),
        )
        assert aws_provider.get_report_name() == "aws-12345"
Example #2
def main():

    # Parse arguments
    parser = RulesArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Load ruleset
    ruleset = Ruleset(filename=args.base_ruleset,
                      name=args.ruleset_name,
                      rules_dir=args.rules_dir,
                      ruleset_generator=True)

    # Create the HTML ruleset generator
    ruleset_generator = RulesetGenerator(args.ruleset_name, args.generator_dir)

    # FIXME: this is broken in Scout Suite
    # Create a cloud provider object
    cloud_provider = get_provider(provider='aws', profile='default')

    ruleset.ruleset_generator_metadata = Scout2Config('default', None, None,
                                                      [], []).metadata

    ruleset_generator_path = ruleset_generator.save(ruleset, args.force_write,
                                                    args.debug)

    # Open the HTML ruleset generator in a browser
    if not args.no_browser:
        printInfo('Starting the HTML ruleset generator...')
        url = 'file://%s' % os.path.abspath(ruleset_generator_path)
        webbrowser.open(url, new=2)
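
A minimal entry-point sketch for running a script like this directly, so that the integer return values (for example 42 when the opinel requirements check fails) become the process exit status; the sys import is added here only for the sketch:

import sys

if __name__ == '__main__':
    sys.exit(main())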
Example #3
def main(passed_args=None):
    """
    Main method that runs a scan

    :return:
    """

    # FIXME check that all requirements are installed
    # # Check version of opinel
    # requirements_file_path = '%s/requirements.txt' % os.path.dirname(sys.modules['__main__'].__file__)
    # if not check_requirements(requirements_file_path):
    #     return 42

    # Parse arguments
    parser = ScoutSuiteArgumentParser()

    if passed_args:
        args = parser.parse_args(passed_args)
    else:
        args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Create a cloud provider object
    cloud_provider = get_provider(provider=args.provider,
                                  profile=args.profile[0],
                                  project_id=args.project_id,
                                  folder_id=args.folder_id,
                                  organization_id=args.organization_id,
                                  report_dir=args.report_dir,
                                  timestamp=args.timestamp,
                                  services=args.services,
                                  skipped_services=args.skipped_services,
                                  thread_config=args.thread_config)

    if cloud_provider.provider_code == 'aws':
        if args.profile:
            report_file_name = 'aws-%s' % args.profile[0]
        else:
            report_file_name = 'aws'
    elif cloud_provider.provider_code == 'gcp':
        if args.project_id:
            report_file_name = 'gcp-%s' % args.project_id
        elif args.organization_id:
            report_file_name = 'gcp-%s' % args.organization_id
        elif args.folder_id:
            report_file_name = 'gcp-%s' % args.folder_id
        else:
            report_file_name = 'gcp'
    elif cloud_provider.provider_code == 'azure':
        report_file_name = 'azure'

    # Create a new report
    report = Scout2Report(args.provider, report_file_name, args.report_dir, args.timestamp)

    # Complete run, including pulling data from provider
    if not args.fetch_local:
        # Authenticate to the cloud provider
        authenticated = cloud_provider.authenticate(profile=args.profile[0],
                                                    csv_credentials=args.csv_credentials,
                                                    mfa_serial=args.mfa_serial,
                                                    mfa_code=args.mfa_code,
                                                    key_file=args.key_file,
                                                    user_account=args.user_account,
                                                    service_account=args.service_account,
                                                    azure_cli=args.azure_cli,
                                                    azure_msi=args.azure_msi,
                                                    azure_service_principal=args.azure_service_principal,
                                                    azure_file_auth=args.azure_file_auth,
                                                    azure_user_credentials=args.azure_user_credentials)

        if not authenticated:
            return 42

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.regions)
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.update:
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.ip_ranges, args.ip_ranges_name_key)

    # Analyze config
    finding_rules = Ruleset(environment_name=args.profile[0],
                            cloud_provider=args.provider,
                            filename=args.ruleset,
                            ip_ranges=args.ip_ranges,
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    filter_rules = Ruleset(cloud_provider=args.provider,
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    try:
        exceptions = RuleExceptions(args.profile[0], args.exceptions[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception as e:
        printDebug('Warning, failed to load exceptions. The file may not exist or may have an invalid format.')
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO this is AWS-specific - move to postprocessing?
    # Get organization data if it exists
    try:
        profile = AWSProfiles.get(args.profile[0])[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                                                     profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception as e:
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions, args.force_write, args.debug)

    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
Example #4
def main(args):
    # Configure the debug level
    config_debug_level(args.debug)

    # FIXME check that all requirements are installed
    # # Check version of opinel
    # if not check_requirements(os.path.realpath(__file__)):
    #     return 42

    # Support multiple environments
    for profile_name in args.profile:

        # Load the config
        try:
            # FIXME this is specific to AWS
            report_file_name = 'aws-%s' % profile_name
            report = Scout2Report('aws', report_file_name, args.report_dir, args.timestamp)
            aws_config = report.jsrw.load_from_file(AWSCONFIG)
            services = aws_config['service_list']
        except Exception as e:
            print_exception(e)
            print_error('Error, failed to load the configuration for profile %s' % profile_name)
            continue

        # Create a ruleset with only whatever rules were specified...
        if args.config:
            rule_filename = args.config
            ruleset = TmpRuleset(environment_name=args.profile[0],
                                 cloud_provider='aws',
                                 rule_dirs=[os.getcwd()],
                                 rule_filename=args.config,
                                 rule_args=args.config_args)
        elif len(args.path) > 0:
            # Create a local tmp rule
            rule_dict = {'description': 'artifact'}
            rule_dict['path'] = args.path[0]
            rule_dict['conditions'] = []
            rule_filename = 'listall-artifact.json'
            with open(os.path.join(os.getcwd(), rule_filename), 'wt') as f:
                f.write(json.dumps(rule_dict))
            ruleset = TmpRuleset(rule_dirs=[os.getcwd()], rule_filename=rule_filename, rule_args=[])
        else:
            print_error(
                'Error, you must provide either a rule configuration file or the path to the resources targeted.')
            continue

        # FIXME: this is broken in Scout Suite; it only handles AWS
        cloud_provider = get_provider(provider='aws',
                                      profile=args.profile[0])

        # Process the rule
        pe = ProcessingEngine(ruleset)
        pe.run(cloud_provider, skip_dashboard=True)

        # Retrieve items
        rule = ruleset.rules[rule_filename][0]
        rule_service = rule.service.lower()
        rule_key = rule.key
        rule_type = rule.rule_type
        resources = aws_config['services'][rule_service][rule_type][rule_key]['items']

        # Set the keys to output
        if len(args.keys):
            # 1. Explicitly provided on the CLI
            rule.keys = args.keys
        elif len(args.keys_file):
            # 2. Explicitly provided files that contain the list of keys
            rule.keys = []
            for filename in args.keys_file:
                with open(filename, 'rt') as f:
                    rule.keys += json.load(f)['keys']
        else:
            try:
                # 3. Load default set of keys based on path
                target_path = rule.display_path if hasattr(rule, 'display_path') else rule.path
                listall_configs_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                                   'output/data/listall-configs')
                target_file = os.path.join(listall_configs_dir, '%s.json' % target_path)
                if os.path.isfile(target_file):
                    with open(target_file, 'rt') as f:
                        rule.keys = json.load(f)['keys']
            except:
                # 4. Print the object name
                rule.keys = ['name']

        # Prepare the output format
        (lines, template) = format_listall_output(args.format_file[0], None, args.format, rule)

        # Print the output
        print_info(generate_listall_output(lines, resources, aws_config, template, []))
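
For reference, the json.dumps call above serializes rule_dict into listall-artifact.json as a single JSON object; a sketch of the resulting file content, where "s3.buckets" is only a hypothetical example of a --path value:

{"description": "artifact", "path": "s3.buckets", "conditions": []}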
Example #5
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id,
               aws_secret_access_key,
               aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth, tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               # Aliyun
               access_key_id, access_key_secret,
               # General
               report_name, report_dir,
               timestamp,
               services, skipped_services,
               result_format,
               database_name, host_ip, host_port,
               regions,
               excluded_regions,
               fetch_local, update,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug,
               quiet,
               log_file,
               no_browser,
               programmatic_execution,
               **kwargs):
    """
    Run a scout job.
    """

    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)

    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)
    try:
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=access_key_id,
                                                 access_key_secret=access_key_secret,
                                                 programmatic_execution=programmatic_execution)

        if not credentials:
            return 101
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return 101

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=report_dir,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report
    report_name = report_name if report_name else cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         report_dir,
                         timestamp,
                         result_format=result_format)

    if database_name:
        database_file, _ = get_filename('RESULTS', report_name, report_dir, file_extension="db")
        Server.init(database_file, host_ip, host_port)
        return

    # Complete run, including pulling data from provider
    if not fetch_local:

        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            print_info('Updating existing data')
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(
        ip_ranges, ip_ranges_name_key)

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_exception('Failed to load exceptions: {}'.format(e))
            exceptions = {}
    else:
        exceptions = {}

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }
    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    # Save config and create HTML report
    html_report_path = report.save(
        cloud_provider, exceptions, force_write, debug)

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
Example #6
def main(args=None):
    """
    Main method that runs a scan

    :return:
    """
    if not args:
        parser = ScoutSuiteArgumentParser()
        args = parser.parse_args()

    # Use the underlying dictionary so missing arguments return None instead of raising an AttributeError
    args = args.__dict__

    # Configure the debug level
    configPrintException(args.get('debug'))

    # Create a cloud provider object
    cloud_provider = get_provider(
        provider=args.get('provider'),
        profile=args.get('profile'),
        project_id=args.get('project_id'),
        folder_id=args.get('folder_id'),
        organization_id=args.get('organization_id'),
        all_projects=args.get('all_projects'),
        report_dir=args.get('report_dir'),
        timestamp=args.get('timestamp'),
        services=args.get('services'),
        skipped_services=args.get('skipped_services'),
        thread_config=args.get('thread_config'),
    )

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Authenticate to the cloud provider
        authenticated = cloud_provider.authenticate(
            profile=args.get('profile'),
            csv_credentials=args.get('csv_credentials'),
            mfa_serial=args.get('mfa_serial'),
            mfa_code=args.get('mfa_code'),
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'))

        if not authenticated:
            return 42

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.get('update'):
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[
                    service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    try:
        exceptions = RuleExceptions(args.get('profile'),
                                    args.get('exceptions')[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception as e:
        printDebug(
            'Warning, failed to load exceptions. The file may not exist or may have an invalid format.'
        )
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO this is AWS-specific - move to postprocessing?
    # Get organization data if it exists
    try:
        profile = AWSProfiles.get(args.get('profile'))[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(
                os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                   profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception as e:
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
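
A minimal sketch of the Namespace-to-dictionary conversion this function relies on: once args is a plain dict, options that were never defined come back as None from .get() instead of raising AttributeError. The --provider flag below is only for illustration:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--provider')
args = vars(parser.parse_args(['--provider', 'aws']))  # same mapping as args.__dict__
print(args.get('provider'))    # 'aws'
print(args.get('no_browser'))  # None rather than an AttributeError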
Example #7
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id,
               aws_secret_access_key,
               aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth,
               tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               timestamp,
               services, skipped_services,
               max_workers,
               regions,
               excluded_regions,
               fetch_local, update,
               max_rate,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug,
               quiet,
               no_browser,
               programmatic_execution,
               **kwargs):
    """
    Run a scout job.
    """

    auth_strategy = get_authentication_strategy(provider)
    try:
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=None,
                                                 access_key_secret=None,
                                                 programmatic_execution=programmatic_execution)

        if not credentials:
            return {'error': "Credentials failed"}
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return {'error': f"Exception {e}"}

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=None,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report
    report_name = cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         './',
                         timestamp,
                         result_format='json')

    # Complete run, including pulling data from provider
    if not fetch_local:
        # Fetch data from provider APIs
        try:
            # Gathering data from APIs
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            # Cancelled by user
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            # Updating existing data
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Using local data
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(
        ip_ranges, ip_ranges_name_key)

    # Analyze config
    # Running rule engine
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    # Applying display filters
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if exceptions:
        # Applying exceptions
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
        except Exception as e:
            print_exception('Failed to load exceptions: {}'.format(e))

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }
    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    cloud_provider.credentials = None
    return cloud_provider
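
Because _run is a coroutine that returns the populated cloud_provider object (with credentials stripped), a caller has to drive it with an event loop and supply every parameter in the signature. A rough sketch of programmatic use, with all parameters defaulted to None via inspect and a handful overridden with placeholder values; running it would attempt real authentication and API calls:

import asyncio
import inspect

# Default every named parameter of _run to None, then override the ones we care about.
params = {name: None for name, p in inspect.signature(_run).parameters.items()
          if p.kind is not inspect.Parameter.VAR_KEYWORD}
params.update(provider='aws', profile='default',
              services=[], skipped_services=[],
              regions=[], excluded_regions=[],
              fetch_local=False, update=False,
              programmatic_execution=True)
cloud_provider = asyncio.run(_run(**params))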
Example #8
async def run_scan(args):
    # Configure the debug level
    set_config_debug_level(args.get('debug'))

    print_info('Launching Scout')

    credentials = None
    if not args.get('fetch_local'):
        auth_strategy = get_authentication_strategy(args.get('provider'))
        credentials = auth_strategy.authenticate(
            profile=args.get('profile'),
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'))

        if not credentials:
            return 401

    # Create a cloud provider object
    cloud_provider = get_provider(
        provider=args.get('provider'),
        profile=args.get('profile'),
        project_id=args.get('project_id'),
        folder_id=args.get('folder_id'),
        organization_id=args.get('organization_id'),
        all_projects=args.get('all_projects'),
        report_dir=args.get('report_dir'),
        timestamp=args.get('timestamp'),
        services=args.get('services'),
        skipped_services=args.get('skipped_services'),
        thread_config=args.get('thread_config'),
        credentials=credentials)

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO: move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.get('update'):
            print_info('Updating existing data')
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(DEFAULT_RESULT_FILE)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[
                    service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(DEFAULT_RESULT_FILE)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if args.get('exceptions')[0]:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(args.get('profile'),
                                        args.get('exceptions')[0])
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_debug(
                'Failed to load exceptions. The file may not exist or may have an invalid format.'
            )
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
Example #9
async def _run(
        provider,
        # AWS
        profile,
        aws_access_key_id,
        aws_secret_access_key,
        aws_session_token,
        # Azure
        cli,
        user_account,
        user_account_browser,
        msi,
        service_principal,
        file_auth,
        tenant_id,
        subscription_ids,
        all_subscriptions,
        client_id,
        client_secret,
        username,
        password,
        # GCP
        service_account,
        project_id,
        folder_id,
        organization_id,
        all_projects,
        # Aliyun
        access_key_id,
        access_key_secret,
        # General
        report_name,
        report_dir,
        timestamp,
        services,
        skipped_services,
        list_services,
        result_format,
        database_name,
        host_ip,
        host_port,
        regions,
        excluded_regions,
        fetch_local,
        update,
        ip_ranges,
        ip_ranges_name_key,
        ruleset,
        exceptions,
        force_write,
        debug,
        quiet,
        log_file,
        no_browser,
        programmatic_execution,
        **kwargs):
    """
    Run a scout job.
    """

    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)

    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)

    try:
        credentials = auth_strategy.authenticate(
            profile=profile,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
            user_account=user_account,
            user_account_browser=user_account_browser,
            service_account=service_account,
            cli=cli,
            msi=msi,
            service_principal=service_principal,
            file_auth=file_auth,
            tenant_id=tenant_id,
            client_id=client_id,
            client_secret=client_secret,
            username=username,
            password=password,
            access_key_id=access_key_id,
            access_key_secret=access_key_secret)

        if not credentials:
            return 101
    except Exception as e:
        print_exception(f'Authentication failure: {e}')
        return 101

    # Create a cloud provider object
    try:
        cloud_provider = get_provider(
            provider=provider,
            # AWS
            profile=profile,
            # Azure
            subscription_ids=subscription_ids,
            all_subscriptions=all_subscriptions,
            # GCP
            project_id=project_id,
            folder_id=folder_id,
            organization_id=organization_id,
            all_projects=all_projects,
            # Other
            report_dir=report_dir,
            timestamp=timestamp,
            services=services,
            skipped_services=skipped_services,
            programmatic_execution=programmatic_execution,
            credentials=credentials)
    except Exception as e:
        print_exception(f'Initialization failure: {e}')
        return 102

    # Create a new report
    try:
        report_name = report_name if report_name else cloud_provider.get_report_name()
        report = ScoutReport(cloud_provider.provider_code,
                             report_name,
                             report_dir,
                             timestamp,
                             result_format=result_format)

        if database_name:
            database_file, _ = get_filename('RESULTS',
                                            report_name,
                                            report_dir,
                                            file_extension="db")
            Server.init(database_file, host_ip, host_port)
            return
    except Exception as e:
        print_exception('Report initialization failure: {}'.format(e))
        return 103

    # If listing services was requested, print them and exit
    if list_services:
        available_services = [
            x for x in dir(cloud_provider.services)
            if not (x.startswith('_') or x in ['credentials', 'fetch'])
        ]
        print_info('The available services are: "{}"'.format(
            '", "'.join(available_services)))
        return 0

    # Complete run, including pulling data from provider
    if not fetch_local:

        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions,
                                       excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130
        except Exception as e:
            print_exception(
                'Unhandled exception thrown while gathering data: {}'.format(
                    e))
            return 104

        # Update means we reload the whole config and overwrite part of it
        if update:
            try:
                print_info('Updating existing data')
                # Load previous results
                last_run_dict = report.encoder.load_from_file('RESULTS')
                # Get the list of previous services that were not updated during this run
                previous_services = [
                    prev_service
                    for prev_service in last_run_dict['service_list']
                    if prev_service not in cloud_provider.service_list
                ]
                # Add the previous services back
                for service in previous_services:
                    cloud_provider.service_list.append(service)
                    cloud_provider.services[service] = last_run_dict['services'][service]
            except Exception as e:
                print_exception('Failure while updating report: {}'.format(e))

    # Partial run, using pre-pulled data
    else:
        try:
            print_info('Using local data')
            # Reload to flatten everything into a python dictionary
            last_run_dict = report.encoder.load_from_file('RESULTS')
            for key in last_run_dict:
                setattr(cloud_provider, key, last_run_dict[key])
        except Exception as e:
            print_exception('Failure while updating report: {}'.format(e))

    # Pre processing
    try:
        print_info('Running pre-processing engine')
        cloud_provider.preprocessing(ip_ranges, ip_ranges_name_key)
    except Exception as e:
        print_exception(
            'Failure while running pre-processing engine: {}'.format(e))
        return 105

    # Analyze config
    try:
        print_info('Running rule engine')
        finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                                environment_name=cloud_provider.environment,
                                filename=ruleset,
                                ip_ranges=ip_ranges,
                                account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(finding_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while running rule engine: {}'.format(e))
        return 106

    # Create display filters
    try:
        print_info('Applying display filters')
        filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                               environment_name=cloud_provider.environment,
                               filename='filters.json',
                               rule_type='filters',
                               account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(filter_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while applying display filters: {}'.format(e))
        return 107

    # Handle exceptions
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_exception(f'Failed to load exceptions: {e}')
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    try:
        print_info('Running post-processing engine')
        run_parameters = {
            'services': services,
            'skipped_services': skipped_services,
            'regions': regions,
            'excluded_regions': excluded_regions,
        }
        cloud_provider.postprocessing(report.current_time, finding_rules,
                                      run_parameters)
    except Exception as e:
        print_exception(
            'Failure while running post-processing engine: {}'.format(e))
        return 108

    # Save config and create HTML report
    try:
        html_report_path = report.save(cloud_provider, exceptions, force_write,
                                       debug)
    except Exception as e:
        print_exception('Failure while generating HTML report: {}'.format(e))
        return 109

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
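
A small sketch of a synchronous wrapper that would surface the return codes used above (101 authentication failure, 102 provider initialization, 103 report setup, 104 data gathering, 105-108 processing stages, 109 report generation, 130 user cancellation, 200 handled errors, 0 success) as the process exit status; the run_and_exit name is hypothetical:

import asyncio
import sys

def run_and_exit(**params):
    # Hypothetical helper: drive the coroutine and exit with its return code.
    sys.exit(asyncio.run(_run(**params)))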