Code example #1
    def test_ruleset_class(self):
        test001 = Ruleset(filename=self.test_ruleset_001)
        assert (os.path.isdir(test001.rules_data_path))
        assert (os.path.isfile(test001.filename))
        assert (test001.name == "test-ruleset")
        assert (test001.about == "regression test")

        test_file_key = 'iam-password-policy-no-expiration.json'
        assert (test_file_key in test001.rules)
        assert (type(test001.rules[test_file_key]) == list)
        assert (type(test001.rules[test_file_key][0]) == Rule)
        assert (hasattr(test001.rules[test_file_key][0], 'path'))
        for rule in test001.rules:
            printDebug(test001.rules[rule][0].to_string())

        assert (test_file_key in test001.rule_definitions)
        assert (test001.rule_definitions[test_file_key].description ==
                "Password expiration disabled")
        for rule_def in test001.rule_definitions:
            printDebug(str(test001.rule_definitions[rule_def]))

        test002 = Ruleset(filename=self.test_ruleset_002)
        for rule in test002.rules:
            printDebug(test002.rules[rule][0].to_string())
        test005 = Ruleset(filename=self.test_ruleset_001,
                          ruleset_generator=True)
Code example #2
    def test_file_search(self, prompt_yes_no):
        prompt_yes_no.return_value = False

        target = Ruleset(filename=None)
        assert (prompt_yes_no.call_count == 0)
        assert (os.path.samefile(
            target.filename,
            os.path.join(target.rules_data_path, './rulesets/default.json')))

        target = Ruleset(filename=None, environment_name="notexist")
        assert (prompt_yes_no.call_count == 0)
        assert (os.path.samefile(
            target.filename,
            os.path.join(target.rules_data_path, './rulesets/default.json')))

        target = Ruleset(filename=None, environment_name="sample")
        assert (prompt_yes_no.call_count == 1)
        assert (os.path.samefile(
            target.filename,
            os.path.join(target.rules_data_path, './rulesets/default.json')))

        prompt_yes_no.reset_mock()
        prompt_yes_no.return_value = True

        target = Ruleset(filename=None, environment_name="sample")
        assert (prompt_yes_no.call_count == 1)
        assert (os.path.samefile(
            target.filename,
            os.path.join(target.rules_data_path,
                         './rulesets/ruleset-sample.json')))
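Code example #2 receives a `prompt_yes_no` argument in its signature, which means the listing has dropped the patch decorator that injects the mock. Below is a minimal sketch, assuming `unittest.mock` and a prompt helper imported into the Ruleset module; the dotted patch target and import path are assumptions and would need to match the real ScoutSuite layout. The same scaffolding is implied by the `printError` and `patched` arguments in code examples #3, #10 and #11.

    # Hypothetical scaffolding for the test above: patch the yes/no prompt so
    # Ruleset never blocks on user input. Both dotted paths below are guesses
    # and must be adjusted to wherever Ruleset and its prompt helper live.
    from unittest import mock

    from ScoutSuite.core.ruleset import Ruleset  # assumed module path

    class TestRulesetFileSearch:
        @mock.patch('ScoutSuite.core.ruleset.prompt_4_yes_no')  # assumed target
        def test_file_search(self, prompt_yes_no):
            prompt_yes_no.return_value = False
            target = Ruleset(filename=None)
            assert prompt_yes_no.call_count == 0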
Code example #3
    def test_ruleset_class(self, printError):
        test001 = Ruleset(filename=self.test_ruleset_001)
        assert (os.path.isdir(test001.rules_data_path))
        assert (os.path.isfile(test001.filename))
        assert (test001.name == "test-ruleset")
        assert (test001.about == "regression test")

        test_file_key = 'iam-password-policy-no-expiration.json'
        assert (test_file_key in test001.rules)
        assert (type(test001.rules[test_file_key]) == list)
        assert (type(test001.rules[test_file_key][0]) == Rule)
        assert (hasattr(test001.rules[test_file_key][0], 'path'))
        for rule in test001.rules:
            print_debug(test001.rules[rule][0].to_string())

        assert (test_file_key in test001.rule_definitions)
        assert (test001.rule_definitions[test_file_key].description ==
                "Password expiration disabled")
        for rule_def in test001.rule_definitions:
            print_debug(str(test001.rule_definitions[rule_def]))
        assert (printError.call_count == 0)

        test002 = Ruleset(filename=self.test_ruleset_002)
        for rule in test002.rules:
            print_debug(test002.rules[rule][0].to_string())
        assert (printError.call_count == 1)  # is this expected ??
        assert ("test-ruleset-absolute-path.json does not exist."
                in printError.call_args_list[0][0][0])

        test005 = Ruleset(filename=self.test_ruleset_001,
                          ruleset_generator=True)
Code example #4
    def test_path_for_cloud_providers(self):
        target = Ruleset(cloud_provider='aws', filename=self.test_ruleset_001)
        assert (os.path.samefile(target.rules_data_path, './ScoutSuite/providers/aws/rules'))

        target = Ruleset(cloud_provider='azure', filename=self.test_ruleset_001)
        assert (os.path.samefile(target.rules_data_path, './ScoutSuite/providers/azure/rules'))

        target = Ruleset(cloud_provider='gcp', filename=self.test_ruleset_001)
        assert (os.path.samefile(target.rules_data_path, './ScoutSuite/providers/gcp/rules'))
Code example #5
    def test_path_search_default(self):
        target = Ruleset(filename=None)
        norms = os.path.normpath(
            os.path.join(self.test_dir,
                         '../ScoutSuite/core/data/rulesets/default.json'))
        # assert (os.path.normpath(target.filename) == norms)

        assert (os.path.exists("ruleset-notexist.json") == False)
        target = Ruleset(filename=None, environment_name="notexist")
        norms = os.path.normpath(
            os.path.join(self.test_dir,
                         '../ScoutSuite/core/data/rulesets/default.json'))
Code example #6
    def test_path_for_ruletypes(self):
        rpath = "./ScoutSuite/providers/aws/rules/"

        target = Ruleset(cloud_provider='aws', filename='default.json')
        assert (os.path.samefile(target.filename, rpath + 'rulesets/default.json'))
        target = Ruleset(cloud_provider='aws', filename='default')
        assert (os.path.samefile(target.filename, rpath + 'rulesets/default.json'))

        target = Ruleset(cloud_provider='aws', filename='filters.json')
        assert (os.path.samefile(target.filename, rpath + 'rulesets/filters.json'))

        target = Ruleset(cloud_provider='aws', filename='filters')
        assert (os.path.samefile(target.filename, rpath + 'rulesets/filters.json'))
Code example #7
 def test_ruleset_file_not_exist(self, printError):
     test003 = Ruleset(cloud_provider='aws',
                       filename='tests/data/no-such-file.json')
     assert (test003.rules == [])
     assert (printError.call_count == 1)
     assert ("no-such-file.json does not exist"
             in printError.call_args_list[0][0][0])
Code example #8
 def test_ruleset_invalid(self, printError):
     test004 = Ruleset(cloud_provider='aws',
                       filename='tests/data/invalid-file.json')
     assert (test004.rules == [])
     assert (printError.call_count == 1)
     assert ("invalid-file.json contains malformed JSON"
             in printError.call_args_list[0][0][0])
Code example #9
    def _generate_ruleset(self, rule_file_name, rule):
        test_ruleset = {'rules': {}, 'about': 'regression test'}
        test_ruleset['rules'][rule_file_name] = [rule]

        with tempfile.NamedTemporaryFile('wt', delete=False) as f:
            f.write(json.dumps(test_ruleset, indent=4))

        return Ruleset(cloud_provider='aws', filename=f.name)
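The `_generate_ruleset` helper in code example #9 writes a one-rule ruleset to a temporary file and loads it back through `Ruleset`. A hedged usage sketch follows; the rule fields are assumptions modelled on the entries other examples read back (a description plus an `enabled` flag), not a confirmed schema, and the call is meant to run inside the same test class.

    # Hypothetical call site for _generate_ruleset. The rule dictionary's
    # 'description', 'level' and 'enabled' keys are assumptions based on how
    # code examples #1 and #13 consume rules; only the file-name key is taken
    # verbatim from the other examples.
    rule_file = 'iam-password-policy-no-expiration.json'
    rule = {
        'description': 'Password expiration disabled',
        'level': 'warning',
        'enabled': True,
    }
    generated = self._generate_ruleset(rule_file, rule)
    assert rule_file in generated.rules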
Code example #10
    def test_path_search_withenv_prompt_yes(self, patched):
        with open("ruleset-special.json", "w") as f:
            f.write(".")

        target = Ruleset(filename=None, environment_name="special")
        norms = os.path.abspath('./ruleset-special.json')
        # assert (os.path.normpath(target.filename) == norms)

        os.unlink("ruleset-special.json")
Code example #11
    def test_path_search_withenv_prompt_no(self, patched):
        with open("ruleset-special.json", "w") as f:
            f.write(".")

        target = Ruleset(filename=None, environment_name="special")
        norms = os.path.normpath(
            os.path.join(self.test_dir,
                         '../ScoutSuite/core/data/rulesets/default.json'))
        # assert (os.path.normpath(target.filename) == norms)

        os.unlink("ruleset-special.json")
Code example #12
 def test_ruleset_class(self):
     test001 = Ruleset(filename=self.test_ruleset_001)
     assert ('iam-password-policy-no-expiration.json' in test001.rules)
     assert (type(
         test001.rules['iam-password-policy-no-expiration.json']) == list)
     assert (type(
         test001.rules['iam-password-policy-no-expiration.json'][0]) == Rule)
     assert (hasattr(
         test001.rules['iam-password-policy-no-expiration.json'][0],
         'path'))
     for rule in test001.rules:
         printDebug(test001.rules[rule][0].to_string())
     test002 = Ruleset(filename=self.test_ruleset_002)
     for rule in test002.rules:
         printDebug(test002.rules[rule][0].to_string())
     test003 = Ruleset(filename='tests/data/no-such-file.json')
     assert (test003.rules == [])
     test004 = Ruleset(filename='tests/data/invalid-file.json')
     test005 = Ruleset(filename=self.test_ruleset_001,
                       ruleset_generator=True)
Code example #13
    def test_all_finding_rules(self):
        test_dir = os.path.dirname(os.path.realpath(__file__))
        test_ruleset_file_name = os.path.join(test_dir, 'data/ruleset-test.json')

        #FIXME this is only for AWS
        with open(os.path.join(test_dir, '../ScoutSuite/providers/aws/rules/rulesets/default.json'), 'rt') as f:
            ruleset = json.load(f)

        rule_counters = {'found': 0, 'tested': 0, 'verified': 0}
        for file_name in ruleset['rules']:
            rule_counters['found'] += 1
            test_config_file_name = os.path.join(test_dir, 'data/rule-configs/%s' % file_name)
            if not os.path.isfile(test_config_file_name):
                continue
            rule_counters['tested'] += 1
            test_ruleset = {'rules': {}, 'about': 'regression test'}
            test_ruleset['rules'][file_name] = []
            rule = ruleset['rules'][file_name][0]
            rule['enabled'] = True
            test_ruleset['rules'][file_name].append(rule)
            with open(test_ruleset_file_name, 'wt') as f:
                f.write(json.dumps(test_ruleset, indent=4))
            #            printError('Ruleset ::')
            #            printError(str(test_ruleset))
            rules = Ruleset(filename=test_ruleset_file_name)
            pe = ProcessingEngine(rules)
            with open(test_config_file_name, 'rt') as f:
                dummy_provider = DummyObject()
                test_config_dict = json.load(f)
                for key in test_config_dict:
                    setattr(dummy_provider, key, test_config_dict[key])
            pe.run(dummy_provider)
            service = file_name.split('-')[0]
            findings = dummy_provider.services[service]['findings']
            findings = findings[list(findings.keys())[0]]['items']
            test_result_file_name = os.path.join(test_dir, 'data/rule-results/%s' % file_name)
            if not os.path.isfile(test_result_file_name):
                printError('Expected findings:: ')
                printError(json.dumps(findings, indent=4))
                continue
            rule_counters['verified'] += 1
            with open(test_result_file_name, 'rt') as f:
                items = json.load(f)
            try:
                assert (set(sorted(findings)) == set(sorted(items)))
            except Exception as e:
                printError('Expected items:\n %s' % json.dumps(sorted(items)))
                printError('Reported items:\n %s' % json.dumps(sorted(findings)))
                assert (False)
        printError('Existing  rules: %d' % rule_counters['found'])
        printError('Processed rules: %d' % rule_counters['tested'])
        printError('Verified  rules: %d' % rule_counters['verified'])
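Code example #13 runs the rule engine against a `DummyObject` stand-in for a cloud provider: the test loads a JSON fixture and attaches each top-level key with `setattr`, then reads findings back out of `dummy_provider.services`. A minimal sketch of such a placeholder, assuming nothing beyond dynamic attribute assignment, would be:

    # Minimal placeholder compatible with the usage above: every attribute
    # ('services', account metadata, ...) is attached dynamically from the
    # JSON test fixture, so an empty class is sufficient. The real helper in
    # the ScoutSuite test suite may carry additional behaviour.
    class DummyObject(object):
        pass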
Code example #14
def main():

    # Parse arguments
    parser = RulesArgumentParser()
    args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Check version of opinel
    if not check_requirements(os.path.realpath(__file__)):
        return 42

    # Load ruleset
    ruleset = Ruleset(filename=args.base_ruleset,
                      name=args.ruleset_name,
                      rules_dir=args.rules_dir,
                      ruleset_generator=True)

    # Generate the HTML generator
    ruleset_generator = RulesetGenerator(args.ruleset_name, args.generator_dir)

    #FIXME this is broken in Scout Suite
    # Create a cloud provider object
    cloud_provider = get_provider(provider='aws', profile='default')

    ruleset.ruleset_generator_metadata = Scout2Config('default', None, None,
                                                      [], []).metadata

    ruleset_generator_path = ruleset_generator.save(ruleset, args.force_write,
                                                    args.debug)

    # Open the HTML ruleset generator in a browser
    if not args.no_browser:
        printInfo('Starting the HTML ruleset generator...')
        url = 'file://%s' % os.path.abspath(ruleset_generator_path)
        webbrowser.open(url, new=2)
Code example #15
File: api_run.py  Project: guardicode/ScoutSuite
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id,
               aws_secret_access_key,
               aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth,
               tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               timestamp,
               services, skipped_services,
               max_workers,
               regions,
               excluded_regions,
               fetch_local, update,
               max_rate,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug,
               quiet,
               no_browser,
               programmatic_execution,
               **kwargs):
    """
    Run a scout job.
    """

    auth_strategy = get_authentication_strategy(provider)
    try:
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=None,
                                                 access_key_secret=None,
                                                 programmatic_execution=programmatic_execution)

        if not credentials:
            return {'error': "Credentials failed"}
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return {'error': f"Exception {e}"}

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=None,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report
    report_name = cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         './',
                         timestamp,
                         result_format='json')

    # Complete run, including pulling data from provider
    if not fetch_local:
        # Fetch data from provider APIs
        try:
            # Gathering data from APIs
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            # Cancelled by user
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            # Updating existing data
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Using local data
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(
        ip_ranges, ip_ranges_name_key)

    # Analyze config
    # Running rule engine
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    # Applying display filters
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if exceptions:
        # Applying exceptions
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
        except Exception as e:
            print_exception('Failed to load exceptions: {}'.format(e))

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }
    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    cloud_provider.credentials = None
    return cloud_provider
Code example #16
 def test_ruleset_invalid(self):
     test004 = Ruleset(filename='tests/data/invalid-file.json')
     assert (test004.rules == [])
Code example #17
 def test_find_file(self):
     test101 = Ruleset().find_file(self.test_ruleset_001)
     test102 = Ruleset().find_file('default')
Code example #18
 def test_search_ruleset(self):
     test201 = Ruleset(cloud_provider='aws').search_ruleset('test',
                                                            no_prompt=True)
Code example #19
 def test_find_file(self):
     test101 = Ruleset(cloud_provider='aws').find_file(
         self.test_ruleset_001)
     test102 = Ruleset(cloud_provider='aws').find_file('default')
Code example #20
 def test_search_ruleset(self):
     test201 = Ruleset().search_ruleset('test', no_prompt=True)
Code example #21
def main(passed_args=None):
    """
    Main method that runs a scan

    :return:
    """

    # FIXME check that all requirements are installed
    # # Check version of opinel
    # requirements_file_path = '%s/requirements.txt' % os.path.dirname(sys.modules['__main__'].__file__)
    # if not check_requirements(requirements_file_path):
    #     return 42

    # Parse arguments
    parser = ScoutSuiteArgumentParser()

    if passed_args:
        args = parser.parse_args(passed_args)
    else:
        args = parser.parse_args()

    # Configure the debug level
    configPrintException(args.debug)

    # Create a cloud provider object
    cloud_provider = get_provider(provider=args.provider,
                                  profile=args.profile[0],
                                  project_id=args.project_id,
                                  folder_id=args.folder_id,
                                  organization_id=args.organization_id,
                                  report_dir=args.report_dir,
                                  timestamp=args.timestamp,
                                  services=args.services,
                                  skipped_services=args.skipped_services,
                                  thread_config=args.thread_config)

    if cloud_provider.provider_code == 'aws':
        if args.profile:
            report_file_name = 'aws-%s' % args.profile[0]
        else:
            report_file_name = 'aws'
    if cloud_provider.provider_code == 'gcp':
        if args.project_id:
            report_file_name = 'gcp-%s' % args.project_id
        elif args.organization_id:
            report_file_name = 'gcp-%s' % args.organization_id
        elif args.folder_id:
            report_file_name = 'gcp-%s' % args.folder_id
        else:
            report_file_name = 'gcp'
    if cloud_provider.provider_code == 'azure':
        report_file_name = 'azure'

    # Create a new report
    report = Scout2Report(args.provider, report_file_name, args.report_dir, args.timestamp)

    # Complete run, including pulling data from provider
    if not args.fetch_local:
        # Authenticate to the cloud provider
        authenticated = cloud_provider.authenticate(profile=args.profile[0],
                                                    csv_credentials=args.csv_credentials,
                                                    mfa_serial=args.mfa_serial,
                                                    mfa_code=args.mfa_code,
                                                    key_file=args.key_file,
                                                    user_account=args.user_account,
                                                    service_account=args.service_account,
                                                    azure_cli=args.azure_cli,
                                                    azure_msi=args.azure_msi,
                                                    azure_service_principal=args.azure_service_principal,
                                                    azure_file_auth=args.azure_file_auth,
                                                    azure_user_credentials=args.azure_user_credentials)

        if not authenticated:
            return 42

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.regions)
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.update:
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.ip_ranges, args.ip_ranges_name_key)

    # Analyze config
    finding_rules = Ruleset(environment_name=args.profile[0],
                            cloud_provider=args.provider,
                            filename=args.ruleset,
                            ip_ranges=args.ip_ranges,
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    filter_rules = Ruleset(cloud_provider=args.provider,
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    try:
        exceptions = RuleExceptions(args.profile[0], args.exceptions[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception as e:
        printDebug('Warning, failed to load exceptions. The file may not exist or may have an invalid format.')
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO this is AWS-specific - move to postprocessing?
    # Get organization data if it exists
    try:
        profile = AWSProfiles.get(args.profile[0])[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                                                     profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception as e:
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions, args.force_write, args.debug)

    # Open the report by default
    if not args.no_browser:
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
Code example #22
async def _run(
        provider,
        # AWS
        profile,
        aws_access_key_id,
        aws_secret_access_key,
        aws_session_token,
        # Azure
        cli,
        user_account,
        user_account_browser,
        msi,
        service_principal,
        file_auth,
        tenant_id,
        subscription_ids,
        all_subscriptions,
        client_id,
        client_secret,
        username,
        password,
        # GCP
        service_account,
        project_id,
        folder_id,
        organization_id,
        all_projects,
        # Aliyun
        access_key_id,
        access_key_secret,
        # General
        report_name,
        report_dir,
        timestamp,
        services,
        skipped_services,
        list_services,
        result_format,
        database_name,
        host_ip,
        host_port,
        regions,
        excluded_regions,
        fetch_local,
        update,
        ip_ranges,
        ip_ranges_name_key,
        ruleset,
        exceptions,
        force_write,
        debug,
        quiet,
        log_file,
        no_browser,
        programmatic_execution,
        **kwargs):
    """
    Run a scout job.
    """

    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)

    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)

    try:
        credentials = auth_strategy.authenticate(
            profile=profile,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
            user_account=user_account,
            user_account_browser=user_account_browser,
            service_account=service_account,
            cli=cli,
            msi=msi,
            service_principal=service_principal,
            file_auth=file_auth,
            tenant_id=tenant_id,
            client_id=client_id,
            client_secret=client_secret,
            username=username,
            password=password,
            access_key_id=access_key_id,
            access_key_secret=access_key_secret)

        if not credentials:
            return 101
    except Exception as e:
        print_exception(f'Authentication failure: {e}')
        return 101

    # Create a cloud provider object
    try:
        cloud_provider = get_provider(
            provider=provider,
            # AWS
            profile=profile,
            # Azure
            subscription_ids=subscription_ids,
            all_subscriptions=all_subscriptions,
            # GCP
            project_id=project_id,
            folder_id=folder_id,
            organization_id=organization_id,
            all_projects=all_projects,
            # Other
            report_dir=report_dir,
            timestamp=timestamp,
            services=services,
            skipped_services=skipped_services,
            programmatic_execution=programmatic_execution,
            credentials=credentials)
    except Exception as e:
        print_exception(f'Initialization failure: {e}')
        return 102

    # Create a new report
    try:
        report_name = report_name if report_name else cloud_provider.get_report_name()
        report = ScoutReport(cloud_provider.provider_code,
                             report_name,
                             report_dir,
                             timestamp,
                             result_format=result_format)

        if database_name:
            database_file, _ = get_filename('RESULTS',
                                            report_name,
                                            report_dir,
                                            file_extension="db")
            Server.init(database_file, host_ip, host_port)
            return
    except Exception as e:
        print_exception('Report initialization failure: {}'.format(e))
        return 103

    # If this command, run and exit
    if list_services:
        available_services = [
            x for x in dir(cloud_provider.services)
            if not (x.startswith('_') or x in ['credentials', 'fetch'])
        ]
        print_info('The available services are: "{}"'.format(
            '", "'.join(available_services)))
        return 0

    # Complete run, including pulling data from provider
    if not fetch_local:

        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions,
                                       excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130
        except Exception as e:
            print_exception(
                'Unhandled exception thrown while gathering data: {}'.format(
                    e))
            return 104

        # Update means we reload the whole config and overwrite part of it
        if update:
            try:
                print_info('Updating existing data')
                #Load previous results
                last_run_dict = report.encoder.load_from_file('RESULTS')
                #Get list of previous services which were not updated during this run
                previous_services = [
                    prev_service
                    for prev_service in last_run_dict['service_list']
                    if prev_service not in cloud_provider.service_list
                ]
                #Add previous services
                for service in previous_services:
                    cloud_provider.service_list.append(service)
                    cloud_provider.services[service] = last_run_dict[
                        'services'][service]
            except Exception as e:
                print_exception('Failure while updating report: {}'.format(e))

    # Partial run, using pre-pulled data
    else:
        try:
            print_info('Using local data')
            # Reload to flatten everything into a python dictionary
            last_run_dict = report.encoder.load_from_file('RESULTS')
            for key in last_run_dict:
                setattr(cloud_provider, key, last_run_dict[key])
        except Exception as e:
            print_exception('Failure while updating report: {}'.format(e))

    # Pre processing
    try:
        print_info('Running pre-processing engine')
        cloud_provider.preprocessing(ip_ranges, ip_ranges_name_key)
    except Exception as e:
        print_exception(
            'Failure while running pre-processing engine: {}'.format(e))
        return 105

    # Analyze config
    try:
        print_info('Running rule engine')
        finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                                environment_name=cloud_provider.environment,
                                filename=ruleset,
                                ip_ranges=ip_ranges,
                                account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(finding_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while running rule engine: {}'.format(e))
        return 106

    # Create display filters
    try:
        print_info('Applying display filters')
        filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                               environment_name=cloud_provider.environment,
                               filename='filters.json',
                               rule_type='filters',
                               account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(filter_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while applying display filters: {}'.format(e))
        return 107

    # Handle exceptions
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_exception(f'Failed to load exceptions: {e}')
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    try:
        print_info('Running post-processing engine')
        run_parameters = {
            'services': services,
            'skipped_services': skipped_services,
            'regions': regions,
            'excluded_regions': excluded_regions,
        }
        cloud_provider.postprocessing(report.current_time, finding_rules,
                                      run_parameters)
    except Exception as e:
        print_exception(
            'Failure while running post-processing engine: {}'.format(e))
        return 108

    # Save config and create HTML report
    try:
        html_report_path = report.save(cloud_provider, exceptions, force_write,
                                       debug)
    except Exception as e:
        print_exception('Failure while generating HTML report: {}'.format(e))
        return 109

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
Code example #23
async def run_scan(args):
    # Configure the debug level
    set_config_debug_level(args.get('debug'))

    print_info('Launching Scout')

    credentials = None
    if not args.get('fetch_local'):
        auth_strategy = get_authentication_strategy(args.get('provider'))
        credentials = auth_strategy.authenticate(
            profile=args.get('profile'),
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'))

        if not credentials:
            return 401

    # Create a cloud provider object
    cloud_provider = get_provider(
        provider=args.get('provider'),
        profile=args.get('profile'),
        project_id=args.get('project_id'),
        folder_id=args.get('folder_id'),
        organization_id=args.get('organization_id'),
        all_projects=args.get('all_projects'),
        report_dir=args.get('report_dir'),
        timestamp=args.get('timestamp'),
        services=args.get('services'),
        skipped_services=args.get('skipped_services'),
        thread_config=args.get('thread_config'),
        credentials=credentials)

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO: move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.get('update'):
            print_info('Updating existing data')
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(DEFAULT_RESULT_FILE)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[
                    service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(DEFAULT_RESULT_FILE)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    if args.get('exceptions')[0]:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(args.get('profile'),
                                        args.get('exceptions')[0])
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_debug(
                'Failed to load exceptions. The file may not exist or may have an invalid format.'
            )
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
Code example #24
 def test_ruleset_file_not_exist(self):
     test003 = Ruleset(cloud_provider='aws',
                       filename='tests/data/no-such-file.json')
     assert (test003.rules == [])
Code example #25
File: __main__.py  Project: zoobinn/ScoutSuite
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id,
               aws_secret_access_key,
               aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth, tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               # Aliyun
               access_key_id, access_key_secret,
               # General
               report_name, report_dir,
               timestamp,
               services, skipped_services,
               result_format,
               database_name, host_ip, host_port,
               regions,
               excluded_regions,
               fetch_local, update,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug,
               quiet,
               log_file,
               no_browser,
               programmatic_execution,
               **kwargs):
    """
    Run a scout job.
    """

    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)

    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)
    try:
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=access_key_id,
                                                 access_key_secret=access_key_secret,
                                                 programmatic_execution=programmatic_execution)

        if not credentials:
            return 101
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return 101

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=report_dir,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report
    report_name = report_name if report_name else cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         report_dir,
                         timestamp,
                         result_format=result_format)

    if database_name:
        database_file, _ = get_filename('RESULTS', report_name, report_dir, file_extension="db")
        Server.init(database_file, host_ip, host_port)
        return

    # Complete run, including pulling data from provider
    if not fetch_local:

        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            print_info('Updating existing data')
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(
        ip_ranges, ip_ranges_name_key)

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_exception('Failed to load exceptions: {}'.format(e))
            exceptions = {}
    else:
        exceptions = {}

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }
    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    # Save config and create HTML report
    html_report_path = report.save(
        cloud_provider, exceptions, force_write, debug)

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
Code example #26
File: __main__.py  Project: trevenen/ScoutSuite
def main(args=None):
    """
    Main method that runs a scan

    :return:
    """
    if not args:
        parser = ScoutSuiteArgumentParser()
        args = parser.parse_args()

    # Convert the args namespace to a dictionary so .get() returns None instead of crashing
    args = args.__dict__

    # Configure the debug level
    configPrintException(args.get('debug'))

    # Create a cloud provider object
    cloud_provider = get_provider(
        provider=args.get('provider'),
        profile=args.get('profile'),
        project_id=args.get('project_id'),
        folder_id=args.get('folder_id'),
        organization_id=args.get('organization_id'),
        all_projects=args.get('all_projects'),
        report_dir=args.get('report_dir'),
        timestamp=args.get('timestamp'),
        services=args.get('services'),
        skipped_services=args.get('skipped_services'),
        thread_config=args.get('thread_config'),
    )

    report_file_name = generate_report_name(cloud_provider.provider_code, args)

    # TODO move this to after authentication, so that the report can be more specific to what's being scanned.
    # For example if scanning with a GCP service account, the SA email can only be known after authenticating...
    # Create a new report
    report = Scout2Report(args.get('provider'), report_file_name,
                          args.get('report_dir'), args.get('timestamp'))

    # Complete run, including pulling data from provider
    if not args.get('fetch_local'):
        # Authenticate to the cloud provider
        authenticated = cloud_provider.authenticate(
            profile=args.get('profile'),
            csv_credentials=args.get('csv_credentials'),
            mfa_serial=args.get('mfa_serial'),
            mfa_code=args.get('mfa_code'),
            user_account=args.get('user_account'),
            service_account=args.get('service_account'),
            cli=args.get('cli'),
            msi=args.get('msi'),
            service_principal=args.get('service_principal'),
            file_auth=args.get('file_auth'),
            tenant_id=args.get('tenant_id'),
            subscription_id=args.get('subscription_id'),
            client_id=args.get('client_id'),
            client_secret=args.get('client_secret'),
            username=args.get('username'),
            password=args.get('password'))

        if not authenticated:
            return 42

        # Fetch data from provider APIs
        try:
            cloud_provider.fetch(regions=args.get('regions'))
        except KeyboardInterrupt:
            printInfo('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if args.get('update'):
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[
                    service]

    # Partial run, using pre-pulled data
    else:
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.jsrw.load_from_file(AWSCONFIG)
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(args.get('ip_ranges'),
                                 args.get('ip_ranges_name_key'))

    # Analyze config
    finding_rules = Ruleset(environment_name=args.get('profile'),
                            cloud_provider=args.get('provider'),
                            filename=args.get('ruleset'),
                            ip_ranges=args.get('ip_ranges'),
                            aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    filter_rules = Ruleset(cloud_provider=args.get('provider'),
                           filename='filters.json',
                           rule_type='filters',
                           aws_account_id=cloud_provider.aws_account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    try:
        exceptions = RuleExceptions(args.get('profile'),
                                    args.get('exceptions')[0])
        exceptions.process(cloud_provider)
        exceptions = exceptions.exceptions
    except Exception as e:
        printDebug(
            'Warning, failed to load exceptions. The file may not exist or may have an invalid format.'
        )
        exceptions = {}

    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules)

    # TODO this is AWS-specific - move to postprocessing?
    # Get organization data if it exists
    try:
        profile = AWSProfiles.get(args.get('profile'))[0]
        if 'source_profile' in profile.attributes:
            organization_info_file = os.path.join(
                os.path.expanduser('~/.aws/recipes/%s/organization.json' %
                                   profile.attributes['source_profile']))
            if os.path.isfile(organization_info_file):
                with open(organization_info_file, 'rt') as f:
                    org = {}
                    accounts = json.load(f)
                    for account in accounts:
                        account_id = account.pop('Id')
                        org[account_id] = account
                    setattr(cloud_provider, 'organization', org)
    except Exception as e:
        pass

    # Save config and create HTML report
    html_report_path = report.save(cloud_provider, exceptions,
                                   args.get('force_write'), args.get('debug'))

    # Open the report by default
    if not args.get('no_browser'):
        printInfo('Opening the HTML report...')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    return 0
Code example #27
 def test_ruleset_file_not_exist(self):
     test003 = Ruleset(filename='tests/data/no-such-file.json')
     assert (test003.rules == [])
Code example #28
 def test_ruleset_invalid(self):
     test004 = Ruleset(cloud_provider='aws',
                       filename='tests/data/invalid-file.json')
     assert (test004.rules == [])