Example #1
 def create_html_report(self, force_write):
     contents = ''
     # Use the script corresponding to the result format
     contents += self.get_content_from_file('/%s_format.html' % self.result_format)
     # Use all scripts under html/partials/
     contents += self.get_content_from_folder('partials')
     contents += self.get_content_from_folder('partials/%s' % self.provider)
     # Use all scripts under html/summaries/
     contents += self.get_content_from_folder('summaries')
     contents += self.get_content_from_folder('summaries/%s' % self.provider)
     new_file, first_line = get_filename('REPORT', self.report_name, self.report_dir)
     print_info('Creating %s' % new_file)
     if prompt_for_overwrite(new_file, force_write):
         if os.path.exists(new_file):
             os.remove(new_file)
         with open(os.path.join(self.html_data_path, 'report.html')) as f:
             with open(new_file, 'wt') as nf:
                 for line in f:
                     newline = line
                     newline = newline.replace('<!-- CONTENTS PLACEHOLDER -->', contents)
                     newline = newline.replace('<!-- RESULTS PLACEHOLDER -->',
                                               get_filename('RESULTS',
                                                            self.report_name,
                                                            self.report_dir,
                                                            relative_path=True)[0])
                     newline = newline.replace('<!-- EXCEPTIONS PLACEHOLDER -->',
                                               get_filename('EXCEPTIONS',
                                                            self.report_name,
                                                            self.report_dir,
                                                            relative_path=True)[0])
                     newline = newline.replace('<!-- SQLITE JS PLACEHOLDER -->',
                                               '{}/sqlite.js'.format(DEFAULT_INCLUDES_DIRECTORY))
                     nf.write(newline)
     return new_file
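
The method above streams an HTML template line by line and swaps comment placeholders (contents, results path, exceptions path, sqlite.js include) for generated values. A minimal sketch of that streaming substitution technique on its own, with hypothetical file names and placeholder keys:

# Minimal sketch of streaming placeholder substitution; names are illustrative only.
def render_template(template_path, output_path, replacements):
    # 'replacements' maps placeholder strings such as '<!-- CONTENTS PLACEHOLDER -->'
    # to the text that should replace them.
    with open(template_path) as template, open(output_path, 'wt') as output:
        for line in template:
            for placeholder, value in replacements.items():
                line = line.replace(placeholder, value)
            output.write(line)

# render_template('report.html', 'my-report.html',
#                 {'<!-- CONTENTS PLACEHOLDER -->': contents})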
Example #2
    def save_to_file(self, content, file_type, force_write, debug):
        config_path, first_line = get_filename(file_type, self.report_name,
                                               self.report_dir)
        print_info('Saving data to %s' % config_path)
        try:
            with self.__open_file(config_path, force_write) as f:
                if first_line:
                    print('%s' % first_line, file=f)
                results = json.dumps(content,
                                     indent=4 if debug else None,
                                     separators=(',', ': '),
                                     sort_keys=True,
                                     cls=ScoutJsonEncoder)
                print('%s' % results, file=f)
                if file_type == 'RESULTS':
                    timestamp = datetime.datetime.now().strftime(
                        "%d-%m-%y %H:%M:%S")
                    store_custom_format(json.loads(results), config_path,
                                        self.report_name, force_write,
                                        content.account_id, timestamp)

        except AttributeError as e:
            # __open_file returned None
            pass
        except Exception as e:
            print_exception(e)
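
save_to_file serializes the in-memory configuration with a project-specific encoder (ScoutJsonEncoder) and, for RESULTS files, also stores a timestamped custom-format copy. As an illustration of the encoder pattern only, not the actual ScoutSuite class, a json.JSONEncoder subclass can fall back to an object's attributes or string form for types the default encoder rejects:

import json

class FallbackJsonEncoder(json.JSONEncoder):
    # Illustrative stand-in, not the real ScoutJsonEncoder.
    def default(self, obj):
        try:
            return vars(obj)   # Serialize arbitrary objects via their attributes...
        except TypeError:
            return str(obj)    # ...or fall back to their string form.

# json.dumps(some_object, indent=4, separators=(',', ': '),
#            sort_keys=True, cls=FallbackJsonEncoder)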
Example #3
 def create_html_report(self, force_write):
     contents = ''
     # Use all scripts under html/partials/
     contents += self.get_content_from('partials')
     contents += self.get_content_from('partials/%s' % self.provider)
     # Use all scripts under html/summaries/
     contents += self.get_content_from('summaries')
     contents += self.get_content_from('summaries/%s' % self.provider)
     new_file, first_line = get_filename(HTMLREPORT, self.profile,
                                         self.report_dir)
     printInfo('Creating %s ...' % new_file)
     if prompt_4_overwrite(new_file, force_write):
         if os.path.exists(new_file):
             os.remove(new_file)
         with open(os.path.join(self.html_data_path, self.html_root)) as f:
             with open(new_file, 'wt') as nf:
                 for line in f:
                     newline = line
                     if self.profile != 'default':
                         newline = newline.replace(
                             AWSCONFIG_FILE,
                             AWSCONFIG_FILE.replace('.js', '-%s.js' %
                                                    self.profile))
                         newline = newline.replace(
                             EXCEPTIONS_FILE,
                             EXCEPTIONS_FILE.replace(
                                 '.js', '-%s.js' % self.profile))
                     newline = newline.replace('<!-- PLACEHOLDER -->',
                                               contents)
                     nf.write(newline)
     return new_file
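
This older (Scout2-era) variant also rewrites the JavaScript include names when a non-default profile is used, so each profile gets its own data and exceptions files. The suffixing logic in isolation, as a sketch (the helper is hypothetical; the actual constant values depend on the project):

def profile_specific_filename(filename, profile):
    # e.g. 'results.js' -> 'results-myprofile.js' for a non-default profile
    # (the file name here is illustrative).
    if profile == 'default':
        return filename
    return filename.replace('.js', '-%s.js' % profile)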
Example #4
 def load_from_file(self, file_type, config_path = None, first_line = None):
     if not config_path:
         config_path, first_line = get_filename(file_type, self.profile, self.report_dir)
     with open(config_path, 'rt') as f:
         json_payload = f.readlines()
         if first_line:
             json_payload.pop(0)
         json_payload = ''.join(json_payload)
     return json.loads(json_payload)
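
load_from_file drops the first line of the saved file when get_filename reports one; this is consistent with the results being stored behind a JavaScript assignment so the HTML report can include the same file directly. A minimal round-trip sketch under that assumption (variable names and paths are illustrative):

import json

def write_js_wrapped_json(path, first_line, payload):
    # e.g. first_line = 'results =' turns the file into a loadable JS data file.
    with open(path, 'wt') as f:
        if first_line:
            print(first_line, file=f)
        print(json.dumps(payload), file=f)

def read_js_wrapped_json(path, skip_first_line):
    with open(path, 'rt') as f:
        lines = f.readlines()
    if skip_first_line:
        lines.pop(0)  # Skip the JS assignment, keep the JSON body.
    return json.loads(''.join(lines))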
Example #5
 def save_to_file(self, config, config_type, force_write, debug):
     config_path, first_line = get_filename(config_type, self.profile, self.report_dir)
     print('Saving data to %s' % config_path)
     try:
         with self.__open_file(config_path, force_write, False) as f:
             if first_line:
                 print('%s' % first_line, file=f)
             print('%s' % json.dumps(config, indent=4 if debug else None, separators=(',', ': '), sort_keys=True, cls=Scout2Encoder), file=f)
     except Exception as e:
         printException(e)
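
The only formatting switch in this Scout2-era writer is indent=4 if debug else None: debug runs produce pretty-printed JSON, normal runs a compact single line. For instance:

import json

data = {'b': 1, 'a': 2}
print(json.dumps(data, indent=None, separators=(',', ': '), sort_keys=True))
# {"a": 2,"b": 1}
print(json.dumps(data, indent=4, separators=(',', ': '), sort_keys=True))
# Same content, pretty-printed across multiple lines.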
Example #6
 def save_to_file(self, config, config_type, force_write, _debug):
     config_path, first_line = get_filename(config_type, self.report_name, self.report_dir, file_extension="db")
     print_info('Saving data to %s' % config_path)
     try:
         with self.__open_file(config_path, force_write) as database:
             result_dict = self.to_dict(config)
             for k, v in result_dict.items():
                 database[k] = v
             database.commit()
     except Exception as e:
         print_exception(e)
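
This variant persists the flattened results into a SQLite-backed key/value store rather than JSON. The dict-like interface used here (item assignment plus commit()) matches the sqlitedict package; a minimal sketch of writing and reading such a store, with an illustrative file name:

from sqlitedict import SqliteDict

results = {'provider_code': 'aws', 'services': {'s3': {'buckets_count': 3}}}

with SqliteDict('scout_results.db') as db:
    for key, value in results.items():
        db[key] = value   # Values are pickled transparently.
    db.commit()           # Persist the writes.

with SqliteDict('scout_results.db') as db:
    print(db['services']['s3']['buckets_count'])  # -> 3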
Example #7
 def save_to_file(self, content, file_type, force_write, debug):
     config_path, first_line = get_filename(file_type, self.report_name, self.report_dir)
     print_info('Saving data to %s' % config_path)
     try:
         with self.__open_file(config_path, force_write) as f:
             if first_line:
                 print('%s' % first_line, file=f)
             print('%s' % json.dumps(content, indent=4 if debug else None, separators=(',', ': '), sort_keys=True,
                                     cls=ScoutJsonEncoder), file=f)
     except AttributeError as e:
         # __open_file returned None
         pass
     except Exception as e:
         print_exception(e)
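
The AttributeError handler exists because __open_file can return None (for example when the user declines to overwrite an existing report), and the with statement then fails before anything is written; the exact exception type depends on the interpreter version (older ones raise AttributeError, newer ones TypeError). A small illustration of that control flow with a hypothetical helper:

import os

def open_or_skip(path, force_write):
    # Hypothetical stand-in for __open_file: return None to signal 'do not write'.
    if os.path.exists(path) and not force_write:
        return None
    return open(path, 'wt')

try:
    with open_or_skip('report.json', force_write=False) as f:
        f.write('{}')
except (AttributeError, TypeError):
    pass  # open_or_skip returned None; silently skip the write.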
Example #8
 def load_from_file(self, config_type, config_path=None):
     if not config_path:
         config_path, _ = get_filename(config_type, self.report_name,
                                       self.report_dir)
     return SqliteDict(config_path, autocommit=True).data
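
The SQLite counterpart of load_from_file opens the same key/value store and hands back its contents. A sketch of reading such a store into a plain dictionary with the sqlitedict package (file name is illustrative):

from sqlitedict import SqliteDict

def load_results(db_path):
    # Copy the persisted key/value pairs into an ordinary dict and close the store.
    with SqliteDict(db_path) as db:
        return dict(db)

# results = load_results('scout_results.db')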
Example #9
async def _run(provider,
               # AWS
               profile,
               aws_access_key_id,
               aws_secret_access_key,
               aws_session_token,
               # Azure
               user_account, service_account,
               cli, msi, service_principal, file_auth, tenant_id, subscription_id,
               client_id, client_secret,
               username, password,
               # GCP
               project_id, folder_id, organization_id, all_projects,
               # Aliyun
               access_key_id, access_key_secret,
               # General
               report_name, report_dir,
               timestamp,
               services, skipped_services,
               result_format,
               database_name, host_ip, host_port,
               regions,
               excluded_regions,
               fetch_local, update,
               ip_ranges, ip_ranges_name_key,
               ruleset, exceptions,
               force_write,
               debug,
               quiet,
               log_file,
               no_browser,
               programmatic_execution,
               **kwargs):
    """
    Run a scout job.
    """

    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)

    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)
    try:
        credentials = auth_strategy.authenticate(profile=profile,
                                                 aws_access_key_id=aws_access_key_id,
                                                 aws_secret_access_key=aws_secret_access_key,
                                                 aws_session_token=aws_session_token,
                                                 user_account=user_account,
                                                 service_account=service_account,
                                                 cli=cli,
                                                 msi=msi,
                                                 service_principal=service_principal,
                                                 file_auth=file_auth,
                                                 tenant_id=tenant_id,
                                                 subscription_id=subscription_id,
                                                 client_id=client_id,
                                                 client_secret=client_secret,
                                                 username=username,
                                                 password=password,
                                                 access_key_id=access_key_id,
                                                 access_key_secret=access_key_secret,
                                                 programmatic_execution=programmatic_execution)

        if not credentials:
            return 101
    except Exception as e:
        print_exception('Authentication failure: {}'.format(e))
        return 101

    # Create a cloud provider object
    cloud_provider = get_provider(provider=provider,
                                  profile=profile,
                                  project_id=project_id,
                                  folder_id=folder_id,
                                  organization_id=organization_id,
                                  all_projects=all_projects,
                                  report_dir=report_dir,
                                  timestamp=timestamp,
                                  services=services,
                                  skipped_services=skipped_services,
                                  credentials=credentials)

    # Create a new report
    report_name = report_name if report_name else cloud_provider.get_report_name()
    report = ScoutReport(cloud_provider.provider_code,
                         report_name,
                         report_dir,
                         timestamp,
                         result_format=result_format)

    if database_name:
        database_file, _ = get_filename('RESULTS', report_name, report_dir, file_extension="db")
        Server.init(database_file, host_ip, host_port)
        return

    # Complete run, including pulling data from provider
    if not fetch_local:

        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions, excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130

        # Update means we reload the whole config and overwrite part of it
        if update:
            print_info('Updating existing data')
            current_run_services = copy.deepcopy(cloud_provider.services)
            last_run_dict = report.encoder.load_from_file('RESULTS')
            cloud_provider.services = last_run_dict['services']
            for service in cloud_provider.service_list:
                cloud_provider.services[service] = current_run_services[service]

    # Partial run, using pre-pulled data
    else:
        print_info('Using local data')
        # Reload to flatten everything into a python dictionary
        last_run_dict = report.encoder.load_from_file('RESULTS')
        for key in last_run_dict:
            setattr(cloud_provider, key, last_run_dict[key])

    # Pre processing
    cloud_provider.preprocessing(
        ip_ranges, ip_ranges_name_key)

    # Analyze config
    print_info('Running rule engine')
    finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                            environment_name=cloud_provider.environment,
                            filename=ruleset,
                            ip_ranges=ip_ranges,
                            account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(finding_rules)
    processing_engine.run(cloud_provider)

    # Create display filters
    print_info('Applying display filters')
    filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                           environment_name=cloud_provider.environment,
                           rule_type='filters',
                           account_id=cloud_provider.account_id)
    processing_engine = ProcessingEngine(filter_rules)
    processing_engine.run(cloud_provider)

    # Handle exceptions
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_exception('Failed to load exceptions: {}'.format(e))
            exceptions = {}
    else:
        exceptions = {}

    run_parameters = {
        'services': services,
        'skipped_services': skipped_services,
        'regions': regions,
        'excluded_regions': excluded_regions,
    }
    # Finalize
    cloud_provider.postprocessing(report.current_time, finding_rules, run_parameters)

    # Save config and create HTML report
    html_report_path = report.save(
        cloud_provider, exceptions, force_write, debug)

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
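
_run is a coroutine, so a synchronous caller has to drive it through an event loop; its return value is the exit code (0 on success, 101 on authentication failure, 130 when cancelled, 200 when errors were handled during execution). A minimal wrapper sketch, assuming Python 3.7+ for asyncio.run and that the caller supplies every parameter _run expects:

import asyncio

def run_scout(**arguments):
    # Drive the asynchronous job from synchronous code and surface its exit code.
    # 'arguments' must provide all of _run's parameters (provider, profile, ...).
    return asyncio.run(_run(**arguments))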
Example #10
async def _run(
        provider,
        # AWS
        profile,
        aws_access_key_id,
        aws_secret_access_key,
        aws_session_token,
        # Azure
        cli,
        user_account,
        user_account_browser,
        msi,
        service_principal,
        file_auth,
        tenant_id,
        subscription_ids,
        all_subscriptions,
        client_id,
        client_secret,
        username,
        password,
        # GCP
        service_account,
        project_id,
        folder_id,
        organization_id,
        all_projects,
        # Aliyun
        access_key_id,
        access_key_secret,
        # General
        report_name,
        report_dir,
        timestamp,
        services,
        skipped_services,
        list_services,
        result_format,
        database_name,
        host_ip,
        host_port,
        regions,
        excluded_regions,
        fetch_local,
        update,
        ip_ranges,
        ip_ranges_name_key,
        ruleset,
        exceptions,
        force_write,
        debug,
        quiet,
        log_file,
        no_browser,
        programmatic_execution,
        **kwargs):
    """
    Run a scout job.
    """

    # Configure the debug level
    set_logger_configuration(debug, quiet, log_file)

    print_info('Launching Scout')

    print_info('Authenticating to cloud provider')
    auth_strategy = get_authentication_strategy(provider)

    try:
        credentials = auth_strategy.authenticate(
            profile=profile,
            aws_access_key_id=aws_access_key_id,
            aws_secret_access_key=aws_secret_access_key,
            aws_session_token=aws_session_token,
            user_account=user_account,
            user_account_browser=user_account_browser,
            service_account=service_account,
            cli=cli,
            msi=msi,
            service_principal=service_principal,
            file_auth=file_auth,
            tenant_id=tenant_id,
            client_id=client_id,
            client_secret=client_secret,
            username=username,
            password=password,
            access_key_id=access_key_id,
            access_key_secret=access_key_secret)

        if not credentials:
            return 101
    except Exception as e:
        print_exception(f'Authentication failure: {e}')
        return 101

    # Create a cloud provider object
    try:
        cloud_provider = get_provider(
            provider=provider,
            # AWS
            profile=profile,
            # Azure
            subscription_ids=subscription_ids,
            all_subscriptions=all_subscriptions,
            # GCP
            project_id=project_id,
            folder_id=folder_id,
            organization_id=organization_id,
            all_projects=all_projects,
            # Other
            report_dir=report_dir,
            timestamp=timestamp,
            services=services,
            skipped_services=skipped_services,
            programmatic_execution=programmatic_execution,
            credentials=credentials)
    except Exception as e:
        print_exception(f'Initialization failure: {e}')
        return 102

    # Create a new report
    try:
        report_name = report_name if report_name else cloud_provider.get_report_name()
        report = ScoutReport(cloud_provider.provider_code,
                             report_name,
                             report_dir,
                             timestamp,
                             result_format=result_format)

        if database_name:
            database_file, _ = get_filename('RESULTS',
                                            report_name,
                                            report_dir,
                                            file_extension="db")
            Server.init(database_file, host_ip, host_port)
            return
    except Exception as e:
        print_exception('Report initialization failure: {}'.format(e))
        return 103

    # If this command, run and exit
    if list_services:
        available_services = [
            x for x in dir(cloud_provider.services)
            if not (x.startswith('_') or x in ['credentials', 'fetch'])
        ]
        print_info('The available services are: "{}"'.format(
            '", "'.join(available_services)))
        return 0

    # Complete run, including pulling data from provider
    if not fetch_local:

        # Fetch data from provider APIs
        try:
            print_info('Gathering data from APIs')
            await cloud_provider.fetch(regions=regions,
                                       excluded_regions=excluded_regions)
        except KeyboardInterrupt:
            print_info('\nCancelled by user')
            return 130
        except Exception as e:
            print_exception(
                'Unhandled exception thrown while gathering data: {}'.format(
                    e))
            return 104

        # Update means we reload the whole config and overwrite part of it
        if update:
            try:
                print_info('Updating existing data')
                # Load previous results
                last_run_dict = report.encoder.load_from_file('RESULTS')
                # Get list of previous services which were not updated during this run
                previous_services = [
                    prev_service
                    for prev_service in last_run_dict['service_list']
                    if prev_service not in cloud_provider.service_list
                ]
                # Add previous services
                for service in previous_services:
                    cloud_provider.service_list.append(service)
                    cloud_provider.services[service] = last_run_dict[
                        'services'][service]
            except Exception as e:
                print_exception('Failure while updating report: {}'.format(e))

    # Partial run, using pre-pulled data
    else:
        try:
            print_info('Using local data')
            # Reload to flatten everything into a python dictionary
            last_run_dict = report.encoder.load_from_file('RESULTS')
            for key in last_run_dict:
                setattr(cloud_provider, key, last_run_dict[key])
        except Exception as e:
            print_exception('Failure while updating report: {}'.format(e))

    # Pre processing
    try:
        print_info('Running pre-processing engine')
        cloud_provider.preprocessing(ip_ranges, ip_ranges_name_key)
    except Exception as e:
        print_exception(
            'Failure while running pre-processing engine: {}'.format(e))
        return 105

    # Analyze config
    try:
        print_info('Running rule engine')
        finding_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                                environment_name=cloud_provider.environment,
                                filename=ruleset,
                                ip_ranges=ip_ranges,
                                account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(finding_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while running rule engine: {}'.format(e))
        return 106

    # Create display filters
    try:
        print_info('Applying display filters')
        filter_rules = Ruleset(cloud_provider=cloud_provider.provider_code,
                               environment_name=cloud_provider.environment,
                               filename='filters.json',
                               rule_type='filters',
                               account_id=cloud_provider.account_id)
        processing_engine = ProcessingEngine(filter_rules)
        processing_engine.run(cloud_provider)
    except Exception as e:
        print_exception('Failure while applying display filters: {}'.format(e))
        return 107

    # Handle exceptions
    if exceptions:
        print_info('Applying exceptions')
        try:
            exceptions = RuleExceptions(exceptions)
            exceptions.process(cloud_provider)
            exceptions = exceptions.exceptions
        except Exception as e:
            print_exception(f'Failed to load exceptions: {e}')
            exceptions = {}
    else:
        exceptions = {}

    # Finalize
    try:
        print_info('Running post-processing engine')
        run_parameters = {
            'services': services,
            'skipped_services': skipped_services,
            'regions': regions,
            'excluded_regions': excluded_regions,
        }
        cloud_provider.postprocessing(report.current_time, finding_rules,
                                      run_parameters)
    except Exception as e:
        print_exception(
            'Failure while running post-processing engine: {}'.format(e))
        return 108

    # Save config and create HTML report
    try:
        html_report_path = report.save(cloud_provider, exceptions, force_write,
                                       debug)
    except Exception as e:
        print_exception('Failure while generating HTML report: {}'.format(e))
        return 109

    # Open the report by default
    if not no_browser:
        print_info('Opening the HTML report')
        url = 'file://%s' % os.path.abspath(html_report_path)
        webbrowser.open(url, new=2)

    if ERRORS_LIST:  # errors were handled during execution
        return 200
    else:
        return 0
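
This later variant wraps each stage in its own try/except and returns a distinct exit code per failure, which makes scripted runs easier to diagnose. Summarized from the code above as a plain mapping (the dictionary itself is illustrative, not part of the source):

EXIT_CODES = {
    0: 'success',
    101: 'authentication failure',
    102: 'provider initialization failure',
    103: 'report initialization failure',
    104: 'failure while gathering data from the provider APIs',
    105: 'pre-processing engine failure',
    106: 'rule engine failure',
    107: 'display filter failure',
    108: 'post-processing engine failure',
    109: 'HTML report generation failure',
    130: 'cancelled by user',
    200: 'completed with handled errors',
}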