Example #1
0
    def dump(self, dump_name):
        """Download attack data into a local folder and optionally sync it to S3.

        Cloudtrail logs are downloaded from S3; AWS EKS logs are exported
        from Cloudwatch (to S3) and then downloaded. Each step is gated by
        a '1'-valued flag in self.config.

        Args:
            dump_name: name of the sub-folder created under attack_data/.

        Raises:
            FileExistsError: if the dump folder already exists (a previous
                dump is never silently overwritten).
        """
        # os.path.join instead of string '+' concatenation for portability.
        folder = os.path.join("attack_data", dump_name)
        os.mkdir(folder)

        # Cloudtrail: pull raw AWSLogs objects from the configured bucket,
        # limited to the configured time window and regions.
        if self.config['dump_cloudtrail_data'] == '1':
            self.log.info("Dump Cloudtrail logs. This can take some time.")
            aws_service.download_S3_bucket(
                'AWSLogs', self.config['cloudtrail_s3_bucket'], folder,
                self.config['cloudtrail_data_from_last_x_hours'],
                self.config['cloudtrail_data_from_regions'].split(','))

        # Cloudwatch: AWS EKS logs.
        if self.config['dump_aws_eks_data'] == '1':
            self.log.info(
                "Dump AWS EKS logs from Cloudwatch. This can take some time.")
            aws_service.download_cloudwatch_logs(self.config, folder)

        # Upload every downloaded file; the S3 key is the file path with the
        # leading 'attack_data' component stripped.
        if self.config['sync_to_s3_bucket'] == '1':
            self.log.info(
                "upload attack data to S3 bucket. This can take some time")
            for file in self.getListOfFiles(folder):
                self.log.info("upload file " + file + " to S3 bucket.")
                p = pathlib.Path(file)
                new_path = str(pathlib.Path(*p.parts[1:]))
                aws_service.upload_file_s3_bucket(
                    self.config['s3_bucket_attack_data'], file, new_path)
    def store_attack_data(self, results, test_file):
        """Collect attack data from the test target via ansible and upload
        it (raw and gzipped) to S3.

        Args:
            results: results object passed through to the S3 uploads.
            test_file: test description dict; test_file['target'] names the
                instance to collect from.
        """
        target_public_ip = aws_service.get_single_instance_public_ip(
            test_file['target'], self.config)

        # Shared ansible parameters; the Windows client additionally needs
        # WinRM over plain HTTP on port 5985. Building the dict once removes
        # the former duplicated ansible_runner.run() call.
        extravars = {
            'ansible_user': '******',
            'ansible_password': self.config['win_password']
        }
        if test_file['target'] == 'attack-range-windows-client':
            extravars['ansible_port'] = 5985
            extravars['ansible_winrm_scheme'] = 'http'

        runner = ansible_runner.run(
            private_data_dir='.attack_range/',
            cmdline=str('-i ' + target_public_ip + ', '),
            roles_path="../ansible/roles",
            playbook='../ansible/playbooks/attack_data.yml',
            extravars=extravars,
            verbosity=0)

        try:
            # Upload the raw dump, then a gzipped tarball of the same file.
            aws_service.upload_file_s3_bucket('tmp/attack_data.txt', results,
                                              test_file, False)

            with tarfile.open('tmp/attack_data.tar.gz', "w:gz") as tar:
                tar.add('tmp/attack_data.txt', arcname="attack_data.txt")

            aws_service.upload_file_s3_bucket('tmp/attack_data.tar.gz', results,
                                              test_file, True)
        finally:
            # Remove the temporary artifacts even if an upload raised, so a
            # failed run does not leave stale files behind.
            if os.path.exists('tmp/attack_data.tar.gz'):
                os.remove('tmp/attack_data.tar.gz')
            if os.path.exists('tmp/attack_data.txt'):
                os.remove('tmp/attack_data.txt')

        if runner.status == "successful":
            self.log.info("successfully stored attack data in S3 bucket")
        else:
            self.log.info("failed to store attack data in S3 bucket")
    def dump_attack_data(self, dump_name):
        """Dump log data from the configured Windows servers via ansible and
        optionally sync the resulting folder to S3.

        Args:
            dump_name: name of the sub-folder created under attack_data/;
                also used as the S3 key prefix when syncing.

        Raises:
            FileExistsError: if the dump folder already exists.
        """
        # copy json from nxlog
        # copy raw data using powershell
        # copy indexes
        # packet capture with netsh
        # see https://medium.com/threat-hunters-forge/mordor-pcaps-part-1-capturing-network-packets-from-windows-endpoints-with-network-shell-e117b84ec971

        self.log.info("Dump log data")

        folder = "attack_data/" + dump_name
        os.mkdir(folder)

        # Collect the Windows machines enabled in the config.
        servers = []
        if self.config['windows_domain_controller'] == '1':
            servers.append('windows_domain_controller')
        if self.config['windows_server'] == '1':
            servers.append('windows_server')

        # Dump json and windows event logs from each Windows server. The
        # extravars dict is built once per server instead of duplicating the
        # whole ansible_runner.run() call.
        for server in servers:
            server_str = ("attack-range-" + server).replace("_", "-")
            target_public_ip = aws_service.get_single_instance_public_ip(
                server_str, self.config)

            extravars = {
                'ansible_user': '******',
                'ansible_password': self.config['win_password'],
                'hostname': server_str,
                'folder': dump_name
            }
            # NOTE(review): 'attack-range-windows-client' can never appear in
            # the servers list built above, so this branch looks unreachable
            # here; kept for parity with the other dump/store methods.
            if server_str == 'attack-range-windows-client':
                extravars['ansible_port'] = 5985
                extravars['ansible_winrm_scheme'] = 'http'

            ansible_runner.run(
                private_data_dir='.attack_range/',
                cmdline=str('-i ' + target_public_ip + ', '),
                roles_path="../ansible/roles",
                playbook='../ansible/playbooks/attack_data.yml',
                extravars=extravars,
                verbosity=0)

        # Upload every dumped file under the dump_name/ prefix.
        if self.config['sync_to_s3_bucket'] == '1':
            for file in glob.glob(folder + "/*"):
                self.log.info(
                    "upload attack data to S3 bucket. This can take some time")
                aws_service.upload_file_s3_bucket(
                    self.config['s3_bucket_attack_data'], file,
                    str(dump_name + '/' + os.path.basename(file)))
    def dump_attack_data(self, dump_name, last_sim):
        """Export the Splunk searches defined in attack_data/dumps.yml to
        local files and optionally sync them to S3.

        Args:
            dump_name: name of the sub-folder created under attack_data/;
                also used as the S3 key prefix when syncing.
            last_sim: when truthy, override each dump's time window so it
                starts at the timestamp of the last recorded simulation.

        Raises:
            FileExistsError: if the dump folder already exists.
        """
        self.log.info("Dump log data")

        folder = "attack_data/" + dump_name
        os.mkdir(os.path.join(os.path.dirname(__file__), '../' + folder))

        server_str = ("ar-splunk-" + self.config['range_name'] + "-" +
                      self.config['key_name'])
        # NOTE(review): an unrecognized cloud_provider leaves target_public_ip
        # unbound and fails below with NameError — confirm only aws/azure are
        # valid values.
        if self.config['cloud_provider'] == 'aws':
            target_public_ip = aws_service.get_single_instance_public_ip(
                server_str, self.config)
            ansible_user = '******'
            ansible_port = 5986
        elif self.config['cloud_provider'] == 'azure':
            target_public_ip = azure_service.get_instance(
                self.config, server_str, self.log)['public_ip']
            ansible_user = '******'
            ansible_port = 5985

        with open(
                os.path.join(os.path.dirname(__file__),
                             '../attack_data/dumps.yml')) as dumps:
            for dump in yaml.full_load(dumps):
                if dump['enabled']:
                    dump_out = dump['dump_parameters']['out']
                    if last_sim:
                        # if last_sim is set, then it overrides time in dumps.yml
                        # and starts dumping from last simulation
                        with open(
                                os.path.join(
                                    os.path.dirname(__file__),
                                    "../attack_data/.%s-last-sim.tmp" %
                                    self.config['range_name']), 'r') as ls:
                            sim_ts = float(ls.readline())
                            dump['dump_parameters']['time'] = "-%ds" % int(
                                time.time() - sim_ts)
                    dump_search = "search %s earliest=%s | sort 0 _time" \
                                  % (dump['dump_parameters']['search'], dump['dump_parameters']['time'])
                    dump_info = "Dumping Splunk Search to %s " % dump_out
                    self.log.info(dump_info)
                    # Context manager instead of bare open()/close(): the
                    # original leaked the file handle if export_search raised.
                    with open(
                            os.path.join(
                                os.path.dirname(__file__),
                                "../attack_data/" + dump_name + "/" + dump_out),
                            'wb') as out:
                        splunk_sdk.export_search(
                            target_public_ip,
                            s=dump_search,
                            password=self.config['attack_range_password'],
                            out=out)
                    self.log.info("%s [Completed]" % dump_info)

        # Upload every dumped file under the dump_name/ prefix.
        if self.config['sync_to_s3_bucket'] == '1':
            for file in glob.glob(
                    os.path.join(os.path.dirname(__file__),
                                 '../' + folder + '/*')):
                self.log.info(
                    "upload attack data to S3 bucket. This can take some time")
                aws_service.upload_file_s3_bucket(
                    self.config['s3_bucket_attack_data'], file,
                    str(dump_name + '/' + os.path.basename(file)), self.config)