Example #1
    def test_legacy_submit(self):

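        # build a test alert with a single IPv4 observable (no-scan directive) and a tag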
        alert = ace_api.Alert(description='Test Alert')
        alert.add_observable(F_IPV4,
                             '1.2.3.4',
                             local_time(),
                             directives=[DIRECTIVE_NO_SCAN])
        alert.add_tag('test')
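        # write a small file to disk so it can be attached to the alert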
        temp_path = os.path.join(saq.TEMP_DIR, 'test.txt')
        with open(temp_path, 'w') as fp:
            fp.write('test')

        alert.add_attachment_link(temp_path, 'dest/test.txt')
        uuid = alert.submit(
            f'https://{saq.API_PREFIX}',
            ssl_verification=saq.CONFIG['SSL']['ca_chain_path'])
        self.assertTrue(validate_uuid(uuid))

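        # load the submitted alert back from storage and verify the contents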
        root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
        root.load()

        self.assertEquals(root.description, 'Test Alert')
        ipv4_observable = root.find_observable(lambda o: o.type == F_IPV4)
        self.assertIsNotNone(ipv4_observable)
        self.assertEquals(ipv4_observable.value, '1.2.3.4')
        self.assertTrue(ipv4_observable.has_directive(DIRECTIVE_NO_SCAN))

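        # the attachment should appear as an F_FILE observable containing the bytes we wrote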
        file_observable = root.find_observable(lambda o: o.type == F_FILE)
        self.assertIsNotNone(file_observable)
        self.assertEquals(file_observable.value, 'dest/test.txt')
        with open(os.path.join(root.storage_dir, file_observable.value),
                  'r') as fp:
            self.assertEquals(fp.read(), 'test')
Example #2
    def test_resubmit(self, db, c):
        # submit something so we have something to resubmit
        result = self._submit(analysis_mode=ANALYSIS_MODE_CORRELATION)
        self.assertIsNotNone(result)

        self.assertTrue('result' in result)
        result = result['result']
        self.assertIsNotNone(result['uuid'])
        uuid = result['uuid']

        # make sure this actually uploaded
        root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
        root.load()

        self.assertEquals(root.analysis_mode, ANALYSIS_MODE_CORRELATION)
        self.assertEquals(root.tool, 'unittest_tool')
        self.assertEquals(root.tool_instance, 'unittest_tool_instance')
        self.assertEquals(root.alert_type, 'unittest_type')
        self.assertEquals(root.description, 'testing')
        self.assertEquals(root.details, {'hello': 'world'})
        self.assertEquals(root.event_time, self._get_localized_submit_time())
        self.assertEquals(root.tags[0].name, 'alert_tag_1')
        self.assertEquals(root.tags[1].name, 'alert_tag_2')
        # NOTE that this is 4 instead of 2 since adding a file also adds an F_FILE observable
        self.assertEquals(len(root.all_observables), 4)

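        # the ipv4 observable should have kept its tags, directive and limited analysis settings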
        o = root.find_observable(lambda o: o.type == 'ipv4')
        self.assertIsNotNone(o)
        self.assertEquals(o.value, '1.2.3.4')
        self.assertEquals(len(o.tags), 2)
        self.assertTrue(o.has_directive('no_scan'))
        self.assertTrue('basic_test' in o.limited_analysis)

        o = root.find_observable(
            lambda o: o.type == 'file' and o.value == 'sample.dat')
        self.assertIsNotNone(o)

        with open(os.path.join(root.storage_dir, o.value), 'rb') as fp:
            self.assertEquals(fp.read(), b'Hello, world!')

        o = root.find_observable(
            lambda o: o.type == 'file' and o.value == 'submit_test.dat')
        self.assertIsNotNone(o)
        self.assertEquals(
            os.path.getsize(os.path.join(root.storage_dir, o.value)), 1024)

        # we should see a single workload entry
        c.execute(
            "SELECT id, uuid, node_id, analysis_mode FROM workload WHERE uuid = %s",
            (uuid, ))
        row = c.fetchone()
        self.assertIsNotNone(row)
        self.assertIsNotNone(row[0])
        self.assertEquals(row[1], uuid)
        self.assertIsNotNone(row[2])
        self.assertEquals(row[3], ANALYSIS_MODE_CORRELATION)

        # now resubmit the alert
        result = ace_api.resubmit_alert(uuid)
        self.assertFalse('error' in result)
Example #3
def get_file(uuid, file_uuid_or_name):
    storage_dir = storage_dir_from_uuid(uuid)
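    # fall back to the workload storage directory if the analysis is not at its default location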
    if saq.CONFIG['service_engine']['work_dir'] and not os.path.isdir(storage_dir):
        storage_dir = workload_storage_dir(uuid)

    root = RootAnalysis(storage_dir=storage_dir)
    root.load()

    # is this a UUID?
    try:
        validate_uuid(file_uuid_or_name)
        file_observable = root.get_observable(file_uuid_or_name)
        if file_observable is None:
            abort(Response("invalid file_uuid {}".format(file_uuid_or_name), 400))

    except ValueError:
        file_observable = root.find_observable(lambda o: o.type == F_FILE and o.value == file_uuid_or_name)
        if file_observable is None:
            abort(Response("invalid file name {}".format(file_uuid_or_name), 400))
        

    # NOTE we use an absolute path here because if we don't then
    # send_from_directory makes it relative to the app root path
    # which is /opt/ace/aceapi

    target_path = os.path.join(saq.SAQ_HOME, root.storage_dir, file_observable.value)
    if not os.path.exists(target_path):
        abort(Response("file path {} does not exist".format(target_path), 400))

    # XXX revisit how we save (name) files
    return send_from_directory(os.path.dirname(target_path), 
                               os.path.basename(target_path), 
                               as_attachment=True,
                               attachment_filename=os.path.basename(target_path).encode().decode('latin-1', errors='ignore'))
Example #4
    def test_mailbox_submission(self):
        from flask import url_for
        from saq.analysis import _JSONEncoder
        from saq.modules.email import EmailAnalysis

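        # event time for the submission: current local time converted to UTC in the JSON event time format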
        t = saq.LOCAL_TIMEZONE.localize(datetime.datetime.now()).astimezone(pytz.UTC).strftime(event_time_format_json_tz)
        with open(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'), 'rb') as fp:
            result = self.client.post(url_for('analysis.submit'), data={
                'analysis': json.dumps({
                    'analysis_mode': 'email',
                    'tool': 'unittest',
                    'tool_instance': 'unittest_instance',
                    'type': 'mailbox',
                    'description': 'testing',
                    'event_time': t,
                    'details': { },
                    'observables': [
                        { 'type': F_FILE, 'value': 'rfc822.email', 'time': t, 'tags': [], 'directives': [ DIRECTIVE_ORIGINAL_EMAIL ], 'limited_analysis': [] },
                    ],
                    'tags': [ ],
                }, cls=_JSONEncoder),
                'file': (fp, 'rfc822.email'),
            }, content_type='multipart/form-data')

        result = result.get_json()
        self.assertIsNotNone(result)

        self.assertTrue('result' in result)
        result = result['result']
        self.assertIsNotNone(result['uuid'])
        uuid = result['uuid']

        # make sure we don't clean up the analysis so we can check it
        saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'

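        # run the email analysis modules against the submission and wait for the engine to finish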
        engine = TestEngine(local_analysis_modes=['email'])
        engine.enable_module('analysis_module_file_type', 'email')
        engine.enable_module('analysis_module_email_analyzer', 'email')
        engine.enable_module('analysis_module_mailbox_email_analyzer', 'email')
        engine.controlled_stop()
        engine.start()
        engine.wait()

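        # the observable marked as the original email should have EmailAnalysis attached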
        root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
        root.load()
        observable = root.find_observable(lambda o: o.has_directive(DIRECTIVE_ORIGINAL_EMAIL))
        self.assertIsNotNone(observable)
        analysis = observable.get_analysis(EmailAnalysis)
        self.assertIsNotNone(analysis)

        # these should be the same
        self.assertEquals(analysis.details, root.details)
Example #5
    def test_bro_smtp_stream_submission(self):
        from flask import url_for
        from saq.analysis import _JSONEncoder
        from saq.modules.email import EmailAnalysis, BroSMTPStreamAnalysis

        t = saq.LOCAL_TIMEZONE.localize(datetime.datetime.now()).astimezone(pytz.UTC).strftime(event_time_format_json_tz)
        with open(os.path.join('test_data', 'smtp_streams', 'CBmtfvapmTMqCEUw6'), 'rb') as fp:
            result = self.client.post(url_for('analysis.submit'), data={
                'analysis': json.dumps({
                    'analysis_mode': ANALYSIS_MODE_EMAIL,
                    'tool': 'unittest',
                    'tool_instance': 'unittest_instance',
                    'type': ANALYSIS_TYPE_BRO_SMTP,
                    'description': 'BRO SMTP Scanner Detection - ',
                    'event_time': t,
                    'details': { },
                    'observables': [
                        { 'type': F_FILE, 'value': 'CBmtfvapmTMqCEUw6', 'time': t, 'tags': [], 'directives': [ DIRECTIVE_ORIGINAL_SMTP ], 'limited_analysis': [] },
                    ],
                    'tags': [ ],
                }, cls=_JSONEncoder),
                'file': (fp, 'CBmtfvapmTMqCEUw6'),
            }, content_type='multipart/form-data')

        result = result.get_json()
        self.assertIsNotNone(result)

        self.assertTrue('result' in result)
        result = result['result']
        self.assertIsNotNone(result['uuid'])
        uuid = result['uuid']

        # make sure we don't clean up the analysis so we can check it
        saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'

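        # run the email analysis modules, including the bro smtp stream analyzer, and wait for the engine to finish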
        engine = TestEngine(local_analysis_modes=[ANALYSIS_MODE_EMAIL])
        engine.enable_module('analysis_module_file_type', 'email')
        engine.enable_module('analysis_module_email_analyzer', 'email')
        engine.enable_module('analysis_module_bro_smtp_analyzer', 'email')
        engine.controlled_stop()
        engine.start()
        engine.wait()

        root = RootAnalysis(storage_dir=storage_dir_from_uuid(uuid))
        root.load()
        observable = root.find_observable(lambda o: o.has_directive(DIRECTIVE_ORIGINAL_SMTP))
        self.assertIsNotNone(observable)
        analysis = observable.get_analysis(BroSMTPStreamAnalysis)
        self.assertIsNotNone(analysis)
Example #6
    def test_complete_processing(self):
        from saq.modules.email import BroSMTPStreamAnalysis

        # disable cleanup so we can check the results after
        saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'

        self.process_pcap(os.path.join(saq.SAQ_HOME, 'test_data', 'pcaps', 'smtp.pcap'))

        self.start_api_server()

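        # run an engine with the bro smtp analyzer and a collector to pick up the recorded smtp stream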
        engine = TestEngine()
        engine.enable_module('analysis_module_bro_smtp_analyzer', 'email')
        engine.start()

        collector = BroSMTPStreamCollector()
        collector.load_groups()
        collector.start()

        # look for all the expected log entries
        wait_for_log_count('found smtp stream', 1, 5)
        wait_for_log_count('copied file from', 1, 5)
        wait_for_log_count('scheduled BRO SMTP Scanner Detection -', 1, 5)
        wait_for_log_count('completed analysis RootAnalysis', 1, 20)

        engine.controlled_stop()
        engine.wait()

        collector.stop()
        collector.wait()

        # get the uuids returned by the api calls
        r = re.compile(r' uuid ([a-f0-9-]+)')
        for result in search_log('submit remote'):
            m = r.search(result.getMessage())
            self.assertIsNotNone(m)
            uuid = m.group(1)

            with self.subTest(uuid=uuid):

                root = RootAnalysis(uuid=uuid, storage_dir=storage_dir_from_uuid(uuid))
                root.load()

                # find the SMTP stream
                file_observable = root.find_observable(lambda x: x.type == F_FILE)
                self.assertTrue(bool(file_observable))
                
                # ensure it has the required directives
                self.assertTrue(file_observable.has_directive(DIRECTIVE_ORIGINAL_SMTP))
                self.assertTrue(file_observable.has_directive(DIRECTIVE_NO_SCAN))

                # ensure the bro smtp analyzer ran on it
                smtp_analysis = file_observable.get_analysis(BroSMTPStreamAnalysis)
                self.assertIsNotNone(smtp_analysis)

                # ensure it extracted a file
                email_observable = smtp_analysis.find_observable(lambda x: x.type == F_FILE)
                self.assertTrue(bool(email_observable))

                # and then ensure that it was treated as an email
                self.assertTrue(email_observable.has_directive(DIRECTIVE_NO_SCAN))
                self.assertTrue(email_observable.has_directive(DIRECTIVE_ORIGINAL_EMAIL))
                self.assertTrue(email_observable.has_directive(DIRECTIVE_ARCHIVE))
Example #7
    def test_cloudphish_tracking(self, db, c):

        from saq.modules.email import EmailAnalysis

        saq.CONFIG['analysis_mode_email']['cleanup'] = 'no'
        self.start_api_server()

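        # build an email-mode alert containing the original email and a tracked test observable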
        root = create_root_analysis(alert_type='mailbox',
                                    analysis_mode=ANALYSIS_MODE_EMAIL)
        root.initialize_storage()
        shutil.copy(
            os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
            os.path.join(root.storage_dir, 'email.rfc822'))
        file_observable = root.add_observable(F_FILE, 'email.rfc822')
        file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
        test_observable = root.add_observable(F_TEST, 'test_detection')
        test_observable.add_directive(DIRECTIVE_TRACKED)
        root.save()
        root.schedule()

        analysis_modes = [
            ANALYSIS_MODE_EMAIL, ANALYSIS_MODE_CLOUDPHISH,
            ANALYSIS_MODE_CORRELATION
        ]
        analysis_modules = [
            'analysis_module_file_type', 'analysis_module_email_analyzer',
            'analysis_module_mailbox_email_analyzer',
            'analysis_module_cloudphish',
            'analysis_module_cloudphish_request_analyzer',
            'analysis_module_crawlphish', 'analysis_module_url_extraction',
            'analysis_module_detection'
        ]

        engine = TestEngine(local_analysis_modes=analysis_modes)
        for module in analysis_modules:
            engine.enable_module(module, analysis_modes)

        # we only enable the BasicTestAnalyzer for the cloudphish mode so that cloudphish generates an alert
        engine.enable_module('analysis_module_basic_test',
                             ANALYSIS_MODE_CLOUDPHISH)

        engine.controlled_stop()
        engine.start()
        engine.wait()

        # get the message_id observable generated by the EmailAnalysis
        root = RootAnalysis(storage_dir=storage_dir_from_uuid(root.uuid))
        root.load()

        file_observable = root.get_observable(file_observable.id)
        self.assertIsNotNone(file_observable)
        email_analysis = file_observable.get_analysis(EmailAnalysis)
        self.assertTrue(bool(email_analysis))
        message_id = email_analysis.find_observable(
            lambda o: o.type == F_MESSAGE_ID)
        self.assertIsNotNone(message_id)

        # we should have a number of cloudphish alerts now
        c.execute("SELECT uuid FROM alerts WHERE tool != 'test_tool' LIMIT 1")
        row = c.fetchone()
        target_uuid = row[0]

        root = RootAnalysis(storage_dir=storage_dir_from_uuid(target_uuid))
        root.load()

        # this cloudphish alert should have the message_id observable
        # and it should be tagged as tracked
        self.assertIsNotNone(
            root.find_observable(lambda o: o.type == F_MESSAGE_ID and o.value
                                 == message_id.value and o.has_tag('tracked')))