Exemplo n.º 1
0
    def tearDown(self):
        """Shut down the yss subprocess and its reader threads, restore the
        socket directory and reload the configuration before deferring to the
        parent tearDown.

        Termination is escalated: terminate() first, then kill() if the
        process does not exit within the wait timeout.
        """
        if self.yss_process:
            try:
                # ask the process to exit gracefully first
                self.yss_process.terminate()
                self.yss_process.wait(5)
            except Exception as e:
                # was a bare debug print; log the returncode instead
                # (None means the process is still running)
                logging.debug("yss process poll: %s", self.yss_process.poll())
                logging.error("unable to terminate yss process %s: %s", self.yss_process.pid, e)
                try:
                    # escalate to a hard kill
                    self.yss_process.kill()
                    self.yss_process.wait(5)
                except Exception as e:
                    logging.critical("unable to kill yss process %s: %s", self.yss_process.pid, e)

            # the reader threads exit once the process's pipes close,
            # so these joins should not block indefinitely
            self.yss_stdout_reader_thread.join()
            self.yss_stdout_reader_thread = None
            self.yss_stderr_reader_thread.join()
            self.yss_stderr_reader_thread = None

        # set the socket dir back
        saq.YSS_SOCKET_DIR = self.old_socket_dir

        # reset the config since we changed stuff
        saq.load_configuration()

        super().tearDown()
Exemplo n.º 2
0
    def tearDown(self):
        """Stop the yara service started by the test (if any), reload the
        configuration the test may have modified, then run the parent
        tearDown.
        """
        if self.yara_service is not None:
            self.yara_service.stop_service()

        # NOTE: the old hand-rolled yss subprocess shutdown that used to live
        # here (commented out) has been removed; yara_service.stop_service()
        # owns process lifecycle now. Recover it from VCS history if needed.

        # reset the config since we changed stuff
        saq.load_configuration()

        super().tearDown()
Exemplo n.º 3
0
 def reset_config(self):
     """Resets saq.CONFIG.

     Discards any in-memory configuration changes a test may have made by
     calling saq.load_configuration() again.
     """
     saq.load_configuration()
Exemplo n.º 4
0
 def wrapper(*args, **kwargs):
     """Call target_function, then reload the saq configuration whether the
     call returned normally or raised (the exception still propagates).
     """
     try:
         result = target_function(*args, **kwargs)
     finally:
         # always restore the configuration, even on failure
         saq.load_configuration()
     return result
Exemplo n.º 5
0
 def setUp(self):
     """Prepare the per-test environment.

     Enables traceback dumping, logs the test id, initializes the test
     environment, reloads the configuration and opens the test
     communication channel. Order matters: the environment must be
     initialized before the configuration is reloaded.
     """
     saq.DUMP_TRACEBACKS = True
     # record which test is running so log output can be correlated
     logging.info("TEST: {}".format(self.id()))
     initialize_test_environment()
     saq.load_configuration()
     open_test_comms()
Exemplo n.º 6
0
    def test_email_pivot_excessive_emails(self):
        """Verify URLEmailPivotAnalysis_v2 withholds the email details when
        the number of matching archived emails exceeds result_limit.
        """

        # process the email first -- we'll find it when we pivot

        root = create_root_analysis(uuid=str(uuid.uuid4()), alert_type='mailbox')
        root.initialize_storage()
        shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                    os.path.join(root.storage_dir, 'email.rfc822'))
        file_observable = root.add_observable(F_FILE, 'email.rfc822')
        file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
        file_observable.add_directive(DIRECTIVE_ARCHIVE)
        root.save()
        root.schedule()

        # run the full email analysis/archive pipeline so the email ends up
        # in the archive for the pivot to find
        engine = TestEngine()
        engine.enable_module('analysis_module_file_type', 'test_groups')
        engine.enable_module('analysis_module_file_hash_analyzer', 'test_groups')
        engine.enable_module('analysis_module_email_analyzer', 'test_groups')
        engine.enable_module('analysis_module_email_archiver', 'test_groups')
        engine.enable_module('analysis_module_url_extraction', 'test_groups')
        engine.controlled_stop()
        engine.start()
        engine.wait()

        saq.load_configuration()

        # force this to exceed the limit
        saq.CONFIG['analysis_module_url_email_pivot_analyzer']['result_limit'] = '0'
        root = create_root_analysis(uuid=str(uuid.uuid4()), alert_type='cloudphish')

        root.initialize_storage()

        # make up some details
        root.details = {
            'alertable': 1,
            'context': {
                'c': '1c38af75-0c42-4ae3-941d-de3975f68602',
                'd': '1',
                'i': 'ashland',
                's': 'email_scanner'
            },
            'sha256_url': '0061537d578e4f65d13e31e190e1079e00dadd808e9fa73f77e3308fdb0e1485',
            'url': 'https://www.alienvault.com', # <-- the important part
        }

        url_observable = root.add_observable(F_URL, 'https://www.alienvault.com')
        root.save()
        root.schedule()

        engine = TestEngine()
        engine.enable_module('analysis_module_url_email_pivot_analyzer', 'test_groups')
        engine.controlled_stop()
        engine.start()
        engine.wait()

        root.load()
        url_observable = root.get_observable(url_observable.id)
        from saq.modules.email import URLEmailPivotAnalysis_v2
        analysis = url_observable.get_analysis(URLEmailPivotAnalysis_v2)
        self.assertIsNotNone(analysis)
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(analysis.count, 1)
        # this should not have the details since it exceeded the limit
        self.assertIsNone(analysis.emails)
Exemplo n.º 7
0
    def test_email_pivot(self):
        """Verify URLEmailPivotAnalysis_v2 finds the archived email for a URL
        and exposes its archive entry (subject, sender, message_id, etc.).
        """

        # process the email first -- we'll find it when we pivot

        root = create_root_analysis(uuid=str(uuid.uuid4()), alert_type='mailbox')
        root.initialize_storage()
        shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                    os.path.join(root.storage_dir, 'email.rfc822'))
        file_observable = root.add_observable(F_FILE, 'email.rfc822')
        file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
        file_observable.add_directive(DIRECTIVE_ARCHIVE)
        root.save()
        root.schedule()

        # run the full email analysis/archive pipeline so the email ends up
        # in the archive for the pivot to find
        engine = TestEngine()
        engine.enable_module('analysis_module_file_type', 'test_groups')
        engine.enable_module('analysis_module_file_hash_analyzer', 'test_groups')
        engine.enable_module('analysis_module_email_analyzer', 'test_groups')
        engine.enable_module('analysis_module_email_archiver', 'test_groups')
        engine.enable_module('analysis_module_url_extraction', 'test_groups')
        engine.controlled_stop()
        engine.start()
        engine.wait()

        saq.load_configuration()

        root = create_root_analysis(uuid=str(uuid.uuid4()), alert_type='cloudphish')
        root.initialize_storage()

        # make up some details
        root.details = {
            'alertable': 1,
            'context': {
                'c': '1c38af75-0c42-4ae3-941d-de3975f68602',
                'd': '1',
                'i': 'ashland',
                's': 'email_scanner'
            },
            'sha256_url': '0061537d578e4f65d13e31e190e1079e00dadd808e9fa73f77e3308fdb0e1485',
            'url': 'https://www.alienvault.com', # <-- the important part
        }

        url_observable = root.add_observable(F_URL, 'https://www.alienvault.com')
        root.save()
        root.schedule()

        engine = TestEngine()
        engine.enable_module('analysis_module_url_email_pivot_analyzer', 'test_groups')
        engine.controlled_stop()
        engine.start()
        engine.wait()

        root.load()
        url_observable = root.get_observable(url_observable.id)
        from saq.modules.email import URLEmailPivotAnalysis_v2
        analysis = url_observable.get_analysis(URLEmailPivotAnalysis_v2)
        self.assertIsNotNone(analysis)
        # assertEquals is a deprecated alias (removed in Python 3.12)
        self.assertEqual(analysis.count, 1)
        self.assertIsNotNone(analysis.emails)
        self.assertTrue('email_archive' in analysis.emails)
        archive_id = list(analysis.emails['email_archive'].keys())[0]
        entry = analysis.emails['email_archive'][archive_id]
        self.assertEqual(int(archive_id), entry['archive_id'])
        self.assertEqual('canary #3', entry['subject'])
        self.assertEqual('*****@*****.**', entry['recipient'])
        self.assertEqual('<CANTOGZsMiMb+7aB868zXSen_fO=NS-qFTUMo9h2eHtOexY8Qhw@mail.gmail.com>', entry['message_id'])
        self.assertEqual('*****@*****.**', entry['sender'])
        self.assertEqual(len(entry['remediation_history']), 0)
        self.assertFalse(entry['remediated'])