def test_normalize_email_address(self):
    self.assertEquals(normalize_email_address('*****@*****.**'), '*****@*****.**')
    self.assertEquals(normalize_email_address('<*****@*****.**>'), '*****@*****.**')
    self.assertEquals(normalize_email_address('<*****@*****.**>'), '*****@*****.**')
    self.assertEquals(normalize_email_address('"user name" <*****@*****.**>'), '*****@*****.**')
    self.assertEquals(normalize_email_address('user name <*****@*****.**>'), '*****@*****.**')
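# Note: the real normalize_email_address lives in saq.email and its implementation is not
# shown here. The sketch below is only a minimal illustration consistent with the assertions
# above (display names and angle brackets are stripped, leaving the bare address); the use of
# email.utils.parseaddr and the lowercasing step are assumptions, not the project's actual code.
from email.utils import parseaddr

def normalize_email_address(value):
    """Reduce '"User Name" <user@host>' style strings to a bare user@host address."""
    if not value:
        return None

    # parseaddr splits a header-style address into (display_name, addr_spec)
    _, address = parseaddr(value.strip())
    if not address:
        return None

    # assumption: downstream comparisons are case-insensitive, so lowercase here
    return address.strip().lower()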
def analyze(self, root):
    email = get_email(root)
    if not email:
        return

    if email.env_rcpt_to and email.mail_to:
        env_rcpt_to = normalize_email_address(email.env_rcpt_to)
        mail_to = normalize_email_address(email.mail_to)
        if env_rcpt_to != mail_to:
            return ProfilePoint(
                self.description,
                "recipient {} does not match mail to {}".format(env_rcpt_to, mail_to))

    return
def __init__(self, *args, **kwargs):
    super().__init__(F_EMAIL_ADDRESS, *args, **kwargs)

    # normalize email addresses
    normalized = normalize_email_address(self.value)
    if not normalized:
        logging.warning("unable to normalize email address {}".format(self.value))
    else:
        self.value = normalized
def analyze(self, root):
    email = get_email(root)
    if email is None:
        return

    in_reply_to = None
    return_path = None
    mail_from = None

    if email.headers:
        for key, value in email.headers:
            if key.lower() == 'reply-to':
                in_reply_to = normalize_email_address(value)
            elif key.lower() == 'return-path':
                return_path = normalize_email_address(value)
            elif key.lower() == 'from':
                mail_from = normalize_email_address(value)

    if not mail_from:
        return

    result = []

    if return_path is not None and mail_from != return_path:
        result.append(ProfilePoint(
            self.description,
            "mail from {} does not match return path {}".format(mail_from, return_path)))

    if in_reply_to is not None and mail_from != in_reply_to:
        result.append(ProfilePoint(
            self.description,
            "mail from {} does not match reply to {}".format(mail_from, in_reply_to)))

    return result
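# The two ProfilePoints above flag simple header-spoofing indicators: a From: address that
# disagrees with Return-Path: or Reply-To:. The email object, get_email(), and ProfilePoint
# come from the surrounding framework and are not shown here; the standalone sketch below
# reproduces just the comparison logic with the standard library. The raw message contents
# and the normalize_email_address helper are illustrative assumptions.
import email
from email.utils import parseaddr

RAW_MESSAGE = (
    "From: \"Payroll\" <payroll@example.com>\r\n"
    "Reply-To: <someone.else@example.net>\r\n"
    "Return-Path: <bounce@example.com>\r\n"
    "Subject: test\r\n"
    "\r\n"
    "body\r\n"
)

def normalize_email_address(value):
    # same behavior as the sketch shown earlier
    _, address = parseaddr(value or '')
    return address.lower() or None

msg = email.message_from_string(RAW_MESSAGE)
mail_from = normalize_email_address(msg.get('From'))

for header in ('Return-Path', 'Reply-To'):
    other = normalize_email_address(msg.get(header))
    if other is not None and mail_from != other:
        print("mail from {} does not match {} {}".format(mail_from, header.lower(), other))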
def execute_request(self, remediation):
    logging.info(f"executing remediation {remediation}")

    message_id, recipient = remediation.key.split(':', 1)
    recipient = normalize_email_address(recipient)

    logging.debug(f"got message_id {message_id} recipient {recipient} from key {remediation.key}")

    for remediator in self.remediators:
        # TODO - find a better way to test this
        if saq.UNIT_TESTING:
            remediation.successful = True
            remediation.result = 'removed'
            remediation.status = REMEDIATION_STATUS_COMPLETED
            return remediation

        if not initialize_remediator(remediator, self.errors):
            continue

        if not attempt_remediation(remediation, remediator, recipient, message_id, self.errors):
            continue

        if successful_remediation(remediation):
            return remediation

        failed_remediation(remediation, remediator, self.errors)
        continue

    # If no remediator was successful, then the result message will be
    # a list of errors for each config/remediator type.
    error_messages = '\n'.join([
        f'{section_name}: {message}'
        for section_name, message in self.errors.items()
    ])

    remediation.result = error_messages
    remediation.successful = False
    remediation.status = REMEDIATION_STATUS_COMPLETED

    logging.info(f"completed remediation request {remediation}")
    return remediation
def test_update_brocess(self):
    # make sure we update the brocess database when we can scan email
    self.reset_brocess()

    root = create_root_analysis(alert_type='mailbox')
    root.initialize_storage()
    shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                os.path.join(root.storage_dir, 'email.rfc822'))
    file_observable = root.add_observable(F_FILE, 'email.rfc822')
    file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
    root.save()
    root.schedule()

    engine = TestEngine()
    engine.enable_module('analysis_module_file_type', 'test_groups')
    engine.enable_module('analysis_module_email_analyzer', 'test_groups')
    engine.enable_module('analysis_module_email_logger', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    root.load()
    file_observable = root.get_observable(file_observable.id)

    from saq.modules.email import EmailAnalysis
    analysis = file_observable.get_analysis(EmailAnalysis)
    self.assertIsNotNone(analysis)

    # get the source and dest of the email so we can look it up in the brocess database
    from saq.email import normalize_email_address
    mail_from = normalize_email_address(analysis.mail_from)
    env_rcpt_to = normalize_email_address(analysis.env_rcpt_to[0])

    # we should see a count of 1 here
    with get_db_connection('brocess') as db:
        c = db.cursor()
        c.execute("""SELECT numconnections FROM smtplog WHERE source = %s AND destination = %s""",
                  (mail_from, env_rcpt_to))
        count = c.fetchone()
        self.assertEquals(count[0], 1)

    # and then we do it again and make sure the count increased
    root = create_root_analysis(alert_type='mailbox')
    root.initialize_storage()
    shutil.copy(os.path.join('test_data', 'emails', 'splunk_logging.email.rfc822'),
                os.path.join(root.storage_dir, 'email.rfc822'))
    file_observable = root.add_observable(F_FILE, 'email.rfc822')
    file_observable.add_directive(DIRECTIVE_ORIGINAL_EMAIL)
    root.save()
    root.schedule()

    engine = TestEngine()
    engine.enable_module('analysis_module_file_type', 'test_groups')
    engine.enable_module('analysis_module_email_analyzer', 'test_groups')
    engine.enable_module('analysis_module_email_logger', 'test_groups')
    engine.controlled_stop()
    engine.start()
    engine.wait()

    with get_db_connection('brocess') as db:
        c = db.cursor()
        c.execute("""SELECT numconnections FROM smtplog WHERE source = %s AND destination = %s""",
                  (mail_from, env_rcpt_to))
        count = c.fetchone()
        self.assertEquals(count[0], 2)
def post_smtp_analysis(self, root):
    from saq.modules.email import EmailAnalysis, SMTPStreamAnalysis, \
                                  BrotexSMTPPackageAnalysis, \
                                  KEY_ENVELOPES_MAIL_FROM, KEY_ENVELOPES_RCPT_TO

    # get the paths to the email scanning system
    #email_scanner_dir = saq.CONFIG['engine_email_scanner']['collection_dir']
    email_scanner_dir = self.collection_dir

    # create a new analysis root for each email analysis we found
    for analysis in root.all_analysis:
        if not isinstance(analysis, EmailAnalysis) or not analysis.email:
            continue

        env_mail_from = None
        env_rcpt_to = None
        connection_id = None

        # the observable for this EmailAnalysis will be a file
        email_file = analysis.observable
        if email_file.type != F_FILE:
            logging.warning("the observable for {} should be F_FILE but it is {}".format(
                            analysis, email_file.type))
        else:
            # this will be either an rfc822 file generated by the SMTPStreamAnalysis module
            # (which will have the envelope information)
            # OR it is a "broken stream" file, which does not
            stream_analysis = [a for a in root.all_analysis
                               if isinstance(a, SMTPStreamAnalysis)
                               and email_file in a.observables]

            if len(stream_analysis) > 1:
                logging.error("there should not be more than one of these")
            elif len(stream_analysis) == 1:
                stream_analysis = stream_analysis[0]
                logging.debug("detected stream analysis for {}".format(email_file))

                # get the MAIL FROM and RCPT TO from this
                if not analysis.env_mail_from:
                    if email_file.value in stream_analysis.envelopes:
                        analysis.env_mail_from = stream_analysis.envelopes[
                            email_file.value][KEY_ENVELOPES_MAIL_FROM]
                if not analysis.env_rcpt_to:
                    if email_file.value in stream_analysis.envelopes:
                        analysis.env_rcpt_to = stream_analysis.envelopes[
                            email_file.value][KEY_ENVELOPES_RCPT_TO]

                # get the original brotex package file that the stream came from
                stream_package = stream_analysis.observable

                # get the BrotexSMTPPackageAnalysis for this stream package so we can get the connection id
                package_analysis = [a for a in root.all_analysis
                                    if isinstance(a, BrotexSMTPPackageAnalysis)
                                    and stream_package in a.observables]

                if len(package_analysis) > 1:
                    logging.error("there should not be more than one of these!")
                elif len(package_analysis) == 1:
                    package_analysis = package_analysis[0]
                    connection_id = package_analysis.connection_id

            # if we could not find the stream, we will want to find the brotex smtp package so we can have the connection id
            package_analysis = [a for a in root.all_analysis
                                if isinstance(a, BrotexSMTPPackageAnalysis)
                                and email_file in a.observables]

            if len(package_analysis) > 1:
                logging.error("there should not be more than one of these!")
            elif len(package_analysis) == 1:
                package_analysis = package_analysis[0]
                connection_id = package_analysis.connection_id

        subroot = RootAnalysis()
        subroot.company_name = root.company_name
        subroot.tool = root.tool
        subroot.tool_instance = root.tool_instance
        subroot.alert_type = root.alert_type

        subroot.description = 'Brotex SMTP Stream Detection - '

        if analysis.decoded_subject:
            subroot.description += '{} '.format(analysis.decoded_subject)
        elif analysis.subject:
            subroot.description += '{} '.format(analysis.subject)
        else:
            subroot.description += '(no subject) '

        if analysis.env_mail_from:
            subroot.description += 'From {} '.format(normalize_email_address(analysis.env_mail_from))
        elif analysis.mail_from:
            subroot.description += 'From {} '.format(normalize_email_address(analysis.mail_from))

        if analysis.env_rcpt_to:
            if len(analysis.env_rcpt_to) == 1:
                subroot.description += 'To {} '.format(analysis.env_rcpt_to[0])
            else:
                subroot.description += 'To ({} recipients) '.format(len(analysis.env_rcpt_to))
        elif analysis.mail_to:
            if isinstance(analysis.mail_to, list): # XXX I think this *has* to be a list
                if len(analysis.mail_to) == 1:
                    subroot.description += 'To {} '.format(analysis.mail_to[0])
                else:
                    subroot.description += 'To ({} recipients) '.format(len(analysis.mail_to))
            else:
                subroot.description += 'To {} '.format(analysis.mail_to)

        subroot.event_time = root.event_time
        subroot.details = analysis.details
        subroot.details['connection_id'] = connection_id
        subroot.uuid = str(uuid.uuid4())

        # we use a temporary directory while we process the file
        subroot.storage_dir = os.path.join(email_scanner_dir, subroot.uuid[0:3], subroot.uuid)
        subroot.initialize_storage()

        # copy the original file
        src_path = os.path.join(root.storage_dir, analysis.observable.value)
        dest_path = os.path.join(subroot.storage_dir, analysis.observable.value)
        subroot.add_observable(F_FILE, os.path.relpath(dest_path, start=subroot.storage_dir))

        # so the EmailAnalysis that will trigger on the RFC822 file (or whatever you have)
        # will *not* have the envelope headers
        # so we do that here in the main alert
        env_mail_from = None
        if analysis.env_mail_from:
            # this is to handle this: <*****@*****.**> SIZE=80280
            # XXX assuming there can be no spaces in an email address
            env_mail_from = analysis.env_mail_from.split(' ', 1)
            env_mail_from = env_mail_from[0]

            # is this not the empty indicator?
            if env_mail_from != '<>':
                env_mail_from = normalize_email_address(env_mail_from)
                subroot.add_observable(F_EMAIL_ADDRESS, env_mail_from)

        if analysis.env_rcpt_to:
            for address in analysis.env_rcpt_to:
                address = normalize_email_address(address)
                if address:
                    subroot.add_observable(F_EMAIL_ADDRESS, address)
                    if env_mail_from:
                        subroot.add_observable(F_EMAIL_CONVERSATION,
                                               create_email_conversation(env_mail_from, address))

        try:
            subroot.save()
        except Exception as e:
            logging.error("unable to save {}: {}".format(subroot, e))
            report_exception()
            continue

        # TODO also add the stream and update any envelopment headers and stuff

        try:
            logging.debug("copying {} to {}".format(src_path, dest_path))
            shutil.copy(src_path, dest_path)
        except Exception as e:
            logging.error("unable to copy {} to {}: {}".format(src_path, dest_path, e))
            report_exception()
            continue

        # submit the path to the database of the email scanner for analysis
        try:
            submit_sql_work_item('EMAIL', subroot.storage_dir)
        except Exception as e:
            logging.error("unable to add work item: {}".format(e))
            report_exception()
            continue