def main():
    signal.signal(signal.SIGINT, signal_handler)
    _compass = None
    try:
        _loader = ConfigLoader(Level.INFO)
        filename = 'config.yaml'
        _config = _loader.configure(filename)
        _queue = MessageQueue(Level.INFO)
        _indicator = Indicator(Level.INFO)
        _compass = Compass(_config, _queue, _indicator, Level.INFO)
        _compass.enable()
        _counter = itertools.count()
        print(Fore.CYAN + 'wave robot in air until it beeps...' + Style.RESET_ALL)
        while True:
            _count = next(_counter)
            _heading = _compass.get_heading()
#           _log.info(Fore.CYAN + Style.BRIGHT + '{:d}: {:>5.2f}; calibrated? {}'.format(_count, _heading[1], _heading[0]))
            time.sleep(1.0)
    except KeyboardInterrupt:
        if _compass is not None:
            _compass.close()
        _log.info('done.')
        sys.exit(0)
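
# 'signal_handler' is installed above but not defined in this snippet. A
# minimal sketch of such a handler, assuming it only needs to exit cleanly
# on Ctrl-C (the project's actual handler may do more):
import signal
import sys

def signal_handler(sig, frame):
    # raise SystemExit so any cleanup in except/finally blocks can run
    print('Ctrl-C caught; exiting...')
    sys.exit(0)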
def main():
    _ifs = None
    try:
        # read YAML configuration
        _loader = ConfigLoader(Level.INFO)
        filename = 'config.yaml'
        _config = _loader.configure(filename)
        _message_factory = MessageFactory(Level.INFO)
        _queue = MessageQueue(_message_factory, Level.INFO)
        _ifs = IntegratedFrontSensor(_config, _queue, _message_factory, Level.INFO)
        _indicator = Indicator(Level.INFO)
#       _indicator.set_heading(180)
        # add indicator as message consumer
        _queue.add_consumer(_indicator)
        _ifs.enable()
        while True:
            time.sleep(1.0)
    except KeyboardInterrupt:
        print(Fore.RED + 'Ctrl-C caught; exiting...' + Style.RESET_ALL)
    except Exception as e:
        print(Fore.RED + Style.BRIGHT + 'error starting ifs: {}\n{}'.format(e, traceback.format_exc()) + Style.RESET_ALL)
    finally:
        if _ifs is not None:
            _ifs.close()
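
# The add_consumer() call above implies consumers expose a callback that the
# MessageQueue invokes for each message. A hedged sketch of a minimal
# consumer; the method name 'add' is an assumption about the queue's
# consumer interface, not taken from the project:
class LoggingConsumer:
    def add(self, message):
        # react to each message forwarded by the queue
        print('consumed message: {}'.format(message))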
def main():
    _motors = None
    try:
#       signal.signal(signal.SIGINT, signal_handler)
        _wheel_circumference_mm = 218
        _forward_steps_per_rotation = 494
        _forward_steps = get_forward_steps()
        _loader = ConfigLoader(Level.INFO)
        filename = 'config.yaml'
        _config = _loader.configure(filename)
        _queue = MessageQueue(Level.INFO)
        _indicator = Indicator(Level.INFO)
        _compass = Compass(_config, _queue, _indicator, Level.INFO)
        _compass.enable()
        print(Fore.CYAN + Style.BRIGHT + 'wave robot in the air until calibrated.' + Style.RESET_ALL)
        while not _compass.is_calibrated():
            time.sleep(0.33)
        print(Fore.CYAN + Style.BRIGHT + 'CALIBRATED.' + Style.RESET_ALL)
        time.sleep(5.0)
        _motors = Motors(_config, None, None, Level.INFO)
#       _port_motor = _motors.get_motor(Orientation.PORT)
#       _stbd_motor = _motors.get_motor(Orientation.STBD)
        _heading = 0.0 # due north
        _spin(_motors, Rotation.CLOCKWISE, lambda: not has_heading(_compass, _heading))
        print(Fore.CYAN + Style.BRIGHT + 'test complete.' + Style.RESET_ALL)
    except KeyboardInterrupt:
        # halt via the Motors object; the individual motor references above are commented out
        if _motors is not None:
            _motors.halt()
        quit()
    except Exception as e:
        print(Fore.RED + Style.BRIGHT + 'error in PID controller: {}'.format(e) + Style.RESET_ALL)
        traceback.print_exc(file=sys.stdout)
    finally:
        print(Fore.YELLOW + Style.BRIGHT + 'C. finally.' + Style.RESET_ALL)
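
# 'get_forward_steps()', '_spin()' and 'has_heading()' are referenced above
# but not defined in this snippet. A hedged sketch of 'has_heading', assuming
# the compass returns a (calibrated, degrees) tuple as in the first test and
# that "has the heading" means being within a small angular tolerance:
def has_heading(compass, target_degrees, tolerance=5.0):
    _calibrated, _degrees = compass.get_heading()
    # smallest signed angular difference, folded into [-180, 180)
    _error = abs((_degrees - target_degrees + 180.0) % 360.0 - 180.0)
    return _error <= tolerance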
def dedup_reports(report_list, whitelist):
    """ Merge a list of BaseSandboxParser subclass objects to make a single generic report. """
    logger = logging.getLogger()
    logger.debug('Deduping sandbox report list')

    # Create the new generic report.
    dedup_report = BaseSandboxParser()

    for report in report_list:
        dedup_report.sandbox_urls += report.sandbox_urls

        if report.filename and report.filename != 'sample':
            dedup_report.filename = report.filename

        if report.original_filename:
            dedup_report.original_filename = report.original_filename
            dedup_report.indicators.append(Indicator('Windows - FileName', dedup_report.original_filename, tags=['sandboxed_sample']))

        if report.md5:
            dedup_report.md5 = report.md5
            dedup_report.indicators.append(Indicator('Hash - MD5', dedup_report.md5, tags=['sandboxed_sample']))

        if report.sha1:
            dedup_report.sha1 = report.sha1
            dedup_report.indicators.append(Indicator('Hash - SHA1', dedup_report.sha1, tags=['sandboxed_sample']))

        if report.sha256:
            dedup_report.sha256 = report.sha256
            dedup_report.indicators.append(Indicator('Hash - SHA256', dedup_report.sha256, tags=['sandboxed_sample']))

        if report.sha512:
            dedup_report.sha512 = report.sha512

        if report.ssdeep:
            dedup_report.ssdeep = report.ssdeep
            dedup_report.indicators.append(Indicator('Hash - SSDEEP', dedup_report.ssdeep, tags=['sandboxed_sample']))

        dedup_report.malware_family += report.malware_family

        # Dedup the contacted hosts.
        for host in report.contacted_hosts:
            if host not in dedup_report.contacted_hosts:
                dedup_report.contacted_hosts.append(host)
                tags = ['contacted_host']
                if host['protocol'] and host['port']:
                    tags.append('{} {}'.format(host['protocol'], host['port']))
                elif host['protocol']:
                    tags.append(host['protocol'])
                # For now we consider ALL contacted hosts to be benign, so no need to check the whitelist. <- XXX UPDATE WHY ?!
                #dedup_report.indicators.append(Indicator('Address - ipv4-addr', host['ipv4'], status='Informational', tags=tags))
                dedup_report.indicators.append(Indicator('Address - ipv4-addr', host['ipv4'], tags=tags))

        # Dedup the Suricata alerts.
        for suricata_alert in report.suricata_alerts:
            if suricata_alert not in dedup_report.suricata_alerts:
                dedup_report.suricata_alerts.append(suricata_alert)

        # Dedup the modified files.
        for file in report.modified_files:
            if file not in dedup_report.modified_files:
                dedup_report.modified_files.append(file)

        # Dedup the dropped files.
        for file in report.dropped_files:
            # Dropped files are harder than the other items to properly whitelist, so we will
            # initially restrict them to certain file names or file types that we care about.
            if any(name in file['filename'].lower() for name in dedup_report.good_dropped_file_names) or any(t in file['type'] for t in dedup_report.good_dropped_file_types):
                if file not in dedup_report.dropped_files:
                    dedup_report.dropped_files.append(file)

                # If any part of the dropped file is whitelisted, make sure we mark all parts as whitelisted.
                if whitelist.is_dropped_file_whitelisted(file):
                    status = 'Whitelisted'
                    file['status'] = 'Whitelisted'
                else:
                    status = 'New'

                dedup_report.indicators.append(Indicator('Windows - FileName', file['filename'], status=status, tags=['dropped_file']))
                dedup_report.indicators.append(Indicator('Hash - MD5', file['md5'], status=status, tags=['dropped_file'], relationships=[file['sha1'], file['sha256']]))
                dedup_report.indicators.append(Indicator('Hash - SHA1', file['sha1'], status=status, tags=['dropped_file'], relationships=[file['md5'], file['sha256']]))
                dedup_report.indicators.append(Indicator('Hash - SHA256', file['sha256'], status=status, tags=['dropped_file'], relationships=[file['md5'], file['sha1']]))

        # Dedup the HTTP requests.
        for request in report.http_requests:
            if request not in dedup_report.http_requests:
                dedup_report.http_requests.append(request)
                dedup_report.indicators += make_url_indicators([request['url']], tags=['http_request', request['method']])

        # Dedup the DNS requests.
        for request in report.dns_requests:
            if request not in dedup_report.dns_requests:
                dedup_report.dns_requests.append(request)

                # If any part of the DNS request is whitelisted, make sure we mark all parts as whitelisted.
                if whitelist.is_dns_request_whitelisted(request):
                    status = 'Whitelisted'
                else:
                    status = 'New'

                # For now we consider ALL request IP addresses to be benign, so no need to check the whitelist.
                dedup_report.indicators.append(Indicator('URI - Domain Name', request['request'], tags=['dns_request']))
                try:
                    ipaddress.ip_address(request['answer'])
                    dedup_report.indicators.append(Indicator('Address - ipv4-addr', request['answer'], tags=['dns_response'], status='Informational', relationships=[request['request']]))
                except ValueError:
                    # the answer was not a valid IP address
                    pass

        # Dedup the memory strings.
        dedup_report.memory_strings += report.memory_strings
        dedup_report.memory_strings = sorted(list(set(dedup_report.memory_strings)))

        # Dedup the memory URLs.
        dedup_report.memory_urls += report.memory_urls
        dedup_report.memory_urls = list(set(dedup_report.memory_urls))
        dedup_report.memory_urls = [u for u in dedup_report.memory_urls if RegexHelpers.is_url(u)]
        dedup_report.indicators += make_url_indicators(dedup_report.memory_urls, tags=['url_in_memory'])

        # Dedup the strings URLs.
        dedup_report.strings_urls += report.strings_urls
        dedup_report.strings_urls = list(set(dedup_report.strings_urls))
        dedup_report.strings_urls = [u for u in dedup_report.strings_urls if RegexHelpers.is_url(u)]
        dedup_report.indicators += make_url_indicators(dedup_report.strings_urls, tags=['url_in_strings'])

        # Dedup the mutexes.
        dedup_report.mutexes += report.mutexes
        dedup_report.mutexes = list(set(dedup_report.mutexes))

        # Dedup the resolved APIs.
        dedup_report.resolved_apis += report.resolved_apis
        dedup_report.resolved_apis = list(set(dedup_report.resolved_apis))

        # Dedup the created services.
        dedup_report.created_services += report.created_services
        dedup_report.created_services = list(set(dedup_report.created_services))

        # Dedup the started services.
        dedup_report.started_services += report.started_services
        dedup_report.started_services = list(set(dedup_report.started_services))

        # Add the process tree as-is.
        dedup_report.process_trees.append(report.process_tree)

        # Try to decode base64 chunks in the process tree.
        process_tree_decoded = report.process_tree
        for chunk in report.process_tree.split():
            try:
                decoded_chunk = base64.b64decode(chunk).decode('utf-8')
                if '\x00' in decoded_chunk:
                    decoded_chunk = base64.b64decode(chunk).decode('utf-16')
                process_tree_decoded = process_tree_decoded.replace(chunk, decoded_chunk)
            except ValueError:
                # chunk was not valid base64 (or did not decode as text)
                pass
        dedup_report.process_trees_decoded.append(process_tree_decoded)

        # Remove ` backtick and other basic PowerShell obfuscation.
        new_trees = []
        for decoded_process_tree in dedup_report.process_trees_decoded:
            if 'powershell' in decoded_process_tree.lower():
                new_trees.append(decoded_process_tree.replace('`', ''))
        dedup_report.process_trees_decoded += new_trees

        # Remove PowerShell string formatter obfuscation.
        new_trees = []
        for decoded_process_tree in dedup_report.process_trees_decoded:
            formatter_pattern = re.compile(r'(\([\'\"](({(\d+)})+)[\'\"]\s*\-f\s*(([\'\"][^\'\"]+[\'\"],*)+)\))', re.IGNORECASE)
            results = formatter_pattern.findall(decoded_process_tree)
            if results:
                for result in results:
                    """ ('("{0}{1}"-f\'JDxA\',\'QDc\')', '{0}{1}', '{1}', '1', "'JDxA','QDc'", "'QDc'") """
                    full_match = result[0]
                    order = result[1][1:-1]  # 0}{1
                    items = result[4]  # "'JDxA','QDc'"

                    order_list = order.split('}{')
                    order_ints = [int(x) for x in order_list]
                    items_list = [i.replace('\'', '').replace('"', '') for i in items.split(',')]

                    if len(order_ints) == len(items_list):
                        deobfuscated_string = ''
                        for i in order_ints:
                            deobfuscated_string += items_list[i]
                        decoded_process_tree = decoded_process_tree.replace(full_match, deobfuscated_string)
                new_trees.append(decoded_process_tree)
        dedup_report.process_trees_decoded += new_trees

        # Try to decode string .split() obfuscation (used by Emotet and others).
        new_trees = []
        for decoded_process_tree in dedup_report.process_trees_decoded:
            if 'split' in decoded_process_tree.lower():
                try:
                    split_char_pattern = re.compile(r'\.[\'\"]*split[\'\"]*\([\'\"\s]*(.*?)[\'\"\s]*\)', re.IGNORECASE)
                    try:
                        split_char = str(split_char_pattern.search(decoded_process_tree).group(1))
                    except AttributeError:
                        split_char = None
                    if split_char:
                        new_process_tree_decoded = ' '.join(decoded_process_tree.split(split_char))
                        new_process_tree_decoded = new_process_tree_decoded.replace("'+'", '')
                        new_process_tree_decoded = new_process_tree_decoded.replace('"+"', '')
                        new_process_tree_decoded = new_process_tree_decoded.replace('\'', ' ')
                        new_process_tree_decoded = new_process_tree_decoded.replace('\"', ' ')
                        new_process_tree_decoded = new_process_tree_decoded.replace('. ', ' ')
                        new_trees.append(new_process_tree_decoded)
                except Exception:
                    logger.exception('Could not find process tree split() character.')
        dedup_report.process_trees_decoded += new_trees

        # Try to decode string .invoke() obfuscation (used by Emotet and others).
        new_trees = []
        for decoded_process_tree in dedup_report.process_trees_decoded:
            if 'invoke' in decoded_process_tree.lower():
                try:
                    split_char_pattern = re.compile(r'\.[\'\"]*invoke[\'\"]*\([\'\"\s]*(.*?)[\'\"\s]*\)', re.IGNORECASE)
                    try:
                        split_char = str(split_char_pattern.search(decoded_process_tree).group(1))
                    except AttributeError:
                        split_char = None
                    if split_char:
                        new_process_tree_decoded = ' '.join(decoded_process_tree.split(split_char))
                        new_process_tree_decoded = new_process_tree_decoded.replace("'+'", '')
                        new_process_tree_decoded = new_process_tree_decoded.replace('"+"', '')
                        new_process_tree_decoded = new_process_tree_decoded.replace('\'', ' ')
                        new_process_tree_decoded = new_process_tree_decoded.replace('\"', ' ')
                        new_process_tree_decoded = new_process_tree_decoded.replace('. ', ' ')
                        new_trees.append(new_process_tree_decoded)
                except Exception:
                    logger.exception('Could not find process tree invoke() character.')
        dedup_report.process_trees_decoded += new_trees

        # Dedup the process tree URLs. Start by just adding the URLs from each report.
        dedup_report.process_tree_urls += report.process_tree_urls

        # Find the URLs in each decoded process tree.
        for decoded_tree in dedup_report.process_trees_decoded:
            urls = find_urls(decoded_tree)
            # Remove any URL that has these URLs as substrings, since it's probably a bogus
            # URL from the original, non-decoded process tree.
            for u in report.process_tree_urls:
                if any(decoded_url in u for decoded_url in urls):
                    try:
                        dedup_report.process_tree_urls.remove(u)
                        logger.debug('Removing bad process tree URL: {}'.format(u))
                    except ValueError:
                        # URL was already removed
                        pass
            dedup_report.process_tree_urls += urls

        dedup_report.process_tree_urls = list(set(dedup_report.process_tree_urls))
        dedup_report.process_tree_urls = [u for u in dedup_report.process_tree_urls if RegexHelpers.is_url(u)]
        dedup_report.indicators += make_url_indicators(dedup_report.process_tree_urls, tags=['url_in_process_tree'])

        # Add the screenshot paths as-is.
        if report.screenshot_path:
            dedup_report.screenshot_paths.append(report.screenshot_path)

    return dedup_report
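
# A hedged usage sketch for dedup_reports(). 'CuckooParser', 'VxstreamParser'
# and 'Whitelist' are hypothetical stand-ins for whatever BaseSandboxParser
# subclasses and whitelist implementation the surrounding project provides;
# only the dedup_reports() call itself comes from the code above.
reports = [CuckooParser('cuckoo_report.json'), VxstreamParser('vxstream_report.json')]
merged = dedup_reports(reports, Whitelist())
print(merged.md5, len(merged.indicators), len(merged.process_tree_urls))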
def _parse_attachment(self, message_part, charset):
    part_items = message_part.items()
    for tup in part_items:
        for value in tup:
            if 'attachment' in value:
                file_data = message_part.get_payload()

                attachment_dict = {}
                if message_part.get('Content-Transfer-Encoding', None) == 'base64':
                    file_data_b64 = file_data.replace('\n', '')
                    # For some reason, sometimes the attachments don't have the proper
                    # padding. Add a couple "==" on the end for good measure. This doesn't
                    # seem to harm correctly encoded attachments.
                    file_data_decoded = base64.b64decode(file_data_b64 + '==')

                    # Try and get strings out of the attachment.
                    strings_list = RegexHelpers.find_strings(file_data_decoded)
                    strings = ' '.join(strings_list)

                    # Look for any URLs that were in the strings.
                    strings_urls = find_urls(strings)
                    attachment_dict['strings_urls'] = strings_urls
                elif message_part.get_content_type() == 'text/html':
                    file_data_decoded = message_part.get_payload(decode=True).decode(charset).encode('utf-8')
                else:
                    file_data_decoded = file_data

                try:
                    md5_hasher = hashlib.md5()
                    md5_hasher.update(file_data_decoded)
                    md5_hash = md5_hasher.hexdigest()
                except TypeError:
                    md5_hash = ''

                try:
                    sha256_hasher = hashlib.sha256()
                    sha256_hasher.update(file_data_decoded)
                    sha256_hash = sha256_hasher.hexdigest()
                except TypeError:
                    sha256_hash = ''

                attachment_dict['content_type'] = message_part.get_content_type()
                attachment_dict['size'] = len(file_data_decoded)
                attachment_dict['md5'] = md5_hash
                attachment_dict['sha256'] = sha256_hash
                attachment_dict['name'] = ''
                attachment_dict['create_date'] = ''
                attachment_dict['mod_date'] = ''
                attachment_dict['read_date'] = ''

                # Find the attachment name. Normally this follows a specific format
                # and is called 'filename=' but recently I've seen some that are in
                # different locations and are just called 'name='... Hence removing
                # old code and replacing with a regex statement to account for either
                # name in any location in the message part.
                attachment_name_pattern = re.compile(r'(file)?name="?([^"]+)"?')
                for tup in part_items:
                    for item in tup:
                        item_lines = item.splitlines()
                        for item_line in item_lines:
                            attachment_name = attachment_name_pattern.search(item_line)
                            if attachment_name:
                                attachment_dict['name'] = RegexHelpers.decode_utf_b64_string(attachment_name.groups()[1])
                                if attachment_dict['name'].endswith(';'):
                                    attachment_dict['name'] = attachment_dict['name'][:-1]

                # Make the attachment indicators.
                self.indicators.append(Indicator('Windows - FileName', attachment_dict['name'], tags=['attachment']))
                self.indicators.append(Indicator('Hash - MD5', attachment_dict['md5'], tags=['attachment']))
                self.indicators.append(Indicator('Hash - SHA256', attachment_dict['sha256'], tags=['attachment']))

                return attachment_dict
    return None
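
# A hedged sketch of how _parse_attachment() might be driven while walking a
# parsed e-mail. 'parser' stands in for an instance of the surrounding class,
# and the utf-8 charset fallback is an assumption, not taken from the project.
import email

def extract_attachments(parser, raw_email_text):
    attachments = []
    msg = email.message_from_string(raw_email_text)
    for part in msg.walk():
        # fall back to utf-8 when the part does not declare a charset
        charset = part.get_content_charset() or 'utf-8'
        attachment = parser._parse_attachment(part, charset)
        if attachment:
            attachments.append(attachment)
    return attachments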
def __init__(self, smtp_path, whitelist):
    # Initiate logging.
    self.logger = logging.getLogger()

    # Save the whitelist.
    self.whitelist = whitelist

    # Items we parse out of the email.
    self.ace_url = ''
    self.attachments = []
    self.body = ''
    self.cc_addresses = []
    self.envelope_from = ''
    self.envelope_to = ''
    self.from_address = ''
    self.headers = ''
    self.html = ''
    self.indicators = []
    self.message_id = ''
    self.original_recipient = ''
    self.path = smtp_path
    self.received = ''
    self.received_time = ''
    self.remediated = False
    self.reply_to = ''
    self.return_path = ''
    self.screenshots = []
    self.subject = ''
    self.subject_decoded = ''
    self.to_addresses = []
    self.urls = []
    self.x_auth_id = ''
    self.x_mailer = ''
    self.x_original_sender = ''
    self.x_originating_ip = ''
    self.x_sender = ''
    self.x_sender_id = ''
    self.x_sender_ip = ''

    # Build the URL to the ACE alert.
    ace_uuid_pattern = re.compile(r'([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12})')
    match = ace_uuid_pattern.search(self.path)
    if match:
        self.ace_url = '{}{}'.format(config['ace']['ace_alert_url'], match.group(1))

    with open(self.path, encoding='utf-8', errors='ignore') as s:
        smtp_stream = s.read().splitlines()

    # Locate any screenshots for this email.
    email_dir = os.path.dirname(self.path)
    files = os.listdir(email_dir)
    for f in files:
        if 'text_html' in f and f.endswith('.png') and not f.startswith('email_screenshot'):
            self.logger.debug('Found email screenshot: {}'.format(os.path.join(email_dir, f)))
            self.screenshots.append(os.path.join(email_dir, f))

    # Find the envelope from/to addresses. This will only work if given an
    # "smtp.stream" file, since otherwise the SMTP commands will not exist.
    envelope_address_pattern = re.compile(r'.*<(.*)>.*')
    for line in smtp_stream:
        if line.startswith('MAIL FROM:'):
            try:
                self.envelope_from = envelope_address_pattern.match(line).group(1)
            except AttributeError:
                self.logger.exception('Unable to parse envelope from.')
        if line.startswith('RCPT TO:'):
            try:
                self.envelope_to = envelope_address_pattern.match(line).group(1)
            except AttributeError:
                self.logger.exception('Unable to parse envelope to.')

    # Just in case we are dealing with an "smtp.stream" file that still has
    # the SMTP commands above the actual e-mail, we need to strip those out.
    # This will remove all lines prior to the Received: headers so that the
    # email.parser can properly parse out the e-mail. If we were given an
    # "smtp.email" type of file with the SMTP commands already removed, this
    # should not affect anything. This is legacy code at this point.
    try:
        while not smtp_stream[0].startswith('Received:'):
            smtp_stream.pop(0)
    except IndexError:
        smtp_stream = []

    # Join the header lines into a single string.
    self.email_text = '\n'.join(smtp_stream)

    # Create the e-mail object.
    email_obj = email.message_from_string(self.email_text)

    # We want to try and parse an embedded/attached e-mail if there is one.
    # Walk the full e-mail's parts.
    for part in email_obj.walk():
        # Continue if the part looks like a valid e-mail.
        if part.get_content_type() == 'message/rfc822':
            # Split the part lines into a list.
            part_text = str(part).splitlines()
            if any('Received:' in line for line in part_text):
                # Make sure our part starts with the Received: headers.
                while not part_text[0].startswith('Received:'):
                    part_text.pop(0)
                part_text = '\n'.join(part_text)
                # Make the new e-mail object.
                email_obj = email.message_from_string(part_text)

    # Parse the e-mail object for its content.
    parsed_email = self._parse_content(email_obj)

    # Now that we have the e-mail object, parse out some of the interesting parts.
    self.headers = self._get_all_headers_string(email_obj)
    self.received = self.get_header(email_obj, 'received')

    # Get the e-mail's plaintext body, HTML body, and the visible text from the HTML.
    self.body = parsed_email['body']
    self.html = parsed_email['html']

    # Get any e-mail attachments.
    self.attachments = parsed_email['attachments']

    # From address
    try:
        self.from_address = self._get_address_list(email_obj, 'from')[0][1]
        self.indicators.append(Indicator('Email - Address', self.from_address, tags=['from_address']))
    except Exception:
        pass

    # From domain
    try:
        self.indicators.append(Indicator('URI - Domain Name', self.from_address.split('@')[1], tags=['from_domain']))
    except Exception:
        pass

    # Reply-To address
    try:
        self.reply_to = self._get_address_list(email_obj, 'reply-to')[0][1]
        self.indicators.append(Indicator('Email - Address', self.reply_to, tags=['reply_to']))
    except Exception:
        pass

    # X-Sender address
    try:
        self.x_sender = self._get_address_list(email_obj, 'X-Sender')[0][1]
        self.indicators.append(Indicator('Email - Address', self.x_sender, tags=['x_sender']))
    except Exception:
        pass

    # X-Sender-Id address
    try:
        self.x_sender_id = self._get_address_list(email_obj, 'X-Sender-Id')[0][1]
        self.indicators.append(Indicator('Email - Address', self.x_sender_id, tags=['x_sender_id']))
    except Exception:
        pass

    # X-Auth-Id address
    try:
        self.x_auth_id = self._get_address_list(email_obj, 'X-Auth-ID')[0][1]
        self.indicators.append(Indicator('Email - Address', self.x_auth_id, tags=['x_auth_id']))
    except Exception:
        pass

    # Return-Path address
    try:
        self.return_path = self._get_address_list(email_obj, 'return-path')[0][1]
        self.indicators.append(Indicator('Email - Address', self.return_path, tags=['return_path']))
    except Exception:
        pass

    # X-MS-Exchange-Organization-OriginalEnvelopeRecipients address
    try:
        self.original_recipient = self._get_address_list(email_obj, 'X-MS-Exchange-Organization-OriginalEnvelopeRecipients')[0][1].lower()
        self.indicators.append(Indicator('Email - Address', self.original_recipient, status='Informational', tags=['original_recipient']))
    except Exception:
        pass

    # If the original_recipient was not found, check if this is a POTENTIAL PHISH e-mail and use the from address.
    if not self.original_recipient and 'Subject: [POTENTIAL PHISH]' in self.email_text:
        try:
            temp_email_obj = email.message_from_string(self.email_text)
            self.original_recipient = self._get_address_list(temp_email_obj, 'from')[0][1]
            self.indicators.append(Indicator('Email - Address', self.original_recipient, status='Informational', tags=['original_recipient']))
        except Exception:
            self.logger.exception('Error parsing original recipient from POTENTIAL PHISH e-mail.')

    # Subject
    try:
        self.subject = ''.join(self.get_header(email_obj, 'subject')[0].splitlines())
        self.indicators.append(Indicator('Email - Subject', self.subject))
    except Exception:
        pass

    # Decoded subject
    try:
        self.subject_decoded = ''.join(str(make_header(decode_header(self.get_header(email_obj, 'subject')[0]))).splitlines())
        self.indicators.append(Indicator('Email - Subject', self.subject_decoded))
    except Exception:
        pass

    # To addresses
    self.to_addresses = [x[1].lower() for x in self._get_address_list(email_obj, 'to')]

    # CC addresses
    self.cc_addresses = [x[1].lower() for x in self._get_address_list(email_obj, 'cc')]

    # Message-Id
    try:
        self.message_id = self.get_header(email_obj, 'message-id')[0]
        self.indicators.append(Indicator('Email Message ID', self.message_id, status='Informational'))
    except Exception:
        pass

    # X-Mailer
    try:
        self.x_mailer = self.get_header(email_obj, 'x-mailer')[0]
        self.indicators.append(Indicator('Email - Xmailer', self.x_mailer, status='Informational'))
    except Exception:
        pass

    # X-Original-Sender address
    try:
        self.x_original_sender = self.get_header(email_obj, 'x-original-sender')[0]
        self.indicators.append(Indicator('Email - Address', self.x_original_sender, tags=['x_original_sender']))
    except Exception:
        pass

    # X-Originating-Ip
    try:
        x_originating_ip = self.get_header(email_obj, 'x-originating-ip')[0]
        # Sometimes this field is in the form: [1.1.1.1]
        # Make sure we remove any non-IP characters.
        ip = RegexHelpers.find_ip_addresses(x_originating_ip)
        if ip:
            self.x_originating_ip = ip[0]
            self.indicators.append(Indicator('Address - ipv4-addr', self.x_originating_ip, tags=['x_originating_ip']))
    except Exception:
        pass

    # X-Sender-Ip
    try:
        x_sender_ip = self.get_header(email_obj, 'x-sender-ip')[0]
        # Like the X-Originating-IP, make sure we only
        # get the IP address and no other characters.
        ip = RegexHelpers.find_ip_addresses(x_sender_ip)
        if ip:
            self.x_sender_ip = ip[0]
            self.indicators.append(Indicator('Address - ipv4-addr', self.x_sender_ip, tags=['x_sender_ip']))
    except Exception:
        pass

    self.received_time = self._get_received_time(email_obj)
    if not self.received_time:
        self.received_time = self._get_date_time()

    # Find any URLs in the plaintext body.
    text_urls = find_urls(self.body)

    # Find any URLs in the HTML body.
    html_urls = find_urls(self.html)

    # Get any strings URLs.
    strings_urls = []
    """
    for file in self.attachments:
        try:
            strings_urls += file['strings_urls']
        except:
            pass
    """

    # Try and remove any URLs that look like partial versions of other URLs.
    all_urls = text_urls + html_urls + strings_urls
    unique_urls = set()
    for u in all_urls:
        if not any(other_url.startswith(u) and other_url != u for other_url in all_urls):
            unique_urls.add(u)

    # Get rid of any invalid URLs.
    self.urls = [u for u in unique_urls if is_valid(u)]

    # Make indicators for the URLs.
    self.indicators += make_url_indicators(self.urls)

    # Get rid of any invalid indicators.
    self.indicators = [i for i in self.indicators if i.value]

    # Add any extra tags to each indicator.
    for i in self.indicators:
        i.tags.append('phish')
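
# A worked illustration of the partial-URL filter above: any URL that is a
# strict prefix of another extracted URL is treated as a truncated fragment
# and dropped, keeping only the longest form.
all_urls = ['http://example.com/a', 'http://example.com/a/full/path']
unique_urls = {u for u in all_urls if not any(o.startswith(u) and o != u for o in all_urls)}
# unique_urls == {'http://example.com/a/full/path'}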
def __init__(self, alert_path):
    # Start logging.
    self.logger = logging.getLogger()

    # Read the alert JSON.
    with open(alert_path, encoding='utf8') as a:
        self.ace_json = json.load(a)

    self.alert_dir = os.path.dirname(alert_path)
    self.path = alert_path
    self.time = self.ace_json['event_time']
    self.tool = self.ace_json['tool']
    self.type = self.ace_json['type']
    self.name = self.ace_json['uuid']
    self.description = self.ace_json['description']
    try:
        self.company_name = self.ace_json['company_name']
    except KeyError:
        self.company_name = 'legacy'

    # Get all detection points.
    self.detections = self.get_all_detection_points()

    # Load the URL from the config file.
    self.url = config['ace']['ace_alert_url'] + self.name

    """
    #
    # USER ANALYSIS
    #
    """

    # Try and find any user analysis files.
    user_analysis_files = self.get_all_analysis_paths('saq.modules.user:EmailAddressAnalysis')

    # Parse any user_analysis_files.
    self.user_analysis = []
    for file in user_analysis_files:
        if os.path.exists(os.path.join(self.alert_dir, '.ace', file)):
            self.logger.debug('processing EmailAddressAnalysis for user data...')
            with open(os.path.join(self.alert_dir, '.ace', file), encoding='utf8') as j:
                user_analysis_data = json.load(j)

            for json_data in user_analysis_data:
                user = {'cn': '', 'displayName': '', 'mail': '', 'title': '', 'description': '', 'department': '', 'company': '', 'distinguishedName': ''}

                if 'attributes' not in json_data:
                    continue
                user_data = json_data['attributes']

                try:
                    user['cn'] = user_data['cn']
                except KeyError:
                    pass
                try:
                    user['displayName'] = user_data['displayName']
                except KeyError:
                    pass
                try:
                    user['mail'] = user_data['mail']
                except KeyError:
                    pass
                try:
                    user['title'] = user_data['title']
                except KeyError:
                    pass
                try:
                    user['description'] = ' | '.join(user_data['description'])
                except KeyError:
                    pass
                try:
                    user['department'] = user_data['department']
                except KeyError:
                    pass
                try:
                    user['company'] = user_data['company']
                except KeyError:
                    pass
                try:
                    user['distinguishedName'] = user_data['distinguishedName']
                except KeyError:
                    pass

                self.user_analysis.append(user)

    """
    #
    # URLS
    #
    """

    # Save whatever URLs ACE was able to automatically extract.
    urls = set()
    url_files = self.get_all_analysis_paths('saq.modules.file_analysis:URLExtractionAnalysis')
    for file in url_files:
        try:
            with open(os.path.join(self.alert_dir, '.ace', file), encoding='utf8') as j:
                json_data = json.load(j)
            for url in json_data['urls']:
                if url.endswith('/'):
                    url = url[:-1]
                urls.add(url)
        except FileNotFoundError:
            self.logger.warning("Caught FileNotFoundError trying to open '{}'".format(os.path.join(self.alert_dir, '.ace', file)))
    self.urls = sorted(list(urls))

    # Make indicators from the URLs.
    self.indicators = make_url_indicators(self.urls)

    """
    #
    # Analysis IOCs: Append IOCs added by ACE modules.
    #
    """
    self.ace_iocs = self.get_all_analysis_iocs()

    # Next, parse them into indicators.
    ioc_indicators = []
    sip_indicator_map = config['ace']['ace_to_sip_indicator_map']
    for ioc in self.ace_iocs:
        ioc_sip_type = sip_indicator_map.get(ioc.get('type'), None)
        if not ioc_sip_type:
            continue
        if ioc_sip_type == "Email - Subject" and ioc['value'].startswith("[POTENTIAL PHISH] "):
            continue
        # Make the indicator and append it to self.indicators.
        status = "New" if not ioc['status'] else ioc['status']
        ioc_indicators.append(Indicator(ioc_sip_type, ioc['value'], status=status, tags=ioc['tags']))
    self.indicators.extend(ioc_indicators)
    self.indicators = merge_indicators(self.indicators)

    """
    #
    # SCREENSHOTS
    #
    """
    screenshots = set()
    for observable in self.ace_json['observable_store'].keys():
        try:
            if 'screenshot' in self.ace_json['observable_store'][observable]['tags']:
                screenshot_path = os.path.join(self.alert_dir, self.ace_json['observable_store'][observable]['value'])
                screenshots.add(screenshot_path)
                self.logger.debug('Found ACE screenshot: {}'.format(screenshot_path))
        except KeyError:
            pass
    self.screenshots = sorted(list(screenshots))

    """
    #
    # TAGS
    #
    """
    tags = set()
    for observable in self.ace_json['observable_store'].keys():
        try:
            for tag in self.ace_json['observable_store'][observable]['tags']:
                tags.add(tag)
        except KeyError:
            pass
    self.tags = sorted(list(tags))
    self.logger.debug('"{}" alert has these tags: {}'.format(self.name, self.tags))

    """
    #
    # Falcon Reports
    #
    """
    # If 1000 talents, skip this step?
    # OR TODO: Create ES config file in local event dir with
    # an option to skip sandboxing.
    self.download_full_falcon_reports(skip=False)
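
# The repeated try/except KeyError blocks above copy LDAP-style attributes
# into the user dict one key at a time. An equivalent, more compact sketch
# using dict.get() (a rewrite suggestion, not code from the project):
def extract_user(attrs):
    keys = ('cn', 'displayName', 'mail', 'title', 'department', 'company', 'distinguishedName')
    user = {k: attrs.get(k, '') for k in keys}
    # 'description' arrives as a list of strings and is joined, as above
    user['description'] = ' | '.join(attrs.get('description', []))
    return user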
def run(self):
    '''
    This first disables the Pi's status LEDs, establishes the message queue,
    arbitrator and controller, enables the set of features, then starts the
    main OS loop.
    '''
    super(AbstractTask, self).run()
    loop_count = 0

    # display banner!
    _banner = '\n' \
        + 'ros\n' \
        + 'ros █▒▒▒▒▒▒▒ █▒▒▒▒▒▒ █▒▒▒▒▒▒ █▒▒ \n' \
        + 'ros █▒▒ █▒▒ █▒▒ █▒▒ █▒▒ █▒▒ \n' \
        + 'ros █▒▒▒▒▒▒ █▒▒ █▒▒ █▒▒▒▒▒▒ █▒▒ \n' \
        + 'ros █▒▒ █▒▒ █▒▒ █▒▒ █▒▒ \n' \
        + 'ros █▒▒ █▒▒ █▒▒▒▒▒▒ █▒▒▒▒▒▒ █▒▒ \n' \
        + 'ros\n'
    self._log.info(_banner)

    self._disable_leds = self._config['pi'].get('disable_leds')
    if self._disable_leds:
        # disable Pi LEDs since they may be distracting
        self._set_pi_leds(False)

    self._log.info('enabling features...')
    for feature in self._features:
        self._log.info('enabling feature {}...'.format(feature.name()))
        feature.enable()

#   __enable_player = self._config['ros'].get('enable_player')
#   if __enable_player:
#       self._log.info('configuring sound player...')
#       self._player = Player(Level.INFO)
#   else:
#       self._player = None

#   i2c_slave_address = config['ros'].get('i2c_master').get('device_id') # i2c hex address of I2C slave device

    vl53l1x_available = True  # self.get_property('features', 'vl53l1x')
    ultraborg_available = True  # self.get_property('features', 'ultraborg')
    if vl53l1x_available and ultraborg_available:
        self._log.critical('starting scanner tool...')
        self._lidar = Lidar(self._config, Level.INFO)
        self._lidar.enable()
    else:
        self._log.critical('lidar scanner tool does not have necessary dependencies.')

    # wait to stabilise features?

    # configure the Controller and Arbitrator
    self._log.info('configuring controller...')
    self._controller = Controller(self._config, self._ifs, self._motors, self._callback_shutdown, Level.INFO)
    self._log.info('configuring arbitrator...')
    self._arbitrator = Arbitrator(self._config, self._queue, self._controller, Level.WARN)

    _flask_enabled = self._config['flask'].get('enabled')
    if _flask_enabled:
        self._log.info('starting flask web server...')
        self.configure_web_server()
    else:
        self._log.info('not starting flask web server (suppressed from command line).')

    # bluetooth gamepad controller
    if self._gamepad_enabled:
        self._connect_gamepad()

    self._log.warning('Press Ctrl-C to exit.')

    _wait_for_button_press = self._config['ros'].get('wait_for_button_press')
    self._controller.set_standby(_wait_for_button_press)

    # begin main loop ..............................

    self._log.info('starting button thread...')
    self._button.start()

#   self._log.info('enabling bno055 sensor...')
#   self._bno055.enable()
#   self._bumpers.enable()

    self._indicator = Indicator(Level.INFO)
    # add indicator as message consumer
    self._queue.add_consumer(self._indicator)

    self._log.info(Fore.MAGENTA + 'enabling integrated front sensor...')
    self._ifs.enable()

#   self._log.info('starting info thread...')
#   self._info.start()
#   self._log.info('starting blinky thread...')
#   self._rgbmatrix.enable(DisplayType.RANDOM)

    # enable arbitrator tasks (normal functioning of robot)
    main_loop_delay_ms = self._config['ros'].get('main_loop_delay_ms')
    self._log.info('begin main os loop with {:d}ms delay.'.format(main_loop_delay_ms))
    _loop_delay_sec = main_loop_delay_ms / 1000
    _main_loop_count = 0
    self._arbitrator.start()
    self._active = True
    while self._active:
        # The sensors and the flask service send messages to the message queue,
        # which forwards those messages on to the arbitrator, which chooses the
        # highest priority message to send on to the controller. So the timing
        # of this loop is inconsequential; it exists solely as a keep-alive.
        _main_loop_count += 1
        self._log.debug(Fore.BLACK + Style.DIM + '[{:d}] main loop...'.format(_main_loop_count))
        time.sleep(_loop_delay_sec)

    # end application loop .........................

    if not self._closing:
        self._log.warning('closing following loop...')
        self.close()
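
# '_set_pi_leds()' is called above but not shown. On a Raspberry Pi the
# status LEDs are exposed under /sys/class/leds; a minimal sketch of the
# technique follows (requires root; the 'led0'/'led1' names vary by Pi
# model, and restoring the default LED trigger is omitted here):
def _set_pi_leds_sketch(enabled):
    for led in ('led0', 'led1'):
        with open('/sys/class/leds/{}/brightness'.format(led), 'w') as f:
            f.write('1' if enabled else '0')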
def main(argv):
    try:
        _log = Logger("indicator_test", Level.INFO)
        _indicator = Indicator(Level.INFO)
        _sleep_sec = 0.2

        for hue in range(360):
            _indicator.set_heading(hue)
#           time.sleep(0.01)

        # NOTE: this early exit skips the direction/sensor/bumper checks below.
        sys.exit(0)

        _log.info(Fore.CYAN + 'DIR FWD' + Style.RESET_ALL)
        _indicator.set_direction_fwd(True)
        time.sleep(_sleep_sec)
        _indicator.set_direction_fwd(False)

        _log.info(Fore.RED + 'DIR PORT' + Style.RESET_ALL)
        _indicator.set_direction_port(True)
        time.sleep(_sleep_sec)
        _indicator.set_direction_port(False)

        _log.info(Fore.YELLOW + 'DIR AFT' + Style.RESET_ALL)
        _indicator.set_direction_aft(True)
        time.sleep(_sleep_sec)
        _indicator.set_direction_aft(False)

        _log.info(Fore.GREEN + 'DIR STBD' + Style.RESET_ALL)
        _indicator.set_direction_stbd(True)
        time.sleep(_sleep_sec)
        _indicator.set_direction_stbd(False)

        _log.info(Fore.RED + 'PORT SIDE IR' + Style.RESET_ALL)
        _indicator.set_ir_sensor_port_side(True)
        time.sleep(_sleep_sec)
        _indicator.set_ir_sensor_port_side(False)

        _log.info(Fore.RED + 'PORT IR' + Style.RESET_ALL)
        _indicator.set_ir_sensor_port(True)
        time.sleep(_sleep_sec)
        _indicator.set_ir_sensor_port(False)

        _log.info(Fore.CYAN + 'CNTR IR' + Style.RESET_ALL)
        _indicator.set_ir_sensor_center(True)
        time.sleep(_sleep_sec)
        _indicator.set_ir_sensor_center(False)

        _log.info(Fore.GREEN + 'STBD IR' + Style.RESET_ALL)
        _indicator.set_ir_sensor_stbd(True)
        time.sleep(_sleep_sec)
        _indicator.set_ir_sensor_stbd(False)

        _log.info(Fore.GREEN + 'STBD SIDE IR' + Style.RESET_ALL)
        _indicator.set_ir_sensor_stbd_side(True)
        time.sleep(_sleep_sec)
        _indicator.set_ir_sensor_stbd_side(False)

        _log.info(Fore.RED + 'PORT BUMPER' + Style.RESET_ALL)
        _indicator.set_bumper_port(True)
        time.sleep(_sleep_sec)
        _indicator.set_bumper_port(False)

        _log.info(Fore.CYAN + 'CNTR BUMPER' + Style.RESET_ALL)
        _indicator.set_bumper_center(True)
        time.sleep(_sleep_sec)
        _indicator.set_bumper_center(False)

        _log.info(Fore.GREEN + 'STBD BUMPER' + Style.RESET_ALL)
        _indicator.set_bumper_stbd(True)
        time.sleep(_sleep_sec)
        _indicator.set_bumper_stbd(False)

        _indicator.clear()

    except KeyboardInterrupt:
        _log.error('caught Ctrl-C; exiting...')
    except Exception:
        _log.error('error starting ros: {}'.format(traceback.format_exc()))
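
# The 0-360 sweep above suggests the Indicator maps a compass heading onto
# the color wheel. A hedged sketch of such a mapping using the standard
# library's colorsys module (the actual Indicator implementation is not
# shown in this snippet):
import colorsys

def heading_to_rgb(heading_degrees):
    # hue is the heading as a fraction of the circle, at full saturation/value
    r, g, b = colorsys.hsv_to_rgb((heading_degrees % 360) / 360.0, 1.0, 1.0)
    return int(r * 255), int(g * 255), int(b * 255)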
def build_stock_pool_indicator(self):
    print('init stock number in stock pool: ', len(self._stock_list))
    self._sp = StockPool(self._stock_list, self._sp_start, self._sp_end)
    self._ind = Indicator(self._sp, self._start, self._indicator_list)