def get_message_center_messages():
    """Retrieves the list of messages from the MCServer.

    Returns:
        tuple: (message_list, conn_failed) where message_list is the list
            of messages retrieved (empty on failure) and conn_failed is
            True only when the Message Center Server was unreachable.
    """
    messages = []
    conn_failed = False
    try:
        # First we need to retrieve the list of tags for the current system.
        # If this call fails, we shouldn't make the mcserver request
        proxy = AVProxy()
        if proxy is None:
            app.logger.error("Connection error with AVProxy")
        system_tags = get_system_tags()
        if len(system_tags) == 0:
            # BUG FIX: this function's contract is a (messages, conn_failed)
            # tuple; the bare `return []` broke callers that unpack 2 values.
            return [], conn_failed
        revision = get_latest_message_revision()
        msg_filter = "filters=%s" % ','.join(system_tags)
        if revision is not None:
            msg_filter += "&revision=%s" % revision
        url = 'https://%s:%s/messages?%s' % (
            app.config['MESSAGE_CENTER_SERVER'],
            app.config['MESSAGE_CENTER_PORT'],
            msg_filter)
        request = urllib2.Request(url)
        response = proxy.open(request, timeout=20, retries=3)
        response_data = json.loads(response.read())
        response_code = response.getcode()
        if response_code != 200:
            # Typo fixed in log message ("reponse" -> "response").
            app.logger.warning("Invalid response from the mcserver %s:%s"
                               % (response_code, response_data))
        for field in ['data', 'status', 'signature']:
            if field not in response_data:
                # BUG FIX: keep the tuple contract here too.
                return [], conn_failed
        # Validate the data before trusting it.
        if not verify_sign(response_data['signature'], response_data['data']):
            # Typo fixed in log message ("comimg" -> "coming").
            app.logger.warning(
                "Cannot verify the data coming from the mcserver")
            return [], False
        messages = ast.literal_eval(b64decode(response_data['data']))
        if 'revision' in response_data:
            save_messages_revision(response_data['revision'])
    except (URLError, HTTPError):
        conn_failed = True
        app.logger.error("Cannot connect to the Message Center Server")
    except Exception:
        import traceback
        app.logger.error(
            "An error occurred while retrieving the Message Center Server messages: %s"
            % str(traceback.format_exc()))
    return messages, conn_failed
def make_request(self, url):
    """Make a request against the OTX server.

    Args:
        url (str): The url with the request.

    Returns:
        response_data (json): The OTX response.

    Raises:
        InvalidAPIKey: when the server answers HTTP 403.
        BadRequest: when the server answers HTTP 400.
        Exception: for any other connection or parsing failure.
    """
    proxy = AVProxy()
    if proxy is None:
        api_log.error("Connection error with AVProxy")
    try:
        request = urllib2.Request(url)
        request.add_header('X-OTX-API-KEY', self.key)
        response = proxy.open(request, timeout=20, retries=3)
        response_data = json.loads(response.read(), encoding="utf-8")
    except urllib2.HTTPError as err:
        # BUG FIX: only HTTPError carries `.code`. The original caught
        # URLError and read err.code, which raises AttributeError for
        # plain connection errors (DNS failure, refused connection).
        if err.code == 403:
            raise InvalidAPIKey("Invalid API Key")
        elif err.code == 400:
            raise BadRequest("Bad Request")
        else:
            raise Exception(str(err))
    except urllib2.URLError as err:
        # Non-HTTP transport failure: no status code available.
        raise Exception(str(err))
    except Exception as err:
        raise Exception(str(err))
    return response_data
def test_constructor_4(self):
    """AVProxy: a malformed proxy file yields no proxy URL and no auth."""
    self.mock_read_file.return_value = (True, self.bad_proxy)
    av_proxy = AVProxy(proxy_file=self.fake_proxy_file)
    self.assertEqual(None, av_proxy.get_proxy_url())
    self.assertFalse(av_proxy.need_authentication())
def test_constructor_5(self):
    """AVProxy: a missing proxy file yields no proxy URL and no auth."""
    self.mock_read_file.return_value = (False, 'not exist')
    av_proxy = AVProxy(proxy_file=self.fake_proxy_file)
    self.assertEqual(None, av_proxy.get_proxy_url())
    self.assertFalse(av_proxy.need_authentication())
def test_constructor_3(self):
    """AVProxy: a proxy definition with credentials requires authentication."""
    self.mock_read_file.return_value = (True, self.proxy_with_port_and_auth)
    av_proxy = AVProxy(proxy_file=self.fake_proxy_file)
    expected_url = self.get_proxy_url(self.proxy_with_port_and_auth)
    self.assertEqual(av_proxy.get_proxy_url(), expected_url)
    self.assertTrue(av_proxy.need_authentication())
def test_constructor_2(self):
    """AVProxy: a proxy definition with a port needs no authentication."""
    self.mock_read_file.return_value = (True, self.proxy_with_port)
    av_proxy = AVProxy(proxy_file=self.fake_proxy_file)
    expected_url = self.get_proxy_url(self.proxy_with_port)
    self.assertEqual(av_proxy.get_proxy_url(), expected_url)
    self.assertFalse(av_proxy.need_authentication())
def test_constructor_1(self):
    """AVProxy: a simple proxy definition needs no authentication."""
    self.mock_read_file.return_value = (True, self.simple_proxy)
    av_proxy = AVProxy(proxy_file=self.fake_proxy_file)
    # The proxy file must be read exactly once, from the local host.
    self.mock_read_file.assert_called_once_with('127.0.0.1',
                                                self.fake_proxy_file)
    expected_url = self.get_proxy_url(self.simple_proxy)
    self.assertEqual(av_proxy.get_proxy_url(), expected_url)
    self.assertFalse(av_proxy.need_authentication())
def test_no_proxy_connect(self):
    """Open a direct (proxyless) connection, first by URL then by Request."""
    av_proxy = AVProxy()
    # Using a plain URL string.
    url_reply = av_proxy.open("http://python.org")
    assert url_reply is not None
    del url_reply
    # Using an explicit urllib2.Request object.
    req = urllib2.Request("http://python.org")
    req_reply = av_proxy.open(req)
    assert req_reply is not None
def test_no_proxy_connect_with_request(self, mock_opener):
    """AVProxy: opening a ready-made Request forwards it without extra headers."""
    canned_reply = 'response OK'
    mock_opener.return_value.open.return_value = canned_reply
    av_proxy = AVProxy()
    req = urllib2.Request("http://python.org")
    reply = av_proxy.open(req)
    self.assertEqual(canned_reply, reply)
    # The object handed to the underlying opener must be the same kind of
    # Request, with no headers added by AVProxy.
    positional, _ = mock_opener.return_value.open.call_args
    forwarded_request = positional[0]
    self.assertEqual({}, forwarded_request.headers)
    self.assertIsInstance(forwarded_request, urllib2.Request)
def test_no_proxy_connect_with_url(self, mock_opener):
    """AVProxy: opening a plain URL builds a Request with the default UA."""
    canned_reply = 'response OK'
    mock_opener.return_value.open.return_value = canned_reply
    av_proxy = AVProxy()
    reply = av_proxy.open("http://python.org", timeout=2)
    self.assertEqual(canned_reply, reply)
    # AVProxy must wrap the URL in a Request carrying its User-Agent and
    # forward the timeout keyword untouched.
    positional, keyword = mock_opener.return_value.open.call_args
    forwarded_request = positional[0]
    self.assertEqual(AVProxy.USER_AGENT,
                     forwarded_request.get_header('User-agent'))
    self.assertIsInstance(forwarded_request, urllib2.Request)
    self.assertEqual(keyword, {'timeout': 2})
def __init__(self, key, server="https://otx.alienvault.com/"):
    """Set up the OTX client: credentials, endpoints and backing stores.

    Args:
        key (str): OTX API key.
        server (str): Base URL of the OTX server.
    """
    self.key = key
    self.server = server
    self.url_base = "{}api/v1".format(server)
    self.avproxy = AVProxy()
    self.pulse_db = PulseDB()
    self.pulse_correlation_db = PulseCorrelationDB()
    # Maps a request type to the redis key storing its latest timestamp.
    self.date_types = dict(events="latest_events_call_date",
                           subscribed="latest_subscribed_call_date")
    self.otx_user_version = self.get_otx_user_version()
def test_no_proxy_connect_url_aut(self):
    """AVProxy: an unreachable authenticated proxy raises after retrying."""
    self.mock_read_file.return_value = (True, self.proxy_with_port_and_auth)
    av_proxy = AVProxy(proxy_file='auth_proxy')
    expected_errors = (urllib2.URLError, IOError, httplib.HTTPException)
    self.assertRaises(expected_errors, av_proxy.open,
                      "http://python.org", timeout=0.5, retries=1)
def __init__(self, key, server="https://otx.alienvault.com/"):
    """Initialise the OTX client with its key, server URL and redis stores.

    Args:
        key (str): OTX API key.
        server (str): Base URL of the OTX server.
    """
    self.key = key
    self.server = server
    self.url_base = "{}api/v1".format(server)
    self.avproxy = AVProxy()
    self.pulse_db = PulseDB()
    self.pulse_correlation_db = PulseCorrelationDB()
    # Redis keys that hold the timestamp of the latest call of each kind.
    self.date_types = {
        "events": "latest_events_call_date",
        "subscribed": "latest_subscribed_call_date",
    }
    self.otx_user_version = self.get_otx_user_version()
class OTXv2(object):
    """Client for the AlienVault OTX (Open Threat Exchange) API.

    Downloads subscribed pulses and pulse events, stores them in redis
    (PulseDB / PulseCorrelationDB) and tracks the timestamps of the latest
    'events' and 'subscribed' requests so later downloads are incremental.
    """

    def __init__(self, key, server="https://otx.alienvault.com/"):
        """Args:
            key (str): OTX API key.
            server (str): Base URL of the OTX server.
        """
        self.key = key
        self.server = server
        self.url_base = "{}api/v1".format(server)
        self.avproxy = AVProxy()
        self.pulse_db = PulseDB()
        self.pulse_correlation_db = PulseCorrelationDB()
        # Maps a request type to the redis key storing its latest timestamp.
        self.date_types = {
            "events": "latest_events_call_date",
            "subscribed": "latest_subscribed_call_date"
        }
        self.otx_user_version = self.get_otx_user_version()

    def update_latest_request(self, d_type, d_update=None):
        """Update the latest otx request timestamp.

        Args:
            d_type (str): The type of date to update ('events'/'subscribed').
            d_update (str): Date to store; defaults to the current UTC time.

        Returns:
            bool: True if the timestamp was saved, False otherwise.
        """
        date_type = self.date_types.get(d_type, None)
        if date_type is None:
            return False
        update_date = datetime.datetime.utcnow().strftime(
            "%Y-%m-%dT%H:%M:%S") if d_update is None else d_update
        try:
            self.pulse_db.set_key_value(date_type, update_date)
        except Exception as err:
            api_log.error("Cannot save messages revision: %s" % str(err))
            return False
        return True

    def get_latest_request(self, d_type):
        """Loads the latest request timestamp.

        Args:
            d_type (str): The type of date to get ('events'/'subscribed').

        Returns:
            str or None: The stored date, or None when unknown or unset.
        """
        date_type = self.date_types.get(d_type, None)
        if date_type is None:
            return date_type
        try:
            latest_timestamp = self.pulse_db.get(date_type)
        except Exception as err:
            api_log.warning("Cannot get messages revision: {}".format(err))
            return None
        return None if latest_timestamp == "" else latest_timestamp

    @retry(RequestException, tries=5, backoff=2, logger=api_log)
    def make_request(self, url):
        """Make a request against the OTX server.

        Args:
            url (str): The url with the request.

        Returns:
            response_data (json): The OTX response.

        Raises:
            RequestException: on retryable server errors (drives @retry).
            InvalidAPIKey: when the server rejects the API key.
            BadRequest: when the server reports a malformed request.
        """
        api_log.info("trying to make a request: {}".format(url))
        custom_headers = {'X-OTX-API-KEY': self.key}
        if self.otx_user_version:
            # Information about system that is using OTX
            custom_headers['User-Agent'] = self.otx_user_version
        # http://docs.python-requests.org/en/master/user/advanced/#proxies
        proxies = self.avproxy.get_proxies()
        response_data = requests.get(url, headers=custom_headers,
                                     proxies=proxies, timeout=10)
        api_log.info("Status code: {}".format(response_data.status_code))
        if response_data.status_code in GENERAL_ERROR_CODES:
            # Raising RequestException makes the @retry decorator try again.
            raise RequestException("Response status code: {}".format(
                response_data.status_code))
        if response_data.status_code == INVALID_API_KEY_CODE:
            raise InvalidAPIKey("Invalid API Key")
        if response_data.status_code == BAD_REQUEST_CODE:
            raise BadRequest("Bad Request")
        return response_data.json()

    def check_token(self):
        """Checks if a OTX token is valid and return user info if so.

        Returns:
            user_data (dict): A dict with the user info.
        """
        url = "{}/user/".format(self.url_base)
        try:
            user_data = self.make_request(url)
        except Exception as err:
            api_log.warning("OTX key activation error: {}".format(err))
            raise
        return user_data

    def save_pulses(self, pulses):
        """Save a list of pulses in redis.

        Args:
            pulses (list): List of pulses we want to save.

        Returns:
            int: Number of new pulses saved.
        """
        if len(pulses) > 0:
            self.pulse_db.merge(pulses)
            self.pulse_correlation_db.store(pulses)
        return len(pulses)

    def remove_pulses(self, pulses):
        """Delete a list of pulses from redis.

        Args:
            pulses (list): List of pulse IDs we want to remove.

        Returns:
            int: Number of pulses actually removed.
        """
        del_pulses = len(pulses)
        if del_pulses > 0:
            for p_id in pulses:
                try:
                    pulse = ast.literal_eval(self.pulse_db.get(p_id))
                    self.pulse_db.delete_key(p_id)
                    self.pulse_correlation_db.delete_pulse(pulse)
                except RedisDBKeyNotFound:
                    # Already gone: do not count it as a removal.
                    del_pulses -= 1
                    continue
                except Exception as err:
                    api_log.error("Error deleting Pulse: {}".format(err))
                    del_pulses -= 1
                    continue
        return del_pulses

    def add_pulses_from_list(self, pulses):
        """Add the pulses given.

        Args:
            pulses (list): List of pulse IDs we want to add.

        Returns:
            int: Number of new pulses downloaded.
        """
        p_download = []
        for p_id in pulses:
            request = "{}/pulses/{}/".format(self.url_base, p_id)
            try:
                json_data = self.make_request(request)
                # Save pulse data on redis
                p_download.append(json_data)
            except Exception as err:
                api_log.warning("Cannot download pulse {}: {}".format(
                    p_id, err))
                continue
        return self.save_pulses(p_download)

    def add_pulses_from_authors(self, authors):
        """Add the pulses from some given authors.

        Args:
            authors (list): Authors whose pulses should be added.

        Returns:
            int: Number of new pulses added.
        """
        pulse_downloaded = 0
        for author in authors:
            next_request = "{}/pulses/subscribed?limit=20&author_name={}".format(
                self.url_base, author)
            while next_request:
                try:
                    json_data = self.make_request(next_request)
                    # Save pulse data on redis
                    pulse_downloaded += self.save_pulses(
                        json_data.get('results'))
                    # Get next request
                    next_request = json_data.get('next')
                except Exception as err:
                    api_log.warning(
                        "Cannot download pulses from author {}: {}".format(
                            author, err))
                    # BUG FIX: 'continue' left next_request unchanged and
                    # retried the same failing URL forever (infinite loop
                    # for non-retryable errors like an invalid API key).
                    # Give up on this author and move to the next one.
                    break
        return pulse_downloaded

    def remove_pulses_from_authors(self, authors):
        """Remove the pulses from some given authors.

        Args:
            authors (list): Authors whose pulses should be removed.

        Returns:
            int: Number of pulses removed.
        """
        if len(authors) < 1:
            return 0
        pulse_list = []
        all_pulses = self.pulse_db.get_all()
        for pulse in all_pulses:
            if pulse.get('author_name', '') in authors:
                pulse_list.append(pulse.get('id'))
        return self.remove_pulses(pulse_list)

    def get_pulse_updates(self):
        """Update the redis with the pulses that must been re-added and deleted.

        Returns:
            tuple: Number of pulses updated and deleted.
        """
        total_add = 0
        total_del = 0
        subscribed_timestamp = self.get_latest_request('subscribed')
        events_timestamp = self.get_latest_request('events')
        # If it is the first time we download the pulses we don't execute
        # this call.
        if subscribed_timestamp is not None:
            # Fall back to the subscribed time in case the event time is
            # null for any reason.
            events_timestamp = (subscribed_timestamp
                                if events_timestamp is None
                                else events_timestamp)
            next_request = "{}/pulses/events?limit=20&since={}".format(
                self.url_base, events_timestamp)
        else:
            return total_add, total_del
        event = {}
        while next_request:
            try:
                json_data = self.make_request(next_request)
                # Apply each action in order so modifications replay exactly
                # as they happened on the server.
                for event in json_data.get('results'):
                    e_type = event.get('object_type')
                    e_action = event.get('action')
                    e_id = event.get('object_id')
                    # Authors to delete
                    if e_type == 'user' and e_action in ['unsubscribe',
                                                         'delete']:
                        total_del += self.remove_pulses_from_authors([e_id])
                    # Authors to subscribe
                    elif e_type == 'user' and e_action == 'subscribe':
                        total_add += self.add_pulses_from_authors([e_id])
                    # Pulses to delete
                    elif e_type == 'pulse' and e_action in ['unsubscribe',
                                                            'delete']:
                        total_del += self.remove_pulses([e_id])
                    # Pulses to add
                    elif e_type == 'pulse' and e_action == 'subscribe':
                        total_add += self.add_pulses_from_list([e_id])
                # Get next request
                next_request = json_data.get('next')
            except Exception as err:
                api_log.warning(
                    "Cannot download pulse updates: {}".format(err))
                raise
        # 'event' holds the last processed event; its creation date becomes
        # the starting point of the next incremental request.
        update_timestamp = event.get('created', None)
        if update_timestamp is not None:
            self.update_latest_request('events', update_timestamp)
        return total_add, total_del

    def get_new_pulses(self):
        """Update the redis with the pulses that must been added.

        Returns:
            int: Number of new pulses downloaded.
        """
        pulse_downloaded = 0
        subscribed_timestamp = self.get_latest_request('subscribed')
        if subscribed_timestamp is not None:
            next_request = ("{}/pulses/subscribed?limit=20&modified_since={}"
                            .format(self.url_base, subscribed_timestamp))
        else:
            next_request = "{}/pulses/subscribed?limit=20".format(
                self.url_base)
        # Date of the newest pulse; used as the starting point next time.
        update_timestamp = None
        while next_request:
            try:
                json_data = self.make_request(next_request)
                p_data = json_data.get('results', [])
                # First we remove the pulse to avoid IOC inconsistency
                # problems.
                self.remove_pulses([p.get('id', '') for p in p_data])
                # Save pulse data on redis
                pulse_downloaded += self.save_pulses(p_data)
                # Save the newest pulse date (the first pulse of the first
                # page is the most recently modified one).
                if update_timestamp is None:
                    try:
                        update_timestamp = p_data[0]['modified']
                    except (IndexError, KeyError):
                        # BUG FIX: narrowed from a bare 'except' so real
                        # errors are no longer silently swallowed.
                        pass
                # Get next request
                next_request = json_data.get('next')
            except Exception as err:
                api_log.warning("Cannot download new pulses: {}".format(err))
                raise
        # Saving the request date
        if update_timestamp is not None:
            self.update_latest_request('subscribed', update_timestamp)
        # If it is the first time we download the pulses, we update the
        # event request time to the current UTC timestamp.
        if subscribed_timestamp is None:
            self.update_latest_request('events')
        return pulse_downloaded

    def download_pulses(self):
        """Retrieves all the pulses information, both new and deleted.

        Returns:
            result (dict): number of new pulses downloaded, updated and
                deleted pulses.
        """
        # The original wrapped these calls in a no-op `try/except: raise`;
        # exceptions propagate to the caller either way.
        p_update, p_delete = self.get_pulse_updates()
        p_new = self.get_new_pulses()
        self.pulse_correlation_db.sync()
        db_set_config("open_threat_exchange_latest_update",
                      datetime.datetime.utcnow().strftime(
                          "%Y-%m-%d %H:%M:%S"))
        return {
            'new_pulses': p_new,
            'updated_pulses': p_update,
            'deleted_pulses': p_delete
        }

    @staticmethod
    def get_otx_user_version():
        """Returns string like 'OTX USM/5.2.4' if able to get system version
        or '' otherwise.
        """
        otx_user_version = ''
        try:
            data_retrieved, version_data = get_alienvault_version()
            if data_retrieved:
                # we need only product name and version number, so get first 2
                version_data = version_data.replace('ALIENVAULT',
                                                    'USM').split()[:2]
                otx_user_version = 'OTX {}'.format('/'.join(version_data))
            else:
                api_log.warning(
                    'Bad result returned for alienvault version: {}'.format(
                        version_data))
        except Exception as err:
            api_log.warning(
                'Failed to get alienvault version. Reason: {}'.format(err))
        return otx_user_version
def test_bad_proxy_connect_url_auth(self):
    """Open a URL through a proxy that requires authentication."""
    av_proxy = AVProxy(proxy_file=Test.content[2][0])
    av_proxy.open("http://python.org", timeout=0.5)
def test_bad_proxy_connect_request(self):
    """Open a Request object through a proxy without authentication."""
    av_proxy = AVProxy(proxy_file=Test.content[0][0])
    req = urllib2.Request("http://python.org")
    av_proxy.open(req, timeout=1)
def test_constructor(self):
    """AVProxy constructor against valid, bad and non-existent proxy files."""
    # (index into Test.content, whether authentication is expected)
    valid_cases = [(0, False), (1, False), (2, True)]
    for idx, needs_auth in valid_cases:
        av_proxy = AVProxy(proxy_file=Test.content[idx][0])
        assert av_proxy.get_proxy_url() == self.get_proxy_url(
            Test.content[idx][1])
        assert av_proxy.need_authentication() == needs_auth
        del av_proxy
    # Bad proxy file
    av_proxy = AVProxy(proxy_file=Test.content[3][0])
    assert av_proxy.get_proxy_url() is None
    assert av_proxy.need_authentication() == False
    del av_proxy
    # Non-existent proxy file
    av_proxy = AVProxy(proxy_file="/tmp/as/sdadsa")
    assert av_proxy.get_proxy_url() is None
    assert av_proxy.need_authentication() == False
    del av_proxy
def test_no_proxy_connect_url_aut(self):
    """Open a URL through a bad (authenticated) proxy with retries enabled."""
    av_proxy = AVProxy(proxy_file=Test.content[2][0])
    av_proxy.open("http://python.org", timeout=0.5, retries=1)
def test_no_proxy_connect_retry(self):
    """Direct connection with the retries argument supplied."""
    av_proxy = AVProxy()
    reply = av_proxy.open("http://python.org", retries=0.5)
    assert reply is not None
class OTXv2(object):
    """Client for the AlienVault OTX (Open Threat Exchange) API.

    Downloads subscribed pulses and pulse events, stores them in redis
    (PulseDB / PulseCorrelationDB) and tracks the timestamps of the latest
    'events' and 'subscribed' requests so later downloads are incremental.
    """

    def __init__(self, key, server="https://otx.alienvault.com/"):
        """Args:
            key (str): OTX API key.
            server (str): Base URL of the OTX server.
        """
        self.key = key
        self.server = server
        self.url_base = "{}api/v1".format(server)
        self.avproxy = AVProxy()
        self.pulse_db = PulseDB()
        self.pulse_correlation_db = PulseCorrelationDB()
        # Maps a request type to the redis key storing its latest timestamp.
        self.date_types = {
            "events": "latest_events_call_date",
            "subscribed": "latest_subscribed_call_date"
        }
        self.otx_user_version = self.get_otx_user_version()

    def update_latest_request(self, d_type, d_update=None):
        """Update the latest otx request timestamp.

        Args:
            d_type (str): The type of date to update ('events'/'subscribed').
            d_update (str): Date to store; defaults to the current UTC time.

        Returns:
            bool: True if the timestamp was saved, False otherwise.
        """
        date_type = self.date_types.get(d_type, None)
        if date_type is None:
            return False
        update_date = datetime.datetime.utcnow().strftime(
            "%Y-%m-%dT%H:%M:%S") if d_update is None else d_update
        try:
            self.pulse_db.set_key_value(date_type, update_date)
        except Exception as err:
            api_log.error("Cannot save messages revision: %s" % str(err))
            return False
        return True

    def get_latest_request(self, d_type):
        """Loads the latest request timestamp.

        Args:
            d_type (str): The type of date to get ('events'/'subscribed').

        Returns:
            str or None: The stored date, or None when unknown or unset.
        """
        date_type = self.date_types.get(d_type, None)
        if date_type is None:
            return date_type
        try:
            latest_timestamp = self.pulse_db.get(date_type)
        except Exception as err:
            api_log.warning("Cannot get messages revision: {}".format(err))
            return None
        return None if latest_timestamp == "" else latest_timestamp

    @retry(RequestException, tries=5, backoff=2, logger=api_log)
    def make_request(self, url):
        """Make a request against the OTX server.

        Args:
            url (str): The url with the request.

        Returns:
            response_data (json): The OTX response.

        Raises:
            RequestException: on retryable server errors (drives @retry).
            InvalidAPIKey: when the server rejects the API key.
            BadRequest: when the server reports a malformed request.
        """
        api_log.info("trying to make a request: {}".format(url))
        custom_headers = {'X-OTX-API-KEY': self.key}
        if self.otx_user_version:
            # Information about system that is using OTX
            custom_headers['User-Agent'] = self.otx_user_version
        # http://docs.python-requests.org/en/master/user/advanced/#proxies
        proxies = self.avproxy.get_proxies()
        response_data = requests.get(url, headers=custom_headers,
                                     proxies=proxies, timeout=10)
        api_log.info("Status code: {}".format(response_data.status_code))
        if response_data.status_code in GENERAL_ERROR_CODES:
            # Raising RequestException makes the @retry decorator try again.
            raise RequestException("Response status code: {}".format(
                response_data.status_code))
        if response_data.status_code == INVALID_API_KEY_CODE:
            raise InvalidAPIKey("Invalid API Key")
        if response_data.status_code == BAD_REQUEST_CODE:
            raise BadRequest("Bad Request")
        return response_data.json()

    def check_token(self):
        """Checks if a OTX token is valid and return user info if so.

        Returns:
            user_data (dict): A dict with the user info.
        """
        url = "{}/user/".format(self.url_base)
        try:
            user_data = self.make_request(url)
        except Exception as err:
            api_log.warning("OTX key activation error: {}".format(err))
            raise
        return user_data

    def save_pulses(self, pulses):
        """Save a list of pulses in redis.

        Args:
            pulses (list): List of pulses we want to save.

        Returns:
            int: Number of new pulses saved.
        """
        if len(pulses) > 0:
            self.pulse_db.merge(pulses)
            self.pulse_correlation_db.store(pulses)
        return len(pulses)

    def remove_pulses(self, pulses):
        """Delete a list of pulses from redis.

        Args:
            pulses (list): List of pulse IDs we want to remove.

        Returns:
            int: Number of pulses actually removed.
        """
        del_pulses = len(pulses)
        if del_pulses > 0:
            for p_id in pulses:
                try:
                    pulse = ast.literal_eval(self.pulse_db.get(p_id))
                    self.pulse_db.delete_key(p_id)
                    self.pulse_correlation_db.delete_pulse(pulse)
                except RedisDBKeyNotFound:
                    # Already gone: do not count it as a removal.
                    del_pulses -= 1
                    continue
                except Exception as err:
                    api_log.error("Error deleting Pulse: {}".format(err))
                    del_pulses -= 1
                    continue
        return del_pulses

    def add_pulses_from_list(self, pulses):
        """Add the pulses given.

        Args:
            pulses (list): List of pulse IDs we want to add.

        Returns:
            int: Number of new pulses downloaded.
        """
        p_download = []
        for p_id in pulses:
            request = "{}/pulses/{}/".format(self.url_base, p_id)
            try:
                json_data = self.make_request(request)
                # Save pulse data on redis
                p_download.append(json_data)
            except Exception as err:
                api_log.warning("Cannot download pulse {}: {}".format(
                    p_id, err))
                continue
        return self.save_pulses(p_download)

    def add_pulses_from_authors(self, authors):
        """Add the pulses from some given authors.

        Args:
            authors (list): Authors whose pulses should be added.

        Returns:
            int: Number of new pulses added.
        """
        pulse_downloaded = 0
        for author in authors:
            next_request = "{}/pulses/subscribed?limit=20&author_name={}".format(
                self.url_base, author)
            while next_request:
                try:
                    json_data = self.make_request(next_request)
                    # Save pulse data on redis
                    pulse_downloaded += self.save_pulses(
                        json_data.get('results'))
                    # Get next request
                    next_request = json_data.get('next')
                except Exception as err:
                    api_log.warning(
                        "Cannot download pulses from author {}: {}".format(
                            author, err))
                    # BUG FIX: 'continue' left next_request unchanged and
                    # retried the same failing URL forever (infinite loop
                    # for non-retryable errors like an invalid API key).
                    # Give up on this author and move to the next one.
                    break
        return pulse_downloaded

    def remove_pulses_from_authors(self, authors):
        """Remove the pulses from some given authors.

        Args:
            authors (list): Authors whose pulses should be removed.

        Returns:
            int: Number of pulses removed.
        """
        if len(authors) < 1:
            return 0
        pulse_list = []
        all_pulses = self.pulse_db.get_all()
        for pulse in all_pulses:
            if pulse.get('author_name', '') in authors:
                pulse_list.append(pulse.get('id'))
        return self.remove_pulses(pulse_list)

    def get_pulse_updates(self):
        """Update the redis with the pulses that must been re-added and deleted.

        Returns:
            tuple: Number of pulses updated and deleted.
        """
        total_add = 0
        total_del = 0
        subscribed_timestamp = self.get_latest_request('subscribed')
        events_timestamp = self.get_latest_request('events')
        # If it is the first time we download the pulses we don't execute
        # this call.
        if subscribed_timestamp is not None:
            # Fall back to the subscribed time in case the event time is
            # null for any reason.
            events_timestamp = (subscribed_timestamp
                                if events_timestamp is None
                                else events_timestamp)
            next_request = "{}/pulses/events?limit=20&since={}".format(
                self.url_base, events_timestamp)
        else:
            return total_add, total_del
        event = {}
        while next_request:
            try:
                json_data = self.make_request(next_request)
                # Apply each action in order so modifications replay exactly
                # as they happened on the server.
                for event in json_data.get('results'):
                    e_type = event.get('object_type')
                    e_action = event.get('action')
                    e_id = event.get('object_id')
                    # Authors to delete
                    if e_type == 'user' and e_action in ['unsubscribe',
                                                         'delete']:
                        total_del += self.remove_pulses_from_authors([e_id])
                    # Authors to subscribe
                    elif e_type == 'user' and e_action == 'subscribe':
                        total_add += self.add_pulses_from_authors([e_id])
                    # Pulses to delete
                    elif e_type == 'pulse' and e_action in ['unsubscribe',
                                                            'delete']:
                        total_del += self.remove_pulses([e_id])
                    # Pulses to add
                    elif e_type == 'pulse' and e_action == 'subscribe':
                        total_add += self.add_pulses_from_list([e_id])
                # Get next request
                next_request = json_data.get('next')
            except Exception as err:
                api_log.warning(
                    "Cannot download pulse updates: {}".format(err))
                raise
        # 'event' holds the last processed event; its creation date becomes
        # the starting point of the next incremental request.
        update_timestamp = event.get('created', None)
        if update_timestamp is not None:
            self.update_latest_request('events', update_timestamp)
        return total_add, total_del

    def get_new_pulses(self):
        """Update the redis with the pulses that must been added.

        Returns:
            int: Number of new pulses downloaded.
        """
        pulse_downloaded = 0
        subscribed_timestamp = self.get_latest_request('subscribed')
        if subscribed_timestamp is not None:
            next_request = ("{}/pulses/subscribed?limit=20&modified_since={}"
                            .format(self.url_base, subscribed_timestamp))
        else:
            next_request = "{}/pulses/subscribed?limit=20".format(
                self.url_base)
        # Date of the newest pulse; used as the starting point next time.
        update_timestamp = None
        while next_request:
            try:
                json_data = self.make_request(next_request)
                p_data = json_data.get('results', [])
                # First we remove the pulse to avoid IOC inconsistency
                # problems.
                self.remove_pulses([p.get('id', '') for p in p_data])
                # Save pulse data on redis
                pulse_downloaded += self.save_pulses(p_data)
                # Save the newest pulse date (the first pulse of the first
                # page is the most recently modified one).
                if update_timestamp is None:
                    try:
                        update_timestamp = p_data[0]['modified']
                    except (IndexError, KeyError):
                        # BUG FIX: narrowed from a bare 'except' so real
                        # errors are no longer silently swallowed.
                        pass
                # Get next request
                next_request = json_data.get('next')
            except Exception as err:
                api_log.warning("Cannot download new pulses: {}".format(err))
                raise
        # Saving the request date
        if update_timestamp is not None:
            self.update_latest_request('subscribed', update_timestamp)
        # If it is the first time we download the pulses, we update the
        # event request time to the current UTC timestamp.
        if subscribed_timestamp is None:
            self.update_latest_request('events')
        return pulse_downloaded

    def download_pulses(self):
        """Retrieves all the pulses information, both new and deleted.

        Returns:
            result (dict): number of new pulses downloaded, updated and
                deleted pulses.
        """
        # The original wrapped these calls in a no-op `try/except: raise`;
        # exceptions propagate to the caller either way.
        p_update, p_delete = self.get_pulse_updates()
        p_new = self.get_new_pulses()
        self.pulse_correlation_db.sync()
        db_set_config("open_threat_exchange_latest_update",
                      datetime.datetime.utcnow().strftime(
                          "%Y-%m-%d %H:%M:%S"))
        return {
            'new_pulses': p_new,
            'updated_pulses': p_update,
            'deleted_pulses': p_delete
        }

    @staticmethod
    def get_otx_user_version():
        """Returns string like 'OTX USM/5.2.4' if able to get system version
        or '' otherwise.
        """
        otx_user_version = ''
        try:
            data_retrieved, version_data = get_alienvault_version()
            if data_retrieved:
                # we need only product name and version number, so get first 2
                version_data = version_data.replace('ALIENVAULT',
                                                    'USM').split()[:2]
                otx_user_version = 'OTX {}'.format('/'.join(version_data))
            else:
                api_log.warning(
                    'Bad result returned for alienvault version: {}'.format(
                        version_data))
        except Exception as err:
            api_log.warning(
                'Failed to get alienvault version. Reason: {}'.format(err))
        return otx_user_version
def get_message_center_messages():
    """Retrieves the list of messages from the MCServer.

    Returns:
        tuple: (message_list, conn_failed) where message_list is the list
            of messages retrieved (empty on failure) and conn_failed is
            True only when the Message Center Server was unreachable.
    """
    messages = []
    conn_failed = False
    try:
        # First we need to retrieve the list of tags for the current system.
        # If this call fails, we shouldn't make the mcserver request
        proxy = AVProxy()
        if proxy is None:
            app.logger.error("Connection error with AVProxy")
        system_tags = get_system_tags()
        if len(system_tags) == 0:
            # BUG FIX: this function's contract is a (messages, conn_failed)
            # tuple; the bare `return []` broke callers that unpack 2 values.
            return [], conn_failed
        revision = get_latest_message_revision()
        msg_filter = "filters=%s" % ",".join(system_tags)
        if revision is not None:
            msg_filter += "&revision=%s" % revision
        url = "https://%s:%s/messages?%s" % (
            app.config["MESSAGE_CENTER_SERVER"],
            app.config["MESSAGE_CENTER_PORT"],
            msg_filter,
        )
        request = urllib2.Request(url)
        response = proxy.open(request, timeout=20, retries=3)
        response_data = json.loads(response.read())
        response_code = response.getcode()
        if response_code != 200:
            # Typo fixed in log message ("reponse" -> "response").
            app.logger.warning("Invalid response from the mcserver %s:%s"
                               % (response_code, response_data))
        for field in ["data", "status", "signature"]:
            if field not in response_data:
                # BUG FIX: keep the tuple contract here too.
                return [], conn_failed
        # Validate the data before trusting it.
        if not verify_sign(response_data["signature"], response_data["data"]):
            # Typo fixed in log message ("comimg" -> "coming").
            app.logger.warning("Cannot verify the data coming from the mcserver")
            return [], False
        messages = ast.literal_eval(b64decode(response_data["data"]))
        if "revision" in response_data:
            save_messages_revision(response_data["revision"])
    except (URLError, HTTPError):
        conn_failed = True
        app.logger.error("Cannot connect to the Message Center Server")
    except Exception:
        import traceback
        app.logger.error(
            "An error occurred while retrieving the Message Center Server messages: %s"
            % str(traceback.format_exc())
        )
    return messages, conn_failed
# import time import json import urllib2 import requests import celery.utils.log from api.lib.monitors.monitor import (Monitor, MonitorTypes, ComponentTypes) from ansiblemethods.system.system import get_doctor_data from db.methods.system import get_systems, get_system_id_from_local, get_system_ip_from_system_id from apimethods.system.proxy import AVProxy logger = celery.utils.log.get_logger("celery") PROXY = AVProxy() if PROXY is None: logger.error("Connection error with AVProxy") class MonitorPlatformTelemetryData(Monitor): """ Get platform telemetry data using the AV Doctor. This basically runs the Doctor on all suitable systems, and delivers the output data to a server. """ def __init__(self): Monitor.__init__(self, MonitorTypes.MONITOR_PLATFORM_TELEMETRY_DATA) self.message = 'Platform Telemetry Data Monitor Enabled' self.__strike_zone_plugins = [ '0005_agent_plugins_exist.plg', '0006_agent_plugins_integrity.plg',