def analyze(self, line):
    """Parse one '|'-separated Tor exit node record and store it as an Ip.

    Expected fields:
    ip|name|router-port|directory-port|flags|uptime|version|contactinfo.
    Lines with fewer than 8 fields (headers, blanks) are ignored.
    """
    fields = line.split('|')
    if len(fields) < 8:
        return

    # BUG FIX: find_ips returns a list that may be empty; indexing [0]
    # unconditionally raised IndexError on lines without a parseable IP.
    found = toolbox.find_ips(fields[0])
    if not found:
        return
    ip = Ip(ip=found[0], tags=['tor'])

    tornode = {}
    tornode['description'] = "Tor exit node"
    tornode['ip'] = fields[0]
    tornode['name'] = fields[1]
    tornode['router-port'] = fields[2]
    tornode['directory-port'] = fields[3]
    tornode['flags'] = fields[4]
    tornode['uptime'] = fields[5]
    tornode['version'] = fields[6]
    tornode['contactinfo'] = fields[7]
    # Stable id: the same ip+name pair updates the existing evil entry.
    tornode['id'] = md5.new(tornode['ip'] + tornode['name']).hexdigest()
    tornode['value'] = "Tor node: %s (%s)" % (tornode['name'], tornode['ip'])
    tornode['source'] = self.name

    ip.add_evil(tornode)
    ip.seen()
    self.commit_to_db(ip)
def analyze(self, dict):
    """Build a (host element, Evil) pair from one Feodo RSS item dict.

    Returns (None, None) when no usable host can be extracted.
    """
    evil = Evil()

    # description
    evil['description'] = dict['description']

    # host: may be an IP or a hostname
    host = re.search("Host: (?P<host>\S+),", dict['description'])
    if not host:
        # BUG FIX: a missing "Host:" field used to fall through with
        # host=None and crash on host['value'] below.
        return None, None
    if toolbox.is_ip(host.group('host')):
        host = Ip(toolbox.is_ip(host.group('host')))
    elif toolbox.is_hostname(host.group('host')):
        host = Hostname(toolbox.is_hostname(host.group('host')))
    else:
        return None, None

    # version (single letter A-D); default to 'N/A' when absent
    version = re.search("Version: (?P<version>[ABCD])", dict['description'])
    if version is not None:
        evil['version'] = version.group('version')
    else:
        evil['version'] = 'N/A'

    # linkback
    evil['link'] = dict['link']

    # tags
    evil['tags'] += ['feodo', 'cridex', 'malware', 'exe']

    evil['value'] = "Feodo C2 ({})".format(host['value'])

    return host, evil
def analyze(self, line):
    """Parse one '|'-separated Tor exit node record and commit it.

    Fields: ip|name|router-port|directory-port|flags|uptime|version|contactinfo.
    Short lines (headers, blanks) are ignored.
    """
    fields = line.split('|')
    if len(fields) < 8:
        return

    # BUG FIX: guard against lines where find_ips finds nothing instead
    # of letting the [0] index raise IndexError.
    found = toolbox.find_ips(fields[0])
    if not found:
        return
    ip = Ip(ip=found[0], tags=['tor'])

    tornode = {}
    tornode['description'] = "Tor exit node"
    tornode['ip'] = fields[0]
    tornode['name'] = fields[1]
    tornode['router-port'] = fields[2]
    tornode['directory-port'] = fields[3]
    tornode['flags'] = fields[4]
    tornode['uptime'] = fields[5]
    tornode['version'] = fields[6]
    tornode['contactinfo'] = fields[7]
    # Stable id derived from ip+name so re-imports update, not duplicate.
    tornode['id'] = md5.new(tornode['ip'] + tornode['name']).hexdigest()
    tornode['value'] = "Tor node: %s (%s)" % (tornode['name'], tornode['ip'])
    tornode['source'] = self.name

    ip.add_evil(tornode)
    self.commit_to_db(ip)
def analyze(self, dict):
    """Record one IP reported with a 1-5 maliciousness level ('lv')."""
    evil = dict
    evil['source'] = self.name
    evil['host'] = dict['ip']
    evil['description'] = ('This IP was reported for ' + dict['lv']
                           + '/5 malicious activity')
    # One evil entry per (ip, level) pair.
    evil['id'] = md5.new(evil['ip'] + 'Lv %s' % dict['lv']).hexdigest()

    element = Ip(ip=evil['host'])
    element.add_evil(evil)
    self.commit_to_db(element)
class FeodoTracker(Feed):
    # Long-form explanation of each Feodo version letter ('A'-'D') as
    # published by the tracker; keyed by the version character parsed
    # from feed entries.
    descriptions = {
        'A': "Hosted on compromised webservers running an nginx proxy on port 8080 TCP forwarding all botnet traffic to a tier 2 proxy node. Botnet traffic usually directly hits these hosts on port 8080 TCP without using a domain name.",
        'B': "Hosted on servers rented and operated by cybercriminals for the exclusive purpose of hosting a Feodo botnet controller. Usually taking advantage of a domain name within ccTLD .ru. Botnet traffic usually hits these domain names using port 80 TCP.",
        'C': "Successor of Feodo, completely different code. Hosted on the same botnet infrastructure as Version A (compromised webservers, nginx on port 8080 TCP or port 7779 TCP, no domain names) but using a different URL structure. This Version is also known as Geodo.",
        'D': "Successor of Cridex. This version is also known as Dridex",
    }
    # Malware family name per version letter; used to tag the created
    # Ip/Hostname elements.
    variants = {
        'A': "Feodo",
        'B': "Feodo",
        'C': "Geodo",
        'D': "Dridex",
    }

    def __init__(self, name):
        """Configure the feed's display name, RSS source URL and description."""
        super(FeodoTracker, self).__init__(name)
        self.name = "FeodoTracker"
        self.source = "https://feodotracker.abuse.ch/feodotracker.rss"
        self.description = "Feodo Tracker RSS Feed. This feed shows the latest twenty Feodo C2 servers which Feodo Tracker has identified."
def update(self):
    """Fetch the RSS feed and analyze every <item> node."""
    for dict in self.update_xml('item',
                                ["title", "link", "description", "guid"]):
        self.analyze(dict)


def analyze(self, dict):
    """Turn one Feodo RSS item into an Ip/Hostname element and commit it."""
    evil = dict

    # Timestamp embedded in the title, e.g. "... (2014-01-01 12:00:00)".
    date_string = re.search(r"\((?P<datetime>[\d\- :]+)\)",
                            dict['title']).group('datetime')
    try:
        evil['date_added'] = datetime.datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        # Unparsable date: proceed without date_added (as before).
        pass

    g = re.match(r'^Host: (?P<host>.+), Version: (?P<version>\w)',
                 dict['description'])
    if g is None:
        # BUG FIX: an unparsable description used to raise AttributeError
        # on g.groupdict(); skip such items instead.
        return
    g = g.groupdict()

    evil['host'] = g['host']
    evil['version'] = g['version']
    evil['description'] = FeodoTracker.descriptions[g['version']]
    evil['id'] = md5.new(dict['description']).hexdigest()
    evil['source'] = self.name
    del evil['title']

    if toolbox.is_ip(evil['host']):
        elt = Ip(ip=evil['host'], tags=[FeodoTracker.variants[g['version']]])
    elif toolbox.is_hostname(evil['host']):
        elt = Hostname(hostname=evil['host'],
                       tags=[FeodoTracker.variants[g['version']]])
    else:
        # BUG FIX: elt was unbound (NameError) when the host was neither
        # an IP nor a hostname.
        return

    elt.add_evil(evil)
    self.commit_to_db(elt)
class OpenblIP(Feed):
    """Fetches the OpenBL base blocklist from
    http://www.openbl.org/lists/base.txt and stores each listed IP."""

    def __init__(self, name):
        super(OpenblIP, self).__init__(name, run_every="12h")

    def update(self):
        """Download the list and feed each line to analyze().

        Returns True on success, False on download failure.  BUG FIX /
        consistency: the urlopen call is now guarded like the other feeds
        in this module, recording the error in self.status instead of
        letting a network error propagate into the scheduler.
        """
        try:
            feed = urllib2.urlopen(
                "http://www.openbl.org/lists/base.txt").readlines()
            self.status = "OK"
        except Exception as e:
            self.status = "ERROR: " + str(e)
            return False
        for line in feed:
            self.analyze(line)
        return True

    def analyze(self, line):
        """Extract the first IP of one line, tag it 'openblip', save it."""
        # Skip comments and blank lines.
        if line.startswith('#') or line.startswith('\n'):
            return
        try:
            ip = toolbox.find_ips(line)[0]
        except Exception:
            # if find_ips raises, it means no ip was found in the line
            return
        # Create the new ip and store it in the DB
        ip = Ip(ip=ip, tags=['openblip'])
        ip, new = self.model.save(ip, with_status=True)
        if new:
            self.elements_fetched += 1
def analyze(self, line):
    """Parse one CSV line of the malware-traffic-analysis list.

    Format: ip,port,domains,traffic_info,description,date.
    """
    if line.startswith("#"):
        return
    # BUG FIX: removed a leftover debug `print line`, and malformed lines
    # (wrong field count) no longer raise ValueError out of analyze().
    try:
        ip, port, domains, traffic_info, description, date = line.split(',')
    except ValueError:
        return

    ip = Ip(ip=ip)
    ip = self.model.save(ip)

    port = re.search('[\d]+', port)
    if port:
        port = port.group()

    # Domains come '/'-separated; keep only the name part before ':'.
    domains = [d.strip() for d in domains.split('/')]
    domains = [self.model.add_text([d.split(':')[0]]) for d in domains if d]
    for d in domains:
        self.model.connect(ip, d, 'domain')

    evil = Evil()
    evil['tags'] = ['MalwareTrafficAnalysis']
    evil['description'] = "{} ({})".format(description, traffic_info)
    evil['value'] = '{} on {}'.format(description, ip['value'])

    self.commit_to_db(ip, evil)
def analyze(self, line):
    """Example analyze(): parse one 'ip_addr;owner;description;' record
    and return the (Element, Evil) pair Malcom should link and store."""
    # One record per call, e.g.:
    #   8.8.8.8;Google Inc.;malicious nameserver;
    ip, org, description = line.split(';')

    element = Ip(ip=ip)
    # Feed-specific tags; the 'evil' tag is added automatically on insert.
    element['tags'] = ['google.com']

    # The Evil element carries the threat-intel context.  Keeping it
    # separate from the Ip lets Malcom count how many distinct sources
    # have reported this same artifact.
    evil = Evil()
    evil['tags'] = ['zeus', 'cc']

    # Helpers such as update_xml or lines expect an (Element, Evil)
    # tuple back; they connect both elements, tag them and insert them.
    return element, evil
class MalwareTrafficAnalysis(Feed):
    """Fetches the suspicious IP / domain list from
    malware-traffic-analysis.net and stores the IPs, domains and the
    links between them."""

    def __init__(self, name):
        super(MalwareTrafficAnalysis, self).__init__(name, run_every="12h")
        self.name = "MalwareTrafficAnalysis"
        self.source = "http://www.malware-traffic-analysis.net/suspicious-ip-addresses-and-domains.txt"
        self.description = "Collects results from malware-traffic-analysis.net"

    def update(self):
        """Download the list and analyze it line by line."""
        feed = urllib2.urlopen(self.source).read().split('\n')
        for line in feed:
            self.analyze(line)

    def analyze(self, line):
        """Parse one record: ip,port,domains,traffic_info,description,date."""
        # Skip comments and the header line.
        if line.startswith("#") or line.startswith("IP address"):
            return
        try:
            (ip, port, domains, traffic_info, description,
             date_string) = line.split(',')
        except ValueError:
            # Malformed line, skipping.  (BUG FIX: dropped the py2-only
            # `except ValueError, e` binding and the debug print.)
            return

        evil = {}
        evil['ip'] = ip
        port = re.search('[\d]+', port)
        if port:
            evil['port'] = port.group()
        evil['domains'] = domains
        evil['description'] = "{}".format(description)
        if traffic_info:
            evil['description'] += " ({})".format(traffic_info)
        evil['date_added'] = datetime.datetime.strptime(date_string,
                                                        "%Y-%m-%d")
        evil['id'] = md5.new(evil['description'] + evil['ip']
                             + date_string).hexdigest()
        evil['source'] = self.name

        ip = Ip(ip=ip)
        # Keep only '/'-separated entries that look like hostnames.
        domains = [d.strip() for d in domains.split('/')
                   if toolbox.is_hostname(d.strip())]

        ip.seen(first=evil['date_added'])
        ip.add_evil(evil)
        i = self.commit_to_db(ip)

        # Each hostname gets the same evil info and a link to the IP.
        for d in domains:
            h = Hostname(hostname=d)
            h.seen(first=evil['date_added'])
            h.add_evil(evil)
            h = self.commit_to_db(h)
            self.model.connect(h, i)
def analyze(self, dict):
    """Turn one Feodo RSS item into an Ip/Hostname element, mark it seen
    and commit it with the attached evil info."""
    evil = dict

    date_string = re.search(r"\((?P<datetime>[\d\- :]+)\)",
                            dict['title']).group('datetime')
    try:
        evil['date_added'] = datetime.datetime.strptime(
            date_string, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass

    g = re.match(r'^Host: (?P<host>.+), Version: (?P<version>\w)',
                 dict['description'])
    if g is None:
        # BUG FIX: an unparsable description used to crash on
        # g.groupdict(); skip such items instead.
        return
    g = g.groupdict()

    evil['host'] = g['host']
    evil['version'] = g['version']
    evil['description'] = FeodoTracker.descriptions[g['version']]
    evil['id'] = md5.new(dict['description']).hexdigest()
    evil['source'] = self.name
    del evil['title']

    if toolbox.is_ip(evil['host']):
        elt = Ip(ip=evil['host'], tags=[FeodoTracker.variants[g['version']]])
    elif toolbox.is_hostname(evil['host']):
        elt = Hostname(hostname=evil['host'],
                       tags=[FeodoTracker.variants[g['version']]])
    else:
        # BUG FIX: elt was unbound (NameError) when the host was neither
        # an IP nor a hostname.
        return

    # NOTE(review): if strptime failed above, 'date_added' is absent and
    # this raises KeyError — pre-existing behavior, confirm intent.
    elt.seen(first=evil['date_added'])
    elt.add_evil(evil)
    self.commit_to_db(elt)
def analyze(self, line):
    """Tutorial analyze(): parse one 'ip_addr;owner;description;' record
    and commit the resulting Ip element, with its evil info, to the DB."""
    # Example record:  8.8.8.8;Google Inc.;malicious nameserver;
    ip, org, description = line.split(';')

    element = Ip(ip=ip)
    # Feed-specific tags; the 'evil' tag is added automatically on insert.
    element['tags'] = ['zeus', 'cc']

    # Evil info is kept separate from the Ip element so Malcom can tell
    # how many distinct sources have reported this artifact.
    evil = {}
    evil['org'] = org
    # The id controls updates: same id -> existing entry updated,
    # different id -> new entry created.
    evil['id'] = md5.new(org + ip).hexdigest()
    # Conventionally the feed name: where the information comes from.
    evil['source'] = self.name
    # Mandatory: why this element is considered evil.
    evil['description'] = description
    # Any extra free-form fields are allowed.
    evil['foo'] = "bar"

    element.add_evil(evil)
    self.commit_to_db(element)
def analyze(self, line):
    """Tutorial analyze(): one 'ip_addr;owner;description;' record per
    call; builds an Ip element plus its evil payload and commits them."""
    # Example record:  8.8.8.8;Google Inc.;malicious nameserver;
    ip, org, description = line.split(';')

    _ip = Ip(ip=ip)
    _ip['tags'] = ['zeus', 'cc']  # the 'evil' tag is added automatically

    # Evil payload in one literal.  'id' decides whether an existing
    # entry is updated (same id) or a new one created (new id); 'source'
    # names the feed; 'description' is mandatory (why is this evil?);
    # anything else ('foo') is free-form.
    evil = {
        'org': org,
        'id': md5.new(org + ip).hexdigest(),
        'source': self.name,
        'description': description,
        'foo': "bar",
    }

    _ip.add_evil(evil)
    self.commit_to_db(_ip)
class TorExitNodes(Feed):
    """Fetches the Tor exit node list from https://www.dan.me.uk/tornodes."""

    def __init__(self, name):
        super(TorExitNodes, self).__init__(name, run_every="12h")
        self.name = "TorExitNodes"
        self.source = "https://www.dan.me.uk/tornodes"
        self.description = "List of Tor exit nodes"

    def update(self):
        """Download the page, cut out the node list between the BEGIN/END
        markers, un-escape it and analyze one node per line."""
        feed = urllib2.urlopen(self.source).read()
        start = feed.find('<!-- __BEGIN_TOR_NODE_LIST__ //-->') + len(
            '<!-- __BEGIN_TOR_NODE_LIST__ //-->')
        end = feed.find('<!-- __END_TOR_NODE_LIST__ //-->')
        # BUG FIX: the entity un-escaping had degraded into no-ops
        # (.replace('>', '>')); restore '&gt;'/'&lt;' -> '>'/'<'.
        feed = feed[start:end].replace('\n', '').replace(
            '<br />', '\n').replace('&gt;', '>').replace('&lt;', '<').split('\n')
        if len(feed) > 10:
            self.status = "OK"
        for line in feed:
            self.analyze(line)
        return True

    def analyze(self, line):
        """Parse one '|'-separated node record into an Evil + Ip pair."""
        fields = line.split('|')
        tornode = Evil(tags=['tor exit node'])
        # BUG FIX: the `try:` had been commented out, leaving an orphaned
        # `except` clause (a SyntaxError). Short lines are skipped here.
        try:
            tornode['ip'] = fields[0]
            tornode['name'] = fields[1]
            tornode['router-port'] = fields[2]
            tornode['directory-port'] = fields[3]
            tornode['flags'] = fields[4]
            tornode['uptime'] = fields[5]
            tornode['version'] = fields[6]
            tornode['contactinfo'] = fields[7]
        except IndexError:
            return
        tornode['value'] = "Tor node: %s (%s)" % (tornode['name'],
                                                  tornode['ip'])
        try:
            ip = toolbox.find_ips(tornode['ip'])[0]
            ip = Ip(ip=ip, tags=['tor'])
        except Exception:
            # if find_ips raises, no ip was found in the line
            return
        # NOTE(review): as in the original, nothing is committed to the
        # DB here — the storage step appears to be missing; confirm.
def analyze(self, dict):
    """Process one Feodo RSS item: build the Ip/Hostname element, record
    first-seen, attach the evil info and commit."""
    evil = dict

    date_string = re.search(r"\((?P<datetime>[\d\- :]+)\)",
                            dict['title']).group('datetime')
    try:
        evil['date_added'] = datetime.datetime.strptime(date_string,
                                                        "%Y-%m-%d %H:%M:%S")
    except ValueError:
        pass

    g = re.match(r'^Host: (?P<host>.+), Version: (?P<version>\w)',
                 dict['description'])
    if g is None:
        # BUG FIX: descriptions that do not match the pattern used to
        # raise AttributeError on g.groupdict(); skip them.
        return
    g = g.groupdict()

    evil['host'] = g['host']
    evil['version'] = g['version']
    evil['description'] = FeodoTracker.descriptions[g['version']]
    evil['id'] = md5.new(dict['description']).hexdigest()
    evil['source'] = self.name
    del evil['title']

    if toolbox.is_ip(evil['host']):
        elt = Ip(ip=evil['host'], tags=[FeodoTracker.variants[g['version']]])
    elif toolbox.is_hostname(evil['host']):
        elt = Hostname(hostname=evil['host'],
                       tags=[FeodoTracker.variants[g['version']]])
    else:
        # BUG FIX: previously fell through with elt unbound -> NameError.
        return

    elt.seen(first=evil['date_added'])
    elt.add_evil(evil)
    self.commit_to_db(elt)
def analyze(self, dict):
    """Store one URL + IP pair from the feed, both tagged with the
    malware family and first-seen from 'first_seen' (dd-mm-YYYY)."""
    evil = dict
    evil['date_added'] = datetime.datetime.strptime(dict['first_seen'],
                                                    "%d-%m-%Y")
    evil['description'] = self.description
    evil['source'] = self.name

    # URL element
    evil['url'] = dict['url']
    evil['id'] = md5.new(evil['url'] + dict['first_seen']).hexdigest()
    url = Url(url=evil['url'], tags=[dict['malware']])
    url.seen(first=evil['date_added'])
    url.add_evil(evil)
    self.commit_to_db(url)

    # IP element: re-uses the same evil dict, overwriting the 'url'
    # slot with the IP (as the original code did).
    evil['url'] = dict['ip']
    evil['id'] = md5.new(evil['url'] + dict['first_seen']).hexdigest()
    ip = Ip(ip=dict['ip'], tags=[dict['malware']])
    ip.seen(first=evil['date_added'])
    ip.add_evil(evil)
    self.commit_to_db(ip)
def analyze(self, dict):
    """Build an (element, Evil) pair from one SpyEye tracker RSS item.

    Returns (None, None) when the description carries no Host: field.
    """
    # Evil objects are what Malcom uses to store anything it considers
    # evil: malware, spam sources, etc.
    evil = Evil()

    # description
    evil['description'] = dict['link'] + " " + dict['description']

    # status
    status = re.search("Status: (?P<status>\S+),", dict['description'])
    if status:
        evil['status'] = status.group('status')
    else:
        evil['status'] = "unknown"

    # linkback
    evil['guid'] = dict['guid']

    # tags
    evil['tags'] += ['spyeye', 'malware', 'cc']

    # Values must be unique: Malcom identifies elements by value.
    host = re.search("Host: (?P<host>\S+),", dict['description'])
    if not host:
        # BUG FIX: a description without "Host:" used to raise
        # AttributeError on .group(); skip the entry instead.
        return None, None
    host = host.group("host")
    if toolbox.find_ips(host):
        elt = Ip(host, tags=['cc', 'spyeye', 'malware'])
    else:
        elt = Hostname(host, tags=['cc', 'spyeye', 'malware'])

    evil['value'] = "SpyEye CC (%s)" % elt['value']

    # The caller commits the returned pair.  BUG FIX: removed the
    # unreachable self.commit_to_db(elt, evil) after the return.
    return elt, evil
def analyze(self, dict):
    """Save one reported IP from this feed, marked as seen now."""
    evil = dict
    evil['source'] = self.name
    evil['description'] = self.description
    evil['host'] = dict['ip']
    # Constant suffix: exactly one evil entry per IP for this feed.
    evil['id'] = md5.new(evil['ip'] + 'InfosecCertPaItIP').hexdigest()

    element = Ip(ip=evil['host'])
    element.seen()
    element.add_evil(evil)
    self.commit_to_db(element)
def add_text(self, text, tags=None):
    """Create Url/Hostname/Ip elements from a list of raw strings.

    Each non-blank string is classified (URL first, then hostname, then
    IP) and saved with the given tags.  Returns the single saved element
    when exactly one was added, otherwise the list of saved elements
    (possibly empty).
    """
    # BUG FIX: the default was a shared mutable list (tags=[]); use the
    # None sentinel instead (backward compatible).
    if tags is None:
        tags = []
    added = []
    for t in text:
        elt = None
        if t.strip() != "":
            if is_url(t):
                elt = Url(is_url(t), [])
            elif is_hostname(t):
                elt = Hostname(is_hostname(t), [])
            elif is_ip(t):
                elt = Ip(is_ip(t), [])
            if elt:
                added.append(self.save_element(elt, tags))
    if len(added) == 1:
        return added[0]
    else:
        return added
def analyze(self, dict):
    """Save one Alienvault-scored IP (threat score out of 7)."""
    evil = dict
    evil['host'] = dict['ip']
    # One evil entry per (ip, score) pair.
    evil['id'] = md5.new(evil['ip'] + evil['score'] + '/7').hexdigest()
    evil['description'] = 'Threat Score %s/7 by Alienvault.com' % dict['score']
    evil['source'] = self.name

    element = Ip(ip=evil['host'])
    element.seen()
    element.add_evil(evil)
    self.commit_to_db(element)
class AlienvaultIP(Feed):
    """Fetches the Alienvault IP reputation list from
    https://reputation.alienvault.com/reputation.generic."""

    def __init__(self, name):
        super(AlienvaultIP, self).__init__(name, run_every="12h")
        self.name = "Alienvault"
        self.description = "Alienvault IP Reputation Database"
        self.source = "https://reputation.alienvault.com/reputation.generic"
        self.confidence = 50

    def update(self):
        """Process the source line by line via the Feed helper."""
        self.update_lines()

    def analyze(self, line):
        """Return an (Ip, Evil) pair for one reputation line, or None.

        Lines without an IP or without a qualifying '# <description>,'
        part are ignored.
        """
        if line.startswith('#') or line.startswith('\n'):
            return
        try:
            ip = toolbox.find_ips(line)[0]
            description = re.search(" # (?P<description>[^,]+),", line)
            if description:
                description = description.group('description')
            else:
                description = False
        except Exception:
            # BUG FIX: `except Exception, e` is py2-only syntax and the
            # binding was unused.  If find_ips raises, no ip was found.
            return
        if not description:
            return  # we're not interested in non-qualified information

        # Create the new ip and its Evil counterpart.
        ip = Ip(ip=ip, tags=['alienvault'])
        evil = Evil()
        evil['value'] = ip['value'] + ' (%s)' % description
        evil['tags'] = ['AlienvaultIP', description]
        return ip, evil
def analyze(self, dict):
    """Save one IP with its first/last seen timestamps and category."""
    evil = dict
    fmt = "%Y-%m-%d %H:%M:%S"
    evil['first_seen'] = datetime.datetime.strptime(dict['first_seen'], fmt)
    evil['last_seen'] = datetime.datetime.strptime(dict['last_seen'], fmt)
    evil['url'] = dict['ip']
    # One evil entry per (ip, category) pair.
    evil['id'] = md5.new(evil['url'] + dict['category']).hexdigest()
    evil['description'] = self.description
    evil['source'] = self.name

    element = Ip(ip=evil['url'])
    element.seen(first=evil['first_seen'], last=evil['last_seen'])
    element.add_evil(evil)
    self.commit_to_db(element)
class DShield3215(Feed):
    """Fetches the DShield scanning report for AS 3215 from
    http://dshield.org/asdetailsascii.html?as=3215."""

    def __init__(self, name):
        super(DShield3215, self).__init__(name)
        self.name = "DShield3215"
        self.source = 'http://dshield.org/asdetailsascii.html?as=3215'
        self.description = "DShield scanning report for AS 3215"
        self.confidence = 30

    def update(self):
        """Process the source line by line via the Feed helper."""
        self.update_lines()

    def analyze(self, line):
        """Return an (Ip, Evil) pair for one tab-separated report line.

        Fields used: [0] ip, [2] report count, [3] first seen,
        [4] last seen.
        """
        if line.startswith('#') or line.startswith('\n'):
            return
        # BUG FIX: the line was split twice and the result was bound to
        # `dict`, shadowing the builtin; split once into `fields`.
        fields = line.split('\t')
        if int(fields[2]) < 300:
            # skip entries which have not been reported at least 300 times
            return
        try:
            ip = toolbox.find_ips(line)[0]
        except Exception:
            # if find_ips raises, no ip was found in the line
            return

        # Create the new ip and its Evil counterpart.
        ip = Ip(ip=ip, tags=['dshield'])
        evil = Evil()
        evil['value'] = 'Scanner at %s' % ip['value']
        evil['reports'] = fields[2]
        evil['first seen'] = fields[3]
        evil['last seen'] = fields[4]
        return ip, evil
def analyze(self, dict):
    """Store one Feodo tracker CSV row as an Ip or Hostname element."""
    evil = dict
    try:
        evil['date_added'] = datetime.datetime.strptime(
            dict['first_seen'], "%Y-%m-%d %H:%M:%S")
    except ValueError:
        # Unparsable date: proceed without date_added (as before).
        pass

    evil['host'] = dict['dst_ip']
    evil['version'] = dict['malware']
    evil['description'] = FeodoTracker.descriptions[dict['malware']]
    evil['id'] = md5.new(evil['host'] + evil['description']).hexdigest()
    evil['source'] = self.name

    if toolbox.is_ip(evil['host']):
        elt = Ip(ip=evil['host'], tags=[dict['malware']])
    elif toolbox.is_hostname(evil['host']):
        elt = Hostname(hostname=evil['host'], tags=[dict['malware']])
    else:
        # BUG FIX: elt was unbound (NameError) when the host was neither
        # an IP nor a hostname.
        return

    elt.seen(first=evil['date_added'])
    elt.add_evil(evil)
    self.commit_to_db(elt)
def analyze(self, line):
    """Parse one 'ip,port,domains,traffic_info,description,date' record,
    store the Ip, each hostname, and connect them to the Ip."""
    if line.startswith("#") or line.startswith("IP address"):
        return
    try:
        ip, port, domains, traffic_info, description, date_string = \
            line.split(',')
    except ValueError:
        # Malformed line, skipping
        return

    evil = {}
    evil['ip'] = ip
    match = re.search('[\d]+', port)
    if match:
        evil['port'] = match.group()
    evil['domains'] = domains
    desc = "{}".format(description)
    if traffic_info:
        desc += " ({})".format(traffic_info)
    evil['description'] = desc
    evil['date_added'] = datetime.datetime.strptime(date_string, "%Y-%m-%d")
    evil['id'] = md5.new(desc + ip + date_string).hexdigest()
    evil['source'] = self.name

    ip_element = Ip(ip=ip)
    hostnames = [d.strip() for d in domains.split('/')
                 if toolbox.is_hostname(d.strip())]

    ip_element.seen(first=evil['date_added'])
    ip_element.add_evil(evil)
    saved_ip = self.commit_to_db(ip_element)

    # Every hostname gets the same evil info and a link back to the IP.
    for name in hostnames:
        host = Hostname(hostname=name)
        host.seen(first=evil['date_added'])
        host.add_evil(evil)
        host = self.commit_to_db(host)
        self.model.connect(host, saved_ip)
# NOTE(review): fragment — the enclosing `def update(self):` header is
# outside this chunk; the code below is the tail of update() followed by
# the full analyze() method.
try:
    # Download the malwaredomainlist IP blocklist.
    feed = urllib2.urlopen(
        "http://www.malwaredomainlist.com/hostslist/ip.txt").readlines(
        )
    self.status = "OK"
except Exception, e:
    # Record the download error instead of crashing the scheduler.
    self.status = "ERROR: " + str(e)
    return False
for line in feed:
    self.analyze(line)
return True


def analyze(self, line):
    # Extract the first IP of one line, tag it 'mdliplist' and save it,
    # counting newly added elements.
    if line.startswith('#') or line.startswith('\n'):
        return
    try:
        ip = toolbox.find_ips(line)[0]
    except Exception, e:
        # if find_ip raises an exception, it means no ip
        # was found in the line, so we return
        return
    # Create the new ip and store it in the DB
    ip = Ip(ip=ip, tags=['mdliplist'])
    ip, status = self.analytics.save_element(ip, with_status=True)
    if status['updatedExisting'] == False:
        self.elements_fetched += 1
def update(self):
    """Download the OpenBL base list; return True on success, False on
    failure (with the error recorded in self.status)."""
    try:
        feed = urllib2.urlopen(
            "http://www.openbl.org/lists/base.txt").readlines()
        self.status = "OK"
    except Exception as e:
        # BUG FIX: `except Exception, e` is py2-only syntax; the `as`
        # form is valid in Python 2.6+ and 3.
        self.status = "ERROR: " + str(e)
        return False
    for line in feed:
        self.analyze(line)
    return True


def analyze(self, line):
    """Extract the first IP of one line, tag it 'openblip' and save it,
    counting newly added elements."""
    if line.startswith('#') or line.startswith('\n'):
        return
    try:
        ip = toolbox.find_ips(line)[0]
    except Exception:
        # if find_ips raises, it means no ip was found in the line
        return
    # Create the new ip and store it in the DB
    ip = Ip(ip=ip, tags=['openblip'])
    ip, status = self.analytics.save_element(ip, with_status=True)
    if not status['updatedExisting']:
        self.elements_fetched += 1
# NOTE(review): fragment — the enclosing `def update(self):` header is
# outside this chunk; below is the tail of update() plus analyze().
try:
    # Download the DShield report for AS 16276.
    feed = urllib2.urlopen("http://dshield.org/asdetailsascii.html?as=16276").readlines()
    self.status = "OK"
except Exception, e:
    # Record the download error instead of crashing the scheduler.
    self.status = "ERROR: " + str(e)
    return False
for line in feed:
    self.analyze(line)
return True


def analyze(self, line):
    # Extract the first IP of one line, tag it 'dshield' and save it,
    # counting newly added elements.
    if line.startswith('#') or line.startswith('\n'):
        return
    try:
        ip = toolbox.find_ips(line)[0]
    except Exception, e:
        # if find_ip raises an exception, it means no ip
        # was found in the line, so we return
        return
    # Create the new ip and store it in the DB
    ip = Ip(ip=ip, tags=['dshield'])
    ip, status = self.analytics.save_element(ip, with_status=True)
    if status['updatedExisting'] == False:
        self.elements_fetched += 1
# NOTE(review): fragment — this chunk starts mid-function; the enclosing
# `def analyze(self, line):` header and its opening `try:` are outside
# this view, which is why an `except` appears without a visible `try`.
    tornode['ip'] = fields[0]
    tornode['name'] = fields[1]
    tornode['router-port'] = fields[2]
    tornode['directory-port'] = fields[3]
    tornode['flags'] = fields[4]
    tornode['uptime'] = fields[5]
    tornode['version'] = fields[6]
    tornode['contactinfo'] = fields[7]
except Exception, e:
    # Short lines (fewer than 8 fields) are skipped.
    return
tornode['value'] = "Tor: %s (%s)" % (tornode['name'], tornode['ip'])
try:
    ip = toolbox.find_ips(tornode['ip'])[0]
    ip = Ip(ip=ip, tags=['Tor Node'])
except Exception, e:
    # if find_ip raises an exception, it means no ip
    # was found in the line, so we return
    return
# store ip in database
ip, status = self.analytics.save_element(ip, with_status=True)
if status['updatedExisting'] == False:
    self.elements_fetched += 1
# store tornode in database
tornode, status = self.analytics.save_element(tornode, with_status=True)
if status['updatedExisting'] == False:
    self.elements_fetched += 1
# NOTE(review): fragment — the enclosing `def update(self):` header is
# outside this chunk; below is the tail of update() plus analyze().
try:
    # Download the Alienvault generic reputation list.
    feed = urllib2.urlopen(
        "https://reputation.alienvault.com/reputation.generic"
    ).readlines()
    self.status = "OK"
except Exception, e:
    # Record the download error instead of crashing the scheduler.
    self.status = "ERROR: " + str(e)
    return False
for line in feed:
    self.analyze(line)
return True


def analyze(self, line):
    # Extract the first IP of one line, tag it 'alienvault' and save it,
    # counting newly added elements.
    if line.startswith('#') or line.startswith('\n'):
        return
    try:
        ip = toolbox.find_ips(line)[0]
    except Exception, e:
        # if find_ip raises an exception, it means no ip
        # was found in the line, so we return
        return
    # Create the new ip and store it in the DB
    ip = Ip(ip=ip, tags=['alienvault'])
    ip, status = self.analytics.save_element(ip, with_status=True)
    if status['updatedExisting'] == False:
        self.elements_fetched += 1