Example #1
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                
                row = row.strip()              

                if len(row) == 0 or row.startswith('#'):
                    continue
                
                row = row.split()
                event = Event()

                columns = ["source_ip", "source_time"]
                
                for key, value in zip(columns, row):    
                    if key == "source_time":
                        value = datetime.utcfromtimestamp(int(value)).strftime('%Y-%m-%d %H:%M:%S') + " UTC"
                    
                    event.add(key, value.strip())

                event.add('feed', 'openbl')
                event.add('feed_url', 'http://www.openbl.org/lists/date_all.txt')
                event.add('type', 'blacklist')
                
                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)

        self.acknowledge_message()
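
A minimal standalone sketch (not part of the bot) of the epoch-to-UTC conversion used in Example #1; the sample timestamp is hypothetical:

from datetime import datetime

# Convert an epoch timestamp string from the feed into the
# "YYYY-MM-DD HH:MM:SS UTC" form the parser emits.
raw = "1431468000"
value = datetime.utcfromtimestamp(int(raw)).strftime('%Y-%m-%d %H:%M:%S') + " UTC"
print(value)  # 2015-05-12 22:00:00 UTC
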
Example #2
    def process(self):
        report = self.receive_message()

        if report:
            regex_ip = "^[^ \t]+"
            regex_timestamp = "# ([^ \t]+ [^ \t]+)"
            
            for row in report.split('\n'):

                if row.startswith('#'):
                    continue

                event = Event()

                match = re.search(regex_ip, row)
                if match:
                    ip = match.group()
                    
                match = re.search(regex_timestamp, row)
                if match:
                    timestamp = match.group(1) + " UTC"
                
                event.add("source_ip", ip)
                event.add("source_time", timestamp)
                event.add('feed', 'bruteforceblocker')
                event.add('feed_url', 'http://danger.rulez.sk/projects/bruteforceblocker/blist.php')
                event.add('type', 'brute-force')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                
                self.send_message(event)
        self.acknowledge_message()
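
A standalone sketch of the two regular expressions used in Example #2, applied to a hypothetical bruteforceblocker-style line:

import re

regex_ip = "^[^ \t]+"                     # IP at the start of the line
regex_timestamp = "# ([^ \t]+ [^ \t]+)"   # date and time after the comment mark
row = "192.0.2.1\t# 2015-04-30 10:20:30 sshd"
ip = re.search(regex_ip, row).group()                          # "192.0.2.1"
timestamp = re.search(regex_timestamp, row).group(1) + " UTC"  # "2015-04-30 10:20:30 UTC"
print(ip, timestamp)
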
Example #3
    def process(self):
        report = self.receive_message()

        if report:
            regex_ip = "^(\d+\.\d+\.\d+\.\d+)"
            regex_timestamp = "(\d+\-\d+\-\d+\s\d+\:\d+\:\d+)"
            
            for row in report.split('\n'):

                if row.startswith('#'):
                    continue

                event = Event()

                match = re.search(regex_ip, row)
                if match:
                    ip = ".".join([octet.lstrip('0') for octet in match.group().split('.')])
		
                match = re.search(regex_timestamp, row)
                if match:
                    timestamp = match.group(1) + " UTC"
                
                event.add("source_ip", ip)
                event.add("source_time", timestamp)
                event.add('feed', 'dshield')
                event.add('feed_url', 'http://dshield.org/asdetailsascii.html')
                event.add('type', 'brute-force')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                
                self.send_message(event)
        self.acknowledge_message()
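
A standalone sketch of the octet normalisation used in Example #3. Note that a bare lstrip('0') turns an octet of "000" into an empty string; the "or '0'" guard below is an added safeguard, not part of the original example:

def normalize_ip(raw_ip):
    # strip leading zeroes from each octet, keeping "0" for all-zero octets
    return ".".join(octet.lstrip('0') or '0' for octet in raw_ip.split('.'))

print(normalize_ip("010.001.000.023"))  # 10.1.0.23
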
Example #4
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                row = row.strip()

                if len(row) == 0 or not row.startswith('http'):
                    continue
                
                url_object = urlparse.urlparse(row)

                if not url_object:
                    continue

                url      = url_object.geturl() 
                hostname = url_object.hostname
                port     = url_object.port

                event = Event()
                event.add("source_url", url)
                event.add("source_domain_name", hostname)
                if port:
                    event.add("source_port", str(port))

                event.add('feed', 'vxvault')
                event.add('feed_url', 'http://vxvault.siri-urz.net/URL_List.php')
                event.add('type', 'malware')
                
                event = utils.generate_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                
                self.send_message(event)
        self.acknowledge_message()
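
A standalone sketch of the URL decomposition used in Example #4. The examples on this page are Python 2 (urlparse module); on Python 3 the same attributes live in urllib.parse. The sample URL is hypothetical:

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2

url_object = urlparse("http://malware.example.com:8080/sample.exe")
print(url_object.geturl())    # full URL as given
print(url_object.hostname)    # malware.example.com
print(url_object.port)        # 8080
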
Example #5
    def process(self):
        report = self.receive_message()

        if report:
            for line in report.split('\n'):
                if line == '' or line.startswith('#') or line == 'ERROR: Too many connections':
                    continue

                event = Event()
                values = line.split(',')
                self.logger.info(values)

                event.add('source_time', values[0].strip('"') + " UTC")
                event.add('additional_information', values[1].strip('"'))
                event.add('malware', values[2].strip('"'))
                event.add('source_domain_name', values[3].strip('"'))
                event.add('source_url', values[4].strip('"'))
                event.add('status', values[5].strip('"'))
                event.add('feed', 'abuse.ch')
                event.add('type', 'ransomware')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")

                # emit one message per IP listed in the last column
                for ip in values[6].split('|'):
                    message = deepcopy(event)
                    message.add('source_ip', ip.strip('"'))
                    message = utils.generate_reported_fields(message)
                    self.send_message(message)

        self.acknowledge_message()
Example #6
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                row = row.strip()

                if len(row) == 0 or row.startswith('other'):
                    continue

                row = row.split()
                event = Event()

                columns = ["source_ip"]
                for key, value in zip(columns, row):
                    event.add(key, value)
                    
                event.add('feed', 'arbor')
                event.add('feed_url', 'http://atlas-public.ec2.arbor.net/public/ssh_attackers')
                event.add('type', 'brute-force')

                event = utils.generate_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                
                self.send_message(event)
        self.acknowledge_message()
Example #7
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                row = row.strip()              

                if len(row) == 0 or row.startswith('#'): # ignore all lines starting with comment mark
                    continue
                
                row = row.split('|')
                event = Event()

                columns = ["source_asn", "source_as_name", "source_ip", "source_time"]
                
                for key, value in zip(columns, row):
                    value = value.strip()
                    
                    if key == "source_time":
                        value += " UTC"
                    
                    event.add(key, value)
                    
                event.add('feed', 'dragonresearchgroup')
                event.add('feed_url', 'http://dragonresearchgroup.org/insight/vncprobe.txt')
                event.add('type', 'brute-force')
                event.add('application_protocol', 'vnc')

                event = utils.parse_source_time(event, "source_time")  
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                
                self.send_message(event)
        self.acknowledge_message()
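
A standalone sketch of the zip(columns, row) pattern shared by several of these parsers: pair a fixed column list with the fields of one feed line. The sample line is hypothetical:

columns = ["source_asn", "source_as_name", "source_ip", "source_time"]
row = "64496 | EXAMPLE-AS | 192.0.2.1 | 2015-04-30 12:34:56".split('|')
for key, value in zip(columns, row):
    print(key, value.strip())
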
Example #8
    def process(self):
        report = self.receive_message()

        for row in report.split('<tr>'):

            # Get IP and Type
            info1 = re.search(">[\ ]*(\d+\.\d+\.\d+\.\d+)[\ ]*<.*</td><td>([^<]+)</td>", row)
            
            # Get Timestamp
            info2 = re.search("<td>[\ ]*(\d{4}-\d{2}-\d{2}\ \d{2}:\d{2}:\d{2})[\ ]*</td>", row)

            if info1:
                event = Event()

                event.add("source_ip", info1.group(1))
                description = info1.group(2)
                event_type = self.get_type(description)
                event.add('type', event_type)
                event.add('description', description)
                event.add("source_time", info2.group(1) + " UTC-8")
                event.add('feed', 'taichungcitynetflow')
                event.add('feed_url', 'https://tc.edu.tw/net/netflow/lkout/recent/30')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
            
                self.send_message(event)
        self.acknowledge_message()
Example #9
    def process(self):
        report = self.receive_message()

        if report:
            report = encode(report)
            
            columns = ["source_time", "source_url", "source_ip", "source_reverse_dns", "malware", "__IGNORE__", "source_asn"]

            for row in unicodecsv.reader(StringIO(report), encoding='utf-8'):
                event = Event()

                for key, value in zip(columns, row):
                    
                    if key is "__IGNORE__":
                        continue
                    
                    if key is "source_time":
                        value = value.replace('_',' ')
                        value += " UTC"
                        
                    if key is "malware":
                        value = value.lower()
                        
                    event.add(key, value)
                    
                event.add('feed', 'malwaredomainslist')
                event.add('feed_url', 'http://www.malwaredomainlist.com/updatescsv.php')
                event.add('type', 'malware')    # FIXME
                
                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                    
                self.send_message(event)
        self.acknowledge_message()
Example #10
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                row = row.strip()

                if len(row) == 0 or not row.startswith('<td style'):
                    continue

                m = re.search("color: black;\">(\d+.\d+.\d+.\d+)</span></td><td>(.*)</td>", row)
                if m:
                    event = Event()

                    event.add("source_ip", m.group(1))
                    
                    event.add('feed', 'netflowhtml')
                    event.add('feed_url', 'https://tc.edu.tw/net/netflow/lkout/recent/1')
                    event.add('type', m.group(2))

                    event = utils.generate_source_time(event, "source_time")
                    event = utils.generate_observation_time(event, "observation_time")
                    event = utils.generate_reported_fields(event)
                
                    self.send_message(event)
        self.acknowledge_message()
Example #11
    def process(self):
        reports = self.receive_message()
        reports = ast.literal_eval(reports)

        if reports:
            for report in reports:
                pre_parsed_lines = []

                for p in report['message'].split('\n'):
                    pre_parsed_lines.append(p.strip())

                parsed_line_report = dict()
                count = 0
                startpos = 0
                endpos = 0

                # Find the start and the end of the block that holds the report information
                while count < len(pre_parsed_lines):
                    if pre_parsed_lines[count].startswith('Evidentiary Information'):
                        startpos = count + 1
                    if "Username" in pre_parsed_lines[count] and startpos != 0 and endpos == 0:
                        endpos = count - 1
                    count += 1

                # Turn the report into a dictionary once we know there are report lines
                if startpos != endpos:
                    for p in pre_parsed_lines[startpos:endpos]:
                        line = p.split(": ")
                        parsed_line_report[line[0]] = line[1]

                    event = Event()
                    event.add('rtir_id', report['id'])
                    event.add('description', report['subject'])
                    event.add('source_ip', parsed_line_report['Infringers IP Address'])
                    event.add('source_port', parsed_line_report['Port ID'])
                    event.add('application_protocol', parsed_line_report['Protocol'].lower())
                    event.add('additional_information',
                              'Content name:' + parsed_line_report['Infringed Work'] +
                              ' | File name:' + parsed_line_report['Infringing File Name'] +
                              ' | File size:' + parsed_line_report['Infringing File Size'])
                    date_value = datetime.strptime(parsed_line_report['Recent Infringement Timestamp'][:-4],
                                                   '%d %b %Y %H:%M:%S').strftime('%Y-%m-%d %H:%M:%S') + " UTC"
                    event.add('source_time', date_value)
                    event.add('feed', 'RT-CuttingCorp')
                    event.add('feed_code', 'CuttingCorp')
                    event.add('type', 'copyright')

                    event = utils.parse_source_time(event, "source_time")
                    event = utils.generate_observation_time(event, "observation_time")
                    event = utils.generate_reported_fields(event)
                    self.logger.info("message sent")
                    self.send_message(event)

        self.acknowledge_message()
Example #12
    def process(self):
        report = self.receive_message()
        
        if report:
            event = Event()
            report = encode(report)

            # columns according to https://www.phishtank.com/developer_info.php as of 2015/04/30:
            #   phish_id,url,phish_detail_url,submission_time,verified,verification_time,online,target
            # example:
            # 123456,http://www.example.com/,http://www.phishtank.com/phish_detail.php?phish_id=123456,2009-06-19T15:15:47+00:00,yes,2009-06-19T15:37:31+00:00,yes,1st National Example Bank
            columns = ["__IGNORE__", "source_url", "description_url", "source_time", "__IGNORE__", "__IGNORE__", "__IGNORE__", "target"]
            
            for row in unicodecsv.reader(StringIO(report), encoding='utf-8'):

                if "phish_id" in row:
                    continue		# skip header
                
                for key, value in zip(columns, row):

                    if key == "__IGNORE__":
                        continue
                    
                    event.add(key, value.strip())
                
                event.add('feed', 'phishtank')
                event.add('type', 'phishing')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                    
                self.send_message(event)
             
        self.acknowledge_message()
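
A standalone sketch of the PhishTank column mapping above, using the stdlib csv module instead of unicodecsv (which the Python 2 examples use). The sample row follows the format documented in the comment of Example #12:

import csv
from io import StringIO

data = ('123456,http://www.example.com/,'
        'http://www.phishtank.com/phish_detail.php?phish_id=123456,'
        '2009-06-19T15:15:47+00:00,yes,2009-06-19T15:37:31+00:00,yes,'
        '1st National Example Bank\n')
columns = ["__IGNORE__", "source_url", "description_url", "source_time",
           "__IGNORE__", "__IGNORE__", "__IGNORE__", "target"]
for row in csv.reader(StringIO(data)):
    for key, value in zip(columns, row):
        if key == "__IGNORE__":
            continue
        print(key, value.strip())
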
Example #13
    def process(self):
        report = self.receive_message()
        
        if report:
            event = Event()

            columns = ["__IGNORE__", "source_url", "description_url", "source_time", "__IGNORE__", "__IGNORE__", "__IGNORE__", "target"]
            
            for row in unicodecsv.reader(StringIO(report), encoding='utf-8'):

                if "phish_id" in row:
                    continue
                
                for key, value in zip(columns, row):

                    if key == "__IGNORE__":
                        continue
                    
                    event.add(key, value.strip())
                
                event.add('feed', 'phishtank')
                event.add('type', 'phishing')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                    
                self.send_message(event)
             
        self.acknowledge_message()
Example #14
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                
                row = row.strip()
                
                if len(row) == 0:
                    continue

                row = row.split('|')
                event = Event()

                columns = ["source_url", "source_asn", "source_ip", "source_time", "source_reverse_dns", "source_cc", "__IGNORE__", "additional_information"]
                
                for key, value in zip(columns, row):
                    value = value.strip()
                    
                    if key == "source_time":
                        value += " UTC"
                    
                    if value != "N/A" and key != "__IGNORE__":
                        event.add(key, value)
                        
                event.add('feed', 'cert-eu')
                event.add('type', 'malware')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
            self.acknowledge_message()
Example #15
    def process(self):
        event = self.receive_message()

        if event:
            event = utils.generate_observation_time(event, "observation_time")
            self.send_message(event)
        self.acknowledge_message()
Example #16
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "protocol": "transport_protocol",
                "port": "source_port",
                "hostname": "source_reverse_dns",
                "tag": "__IGNORE__",
                "asn": "source_asn",
                "geo": "source_cc",
                "region": "source_region",
                "city": "source_city",
                "naics": "__IGNORE__",
                "sic": "__IGNORE__",
                "programs": "__IGNORE__",
                "mountd_port": "__IGNORE__",
                "exports": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():
                    if key == 'sector':
                        continue

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key == "__IGNORE__" or key == "__TBD__":
                        continue

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                event.add('feed', 'shadowserver-portmapper')
                event.add('type', 'vulnerable service')
                event.add('application_protocol', 'portmapper')
                event.add('description', 'Open PortMapper')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #17
    def process(self):
        report = self.receive_message()
        self.logger.debug("Will apply regex %s" % self.parameters.regex)
        if report:
            rowcount = 0
            for row in report.split('\n'):  # For each line
                event = Event()
                match = re.search(self.parameters.regex, row)
                if match:
                    for key in match.groupdict():
                        event.add(key, match.groupdict()[key])
                else:
                    continue  # skip lines without a matching regex
                rowcount += 1
                # Get details from the parser parameters; it would be nicer to have
                # them per source. Avoid adding a field if it was already parsed.
                if 'feed' not in match.groupdict():
                    event.add('feed', self.parameters.feed)
                if 'feed_url' not in match.groupdict():
                    event.add('feed_url', self.parameters.feed_url)
                if 'type' not in match.groupdict():
                    event.add('type', self.parameters.type)
                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                self.send_message(event)
            self.logger.info("Processed %d events" % rowcount)
        self.acknowledge_message()
Example #18
    def process(self):
        report = self.receive_message()

        if report:
            for row in report.split('\n'):
                row = row.strip()

                if len(row) == 0 or row.startswith('#'):
                    continue

                row = row.split()
                event = Event()

                columns = ["source_url"]
                for key, value in zip(columns, row):
                    event.add(key, value)
                    
                event.add('feed', 'malwarepatrol-dansguardian')
                event.add('type', 'malware')
                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #19
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()
            headers = lib.dcu_headers()    
       
            rows = report.split("\n")

            for row in rows:
                try: 
                    columns = row.strip().split("\t")
                    fields = dict(zip(headers, columns))

                    event = Event(lib.convert_dcu_fields(fields))
                    event.add("feed", "microsoft-dcu")

                    event = utils.generate_observation_time(event, "observation_time")
                    event = utils.generate_reported_fields(event)

                    self.send_message(event)
                except lib.ParsingError as exc:
                    msg = "Got a parsing problem: %s affected row '%s' IGNORING AND CONTINUING" % (exc.message, row.strip())
                    self.logger.warning(msg, exc_info=True)
                    continue
        self.acknowledge_message()
Example #20
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "protocol": "transport_protocol",
                "port": "source_port",
                "hostname": "source_reverse_dns",
                "sysdesc": "__TDB__",
                "sysname": "__TDB__",
                "asn": "source_asn",
                "geo": "source_cc",
                "region": "source_region",
                "city": "source_city",
                "version": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key is "__IGNORE__" or key is "__TDB__":
                        continue

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                event.add('feed', 'shadowserver-snmp')
                event.add('type', 'vulnerable service')
                event.add('application_protocol', 'snmp')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(
                    event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #21
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "asn": "source_asn",
                "geo": "source_cc",
                "md5hash": "artifact_hash",
                "url": "reported_destination_url",
                "user_agent": "user_agent",
                "host": "reported_destination_reverse_dns",
                "method": "comment"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key == "__IGNORE__" or key == "__TBD__":
                        continue

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    if key == "comment":
                        value = "HTTP Method ->" + value

                    event.add(key, value)

                event.add('feed', 'shadowserver-Sandbox-Url')
                event.add('type', 'malware')
                event.add('artifact_hash_type', 'MD5')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #22
    def process(self):
        report = self.receive_message()
        parsed_event = Event()

        if report:
            if report.to_dict()['report']['report_category'] == 'eu.acdc.bot':
                # replace the binary sample with a placeholder ('ficheiro binario' means 'binary file')
                if report.to_dict()['report'].has_key('sample_b64'):
                    report.to_dict()['report']['sample_b64'] = 'ficheiro binario'
                    self.logger.info('field changed')

                tmp = report.value('report').get('report_type')
                try:
                    categ = tmp[tmp.index('[') + 1:tmp.index('][')]
                    sensor = tmp[tmp.index('][') + 2:tmp.index('][', tmp.index('][') + 2)]
                    entity = tmp[tmp.index('][', tmp.index('][') + 2) + 2:tmp.index(']', tmp.index('][', tmp.index('][') + 2) + 2)]
                    description = tmp.split(']')[3][1:]
                    value1 = json.dumps({"Category": "%s" % categ, "Sensor": "%s" % sensor, "Entity": "%s" % entity, "Description": "%s" % description})
                    value = json.loads(value1)
                    report.add('Additional Info', value)
                except ValueError:
                    self.logger.info("Event not parsed correctly")

                if report.to_dict().has_key('Additional Info'):
                    parsed_event.add('feed', 'ACDC:' + report.to_dict()['Additional Info']['Entity'])
                    parsed_event.add('additional_information', report.to_dict()['Additional Info']['Description'])
                else:
                    parsed_event.add('feed', 'ACDC:' + report.to_dict()['report']['report_type'])

                parsed_event.add('feed_code', 'ACDC:' + str(report.to_dict()['meta_data']['id']))
                parsed_event.add('description', report.to_dict()['report']['report_category'])
                parsed_event.add('source_ip', report.to_dict()['report']['src_ip_v4'])
                parsed_event.add('source_port', str(report.to_dict()['report']['src_port']))
                parsed_event.add('source_domain_name', report.to_dict()['meta_data']['domain'])
                parsed_event.add('source_time', report.to_dict()['report']['timestamp'])

                if report.to_dict()['report']['report_subcategory'] == "fast_flux":
                    parsed_event.add('type', 'fastflux')
                else:
                    parsed_event.add('type', 'malware')

                if report.to_dict()['report'].has_key('c2_ip_v4'):
                    parsed_event.add('destination_ip', report.to_dict()['report']['c2_ip_v4'])
                    parsed_event.add('destination_port', str(report.to_dict()['report']['c2_port']))

                parsed_event = utils.parse_source_time(parsed_event, "source_time")
                parsed_event = utils.generate_observation_time(parsed_event, "observation_time")
                parsed_event = utils.generate_reported_fields(parsed_event)
                self.send_message(parsed_event)

        self.acknowledge_message()
Example #23
    def process(self):
        report = self.receive_message()
        self.logger.info(report)
        if report:
            # m = json.loads(report)
            m = report

            event = Event()
            for k in m.keys():
                event.add(k, m.value(k))

            event.add('feed', 'hpfeed')
            event.add('feed_url', m.value("sensorname"))

            event = utils.generate_source_time(event, "source_time")
            event = utils.generate_observation_time(event, "observation_time")
            event = utils.generate_reported_fields(event)

            self.send_message(event)
        self.acknowledge_message()
Example #24
    def process(self):
        report = self.receive_message()

        if report:
            report = encode(report)

            columns = {
                "phish_id": "__IGNORE__",
                "url": "source_url",
                "phish_detail_url": "description_url",
                "submission_time": "__IGNORE__",
                "verified": "__IGNORE__",
                "verification_time": "source_time",
                "online": "__IGNORE__",
                "target": "__IGNORE__"
            }

            for row in csv.DictReader(StringIO.StringIO(report)):
                event = Event()

                for key, value in row.items():

                    key = columns[key]

                    if key == "__IGNORE__":
                        continue

                    event.add(key, value.strip())

                event.add('feed', 'phishtank')
                event.add('type', 'phishing')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)

        self.acknowledge_message()
Example #25
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "port": "source_port",
                "hostname": "source_reverse_dns",
                "tag": "malware",
                "application": "application_protocol",
                "asn": "source_asn",
                "geo": "source_cc",
                "region": "source_region",
                "city": "source_city",
                "url": "source_url_secondpart",
                "http_host": "source_urlfirstpart",
                "category": "description",
                "system": "os_name",
                "detected_since": "__IGNORE__",
                "server": "__IGNORE__",
                "redirect_target": "__IGNORE__",
                "naics": "__IGNORE__",
                "sic": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()
                urlfirstpart = ""
                urlsecondpart = ""
                port = ""
                fullurl = ""

                for key, value in row.items():

                    if key == 'sector':
                        continue

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key == "__IGNORE__" or key == "__TBD__":
                        continue

                    if key == "source_url_secondpart":
                        urlsecondpart = value
                        continue

                    if key == "source_urlfirstpart":
                        urlfirstpart = value
                        continue

                    if key == "source_port":
                        port = value

                    if key == "malware":
                        value = value.strip().lower()

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                # rebuild the full URL from the http_host and url columns
                if port == "80":
                    fullurl = "http://"
                if port == "443":
                    fullurl = "https://"

                fullurl = fullurl + urlfirstpart + "/" + urlsecondpart

                event.add('feed', 'shadowserver-websites')
                event.add('type', 'compromised')
                event.add('source_url', fullurl)

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #26
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "port": "source_port",
                "asn": "source_asn",
                "geo": "source_cc",
                "region": "source_region",
                "city": "source_city",
                "hostname": "source_reverse_dns",
                "type": "__IGNORE__",
                "infection": "malware",
                "url": "__TBD__",
                "agent": "__TBD__",
                "cc": "destination_ip",
                "cc_port": "destination_port",
                "cc_asn": "destination_asn",
                "cc_geo": "destination_cc",
                "cc_dns": "destination_reverse_dns",
                "count": "__TBD__",
                "proxy": "__TBD__",
                "application": "__TBD__",
                "p0f_genre": "__TBD__",
                "p0f_detail": "__TBD__",
                "machine_name": "__TBD__",
                "id": "__TBD__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key is "__IGNORE__" or key is "__TBD__":
                        continue

                    if key is "malware":
                        value = value.strip().lower()

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                event.add('feed', 'shadowserver-drone')
                event.add('type', 'botnet drone')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(
                    event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #27
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "port": "source_port",
                "asn": "source_asn",
                "geo": "source_cc",
                "region": "source_region",
                "city": "source_city",
                "hostname": "source_reverse_dns",
                "type": "__IGNORE__",
                "infection": "malware",
                "url": "__TBD__",
                "agent": "__TBD__",
                "cc": "destination_ip",
                "cc_port": "destination_port",
                "cc_asn": "destination_asn",
                "cc_geo": "destination_cc",
                "cc_dns": "destination_reverse_dns",
                "count": "__TBD__",
                "proxy": "__TBD__",
                "application": "__TBD__",
                "p0f_genre": "__TBD__",
                "p0f_detail": "__TBD__",
                "machine_name": "__TBD__",
                "id": "__TBD__"
            }
            
            rows = csv.DictReader(StringIO.StringIO(report))
            
            for row in rows:
                event = Event()
                
                for key, value in row.items():

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()
                    
                    if key is "__IGNORE__" or key is "__TBD__":
                        continue
                    
                    if key is "malware":
                        value = value.strip().lower()
                        
                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"
                    
                    event.add(key, value)
            
                event.add('feed', 'shadowserver-drone')
                event.add('type', 'botnet drone')
                
                event = utils.parse_source_time(event, "source_time")  
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                
                self.send_message(event)
        self.acknowledge_message()
Example #28
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "port": "source_port",
                "hostname": "source_reverse_dns",
                "tag": "__IGNORE__",
                "handshake": "__IGNORE__",
                "asn": "source_asn",
                "geo": "source_cc",
                "region": "source_region",
                "city": "source_city",
                "cipher_suite": "__IGNORE__",
                "cert_length": "__IGNORE__",
                "subject_common_name": "__IGNORE__",
                "issuer_common_name": "__IGNORE__",
                "cert_issue_date": "__IGNORE__",
                "cert_expiration_date": "__IGNORE__",
                "sha1_fingerprint": "__IGNORE__",
                "cert_serial_number": "__IGNORE__",
                "signature_algorithm": "__IGNORE__",
                "key_algorithm": "__IGNORE__",
                "subject_organization_name": "__IGNORE__",
                "subject_organization_unit_name": "__IGNORE__",
                "subject_country": "__IGNORE__",
                "subject_state_or_province_name": "__IGNORE__",
                "subject_locality_name": "__IGNORE__",
                "subject_street_address": "__IGNORE__",
                "subject_postal_code": "__IGNORE__",
                "subject_surname": "__IGNORE__",
                "subject_given_name": "__IGNORE__",
                "subject_email_address": "__IGNORE__",
                "subject_business_category": "__IGNORE__",
                "subject_serial_number": "__IGNORE__",
                "issuer_organization_name": "__IGNORE__",
                "issuer_organization_unit_name": "__IGNORE__",
                "issuer_country": "__IGNORE__",
                "issuer_state_or_province_name": "__IGNORE__",
                "issuer_locality_name": "__IGNORE__",
                "issuer_street_address": "__IGNORE__",
                "issuer_postal_code": "__IGNORE__",
                "issuer_surname": "__IGNORE__",
                "issuer_given_name": "__IGNORE__",
                "issuer_email_address": "__IGNORE__",
                "issuer_business_category": "__IGNORE__",
                "issuer_serial_number": "__IGNORE__",
                "naics": "__IGNORE__",
                "sic": "__IGNORE__",
                "freak_vulnerable": "__IGNORE__",
                "freak_cipher_suite": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():
                    if key == 'sector':
                        continue

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key == "__IGNORE__" or key == "__TBD__":
                        continue

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                event.add('feed', 'shadowserver-sslfreak')
                event.add('type', 'vulnerable service')
                event.add('description', 'SSL Freak Vulnerability')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #29
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "asn": "source_asn",
                "geo": "source_cc",
                "url": "__TBD__",
                "type": "malware",
                "http_agent": "user_agent",
                "tor": "__TBD__",
                "src_port": "source_port",
                "p0f_genre": "__TBD__",
                "p0f_detail": "__TBD__",
                "hostname": "source_reverse_dns",
                "dst_port": "destination_port",
                "http_host": "destination_reverse_dns",
                "http_referer": "__TBD__",
                "http_referer_ip": "__TBD__",
                "http_referer_asn": "__TBD__",
                "http_referer_geo": "__TBD__",
                "dst_ip": "destination_ip",
                "dst_asn": "destination_asn",
                "dst_geo": "destination_cc",
                "naics": "__IGNORE__",
                "sic": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():

                    if not value or key not in columns:
                        continue

                    key = columns[key]

                    value = value.strip()

                    if key == "__IGNORE__" or key == "__TBD__":
                        continue

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                event.add('feed', 'shadowserver-sinkhole-http')
                event.add('type', 'malware')
                event.add('application_protocol', 'http')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #30
    def process(self):
        report = self.receive_message()

        if report:
            report = report.strip()

            columns = {
                "timestamp": "source_time",
                "ip": "source_ip",
                "asn": "source_asn",
                "geo": "source_cc",
                "url": "__TBD__",
                "type": "__IGNORE__",
                "http_agent": "__TBD__",
                "tor": "__TBD__",
                "src_port": "source_port",
                "p0f_genre": "__TBD__",
                "p0f_detail": "__TBD__",
                "hostname": "source_reverse_dns",
                "dst_port": "destination_port",
                "http_host": "__TBD__",
                "http_referer": "__TBD__",
                "http_referer_asn": "__TBD__",
                "http_referer_ip": "__TBD__",
                "http_referer_geo": "__TBD__",
                "dst_ip": "destination_ip",
                "dst_asn": "destination_asn",
                "dst_geo": "destination_cc",
                "naics": "__IGNORE__",
                "sic": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))

            for row in rows:
                event = Event()

                for key, value in row.items():

                    key = columns[key]

                    if not value:
                        continue

                    value = value.strip()

                    if key is "__IGNORE__" or key is "__TBD__":
                        continue

                    # set timezone explicitly to UTC as it is absent in the input
                    if key == "source_time":
                        value += " UTC"

                    event.add(key, value)

                event.add('feed', 'shadowserver-microsoft-sinkhole')
                event.add('type', 'botnet drone')
                event.add('application_protocol', 'http')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(
                    event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)
        self.acknowledge_message()
Example #31
    def process(self):
        tz = pytz.timezone('Europe/Lisbon')
        reports = self.receive_message()
        columns = ['Source IP', 'Source port', 'Destination IP', 'Destination port', 'Protocol', 'Timestamp',
                   'Duration', 'Transferred', 'Packets', 'Flags', 'Source AS', 'Destination AS']

        reports = ast.literal_eval(reports)

        if reports:
            for report in reports:
                pre_parsed_lines = []

                for p in report['message'].split('\n'):
                    pre_parsed_lines.append(p.strip())

                count = 0
                startpos = 0
                endpos = 0

                # Find the start and the end of the block that holds the report information
                while count < len(pre_parsed_lines):
                    if pre_parsed_lines[count].startswith('#Evidence'):
                        startpos = count + 1
                    if pre_parsed_lines[count] == '' and startpos != 0 and endpos == 0:
                        endpos = count
                    count += 1

                # Turn the report lines into dictionaries once we know there are report lines
                if startpos + 1 != endpos:
                    eventlist = []
                    for p in pre_parsed_lines[startpos + 1:endpos]:
                        line = p.split(";")
                        parsed_line_report = dict()
                        for key, value in zip(columns, line):
                            parsed_line_report[key] = value
                        eventlist.append(parsed_line_report)

                    for eventline in eventlist:
                        event = Event()
                        event.add('rtir_id', report['id'])
                        event.add('description', report['subject'])
                        event.add('source_ip', eventline['Source IP'])
                        event.add('source_port', eventline['Source port'])
                        event.add('destination_ip', eventline['Destination IP'])
                        event.add('destination_port', eventline['Destination port'])
                        event.add('transport_protocol', eventline['Protocol'].lower())
                        date_value = (tz.localize(datetime.strptime(eventline['Timestamp'], '%Y-%m-%d %H:%M:%S.%f'),
                                                  is_dst=None).astimezone(pytz.utc)).strftime('%Y-%m-%d %H:%M:%S') + " UTC"
                        event.add('source_time', date_value)
                        event.add('feed', 'RT-GEANT')
                        event.add('feed_code', 'GEANT')
                        event.add('type', 'botnet drone')
                        event.add("additional_information", "Possible C&C connection attempt occurred")

                        event = utils.generate_observation_time(event, "observation_time")
                        event = utils.generate_reported_fields(event)
                        self.send_message(event)
                        self.logger.info("Message sent")
            self.acknowledge_message()
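
A standalone sketch of the timestamp handling in Example #31: a naive Europe/Lisbon timestamp localized with pytz and converted to the UTC string form these parsers emit. The sample value is hypothetical:

from datetime import datetime
import pytz

tz = pytz.timezone('Europe/Lisbon')
naive = datetime.strptime('2015-04-30 13:45:00.000', '%Y-%m-%d %H:%M:%S.%f')
utc_value = tz.localize(naive, is_dst=None).astimezone(pytz.utc)
print(utc_value.strftime('%Y-%m-%d %H:%M:%S') + " UTC")
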
Example #32
    def process(self):
        reports = self.receive_message()
        reports = ast.literal_eval(reports)

        if reports:
            badparsing = 0
            for report in reports:
                pre_parsed_lines = []

                for p in report['message'].split('\n'):
                    pre_parsed_lines.append(p.strip())

                parsed_line_report = dict()
                count = 0
                startpos = 0
                endpos = 0

                # Find the start and the end of the block that holds the report information
                while count < len(pre_parsed_lines):
                    if pre_parsed_lines[count].startswith('------------- Infringement Details'):
                        startpos = count + 1
                    if pre_parsed_lines[count] == '---------------------------------------------------------------------' and startpos != 0 and endpos == 0:
                        endpos = count
                    count += 1

                # Turn the report into a dictionary once we know there are report lines
                if startpos != endpos:
                    for p in pre_parsed_lines[startpos:endpos]:
                        line = p.split(": ")
                        if len(line) > 1:
                            parsed_line_report[line[0]] = line[1]
                        else:
                            self.logger.info("Bad Parsing")
                            badparsing = 1

                    if badparsing == 0:
                        event = Event()
                        event.add('rtir_id', report['id'])
                        event.add('description', report['subject'])
                        event.add('source_ip', parsed_line_report['IP Address'].strip())
                        event.add('source_port', parsed_line_report['Port'].strip())
                        event.add('application_protocol', parsed_line_report['Type'].lower().strip())
                        event.add('additional_information',
                                  'Content name:' + parsed_line_report['Title'].strip() +
                                  ' | File name:' + parsed_line_report['Filename'].strip() +
                                  ' | File size:' + parsed_line_report['Filesize'].strip())
                        date_value = datetime.strptime(parsed_line_report['Timestamp'][:-1].strip(),
                                                       '%Y-%m-%dT%H:%M:%S').strftime('%Y-%m-%d %H:%M:%S') + " UTC"
                        event.add('source_time', date_value)
                        event.add('feed', 'RT-turnernetwork')
                        event.add('feed_code', 'Turner Network Television')
                        event.add('type', 'copyright')

                        event = utils.parse_source_time(event, "source_time")
                        event = utils.generate_observation_time(event, "observation_time")
                        event = utils.generate_reported_fields(event)
                        self.send_message(event)

        self.acknowledge_message()