def get_url_feed(self):
    """Fetch the URL feed and record every entry in the results.

    Formats ``self.url_sources`` with the API key, fetches it as JSON,
    and adds each returned URL to ``self.results``.
    """
    # NOTE(review): the original built an unused ``url_list`` local; removed.
    feed_url = self.url_sources % (self.api_key)
    # assumes the JSON payload is an iterable of URL strings — the loop
    # below only requires iterability
    url_data = get_url(feed_url, ret_json=True)
    for url in url_data:
        self.results.add_url(url)
def get_file_feed(self):
    """Download every sample listed in the daily hash list.

    Retrieves the newline-separated hash list via ``get_hash_list()``,
    then downloads each sample and stores its raw contents in
    ``self.results``.
    """
    hash_list = self.get_hash_list()
    if not hash_list:
        return
    for thash in hash_list.split('\n'):
        # BUG FIX: a trailing newline (or CRLF line endings) in the feed
        # yields empty/whitespace-padded entries, which previously caused
        # a download request with a blank or malformed hash. Skip them.
        thash = thash.strip()
        if not thash:
            continue
        uFile = self.url_download % (self.api_key, thash)
        fData = get_url(uFile)
        self.results.add_file(fData)
def pull_c2_dga(self):
    """Pull the DGA C2 feed and record each domain with its note."""
    feed_text = get_url(self.dga_list_url)
    for line in feed_text.split('\n'):
        # Skip comment lines and anything without the expected CSV shape.
        if line.startswith("#") or "," not in line:
            continue
        fields = line.split(',')
        # Column 0 is the domain, column 1 a free-text note.
        self.results.add_domain(fields[0], {'note': fields[1]})
def pull_feed(self):
    """Pull the feed and record each IP address entry."""
    raw = get_url(self.url)
    for entry in raw.split('\n'):
        # Ignore "//" comment lines and lines too short to hold an address.
        if entry.startswith("//") or len(entry) < 6:
            continue
        self.results.add_ipaddress(entry.strip())
def pull_feeds(self):
    """Pull the feed and record the IPs found between the START/END markers."""
    indata = get_url(self.url)
    # Only process the payload when both section markers are present.
    if "# START" not in indata or "# END" not in indata:
        return
    section = indata[indata.index("# START"):indata.index("# END")]
    for addr in section.split('\n'):
        addr = addr.strip()
        # BUG FIX: the slice begins at the "# START" marker itself, so the
        # marker line (length > 6) used to be recorded as an address.
        # Skip comment lines explicitly.
        if addr.startswith("#"):
            continue
        if len(addr) > 6:
            self.results.add_ipaddress(addr)
def pull_telnet_feed(self):
    """Pull the telnet-scanner feed; return False if it looks invalid."""
    feed = get_url(self.telnet_url)
    # Sanity-check provenance before trusting the contents.
    if "Provided by nothink.org" not in feed:
        return False
    for line in feed.split('\n'):
        # Skip comment lines and lines too short to hold an address.
        if line.startswith("#") or len(line) < 6:
            continue
        # A fresh metadata dict per entry, matching the original behavior.
        note = {'note': "Telnet Scanner"}
        self.results.add_ipaddress(line.strip(), note)
def pull_feed(self):
    # NOTE(review): chunk boundary — this function continues in a later
    # chunk; only the fetch/parse prefix is visible here.
    req_user_agent = {'User-agent': 'MalPipe 0.1'}
    vtIDS = []
    # Fetch the notifications feed, formatted with the API key.
    notif_feed = get_url(self.feed_url % (self.api_key), req_user_agent)
    if notif_feed == None:
        return False
    try:
        json_notif_feed = json.loads(notif_feed)
    except Exception, e:
        return False
def pull_feeds(self):
    """Pull the Tor node CSV feed and record each node's IP plus metadata."""
    indata = get_url(self.url)
    # Slice off the CSV header row, then parse each node line.
    for rnode in indata.split('\n')[1:]:
        node = rnode.split(',')
        # BUG FIX: node[22] is read below, so a row needs at least 23
        # fields; the original bound (len(node) < 22) allowed an
        # IndexError on rows with exactly 22 fields.
        if len(node) < 23:
            continue
        addr = node[4]
        md = {
            'name': node[0],
            'country': node[1],
            'uptime': node[3],
            'hostname': node[5],
            'exit': bool(int(node[9])),
            'version': node[17],
            'asn': {
                'asnname': node[21],
                'asn': node[22],
            },
        }
        self.results.add_ipaddress(addr, md)
# NOTE(review): chunk boundary — the statements below are the remainder of
# pull_feed(self) (its opening, which fetches notif_feed, lives in an
# earlier chunk), so they are indented as that method's body.
    try:
        json_notif_feed = json.loads(notif_feed)
    except Exception, e:
        return False
    # Collect every notification id, then acknowledge/clear them server-side.
    for vt_notif in json_notif_feed["notifications"]:
        vtIDS.append(int(vt_notif["id"]))
    post_url(self.clear_notifications % self.api_key,
             _data=json.dumps(vtIDS))
    for vt_notif in json_notif_feed["notifications"]:
        try:
            if self.download_files:
                # Download the sample contents when configured to do so...
                fsample = get_url(
                    self.download_url % (self.api_key, vt_notif["sha256"]),
                    req_user_agent)
            else:
                # ...otherwise record just the SHA-256 hash string.
                fsample = vt_notif["sha256"]
            self.results.add_file(fsample, self.parse_metadata(vt_notif))
        except KeyError:
            # A notification missing an expected key aborts the whole pull.
            print "[%s] Problem parsing VT feed" % (
                self.metadata.module_name)
            return False
    return True

# For threaded feeds, use while loop to keep running
def run(self):
    # Poll the feed forever; process results after each successful pull.
    while True:
        if self.pull_feed():
            self.process()
def pull_feed(self):
    """Pull the feed and record every http(s) URL entry."""
    body = get_url(self.url)
    for line in body.split('\n'):
        # Only lines that look like URLs are recorded.
        if line.startswith("http"):
            self.results.add_url(line.strip())
def get_hash_list(self):
    """Return the raw daily hash list fetched from the feed."""
    daily_list = get_url(self.url_daily_list)
    return daily_list