def getFile(source, unpack=True):
    """Fetch the feed for *source* and return its (file, response) pair.

    Returns (None, None) when the feed is unchanged since the stored
    last-modified date, or when the download fails.  Sets the module-level
    ``Modified`` flag when fresh data is available.
    """
    global Modified
    try:
        (f, r) = Configuration.getFeedData(source, unpack)
        # .get() avoids a KeyError when the header is absent; a missing
        # last-modified header is treated as "modified" so an update is
        # never silently skipped.  `is None` replaces the `== None` test.
        last_modified = r.headers.get('last-modified')
        if last_modified is None or last_modified != db.getLastModified(source):
            Modified = True
            return (f, r)
        return (None, None)
    except Exception:
        # Narrowed from a bare except (which also swallowed SystemExit and
        # KeyboardInterrupt).  Return an explicit sentinel pair instead of
        # falling through to an implicit None.
        print("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL(source)))
        return (None, None)
def getFile(source, unpack=True):
    """Fetch the feed for *source* and return its (file, response) pair.

    Returns (None, None) when the feed is unchanged since the stored
    last-modified date, or when the download fails.  Sets the module-level
    ``Modified`` flag when fresh data is available.
    """
    global Modified
    try:
        (f, r) = Configuration.getFeedData(source, unpack)
        # .get() avoids a KeyError when the header is absent; a missing
        # last-modified header is treated as "modified" so an update is
        # never silently skipped.  `is None` replaces the `== None` test.
        last_modified = r.headers.get('last-modified')
        if last_modified is None or last_modified != db.getLastModified(source):
            Modified = True
            return (f, r)
        return (None, None)
    except Exception:
        # Narrowed from a bare except (which also swallowed SystemExit and
        # KeyboardInterrupt).  Return an explicit sentinel pair instead of
        # falling through to an implicit None.
        print("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL(source)))
        return (None, None)
# MISP feed import (top of script): compute how far back to sync, then
# connect to the configured MISP instance.
import os
import sys
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))
import dateutil.parser
import math
import pytz
from datetime import datetime
from pymisp import PyMISP
from lib.ProgressBar import progressbar
from lib.Config import Configuration as conf
import lib.DatabaseLayer as db

# "since" window: minutes elapsed since the stored last-modified stamp;
# an empty string presumably means "fetch everything" — confirm against
# the PyMISP call that consumes it (beyond this excerpt).
i = db.getLastModified('user_misp')
now = datetime.utcnow().replace(tzinfo = pytz.utc)
if i:
    last = dateutil.parser.parse(i)
    delta = now - last
    since = "%sm"%math.ceil(delta.total_seconds()/60)
else:
    since=""

# Misp interface
misp_url, misp_key = conf.getMISPCredentials()
if not misp_url:
    print("MISP credentials not specified")
    sys.exit(1)
# NOTE(review): chunk ends mid-construct — the body of the bare `except:`
# below continues beyond the visible excerpt.
try:
    misp = PyMISP(misp_url, misp_key, True, 'json')
except:
# vFeed database import: skip when unchanged, otherwise download the
# tarball into a scratch directory and unpack the sqlite file.
import shutil
from lib.Config import Configuration
import lib.DatabaseLayer as db

vFeedurl = Configuration.getvFeedURL()
vFeedstatus = Configuration.getvFeedStatus()
# runPath / tarfile / parse_datetime come from earlier in the file,
# outside this excerpt.
tmppath = os.path.join(runPath, "..", Configuration.getTmpdir())
# NOTE(review): looks like a leftover debug print — confirm before removing.
print(tmppath)

# check modification date
try:
    u = Configuration.getFile(vFeedurl)
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vFeedurl))
last_modified = parse_datetime(u.headers['last-modified'], ignoretz=True)
i = db.getLastModified('vfeed')
if i is not None:
    if last_modified == i:
        print("Not modified")
        sys.exit(0)

# create temp file and download and unpack database
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
with open(tmppath+'/vfeed.db.tgz', 'wb') as fp:
    shutil.copyfileobj(u, fp)
# 'r:*' lets tarfile auto-detect the compression scheme.
t = tarfile.open(name=tmppath+'/vfeed.db.tgz', mode='r:*')
t.extract('vfeed.db', path=tmppath)
t.close()
# excluded map_cve_milw0rm because it moved to a different domain, thus the id is irrelevant.
# Talked about this with Toolswatch dev, he's going to take a look, so leave this comment in until further notice
# Copyright (c) 2014 psychedelys # Copyright (c) 2014-2018 Pieter-Jan Moreels - [email protected] # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) import urllib from lib.ProgressBar import progressbar import lib.DatabaseLayer as db # get dates icve = db.getLastModified('cve') icpeo = db.getLastModified('cpeother') # check modification date date = False if icve is not None and icpeo is not None: # Go check date if icve >= icpeo: print("Not modified") sys.exit(0) else: date = True # only get collection of new CVE's collections = [] if date:
# NIST reference-map import: download allrefmaps.zip when changed and
# unpack its members.
RefUrl = Configuration.getRefURL()
tmppath = Configuration.getTmpdir()

argparser = argparse.ArgumentParser(description='Populate/update the NIST ref database')
argparser.add_argument('-v', action='store_true', help='verbose output', default=False)
args = argparser.parse_args()
if args.v:
    verbose = True

# check modification date
try:
    u = Configuration.getFile(RefUrl)
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(RefUrl))
i = db.getLastModified('ref')
if i is not None:
    if u.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)

# Create temp file and download and unpack database
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
with open(tmppath+'/allrefmaps.zip', 'wb') as fp:
    shutil.copyfileobj(u, fp)
x = zipfile.ZipFile(tmppath+'/allrefmaps.zip')
# NOTE(review): chunk ends mid-loop; the body continues beyond this excerpt.
for e in x.namelist():
# d2sec exploit-reference import: parse the d2sec XML feed with a SAX
# handler and upsert each exploit record into MongoDB.

# connect to db
db = Configuration.getMongoConnection()
d2sec = db.d2sec
info = db.info

# make parser
parser = make_parser()
ch = ExploitHandler()
parser.setContentHandler(ch)

# check modification date; skip the whole run when the feed is unchanged
try:
    f = Configuration.getFile(d2securl)
except Exception:
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt; any fetch error still aborts with a message.
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(d2securl))
i = dbLayer.getLastModified("d2sec")
if i is not None:
    if f.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
bulk = d2sec.initialize_ordered_bulk_op()
for exploit in progressbar(ch.d2sec):
    # Fixed: every record was printed unconditionally AND a second time
    # under -v; printing is now verbose-only.
    if args.v:
        print(exploit)
    bulk.find({'id': exploit['id']}).upsert().update({"$set": {'id': exploit['id'], 'url': exploit['url'], 'name': exploit['name']}})
bulk.execute()

#update database info after successful program-run
# NOTE(review): the next two statements are the tail of a SAX handler
# method whose `def` lies outside this excerpt.
# NOTE(review): appending the rstrip() of the statement to itself doubles
# the accumulated text — confirm this is intended and not a typo for
# `self.statement = self.statement.rstrip()`.
        self.statement = self.statement + self.statement.rstrip()
        self.vendor[-1]['statement'] = self.statement

# Vendor-statement import: parse the vendor feed and bulk-update the
# 'vendor' collection, then record the feed's last-modified stamp.

# make parser
parser = make_parser()
ch = VendorHandler()
parser.setContentHandler(ch)

# check modification date
try:
    (f, r) = Configuration.getFeedData('vendor')
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL('vendor')))
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified('vendor')
if i is not None:
    if last_modified == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
statements = []
for statement in progressbar(ch.vendor):
    if args.v:
        print(statement)
    statements.append(statement)
db.bulkUpdate('vendor', statements)

#update database info after successful program-run
db.setColUpdate('vendor', last_modified)
# vFeed database import: skip when unchanged, otherwise download the
# tarball into the temp directory and unpack the sqlite file.
from lib.ProgressBar import progressbar
from lib.Config import Configuration
import lib.DatabaseLayer as dbLayer

vFeedurl = Configuration.getvFeedURL()
vFeedstatus = Configuration.getvFeedStatus()
tmppath = Configuration.getTmpdir()

# connect to db
db = Configuration.getMongoConnection()

# check modification date
try:
    u = Configuration.getFile(vFeedurl)
except Exception:
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vFeedurl))
i = dbLayer.getLastModified('vfeed')
if i is not None:
    if u.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)

# create temp file and download and unpack database
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
with open(tmppath+'/vfeed.db.tgz', 'wb') as fp:
    shutil.copyfileobj(u, fp)
# Fixed: `t.close` was referenced without calling it, so the archive
# handle leaked; the context manager guarantees it is closed.
with tarfile.open(name=tmppath+'/vfeed.db.tgz', mode='r') as t:
    t.extract('vfeed.db', path=tmppath)
# excluded map_cve_milw0rm because it moved to a different domain, thus the id is irrelevant.
# Talked about this with Toolswatch dev, he's going to take a look, so leave this comment in until further notice
# Copyright (c) 2014-2018 Pieter-Jan Moreels - [email protected] # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) import urllib from lib.ProgressBar import progressbar import lib.DatabaseLayer as db # get dates icve = db.getLastModified("cves") icpeo = db.getLastModified("cpeother") # check modification date date = False if icve is not None and icpeo is not None: # Go check date if icve >= icpeo: print("Not modified") sys.exit(0) else: date = True # only get collection of new CVE's collections = [] if date:
if name == 'capec:Attack_Pattern_Catalog': self.Attack_Pattern_Catalog_tag = False # dictionary capecurl = Configuration.getCAPECDict() # make parser parser = make_parser() ch = CapecHandler() parser.setContentHandler(ch) # check modification date try: f = Configuration.getFile(capecurl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(capecurl)) i = db.getLastModified('capec') last_modified = parse_datetime(f.headers['last-modified']) if i is not None: if last_modified == i: print("Not modified") sys.exit(0) # parse xml and store in database parser.parse(f) attacks=[] for attack in progressbar(ch.capec): attacks.append(attack) db.bulkUpdate("capec", attacks) #update database info after successful program-run db.setColUpdate('capec', last_modified)
# dictionary exploitdburl = Configuration.getexploitdbDict() tmppath = Configuration.getTmpdir() argparser = argparse.ArgumentParser(description='Populate/update the exploitdb ref database') argparser.add_argument('-v', action='store_true', help='verbose output', default=False) args = argparser.parse_args() try: f = Configuration.getFile(exploitdburl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(exploitdburl)) i = db.getLastModified('exploitdb') if i is not None: if f.headers['last-modified'] == i: print("Not modified") sys.exit(0) if not os.path.exists(tmppath): os.mkdir(tmppath) csvfile = tmppath+'/exploitdb.csv' with open(csvfile, 'wb') as fp: shutil.copyfileobj(f, fp) fp.close() exploits=[] with open(csvfile, newline='') as csvtoparse:
# exploit-db import (argument parsing + download): fetch the CSV index
# when changed and save it to the temp directory for parsing.
argparser = argparse.ArgumentParser(
    description='Populate/update the exploitdb ref database')
argparser.add_argument('-v', action='store_true', help='verbose output',
                       default=False)
args = argparser.parse_args()

try:
    f = Configuration.getFile(exploitdburl)
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (exploitdburl))
i = db.getLastModified('exploitdb')
if i is not None:
    if f.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
csvfile = tmppath + '/exploitdb.csv'
with open(csvfile, 'wb') as fp:
    shutil.copyfileobj(f, fp)
    # NOTE(review): redundant — the `with` block already closes fp;
    # placement inside the block inferred from the flattened source.
    fp.close()
exploits = []
# NOTE(review): chunk ends at the start of this `with`; its body continues
# beyond the excerpt.
with open(csvfile, newline='', encoding='utf-8') as csvtoparse:
# CAPEC import entry point: fetch the feed, bail out when unchanged,
# parse the XML and bulk-update the 'capec' collection.
if __name__ == "__main__":
    # Make a SAX2 XML parser
    parser = make_parser()
    ch = CapecHandler()
    parser.setContentHandler(ch)
    # Retrieve CAPECs from the configuration's capec url
    try:
        print("[+] Getting CAPEC XML file")
        (f, r) = Configuration.getFeedData('capec')
    except Exception as e:
        sys.exit(
            "Cannot open url %s. Bad URL or not connected to the internet?" %
            (Configuration.getFeedURL("capec")))
    # Skip the run entirely when the feed's last-modified date matches the
    # stamp stored from the previous successful run.
    db_last_modified = db.getLastModified('capec')
    last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
    if db_last_modified is not None:
        if last_modified == db_last_modified:
            print("Not modified")
            sys.exit(0)
    # Parse XML and store in database
    parser.parse(f)
    attacks = []
    for attack in progressbar(ch.capec):
        attacks.append(attack)
    print("[+] %d attacks in XML file" % (len(attacks)))
    db.bulkUpdate("capec", attacks)
self.href = None # dict cpedict = Configuration.getCPEDict() # make parser parser = make_parser() ch = CPEHandler() parser.setContentHandler(ch) # check modification date try: f = Configuration.getFile(cpedict) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (cpedict)) i = db.getLastModified("cpe") last_modified = parse_datetime(f.headers["last-modified"]) if i is not None: if last_modified == i: print("Not modified") sys.exit(0) # parse xml and store in database parser.parse(f) cpeList = [] for x in progressbar(ch.cpe): x["id"] = toStringFormattedCPE(x["name"]) x["title"] = x["title"][0] x["cpe_2_2"] = x.pop("name") if not x["references"]: x.pop("references") cpeList.append(x)
self.relationship['cwe_id'] = self.relationship_id # make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: (f, r) = Configuration.getFeedData('cwe') except Exception as e: print(e) sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cwe"))) lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is not None and not args.f: if lastmodified == i: print("Not modified") sys.exit(0) # parse xml and store in database parser.parse(f) cweList = [] for cwe in progressbar(ch.cwe): cwe['description_summary'] = cwe['description_summary'].replace( "\t\t\t\t\t", " ") if args.v: print(cwe) cweList.append(cwe)
self.weakness_tag = False # dictionary cwedict = Configuration.getCWEDict() # make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: f = Configuration.getFile(cwedict) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(cwedict)) lastmodified = parse_datetime(f.headers['last-modified'], ignoretz=True) i = db.getLastModified('cwe') if i is not None: if lastmodified == i: print("Not modified") sys.exit(0) # preparing xml by saving in a tempfile and unzipping tmpdir = tempfile.gettempdir() tmpfile = tempfile.NamedTemporaryFile() cwezip = open(tmpfile.name, 'wb') cwezip.write(f.read()) cwezip.close() with zipfile.ZipFile(tmpfile.name) as z: z.extractall(tmpdir) z.close() f = open(os.path.join(tmpdir, 'cwec_v2.8.xml'))
# MISP feed import (top of script): compute how far back to sync, then
# connect to the configured MISP instance.
import sys
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))
import dateutil.parser
import math
import pytz
from datetime import datetime
from pymisp import PyMISP
from lib.ProgressBar import progressbar
from lib.Config import Configuration as conf
import lib.DatabaseLayer as db

# "since" window: minutes elapsed since the stored last-modified stamp;
# an empty string presumably means "fetch everything" — confirm against
# the PyMISP call that consumes it (beyond this excerpt).
i = db.getLastModified('user_misp')
now = datetime.utcnow().replace(tzinfo=pytz.utc)
if i:
    last = dateutil.parser.parse(i)
    delta = now - last
    since = "%sm" % math.ceil(delta.total_seconds() / 60)
else:
    since = ""

# Misp interface
misp_url, misp_key = conf.getMISPCredentials()
if not misp_url:
    print("MISP credentials not specified")
    sys.exit(1)
# NOTE(review): chunk ends mid-construct — the handler for this `try`
# continues beyond the visible excerpt.
try:
    misp = PyMISP(misp_url, misp_key, True, 'json')
# NIST reference-map import: download allrefmaps.zip when changed and
# unpack its members.
argparser.add_argument('-v', action='store_true', help='verbose output',
                       default=False)
args = argparser.parse_args()
if args.v:
    verbose = True

# check modification date
try:
    (f, r) = Configuration.getFeedData('ref')
except Exception:
    # Fixed: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("ref")))
i = db.getLastModified('ref')
if i is not None:
    if r.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)

# Create temp file and download and unpack database
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
with open(tmppath + '/allrefmaps.zip', 'wb') as fp:
    shutil.copyfileobj(f, fp)
x = zipfile.ZipFile(tmppath + '/allrefmaps.zip')
# NOTE: loop body continues beyond this excerpt.
for e in x.namelist():
    filename = e
# NOTE(review): these two handlers close try-blocks that begin outside
# this excerpt (a Redis-availability probe nested in a broader guard).
    except:
        sys.exit("Redis server not running on %s:%s" % (Configuration.getRedisHost(), Configuration.getRedisPort()))
except Exception as e:
    print(e)
    sys.exit(1)

# VIA4 import: fetch the JSON feed, skip when unchanged, then bulk-update
# the 'via4' collection and its metadata.
try:
    (f, r) = Configuration.getFeedData('via4')
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("via4")))
# check modification date
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified("via4")
# NOTE(review): the "last checked" stamp is written even when the run
# exits as "Not modified" below — presumably intentional; confirm.
db.setColUpdateCurrentTime('via4')
if i is not None:
    if lastmodified == i:
        print("Not modified")
        sys.exit(0)
data = json.loads(f.read().decode('utf-8'))
cves = data['cves']
# flatten {id: record} into a list of records carrying their own id
bulk = [dict(val, id=key) for key, val in cves.items() if key]
db.bulkUpdate('via4', bulk)
db.setColInfo('via4', 'sources', data['metadata']['sources'])
db.setColInfo('via4', 'searchables', data['metadata']['searchables'])
#update database info after successful program-run
db.setColUpdate('via4', lastmodified)
if name == 'Attack_Pattern_Catalog': self.Attack_Pattern_Catalog_tag = False # make parser parser = make_parser() ch = CapecHandler() parser.setContentHandler(ch) print(ch.capec) # check modification date try: (f, r) = Configuration.getFeedData('capec') except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("capec"))) i = db.getLastModified('capec') last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True) #if i is not None: # if last_modified == i: # print("Not modified") # sys.exit(0) # parse xml and store in database parser.parse(f) attacks = [] for attack in progressbar(ch.capec): attacks.append(attack) db.bulkUpdate("capec", attacks) #update database info after successful program-run db.setColUpdate('capec', last_modified)
# Copyright (c) 2014 psychedelys # Copyright (c) 2014-2015 PidgeyL # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) import urllib from lib.ProgressBar import progressbar import lib.DatabaseLayer as db # get dates icve = db.getLastModified('cve') icpeo = db.getLastModified('cpeother') # check modification date date = False if icve is not None and icpeo is not None: # Go check date if icve >= icpeo: print("Not modified") sys.exit(0) else: date = True # only get collection of new CVE's collections = [] if date:
# NOTE(review): tail of a SAX handler method whose `def` lies outside
# this excerpt.
        self.referencetag = False
        self.href = None

# CPE dictionary import: parse the CPE XML, normalise each entry, then
# bulk-update the 'cpe' collection.

# dict
cpedict = Configuration.getCPEDict()
# make parser
parser = make_parser()
ch = CPEHandler()
parser.setContentHandler(ch)
# check modification date
try:
    f = Configuration.getFile(cpedict)
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(cpedict))
i = db.getLastModified('cpe')
if i is not None:
    if f.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)
# parse xml and store in database
parser.parse(f)
cpeList=[]
for x in progressbar(ch.cpe):
    x['id']= toStringFormattedCPE(x['name'])
    x['title']=x['title'][0]
    x['cpe_2_2'] = x.pop('name')
    # drop empty reference lists so they are not stored at all
    if not x['references']:
        x.pop('references')
    cpeList.append(x)
db.bulkUpdate("cpe", cpeList)
if name == 'elliot': self.elliottag = False # dictionary d2securl = Configuration.getd2secDict() # make parser parser = make_parser() ch = ExploitHandler() parser.setContentHandler(ch) # check modification date try: f = Configuration.getFile(d2securl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(d2securl)) i = db.getLastModified("d2sec") if i is not None: if f.headers['last-modified'] == i: print("Not modified") sys.exit(0) # parse xml and store in database parser.parse(f) exploitList=[] for exploit in progressbar(ch.d2sec): print (exploit) if args.v: print (exploit) exploitList.append(exploit) db.d2secBulkUpdate(exploitList) #update database info after successful program-run
# vFeed database import: skip when unchanged, otherwise download the
# tarball into a scratch directory and unpack the sqlite file.
import sqlite3
from lib.ProgressBar import progressbar
from lib.Config import Configuration
import lib.DatabaseLayer as db

vFeedurl = Configuration.getvFeedURL()
vFeedstatus = Configuration.getvFeedStatus()
# runPath is defined earlier in the file, outside this excerpt.
tmppath = os.path.join(runPath, "..", Configuration.getTmpdir())

# check modification date
try:
    u = Configuration.getFile(vFeedurl)
except Exception:
    # Narrowed from a bare except, which also swallowed SystemExit and
    # KeyboardInterrupt.
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vFeedurl))
i = db.getLastModified('vfeed')
if i is not None:
    if u.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)

# create temp file and download and unpack database
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
with open(tmppath+'/vfeed.db.tgz', 'wb') as fp:
    shutil.copyfileobj(u, fp)
# Fixed: `t.close` was referenced without calling it, so the archive
# handle leaked; the context manager guarantees it is closed.
with tarfile.open(name=tmppath+'/vfeed.db.tgz', mode='r') as t:
    t.extract('vfeed.db', path=tmppath)
# excluded map_cve_milw0rm because it moved to a different domain, thus the id is irrelevant.
# Talked about this with Toolswatch dev, he's going to take a look, so leave this comment in until further notice
# NOTE(review): the second `except` below closes a try-block that begins
# outside this excerpt; the inner try probes Redis availability.
    try:
        redis.info()
    except:
        sys.exit("Redis server not running on %s:%s"%(Configuration.getRedisHost(),Configuration.getRedisPort()))
except Exception as e:
    print(e)
    sys.exit(1)

# VIA4 import: fetch the JSON feed, skip when unchanged, then bulk-update
# the 'via4' collection and its metadata.
try:
    (f, r) = Configuration.getFeedData('via4')
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("via4")))
# check modification date
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i=db.getLastModified("via4")
if i is not None:
    if lastmodified == i:
        print("Not modified")
        sys.exit(0)
data = json.loads(f.read().decode('utf-8'))
cves = data['cves']
# flatten {id: record} into a list of records carrying their own id
bulk = [dict(val, id=key) for key, val in cves.items() if key]
db.bulkUpdate('via4', bulk)
db.setColInfo('via4', 'sources', data['metadata']['sources'])
db.setColInfo('via4', 'searchables', data['metadata']['searchables'])
#update database info after successful program-run
db.setColUpdate('via4', lastmodified)
# Make a SAX2 XML parser parser = make_parser() ch = CapecHandler() parser.setContentHandler(ch) # Retrieve CAPECs from the configuration's capec url try: print("[+] Getting CAPEC XML file") (f, r) = Configuration.getFeedData("capec") except Exception as e: sys.exit( "Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("capec")) ) db_last_modified = db.getLastModified("capec") last_modified = parse_datetime(r.headers["last-modified"], ignoretz=True) if db_last_modified is not None: if last_modified == db_last_modified: print("Not modified") sys.exit(0) # Parse XML and store in database parser.parse(f) attacks = [] for attack in progressbar(ch.capec): attacks.append(attack) print("[+] %d attacks in XML file" % (len(attacks))) db.bulkUpdate("capec", attacks)
# d2sec exploit-reference import: parse the feed with a SAX handler and
# bulk-update the 'd2sec' collection.

# dictionary
d2securl = Configuration.getd2secDict()

# make parser
parser = make_parser()
ch = ExploitHandler()
parser.setContentHandler(ch)

# check modification date
try:
    f = Configuration.getFile(d2securl)
except Exception:
    # Fixed: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (d2securl))
last_modified = parse_datetime(f.headers['last-modified'], ignoretz=True)
i = db.getLastModified("d2sec")
if i is not None:
    if last_modified == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
exploitList = []
for exploit in progressbar(ch.d2sec):
    # Fixed: every record was printed unconditionally AND a second time
    # under -v; printing is now verbose-only.
    if args.v:
        print(exploit)
    exploitList.append(exploit)
db.bulkUpdate("d2sec", exploitList)

#update database info after successful program-run
# NOTE(review): the next two statements are the tail of a function
# (presumably process_cpe_item) whose `def` lies outside this excerpt.
    cpe["id"] = sha1_hash
    return cpe

# CPE import entry point: with -u, fetch the JSON feed, skip when
# unchanged (unless -f forces), and bulk-update the 'cpe' collection.
if __name__ == '__main__':
    if args.u:
        try:
            # NOTE(review): elsewhere in this codebase Configuration.getFile
            # returns a single object — confirm it really yields a
            # (file, response) pair here.
            (f, r) = Configuration.getFile(Configuration.getFeedURL('cpe'))
        except:
            sys.exit(
                "Cannot open url %s. Bad URL or not connected to the internet?"
                % (Configuration.getFeedURL("cpe")))
        # check modification date
        i = db.getLastModified('cpe')
        last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
        if i is not None:
            if last_modified == i and not args.f:
                print("Not modified")
                sys.exit(0)
        cpej = json.loads(f.read())
        cpeList = []
        for cpeitem in cpej["matches"]:
            item = process_cpe_item(cpeitem)
            cpeList.append(item)
        db.bulkUpdate("cpe", cpeList)
        #update database info after successful program-run
# NOTE(review): the next two statements are the tail of a SAX handler
# method whose `def` lies outside this excerpt.
# NOTE(review): appending the rstrip() of the statement to itself doubles
# the accumulated text — confirm this is intended and not a typo for
# `self.statement = self.statement.rstrip()`.
        self.statement = self.statement + self.statement.rstrip()
        self.vendor[-1]['statement'] = self.statement

# Vendor-statement import: parse the vendor dictionary and bulk-update
# the 'vendor' collection, then record the feed's last-modified stamp.

# dictionary
vendordict = Configuration.getVendorDict()
# make parser
parser = make_parser()
ch = VendorHandler()
parser.setContentHandler(ch)
# check modification date
try:
    (f, r) = Configuration.getFile(vendordict, compressed = True)
except:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vendordict))
i = db.getLastModified('vendor')
if i is not None:
    if r.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)
# parse xml and store in database
parser.parse(f)
statements=[]
for statement in progressbar(ch.vendor):
    if args.v:
        print (statement)
    statements.append(statement)
db.bulkUpdate('vendor', statements)
#update database info after successful program-run
db.setColUpdate('vendor', r.headers['last-modified'])