def __init__(self): self.feed_type = "CPE" self.prefix = "matches.item" super().__init__(self.feed_type, self.prefix) self.feed_url = Configuration.getFeedURL(self.feed_type.lower()) self.logger = logging.getLogger("CPEDownloads")
def __init__(self): self.feed_type = "VIA4" self.prefix = "cves" super().__init__(self.feed_type, self.prefix) self.feed_url = Configuration.getFeedURL(self.feed_type.lower()) self.logger = logging.getLogger("VIADownloads")
def __init__(self): self.feed_type = "CVES" self.prefix = "CVE_Items.item" super().__init__(self.feed_type, self.prefix) self.feed_url = Configuration.getFeedURL("cve") self.modfile = file_prefix + file_mod + file_suffix self.recfile = file_prefix + file_rec + file_suffix self.logger = logging.getLogger("CVEDownloads")
def getFile(source, unpack=True):
    global Modified
    try:
        (f, r) = Configuration.getFeedData(source, unpack)
        # re-fetch only when the feed's Last-Modified header is absent or
        # differs from the timestamp stored for this source
        if (r.headers.get('last-modified') is None or
                r.headers.get('last-modified') != db.getLastModified(source)):
            Modified = True
            return (f, r)
        else:
            return (None, None)
    except Exception:
        print("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL(source)))
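# A minimal usage sketch for getFile() above. The 'cves' source name and the
# db.setLastModified() call are illustrative assumptions, not part of the
# original module.
(f, r) = getFile('cves')
if f is not None:
    payload = f.read()  # unpacked feed content, ready for parsing
    db.setLastModified('cves', r.headers.get('last-modified'))  # hypothetical setter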
def __init__(self): self.feed_type = "CWE" super().__init__(self.feed_type) self.feed_url = Configuration.getFeedURL(self.feed_type.lower()) self.logger = logging.getLogger("CWEDownloads") # make parser self.parser = make_parser() self.ch = CWEHandler() self.parser.setContentHandler(self.ch)
def __init__(self, update_watchlist=False, signal_groups=None):
    self.feed_type = "CVES"
    self.prefix = "CVE_Items.item"
    super().__init__(self.feed_type, self.prefix)
    self.feed_url = Configuration.getFeedURL("cve")
    self.modfile = file_prefix + file_mod + file_suffix
    self.recfile = file_prefix + file_rec + file_suffix
    self.update_watchlist = update_watchlist
    # avoid a mutable default argument; fall back to a fresh empty list
    self.signal_groups = signal_groups if signal_groups is not None else []
    self.logger = logging.getLogger("CVEDownloads")
        self.description_summary_tag = False
        # normalise the accumulated character data before storing it
        self.description_summary = self.description_summary.rstrip()
        self.cwe[-1]['description_summary'] = self.description_summary.replace("\n", "")
    elif name == 'Weakness':
        self.weakness_tag = False

# make parser
parser = make_parser()
ch = CWEHandler()
parser.setContentHandler(ch)

# check modification date
try:
    (f, r) = Configuration.getFeedData('cwe')
except Exception as e:
    print(e)
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cwe")))
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified('cwe')
if i is not None:
    if lastmodified == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
cweList = []
for cwe in progressbar(ch.cwe):
    cwe['description_summary'] = cwe['description_summary'].replace("\t\t\t\t\t", " ")
    if args.v:
        print(cwe)
# dictionary
tmppath = Configuration.getTmpdir()

argparser = argparse.ArgumentParser(description='Populate/update the exploitdb ref database')
argparser.add_argument('-v', action='store_true', help='verbose output', default=False)
args = argparser.parse_args()

try:
    (f, r) = Configuration.getFeedData('exploitdb')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("exploitdb")))
i = db.getLastModified('exploitdb')
if i is not None:
    if r.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
csvfile = tmppath + '/exploitdb.csv'
# the context manager closes the file; no explicit fp.close() needed
with open(csvfile, 'wb') as fp:
    shutil.copyfileobj(f, fp)
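# Reading the dump back is then a plain csv pass; a sketch, with the column
# name 'description' made up for illustration (the real exploitdb header may
# differ).
import csv
with open(csvfile, newline='', encoding='utf-8', errors='replace') as fp:
    for row in csv.DictReader(fp):
        if args.v:
            print(row.get('description'))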
        self.exploittag = False
        self.refl = []
    if name == 'elliot':
        self.elliottag = False

# make parser
parser = make_parser()
ch = ExploitHandler()
parser.setContentHandler(ch)

# check modification date
try:
    (f, r) = Configuration.getFeedData('d2sec')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("d2sec")))
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified("d2sec")
if i is not None:
    if last_modified == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
exploitList = []
for exploit in progressbar(ch.d2sec):
    # print each exploit only in verbose mode
    if args.v:
        print(exploit)
    exploitList.append(exploit)
db.bulkUpdate("d2sec", exploitList)
        else:
            if args.v:
                n_counter += 1
            db.insertCVE(item)
    if args.v:
        print("New: %s Updated: %s" % (n_counter, u_counter))
        print("")

if __name__ == '__main__':
    # start here if it's an update.
    if args.u:
        # get the 'modified' file
        getfile = file_prefix + file_mod + file_suffix
        url = Configuration.getFeedURL('cve') + getfile
        cveItemsProcess("Modified NVD Database", url, args)
        # get the 'recent' file
        getfile = file_prefix + file_rec + file_suffix
        url = Configuration.getFeedURL('cve') + getfile
        cveItemsProcess("Recent NVD Database", url, args)
    if args.m:
        # mitre all items
        cveItemsProcess("All Mitre Database", Configuration.getFeedURL('m_cve'), args)
    elif args.p:
        # populate is pretty straight-forward, just grab all the files from NVD
        # and dump them into a DB.
version_info += cpe["versionEndExcluding"] if "versionEndIncluding" in item: cpe["versionEndIncluding"] = item["versionEndIncluding"] version_info += cpe["versionEndIncluding"] sha1_hash = hashlib.sha1(cpe["cpe_2_2"].encode("utf-8") + version_info.encode("utf-8")).hexdigest() cpe["id"] = sha1_hash return cpe if __name__ == '__main__': if args.u: try: (f, r) = Configuration.getFile(Configuration.getFeedURL('cpe')) except: sys.exit( "Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cpe"))) # check modification date i = db.getLastModified('cpe') last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True) if i is not None: if last_modified == i and not args.f: print("Not modified") sys.exit(0) cpej = json.loads(f.read())
    if name == 'statement':
        self.statementtag = False
        # normalise the accumulated character data before storing it
        self.statement = self.statement.rstrip()
        self.vendor[-1]['statement'] = self.statement

# make parser
parser = make_parser()
ch = VendorHandler()
parser.setContentHandler(ch)

# check modification date
try:
    (f, r) = Configuration.getFeedData('vendor')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL('vendor')))
last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified('vendor')
if i is not None:
    if last_modified == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
statements = []
for statement in progressbar(ch.vendor):
    if args.v:
        print(statement)
    statements.append(statement)
db.bulkUpdate('vendor', statements)
    description='Populate/update the NIST ref database')
argparser.add_argument('-v', action='store_true', help='verbose output', default=False)
args = argparser.parse_args()
if args.v:
    verbose = True

# check modification date
try:
    (f, r) = Configuration.getFeedData('ref')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("ref")))
i = db.getLastModified('ref')
if i is not None:
    if r.headers['last-modified'] == i:
        print("Not modified")
        sys.exit(0)

# Create temp file, then download and unpack the database
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
with open(tmppath + '/allrefmaps.zip', 'wb') as fp:
    shutil.copyfileobj(f, fp)
x = zipfile.ZipFile(tmppath + '/allrefmaps.zip')
for e in x.namelist():
        self.inPUBElem = 0
        self.cves[-1]['Published'] = parse_datetime(self.PUB, ignoretz=True)
    if name == 'vuln:last-modified-datetime':
        self.inDTElem = 0
        self.cves[-1]['Modified'] = parse_datetime(self.DT, ignoretz=True)

if __name__ == '__main__':
    parser = make_parser()
    ch = CVEHandler()
    parser.setContentHandler(ch)
    # start here if it's an update.
    if args.u:
        # get the 'modified' file
        getfile = file_prefix + file_mod + file_suffix
        try:
            (f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
        except Exception:
            sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cve") + getfile))
        i = db.getInfo("cves")
        last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
        if i is not None:
            if last_modified == i['last-modified']:
                print("Not modified")
                sys.exit(0)
        db.setColUpdate("cves", last_modified)
        # get your parser on !!
        parser = make_parser()
        ch = CVEHandler()
        parser.setContentHandler(ch)
        parser.parse(f)
    if flush or currentSize >= bufferSize:
        db.bulkUpdate("cves", buffer)
        buffer.clear()
        counter["count"] = counter["count"] + currentSize
        print(str(datetime.now()) + " --> " + str(counter["count"]) + " CVEs updated")

if __name__ == '__main__':
    if args.u:
        # get the 'modified' file
        getfile = file_prefix + file_mod + file_suffix
        try:
            feedUrl = Configuration.getFeedURL('cve') + getfile
            path = os.path.join(tempfile.gettempdir(), getfile)
            DownloadManager.download(feedUrl, path)
            file = ArchiveManager.extract(path)[0]
        except Exception:
            sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cve") + getfile))
        i = db.getInfo("cves")
        response = requests.head(feedUrl)
        last_modified = parse_datetime(response.headers['last-modified'], ignoretz=True)
        if i is not None:
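# The branch above implements a size-bounded write buffer: items accumulate
# until bufferSize is reached, and a final call with flush=True drains the
# remainder. A minimal standalone sketch of the same pattern (sink() stands in
# for db.bulkUpdate; all names here are illustrative):
def buffered_write(items, sink, buffer_size=10):
    pending = []
    for it in items:
        pending.append(it)
        if len(pending) >= buffer_size:
            sink(pending)   # write a full batch
            pending.clear()
    if pending:             # final drain, the flush=True case above
        sink(pending)

buffered_write(range(25), lambda batch: print(len(batch), "written"))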
def minimalist_xldate_as_datetime(xldate, datemode):
    # datemode: 0 for 1900-based, 1 for 1904-based
    return (datetime.datetime(1899, 12, 30) +
            datetime.timedelta(days=xldate + 1462 * datemode))

# dictionary
tmppath = Configuration.getTmpdir()

try:
    (f, r) = Configuration.getFeedData('msbulletin')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("msbulletin")))

# check modification date
i = dbLayer.getInfo("ms")
if i is not None:
    if r.headers['last-modified'] == i['last-modified']:
        print("Not modified")
        sys.exit(0)
if not os.path.exists(tmppath):
    os.mkdir(tmppath)
# the context manager closes the file; no explicit fp.close() needed
with open(tmppath + '/BulletinSearch.xlsx', 'wb') as fp:
    shutil.copyfileobj(f, fp)

# parse xlsx and store in database
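# Sanity check for the Excel serial-date conversion above: in the 1900-based
# system (datemode 0) serial 43101 is 2018-01-01, and the 1462-day shift maps
# serial 0 in the 1904-based system to 1904-01-01.
assert minimalist_xldate_as_datetime(43101, 0) == datetime.datetime(2018, 1, 1)
assert minimalist_xldate_as_datetime(0, 1) == datetime.datetime(1904, 1, 1)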
    elif name == 'Weakness':
        self.weakness_tag = False

# make parser
parser = make_parser()
ch = CWEHandler()
parser.setContentHandler(ch)
db = DatabaseLayer()

# check modification date
try:
    (f, r) = Configuration.getFeedData('cwe')
except Exception as e:
    print(e)
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cwe")))
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.CWE.updated()
if i is not None:
    if lastmodified == i:
        print("Not modified")
        sys.exit(0)

# parse xml and store in database
parser.parse(f)
cweList = []
for cwe in progressbar(ch.cwe):
    data = CWE(cwe['id'], cwe['name'],
               cwe['description_summary'].replace("\t\t\t\t\t", " "),
               cwe['status'], cwe['weaknessabs'])
    if args.v:
try:
    redis = Configuration.getRedisRefConnection()
    try:
        redis.info()
    except Exception:
        sys.exit("Redis server not running on %s:%s" % (Configuration.getRedisHost(), Configuration.getRedisPort()))
except Exception as e:
    print(e)
    sys.exit(1)
try:
    (f, r) = Configuration.getFeedData('via4')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("via4")))

# check modification date
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified("via4")
db.setColUpdateCurrentTime('via4')
if i is not None:
    if lastmodified == i:
        print("Not modified")
        sys.exit(0)
data = json.loads(f.read().decode('utf-8'))
cves = data['cves']
bulk = [dict(val, id=key) for key, val in cves.items() if key]
db.bulkUpdate('via4', bulk)
db.setColInfo('via4', 'sources', data['metadata']['sources'])
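# The comprehension above folds each CVE id key into its value dict under
# 'id'. On illustrative data:
sample = {'CVE-2020-0001': {'refmap': {'confirm': ['https://example.com']}}}
assert [dict(val, id=key) for key, val in sample.items() if key] == \
       [{'refmap': {'confirm': ['https://example.com']}, 'id': 'CVE-2020-0001'}]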
    currentSize = len(buffer)
    if flush or currentSize >= bufferSize:
        db.bulkUpdate("cpe", buffer)
        buffer.clear()
        counter["count"] = counter["count"] + currentSize
        print(str(datetime.now()) + " --> " + str(counter["count"]) + " CPEs updated")

if __name__ == '__main__':
    if args.u:
        try:
            feedUrl = Configuration.getFeedURL('cpe')
            fileName = os.path.basename(feedUrl)
            path = os.path.join(tempfile.gettempdir(), fileName)
            DownloadManager.download(feedUrl, path)
            file = ArchiveManager.extract(path)[0]
        except Exception:
            sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cpe")))
        # check modification date
        i = db.getLastModified('cpe')
        response = requests.head(feedUrl)
        last_modified = parse_datetime(response.headers['last-modified'], ignoretz=True)
if "versionEndExcluding" in item: cpe["versionEndExcluding"] = item["versionEndExcluding"] version_info += cpe["versionEndExcluding"] if "versionEndIncluding" in item: cpe["versionEndIncluding"] = item["versionEndIncluding"] version_info += cpe["versionEndIncluding"] sha1_hash = hashlib.sha1(cpe["cpe_2_2"].encode("utf-8") + version_info.encode("utf-8")).hexdigest() cpe["id"] = sha1_hash return cpe if __name__ == '__main__': if args.u: try: (f, r) = Configuration.getFile(Configuration.getFeedURL('cpe')) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("cpe"))) # check modification date i = db.getLastModified('cpe') last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True) if i is not None: if last_modified == i and not args.f: print("Not modified") sys.exit(0) cpej = json.loads(f.read()) cpeList = [] for cpeitem in cpej["matches"]: item = process_cpe_item(cpeitem)
if __name__ == "__main__": # Make a SAX2 XML parser parser = make_parser() ch = CapecHandler() parser.setContentHandler(ch) # Retrieve CAPECs from the configuration's capec url try: print("[+] Getting CAPEC XML file") (f, r) = Configuration.getFeedData("capec") except Exception as e: sys.exit( "Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("capec")) ) db_last_modified = db.getLastModified("capec") last_modified = parse_datetime(r.headers["last-modified"], ignoretz=True) if db_last_modified is not None: if last_modified == db_last_modified: print("Not modified") sys.exit(0) # Parse XML and store in database parser.parse(f) attacks = [] for attack in progressbar(ch.capec): attacks.append(attack)
# To Do: Implement REDIS
try:
    redis = Configuration.getRedisRefConnection()
    try:
        redis.info()
    except Exception:
        sys.exit("Redis server not running on %s:%s" % (Configuration.getRedisHost(), Configuration.getRedisPort()))
except Exception as e:
    print(e)
    sys.exit(1)
try:
    (f, r) = Configuration.getFeedData('via4')
except Exception:
    sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("via4")))

# check modification date
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified("via4")
if i is not None:
    if lastmodified == i:
        print("Not modified")
        sys.exit(0)
data = json.loads(f.read().decode('utf-8'))
cves = data['cves']
bulk = [dict(val, id=key) for key, val in cves.items() if key]
db.bulkUpdate('via4', bulk)
db.setColInfo('via4', 'sources', data['metadata']['sources'])
db.setColInfo('via4', 'searchables', data['metadata']['searchables'])
            cve['cwe'] = cwe['value']
        if not ('cwe' in cve):
            cve['cwe'] = defaultvalue['cwe']
    else:
        cve['cwe'] = defaultvalue['cwe']
    cve['vulnerable_configuration_cpe_2_2'] = []
    return cve

if __name__ == '__main__':
    if args.u:
        # get the 'modified' file
        getfile = file_prefix + file_mod + file_suffix
        try:
            (f, r) = Configuration.getFile(Configuration.getFeedURL('cve') + getfile)
        except Exception:
            sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cve") + getfile))
        i = db.getInfo("cves")
        last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True)
        if i is not None:
            if last_modified == i['last-modified'] and not args.f:
                print("Not modified")
                sys.exit(0)
        db.setColUpdate("cves", last_modified)
        cvej = json.loads(f.read())
        for cveitem in cvej['CVE_Items']:
            item = process_cve_item(item=cveitem)