def __init__(self, collection="cves", rankinglookup=False, namelookup=False,
             vfeedlookup=False, capeclookup=False, subscorelookup=False,
             reflookup=False):
    """Set up the CVE query helper and open the Mongo/Redis handles it needs.

    Each *lookup flag enables one enrichment source; only the collections
    that are actually requested are opened.
    """
    self.collectionname = collection
    self.rankinglookup = rankinglookup
    self.namelookup = namelookup
    self.vfeedlookup = vfeedlookup
    self.capeclookup = capeclookup
    self.subscorelookup = subscorelookup
    # BUG FIX: reflookup was accepted but never stored on self, unlike
    # every other lookup flag.
    self.reflookup = reflookup
    connectdb = Configuration.getMongoConnection()
    self.collection = connectdb[self.collectionname]
    if rankinglookup:
        self.ranking = connectdb['ranking']
    if namelookup:
        self.cpeOther = connectdb['cpeother']
        self.cpe = connectdb['cpe']
    if vfeedlookup:
        self.vfeed = connectdb['vfeed']
    if capeclookup:
        self.capec = connectdb['capec']
    if reflookup:
        self.ref = Configuration.getRedisRefConnection()
def main():
    """Wire up signal handling, build the tornado application and serve it."""
    def sig_handler(sig, frame):
        # Stop the IOLoop so the process exits cleanly on ^C / SIGTERM.
        print("Terminating...")
        tornado.ioloop.IOLoop.current().stop()

    tornado.options.parse_command_line()
    for signum in (signal.SIGINT, signal.SIGTERM):
        signal.signal(signum, sig_handler)

    mongo = Configuration.getMongoConnection()
    redis_conn = Configuration.getRedisVendorConnection()
    listen_port = Configuration.getFlaskPort()

    handlers = [
        (r"/", MainHandler),
        (r"/api/stats", StatsHandler),
        (r"/api/search/([^/]+)", SearchHandler),
    ]
    app = tornado.web.Application(handlers, db=mongo, redisdb=redis_conn)
    app.listen(listen_port)
    print("Listening on :%s" % listen_port)
    tornado.ioloop.IOLoop.current().start()
def loadPlugins(self):
    """Load the plugins listed in the plugin loader file.

    Each non-comment line is "<module-path>\t<state>"; state "load" or
    "default" imports the plugin, and "load" additionally feeds it the
    plugin settings.  Failures are reported per plugin, never fatal.
    """
    settingsReader = ConfigReader(conf.getPluginsettings())
    if not os.path.exists(conf.getPluginLoadSettings()):
        print("[!] Could not find plugin loader file!")
        return
    # Read and parse plugin file (with-statement closes the handle,
    # which the old open(...).read() leaked)
    with open(conf.getPluginLoadSettings(), "r") as loader:
        data = loader.read()
    data = [x.split("\t") for x in data.split("\n") if not x.startswith("#") and x]
    data = [[x.strip() for x in y if x.strip()] for y in data]
    for x in [x for x in data if len(x) == 2]:
        try:
            if x[1].lower() in ("load", "default"):
                # Import the module and instantiate the class named after it
                i = importlib.import_module(x[0].replace("/", "."))
                plugin = getattr(i, x[0].split("/")[-1])()
                plugin.setUID(plugin.getName().replace(" ", "_"))
                # Ensure the UID is unique by suffixing underscores
                while plugin.getUID() in self.plugins:
                    plugin.setUID(plugin.getUID() + "_")
                # Load settings if needed
                if x[1].lower() == "load":
                    plugin.loadSettings(settingsReader)
                plugin.setLoadState(x[1])
                self.plugins[plugin.getUID().strip()] = plugin
                print("[+] Loaded plugin %s" % x[0])
        except Exception as e:
            print("[!] Failed to load module %s: " % x[0])
            print("[!] -> %s" % e)
def __init__(self):
    """Cache all logging-related settings from the configuration."""
    # Read every setting once so later use does not re-query the config.
    self.verbose = conf.verbose()
    self.verboseLevel = conf.verboseLevel()
    self.logging = conf.logging()
    self.logPath = conf.logPath()
    self.logLevel = conf.logLevel()
    # Severity names, ordered from least to most severe.
    self.levels = ["DEBUG", "INFO", "WARN", "ERROR", "CRIT"]
def getFile(source, unpack=True):
    """Fetch a feed and return (file_object, response) when it changed.

    Returns (None, None) when the upstream last-modified stamp matches the
    one stored in the database, and None (implicitly) on download failure.
    Sets the module-level ``Modified`` flag when new data is available.
    """
    global Modified
    try:
        (f, r) = Configuration.getFeedData(source, unpack)
        # .get() is explicit about a missing Last-Modified header; a missing
        # header is treated as "changed" (same as the old == None test).
        last_modified = r.headers.get('last-modified')
        if last_modified is None or last_modified != db.getLastModified(source):
            Modified = True
            return (f, r)
        return (None, None)
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt still works.
        print("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL(source)))
def admin():
    """Render the admin page, including the tail of the last update log."""
    if Configuration.loginRequired():
        # An authenticated session is required before showing the page.
        if not current_user.is_authenticated():
            return render_template('login.html')
    else:
        # Authentication disabled: log in a throw-away dummy user.
        login_user(User.get("_dummy_"))
    output = None
    if os.path.isfile(Configuration.getUpdateLogFile()):
        separator = "==========================\n"
        with open(Configuration.getUpdateLogFile()) as updateFile:
            # Keep only the two most recent update sections.
            sections = updateFile.read().split(separator)[-2:]
        output = separator + separator.join(sections)
    return render_template('admin.html', status="default",
                           stats=adminStats(),
                           updateOutput=filterUpdateField(output))
def enhance(scan):
    """Attach CVE information to every CPE found in a scan document."""
    host, port = Configuration.getCVESearch()

    def _cves_for(cpe_string):
        """Query the CVE-Search API for one CPE; returns the parsed JSON.

        Tries the raw (lowercased) CPE first and falls back to a
        percent-encoded form when the first request fails.  Consistency
        note: this now also logs the query for service CPEs, which the
        old duplicated code did not.
        """
        try:
            print("Querying %s" % (api % (host, port, cpe_string)))
            raw = (urlopen(api % (host, port, cpe_string)).read()).decode('utf8')
        except Exception:
            raw = (urlopen(api % (host, port, urllib.parse.quote_plus(cpe_string))).read()).decode('utf8')
        return json.loads(str(raw))

    for system in scan['systems']:
        cpe = system['cpes'] if 'cpes' in system else None
        if cpe:
            cpes = []
            for c in cpe:
                try:
                    c = c.lower()
                    cpes.append({'cpe': c, 'cves': _cves_for(c)})
                except Exception:
                    # Best effort: skip CPEs the API cannot resolve.
                    pass
            system['cpes'] = cpes
        # TODO get possible dpe info and store in dpe
        for service in system['services']:
            if 'cpe' in service:
                try:
                    service['cves'] = _cves_for(service['cpe'].lower())
                except Exception:
                    pass
            # TODO get dpe info for service
    scan['enhanced'] = {"time": int(datetime.now().strftime('%s'))}
    return scan
def adminStats():
    """Collect collection sizes and last-update stamps for the admin page."""
    # (stat prefix, collection that is counted, name in the "info" collection)
    sources = [("cve", "cves", "cve"),
               ("cpe", "cpe", "cpe"),
               ("cpeOther", "cpeother", "cpeother"),
               ("capec", "capec", "capec"),
               ("d2sec", "d2sec", "d2sec"),
               ("vendor", "vendor", "vendor"),
               ("vfeed", "vfeed", "vfeed")]
    stats = {}
    for prefix, col, info_name in sources:
        info = db.info.find_one({"db": info_name})
        stats[prefix + "A"] = db[col].count()
        stats[prefix + "U"] = info["last-modified"] if info is not None else None
    dbstats = db.command("dbstats")  # hoisted: was issued twice before
    stats.update({
        "blA": db.mgmt_blacklist.count(),
        "wlA": db.mgmt_whitelist.count(),
        "dbName": Configuration.getMongoDB(),
        "dbSize": dbstats["dataSize"],
        "dbOnDisk": dbstats["storageSize"],
    })
    return stats
def browse(vendor=None):
    """Render the vendor/product browse page."""
    try:
        if vendor is not None:
            vendor = urllib.parse.quote_plus(vendor).lower()
        browseList = getBrowseList(vendor)
        return render_template("browse.html",
                               product=browseList["product"],
                               vendor=browseList["vendor"])
    except redisExceptions.ConnectionError:
        # Redis backs the browse lists; report connection details on failure.
        info = {"host": Configuration.getRedisHost(),
                "port": Configuration.getRedisPort()}
        return render_template("error.html",
                               status={"except": "redis-connection", "info": info})
def cve(cveid):
    """Render the detail page for a single CVE id."""
    host, port = Configuration.getCVESearch()
    data = (urlopen('http://%s:%s/api/cve/%s' % (host, port, cveid)).read()).decode('utf8')
    cvejson = json.loads(str(data))
    # BUG FIX: "cvejson is {}" compared identity against a fresh dict literal
    # and was therefore always False, so unknown CVEs never produced a 404.
    # Falsy covers both an empty object and a JSON null response.
    if not cvejson:
        return page_not_found(404)
    return render_template('cve.html', cve=cvejson)
def log(message=""):
    """Write a progress message to the update log, stdout, or the logger."""
    if args.o:
        # Renamed the handle: "as log" shadowed this very function.
        with open(Configuration.getUpdateLogFile(), "a") as logfile:
            logfile.write(message + "\n")
    if args.v:
        print(message)
    else:
        logging.info(message)
def getDBStats():
    """Assemble size / last-modified statistics for the main collections."""
    stats = {}
    for name in ('cve', 'cpe', 'cpeOther', 'capec', 'd2sec', 'vendor'):
        stats[name + 'A'] = getSize(name.lower())
        stats[name + 'U'] = getLastModified(name.lower())
    # The CVE documents live in the "cves" collection, so the size for the
    # "cve" entry is taken from there instead.
    stats['cveA'] = getSize('cves')
    stats['blA'] = colBLACKLIST.count()
    stats['wlA'] = colWHITELIST.count()
    stats['dbOnDisk'] = db.command("dbstats")['storageSize']
    stats['dbSize'] = db.command('dbstats')['dataSize']
    stats['dbName'] = conf.getMongoDB()
    return stats
def getDBStats():
    """Assemble size / last-modified statistics, including the vfeed data."""
    columns = ["cve", "cpe", "cpeOther", "capec", "d2sec", "vendor", "vfeed"]
    stats = {c + "A": getSize(c.lower()) for c in columns}
    # The CVE documents are stored in the "cves" collection.
    stats["cveA"] = getSize("cves")
    stats.update({c + "U": getLastModified(c.lower()) for c in columns})
    stats["blA"] = colBLACKLIST.count()
    stats["wlA"] = colWHITELIST.count()
    stats["dbOnDisk"] = db.command("dbstats")["storageSize"]
    stats["dbSize"] = db.command("dbstats")["dataSize"]
    stats["dbName"] = conf.getMongoDB()
    return stats
def __init__(self, id, auth_instance):
    '''Simple User class'''
    if Configuration.loginRequired():
        # Only accounts known to the authenticator may log in.
        if not auth_instance.isCVESearchUser(id):
            raise UserNotFoundError()
        self.id = id
    else:
        # Dummy account for when logon is not required.
        self.id = "_dummy_"
    self.authenticator = auth_instance
def admin():
    """Render the admin page, logging in a dummy user when auth is off."""
    status = ["default", "none"]
    if not Configuration.loginRequired():
        # Authentication disabled: use the throw-away dummy account.
        login_user(User.get("_dummy_"))
        return render_template('admin.html', status=status, stats=adminStats())
    if current_user.is_authenticated():
        return render_template('admin.html', status=status, stats=adminStats())
    return render_template('login.html', status=status)
def browse(vendor=None):
    """Show the browse page for an optional vendor."""
    try:
        quoted = urllib.parse.quote_plus(vendor).lower() if vendor is not None else None
        listing = getBrowseList(quoted)
        return render_template('browse.html',
                               product=listing["product"],
                               vendor=listing["vendor"])
    except redisExceptions.ConnectionError:
        # The browse lists come from Redis; surface connection details.
        return render_template('error.html',
                               status={'except': 'redis-connection',
                                       'info': {'host': Configuration.getRedisHost(),
                                                'port': Configuration.getRedisPort()}})
def getDBStats(include_admin=False):
    """Return per-collection stats plus global database information.

    With include_admin=True the white- and blacklist sizes are added too.
    """
    collections = ('cves', 'cpe', 'cpeOther', 'capec', 'cwe', 'via4')
    data = {name: {'size': getSize(name.lower()),
                   'last_update': getLastModified(name.lower())}
            for name in collections}
    if include_admin:
        data['whitelist'] = {'size': colWHITELIST.count()}
        data['blacklist'] = {'size': colBLACKLIST.count()}
    return {'stats': {'size_on_disk': db.command("dbstats")['storageSize'],
                      'db_size': db.command('dbstats')['dataSize'],
                      'name': conf.getMongoDB()},
            'data': data}
def start(self):
    """Configure logging and launch the web application.

    In Flask-debug mode the built-in development server is used; otherwise
    the app is wrapped in a Tornado HTTP server (optionally TLS) that forks
    multiple worker sub-processes (``start(0)``).
    """
    # get properties
    flaskHost = Configuration.getFlaskHost()
    flaskPort = Configuration.getFlaskPort()
    flaskDebug = Configuration.getFlaskDebug()
    # logging
    if Configuration.getLogging():
        logfile = Configuration.getLogfile()
        # Make sure the directory holding the log file exists.
        pathToLog = logfile.rsplit('/', 1)[0]
        if not os.path.exists(pathToLog):
            os.makedirs(pathToLog)
        maxLogSize = Configuration.getMaxLogSize()
        backlog = Configuration.getBacklog()
        # Size-capped rotating log; only errors are written to it.
        file_handler = RotatingFileHandler(logfile, maxBytes=maxLogSize,
                                           backupCount=backlog)
        file_handler.setLevel(logging.ERROR)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        file_handler.setFormatter(formatter)
        self.app.logger.addHandler(file_handler)
    if flaskDebug:
        # start debug flask server
        self.app.run(host=flaskHost, port=flaskPort, debug=flaskDebug)
    else:
        # start asynchronous server using tornado wrapper for flask
        # ssl connection
        print("Server starting...")
        if Configuration.useSSL():
            # Cert/key paths are resolved relative to the project root.
            ssl_options = {"certfile": os.path.join(_runPath, "../", Configuration.getSSLCert()),
                           "keyfile": os.path.join(_runPath, "../", Configuration.getSSLKey())}
        else:
            ssl_options = None
        # Shutdown handlers are installed before the server starts.
        signal.signal(signal.SIGTERM, self.sig_handler)
        signal.signal(signal.SIGINT, self.sig_handler)
        self.http_server = HTTPServer(WSGIContainer(self.app), ssl_options=ssl_options)
        self.http_server.bind(flaskPort, address=flaskHost)
        self.http_server.start(0)  # Forks multiple sub-processes
        IOLoop.instance().start()
def listManagement(vendor=None, product=None):
    """Render the list-management page for a vendor or a specific product."""
    try:
        version = None
        if product is None:
            # No product picked yet: behave exactly like the /browse page.
            if vendor:
                vendor = urllib.parse.quote_plus(vendor).lower()
            listing = getBrowseList(vendor)
            vendor = listing["vendor"]
            product = listing["product"]
        else:
            # A product was picked: fetch its known versions.
            version = getVersionsOfProduct(urllib.parse.quote_plus(product).lower())
        return render_template("listmanagement.html", status=["default", "none"],
                               vendor=vendor, product=product, version=version)
    except redisExceptions.ConnectionError:
        return render_template(
            "error.html",
            status={"except": "redis-connection",
                    "info": {"host": Configuration.getRedisHost(),
                             "port": Configuration.getRedisPort()}})
def __init__(self, id):
    '''Simple User class'''
    if Configuration.loginRequired():
        # Build the username -> password map from the database.
        users = {account['username']: account['password']
                 for account in db.getUsers()}
    else:
        # Dummy account for when logon is not required.
        users = {"_dummy_": "_dummy_"}
    if id not in users:
        raise UserNotFoundError()
    self.id = id
    self.password = users[id]
def _load_methods(self):
    """Load authentication methods from the auth loader file.

    Lines look like "<module> <required|sufficient> [key=value ...]";
    unknown auth types are skipped and failures are reported per method.
    """
    self.methods = []
    if not os.path.exists(conf.getAuthLoadSettings()):
        print("[!] Could not find auth loader file!")
        return
    # Read and parse plugin file (with-statement closes the handle,
    # which the old open(...).read() leaked)
    with open(conf.getAuthLoadSettings(), "r") as loader:
        data = loader.read()
    data = [x.split(maxsplit=2) for x in data.splitlines()
            if not x.startswith("#") and x]
    for x in [x for x in data if len(x) in [2, 3]]:
        try:
            x.extend([''] * (3 - len(x)))  # add empty args if none exist
            method, authType, args = x
            if authType.lower() not in ["required", "sufficient"]:
                # Skip if authType not known
                continue
            # Parse "key=value" pairs and instantiate the method class
            args = {y.split("=")[0]: y.split("=")[1] for y in args.split()}
            i = importlib.import_module("lib.authenticationMethods.%s" % method)
            authMethod = getattr(i, method.split("/")[-1])(**args)
            # Add object to list
            self.methods.append((method, authType.lower(), authMethod))
            print("[+] Loaded Auth Method %s" % x[0])
        except Exception as e:
            print("[!] Failed to load Auth Method %s: " % x[0])
            print("[!] -> %s" % e)
def __init__(self, collection="cves", rankinglookup=False, namelookup=False,
             capeclookup=False, subscorelookup=False, reflookup=False):
    """Remember the requested collection name and lookup flags.

    Only the reference lookup opens a real connection (Redis); the other
    flags are simply stored for later use.
    """
    self.collectionname = collection
    self.collection = collection
    self.rankinglookup = rankinglookup
    self.namelookup = namelookup
    self.capeclookup = capeclookup
    self.subscorelookup = subscorelookup
    self.reflookup = reflookup
    if reflookup:
        self.ref = Configuration.getRedisRefConnection()
def __init__(self):
    """Full (authenticated) web interface: plugins, login and admin routes."""
    # TODO: make auth handler and plugin manager singletons
    Advanced_API.__init__(self)
    Minimal.__init__(self)
    self.minimal = False
    self.auth_handler = AuthenticationHandler()
    self.plugManager = PluginManager()
    self.login_manager = LoginManager()
    self.plugManager.loadPlugins()
    self.login_manager.init_app(self.app)
    self.login_manager.user_loader(self.load_user)
    self.redisdb = Configuration.getRedisVendorConnection()
    # Extra default filters on top of what the base classes provide.
    self.defaultFilters.update({'blacklistSelect': 'on', 'whitelistSelect': 'on', 'unlistedSelect': 'show',})
    self.args.update({'minimal': False})
    # Context handed to plugins when they render pages / run actions.
    self.pluginArgs = {"current_user": current_user, "plugin_manager": self.plugManager}
    # Route table: r = URL rule, m = allowed HTTP methods, f = view function.
    routes = [{'r': '/cve/<cveid>', 'm': ['GET'], 'f': self.cve},
              {'r': '/_get_plugins', 'm': ['GET'], 'f': self._get_plugins},
              {'r': '/plugin/_get_cve_actions', 'm': ['GET'], 'f': self._get_cve_actions},
              {'r': '/plugin/<plugin>', 'm': ['GET'], 'f': self.openPlugin},
              {'r': '/plugin/<plugin>/subpage/<page>', 'm': ['GET'], 'f': self.openPluginSubpage},
              {'r': '/plugin/<plugin>/_cve_action/<action>', 'm': ['GET'], 'f': self._jsonCVEAction},
              {'r': '/login', 'm': ['POST'], 'f': self.login_check},
              {'r': '/logout', 'm': ['POST'], 'f': self.logout},
              {'r': '/admin', 'm': ['GET'], 'f': self.admin},
              {'r': '/admin/', 'm': ['GET'], 'f': self.admin},
              {'r': '/admin/change_pass', 'm': ['GET'], 'f': self.change_pass},
              {'r': '/admin/request_token', 'm': ['GET'], 'f': self.request_token},
              {'r': '/admin/updatedb', 'm': ['GET'], 'f': self.updatedb},
              {'r': '/admin/whitelist/import', 'm': ['POST'], 'f': self.listImport},
              {'r': '/admin/blacklist/import', 'm': ['POST'], 'f': self.listImport},
              {'r': '/admin/whitelist/export', 'm': ['GET'], 'f': self.listExport},
              {'r': '/admin/blacklist/export', 'm': ['GET'], 'f': self.listExport},
              {'r': '/admin/whitelist/drop', 'm': ['POST'], 'f': self.listDrop},
              {'r': '/admin/blacklist/drop', 'm': ['POST'], 'f': self.listDrop},
              {'r': '/admin/whitelist', 'm': ['GET'], 'f': self.listView},
              {'r': '/admin/blacklist', 'm': ['GET'], 'f': self.listView},
              {'r': '/admin/addToList', 'm': ['GET'], 'f': self.listAdd},
              {'r': '/admin/removeFromList', 'm': ['GET'], 'f': self.listRemove},
              {'r': '/admin/editInList', 'm': ['GET'], 'f': self.listEdit},
              {'r': '/admin/listmanagement', 'm': ['GET'], 'f': self.listManagement},
              {'r': '/admin/listmanagement/<vendor>', 'm': ['GET'], 'f': self.listManagement},
              {'r': '/admin/listmanagement/<vendor>/<product>', 'm': ['GET'], 'f': self.listManagement},
              {'r': '/admin/listmanagement/add', 'm': ['GET'], 'f': self.listManagementAdd},
              # NOTE(review): '/login' appears twice in this table (see
              # above) — the second registration looks redundant; confirm.
              {'r': '/login', 'm': ['POST'], 'f': self.login_check}]
    for route in routes:
        self.addRoute(route)
def listManagement(vendor=None, product=None):
    """Render the list-management page (variant without a status banner)."""
    try:
        if product is not None:
            # A product was chosen: list its known versions.
            version = getVersionsOfProduct(urllib.parse.quote_plus(product).lower())
        else:
            # Nothing chosen yet: reuse the browse behaviour.
            version = None
            if vendor:
                vendor = urllib.parse.quote_plus(vendor).lower()
            listing = getBrowseList(vendor)
            vendor, product = listing["vendor"], listing["product"]
        return render_template('listmanagement.html', vendor=vendor,
                               product=product, version=version)
    except redisExceptions.ConnectionError:
        return render_template('error.html',
                               status={'except': 'redis-connection',
                                       'info': {'host': Configuration.getRedisHost(),
                                                'port': Configuration.getRedisPort()}})
def adminStats():
    """Gather collection counts and last-update info for the admin view."""
    def last_update(name):
        # None when the info collection has no record for this source.
        record = db.info.find_one({'db': name})
        return record['last-modified'] if record is not None else None

    stats = {}
    stats['cveA'] = db.cves.count()
    stats['cveU'] = last_update('cve')
    stats['cpeA'] = db.cpe.count()
    stats['cpeU'] = last_update('cpe')
    stats['cpeOtherA'] = db.cpeother.count()
    stats['cpeOtherU'] = last_update('cpeother')
    stats['capecA'] = db.capec.count()
    stats['capecU'] = last_update('capec')
    stats['d2secA'] = db.d2sec.count()
    stats['d2secU'] = last_update('d2sec')
    stats['vendorA'] = db.vendor.count()
    stats['vendorU'] = last_update('vendor')
    stats['vfeedA'] = db.vfeed.count()
    stats['vfeedU'] = last_update('vfeed')
    stats['blA'] = db.mgmt_blacklist.count()
    stats['wlA'] = db.mgmt_whitelist.count()
    stats['dbName'] = Configuration.getMongoDB()
    stats['dbSize'] = db.command("dbstats")['dataSize']
    stats['dbOnDisk'] = db.command("dbstats")['storageSize']
    return stats
def enhance(systems):
    """Add CVE data to every CPE of every system (and its services)."""
    host, port = Configuration.getCVESearch()
    url = 'http://%s:%s/api/cvefor/%s'
    for system in systems:
        cpe = system['cpes'] if 'cpes' in system else None
        if cpe:
            enriched = []
            for c in cpe:
                c = urllib.parse.quote_plus(c).lower()
                raw = (urlopen(url % (host, port, c)).read()).decode('utf8')
                enriched.append({'cpe': c, 'cves': json.loads(str(raw))})
            system['cpes'] = enriched
        # get possible dpe info and store in dpe
        for service in system['services']:
            if 'cpe' in service:
                c = urllib.parse.quote_plus(service['cpe']).lower()
                raw = (urlopen(url % (host, port, c)).read()).decode('utf8')
                service['cves'] = json.loads(str(raw))
            # get dpe info for service
    return systems
def enhance(scan, remove):
    """Enrich a scan in place with CVE data fetched through send_request."""
    host, port = Configuration.getCVESearch()
    encode = True
    copied = {}

    def lookup(cpe_string):
        # send_request threads the encoding flag and a response cache
        # through every call; keep that state in the enclosing scope.
        nonlocal encode, copied
        data, encode, copied = send_request(host, port, cpe_string, encode, remove, copied)
        return json.loads(str(data))

    for system in scan['systems']:
        cpe = system['cpes'] if 'cpes' in system else None
        if cpe:
            system['cpes'] = [{'cpe': c, 'cves': lookup(c)} for c in cpe]
        # TODO get possible dpe info and store in dpe
        for service in system['services']:
            if 'cpe' in service:
                service['cves'] = lookup(service["cpe"])
            # TODO get dpe info for service
    scan['enhanced'] = {"time": int(datetime.now().strftime('%s'))}
    return scan
def enhance(systems, exploitsOnly=False, filters=None):
    """Attach (optionally filtered) CVE data to systems and their services.

    filters: dict with 'access.vector' and 'impact' lists; empty lists or
    missing keys mean "no filtering".  BUG FIX: the old default was the
    mutable (and wrong-typed) ``[]`` — indexing it with 'access.vector'
    raised TypeError as soon as the default was actually used.
    """
    if filters is None:
        filters = {'access.vector': [], 'impact': []}
    host, port = Configuration.getCVESearch()

    def _filtered_cves(cpe_string):
        """Fetch CVEs for one (lowercased) CPE and apply the filters."""
        data = (urlopen('http://%s:%s/api/cvefor/%s' % (host, port, cpe_string)).read()).decode('utf8')
        vulns = json.loads(str(data))
        # filters (.get guards against partially-populated filter dicts)
        if len(filters.get('access.vector', [])) != 0:
            vulns = [x for x in vulns if x['access']['vector'] in filters['access.vector']]
        for fil in filters.get('impact', []):
            vulns = [x for x in vulns if x['impact'][fil] != 'NONE']
        # exploits only
        if exploitsOnly:
            vulns = [x for x in vulns if ('map_cve_exploitdb' in x or 'map_cve_msf' in x)]
        return vulns

    for system in systems:
        cpe = system['cpes'] if 'cpes' in system else None
        if cpe:
            # The stored 'cpe' keeps the original casing; only the query
            # is lowercased (same as before).
            system['cpes'] = [{'cpe': c, 'cves': _filtered_cves(c.lower())} for c in cpe]
        # TODO get possible dpe info and store in dpe
        for service in system['services']:
            if 'cpe' in service:
                service['cves'] = _filtered_cves(service['cpe'].lower())
            # TODO get dpe info for service
    return systems
# --- Web interface bootstrap: imports, CLI arguments and Flask setup ---
from lib.PluginManager import PluginManager
from lib.Authentication import AuthenticationHandler
from lib.Toolkit import toStringFormattedCPE, toOldCPE, isURL, vFeedName, mergeSearchResults
import lib.CVEs as cves
import lib.DatabaseLayer as db
from sbin.db_whitelist import *
from sbin.db_blacklist import *

# parse command line arguments
argparser = argparse.ArgumentParser(description='Start CVE-Search web component')
argparser.add_argument('-v', action='store_true', help='verbose output')
args = argparser.parse_args()

# variables
app = Flask(__name__, static_folder='static', static_url_path='/static')
app.config['MONGO_DBNAME'] = Configuration.getMongoDB()
# Random per-start secret key: existing sessions do not survive a restart.
app.config['SECRET_KEY'] = str(random.getrandbits(256))
pageLength = Configuration.getPageLength()
listLogin = Configuration.listLoginRequired()
plugManager = PluginManager()
auth_handler = AuthenticationHandler()
# Filter defaults used when the user has not chosen anything yet.
defaultFilters = {'blacklistSelect': 'on', 'whitelistSelect': 'on', 'unlistedSelect': 'show', 'timeSelect': 'all', 'startDate': '', 'endDate': '', 'timeTypeSelect': 'Modified', 'cvssSelect': 'all', 'cvss': '', 'rejectedSelect': 'hide'}

# login manager
login_manager = LoginManager()
login_manager.init_app(app)
# db connectors
# Copyright (c) 2016 Pieter-Jan Moreels # Imports import json import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) from lib.Config import Configuration import lib.DatabaseLayer as db # To Do: Implement REDIS try: redis = Configuration.getRedisRefConnection() try: redis.info() except: sys.exit("Redis server not running on %s:%s" % (Configuration.getRedisHost(), Configuration.getRedisPort())) except Exception as e: print(e) sys.exit(1) try: (f, r) = Configuration.getFeedData('via4') except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("via4")))
if self.description_summary_tag: self.description_summary += ch.replace(" ", "") def endElement(self, name): if name == 'Description_Summary' and self.weakness_tag: self.description_summary_tag = False self.description_summary = self.description_summary + self.description_summary self.cwe[-1][ 'description_summary'] = self.description_summary.replace( "\n", "") elif name == 'Weakness': self.weakness_tag = False # dictionary cwedict = Configuration.getCWEDict() # make parser parser = make_parser() ch = CWEHandler() parser.setContentHandler(ch) # check modification date try: f = Configuration.getFile(cwedict) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (cwedict)) lastmodified = f.headers['last-modified'] i = db.getLastModified('cwe') if i is not None: if lastmodified == i:
class API:
    """Flask application exposing the read-only JSON API under /api/."""

    app = Flask(__name__, static_folder="static", static_url_path="/static")
    app.config["MONGO_DBNAME"] = Configuration.getMongoDB()
    # Random per-start secret key: sessions reset on restart.
    app.config["SECRET_KEY"] = str(random.getrandbits(256))

    def __init__(self):
        # Route table: r = URL rule, m = HTTP methods, f = view function.
        routes = [
            {"r": "/api/", "m": ["GET"], "f": self.api_documentation},
            {"r": "/api/cpe2.3/<path:cpe>", "m": ["GET"], "f": self.api_cpe23},
            {"r": "/api/cpe2.2/<path:cpe>", "m": ["GET"], "f": self.api_cpe22},
            {"r": "/api/cvefor/<path:cpe>", "m": ["GET"], "f": self.api_cvesFor},
            {"r": "/api/cve/<cveid>", "m": ["GET"], "f": self.api_cve},
            {"r": "/api/cwe", "m": ["GET"], "f": self.api_cwe},
            {"r": "/api/cwe/<int:cwe_id>", "m": ["GET"], "f": self.api_cwe},
            {"r": "/api/capec/<cweid>", "m": ["GET"], "f": self.api_capec},
            {"r": "/api/last", "m": ["GET"], "f": self.api_last},
            {"r": "/api/last/", "m": ["GET"], "f": self.api_last},
            {"r": "/api/last/<int:limit>", "m": ["GET"], "f": self.api_last},
            {"r": "/api/query", "m": ["GET"], "f": self.api_query},
            {"r": "/api/browse", "m": ["GET"], "f": self.api_browse},
            {"r": "/api/browse/", "m": ["GET"], "f": self.api_browse},
            {"r": "/api/browse/<path:vendor>", "m": ["GET"], "f": self.api_browse},
            {"r": "/api/search/<vendor>/<path:product>", "m": ["GET"], "f": self.api_search,},
            {"r": "/api/search/<path:search>", "m": ["GET"], "f": self.api_text_search},
            {"r": "/api/link/<key>/<value>", "m": ["GET"], "f": self.api_link},
            {"r": "/api/dbInfo", "m": ["GET"], "f": self.api_dbInfo},
        ]
        for route in routes:
            self.addRoute(route)

    def addRoute(self, route):
        # Register one route dict on the shared Flask app.
        self.app.add_url_rule(route["r"], view_func=route["f"], methods=route["m"])

    #############
    # Decorator #
    #############
    def api(funct):
        """Decorator for API views: catches errors and serializes the output."""
        @wraps(funct)
        def api_wrapper(*args, **kwargs):
            data = error = None
            # Get data (and possibly errors)
            try:
                data = funct(*args, **kwargs)
            except APIError as e:
                error = ({"status": "error", "reason": e.message}, e.status)
            except Exception as e:
                print(e)
                error = ({"status": "error", "reason": "Internal server error"}, 500)
            # Check if data should be returned as html or data
            try:
                returnType = "application/json"
                if request.url_rule.rule.lower().startswith("/api/") or request.url_rule.rule.lower().endswith(".json"):
                    # Support JSONP
                    if request.args.get("callback", False):
                        data = "%s(%s)" % (request.args.get("callback"), data)
                # Check API version for backwards compatibility. We'll call the old API v1.0
                elif request.headers.get("Version") in ["1.1"]:
                    # Get the requested return type
                    returnType = request.headers.get("Accept", "*/*")
                    # Default to JSON
                    if any(t in returnType for t in ["json", "application/*", "text/*", "*/*"]):
                        data = (error if error else {"status": "success", "data": data})
                    elif "plain" in returnType:
                        pass  # No need to do anything, but needs to be accepted
                    else:
                        data = ({"status": "error", "reason": "Unknown Content-type requested",}, 415,)
                        returnType = "application/json"
                if type(data) is not str:
                    if type(data) is tuple:
                        data = list(data)
                        data[0] = json.dumps(convertDatetime(dct=data[0]), indent=4, sort_keys=True, default=json_util.default,)
                    else:
                        data = (json.dumps(convertDatetime(dct=data), indent=4, sort_keys=True, default=json_util.default,), 200,)
                # NOTE(review): when data is still a plain str (JSONP path),
                # data[0]/data[1] index single characters here — looks
                # unintended; any failure falls into the except below.
                return Response(data[0], mimetype=returnType), data[1]
            except Exception as e:
                print(e)
                pass
            if error and error[1] == 500:
                raise (APIError(error[0]["reason"]))
            return data
        return api_wrapper

    #############
    # FUNCTIONS #
    #############
    def generate_minimal_query(self, f):
        """Translate the filter dict into a list of MongoDB query clauses."""
        query = []
        # retrieving lists
        if f["rejectedSelect"] == "hide":
            # Negative look-ahead drops "** REJECT **" candidate entries.
            query.append({"summary": re.compile(r"^(?!\*\* REJECT \*\*\s+DO NOT USE THIS CANDIDATE NUMBER.*)")})
        # cvss logic
        if f["cvssSelect"] == "above":
            query.append({"cvss": {"$gt": float(f["cvss"])}})
        elif f["cvssSelect"] == "equals":
            query.append({"cvss": float(f["cvss"])})
        elif f["cvssSelect"] == "below":
            query.append({"cvss": {"$lt": float(f["cvss"])}})
        # date logic
        if f["timeSelect"] != "all":
            # NOTE(review): startDate/endDate are only bound when the
            # corresponding field is non-empty; a mode that needs a missing
            # bound would raise NameError — confirm the form guarantees it.
            if f["startDate"]:
                startDate = parse_datetime(f["startDate"], ignoretz=True, dayfirst=True)
            if f["endDate"]:
                endDate = parse_datetime(f["endDate"], ignoretz=True, dayfirst=True)
            if f["timeSelect"] == "from":
                query.append({f["timeTypeSelect"]: {"$gt": startDate}})
            elif f["timeSelect"] == "until":
                query.append({f["timeTypeSelect"]: {"$lt": endDate}})
            elif f["timeSelect"] == "between":
                query.append({f["timeTypeSelect"]: {"$gt": startDate, "$lt": endDate}})
            elif f["timeSelect"] == "outside":
                query.append({"$or": [{f["timeTypeSelect"]: {"$lt": startDate}}, {f["timeTypeSelect"]: {"$gt": endDate}}]})
        return query

    def filter_logic(self, filters, skip, limit=None):
        # Falls back to the configured page length when no limit is given.
        query = self.generate_minimal_query(filters)
        limit = limit if limit else self.args["pageLength"]
        return getCVEs(limit=limit, skip=skip, query=query)

    ##########
    # ROUTES #
    ##########
    # /api
    def api_documentation(self):
        return render_template("api.html")

    # /api/cpe2.3/<cpe>
    @api
    def api_cpe23(self, cpe):
        cpe = toStringFormattedCPE(cpe)
        # NOTE(review): the ternary binds as (cpe, (200 if cpe else "None"), 404)
        # — a 3-tuple, probably meant to be (cpe, 200) / ("None", 404); confirm.
        return cpe, 200 if cpe else "None", 404

    # /api/cpe2.2/<cpe>
    @api
    def api_cpe22(self, cpe):
        cpe = toOldCPE(cpe)
        # NOTE(review): same ternary-precedence concern as api_cpe23.
        return cpe, 200 if cpe else "None", 404

    # /api/cvefor/<cpe>
    @api
    def api_cvesFor(self, cpe):
        cpe = urllib.parse.unquote_plus(cpe)
        return qcvesForCPE(cpe)

    # /api/cve/<cveid>
    @api
    def api_cve(self, cveid):
        cvesp = cves.last(rankinglookup=True, namelookup=True, via4lookup=True, capeclookup=True)
        cve = cvesp.getcve(cveid=cveid.upper())
        if not cve:
            raise (APIError("cve not found", 404))
        return cve

    # /api/cwe
    # /api/cwe/<cwe_id>
    @api
    def api_cwe(self, cwe_id=None):
        return getCAPECFor(str(cwe_id)) if cwe_id else getCWEs()

    # /api/capec/<cweid>
    @api
    def api_capec(self, cweid):
        return getCAPEC(cweid)

    # /api/last
    # /api/last/
    # /api/last/<limit>
    @api
    def api_last(self, limit=None):
        limit = limit if limit else 30
        cvesp = cves.last(rankinglookup=True, namelookup=True, via4lookup=True, capeclookup=True)
        cve = cvesp.get(limit=limit)
        return cve

    # /query
    @api
    def api_query(self):
        # All query parameters arrive as request headers.
        f = {"rejectedSelect": request.headers.get("rejected"),
             "cvss": request.headers.get("cvss_score"),
             "cvssSelect": request.headers.get("cvss_modifier"),
             "startDate": request.headers.get("time_start"),
             "endDate": request.headers.get("time_end"),
             "timeSelect": request.headers.get("time_modifier"),
             "timeTypeSelect": request.headers.get("time_type"),
             "skip": request.headers.get("skip"),
             "limit": request.headers.get("limit"),}
        try:
            skip = int(f["skip"]) if f["skip"] else 0
        except:
            raise (APIError("skip must be an int", 400))
        try:
            limit = int(f["limit"]) if f["limit"] else 0
        except:
            raise (APIError("limit must be an int", 400))
        return self.filter_logic(f, skip, limit)

    # /api/browse
    # /api/browse/
    # /api/browse/<vendor>
    @api
    def api_browse(self, vendor=None):
        if vendor:
            vendor = urllib.parse.quote_plus(vendor).lower()
        try:
            browseList = getBrowseList(vendor)
        except redis_connection_error:
            raise (APIError("Server could not connect to the browsing repository", 503))
        if isinstance(browseList, dict):
            return browseList
        else:
            return {}

    # /api/search/<vendor>/<path:product>
    @api
    def api_search(self, vendor=None, product=None):
        if not (vendor and product):
            return {}
        search = vendor + ":" + product
        # Not using query.cvesForCPE, because that one gives too much info
        # return json.dumps(db.cvesForCPE(search), default=json_util.default)
        return cvesForCPE(search)

    # /api/search/<path:search>
    @api
    def api_text_search(self, search=None):
        return getSearchResults(search)

    # /api/link/<key>/<value>
    @api
    def api_link(self, key=None, value=None):
        key = self.htmlDecode(key)
        value = self.htmlDecode(value)
        regex = re.compile(re.escape(value), re.I)
        data = {"cves": via4Linked(key, regex)}
        # Aggregate CVSS stats over the linked CVEs that have a score.
        cvssList = [float(x["cvss"]) for x in data["cves"] if x.get("cvss")]
        if cvssList:
            data["stats"] = {"maxCVSS": max(cvssList), "minCVSS": min(cvssList), "count": len(data["cves"]),}
        else:
            data["stats"] = {"maxCVSS": 0, "minCVSS": 0, "count": len(data["cves"])}
        return data

    # /api/dbInfo
    @api
    def api_dbInfo(self):
        return getDBStats()

    ########################
    # Web Server Functions #
Functions # ######################## # signal handlers def sig_handler(self, sig, frame): print("Caught signal: %s" % sig) IOLoop.instance().add_callback(self.shutdown) def shutdown(self): MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 3 print("Stopping http server") self.http_server.stop() print("Will shutdown in %s seconds ..." % MAX_WAIT_SECONDS_BEFORE_SHUTDOWN) io_loop = IOLoop.instance() deadline = time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN def stop_loop(): now = time.time() if now < deadline and (io_loop._callbacks or io_loop._timeouts): io_loop.add_timeout(now + 1, stop_loop) else: io_loop.stop() print("Shutdown") stop_loop() def start(self): # get properties flaskHost = Configuration.getFlaskHost() flaskPort = Configuration.getFlaskPort() flaskDebug = Configuration.getFlaskDebug() # logging if Configuration.getLogging(): logfile = Configuration.getLogfile() pathToLog = logfile.rsplit("/", 1)[0] if not os.path.exists(pathToLog): os.makedirs(pathToLog) maxLogSize = Configuration.getMaxLogSize() backlog = Configuration.getBacklog() file_handler = RotatingFileHandler(logfile, maxBytes=maxLogSize, backupCount=backlog) file_handler.setLevel(logging.ERROR) formatter = logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s") file_handler.setFormatter(formatter) self.app.logger.addHandler(file_handler) if flaskDebug: # start debug flask server self.app.run(host=flaskHost, port=flaskPort, debug=flaskDebug) else: # start asynchronous server using tornado wrapper for flask # ssl connection print("Server starting...") if Configuration.useSSL(): ssl_options = { "certfile": os.path.join(_runPath, "../", Configuration.getSSLCert()), "keyfile": os.path.join(_runPath, "../", Configuration.getSSLKey()), } else: ssl_options = None signal.signal(signal.SIGTERM, self.sig_handler) signal.signal(signal.SIGINT, self.sig_handler) self.http_server = HTTPServer(WSGIContainer(self.app), ssl_options=ssl_options) self.http_server.bind(flaskPort, address=flaskHost) 
self.http_server.start(0) # Forks multiple sub-processes IOLoop.instance().start()
from lib.Config import Configuration from lib.PluginManager import PluginManager from lib.Toolkit import tk_compile from lib.DatabaseLayer import ( getRules, getCVEs, getDBStats, ) from dateutil.parser import parse as parse_datetime from sbin.db_blacklist import insertBlacklist from sbin.db_whitelist import insertWhitelist config = Configuration() plugManager = PluginManager() plugManager.loadPlugins() defaultFilters = { "timeSelect": "all", "startDate": "", "endDate": "", "timeTypeSelect": "Modified", "cvssSelect": "all", "cvss": "0", "rejectedSelect": "hide", } config_args = {
action='append', metavar="file", help='Read a file of CPEs and remove them from the blacklist') argparser.add_argument('-t', metavar="type", default="cpe", help='Type of item to blacklist. Default: CPE') argparser.add_argument('-i', metavar="file", help='Import blacklist from file') argparser.add_argument('-e', metavar="file", help='Export blacklist to file') argparser.add_argument('-d', action='store_true', help='Drop the blacklist') argparser.add_argument('-f', action='store_true', help='Force') argparser.add_argument('-v', action='store_true', help='Verbose') args = argparser.parse_args() # connect to db db = Configuration.getMongoConnection() collection = db.mgmt_blacklist def importBlacklist(importFile): oList = CPEList(collection, args) oList.importList(importFile) def exportBlacklist(exportFile): oList = CPEList(collection, args) oList.exportList(exportFile) def dropBlacklist(): oList = CPEList(collection, args)
class Minimal(API):
    """Minimal (read-only) web interface: CVE listing, browsing and search
    routes on top of the API base class, without authentication/admin parts."""

    #############
    # Variables #
    #############

    # Class-level defaults; Main subclass updates these dicts in place.
    defaultFilters = {
        "timeSelect": "all",
        "startDate": "",
        "endDate": "",
        "timeTypeSelect": "Modified",
        "cvssSelect": "all",
        "cvss": "",
        "rejectedSelect": "hide",
    }
    args = {
        "pageLength": Configuration.getPageLength(),
        "listLogin": Configuration.listLoginRequired(),
        "minimal": True,
    }

    def __init__(self):
        self.minimal = True
        super().__init__()
        # Route table: r = rule, m = HTTP methods, f = view function.
        routes = [
            {"r": "/", "m": ["GET"], "f": self.index},
            {"r": "/", "m": ["POST"], "f": self.index_post},
            {"r": "/r/<int:r>", "m": ["GET"], "f": self.index_filter_get},
            {"r": "/r/<int:r>", "m": ["POST"], "f": self.index_filter_post},
            {"r": "/cve/<cveid>", "m": ["GET"], "f": self.cve},
            {"r": "/cwe", "m": ["GET"], "f": self.cwe},
            {"r": "/cwe/<cweid>", "m": ["GET"], "f": self.relatedCWE},
            {"r": "/capec/<capecid>", "m": ["GET"], "f": self.capec},
            {"r": "/browse", "m": ["GET"], "f": self.browse},
            {"r": "/browse/", "m": ["GET"], "f": self.browse},
            {"r": "/browse/<vendor>", "m": ["GET"], "f": self.browse},
            {"r": "/search/<vendor>/<path:product>", "m": ["GET"], "f": self.search},
            {"r": "/search", "m": ["POST"], "f": self.freetext_search},
            {"r": "/link/<key>/<value>", "m": ["GET"], "f": self.link},
        ]
        # Jinja template filters: n = filter name, f = callable.
        filters = [
            {"n": "htmlEncode", "f": self.htmlEncode},
            {"n": "htmlDecode", "f": self.htmlDecode},
            {"n": "sortIntLikeStr", "f": self.sortIntLikeStr},
        ]
        context_processors = [self.JSON2HTMLTable]
        error_handlers = [{"e": 404, "f": self.page_not_found}]
        for route in routes:
            self.addRoute(route)
        for _filter in filters:
            self.addFilter(_filter)
        for context in context_processors:
            self.addContextProcessors(context)
        for handler in error_handlers:
            self.app.register_error_handler(handler["e"], handler["f"])

    #############
    # Functions #
    #############

    def addFilter(self, _filter):
        """Register a Jinja template filter on the Flask app."""
        self.app.add_template_filter(_filter["f"], _filter["n"])

    def addContextProcessors(self, context_processor):
        """Register a Jinja context processor on the Flask app."""
        self.app.context_processor(context_processor)

    def getFilterSettingsFromPost(self, r):
        """Apply the filters posted in the request form; on any failure fall
        back to an unfiltered page of CVEs and flag the error.

        :param r: pagination offset (number of records to skip)
        """
        filters = dict(request.form)
        errors = False
        # retrieving data
        try:
            cve = self.filter_logic(filters, r)
        # NOTE(review): broad except and the bound exception `e` is unused —
        # the original failure is silently discarded.
        except Exception as e:
            cve = getCVEs(limit=self.args["pageLength"], skip=r)
            errors = True
        return {"filters": filters, "cve": cve, "errors": errors}

    ##########
    # ROUTES #
    ##########

    # /
    def index(self):
        """Front page: first page of CVEs with the default filters."""
        cve = self.filter_logic(self.defaultFilters, 0)
        return render_template("index.html", cve=cve, r=0, **self.args)

    # /
    def index_post(self):
        """Front page with user-posted filter settings."""
        args = dict(self.getFilterSettingsFromPost(0), **self.args)
        return render_template("index.html", r=0, **args)

    # /r/<r>
    def index_filter_get(self, r):
        """Paginated front page; negative/empty offsets clamp to 0."""
        if not r or r < 0:
            r = 0
        cve = self.filter_logic(self.defaultFilters, r)
        return render_template("index.html", cve=cve, r=r, **self.args)

    # /r/<r>
    def index_filter_post(self, r):
        """Paginated front page with posted filter settings."""
        if not r or r < 0:
            r = 0
        args = dict(self.getFilterSettingsFromPost(r), **self.args)
        return render_template("index.html", r=r, **args)

    # /cve/<cveid>
    def cve(self, cveid):
        """Detail page for a single CVE; renders an error page when unknown."""
        cve = self.api_cve(cveid)
        if not cve:
            return render_template(
                "error.html",
                status={
                    "except": "cve-not-found",
                    "info": {
                        "cve": cveid
                    }
                },
                minimal=self.minimal,
            )
        return render_template("cve.html", cve=cve, minimal=self.minimal)

    # /cwe
    def cwe(self):
        """List only class-level CWE weaknesses."""
        cwes = [
            x for x in self.api_cwe() if x["weaknessabs"].lower() == "class"
        ]
        return render_template("cwe.html", cwes=cwes, capec=None,
                               minimal=self.minimal)

    # /cwe/<cweid>
    def relatedCWE(self, cweid):
        """CWE detail page with the CAPEC entries related to it."""
        cwes = {x["id"]: x["name"] for x in self.api_cwe()}
        return render_template(
            "cwe.html",
            cwes=cwes,
            cwe=cweid,
            capec=getCAPECFor(cweid),
            minimal=self.minimal,
        )

    # /capec/<capecid>
    def capec(self, capecid):
        """CAPEC detail page."""
        cwes = {x["id"]: x["name"] for x in self.api_cwe()}
        return render_template("capec.html", cwes=cwes,
                               capec=getCAPEC(capecid),
                               minimal=self.minimal)

    # /browse
    # /browse/
    # /browse/<vendor>
    def browse(self, vendor=None):
        """Vendor/product browser; renders an error page on API failure."""
        try:
            data = self.api_browse(vendor)
            if "product" in data and "vendor" in data:
                return render_template(
                    "browse.html",
                    product=data["product"],
                    vendor=data["vendor"],
                    minimal=self.minimal,
                )
            else:
                return render_template(
                    "error.html",
                    minimal=self.minimal,
                    status={
                        "except": "browse_exception",
                        "info": "No CPE"
                    },
                )
        except APIError as e:
            return render_template(
                "error.html",
                minimal=self.minimal,
                status={
                    "except": "browse_exception",
                    "info": e.message
                },
            )

    # /search/<vendor>/<product>
    def search(self, vendor=None, product=None):
        """CVEs matching a vendor:product CPE pair."""
        search = vendor + ":" + product
        cve = cvesForCPE(search)
        return render_template("search.html", vendor=vendor, product=product,
                               cve=cve, minimal=self.minimal)

    # /search
    def freetext_search(self):
        """Full-text search; an empty query falls back to the front page."""
        search = request.form.get("search")
        if search == "":
            return self.index()
        result = getSearchResults(search)
        cve = {"results": result["data"], "total": len(result["data"])}
        errors = result["errors"] if "errors" in result else []
        return render_template(
            "search.html",
            cve=cve,
            errors=errors,
            freetextsearch=search,
            minimal=self.minimal,
        )

    # /link/<key>/<value>
    def link(self, key=None, value=None):
        """CVEs linked through a VIA4 field (key) matching value, with
        min/max CVSS statistics over the matches."""
        key = self.htmlDecode(key)
        value = self.htmlDecode(value)
        regex = re.compile(re.escape(value), re.I)
        cve = via4Linked(key, regex)
        cvssList = [float(x["cvss"]) for x in cve["results"] if x.get("cvss")]
        if cvssList:
            # NOTE(review): len(cve) is the number of keys in the result dict,
            # not the number of matched CVEs — likely meant len(cve["results"]);
            # confirm against the template before changing.
            stats = {
                "maxCVSS": max(cvssList),
                "minCVSS": min(cvssList),
                "count": len(cve),
            }
        else:
            stats = {"maxCVSS": 0, "minCVSS": 0, "count": len(cve)}
        return render_template(
            "linked.html",
            via4map=key.split(".")[0],
            field=".".join(key.split(".")[1:]),
            value=value,
            cve=cve,
            stats=stats,
            minimal=self.minimal,
        )

    ###########
    # Filters #
    ###########

    def htmlEncode(self, string):
        """URL-encode a string for use in /link routes (lower-cased)."""
        return urllib.parse.quote_plus(string).lower()

    def htmlDecode(self, string):
        """Reverse of htmlEncode (case is not restored)."""
        return urllib.parse.unquote_plus(string)

    def sortIntLikeStr(self, datalist):
        """Sort a list of numeric strings by integer value."""
        return sorted(datalist, key=lambda k: int(k))

    def JSON2HTMLTable(self):
        """Context processor: recursively render JSON-like data (dict/list/str)
        as nested HTML tables/lists, with /link anchors for leaf strings."""
        # Doublequote, because we have to |safe the content for the tags
        def doublequote(data):
            return urllib.parse.quote_plus(urllib.parse.quote_plus(data))

        def JSON2HTMLTableFilter(data, stack=None):
            _return = ""
            if type(stack) == str:
                stack = [stack]
            if type(data) == list:
                if len(data) == 1:
                    _return += JSON2HTMLTableFilter(data[0], stack)
                else:
                    _return += '<ul class="via4">'
                    for item in data:
                        _return += "<li>%s</li>" % JSON2HTMLTableFilter(
                            item, stack)
                    _return += "</ul>"
            elif type(data) == dict:
                _return += '<table class="invisiTable">'
                # stack tracks the dotted key path used to build /link URLs
                for key, val in sorted(data.items()):
                    _return += "<tr><td><b>%s</b></td><td>%s</td></tr>" % (
                        key,
                        JSON2HTMLTableFilter(val, stack + [key]),
                    )
                _return += "</table>"
            elif type(data) == str:
                if stack:
                    _return += ("<a href='/link/" +
                                doublequote(".".join(stack)) + "/" +
                                doublequote(data) + "'>")  # link opening
                    _return += "<span class='glyphicon glyphicon-link' aria-hidden='true'></span> </a>"
                _return += ("<a target='_blank' href='%s'>%s</a>" %
                            (data, data) if isURL(data) else data)
            _return += ""
            return _return

        return dict(JSON2HTMLTable=JSON2HTMLTableFilter)

    ##################
    # Error Messages #
    ##################

    def page_not_found(self, e):
        """404 handler."""
        return render_template("404.html", minimal=self.minimal), 404
import os import sys import argparse import re from lxml.html import fromstring runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) import zipfile import shutil verbose = False from lib.Config import Configuration import lib.DatabaseLayer as db RefUrl = Configuration.getRefURL() tmppath = Configuration.getTmpdir() argparser = argparse.ArgumentParser(description='Populate/update the NIST ref database') argparser.add_argument('-v', action='store_true', help='verbose output', default=False) args = argparser.parse_args() if args.v: verbose = True # check modification date try: u = Configuration.getFile(RefUrl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(RefUrl)) i = db.getLastModified('ref')
    def __init__(self):
        """Full web interface: extends the Minimal UI with authentication,
        plugin routes and the admin/list-management endpoints."""
        # TODO: make auth handler and plugin manager singletons
        Advanced_API.__init__(self)
        Minimal.__init__(self)
        self.minimal = False
        self.auth_handler = AuthenticationHandler()
        self.plugManager = PluginManager()
        self.login_manager = LoginManager()
        self.plugManager.loadPlugins()
        self.login_manager.init_app(self.app)
        self.login_manager.user_loader(self.load_user)
        self.redisdb = Configuration.getRedisVendorConnection()
        # Extend the Minimal defaults with white/blacklist visibility filters.
        self.defaultFilters.update({
            'blacklistSelect': 'on',
            'whitelistSelect': 'on',
            'unlistedSelect': 'show',
        })
        self.args.update({'minimal': False})
        # Arguments handed to plugin pages on each request.
        self.pluginArgs = {
            "current_user": current_user,
            "plugin_manager": self.plugManager
        }
        # Additional routes (r = rule, m = methods, f = view function).
        # NOTE(review): the '/login' POST route appears twice in this list
        # (first and last entry) — the duplicate registration looks redundant.
        routes = [{
            'r': '/cve/<cveid>', 'm': ['GET'], 'f': self.cve
        }, {
            'r': '/_get_plugins', 'm': ['GET'], 'f': self._get_plugins
        }, {
            'r': '/plugin/_get_cve_actions', 'm': ['GET'], 'f': self._get_cve_actions
        }, {
            'r': '/plugin/<plugin>', 'm': ['GET'], 'f': self.openPlugin
        }, {
            'r': '/plugin/<plugin>/subpage/<page>', 'm': ['GET'], 'f': self.openPluginSubpage
        }, {
            'r': '/plugin/<plugin>/_cve_action/<action>', 'm': ['GET'], 'f': self._jsonCVEAction
        }, {
            'r': '/login', 'm': ['POST'], 'f': self.login_check
        }, {
            'r': '/logout', 'm': ['POST'], 'f': self.logout
        }, {
            'r': '/admin', 'm': ['GET'], 'f': self.admin
        }, {
            'r': '/admin/', 'm': ['GET'], 'f': self.admin
        }, {
            'r': '/admin/change_pass', 'm': ['GET'], 'f': self.change_pass
        }, {
            'r': '/admin/request_token', 'm': ['GET'], 'f': self.request_token
        }, {
            'r': '/admin/updatedb', 'm': ['GET'], 'f': self.updatedb
        }, {
            'r': '/admin/whitelist/import', 'm': ['POST'], 'f': self.listImport
        }, {
            'r': '/admin/blacklist/import', 'm': ['POST'], 'f': self.listImport
        }, {
            'r': '/admin/whitelist/export', 'm': ['GET'], 'f': self.listExport
        }, {
            'r': '/admin/blacklist/export', 'm': ['GET'], 'f': self.listExport
        }, {
            'r': '/admin/whitelist/drop', 'm': ['POST'], 'f': self.listDrop
        }, {
            'r': '/admin/blacklist/drop', 'm': ['POST'], 'f': self.listDrop
        }, {
            'r': '/admin/whitelist', 'm': ['GET'], 'f': self.listView
        }, {
            'r': '/admin/blacklist', 'm': ['GET'], 'f': self.listView
        }, {
            'r': '/admin/addToList', 'm': ['GET'], 'f': self.listAdd
        }, {
            'r': '/admin/removeFromList', 'm': ['GET'], 'f': self.listRemove
        }, {
            'r': '/admin/editInList', 'm': ['GET'], 'f': self.listEdit
        }, {
            'r': '/admin/listmanagement', 'm': ['GET'], 'f': self.listManagement
        }, {
            'r': '/admin/listmanagement/<vendor>', 'm': ['GET'], 'f': self.listManagement
        }, {
            'r': '/admin/listmanagement/<vendor>/<product>', 'm': ['GET'], 'f': self.listManagement
        }, {
            'r': '/admin/listmanagement/add', 'm': ['GET'], 'f': self.listManagementAdd
        }, {
            'r': '/login', 'm': ['POST'], 'f': self.login_check
        }]
        for route in routes:
            self.addRoute(route)
# Copyright (c) 2015 Pieter-Jan Moreels - [email protected] # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) import argparse import lib.CVEs as cves from lib.Config import Configuration from lib.ProgressBar import progressbar # connect to db db = Configuration.getMongoConnection() collection = db.cves argParser = argparse.ArgumentParser(description='Fulltext indexer for the MongoDB CVE collection') argParser.add_argument('-v', action='store_true', default=False, help='Verbose logging') argParser.add_argument('-l', default=5, help='Number of last entries to index (Default: 5) - 0 to index all documents') argParser.add_argument('-n', action='store_true', default=False, help='lookup complete cpe (Common Platform Enumeration) name for vulnerable configuration to add in the index') args = argParser.parse_args() c = cves.last(namelookup=args.n) indexpath = Configuration.getIndexdir() from whoosh.index import create_in, exists_in, open_dir from whoosh.fields import Schema, TEXT, ID
def create_app(version, run_path):
    """Flask application factory: wires config, JWT auth, SocketIO,
    blueprints, template filters and context processors.

    :param version: application version string exposed to templates
    :param run_path: runtime path stored in the app config
    :return: (app, socketio) tuple
    """
    global app, token_blacklist, socketio
    app = Flask(__name__)
    app.config["version"] = version
    app.config["run_path"] = run_path
    config = Configuration()
    # "full" interface disables the login/admin parts (WebInterface False).
    if config.getWebInterface().lower() == "full":
        app.config["WebInterface"] = False
    else:
        app.config["WebInterface"] = True
    app.config["MONGO_DBNAME"] = config.getMongoDB()
    # Secrets are regenerated per process: sessions/tokens do not survive
    # a restart and are not shared between workers.
    app.config["SECRET_KEY"] = str(random.getrandbits(256))
    app.config["JWT_SECRET_KEY"] = str(random.getrandbits(256))
    app.config["JWT_ACCESS_TOKEN_EXPIRES"] = ACCESS_EXPIRES
    app.config["JWT_REFRESH_TOKEN_EXPIRES"] = REFRESH_EXPIRES
    app.config["JWT_BLACKLIST_ENABLED"] = True
    app.config["JWT_BLACKLIST_TOKEN_CHECKS"] = ["access", "refresh"]
    # Revoked JWTs are tracked in Redis.
    token_blacklist = config.getRedisTokenConnection()
    app.config["RESTX_MASK_SWAGGER"] = False
    socketio = SocketIO(app)
    Breadcrumbs(app=app)
    Bootstrap(app)
    jwt = JWTManager(app)

    @jwt.additional_claims_loader
    def add_claims_to_access_token(identity):
        return {"user": identity}

    @jwt.token_in_blocklist_loader
    def check_if_token_is_revoked(decrypted_token):
        # A token is revoked when its jti is flagged "true" in Redis.
        jti = decrypted_token["jti"]
        entry = token_blacklist.get(jti)
        if entry == "true":
            return True
        return False

    login_manager.init_app(app)
    login_manager.login_message = "You must be logged in to access this page!!!"
    login_manager.login_view = "auth.login"

    @login_manager.user_loader
    def load_user(id):
        return User.get(id, auth_handler)

    # Blueprints; auth/admin only when the login-enabled interface is active.
    from .home import home as home_blueprint
    app.register_blueprint(home_blueprint)
    from .plugins import plugins as plugins_blueprint
    app.register_blueprint(plugins_blueprint, url_prefix="/plugin")
    if not app.config["WebInterface"]:
        from .auth import auth as auth_blueprint
        app.register_blueprint(auth_blueprint)
        from .admin import admin as admin_blueprint
        app.register_blueprint(admin_blueprint, url_prefix="/admin")
    from .restapi import blueprint as api
    app.register_blueprint(api)
    from .restapidocs import docs as docs_blueprint
    app.register_blueprint(docs_blueprint)

    @app.context_processor
    def version():
        def get_version():
            return app.config["version"]
        return dict(get_version=get_version)

    @app.context_processor
    def db_schema():
        def db_schema():
            # Surface schema-validation problems directly in the templates.
            sc = SchemaChecker()
            try:
                return sc.validate_schema()
            except DatabaseSchemaError as err:
                return err
        return dict(db_schema=db_schema)

    @app.context_processor
    def WebInterface():
        def get_WebInterface():
            return app.config["WebInterface"]
        return dict(get_WebInterface=get_WebInterface)

    @app.context_processor
    def JSON2HTMLTable():
        # Recursively render JSON-like data as nested HTML tables/lists,
        # with /link anchors on leaf strings.
        # Doublequote, because we have to |safe the content for the tags
        def doublequote(data):
            return urllib.parse.quote_plus(urllib.parse.quote_plus(data))

        def JSON2HTMLTableFilter(data, stack=None):
            _return = ""
            if type(stack) == str:
                stack = [stack]
            if type(data) == list:
                if len(data) == 1:
                    _return += JSON2HTMLTableFilter(data[0], stack)
                else:
                    _return += '<ul class="via4">'
                    for item in data:
                        _return += "<li>%s</li>" % JSON2HTMLTableFilter(
                            item, stack)
                    _return += "</ul>"
            elif type(data) == dict:
                _return += '<table class="invisiTable">'
                # stack carries the dotted key path used for /link URLs
                for key, val in sorted(data.items()):
                    _return += "<tr><td><b>%s</b></td><td>%s</td></tr>" % (
                        key,
                        JSON2HTMLTableFilter(val, stack + [key]),
                    )
                _return += "</table>"
            elif type(data) == str:
                if stack:
                    _return += ("<a href='/link/" +
                                doublequote(".".join(stack)) + "/" +
                                doublequote(data) + "'>")  # link opening
                    _return += "<i class='fas fa-link' aria-hidden='true'></i> </a>"
                _return += ("<a target='_blank' href='%s'>%s</a>" %
                            (data, data) if isURL(data) else data)
            _return += ""
            return _return

        return dict(JSON2HTMLTable=JSON2HTMLTableFilter)

    @app.template_filter("htmlEncode")
    def htmlEncode(string):
        return urllib.parse.quote_plus(string).lower()

    @app.template_filter("htmlDecode")
    def htmlDecode(string):
        return urllib.parse.unquote_plus(string)

    @app.template_filter("sortIntLikeStr")
    def sortIntLikeStr(datalist):
        return sorted(datalist, key=lambda k: int(k))

    @app.errorhandler(404)
    def page_not_found(error):
        return (
            render_template("404.html", ),
            404,
        )

    return app, socketio
# Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) from dateutil.parser import parse as parse_datetime import tarfile import shutil from lib.Config import Configuration import lib.DatabaseLayer as db vFeedurl = Configuration.getvFeedURL() vFeedstatus = Configuration.getvFeedStatus() tmppath = os.path.join(runPath, "..", Configuration.getTmpdir()) print(tmppath) # check modification date try: u = Configuration.getFile(vFeedurl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vFeedurl)) last_modified = parse_datetime(u.headers['last-modified'], ignoretz=True) i = db.getLastModified('vfeed') if i is not None: if last_modified == i: print("Not modified") sys.exit(0) # create temp file and download and unpack database
# # Copyright (c) 2015-2018 Pieter-Jan Moreels - [email protected] # imports import ast import re import sys import uuid import pymongo from passlib.hash import pbkdf2_sha256 from lib.Config import Configuration as conf # Variables db = conf.getMongoConnection() colCVE = db["cves"] colCPE = db["cpe"] colCWE = db["cwe"] colCPEOTHER = db["cpeother"] colWHITELIST = db["mgmt_whitelist"] colBLACKLIST = db["mgmt_blacklist"] colUSERS = db["mgmt_users"] colINFO = db["info"] colRANKING = db["ranking"] colVIA4 = db["via4"] colCAPEC = db["capec"] colPlugSettings = db["plugin_settings"] colPlugUserSettings = db["plugin_user_settings"] hash_rounds = 8000
class DownloadHandler(ABC):
    """
    DownloadHandler is the base class for all downloads and subsequent processing of the downloaded content.
    Each download script has a derived class which handles specifics for that type of content / download.
    """

    def __init__(self, feed_type, prefix=None):
        # Sentinel value queued once per worker to signal end-of-work.
        self._end = None
        self.feed_type = feed_type
        self.prefix = prefix
        # Redis-backed queues: parsed items and downloaded-file references.
        self.queue = RedisQueue(name=self.feed_type)
        self.file_queue = RedisQueue(name=f"{self.feed_type}:files")
        self.file_queue.clear()
        self.progress_bar = None
        self.last_modified = None
        self.do_process = True
        self.logger = logging.getLogger("DownloadHandler")
        self.config = Configuration()

    def __repr__(self):
        """return string representation of object"""
        return "<< DownloadHandler:{} >>".format(self.feed_type)

    def get_session(
        self,
        retries=3,
        backoff_factor=0.3,
        status_forcelist=(429, 500, 502, 503, 504),
        session=None,
    ):
        """
        Method for returning a session object per every requesting thread
        """
        proxies = {
            "http": self.config.getProxy(),
            "https": self.config.getProxy()
        }
        # One requests.Session per thread, cached in thread-local storage,
        # configured with retry/backoff and the proxy settings.
        if not hasattr(thread_local, "session"):
            session = session or requests.Session()
            retry = Retry(
                total=retries,
                read=retries,
                connect=retries,
                backoff_factor=backoff_factor,
                status_forcelist=status_forcelist,
            )
            session.proxies.update(proxies)
            adapter = HTTPAdapter(max_retries=retry)
            session.mount("http://", adapter)
            session.mount("https://", adapter)
            thread_local.session = session
        return thread_local.session

    def process_downloads(self, sites, collection):
        """
        Method to download and process files

        :param sites: List of file to download and process
        :type sites: list
        :param collection: Mongodb Collection name
        :type collection: str
        :return:
        :rtype:
        """
        # Worker count: WORKER_SIZE env var, else the executor default.
        worker_size = (int(os.getenv("WORKER_SIZE"))
                       if os.getenv("WORKER_SIZE") else min(
                           32, os.cpu_count() + 4))
        start_time = time.time()
        thread_map(self.download_site, sites, desc="Downloading files")
        # do_process is cleared by download_site when the feed is unmodified
        # or a download failed.
        if self.do_process:
            thread_map(
                self.file_to_queue,
                self.file_queue.get_full_list(),
                desc="Processing downloaded files",
            )
            self._process_queue_to_db(worker_size, collection=collection)
            # checking if last-modified was in the response headers and not set to default
            if "01-01-1970" != self.last_modified.strftime("%d-%m-%Y"):
                setColUpdate(self.feed_type.lower(), self.last_modified)
        self.logger.info("Duration: {}".format(
            timedelta(seconds=time.time() - start_time)))

    def chunk_list(self, lst, number):
        """
        Yield successive n-sized chunks from lst.

        :param lst: List to be chunked
        :type lst: list
        :param number: Chunk size
        :type number: int
        :return: Chunked list
        :rtype: list
        """
        for i in range(0, len(lst), number):
            yield lst[i:i + number]

    def _handle_queue_progressbar(self, description):
        """
        Method for handling the progressbar during queue processing

        :param description: Description for tqdm progressbar
        :type description: str
        """
        max_len = self.queue.qsize()
        pbar = tqdm(total=max_len, desc=description)
        not_Done = True
        q_len = max_len
        dif_old = 0
        x = 0
        # Poll the queue size every 5s and advance the bar by the drained
        # amount until the queue is empty.
        while not_Done:
            current_q_len = self.queue.qsize()
            if x % 10 == 0:
                # log stats the first cycle and every 10th cycle thereafter
                self.logger.debug(
                    "Queue max_len: {}, current_q_len: {}, q_len: {}, dif_old: {}, cycle: {}"
                    .format(max_len, current_q_len, q_len, dif_old, x))
            if current_q_len != 0:
                if current_q_len != q_len:
                    q_len = current_q_len
                    dif = max_len - q_len
                    pbar.update(int(dif - dif_old))
                    dif_old = dif
            else:
                pbar.update(int(max_len - dif_old))
                not_Done = False
            x += 1
            time.sleep(5)
        self.logger.debug(
            "Queue max_len: {}, q_len: {}, dif_old: {}, cycles: {}".format(
                max_len, q_len, dif_old, x))
        pbar.close()

    def _process_queue_to_db(self, max_workers, collection):
        """
        Method to write the queued database transactions into the database given a Queue reference and Collection name

        :param max_workers: Max amount of worker processes to use; defaults to min(32, os.cpu_count() + 4)
        :type max_workers: int
        :param collection: Mongodb Collection name
        :type collection: str
        """
        pbar = mp.Process(
            target=self._handle_queue_progressbar,
            args=("Transferring queue to database", ),
        )
        processes = [
            mp.Process(target=self._db_bulk_writer, args=(collection, ))
            for _ in range(max_workers)
        ]
        for proc in processes:
            proc.start()
            # Put triggers in the Queue to tell the workers to exit their for-loop
            self.queue.put(self._end)
        pbar.start()
        for proc in processes:
            proc.join()
        pbar.join()

    def _db_bulk_writer(self, collection, threshold=1000):
        """
        Method to act as worker for writing queued entries into the database

        :param collection: Mongodb Collection name
        :type collection: str
        :param threshold: Batch size threshold; defaults to 1000
        :type threshold: int
        """
        database = self.config.getMongoConnection()
        # Drain the queue in batches of `threshold`; iteration stops when a
        # batch comes back empty (after the end sentinel is consumed).
        for batch in iter(lambda: list(islice(self.queue, threshold)), []):
            try:
                database[collection].bulk_write(batch, ordered=False)
            except BulkWriteError as err:
                self.logger.debug("Error during bulk write: {}".format(err))
                pass

    def store_file(self, response_content, content_type, url):
        """
        Method to store the download based on the headers content type

        :param response_content: Response content
        :type response_content: bytes
        :param content_type: Content type; e.g. 'application/zip'
        :type content_type: str
        :param url: Download url
        :type url: str
        :return: A working directory and a filename
        :rtype: str and str
        """
        wd = tempfile.mkdtemp()
        filename = None
        if (content_type == "application/zip"
                or content_type == "application/x-zip"
                or content_type == "application/x-zip-compressed"
                or content_type == "application/zip-compressed"):
            # strip the ".zip" extension from the downloaded name
            filename = os.path.join(wd, url.split("/")[-1][:-4])
            self.logger.debug("Saving file to: {}".format(filename))
            with zipfile.ZipFile(BytesIO(response_content)) as zip_file:
                zip_file.extractall(wd)
        elif (content_type == "application/x-gzip"
              or content_type == "application/gzip"
              or content_type == "application/x-gzip-compressed"
              or content_type == "application/gzip-compressed"):
            # strip the ".gz" extension from the downloaded name
            filename = os.path.join(wd, url.split("/")[-1][:-3])
            self.logger.debug("Saving file to: {}".format(filename))
            buf = BytesIO(response_content)
            with open(filename, "wb") as f:
                f.write(gzip.GzipFile(fileobj=buf).read())
        elif content_type == "application/json" or content_type == "application/xml":
            filename = os.path.join(wd, url.split("/")[-1])
            self.logger.debug("Saving file to: {}".format(filename))
            with open(filename, "wb") as output_file:
                output_file.write(response_content)
        elif content_type == "application/local":
            filename = os.path.join(wd, url.split("/")[-1])
            self.logger.debug("Saving file to: {}".format(filename))
            # url[7:] drops the "file://" prefix
            copy(url[7:], filename)
        else:
            # NOTE(review): format string has one placeholder but two
            # arguments — the url is silently dropped from the message.
            self.logger.error(
                "Unhandled Content-Type encountered: {} from url".format(
                    content_type, url))
            sys.exit(1)
        return wd, filename

    def download_site(self, url):
        if url[:4] == "file":
            self.logger.info("Scheduling local hosted file: {}".format(url))
            # local file do not get last_modified header; so completely ignoring last_modified check and always asume
            # local file == the last modified file and set to current time.
            self.last_modified = datetime.datetime.now()
            self.logger.debug("Last {} modified value: {} for URL: {}".format(
                self.feed_type, self.last_modified, url))
            wd, filename = self.store_file(response_content=b"local",
                                           content_type="application/local",
                                           url=url)
            if filename is not None:
                self.file_queue.put((wd, filename))
            else:
                self.logger.error(
                    "Unable to retrieve a filename; something went wrong when trying to save the file"
                )
                sys.exit(1)
        else:
            self.logger.debug("Downloading from url: {}".format(url))
            session = self.get_session()
            try:
                with session.get(url) as response:
                    try:
                        self.last_modified = parse_datetime(
                            response.headers["last-modified"], ignoretz=True)
                    except KeyError:
                        self.logger.error(
                            "Did not receive last-modified header in the response; setting to default "
                            "(01-01-1970) and force update! Headers received: {}"
                            .format(response.headers))
                        # setting to last_modified to default value
                        self.last_modified = parse_datetime("01-01-1970")
                    self.logger.debug(
                        "Last {} modified value: {} for URL: {}".format(
                            self.feed_type, self.last_modified, url))
                    i = getInfo(self.feed_type.lower())
                    # Skip processing when the feed is unchanged since the
                    # stored last-modified value.
                    if i is not None:
                        if self.last_modified == i["last-modified"]:
                            self.logger.info(
                                "{}'s are not modified since the last update".
                                format(self.feed_type))
                            self.file_queue.get_full_list()
                            self.do_process = False
                    if self.do_process:
                        content_type = response.headers["content-type"]
                        self.logger.debug(
                            "URL: {} fetched Content-Type: {}".format(
                                url, content_type))
                        wd, filename = self.store_file(
                            response_content=response.content,
                            content_type=content_type,
                            url=url,
                        )
                        if filename is not None:
                            self.file_queue.put((wd, filename))
                        else:
                            self.logger.error(
                                "Unable to retrieve a filename; something went wrong when trying to save the file"
                            )
                            sys.exit(1)
            except Exception as err:
                self.logger.info(
                    "Exception encountered during download from: {}. Please check the logs for more information!"
                    .format(url))
                self.logger.error(
                    "Exception encountered during the download from: {}. Error encountered: {}"
                    .format(url, err))
                self.do_process = False

    # Hooks each concrete feed handler must implement.
    @abstractmethod
    def process_item(self, **kwargs):
        raise NotImplementedError

    @abstractmethod
    def file_to_queue(self, *args):
        raise NotImplementedError

    @abstractmethod
    def update(self, **kwargs):
        raise NotImplementedError

    @abstractmethod
    def populate(self, **kwargs):
        raise NotImplementedError
self.Related_Weakness }) self.Summary = [] self.Attack_Prerequisite = [] self.Solution_or_Mitigation = [] self.Related_Weakness = [] self.Attack_Pattern_tag = False if name == 'capec:Attack_Patterns': self.Attack_Patterns_tag = False if name == 'capec:Attack_Pattern_Catalog': self.Attack_Pattern_Catalog_tag = False # dictionary capecurl = Configuration.getCAPECDict() # make parser parser = make_parser() ch = CapecHandler() parser.setContentHandler(ch) # check modification date try: f = Configuration.getFile(capecurl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (capecurl)) i = db.getLastModified('capec') last_modified = parse_datetime(f.headers['last-modified'], ignoretz=True) if i is not None: if last_modified == i:
version_info += cpe["versionEndExcluding"] if "versionEndIncluding" in item: cpe["versionEndIncluding"] = item["versionEndIncluding"] version_info += cpe["versionEndIncluding"] sha1_hash = hashlib.sha1(cpe["cpe_2_2"].encode("utf-8") + version_info.encode("utf-8")).hexdigest() cpe["id"] = sha1_hash return cpe if __name__ == '__main__': if args.u: try: (f, r) = Configuration.getFile(Configuration.getFeedURL('cpe')) except: sys.exit( "Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cpe"))) # check modification date i = db.getLastModified('cpe') last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True) if i is not None: if last_modified == i and not args.f: print("Not modified") sys.exit(0) cpej = json.loads(f.read().decode('utf-8'))
from lib.content_handlers import CapecHandler, CWEHandler from lib.db_action import DatabaseAction # init parts of the file names to enable looped file download file_prefix = "nvdcve-1.1-" file_suffix = ".json.gz" file_mod = "modified" file_rec = "recent" date = datetime.datetime.now() year = date.year + 1 # default config defaultvalue = {"cwe": "Unknown"} cveStartYear = Configuration.getCVEStartYear() class CPEDownloads(JSONFileHandler): def __init__(self): self.feed_type = "CPE" self.prefix = "matches.item" super().__init__(self.feed_type, self.prefix) self.feed_url = Configuration.getFeedURL(self.feed_type.lower()) self.logger = logging.getLogger("CPEDownloads") @staticmethod def process_cpe_item(item=None): if item is None:
def start(self, port=None, scan=None):
    """Start the Flask web UI for browsing scan results.

    Registers the routes, error handler and Jinja template filters on a
    fresh Flask app, then runs the built-in server with host/port/debug
    taken from Configuration.

    Args:
        port: accepted for interface compatibility; the actual listen port
              comes from Configuration.getFlaskPort().
        scan: scan-result object passed through to the index template.
    """
    app = Flask(__name__, static_folder='static', static_url_path='/static')

    # routes
    @app.route('/')
    def index():
        return render_template('index.html', scan=scan)

    @app.route('/cve/<cveid>')
    def cve(cveid):
        # Fetch CVE details from the local CVE-Search API endpoint.
        host, port = Configuration.getCVESearch()
        data = (urlopen('http://%s:%s/api/cve/%s' % (host, port, cveid)).read()).decode('utf8')
        cvejson = json.loads(str(data))
        # BUG FIX: the original used `cvejson is {}`, an identity comparison
        # with a fresh dict literal that is always False, so unknown CVEs
        # never reached the 404 page. Truthiness covers both {} and null/None.
        if not cvejson:
            return page_not_found(404)
        return render_template('cve.html', cve=cvejson)

    # error handling
    @app.errorhandler(404)
    def page_not_found(e):
        return render_template('404.html'), 404

    # filters
    @app.template_filter('product')
    def product(banner):
        # Extract the product name from a service banner, if recognizable.
        if banner:
            r = make_dict(banner)
            return r['product'] if 'product' in r else 'unknown'
        else:
            return "unknown"

    @app.template_filter('toHuman')
    def humanify(cpe):
        return toHuman(cpe)

    @app.template_filter('currentTime')
    def currentTime(utc):
        return toLocalTime(utc)

    @app.template_filter('impact')
    def impact(string):
        # Map CVSS impact keywords to the CSS severity classes used by the
        # templates. NOTE(review): falls through (returns None) for any
        # other value -- presumably never happens; confirm against templates.
        if string.lower() == "none":
            return "good"
        elif string.lower() == "partial":
            return "medium"
        elif string.lower() == "complete":
            return "bad"

    @app.template_filter('vFeedName')
    def vFeedName(string):
        string = string.replace('map_', '')
        string = string.replace('cve_', '')
        return string.title()

    @app.template_filter('htmlEncode')
    def htmlEncode(string):
        return urllib.parse.quote_plus(string).lower()

    @app.template_filter('isURL')
    def isURL(string):
        urlTypes = [re.escape(x) for x in ['http://', 'https://', 'www.']]
        return re.match("^(" + "|".join(urlTypes) + ")", string)

    @app.template_filter('fromEpoch')
    def fromEpoch_filter(epoch):
        return fromEpoch(epoch)

    # debug filter
    @app.template_filter('type')
    def isType(var):
        return type(var)

    # start webserver
    host = Configuration.getFlaskHost()
    port = Configuration.getFlaskPort()
    debug = Configuration.getFlaskDebug()
    app.run(host=host, port=port, debug=debug)
# Copyright (c) 2015 Pieter-Jan Moreels - [email protected] # Imports import os import sys runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) import tarfile import shutil import sqlite3 from lib.ProgressBar import progressbar from lib.Config import Configuration vFeedurl = Configuration.getvFeedURL() vFeedstatus = Configuration.getvFeedStatus() tmppath = Configuration.getTmpdir() # connect to db db = Configuration.getMongoConnection() info = db.info # check modification date try: u = Configuration.getFile(vFeedurl) except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(vFeedurl)) i = info.find_one({'db': 'vfeed'}) if i is not None: if u.headers['last-modified'] == i['last-modified']: print("Not modified")
# Imports
import copy
import itertools
import re
import time
import traceback
import sys
from datetime import date
from datetime import datetime

from passlib.hash import pbkdf2_sha256

from lib.objects import SystemGroup, Component, Ticket
from lib.Config import Configuration as conf
from lib.Toolkit import xFromy
import lib.database.CVESearch as cvedb

# Variables
vmdb = conf.getVMpsqlConnection()  # shared psql connection for this module

# Decorators
def cursor_wrapped(func):
    """Decorator that opens a cursor on the module's psql connection,
    passes it to *func* as its first positional argument, and guarantees
    the cursor is closed afterwards.

    BUG FIX: the original closed the cursor only on the success path, so
    an exception inside *func* leaked the cursor; the close now runs in a
    ``finally`` block and the exception still propagates to the caller.
    """
    def func_wrapper(*args, **kwargs):
        cur = vmdb.cursor()
        try:
            return func(cur, *args, **kwargs)
        finally:
            cur.close()
    return func_wrapper
# init parts of the file names to enable looped file download file_prefix = "nvdcve-2.0-" file_suffix = ".xml.gz" file_mod = "modified" file_rec = "recent" # get the current year. This enables us to download all CVE's up to this year :-) date = datetime.datetime.now() year = date.year + 1 # default config defaultvalue = {} defaultvalue['cwe'] = "Unknown" cveStartYear = Configuration.getCVEStartYear() # define the CVE parser. Thanks to Meredith Patterson (@maradydd) for help on this one. class CVEHandler(ContentHandler): def __init__(self): self.cves = [] self.inCVSSElem = 0 self.inSUMMElem = 0 self.inDTElem = 0 self.inPUBElem = 0 self.inAccessvElem = 0 self.inAccesscElem = 0 self.inAccessaElem = 0 self.inCVSSgenElem = 0
# Main update loop: iterate over all configured feed sources and refresh
# each one; with -l the loop repeats, otherwise it runs exactly once.
loop = True
if args.f:
    logging("Dropping metadata")
    dropcollection("info")
while (loop):
    if args.v:
        logging("==========================")
        logging(time.strftime("%a %d %B %Y %H:%M", time.gmtime()))
        logging("==========================")
    if not args.l:
        loop = False  # single pass unless looping was requested
    newelement = 0
    for source in sources:
        if not Configuration.includesFeed(source['name']):
            continue
        # BUG FIX: the original compared strings with `is` / `is not`
        # (e.g. `source['name'] is not "redis-cache-cpe"`), which tests
        # object identity, only works by CPython string-interning accident
        # and raises SyntaxWarning on Python 3.8+. Replaced with ==/!=,
        # matching the corrected sibling version of this loop.
        if args.f and source['name'] != "redis-cache-cpe":
            logging("Dropping collection: " + source['name'])
            dropcollection(collection=source['name'])
            logging(source['name'] + " dropped")
        if source['name'] == "cpeother":
            # cpeother is optional; skip it when the collection is absent
            if "cpeother" not in db.getTableNames():
                continue
        if source['name'] != "redis-cache-cpe":
            message = 'Starting ' + source['name']
            logging(message)
            before = nbelement(collection=source['name'])
            if args.f and source['name'] == "cves":
                # full repopulate of the CVE collection via db_mgmt.py
                updater = "python3 " + os.path.join(runPath, "db_mgmt.py -p")
                subprocess.Popen((shlex.split(updater))).wait()
# WSGI entry point: wraps the CVE-Search web application in a dispatcher
# so it can be served under a configurable mount path (sub-URL).
from flask import Flask
from werkzeug.middleware.dispatcher import DispatcherMiddleware

import os
import sys
_runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(_runPath, ".."))  # make the project packages importable

from lib.Config import Configuration
from web.run import create_app
from web.set_version import _version

__version__ = _version()
config = Configuration()

app = Flask(__name__)
cveapp, cvesocketio = create_app(__version__, _runPath)
# Mount the real application under the configured path; the bare "FRAME"
# app serves everything outside that prefix (effectively 404s).
app.wsgi_app = DispatcherMiddleware(Flask("FRAME"), {config.getMountPath(): cveapp})

if __name__ == "__main__":
    app.run(host="0.0.0.0")
args = argparser.parse_args() # init parts of the file names to enable looped file download file_prefix = "nvdcve-1.0-" file_suffix = ".json.gz" file_mod = "modified" file_rec = "recent" date = datetime.datetime.now() year = date.year + 1 # default config defaultvalue = {} defaultvalue['cwe'] = "Unknown" cveStartYear = Configuration.getCVEStartYear() def get_cpe_info(cpeuri): query = {} version_info = "" if "versionStartExcluding" in cpeuri: query["versionStartExcluding"] = cpeuri["versionStartExcluding"] version_info += query["versionStartExcluding"] if "versionStartIncluding" in cpeuri: query["versionStartIncluding"] = cpeuri["versionStartIncluding"] version_info += query["versionStartIncluding"] if "versionEndExcluding" in cpeuri: query["versionEndExcluding"] = cpeuri["versionEndExcluding"] version_info += query["versionEndExcluding"] if "versionEndIncluding" in cpeuri:
import re import sre_constants import urllib from collections import defaultdict import bson import pymongo from pymongo import DESCENDING, ASCENDING from pymongo.collection import Collection from werkzeug.security import generate_password_hash, check_password_hash from lib.DatabaseLayer import sanitize from lib.DatabasePluginBase import DatabasePluginBase from lib.Config import Configuration config = Configuration() HOST = config.readSetting("Database", "Host", config.default["mongoHost"]) PORT = config.readSetting("Database", "Port", config.default["mongoPort"]) DATABASE = config.getMongoDB() USERNAME = urllib.parse.quote( config.readSetting("Database", "Username", config.default["mongoUsername"])) PASSWORD = urllib.parse.quote( config.readSetting("Database", "Password", config.default["mongoPassword"])) class MongoPlugin(DatabasePluginBase): def __init__(self): """
from dateutil.parser import parse as parse_datetime from lib.Config import Configuration from lib.Toolkit import toStringFormattedCPE, toOldCPE, isURL, vFeedName import lib.CVEs as cves import lib.DatabaseLayer as dbLayer # parse command line arguments argparser = argparse.ArgumentParser( description='Start CVE-Search web component') argparser.add_argument('-v', action='store_true', help='verbose output') args = argparser.parse_args() # variables app = Flask(__name__, static_folder='static', static_url_path='/static') app.config['MONGO_DBNAME'] = Configuration.getMongoDB() app.config['SECRET_KEY'] = str(random.getrandbits(256)) pageLength = Configuration.getPageLength() # db connectors redisdb = Configuration.getRedisVendorConnection() pageSettings = { 'pageLength': Configuration.getPageLength(), 'listLogin': Configuration.listLoginRequired(), 'minimal': True } # functions def getBrowseList(vendor):
self.titletag = False self.cpe[-1]['title'].append(self.title.rstrip()) elif name == 'references': self.referencestag = False elif name == 'reference': self.referencetag = False self.href = None # make parser parser = make_parser() ch = CPEHandler() parser.setContentHandler(ch) # check modification date try: (f, r) = Configuration.getFeedData('cpe') except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("cpe"))) i = db.getLastModified('cpe') last_modified = parse_datetime(r.headers['last-modified'], ignoretz=True) if i is not None: if last_modified == i: print("Not modified") sys.exit(0) # parse xml and store in database parser.parse(f) cpeList = [] for x in progressbar(ch.cpe): x['id'] = toStringFormattedCPE(x['name']) x['title'] = x['title'][0]
from lib.Config import Configuration from lib.DatabaseLayer import DatabaseLayer from lib.Toolkit import pad argParser = argparse.ArgumentParser(description='CPE entries importer in Redis cache') argParser.add_argument('-v', action='store_true', default=False, help='Verbose logging') argParser.add_argument('-o', action='store_true', default=False, help='Import cpeother database in Redis cache') args = argParser.parse_args() if args.o: cpe = DatabaseLayer().CPE.getAllAlternative() else: cpe = DatabaseLayer().CPE.getAll() try: r = Configuration.getRedisVendorConnection() except: sys.exit(1) for e in cpe: try: if not args.o: if e.id.count(':') > 4: value = ":".join(e.id.split(':')[:6]) (prefix, cpeversion, cpetype, vendor, product, version) = pad(value.split(':'),6) else: (prefix, cpeversion, cpetype, vendor, product, version, *remaining) = pad(e.id.split(':'),6) except Exception as ex: print(ex) pass try:
loop = True if args.f: logger.info("Dropping metadata") dropcollection("info") while loop: if args.v: logger.info("==========================") logger.info(time.strftime("%a %d %B %Y %H:%M", time.gmtime())) logger.info("==========================") if not args.l: loop = False newelement = 0 for source in sources: if (not Configuration.includesFeed(source["name"]) and source["name"] != "redis-cache-cpe"): continue if args.f and source["name"] != "redis-cache-cpe": logger.info("Dropping collection: " + source["name"]) dropcollection(collection=source["name"]) logger.info(source["name"] + " dropped") if source["name"] == "cpeother": if "cpeother" not in getTableNames(): continue if source["name"] != "redis-cache-cpe": logger.info("Starting " + source["name"]) before = nbelement(collection=source["name"]) if args.f and source["name"] == "cpe": cpd = CPEDownloads()
# Copyright (c) 2016 Pieter-Jan Moreels import os import sys import shutil runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(runPath, "..")) from lib.Config import Configuration import lib.DatabaseLayer as db import csv import argparse # dictionary tmppath = Configuration.getTmpdir() argparser = argparse.ArgumentParser( description='Populate/update the exploitdb ref database') argparser.add_argument('-v', action='store_true', help='verbose output', default=False) args = argparser.parse_args() try: (f, r) = Configuration.getFeedData('exploitdb') except: sys.exit("Cannot open url %s. Bad URL or not connected to the internet?" % (Configuration.getFeedURL("exploitdb")))
# # Software is free software released under the "Original BSD license" import sys try: import os _runPath = os.path.dirname(os.path.realpath(__file__)) sys.path.append(os.path.join(_runPath, "..")) from passlib.hash import pbkdf2_sha256 from lib.Config import Configuration as conf except Exception as e: print(e) sys.exit("Dependencies missing! First run the install script.") import traceback rounds = conf.getUserEncryptionRounds() saltLength = conf.getUserSaltLength() try: conn=conf.getVMpsqlConnection() cur = conn.cursor() teams=[ ["Team A", "TestTeam one"], ["Team B", "TestTeam two"], ["engineers", "", ] ] users=[ ["test", "firstname", "lastname", "test", [["Team A", "reviewer" ],["Team B", "reviewer"] ]], ["teamleader", "John", "Doe", "test", [["Team A", "teamleader" ],["Team A", "reviewer"], ["Team B", "reviewer"]]], ["engineer", "Foo", "Bar", "test", [["-", "db-engineer"] ]], ["management", "Foo", "Bar", "test", [["-", "management" ] ]]] systemgroups=[ ["DNS", "Team A", "Our DNS Servers", ["cpe:2.3:a:libssh:libssh:2.3", "cpe:2.3:a:openssl:openssl:1.0.1e",
import os
import sys
#runPath = os.getcwd()  # working directory
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))  # add the parent dir to the import path

from lib.Config import Configuration
from lib.Toolkit import pad
import lib.DatabaseLayer as db  # cve-search lib for database access
#cpe = db.getCPEs()
from redis import exceptions as redisExceptions

try:
    r = Configuration.getRedisVendorConnection()
except:
    # no usable Redis connection -> nothing to do
    sys.exit(1)

# for key in r.scan_iter():
#     print (key)
#1kxun #qianxun_yingshi
# Redis db 10 (cpe)
# Redis db 11 (notification)

def vendors():  # returns all vendors
    """Return the union of all vendor names stored in the Redis vendor
    cache under the 'o' (OS), 'a' (application) and 'h' (hardware) sets."""
    return r.sunion('o', 'a', 'h')
#r.sunion('t:/a', 't:/h', 't:/o')