def getConfigurationFromCPE(self, cpe: str, cve: CVE):
    '''
    Reads the given CPE part and assigns the CPE to the given CVE
    if it doesn't already have that configuration
    '''
    partRegex = r'cpe:2.3:\/?([\w])+:'
    match = re.match(partRegex, cpe)
    cpe = CPE(cpe)
    if not match:
        print("Unrecognized CPE " + str(cpe))
    else:
        matches = match.groups()
        if len(matches) > 0:
            part = matches[0]
            if part == "a" and (cpe, "Application") not in cve.configurations:
                cve.configurations.append((cpe, "Application"))
            elif part == "h" and (cpe, "Hardware") not in cve.configurations:
                cve.configurations.append((cpe, "Hardware"))
            elif part == "o" and (cpe, "Operating System") not in cve.configurations:
                cve.configurations.append((cpe, "Operating System"))
def parse(self, entry: typing.Any):
    try:
        version_exact = CPE(entry['cpe23Uri']).get_version()[0] or None
    except NotImplementedError:
        # workaround for invalid CPE string entry, see [#6]
        # [#6]: https://github.com/fabric8-analytics/nvdlib/pull/6
        version_exact = None

    if version_exact in ['-', '*']:
        # same as missing entry
        version_exact = None

    version_end_excl = entry.get('versionEndExcluding', None)
    version_end_incl = entry.get('versionEndIncluding', None)
    version_start_incl = entry.get('versionStartIncluding', None)
    version_start_excl = entry.get('versionStartExcluding', None)

    version_range = utils.get_victims_notation(
        (version_exact,
         version_end_excl,
         version_end_incl,
         version_start_incl,
         version_start_excl))

    return self.ConfigurationsNode(vulnerable=entry['vulnerable'],
                                   cpe=entry['cpe23Uri'],
                                   version_range=version_range)
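A minimal sketch of the kind of NVD "cpe_match" entry this parse() method expects, inferred only from the keys the code above actually reads; the concrete values are illustrative, not from the original source.

# Hypothetical input for parse(); keys mirror what the method reads above.
sample_entry = {
    'vulnerable': True,
    'cpe23Uri': 'cpe:2.3:a:apache:struts:*:*:*:*:*:*:*:*',
    'versionStartIncluding': '2.3.0',
    'versionEndExcluding': '2.3.35',
}
# parse(sample_entry) would build a ConfigurationsNode whose version_range is
# derived from these start/end bounds via utils.get_victims_notation().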
def manage_vulner_script(self, test, dupes, script_element, endpoint):
    for component_element in script_element.findall('table'):
        component_cpe = CPE(component_element.attrib['key'])
        for vuln in component_element.findall('table'):
            description = "### Vulnerability\n\n"
            description += "**CPE**: " + str(component_cpe) + "\n"
            vuln_attributes = dict()
            for elem in vuln.findall('elem'):
                vuln_attributes[elem.attrib['key'].lower()] = elem.text
                description += "**" + elem.attrib['key'] + "**: " + elem.text + "\n"
            cve = vuln_attributes['id']
            severity = self.convert_cvss_score(vuln_attributes['cvss'])
            dupe_key = cve
            if dupe_key in dupes:
                find = dupes[dupe_key]
                if description is not None:
                    find.description += description
            else:
                find = Finding(
                    title=cve,
                    cve=cve,
                    test=test,
                    active=False,
                    verified=False,
                    description=description,
                    severity=severity,
                    mitigation="N/A",
                    impact="No impact provided",
                    numerical_severity=Finding.get_numerical_severity(severity),
                    component_name=component_cpe.get_product()[0]
                    if len(component_cpe.get_product()) > 0 else '',
                    component_version=component_cpe.get_version()[0]
                    if len(component_cpe.get_version()) > 0 else '',
                )
                find.unsaved_endpoints = list()
                dupes[dupe_key] = find
            find.unsaved_endpoints.append(endpoint)
def get_description(self, address, name, cpe):
    '''Define a description based on hostname and CPE'''
    if name:
        return name
    else:
        c = CPE(cpe[0], CPE.VERSION_2_3)
        vendor = c.get_vendor()[0]
        if self.tacacs and vendor == 'cisco':
            try:
                client = SSHClient()
                client.set_missing_host_key_policy(AutoAddPolicy())
                client.connect(address,
                               username=self.tacacs['user'],
                               password=self.tacacs['password'])
                stdin, stdout, stderr = client.exec_command(self.tacacs['command'])
                return '{}:{}'.format(
                    vendor.lower(),
                    re.search(self.tacacs['regex'],
                              str(stdout.read().decode('utf-8'))).group(self.tacacs['regroup']))
            except (AuthenticationException, SSHException, NoValidConnectionsError,
                    TimeoutError, ConnectionResetError):
                pass
        return '{}.{}.{}'.format(c.get_vendor()[0], c.get_product()[0], c.get_version()[0])
def get_component_name_and_version_from_dependency(self, dependency, related_dependency, namespace):
    component_name, component_version = None, None
    # big try catch to avoid crashing the parser on some unexpected stuff
    try:
        identifiers_node = dependency.find(namespace + 'identifiers')
        if identifiers_node:
            # <identifiers>
            #     <identifier type="cpe" confidence="HIGHEST">
            #         <name>cpe:/a:apache:xalan-java:2.7.1</name>
            #         <url>https://web.nvd.nist.gov/view/vuln/search-results?adv_search=true&cves=on&cpe_version=cpe%3A%2Fa%3Aapache%3Axalan-java%3A2.7.1</url>
            #     </identifier>
            #     <identifier type="maven" confidence="HIGHEST">
            #         <name>xalan:serializer:2.7.1</name>
            #         <url>https://search.maven.org/remotecontent?filepath=xalan/serializer/2.7.1/serializer-2.7.1.jar</url>
            #     </identifier>
            # </identifiers>

            # newly found in v6.0.0
            # <identifiers>
            #     <package confidence="HIGH">
            #         <id>pkg:maven/nl.isaac.client.offerservice/[email protected]</id>
            #         <url>https://ossindex.sonatype.org/component/pkg:maven/nl.isaac.client.offerservice/[email protected]</url>
            #     </package>
            # </identifiers>
            # <identifiers>
            #     <package confidence="HIGHEST">
            #         <id>pkg:npm/[email protected]</id>
            #         <url>https://ossindex.sonatype.org/component/pkg:npm/[email protected]</url>
            #     </package>
            # </identifiers>
            package_node = identifiers_node.find('.//' + namespace + 'package')
            if package_node:
                logger.debug('package string: ' + self.get_field_value(package_node, 'id', namespace))
                id = self.get_field_value(package_node, 'id', namespace)
                purl = PackageURL.from_string(id)
                purl_parts = purl.to_dict()
                component_name = purl_parts['namespace'] + ':' if purl_parts['namespace'] and len(purl_parts['namespace']) > 0 else ''
                component_name += purl_parts['name'] if purl_parts['name'] and len(purl_parts['name']) > 0 else ''
                component_name = component_name if component_name else None
                component_version = purl_parts['version'] if purl_parts['version'] and len(purl_parts['version']) > 0 else ''
                return component_name, component_version

            cpe_node = identifiers_node.find('.//' + namespace + 'identifier[@type="cpe"]')
            if cpe_node:
                # logger.debug('cpe string: ' + self.get_field_value(cpe_node, 'name'))
                cpe = CPE(self.get_field_value(cpe_node, 'name'))
                component_name = cpe.get_vendor()[0] + ':' if len(cpe.get_vendor()) > 0 else ''
                component_name += cpe.get_product()[0] if len(cpe.get_product()) > 0 else ''
                component_name = component_name if component_name else None
                component_version = cpe.get_version()[0] if len(cpe.get_version()) > 0 else None
                # logger.debug('get_edition: ' + str(cpe.get_edition()))
                # logger.debug('get_language: ' + str(cpe.get_language()))
                # logger.debug('get_part: ' + str(cpe.get_part()))
                # logger.debug('get_software_edition: ' + str(cpe.get_software_edition()))
                # logger.debug('get_target_hardware: ' + str(cpe.get_target_hardware()))
                # logger.debug('get_target_software: ' + str(cpe.get_target_software()))
                # logger.debug('get_vendor: ' + str(cpe.get_vendor()))
                # logger.debug('get_update: ' + str(cpe.get_update()))
                return component_name, component_version

            maven_node = identifiers_node.find('.//' + namespace + 'identifier[@type="maven"]')
            if maven_node:
                # logger.debug('maven_string: ' + self.get_field_value(maven_node, 'name'))
                maven_parts = self.get_field_value(maven_node, 'name', namespace).split(':')
                # logger.debug('maven_parts:' + str(maven_parts))
                if len(maven_parts) == 3:
                    component_name = maven_parts[0] + ':' + maven_parts[1]
                    component_version = maven_parts[2]
                    return component_name, component_version

            # TODO
            # include identifiers in description?
            # <identifiers>
            #     <package confidence="HIGH">
            #         <id>pkg:maven/org.dom4j/[email protected]</id>
            #         <url>https://ossindex.sonatype.org/component/pkg:maven/org.dom4j/[email protected]</url>
            #     </package>
            #     <vulnerabilityIds confidence="HIGHEST">
            #         <id>cpe:2.3:a:dom4j_project:dom4j:2.1.1.hat-00001:*:*:*:*:*:*:*</id>
            #         <url>https://nvd.nist.gov/vuln/search/results?form_type=Advanced&results_type=overview&search_type=all&cpe_vendor=cpe%3A%2F%3Adom4j_project&cpe_product=cpe%3A%2F%3Adom4j_project%3Adom4j&cpe_version=cpe%3A%2F%3Adom4j_project%3Adom4j%3A2.1.1.hat-00001</url>
            #     </vulnerabilityIds>

        # TODO: what happens when there are multiple evidenceCollected nodes with product or version as type?
        evidence_collected_node = dependency.find(namespace + 'evidenceCollected')
        if evidence_collected_node:
            # <evidenceCollected>
            #     <evidence type="product" confidence="HIGH">
            #         <source>file</source>
            #         <name>name</name>
            #         <value>jquery</value>
            #     </evidence>
            #     <evidence type="version" confidence="HIGH">
            #         <source>file</source>
            #         <name>version</name>
            #         <value>3.1.1</value>
            #     </evidence>
            # This will find the first product and version node; if there are multiple, it may not pick the best.
            # Since 6.0.0, however, it seems like there's always a packageurl above, so it's unclear whether we
            # need more logic here.
            product_node = evidence_collected_node.find('.//' + namespace + 'evidence[@type="product"]')
            if product_node:
                component_name = self.get_field_value(product_node, 'value', namespace)
                version_node = evidence_collected_node.find('.//' + namespace + 'evidence[@type="version"]')
                if version_node:
                    component_version = self.get_field_value(version_node, 'value', namespace)
                return component_name, component_version
    except:
        logger.exception('error parsing component_name and component_version')
        logger.debug('dependency: %s',
                     ElementTree.tostring(dependency, encoding='utf8', method='xml'))

    return component_name, component_version
def process_running_on_configuration(self, node):
    simple_cpes = defaultdict(lambda: defaultdict(set))
    running_on = defaultdict(lambda: defaultdict(set))
    if 'children' not in node:
        # TODO: if this happens, we would need to add a constraint in FMR saying
        # that X cpe needs Y cpe and vice versa.
        for complex_cpe in node['cpe_match']:
            aux = CPE(complex_cpe['cpe23Uri'])
            simple_cpes[aux.get_vendor()[0]][aux.get_product()[0]].update(self.expand(complex_cpe))
    else:
        for subnode in node['children']:
            isVulnerable = subnode['cpe_match'][0]['vulnerable']
            if isVulnerable:
                # simple cpe
                for complex_cpe in subnode['cpe_match']:
                    aux = CPE(complex_cpe['cpe23Uri'])
                    simple_cpes[aux.get_vendor()[0]][aux.get_product()[0]].update(self.expand(complex_cpe))
            else:
                # running on
                for complex_cpe in subnode['cpe_match']:
                    aux = CPE(complex_cpe['cpe23Uri'])
                    running_on[aux.get_vendor()[0]][aux.get_product()[0]].update(self.expand(complex_cpe))
    return simple_cpes, running_on
def process_basic_configuration(self, node):
    res = defaultdict(lambda: defaultdict(set))
    for complex_cpe in node['cpe_match']:
        aux = CPE(complex_cpe['cpe23Uri'])
        res[aux.get_vendor()[0]][aux.get_product()[0]].update(self.expand(complex_cpe))
    return res
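A hedged sketch of the NVD configuration node shape these two helpers consume, limited to the keys the code above actually touches ('cpe_match', 'children', 'cpe23Uri', 'vulnerable'); the concrete CPE value is illustrative only.

# Hypothetical NVD JSON configuration node for process_basic_configuration().
sample_node = {
    'cpe_match': [
        {'vulnerable': True,
         'cpe23Uri': 'cpe:2.3:o:linux:linux_kernel:4.19:*:*:*:*:*:*:*'},
    ],
    # a "running on" configuration would instead carry a 'children' list of
    # sub-nodes, each with its own 'cpe_match' entries
}
# process_basic_configuration(sample_node) would collect the expanded versions
# under res['linux']['linux_kernel'] (via self.expand, not shown here).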
def get_component_name_and_version_from_dependency(self, dependency, related_dependency, namespace):
    identifiers_node = dependency.find(namespace + 'identifiers')
    if identifiers_node:
        # analyzing identifiers from the more generic to the most specific
        package_node = identifiers_node.find('.//' + namespace + 'package')
        if package_node:
            id = package_node.findtext(f'{namespace}id')
            purl = PackageURL.from_string(id)
            purl_parts = purl.to_dict()
            component_name = purl_parts['namespace'] + ':' if purl_parts['namespace'] and len(purl_parts['namespace']) > 0 else ''
            component_name += purl_parts['name'] if purl_parts['name'] and len(purl_parts['name']) > 0 else ''
            component_name = component_name if component_name else None
            component_version = purl_parts['version'] if purl_parts['version'] and len(purl_parts['version']) > 0 else ''
            return component_name, component_version

        # vulnerabilityIds_node = identifiers_node.find('.//' + namespace + 'vulnerabilityIds')
        # if vulnerabilityIds_node:
        #     id = vulnerabilityIds_node.findtext(f'{namespace}id')
        #     cpe = CPE(id)
        #     component_name = cpe.get_vendor()[0] + ':' if len(cpe.get_vendor()) > 0 else ''
        #     component_name += cpe.get_product()[0] if len(cpe.get_product()) > 0 else ''
        #     component_name = component_name if component_name else None
        #     component_version = cpe.get_version()[0] if len(cpe.get_version()) > 0 else None
        #     return component_name, component_version

        cpe_node = identifiers_node.find('.//' + namespace + 'identifier[@type="cpe"]')
        if cpe_node:
            id = cpe_node.findtext(f'{namespace}name')
            cpe = CPE(id)
            component_name = cpe.get_vendor()[0] + ':' if len(cpe.get_vendor()) > 0 else ''
            component_name += cpe.get_product()[0] if len(cpe.get_product()) > 0 else ''
            component_name = component_name if component_name else None
            component_version = cpe.get_version()[0] if len(cpe.get_version()) > 0 else None
            return component_name, component_version

        maven_node = identifiers_node.find('.//' + namespace + 'identifier[@type="maven"]')
        if maven_node:
            maven_parts = maven_node.findtext(f'{namespace}name').split(':')
            # logger.debug('maven_parts:' + str(maven_parts))
            if len(maven_parts) == 3:
                component_name = maven_parts[0] + ':' + maven_parts[1]
                component_version = maven_parts[2]
                return component_name, component_version

    # TODO: what happens when there are multiple evidenceCollected nodes with product or version as type?
    evidence_collected_node = dependency.find(namespace + 'evidenceCollected')
    if evidence_collected_node:
        # <evidenceCollected>
        #     <evidence type="product" confidence="HIGH">
        #         <source>file</source>
        #         <name>name</name>
        #         <value>jquery</value>
        #     </evidence>
        #     <evidence type="version" confidence="HIGH">
        #         <source>file</source>
        #         <name>version</name>
        #         <value>3.1.1</value>
        #     </evidence>
        # This will find the first product and version node; if there are multiple, it may not pick the best.
        # Since 6.0.0, however, it seems like there's always a packageurl above, so it's unclear whether we
        # need more logic here.
        product_node = evidence_collected_node.find('.//' + namespace + 'evidence[@type="product"]')
        if product_node:
            component_name = product_node.findtext(f'{namespace}value')
            version_node = evidence_collected_node.find('.//' + namespace + 'evidence[@type="version"]')
            if version_node:
                component_version = version_node.findtext(f'{namespace}value')
            return component_name, component_version

    return None, None
class Service(object):
    """
    Represents service/application/operating system

    Contains basic information: name, version
    """
    _CPE_SPECIAL = r"\!|\"|\;|\#|\$|\%|\&|\'|\(|\)|\+|\,|\/|\:|\<|\=|\>|\@|\[|\]|\^|\`|\{|\||\}|\~|\-"
    _ESCAPE_CPE = re.compile(_CPE_SPECIAL)
    _UNESCAPE_CPE = re.compile(r"(\\({0}))".format(_CPE_SPECIAL))

    def __init__(self, name=None, version=None, cpe=None):
        self.name = name
        self.version = version
        self._cpe = None
        self.cpe = cpe

    @property
    def cpe(self) -> CPE:
        """
        CPE representation of service
        """
        return self._cpe

    @cpe.setter
    def cpe(self, value):
        if value:
            self._cpe = CPE(value)

    @property
    def cpe_vendor(self) -> str:
        """
        Get vendor name based on CPE
        """
        if isinstance(self._cpe, CPE):
            return self._unescape_cpe(" ".join(self._cpe.get_vendor()))

    @property
    def name_with_version(self) -> str:
        """
        Service name with version included
        """
        if self.version is None or self.name is None:
            return None
        return "{name} {version}".format(name=self.name, version=self.version)

    @property
    def cpe_product(self) -> str:
        """
        Get product name based on CPE
        """
        if isinstance(self._cpe, CPE):
            return self._unescape_cpe(" ".join(self._cpe.get_product()))

    @property
    def cpe_version(self) -> str:
        """
        Get product version based on CPE
        """
        if isinstance(self._cpe, CPE):
            return self._unescape_cpe(" ".join(self._cpe.get_version()))

    def __str__(self):
        return "{name} {version}".format(name=self.name or '', version=self.version or '').strip()

    def copy(self) -> 'Service':
        """
        Make a copy of the service
        """
        return_value = Service(name=self.name, version=self.version)
        return_value._cpe = self._cpe
        return return_value

    @classmethod
    def _escape_cpe(cls, text: str) -> str:
        """
        Special characters should be escaped before building a CPE string
        """
        text = text.lower()

        def _replace(txt):
            return r"\{0}".format(txt.group())

        if " " in text:
            raise ValueError("{0}: Space is not allowed in CPE string".format(text))

        return cls._ESCAPE_CPE.sub(_replace, text)

    @classmethod
    def _unescape_cpe(cls, text: str) -> str:
        text = text.lower()

        def _replace(txt):
            return txt.group()[1]

        return cls._UNESCAPE_CPE.sub(_replace, text)

    @classmethod
    def validate_cpe_arguments(cls, vendor: str, product: str, version: str) -> (str, str, str):
        """
        Validate cpe arguments, and fix as much as possible
        """
        if " " in version:
            if product.lower() == "ios":
                version = version.split(" ")[0].strip(",")

        if vendor == "*":
            if product.lower() == "ios":
                vendor = "cisco"

        return cls._escape_cpe(vendor), cls._escape_cpe(product), cls._escape_cpe(version)

    @classmethod
    def build_cpe(cls, part: 'CPEType', vendor: str = '*', product: str = '*', version: str = '*') -> str:
        """
        Build a cpe 2.3 string based on vendor, product, version and part
        """
        vendor, product, version = cls.validate_cpe_arguments(vendor=vendor, product=product, version=version)
        return "cpe:2.3:{part}:{vendor}:{product}:{version}:*:*:*:*:*:*:*".format(
            part=str(part.value), vendor=vendor, product=product, version=version)
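A hedged usage sketch for the Service class above; it assumes only the class as shown and the cpe package's CPE type, and the nginx values are purely illustrative.

# Hypothetical usage of Service; not part of the original source.
svc = Service(name="nginx", version="1.18.0",
              cpe="cpe:2.3:a:nginx:nginx:1.18.0:*:*:*:*:*:*:*")
print(svc.name_with_version)                             # nginx 1.18.0
print(svc.cpe_vendor, svc.cpe_product, svc.cpe_version)  # nginx nginx 1.18.0
# build_cpe() additionally needs a CPEType-like enum member whose .value is
# "a", "h" or "o"; that enum is not shown in this snippet.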
def get_findings(self, filename, test: Test):
    content = filename.read()
    if type(content) is bytes:
        content = content.decode('utf-8')
    csv.field_size_limit(int(sys.maxsize / 10))  # the request/resp are big
    reader = csv.DictReader(io.StringIO(content))
    dupes = dict()
    for row in reader:
        # manage severity from two possible columns 'Severity' and 'Risk'
        severity = 'Info'
        if 'Severity' in row:
            severity = self._convert_severity(row.get('Severity'))
        elif 'Risk' in row:
            severity = self._convert_severity(row.get('Risk'))
        # manage title from two possible columns 'Name' and 'Plugin Name'
        title = row.get('Name')
        if title is None and 'Plugin Name' in row:
            title = row.get('Plugin Name')
        # special case to skip empty titles
        if not title:
            continue
        description = row.get('Synopsis')
        mitigation = str(row.get('Solution'))
        impact = row.get('Description', 'N/A')
        references = row.get('See Also', 'N/A')
        dupe_key = severity + title + row.get('Host', 'No host') + str(
            row.get('Port', 'No port')) + row.get('Synopsis', 'No synopsis')
        detected_cve = self._format_cve(str(row.get('CVE')))
        cve = None
        if detected_cve:
            # FIXME support more than one CVE in Nessus CSV parser
            cve = detected_cve[0]
            if len(detected_cve) > 1:
                LOGGER.warning(
                    "more than one CVE for a finding. NOT supported by Nessus CSV parser")
        if dupe_key in dupes:
            find = dupes[dupe_key]
            if 'Plugin Output' in row:
                find.description += row.get('Plugin Output')
        else:
            if 'Plugin Output' in row:
                description = description + str(row.get('Plugin Output'))
            find = Finding(title=title,
                           test=test,
                           cve=cve,
                           description=description,
                           severity=severity,
                           mitigation=mitigation,
                           impact=impact,
                           references=references)
            # manage CVSS vector (only v3.x for now)
            if 'CVSS V3 Vector' in row and '' != row.get('CVSS V3 Vector'):
                find.cvssv3 = CVSS3(
                    'CVSS:3.0/' + str(row.get('CVSS V3 Vector'))).clean_vector(output_prefix=False)
            # manage CPE data
            detected_cpe = self._format_cpe(str(row.get('CPE')))
            if detected_cpe:
                # FIXME support more than one CPE in Nessus CSV parser
                if len(detected_cpe) > 1:
                    LOGGER.warning(
                        "more than one CPE for a finding. NOT supported by Nessus CSV parser")
                cpe_decoded = CPE(detected_cpe[0])
                find.component_name = cpe_decoded.get_product()[0] if len(
                    cpe_decoded.get_product()) > 0 else None
                find.component_version = cpe_decoded.get_version()[0] if len(
                    cpe_decoded.get_version()) > 0 else None
            find.unsaved_endpoints = list()
            dupes[dupe_key] = find
        # manage endpoints
        endpoint = Endpoint(host='localhost')
        if 'Host' in row:
            endpoint.host = row.get('Host')
        elif 'IP Address' in row:
            endpoint.host = row.get('IP Address')
        endpoint.port = row.get('Port')
        if 'Protocol' in row:
            endpoint.protocol = row.get('Protocol').lower()
        find.unsaved_endpoints.append(endpoint)
    return list(dupes.values())
def on_rx_worklist_bind_physic(msg, obj):
    """
    obj = MsgWorklistBindPhysical
    1 query exist?
    2 exist, status right?
    3 update sn + type
    """
    ret = ERR_FAIL       # default
    msg_rsp = msg + 2    # default fail
    obj_database = obj

    for nwf in [1]:
        # check args
        if (not isinstance(obj, MsgWorklistBindPhysical)):
            log.app_err("obj is not MsgWorklistBindPhysical")
            ret = ERR_FATAL
            break

        # 1 query exist?
        id_ = obj.id_
        #worklist_id = obj.id_
        #worklist_id = CpeUserWorklist.m_dict_desc_id[id_]
        """
        dict_col = dict(WORKLIST_DESC='', WORKLIST_NAME='', STATUS='')
        dict_data = {}
        dict_data['columns'] = dict_col
        dict_data['condition'] = 'WORKLIST_ID=%s' % worklist_id
        flag = operate_db('WORKLIST', 'SELECT', dict_data)
        if not flag:
        """
        obj_database = restore_acs_part_worklist(id_)
        if (obj_database is None):
            desc = "id(%s) is not exist." % (id_)
            log.app_err(desc)
            obj_database = obj
            obj_database.dict_ret["str_result"] = desc
            break
        """
        obj_database.worklist_name = dict_col['WORKLIST_NAME']
        obj_database.status = dict_col['STATUS']
        """
        log.app_info("id=%s, worklist_name=%s" % (id_, obj_database.worklist_name))
        worklist_id = obj_database.worklist_id

        # 2 status right?
        status_expect = [WORK_LIST_STATUS_BUILD, WORK_LIST_STATUS_BIND]
        if (obj_database.status not in status_expect):
            desc = "worklist status is %s, not in (%s)" % (obj_database.status, status_expect)
            log.app_err(desc)
            obj_database.dict_ret["str_result"] = desc
            break

        # 3 update
        obj_database.status = WORK_LIST_STATUS_BIND
        obj_database.sn = obj.sn  # be careful, cpe(sn)'s domain is delayed until exec start
        obj_database.type_ = WORK_LIST_TYPE_PHISIC
        obj_database.time_bind = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        # mysql
        """
        update_acs_worklist(obj_database, "STATUS", obj_database.status)
        update_acs_worklist(obj_database, "SN", obj_database.sn)
        update_acs_worklist(obj_database, "WORKLIST_TYPE", obj_database.type_)
        update_acs_worklist(obj_database, "TIME_BIND", obj_database.time_bind)
        """
        from cpe import CPE
        cpe = CPE.get_cpe(obj_database.sn)
        dict_col = {}
        dict_data = {}
        dict_col['STATUS'] = obj_database.status
        dict_col['CPE_ID'] = cpe.get_cpe_id()
        dict_col['SN'] = obj_database.sn
        dict_col['WORKLIST_TYPE'] = obj_database.type_
        dict_col['TIME_BIND'] = obj_database.time_bind
        if dict_col:  # only update the database when the dict is not empty
            dict_data['columns'] = dict_col
            dict_data['condition'] = 'WORKLIST_ID=%s' % worklist_id
            operate_db('WORKLIST', 'UPDATE', dict_data)

        ret = ERR_SUCCESS
        msg_rsp = msg + 1

    return ret, msg_rsp, obj_database
def processRequest(self, clientsock):
    # 1st receive fixed packet length
    msg = ''
    while len(msg) < 4:
        chunk = clientsock.recv(4 - len(msg))
        if chunk == '':
            raise RuntimeError, "socket connection broken"
        msg = msg + chunk
    res = struct.unpack('<i', msg)
    SimulatorLogger.debug("Simulator will receive %d bytes from client", res[0])

    # read data from network
    cmd = simuProtocol_pb2.clientCommand()
    networkData = ''
    while len(networkData) < res[0]:
        rawData = clientsock.recv(res[0] - len(networkData))
        if rawData == '':
            raise RuntimeError, "socket connection broken"
        networkData += rawData
    cmd.ParseFromString(networkData)

    simAnswer = simuProtocol_pb2.simulatorAnswer()

    if cmd.id == CMD_SIMULATOR_START:
        SimulatorLogger.info("SIMULATOR_START cmd received...")
        self.simulator.simulatorStatus = RUNNING
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_STOP:
        SimulatorLogger.info("SIMULATOR_STOP cmd received...")
        self.simulator.simulatorStatus = STOPPED
        self.simulator.machine.cms = {}
        self.simulator.machine.IPcms = {}
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_EXIT:
        SimulatorLogger.info("SIMULATOR_EXIT cmd received...")
        self.simulator.simulatorStatus = STOPPED
        self.simulator.doLoop = False
        self.simulator.signal(simMessage('exit', mac='00:00:00:00:00:00'))
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_FLUSHCMS:
        SimulatorLogger.info("SIMULATOR_FLUSHCMS cmd received...")
        self.simulator.machine.cms = {}
        self.simulator.machine.IPcms = {}
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_FLUSHCMTS:
        SimulatorLogger.info("SIMULATOR_FLUSHCMTS cmd received...")
        self.simulator.machine.IPcmts = {}
        self.simulator.machine.cmts = {}
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_GETCMIP:
        """ return, for a given mac address (specified in cmMac), its IP in ipCM """
        SimulatorLogger.info("SIMULATOR_GETCMIP cmd received...")
        item = cmd.devices._values[0]
        if (self.simulator.machine.cms.has_key(item.cmMac)):
            dev = self.simulator.machine.cms[item.cmMac]
            simAnswer.ipCM = dev.ip
            SimulatorLogger.info("SIMULATOR_GETCMIP IP assigned=%s", simAnswer.ipCM)
            simAnswer.id = ANS_OK
        else:
            SimulatorLogger.error("SIMULATOR_GETCMIP did not find device %s", item.cmMac)
            simAnswer.id = ANS_ERR
    elif cmd.id == CMD_SIMULATOR_SHOWCMS:
        """ return a list of mac addresses registered into the simulator """
        SimulatorLogger.info("SIMULATOR_SHOWCMS cmd received...")
        for mac in self.simulator.machine.cms.iterkeys():
            simAnswer.macCms.append(mac)
        simAnswer.id = ANS_SHOWCMS
    elif cmd.id == CMD_SIMULATOR_SHOWCMS_DETAILED:
        SimulatorLogger.debug("SIMULATOR_SHOWCMS_DETAILED cmd received...")
        for mac, dev in self.simulator.machine.cms.iteritems():
            item = simAnswer.cablemodems.add()
            item.cmMac = mac
            item.cmtsMac = ''
            item.ip = dev.ip
            if dev.cmTimers.has_key('dhcp_discover'):
                item.timer_dhcp_discover = dev.cmTimers['dhcp_discover']
            if dev.cmTimers.has_key('dhcp_offer'):
                item.timer_dhcp_offer = dev.cmTimers['dhcp_offer']
            if dev.cmTimers.has_key('dhcp_request'):
                item.timer_dhcp_request = dev.cmTimers['dhcp_request']
            if dev.cmTimers.has_key('dhcp_ack'):
                item.timer_dhcp_ack = dev.cmTimers['dhcp_ack']
        simAnswer.id = ANS_SHOWCMS_DETAILED
    elif cmd.id == CMD_SIMULATOR_GETAMOUNTCMS_WITH_IP:
        SimulatorLogger.debug("SIMULATOR_GETAMOUNTCMS_WITH_IP cmd received...")
        cmsWithIP = 0
        for mac, dev in self.simulator.machine.cms.iteritems():
            if (dev.ip != "0.0.0.0"):
                cmsWithIP += 1
        simAnswer.ipCM = str(cmsWithIP)
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_GETAMOUNTCMS:
        SimulatorLogger.info("SIMULATOR_GETAMOUNTCMS cmd received...")
        simAnswer.ipCM = str(len(self.simulator.machine.cms))
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_SENDMSG:
        SimulatorLogger.debug("SIMULATOR_SENDMSG cmd received...")
        for index in cmd.devices._values:
            SimulatorLogger.debug("%s :: signal=%s", index.cmMac, index.msg)
            self.simulator.signal(simMessage(index.msg, mac=index.cmMac))
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_GETSTATUS_TFTP:
        cmCounter = 0
        for mac, dev in self.simulator.machine.cms.iteritems():
            if (dev.tftp.bootfileSize > 0):
                cmCounter += 1
        self.answer.msg = cmCounter
        self.answer.setCmdId(ANS_GETSTATUS_TFTP)
    elif cmd.id == CMD_SIMULATOR_GETSTATUS:
        # XXX not implemented...
        self.answer.setCmdId(ANS_GETSTATUS)
    elif cmd.id == CMD_SIMULATOR_SHOWCMTS:
        SimulatorLogger.info("SIMULATOR_SHOWCMTS cmd received...")
        for mac in self.simulator.machine.cmts.iterkeys():
            simAnswer.macCmts.append(mac)
        simAnswer.id = ANS_SHOWCMTS
    elif cmd.id == CMD_SIMULATOR_ADDCMTS:
        for index in cmd.CMTSDevices._values:
            newcmts = cmts.CMTS(CMTSName=index.cmtsName,
                                macaddress=index.cmtsMac,
                                ip=index.cmtsIP)
            cmtsOperation = self.simulator.add_cmts(newcmts)
            SimulatorLogger.info("SIMULATOR_ADDCMTS cmd received (%s:%s)=%s",
                                 index.cmtsMac, index.cmtsIP, cmtsOperation)
            if (cmtsOperation and index.helperAddressIP):
                # turn on CMTS (by default CMTS will be on)
                self.simulator.signal(simMessage("power_on", mac=index.cmtsMac))
                SimulatorLogger.info(
                    "SIMULATOR_ADDCMTS setting Helper Address Parameters (%s:%s)",
                    index.helperAddressIP, index.helperAddressMAC)
                opResult = newcmts.setDhcpServer(index.helperAddressIP, index.helperAddressMAC)
            else:
                opResult = ANS_ERR
                SimulatorLogger.error("Invalid CM Mac(%s). CPE %s could not be registered",
                                      index.cmMac, index.cmMac)
        simAnswer.id = opResult
    elif cmd.id == CMD_SIMULATOR_ADDCPE:
        SimulatorLogger.info("Cmd received:ADD_CPE")
        for index in cmd.devices._values:
            if self.simulator.machine.cms.has_key(index.cmMac):
                # if the CM associated to the CPE exists
                cm_cpe = self.simulator.machine.cms[index.cmMac]
                # CPE mac travels on cmtsMac field
                opResult = self.simulator.add_cpe(CPE(index.cmtsMac, cm=cm_cpe))
                SimulatorLogger.info("CPE %s added behind CM %s", index.cmtsMac, index.cmMac)
            else:
                SimulatorLogger.error("Invalid CM Mac(%s). CPE %s could not be registered",
                                      index.cmMac, index.cmtsMac)
                opResult = ANS_ERR
        simAnswer.id = opResult
    elif cmd.id == CMD_SIMULATOR_GETAMOUNTCPES_WITH_IP:
        SimulatorLogger.debug("SIMULATOR_GETAMOUNTCPES_WITH_IP cmd received...")
        cmsWithIP = 0
        for mac, dev in self.simulator.machine.cpes.iteritems():
            if (dev.ip != "0.0.0.0"):
                cmsWithIP += 1
        simAnswer.ipCM = str(cmsWithIP)
        simAnswer.id = ANS_OK
    elif cmd.id == CMD_SIMULATOR_ADDCM:
        SimulatorLogger.info("Cmd received:ADDCM")
        for index in cmd.devices._values:
            if self.simulator.machine.cmts.has_key(index.cmtsMac):
                localcmts = self.simulator.machine.cmts[index.cmtsMac]
                opResult = self.simulator.add_cm(CM(index.cmMac, cmts=localcmts))
            else:
                pass
                #self.answer.answer.append( (data.maclist[index][0], False) )
                #self.answer.msg="Did not find CMTS for association with CM"
                #self.answer.setCmdId(ANS_ERR)
        simAnswer.id = ANS_ADDCM
    else:
        SimulatorLogger.error("Unknown cmd received (%d)", data.cmdid)

    #clientsock.sendall( simAnswer.SerializeToString() )
    answerSize = len(simAnswer.SerializeToString())
    res = struct.pack('<i', answerSize)
    #SimulatorLogger.debug("client must received %d bytes", answerSize)
    clientsock.send(res)
    totalsent = 0
    data = simAnswer.SerializeToString()
    while totalsent < answerSize:
        sent = clientsock.send(data[totalsent:])
        if sent == 0:
            raise RuntimeError, "socket connection broken"
        totalsent = totalsent + sent
def test_cpe_setter_and_getter(self):
    self.assertEqual(self.service.cpe, CPE(self.cpe))
def extract_vendor_product_version(cpe_str):
    """Extract vendor, product and version from an NVD CVE entry's CPE string."""
    cpe_ = CPE(cpe_str)
    return cpe_.get_vendor()[0], cpe_.get_product()[0], cpe_.get_version()[0]
def cpe_is_app(cpe_str):
    """Return True if cpe is of application entry type."""
    return CPE(cpe_str).is_application()
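A hedged usage sketch for the two helpers above; the sample CPE 2.3 string is illustrative and the expected outputs follow from the cpe package's get_vendor/get_product/get_version/is_application behavior as used throughout these snippets.

# Hypothetical usage; not part of the original source.
cpe_str = "cpe:2.3:a:openbsd:openssh:7.4:*:*:*:*:*:*:*"
print(extract_vendor_product_version(cpe_str))  # ('openbsd', 'openssh', '7.4')
print(cpe_is_app(cpe_str))                      # True, since the part is "a"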
def config_cpe_match(self, cm):
    if all("$.vulnerable", cm)[0]:
        v = PLATFORM.VulnerableConfiguration
    else:
        v = PLATFORM.NotVulnerableConfiguration
    subject = BNode()
    cveStr = all("$.cpe23Uri", cm)[0]
    self.triples(subject, v,
                 [(PLATFORM.hasPlatform, cpeURI(cveStr))] +
                 self.versionStartExcluding(cm) +
                 self.versionStartIncluding(cm) +
                 self.versionEndExcluding(cm) +
                 self.versionEndIncluding(cm))
    #print(cveStr)
    c = CPE(cveStr)
    if c.is_hardware():
        self.g.add((cpeURI(cveStr), RDF.type, PLATFORM.HardwarePlatform))
    elif c.is_application():
        self.g.add((cpeURI(cveStr), RDF.type, PLATFORM.ApplicationPlatform))
    elif c.is_operating_system():
        self.g.add((cpeURI(cveStr), RDF.type, PLATFORM.OperatingSystemPlatform))

    vendor = ""
    for i in c.get_vendor():
        self.g.add((cpeURI(cveStr), PLATFORM.vendor,
                    self.plEnt(i, "Vendor_", cls=PLATFORM.Vendor)))
        vendor = i
    for i in c.get_product():
        self.g.add((cpeURI(cveStr), PLATFORM.product,
                    self.plEnt(i, "Product_" + vendor + "_", cls=PLATFORM.Product)))
    for i in c.get_edition():
        self.g.add((cpeURI(cveStr), PLATFORM.edition,
                    self.plEnt(i, "Edition_", cls=PLATFORM.Edition)))
    for i in c.get_language():
        self.g.add((cpeURI(cveStr), PLATFORM.language,
                    self.plEnt(i, "Language_", cls=PLATFORM.Language)))
    for i in c.get_other():
        self.g.add((cpeURI(cveStr), PLATFORM.other,
                    self.plEnt(i, "Other_", cls=PLATFORM.Other)))
    for i in c.get_software_edition():
        self.g.add((cpeURI(cveStr), PLATFORM.softwareEdition,
                    self.plEnt(i, "SoftwareEdition_", cls=PLATFORM.SoftwareEdition)))
    for i in c.get_target_hardware():
        self.g.add((cpeURI(cveStr), PLATFORM.targetHardware,
                    self.plEnt(i, "Hardware_", cls=CORE.Hardware)))
    for i in c.get_target_software():
        self.g.add((cpeURI(cveStr), PLATFORM.targetSoftware,
                    self.plEnt(i, "Software_", cls=CORE.Software)))
    for i in c.get_update():
        if not i == "-":
            self.g.add((cpeURI(cveStr), PLATFORM.update, Literal(i)))
    for i in c.get_version():
        if not i == "-":
            self.g.add((cpeURI(cveStr), PLATFORM.version, Literal(i)))
    return subject
mongoCves = mongoclient['cvedb']['cves']
mongoCwes = mongoclient['cvedb']['cwe']

# extract relevant data from every cve in the mongodb
# and write it into the newly created postgres table
cverefcount = 0
cvecount = 0
cveproductcombinations = 0
cwecvecount = 0
versionRegex = re.compile("[0-9][0-9a-z-.]+")
for cve in mongoCves.find():
    # parsing and inserting cpe-cve combinations
    parsed_cpes = []
    try:
        parsed_cpes = [CPE(s) for s in cve["vulnerable_configuration_cpe_2_2"]]
    except:
        print("failed to parse CPEs from ", cve["id"])
    for_insertion = []
    for c in parsed_cpes:
        if versionRegex.match(c.get_version()[0]):
            for_insertion.append((cve["id"], c.get_product()[0], c.get_version()[0]))
    cvecount += 1
    cveproductcombinations += len(for_insertion)
    for row in for_insertion:
        try:
            postgresCursor.execute(cveProductInsertStatement, row)
        except:
def cpe(self, value):
    if value:
        self._cpe = CPE(value)
def get_cpe_df(self, debug=False):
    """Get the list of CPE names for the vulnerability."""
    type_list = []
    part_list = []
    vendor_list = []
    product_list = []
    version_list = []
    update_list = []
    edition_list = []
    language_list = []
    sw_edition_list = []
    target_sw_list = []
    target_hw_list = []
    other_list = []
    published_datetime_list = []

    for cpe_entry in self.cpe_list:
        #if(debug):
        #    print(cpe_entry)
        try:
            cp = CPE(cpe_entry)
            if (cp.is_hardware()):
                type_list.append("HW")
            elif (cp.is_operating_system()):
                type_list.append("OS")
            elif (cp.is_application()):
                type_list.append("APP")
            else:
                type_list.append("UNDEFINED")
            part_list.append(list_to_string(cp.get_part()))
            vendor_list.append(list_to_string(cp.get_vendor()))
            product_list.append(list_to_string(cp.get_product()))
            version_list.append(list_to_string(cp.get_version()))
            update_list.append(list_to_string(cp.get_update()))
            edition_list.append(list_to_string(cp.get_edition()))
            language_list.append(list_to_string(cp.get_language()))
            sw_edition_list.append(list_to_string(cp.get_software_edition()))
            target_sw_list.append(list_to_string(cp.get_target_software()))
            target_hw_list.append(list_to_string(cp.get_target_hardware()))
            other_list.append(list_to_string(cp.get_other()))
            published_datetime_list.append(self.published_datetime)
        except Exception as inst:
            print(inst)

    data = pd.DataFrame()
    data['type'] = type_list
    data['part'] = part_list
    data['vendor'] = vendor_list
    data['product'] = product_list
    data['version'] = version_list
    data['update'] = update_list
    data['edition'] = edition_list
    data['language'] = language_list
    data['sw_edition'] = sw_edition_list
    data['target_sw'] = target_sw_list
    data['target_hw'] = target_hw_list
    data['other'] = other_list
    data['published_datetime'] = published_datetime_list
    return data
def on_rx_worklist_exec_start(msg, obj):
    """
    logic & physic worklist share
    1 query exist?
    2 status right?
    3 exist, update exec time
    4 timer, wait exec finish
    """
    from cpe import CPE

    ret = ERR_FAIL       # default
    cpe = None
    msg_rsp = msg + 2    # default fail
    obj_database = obj

    for nwf in [1]:
        # check args
        if (not isinstance(obj, MsgWorklistExecStart)):
            desc = "obj is not MsgWorklistExecStart"
            log.app_err(desc)
            ret = ERR_FATAL
            break

        # 1 query exist?
        id_ = obj.id_
        #worklist_id = obj.id_
        #worklist_id = CpeUserWorklist.m_dict_desc_id[id_]
        obj_database = restore_acs_worklist(id_)
        if (obj_database is None):
            desc = "id(%s) is not exist." % id_
            log.app_err(desc)
            obj_database = obj
            obj_database.dict_ret["str_result"] = desc
            break
        log.app_info("id=%s, worklist_name=%s, sn=%s"
                     % (id_, obj_database.worklist_name, obj_database.sn))
        worklist_id = obj_database.worklist_id

        # cancel previous status timer (msg is valid for this worklist id and can kill the timer;
        # otherwise skip)
        try:
            timer = get_worklist_timer(obj_database.id_)
            if (timer):
                pop_worklist_timer(obj_database.id_)
                timer.cancel()
        except Exception, e:
            pass

        # this msg is tried only once; if it fails, the worklist fails
        # 2 status right?
        status_expect = [WORK_LIST_STATUS_RESERVE]
        if (obj_database.status not in status_expect):
            desc = "worklist status is %s, not in (%s)" % (obj_database.status, status_expect)
            log.app_err(desc)
            obj_database.dict_ret["str_result"] = desc
            set_worklist_status(obj_database, WORK_LIST_STATUS_FAIL, desc)
            break

        # be careful, cpe(sn)'s domain is delayed until here (exec start)
        sn = obj_database.sn
        cpe = CPE.get_cpe(sn)
        if (cpe is None):
            desc = "cpe(sn=%s) is not online" % sn
            log.app_err(desc)
            obj_database.dict_ret["str_result"] = desc
            set_worklist_status(obj_database, WORK_LIST_STATUS_FAIL, desc)
            break

        # 3 can update ?
        domain = cpe.cpe_property.get_cpe_domain()
        if (not domain):
            desc = "cpe(sn=%s) domain(type) is Not config" % sn
            log.app_err(desc)
            obj_database.dict_ret["str_result"] = desc
            set_worklist_status(obj_database, WORK_LIST_STATUS_FAIL, desc)
            break

        operator = cpe.cpe_property.get_cpe_operator()
        if (not operator):
            # delay to worklist server's judge, fail too
            operator = "standard"
            desc = "cpe(sn=%s) operator(type) is Not config, use default" % sn
            log.app_info(desc)

        version = cpe.cpe_property.get_cpe_interface_version()

        # user-submitted worklists need their parameters matched against the template;
        # system worklists are not handled here for now
        if obj_database.group and obj_database.group.lower() == "user":
            cpe_interface_version = cpe.cpe_property.get_cpe_interface_version()
            # fetch the worklist parameter template
            worklist_template_data = get_worklist_template(
                operator, cpe_interface_version, domain, obj_database.worklist_name)
            if worklist_template_data is None:
                # worklist parameters may be empty (by zsj 2014-6-17)
                desc = (u"The user-added %s %s %s %s worklist is not supported by the server!"
                        % (operator, cpe_interface_version, domain, obj_database.worklist_name))
                log.app_err(desc)
                obj_database.dict_ret["str_result"] = desc
                set_worklist_status(obj_database, WORK_LIST_STATUS_FAIL, desc)
                break
            # match user parameters against the template and reset the worklist parameters
            obj_database.dict_data = _set_worklist_args(
                worklist_template_data, obj_database.dict_data)
            # write the updated worklist parameters back to the database
            str_data = str(obj_database.dict_data)
            update_acs_worklist_ex_by_id(worklist_id, 'PARAMETERS', str_data)

        # 3 update
        obj_database.operator = operator
        obj_database.cpe_interface_version = version
        obj_database.domain = domain
        obj_database.status = WORK_LIST_STATUS_RUNNING
        obj_database.time_exec_start = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        obj_database.rollback = cpe.cpe_property.get_cpe_worklist_rollback()  # default = False

        # 4 wait exec finish
        timer = reactor.callLater(
            webservercfg.ACS_WAIT_WORKLIST_EXEC_FINISH_TIMEOUT,
            on_worklist_timeout, EV_WORKLIST_EXEC_FINISH_RQST, obj_database)
        save_worklist_timer(obj_database.id_, timer)

        # mysql
        """
        update_acs_worklist(obj_database, "OPERATOR", obj_database.operator)
        update_acs_worklist(obj_database, "CPE_DEVICE_TYPE", obj_database.domain)
        update_acs_worklist(obj_database, "STATUS", obj_database.status)
        update_acs_worklist(obj_database, "TIME_EXEC_START", obj_database.time_exec_start)
        update_acs_worklist(obj_database, "ROLLBACK", obj_database.rollback)
        update_acs_worklist(obj_database, "SN", obj_database.sn)
        """
        dict_col = {}
        dict_data = {}
        dict_col['STATUS'] = obj_database.status
        dict_col['OPERATOR'] = obj_database.operator
        dict_col['OPERATOR_VERSION'] = obj_database.cpe_interface_version
        dict_col['DOMAIN'] = obj_database.domain
        dict_col['TIME_EXEC_START'] = obj_database.time_exec_start
        dict_col['ROLLBACK'] = str(obj_database.rollback)
        if dict_col:  # only update the database when the dict is not empty
            dict_data['columns'] = dict_col
            dict_data['condition'] = 'WORKLIST_ID=%s' % worklist_id
            operate_db('WORKLIST', 'UPDATE', dict_data)

        ret = ERR_SUCCESS
        msg_rsp = msg + 1  # response
def get_url(sn, url, username, password):
    """
    """
    from cpe import CPE

    ret = AUTHENTICATE_FAIL
    ret_api = None
    err_messsage = ""
    soap_id = 0

    desc = "begin get url(url=%s, username=%s, password=%s)." % (url, username, password)
    log.app_info(desc)

    if (not sn):
        err_messsage = "The sn is not exist"
        log.app_info(err_messsage)
        return AUTHENTICATE_FAIL, err_messsage

    if (not url):
        err_messsage = "The url is not exist(need inform?)"
        log.app_info(err_messsage)
        return AUTHENTICATE_FAIL, err_messsage

    # nwf 2013-06-09; retry 3 times if error(10060 or 10065)
    for i in [1, 2]:
        try:
            #conn = httplib2.Http(timeout = 60)
            conn = MyHttp(sn, timeout=60)  # Alter by lizn 2014-05-30
            conn.add_credentials(username, password)

            # mysql ; first
            cpe = CPE.get_cpe(sn)
            time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            cpe.cpe_soap.time_s1_start = time  # Alter by lizn 2014-03-11

            # mysql ------------- out
            content = "username=%s; password=%s; url=%s" % (username, password, url)
            #insert_acs_soap("connection request", "OUT", sn, content) by lizn 2014-05-30

            ret_api, data = conn.request(url)

            # mysql -------------- in
            content = str(ret_api) + "\n\n" + data
            #soap_id = insert_acs_soap("connection request", "IN", sn, content) by lizn 2014-05-30

            # mysql ; first end
            time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            cpe.cpe_soap.time_s1_finish = time  # Alter by lizn 2014-03-11

            status = ret_api.status
            conn.close()

            if status == AUTHENTICTATE_SUCCEED:
                err_messsage = "Get cpe url(url=%s, username=%s, password=%s) success." % (
                    url, username, password)
                log.app_info(err_messsage)
                ret = AUTHENTICTATE_SUCCEED
                break
            else:
                err_messsage = "Get cpe url(url=%s, username=%s, password=%s) not pass." % (
                    url, username, password)
                log.app_info(err_messsage)
                ret = AUTHENTICATE_FAIL
                break
        except Exception, e:
            err_messsage = "Get cpe url(url=%s, username=%s, password=%s) fail:%s." % (
                url, username, password, e)
            log.app_err(err_messsage)
            # friendly tip
            err_messsage = "Get cpe url(url=%s, username=%s, password=%s) fail:connect to cpe fail." % (
                url, username, password)
            try:
                # retry
                if ((e.errno == 10060) or (e.errno == 10065)):
                    continue
            except Exception:
                pass
            # other error, fail
            ret = AUTHENTICATE_FAIL
            break
def get_item(vuln, test):
    finding = Finding(
        test=test,
        unique_id_from_tool=vuln["id"],
        nb_occurences=1,
    )
    # Defining variables
    location = vuln["location"]

    # Endpoint
    # using url
    if "url" in location and location["url"] and location["url"] != "None":
        endpoint = Endpoint.from_uri(location["url"])
    # fallback to using old way of creating endpoints
    elif "domain" in location and location["domain"] and location["domain"] != "None":
        endpoint = Endpoint(host=str(location["domain"]))
    else:
        # no domain, use ip instead
        if "ip" in location and location["ip"] and location["ip"] != "None":
            endpoint = Endpoint(host=str(location["ip"]))
            # check for protocol
            if (
                "applicationProtocol" in location
                and location["applicationProtocol"]
                and location["applicationProtocol"] != "None"
            ):
                endpoint.protocol = location["applicationProtocol"]
            # check for port
            if (
                "port" in location
                and location["port"]
                and location["port"] != "None"
            ):
                endpoint.port = location["port"]
    finding.unsaved_endpoints = [endpoint]  # assigning endpoint

    # Title
    finding.title = vuln["name"]

    # Description + CVEs
    description = vuln["classification"]
    cves = "no match"
    if "CVE-NO-MATCH" not in vuln["kb"]["cves"]:
        finding.cve = vuln["kb"]["cves"][0]
        cves = ""
        for cve in vuln["kb"]["cves"]:
            cves += f"{cve}, "
        cves = cves[: len(cves) - 2]  # removing the trailing comma and blank space
    finding.description = description + "; CVEs: " + cves
    finding.severity = vuln["severity"].title()

    # Date
    date_str = vuln["createdOn"]
    date_str = date_str[: len(date_str) - 3] + date_str[-2:]
    finding.date = datetime.strptime(date_str, "%Y-%m-%dT%H:%M:%S.%f%z")

    # Component Name and Version
    if (
        "applicationCpe" in location
        and location["applicationCpe"]
        and location["applicationCpe"] != "None"
    ):
        cpe = CPE(location["applicationCpe"])
        component_name = cpe.get_vendor()[0] + ":" if len(cpe.get_vendor()) > 0 else ""
        component_name += cpe.get_product()[0] if len(cpe.get_product()) > 0 else ""
        finding.component_name = component_name if component_name else None
        finding.component_version = (
            cpe.get_version()[0] if len(cpe.get_version()) > 0 else None
        )

    return finding
def populate_CVE(root):
    cve_data = []
    vuln_data = []
    for entry in root:
        cve_id = entry.find(prefixed("vuln", "cve-id")).text
        cve_id = int(re.sub("[^0-9]", "", cve_id))
        pubdate = entry.find(prefixed("vuln", "published-datetime")).text
        moddate = entry.find(prefixed("vuln", "last-modified-datetime")).text
        summary = entry.find(prefixed("vuln", "summary")).text
        pubdate = parser.parse(pubdate)
        moddate = parser.parse(moddate)
        vulnSoftware = entry.find(prefixed("vuln", "vulnerable-software-list"))
        vulnList = []
        unableToParse = 0
        if vulnSoftware is not None:
            for v in vulnSoftware:
                try:
                    myCPE = CPE(v.text)
                except NotImplementedError:
                    unableToParse += 1
                    # logging.warning("Unable to parse CPE '%s'" % v.text)
                else:
                    part = myCPE.get_part()[0]
                    vendor = myCPE.get_vendor()[0]
                    product = myCPE.get_product()[0]
                    version = myCPE.get_version()[0]
                    update = myCPE.get_update()[0]
                    edition = myCPE.get_edition()[0]
                    language = myCPE.get_language()[0]
                    derpa = {
                        "part": part,
                        "vendor": vendor,
                        "product": product,
                        "version": version,
                        "update": update,
                        "edition": edition,
                        "language": language,
                        "cve": cve_id,
                    }
                    vuln_data.append(derpa)
        if unableToParse > 0:
            logging.warning("Could not parse %d lines from file." % unableToParse)
        vuln = entry.find(prefixed("vuln", "cvss"))
        # metrics = vuln.find(prefixed('cvss','base_metrics'))
        if vuln is not None:
            score = vuln.getchildren()[0].getchildren()[0].text
            accessVector = vuln.getchildren()[0].getchildren()[1].text
            accessComplexity = vuln.getchildren()[0].getchildren()[2].text
            auth = vuln.getchildren()[0].getchildren()[3].text
            impactConf = vuln.getchildren()[0].getchildren()[4].text
            impactInt = vuln.getchildren()[0].getchildren()[5].text
            impactAvail = vuln.getchildren()[0].getchildren()[6].text
            if "DO NOT USE THIS CANDIDATE NUMBER" not in summary:
                data = {
                    "cve": cve_id,
                    "pubdate": pubdate,
                    "moddate": moddate,
                    "summary": summary,
                    "score": score,
                    "accessVector": accessVector,
                    "accessComp": accessComplexity,
                    "auth": auth,
                    "impactConf": impactConf,
                    "impactInt": impactInt,
                    "impactAvail": impactAvail,
                }
                cve_data.append(data)
    tables["CVEs"].insert().execute(cve_data)
    tables["VulnSoftware"].insert().execute(vuln_data)