def candidates_for_package(package_obj, distro_namespace=None):
    """
    Return candidate vulnerability records for the given package, matched by
    namespace and by either the package name or its normalized source package
    name. Matches any version of the package; callers filter by version.

    :param package_obj: the package to match against
    :param distro_namespace: optional namespace name to use instead of the one
        derived from the package itself (typically precomputed by the caller)
    :return: tuple of (fix_candidates, vulnerable_candidates)
    """
    session = get_thread_scoped_session()

    # Prefer the caller-supplied namespace; otherwise derive it from the package.
    if distro_namespace:
        namespace_name = distro_namespace
    else:
        namespace_name = DistroNamespace.for_obj(package_obj).namespace_name

    # FixedArtifact records in the namespace matching the pkg or src-pkg name.
    fix_candidates = session.query(FixedArtifact).filter(
        FixedArtifact.namespace_name == namespace_name,
        or_(FixedArtifact.name == package_obj.name,
            FixedArtifact.name == package_obj.normalized_src_pkg)).all()

    # VulnerableArtifact records in the namespace matching the pkg or src-pkg name.
    vulnerable_candidates = session.query(VulnerableArtifact).filter(
        VulnerableArtifact.namespace_name == namespace_name,
        or_(VulnerableArtifact.name == package_obj.name,
            VulnerableArtifact.name == package_obj.normalized_src_pkg)).all()

    return fix_candidates, vulnerable_candidates
def match_and_vulnerable(vuln_obj, package_obj):
    """
    Decide whether a VulnerableArtifact record applies to the given package,
    i.e. whether the package should be flagged as vulnerable.

    :param vuln_obj: VulnerableArtifact record to test
    :param package_obj: package record whose name/version attributes are consulted
    :return: True if the record matches the package, False otherwise
    :raises TypeError: if vuln_obj is not a VulnerableArtifact
    """
    if not isinstance(vuln_obj, VulnerableArtifact):
        raise TypeError('Expected a VulnerableArtifact type, got: {}'.format(
            type(vuln_obj)))

    # Resolve the distro context for the package (kept for parity with the fix-side check).
    dist = DistroNamespace.for_obj(package_obj)
    flavor = dist.flavor

    # Sanity check: candidate records should already have been selected by name.
    if vuln_obj.name not in (package_obj.name, package_obj.normalized_src_pkg):
        log.warn(
            'Name mismatch in vulnerable check. This should not happen: Fix: {}, Package: {}, Package_Norm_Src: {}, Package_Src: {}'
            .format(vuln_obj.name, package_obj.name,
                    package_obj.normalized_src_pkg, package_obj.src_pkg))
        return False

    # Catch-all record: an explicit 'all' or string 'None' version marks every
    # version of the named package as vulnerable.
    if vuln_obj.epochless_version in ('all', 'None'):
        return True

    # Otherwise the record matches only on an exact version (with or without epoch).
    return vuln_obj.epochless_version in (package_obj.fullversion,
                                          package_obj.version)
def evaluate(self, image_obj, context):
    """
    Fire when the vulnerability feed data for the image's distro is either
    missing entirely or has not been synced within MAXAGE days.
    """
    # Map the image to its distro namespace and look for the feed group's last sync.
    namespace = DistroNamespace.for_obj(image_obj)
    oldest_update = None

    if namespace:
        vulnerability_feed = DataFeeds.instance().vulnerabilities
        for candidate_name in namespace.like_namespace_names:
            # Check feed names
            groups = vulnerability_feed.group_by_name(candidate_name)
            if groups:
                # No records yet, but we have the feed, so may just not have any data yet
                oldest_update = groups[0].last_sync
                break

    if self.max_age.value() is None:
        return

    try:
        if oldest_update is not None:
            oldest_update = calendar.timegm(oldest_update.timetuple())
            mintime = time.time() - int(int(self.max_age.value()) * 86400)
            if oldest_update < mintime:
                self._fire(msg="The vulnerability feed for this image distro is older than MAXAGE ("+str(self.max_age.value())+") days")
        else:
            # No sync timestamp found at all: treat missing feed data as out of date.
            self._fire(
                msg="The vulnerability feed for this image distro is older than MAXAGE (" + str(self.max_age.value()) + ") days")
    except Exception as err:
        self._fire(msg="Cannot perform data feed up-to-date check - message from server: " + str(err))
def match_but_not_fixed(fix_obj, package_obj):
    """
    Does the FixedArtifact match the package as a vulnerability such that the
    fix indicates the package is *not* fixed and therefore vulnerable.

    :param fix_obj: a FixedArtifact record
    :param package_obj: an ImagePackage record
    :return: True if the names match and the package version is older than the
        fix version; False on a name mismatch or when the fix already applies
    :raises TypeError: if fix_obj is not a FixedArtifact
    """
    if not isinstance(fix_obj, FixedArtifact):
        raise TypeError('Expected a FixedArtifact type, got: {}'.format(
            type(fix_obj)))

    dist = DistroNamespace.for_obj(package_obj)
    flavor = dist.flavor
    log.debug('Package: {}, Fix: {}, Flavor: {}'.format(
        package_obj.name, fix_obj.name, flavor))

    # Sanity check: candidate records should already have been selected by name.
    if fix_obj.name not in (package_obj.name, package_obj.normalized_src_pkg):
        log.warn(
            'Name mismatch in fix check. This should not happen: Fix: {}, Package: {}, Package_Norm_Src: {}, Package_Src: {}'
            .format(fix_obj.name, package_obj.name,
                    package_obj.normalized_src_pkg, package_obj.src_pkg))
        return False

    # Catch-all record: an explicit string 'None' version means there is no fix
    # available, so every version of the named package is vulnerable.
    if fix_obj.version == 'None':
        return True

    # Compare versions with the distro-appropriate comparator; the package is
    # vulnerable only when it is strictly older than the fix version.
    if flavor == 'RHEL':
        comparator_label = 'rpm'
        package_is_older = rpm_compare_versions(
            package_obj.name, package_obj.fullversion,
            fix_obj.name, fix_obj.epochless_version) < 0
    elif flavor == 'DEB':
        comparator_label = 'dpkg'
        package_is_older = dpkg_compare_versions(
            package_obj.fullversion, 'lt', fix_obj.epochless_version)
    elif flavor == 'ALPINE':
        comparator_label = 'apkg'
        package_is_older = apkg_compare_versions(
            package_obj.fullversion, 'lt', fix_obj.epochless_version)
    else:
        comparator_label = None
        package_is_older = False

    if package_is_older:
        log.debug('{} Compared: {} < {}: True'.format(
            comparator_label, package_obj.fullversion,
            fix_obj.epochless_version))
        return True

    # Newer or the same
    return False
def evaluate(self, image_obj, context):
    """
    Fire when the vulnerability feed data for the image's distro is either
    missing entirely or has not been synced within MAXAGE days.
    """
    # Map the image to its distro namespace and search registered feeds for a sync date.
    ns = DistroNamespace.for_obj(image_obj)
    oldest_update = None

    if ns:
        for candidate_ns in ns.like_namespace_names:
            # Check feed names
            for feed_name in feed_registry.registered_vulnerability_feed_names():
                # First match, assume only one matches for the namespace
                group_record = get_feed_group_detached(feed_name, candidate_ns)
                if not group_record:
                    continue
                # No records yet, but we have the feed, so may just not have any data yet
                oldest_update = group_record.last_sync
                logger.debug(
                    "Found date for oldest update in feed %s group %s date = %s",
                    feed_name,
                    group_record.name,
                    oldest_update,
                )
                break

    if self.max_age.value() is None:
        return

    try:
        if oldest_update is not None:
            oldest_update = calendar.timegm(oldest_update.timetuple())
            mintime = time.time() - int(int(self.max_age.value()) * 86400)
            if oldest_update < mintime:
                self._fire(
                    msg="The vulnerability feed for this image distro is older than MAXAGE ("
                    + str(self.max_age.value()) + ") days")
        else:
            # No sync timestamp found at all: treat missing feed data as out of date.
            self._fire(
                msg="The vulnerability feed for this image distro is older than MAXAGE ("
                + str(self.max_age.value()) + ") days")
    except Exception as err:
        self._fire(
            msg="Cannot perform data feed up-to-date check - message from server: "
            + str(err))
def load_and_normalize_packages(self, package_analysis_json, image_obj):
    """
    Loads and normalizes package data from all distros.

    Prefers the rich 'pkgs.allinfo' analyzer output; falls back to the bare
    'pkgs.all' / 'pkgs_plus_source.all' maps when allinfo produced nothing.

    :param image_obj: image record the packages belong to
    :param package_analysis_json: analyzer output dict
    :return: list of ImagePackage objects that can be added to an image
    """
    pkgs = []
    img_distro = DistroNamespace.for_obj(image_obj)

    # pkgs.allinfo handling.
    # NOTE: dict.values() is a non-indexable view in py3, so materialize it
    # before subscripting (the original code raised TypeError here on py3).
    pkgs_all = list(package_analysis_json.get('pkgs.allinfo', {}).values())
    if not pkgs_all:
        return []
    else:
        pkgs_all = pkgs_all[0]

    for pkg_name, metadata_str in pkgs_all.items():
        metadata = json.loads(metadata_str)

        p = ImagePackage()
        p.distro_name = image_obj.distro_name
        p.distro_version = image_obj.distro_version
        p.like_distro = image_obj.like_distro
        p.name = pkg_name
        p.version = metadata.get('version')
        p.origin = metadata.get('origin')
        p.size = metadata.get('size')
        p.arch = metadata.get('arch')
        p.license = metadata.get('license') if metadata.get(
            'license') else metadata.get('lics')
        p.release = metadata.get('release', 'N/A')
        p.pkg_type = metadata.get('type')
        p.src_pkg = metadata.get('sourcepkg')
        p.image_user_id = image_obj.user_id
        p.image_id = image_obj.id

        if 'files' in metadata:
            # Handle file data
            p.files = metadata.get('files')

        if p.release != 'N/A':
            p.fullversion = p.version + '-' + p.release
        else:
            p.fullversion = p.version

        if img_distro.flavor == 'DEB':
            # Strip the debian binNMU suffix (+bN...) before deriving the
            # normalized source package name. Raw strings so \d is a regex
            # digit class, not an (invalid) string escape.
            cleanvers = re.sub(re.escape("+b") + r"\d+.*", "", p.version)
            spkg = re.sub(re.escape("-" + cleanvers), "", p.src_pkg)
        else:
            spkg = re.sub(re.escape("-" + p.version) + r".*", "", p.src_pkg)

        p.normalized_src_pkg = spkg
        pkgs.append(p)

    if pkgs:
        return pkgs
    else:
        log.warn('Pkg Allinfo not found, reverting to using pkgs.all')

    all_pkgs = package_analysis_json['pkgs.all']['base']
    all_pkgs_src = package_analysis_json['pkgs_plus_source.all']['base']

    for pkg_name, version in all_pkgs.items():
        p = ImagePackage()
        p.image_user_id = image_obj.user_id
        p.image_id = image_obj.id
        p.name = pkg_name
        p.version = version
        p.fullversion = all_pkgs_src[pkg_name]

        if img_distro.flavor == 'RHEL':
            # Parse name/version/release out of a synthetic rpm filename.
            name, parsed_version, release, epoch, arch = split_rpm_filename(
                pkg_name + '-' + version + '.tmparch.rpm')
            p.version = parsed_version
            p.release = release
            p.pkg_type = 'RPM'
            p.origin = 'N/A'
            p.src_pkg = 'N/A'
            p.license = 'N/A'
            p.arch = 'N/A'
        elif img_distro.flavor == 'DEB':
            try:
                p.version, p.release = version.split('-')
            except ValueError:
                # Version string has no (or more than one) '-' separator.
                p.version = version
                p.release = None

        # BUGFIX: the original loop constructed packages but never collected
        # them, so the fallback path always returned an empty list.
        pkgs.append(p)

    return pkgs
def evaluate(self, image_obj, context):
    """Fire when no vulnerability feed data exists for the image's distro namespace."""
    namespace = DistroNamespace.for_obj(image_obj)
    if have_vulnerabilities_for(namespace):
        return
    self._fire(
        msg=
        "Feed data unavailable, cannot perform CVE scan for distro: " +
        str(image_obj.distro_namespace))
def get_image_vulnerabilities(user_id, image_id, force_refresh=False):
    """
    Return the vulnerability listing for the specified image and load from catalog if not found and specifically asked
    to do so.

    Example json output:
    {
       "multi" : {
          "url_column_index" : 7,
          "result" : {
             "rows" : [],
             "rowcount" : 0,
             "colcount" : 8,
             "header" : [
                "CVE_ID",
                "Severity",
                "*Total_Affected",
                "Vulnerable_Package",
                "Fix_Available",
                "Fix_Images",
                "Rebuild_Images",
                "URL"
             ]
          },
          "querycommand" : "/usr/lib/python2.7/site-packages/anchore/anchore-modules/multi-queries/cve-scan.py /ebs_data/anchore/querytmp/queryimages.7026386 /ebs_data/anchore/data /ebs_data/anchore/querytmp/query.59057288 all",
          "queryparams" : "all",
          "warns" : [
             "0005b136f0fb (prom/prometheus:master) cannot perform CVE scan: no CVE data is currently available for the detected base distro type (busybox:unknown_version,busybox:v1.26.2)"
          ]
       }
    }

    :param user_id: user id of image to evaluate
    :param image_id: image id to evaluate
    :param force_refresh: if true, flush and recompute vulnerabilities rather than returning current values
    :return: legacy vulnerability listing dict (see example above)
    """
    # Has image?
    db = get_session()
    try:
        img = db.query(Image).get((image_id, user_id))
        vulns = []
        if not img:
            abort(404)
        else:
            if force_refresh:
                log.info('Forcing refresh of vulnerabiltiies for {}/{}'.format(user_id, image_id))
                try:
                    # Drop the existing matches, flush so the deletes land
                    # before the re-scan inserts, then persist the new set.
                    current_vulns = img.vulnerabilities()
                    log.info('Removing {} current vulnerabilities for {}/{} to rescan'.format(len(current_vulns), user_id, image_id))
                    for v in current_vulns:
                        db.delete(v)

                    db.flush()
                    vulns = vulnerabilities_for_image(img)
                    log.info('Adding {} vulnerabilities from rescan to {}/{}'.format(len(vulns), user_id, image_id))
                    for v in vulns:
                        db.add(v)
                    db.commit()
                except Exception as e:
                    log.exception('Error refreshing cve matches for image {}/{}'.format(user_id, image_id))
                    db.rollback()
                    abort(Response('Error refreshing vulnerability listing for image.', 500))

                # Rescan committed the session; re-acquire and refresh the image record.
                db = get_session()
                db.refresh(img)
            else:
                vulns = img.vulnerabilities()

        # Has vulnerabilities?
        warns = []
        if not vulns:
            vulns = []
            # Warn the caller when the empty result may just mean "no feed data for this distro".
            ns = DistroNamespace.for_obj(img)
            if not have_vulnerabilities_for(ns):
                warns = ['No vulnerability data available for image distro: {}'.format(ns.namespace_name)]

        rows = []
        for vuln in vulns:
            # if vuln.vulnerability.fixed_in:
            #     fixes_in = filter(lambda x: x.name == vuln.pkg_name or x.name == vuln.package.normalized_src_pkg,
            #                       vuln.vulnerability.fixed_in)
            #     fix_available_in = fixes_in[0].version if fixes_in else 'None'
            # else:
            #     fix_available_in = 'None'
            # One row per match, shaped to match TABLE_STYLE_HEADER_LIST.
            rows.append([
                vuln.vulnerability_id,
                vuln.vulnerability.severity,
                1,
                vuln.pkg_name + '-' + vuln.package.fullversion,
                str(vuln.fixed_in()),
                vuln.pkg_image_id,
                'None',  # Always empty this for now
                vuln.vulnerability.link
            ]
            )

        # Assemble the legacy "multi" query response shape.
        vuln_listing = {
            'multi': {
                'url_column_index': 7,
                'result': {
                    'header': TABLE_STYLE_HEADER_LIST,
                    'rowcount': len(rows),
                    'colcount': len(TABLE_STYLE_HEADER_LIST),
                    'rows': rows
                },
                'warns': warns
            }
        }

        report = LegacyVulnerabilityReport.from_dict(vuln_listing)
        resp = ImageVulnerabilityListing(user_id=user_id, image_id=image_id, legacy_report=report)
        return resp.to_dict()
    except HTTPException:
        # abort() raises HTTPException; roll back and propagate unchanged.
        db.rollback()
        raise
    except Exception as e:
        log.exception('Error checking image {}, {} for vulnerabiltiies. Rolling back'.format(user_id, image_id))
        db.rollback()
        abort(500)
    finally:
        db.close()
def get_image_vulnerabilities(user_id, image_id, force_refresh=False, vendor_only=True):
    """
    Return the vulnerability listing for the specified image and load from catalog if not found and specifically asked
    to do so.

    Example json output:
    {
       "multi" : {
          "url_column_index" : 7,
          "result" : {
             "rows" : [],
             "rowcount" : 0,
             "colcount" : 8,
             "header" : [
                "CVE_ID",
                "Severity",
                "*Total_Affected",
                "Vulnerable_Package",
                "Fix_Available",
                "Fix_Images",
                "Rebuild_Images",
                "URL"
             ]
          },
          "querycommand" : "/usr/lib/python2.7/site-packages/anchore/anchore-modules/multi-queries/cve-scan.py /ebs_data/anchore/querytmp/queryimages.7026386 /ebs_data/anchore/data /ebs_data/anchore/querytmp/query.59057288 all",
          "queryparams" : "all",
          "warns" : [
             "0005b136f0fb (prom/prometheus:master) cannot perform CVE scan: no CVE data is currently available for the detected base distro type (busybox:unknown_version,busybox:v1.26.2)"
          ]
       }
    }

    :param user_id: user id of image to evaluate
    :param image_id: image id to evaluate
    :param force_refresh: if true, flush and recompute vulnerabilities rather than returning current values
    :param vendor_only: if true, filter out the vulnerabilities that vendors will explicitly not address
    :return: legacy vulnerability listing dict plus a CPE match report
    """
    # Has image?
    db = get_session()
    try:
        img = db.query(Image).get((image_id, user_id))
        vulns = []
        if not img:
            abort(404)
        else:
            if force_refresh:
                log.info('Forcing refresh of vulnerabiltiies for {}/{}'.format(
                    user_id, image_id))
                try:
                    vulns = rescan_image(img, db_session=db)
                    db.commit()
                except Exception as e:
                    log.exception(
                        'Error refreshing cve matches for image {}/{}'.format(
                            user_id, image_id))
                    db.rollback()
                    abort(
                        Response(
                            'Error refreshing vulnerability listing for image.',
                            500))

                # Rescan committed the session; re-acquire and refresh the image record.
                db = get_session()
                db.refresh(img)

            vulns = img.vulnerabilities()

        # Has vulnerabilities?
        warns = []
        if not vulns:
            vulns = []
            # Warn the caller when the empty result may just mean "no feed data for this distro".
            ns = DistroNamespace.for_obj(img)
            if not have_vulnerabilities_for(ns):
                warns = [
                    'No vulnerability data available for image distro: {}'.
                    format(ns.namespace_name)
                ]

        rows = []
        for vuln in vulns:
            # if vuln.vulnerability.fixed_in:
            #     fixes_in = filter(lambda x: x.name == vuln.pkg_name or x.name == vuln.package.normalized_src_pkg,
            #                       vuln.vulnerability.fixed_in)
            #     fix_available_in = fixes_in[0].version if fixes_in else 'None'
            # else:
            #     fix_available_in = 'None'

            # Skip the vulnerability if the vendor_only flag is set to True and the issue won't be addressed by the vendor
            if vendor_only and vuln.fix_has_no_advisory():
                continue

            # One row per match; legacy columns first, extended columns appended.
            rows.append([
                vuln.vulnerability_id,
                vuln.vulnerability.severity,
                1,
                vuln.pkg_name + '-' + vuln.package.fullversion,
                str(vuln.fixed_in()),
                vuln.pkg_image_id,
                'None',  # Always empty this for now
                vuln.vulnerability.link,
                vuln.pkg_type,
                'vulnerabilities',
                vuln.vulnerability.namespace_name,
                vuln.pkg_name,
                vuln.package.fullversion,
            ])

        # Assemble the legacy "multi" query response shape.
        vuln_listing = {
            'multi': {
                'url_column_index': 7,
                'result': {
                    'header': TABLE_STYLE_HEADER_LIST,
                    'rowcount': len(rows),
                    'colcount': len(TABLE_STYLE_HEADER_LIST),
                    'rows': rows
                },
                'warns': warns
            }
        }

        # CPE-based matches are best-effort: failures only log a warning and
        # leave cpe_vuln_listing empty rather than failing the whole request.
        cpe_vuln_listing = []
        try:
            all_cpe_matches = db.query(
                ImageCpe,
                CpeVulnerability).filter(ImageCpe.image_id == image_id).filter(
                    ImageCpe.name == CpeVulnerability.name).filter(
                        ImageCpe.version == CpeVulnerability.version)

            if not all_cpe_matches:
                all_cpe_matches = []

            # Dedupe identical match records by hashing their json form.
            cpe_hashes = {}
            for image_cpe, vulnerability_cpe in all_cpe_matches:
                cpe_vuln_el = {
                    'vulnerability_id': vulnerability_cpe.vulnerability_id,
                    'severity': vulnerability_cpe.severity,
                    'link': vulnerability_cpe.link,
                    'pkg_type': image_cpe.pkg_type,
                    'pkg_path': image_cpe.pkg_path,
                    'name': image_cpe.name,
                    'version': image_cpe.version,
                    'cpe': image_cpe.get_cpestring(),
                    'feed_name': vulnerability_cpe.feed_name,
                    'feed_namespace': vulnerability_cpe.namespace_name,
                }
                cpe_hash = hashlib.sha256(
                    utils.ensure_bytes(json.dumps(cpe_vuln_el))).hexdigest()
                if not cpe_hashes.get(cpe_hash, False):
                    cpe_vuln_listing.append(cpe_vuln_el)
                    cpe_hashes[cpe_hash] = True
        except Exception as err:
            log.warn("could not fetch CPE matches - exception: " + str(err))

        report = LegacyVulnerabilityReport.from_dict(vuln_listing)
        resp = ImageVulnerabilityListing(user_id=user_id,
                                         image_id=image_id,
                                         legacy_report=report,
                                         cpe_report=cpe_vuln_listing)

        return resp.to_dict()
    except HTTPException:
        # abort() raises HTTPException; roll back and propagate unchanged.
        db.rollback()
        raise
    except Exception as e:
        log.exception(
            'Error checking image {}, {} for vulnerabiltiies. Rolling back'.
            format(user_id, image_id))
        db.rollback()
        abort(500)
    finally:
        db.close()
def evaluate(self, image_obj, context):
    """Fire UNSUPPORTEDDISTRO when no vulnerability feed data exists for the image's distro namespace."""
    namespace = DistroNamespace.for_obj(image_obj)
    if have_vulnerabilities_for(namespace):
        return
    self._fire(msg="UNSUPPORTEDDISTRO cannot perform CVE scan: " +
               str(image_obj.distro_namespace))
def evaluate(self, image_obj, context):
    """Fire when distro-specific feed data is missing for the image's namespace."""
    namespace = DistroNamespace.for_obj(image_obj)
    if have_vulnerabilities_for(namespace):
        return
    self._fire(
        msg=
        "Distro-specific feed data not found for distro namespace: %s. Cannot perform CVE scan OS/distro packages"
        % image_obj.distro_namespace)
def get_image_vulnerabilities(
    self,
    image: Image,
    db_session,
    vendor_only: bool = True,
    force_refresh: bool = False,
    cache: bool = True,
):
    """
    Build an ImageVulnerabilitiesReport for the given image, combining the
    scanner's package matches (merged with NVD metadata) and CPE-based matches.

    :param image: Image record to evaluate
    :param db_session: active DB session; replaced with a fresh one after a forced refresh
    :param vendor_only: if True, skip matches the vendor has declared won't-fix
    :param force_refresh: if True, flush and recompute matches before reporting
    :param cache: accepted for interface compatibility; not consulted in this body
    :return: ImageVulnerabilitiesReport, or make_response_error(...) on refresh failure
    """
    # select the nvd class once and be done
    _nvd_cls, _cpe_cls = select_nvd_classes(db_session)

    # initialize the scanner
    scanner = self.__scanner__()

    user_id = image.user_id
    image_id = image.id

    results = []

    if force_refresh:
        log.info("Forcing refresh of vulnerabilities for {}/{}".format(
            user_id, image_id))
        try:
            scanner.flush_and_recompute_vulnerabilities(
                image, db_session=db_session)
            db_session.commit()
        except Exception as e:
            log.exception(
                "Error refreshing cve matches for image {}/{}".format(
                    user_id, image_id))
            db_session.rollback()
            return make_response_error(
                "Error refreshing vulnerability listing for image.",
                in_httpcode=500,
            )

        # Refresh committed the session; re-acquire and reload the image record.
        db_session = get_session()
        db_session.refresh(image)

    with timer("Image vulnerability primary lookup", log_level="debug"):
        vulns = scanner.get_vulnerabilities(image)

    # Has vulnerabilities?
    warns = []
    if not vulns:
        vulns = []
        # Warn the caller when the empty result may just mean "no feed data for this distro".
        ns = DistroNamespace.for_obj(image)
        if not have_vulnerabilities_for(ns):
            warns = [
                "No vulnerability data available for image distro: {}".
                format(ns.namespace_name)
            ]

    rows = []
    with timer("Image vulnerability nvd metadata merge", log_level="debug"):
        # Pair each match with its NVD records for score extraction below.
        vulns = merge_nvd_metadata_image_packages(db_session, vulns,
                                                  _nvd_cls, _cpe_cls)

    with timer("Image vulnerability output formatting", log_level="debug"):
        for vuln, nvd_records in vulns:
            fixed_artifact = vuln.fixed_artifact()

            # Skip the vulnerability if the vendor_only flag is set to True and the issue won't be addressed by the vendor
            if vendor_only and vuln.fix_has_no_advisory(
                    fixed_in=fixed_artifact):
                continue

            # Flatten NVD CVSS scores across all associated NVD records.
            nvd_scores = [
                self._make_cvss_score(score) for nvd_record in nvd_records
                for score in nvd_record.get_cvss_scores_nvd()
            ]

            results.append(
                VulnerabilityMatch(
                    vulnerability=VulnerabilityModel(
                        vulnerability_id=vuln.vulnerability_id,
                        description="NA",
                        severity=vuln.vulnerability.severity,
                        link=vuln.vulnerability.link,
                        feed="vulnerabilities",
                        feed_group=vuln.vulnerability.namespace_name,
                        cvss_scores_nvd=nvd_scores,
                        cvss_scores_vendor=[],
                        created_at=vuln.vulnerability.created_at,
                        last_modified=vuln.vulnerability.updated_at,
                    ),
                    artifact=Artifact(
                        name=vuln.pkg_name,
                        version=vuln.package.fullversion,
                        pkg_type=vuln.pkg_type,
                        pkg_path=vuln.pkg_path,
                        cpe="None",
                        cpe23="None",
                    ),
                    fixes=[
                        FixedArtifact(
                            version=str(
                                vuln.fixed_in(fixed_in=fixed_artifact)),
                            wont_fix=vuln.fix_has_no_advisory(
                                fixed_in=fixed_artifact),
                            observed_at=fixed_artifact.fix_observed_at
                            if fixed_artifact else None,
                        )
                    ],
                    match=Match(detected_at=vuln.created_at),
                ))

    # TODO move dedup here so api doesn't have to
    # cpe_vuln_listing = []
    # CPE matches are best-effort: failures only log and leave results as-is.
    try:
        with timer("Image vulnerabilities cpe matches", log_level="debug"):
            all_cpe_matches = scanner.get_cpe_vulnerabilities(
                image, _nvd_cls, _cpe_cls)

            if not all_cpe_matches:
                all_cpe_matches = []

            api_endpoint = self._get_api_endpoint()

            for image_cpe, vulnerability_cpe in all_cpe_matches:
                # Fall back to a local query link when the record has none.
                link = vulnerability_cpe.parent.link
                if not link:
                    link = "{}/query/vulnerabilities?id={}".format(
                        api_endpoint, vulnerability_cpe.vulnerability_id)

                nvd_scores = [
                    self._make_cvss_score(score) for score in
                    vulnerability_cpe.parent.get_cvss_scores_nvd()
                ]

                vendor_scores = [
                    self._make_cvss_score(score) for score in
                    vulnerability_cpe.parent.get_cvss_scores_vendor()
                ]

                results.append(
                    VulnerabilityMatch(
                        vulnerability=VulnerabilityModel(
                            vulnerability_id=vulnerability_cpe.parent.
                            normalized_id,
                            description="NA",
                            severity=vulnerability_cpe.parent.severity,
                            link=link,
                            feed=vulnerability_cpe.feed_name,
                            feed_group=vulnerability_cpe.namespace_name,
                            cvss_scores_nvd=nvd_scores,
                            cvss_scores_vendor=vendor_scores,
                            created_at=vulnerability_cpe.parent.created_at,
                            last_modified=vulnerability_cpe.parent.
                            updated_at,
                        ),
                        artifact=Artifact(
                            name=image_cpe.name,
                            version=image_cpe.version,
                            pkg_type=image_cpe.pkg_type,
                            pkg_path=image_cpe.pkg_path,
                            cpe=image_cpe.get_cpestring(),
                            cpe23=image_cpe.get_cpe23string(),
                        ),
                        fixes=[
                            FixedArtifact(
                                version=item,
                                wont_fix=False,
                                observed_at=vulnerability_cpe.created_at,
                            )
                            for item in vulnerability_cpe.get_fixed_in()
                        ],
                        # using vulnerability created_at to indicate the match timestamp for now
                        match=Match(
                            detected_at=vulnerability_cpe.created_at),
                    ))
    except Exception as err:
        log.exception("could not fetch CPE matches")

    import uuid

    return ImageVulnerabilitiesReport(
        account_id=image.user_id,
        image_id=image_id,
        results=results,
        metadata=VulnerabilitiesReportMetadata(
            generated_at=datetime.datetime.utcnow(),
            uuid=str(uuid.uuid4()),
            generated_by=self._get_provider_metadata(),
        ),
        problems=[],
    )