def test_cve_updates(test_data_env):
    """
    Exercise feeds.process_updated_vulnerability() end to end: load an analyzed
    image with a single RPM package, insert a matching CVE (with a fixed-in and
    a vulnerable-in artifact), then re-process the same CVE id with updated
    content and verify the image's vulnerability matches are recomputed.
    """
    test_env = test_data_env
    test_env.init_feeds()

    test_user_id = "test1"
    test_img_id = "img1"
    # Minimal "analyzed" image record that the vulnerability matcher can use.
    test_image = Image(user_id=test_user_id,
                       id=test_img_id,
                       distro_name="centos",
                       distro_version="7")
    test_image.familytree_json = [test_img_id]
    test_image.layers_json = [test_img_id]
    test_image.layer_info_json = ["somelayer_here"]
    test_image.like_distro = "centos"
    test_image.state = "analyzed"
    test_image.digest = "digest1"
    test_image.anchore_type = "undefined"
    test_image.dockerfile_mode = "Guessed"
    test_image.docker_history_json = ["line1", "line2"]
    test_image.docker_data_json = {"Config": {}, "ContainerConfig": {}}
    test_image.dockerfile_contents = "FROM BLAH"

    # Package whose name/version will match the CVE's artifacts below.
    test_package = ImagePackage(
        image_user_id=test_user_id,
        image_id=test_img_id,
        name="testpackage",
        version="1.0",
        pkg_type="RPM",
    )
    test_package.src_pkg = "testpackage"
    test_package.distro_name = "centos"
    test_package.distro_version = "7"
    test_package.like_distro = "centos"
    test_package.license = "apache2"
    test_package.fullversion = "1.0"
    test_package.normalized_src_pkg = "1.0"
    test_package.release = ""
    test_package.size = 1000
    test_package.origin = "upstream"
    test_package.arch = "x86_64"
    test_package.image = test_image

    # CVE in the image's namespace (centos:7) so it is eligible to match.
    test_cve = Vulnerability(id="CVE123", namespace_name="centos:7")
    test_cve.severity = "High"
    test_cve.description = "some test cve"
    test_cve.cvss2_score = "1.0"
    test_cve.metadata_json = {}
    test_cve.cvss2_vectors = ""
    test_cve.link = "http://mitre.com/cve123"

    # Fix at 1.1: the installed 1.0 package should be flagged as vulnerable.
    test_fixedin = FixedArtifact(vulnerability_id=test_cve.id)
    test_fixedin.name = "testpackage"
    test_fixedin.version = "1.1"
    test_fixedin.version_format = "rpm"
    test_fixedin.epochless_version = "1.1"
    test_fixedin.include_later_versions = True
    test_fixedin.parent = test_cve
    test_cve.fixed_in = [test_fixedin]

    # Vulnerable-in record pinned at 0.9 (no previous versions included).
    test_vulnin = VulnerableArtifact(vulnerability_id=test_cve.id)
    test_vulnin.name = "testpackage"
    test_vulnin.version = "0.9"
    test_vulnin.epochless_version = "0.9"
    test_vulnin.namespace_name = "centos:7"
    test_vulnin.version_format = "rpm"
    test_vulnin.include_previous_versions = False
    test_vulnin.parent = test_cve
    test_cve.vulnerable_in = [test_vulnin]

    # Persist the image + package; tolerate re-runs that hit uniqueness errors.
    db = get_session()
    try:
        db.add(test_image)
        db.add(test_package)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        db.rollback()
    except Exception:
        logger.exception("Unexpected failure")
        raise

    # Insert the CVE and run the update-processing path that recomputes matches.
    db = get_session()
    try:
        db.add(test_cve)
        feeds.process_updated_vulnerability(db, test_cve)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        logger.exception("Failed!")
        db.rollback()
    finally:
        db = get_session()
        i = db.query(Image).get((test_img_id, test_user_id))
        print(("Vulns: {}".format(i.vulnerabilities())))
        db.commit()

    # Second pass: same CVE id, updated severity and a new fixed artifact, to
    # exercise the merge/update path of process_updated_vulnerability().
    test_cve2 = Vulnerability(id="CVE123", namespace_name="centos:7")
    test_cve2.severity = "Medium"
    test_cve2.description = "some test cve"
    test_cve2.cvss2_score = "1.0"
    test_cve2.metadata_json = {}
    test_cve2.cvss2_vectors = ""
    test_cve2.link = "http://mitre.com/cve123"
    fix2 = FixedArtifact(name="pkg2", version="1.2", epochless_version="1.2")
    fix2.namespace_name = "centos:7"
    fix2.vulnerability_id = test_cve2.id
    test_cve2.fixed_in = [fix2]

    db = get_session()
    try:
        # merge() reconciles the detached object with the already-persisted CVE row.
        t2 = db.merge(test_cve2)
        db.add(t2)
        feeds.process_updated_vulnerability(db, t2)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        logger.exception("Failed!")
        db.rollback()
    finally:
        db = get_session()
        i = db.query(Image).get((test_img_id, test_user_id))
        print(("Vulns: {}".format(i.vulnerabilities())))
        db.commit()
# --- Example #2 ---
    def sync_metadata(feed_client: IFeedSource, to_sync: list = None, operation_id=None) -> tuple:
        """
        Get metadata from source and sync db metadata records to that (e.g. add any new groups or feeds)
        Executes as a unit-of-work for db, so will commit result and returns the records found on upstream source.

        If a record exists in db but was not found upstream, it is not returned

        :param feed_client: client used to list feeds and feed groups from the upstream source
        :param to_sync: list of string feed names to sync metadata on
        :param operation_id: identifier included in log lines for tracing this sync operation
        :return: tuple, first element: dict of names mapped to db records post-sync only including records successfully updated by upstream, second element is a list of tuples where each tuple is (failed_feed_name, error_obj)
        """

        if not to_sync:
            return {}, []

        db = get_session()
        try:
            log.info('Syncing feed and group metadata from upstream source (operation_id={})'.format(operation_id))

            source_resp = feed_client.list_feeds()
            # to_sync is guaranteed non-empty here (early return above), so the
            # upstream feed list can be filtered unconditionally; the previous
            # `if to_sync: ... else: feeds = []` branch was dead code.
            feeds = filter(lambda x: x.name in to_sync, source_resp.feeds)

            failed = []
            source_feeds = {x.name: {'meta': x, 'groups': feed_client.list_feed_groups(x.name).groups} for x in feeds}
            log.debug('Upstream feeds available: {}'.format(source_feeds))
            db_feeds = DataFeeds._pivot_and_filter_feeds_by_config(to_sync, list(source_feeds.keys()), get_all_feeds(db))

            for feed_name, feed_api_record in source_feeds.items():
                try:
                    log.info('Syncing metadata for feed: {} (operation_id={})'.format(feed_name, operation_id))

                    api_feed = feed_api_record['meta']
                    db_feed = db_feeds.get(api_feed.name)

                    # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
                    if not db_feed:
                        log.debug('Adding new feed metadata record to db: {} (operation_id={})'.format(api_feed.name, operation_id))
                        db_feed = FeedMetadata(name=api_feed.name, description=api_feed.description, access_tier=api_feed.access_tier)
                        db.add(db_feed)
                        db.flush()
                    else:
                        log.debug('Feed metadata already in db: {} (operation_id={})'.format(api_feed.name, operation_id))

                    # Check for any update
                    db_feed.description = api_feed.description
                    db_feed.access_tier = api_feed.access_tier

                    db_groups = {x.name: x for x in db_feed.groups}
                    for api_group in feed_api_record.get('groups', []):
                        db_group = db_groups.get(api_group.name)
                        # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
                        if not db_group:
                            log.debug('Adding new feed metadata record to db: {} (operation_id={})'.format(api_group.name, operation_id))
                            db_group = FeedGroupMetadata(name=api_group.name, description=api_group.description, access_tier=api_group.access_tier, feed=db_feed)
                            db_group.last_sync = None
                            db.add(db_group)
                        else:
                            log.debug('Feed group metadata already in db: {} (operation_id={})'.format(api_group.name, operation_id))

                        db_group.access_tier = api_group.access_tier
                        db_group.description = api_group.description
                except Exception as e:
                    # A single feed failing must not abort the sync of the rest;
                    # record the failure for the caller and continue.
                    log.exception('Error syncing feed {}'.format(feed_name))
                    log.warn('Could not sync metadata for feed: {} (operation_id={})'.format(feed_name, operation_id))
                    failed.append((feed_name, e))
                finally:
                    db.flush()

            # Reload so the returned records reflect all adds/updates flushed above
            db_feeds = DataFeeds._pivot_and_filter_feeds_by_config(to_sync, list(source_feeds.keys()), get_all_feeds(db))

            db.commit()
            log.info('Metadata sync from feeds upstream source complete (operation_id={})'.format(operation_id))
            return db_feeds, failed
        except Exception as e:
            log.error('Rolling back feed metadata update due to error: {} (operation_id={})'.format(e, operation_id))
            db.rollback()
            raise
def get_image_vulnerabilities(user_id,
                              image_id,
                              force_refresh=False,
                              vendor_only=True):
    """
    Return the vulnerability listing for the specified image and load from catalog if not found and specifically asked
    to do so.


    Example json output:
    {
       "multi" : {
          "url_column_index" : 7,
          "result" : {
             "rows" : [],
             "rowcount" : 0,
             "colcount" : 8,
             "header" : [
                "CVE_ID",
                "Severity",
                "*Total_Affected",
                "Vulnerable_Package",
                "Fix_Available",
                "Fix_Images",
                "Rebuild_Images",
                "URL"
             ]
          },
          "querycommand" : "/usr/lib/python2.7/site-packages/anchore/anchore-modules/multi-queries/cve-scan.py /ebs_data/anchore/querytmp/queryimages.7026386 /ebs_data/anchore/data /ebs_data/anchore/querytmp/query.59057288 all",
          "queryparams" : "all",
          "warns" : [
             "0005b136f0fb (prom/prometheus:master) cannot perform CVE scan: no CVE data is currently available for the detected base distro type (busybox:unknown_version,busybox:v1.26.2)"
          ]
       }
    }

    :param user_id: user id of image to evaluate
    :param image_id: image id to evaluate
    :param force_refresh: if true, flush and recompute vulnerabilities rather than returning current values
    :param vendor_only: if true, filter out the vulnerabilities that vendors will explicitly not address
    :return:
    """

    # Has image?
    db = get_session()
    try:
        img = db.query(Image).get((image_id, user_id))
        vulns = []
        if not img:
            abort(404)
        else:
            if force_refresh:
                log.info('Forcing refresh of vulnerabiltiies for {}/{}'.format(
                    user_id, image_id))
                try:
                    vulns = rescan_image(img, db_session=db)
                    db.commit()
                except Exception as e:
                    log.exception(
                        'Error refreshing cve matches for image {}/{}'.format(
                            user_id, image_id))
                    db.rollback()
                    abort(
                        Response(
                            'Error refreshing vulnerability listing for image.',
                            500))

                # Fresh session after commit; refresh so relationship
                # collections reflect the rescan just performed.
                db = get_session()
                db.refresh(img)

            vulns = img.vulnerabilities()

        # Has vulnerabilities?
        warns = []
        if not vulns:
            vulns = []
            ns = DistroNamespace.for_obj(img)
            # Warn the caller when the empty result may be due to missing feed
            # data for this distro rather than a genuinely clean image.
            if not have_vulnerabilities_for(ns):
                warns = [
                    'No vulnerability data available for image distro: {}'.
                    format(ns.namespace_name)
                ]

        rows = []
        for vuln in vulns:
            # Skip the vulnerability if the vendor_only flag is set to True and the issue won't be addressed by the vendor
            if vendor_only and vuln.fix_has_no_advisory():
                continue

            # Row layout must stay aligned with TABLE_STYLE_HEADER_LIST.
            rows.append([
                vuln.vulnerability_id,
                vuln.vulnerability.severity,
                1,
                vuln.pkg_name + '-' + vuln.package.fullversion,
                str(vuln.fixed_in()),
                vuln.pkg_image_id,
                'None',  # Always empty this for now
                vuln.vulnerability.link,
                vuln.pkg_type,
                'vulnerabilities',
                vuln.vulnerability.namespace_name,
                vuln.pkg_name,
                vuln.package.fullversion,
            ])

        vuln_listing = {
            'multi': {
                'url_column_index': 7,
                'result': {
                    'header': TABLE_STYLE_HEADER_LIST,
                    'rowcount': len(rows),
                    'colcount': len(TABLE_STYLE_HEADER_LIST),
                    'rows': rows
                },
                'warns': warns
            }
        }

        # CPE-based (non-distro) matches are reported separately; failures here
        # are logged but do not fail the whole listing.
        cpe_vuln_listing = []
        try:
            all_cpe_matches = db.query(
                ImageCpe,
                CpeVulnerability).filter(ImageCpe.image_id == image_id).filter(
                    ImageCpe.name == CpeVulnerability.name).filter(
                        ImageCpe.version == CpeVulnerability.version)
            if not all_cpe_matches:
                all_cpe_matches = []

            for image_cpe, vulnerability_cpe in all_cpe_matches:
                cpe_vuln_el = {
                    'vulnerability_id': vulnerability_cpe.vulnerability_id,
                    'severity': vulnerability_cpe.severity,
                    'link': vulnerability_cpe.link,
                    'pkg_type': image_cpe.pkg_type,
                    'pkg_path': image_cpe.pkg_path,
                    'name': image_cpe.name,
                    'version': image_cpe.version,
                    'cpe': image_cpe.get_cpestring(),
                    'feed_name': vulnerability_cpe.feed_name,
                    'feed_namespace': vulnerability_cpe.namespace_name,
                }
                cpe_vuln_listing.append(cpe_vuln_el)
        except Exception as err:
            log.warn("could not fetch CPE matches - exception: " + str(err))

        report = LegacyVulnerabilityReport.from_dict(vuln_listing)
        resp = ImageVulnerabilityListing(user_id=user_id,
                                         image_id=image_id,
                                         legacy_report=report,
                                         cpe_report=cpe_vuln_listing)
        return resp.to_dict()
    except HTTPException:
        # abort() raises HTTPException; propagate it after releasing the txn.
        db.rollback()
        raise
    except Exception as e:
        log.exception(
            'Error checking image {}, {} for vulnerabiltiies. Rolling back'.
            format(user_id, image_id))
        db.rollback()
        abort(500)
    finally:
        db.close()
    def sync_metadata(
        source_feeds: Dict[
            str, Dict[str, Union[FeedAPIRecord, List[FeedAPIGroupRecord]]]
        ],
        to_sync: List[str] = None,
        operation_id: Optional[str] = None,
        groups: bool = True,
    ) -> Tuple[Dict[str, FeedMetadata], List[Tuple[str, Union[str, BaseException]]]]:
        """
        Sync db feed/group metadata records against the given upstream metadata
        (adding any feeds or groups not yet present). Runs as a single
        unit-of-work: the session is committed on success and rolled back on error.

        Records present in the db but absent upstream are omitted from the result.

        :param source_feeds: mapping containing FeedAPIRecord and FeedAPIGroupRecord
        :type source_feeds: Dict[str, Dict[str, Union[FeedAPIRecord, List[FeedAPIGroupRecord]]]]
        :param to_sync: list of string feed names to sync metadata on
        :type to_sync: List[str]
        :param operation_id: UUID4 hexadecimal string
        :type operation_id: Optional[str]
        :param groups: whether or not to sync group metadata (defaults to True, which will sync group metadata)
        :type groups: bool
        :return: tuple of (dict of feed name -> db record for records successfully
            synced from upstream, list of (failed_feed_name, error_obj) tuples)
        :rtype: Tuple[Dict[str, FeedMetadata], List[Tuple[str, Union[str, BaseException]]]
        """

        if not to_sync:
            return {}, []

        db = get_session()
        try:
            logger.info(
                f"Syncing feed and group metadata from upstream source (operation_id={operation_id})"
            )

            sync_failures = []
            feed_records = MetadataSyncUtils._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db)
            )

            for name, api_record in source_feeds.items():
                try:
                    logger.info(
                        f"Syncing metadata for feed: {name} (operation_id={operation_id})"
                    )
                    metadata_map = MetadataSyncUtils._sync_feed_metadata(
                        db, api_record, feed_records, operation_id
                    )
                    if groups:
                        MetadataSyncUtils._sync_feed_group_metadata(
                            db, api_record, metadata_map, operation_id
                        )
                except Exception as err:
                    # One feed failing should not stop the remaining feeds;
                    # record the failure for the caller and keep going.
                    logger.exception(f"Error syncing feed {name}")
                    logger.warn(
                        f"Could not sync metadata for feed: {name} (operation_id={operation_id})"
                    )
                    sync_failures.append((name, err))
                finally:
                    db.flush()

            # Re-read so the returned records reflect everything flushed above.
            feed_records = MetadataSyncUtils._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db)
            )

            db.commit()
            logger.info(
                f"Metadata sync from feeds upstream source complete (operation_id={operation_id})"
            )
            return feed_records, sync_failures
        except Exception as err:
            logger.error(
                f"Rolling back feed metadata update due to error: {err} (operation_id={operation_id})"
            )
            db.rollback()
            raise
# --- Example #5 ---
    def __init__(self, metadata=None, src=None):
        """
        Initialize the service feed, loading the persisted FeedMetadata record
        for this feed from the db whenever no metadata is supplied.
        """
        resolved = metadata
        if not resolved:
            resolved = get_session().query(FeedMetadata).get(self.__feed_name__)
        super(AnchoreServiceFeed, self).__init__(metadata=resolved, src=src)
def check_user_image_inline(user_id, image_id, tag, bundle):
    """
    Execute a policy evaluation using the info in the request body including the bundle content

    :param user_id: account id owning the image
    :param image_id: id of the image to evaluate
    :param tag: tag string the bundle's mapping rules are evaluated against
    :param bundle: policy bundle content (dict) to build and execute
    :return: dict form of a PolicyEvaluation result
    """
    db = get_session()
    try:
        # Input validation
        try:
            img_obj = db.query(Image).get((image_id, user_id))
        except Exception:
            # Narrowed from a bare `except:` so system-exiting exceptions
            # (KeyboardInterrupt, SystemExit) are not swallowed here.
            abort(Response(response='Image not found', status=404))

        if not img_obj:
            log.info(
                'Request for evaluation of image that cannot be found: user_id = {}, image_id = {}'
                .format(user_id, image_id))
            abort(Response(response='Image not found', status=404))

        # Build bundle exec.
        problems = []
        executable_bundle = None
        try:
            executable_bundle = build_bundle(bundle, for_tag=tag)
            if executable_bundle.init_errors:
                problems = executable_bundle.init_errors
        except InitializationError as e:
            log.exception(
                'Bundle construction and initialization returned errors')
            problems = e.causes

        if not problems:
            # Execute bundle
            try:
                eval_result = executable_bundle.execute(
                    img_obj, tag,
                    ExecutionContext(db_session=db, configuration={}))
            except Exception as e:
                # Python 3 exceptions have no `.message` attribute; the original
                # `e.message` raised AttributeError here and masked the real error.
                log.exception(
                    'Error executing policy bundle {} against image {} w/tag {}: {}'
                    .format(bundle['id'], image_id, tag, str(e)))
                abort(
                    Response(
                        response=
                        'Cannot execute given policy against the image due to errors executing the policy bundle: {}'
                        .format(str(e)),
                        status=500))
        else:
            # Construct a failure eval with details on the errors and mappings to send to client
            eval_result = build_empty_error_execution(img_obj,
                                                      tag,
                                                      executable_bundle,
                                                      errors=problems,
                                                      warnings=[])
            if executable_bundle and executable_bundle.mapping and len(
                    executable_bundle.mapping.mapping_rules) == 1:
                eval_result.executed_mapping = executable_bundle.mapping.mapping_rules[
                    0]

        # Assemble the API response object from the evaluation result.
        resp = PolicyEvaluation()
        resp.user_id = user_id
        resp.image_id = image_id
        resp.tag = tag
        resp.bundle = bundle
        resp.matched_mapping_rule = eval_result.executed_mapping.json(
        ) if eval_result.executed_mapping else {}
        resp.last_modified = int(time.time())
        resp.final_action = eval_result.policy_decision.final_decision
        resp.result = eval_result.as_table_json()
        resp.created_at = int(time.time())
        resp.evaluation_problems = [
            problem_from_exception(i) for i in eval_result.errors
        ]
        resp.evaluation_problems += [
            problem_from_exception(i) for i in eval_result.warnings
        ]
        if resp.evaluation_problems:
            for i in resp.evaluation_problems:
                log.warn(
                    'Returning evaluation response for image {}/{} w/tag {} and bundle {} that contains error: {}'
                    .format(user_id, image_id, tag, bundle['id'],
                            json.dumps(i.to_dict())))

        return resp.to_dict()

    except HTTPException as e:
        # abort() raises HTTPException; release the transaction and propagate.
        db.rollback()
        log.exception('Caught exception in execution: {}'.format(e))
        raise
    except Exception as e:
        db.rollback()
        log.exception('Failed processing bundle evaluation: {}'.format(e))
        abort(Response('Unexpected internal error', 500))
    finally:
        db.close()
def test_github_advisory_fixed_in(test_data_env):
    """
    Verify that a GitHub advisory (GHSA) whose FixedArtifact carries the fix
    version in fix_metadata['first_patched_version'] (with version set to the
    string 'None') matches an installed python package and that the resulting
    match reports fixed_in() == '1.2'.
    """
    test_env = test_data_env
    test_env.init_feeds()

    test_user_id = 'test1'
    test_img_id = 'img1'
    # Minimal "analyzed" image record for the matcher to operate on.
    test_image = Image(
        user_id=test_user_id, id=test_img_id,
        distro_name='centos', distro_version='7'
    )
    test_image.familytree_json = [test_img_id]
    test_image.layers_json = [test_img_id]
    test_image.layer_info_json = ['somelayer_here']
    test_image.like_distro = 'centos'
    test_image.state = 'analyzed'
    test_image.digest = 'digest1'
    test_image.anchore_type = 'undefined'
    test_image.dockerfile_mode = 'Guessed'
    test_image.docker_history_json = ['line1', 'line2']
    test_image.docker_data_json = {'Config': {}, 'ContainerConfig': {}}
    test_image.dockerfile_contents = 'FROM BLAH'

    # Python package so it falls under the github:python namespace matching.
    test_package = ImagePackage(
        image_user_id=test_user_id, image_id=test_img_id,
        name='testpackage', version='1.0', pkg_type='python'
    )
    test_package.src_pkg = 'testpackage'
    test_package.distro_name = 'centos'
    test_package.distro_version = '7'
    test_package.like_distro = 'centos'
    test_package.license = 'apache2'
    test_package.fullversion = '1.0'
    test_package.normalized_src_pkg = '1.0'
    test_package.release = ''
    test_package.size = 1000
    test_package.origin = 'upstream'
    test_package.arch = 'x86_64'
    test_package.image = test_image

    test_cve = Vulnerability(id='GHSA-rpch-cqj9-h65r', namespace_name='github:python')
    test_cve.severity = 'High'
    test_cve.description = 'some advisory ghsa'
    test_cve.link = 'http://mitre.com/cve123'

    # GHSA-style fix record: version is the literal string 'None' and the
    # actual fix version lives in fix_metadata['first_patched_version'].
    test_fixedin = FixedArtifact(vulnerability_id=test_cve.id)
    test_fixedin.name = 'testpackage'
    test_fixedin.version = 'None'
    test_fixedin.fix_metadata = {'first_patched_version': '1.2'}
    test_fixedin.version_format = 'semver'
    test_fixedin.parent = test_cve
    test_cve.fixed_in = [test_fixedin]

    # Persist image + package; tolerate re-runs hitting uniqueness errors.
    db = get_session()
    try:
        db.add(test_image)
        db.add(test_package)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        db.rollback()
    except Exception:
        logger.exception('Unexpected failure')
        raise

    db = get_session()
    # XXX This needs to be a fixture
    try:
        db.add(test_cve)
        feeds.process_updated_vulnerability(db, test_cve)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        logger.exception('Failed!')
        db.rollback()

    db = get_session()
    image_vuln = db.query(Image).get((test_img_id, test_user_id))
    # should be one vulnerability
    vulnerabilities = image_vuln.vulnerabilities()
    assert len(vulnerabilities) == 1
    img_pkg_vuln = vulnerabilities[0]
    # fixed_in() must surface the first_patched_version from fix_metadata.
    assert img_pkg_vuln.fixed_in() == '1.2'
# --- Example #8 ---
def test_regexes(large_whitelist, test_data_env_with_images_loaded):
    """
    Test regular expressions in the trigger_id part of the WL rule.

    Builds a whitelist containing exact, suffix-wildcard, and full-wildcard
    trigger_id rules, then executes the bundle twice — with and without the
    whitelist index optimization — and asserts both produce identical decisions.
    :return:
    """
    logger.info('Building executable bundle from default bundle')
    test_tag = 'docker.io/library/node:latest'

    bundle = copy.deepcopy(default_bundle)
    node_whitelist = [
        x for x in bundle['whitelists'] if x['id'] == 'wl_jessie'
    ][0]
    # Trim the whitelist to binutils entries, then add one exact rule and
    # three wildcard-pattern rules to exercise the regex matching paths.
    node_whitelist['items'] = [
        x for x in node_whitelist['items'] if 'binutils' in x['trigger_id']
    ]
    node_whitelist['items'].append({
        'gate': 'vulnerabilities',
        'trigger_id': 'CVE-2016-6515+openssh-client',
        'id': 'testinserted3'
    })
    node_whitelist['items'].append({
        'gate': 'vulnerabilities',
        'trigger_id': 'CVE-2016-6515+*',
        'id': 'test-cve-2016-6515'
    })
    node_whitelist['items'].append({
        'gate': 'vulnerabilities',
        'trigger_id': 'CVE-2017*',
        'id': 'testinserted2'
    })
    node_whitelist['items'].append({
        'gate': 'vulnerabilities',
        'trigger_id': '*binutils*',
        'id': 'testinserted1'
    })

    db = get_session()
    img_obj = db.query(Image).get(
        (test_data_env_with_images_loaded.get_images_named('node')[0][0], '0'))
    assert img_obj is not None

    # First pass: evaluate with the whitelist index optimization enabled.
    ExecutableWhitelist._use_indexes = True
    built = build_bundle(bundle, for_tag=test_tag)
    assert not built.init_errors

    logger.info('Executing with indexes')
    t = time.time()
    evaluation = built.execute(img_obj,
                               tag=test_tag,
                               context=ExecutionContext(db_session=db,
                                                        configuration={}))
    t1 = time.time() - t
    logger.info(('Took: {}'.format(t1)))
    assert evaluation is not None

    # Second pass: same bundle with indexes disabled (linear matching path).
    ExecutableWhitelist._use_indexes = False
    non_index_built = build_bundle(bundle, for_tag=test_tag)
    assert not non_index_built.init_errors
    logger.info('Executing without indexes')
    t2 = time.time()
    evaluation2 = non_index_built.execute(img_obj,
                                          tag=test_tag,
                                          context=ExecutionContext(
                                              db_session=db, configuration={}))
    t2 = time.time() - t2
    logger.info(('Took: {}'.format(t2)))
    assert evaluation2 is not None
    # Restore the class-level default so other tests are unaffected.
    ExecutableWhitelist._use_indexes = True

    # Indexed and non-indexed evaluation must yield identical decisions.
    assert evaluation.json()['bundle_decision']['policy_decisions'][0][
        'decisions'] == evaluation2.json(
        )['bundle_decision']['policy_decisions'][0]['decisions']
    logger.info(
        ('Evaluation: {}'.format(json.dumps(evaluation.json(), indent=2))))
    # The exact rule (testinserted3) should win over the wildcard variants
    # for the openssh-client match.
    open_ssl_wl_match = {
        "action": "go",
        "rule": {
            "action": "stop",
            "gate": "vulnerabilities",
            "trigger": "package",
            "params": {}
        },
        "match": {
            "message":
            "HIGH Vulnerability found in package - openssh-client (CVE-2016-6515 - https://security-tracker.debian.org/tracker/CVE-2016-6515)",
            "trigger": "package",
            "whitelisted": {
                "whitelist_id": "wl_jessie",
                "matched_rule_id": "testinserted3",
                "whitelist_name": "CVE whitelist for jessie - 12092017"
            },
            "trigger_id": "CVE-2016-6515+openssh-client"
        }
    }
    assert open_ssl_wl_match in evaluation.json(
    )['bundle_decision']['policy_decisions'][0]['decisions']
    # At least one decision must have been whitelisted by the inserted rules.
    assert len([
        x for x in evaluation.json()['bundle_decision']['policy_decisions'][0]
        ['decisions']
        if x['match'].get('whitelisted', {}).get('matched_rule_id', '') in
        ['testinserted1', 'testinserted2', 'testinserted3']
    ]) >= 1
# --- Example #9 ---
    def rescan_images_created_between(self, from_time, to_time):
        """
        If this was a vulnerability update (e.g. timestamps vuln feeds lies in that interval), then look for any images that were loaded in that interval and
        re-scan the cves for those to ensure that no ordering of transactions caused cves to be missed for an image.

        This is an alternative to a blocking approach by which image loading is blocked during feed syncs.

        :param from_time: datetime lower bound (inclusive) on Image.created_at
        :param to_time: datetime upper bound (inclusive) on Image.created_at
        :return: count of updated images
        :raises ValueError: if either timestamp is None
        """

        if from_time is None or to_time is None:
            raise ValueError('Cannot process None timestamp')

        log.info('Rescanning images loaded between {} and {}'.format(
            from_time.isoformat(), to_time.isoformat()))
        count = 0

        db = get_session()
        try:
            # it is critical that these tuples are in proper index order for the primary key of the Images object so that subsequent get() operation works
            imgs = [(x.id, x.user_id) for x in db.query(Image).filter(
                Image.created_at >= from_time, Image.created_at <= to_time)]
            log.info('Detected images: {} for rescan'.format(
                ' ,'.join([str(x) for x in imgs]) if imgs else '[]'))
        finally:
            # Read-only query; rollback simply releases the transaction.
            db.rollback()

        retry_max = 3
        for img in imgs:
            # Retry each image up to retry_max times; an image that still fails
            # after all attempts is skipped (only logged, not re-raised).
            for i in range(retry_max):
                try:
                    # New transaction for each image to get incremental progress
                    db = get_session()
                    try:
                        # If the type or ordering of 'img' tuple changes, this needs to be updated as it relies on symmetry of that tuple and the identity key of the Image entity
                        image_obj = db.query(Image).get(img)
                        if image_obj:
                            log.info(
                                'Rescanning image {} post-vuln sync'.format(
                                    img))
                            # Return value unused here; rescan_image presumably
                            # persists matches via db_session — TODO confirm.
                            vulns = rescan_image(image_obj, db_session=db)
                            count += 1
                        else:
                            log.warn(
                                'Failed to lookup image with tuple: {}'.format(
                                    str(img)))

                        db.commit()

                    finally:
                        # No-op after a successful commit; rolls back any
                        # partial work if commit was never reached.
                        db.rollback()

                    break
                except Exception as e:
                    log.exception(
                        'Caught exception updating vulnerability scan results for image {}. Waiting and retrying'
                        .format(img))
                    time.sleep(5)

        return count
# --- Example #10 ---
def get_image_vulnerabilities(user_id,
                              image_id,
                              force_refresh=False,
                              vendor_only=True):
    """
    Return the vulnerability report for one image.

    Looks the image up by its (image_id, user_id) identity key and delegates
    report generation to the configured vulnerabilities provider. Returns a
    (payload, http_code) tuple: the report json with 200 on success, an error
    response with 404 when the image is unknown, or 500 on unexpected failure.
    HTTPExceptions raised by the provider are re-raised after rollback so the
    framework can render them.

    :param user_id: user id of image to evaluate
    :param image_id: image id to evaluate
    :param force_refresh: if true, flush and recompute vulnerabilities rather than returning current values
    :param vendor_only: if true, filter out the vulnerabilities that vendors will explicitly not address
    :return: (report_json, 200) or (error_response, error_code)
    """

    session = get_session()
    try:
        image_record = session.query(Image).get((image_id, user_id))
        if image_record is None:
            return make_response_error("Image not found", in_httpcode=404), 404

        report = get_vulnerabilities_provider().get_image_vulnerabilities_json(
            image=image_record,
            vendor_only=vendor_only,
            db_session=session,
            force_refresh=force_refresh,
            use_store=True,
        )
        session.commit()
        return report, 200
    except HTTPException:
        # Framework-level errors pass through untouched after rollback.
        session.rollback()
        raise
    except Exception as e:
        log.exception(
            "Error checking image {}, {} for vulnerabiltiies. Rolling back".
            format(user_id, image_id))
        session.rollback()
        return make_response_error(e, in_httpcode=500), 500
    finally:
        session.close()
Exemple #11
0
# Fixed-artifact record: the CVE is fixed at version 1.1, and any later
# version is also treated as fixed (include_later_versions=True).
test_fixedin.epochless_version = '1.1'
test_fixedin.include_later_versions = True
test_fixedin.parent = test_cve
test_cve.fixed_in = [test_fixedin]

# Vulnerable-artifact record: version 0.9 of testpackage is affected;
# include_previous_versions=False keeps the match from extending below 0.9.
test_vulnin = VulnerableArtifact(vulnerability_id=test_cve.id)
test_vulnin.name = 'testpackage'
test_vulnin.version = '0.9'
test_vulnin.epochless_version = '0.9'
test_vulnin.namespace_name = 'centos:7'
test_vulnin.version_format = 'rpm'
test_vulnin.include_previous_versions = False
test_vulnin.parent = test_cve
test_cve.vulnerable_in = [test_vulnin]

# Persist the image and its package. An IntegrityError (records already
# present from a prior run) is tolerated via rollback; any other failure
# is logged and re-raised.
db = get_session()
try:
    db.add(test_image)
    db.add(test_package)
    db.commit()
except sqlalchemy.exc.IntegrityError as e:
    db.rollback()
except Exception as e:
    log.exception('Unexpected failure')
    raise

# Add the CVE and run the feed-update handler so image/vulnerability
# matches are computed for the newly inserted vulnerability.
db = get_session()
try:
    db.add(test_cve)
    FeedsUpdateTask.process_updated_vulnerability(db, test_cve)
    db.commit()
Exemple #12
0
def check_user_image_inline(user_id, image_id, tag, bundle):
    """
    Execute a policy evaluation using the info in the request body including the bundle content

    Flow: validate input -> look up the image -> try the evaluation cache
    (if enabled) -> build an executable bundle -> execute it (or build an
    error evaluation from init problems) -> assemble a PolicyEvaluation
    response, record metrics, best-effort save to cache, commit, return.

    :param user_id: account id that owns the image
    :param image_id: id of the image to evaluate
    :param tag: tag used for mapping-rule selection; None is replaced by a wildcard-only value
    :param bundle: policy bundle content (dict) to construct and execute
    :return: result json on success, or a (error_response, http_code) tuple on failure
    """

    timer = time.time()
    db = get_session()
    cache_mgr = None

    try:
        # Input validation
        if tag is None:
            # set tag value to a value that only matches wildcards
            tag = "*/*:*"

        try:
            img_obj = db.query(Image).get((image_id, user_id))
        # NOTE(review): bare except maps *any* query failure (including DB
        # errors) to a 404 "Image not found" — consider narrowing.
        except:
            return make_response_error("Image not found", in_httpcode=404), 404

        if not img_obj:
            log.info(
                "Request for evaluation of image that cannot be found: user_id = {}, image_id = {}"
                .format(user_id, image_id))
            return make_response_error("Image not found", in_httpcode=404), 404

        # Cache lookup path: on a hit, return the cached evaluation without
        # executing the bundle. Any cache error falls through to a full eval.
        if evaluation_cache_enabled:
            timer2 = time.time()
            try:
                try:
                    conn_timeout = (
                        ApiRequestContextProxy.get_service().configuration.get(
                            "catalog_client_conn_timeout",
                            DEFAULT_CACHE_CONN_TIMEOUT))
                    read_timeout = (
                        ApiRequestContextProxy.get_service().configuration.get(
                            "catalog_client_read_timeout",
                            DEFAULT_CACHE_READ_TIMEOUT))
                    cache_mgr = EvaluationCacheManager(img_obj, tag, bundle,
                                                       conn_timeout,
                                                       read_timeout)
                except ValueError as err:
                    # Bundle content unusable for cache keying; proceed uncached.
                    log.warn(
                        "Could not leverage cache due to error in bundle data: {}"
                        .format(err))
                    cache_mgr = None

                if cache_mgr is None:
                    log.info(
                        "Could not initialize cache manager for policy evaluation, skipping cache usage"
                    )
                else:
                    cached_result = cache_mgr.refresh()
                    if cached_result:
                        metrics.counter_inc(
                            name="anchore_policy_evaluation_cache_hits")
                        metrics.histogram_observe(
                            "anchore_policy_evaluation_cache_access_latency",
                            time.time() - timer2,
                            status="hit",
                        )
                        log.info(
                            "Returning cached result of policy evaluation for {}/{}, with tag {} and bundle {} with digest {}. Last evaluation: {}"
                            .format(
                                user_id,
                                image_id,
                                tag,
                                cache_mgr.bundle_id,
                                cache_mgr.bundle_digest,
                                cached_result.get("last_modified"),
                            ))
                        # Early return: db.commit() is skipped here; the
                        # finally clause still closes the session.
                        return cached_result
                    else:
                        metrics.counter_inc(
                            name="anchore_policy_evaluation_cache_misses")
                        metrics.histogram_observe(
                            "anchore_policy_evaluation_cache_access_latency",
                            time.time() - timer2,
                            status="miss",
                        )
                        log.info(
                            "Policy evaluation not cached, or invalid, executing evaluation for {}/{} with tag {} and bundle {} with digest {}"
                            .format(
                                user_id,
                                image_id,
                                tag,
                                cache_mgr.bundle_id,
                                cache_mgr.bundle_digest,
                            ))

            except Exception as ex:
                # Cache must never block evaluation; log and continue uncached.
                log.exception(
                    "Unexpected error operating on policy evaluation cache. Skipping use of cache."
                )

        else:
            log.info("Policy evaluation cache disabled. Executing evaluation")

        # Build bundle exec.
        problems = []
        executable_bundle = None
        try:
            # Allow deprecated gates here to support upgrade cases from old policy bundles.
            executable_bundle = build_bundle(bundle,
                                             for_tag=tag,
                                             allow_deprecated=True)
            if executable_bundle.init_errors:
                problems = executable_bundle.init_errors
        except InitializationError as e:
            log.exception(
                "Bundle construction and initialization returned errors")
            problems = e.causes

        eval_result = None
        if not problems:
            # Execute bundle
            try:
                eval_result = executable_bundle.execute(
                    img_obj, tag,
                    ExecutionContext(db_session=db, configuration={}))
            except Exception as e:
                log.exception(
                    "Error executing policy bundle {} against image {} w/tag {}: {}"
                    .format(bundle["id"], image_id, tag, e))
                return (
                    make_response_error(
                        "Internal bundle evaluation error",
                        details={
                            "message":
                            "Cannot execute given policy against the image due to errors executing the policy bundle: {}"
                            .format(e)
                        },
                        in_httpcode=500,
                    ),
                    500,
                )
        else:
            # Construct a failure eval with details on the errors and mappings to send to client
            eval_result = build_empty_error_execution(img_obj,
                                                      tag,
                                                      executable_bundle,
                                                      errors=problems,
                                                      warnings=[])
            if (executable_bundle and executable_bundle.mapping
                    and len(executable_bundle.mapping.mapping_rules) == 1):
                eval_result.executed_mapping = executable_bundle.mapping.mapping_rules[
                    0]

        # Assemble the response object from the evaluation result.
        resp = PolicyEvaluation()
        resp.user_id = user_id
        resp.image_id = image_id
        resp.tag = tag
        resp.bundle = bundle
        # False (not None) is the explicit "no match" marker for these fields.
        resp.matched_mapping_rule = (eval_result.executed_mapping.json() if
                                     eval_result.executed_mapping else False)
        resp.last_modified = int(time.time())
        resp.final_action = eval_result.bundle_decision.final_decision.name
        resp.final_action_reason = eval_result.bundle_decision.reason
        resp.matched_whitelisted_images_rule = (
            eval_result.bundle_decision.whitelisted_image.json()
            if eval_result.bundle_decision.whitelisted_image else False)
        resp.matched_blacklisted_images_rule = (
            eval_result.bundle_decision.blacklisted_image.json()
            if eval_result.bundle_decision.blacklisted_image else False)
        resp.result = eval_result.as_table_json()
        resp.created_at = int(time.time())
        resp.evaluation_problems = [
            problem_from_exception(i) for i in eval_result.errors
        ]
        resp.evaluation_problems += [
            problem_from_exception(i) for i in eval_result.warnings
        ]
        if resp.evaluation_problems:
            for i in resp.evaluation_problems:
                log.warn(
                    "Returning evaluation response for image {}/{} w/tag {} and bundle {} that contains error: {}"
                    .format(user_id, image_id, tag, bundle["id"],
                            json.dumps(i.to_json())))
            metrics.histogram_observe(
                "anchore_policy_evaluation_time_seconds",
                time.time() - timer,
                status="fail",
            )
        else:
            metrics.histogram_observe(
                "anchore_policy_evaluation_time_seconds",
                time.time() - timer,
                status="success",
            )

        result = resp.to_json()

        # Never let the cache block returning results
        try:
            if evaluation_cache_enabled and cache_mgr is not None:
                cache_mgr.save(result)
        except Exception as ex:
            log.exception(
                "Failed saving policy result in cache. Skipping and continuing."
            )

        db.commit()

        return result

    except HTTPException as e:
        db.rollback()
        log.exception("Caught exception in execution: {}".format(e))
        raise
    except Exception as e:
        db.rollback()
        log.exception("Failed processing bundle evaluation: {}".format(e))
        return (
            make_response_error(
                "Unexpected internal error",
                details={"message": str(e)},
                in_httpcode=500,
            ),
            500,
        )
    finally:
        db.close()
Exemple #13
0
    def execute(self):
        """
        Execute a load.
        Fetch from the catalog and send to loader.

        Steps: check for an existing image (return None unless force_reload,
        in which case the existing record is deleted), fetch and parse the
        analysis document, then add the new image and generate vulnerability
        matches in a single transaction.

        :return: the ImageLoad result object including the image object and its vulnerabilities or None if image already found
        """

        # Timestamps bracket the whole load for duration accounting.
        self.start_time = datetime.datetime.utcnow()
        try:
            db = get_session()
            img = db.query(Image).get((self.image_id, self.user_id))
            if img is not None:
                if not self.force_reload:
                    logger.info(
                        "Image {}/{} already found in the system. Will not re-load."
                        .format(self.user_id, self.image_id))
                    db.close()
                    return None
                else:
                    # Forced reload: drop the existing image record; deletion
                    # of the new state is committed together with the re-add below.
                    logger.info(
                        "Deleting image {}/{} and all associated resources for reload"
                        .format(self.user_id, self.image_id))
                    # for pkg_vuln in img.vulnerabilities():
                    #     db.delete(pkg_vuln)
                    db.delete(img)

            # Close the session during the data fetch.
            # db.close()

            image_obj = self._load_image_analysis()
            if not image_obj:
                logger.error("Could not load image analysis")
                raise ImageLoadError(
                    "Failed to load image: user_id = {}, image_id = {}, fetch_url = {}"
                    .format(self.user_id, self.image_id, self.fetch_url))

            db = get_session()
            try:
                logger.info("Adding image to db")
                db.add(image_obj)

                # Vulnerability matching happens in the same transaction as the
                # image insert so a failure leaves no partially-loaded image.
                with timer("Generating vulnerability matches",
                           log_level="info"):
                    get_vulnerabilities_provider().load_image(
                        image=image_obj,
                        db_session=db,
                        use_store=True,  # save results to cache
                    )

                db.commit()
            except:
                logger.exception("Error adding image to db")
                db.rollback()
                raise

            return ImageLoadResult(image_obj)
        except Exception as e:
            logger.exception("Error loading and scanning image: {}".format(
                self.image_id))
            raise
        finally:
            self.stop_time = datetime.datetime.utcnow()
Exemple #14
0
    def get_image_vulnerabilities(
        self,
        image: Image,
        db_session,
        vendor_only: bool = True,
        force_refresh: bool = False,
        cache: bool = True,
    ):
        """
        Build an ImageVulnerabilitiesReport for the given image.

        Combines two match sources: the scanner's package-level vulnerability
        matches (merged with NVD metadata) and CPE-based matches against
        NVD/vendor data. Optionally recomputes matches first (force_refresh).

        :param image: Image record to report on
        :param db_session: active db session used for lookups/refresh
        :param vendor_only: skip matches the vendor will not fix (no advisory)
        :param force_refresh: flush and recompute matches before reporting
        :param cache: accepted for interface compatibility; not read in this body
        :return: ImageVulnerabilitiesReport with all collected matches
        """
        # select the nvd class once and be done
        _nvd_cls, _cpe_cls = select_nvd_classes(db_session)

        # initialize the scanner
        scanner = self.__scanner__()

        user_id = image.user_id
        image_id = image.id

        results = []

        if force_refresh:
            log.info("Forcing refresh of vulnerabilities for {}/{}".format(
                user_id, image_id))
            try:
                scanner.flush_and_recompute_vulnerabilities(
                    image, db_session=db_session)
                db_session.commit()
            except Exception as e:
                log.exception(
                    "Error refreshing cve matches for image {}/{}".format(
                        user_id, image_id))
                db_session.rollback()
                # NOTE: returns the error payload without an http code tuple,
                # unlike some callers' (response, code) convention — verify callers.
                return make_response_error(
                    "Error refreshing vulnerability listing for image.",
                    in_httpcode=500,
                )

            # Re-acquire a session and reload the image after the flush commit.
            db_session = get_session()
            db_session.refresh(image)

        with timer("Image vulnerability primary lookup", log_level="debug"):
            vulns = scanner.get_vulnerabilities(image)

        # Has vulnerabilities?
        warns = []
        if not vulns:
            vulns = []
            ns = DistroNamespace.for_obj(image)
            if not have_vulnerabilities_for(ns):
                # NOTE(review): `warns` is built but not attached to the
                # returned report (problems=[] below) — confirm intent.
                warns = [
                    "No vulnerability data available for image distro: {}".
                    format(ns.namespace_name)
                ]

        rows = []
        with timer("Image vulnerability nvd metadata merge",
                   log_level="debug"):
            # Produces (vuln, nvd_records) pairs.
            vulns = merge_nvd_metadata_image_packages(db_session, vulns,
                                                      _nvd_cls, _cpe_cls)

        with timer("Image vulnerability output formatting", log_level="debug"):
            for vuln, nvd_records in vulns:
                fixed_artifact = vuln.fixed_artifact()

                # Skip the vulnerability if the vendor_only flag is set to True and the issue won't be addressed by the vendor
                if vendor_only and vuln.fix_has_no_advisory(
                        fixed_in=fixed_artifact):
                    continue

                nvd_scores = [
                    self._make_cvss_score(score) for nvd_record in nvd_records
                    for score in nvd_record.get_cvss_scores_nvd()
                ]

                results.append(
                    VulnerabilityMatch(
                        vulnerability=VulnerabilityModel(
                            vulnerability_id=vuln.vulnerability_id,
                            description="NA",
                            severity=vuln.vulnerability.severity,
                            link=vuln.vulnerability.link,
                            feed="vulnerabilities",
                            feed_group=vuln.vulnerability.namespace_name,
                            cvss_scores_nvd=nvd_scores,
                            cvss_scores_vendor=[],
                            created_at=vuln.vulnerability.created_at,
                            last_modified=vuln.vulnerability.updated_at,
                        ),
                        artifact=Artifact(
                            name=vuln.pkg_name,
                            version=vuln.package.fullversion,
                            pkg_type=vuln.pkg_type,
                            pkg_path=vuln.pkg_path,
                            cpe="None",
                            cpe23="None",
                        ),
                        fixes=[
                            FixedArtifact(
                                version=str(
                                    vuln.fixed_in(fixed_in=fixed_artifact)),
                                wont_fix=vuln.fix_has_no_advisory(
                                    fixed_in=fixed_artifact),
                                observed_at=fixed_artifact.fix_observed_at
                                if fixed_artifact else None,
                            )
                        ],
                        match=Match(detected_at=vuln.created_at),
                    ))

        # TODO move dedup here so api doesn't have to
        # cpe_vuln_listing = []
        try:
            with timer("Image vulnerabilities cpe matches", log_level="debug"):
                all_cpe_matches = scanner.get_cpe_vulnerabilities(
                    image, _nvd_cls, _cpe_cls)

                if not all_cpe_matches:
                    all_cpe_matches = []

                api_endpoint = self._get_api_endpoint()

                for image_cpe, vulnerability_cpe in all_cpe_matches:
                    # Synthesize a query link when the record has none.
                    link = vulnerability_cpe.parent.link
                    if not link:
                        link = "{}/query/vulnerabilities?id={}".format(
                            api_endpoint, vulnerability_cpe.vulnerability_id)

                    nvd_scores = [
                        self._make_cvss_score(score) for score in
                        vulnerability_cpe.parent.get_cvss_scores_nvd()
                    ]

                    vendor_scores = [
                        self._make_cvss_score(score) for score in
                        vulnerability_cpe.parent.get_cvss_scores_vendor()
                    ]

                    results.append(
                        VulnerabilityMatch(
                            vulnerability=VulnerabilityModel(
                                vulnerability_id=vulnerability_cpe.parent.
                                normalized_id,
                                description="NA",
                                severity=vulnerability_cpe.parent.severity,
                                link=link,
                                feed=vulnerability_cpe.feed_name,
                                feed_group=vulnerability_cpe.namespace_name,
                                cvss_scores_nvd=nvd_scores,
                                cvss_scores_vendor=vendor_scores,
                                created_at=vulnerability_cpe.parent.created_at,
                                last_modified=vulnerability_cpe.parent.
                                updated_at,
                            ),
                            artifact=Artifact(
                                name=image_cpe.name,
                                version=image_cpe.version,
                                pkg_type=image_cpe.pkg_type,
                                pkg_path=image_cpe.pkg_path,
                                cpe=image_cpe.get_cpestring(),
                                cpe23=image_cpe.get_cpe23string(),
                            ),
                            fixes=[
                                FixedArtifact(
                                    version=item,
                                    wont_fix=False,
                                    observed_at=vulnerability_cpe.created_at,
                                ) for item in vulnerability_cpe.get_fixed_in()
                            ],
                            # using vulnerability created_at to indicate the match timestamp for now
                            match=Match(
                                detected_at=vulnerability_cpe.created_at),
                        ))
        except Exception as err:
            # Best-effort: CPE match failures are logged but do not fail the report.
            log.exception("could not fetch CPE matches")

        # NOTE(review): function-level import; consider hoisting to the
        # module import block.
        import uuid

        return ImageVulnerabilitiesReport(
            account_id=image.user_id,
            image_id=image_id,
            results=results,
            metadata=VulnerabilitiesReportMetadata(
                generated_at=datetime.datetime.utcnow(),
                uuid=str(uuid.uuid4()),
                generated_by=self._get_provider_metadata(),
            ),
            problems=[],
        )
Exemple #15
0
    def _sync_group(self,
                    group_download_result: GroupDownloadResult,
                    full_flush=False,
                    local_repo=None,
                    operation_id=None):
        """
        Sync data from a single group and return the data. This operation is scoped to a transaction on the db.

        Records from the local repo are mapped and merged into the db in
        chunks of RECORDS_PER_CHUNK, committing after each chunk; the group's
        last_sync is only advanced when the download completed successfully.

        :param group_download_result: download outcome/metadata for the group to sync
        :param full_flush: if true, delete existing group data before syncing
        :param local_repo: local feed data repository to read records from
        :param operation_id: id used for log message correlation
        :return: group sync result dict (group, status, counts, timing)
        """
        total_updated_count = 0
        result = build_group_sync_result()
        result['group'] = group_download_result.group
        sync_started = None

        db = get_session()
        group_db_obj = None
        if self.metadata:
            db.refresh(self.metadata)
            group_db_obj = self.group_by_name(group_download_result.group)

        if not group_db_obj:
            logger.error(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Skipping sync for feed group {}, not found in db, record should have been synced already'
                ))
            return result

        # Download start time is normalized to tz-aware UTC for use as last_sync.
        download_started = group_download_result.started.replace(
            tzinfo=datetime.timezone.utc)
        sync_started = time.time()

        try:
            if full_flush:
                logger.info(
                    log_msg_ctx(
                        operation_id, group_download_result.feed,
                        group_download_result.group,
                        'Performing data flush prior to sync as requested'))
                self._flush_group(group_db_obj, operation_id=operation_id)

            mapper = self._load_mapper(group_db_obj)

            # Iterate thru the records and commit

            logger.info(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Syncing {} total update records into db in sets of {}'.
                    format(group_download_result.total_records,
                           self.RECORDS_PER_CHUNK)))
            count = 0
            for record in local_repo.read(group_download_result.feed,
                                          group_download_result.group, 0):
                mapped = mapper.map(record)
                merged = db.merge(mapped)
                total_updated_count += 1
                count += 1

                if count >= self.RECORDS_PER_CHUNK:
                    # Commit
                    group_db_obj.count = self.record_count(
                        group_db_obj.name, db)
                    db.commit()
                    # Re-acquire a session after the commit for the next chunk.
                    db = get_session()
                    logger.info(
                        log_msg_ctx(
                            operation_id, group_download_result.feed,
                            group_download_result.group,
                            'DB Update Progress: {}/{}'.format(
                                total_updated_count,
                                group_download_result.total_records)))
                    count = 0

            # NOTE: this `else` belongs to the `for` loop — it runs whenever
            # the loop finishes without `break` (always, here, including on an
            # empty iterator), committing the final partial chunk.
            else:
                group_db_obj.count = self.record_count(group_db_obj.name, db)
                db.commit()
                db = get_session()
                logger.info(
                    log_msg_ctx(
                        operation_id, group_download_result.feed,
                        group_download_result.group,
                        'DB Update Progress: {}/{}'.format(
                            total_updated_count,
                            group_download_result.total_records)))

            logger.debug(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Updating last sync timestamp to {}'.format(
                        download_started)))
            group_db_obj = self.group_by_name(group_download_result.group)
            # There is potential failures that could happen when downloading,
            # skipping updating the `last_sync` allows the system to retry
            if group_download_result.status == 'complete':
                group_db_obj.last_sync = download_started
            group_db_obj.count = self.record_count(group_db_obj.name, db)
            db.add(group_db_obj)
            db.commit()
        except Exception as e:
            logger.exception(
                log_msg_ctx(operation_id, group_download_result.feed,
                            group_download_result.group, 'Error syncing'))
            db.rollback()
            raise e
        finally:
            # Timing is logged even on failure; totals include download time.
            sync_time = time.time() - sync_started
            total_group_time = time.time() - download_started.timestamp()
            logger.info(
                log_msg_ctx(operation_id, group_download_result.feed,
                            group_download_result.group,
                            'Sync to db duration: {} sec'.format(sync_time)))
            logger.info(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Total sync, including download, duration: {} sec'.format(
                        total_group_time)))

        result['updated_record_count'] = total_updated_count
        result['status'] = 'success'
        result['total_time_seconds'] = total_group_time
        result['updated_image_count'] = 0
        return result
Exemple #16
0
    def execute(self):
        """
        Execute a load.
        Fetch from the catalog and send to loader.

        Same flow as the newer variant but computes vulnerability matches
        via vulnerabilities_for_image() and adds them row-by-row, returning
        them alongside the image in the ImageLoadResult.

        :return: the ImageLoad result object including the image object and its vulnerabilities or None if image already found
        """

        # Timestamps bracket the whole load for duration accounting.
        self.start_time = datetime.datetime.utcnow()
        try:
            db = get_session()
            img = db.query(Image).get((self.image_id, self.user_id))
            if img is not None:
                if not self.force_reload:
                    log.info(
                        'Image {}/{} already found in the system. Will not re-load.'
                        .format(self.user_id, self.image_id))
                    db.close()
                    return None
                else:
                    # Forced reload: remove per-package vulnerability rows
                    # explicitly, then the image record itself.
                    log.info(
                        'Deleting image {}/{} and all associated resources for reload'
                        .format(self.user_id, self.image_id))
                    for pkg_vuln in img.vulnerabilities():
                        db.delete(pkg_vuln)
                    db.delete(img)

            # Close the session during the data fetch.
            #db.close()

            image_obj = self._load_image_analysis()
            if not image_obj:
                log.error('Could not load image analysis')
                raise ImageLoadError(
                    'Failed to load image: user_id = {}, image_id = {}, fetch_url = {}'
                    .format(self.user_id, self.image_id, self.fetch_url))

            db = get_session()
            try:
                log.info("Adding image to db")
                db.add(image_obj)

                ts = time.time()
                log.info("Adding image package vulnerabilities to db")
                vulns = vulnerabilities_for_image(image_obj)
                for vuln in vulns:
                    db.add(vuln)

                db.commit()
                #log.debug("TIMER TASKS: {}".format(time.time() - ts))
            except:
                log.exception('Error adding image to db')
                db.rollback()
                raise

            return ImageLoadResult(image_obj, vulns)
        except Exception as e:
            log.exception('Error loading and scanning image: {}'.format(
                self.image_id))
            raise
        finally:
            self.stop_time = datetime.datetime.utcnow()
Exemple #17
0
    def _sync_group(self,
                    group_download_result: GroupDownloadResult,
                    full_flush=False,
                    local_repo=None,
                    operation_id=None):
        """
        Sync data from a single group and return the data. This operation is scoped to a transaction on the db.

        Records are read from the local repo, mapped to db objects, merged, and
        committed in chunks of RECORDS_PER_CHUNK to bound transaction size.

        :param group_download_result: download result describing the feed/group and where its records live
        :param full_flush: if True, delete all existing group data before loading the new records
        :param local_repo: local repository object the downloaded records are read from
        :param operation_id: identifier used only to correlate log messages
        :return: group sync result dict (see build_group_sync_result())
        """
        total_updated_count = 0
        result = build_group_sync_result()
        result['group'] = group_download_result.group
        sync_started = None

        db = get_session()
        db.refresh(self.metadata)
        group_db_obj = self.group_by_name(group_download_result.group)

        # No group record in the db means there is nothing to sync against; return the
        # default (non-success) result rather than raising.
        if not group_db_obj:
            logger.error(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Skipping group sync. Record not found in db, should have been synced already'
                ))
            return result

        sync_started = time.time()
        # Normalize the download start time to an explicit UTC-aware timestamp; it is
        # recorded as the group's last_sync on success.
        download_started = group_download_result.started.replace(
            tzinfo=datetime.timezone.utc)

        try:
            updated_images = set(
            )  # To get unique set of all images updated by this sync

            if full_flush:
                logger.info(
                    log_msg_ctx(operation_id, group_download_result.feed,
                                group_download_result.group,
                                'Performing group data flush prior to sync'))
                self._flush_group(group_db_obj, operation_id=operation_id)

            mapper = self._load_mapper(group_db_obj)

            # Iterate thru the records and commit
            count = 0
            for record in local_repo.read(group_download_result.feed,
                                          group_download_result.group, 0):
                mapped = mapper.map(record)
                updated_image_ids = self.update_vulnerability(
                    db,
                    mapped,
                    vulnerability_processing_fn=VulnerabilityFeed.
                    __vuln_processing_fn__)
                updated_images = updated_images.union(
                    set(updated_image_ids
                        ))  # Record after commit to ensure in-sync.
                merged = db.merge(mapped)
                total_updated_count += 1
                count += 1

                if len(updated_image_ids) > 0:
                    db.flush(
                    )  # Flush after every one so that mem footprint stays small if lots of images are updated

                if count >= self.RECORDS_PER_CHUNK:
                    # Commit
                    group_db_obj.count = self.record_count(
                        group_db_obj.name, db)
                    db.commit()
                    logger.info(
                        log_msg_ctx(
                            operation_id, group_download_result.feed,
                            group_download_result.group,
                            'DB Update Progress: {}/{}'.format(
                                total_updated_count,
                                group_download_result.total_records)))
                    db = get_session()
                    count = 0

            # for/else: the loop has no break, so this always runs after the last record,
            # committing the final partial chunk that never reached RECORDS_PER_CHUNK.
            else:
                group_db_obj.count = self.record_count(group_db_obj.name, db)
                db.commit()
                logger.info(
                    log_msg_ctx(
                        operation_id, group_download_result.feed,
                        group_download_result.group,
                        'DB Update Progress: {}/{}'.format(
                            total_updated_count,
                            group_download_result.total_records)))
                db = get_session()

            logger.debug(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Updating last sync timestamp to {}'.format(
                        download_started)))
            # Re-fetch the group record since the session may have been replaced above.
            group_db_obj = self.group_by_name(group_download_result.group)
            group_db_obj.last_sync = download_started
            group_db_obj.count = self.record_count(group_db_obj.name, db)
            db.add(group_db_obj)
            db.commit()
        except Exception as e:
            logger.exception(
                log_msg_ctx(operation_id, group_download_result.feed,
                            group_download_result.group,
                            'Error syncing group'))
            db.rollback()
            raise e
        finally:
            # Timing covers download start through now (total) and db sync only (sync_time).
            total_group_time = time.time() - download_started.timestamp()
            sync_time = time.time() - sync_started
            logger.info(
                log_msg_ctx(operation_id, group_download_result.feed,
                            group_download_result.group,
                            'Sync to db duration: {} sec'.format(sync_time)))
            logger.info(
                log_msg_ctx(
                    operation_id, group_download_result.feed,
                    group_download_result.group,
                    'Total sync, including download, duration: {} sec'.format(
                        total_group_time)))

        result['updated_record_count'] = total_updated_count
        result['status'] = 'success'
        result['total_time_seconds'] = total_group_time
        # NOTE(review): updated_images is collected above but never reported here --
        # the count is hard-coded to 0. Confirm whether that is intentional.
        result['updated_image_count'] = 0
        return result
Exemple #18
0
    def test_image_blacklist(self):
        """
        Exercise image blacklist/whitelist handling in bundle evaluation.

        Covers four cases against the same image object:
        1. Tag not matching the blacklist -> STOP comes from policy evaluation.
        2. Tag matching the blacklist -> STOP with reason 'blacklisted'.
        3. Empty mappings but a wildcard blacklist -> STOP, 'blacklisted'.
        4. Empty mappings but a wildcard whitelist -> GO, 'whitelisted'.
        """
        # Bundle with a stop-everything policy plus a blacklist for ':latest' tags.
        bundle = {
            'id':
            'multigate1',
            'name':
            'Multigate test1',
            'version':
            '1_0',
            'policies': [{
                'id':
                'policy1',
                'name':
                'Test policy1',
                'version':
                '1_0',
                'rules': [{
                    'gate': 'always',
                    'trigger': 'always',
                    'params': [],
                    'action': 'STOP'
                }]
            }],
            'whitelists': [],
            'mappings': [{
                'registry': '*',
                'repository': '*',
                'image': {
                    'type': 'tag',
                    'value': '*'
                },
                'policy_id': 'policy1',
                'whitelist_ids': []
            }],
            'blacklisted_images': [{
                'registry': '*',
                'repository': '*',
                'image': {
                    'type': 'tag',
                    'value': 'latest'
                }
            }],
            'whitelisted_images': []
        }

        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
        if not img_obj:
            self.load_images()

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')
        # Case 1: ':alpine' tag does not match the ':latest' blacklist, so the STOP
        # must come from the always-stop policy, not the blacklist.
        test_tag = 'docker.io/library/ruby:alpine'
        built = build_bundle(bundle, for_tag=test_tag)
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))
        self.assertIsNotNone(evaluation)
        self.assertEqual(GateAction.stop,
                         evaluation.bundle_decision.final_decision)
        self.assertEqual('policy_evaluation',
                         evaluation.bundle_decision.reason)

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')
        # Case 2: ':latest' tag hits the blacklist, so the decision reason changes.
        test_tag = 'docker.io/library/ruby:latest'
        built = build_bundle(bundle, for_tag=test_tag)
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))
        self.assertIsNotNone(evaluation)
        self.assertEqual(GateAction.stop,
                         evaluation.bundle_decision.final_decision)
        self.assertEqual('blacklisted', evaluation.bundle_decision.reason)

        # Case 3: no policies/mappings at all, only a wildcard blacklist.
        bundle = {
            'id':
            'emptytest1',
            'name':
            'Empty mapping test1',
            'version':
            '1_0',
            'policies': [],
            'whitelists': [],
            'mappings': [],
            'blacklisted_images': [{
                'registry': '*',
                'repository': '*',
                'image': {
                    'type': 'tag',
                    'value': '*'
                }
            }],
            'whitelisted_images': []
        }

        built = build_bundle(bundle, for_tag=test_tag)
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))
        self.assertIsNotNone(evaluation)
        self.assertEqual(GateAction.stop,
                         evaluation.bundle_decision.final_decision)
        self.assertEqual('blacklisted', evaluation.bundle_decision.reason)

        # Case 4: same shape but a wildcard whitelist instead -> evaluation passes.
        bundle = {
            'id':
            'emptytest1',
            'name':
            'Empty mapping test1',
            'version':
            '1_0',
            'policies': [],
            'whitelists': [],
            'mappings': [],
            'whitelisted_images': [{
                'registry': '*',
                'repository': '*',
                'image': {
                    'type': 'tag',
                    'value': '*'
                }
            }],
            'blacklisted_images': []
        }

        built = build_bundle(bundle, for_tag=test_tag)
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))
        self.assertIsNotNone(evaluation)
        self.assertEqual(GateAction.go,
                         evaluation.bundle_decision.final_decision)
        self.assertEqual('whitelisted', evaluation.bundle_decision.reason)
def get_image_vulnerabilities(user_id, image_id, force_refresh=False):
    """
    Return the vulnerability listing for the specified image and load from catalog if not found and specifically asked
    to do so.


    Example json output:
    {
       "multi" : {
          "url_column_index" : 7,
          "result" : {
             "rows" : [],
             "rowcount" : 0,
             "colcount" : 8,
             "header" : [
                "CVE_ID",
                "Severity",
                "*Total_Affected",
                "Vulnerable_Package",
                "Fix_Available",
                "Fix_Images",
                "Rebuild_Images",
                "URL"
             ]
          },
          "querycommand" : "/usr/lib/python2.7/site-packages/anchore/anchore-modules/multi-queries/cve-scan.py /ebs_data/anchore/querytmp/queryimages.7026386 /ebs_data/anchore/data /ebs_data/anchore/querytmp/query.59057288 all",
          "queryparams" : "all",
          "warns" : [
             "0005b136f0fb (prom/prometheus:master) cannot perform CVE scan: no CVE data is currently available for the detected base distro type (busybox:unknown_version,busybox:v1.26.2)"
          ]
       }
    }

    :param user_id: user id of image to evaluate
    :param image_id: image id to evaluate
    :param force_refresh: if true, flush and recompute vulnerabilities rather than returning current values
    :return: dict form of an ImageVulnerabilityListing; aborts with 404 if the image is unknown, 500 on error
    """

    # Has image?
    db = get_session()
    try:
        img = db.query(Image).get((image_id, user_id))
        vulns = []
        if not img:
            abort(404)
        else:
            if force_refresh:
                log.info('Forcing refresh of vulnerabilities for {}/{}'.format(
                    user_id, image_id))
                try:
                    # Delete all currently recorded matches, then recompute from
                    # the current feed data within the same transaction.
                    current_vulns = img.vulnerabilities()
                    log.info(
                        'Removing {} current vulnerabilities for {}/{} to rescan'
                        .format(len(current_vulns), user_id, image_id))
                    for v in current_vulns:
                        db.delete(v)

                    db.flush()
                    vulns = vulnerabilities_for_image(img)
                    log.info('Adding {} vulnerabilities from rescan to {}/{}'.
                             format(len(vulns), user_id, image_id))
                    for v in vulns:
                        db.add(v)
                    db.commit()
                except Exception:
                    log.exception(
                        'Error refreshing cve matches for image {}/{}'.format(
                            user_id, image_id))
                    db.rollback()
                    abort(
                        Response(
                            'Error refreshing vulnerability listing for image.',
                            500))

                # Re-acquire the session and reload the image so the freshly
                # committed matches are visible on the object below.
                db = get_session()
                db.refresh(img)
            else:
                vulns = img.vulnerabilities()

        # Has vulnerabilities?
        warns = []
        if not vulns:
            vulns = []
            # Warn when the image's distro has no feed coverage at all, so an
            # empty listing isn't mistaken for a clean scan.
            ns = DistroNamespace.for_obj(img)
            if not have_vulnerabilities_for(ns):
                warns = [
                    'No vulnerability data available for image distro: {}'.
                    format(ns.namespace_name)
                ]

        # Build the legacy table-style rows (one row per vulnerability match).
        rows = []
        for vuln in vulns:
            rows.append([
                vuln.vulnerability_id,
                vuln.vulnerability.severity,
                1,
                vuln.pkg_name + '-' + vuln.package.fullversion,
                str(vuln.fixed_in()),
                vuln.pkg_image_id,
                'None',  # Always empty this for now
                vuln.vulnerability.link
            ])

        vuln_listing = {
            'multi': {
                'url_column_index': 7,
                'result': {
                    'header': TABLE_STYLE_HEADER_LIST,
                    'rowcount': len(rows),
                    'colcount': len(TABLE_STYLE_HEADER_LIST),
                    'rows': rows
                },
                'warns': warns
            }
        }

        report = LegacyVulnerabilityReport.from_dict(vuln_listing)
        resp = ImageVulnerabilityListing(user_id=user_id,
                                         image_id=image_id,
                                         legacy_report=report)
        return resp.to_dict()
    except HTTPException:
        # abort() raises HTTPException; pass it through after rolling back.
        db.rollback()
        raise
    except Exception:
        log.exception(
            'Error checking image {}, {} for vulnerabilities. Rolling back'.
            format(user_id, image_id))
        db.rollback()
        abort(500)
    finally:
        db.close()
Exemple #20
0
    def testWhitelists(self):
        """
        Verify that adding a whitelist item for a previously-triggered match
        removes that match from a subsequent evaluation's effective decisions.
        """
        print('Building executable bundle from default bundle')
        test_tag = 'docker.io/library/ruby:latest'
        built = build_bundle(self.default_bundle, for_tag=test_tag)
        self.assertFalse(built.init_errors)
        print('Got: {}'.format(built))

        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
        if not img_obj:
            self.load_images()

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')
        # First pass: evaluate with the default bundle to find a match to whitelist.
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))

        self.assertIsNotNone(evaluation, 'Got None eval')
        print(json.dumps(evaluation.json(), indent=2))
        print(json.dumps(evaluation.as_table_json(), indent=2))

        # Take the first decision from the first eval and build a whitelist
        # entry targeting its gate + trigger id.
        to_whitelist = evaluation.bundle_decision.policy_decision.decisions[0]
        whitelist_bundle = copy.deepcopy(self.default_bundle)
        whitelist_bundle['whitelists'].append({
            'id':
            'generated_whitelist1',
            'name':
            'test_whitelist',
            'version':
            '1_0',
            'items': [{
                'gate': to_whitelist.match.trigger.gate_cls.__gate_name__,
                'trigger_id': to_whitelist.match.id,
                'id': 'test_whitelistitem'
            }]
        })

        whitelist_bundle['mappings'][0]['whitelist_ids'] = [
            'generated_whitelist1'
        ]
        built = build_bundle(whitelist_bundle, for_tag=test_tag)

        print('Got updated: {}'.format(built))

        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
        if not img_obj:
            self.load_images()

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')
        # Second pass: re-evaluate with the whitelist in place.
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))

        self.assertIsNotNone(evaluation, 'Got None eval')
        #print(json.dumps(evaluation.json(), indent=2))
        #print(json.dumps(evaluation.as_table_json(), indent=2))

        # The whitelisted match id must no longer appear among the
        # non-whitelisted decision match ids (whitelisted ones map to None).
        self.assertNotIn(
            to_whitelist.match.id,
            map(
                lambda x: x.match.id
                if not (hasattr(x.match, 'is_whitelisted') and x.match.
                        is_whitelisted) else None,
                evaluation.bundle_decision.policy_decision.decisions))
def test_cve_updates(test_data_env):
    """
    Exercise vulnerability update processing: load an image with a package,
    insert a CVE that matches it, then merge an updated version of the same CVE
    and confirm the image's vulnerability matches are recomputed each time.
    """
    test_env = test_data_env
    test_env.init_feeds()

    # Minimal analyzed centos:7 image fixture with one installed package.
    test_user_id = 'test1'
    test_img_id = 'img1'
    test_image = Image(user_id=test_user_id, id=test_img_id, distro_name='centos', distro_version='7')
    test_image.familytree_json = [test_img_id]
    test_image.layers_json = [test_img_id]
    test_image.layer_info_json = ['somelayer_here']
    test_image.like_distro = 'centos'
    test_image.state = 'analyzed'
    test_image.digest = 'digest1'
    test_image.anchore_type = 'undefined'
    test_image.dockerfile_mode = 'Guessed'
    test_image.docker_history_json = ['line1', 'line2']
    test_image.docker_data_json = {'Config': {}, 'ContainerConfig': {}}
    test_image.dockerfile_contents = 'FROM BLAH'

    test_package = ImagePackage(image_user_id=test_user_id, image_id=test_img_id, name='testpackage', version='1.0', pkg_type='RPM')
    test_package.src_pkg = 'testpackage'
    test_package.distro_name = 'centos'
    test_package.distro_version = '7'
    test_package.like_distro = 'centos'
    test_package.license = 'apache2'
    test_package.fullversion = '1.0'
    test_package.normalized_src_pkg = '1.0'
    test_package.release = ''
    test_package.size = 1000
    test_package.origin = 'upstream'
    test_package.arch = 'x86_64'
    test_package.image = test_image

    # CVE fixed in 1.1 -- the 1.0 package above should match as vulnerable.
    test_cve = Vulnerability(id='CVE123', namespace_name='centos:7')
    test_cve.severity = 'High'
    test_cve.description = 'some test cve'
    test_cve.cvss2_score = '1.0'
    test_cve.metadata_json = {}
    test_cve.cvss2_vectors = ''
    test_cve.link = 'http://mitre.com/cve123'

    test_fixedin = FixedArtifact(vulnerability_id=test_cve.id)
    test_fixedin.name = 'testpackage'
    test_fixedin.version = '1.1'
    test_fixedin.version_format = 'rpm'
    test_fixedin.epochless_version = '1.1'
    test_fixedin.include_later_versions = True
    test_fixedin.parent = test_cve
    test_cve.fixed_in = [test_fixedin]

    test_vulnin = VulnerableArtifact(vulnerability_id=test_cve.id)
    test_vulnin.name = 'testpackage'
    test_vulnin.version = '0.9'
    test_vulnin.epochless_version = '0.9'
    test_vulnin.namespace_name = 'centos:7'
    test_vulnin.version_format = 'rpm'
    test_vulnin.include_previous_versions = False
    test_vulnin.parent = test_cve
    test_cve.vulnerable_in = [test_vulnin]

    # Persist the image + package. IntegrityError is tolerated so the test is
    # idempotent against pre-existing fixture rows.
    db = get_session()
    try:
        db.add(test_image)
        db.add(test_package)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        db.rollback()
    except Exception:
        logger.exception('Unexpected failure')
        raise

    # Insert the CVE and run the update processor; then dump the image's
    # resulting vulnerability matches from a fresh session.
    db = get_session()
    try:
        db.add(test_cve)
        feeds.process_updated_vulnerability(db, test_cve)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        logger.exception('Failed!')
        db.rollback()
    finally:
        db = get_session()
        i = db.query(Image).get((test_img_id, test_user_id))
        print(('Vulns: {}'.format(i.vulnerabilities())))
        db.commit()

    # Updated version of the same CVE id: severity downgraded and the fix now
    # names a different package, so the prior match should be reprocessed.
    test_cve2 = Vulnerability(id='CVE123', namespace_name='centos:7')
    test_cve2.severity = 'Medium'
    test_cve2.description = 'some test cve'
    test_cve2.cvss2_score = '1.0'
    test_cve2.metadata_json = {}
    test_cve2.cvss2_vectors = ''
    test_cve2.link = 'http://mitre.com/cve123'
    fix2 = FixedArtifact(name='pkg2', version='1.2', epochless_version='1.2')
    fix2.namespace_name = 'centos:7'
    fix2.vulnerability_id = test_cve2.id
    test_cve2.fixed_in = [fix2]

    # Merge (not add) since the CVE id already exists, then reprocess and dump
    # the image's matches again.
    db = get_session()
    try:
        t2 = db.merge(test_cve2)
        db.add(t2)
        feeds.process_updated_vulnerability(db, t2)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        logger.exception('Failed!')
        db.rollback()
    finally:
        db = get_session()
        i = db.query(Image).get((test_img_id, test_user_id))
        print(('Vulns: {}'.format(i.vulnerabilities())))
        db.commit()
Exemple #22
0
    def testDeprecatedGateEvaluation(self):
        """
        Verify handling of deprecated gates: building a bundle that uses them
        raises InitializationError when allow_deprecated=False, and succeeds
        (with warnings on the evaluation) when allow_deprecated=True.
        """
        # Bundle mixing deprecated gates (PKGDIFF, ANCHORESEC) with a current one.
        bundle = {
            'id':
            'someid',
            'version':
            '1_0',
            'whitelists': [],
            'policies': [{
                'id':
                'abc',
                'name':
                'Deprecated Policy',
                'version':
                '1_0',
                'rules': [{
                    'gate': 'PKGDIFF',
                    'trigger': 'pkgadd',
                    'params': [],
                    'action': 'stop'
                }, {
                    'gate': 'always',
                    'trigger': 'always',
                    'action': 'go',
                    'params': []
                }, {
                    'gate': 'ANCHORESEC',
                    'trigger': 'VULNLOW',
                    'action': 'warn',
                    'params': []
                }]
            }],
            'mappings': [{
                'registry': '*',
                'repository': '*',
                'image': {
                    'type': 'tag',
                    'value': '*'
                },
                'name': 'Default',
                'policy_id': 'abc',
                'whitelist_ids': []
            }]
        }

        print('Building executable bundle from default bundle')
        test_tag = 'docker.io/library/ruby:latest'
        # With allow_deprecated=False the build itself must fail; the statements
        # after build_bundle inside this block are unreachable if it raises.
        with self.assertRaises(InitializationError) as ex:
            built = build_bundle(bundle,
                                 for_tag=test_tag,
                                 allow_deprecated=False)
            print('Got: {}'.format(built))

            db = get_session()
            img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
            if not img_obj:
                self.load_images()

            self.assertIsNotNone(img_obj,
                                 'Failed to get an image object to test')
            evaluation = built.execute(img_obj,
                                       tag=test_tag,
                                       context=ExecutionContext(
                                           db_session=db, configuration={}))

        # With allow_deprecated=True the same bundle builds and evaluates.
        built = build_bundle(bundle, for_tag=test_tag, allow_deprecated=True)
        print('Got: {}'.format(built))

        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
        if not img_obj:
            self.load_images()

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')
        evaluation = built.execute(img_obj,
                                   tag=test_tag,
                                   context=ExecutionContext(db_session=db,
                                                            configuration={}))

        self.assertIsNotNone(evaluation, 'Got None eval')
        print('Result: {}'.format(json.dumps(evaluation.json(), indent=2)))
        # Deprecated gates should surface as warnings on the evaluation.
        self.assertIsNotNone(evaluation.warnings)
Exemple #23
0
 def query_client(client_id):
     """Look up an OAuth2 client record by its client_id; None if not found."""
     session = get_session()
     return session.query(OAuth2Client).filter_by(client_id=client_id).first()
Exemple #24
0
    def testPolicyInitError(self):
        """
        Verify that invalid version strings in a bundle, its whitelists,
        policies, or mappings raise UnsupportedVersionError (directly, or as
        the first cause of an InitializationError).
        """
        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
        ruby_tag = 'dockerhub/library/ruby:latest'

        # Invalid top-level bundle version -> raised directly.
        with self.assertRaises(UnsupportedVersionError) as f:
            built = build_bundle({
                'id': 'someid',
                'version': 'invalid_version',
                'name': 'invalid_version',
                'whitelists': [],
                'policies': [],
                'mappings': []
            })
            built.execute(image_object=img_obj, context=None, tag=ruby_tag)

        # Invalid whitelist version -> wrapped in InitializationError.
        with self.assertRaises(InitializationError) as f:
            built = build_bundle(
                {
                    'id':
                    'someid',
                    'version':
                    '1_0',
                    'name':
                    'invalid_version',
                    'whitelists': [{
                        'id': 'whitelist1',
                        'version': 'invalid_version',
                        'name': 'bad whitelist',
                        'rules': []
                    }],
                    'policies': [{
                        'id': 'ok_policy',
                        'version': 'v1.0',
                        'name': 'bad policy',
                        'rules': []
                    }],
                    'mappings': [{
                        'registry': '*',
                        'repository': '*',
                        'image': {
                            'type': 'tag',
                            'value': '*'
                        },
                        'policy_id': 'ok_policy',
                        'whitelist_ids': ['whitelist1']
                    }]
                },
                for_tag='dockerhub/library/centos:latest')
            built.execute(image_object=img_obj,
                          context=None,
                          tag='dockerhub/library/centos:latest')
        self.assertEqual(type(f.exception.causes[0]), UnsupportedVersionError)

        # Invalid policy version -> wrapped in InitializationError.
        with self.assertRaises(InitializationError) as f:
            built = build_bundle(
                {
                    'id':
                    'someid',
                    'version':
                    '1_0',
                    'name':
                    'invalid_version',
                    'whitelists': [{
                        'id': 'whitelist1',
                        'version': '1_0',
                        'name': 'okwhitelist',
                        'items': []
                    }],
                    'policies': [{
                        'id': 'invalid_policy',
                        'version': 'invalid_version',
                        'name': 'bad policy',
                        'rules': []
                    }],
                    'mappings': [{
                        'registry': '*',
                        'repository': '*',
                        'image': {
                            'type': 'tag',
                            'value': '*'
                        },
                        'policy_id': 'invalid_policy',
                        'whitelist_ids': ['whitelist1']
                    }]
                },
                for_tag='dockerhub/library/centos:latest')
            built.execute(image_object=img_obj,
                          context=None,
                          tag='dockerhub/library/centos:latest')
        self.assertEqual(type(f.exception.causes[0]), UnsupportedVersionError)

        # Unsupported policy version ('2_0') referenced by a mapping.
        with self.assertRaises(InitializationError) as f:
            built = build_bundle({
                'id':
                'someid',
                'version':
                '1_0',
                'name':
                'invalid_version',
                'whitelists': [{
                    'id': 'whitelist1',
                    'version': '1_0',
                    'name': 'ok whitelist',
                    'items': []
                }],
                'policies': [{
                    'id': 'okpolicy',
                    'version': '2_0',
                    'name': 'ok policy',
                    'rules': []
                }],
                'mappings': [{
                    'id': 'invalid_mapping',
                    'policy_id': 'okpolicy',
                    'whitelist_ids': ['whitelist1'],
                    'registry': '*',
                    'repository': '*',
                    'image': {
                        'type': 'tag',
                        'value': '*'
                    }
                }]
            })
            built.execute(image_object=img_obj, context=None, tag=ruby_tag)
        self.assertEqual(type(f.exception.causes[0]), UnsupportedVersionError)
Exemple #25
0
    def _sync_group(self, group_obj, vulnerability_processing_fn=None):
        """
        Sync data from a single group and return the ids of images updated.
        The vulnerability_processing_fn callback is invoked for each item
        within the transaction scope.

        :param group_obj: the group object to sync
        :param vulnerability_processing_fn: optional callback passed through to update_vulnerability() for each record
        :return: list of ids of images updated during the sync (may contain duplicates)
        """
        sync_time = time.time()
        updated_images = []
        db = get_session()
        try:
            # Page through the feed data. An empty string means 'first page'
            # (sent to the service as no token); the fetch returns None when
            # there are no further pages, terminating the loop.
            next_token = ''
            while next_token is not None:
                if next_token == '':
                    next_token = None
                fetch_time = time.time()
                new_data_deduped, next_token = self._get_deduped_data(
                    group_obj,
                    since=group_obj.last_sync,
                    next_token=next_token,
                    max_pages=self.MAX_FEED_SYNC_PAGES)
                fetch_time = time.time() - fetch_time
                log.debug('Group data fetch took {} sec'.format(fetch_time))
                log.debug('Merging {} records from group {}'.format(
                    len(new_data_deduped), group_obj.name))
                db_time = time.time()
                for rec in new_data_deduped:
                    # Make any updates and changes within this single transaction scope
                    updated_image_ids = self.update_vulnerability(
                        db,
                        rec,
                        vulnerability_processing_fn=vulnerability_processing_fn
                    )
                    updated_images += updated_image_ids  # Record after commit to ensure in-sync.
                    # Flush per record to keep the session's pending-state footprint small.
                    db.flush()
                log.debug('Db merge took {} sec'.format(time.time() - db_time))

            # All pages processed: stamp the group's last_sync and commit everything.
            group_obj.last_sync = datetime.datetime.utcnow()
            db.add(group_obj)
            db.commit()
        except Exception:
            log.exception('Error syncing group: {}'.format(group_obj))
            db.rollback()
            raise
        finally:
            sync_time = time.time() - sync_time
            log.info('Syncing group took {} sec'.format(sync_time))

        return updated_images
# Example #26
# 0
    def testInvalidActions(self):
        """
        Verify that bundles containing invalid rule actions or invalid rule
        parameters fail with InitializationError instead of executing.
        """
        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))

        # A known-bad bundle fixture must fail to build/execute.
        with self.assertRaises(InitializationError) as f:
            built = build_bundle(self.test_env.get_bundle('bad_bundle1'))
            built.execute(image_object=img_obj,
                          context=None,
                          tag='dockerhub/library/ruby:latest')
            built.execute(image_object=img_obj, context=None, tag='test')

        # An unknown rule action ('HELLO') must be rejected.
        with self.assertRaises(InitializationError) as f:
            built = build_bundle({
                'id': 'someid',
                'version': '1_0',
                'name': 'invalid_actions',
                'whitelists': [{
                    'id': 'whitelist1',
                    'version': '1_0',
                    'name': 'ok whitelist',
                    'items': []
                }],
                'policies': [{
                    'id': 'okpolicy',
                    'version': '1_0',
                    'name': 'ok policy',
                    'rules': [{
                        'gate': 'ANCHORESEC',
                        'trigger': 'UNSUPPPORTEDDISTRO',
                        'action': 'HELLO',
                        'params': []
                    }]
                }],
                'mappings': [{
                    'policy_id': 'okpolicy',
                    'whitelist_ids': ['whitelist1'],
                    'registry': '*',
                    'repository': '*',
                    'image': {
                        'type': 'tag',
                        'value': '*'
                    }
                }]
            })
            built.execute(image_object=img_obj, context=None, tag=None)

        # An invalid parameter value (0.1 for MAXAGE) must be rejected.
        # Bug fix: execute the bundle that was just built (bad_param1), not
        # the stale 'built' object left over from the previous block.
        with self.assertRaises(InitializationError) as f:
            bad_param1 = build_bundle({
                'id': 'someid',
                'version': '1_0',
                'name': 'invalid_params',
                'whitelists': [{
                    'id': 'whitelist1',
                    'version': '1_0',
                    'name': 'ok whitelist',
                    'items': []
                }],
                'policies': [{
                    'id': 'okpolicy',
                    'version': '1_0',
                    'name': 'ok policy',
                    'rules': [{
                        'gate': 'ANCHORESEC',
                        'trigger': 'FEEDOUTOFDATE',
                        'action': 'GO',
                        'params': [{
                            'name': 'MAXAGE',
                            'value': 0.1
                        }]
                    }]
                }],
                'mappings': [{
                    'policy_id': 'okpolicy',
                    'whitelist_ids': ['whitelist1'],
                    'registry': '*',
                    'repository': '*',
                    'image': {
                        'type': 'tag',
                        'value': '*'
                    }
                }]
            })
            bad_param1.execute(image_object=img_obj, context=None, tag=None)

        # An unknown parameter name ('MAXIMUS_AGIMUS') must be rejected.
        # Bug fix: execute bad_param2, not the stale 'built' object.
        with self.assertRaises(InitializationError) as f:
            bad_param2 = build_bundle({
                'id': 'someid',
                'version': '1_0',
                'name': 'invalid_params',
                'whitelists': [{
                    'id': 'whitelist1',
                    'version': '1_0',
                    'name': 'ok whitelist',
                    'items': []
                }],
                'policies': [{
                    'id': 'okpolicy',
                    'version': '1_0',
                    'name': 'ok policy',
                    'rules': [{
                        'gate': 'ANCHORESEC',
                        'trigger': 'FEEDOUTOFDATE',
                        'action': 'GO',
                        'params': [{
                            'name': 'MAXIMUS_AGIMUS',
                            'value': 10
                        }]
                    }]
                }],
                'mappings': [{
                    'policy_id': 'okpolicy',
                    'whitelist_ids': ['whitelist1'],
                    'registry': '*',
                    'repository': '*',
                    'image': {
                        'type': 'tag',
                        'value': '*'
                    }
                }]
            })
            bad_param2.execute(image_object=img_obj, context=None, tag=None)
def check_user_image_inline(user_id, image_id, tag, bundle):
    """
    Execute a policy evaluation using the info in the request body including the bundle content

    :param user_id: account id that owns the image record
    :param image_id: id of the image to evaluate
    :param tag: tag string for mapping evaluation; None is converted to a wildcard-only value
    :param bundle: the policy bundle document (dict) to build and execute
    :return: dict form of a PolicyEvaluation response
    """

    timer = time.time()
    db = get_session()
    try:
        # Input validation
        if tag is None:
            # set tag value to a value that only matches wildcards
            tag = '*/*:*'

        try:
            img_obj = db.query(Image).get((image_id, user_id))
        except Exception:
            # Narrowed from a bare 'except:'; any lookup failure is surfaced as not-found.
            abort(Response(response='Image not found', status=404))

        if not img_obj:
            log.info(
                'Request for evaluation of image that cannot be found: user_id = {}, image_id = {}'
                .format(user_id, image_id))
            abort(Response(response='Image not found', status=404))

        # Build bundle exec.
        problems = []
        executable_bundle = None
        try:
            # Allow deprecated gates here to support upgrade cases from old policy bundles.
            executable_bundle = build_bundle(bundle,
                                             for_tag=tag,
                                             allow_deprecated=True)
            if executable_bundle.init_errors:
                problems = executable_bundle.init_errors
        except InitializationError as e:
            log.exception(
                'Bundle construction and initialization returned errors')
            problems = e.causes

        eval_result = None
        if not problems:
            # Execute bundle
            try:
                eval_result = executable_bundle.execute(
                    img_obj, tag,
                    ExecutionContext(db_session=db, configuration={}))
            except Exception as e:
                # Use str(e), not e.message: exceptions have no 'message'
                # attribute in Python 3, which would raise AttributeError
                # here and mask the original failure.
                log.exception(
                    'Error executing policy bundle {} against image {} w/tag {}: {}'
                    .format(bundle['id'], image_id, tag, str(e)))
                abort(
                    Response(
                        response=
                        'Cannot execute given policy against the image due to errors executing the policy bundle: {}'
                        .format(str(e)),
                        status=500))
        else:
            # Construct a failure eval with details on the errors and mappings to send to client
            eval_result = build_empty_error_execution(img_obj,
                                                      tag,
                                                      executable_bundle,
                                                      errors=problems,
                                                      warnings=[])
            if executable_bundle and executable_bundle.mapping and len(
                    executable_bundle.mapping.mapping_rules) == 1:
                eval_result.executed_mapping = executable_bundle.mapping.mapping_rules[
                    0]

        # Assemble the API response object from the evaluation result.
        resp = PolicyEvaluation()
        resp.user_id = user_id
        resp.image_id = image_id
        resp.tag = tag
        resp.bundle = bundle
        resp.matched_mapping_rule = eval_result.executed_mapping.json(
        ) if eval_result.executed_mapping else False
        resp.last_modified = int(time.time())
        resp.final_action = eval_result.bundle_decision.final_decision.name
        resp.final_action_reason = eval_result.bundle_decision.reason
        resp.matched_whitelisted_images_rule = eval_result.bundle_decision.whitelisted_image.json(
        ) if eval_result.bundle_decision.whitelisted_image else False
        resp.matched_blacklisted_images_rule = eval_result.bundle_decision.blacklisted_image.json(
        ) if eval_result.bundle_decision.blacklisted_image else False
        resp.result = eval_result.as_table_json()
        resp.created_at = int(time.time())
        resp.evaluation_problems = [
            problem_from_exception(i) for i in eval_result.errors
        ]
        resp.evaluation_problems += [
            problem_from_exception(i) for i in eval_result.warnings
        ]
        if resp.evaluation_problems:
            for i in resp.evaluation_problems:
                # log.warning: 'warn' is a deprecated alias in the logging module.
                log.warning(
                    'Returning evaluation response for image {}/{} w/tag {} and bundle {} that contains error: {}'
                    .format(user_id, image_id, tag, bundle['id'],
                            json.dumps(i.to_dict())))
            anchore_engine.subsys.metrics.histogram_observe(
                'anchore_policy_evaluation_time_seconds',
                time.time() - timer,
                status="fail")
        else:
            anchore_engine.subsys.metrics.histogram_observe(
                'anchore_policy_evaluation_time_seconds',
                time.time() - timer,
                status="success")

        return resp.to_dict()

    except HTTPException as e:
        db.rollback()
        log.exception('Caught exception in execution: {}'.format(e))
        raise
    except Exception as e:
        db.rollback()
        log.exception('Failed processing bundle evaluation: {}'.format(e))
        abort(Response('Unexpected internal error', 500))
    finally:
        db.close()
# Example #28
# 0
    def testDuplicateRuleEvaluation(self):
        """
        Build a bundle whose policy contains multiple rules on the same
        gate/trigger pair and confirm it initializes and evaluates cleanly.
        """
        print('Building executable bundle from default bundle')
        test_tag = 'docker.io/library/ruby:latest'

        # Two duplicate FROMSCRATCH rules (differing only in action), plus
        # three DIRECTIVECHECK rules with distinct parameter sets.
        duplicate_rules = [
            {'gate': 'DOCKERFILECHECK', 'trigger': 'FROMSCRATCH',
             'params': [], 'action': 'GO'},
            {'gate': 'DOCKERFILECHECK', 'trigger': 'FROMSCRATCH',
             'params': [], 'action': 'STOP'},
            {'action': 'stop',
             'gate': 'DOCKERFILECHECK',
             'trigger': 'DIRECTIVECHECK',
             'params': [{'name': 'DIRECTIVES', 'value': 'RUN'},
                        {'name': 'CHECK', 'value': 'exists'}]},
            {'action': 'STOP',
             'gate': 'DOCKERFILECHECK',
             'trigger': 'DIRECTIVECHECK',
             'params': [{'name': 'DIRECTIVES', 'value': 'USER'},
                        {'name': 'CHECK', 'value': 'not_exists'}]},
            {'action': 'STOP',
             'gate': 'DOCKERFILECHECK',
             'trigger': 'DIRECTIVECHECK',
             'params': [{'name': 'DIRECTIVES', 'value': 'RUN'},
                        {'name': 'CHECK', 'value': '='},
                        {'check_value': 'yum update -y'}]},
        ]
        # Re-shape: the last rule's params are three dicts above for layout;
        # flatten back to the exact structure expected by the builder.
        duplicate_rules[-1]['params'] = [
            {'name': 'DIRECTIVES', 'value': 'RUN'},
            {'name': 'CHECK', 'value': '=', 'check_value': 'yum update -y'},
        ]

        multi_gate_bundle = {
            'id': 'multigate1',
            'name': 'Multigate test1',
            'version': '1_0',
            'policies': [{
                'id': 'policy1',
                'name': 'Test policy1',
                'version': '1_0',
                'rules': duplicate_rules,
            }],
            'whitelists': [],
            'mappings': [{
                'registry': '*',
                'repository': '*',
                'image': {'type': 'tag', 'value': '*'},
                'policy_id': 'policy1',
                'whitelist_ids': [],
            }],
        }

        built = build_bundle(multi_gate_bundle, for_tag=test_tag)
        self.assertFalse(built.init_errors)
        print('Got: {}'.format(built))

        db = get_session()
        img_obj = db.query(Image).get((self.test_image_ids['ruby'], '0'))
        if not img_obj:
            self.load_images()

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')
        evaluation = built.execute(
            img_obj,
            tag=test_tag,
            context=ExecutionContext(db_session=db, configuration={}))

        self.assertIsNotNone(evaluation, 'Got None eval')
        print(json.dumps(evaluation.json(), indent=2))
        print(json.dumps(evaluation.as_table_json(), indent=2))
    def testRegexes(self):
        """
        Test regular expressions in the trigger_id part of the WL rule
        :return:
        """
        print('Building executable bundle from default bundle')
        test_tag = 'docker.io/library/node:latest'

        # Start from the default bundle and trim the jessie whitelist down to
        # binutils entries, then add wildcard-bearing trigger_id rules.
        bundle = copy.deepcopy(self.default_bundle)
        node_whitelist = [
            wl for wl in bundle['whitelists'] if wl['id'] == 'wl_jessie'
        ][0]
        node_whitelist['items'] = [
            item for item in node_whitelist['items']
            if 'binutils' in item['trigger_id']
        ]
        node_whitelist['items'].extend([
            {'gate': 'ANCHORESEC',
             'trigger_id': 'CVE-2016-6515+openssh-client',
             'id': 'testinserted3'},
            {'gate': 'ANCHORESEC',
             'trigger_id': 'CVE-2016-6515+*',
             'id': 'test-cve-2016-6515'},
            {'gate': 'ANCHORESEC',
             'trigger_id': 'CVE-2017*',
             'id': 'testinserted2'},
            {'gate': 'ANCHORESEC',
             'trigger_id': '*binutils*',
             'id': 'testinserted1'},
        ])

        db = get_session()
        img_obj = db.query(Image).get(
            (self.test_env.get_images_named('node')[0][0], '0'))
        if not img_obj:
            self.load_images()

        self.assertIsNotNone(img_obj, 'Failed to get an image object to test')

        # First pass: whitelist index lookups enabled.
        ExecutableWhitelist._use_indexes = True
        indexed_bundle = build_bundle(bundle, for_tag=test_tag)
        self.assertFalse(indexed_bundle.init_errors)

        print('Executing with indexes')
        start = time.time()
        indexed_eval = indexed_bundle.execute(
            img_obj,
            tag=test_tag,
            context=ExecutionContext(db_session=db, configuration={}))
        elapsed_indexed = time.time() - start
        print('Took: {}'.format(elapsed_indexed))
        self.assertIsNotNone(indexed_eval, 'Got None eval')

        # Second pass: indexes disabled; results must match the indexed run.
        ExecutableWhitelist._use_indexes = False
        plain_bundle = build_bundle(bundle, for_tag=test_tag)
        self.assertFalse(plain_bundle.init_errors)
        print('Executing without indexes')
        start = time.time()
        plain_eval = plain_bundle.execute(
            img_obj,
            tag=test_tag,
            context=ExecutionContext(db_session=db, configuration={}))
        elapsed_plain = time.time() - start
        print('Took: {}'.format(elapsed_plain))
        self.assertIsNotNone(plain_eval, 'Got None eval')
        ExecutableWhitelist._use_indexes = True

        self.assertListEqual(
            indexed_eval.json()['bundle_decision']['policy_decisions'][0]
            ['decisions'],
            plain_eval.json()['bundle_decision']['policy_decisions'][0]
            ['decisions'])
        print('Evaluation: {}'.format(json.dumps(indexed_eval.json(),
                                                 indent=2)))

        # Exact-match whitelist rule (testinserted3) must have matched the
        # openssh-client finding.
        open_ssl_wl_match = {
            'action': 'go',
            'rule': {
                'action': 'stop',
                'gate': 'ANCHORESEC',
                'trigger': 'VULNHIGH',
                'params': {},
            },
            'match': {
                'message':
                'HIGH Vulnerability found in package - openssh-client (CVE-2016-6515 - https://security-tracker.debian.org/tracker/CVE-2016-6515)',
                'trigger': 'VULNHIGH',
                'whitelisted': {
                    'whitelist_id': 'wl_jessie',
                    'matched_rule_id': 'testinserted3',
                    'whitelist_name': 'CVE whitelist for jessie - 12092017',
                },
                'trigger_id': 'CVE-2016-6515+openssh-client',
            },
        }
        self.assertIn(
            open_ssl_wl_match,
            indexed_eval.json()['bundle_decision']['policy_decisions'][0]
            ['decisions'])
        # At least one decision must have been whitelisted by an inserted rule.
        inserted_ids = ['testinserted1', 'testinserted2', 'testinserted3']
        matched = [
            d for d in indexed_eval.json()['bundle_decision']
            ['policy_decisions'][0]['decisions']
            if d['match'].get('whitelisted', {}).get('matched_rule_id',
                                                     '') in inserted_ids
        ]
        self.assertGreaterEqual(len(matched), 1)
def test_github_advisory_fixed_in(test_data_env):
    """
    A GitHub advisory stores its fix version in
    fix_metadata['first_patched_version']; verify the matched image package
    vulnerability reports that version via fixed_in().
    """
    env = test_data_env
    env.init_feeds()

    user_id = "test1"
    image_id = "img1"

    # Analyzed image fixture the package will be attached to.
    img = Image(user_id=user_id,
                id=image_id,
                distro_name="centos",
                distro_version="7")
    img.familytree_json = [image_id]
    img.layers_json = [image_id]
    img.layer_info_json = ["somelayer_here"]
    img.like_distro = "centos"
    img.state = "analyzed"
    img.digest = "digest1"
    img.anchore_type = "undefined"
    img.dockerfile_mode = "Guessed"
    img.docker_history_json = ["line1", "line2"]
    img.docker_data_json = {"Config": {}, "ContainerConfig": {}}
    img.dockerfile_contents = "FROM BLAH"

    # Python package at version 1.0, which the advisory below should match.
    pkg = ImagePackage(
        image_user_id=user_id,
        image_id=image_id,
        name="testpackage",
        version="1.0",
        pkg_type="python",
    )
    pkg.src_pkg = "testpackage"
    pkg.distro_name = "centos"
    pkg.distro_version = "7"
    pkg.like_distro = "centos"
    pkg.license = "apache2"
    pkg.fullversion = "1.0"
    pkg.normalized_src_pkg = "1.0"
    pkg.release = ""
    pkg.size = 1000
    pkg.origin = "upstream"
    pkg.arch = "x86_64"
    pkg.image = img

    # GHSA record whose fix version lives only in fix_metadata.
    ghsa = Vulnerability(id="GHSA-rpch-cqj9-h65r",
                         namespace_name="github:python")
    ghsa.severity = "High"
    ghsa.description = "some advisory ghsa"
    ghsa.link = "http://mitre.com/cve123"

    fix_record = FixedArtifact(vulnerability_id=ghsa.id)
    fix_record.name = "testpackage"
    fix_record.version = "None"
    fix_record.fix_metadata = {"first_patched_version": "1.2"}
    fix_record.version_format = "semver"
    fix_record.parent = ghsa
    ghsa.fixed_in = [fix_record]

    db = get_session()
    try:
        db.add(img)
        db.add(pkg)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        # Fixture rows may already exist from a prior run; that's fine.
        db.rollback()
    except Exception:
        logger.exception("Unexpected failure")
        raise

    db = get_session()
    # XXX This needs to be a fixture
    try:
        db.add(ghsa)
        feeds.process_updated_vulnerability(db, ghsa)
        db.commit()
    except sqlalchemy.exc.IntegrityError:
        logger.exception("Failed!")
        db.rollback()

    db = get_session()
    image_record = db.query(Image).get((image_id, user_id))
    # should be one vulnerability
    found = image_record.vulnerabilities()
    assert len(found) == 1
    assert found[0].fixed_in() == "1.2"