Example No. 1
    def evaluate(self, image_obj, context):
        feeds = DataFeeds.instance()

        # Map to a namespace
        ns = DistroNamespace.for_obj(image_obj)

        oldest_update = None
        if ns:
            vulnerability_feed = feeds.vulnerabilities
            for namespace_name in ns.like_namespace_names:
                # Look up the feed group matching this namespace
                groups = vulnerability_feed.group_by_name(namespace_name)
                if groups:
                    # Group metadata exists; last_sync may still be None if no data has synced yet
                    oldest_update = groups[0].last_sync
                    break

        maxage = self.eval_params.get('MAXAGE')
        if maxage:
            try:
                if oldest_update is not None:
                    oldest_update = calendar.timegm(oldest_update.timetuple())
                    mintime = time.time() - int(maxage) * 86400  # MAXAGE is in days
                    if oldest_update < mintime:
                        self._fire(msg="FEEDOUTOFDATE The vulnerability feed for this image distro is older than MAXAGE ("+str(maxage)+") days")
                else:
                    # No sync timestamp recorded at all; treat the feed as out of date
                    self._fire(msg="FEEDOUTOFDATE The vulnerability feed for this image distro is older than MAXAGE (" + str(maxage) + ") days")
            except Exception as err:
                self._fire(msg="FEEDOUTOFDATE Cannot perform data feed up-to-date check - message from server: " + str(err))
Example No. 2
    def execute(self):
        log.info('Starting feed update')

        try:
            f = DataFeeds.instance()
            updated = []
            updated_dict = f.sync(to_sync=self.feeds)
            for g in updated_dict:
                updated += updated_dict[g]

            log.info('Feed sync complete')
            return updated
        except Exception:
            log.exception('Failure refreshing and syncing feeds')
            raise
        finally:
            end_session()
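
The accumulation loop over `updated_dict` just flattens a {group: [images]} mapping; assuming that shape, an equivalent one-liner is:

updated = [img for imgs in updated_dict.values() for img in imgs]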
Example No. 3
def test_gems_full_data(test_data_env):
    count = 0
    c = DataFeeds.instance().vulnerabilities.source
    for v in c.get_feed_group_data('packages', 'gem'):
        r = gem_mapper.map(v)
        _gem_validator(r)
        count += 1
    assert count > 0, 'Expected at least one gem record to validate'
Example No. 4
    def test_full_data(self):
        c = DataFeeds.instance().vulnerabilities.source
        for g in c.list_feed_groups('vulnerabilities'):
            print('Group: {}'.format(g.name))
            for v in c.get_feed_group_data('vulnerabilities', g.name):
                r = self.mapper.map(v)
                self.assertTrue(TestVulnerabilityMapping._vuln_validator(r), 'Failed validation on: {}'.format(v))
Example No. 5
    def execute(self):
        log.info('Starting feed update')

        # Feed syncs update images with any new CVEs pulled in by the sync. Any images loaded while
        # the sync is in progress must be re-scanned afterwards, since transaction ordering can leave
        # them loaded with pre-sync data yet excluded from the sync's own update pass.

        start_time = datetime.datetime.utcnow()
        try:
            f = DataFeeds.instance()

            f.vuln_fn = FeedsUpdateTask.process_updated_vulnerability
            f.vuln_flush_fn = FeedsUpdateTask.flush_vulnerability_matches

            updated_dict = f.sync(to_sync=self.feeds,
                                  full_flush=self.full_flush)

            log.info('Feed sync complete. Results = {}'.format(updated_dict))
            return updated_dict
        except Exception:
            log.exception('Failure refreshing and syncing feeds')
            raise
        finally:
            end_time = datetime.datetime.utcnow()
            try:
                self.rescan_images_created_between(from_time=start_time,
                                                   to_time=end_time)
            except Exception:
                log.exception(
                    'Unexpected exception rescanning vulns for images added during the feed sync'
                )
                raise
            finally:
                end_session()
Example No. 6
def test_vuln_full_data(test_data_env):
    c = DataFeeds.instance().vulnerabilities.source
    for g in c.list_feed_groups('vulnerabilities'):
        print('Group: {}'.format(g.name))
        for v in c.get_feed_group_data('vulnerabilities', g.name):
            r = vuln_mapper.map(v)
            assert vuln_validator(r), 'Failed validation on: {}'.format(v)
Example No. 7
    def execute(self):
        log.info('Starting feed update')

        # Feed syncs update images with any new CVEs pulled in by the sync. Any images loaded while
        # the sync is in progress must be re-scanned afterwards, since transaction ordering can leave
        # them loaded with pre-sync data yet excluded from the sync's own update pass.

        start_time = datetime.datetime.utcnow()
        try:
            f = DataFeeds.instance()
            updated = []

            f.vuln_fn = FeedsUpdateTask.process_updated_vulnerability
            f.vuln_flush_fn = FeedsUpdateTask.flush_vulnerability_matches

            updated_dict = f.sync(to_sync=self.feeds, full_flush=self.full_flush)

            # Response is dict with feed name and dict for each group mapped to list of images updated
            log.info('Updated: {}'.format(updated_dict))
            for feed in updated_dict:
                for updated_imgs in list(updated_dict[feed].values()):
                    updated += updated_imgs

            log.info('Feed sync complete')
            return updated
        except Exception:
            log.exception('Failure refreshing and syncing feeds')
            raise
        finally:
            end_time = datetime.datetime.utcnow()
            self.rescan_images_created_between(from_time=start_time, to_time=end_time)
            end_session()
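
The start_time/end_time bracketing is the point of this variant: images created while the sync transaction was open may have been matched against pre-sync data and need a rescan. A self-contained sketch of that window filter, using a hypothetical Image record:

import collections
import datetime

Image = collections.namedtuple('Image', ['id', 'created_at'])  # hypothetical record

def images_created_between(images, from_time, to_time):
    """Select images whose created_at falls inside the sync window."""
    return [i for i in images if from_time <= i.created_at <= to_time]

now = datetime.datetime.utcnow()
during_sync = Image('img-1', now - datetime.timedelta(minutes=5))
before_sync = Image('img-2', now - datetime.timedelta(hours=2))
window_start = now - datetime.timedelta(hours=1)

assert images_created_between([during_sync, before_sync], window_start, now) == [during_sync]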
Example No. 8
    def test_full_data(self):
        count = 1
        c = DataFeeds.instance().vulnerabilities.source
        for v in c.get_feed_group_data('packages', 'gem'):
            r = self.mapper.map(v)
            self.assertTrue(TestGemMapping._gem_validator(r), 'Failed validation on #{}: {}'.format(count, v))
            count += 1
Example No. 9
    def test_group_lookups(self):
        df = DataFeeds.instance()
        df.vulnerabilities.refresh_groups()
        bad = df.vulnerabilities.group_by_name('not_a_real_Group')
        self.assertFalse(bad, 'Found non-existent group')
        self.assertIsNotNone(df.vulnerabilities.group_by_name('alpine:3.3'),
                             'Should have found group alpine:3.3')
Example No. 10
    def evaluate(self, image_obj, context):
        try:
            feed_meta = DataFeeds.instance().packages.group_by_name(FEED_KEY)
            if feed_meta and feed_meta[0].last_sync:
                return
        except Exception:
            log.exception('Error determining feed presence for npms. Defaulting to firing trigger')

        self._fire()
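
This trigger fails open: any doubt about feed state fires it. A minimal sketch of the presence predicate it relies on, assuming the same feed_meta list shape:

def feed_synced(feed_meta):
    """Fail-open presence check: True only when metadata exists and records a last_sync."""
    try:
        return bool(feed_meta and feed_meta[0].last_sync)
    except Exception:
        return False  # unknown state counts as 'not synced', so the caller fires the trigger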
Example No. 11
def sync_feeds(test_env, up_to=None):
    df = DataFeeds.instance()
    if up_to:
        test_env.set_max_feed_time(up_to)

    logger.info('Syncing vuln')
    df.vulnerabilities.sync(item_processing_fn=FeedsUpdateTask.process_updated_vulnerability)
    logger.info('Syncing packages')
    df.packages.sync()
    logger.info('Sync complete')
Example No. 12
    def execute(self):
        db = get_session()
        try:
            count = db.query(ImagePackageVulnerability).delete()
            log.info('Deleted {} vulnerability match records in flush'.format(count))
            f = DataFeeds.instance()
            f.flush()
            db.commit()
        except Exception:
            log.exception('Error executing feeds flush task')
            db.rollback()  # leave the session clean rather than holding an open failed transaction
            raise
Example No. 13
    def test_bulk_vuln_sync(self):
        with session_scope() as db:
            vcount = db.query(Vulnerability).count()
            log.info('Starting with {} vuln records'.format(vcount))
            self.assertEqual(vcount, 0, 'Not starting with empty table')

        df = DataFeeds.instance()
        t = time.time()
        df.vulnerabilities.bulk_sync()
        t = time.time() - t
        log.info('Done with vulnerabilities. Took: {} sec'.format(t))
        with session_scope() as db:
            log.info('Has {} vuln records'.format(db.query(Vulnerability).count()))
Example No. 14
    def test_vuln_sync(self):
        with session_scope() as db:
            vcount = db.query(Vulnerability).count()

        log.info('Starting with {} vuln records'.format(vcount))
        self.assertEqual(vcount, 0, 'Not starting with empty table')

        df = DataFeeds.instance()
        log.info('Syncing vulnerabilities')
        t = time.time()
        df.vulnerabilities.sync(group='alpine:3.3')
        t = time.time() - t
        log.info('Done with vulnerabilities. Took: {} sec'.format(t))
        with session_scope() as db:
            log.info('Has {} vuln records'.format(db.query(Vulnerability).count()))
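
The session_scope() used throughout these tests is presumably the standard SQLAlchemy transactional context manager; a minimal sketch of that pattern, assuming a Session factory configured elsewhere:

from contextlib import contextmanager

@contextmanager
def session_scope():
    """Provide a transactional scope around a series of operations."""
    session = Session()  # assumption: a configured sqlalchemy.orm.sessionmaker
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()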
Example No. 15
    def execute(self):
        log.info('Starting initial feed sync')
        f = DataFeeds.instance()

        try:
            updated = []
            updated_dict = f.bulk_sync(to_sync=self.feed_list)
            for g in updated_dict:
                updated += updated_dict[g]

            log.info('Initial feed sync complete')
            return updated
        except Exception:
            log.exception('Failure refreshing and syncing feeds')
            raise
        finally:
            end_session()
Example No. 16
    def test_bulk_package_sync(self):
        with session_scope() as db:
            ncount = db.query(NpmMetadata).count()
            gcount = db.query(GemMetadata).count()
        self.assertEqual(ncount, 0, 'Not starting with empty table')
        self.assertEqual(gcount, 0, 'Not starting with empty table')

        df = DataFeeds.instance()
        t = time.time()
        df.packages.bulk_sync()
        t = time.time() - t
        log.info('Done with bulk package sync. Took: {} sec'.format(t))
        with session_scope() as db:
            ncount = db.query(NpmMetadata).count()
            gcount = db.query(GemMetadata).count()

        log.info('Has {} npm records'.format(ncount))
        log.info('Has {} gem records'.format(gcount))
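
The manual t = time.time() ... time.time() - t bookkeeping recurs across several of these tests; a small context manager would factor it out. A sketch (the timed helper is hypothetical, not part of the codebase):

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed(label):
    """Log how long the wrapped block took."""
    start = time.time()
    try:
        yield
    finally:
        logger.info('%s took: %.2f sec', label, time.time() - start)

# Usage:
#   with timed('bulk package sync'):
#       df.packages.bulk_sync()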
Example No. 17
def test_package_sync(test_data_env):
    with session_scope() as db:
        ncount = db.query(NpmMetadata).count()
        gcount = db.query(GemMetadata).count()
    assert ncount == 0, 'Not starting with empty table'
    assert gcount == 0, 'Not starting with empty table'

    df = DataFeeds.instance()
    logger.info('Syncing packages')
    t = time.time()
    df.packages.sync()
    t = time.time() - t
    logger.info('Done with packages. Took: {} sec'.format(t))
    with session_scope() as db:
        ncount = db.query(NpmMetadata).count()
        gcount = db.query(GemMetadata).count()

    logger.info('Has {} npm records'.format(ncount))
    logger.info('Has {} gem records'.format(gcount))
Example No. 18
def list_feeds(include_counts=False):
    """
    GET /feeds
    :return:
    """

    f = DataFeeds.instance()
    meta = f.list_metadata()

    response = []

    for feed in meta:
        i = FeedMetadata()
        i.name = feed.name
        i.last_full_sync = feed.last_full_sync.isoformat() if feed.last_full_sync else None
        i.created_at = feed.created_at.isoformat() if feed.created_at else None
        i.updated_at = feed.last_update.isoformat() if feed.last_update else None
        i.groups = []

        for group in feed.groups:
            g = FeedGroupMetadata()
            g.name = group.name
            g.last_sync = group.last_sync.isoformat() if group.last_sync else None
            g.created_at = group.created_at.isoformat() if group.created_at else None

            if include_counts:
                # Compute count (this is slow)
                g.record_count = f.records_for(i.name, g.name)
            else:
                g.record_count = None

            i.groups.append(g.to_dict())

        response.append(i.to_dict())

    return jsonify(response)
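
The repeated `value.isoformat() if value else None` expressions are a natural fit for a tiny helper; a hypothetical iso_or_none would make each field assignment one short line:

def iso_or_none(dt):
    """Render a datetime as an ISO-8601 string, passing None through unchanged."""
    return dt.isoformat() if dt else None

# e.g. i.last_full_sync = iso_or_none(feed.last_full_sync)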
Example No. 19
    def execute(self):
        log.info('Starting feed update')

        # Feed syncs update images with any new CVEs pulled in by the sync. Any images loaded while
        # the sync is in progress must be re-scanned afterwards, since transaction ordering can leave
        # them loaded with pre-sync data yet excluded from the sync's own update pass.

        # Create feed task begin event
        error = None
        with session_scope() as session:
            mgr = identities.manager_factory.for_session(session)
            catalog_client = internal_client_for(CatalogClient, userId=None)

        try:
            catalog_client.add_event(
                FeedSyncTaskStarted(
                    groups=self.feeds if self.feeds else 'all'))
        except Exception:
            log.exception('Ignoring event generation error before feed sync')

        start_time = datetime.datetime.utcnow()
        try:
            f = DataFeeds.instance()

            f.vuln_fn = FeedsUpdateTask.process_updated_vulnerability
            f.vuln_flush_fn = FeedsUpdateTask.flush_vulnerability_matches

            updated_dict = f.sync(to_sync=self.feeds,
                                  full_flush=self.full_flush,
                                  catalog_client=catalog_client)

            log.info('Feed sync complete. Results = {}'.format(updated_dict))
            return updated_dict
        except Exception as e:
            error = e
            log.exception('Failure refreshing and syncing feeds')
            raise
        finally:
            end_time = datetime.datetime.utcnow()
            # log feed sync event
            try:
                if error:
                    catalog_client.add_event(
                        FeedSyncTaskFailed(
                            groups=self.feeds if self.feeds else 'all',
                            error=error))
                else:
                    catalog_client.add_event(
                        FeedSyncTaskCompleted(
                            groups=self.feeds if self.feeds else 'all'))
            except Exception:
                log.exception(
                    'Ignoring event generation error after feed sync')

            try:
                self.rescan_images_created_between(from_time=start_time,
                                                   to_time=end_time)
            except Exception:
                log.exception(
                    'Unexpected exception rescanning vulns for images added during the feed sync'
                )
                raise
            finally:
                end_session()
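
The started/failed/completed bracketing around the sync, with event-emission errors swallowed so they never mask the task's own outcome, is a reusable shape. A sketch of the same pattern as a context manager, with hypothetical callbacks standing in for the catalog client calls:

from contextlib import contextmanager

@contextmanager
def task_events(on_start, on_success, on_failure):
    """Emit lifecycle events around a task without letting event errors mask its outcome."""
    try:
        on_start()
    except Exception:
        pass  # mirrors the 'ignoring event generation error' handling above
    try:
        yield
    except Exception as err:
        try:
            on_failure(err)
        except Exception:
            pass
        raise
    else:
        try:
            on_success()
        except Exception:
            pass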
Example No. 20
def test_group_lookups(test_data_env):
    df = DataFeeds.instance()
    df.vulnerabilities.refresh_groups()
    bad = df.vulnerabilities.group_by_name('not_a_real_Group')
    assert not bad, 'Found non-existent group'
    assert df.vulnerabilities.group_by_name('alpine:3.3') is not None, 'Should have found group alpine:3.3'