def _inputs_changed(self, cache_timestamp):
        # A feed sync has occurred since the eval was done, or the image has
        # been updated/reloaded, so the inputs may have changed and the cached
        # result must be considered stale.
        db = get_session()
        # TODO: zhill - test more
        feed_group_updated_list = [
            group.last_sync if group.last_sync is not None else
            datetime.datetime.utcfromtimestamp(0) for feed in get_all_feeds(db)
            for group in feed.groups
        ]
        feed_synced = (max(feed_group_updated_list) > cache_timestamp
                       if feed_group_updated_list else False)

        image_updated = self.image.last_modified > cache_timestamp

        return feed_synced or image_updated
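A minimal, self-contained sketch of the staleness check above; FeedGroup, Image, and the timestamps here are illustrative stand-ins, not the original models:

import datetime
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class FeedGroup:
    last_sync: Optional[datetime.datetime] = None


@dataclass
class Image:
    last_modified: datetime.datetime = field(
        default_factory=datetime.datetime.utcnow)


def inputs_changed(groups: List[FeedGroup], image: Image,
                   cache_timestamp: datetime.datetime) -> bool:
    # A group that has never synced falls back to the epoch, so it can
    # never mark the cache stale on its own.
    sync_times = [
        g.last_sync if g.last_sync is not None
        else datetime.datetime.utcfromtimestamp(0)
        for g in groups
    ]
    feed_synced = max(sync_times) > cache_timestamp if sync_times else False
    return feed_synced or image.last_modified > cache_timestamp


# A group synced after the cached eval marks the cache stale:
cached_at = datetime.datetime(2021, 1, 1)
groups = [FeedGroup(last_sync=datetime.datetime(2021, 6, 1))]
assert inputs_changed(groups, Image(last_modified=cached_at), cached_at)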
Example #2
    def sync_metadata(feed_client: IFeedSource,
                      to_sync: Optional[list] = None,
                      operation_id=None) -> tuple:
        """
        Get metadata from the upstream source and sync the db metadata records to match it (e.g. add any new groups or feeds).
        Executes as a unit-of-work for the db, so it commits the result and returns the records found on the upstream source.

        If a record exists in the db but was not found upstream, it is not returned.

        :param feed_client: client used to query the upstream feed source
        :param to_sync: list of string feed names to sync metadata on
        :param operation_id: identifier used to correlate log messages for this operation
        :return: tuple; the first element is a dict of names mapped to db records post-sync, including only records successfully updated from upstream; the second element is a list of (failed_feed_name, error_obj) tuples
        """

        if not to_sync:
            return {}, []

        db = get_session()
        try:
            logger.info(
                'Syncing feed and group metadata from upstream source (operation_id={})'
                .format(operation_id))

            source_resp = feed_client.list_feeds()
            # to_sync is guaranteed non-empty by the early return above
            feeds = filter(lambda x: x.name in to_sync, source_resp.feeds)

            failed = []
            source_feeds = {
                x.name: {
                    'meta': x,
                    'groups': feed_client.list_feed_groups(x.name).groups
                }
                for x in feeds
            }
            logger.debug('Upstream feeds available: %s', source_feeds)
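            # Map feed name -> db FeedMetadata record, filtered down to feeds
            # that are both requested in to_sync and present upstream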
            db_feeds = DataFeeds._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db))

            for feed_name, feed_api_record in source_feeds.items():
                try:
                    logger.info(
                        'Syncing metadata for feed: {} (operation_id={})'.
                        format(feed_name, operation_id))

                    api_feed = feed_api_record['meta']
                    db_feed = db_feeds.get(api_feed.name)

                    # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
                    if not db_feed:
                        logger.debug(
                            'Adding new feed metadata record to db: {} (operation_id={})'
                            .format(api_feed.name, operation_id))
                        db_feed = FeedMetadata(
                            name=api_feed.name,
                            description=api_feed.description,
                            access_tier=api_feed.access_tier,
                            enabled=True)
                        db.add(db_feed)
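                        # Flush so the new feed row is persisted (and gets its
                        # identity) before the group records below reference it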
                        db.flush()
                    else:
                        logger.debug(
                            'Feed metadata already in db: {} (operation_id={})'
                            .format(api_feed.name, operation_id))

                    # Check for any update
                    db_feed.description = api_feed.description
                    db_feed.access_tier = api_feed.access_tier

                    db_groups = {x.name: x for x in db_feed.groups}
                    for api_group in feed_api_record.get('groups', []):
                        db_group = db_groups.get(api_group.name)
                        # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
                        if not db_group:
                            logger.debug(
                                'Adding new feed group metadata record to db: {} (operation_id={})'
                                .format(api_group.name, operation_id))
                            db_group = FeedGroupMetadata(
                                name=api_group.name,
                                description=api_group.description,
                                access_tier=api_group.access_tier,
                                feed=db_feed,
                                enabled=True)
                            db_group.last_sync = None
                            db.add(db_group)
                        else:
                            logger.debug(
                                'Feed group metadata already in db: {} (operation_id={})'
                                .format(api_group.name, operation_id))

                        db_group.access_tier = api_group.access_tier
                        db_group.description = api_group.description
                except Exception as e:
                    logger.exception('Error syncing feed {}'.format(feed_name))
                    logger.warning(
                        'Could not sync metadata for feed: {} (operation_id={})'
                        .format(feed_name, operation_id))
                    failed.append((feed_name, e))
                finally:
                    db.flush()

            # Reload
            db_feeds = DataFeeds._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db))

            db.commit()
            logger.info(
                'Metadata sync from feeds upstream source complete (operation_id={})'
                .format(operation_id))
            return db_feeds, failed
        except Exception as e:
            logger.error(
                'Rolling back feed metadata update due to error: {} (operation_id={})'
                .format(e, operation_id))
            db.rollback()
            raise
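A hedged usage sketch for the method above, assuming it is exposed as a staticmethod on DataFeeds (as its internal calls suggest). StubFeedClient and its payloads are illustrative, shaped only by the attributes the method actually reads (name, description, access_tier); the real IFeedSource response types may differ:

import uuid
from types import SimpleNamespace

class StubFeedClient:
    # Illustrative stand-in for an IFeedSource implementation
    def list_feeds(self):
        feed = SimpleNamespace(name='vulnerabilities',
                               description='CVE data',
                               access_tier=0)
        return SimpleNamespace(feeds=[feed])

    def list_feed_groups(self, feed_name):
        group = SimpleNamespace(name='alpine:3.12',
                                description='Alpine 3.12 CVEs',
                                access_tier=0)
        return SimpleNamespace(groups=[group])

# Runs as a unit-of-work against the session from get_session()
db_feeds, failed = DataFeeds.sync_metadata(
    feed_client=StubFeedClient(),
    to_sync=['vulnerabilities'],
    operation_id=uuid.uuid4().hex)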
Example #3
    def sync_metadata(
        source_feeds: Dict[
            str, Dict[str, Union[FeedAPIRecord, List[FeedAPIGroupRecord]]]
        ],
        to_sync: Optional[List[str]] = None,
        operation_id: Optional[str] = None,
        groups: bool = True,
    ) -> Tuple[Dict[str, FeedMetadata], List[Tuple[str, Union[str, BaseException]]]]:
        """
        Get metadata from the upstream source and sync the db metadata records to match it (e.g. add any new groups or feeds).
        Executes as a unit-of-work for the db, so it commits the result and returns the records found on the upstream source.

        If a record exists in the db but was not found upstream, it is not returned.

        :param source_feeds: mapping containing FeedAPIRecord and FeedAPIGroupRecord
        :type source_feeds: Dict[str, Dict[str, Union[FeedAPIRecord, List[FeedAPIGroupRecord]]]]
        :param to_sync: list of string feed names to sync metadata on
        :type to_sync: List[str]
        :param operation_id: UUID4 hexadecimal string
        :type operation_id: Optional[str]
        :param groups: whether or not to sync group metadata (defaults to True, which will sync group metadata)
        :type groups: bool
        :return: tuple; the first element is a dict of names mapped to db records post-sync, including only records successfully updated from upstream; the second element is a list of (failed_feed_name, error_obj) tuples
        :rtype: Tuple[Dict[str, FeedMetadata], List[Tuple[str, Union[str, BaseException]]]]
        """

        if not to_sync:
            return {}, []

        db = get_session()
        try:
            logger.info(
                "Syncing feed and group metadata from upstream source (operation_id={})".format(
                    operation_id
                )
            )
            failed = []
            db_feeds = MetadataSyncUtils._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db)
            )

            for feed_name, feed_api_record in source_feeds.items():
                try:
                    logger.info(
                        "Syncing metadata for feed: {} (operation_id={})".format(
                            feed_name, operation_id
                        )
                    )
                    feed_metadata_map = MetadataSyncUtils._sync_feed_metadata(
                        db, feed_api_record, db_feeds, operation_id
                    )
                    if groups:
                        MetadataSyncUtils._sync_feed_group_metadata(
                            db, feed_api_record, feed_metadata_map, operation_id
                        )
                except Exception as e:
                    logger.exception("Error syncing feed {}".format(feed_name))
                    logger.warning(
                        "Could not sync metadata for feed: {} (operation_id={})".format(
                            feed_name, operation_id
                        )
                    )
                    failed.append((feed_name, e))
                finally:
                    db.flush()

            # Reload
            db_feeds = MetadataSyncUtils._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db)
            )

            db.commit()
            logger.info(
                "Metadata sync from feeds upstream source complete (operation_id={})".format(
                    operation_id
                )
            )
            return db_feeds, failed
        except Exception as e:
            logger.error(
                "Rolling back feed metadata update due to error: {} (operation_id={})".format(
                    e, operation_id
                )
            )
            db.rollback()
            raise
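An illustrative call of this refactored variant, assuming it lives on MetadataSyncUtils as its internal calls suggest. The 'meta'/'groups' keys mirror how Example #2 builds its source_feeds mapping, and the FeedAPIRecord/FeedAPIGroupRecord constructor arguments below are guesses based on the attributes read during sync, not the real class signatures:

import uuid

source_feeds = {
    "vulnerabilities": {
        # 'meta' holds the feed-level record, 'groups' its group records
        "meta": FeedAPIRecord(name="vulnerabilities",
                              description="CVE data",
                              access_tier=0),
        "groups": [
            FeedAPIGroupRecord(name="alpine:3.12",
                               description="Alpine 3.12 CVEs",
                               access_tier=0)
        ],
    }
}

db_feeds, failed = MetadataSyncUtils.sync_metadata(
    source_feeds=source_feeds,
    to_sync=["vulnerabilities"],
    operation_id=uuid.uuid4().hex,
    groups=True)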