Example #1
    def refresh_groups(self):
        # Fetch the current group listing for this feed from the upstream source
        group_list = self.source.list_feed_groups(self.__feed_name__)

        for group in group_list:
            my_group = self.group_by_name(group.name)
            if not my_group:
                # Group is not tracked locally yet; create its metadata record
                g = FeedGroupMetadata(name=group.name,
                                      description=group.description,
                                      access_tier=group.access_tier,
                                      feed=self.metadata)
                g.last_sync = None
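
For context, a minimal sketch of what the group_by_name lookup used above might do. The body is hypothetical and assumes self.metadata.groups holds the FeedGroupMetadata records already known for this feed:

    def group_by_name(self, group_name):
        """Return the locally tracked group with the given name, or None."""
        # Hypothetical implementation: scan the feed's known group records for a name match
        return next((g for g in self.metadata.groups if g.name == group_name), None)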
Example #2
    def _sync_feed_group_metadata(
        db: Session,
        feed_api_record: Dict[str, Union[FeedAPIRecord, List[FeedAPIGroupRecord]]],
        db_feeds: Dict[str, FeedMetadata],
        operation_id: Optional[str] = None,
    ) -> None:
        """
        Add FeedGroupMetadata records to DB if they don't already exist

        :param db: database session
        :type db: Session
        :param feed_api_record: data from API client
        :type feed_api_record: Dict[str, Union[FeedAPIRecord, List[FeedAPIGroupRecord]]]
        :param db_feeds: map of feed names to FeedMetadata tied to DB session
        :type db_feeds: Dict[str, FeedMetadata]
        :param operation_id: UUID4 hexadecimal string
        :type operation_id: Optional[str]
        """
        api_feed = feed_api_record["meta"]
        db_feed = db_feeds.get(api_feed.name)
        # Check for any update
        db_feed.description = api_feed.description
        db_feed.access_tier = api_feed.access_tier

        db_groups = {x.name: x for x in db_feed.groups}
        for api_group in feed_api_record.get("groups", []):
            db_group = db_groups.get(api_group.name)
            # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
            if not db_group:
                logger.debug(
                    "Adding new feed metadata record to db: {} (operation_id={})".format(
                        api_group.name, operation_id
                    )
                )
                db_group = FeedGroupMetadata(
                    name=api_group.name,
                    description=api_group.description,
                    access_tier=api_group.access_tier,
                    feed=db_feed,
                    enabled=True,
                )
                db_group.last_sync = None
                db.add(db_group)
            else:
                logger.debug(
                    "Feed group metadata already in db: {} (operation_id={})".format(
                        api_group.name, operation_id
                    )
                )

            db_group.access_tier = api_group.access_tier
            db_group.description = api_group.description
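
A brief usage sketch of the function above. It mirrors the data flow of Example #3 and assumes _sync_feed_group_metadata is a static method on the same DataFeeds class, with get_session, get_all_feeds, and feed_client behaving as shown there:

    import uuid

    # Illustrative only -- build the feed_api_record shape the function expects
    db = get_session()
    source_feed = feed_client.list_feeds().feeds[0]
    feed_api_record = {
        "meta": source_feed,
        "groups": feed_client.list_feed_groups(source_feed.name).groups,
    }
    db_feeds = {f.name: f for f in get_all_feeds(db)}
    DataFeeds._sync_feed_group_metadata(db, feed_api_record, db_feeds, operation_id=uuid.uuid4().hex)
    db.commit()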
Example #3
    def sync_metadata(feed_client: IFeedSource,
                      to_sync: list = None,
                      operation_id=None) -> tuple:
        """
        Get metadata from source and sync db metadata records to that (e.g. add any new groups or feeds)
        Executes as a unit-of-work for db, so will commit result and returns the records found on upstream source.

        If a record exists in db but was not found upstream, it is not returned

        :param feed_client:
        :param to_sync: list of string feed names to sync metadata on
        :return: tuple, first element: dict of names mapped to db records post-sync only including records successfully updated by upstream, second element is a list of tuples where each tuple is (failed_feed_name, error_obj)
        """

        if not to_sync:
            return {}, []

        db = get_session()
        try:
            logger.info(
                'Syncing feed and group metadata from upstream source (operation_id={})'
                .format(operation_id))

            source_resp = feed_client.list_feeds()
            if to_sync:
                feeds = filter(lambda x: x.name in to_sync, source_resp.feeds)
            else:
                feeds = []

            failed = []
            source_feeds = {
                x.name: {
                    'meta': x,
                    'groups': feed_client.list_feed_groups(x.name).groups
                }
                for x in feeds
            }
            logger.debug('Upstream feeds available: %s', source_feeds)
            db_feeds = DataFeeds._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db))

            for feed_name, feed_api_record in source_feeds.items():
                try:
                    logger.info(
                        'Syncing metadata for feed: {} (operation_id={})'.
                        format(feed_name, operation_id))

                    api_feed = feed_api_record['meta']
                    db_feed = db_feeds.get(api_feed.name)

                    # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
                    if not db_feed:
                        logger.debug(
                            'Adding new feed metadata record to db: {} (operation_id={})'
                            .format(api_feed.name, operation_id))
                        db_feed = FeedMetadata(
                            name=api_feed.name,
                            description=api_feed.description,
                            access_tier=api_feed.access_tier,
                            enabled=True)
                        db.add(db_feed)
                        db.flush()
                    else:
                        logger.debug(
                            'Feed metadata already in db: {} (operation_id={})'
                            .format(api_feed.name, operation_id))

                    # Check for any update
                    db_feed.description = api_feed.description
                    db_feed.access_tier = api_feed.access_tier

                    db_groups = {x.name: x for x in db_feed.groups}
                    for api_group in feed_api_record.get('groups', []):
                        db_group = db_groups.get(api_group.name)
                        # Do this instead of a db.merge() to ensure no timestamps are reset or overwritten
                        if not db_group:
                            logger.debug(
                                'Adding new feed metadata record to db: {} (operation_id={})'
                                .format(api_group.name, operation_id))
                            db_group = FeedGroupMetadata(
                                name=api_group.name,
                                description=api_group.description,
                                access_tier=api_group.access_tier,
                                feed=db_feed,
                                enabled=True)
                            db_group.last_sync = None
                            db.add(db_group)
                        else:
                            logger.debug(
                                'Feed group metadata already in db: {} (operation_id={})'
                                .format(api_group.name, operation_id))

                        db_group.access_tier = api_group.access_tier
                        db_group.description = api_group.description
                except Exception as e:
                    logger.exception('Error syncing feed {}'.format(feed_name))
                    logger.warning(
                        'Could not sync metadata for feed: {} (operation_id={})'
                        .format(feed_name, operation_id))
                    failed.append((feed_name, e))
                finally:
                    db.flush()

            # Reload
            db_feeds = DataFeeds._pivot_and_filter_feeds_by_config(
                to_sync, list(source_feeds.keys()), get_all_feeds(db))

            db.commit()
            logger.info(
                'Metadata sync from feeds upstream source complete (operation_id={})'
                .format(operation_id))
            return db_feeds, failed
        except Exception as e:
            logger.error(
                'Rolling back feed metadata update due to error: {} (operation_id={})'
                .format(e, operation_id))
            db.rollback()
            raise
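
A short, hypothetical calling sketch for sync_metadata, showing how the returned (db_feeds, failed) tuple might be consumed. It assumes sync_metadata is exposed on the DataFeeds class referenced in the function body, that feed_client is an already constructed IFeedSource implementation, and that the feed names are placeholders:

    import uuid

    synced, failed = DataFeeds.sync_metadata(
        feed_client=feed_client,
        to_sync=["vulnerabilities", "nvdv2"],
        operation_id=uuid.uuid4().hex,
    )
    for feed_name in synced:
        logger.info("Synced feed metadata record: %s", feed_name)
    for feed_name, err in failed:
        logger.warning("Failed to sync feed %s: %s", feed_name, err)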