def story_get_by_offset(
    request,
    feed_unionid: T.feed_unionid.object,
    offset: T.int.min(0).optional,
    detail: StoryDetailSchema,
    set_readed: T.bool.default(False),
) -> StorySchema:
    """Story detail"""
    check_unionid(request, feed_unionid)
    try:
        story = UnionStory.get_by_feed_offset(feed_unionid, offset, detail=detail)
    except StoryNotFoundError:
        return Response({"message": "does not exist"}, status=400)
    if set_readed:
        # Advance the user's read offset past this story.
        try:
            UnionFeed.set_story_offset(feed_unionid, offset + 1)
        except FeedStoryOffsetError as ex:
            return Response({'message': str(ex)}, status=400)
        except ConcurrentUpdateError as ex:
            # Lost a race with a concurrent offset update; log it but
            # still serve the story to the reader.
            LOG.error(f'ConcurrentUpdateError: story set_readed {ex}', exc_info=ex)
    # Signed token that lets the image proxy fetch story images
    # with the correct referrer.
    token = ImageToken(
        referrer=story.link,
        feed=feed_unionid.feed_id,
        offset=offset,
    ).encode(secret=CONFIG.image_token_secret)
    return {**story.to_dict(), 'image_token': token}
def delete_invalid_feeds(days=1, limit=100, threshold=99):
    """Interactively find and delete feeds whose recent fetches mostly fail.

    :param days: look-back window in days over rssant_api_rawfeed records.
    :param limit: max number of (feed_id, status_code) error groups to inspect.
    :param threshold: minimum error percent (0-100) for a feed to be proposed
        for deletion.
    :returns: list of per-feed info dicts (feed_id, title, link, url, error
        breakdown, ok_count, error_percent) sorted by error_percent descending.
    """
    # Error responses (status < 200 or >= 400) grouped per feed and status
    # code; `having count(1) > 3` skips feeds with only occasional failures.
    sql = """
    SELECT feed_id, title, link, url, status_code, count
    FROM (
        SELECT feed_id, status_code, count(1) as count
        FROM rssant_api_rawfeed
        WHERE dt_created >= %s and (status_code < 200 or status_code >= 400)
        group by feed_id, status_code
        having count(1) > 3
        order by count desc
        limit %s
    ) error_feed
    join rssant_api_feed on error_feed.feed_id = rssant_api_feed.id
    order by feed_id, status_code, count;
    """
    # Successful responses (2xx/3xx) for the same feeds, used to compute the
    # error percentage. ANY(%s) takes the feed id list (PostgreSQL syntax).
    sql_ok_count = """
    SELECT feed_id, count(1) as count
    FROM rssant_api_rawfeed
    WHERE dt_created >= %s and (status_code >= 200 and status_code < 400)
        AND feed_id=ANY(%s)
    group by feed_id
    """
    t_begin = timezone.now() - timezone.timedelta(days=days)
    error_feeds = defaultdict(dict)
    with connection.cursor() as cursor:
        cursor.execute(sql, [t_begin, limit])
        for feed_id, title, link, url, status_code, count in cursor.fetchall():
            error_feeds[feed_id].update(feed_id=feed_id, title=title, link=link, url=url)
            # Accumulate a {error_name: count} breakdown per feed.
            error = error_feeds[feed_id].setdefault('error', {})
            error_name = FeedResponseStatus.name_of(status_code)
            error[error_name] = count
            error_feeds[feed_id]['error_count'] = sum(error.values())
            # Assume 100% errors until ok counts are merged in below.
            error_feeds[feed_id].update(ok_count=0, error_percent=100)
        cursor.execute(sql_ok_count, [t_begin, list(error_feeds)])
        for feed_id, ok_count in cursor.fetchall():
            feed = error_feeds[feed_id]
            total = feed['error_count'] + ok_count
            error_percent = round((feed['error_count'] / total) * 100)
            feed.update(ok_count=ok_count, error_percent=error_percent)
    error_feeds = list(
        sorted(error_feeds.values(), key=lambda x: x['error_percent'], reverse=True))
    delete_feed_ids = []
    for feed in error_feeds:
        if feed['error_percent'] >= threshold:
            delete_feed_ids.append(feed['feed_id'])
            click.echo(pretty_format_json(feed))
    # Deletion is destructive, so require explicit confirmation.
    if delete_feed_ids:
        confirm_delete = click.confirm(f'Delete {len(delete_feed_ids)} feeds?')
        if not confirm_delete:
            click.echo('Abort!')
        else:
            UnionFeed.bulk_delete(delete_feed_ids)
            click.echo('Done!')
    return error_feeds
def feed_delete_all(
    request,
    ids: T.list(T.feed_unionid.object).maxlen(MAX_FEED_COUNT).optional,
) -> T.dict(num_deleted=T.int):
    # Verify every id belongs to the current user before deleting anything.
    check_unionid(request, ids)
    deleted_count = UnionFeed.delete_all(user_id=request.user.id, ids=ids)
    return {'num_deleted': deleted_count}
def feed_create(
    request,
    url: _SCHEMA_FEED_CREATE_URL,
) -> T.dict(
    is_ready=T.bool,
    feed=FeedSchema.optional,
    feed_creation=FeedCreationSchema.optional,
):
    """Deprecated, use feed_import instead."""
    try:
        feed, creation = UnionFeed.create_by_url(url=url, user_id=request.user.id)
    except FeedExistError:
        return Response({'message': 'already exists'}, status=400)
    if creation:
        # Hand the pending creation off to a worker that locates the real feed.
        scheduler.tell('worker_rss.find_feed', dict(
            feed_creation_id=creation.id,
            url=creation.url,
        ))
    return {
        'is_ready': bool(feed),
        'feed': feed.to_dict() if feed else None,
        'feed_creation': creation.to_dict() if creation else None,
    }
def feed_query(
    request,
    hints: T.list(
        T.dict(id=T.feed_unionid.object, dt_updated=T.datetime.object)).optional,
    detail: FeedDetailSchema,
) -> T.dict(
    total=T.int.optional,
    size=T.int.optional,
    feeds=T.list(FeedSchema).maxlen(5000),
    deleted_size=T.int.optional,
    deleted_ids=T.list(T.feed_unionid),
):
    """Feed query"""
    if hints:
        # Hints carry client-side cache state; ids must belong to this user.
        check_unionid(request, [hint['id'] for hint in hints])
    total, union_feeds, deleted_ids = UnionFeed.query_by_user(
        user_id=request.user.id, hints=hints, detail=detail)
    feed_dicts = [f.to_dict() for f in union_feeds]
    return {
        'total': total,
        'size': len(feed_dicts),
        'feeds': feed_dicts,
        'deleted_size': len(deleted_ids),
        'deleted_ids': deleted_ids,
    }
def feed_query(
    request,
    hints: T.list(T.dict(
        id=T.feed_unionid.object,
        dt_updated=T.datetime.object,
    )).maxlen(MAX_FEED_COUNT * 10).optional,
    detail: FeedDetailSchema,
) -> T.dict(
    total=T.int.optional,
    size=T.int.optional,
    feeds=T.list(FeedSchema).maxlen(MAX_FEED_COUNT),
    deleted_size=T.int.optional,
    deleted_ids=T.list(T.feed_unionid).maxlen(MAX_FEED_COUNT),
):
    """Feed query, if user feed count exceed limit, only return limit feeds."""
    if hints:
        # allow hints schema exceed feed count limit, but discard exceeded
        hints = hints[:MAX_FEED_COUNT]
        check_unionid(request, [hint['id'] for hint in hints])
    total, union_feeds, deleted_ids = UnionFeed.query_by_user(
        user_id=request.user.id, hints=hints, detail=detail)
    feed_dicts = [f.to_dict() for f in union_feeds]
    return {
        'total': total,
        'size': len(feed_dicts),
        'feeds': feed_dicts,
        'deleted_size': len(deleted_ids),
        'deleted_ids': deleted_ids,
    }
def feed_set_offset(
    request,
    feed_unionid: T.feed_unionid.object,
    offset: T.int.min(0).optional,
) -> FeedSchema:
    check_unionid(request, feed_unionid)
    try:
        updated_feed = UnionFeed.set_story_offset(feed_unionid, offset)
    except FeedStoryOffsetError as ex:
        return Response({'message': str(ex)}, status=400)
    else:
        return updated_feed.to_dict()
def feed_set_all_readed(
    request,
    ids: T.list(T.feed_unionid.object).maxlen(MAX_FEED_COUNT).optional,
) -> T.dict(num_updated=T.int):
    # Reject any id that does not belong to the current user.
    check_unionid(request, ids)
    updated_count = UnionFeed.set_all_readed_by_user(user_id=request.user.id, ids=ids)
    return {'num_updated': updated_count}
def feed_export_opml(request, download: T.bool.default(False)):
    """export feeds to OPML file"""
    # Only the feed list is needed; total and deleted ids are discarded.
    __, user_feeds, __ = UnionFeed.query_by_user(request.user.id)
    opml_text = render_opml(user_feeds)
    response = HttpResponse(opml_text, content_type='text/xml')
    if download:
        # Trigger a file download instead of inline display.
        response['Content-Disposition'] = 'attachment;filename="rssant.opml"'
    return response
def feed_set_title(
    request,
    id: T.feed_unionid.object,
    title: T.str.maxlen(200).optional,
) -> FeedSchema:
    check_unionid(request, id)
    return UnionFeed.set_title(id, title).to_dict()
def feed_set_group(
    request,
    id: T.feed_unionid.object,
    group: T.str.maxlen(50).optional,
) -> FeedSchema:
    check_unionid(request, id)
    return UnionFeed.set_group(id, group).to_dict()
def feed_get(
    request,
    feed_unionid: T.feed_unionid.object,
    detail: FeedDetailSchema,
) -> FeedSchema:
    """Feed detail"""
    check_unionid(request, feed_unionid)
    try:
        feed = UnionFeed.get_by_id(feed_unionid, detail=detail)
    except FeedNotFoundError:
        return Response({"message": "订阅不存在"}, status=400)
    else:
        return feed.to_dict()
def feed_update(
    request,
    feed_unionid: T.feed_unionid.object,
    title: T.str.maxlen(200).optional,
) -> FeedSchema:
    """deprecated, use feed_set_title instead"""
    check_unionid(request, feed_unionid)
    return UnionFeed.set_title(feed_unionid, title).to_dict()
def feed_set_all_group(
    request,
    ids: T.list(T.feed_unionid.object).maxlen(MAX_FEED_COUNT),
    group: T.str.maxlen(50),
) -> T.dict(num_updated=T.int):
    check_unionid(request, ids)
    num_updated = UnionFeed.set_all_group(
        user_id=request.user.id,
        feed_ids=[unionid.feed_id for unionid in ids],
        group=group,
    )
    return {'num_updated': num_updated}
def feed_export_opml(request, download: T.bool.default(False)):
    """export feeds to OPML file"""
    __, union_feeds, __ = UnionFeed.query_by_user(request.user.id)
    feeds = [x.to_dict() for x in union_feeds]
    # Escape and quote text fields so the rendered XML stays well-formed.
    for feed in feeds:
        for key in ('title', 'link', 'url', 'version'):
            feed[key] = xml_quote(xml_escape(feed[key] or ''))
    content = Template(filename=OPML_TEMPLATE_PATH).render(feeds=feeds)
    response = HttpResponse(content, content_type='text/xml')
    if download:
        # Trigger a file download instead of inline display.
        response['Content-Disposition'] = 'attachment;filename="rssant.opml"'
    return response
def feed_create(request, url: T.url.default_schema('http')) -> T.dict(
    is_ready=T.bool,
    feed=FeedSchema.optional,
    feed_creation=FeedCreationSchema.optional,
):
    try:
        feed, creation = UnionFeed.create_by_url(url=url, user_id=request.user.id)
    except FeedExistError:
        return Response({'message': 'already exists'}, status=400)
    if creation:
        # Hand off to a worker that locates the actual feed behind the url.
        scheduler.tell('worker_rss.find_feed', dict(
            feed_creation_id=creation.id,
            url=creation.url,
        ))
    return {
        'is_ready': bool(feed),
        'feed': feed.to_dict() if feed else None,
        'feed_creation': creation.to_dict() if creation else None,
    }
def _create_feeds_by_imports(
    user,
    imports: list,
    group: str = None,
    is_from_bookmark=False,
):
    """Create feeds for the user from parsed import items.

    :param user: owner of the new feeds.
    :param imports: list of dicts with 'url' and optional 'title' / 'group'.
    :param group: when set (non-empty), overrides each item's own group.
    :param is_from_bookmark: kept for interface compatibility; not used here.
    :returns: dict summary with totals, created feeds, pending feed creations
        and the first already-existing feed (if any).
    """
    import_items = []
    for raw_item in imports:
        # An explicit group argument wins over the item's own group.
        item_group = group_id_of(group or raw_item.get('group'))
        import_items.append(FeedImportItem(
            url=raw_item['url'],
            title=raw_item.get('title'),
            group=item_group,
        ))
    result = UnionFeed.create_by_imports(imports=import_items, user_id=user.id)
    # Ask workers to resolve the real feed url for each pending creation.
    find_feed_tasks = [
        dict(dst='worker_rss.find_feed', content=dict(
            feed_creation_id=feed_creation.id,
            url=feed_creation.url,
        ))
        for feed_creation in result.feed_creations
    ]
    scheduler.batch_tell(find_feed_tasks)
    first_existed_feed = None
    if result.existed_feeds:
        first_existed_feed = result.existed_feeds[0].to_dict()
    return dict(
        total=result.total,
        num_created_feeds=len(result.created_feeds),
        num_existed_feeds=len(result.existed_feeds),
        num_feed_creations=len(result.feed_creations),
        first_existed_feed=first_existed_feed,
        created_feeds=[x.to_dict() for x in result.created_feeds],
        feed_creations=[x.to_dict() for x in result.feed_creations],
    )
def _create_feeds_by_urls(user, urls, is_from_bookmark=False):
    # Bulk-create feeds; unknown urls come back as pending feed_creations.
    result = UnionFeed.create_by_url_s(urls=urls, user_id=user.id)
    tasks = []
    for creation in result.feed_creations:
        # Each pending creation needs a worker to find the real feed.
        tasks.append(dict(dst='worker_rss.find_feed', content=dict(
            feed_creation_id=creation.id,
            url=creation.url,
        )))
    scheduler.batch_tell(tasks)
    return dict(
        total=result.total,
        num_created_feeds=len(result.created_feeds),
        num_existed_feeds=len(result.existed_feeds),
        num_feed_creations=len(result.feed_creations),
        created_feeds=[x.to_dict() for x in result.created_feeds],
        feed_creations=[x.to_dict() for x in result.feed_creations],
    )
def _import_feeds(self, imports: list):
    # NOTE(review): appears to be a test helper — it imports feeds and then
    # materializes each pending creation as a READY Feed so queries work
    # without running the real fetch workers; confirm against callers.
    result = UnionFeed.create_by_imports(user_id=self._tester.id, imports=imports)
    for creation in result.feed_creations:
        creation: FeedCreation
        ready_feed = Feed(
            title=creation.title,
            url=creation.url,
            status=FeedStatus.READY,
            dt_updated=timezone.now(),
        )
        ready_feed.save()
        UserFeed(
            user=self._tester,
            feed=ready_feed,
            title=creation.title,
            group=creation.group,
            dt_updated=timezone.now(),
        ).save()
        # Map both the original url and its '.c' variant to the feed url.
        FeedUrlMap(source=creation.url, target=ready_feed.url).save()
        FeedUrlMap(source=creation.url + '.c', target=ready_feed.url).save()
    return result
def feed_delete(request, feed_unionid: T.feed_unionid.object):
    # Reject ids that do not belong to the current user.
    check_unionid(request, feed_unionid)
    try:
        UnionFeed.delete_by_id(feed_unionid)
    except FeedNotFoundError:
        return Response({"message": "订阅不存在"}, status=400)
def feed_update(
    request,
    feed_unionid: T.feed_unionid.object,
    title: T.str.optional,
) -> FeedSchema:
    check_unionid(request, feed_unionid)
    updated_feed = UnionFeed.set_title(feed_unionid, title)
    return updated_feed.to_dict()
def _query_user_feeds(self):
    # Only the feed list matters here; total and deleted ids are discarded.
    __, user_feeds, __ = UnionFeed.query_by_user(self._tester.id)
    return user_feeds