def feeds_tag(request, tag):
    """Render an RSS feed of the 20 newest published entries carrying `tag`."""
    rss = feedgenerator.Rss201rev2Feed(
        title=u'Django Utilidades :: Marinho Brandao',
        link=settings.PROJECT_ROOT_URL,
        description=u'',
        language="pt-br",
    )
    query = (Entry.all()
             .filter('published =', True)
             .filter('show_in_rss =', True)
             .filter('tags =', tag))
    query.order('-pub_date')
    for entry in query[:20]:
        rss.add_item(
            title=entry.title,
            # PROJECT_ROOT_URL ends with '/'; strip it before joining paths.
            link=settings.PROJECT_ROOT_URL[:-1] + entry.get_absolute_url(),
            description=entry.get_text(),
        )
    resp = HttpResponse(mimetype="application/xhtml+xml")
    rss.write(resp, 'utf-8')
    return resp
def test_feed_without_feed_url_gets_rendered_without_atom_link(self):
    """A feed created without feed_url must not emit an <atom:link rel="self">."""
    feed = feedgenerator.Rss201rev2Feed("title", "/link/", "descr")
    self.assertIsNone(feed.feed["feed_url"])
    rendered = feed.writeString("utf-8")
    for fragment in ("<atom:link", 'href="/feed/"', 'rel="self"'):
        self.assertNotIn(fragment, rendered)
def make_feed(username, filename=None, aggregate_feed=None):
    """Scrape a user's Twitter page and append their tweets to an RSS feed.

    Items go into `aggregate_feed` when one is supplied, otherwise into a
    fresh per-user feed; the feed is written to `filename` when given.
    """
    target = aggregate_feed if aggregate_feed else feedgenerator.Rss201rev2Feed(
        title=f'Twitter @{username}',
        link=f'https://twitter.com/{username}',
        description=f'Tweets by @{username}',
    )
    with urllib.request.urlopen(f'https://twitter.com/{username}') as page:
        markup = page.read()
    soup = BeautifulSoup(markup, features='html5lib')
    for tweet in soup.find_all('div', class_='tweet'):
        body = tweet.find('div', class_='js-tweet-text-container').get_text(' ')
        if aggregate_feed:
            # Prefix the author so mixed-user items stay attributable.
            body = f'@{username}: {body}'
        permalink = tweet.find('a', class_='js-permalink')['href']
        posted = int(
            tweet.find('span', class_='js-short-timestamp')['data-time'])
        target.add_item(
            title=body,
            link='https://twitter.com' + permalink,
            description=str(tweet),
            author=f'@{username}',
            pubdate=datetime.fromtimestamp(posted),
        )
    if filename:
        with open(filename, 'wb') as out:
            target.write(out, encoding='utf-8')
def rss(request):
    """Serve the blog's RSS feed built from the MongoDB article collection.

    Channel metadata comes from the `infos` document; items are the public
    articles sorted by `PostOn` descending.
    """
    host = request.get_host()
    db = connect_blog_database(request)
    info = db.infos.find_one()
    articles = db.articles.find(sort=[('PostOn', pymongo.DESCENDING)])
    feed = feedgenerator.Rss201rev2Feed(
        title=info['Title'],
        link='http://' + host,
        description=info['Subtitle'],
        language='zh-cn',
        feed_url='http://' + host + '/rss/',
    )
    for i in articles:
        # Private articles never appear in the feed.
        if not i['IsPublic']:
            continue
        feed.add_item(
            title=i['Title'],
            link='http://%s/article/%d/' % (host, i['Id']),
            # BUG FIX: was datetime.now(), which stamped every item with the
            # request time, so readers saw the whole feed as new on each
            # fetch; use the article's own publish date when present.
            pubdate=i.get('PostOn', datetime.now()),
            description=i['Content'])
    return HttpResponse(feed.writeString('utf-8'),
                        content_type='application/rss+xml; charset=utf-8')
def __call__(self, request):
    """Render the first ten items of the view's queryset as an RSS response."""
    rss = feedgenerator.Rss201rev2Feed(
        title=self.get_title(request),
        link=request.build_absolute_uri(self.get_link(request)),
        description='',
        language=u'en',
        feed_url=request.build_absolute_uri(),
    )
    for obj in self.get_query_set(request)[0:10]:
        item_link = self.get_item_url(request, obj)
        if item_link:
            # Relative item URLs are made absolute against the request host.
            item_link = request.build_absolute_uri(item_link)
        rss.add_item(
            title=str(obj or ''),
            link=item_link,
            description=obj.description() or '',
            pubdate=self.get_item_date(request, obj) or '',
        )
    return HttpResponse(rss.writeString('utf-8'), mimetype='application/xml')
def rss(request, question_id):
    """Render a question and all of its answers as an RSS document.

    The serialized feed is passed to the template under the 'str' context key.
    """
    question = get_object_or_404(Question, pk=question_id)
    answers = Answer.objects.filter(question_id=question_id)
    feed = feedgenerator.Rss201rev2Feed(
        title="Output question rss",
        link="",
        description=
        u"This is the content of all staff related to one question.",
        language=u"en",
    )
    # First item is the question itself, then one item per answer.
    feed.add_item(title=question.question_title,
                  link=u"",
                  description=question.question_text)
    for answer in answers:
        feed.add_item(
            title=u"answer",
            link="",
            description=answer.answer_text,
        )
    # FIX: renamed from `str`, which shadowed the builtin.
    feed_xml = format(feed.writeString('utf-8'))
    context = {'str': feed_xml}
    return render(request, 'questions/rss.html', context)
def test_rss_mime_type(self):
    """
    Test to make sure RSS MIME type has UTF8 Charset parameter set
    """
    feed = feedgenerator.Rss201rev2Feed("title", "link", "description")
    expected = "application/rss+xml; charset=utf-8"
    self.assertEqual(feed.content_type, expected)
def test_feed_with_feed_url_gets_rendered_with_atom_link(self):
    """Supplying feed_url must surface as an <atom:link rel="self"> element."""
    feed = feedgenerator.Rss201rev2Feed('title', '/link/', 'descr',
                                        feed_url='/feed/')
    self.assertEqual(feed.feed['feed_url'], '/feed/')
    rendered = feed.writeString('utf-8')
    for fragment in ('<atom:link', 'href="/feed/"', 'rel="self"'):
        self.assertIn(fragment, rendered)
def index(request):
    """Render all questions and answers as an RSS string into the index template."""
    context = {}
    # Channel metadata for the feed.
    feed = feedgenerator.Rss201rev2Feed(
        title=u"Runnable",
        link=u"*****@*****.**",
        description=u"This is final project for open source tool",
        language=u"en",
    )
    questions = db.GqlQuery("SELECT * FROM Questions")
    for question in questions:
        feed.add_item(title=question.title,
                      description=question.description,
                      createdate=question.createdate,
                      modifydate=question.modifydate)
    answers = db.Query(Answers)
    for answer in answers:
        # NOTE(review): `question` here is the loop variable left over from
        # the questions loop above, so every answer item reuses the LAST
        # question's title — confirm whether each answer should instead be
        # paired with its own question.
        feed.add_item(title=question.title,
                      description=answer.description,
                      createdate=answer.createdate,
                      modifydate=answer.modifydate)
    # Serialize the feed; FIX: renamed from `str`, which shadowed the builtin.
    feed_xml = format(feed.writeString('utf-8'))
    # To write the same feed to a file instead:
    #   with open('test.rss', 'w') as fp:
    #       feed.write(fp, 'utf-8')
    context['str'] = feed_xml
    return render(request, 'questionAnswerSite/index.html', context)
def test_latest_post_date_returns_utc_time(self):
    """latest_post_date() must be UTC-aware regardless of USE_TZ."""
    for use_tz in (True, False):
        with self.settings(USE_TZ=use_tz):
            feed = feedgenerator.Rss201rev2Feed("title", "link",
                                                "description")
            tzinfo = feed.latest_post_date().tzinfo
            self.assertEqual(tzinfo, datetime.timezone.utc)
def org_rss(request, org_short_name, org_feed_hash):
    # Serve an organization's events as an RSS feed, gated by a
    # per-organization secret hash so only holders of the feed URL can read it.
    try:
        """ """
        host = request.META['HTTP_HOST']
        current_org, message = Organization.objects.get_current_org(
            org_short_name)
        # A non-empty message signals a failed lookup: bounce to the org list.
        if message:
            return HttpResponseRedirect(reverse('org_orgs_list'))
        # Wrong feed hash: redirect to the public org page instead of the feed.
        if not org_feed_hash == current_org.org_feed_hash:
            return HttpResponseRedirect(
                reverse('org_org_view',
                        kwargs={'org_short_name':
                                current_org.org_short_name}))
        events = current_org.event_set.all().order_by('-event_date')
        # Channel metadata: org name links back to the org's event list page.
        orgfeed = feedgenerator.Rss201rev2Feed(
            title=current_org.org_name,
            link="http://%s%s" % (host,
                                  reverse('event_events_list',
                                          kwargs={
                                              'org_short_name':
                                              current_org.org_short_name,
                                          })),
            description=current_org.org_desc,
            language='en',
        )
        # One item per event, newest first, linking to the event detail view.
        for event in events:
            orgfeed.add_item(
                title=event.event_name,
                link="http://%s%s" % (host,
                                      reverse('event_event_view',
                                              kwargs={
                                                  'org_short_name':
                                                  current_org.org_short_name,
                                                  'event_hash':
                                                  event.event_hash
                                              })),
                description="Event on: %s -- Description: %s" %
                (event.event_date.strftime('%d %b %Y'), event.event_desc),
                categories=(event.event_type, ),
                author_name=event.event_creator_name,
                pubdate=event.event_created_date)
        response = HttpResponse()
        response['Content-Type'] = 'application/rss+xml'
        response.write(orgfeed.writeString('UTF-8'))
        #template_name = "error.html"
        return response
    except ObjectDoesNotExist:
        # NOTE(review): get_current_org appears to report failure via
        # `message` above, so it is unclear what can raise ObjectDoesNotExist
        # here — confirm before removing this handler.
        context = {
            'error': "Organization does not exist",
        }
        template_name = "error.html"
        return render_to_response(template_name,
                                  context,
                                  context_instance=RequestContext(request))
def generate_feed_from_queryset(request, queryset=None):
    # Serialize published BaseContent objects as an RSS string, driven by the
    # 'rss' plugin's configuration (content types, portal title, item limit).
    plugin = get_plugin('rss')
    if not queryset:
        # No explicit queryset: build one from the configured content types.
        contenttypes = plugin.get_config().get('contenttypes', []).get_value()
        query = Q()
        if not contenttypes or ALL_TYPES in contenttypes:
            queryset = BaseContent.objects.filter(
                status='published').order_by('-modification_date')
        else:
            # OR together one class_name filter per configured type.
            classnames = [x for x in contenttypes]
            for classname in classnames:
                query = query | Q(class_name=classname.lower())
            queryset = BaseContent.objects.filter(status='published').filter(
                query).order_by('-modification_date')
    # Apply any query-string filters from the request on top of the base set.
    qsm = QueryStringManager(request)
    queryset = queryset.filter(**qsm.get_filters())
    portal_title = plugin.get_config().get('portal', '').get_value()
    # Channel metadata is rendered from small standalone templates.
    f = feedgenerator.Rss201rev2Feed(
        title=portal_title,
        link=render_to_string('rss/link.html'),
        description=render_to_string('rss/description.html'),
        language=render_to_string('rss/language.html'),
        author_name=render_to_string('rss/author_name.html'),
        feed_url=render_to_string('rss/feed_url.html'),
    )
    # Cap the number of items at the plugin-configured limit.
    limit = plugin.get_config().get('limit', None)
    queryset = queryset[:int(limit.get_value())]
    link_prefix = 'http://%s' % Site.objects.all()[0].domain
    for item in queryset:
        # Polymorphic content: swap in the concrete subclass when available.
        if hasattr(item, 'get_real_instance'):
            item = item.get_real_instance()
        if 'modification_date' in item.__dict__:
            item_date = item.modification_date
        else:
            # Fall back to "now" when the instance has no modification date.
            item_date = datetime.now()
        # Per-type templates with generic fallbacks for title and description.
        templates = {
            'title':
            ['rss/%s/title.html' % item.class_name, 'rss/items/title.html'],
            'description': [
                'rss/%s/description.html' % item.class_name,
                'rss/items/description.html'
            ],
        }
        f.add_item(
            title=render_to_string(templates['title'], {'item': item}),
            link=u'%s%s' % (link_prefix, item.public_link()),
            pubdate=item_date,
            description=render_to_string(templates['description'],
                                         {'item': item}),
        )
    return f.writeString('UTF-8')
def rss_action(self):
    """Populate self.rss_feed with the lookup's published models, newest first."""
    lookup = self.lookup_instance
    lookup.add_sorter('published_at', 'desc')
    lookup.add_filter('published', 1)
    self.read_action()
    self.rss_feed = feedgenerator.Rss201rev2Feed(
        title=self.title,
        link=self.link,
        description=self.description)
    for model in lookup.get_models():
        # Item title/date field names are configurable on the instance.
        self.rss_feed.add_item(
            title=getattr(model, self.item_title),
            link=self.get_item_link(model),
            description=self.get_item_description(model),
            pubdate=getattr(model, self.item_pubdate),
            author_name=self.get_author_name(model))
def feed(request, openid):
    """Serve the cached RSS feed for a WeChat account, rebuilding it on a miss."""
    logging.info(openid)
    # FIX: renamed from `str`, which shadowed the builtin.
    feed_xml = cache.get(openid)
    if not feed_xml:
        weixin = models.WeiXin()
        items = weixin.get_items(openid)
        feed = feedgenerator.Rss201rev2Feed(title=items["title"],
                                            link=items["link"],
                                            description=items["description"],
                                            language="zh-cn")
        for item in items["items"]:
            feed.add_item(title=item["title"],
                          description=item["content"],
                          link=item["link"])
        feed_xml = feed.writeString('utf-8')
        cache.set(openid, feed_xml)
    return HttpResponse(feed_xml)
def render_path(request, repository, path, changesets):
    """Return an RSS response describing the given changesets under `path`."""
    rss = feedgenerator.Rss201rev2Feed(
        title="Changes in %s %s" % (repository, path),
        description="Changes recently made to %s in %s" % (path, repository),
        link=reverse('path', args=[repository, path]))
    for cs in changesets:
        body = render_to_string("pathfeed.html", {"changeset": cs})
        rss.add_item(
            title="Changeset %s" % cs,
            description=body,
            link="%srev/%s" % (repository.url, cs),
            # Author field is "Name <email>"; keep only the name part.
            author_name=cs.author.split(" <")[0],
            pubdate=cs.localdate,
        )
    return HttpResponse(rss.writeString('UTF-8'), content_type=rss.mime_type)
def get(self):
    """Write an RSS feed of the latest PAGESIZE aggregated articles."""
    from django.utils import feedgenerator
    latest = Feed.all().order("-updated_parsed").fetch(PAGESIZE)
    channel = feedgenerator.Rss201rev2Feed(
        title="mato-mato-chan2",
        link="http://matomatochan2.appspot.com/",
        description=SITE_DESCRIPTION,
        language=u"ja")
    for article in latest:
        channel.add_item(
            title=article.title,
            link=article.link,
            description="",
        )
    self.response.out.write(channel.writeString("utf-8"))
def get(self):
    """Serve tweets with extracted URLs as an RSS feed."""
    # Build the feed channel.
    channel = feedgenerator.Rss201rev2Feed(
        title="extweet",
        link="http://extractweet.appspot.com/rss",
        description="twitterのリストから抽出したURLをRSS配信",
        language=u"ja")
    for tweet in db.GqlQuery("SELECT * FROM Tweet ORDER BY date DESC"):
        channel.add_item(title=tweet.title,
                         link=tweet.urls[0],
                         description=tweet.content,
                         pubdate=tweet.date)
    # Serialize to an RSS string and emit it.
    payload = channel.writeString("utf-8")
    self.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
    self.response.out.write(payload)
def rss(request, question_id):
    """Render a question and its answers (by descending net votes) as RSS.

    The serialized feed is passed to the template under the 'str' context key.
    """
    question = get_object_or_404(Question, pk=question_id)
    answers = question.answers_set.order_by('-net_votes')
    # Channel metadata for the feed.
    feed = feedgenerator.Rss201rev2Feed(
        title="Output question rss",
        link="",
        description=
        u"This is the content of all staff related to one question.",
        language=u"en",
    )
    # First item is the question itself, then one item per answer.
    feed.add_item(title=question.question_title,
                  link=u"",
                  description=question.question_text)
    for answer in answers:
        feed.add_item(
            title=u"answer",
            link="",
            description=answer.answer_text,
        )
    # To write the same feed to a file instead:
    #   with open('test.rss', 'w') as fp:
    #       feed.write(fp, 'utf-8')
    # Serialize; FIX: renamed from `str`, which shadowed the builtin.
    feed_xml = format(feed.writeString('utf-8'))
    context = {'str': feed_xml}
    return render(request, 'polls/rss.html', context)
def main():
    """CLI entry point: write one RSS file per username, or one aggregate file."""
    args = parse_args()
    aggregate_feed = None
    if args.aggregate_file:
        aggregate_feed = feedgenerator.Rss201rev2Feed(
            title='Twitter',
            link='https://twitter.com/',
            # BUG FIX: was an f-string combined with %-formatting
            # (f'...%d...' % len(...)) and had a typo ("useres");
            # use plain f-string interpolation.
            description=f'Tweets from {len(args.username)} Twitter users',
        )
    for username in args.username:
        if aggregate_feed:
            make_feed(username, aggregate_feed=aggregate_feed)
        else:
            make_feed(username,
                      filename=os.path.join(args.output_dir,
                                            f'{username}.xml'))
    if aggregate_feed:
        with open(args.aggregate_file, 'wb') as f:
            aggregate_feed.write(f, encoding='utf-8')
def feed(request):
    """Serve the FikaNote podcast episode list as an RSS feed."""
    # NOTE(review): assert is stripped under `python -O`; consider returning
    # HttpResponseNotAllowed for non-GET requests instead.
    assert request.method == 'GET', "error on request method"
    feed = feedgenerator.Rss201rev2Feed(
        title='FikaNote',
        link='https://fikanote.herokuapp.com/',
        description=
        'Talking about Tech, Software Development and Gadgets with Coffee.',
        language='ja-jp')
    episodes = FikanoteDB.objects.order_by('-date')
    for episode in episodes:
        url = 'https://fikanote.herokuapp.com/%d/' % episode.number
        feed.add_item(title='FikaNote %d: %s' % (episode.number, episode.title),
                      link=url,
                      # BUG FIX: "Kosuke Nagano" is a person's name, not an
                      # address, yet it was passed as author_email and emitted
                      # in the RSS <author> email slot; pass it as author_name.
                      author_name='Kosuke Nagano',
                      description=episode.agenda,
                      pubdate=episode.date)
    result = feed.writeString('utf-8')
    return HttpResponse(result, content_type="text/xml; charset=utf-8")
def rss(request):
    '''
    Generates an RSS feed of the 20 newest published blog pages, cached in
    memcache under the 'rss_feed' key.
    '''
    resp = memcache.get('rss_feed', None)  # @UndefinedVariable
    if not resp:
        feed = feedgenerator.Rss201rev2Feed(
            title="The Site With The LAMP",
            link="http://raditha.com/",
            description="A long standing but irregularly updated tech blog",
            author_name="Raditha dissanayake",
            feed_url="http://raditha.com/feed")
        for page in Page.query().filter(Page.blog == True).filter(
                Page.draft == False).order(-Page.timestamp).fetch(20):
            feed.add_item(
                title=page.title,
                link="http://raditha.com/blog/archives/{0}".format(
                    page.link, page.title),
                # BUG FIX: the kwarg was `pubDate`, which feedgenerator
                # silently ignores (it expects `pubdate`), so items carried
                # no publication date.
                pubdate=page.published_at,
                description=page.content)
        resp = feed.writeString('UTF-8')
        memcache.set('rss_feed', resp)  # @UndefinedVariable
    response = HttpResponse(resp, mimetype='application/xml')
    return response
def rssfeedCreate(request):
    """Return the five newest blog entries as an RSS response."""
    from django.utils import feedgenerator
    channel = feedgenerator.Rss201rev2Feed(
        title=u'my blog rss',
        link=u'/feed/',
        description=u'this is a rss of my blog',
    )
    recent = Entries.objects.all().order_by('-created')[:5]
    for post in recent:
        channel.add_item(title=post.Title,
                         link=u'/entry/%d/' % post.id,
                         description=post.Content,
                         pubdate=post.created,
                         categories=(post.Category.Title, ))
    return HttpResponse(channel.writeString('utf-8'),
                        content_type='application/rss+xml')


#---TemplateView
#class HomeView(TemplateView):
#    teplate_name = 'home.html'
def generate_rss(self):
    """Write latest_news.rss under MEDIA_ROOT from the last day's agency news."""
    from django.conf import settings
    from django.utils import feedgenerator
    from nm.models import Newsitem
    import os
    file_name = "latest_news.rss"
    file_path = os.sep.join([settings.MEDIA_ROOT, file_name])
    file_url = os.sep.join([settings.MEDIA_URL, "site_media", file_name])
    feed = feedgenerator.Rss201rev2Feed(
        title=u"Newsmemory",
        link=file_url,
        description=u"Dernières nouvelles",
        language=u"fr",
    )
    # Only items from the last 24 hours, restricted to the main wire agencies.
    start_date = datetime.datetime.now() - timedelta(days=1)
    qs = Newsitem.objects.select_related().filter(
        time__gt=start_date).filter(
            Q(source__name="AFP") | Q(source__name="AP")
            | Q(source__name="Reuters")
            | Q(source__name="ATS")).order_by('-time')[:50]
    # BUG FIX: was a Python 2 print statement (a syntax error on Python 3).
    print("qs.count()", qs.count())
    # TODO break lines in text
    # TODO display source and original link
    for r in qs:
        # text = "<br />".join([r.source.name, "%s" % r.time, r.text])
        feed.add_item(title=r.title, link=r.get_absolute_url(), description="")
    # BUG FIX: context manager guarantees the file is closed even on error
    # (the original open/write/close leaked the handle if write raised).
    with open(file_path, 'w') as fp:
        feed.write(fp, 'utf-8')
def test_latest_post_date_returns_utc_time(self):
    """latest_post_date() must be UTC-aware regardless of USE_TZ."""
    for use_tz in (True, False):
        with self.settings(USE_TZ=use_tz):
            feed = feedgenerator.Rss201rev2Feed('title', 'link',
                                                'description')
            tzinfo = feed.latest_post_date().tzinfo
            self.assertEqual(tzinfo, utc)
def test_feed_without_feed_url_gets_rendered_without_atom_link(self):
    """Omitting feed_url must leave the feed without an <atom:link> element."""
    feed = feedgenerator.Rss201rev2Feed('title', '/link/', 'descr')
    # FIX: assertEquals is a deprecated alias (removed in Python 3.12);
    # assertIsNone also gives a clearer failure message for a None check.
    self.assertIsNone(feed.feed['feed_url'])
    feed_content = feed.writeString('utf-8')
    self.assertNotIn('<atom:link href=', feed_content)
def test_Rss201rev2Feed(inp):
    # Smoke/fuzz check: constructing a feed whose title is an arbitrary
    # input must not raise.
    feedgenerator.Rss201rev2Feed(inp, "link", "description")
def create_portal_feed(request, portal):
    """Build (but do not render) an RSS feed object for the given portal."""
    home_url = domain_reverse(portal,
                              'portals.frontend.views.portal_home',
                              args=[portal.address])
    return feedgenerator.Rss201rev2Feed(
        title=portal.title,
        link=request.build_absolute_uri(home_url),
        description=portal.subtitle,
    )