def feeds_tag(request, tag):
    """Render an RSS 2.0 feed of the 20 newest published entries for *tag*."""
    feed = feedgenerator.Rss201rev2Feed(
        title=u'Django Utilidades :: Marinho Brandao',
        link=settings.PROJECT_ROOT_URL,
        description=u'',
        language="pt-br",
    )
    query = Entry.all().filter('published =', True)
    query = query.filter('show_in_rss =', True).filter('tags =', tag)
    query.order('-pub_date')
    for entry in query[:20]:
        feed.add_item(
            title=entry.title,
            # PROJECT_ROOT_URL ends with a slash; strip it before appending.
            link=settings.PROJECT_ROOT_URL[:-1] + entry.get_absolute_url(),
            description=entry.get_text(),
        )
    response = HttpResponse(mimetype="application/xhtml+xml")
    feed.write(response, 'utf-8')
    return response
def index(request):
    """Planet index: entries grouped by publication day, then by feed, newest day first."""
    q_feed = Feed.all()
    feed_list = q_feed.fetch(q_feed.count())
    q_entry = Entry.all()
    entry_list = q_entry.fetch(q_entry.count())

    # Bucket tree: day -> feed -> [entries]
    dict_tree = {}
    for entry in entry_list:
        dict_tree.setdefault(entry.pub_date(), {}).setdefault(entry.feed, []).append(entry)

    # Flatten into a list of per-day feed groups, newest day first.
    recent_list = []
    for day in sorted(dict_tree, reverse=True):
        blog_list = dict_tree[day].values()
        blog_list.sort(pub_dttm_desc)
        recent_list.append(blog_list)

    return render_to_response('planet/index.html', {
        'feed_list': feed_list,
        'recent_list': recent_list,
    })
def index(handler):
    """Build the index template context; include the signed-in account's drafts."""
    context = {}
    account = handler.session.get("account", False)
    if account:
        context["drafts"] = Entry.all().filter("author =", db.Key(account))
    return context
def get(self):
    """Paginated home page: the 20 entries older than ?older_than= (a date ordinal)."""
    self.response.headers['Content-Type'] = 'text/html'
    # Default to tomorrow's ordinal so every existing entry qualifies.
    default_ordinal = datetime.datetime.now().date().toordinal() + 1
    older_than = datetime.date.fromordinal(
        int(self.request.get("older_than", default_ordinal)))
    oldest = default_ordinal
    pieces = []
    for entry in Entry.all().filter("date <", older_than).order('-date').run(
            limit=20):
        pieces.append(entry.render())
        oldest = entry.date.toordinal()
    body = "".join(pieces)
    # NOTE(review): the +41 "Newer" offset looks arbitrary — confirm intent.
    nav = """
<div class='row'>
  <div class='span4 offset4'>
    <a href='/?older_than=%d'>Newer</a> --
    <a href='/?older_than=%d'>Older</a>
  </div>
</div>""" % (oldest + 41, oldest)
    self.response.out.write(indexTemplate.render({
        'title': 'Home',
        'body': nav + body + nav
    }))
def detail(slug):
    """Render the detail page for the entry matching *slug*."""
    entry = get_object_or_404(Entry.all(), Entry.slug == slug)
    # Concatenation order preserves the historical leading space.
    tags = "".join(" " + t.tag for t in entry.tags)
    return render_template("detail.html", entry=entry, tags=tags)
def post(self):
    """Restore a full backup: replace ALL entries and todos with the POSTed JSON.

    Expects two request fields, "entries" and "todos", each a JSON array of
    objects understood by the corresponding model's from_json().

    WARNING: destructive and non-transactional — existing rows are deleted
    before the new ones are written, so a mid-way failure loses data.
    """
    enforce_key(self)  # reject callers without the shared secret

    entries = json.loads(self.request.get("entries"))
    for old in Entry.all():
        old.delete()
    for data in entries:
        entry = Entry()
        entry.from_json(data)
        entry.put()

    todos = json.loads(self.request.get("todos"))
    for old in ToDo.all():
        old.delete()
    for data in todos:
        todo = ToDo()
        todo.from_json(data)
        todo.put()

    # Fixed typo in the user-facing message: "successfuly" -> "successfully".
    self.response.out.write("Backup successfully restored.")
def detail(slug):
    """Show one entry, looked up by slug, with its tags joined into a string."""
    entry = get_object_or_404(Entry.all(), Entry.slug == slug)
    tags = ""
    for entry_tag in entry.tags:
        tags += " " + entry_tag.tag
    return render_template('detail.html', entry=entry, tags=tags)
def get(self):
    """Export every entry, newest first, as a downloadable JSON attachment."""
    payload = []
    for entry in Entry.all().order('-date'):
        payload.append(entry.to_dict())
    headers = self.response.headers
    headers['Content-Type'] = "application/json"
    headers['Content-Disposition'] = "attachment; filename=entries.json"
    self.response.out.write(json.dumps(payload))
def get(self):
    """Mail a diary reminder if there is no entry newer than yesterday.

    When no entry exists for today, the reminder also quotes the entries
    from exactly 30 and 180 days ago (when they exist). The trailing
    ``diaryentry<timestamp>tag`` token in the body presumably lets an
    inbound-mail handler match replies back — TODO confirm against the
    mail receiver.
    """
    today = datetime.date.today()
    # Anything dated after yesterday counts as "today's entry exists".
    q = Entry.all().filter("date >", today - datetime.timedelta(days=1))
    msg = ""
    if q.count() <= 0:
        # No entry yet today: build the optional flash-back section.
        q = Entry.all().filter("date =", today - datetime.timedelta(days=30))
        old_entry = ""
        if q.count() > 0:
            old_entry = "\tEntry from 30 days ago\n%s\n\n" % q[0].content
        q = Entry.all().filter("date =", today - datetime.timedelta(days=180))
        if q.count() > 0:
            old_entry += "\tEntry from 180 days ago\n%s\n\n" % q[0].content
        mail.send_mail(sender="%s <%s>" % (DIARY_NAME, DIARY_EMAIL),
                       to="%s <%s>" % (RECIPIENT_NAME, RECIPIENT_EMAIL),
                       subject="Entry reminder for %s" % today.strftime("%b %d"),
                       body="""Don't forget to update your diary! Remember to include short snippets of important things you did today, as well as a list of things you are grateful for. Just respond to this message with todays entry. %s ----- diaryentry%dtag """ % (old_entry, int(time.time())))
        msg = "Reminder sent"
    else:
        msg = "I already have an entry for today"
    self.response.out.write(indexTemplate.render({
        'title': 'Ideas',
        'body': msg,
        'active_page': 'reminder'
    }))
def entry_index(request):
    """Context for the blog index: the ``latest`` entry plus up to 10 ``posts``.

    Returns locals() as the template context, so the local variable names
    here are part of this view's contract.
    """
    _posts = Entry.all().filter('published =', True)
    _posts.order('-pub_date')
    _posts = list(_posts)
    try:
        latest = _posts[0]
    except IndexError:
        latest = None
    # Conditional expression replaces the fragile ``x and a or b`` idiom.
    posts = _posts[1:11] if len(_posts) > 1 else []
    return locals()
def get(self, pageindex):
    """Render one page (PAGESIZE items) of entries, newest insert first."""
    logging.info(pageindex)
    pi = max(1, int(pageindex))  # clamp page index to at least 1
    offset = (pi - 1) * PAGESIZE
    logging.info(offset)
    query = Entry.all()
    query.order('-insertTime')
    total = query.count()
    its = query.fetch(PAGESIZE, offset)
    self.render_template('index.html', {
        'title': 'Entry List',
        'items': its,
        'total': total,
        'pageindex': pi,
        'pagesize': PAGESIZE
    })
def get(self, ordinal):
    """List the month's entries (month given as a date ordinal), each with a Pick form."""
    month = date.fromordinal(int(ordinal))
    month_query = Entry.all().filter("date >=", month).filter("date <", next_month(month))
    pieces = []
    for entry in month_query:
        pieces.append("""%s
<form action='/highlights/month/%s' method='POST'>
<input type='hidden' name='key' value='%s'>
<input type='submit' value='Pick'>
</form><br><br>""" % (entry.render(), ordinal, entry.key()))
    self.response.out.write(indexTemplate.render({
        'title': 'Highlights - Monthly',
        'body': "".join(pieces),
        'active_page': 'highlights'
    }))
def tag_index(request, tag):
    """Context for a tag page: ``latest`` entry plus the remainder in ``posts``.

    Raises Http404 when the tag has no published entries. Returns locals()
    as the template context, so the local names are part of the contract.
    """
    posts = Entry.all().filter('published =', True).filter('tags =', tag)
    posts.order('-pub_date')
    posts = list(posts)
    if not posts:
        raise Http404
    latest = posts[0]
    # Slicing never raises ValueError; the old try/except was dead code.
    posts = posts[1:]
    return locals()
def get(self):
    """Collect idea snippets (from entries containing '--') into a bullet list."""
    ideas = []
    for entry in Entry.all().order('-date'):
        if "--" in entry.content:
            ideas += self.scrape_ideas(entry.content)
    list_items = ["\t<li>%s</li>\n" % idea for idea in ideas]
    body_text = "<ul>\n" + "".join(list_items) + "</ul>"
    self.response.out.write(indexTemplate.render({
        'title': 'Ideas',
        'body': body_text,
        'active_page': 'ideas'
    }))
def tag_index(request, tag):
    """Tag archive context: newest published entry as ``latest``, the rest as ``posts``.

    Raises Http404 for an unknown/empty tag. Returns locals(), so local
    variable names double as template-context keys.
    """
    posts = Entry.all().filter('published =', True).filter('tags =', tag)
    posts.order('-pub_date')
    posts = list(posts)
    if not posts:
        raise Http404
    latest = posts[0]
    # Removed dead code: list slicing cannot raise ValueError.
    posts = posts[1:]
    return locals()
def year_index(request, year):
    """Context for a year archive page (date-range filtering currently disabled).

    Returns locals() as the template context; start_date/end_date are kept
    in scope even though the range filter below is commented out, since the
    template may reference them.
    """
    start_date = datetime.datetime(int(year), 1, 1)
    end_date = datetime.datetime(int(year), 12, 31)
    # The pub_date range filter is deliberately disabled:
    # .filter('pub_date >=', start_date).filter('pub_date <=', end_date)
    posts = Entry.all().filter('published =', True)
    posts.order('-pub_date')
    posts = list(posts)
    if not posts:
        raise Http404
    latest = posts[0]
    # Slicing never raises ValueError; the old try/except was dead code.
    posts = posts[1:]
    return locals()
def year_index(request, year):
    """Year archive context; the pub_date range filter is currently disabled.

    Returns locals(); start_date/end_date remain available to the template
    even though the query below does not use them.
    """
    start_date = datetime.datetime(int(year), 1, 1)
    end_date = datetime.datetime(int(year), 12, 31)
    # Disabled on purpose:
    # .filter('pub_date >=', start_date).filter('pub_date <=', end_date)
    posts = Entry.all().filter('published =', True)
    posts.order('-pub_date')
    posts = list(posts)
    if not posts:
        raise Http404
    latest = posts[0]
    # Removed dead code: a slice cannot raise ValueError.
    posts = posts[1:]
    return locals()
def get(self):
    """Render the month-by-month highlights index.

    Months with a chosen Highlight show it inline; other months link to the
    picker page. Walks from the month of the oldest entry up to today.
    """
    out = ""
    first = Entry.all().order('date').get()
    if first is None:
        # Robustness fix: an empty datastore used to crash with
        # AttributeError on .get().date — render an empty page instead.
        self.response.out.write(indexTemplate.render({
            'title': 'Highlights',
            'body': out,
            'active_page': 'highlights'
        }))
        return
    month = date(first.date.year, first.date.month, 1)
    while month < date.today():
        h = Highlight.all().filter("date =", month).filter("period =", "month").get()
        if h:
            out += "<h2>%s</h2>\n" % month.strftime("%B %Y")
            out += h.entry.render()
        else:
            out += "<a href='/highlights/month/%d'><h2>%s</h2></a>" % (
                month.toordinal(), month.strftime("%B %Y"))
        month = next_month(month)
    self.response.out.write(indexTemplate.render({
        'title': 'Highlights',
        'body': out,
        'active_page': 'highlights'
    }))
def index(request):
    """Planet front page: entries bucketed per day and per feed, newest day first."""
    q_feed = Feed.all()
    feed_list = q_feed.fetch(q_feed.count())
    q_entry = Entry.all()
    entry_list = q_entry.fetch(q_entry.count())

    # Bucket tree: publication day -> feed -> entries.
    dict_tree = {}
    for item in entry_list:
        per_day = dict_tree.setdefault(item.pub_date(), {})
        per_day.setdefault(item.feed, []).append(item)

    days = dict_tree.keys()
    days.sort(reverse=True)

    # Rebuild as a nested list, each day's feed groups sorted by recency.
    recent_list = []
    for day in days:
        blog_list = dict_tree[day].values()
        blog_list.sort(pub_dttm_desc)
        recent_list.append(blog_list)

    return render_to_response("planet/index.html",
                              {"feed_list": feed_list,
                               "recent_list": recent_list})
def feeds_tag(request, tag):
    """Serve the RSS 2.0 feed of the 20 most recent published entries tagged *tag*."""
    lang = "pt-br"
    feed = feedgenerator.Rss201rev2Feed(
        title=u'Django Utilidades :: Marinho Brandao',
        link=settings.PROJECT_ROOT_URL,
        description=u'',
        language=lang,
    )
    entries = Entry.all().filter('published =', True)
    entries = entries.filter('show_in_rss =', True)
    entries = entries.filter('tags =', tag)
    entries.order('-pub_date')
    for item in entries[:20]:
        feed.add_item(
            title=item.title,
            # Trim PROJECT_ROOT_URL's trailing slash before appending the path.
            link=settings.PROJECT_ROOT_URL[:-1] + item.get_absolute_url(),
            description=item.get_text(),
        )
    response = HttpResponse(mimetype="application/xhtml+xml")
    feed.write(response, 'utf-8')
    return response
def items(self):
    """Feed items: currently every Entry in the datastore (see FIXME)."""
    # return Entry.objects.filter(pub_dttm__gte=datetime.date.today()-datetime.timedelta(14)).order_by('-pub_dttm', '-id')
    # FIXME: restore a recency filter instead of fetching everything.
    query = Entry.all()
    return query.fetch(query.count())
def items(self):
    """Return the ten newest entries flagged for inclusion in the RSS feed."""
    feed_entries = Entry.all().order('-pub_date').filter('show_in_rss =', True)
    return feed_entries[:10]
print ".. removes 1000 entries from the datastore .." print "run using remote_api_shell.py clean_datastore.py" from models import Entry query = Entry.all() entries =query.fetch(1000) db.delete(entries)
def import_from_json(data, show=False): ret = [] if show: print len([i for i in Entry.all()]) # Deserialize objects = simplejson.loads(data) for obj in objects: if obj['model'] != 'blog.entry': continue #if obj['fields']['media_type'] == 'I': # print "'%s': %s"%(obj['fields']['slug'], obj['pk']) #continue if obj['fields']['media_type'] != 'P' or not obj['fields']['published']: continue msg = '%d %s'%(obj['pk'], obj['fields']['title']) if show: print msg else: ret.append(msg) # Blog entry try: entry = Entry.all().filter('old_id =', int(obj['pk']))[0] except IndexError: entry = Entry() entry.old_id = obj['pk'] m = RE_DATETIME.match(obj['fields']['pub_date']) groups = [int(i) for i in m.groups()] entry.pub_date = datetime.datetime(*groups) entry.title = obj['fields']['title'] entry.description = obj['fields']['description'] entry.format = obj['fields']['format'] entry.published = True entry.show_in_rss = False entry.slug = obj['fields']['slug'] entry.tags = [db.Category(TAGS[tag]) for tag in obj['fields']['tags']] text = obj['fields']['content'] text = text.replace('http://media.marinhobrandao.com/','/') f = RE_IMG_URL.findall(text) rep = [] for url in f: m = RE_IMG_URL2.match(url) new_url = '/media/img/upload/%s'%IMAGES[m.group(1)] if show: print '\t', new_url text = text.replace(url, new_url) entry.text = text entry.save() # Gallery image msg = [i.slug for i in Entry.all()] if show: print msg else: ret.append(' '.join([i for i in msg]))
def import_from_json(data, show=False): ret = [] if show: print len([i for i in Entry.all()]) # Deserialize objects = simplejson.loads(data) for obj in objects: if obj['model'] != 'blog.entry': continue #if obj['fields']['media_type'] == 'I': # print "'%s': %s"%(obj['fields']['slug'], obj['pk']) #continue if obj['fields']['media_type'] != 'P' or not obj['fields']['published']: continue msg = '%d %s' % (obj['pk'], obj['fields']['title']) if show: print msg else: ret.append(msg) # Blog entry try: entry = Entry.all().filter('old_id =', int(obj['pk']))[0] except IndexError: entry = Entry() entry.old_id = obj['pk'] m = RE_DATETIME.match(obj['fields']['pub_date']) groups = [int(i) for i in m.groups()] entry.pub_date = datetime.datetime(*groups) entry.title = obj['fields']['title'] entry.description = obj['fields']['description'] entry.format = obj['fields']['format'] entry.published = True entry.show_in_rss = False entry.slug = obj['fields']['slug'] entry.tags = [db.Category(TAGS[tag]) for tag in obj['fields']['tags']] text = obj['fields']['content'] text = text.replace('http://media.marinhobrandao.com/', '/') f = RE_IMG_URL.findall(text) rep = [] for url in f: m = RE_IMG_URL2.match(url) new_url = '/media/img/upload/%s' % IMAGES[m.group(1)] if show: print '\t', new_url text = text.replace(url, new_url) entry.text = text entry.save() # Gallery image msg = [i.slug for i in Entry.all()] if show: print msg else: ret.append(' '.join([i for i in msg]))
def admin_entry_export_all(request):
    """Serialize every entry (ordered by title) and return it as XML."""
    items = Entry.all().order('title')
    return HttpResponse(serialize(items), mimetype='text/xml')
def admin_index(request):
    """Admin index context: every entry, newest first.

    Returns locals() as the template context, so local variable names are
    the context keys. NOTE(review): ``list`` shadows the builtin, but
    renaming it would change the template's context key — leave as-is.
    """
    list = Entry.all()
    list.order('-pub_date')
    return locals()