def get_story_list(self):
    """
    Get all the stories written by this author since ANALYSIS_STARTDATE,
    newest first.
    """
    from politico.models import Story
    query = Story.all()
    query = query.filter("bylines =", self.key())
    query = query.filter("updated_date >=", ANALYSIS_STARTDATE)
    return query.order("-updated_date")
def get_daily_average(self):
    """
    Average how many stories this author has posted per day since our
    global ANALYSIS_STARTDATE.

    Returns a float. Guards against division by zero when
    ANALYSIS_STARTDATE falls on the current day.
    """
    from politico.models import Story
    obj_list = Story.all().filter("bylines =", self.key())
    # BUG FIX: the property is named "updated_date" everywhere else in
    # this module; the old "update_date" filter matched the wrong
    # (nonexistent) property.
    obj_list = obj_list.filter("updated_date >=", ANALYSIS_STARTDATE)
    date_diff = (datetime.now() - ANALYSIS_STARTDATE).days
    # Avoid ZeroDivisionError on the first day of the analysis window.
    return obj_list.count() / float(max(date_diff, 1))
def index(request):
    """
    The homepage: the 25 most recently updated stories since
    ANALYSIS_STARTDATE.
    """
    recent = Story.all().filter("updated_date >=", ANALYSIS_STARTDATE)
    latest_stories = recent.order('-updated_date').fetch(25)
    return direct_to_template(request, 'index.html', {
        'headline': "Winning the present",
        'object_list': latest_stories,
        'selected': 'index',
    })
def update_daily_stats(request):
    """
    Group stories by day of the week and record the totals in the database.

    Tallies every story updated since ANALYSIS_STARTDATE by weekday
    (0=Monday .. 6=Sunday), serializes the counts as JSON, and stores
    them in a new DailyStats record.
    """
    qs = Story.all().filter("updated_date >=", ANALYSIS_STARTDATE).order("-updated_date")
    data_dict = {}
    for obj in qs:
        this_weekday = obj.updated_local().weekday()
        # IDIOM: dict.get with a default replaces the manual
        # try/except KeyError increment pattern.
        data_dict[this_weekday] = data_dict.get(this_weekday, 0) + 1
    data_json = simplejson.dumps(data_dict)
    logging.info("Creating a new DailyStats record")
    obj = DailyStats(creation_datetime=datetime.now(), data=data_json)
    obj.put()
    return HttpResponse('ok!')
def items(self):
    """Return the 10 most recently updated stories for the feed."""
    recent = Story.all().order("-updated_date")
    return recent[:10]
def update_feed(request):
    """
    Fetch a feed and sync each item with the database.

    For every entry in the feed at ``request.GET['url']``:
      * create a Story for unseen links (keyed on ``entry.id``);
      * create or refresh an Author per comma-separated byline,
        bumping ``last_updated`` when this story is newer;
      * attach the author keys to the story's ``bylines`` and save;
      * enqueue a task to recount each touched author's stories.
    """
    # Fetch and parse the feed body.
    url = request.GET['url']
    content = fetch(url).content
    d = feedparser.parse(StringIO.StringIO(content))
    for entry in d.entries:
        # See if this link already exists in the datastore.
        story = Story.all().filter('link =', entry.id).get()
        if not story:
            # Create a new Story object for an unseen link.
            story = Story(
                link = entry.id,
                title = entry.title,
                updated_date = datetime.fromtimestamp(time.mktime(entry.updated_parsed)),
            )
        # NOTE(review): assumes every entry carries an ``author``
        # attribute -- confirm the source feed guarantees this.
        authors = entry.author.split(',')
        author_keys = []
        for author in authors:
            # Authors are keyed by slug; skip names that slugify to nothing.
            this_slug = str(slugify(author))
            if not this_slug:
                continue
            a = Author.get_by_key_name(this_slug)
            if a:
                # Existing author: bump last_updated if this story is newer.
                if story.updated_date > a.last_updated:
                    a.last_updated = story.updated_date
                    a.put()
            else:
                # Create a new Author record.
                a = Author(
                    key_name = this_slug,
                    name = author,
                    slug = this_slug,
                    story_count = 1,
                    last_updated = story.updated_date,
                )
                a.put()
            author_keys.append(a.key())
        # Attach the bylines and save the story.
        story.bylines = author_keys
        story.put()
        # IDIOM FIX: a plain for loop replaces the throwaway list
        # comprehension that was used purely for its side effects.
        for key in author_keys:
            taskqueue.add(
                url = '/_update_story_count_for_author/',
                params = {'key': key},
                method = 'GET',
            )
    return HttpResponse('ok!')