Example #1
            # firstPar is assumed to be a compiled regex with a named
            # group 'first' capturing the opening paragraph.
            summary_match = firstPar.match(post.body)
            if summary_match:
                return summary_match.group('first')
            # No first paragraph found: fall back to the whole body.
            return post.body
        # Build a JSON-serializable summary for each post.
        posts = [{
            'title': post.title,
            'link': post.link,
            'body': first_par_for_post(post),
            'date': post.date.strftime("%B %d, %Y")
        } for post in posts_repr]
        
        # Replace the stored datastore copy with the fresh posts.
        delete_all(RecentPosts)
        RecentPosts(postsJson=encode_posts(posts)).put()
        
        return posts
    else:
        logging.info('No primary posterous site')
        return {}

# Cached getter: memcache first, then the stored RecentPosts row
# (trimmed to the three most recent posts), then a fresh Posterous fetch.
get_posts = memcache_or_db_or_web(
    "posts",
    lambda: decode_posts(RecentPosts.all()[0].postsJson)[:3],
    update_posterous
)
Example #2
            # Zero-pad single-digit minutes so times render as HH:MM.
            if len(minute) < 2:
                minute = "0" + minute
            # Collect the fields the templates need for each commit.
            recent_commits.append({
                'repo_name': c.repo.name,
                'repo_url': c.repo.url,
                'time': {
                    'day': c.committed_date.day,
                    'month': c.committed_date.month,
                    'year': c.committed_date.year,
                    'hour': c.committed_date.hour,
                    'minute': minute
                },
                'message': linebreak_message(c.message),
                'url': c.url
            })
        
        # Swap in the fresh commits and invalidate the memcache entry.
        delete_all(RecentCommits)
        RecentCommits(commitsJson=encode_commits(recent_commits)).put()
        memcache.delete("commits")
    except DownloadError:
        # A query to GitHub took too long; keep whatever we already have.
        logging.info("unable to download from github")
    
    return recent_commits

# Cached getter: memcache first, then the stored RecentCommits row,
# then a fresh GitHub fetch.
get_commits = memcache_or_db_or_web(
    "commits",
    lambda: decode_commits(RecentCommits.all()[0].commitsJson),
    update_github
)