Example 1

update_github fetches the most recent commits for each of a user's GitHub repositories, stores a JSON-encoded snapshot in the App Engine datastore, and clears the memcache entry so the next read is served fresh data.
import logging

from google.appengine.api import memcache
from google.appengine.api.urlfetch import DownloadError
from github2.client import Github  # assumed: the old python-github2 client

import settings  # app-local module (assumed): GITHUB_USER, GITHUB_API_KEY, NUM_GITHUB_COMMITS


def update_github():
    logging.info("updating github")
    github = Github(username=settings.GITHUB_USER, api_token=settings.GITHUB_API_KEY)
    commits = []
    recent_commits = []
    try:
        # Gather commits from every repository owned by the configured user.
        for r in github.repos.list(for_user=settings.GITHUB_USER):
            new_commits = github.commits.list("%s/%s" % (settings.GITHUB_USER, r.name), 'master')
            for c in new_commits:
                c.repo = r  # remember which repo each commit came from
            commits.extend(new_commits)
        # Sort chronologically, then keep only the newest N, newest first.
        all_commits = sorted(commits, key=lambda c: c.committed_date)
        commits = reversed(all_commits[-settings.NUM_GITHUB_COMMITS:])
        
        # Insert a space after "github.com/" so long commit URLs can wrap.
        linebreak_message = lambda m: m.replace('github.com/', 'github.com/ ')
        
        for c in commits:
            # Zero-pad so minutes render as "05" rather than "5".
            minute = "%02d" % c.committed_date.minute
            recent_commits.append({
                'repo_name': c.repo.name,
                'repo_url': c.repo.url,
                'time': {
                    'day': c.committed_date.day,
                    'month': c.committed_date.month,
                    'year': c.committed_date.year,
                    'hour': c.committed_date.hour,
                    'minute': minute
                },
                'message': linebreak_message(c.message),
                'url': c.url
            })
        
        # Replace the stored snapshot and invalidate the cached copy.
        # delete_all, RecentCommits and encode_commits are app-local helpers/models.
        delete_all(RecentCommits)
        RecentCommits(commitsJson=encode_commits(recent_commits)).put()
        memcache.delete("commits")
    except DownloadError:
        # Some query to Github took too long
        logging.info("unable to download from github")
    
    return recent_commits
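
The memcache.delete("commits") call above implies that commit reads go through the same caching helper used for posts in Example 2. A plausible wiring, assuming a decode_commits counterpart to encode_commits:

# Hypothetical wiring (assumption), mirroring get_posts in Example 2;
# decode_commits is the assumed inverse of encode_commits.
get_commits = memcache_or_db_or_web(
    "commits",
    lambda: decode_commits(RecentCommits.all()[0].commitsJson),
    update_github
)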
Example 2

This excerpt is the tail of update_posterous, the refresher passed to get_posts below: it reduces each post to its first paragraph, stores a JSON-encoded snapshot in the datastore, and returns the summaries.
        def first_par_for_post(post):
            # firstPar (not shown in this excerpt) is assumed to be a compiled
            # regex whose named group "first" captures the opening paragraph.
            summary_match = firstPar.match(post.body)
            if summary_match:
                return summary_match.group('first')
            else:
                return post.body
        posts = [{
            'title': post.title,
            'link': post.link,
            'body': first_par_for_post(post),
            'date': post.date.strftime("%B %d, %Y")  # e.g. "January 05, 2013"
        } for post in posts_repr]
        
        # Replace the stored snapshot with the fresh summaries.
        delete_all(RecentPosts)
        RecentPosts(postsJson=encode_posts(posts)).put()
        
        return posts
    else:
        logging.info('No primary posterous site')
        return []  # empty result, matching the list returned on success
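
# firstPar is not defined in the excerpt; a minimal sketch (assumption) that
# satisfies first_par_for_post: a named group "first" capturing everything up
# to the first blank line of the post body.
import re
firstPar = re.compile(r"(?P<first>.+?)(?:\r?\n\s*\r?\n|\Z)", re.DOTALL)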

get_posts = memcache_or_db_or_web(
    "posts", 
    lambda: decode_posts(RecentPosts.all()[0].postsJson)[:3], 
    update_posterous
)
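
Neither delete_all nor memcache_or_db_or_web appears in the excerpts. From the call sites, delete_all presumably clears every stored entity of a model, and memcache_or_db_or_web builds a reader that tries memcache first, then the datastore, then the web updater. A sketch under those assumptions:

from google.appengine.ext import db
from google.appengine.api import memcache


def delete_all(model_class):
    # Sketch (assumption): remove every stored entity of the given model,
    # matching how both examples clear the old snapshot before writing.
    db.delete(list(model_class.all(keys_only=True)))


def memcache_or_db_or_web(key, db_getter, web_updater):
    # Sketch (assumption): build a zero-argument reader with three fallbacks:
    # memcache hit -> datastore read via db_getter -> fresh fetch via web_updater.
    def read():
        value = memcache.get(key)
        if value is not None:
            return value
        try:
            value = db_getter()  # e.g. decode the stored JSON snapshot
        except IndexError:
            # No snapshot stored yet (RecentPosts.all()[0] fails): go to the web.
            value = web_updater()
        memcache.set(key, value)
        return value
    return read

Under this reading, get_posts() serves from memcache when warm, falls back to the RecentPosts snapshot, and only calls update_posterous when both layers are empty.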