Code Example #1
File: api.py | Project: teliov/unilagapi
def get_items():
    # Read the pagination parameters from the query string, defaulting
    # to the first page and 100 items per page
    page = int(request.query.page or 1)
    limit = int(request.query.limit or 100)

    # Fetch the requested page of news items and serialize each row
    items = NewsItem.select().paginate(page, limit)
    result = []
    for item in items:
        result.append(model_to_dict(item))

    # Build pagination metadata from the full result set
    pagination = get_pagination_object(NewsItem.select(), page, limit)
    response.set_header('content-type', 'application/json')
    response_obj = {"data": result, "pagination": pagination}
    return json.dumps(response_obj, cls=MyEncoder)
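
The get_pagination_object helper the handler calls is not part of this extract. A minimal sketch of what it might look like, assuming it reports the total row count and page count (the key names here are assumptions, not the project's actual code):

def get_pagination_object(query, page, limit):
    # Hypothetical helper: count the full result set and derive page metadata
    total = query.count()
    return {
        'page': page,
        'limit': limit,
        'total': total,
        'pages': (total + limit - 1) // limit,  # ceiling division
    }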
Code Example #2
File: api.py | Project: teliov/unilagapi
def get_item(news_id):
    response.set_header("content-type", "application/json")
    try:
        # Look up the item by primary key; .get() raises DoesNotExist
        # when no row matches
        item = NewsItem.select().where(NewsItem.id == news_id).get()
        obj = model_to_dict(item)
        response_obj = {"data": obj}
        return json.dumps(response_obj, cls=MyEncoder)
    except DoesNotExist:
        # Unknown id: respond with a 404
        abort(404)
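
Both handlers use Bottle's request/response/abort API. A minimal sketch of how they might be wired to routes (the paths and port are assumptions):

from bottle import Bottle

app = Bottle()
app.route('/news', 'GET', get_items)                # assumed path
app.route('/news/<news_id:int>', 'GET', get_item)   # assumed path

if __name__ == '__main__':
    app.run(host='localhost', port=8080)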
Code Example #3
CUR_DIR = os.path.dirname(os.path.realpath(__file__))

from models import NewsItem

# Set up the template engine to look in the templates directory
template_loader = jinja2.FileSystemLoader('templates')
template_env = jinja2.Environment(loader=template_loader)

# Load the template file
template_file = "report.tpl.txt"
template = template_env.get_template(template_file)

# Load all the news items from the past week
seven_days_ago_ts = arrow.utcnow().ceil('hour').shift(days=-7).timestamp()
news_items = NewsItem.select().where(NewsItem.published_ts > seven_days_ago_ts)

# Create a dictionary with all the stories grouped by source
sources = {}
for news_item in news_items:

    # Create a bucket for this source on first sight
    if news_item.source not in sources:
        sources[news_item.source] = {
            'items': [],
            'name': news_item.source,
        }

    # Add the news item
    sources[news_item.source]['items'].append(news_item)

# Sort the news items for each key
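
The extract ends right after this comment; a minimal sketch of the sorting step it announces, assuming the items are meant to appear newest-first by published_ts:

for group in sources.values():
    group['items'].sort(key=lambda i: i.published_ts, reverse=True)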
Code Example #4
File: render.py | Project: greencoder/wxnewsscraper
        return '#'

# Set up the template engine to look in the templates directory
template_loader = jinja2.FileSystemLoader('templates')
template_env = jinja2.Environment(loader=template_loader)

# Add filters to the environment to make them visible in the template
template_env.filters['format_date'] = reformat_date
template_env.filters['get_source_url'] = get_source_url

# Load the template file
template_file = "index.tpl.html"
template = template_env.get_template(template_file)

# Load all the news items
three_days_ago = arrow.utcnow().to('US/Eastern').shift(hours=-72).format('YYYY-MM-DD')
news_items = NewsItem.select().where(
    NewsItem.published_date > three_days_ago,
    NewsItem.hidden == 0
)
# order_by returns a new query rather than mutating in place, so reassign
news_items = news_items.order_by(NewsItem.published_ts)

# Render the template
context = {
    'news_items': news_items,
    'updated_eastern': arrow.utcnow().to('US/Eastern'),
}
output = template.render(context)

# Save the output
filepath = os.path.join(CUR_DIR, 'output/sources.html')
with codecs.open(filepath, 'w', 'utf-8') as f:
    f.write(output)
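
The reformat_date filter registered above is not included in the extract. A minimal sketch of what such a Jinja2 filter could look like, assuming it renders the stored date as a readable Eastern-time string (the format string is an assumption):

def reformat_date(value):
    # Hypothetical filter: parse the stored value with arrow and render
    # it in US/Eastern time, e.g. "Jan 5, 2016 3:00 PM"
    return arrow.get(value).to('US/Eastern').format('MMM D, YYYY h:mm A')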
Code Example #5
File: cron.py | Project: teliov/unilagapi
#! /usr/bin/env python

## To be run every hour

from scrap import UnilagScrap as Scrap
from models import NewsItem
from datetime import datetime

scrapper = Scrap()

news_items = scrapper.get_news_items()

for item in news_items:
    ## let's insert stuff
    # first check that it does not already exist!
    to_db = NewsItem.select().where(NewsItem.slug == item['slug'])
    if not to_db.exists():
        # item has not been in the db before
        to_db = NewsItem(news_hash=item['news_hash'],
                         slug=item['slug'],
                         news_title=item['title'],
                         news_link=item['link'],
                         date_updated=item['date_updated'],
                         intro_text=item['intro_text'],
                         scrapped_at=datetime.now())
        to_db.save()
    else:
        to_db = to_db.get()
        # check if the hash has changed
        if to_db.news_hash != item['news_hash']:
            to_db.news_hash = item['news_hash']
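
The extract cuts off mid-update. A minimal sketch of how this branch might finish, assuming the remaining fields mirror the insert above (this completion is an assumption, not the project's actual code):

            to_db.news_title = item['title']
            to_db.news_link = item['link']
            to_db.date_updated = item['date_updated']
            to_db.intro_text = item['intro_text']
            to_db.scrapped_at = datetime.now()
            to_db.save()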