def home(source=None, variant=None, edition=None):
    api_url = configuration.lookup('CONTENT_API_URL')
    payload = {
        'api-key':              configuration.lookup('CONTENT_API_KEY'),
        'page-size':            10,
        'show-editors-picks':   'true',
        'show-elements':        'image',
        'show-fields':          'all',
        'edition':              edition
    }
    response = requests.get(api_url, params=payload)
    data = response.json()['response']['editorsPicks']
    return render_template('index.html', content=data)
def read(content_id, params = None):

	url = "http://%s%s" % (CONTENT_API_HOST, content_id)

	if params:
		cached_key = configuration.lookup('API_KEY', 'gu-most-popular')
		if not 'api-key' in params and cached_key:
			params['api-key'] = cached_key
		url = url + "?" + urllib.urlencode(params)

	#logging.debug(url)

	cached_data = memcache.get(url)

	if cached_data: return cached_data

	result = fetch(url)

	if not result.status_code == 200:
		logging.warning("Content API read failed: %d" % result.status_code)
		return None

	memcache.set(url, result.content, time = 60 * 15)

	return result.content
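
A quick usage sketch for the read helper above; the content path and parameters are illustrative, not taken from the source:

	content = read('/world/2014/jan/01/example-article', {'format': 'json', 'show-fields': 'headline,trailText'})
	if content is None:
		logging.warning("Content API returned nothing for the example path")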
Example #3
	def post(self):

		email = self.request.params['email']
		subject = self.request.params['subject']
		message = self.request.params['message']

		mail.send_mail(configuration.lookup('FROM_EMAIL'), email, subject, message)

		self.response.write(json.dumps({"email": "sent"}))
Example #4
	def get(self):
		consumer_key = lookup("t_consumer_key")
		consumer_secret = lookup("t_consumer_secret")
		access_token = lookup("t_access_token")
		access_token_secret = lookup("t_access_token_secret")
		auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
		auth.set_access_token(access_token, access_token_secret)

		api = tweepy.API(auth, parser=tweepy.parsers.JSONParser())

		for tracked_user in TrackedUser.query():
			username = tracked_user.username

			try:
				user = api.get_user(screen_name=username)

				TrackedUserSnapshot(username=username, profile=json.dumps(user)).put()
				Followers(username=username, followers=user['followers_count']).put()
			except Exception, e:
				logging.fatal("Could not retreive data for %s" % username)
				logging.fatal(e)
def home(source=None, variant=None, edition=None):
    api_url = configuration.lookup('CONTENT_API_URL')
    num_items = 10
    payload = {
        'api-key':              configuration.lookup('CONTENT_API_KEY'),
        'page-size':            num_items,
        'show-editors-picks':   'true',
        'show-elements':        'image',
        'show-fields':          'all',
        'edition':              edition
    }
    options = {
        'showlinks': False,
        '_copyText': 'We produce hard-hitting, internationally recognised journalism every day. Here is a sample of stories currently on our US homepage:'
    }

    if source == 'popular':

        if not edition:
            popular_url = 'http://gu-most-popular.appspot.com/api/most-viewed'
        else:
            popular_url = 'http://rrees-experiments.appspot.com/data/most-popular/' + edition + '/' + str(num_items)

        response = requests.get(popular_url)

        # this is annoying: the most popular feed doesn't use a key,
        # but the experimental one uses 'most_popular'
        if not edition:
            data = response.json()
        else:
            data = response.json()['most_popular']

    else:
        response = requests.get(api_url, params=payload)
        data = response.json()['response']['editorsPicks']


    return render_template('index.html', content=data, options=options, variant=variant, edition=edition, source=source)
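
An illustrative request for the view above; the route and query values are hypothetical:

    # GET /?source=popular&edition=us  -> renders index.html with the most-viewed stories for the US edition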
Example #6
    def get(self):
        consumer_key = lookup("t_consumer_key")
        consumer_secret = lookup("t_consumer_secret")
        access_token = lookup("t_access_token")
        access_token_secret = lookup("t_access_token_secret")
        auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
        auth.set_access_token(access_token, access_token_secret)

        api = tweepy.API(auth, parser=tweepy.parsers.JSONParser())

        for tracked_user in TrackedUser.query():
            username = tracked_user.username

            try:
                user = api.get_user(screen_name=username)

                TrackedUserSnapshot(username=username,
                                    profile=json.dumps(user)).put()
                Followers(username=username,
                          followers=user['followers_count']).put()
            except Exception, e:
                logging.fatal("Could not retreive data for %s" % username)
                logging.fatal(e)
Example #7
    def get(self):
        emails_to_send = models.email.unsent_emails()

        logging.info(emails_to_send)

        sender_address = configuration.lookup('EMAIL_FROM')

        for email in emails_to_send:
            mail.send_mail(sender_address, email.to, email.subject,
                           email.message)
            logging.info(email)
            email.sent = True
            email.put()

        headers.json(self.response)

        output = {'pending_emails': emails_to_send.count()}

        self.response.out.write(json.dumps(output))
Example #8
	def get(self, rota_id):

		if not 'api-key' in self.request.GET:
			webapp2.abort(400, 'No API key specified')
			return

		api_key = self.request.get('api-key')
		if api_key not in configuration.lookup('VALID_API_KEYS', "").split(','):
			webapp2.abort(400, 'The API key {0} specified is invalid'.format(api_key))
			return

		rota = models.lookup(rota_id)

		payload = {
			'rota_id': rota_id,
			'current': rota.current,
		}

		headers.json(self.response)
		self.response.out.write(json.dumps(payload))
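
A hedged sketch of what a client sees from this handler; the route, the key value and the rota values shown are hypothetical:

	# GET /rota/support-rota?api-key=SOME_KEY
	# -> {"rota_id": "support-rota", "current": "alice"}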
Example #9
def graph():
    db_url = configuration.lookup('gdb_url', 'http://localhost:7474/db/data/')
    logging.info(db_url)
    return py2neo.Graph(db_url)
Example #10
def capi_key():
	return configuration.lookup('CONTENT_API_KEY', 'your-app-id')
Example #11
def capi_host():
	return configuration.lookup('CONTENT_API_HOST', 'content.guardianapis.com')
import urlparse
import urllib
import logging
import json
import datetime

from google.appengine.api import memcache

from google.appengine.api.urlfetch import fetch

import configuration

last_30_days = (datetime.date.today() + datetime.timedelta(days=-30)).isoformat()

CONTENT_API_HOST = configuration.lookup('CONTENT_API_HOST', 'content.guardianapis.com')
API_KEY = configuration.lookup('API_KEY')
SEARCH = 'search'
PARAMS = {
    'section' : 'film',
    'tag': 'tone/reviews',
    'show-fields': 'headline,thumbnail,trailText,star-rating',
    'show-tags': 'all',
    'from-date': last_30_days,
    'page': "1",
    }

if API_KEY:
    PARAMS['api-key'] = API_KEY

def read_all(params=None):
    if not params:
        params = PARAMS  # assumption: fall back to the module-level PARAMS defined above
Example #13
def graph():
	db_url = configuration.lookup('gdb_url', 'http://localhost:7474/db/data/')
	logging.info(db_url)
	return py2neo.Graph(db_url)
def capi_host():
	return configuration.lookup('CONTENT_API_HOST', 'content.guardianapis.com')
Example #15
import webapp2
import jinja2
import os
import json
import logging
import datetime
from urllib import quote, urlencode
from google.appengine.api import urlfetch
from google.appengine.api import memcache

import headers
import configuration

API_KEY = configuration.lookup('API_KEY')

def reading_seconds(words):
	return (words / 250) * 60
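
As a worked example (not from the original source): at the assumed 250 words per minute, a 1,000-word article gives (1000 / 250) * 60 = 240 seconds of reading time. Because this is integer division under Python 2, anything shorter than 250 words rounds down to 0 seconds.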


def read_todays_content(page = 1, results = None):
	url = "http://content.guardianapis.com/search"

	today = datetime.date.today()

	payload = {
		"page" : str(page),
		"page-size" : "50",
		"format" : "json",
		"show-fields" : "wordcount,headline,standfirst,thumbnail",
		"tags" : "tone",
		"from-date" : today.isoformat(),
Example #16
def read_todays_content(page = 1):
	url = "http://content.guardianapis.com/search"

	today = datetime.date.today()

	payload = {
		"page" : str(page),
		"page-size" : "50",
		"format" : "json",
		"show-fields" : "wordcount",
		"tags" : "tone",
		"from-date" : today.isoformat(),
		"api-key" : configuration.lookup('API_KEY'),
		}

	final_url = url + "?" + urlencode(payload)
	#logging.info(final_url)

	result = urlfetch.fetch(final_url, deadline = 9)

	if not result.status_code == 200:
		logging.warning("Failed to read from the Content Api")
		logging.warning('Status code: %d' % result.status_code)
		return

	data = json.loads(result.content)

	api_response = data.get("response", {})

	total_pages = api_response.get("pages", None)

	if not total_pages:
		return

	results = api_response.get("results", [])

	for result in results:
		fields = result.get("fields", {})

		if not 'wordcount' in fields: continue

		path = result["id"]

		live_flag = tags.is_live(result)

		lookup = WordcountSummary.query(WordcountSummary.path == path)

		if lookup.count() > 0:

			record = lookup.iter().next()

			current_wordcount = read_wordcount(fields)

			if not current_wordcount == record.wordcount:
				record.wordcount = current_wordcount
				record.put()

			continue


		WordcountSummary(path = path,
			section_id = result["sectionId"],
			wordcount = read_wordcount(fields),
			iso_published_date = result["webPublicationDate"][:10],).put()

	if not int(total_pages) == page:
		read_todays_content(page + 1)
import urlparse
import urllib
import logging

from google.appengine.api.urlfetch import fetch
from google.appengine.api import memcache

import configuration

CONTENT_API_HOST = 'content.guardianapis.com'

api_host = configuration.lookup('CONTENT_API_HOST', CONTENT_API_HOST)
api_key = configuration.lookup('CONTENT_API_KEY')

def content_id(url):
	parsed_url = urlparse.urlparse(url)
	return parsed_url.path

def read(content_id, params = None):
	client = memcache.Client()

	url = "http://%s%s" % (api_host, content_id)

	# guard against a missing params dict and use the hyphenated
	# 'api-key' name consistently when adding the default key
	if params is None:
		params = {}

	if 'api-key' not in params and api_key:
		params['api-key'] = api_key

	if params:
		url = url + "?" + urllib.urlencode(params)

	#logging.info(url)
def capi_key():
	return configuration.lookup('CONTENT_API_KEY', 'your-app-id')
Example #19
import configuration

from models import OphanData

import webapp2
import json

from urllib import urlencode
from urlparse import urlparse

from google.appengine.api.urlfetch import fetch
from google.appengine.api import memcache

import logging

OPHAN_API_KEY = configuration.lookup("OPHAN_API_KEY")

def read_weeks_ophan_data():
	cached_content = memcache.get("ophan_summary")

	if cached_content:
		return json.loads(cached_content)

	base_url = "http://api.ophan.co.uk/api/mostread"

	params = {
		"api-key" : OPHAN_API_KEY,
		"age" : 7 * 24 * 60 * 60,
		"count" : 100,
	}
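
	# A minimal sketch of how this function plausibly continues, following the
	# fetch-and-cache pattern of the read helper earlier in this listing; the
	# log message and the 15-minute cache window are assumptions.
	url = base_url + "?" + urlencode(params)

	result = fetch(url)

	if not result.status_code == 200:
		logging.warning("Ophan read failed: %d" % result.status_code)
		return None

	memcache.set("ophan_summary", result.content, time = 60 * 15)

	return json.loads(result.content)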