class StoreActions():
    """Persists submitted URLs as FeedEntry records."""

    def __init__(self):
        # Per-instance helpers for markup conversion and page fetching.
        self.converter = Converter()
        self.urlFetcher = UrlFetcher()

    def addUrl(self, url, name, title, description):
        """Store *url* as a FeedEntry under feed *name*.

        Skips the entry when it matches the most recently stored URL for
        the same feed. When *title* is empty, it is scraped from the
        target page; if scraping fails, the URL itself is used as title.
        """
        # NOTE(review): the original code called url.replace('&', '&'),
        # which is a no-op; it presumably meant to unescape '&amp;' back
        # to '&' -- TODO confirm intent before changing behavior.
        formatedUrl = url
        if name == 'somename' and not url.startswith('http://en.wikipedia.org/'):
            return  # Spam protection for default feed name
        lastFeedEntries = FeedEntry.objects.filter(name=name).order_by('-creation_date')
        # Do not add the same URL twice in a row for this feed.
        if not lastFeedEntries or formatedUrl != lastFeedEntries[0].url:
            feedentry = FeedEntry(url=formatedUrl, name=name)
            feedentry.description = self.converter.convert(description)
            if not title:
                try:
                    # Scrape the contents of the page's <title> element.
                    title = self.urlFetcher.fetch(url, '(?<=<(title|TITLE)>)[^<|^\r|^\n]*')
                except Exception:
                    # Best effort: log and fall through; the URL is used
                    # as the title by the fallback below.
                    logging.exception('Error while fetching page title:')
            feedentry.title = self.converter.convert(title)
            if not feedentry.title:
                feedentry.title = feedentry.url
            feedentry.save()
class StoreActions():
    """Persists submitted URLs as FeedEntry records.

    NOTE(review): StoreActions is defined twice in this file; this later
    definition rebinds the name, so it is the one in effect at runtime.
    """

    def __init__(self):
        # Per-instance helpers for markup conversion and page fetching.
        self.converter = Converter()
        self.urlFetcher = UrlFetcher()

    def addUrl(self, url, name, title, description):
        """Store *url* as a FeedEntry under feed *name*.

        Skips the entry when it matches the most recently stored URL for
        the same feed. When *title* is empty, it is scraped from the
        target page; if scraping fails, the URL itself is used as title.
        """
        if name == 'somename' and not url.startswith(
                'http://en.wikipedia.org/'):
            return  # Spam protection for default feed name
        lastFeedEntries = FeedEntry.objects.filter(
            name=name).order_by('-creation_date')
        if len(lastFeedEntries) == 0 or url != lastFeedEntries[
                0].url:  # Do not add same URL twice !
            feedentry = FeedEntry(url=url, name=name)
            feedentry.description = self.converter.convert(description)
            if not title:
                try:
                    # Scrape the contents of the page's <title> element.
                    title = self.urlFetcher.fetch(
                        url, '(?<=<(title|TITLE)>)[^<|^\r|^\n]*')
                except Exception, err:
                    logging.exception('Error while fetching page title:')
                    # NOTE(review): dead store -- this assignment is
                    # unconditionally overwritten just below.
                    feedentry.title = feedentry.url
            feedentry.title = self.converter.convert(title)
            if not feedentry.title:
                feedentry.title = feedentry.url
            feedentry.save()
import datetime
import os
import logging

import webapp2
from google.appengine.ext import db
from google.appengine.ext.webapp import template

from converter import Converter
from urlfetcher import UrlFetcher
from feedNameCleaner import FeedNameCleaner
from limitParser import LimitParser

# Only log errors and above for the whole application.
logging.getLogger().setLevel(logging.ERROR)

# Module-level helper singletons shared by all request handlers.
converter = Converter()
urlFetcher = UrlFetcher()
feedNameCleaner = FeedNameCleaner()
limitParser = LimitParser()


class Feed(db.Model):
    """A single feed item stored in the App Engine datastore."""

    # name: which feed the entry belongs to.
    name = db.StringProperty(multiline=False)
    url = db.StringProperty(multiline=False)
    title = db.StringProperty(multiline=False)
    # Set automatically when the entity is first saved.
    creation_date = db.DateTimeProperty(auto_now_add=True)
    description = db.StringProperty(multiline=True)


class MainPage(webapp2.RequestHandler):
    def get(self):
        """Serve the feed; accepts 'name'/'n' and 'limit'/'l' query params.

        The short forms 'n' and 'l' are fallbacks for the long parameter
        names. NOTE(review): the method body appears truncated in this
        chunk -- only the parameter parsing is visible here.
        """
        name = feedNameCleaner.clean(self.request.get('name') or self.request.get('n'))
        limit = limitParser.parse(self.request.get('limit') or self.request.get('l'))
def __init__(self):
    """Create the helper objects this instance delegates to."""
    markup_converter = Converter()
    page_fetcher = UrlFetcher()
    self.converter = markup_converter
    self.urlFetcher = page_fetcher