def post_feeds(self):
    """Create a feed from the JSON request body, then return the full feed list.

    The body must supply "time", "name", "size", "last_feed" and
    "feed_count"; a missing key raises KeyError.
    """
    payload = request.get_json(force=True)
    fields = ("time", "name", "size", "last_feed", "feed_count")
    Feed.create(**{field: payload[field] for field in fields})
    return self.get_feeds()
def post(self):
    """Handle the saved-search creation form POST.

    Two input modes:
      * a pasted Craigslist RSS URL ('rss_url'), from which the feed key is
        derived directly, or
      * individual form fields (city, category, query, price range, ...),
        which are validated and combined into a feed key.

    On validation failure, redirects back to the form with error messages;
    otherwise ensures a Feed entity exists in the datastore for the key.

    NOTE(review): Python 2 / Google App Engine code (`except Exception, e`,
    `dict.has_key`). The success path after `get_or_insert` is not visible
    in this excerpt — presumably the handler continues past it.
    """
    session = is_logged_in(self)
    if not session:
        return self.redirect(REDIR_URL)
    req = self.request
    errors = {}
    # user-visible name for the saved search; validated in both input modes
    name = validate_string(req, errors, 'name', 'search name', MAX_FEED_NAME_LEN)
    if not name:
        name = ''
    rss_url = req.get('rss_url')
    if rss_url:
        # URL mode: derive the feed key straight from the pasted RSS URL
        feed_key = parse_rss_url(rss_url)
        if not feed_key:
            return self.redirect_to_errors(GET_PARAMS, {'error_rss_url': '''This URL isn't in the expected form. Please <a href="/contact">send it to us</a> if you think this is a bug.'''})
        if len(errors):
            return self.redirect_to_self(GET_PARAMS, errors)
    else:
        # form mode: validate each field and build the key from the pieces
        city = validate_string(req, errors, 'city', 'city/region', max_len=50)
        category = validate_string(req, errors, 'category', 'category', 3)
        area = ''  # TODO: add area picker
        if not CATEGORIES.has_key(category):
            errors['category'] = 'Please choose a category.'
        query = validate_string(req, errors, 'query', 'search string', 100, required=False)
        if not query:
            query = ''
        # 'T' = title-only search, 'A' = search all of the ad text
        title_only = req.get('title_only') == 'checked'
        if title_only:
            stype = 'T'
        else:
            stype = 'A'
        # numeric filters are optional; '' means "no constraint"
        min_cost = validate_int(req, errors, 'min_cost', 'Minimum Cost', 0, None, False)
        if not min_cost:
            min_cost = ''
        max_cost = validate_int(req, errors, 'max_cost', 'Maximum Cost', 0, None, False)
        if not max_cost:
            max_cost = ''
        num_bedrooms = validate_int(req, errors, 'num_bedrooms', 'Number of bedrooms', 1, 8, False)
        if not num_bedrooms:
            num_bedrooms = ''
        cats = req.get('cats') == 'checked'
        dogs = req.get('dogs') == 'checked'
        pics = req.get('pics') == 'checked'
        if len(errors):
            return self.redirect_to_self(GET_PARAMS, errors)
        feed_key = Feed.make_key_name(city, category, area, min_cost, max_cost, num_bedrooms, cats, dogs, pics, [], stype, query)
    # make sure the feed is in the datastore
    try:
        feed = Feed.get_or_insert(key_name=feed_key)
    except Exception, e:
        logging.error('Unable to create new Feed (%s): %s' % (feed_key, e))
        return self.redirect_to_self(GET_PARAMS, {'err': 'The service is temporarily unavailable - please try again later.'})
def parse_rss_url(url):
    """Parses a RSS URL from Craigslist and returns the Feed key which describes it.

    Returns None when the URL does not match the expected pattern.
    """
    match = RE_RSS_URL.match(url)
    if match is None:
        return None
    # group numbering: 1 = city, 3 = category, 5 = area, 6 = query parameters
    city = match.group(1)
    cat = match.group(3)
    area = match.group(5)
    qparams = match.group(6)
    cat2, min_ask, max_ask, nb, c, d, hp, n, st, q = parse_rss_url_params(qparams)
    # a category in the query string overrides the one from the URL path
    if cat2:
        cat = cat2
    return Feed.make_key_name(city, cat, area, min_ask, max_ask, nb, c, d, hp, n, st, q)
def post_feed(self, feed_id):
    """Update an existing feed from the JSON request body and return it as JSON.

    The feed is looked up by the "id" field of the posted JSON; the
    `feed_id` URL parameter is not used here — presumably intentional, but
    worth confirming against the routing layer.

    Returns "{}" when no feed with that id exists.
    """
    data = request.get_json(force=True)
    # NOTE: peewee's .get() raises Feed.DoesNotExist on an empty result,
    # which would make a missing-feed check dead code; .first() returns
    # None instead, so the fallback below is reachable.
    feed = Feed.select().where(Feed.id == data["id"]).first()
    if feed is None:
        return "{}"
    feed.time = data["time"]
    feed.name = data["name"]
    feed.size = data["size"]
    feed.last_feed = data["last_feed"]
    feed.feed_count = data["feed_count"]
    feed.save()
    return json.dumps(feed.dict(), cls=MyEncoder)
def parse_rss_url(url):
    """Parses a RSS URL from Craigslist and returns the Feed key which describes it."""
    m = RE_RSS_URL.match(url)
    if not m:
        return None
    # only every other capture is interesting: city, category, area, qparams
    city, _, cat, _, area, qparams = m.groups()[:6]
    params = parse_rss_url_params(qparams)
    # an explicit category in the query parameters wins over the URL path one
    cat2 = params[0]
    if cat2:
        cat = cat2
    min_ask, max_ask, nb, c, d, hp, n, st, q = params[1:]
    return Feed.make_key_name(city, cat, area, min_ask, max_ask, nb, c, d, hp, n, st, q)
def post(self):
    """Fetch the Craigslist RSS feed named by the 'f' request parameter.

    Silently returns (empty 200) when the Feed entity no longer exists;
    responds 500 when the remote fetch fails.

    NOTE(review): Python 2 / Google App Engine code (`urlfetch`,
    `except ..., e`). The fetched response `resp` is unused in the code
    visible here — the handler presumably continues beyond this excerpt.
    """
    feed_key_name = self.request.get('f')
    feed = Feed.get_by_key_name(feed_key_name)
    if not feed:
        return
    feed.extract_values()
    feed_url = feed.make_url(rss=True)
    fhid = feed.hashed_id
    # get the feed from Craigslist
    try:
        resp = urlfetch.fetch(feed_url)
    except urlfetch.Error, e:
        logging.warn('Failed to fetch Craigslist feed (%s) due to fetch failure: %s' % (feed_url, e))
        return self.error(500)
def run(self):
    """Worker loop: once per second, queue a FeedEvent for every Feed whose
    configured time matches the current wall-clock time (HH:MM:SS), updating
    the feed's last-feed timestamp and counter. Exits when stopped().
    """
    log = logging.getLogger('petfeedd')
    log.info("Starting time worker.")
    while not self.stopped():
        # the global semaphore gates whether feedings may happen at all
        if petfeedd.feeding_semaphore:
            now_stamp = time.strftime("%H:%M:%S")
            for due in Feed.select().where(Feed.time == now_stamp):
                log.info("Found feed " + due.name + " at " + now_stamp)
                event = FeedEvent.create(size=due.size, name=due.name)
                self.feed_queue.put(event)
                # record the feeding on the feed itself
                due.last_feed = event.date_updated
                due.feed_count = due.feed_count + 1
                due.save()
        time.sleep(1)
    log.info("Stopping time worker.")
    return
def mapResult(self, cursor):
    """Convert every row of an executed DB cursor into a Feed, mapping each
    value to its column name."""
    names = [col.name for col in cursor.description]
    feeds = []
    for row in cursor.fetchall():
        feeds.append(Feed(dict(zip(names, row))))
    return feeds
def get(self):
    """Render the ad-listing page for a tracked search ("feed").

    Request parameters:
      f    -- feed key name; 'manual' selects manually-added ads, empty
              selects the overall (all-feeds) view.
      t    -- view type: 'newest', 'hidden', or empty (rated/noted ads).
      next -- datastore query cursor for pagination.
      page -- 1-based page number for display.

    Redirects to '/' on login problems and to '/tracker' for invalid
    feed/view combinations or feeds that no longer exist.

    NOTE(review): Python 2 / Google App Engine code (db.Key, query cursors;
    `zip` returns a list here).
    """
    session = is_logged_in(self)
    if not session:
        return self.redirect('/')
    uid = session['my_id']
    now = datetime.datetime.now()
    feed_key_name = self.request.get('f')
    t = self.request.get('t')
    # overall view = aggregate across all feeds; 'newest' needs a concrete feed
    overall_view = (not feed_key_name and t != 'newest')
    if feed_key_name == 'manual':
        # pseudo-feed of ads the user added by hand
        fhid = 'manual'
        age = desc = None
        updating_shortly = False
        if t == 'hidden':
            name = "Manually-Added Ads that were Hidden"
        elif t == 'newest':
            return self.redirect('/tracker')
        else:
            name = "Manually-Added Ads"
    elif feed_key_name:
        fhid = Feed.hashed_id_from_pk(feed_key_name)
        # get the user's name for this feed
        name = get_search_name(self, feed_key_name)
        if name is None:
            return self.redirect('/tracker')  # user is no longer tracking this feed
        elif name is False:
            return self.redirect('/')  # login related error
        # compute how old the data is
        feed_dt_updated = dt_feed_last_updated(feed_key_name)
        if not feed_dt_updated:
            return self.redirect('/tracker?err=That%20feed%20no%20longer%20exists.')
        age = str_age(feed_dt_updated, now)
        td = now - feed_dt_updated
        # data older than MAX_AGE_MIN minutes is flagged as refreshing
        updating_shortly = td.days > 0 or td.seconds > MAX_AGE_MIN * 60
        if updating_shortly:
            age += ' - update in progress'
        # update the feed if we haven't retrieved the latest ads recently
        updating = update_feed_if_needed(feed_key_name)
        if updating is None:
            return self.redirect('/tracker?err=The%20requested%20feed%20does%20not%20exist.')
    elif overall_view:
        age = desc = fhid = None
        updating_shortly = False
        if t == 'hidden':
            name = "All Hidden Ads"
        else:
            name = "All Rated/Noted Ads"
    else:
        # t=newest and feed=all doesn't make sense together
        return self.redirect('/tracker')
    # determine which set of ads to show
    next = self.request.get('next')  # NOTE(review): shadows the builtin `next`
    if t == 'newest':
        # show the newest ads (regardless of whether the user has commented on them or not)
        q = Ad.all().filter('feeds =', fhid).order('-update_dt')
        if next:
            q.with_cursor(next)
        ads = q.fetch(ADS_PER_PAGE)
        # get user comments on these ads, if any
        user_ad_keys = [db.Key.from_path('UserCmt', '%s%s' % (uid, a.cid))
                        for a in ads]
        user_ad_notes = db.get(user_ad_keys)
        title_extra = 'Newest Ads'
    else:
        # show ads this user has commented on/rated (whether to show hidden ads or not depends on t)
        hidden = (t == 'hidden')
        q = UserCmt.all()
        q.filter('uid =', session['my_id'])
        if fhid:
            q.filter('feeds =', fhid)
        if hidden:
            q.filter('dt_hidden >', DT_PRESITE).order('-dt_hidden')
        else:
            q.filter('dt_hidden =', None).order('-rating')
        if next:
            q.with_cursor(next)
        user_ad_notes = q.fetch(ADS_PER_PAGE)
        # get the ads associated with these comments
        ad_keys = [db.Key.from_path('Ad', uan.cid) for uan in user_ad_notes]
        ads = db.get(ad_keys)
        if t == 'hidden':
            title_extra = "Ignored Ads"
        else:
            title_extra = "Ads I've Rated"
    # put the ads and their comments together
    ad_infos = zip(ads, user_ad_notes)
    # check that each UserCmt.feeds field is up to date with Ad.feeds (can
    # only do this when we're searching by Ad, i.e., t=newest)
    if t == 'newest':
        # TODO: only mark as outdated if they are inequal EXCEPT 'manual'
        # TODO: when updating cmt.feeds, don't copy over 'manual' (user-specific)
        # TODO: reconsider this code ...
        outdated = [(ad, cmt) for ad, cmt in ad_infos if cmt and ad.feeds != cmt.feeds]
        if outdated:
            # update any out of date comments
            for ad, cmt in outdated:
                cmt.feeds = ad.feeds
            db.put([cmt for ad, cmt in outdated])
    # whether there may be more ads
    more = (len(ads) == ADS_PER_PAGE)
    if more:
        more = q.cursor()
        # no cursor, or a cursor identical to the incoming one, means done
        if not more or more == str(next):
            more = None
    # get a description of the search we're viewing
    if fhid and fhid != 'manual':
        tmp_feed = Feed(key_name=feed_key_name)
        tmp_feed.extract_values()
        desc = tmp_feed.desc()
    if not next:
        page = 1
    else:
        try:
            page = int(self.request.get('page', 1))
        except ValueError:
            page = 1;
    self.response.headers['Content-Type'] = 'text/html'
    self.response.out.write(MakoLoader.render('search_view.html', request=self.request, ADS_PER_PAGE=ADS_PER_PAGE, ads=ad_infos, more=more, age=age, now=now, search_desc=desc, title_extra=title_extra, page=page, name=name, updating_shortly=updating_shortly, overall_view=overall_view))
def post(self):
    """Handle the saved-search creation form POST.

    Accepts either a pasted Craigslist RSS URL ('rss_url'), from which the
    feed key is derived directly, or individual form fields (city, category,
    query, price range, ...) which are validated and combined into a key.
    Invalid input redirects back to the form with error messages; otherwise
    a Feed entity for the key is ensured to exist in the datastore.

    NOTE(review): Python 2 / Google App Engine code (`except Exception, e`,
    `dict.has_key`). The success path after `get_or_insert` is not visible
    in this excerpt.
    """
    session = is_logged_in(self)
    if not session:
        return self.redirect(REDIR_URL)
    req = self.request
    errors = {}
    # user-visible name for the saved search; validated in both input modes
    name = validate_string(req, errors, 'name', 'search name', MAX_FEED_NAME_LEN)
    if not name:
        name = ''
    rss_url = req.get('rss_url')
    if rss_url:
        # URL mode: derive the feed key straight from the pasted RSS URL
        feed_key = parse_rss_url(rss_url)
        if not feed_key:
            return self.redirect_to_errors(
                GET_PARAMS, {
                    'error_rss_url':
                    '''This URL isn't in the expected form. Please <a href="/contact">send it to us</a> if you think this is a bug.'''
                })
        if len(errors):
            return self.redirect_to_self(GET_PARAMS, errors)
    else:
        # form mode: validate each field and build the key from the pieces
        city = validate_string(req, errors, 'city', 'city/region', max_len=50)
        category = validate_string(req, errors, 'category', 'category', 3)
        area = ''  # TODO: add area picker
        if not CATEGORIES.has_key(category):
            errors['category'] = 'Please choose a category.'
        query = validate_string(req, errors, 'query', 'search string', 100, required=False)
        if not query:
            query = ''
        # 'T' = title-only search, 'A' = search all of the ad text
        title_only = req.get('title_only') == 'checked'
        if title_only:
            stype = 'T'
        else:
            stype = 'A'
        # numeric filters are optional; '' means "no constraint"
        min_cost = validate_int(req, errors, 'min_cost', 'Minimum Cost', 0, None, False)
        if not min_cost:
            min_cost = ''
        max_cost = validate_int(req, errors, 'max_cost', 'Maximum Cost', 0, None, False)
        if not max_cost:
            max_cost = ''
        num_bedrooms = validate_int(req, errors, 'num_bedrooms', 'Number of bedrooms', 1, 8, False)
        if not num_bedrooms:
            num_bedrooms = ''
        cats = req.get('cats') == 'checked'
        dogs = req.get('dogs') == 'checked'
        pics = req.get('pics') == 'checked'
        if len(errors):
            return self.redirect_to_self(GET_PARAMS, errors)
        feed_key = Feed.make_key_name(city, category, area, min_cost, max_cost, num_bedrooms, cats, dogs, pics, [], stype, query)
    # make sure the feed is in the datastore
    try:
        feed = Feed.get_or_insert(key_name=feed_key)
    except Exception, e:
        logging.error('Unable to create new Feed (%s): %s' % (feed_key, e))
        return self.redirect_to_self(
            GET_PARAMS, {
                'err':
                'The service is temporarily unavailable - please try again later.'
            })
def delete_feed(self, feed_id):
    """Delete the feed with the given id (no-op when absent) and return the
    refreshed feed list as JSON."""
    # NOTE: peewee's .get() raises Feed.DoesNotExist on an empty result,
    # which would make the existence check dead code; .first() returns
    # None instead, so a missing feed is genuinely a no-op.
    feed = Feed.select().where(Feed.id == feed_id).first()
    if feed is not None:
        feed.delete_instance()
    return self.get_feeds()
def get_feed(self, feed_id):
    """Return the feed with the given id as JSON, or "{}" when it does not exist."""
    # NOTE: peewee's .get() raises Feed.DoesNotExist on an empty result,
    # making the "{}" fallback unreachable; .first() returns None instead,
    # so the missing-feed path actually works.
    feed = Feed.select().where(Feed.id == feed_id).first()
    if feed is not None:
        return json.dumps(feed.dict(), cls=MyEncoder)
    return "{}"
def get_feeds(self):
    """Return every feed, serialized as a JSON array of row dicts."""
    rows = list(Feed.select().dicts())
    return json.dumps(rows, cls=MyEncoder)