def try_push_resub():
    """Attempt a PuSH (hub) subscription for every feed that declares a
    hub but is not yet subscribed.  Cron-only ndb tasklet endpoint.

    Returns (via ndb.Return) a JSON 'ok' status; failures are counted
    and logged per feed rather than aborting the run.
    """
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))

    unsubscribed_feeds = Feed.query(Feed.hub != None, Feed.subscribed_at_hub == False)  # noqa
    qit = unsubscribed_feeds.iter()
    errors = 0
    success = 0
    count = 0
    futures = []
    # Kick off every subscribe call first so they run concurrently.
    while (yield qit.has_next_async()):
        feed = qit.next()
        futures.append((feed, Feed.subscribe_to_hub(feed)))

    for feed, future in futures:
        count += 1
        try:
            yield future
            success += 1
        except Exception:
            # Narrowed from a bare except: a bare except would also
            # swallow SystemExit/KeyboardInterrupt.
            errors += 1
            logger.exception('Failed to PuSH subscribe feed:%s' % (feed.feed_url, ))

    logger.info('Tried to call hub for num_unsubscribed_feeds:%s success:%s, errors:%s',
                count, success, errors)

    raise ndb.Return(jsonify(status='ok'))
def test_get_with_permanent_redirect(self):
    """check_feeds should update a feed's stored URL when the server
    answers with an HTTP 301 (permanent redirect)."""
    # Create a dummy feed with a last_checked date well in the past
    last_checked = datetime(2013, 9, 21, 10, 0, 0, 0)
    date_of_last_entry = datetime(2013, 9, 20, 8, 45, 0, 0)
    feed = Feed(name = "Test Feed1",
                last_checked = last_checked,
                date_of_last_entry = date_of_last_entry,
                url = './test-feeds/test-rss.xml',
                parent = root_key())
    feed.put()

    # Stub out the publish_entry function so we don't have to deal with
    # the GAE mail API
    def _publish_entry(feed_title, entry, recipent_address):
        pass
    feed_utils.publish_entry = _publish_entry

    # Create a dummy function to return the feeds to check
    # NOTE(review): the mutable default datetime.now() is evaluated once
    # at def time, but harmless -- the stub ignores working_date.
    def _find_feeds_to_check(working_date = datetime.now()):
        return [feed]
    feed_utils.find_feeds_to_check = _find_feeds_to_check

    # Replace feedparser.parse with our own version that sets the status
    # code to 301 and the href to a new URL
    def _parse(url):
        parsed_feed = self.orig_parse_func(url)
        parsed_feed['status'] = 301
        parsed_feed['href'] = 'http://def.com'
        return parsed_feed
    feedparser.parse = _parse

    response = self.testapp.get('/check_feeds')

    # check the feed's URL has been updated
    self.assertEquals(feed.url, 'http://def.com')
def track(request):
    """Handle the track form.

    On a valid POST: build a feed for the submitted username, persist the
    Feed and each of its Tracks, then redirect.  Otherwise render the
    (possibly bound) form.
    """
    if request.method == 'POST':
        form = UserForm(request.POST)
        if form.is_valid():
            user = form.cleaned_data['username']
            raw_feed = build_feed(user, 5, 2)
            f = Feed(username=user)
            f.save()
            for track in raw_feed:
                # Fall back to a placeholder image when no album art.
                art_url = 'http://i.imgur.com/BNBFGfg.jpg'
                # Fixed: identity test with None ('is not'), not '!='.
                if track[5] is not None:
                    art_url = track[5]
                t = Track(id=track[0],
                          date=track[1],
                          title=track[2],
                          artist=track[3],
                          uri=track[4],
                          art=art_url)
                t.save()
                f.tracks.add(t)
            return HttpResponseRedirect('track.html')
    else:
        form = UserForm()
    return render(request, 'subs/track.html', {'form': form})
def testFindFeedsToCheckMoreHoursThanFeeds(self):
    """With more hours left in the day than pending feeds, only the most
    overdue feed should be scheduled for checking."""
    feeds = [
        { 'name': 'Test 1', 'last_checked': datetime(2017, 5, 6, 19, 7, 33) },
        { 'name': 'Test 2', 'last_checked': datetime(2017, 5, 5, 19, 6, 9) },
        { 'name': 'Test 3', 'last_checked': datetime(2017, 5, 4, 18, 23, 56) }
    ]
    for feed_data in feeds:
        feed = Feed(name = feed_data['name'],
                    last_checked = feed_data['last_checked'],
                    parent = root_key())
        feed.put()

    working_date = datetime(2017, 5, 6, 19, 45, 0)
    feeds = feed_utils.find_feeds_to_check(working_date)

    # there are 4 hours left in the day so only 1 feed will be returned
    self.assertEqual(1, len(feeds))
    # 'Test 3' has the oldest last_checked, so it is picked first.
    self.assertEqual("Test 3", feeds[0].name)
def create_feed_from_url(url):
    """Fetch *url* with feedparser and build a Feed entity from it.

    Delegates malformed-feed errors to handle_bozo_exception and raises a
    generic Exception for any HTTP status other than 200/301/302.
    """
    parsed = feedparser.parse(url)

    # Surface obvious parse errors immediately.
    if parsed.bozo:
        handle_bozo_exception(parsed.bozo_exception)

    if 'status' not in parsed or parsed.status not in (200, 301, 302):
        raise Exception("Unexpected result from feedparser.parse(): %s" % parsed)

    # If we received a bozo exception raise it
    if parsed.bozo:
        handle_bozo_exception(parsed.bozo_exception)

    # A permanent redirect means the resource moved: record the new URL.
    feed_url = parsed.href if parsed.status == 301 else url

    feed = Feed(parent=root_key(), name=parsed.channel.title, url=feed_url)
    if len(parsed.entries) > 0:
        feed.date_of_last_entry = datetime.fromtimestamp(
            mktime(parsed.entries[0].updated_parsed))
    return feed
def get_feeds(self, all = False):
    """Return feeds - defaults to returning enabled feeds only.

    Fixed: the enabled filter must be placed in .where(); passing the
    expression to select() makes it a selected column in peewee, so the
    filter was never applied and every feed was returned.
    """
    if all:
        result = [f for f in Feed.select()]
    else:
        result = [f for f in Feed.select().where(Feed.enabled == True)]  # noqa: E712
    return result
def get_feeds(self, all=False):
    """Return feeds - defaults to returning enabled feeds only.

    Fixed: the enabled filter must be placed in .where(); passing the
    expression to select() makes it a selected column in peewee, so the
    filter was never applied and every feed was returned.
    """
    if all:
        result = [f for f in Feed.select()]
    else:
        result = [f for f in Feed.select().where(Feed.enabled == True)]  # noqa: E712
    return result
def create_feed(user,board_id,title,link,description,actions,public_board,get_all,token):
    """ Creates a feed and attaches it to a user object.

    Looks for an existing feed whose every attribute matches so repeated
    registrations reuse the same entity; creates one otherwise, then
    links its key onto the user.  Returns the public URL for the feed.
    """
    user = get_user(user)
    # Exact-match lookup: all parameters must match for reuse.
    q = Feed.query(
        Feed.channel_title==title,
        Feed.channel_link==link,
        Feed.channel_description==description,
        Feed.feed_name==title,
        Feed.token==token,
        Feed.actions.IN(actions),
        Feed.board_id==board_id,
        Feed.public_board==public_board)
    feed = q.get()
    if feed is None:
        feed = Feed(
            channel_title=title,
            channel_link=link,
            channel_description=description,
            feed_name=title,
            token=token,
            actions=actions,
            board_id=board_id,
            public_board=public_board,
            get_all=get_all,user=user.key)
        feed.put()
    # Attach the feed key to the user, creating the list on first feed;
    # user.put() is only issued when the list actually changes.
    if user.feeds:
        if feed.key not in user.feeds:
            user.feeds.append(feed.key)
            user.put()
    else:
        user.feeds = [feed.key]
        user.put()
    return create_url(feed.key.id())
def create_feed(self, title, url, days_since_checked=0, days_since_updated=0, owner=None):
    """Test helper: build and save a Feed with relative timestamps.

    NOTE(review): the offsets are ADDED to now(), so positive
    'days_since_*' values place last_checked/last_updated in the future;
    callers presumably pass negative values (or rely on this) -- confirm
    the intended sign convention.
    """
    now = timezone.now()
    feed = Feed(url=url,
                title=title,
                last_checked=now + datetime.timedelta(days=days_since_checked),
                # last_updated is offset from last_checked, not from now.
                last_updated=now + datetime.timedelta(days=days_since_checked + days_since_updated),
                owner = owner or self.default_user)
    feed.save()
    return feed
def post(self):
    """Create a Feed from the submitted form and return to the feed list.

    Form fields: url, author, title, content, allow_sendto_kindle --
    only tUrl and tTitle are read here.
    """
    feed = Feed()
    feed.url = self.request.get('tUrl')
    feed.title = self.request.get('tTitle')
    feed.put()  # persist to the datastore
    return self.redirect('/feed')
def add_feed(self, url, site_url = None, title = None, group = None):
    """Add a feed to the database, reusing an existing row for the URL."""
    try:
        # Reuse the existing feed when this URL is already known.
        feed = Feed.get(Feed.url == url)
    except Feed.DoesNotExist:
        feed = Feed.create(url = url, title=title, site_url=site_url)
    db.close()
    return feed
def add_feed(self, url, site_url=None, title=None, group=None):
    """Add a feed to the database.

    Returns the existing or newly created Feed, or None when an
    unexpected database error occurred (logged, not raised).

    Fixed: 'f' was unbound on the generic-error path, so 'return f'
    raised NameError and masked the real failure; also narrowed the bare
    except so KeyboardInterrupt/SystemExit are not swallowed.
    """
    f = None
    # existing feed?
    try:
        f = Feed.get(Feed.url == url)
    except Feed.DoesNotExist:
        f = Feed.create(url=url, title=title, site_url=site_url)
    except Exception:
        log.error(tb())
    return f
def add_feed_github(request, user=None):
    """Register a GitHub user's public activity feed; auth required."""
    if not request.user.is_authenticated():
        return 'h4axx0r..'

    gh_user = request.REQUEST['user']
    category, created = Category.objects.get_or_create(name='github', slug='github')

    feed = Feed()
    feed.title = gh_user
    feed.url = 'https://github.com/%s/' % gh_user
    feed.category = category
    feed.feed_url = 'https://github.com/%s.atom' % gh_user
    # Minimal-datetime sentinels: the feed reads as never checked/updated.
    feed.dt_checked = datetime.datetime(1, 1, 1, 0, 0, 0)
    feed.dt_updated = datetime.datetime(1, 1, 1, 0, 0, 0)
    feed.save()
    return HttpResponse('yay')
def create_photo_feed(feed_type, photo, photowalk=1):
    """Create a Feed entry for *photo* and fan it out to followers' walls.

    Args:
        feed_type: kind of feed item; 'photowalk' items also record the walk.
        photo: the photo being shared (its .user is the feed author).
        photowalk: photowalk id, used only when feed_type == 'photowalk'.
    """
    user = photo.user
    feed = Feed(photo=photo, user=user, feed_type=feed_type)
    # Fixed: 'is' compares object identity and only matched by CPython
    # string-interning accident; string values need '=='.
    if feed_type == 'photowalk':
        feed.photowalk = photowalk
    feed.save()
    # Push the new feed item onto every follower's wall.
    for stalker in followers(user):
        wall = UserWall.objects.get(user=stalker)
        wall.feeds.add(feed)
        wall.save()
    return
def feed_create():
    """Create a feed for the current user, reusing an existing one when
    the same feed_url was already registered."""
    form = FeedCreate(request.form)
    if not form.validate():
        return jsonify(status='error', message='The passed arguments failed validation')

    existing = Feed.for_user_and_url(user=g.user, feed_url=form.data['feed_url'])
    if existing.count():
        feed = existing.get()
    else:
        # No match: create a brand new feed from the validated form.
        feed = Feed.create_feed_from_form(g.user, form).get_result()
    return jsonify(status='ok', data=feed.to_json())
def setup_database(db_file_path=settings.DB_FILE_PATH):
    """Connect the database and create any missing tables."""
    # import models here to avoid circular references
    from models import BaseModel, Feed, FeedItem

    BaseModel.db.connect()
    for model in (Feed, FeedItem):
        model.create_table()
def main() -> None:
    """Fetch and store a feed for every symbol missing from the KV store."""
    store = Ingestor(KV_DIR)
    symbols = SymbolList(SYMBOLS_CSV)
    for _, sym in symbols.next():
        if store.get(sym) is not None:
            continue  # already ingested
        feed = Feed(sym, url=FEED_URL.format(symbol=sym))
        feed.refresh()
        store.set(sym, feed)
        print("Wrote {}".format(sym), flush=True)
        # Be polite to the feed server between fetches.
        sleep(FEED_FETCH_SLEEP)
    store.finish()
def add_feed(url=None):
    """AJAX endpoint: validate a submitted URL and add it as a Feed.

    Accepts a direct feed URL (matched by content type) or an HTML page,
    in which case feed autodiscovery is run on the markup.  Responds with
    one of the canned JSON status payloads.
    """
    # Get url submitted via AJAX (the route argument is overwritten).
    url = request.json['url']
    FEED_TYPES = ('application/rss+xml',
                  'text/xml',
                  'application/atom+xml',
                  'application/x.atom+xml',
                  'application/x-atom+xml')

    # Check if url already exists in feed DB
    dupe = Feed.select().where(Feed.url == url).count()
    if dupe > 0:
        return jsonify(**DUPLICATE_FEED)

    # Attempt to retrieve URL
    try:
        r = requests.get(url, timeout=5)
    except requests.exceptions.Timeout:
        return jsonify(**FEED_NOT_FOUND)

    # check request status code
    if r.status_code != requests.codes.ok:
        return jsonify(**FEED_NOT_FOUND)

    # Get Content-Type
    # NOTE(review): exact string match -- a header like
    # 'text/xml; charset=utf-8' falls through to FEED_INVALID. Confirm.
    contenttype = r.headers['content-type']

    # If Content-type is RSS, add it directly
    if contenttype in FEED_TYPES:
        feed = Feed.create(url=url)
        return jsonify(**STATUS_OK)

    # If Content-type is HTML, pass to autodiscovery
    if contenttype == 'text/html':
        p = autodiscovery.Discover()
        p.feed(r.text)
        # check result in case of no feeds found
        if len(p.feeds) == 0:
            return jsonify(**FEED_NOT_FOUND)
        else:
            # TODO: Could loop over all available feeds found?
            fulluri = p.feeds[0]['fulluri']  # just adds first feed found
            feed = Feed.create(url=fulluri)
            return jsonify(**STATUS_OK)

    # dropped through to here, feed must be invalid
    return jsonify(**FEED_INVALID)
def test_check_feed_with_probem(self):
    """A feed URL answering HTTP 500 should trigger an error-report email
    and still update the feed's last_checked timestamp."""
    last_checked = datetime(2000, 1, 1)
    date_of_last_entry = datetime(2000, 1, 1)
    feed_url = 'http://x.y.z'
    feed = Feed(name = "Test Feed",
                last_checked = last_checked,
                date_of_last_entry = date_of_last_entry,
                url = feed_url,
                parent = root_key())
    feed.put()

    # Replace feedparser.parse with our own version that sets the status
    # code to 500 to simulate a server side error
    def _parse(url):
        parsed_feed = FeedParserDict(status = 500, bozo = False)
        return parsed_feed
    feedparser.parse = _parse

    # Single-element lists so the closure below can write to them
    # (Python 2 has no 'nonlocal').
    reported_title = [0]
    reported_message = [0]
    reported_recipient = [0]
    orig_report_error = feed_utils.report_error

    def _report_error(title, message, recipent_address):
        # Capture the arguments, then delegate to the real reporter so
        # the email still goes through the mail stub.
        reported_title[0] = title
        reported_message[0] = message
        reported_recipient[0] = recipent_address
        orig_report_error(title, message, recipent_address)
    feed_utils.report_error = _report_error

    # run the method under test
    # set logging to CRITICAL so-as not to print exception generated by
    # the test
    try:
        logging.getLogger().setLevel(logging.CRITICAL)
        response = self.testapp.get('/check_feeds')
        self.assertEquals(reported_title[0], feed.name)
        self.assertTrue('returned HTTP code 500' in reported_message[0])
        self.assertEquals(reported_recipient[0], '*****@*****.**')
        #self.fail("Should have raised an exception as server returned 500")
    finally:
        logging.getLogger().setLevel(logging.ERROR)

    # there should be one email message describing the problem
    messages = self.mail_stub.get_sent_messages()
    self.assertEquals(1, len(messages))
    self.assertTrue("URL '%s' returned HTTP code 500" % feed_url
                    in messages[0].body.payload)

    # check the feed's last_checked date was updated
    self.assertNotEqual(feed.last_checked, last_checked)
def api_feed(request):
    """JSON API: return a page of the user's (or a group's) poll feed.

    Query params: token (required), group_id, page, page_size (optional,
    defaulting to personal feed / page 1 / 10 items).
    """
    api_result = {"api": "feed", "status": "success"}
    try:
        token = request.GET["token"]
        user = get_user_from_token(token)
        group_id = 0
        try:
            group_id = request.GET["group_id"]
        except:
            pass  # optional: default to the personal feed
        page = 1
        try:
            page = int(request.GET["page"])
        except:
            pass  # optional/unparseable: default to first page
        page_size = 10
        try:
            page_size = int(request.GET["page_size"])
        except:
            pass  # optional/unparseable: default page size
        if not user:
            api_result["status"] = "failure"
            api_result["error"] = "user not found"
        else:
            # NOTE(review): group_id stays a str when supplied, so a
            # client sending group_id=0 takes the group branch -- confirm.
            if group_id == 0:
                polls = Feed.user_feed(user, page, page_size)
            else:
                polls = Feed.group_feed(user, group_id, page, page_size)
            if page == 1 and not polls:
                # create_feed_new_user(user)
                # polls = Feed.user_feed(user, page, page_size)
                pass  # new-user seeding intentionally disabled here
            if polls:
                api_result["polls"] = json_polls(polls)
            else:
                api_result["polls"] = "None"
    except Exception as e:
        # Report any failure in-band rather than as an HTTP error.
        # NOTE(review): e.message is Python-2-only.
        api_result["status"] = "failure"
        api_result["error"] = e.message
        pass
    return JsonResponse(api_result)
def _addFeedObject(self, request):
    """Create a Feed entity (child of the caller's Profile) from a URL.

    Parses the URL with feedparser, copies title/description/link/updated
    from the parsed feed over the request fields, stores the Feed and its
    items (via _addFeedItem), and returns the request object.

    Raises:
        endpoints.UnauthorizedException: no authenticated user.
        endpoints.BadRequestException: request.url missing.
    """
    user = endpoints.get_current_user()
    if not user:
        raise endpoints.UnauthorizedException('Authorization required')
    user_id = getUserId(user)

    if not request.url:
        raise endpoints.BadRequestException("Feed 'url' field required")
    if not request.get_full_article:
        setattr(request, 'get_full_article', False)

    d = feedparser.parse(request.url)

    # Start from every request field, then overwrite with feed metadata.
    data = {field.name: getattr(request, field.name)
            for field in request.all_fields()}
    del data['websafeKey']
    data['title'] = d.feed.title
    data['description'] = d.feed.description
    data['link'] = d.feed.link
    data['updated'] = mktime(d.feed.updated_parsed)

    # Allocate the id under the user's Profile so feeds group per user.
    p_key = ndb.Key(Profile, user_id)
    feed_id = Feed.allocate_ids(size=1, parent=p_key)[0]
    feed_key = ndb.Key(Feed, feed_id, parent=p_key)
    data['key'] = feed_key

    Feed(**data).put()
    self._addFeedItem(request, feed_key, d)
    return request
def feed(id=None):
    """Render a single feed page (HTML) or its JSON representation."""
    try:
        feed = Feed.get(Feed.id == id)
    except Feed.DoesNotExist:
        return jsonify(**FEED_NOT_FOUND)

    # populate Category tree
    (categories, feeds) = loadTree()

    # Newest first, capped at the first page of 50 posts.
    posts = (Post.select()
                 .join(Feed)
                 .where(Feed.id == id)
                 .order_by(Post.published.desc())
                 .paginate(1, 50))

    # Human-readable datestamps for the template.
    datestamps = loadDates(posts)

    if request.json is None:
        # Plain browser request: render the feed page template.
        return render_template("feed.html", categories=categories,
                               feeds=feeds, feed=feed, posts=posts,
                               datestamps=datestamps)
    # JSON client: return data for client-side formatting.
    return jsonify(response=[dict(feed=feed, posts=posts)])
def feed_update(id=None):
    """JSON action endpoint: refresh one/all feeds or mark posts read."""
    # Manual update of one or all feeds now
    if request.json['action'] == 'refresh':
        # Call refresh routine
        # TODO: RSS worker functions in separate package
        # TODO: Need to capture return status
        if id is None:
            rss_spawn()  # Update all feeds
        else:
            try:
                feed = Feed.get(Feed.id == id)
            except Feed.DoesNotExist:
                return jsonify(**FEED_NOT_FOUND)
            rss_worker(feed)  # Update single feed
        # return JSON status OK
        return jsonify(**STATUS_OK)
    # Mark one or all feeds read
    elif request.json['action'] == 'markread':
        if id is None:
            # Mark all posts read
            query = Post.update(is_read=True)
        else:
            # Mark posts in current feed read
            # NOTE(review): filters on Feed.id inside a Post update with
            # no join; presumably should be Post.feed == id -- confirm
            # against the peewee model definitions.
            query = Post.update(is_read=True).where(Feed.id == id)
        query.execute()
        # return JSON status OK
        return jsonify(**STATUS_OK)
def rpc_fetch():
    """Fetch every feed URL via parallel async urlfetch RPCs and upsert
    entries published within the collection window as EntryCollect."""
    q = Feed.query()
    results = ndb.get_multi(q.fetch(keys_only=True))
    rpcs = []
    # Issue all fetches up-front so they run concurrently.
    for f in results:
        rpc = urlfetch.create_rpc()
        urlfetch.make_fetch_call(rpc, f.url)
        rpcs.append(rpc)
    for rpc in rpcs:
        rpc.wait()
        result = rpc.get_result()
        d = feedparser.parse(result.content)
        for e in d['entries']:
            # Entry publish time, made naive for the comparison below.
            dt = parser.parse(e["published"]).replace(tzinfo=None)
            # Oldest timestamp still collected: now minus the window.
            dy = (datetime.datetime.utcnow() - datetime.timedelta(days=COLLECT_DAYS, seconds=COLLECT_HOURS*3600)).replace(tzinfo=None)
            if dt > dy:
                # Keyed by the entry's id so repeats update in place.
                obj = EntryCollect.get_or_insert(e["id"])
                if obj.published and obj.published >= dt:
                    # Already stored this (or a newer) version.
                    pass
                else:
                    logging.info("new entry : %s" % e["id"])
                    obj.published = dt
                    obj.title = e["title"]
                    obj.link = e["link"]
                    obj.summary = clean_html(e["summary"])
                    obj.feed = d['feed']['title']
                    # Flag downstream pipelines to process this entry.
                    obj.need_collect_word = True
                    obj.need_notice = True
                    obj.put()
def api_user_login(request):
    """Log a user in from POSTed credentials and return a JSON status.

    On first login (empty feed) runs the new-user seeding protocol.

    Fixes: the original dereferenced `user.pk` and called Feed.user_feed
    with `user=None` on failed authentication (AttributeError), and
    printed the raw password to stdout.
    """
    api_result = {"api": "login"}
    username = request.POST["username"]
    password = request.POST["password"]
    user = authenticate(username=username, password=password)
    if user is not None:
        if user.is_active:
            login(request, user)
            api_result["status"] = "success"
        else:
            # Return a 'disabled account' error message
            api_result["status"] = "failure"
        # if no polls in feed, initiate new user protocol
        polls = Feed.user_feed(user, 1, 10)
        if len(polls) == 0:
            create_feed_new_user(user)
    else:
        # Return an 'invalid login' error message.
        api_result["status"] = "failure"
    return JsonResponse(api_result)
def all_feeds():
    """Tasklet: dump every feed as a dict for an external poller run.

    Returns (via ndb.Return) a JSON response carrying a fresh
    poller_run_id and the list of feed descriptors.
    """
    def feed_to_dict(feed):
        # Minimal data the poller needs to fetch and de-duplicate a feed.
        return {
            'feed_key': feed.key.urlsafe(),
            'feed_url': feed.feed_url,
            'etag': feed.etag,
            'last_hash': feed.last_fetched_content_hash,
        }

    qit = Feed.query().iter()
    feeds_response = []
    # Async-iterate so the tasklet yields while ndb fetches pages.
    while (yield qit.has_next_async()):
        feeds_response.append(feed_to_dict(qit.next()))

    # Correlates our log lines with the poller's own logs for this run.
    poller_run_id = uuid.uuid4().hex

    logger.info('Poller run %s dispatched with %d feeds', poller_run_id, len(feeds_response))
    response = {
        'poller_run_id': poller_run_id,
        'feeds': feeds_response,
    }

    raise ndb.Return(jsonify(status='ok', data=response))
def post_all_feeds():
    """Cron-only tasklet: publish pending entries for every feed.

    Kicks off Entry.publish_for_feed for all feeds concurrently, then
    tallies successes, errors, and the number of entries posted.
    """
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))

    feeds = Feed.query().iter()
    errors = 0
    success = 0
    num_posted = 0
    futures = []
    while (yield feeds.has_next_async()):
        feed = feeds.next()
        futures.append((feed, Entry.publish_for_feed(feed)))

    for feed, future in futures:
        try:
            num_posts = yield future
            if num_posts is not None:
                num_posted += num_posts
            success += 1
        except Exception:
            # Narrowed from a bare except, which would also swallow
            # SystemExit/KeyboardInterrupt.
            errors += 1
            logger.exception('Failed to Publish feed:%s' % (feed.feed_url, ))

    logger.info('Post Feeds success:%s errors: %s num_posted: %s',
                success, errors, num_posted)
    raise ndb.Return(jsonify(status='ok'))
def update_all_feeds(interval_id):
    """Update all feeds for a specific interval.

    Cron-only tasklet: pages through the interval's feeds 100 at a time
    and enqueues one 'poll' task per page carrying the urlsafe keys.
    """
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))

    feeds = Feed.for_interval(interval_id)
    success = 0
    more = True
    cursor = None
    futures = []
    while more:
        feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(100, start_cursor=cursor)
        keys = ','.join([x.key.urlsafe() for x in feeds_to_fetch])
        if not keys:
            # Empty page: nothing to enqueue, keep paging while more.
            continue
        futures.append(Queue('poll').add_async(Task(url=url_for('tq_feed_poll'), method='POST', params={'keys': keys})))
        success += 1

    # Wait for all enqueue RPCs to complete before reporting.
    for future in futures:
        yield future

    # NOTE(review): 'success' counts batches of up to 100, not feeds, so
    # the log text slightly overstates what was counted -- confirm.
    logger.info('queued poll for %d feeds at interval_id=%s', success, interval_id)
    raise ndb.Return(jsonify(status='ok'))
def get_feed(feed_id):
    """ Retrieve a feed from memcache or generate it if it does not exist.

    Builds the RSS XML from Trello (public or token-authenticated) and
    caches it for 30 minutes.  Returns None for an unknown feed id.
    """
    xml = memcache.get(feed_id)  # In memcache?
    if xml is None:
        feed = Feed.get_by_id(int(feed_id))
        rss = None
        if feed:
            if feed.public_board:
                # Public boards need no user token.
                rss = TrelloRSS(
                    constants.KEY,
                    channel_title=feed.channel_title,
                    rss_channel_link=feed.channel_link,
                    description=feed.channel_description)
            else:
                # Private boards authenticate with the owner's token.
                user = feed.user.get()
                rss = TrelloRSS(
                    constants.KEY,
                    token=user.auth_token,
                    channel_title=feed.channel_title,
                    rss_channel_link=feed.channel_link,
                    description=feed.channel_description)
            if feed.get_all:
                # Whole account, optionally restricted to chosen actions.
                if feed.actions:
                    rss.get_all(items=feed.actions)
                else:
                    rss.get_all()
            else:
                # Single board, optionally restricted to chosen actions.
                if feed.actions:
                    rss.get_from(feed.board_id,public_board=feed.public_board,items=feed.actions)
                else:
                    rss.get_from(feed.board_id,public_board=feed.public_board)
            xml = rss.rss
            memcache.add(key=feed_id, value=xml, time=1800)  # Expire after 30 minutes
    return xml
def getFavicon(feed_id):
    """Download a feed site's /favicon.ico and store it under ICONS_PATH.

    Returns the saved favicon's filename, or None when the site has no
    fetchable favicon with an icon content type.
    """
    # Favicon HTTP content types
    favicon_types = ["image/vnd.microsoft.icon", "image/x-icon"]

    feed = Feed.get(Feed.id == feed_id)
    url = feed.url
    # Favicons conventionally live at the site root.
    u = urlparse(url)
    favicon_url = 'http://' + u.netloc + '/favicon.ico'
    log.info("getFavicon: Looking for favicon at %s", favicon_url)
    try:
        # stream=True keeps the body unread so it can be copied to disk.
        r = requests.get(favicon_url, stream=True, timeout=5)
        content_type = r.headers.get('content-type')
        if r.status_code == requests.codes.ok and content_type in favicon_types:  # pylint: disable=maybe-no-member
            log.info("getFavicon: returned from urllib, content-type %s", content_type)
        else:
            return None
    except Exception:
        return None
    log.info("Favicon %s status: %s", str(feed_id), str(r.status_code))
    favicon_path = '{0}favicon_{1}.ico'.format(ICONS_PATH, str(feed_id))  # Full file path to favicon
    favicon_file = 'favicon_{0}.ico'.format(str(feed_id))  # favicon filename
    # Stream the response body straight to disk.
    with open(favicon_path, 'wb') as fav:
        shutil.copyfileobj(r.raw, fav)
    del r
    # Return filename of favicon
    return favicon_file
def get_feeds_for_user(self, user):
    """Return Fever-style dicts for every feed *user* subscribes to."""
    query = (Feed.select(Feed)
                 .join(Subscription)
                 .join(User)
                 .where(User.id == user.id)
                 .distinct()
                 .naive())
    result = []
    for f in query:
        # TODO: Change the model defaults in order to clean this up
        try:
            favicon_id = f.favicon.id
        except Favicon.DoesNotExist:
            favicon_id = 0
        result.append({
            'id': f.id,
            'favicon_id': favicon_id,
            'title': f.title,
            'url': f.url,
            'site_url': f.site_url,
            'is_spark': 0,  # TODO: implement this field in the model
            'last_updated_on_time': f.last_checked,
        })
    return result
def create_feed(name, session=None):
    """ Return the named feed.

    Creates the feed when it does not already exist.  Returns a tuple
    (created, feed): created is True when found or newly created, None
    with feed=None when the insert failed.  Opens (and closes) a private
    session unless one is supplied.  (Python 2: print statement.)
    """
    local_session = None
    created = None
    feed = None
    if not session:
        local_session = db_session()
    else:
        local_session = session

    feed = retrieve_feed(name, local_session)
    if feed:
        # Already present: report success without touching the DB.
        # NOTE(review): this early return skips the close below, so a
        # privately opened session leaks on this path -- confirm.
        return (True, feed)
    try:
        feed = Feed(title=name)
        local_session.add(feed)
        local_session.commit()
        created = True
    except Exception as e:
        # Roll back the failed insert; feed=None signals failure.
        print "Got exception {0}".format(e)
        feed = None
        local_session.rollback()
    finally:
        # Only close sessions we opened ourselves.
        if not session:
            local_session.close()
    return (created, feed)
def feed_preview():
    """preview a feed

    Builds a transient (preview=True) Feed from the query-string form and
    fetches its entries without persisting anything.  (Python 2 except
    syntax.)

    NOTE(review): no return statement is visible in this span -- the
    function may continue beyond this chunk.
    """
    form = FeedPreview(request.args)
    if not form.validate():
        return jsonify(status='error', form_errors=form.errors)

    feed = Feed()
    form.populate_obj(feed)
    feed.preview = True  # mark as not-to-be-saved
    entries = []
    error = None
    try:
        entries = Entry.entry_preview_for_feed(feed)
    except FetchException, e:
        # Keep the error for the response instead of failing the request.
        error = unicode(e)
def feed_register (feed_url, user_id): feed_new = False #app.logger.info ("RSS LINK = " + feed_url) feed = dboper.get_feed_by_rsslink(feed_url) #Check if it's already in the central repository if not feed: ## New feed in the repository feed_new = True d = Feed.parse(feed_url) #app.logger.info(d.feed) if 'image' in d.feed: imgurl = d.feed.image.href else: imgurl = None if 'link' in d.feed: sitelink = d.feed.link else: sitelink = None feed = Feed (name=d.feed.title, rsslink=feed_url, image_url=imgurl, sitelink=sitelink) dboper.add_feed(feed) #Add new feed to the repository (if not exist) #app.logger.info("NEW FEED: " + feed.name) dboper.add_user_feed_by_id(user_id, feed) #By user_id and not g.user because g object is lost in another thread #app.logger.info("ADDING NEW USER FEED: " + feed.name) #if feed_new: #If completely new, load articles app.logger.info("LOADING ARTICLES " ) n = dboper.load_new_user_articles(feed, user_id) app.logger.info("Fetched: " + str(n))
def get_feeds_for_user(self, user):
    """Return Fever-style dicts for the feeds *user* subscribes to."""
    subscriptions = (Feed.select(Feed)
                         .join(Subscription)
                         .join(User)
                         .where(User.id == user.id)
                         .distinct()
                         .naive())
    rows = []
    for feed in subscriptions:
        # TODO: Change the model defaults in order to clean this up
        try:
            fav_id = feed.favicon.id
        except Favicon.DoesNotExist:
            fav_id = 0
        rows.append({
            'id': feed.id,
            'favicon_id': fav_id,
            'title': feed.title,
            'url': feed.url,
            'site_url': feed.site_url,
            'is_spark': 0,  # TODO: implement this field in the model
            'last_updated_on_time': feed.last_checked,
        })
    return rows
def feed_create():
    """Create a feed for the current user unless one already exists."""
    form = FeedCreate(request.form)
    if not form.validate():
        return jsonify(status='error', message='The passed arguments failed validation')

    # One feed per user: reject a second creation attempt outright.
    if Feed.for_user(user=g.user).count():
        return jsonify(status='error', message='This user has already created a feed')

    existing = Feed.for_user_and_url(user=g.user, feed_url=form.data['feed_url'])
    try:
        feed = existing.iter().next()
    except StopIteration:
        # No feed for this URL yet: build one from the validated form.
        feed = Feed.create_feed_from_form(g.user, form)

    return jsonify(status='ok', data=feed.to_json())
def get(self):
    """Fetch one feed (id/url from the query string) and store new entries.

    Fixes: a stray ')' after entry.content made the Entry(...) call a
    SyntaxError, and the 'updated' fallback passed the unbound
    datetime.utcnow function instead of calling it.
    """
    feed_id, feed_url = self.request.get('feed_id'), \
        self.request.get('feed_url')
    url_result = urllib2.urlopen(feed_url)
    feed_result = fd.parse(url_result)
    if feed_result.bozo == 1:
        logging.error('fetch error, id: %s, url: %s, error: %s',
                      feed_id, feed_url, feed_result.bozo_exception)
        return

    # Fixed: fall back to a timestamp value, not the function object.
    feed_update_time = feed_result.get('updated', datetime.utcnow())
    f = Feed.get_by_id(int(feed_id))
    if f.is_allow_fetch(feed_update_time):
        for entry in feed_result.entries:
            if entry.published_parsed <= f.lastedPublishedTime:
                # Entries arrive newest-first; stop at the first old one.
                logging.info('no updated, id: %s, url: %s', feed_id, feed_url)
                break
            e = Entry(title=entry.title,
                      url=entry.link,
                      author=entry.author,
                      content=entry.content,
                      publishedTime=entry.published_parsed)
            e.put()
            logging.debug('fetch entry, url: %s', entry.link)
def index(request):
    """Planet index view: all feeds plus recent entries grouped by day.

    (Python 2 / GAE db API: dict.keys() returns a sortable list and
    list.sort accepts a comparator function here.)
    """
    q_feed = Feed.all()
    feed_list = q_feed.fetch(q_feed.count())
    q_entry = Entry.all()
    entry_list = q_entry.fetch(q_entry.count())

    # Create as dict tree: day -> feed -> [entries]
    dict_tree = {}
    for e in entry_list:
        dict_tree.setdefault(e.pub_date(), {}).setdefault(e.feed, []).append(e)
    days = dict_tree.keys()
    days.sort()
    days.reverse()  # newest day first

    # Recreate as list tree: per day, a list of per-feed entry lists,
    # ordered by the pub_dttm_desc comparator.
    recent_list = []
    for day in days:
        blog_list = [dict_tree[day][k] for k in dict_tree[day]]
        blog_list.sort(pub_dttm_desc)
        recent_list.append(blog_list)

    return render_to_response('planet/index.html', {
        'feed_list': feed_list,
        'recent_list': recent_list
    })
def delete_feed():
    """Delete the posted feed if it belongs to the current user."""
    feed_id = request.form['feed']
    try:
        # Ownership check is part of the lookup: wrong user -> not found.
        Feed.get(Feed.id == feed_id, Feed.user == current_user.id).delete_instance()
    except Feed.DoesNotExist:
        return jsonify(status='FAIL')
    return jsonify(status='OK')
def rss(feed):
    """Render the entries of the feed with the given id.

    Feed ids deliberately carry no user information.

    Fixed: narrowed the bare except to Feed.DoesNotExist so only a
    missing feed redirects to the index; unrelated errors now surface.
    """
    try:
        feed_new = Feed.get(Feed.id == feed)
    except Feed.DoesNotExist:
        return redirect(url_for('index'))
    feed = feedparser.parse(feed_new.url)
    return render_template('rss.html', feed=feed_new, entries=feed.entries)
def rss(feed):
    """Render the RSS entries for one of the current user's feeds."""
    try:
        # Ownership check is part of the lookup: wrong user -> 404.
        owned = Feed.get(Feed.id == feed, Feed.user == current_user.id)
    except Feed.DoesNotExist:
        abort(404)
    parsed = feedparser.parse(owned.url)
    return render_template("rss.html", feed=owned, entries=parsed.entries)
def new_feed():
    """Validate the submitted feed URL and store it for the current user."""
    feedurl = request.form['feed_url']
    parsed = feedparser.parse(feedurl)

    # Reject malformed feeds and feeds without a title.
    if parsed.bozo == 1 or 'title' not in parsed.feed:
        return jsonify(status='FAIL')

    try:
        description = parsed.feed.description
    except AttributeError:
        description = ""

    Feed.create(
        user=current_user.id,
        title=parsed.feed.title,
        url=feedurl,
        description=description)
    return jsonify(status='OK')
def add_feed():
    """ Add feeds """
    # No URL submitted: bounce back to the index.
    if not request.form.get('url'):
        return redirect(url_for('index'))
    # Persist the new feed for the requesting user.
    db.session.add(Feed(request.form['url'], request.user))
    db.session.commit()
    return redirect(url_for('feeds'))
def delete_category(id):
    """Delete category *id*, first moving its feeds to 'unsorted' (0)."""
    # Reassign all feeds in category to 'unsorted' id 0?
    # NOTE(review): the filter references Category.id in a Feed update
    # with no join; presumably it should be Feed.category == id --
    # confirm against the peewee model definitions.
    query = Feed.update(category_id=0).where(Category.id == id)
    query.execute()
    query = Category.delete().where(Category.id == id)
    query.execute()
    # return JSON status OK
    return jsonify(**STATUS_OK)
def add_feed():
    """Create a Feed from the admin form and return to the admin page."""
    feed = Feed()
    feed.title = request.form.get('title')
    feed.url = request.form.get('url')
    feed.delay = request.form.get('delay')
    # Record when we last fetched; new feeds start "just fetched".
    feed.latest_fetch = datetime.datetime.now()
    feed.put()
    flash(u'KeyWord add successfully~', 'success')
    flush_feed_cache()
    return redirect(url_for("admin"))
def create_db():
    """Create the SQLite database, all tables, and the default category."""
    logging.info("Creating SQLite database %s..." % DB_FILE)
    database = SqliteDatabase(DB_FILE, threadlocals=True)
    database.connect()
    # One table per model, created in dependency order.
    for model in (Category, Feed, Post, Image):
        model.create_table()
    # Seed the catch-all category for uncategorised feeds.
    Category.create(name='Unsorted', comment='Uncategorised feeds', order=1)
    logging.info("Database created.")
def delete_feed(feed):
    """Delete a feed by id and return to the index.

    Feed ids never encode the owner: two identical feeds belonging to
    different users still get distinct ids.

    Fixed: narrowed the bare except to Feed.DoesNotExist so unrelated
    database errors surface instead of being reported as 'Error'.
    """
    try:
        feed_new = Feed.get(Feed.id == feed)
    except Feed.DoesNotExist:
        return 'Error'
    feed_new.delete_instance()
    # Back to the index page.
    return redirect(url_for('index'))
def test_feed_model(self):
    """Does basic model work?"""
    feed = Feed(name="test feed name", user_id=self.uid)
    db.session.add(feed)
    db.session.commit()

    # The user should now own exactly one feed with the given name.
    self.assertEqual(len(self.u.feeds), 1)
    self.assertEqual(self.u.feeds[0].name, "test feed name")
def initialise():
    """Ensure the database exists and is seeded with the default feeds."""
    print("initialising...", end=' ')
    # Create the SQLite3 database on first run.
    if not os.path.exists(DB_FILE):
        create_db()
    # Seed the feed table when it is empty.
    if Feed.select().count() == 0:
        load_defaults()
def send_form(request): try: instance = Student.objects.get(email_id=request.user.email) except: instance = '' if request.method == 'POST': form = FeedbackForm(request.POST) if form.is_valid(): name = request.POST.get('name', '') email_id = request.POST.get('email_id', '') query = request.POST.get('query', '') obj = Feed(name=name, email_id=email_id, query=query) obj.save() return index(request) else: print form.errors else: form = FeedbackForm() context = {'form': form, 'student': instance} return render(request, 'feedback1.html', context)
def get_feeds_with_counts(self, enabled=True):
    """Return feeds - defaults to returning enabled feeds only.

    Each result is the feed's field dict plus an 'item_count' entry.
    """
    annotated = (Feed.select()
                     .annotate(Item, fn.Count(Item.id).alias('item_count'))
                     .where(Feed.enabled == enabled))
    merged = []
    for row in annotated:
        # Fold the aggregate into the plain field mapping.
        fields = row.fields()
        fields.update({'item_count': row.item_count})
        merged.append(fields)
    return merged
def loadTree():
    """Build the sidebar tree: (categories with post counts, feeds keyed
    by category id)."""
    # Sparse mapping of category id -> feeds in that category.
    feeds = {}
    # Categories annotated with their total post counts.
    categories = (Category
                  .select(Category, fn.Count(Post.id).alias('count'))
                  .join(Feed)
                  .join(Post)
                  .group_by(Category))
    for cat in categories:
        feeds[cat.id] = Feed.select().where(Feed.category == cat.id).annotate(Post)
    return (categories, feeds)
def feed(line):
    """Logic feed: given ['feed', username], return the user's feed
    messages joined by newlines, or an error message string."""
    if len(line) != 2:
        return ERROR_MSG['INVALID_ARS']
    user = User.get_by_name(line[1])
    if not user:
        return ERROR_MSG['U_NOT_EXISTED']
    # Valid user: collect every feed message addressed to them.
    return '\n'.join(f.message for f in Feed.filter_by_user(user))
def delete_feed(id=None):
    """Manually delete feed *id* from the database."""
    # TODO: Some confirmation required? Client JS via modal?
    if id is None:
        return jsonify(**FEED_NOT_FOUND)
    # TODO: Test ON DELETE CASCADE to Post table (should work)
    Feed.delete().where(Feed.id == id).execute()
    return jsonify(**STATUS_OK)
def admin_feed_form(request, feed_id=None):
    """Admin add/edit form for a Feed; feed_id=None means 'create new'."""
    feed = Feed.get_by_id(int(feed_id)) if feed_id else None

    if request.method == 'POST':
        form = FeedForm(request.POST, instance=feed)
        if form.is_valid():
            feed = form.save(commit=False)
            feed.save()
            return HttpResponseRedirect('../')
    else:
        form = FeedForm(instance=feed)

    return render_to_response('planet/admin_feed_form.html',
                              {'feed_id': feed_id, 'form': form})
def load_defaults(): logging.info("Loading default feed entries into database %s..." % DB_FILE) # Open defaults file with open(DEFAULTS_FILE, 'r') as f: rows = f.readlines() f.close() # Define database db = SqliteDatabase(DB_FILE, threadlocals=True) # Connect to database db.connect() # Iterate over default feeds list # PSV format name|url|category for row in rows: (name, url, category) = row.split('|') category = category.strip() # Update Category table c = Category.create(name=category, comment='Default category', order=1) # Get Category insert id cid = c.id # Update Feeds table f = Feed.create(name=name, version='', url=url, category=cid, favicon='', comment='Default feed', description='Default feed') # Get Feed insert id fid = f.id # Get favicon for this Feed # returns path to local favicon file, or None # write to current feed record logging.info("Getting favicon for %s" % f.url) f.favicon = getFavicon(fid) logging.info("Got favicon %s" % f.favicon) # Save entry to feeds table f.save() logging.info("Default feeds loaded.")