def search_podcasts(q, limit=20, skip=0):
    """Search for podcasts matching the query ``q``.

    If ``q`` is itself a URL, resolve (and if necessary create and update)
    the podcast for that URL and return it as a single-result list.
    Otherwise run a CouchDB full-text search.

    Returns a tuple ``(results, total_count)``.
    """
    if is_url(q):
        url = sanitize_url(q)
        podcast = Podcast.for_url(url, create=True)
        # A freshly created podcast has no metadata yet -- fetch its feed
        # once, then re-load the now-populated document.
        if not podcast.title:
            update_podcasts([podcast])
            podcast = Podcast.for_url(url)
        return [podcast], 1

    db = get_main_database()

    # FIXME: current couchdbkit can't parse responses for multi-query
    # searches, so strip commas from the query string
    q = q.replace(',', '')

    res = db.search('podcasts/search',
                    wrapper=search_wrapper,
                    include_docs=True,
                    limit=limit,
                    skip=skip,
                    q=q,
                    sort='\\subscribers<int>')

    # Return empty results in case of a search-backend error; use
    # "except Exception" rather than a bare "except:" so that
    # KeyboardInterrupt / SystemExit are not swallowed.
    try:
        return list(res), res.total_rows
    except Exception:
        return [], 0
def update_published_podcasts(request, username):
    """Update all podcasts published by ``username``.

    Responds with a plain-text list of the updated feed URLs; raises
    Http404 when no such user exists.
    """
    user = User.get_user(username)
    if not user:
        raise Http404

    podcasts = Podcast.get_multi(user.published_objects)
    update_podcasts(podcasts)

    body = 'Updated:\n' + '\n'.join(p.url for p in podcasts)
    return HttpResponse(body, mimetype='text/plain')
def update_podcast(request, podcast):
    """Trigger an update of a single podcast and redirect to its
    publisher detail page.

    Returns 403 when the requesting user lacks publisher permission
    for the podcast.
    """
    if not check_publisher_permission(request.user, podcast):
        return HttpResponseForbidden()

    update_podcasts([podcast])

    target = get_podcast_link_target(podcast, 'podcast-publisher-detail')
    return HttpResponseRedirect(target)
def handle(self, *args, **options): queue = self.get_podcasts(*args, **options) max_podcasts = options.get('max') if max_podcasts: queue = islice(queue, 0, max_podcasts) if options.get('list'): for podcast in queue: print podcast.url else: print 'Updating podcasts...' try: feeddownloader.update_podcasts(queue) except RequestFailed as ex: traceback.print_exc()
def handle(self, *args, **options):
    """Management-command entry point.

    Builds the podcast queue from the command arguments; with ``--list``
    only logs the queued podcasts, otherwise updates each podcast in the
    (optionally ``--max``-limited) queue and logs it as it completes.
    """
    queue = self.get_podcasts(*args, **options)

    limit = options.get('max')
    if limit:
        queue = islice(queue, 0, limit)

    # List-only mode: report the queue without touching any feeds.
    if options.get('list'):
        for podcast in queue:
            logger.info('Podcast %s', podcast)
        return

    logger.info('Updating podcasts...')
    for updated in update_podcasts(queue):
        logger.info('Updated podcast %s', updated)