def add_channel():
    service = request.form.get("service")
    chanid = request.form.get("chanid")

    if service is None or chanid is None:
        ret = {"error": "Both 'service' (%r) and 'chanid' (%r) must be specified" % (service, chanid)}
        return json.dumps(ret), 500

    if service not in ytdl.models.ALL_SERVICES:
        ret = {"error": "service must be one of %s" % ", ".join(ytdl.models.ALL_SERVICES)}
        return json.dumps(ret), 500

    try:
        existing_chan = Channel.get(chanid=chanid)
    except Channel.DoesNotExist:
        pass  # Good
    else:
        # Exists
        ret = {"error": "channel %r already exists (on service %s)" % (
            existing_chan.chanid, existing_chan.service)}
        return json.dumps(ret), 500

    # Create!
    c = Channel(
        chanid=chanid,
        service=service)
    c.save()  # peewee's save() returns the number of rows modified, not the new id

    # Queue refresh
    ytdl.tasks.refresh_channel.delay(id=c.id)

    return json.dumps({"status": "ok", "id": c.id})
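These excerpts omit their imports. A hedged sketch of what the surrounding modules likely pull in, inferred from the names used above (the exact module paths are assumptions):

import json
import logging

from flask import request

import ytdl.models
import ytdl.tasks
from ytdl.models import Channel, Video

# Paginator, PageNotAnInteger and EmptyPage match Django's paginator API;
# whether the project uses Django's or a local clone is an assumption.
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage

log = logging.getLogger(__name__)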
def refresh_channel(id):
    log.debug("Refreshing channel %s" % id)
    channel = Channel.get(id=id)

    log.debug("Refreshing channel metadata for %s" % (channel))
    channel.refresh_meta()

    log.debug("Grabbing from channel %s" % (channel))
    channel.grab()

    log.debug("Refresh complete for %s" % (channel))
def refresh_all_channels(asyncr=True):
    # type: (bool) -> None
    log.debug("Refreshing all channels")
    channels = Channel.select()
    for c in channels:
        if asyncr:
            refresh_channel.delay(id=c.id)
        else:
            refresh_channel(id=c.id)
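The refresh_channel.delay() call implies a Celery-style task queue (the @task(QUEUE_DEFAULT) decorator in the later tasks excerpt points the same way). A minimal sketch of that assumption using plain Celery rather than whatever wrapper `task` actually is; the broker URL and queue name are placeholders:

from celery import Celery

app = Celery("ytdl", broker="redis://localhost:6379/0")

QUEUE_DEFAULT = "default"  # placeholder queue name

@app.task(queue=QUEUE_DEFAULT)
def refresh_channel(id):
    ...  # body as above

# A worker picks this up asynchronously:
refresh_channel.delay(id=42)
# Calling it directly runs inline, bypassing the queue:
refresh_channel(id=42)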
def list_channels():
    page = int(request.args.get("page", "1"))
    count = request.args.get("count")

    def offset(sliceable, page, count):
        start = (page - 1) * count
        end = page * count
        return sliceable[start:end]

    query = Channel.select().order_by(Channel.title.asc())
    if count is not None:
        count = int(count)
        query = offset(query, page, count)

    channels = []
    for c in query:
        channels.append(_channel_info_dict(c))

    return json.dumps({
        'channels': channels,
        'total': Channel.select().count(),
    })
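_channel_info_dict itself isn't shown in these excerpts; a hedged sketch of what it plausibly returns, with the key set inferred from the Channel fields referenced elsewhere (the exact shape is an assumption):

def _channel_info_dict(c):
    # Hypothetical reconstruction; keys inferred from fields used in the excerpts.
    return {
        'id': c.id,
        'title': c.title,
        'service': c.service,
        'chanid': c.chanid,
    }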
def refresh():
    chanid = request.args.get("channel")
    if chanid == "_all":
        ytdl.tasks.refresh_all_channels()
        return json.dumps({"message": "refreshing all channels"})
    else:
        try:
            chan = Channel.get(id=chanid)
        except Channel.DoesNotExist:
            # peewee's .get() raises DoesNotExist rather than returning None
            return json.dumps({"error": "no such channel"}), 404
        ytdl.tasks.refresh_channel.delay(id=chan.id)
        return json.dumps({"message": "refreshing channel %s (%s)" % (chan.id, chan.title)})
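A usage sketch for this endpoint; the route path and port are assumptions, since the excerpt shows only the view function:

import requests

BASE = "http://localhost:5000"  # hypothetical mount point
requests.get(BASE + "/refresh", params={"channel": "_all"})  # refresh every channel
requests.get(BASE + "/refresh", params={"channel": "3"})     # refresh channel id 3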
def channel_details(chanid):
    if chanid == "_all":
        query = Video.select()
    else:
        chan = Channel.get(id=chanid)
        query = Video.select().filter(channel=chan)

    query = query.order_by(Video.publishdate.desc())

    search = request.args.get('search', "")
    if len(search) > 0:
        query = query.where(Video.title.contains(search))

    # Query based on status
    status = request.args.get('status', "")
    if len(status) > 0:
        status = status.strip().split(",")
        x = Video.status == status[0]
        for st in status[1:]:
            x = x | (Video.status == st)
        query = query.where(x)

    # 25 videos per page, with no less than 5 per page
    paginator = Paginator(query, per_page=25, orphans=5)

    # Get page parameter
    page_num = request.args.get('page', '1')
    try:
        if int(page_num) < 1:
            page_num = 1
    except ValueError:
        page_num = 1  # non-numeric page parameter; treat as page 1

    try:
        page = paginator.page(page_num)
    except PageNotAnInteger:
        page = paginator.page(1)
    except EmptyPage:
        page = paginator.page(paginator.num_pages)

    out_videos = []
    for v in page:
        out_videos.append({
            'id': v.id,
            'title': v.title,
            'imgs': v.img,
            'url': v.url,
            'description': v.description,
            'publishdate': str(v.publishdate),
            'status': v.status,
            # FIXME: Data duplication, only used for "all" channel view
            'channel': _channel_info_dict(v.channel),
        })

    if chanid == '_all':
        channel = None
    else:
        channel = _channel_info_dict(chan)

    page_info = {
        'total': paginator.num_pages,
        'current': page.number,
        'has_next': page.has_next(),
        'has_previous': page.has_previous(),
    }

    return json.dumps({
        'channel': channel,
        'videos': out_videos,
        'pagination': page_info,
    })
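A design note on the status filter above: OR-ing clauses together by hand works, but peewee can express the membership test directly. A hedged equivalent, assuming peewee 3 (older peewee spelled this with the << operator):

# Instead of building x = (Video.status == a) | (Video.status == b) | ...
query = query.where(Video.status.in_(status))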
    except Exception as e:  # ?
        video.status = Video.STATE_GRAB_ERROR
        video.save()
        log.error("Error grabbing %s: %s" % (video, e), exc_info=True)
        return
    else:
        video.status = Video.STATE_GRABBED
        video.save()
        log.info("Grab complete %s" % video)


@task(QUEUE_DEFAULT)
def refresh_channel(id):
    log.debug("Refreshing channel %s" % id)
    channel = Channel.get(id=id)

    log.debug("Refreshing channel metadata for %s" % (channel))
    channel.refresh_meta()

    log.debug("Grabbing from channel %s" % (channel))
    channel.grab()

    log.debug("Refresh complete for %s" % (channel))


def refresh_all_channels(asyncr=True):
    log.debug("Refreshing all channels")
    channels = Channel.select()
    for c in channels:
        if asyncr:
            refresh_channel.delay(id=c.id)
        else:
            refresh_channel(id=c.id)
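Video.STATE_GRABBED and Video.STATE_GRAB_ERROR aren't defined in these excerpts; a hedged sketch of how the model might declare them (the stored values and field type are placeholders, not the project's actual choices):

from peewee import Model, CharField

class Video(Model):
    # Hypothetical status constants; only the names appear in the excerpts.
    STATE_GRABBED = "grabbed"
    STATE_GRAB_ERROR = "graberror"

    status = CharField()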