Example #1
 def add_feed_media(cls, collection_id, feed_category=None, approve_all=False):
   collection = Collection.get_by_id(int(collection_id))
   medias = []
   if feed_category:
     uri = Collection.YOUTUBE_CATEGORY_FEED % ((collection.region or 'US'), collection.feed_id,
                                               feed_category)
   else:
     uri = Collection.YOUTUBE_FEED % ((collection.region or 'US'), collection.feed_id)
   response = urlfetch.fetch(uri)
   if response.status_code == 200:
     data = simplejson.loads(response.content) or {}
     entries = data['feed']['entry'] if data.get('feed') else []
     ids = ''
     publisher_map = {}
     for entry in entries:
       id = re.search('video:(.*)', entry['id']['$t']).group(1)
       publisher = Publisher.add(host=MediaHost.YOUTUBE, host_id=entry['author'][0]['yt$userId']['$t'])
       deferred.defer(Publisher.fetch_details, publisher.id,
                      _name='publisher-' + publisher.id + '-' + str(uuid.uuid1()),
                      _queue='youtube')
       ids += id + ','
       publisher_map[id] = publisher
     if len(ids):
       youtube3 = get_youtube3_service()
       videos_response = youtube3.videos().list(
         id=ids.rstrip(','),  # drop the trailing comma left by the concatenation above
         part='id,snippet,topicDetails,contentDetails,statistics'
       ).execute()
       logging.info('ADD FEED VIDEOS: ' + ids)
       # NOTE: publisher here is whichever Publisher the last feed entry produced;
       # publisher_map keeps the per-video id -> Publisher mapping.
       medias = Media.add_from_snippet(videos_response.get('items', []), collection=collection,
                                       publisher=publisher, approve=approve_all)
   return medias
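
A minimal, standalone sketch of the id-extraction step in the loop above. The 'tag:youtube.com,2008:video:<id>' sample format is an assumption about the old GData JSON feed, and the helper name is hypothetical.

import re

def extract_video_id(entry_id_text):
  # Pull the trailing video id out of a GData-style entry id string.
  match = re.search('video:(.*)', entry_id_text)
  return match.group(1) if match else None

# Hypothetical sample value; real feed entries may differ.
print(extract_video_id('tag:youtube.com,2008:video:dQw4w9WgXcQ'))  # -> dQw4w9WgXcQ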
Example #2
 def add_reddit_media(cls, collection_id, reddit_id, approve_all):
   collection = Collection.get_by_id(int(collection_id))
   response = urlfetch.fetch('http://www.reddit.com/r/%s.json?limit=50' % reddit_id)
   content = simplejson.loads(response.content) or {}
   if content.get('error'):
     logging.error('REDDIT ERROR: ' + str(content.get('error')))
   data = content.get('data')
   if data:
     links = data.get('children')
     ids = []
     reddit_ids = {}
     logging.info('FETCHING %s VIDEOS FROM REDDIT' % str(len(links)))
     for link in links:
       link_data = link.get('data') or {}
       if link_data.get('domain') == 'youtube.com' and not link_data.get('over_18') and link_data.get('score') > 100:
         url = link_data.get('url')
         parsed_url = urlparse.urlparse(url)
         query = urlparse.parse_qs(parsed_url.query)
         if not query.get('v'):
           continue  # skip youtube.com links that carry no ?v= video id
         host_id = query['v'][0]
         reddit_ids[host_id] = link.get('kind') + '_' + link_data.get('id')
         ids.append(host_id)
 
     youtube3 = get_youtube3_service()
     
     while len(ids) > 0:
       videos_response = youtube3.videos().list(
         id=','.join(ids[0:10]),
         part="id,snippet,topicDetails,contentDetails,statistics"
       ).execute()
       if videos_response.get('error'):
         logging.error(videos_response.get('error').get('error')[0]['reason'])
         return
       medias = Media.add_from_snippet(videos_response.get("items", []),
                                       collection=collection,
                                       approve=approve_all)
       for media in medias:
         if reddit_ids.get(media.host_id):
           media.reddit_id = reddit_ids[media.host_id]
           media.put()
       ids = ids[10:]
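
A minimal sketch of the filtering and id-extraction logic above, runnable outside App Engine. The sample reddit 'children' entries are fabricated for illustration; the domain, over_18 and score filters mirror the ones in add_reddit_media.

try:
  import urlparse                    # Python 2, as used in the snippet above
except ImportError:
  import urllib.parse as urlparse    # Python 3 fallback

def youtube_ids_from_reddit_links(links, min_score=100):
  ids = []
  for link in links:
    link_data = link.get('data') or {}
    if (link_data.get('domain') == 'youtube.com'
        and not link_data.get('over_18')
        and (link_data.get('score') or 0) > min_score):
      query = urlparse.parse_qs(urlparse.urlparse(link_data.get('url', '')).query)
      if query.get('v'):
        ids.append(query['v'][0])
  return ids

sample_links = [  # fabricated reddit listing entries
  {'kind': 't3', 'data': {'domain': 'youtube.com', 'over_18': False, 'score': 250,
                          'url': 'http://www.youtube.com/watch?v=abc123DEF45', 'id': 'xyz'}},
  {'kind': 't3', 'data': {'domain': 'imgur.com', 'score': 900, 'id': 'abc'}},
]
print(youtube_ids_from_reddit_links(sample_links))  # -> ['abc123DEF45']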
Example #3
File: channel.py Project: ebby/brokentv
  def youtube_channel(cls, name=None, user=None, token=None, yt_channel_id=None, yt_playlist_id=None, page_token=None):
    name = name or 'ytchannel'
    channel = Channel(key_name=(yt_channel_id or yt_playlist_id),
                      name=name, privacy=Privacy.FRIENDS, online=True, user=user, youtube=True)
    channel_id = (yt_channel_id or yt_playlist_id)
    medias = []
    all = True  # always fetch a full page of results (shadows the built-in all())

    cached_channel = memcache.get(channel_id)
    #if cached_channel:
    #  return simplejson.loads(cached_channel)


    youtube3 = get_youtube3_service()
    search_response = {}
    if yt_channel_id:
      if yt_channel_id.startswith('HC'):
        channel_response = youtube3.channels().list(
          id=yt_channel_id,
          part='topicDetails',
          maxResults=1
        ).execute()
        if len(channel_response.get('items', [])):
          topic_id = channel_response.get('items')[0]['topicDetails']['topicIds'][0]
          search_response = youtube3.search().list(
            topicId=topic_id,
            part='id,snippet',
            order='date',
            maxResults=(20 if all else 1),
            pageToken=page_token,
            fields='items,nextPageToken',
            type='video'
          ).execute()
      else:
        search_response = youtube3.search().list(
            channelId=yt_channel_id,
            part='id,snippet',
            order='date',
            maxResults=(20 if all else 1),
            pageToken=page_token,
            fields='items,nextPageToken',
            type='video'
          ).execute()
    elif yt_playlist_id:
      search_response = youtube3.playlistItems().list(
          playlistId=yt_playlist_id,
          part='id,snippet',
          maxResults=(20 if all else 1),
          pageToken=page_token,
          fields='items,nextPageToken'
        ).execute()


    logging.info('SEARCH RESPONSE \n \n \n')
    logging.info(search_response)
    next_page_token = search_response.get("nextPageToken")
    if len(search_response.get('items', [])):
      search_ids = ''
      for item in search_response.get('items', []):
        if item['kind'] == 'youtube#searchResult':
          search_ids += item['id']['videoId'] + ','
        elif item['kind'] == 'youtube#playlistItem':
          search_ids += item['snippet']['resourceId']['videoId'] + ','
      videos_response = youtube3.videos().list(
        id=search_ids,
        part="id,snippet,topicDetails,contentDetails,statistics"
      ).execute()

      medias = Media.add_from_snippet(videos_response.get("items", []), approve=True)

      if not all:
        deferred.defer(util.schedule_youtube_channel,
                       name=name,
                       user_id=user.id if user else None,
                       token=token,
                       channel_id=channel_id,
                       yt_channel_id=yt_channel_id,
                       yt_playlist_id=yt_playlist_id,
                       _name='youtube-channel-' + channel_id + '-' + str(uuid.uuid1()),
                       _queue='youtube')

    else:
      logging.error('No search results for youtube channel: ' + str(search_response))

    programs = []
    if user and False:  # disabled: this branch never runs
      programs = programming.Programming.set_user_channel_programs(user.id, channel, medias,
                                                      time=datetime.datetime.now(), reset=True)
    else:
      next_time = datetime.datetime.now()
      for media in medias:
        program = Program.add_program(channel, media, time=next_time)
        if program:
          programs.append(program)
          next_time = next_time + datetime.timedelta(seconds=media.duration)
    if not all:
      broadcast.broadcastNewPrograms(channel, programs, new_channel=True, to_owner=False, token=token)

    if len(medias):
      deferred.defer(programming.Programming.fetch_related_tweets, medias,
                     _name='twitter-' + channel_id + '-' + str(uuid.uuid1()),
                     _queue='twitter',
                     _countdown=30)

    channel_json = channel.toJson()
    channel_json['next_page_token'] = next_page_token
    data_json = {
      'channel': channel_json,
      'programs': [p.toJson() for p in programs]
    }

    memcache.set(channel_id, simplejson.dumps(data_json), time=86400)
    return data_json
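
A standalone sketch of the id-assembly step above: collecting video ids from a mixed list of search results and playlist items into the comma-separated string that videos().list(id=...) expects. The sample items are fabricated; joining with ','.join also avoids the trailing comma that string concatenation leaves behind.

def collect_video_ids(items):
  # Accepts youtube#searchResult and youtube#playlistItem entries alike.
  ids = []
  for item in items:
    if item['kind'] == 'youtube#searchResult':
      ids.append(item['id']['videoId'])
    elif item['kind'] == 'youtube#playlistItem':
      ids.append(item['snippet']['resourceId']['videoId'])
  return ','.join(ids)

sample_items = [  # fabricated API items
  {'kind': 'youtube#searchResult', 'id': {'videoId': 'vid_aaa'}},
  {'kind': 'youtube#playlistItem', 'snippet': {'resourceId': {'videoId': 'vid_bbb'}}},
]
print(collect_video_ids(sample_items))  # -> vid_aaa,vid_bbb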
Example #4
  def fetch(self, collection=None, approve_all=False):
    all_medias = []

    if self.host == MediaHost.YOUTUBE:
      if not self.channel_id:
        yt_service = get_youtube_service()
        try:
          user_entry = yt_service.GetYouTubeUserEntry(username=self.host_id)
        except Exception as e:
          logging.error(e)
          return
        desc = user_entry.content.text
        desc = desc.decode('utf-8').replace("\n", " ") if desc else None
        
        picture = urlfetch.fetch(user_entry.thumbnail.url)
        self.name = user_entry.title.text
        self.picture = db.Blob(picture.content)
        self.description = desc
        self.link = user_entry.link[0].href
        self.channel_id = re.search('/channel/(.*)', self.link).groups()[0]
        self.put()

      
      youtube3 = get_youtube3_service()
      
      
      publishedAfter = '1970-01-01T00:00:00Z'
      if constants.PUBLISHED_AFTER:
        publishedAfter = constants.PUBLISHED_AFTER
      elif self.last_fetch:
        publishedAfter = self.last_fetch.isoformat('T') + 'Z'
      
      ordering = ['date'] if self.last_fetch else ['date', 'rating', 'viewCount']
      
      for order in ordering:
        medias = []
        next_page_token = ''
        while next_page_token is not None:
          search_response = youtube3.search().list(
            channelId=self.channel_id,
            part='id',
            order=order,
            pageToken=next_page_token,
            publishedAfter=publishedAfter,
            maxResults=20
          ).execute()
          search_ids = ''
          for item in search_response.get('items', []):
            if item['id']['kind'] == 'youtube#video':
              search_ids += item['id']['videoId'] + ','
          videos_response = youtube3.videos().list(
            id=search_ids,
            part="id,snippet,topicDetails,contentDetails,statistics"
          ).execute()
          medias = Media.add_from_snippet(videos_response.get("items", []),
                                          collection=collection,
                                          publisher=self,
                                          enforce_category=bool(collection and collection.categories),  # safe when collection is None
                                          approve=approve_all)
  
          all_medias += medias
          next_page_token = search_response.get('nextPageToken')  # v3 returns the token at the top level of the response

      self.last_fetch = datetime.datetime.now()
      self.put()
    if len(all_medias) and not approve_all:
      msg = emailer.Email(emailer.Message.FETCH, data={'count': len(all_medias)})
      for uid in constants.SUPER_ADMINS:
        user = User.get_by_key_name(uid)
        msg.send(user)
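
A small sketch of the publishedAfter computation in fetch above. The YouTube v3 search endpoint takes an RFC 3339 timestamp, so a last-fetch datetime is rendered as ISO 8601 with a trailing 'Z' (assuming naive UTC datetimes); PUBLISHED_AFTER below stands in for the value read from the constants module in the snippet.

import datetime

PUBLISHED_AFTER = None  # e.g. '2013-01-01T00:00:00Z' to pin a global cutoff

def published_after(last_fetch=None):
  # Mirrors the precedence in fetch: global override, then last fetch time, then the epoch.
  if PUBLISHED_AFTER:
    return PUBLISHED_AFTER
  if last_fetch:
    return last_fetch.isoformat('T') + 'Z'
  return '1970-01-01T00:00:00Z'

print(published_after())                                       # -> 1970-01-01T00:00:00Z
print(published_after(datetime.datetime(2013, 5, 1, 12, 30)))  # -> 2013-05-01T12:30:00Z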