def get(self, slug):
    """Refresh the cached feed for the pipe identified by *slug*.

    With no slug, fan out: enqueue one refresh task per auto-refresh
    pipe (up to 200).  With a slug, fetch the pipe's feed XML, skip all
    work when it is byte-identical to the cached copy, otherwise parse
    it, update the pipe's alternate link if the feed moved it, and hand
    the new entries to the addFeed task queue for storage.

    Returns ['ok'] on the early-exit paths; falls through (implicit
    None) after a full refresh, matching the original behavior.
    """
    if slug and slug[0] == '/':
      slug = slug[1:]
    logging.debug('the slug is %s', slug)
    if not slug:
      # Fan-out mode: schedule an individual refresh task per pipe.
      pipes = model.Pipe.gql("WHERE auto_refresh = :1", True).fetch(200)
      logging.debug('fetch %d pipes' , len(pipes))
      for pipe in pipes:
        default_feed_queue.add(taskqueue.Task(url="/refreshfeed/" + pipe.pipe_web_address))

      return ['ok']

    pipe = get_pipe(slug)
    if pipe and pipe.auto_refresh:
      feed_url = pipe.feed_url
      logging.debug('start refresh feed, the feedUrl is ' + feed_url)
      memcache_input_feed_xml_key = INPUT_FEED_XML_MEMCACHE_KEY + '_' + slug
      logging.debug('memcache_feed_xml_key %s', memcache_input_feed_xml_key)
      old_feed_xml = memcache.get(memcache_input_feed_xml_key)
      feed_xml = zrqutil.fetch_content(feed_url)
      if old_feed_xml == feed_xml:
        # Nothing changed since the last refresh; skip the parse entirely.
        logging.info('feed_xml not change')
        return ['ok']
      elif not old_feed_xml:
        logging.info('feed_xml not found in cache')
      else:
        logging.info('feed_xml changed')
      logging.debug('start parse feed xml')
      data = feedparser.parse(feed_xml)
      # Update the pipe's alternate (HTML) link if the feed changed it.
      try:
        if hasattr(data.feed, 'links'):
          for link in data.feed.links:
            if link.rel == 'alternate' and link.get('href', '') and pipe.alternate != link.href:
              logging.debug('update pipe alternate')
              pipe.alternate = link.href
              pipe.put()
              break
      except Exception as e:
        # Best-effort: a malformed link list must not abort the refresh.
        logging.exception(e)

      entries = get_entries(data, pipe)
      entries = filter_entries(entries, pipe)
      logging.debug('fetch %d entries' , len(entries))
      if entries:
        # Stash the entries in memcache for 2 minutes and pass the key to
        # the addFeed task, which persists them asynchronously.
        entries_map = {'entries':entries, 'fetch_index':0, 'put_index':0}
        random_str = get_random_str()
        random_key = slug + '_' + random_str
        memcache.set(random_key, entries_map, 120)

        add_feed_queue.add(taskqueue.Task(url='/addFeed', params={'key': random_key, 'pipe_web_address': pipe.pipe_web_address}))

      # Remember the raw XML for an hour so unchanged feeds short-circuit.
      memcache.set(memcache_input_feed_xml_key, feed_xml, 60 * 60)
# Example #2
 def get_xml(self, pipe):
     """Fetch and return the raw feed XML for *pipe*'s feed URL."""
     return zrqutil.fetch_content(pipe.feed_url)
# Example #3
    def get(self, slug):
        """Refresh the cached feed for the pipe identified by *slug*.

        With no slug, fan out: enqueue one refresh task per auto-refresh
        pipe (up to 200).  With a slug, fetch the pipe's feed XML, skip
        all work when it is byte-identical to the cached copy, otherwise
        parse it, update the pipe's alternate link if the feed moved it,
        and hand the new entries to the addFeed task queue for storage.

        Returns ['ok'] on the early-exit paths; falls through (implicit
        None) after a full refresh, matching the original behavior.
        """
        if slug and slug[0] == '/':
            slug = slug[1:]
        logging.debug('the slug is %s', slug)
        if not slug:
            # Fan-out mode: schedule an individual refresh task per pipe.
            pipes = model.Pipe.gql("WHERE auto_refresh = :1", True).fetch(200)
            logging.debug('fetch %d pipes', len(pipes))
            for pipe in pipes:
                default_feed_queue.add(
                    taskqueue.Task(url="/refreshfeed/" +
                                   pipe.pipe_web_address))

            return ['ok']

        pipe = get_pipe(slug)
        if pipe and pipe.auto_refresh:
            feed_url = pipe.feed_url
            logging.debug('start refresh feed, the feedUrl is ' + feed_url)
            memcache_input_feed_xml_key = INPUT_FEED_XML_MEMCACHE_KEY + '_' + slug
            logging.debug('memcache_feed_xml_key %s',
                          memcache_input_feed_xml_key)
            old_feed_xml = memcache.get(memcache_input_feed_xml_key)
            feed_xml = zrqutil.fetch_content(feed_url)
            if old_feed_xml == feed_xml:
                # Nothing changed since the last refresh; skip the parse.
                logging.info('feed_xml not change')
                return ['ok']
            elif not old_feed_xml:
                logging.info('feed_xml not found in cache')
            else:
                logging.info('feed_xml changed')
            logging.debug('start parse feed xml')
            data = feedparser.parse(feed_xml)
            # Update the pipe's alternate (HTML) link if the feed changed it.
            try:
                if hasattr(data.feed, 'links'):
                    for link in data.feed.links:
                        if link.rel == 'alternate' and link.get(
                                'href', '') and pipe.alternate != link.href:
                            logging.debug('update pipe alternate')
                            pipe.alternate = link.href
                            pipe.put()
                            break
            except Exception as e:
                # Best-effort: a bad link list must not abort the refresh.
                logging.exception(e)

            entries = get_entries(data, pipe)
            entries = filter_entries(entries, pipe)
            logging.debug('fetch %d entries', len(entries))
            if entries:
                # Stash the entries in memcache for 2 minutes and pass the
                # key to the addFeed task, which persists them asynchronously.
                entries_map = {
                    'entries': entries,
                    'fetch_index': 0,
                    'put_index': 0
                }
                random_str = get_random_str()
                random_key = slug + '_' + random_str
                memcache.set(random_key, entries_map, 120)

                add_feed_queue.add(
                    taskqueue.Task(url='/addFeed',
                                   params={
                                       'key': random_key,
                                       'pipe_web_address':
                                       pipe.pipe_web_address
                                   }))

            # Remember the raw XML for an hour so unchanged feeds short-circuit.
            memcache.set(memcache_input_feed_xml_key, feed_xml, 60 * 60)
 def get_xml(self, pipe):
     """Fetch and return the raw feed XML for *pipe*'s feed URL."""
     return zrqutil.fetch_content(pipe.feed_url)