Example 1
    def post(self):
        product_key_name = self.request.get('product_key_name')
        count = int(self.request.get('count'))
        retries = int(self.request.get('retries'))
        date = str_to_date(self.request.get('date_string'))
        frequency = self.request.get('frequency')

        logging.info('Fetching details for %s, frequency: %s' %
                     (product_key_name, frequency))

        # Create an empty renderer
        renderer = ProductRenderer.new(product_key_name,
                                       frequency,
                                       date,
                                       count=count)

        asin = AmazonURLParser.extract_asin(product_key_name)
        locale = AmazonURLParser.get_locale(product_key_name)
        renderer = AmazonProductFetcher.get_product_details(
            asin, renderer, locale)

        if renderer is not None:  # If all details were fetched successfully
            renderer.put(_storage=[MEMCACHE, DATASTORE])
        else:
            if retries < MAX_PRODUCT_INFO_RETRIES:
                retries += 1
                logging.error(
                    'Error saving product: %s, adding to queue again, retries: %s'
                    % (product_key_name, retries))
                enqueue_renderer_info(product_key_name,
                                      count,
                                      frequency,
                                      date,
                                      countdown=60,
                                      retries=retries)
            else:
                logging.critical('Max retries reached for product: %s' %
                                 product_key_name)
                renderer = ProductRenderer.new(product_key_name,
                                               frequency,
                                               date,
                                               count=count,
                                               is_banned=True)
                renderer.log_properties()
                renderer.put(_storage=[MEMCACHE, DATASTORE])
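The worker above re-enqueues itself through enqueue_renderer_info, which is not part of these examples. A minimal sketch of what such a helper could look like on App Engine's task queue, assuming a hypothetical worker route and queue name, and assuming str_to_date accepts ISO-formatted date strings:

    from google.appengine.api import taskqueue


    def enqueue_renderer_info(product_key_name, count, frequency, date,
                              countdown=0, retries=0):
        # Re-enqueue the product-info task; the handler above reads these
        # exact parameters back out of self.request.
        params = {'product_key_name': product_key_name,
                  'count': count,
                  'retries': retries,
                  'date_string': date.isoformat(),  # assumes str_to_date parses ISO dates
                  'frequency': frequency}
        taskqueue.add(url='/workers/product_renderer',  # hypothetical route
                      queue_name='renderer-info',       # hypothetical queue name
                      params=params,
                      countdown=countdown)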
Example 2
    def post(self):
        logging.info('UrlFetchWorker started')
        payloads = Payload.deserialize(self.request.get('payload'))
        product_ban_list = Banlist.retrieve(
            _storage=[LOCAL, MEMCACHE, DATASTORE],
            _local_expiration=time_util.minute_expiration(minutes=10)).products

        fetch_targets = list(set([payload.url for payload in payloads]))
        result_dict = UrlFetcher.fetch_urls(fetch_targets)
        urls = []
        counter_targets = []

        for payload in payloads:
            request_url = payload.url
            final_url = result_dict[request_url]
            user_id = payload.user_id

            urls.append(
                Url(key_name=request_url, final_url=final_url,
                    user_id=user_id))

        for url in urls:
            if url.final_url is not None:
                try:
                    product_url = AmazonURLParser.product_url(url.final_url)

                    if product_url in product_ban_list:
                        logging.info(
                            'Mention creation prevented for banned product url: %s'
                            % product_url)
                        continue  # no action for banned product

                    # product_url() raised no ParserException, so this is a valid product reference
                    url.is_product = True
                    counter_targets.append(Payload(product_url, url.user_id))
                except ParserException:
                    pass

        logging.info('UrlFetchWorker finished, counter targets: %s' %
                     counter_targets)
        pdb.put(urls, _storage=[LOCAL, MEMCACHE])  # URLs are stored in cache only

        if len(counter_targets):
            enqueue_counter(Payload.serialize(counter_targets))
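UrlFetcher.fetch_urls is also not shown; the worker only relies on it returning a dict that maps each requested URL to the final URL it resolved to after redirects, with None marking a failed fetch. A sketch of one way to do that with App Engine's asynchronous urlfetch API, under exactly those assumptions:

    from google.appengine.api import urlfetch


    class UrlFetcher(object):

        @classmethod
        def fetch_urls(cls, urls):
            # Fire all fetches in parallel, then collect the results.
            rpcs = []
            for url in urls:
                rpc = urlfetch.create_rpc(deadline=10)
                urlfetch.make_fetch_call(rpc, url, follow_redirects=True)
                rpcs.append((url, rpc))

            results = {}
            for url, rpc in rpcs:
                try:
                    response = rpc.get_result()
                    # final_url is only populated when a redirect was followed.
                    results[url] = getattr(response, 'final_url', None) or url
                except urlfetch.Error:
                    results[url] = None
            return results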
Example 3
 def root_url(self):
     return AmazonURLParser.root_url(self.final_url)
Example 4
 def product_url(self):
     return AmazonURLParser.product_url(self.final_url)
Example 5
 def asin(self):
     return AmazonURLParser.extract_asin(self.final_url)
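The three wrappers above, and the workers earlier, all delegate to AmazonURLParser, whose implementation is not included in these examples. A rough sketch of the interface they rely on (extract_asin, root_url, product_url, get_locale, and ParserException for non-product URLs), assuming the usual Amazon URL layout where the ASIN is a 10-character alphanumeric identifier under /dp/ or /gp/product/; the locale handling in particular is a guess:

    import re
    import urlparse


    class ParserException(Exception):
        """Raised when a URL does not point at an Amazon product."""


    class AmazonURLParser(object):

        # ASINs are 10-character alphanumeric IDs found under /dp/<ASIN>
        # or /gp/product/<ASIN> in Amazon product URLs.
        _ASIN_RE = re.compile(r'/(?:dp|gp/product)/([A-Z0-9]{10})')

        @classmethod
        def extract_asin(cls, url):
            match = cls._ASIN_RE.search(url)
            if match is None:
                raise ParserException('No ASIN found in %s' % url)
            return match.group(1)

        @classmethod
        def root_url(cls, url):
            # Store root, e.g. http://www.amazon.co.uk
            parts = urlparse.urlparse(url)
            return '%s://%s' % (parts.scheme, parts.netloc)

        @classmethod
        def product_url(cls, url):
            # Canonical product URL: store root plus /dp/<ASIN>
            return '%s/dp/%s' % (cls.root_url(url), cls.extract_asin(url))

        @classmethod
        def get_locale(cls, url):
            # Assumption: locale is the store's domain suffix, e.g. 'com', 'co.uk', 'de'
            return urlparse.urlparse(url).netloc.split('amazon.', 1)[-1]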
Example 6
 def new(cls, *args, **kwds):
     entity = super(ProductRenderer, cls).new(*args, **kwds)
     url = AmazonURLParser.product_url(args[0])
     entity.url = url
     return entity
Example 7
 def new(cls, *args, **kwds):
     entity = super(StoreFrequencyBase, cls).new(*args, **kwds)
     store_key_name = AmazonURLParser.root_url(args[0])
     entity.store = db.Key.from_path('Store', store_key_name)
     return entity
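Both new overrides above assume the first positional argument is the product key name, i.e. the Amazon URL that the product-info worker passes to ProductRenderer.new. A hypothetical call, with an invented URL and frequency value, just to show how the derived field gets filled in:

    import datetime

    renderer = ProductRenderer.new('http://www.amazon.com/dp/B000EXAMPL',  # invented URL
                                   'weekly',                               # invented frequency value
                                   datetime.date.today(),
                                   count=1)
    # renderer.url now holds AmazonURLParser.product_url() of the key name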