def post(self):
    store_key_name = self.request.get('store_key_name')
    date = str_to_date(self.request.get('date_string'))
    frequency = self.request.get('frequency')
    renderers = []
    store_key = Key.from_path('Store', store_key_name)
    query = get_counter_query_for_frequency(frequency, date, store_key)
    product_counters = query.fetch(TEMPLATE_PRODUCT_COUNT)
    key_names = [counter.key().name() for counter in product_counters]
    product_renderers = ProductRenderer.get_by_key_name(
        key_names, _storage=[MEMCACHE, DATASTORE], _result_type=NAME_DICT)
    for counter in product_counters:
        renderer = product_renderers[counter.key().name()]
        try:
            renderer.count = counter.count
            renderers.append(renderer)
        except AttributeError:  # Renderer is None
            renderer = ProductRenderer.build(counter.key_root, frequency,
                                             date, count=counter.count)
            if renderer is not None:  # Building from existing renderers succeeded
                renderers.append(renderer)
            else:
                enqueue_renderer_info(counter.key_root, counter.count,
                                      frequency, date)
    if len(renderers):
        pdb.put(renderers, _storage=[MEMCACHE, DATASTORE])
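# This worker reads its parameters from POST data, which suggests it is
# driven through the task queue. A hypothetical enqueue call follows; the
# URL and parameter values are illustrative, not taken from the original
# code.
from google.appengine.api import taskqueue

taskqueue.add(url='/workers/renderer_update',  # hypothetical handler path
              params={'store_key_name': 'store_1',
                      'date_string': '2011-06-01',
                      'frequency': 'daily'})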
@classmethod
def filtered_update(cls, models):
    # Write every counter to memcache, but queue a datastore write only for
    # counters that have reached the write threshold.
    db_targets = [str(model.key()) for model in models
                  if model.count >= cls._MIN_COUNT_FOR_DB_WRITE]
    pdb.put(models, _storage=MEMCACHE)
    if len(db_targets):
        cls.update_cached_counter_keys(db_targets)
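# A minimal invocation sketch for filtered_update. The subclass name and the
# threshold value are hypothetical; only the classmethod interface and the
# memcache-first write path come from the snippet above.
class MentionCounter(CounterBase):
    _MIN_COUNT_FOR_DB_WRITE = 10  # hypothetical threshold

counters = MentionCounter.all().fetch(100)
for counter in counters:
    counter.count += 1
# Every counter is rewritten to memcache; only the keys of counters at or
# above the threshold are remembered for the next datastore flush.
MentionCounter.filtered_update(counters)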
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.query = pdb.GqlQuery('SELECT * FROM PdbModel')
    models = []
    for i in range(100):
        models.append(PdbModel(count=i))
    pdb.put(models)
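# A hypothetical companion test for the setUp above; it assumes only the
# standard GqlQuery.fetch() interface on the pdb query object.
def test_gql_fetch(self):
    results = self.query.fetch(100)
    self.assertEqual(100, len(results))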
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    self.setup_name = 'test'
    self.setup_name_int = 'int_model'
    parent_model = PdbModel(key_name='parent_model')
    model = PdbModel(key_name='test_model', parent=parent_model,
                     name=self.setup_name)
    int_model = PdbModel(name=self.setup_name_int, parent=parent_model)
    self.setup_key = pdb.put(model,
                             _storage=['local', 'memcache', 'datastore'])
    self.parent_key = pdb.put(parent_model,
                              _storage=['local', 'memcache', 'datastore'])
    self.setup_key_int = pdb.put(int_model,
                                 _storage=['local', 'memcache', 'datastore'])
def test_cached_set(self):
    class RefModel(pdb.Model):
        reference = db.ReferenceProperty(PdbModel)

    models = []
    for i in range(100):
        models.append(RefModel(reference=self.setup_key))
    pdb.put(models)
    pdb_model = pdb.get(self.setup_key)
    # First call creates the memcache index
    refs = pdb_model.cached_set('refmodel_set')
    self.assertEqual(len(refs), len(models))
def get(self):
    counter_keys = CounterBase.get_cached_counter_keys()
    if not len(counter_keys):
        return
    logging.info('Counter keys retrieved: %s' % len(counter_keys))
    counters = CounterBase.get(counter_keys, _storage=MEMCACHE)
    if len(counters):
        try:
            logging.info('Counters being inserted: %s' % len(counters))
            pdb.put(counters, _storage=DATASTORE)
            # Delete the cached counter keys
            CounterBase.set_cached_counter_keys([])
        except CapabilityDisabledError:
            # Datastore writes are disabled; the keys stay cached so the
            # next run can retry the flush.
            pass
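# This flush handler answers GET, which suggests it runs as a cron job. A
# matching cron.yaml entry might look like this; the URL and schedule are
# hypothetical:
#
# cron:
# - description: flush cached counters to the datastore
#   url: /workers/counter_flush
#   schedule: every 5 minutes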
def setUp(self):
    self.testbed = testbed.Testbed()
    self.testbed.activate()
    self.testbed.init_datastore_v3_stub()
    self.testbed.init_memcache_stub()
    model = TestModel(key_name='test_key_name')
    self.setup_key = pdb.put(model,
                             _storage=['local', 'memcache', 'datastore'])
    self.cache_key = str(self.setup_key)
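# The matching tearDown is not shown in the original snippets; the standard
# testbed cleanup is:
def tearDown(self):
    self.testbed.deactivate()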
def get(self):
    USER_SPAM_COUNTERS.bind(spam_count_limit=SPAM_COUNT_LIMIT)
    user_counters = USER_SPAM_COUNTERS.fetch(100)
    users = []
    if len(user_counters):
        for counter in user_counters:
            counter.is_banned = True
            users.append(TwitterUser(key_name=counter.key_root))
        targets = [user.key().name() for user in users]
        ban_list = Banlist.retrieve()
        ban_list.users += targets
        ban_list.put(_storage=[MEMCACHE, DATASTORE])
        #TwitterUser.update_banlist([user.key().name() for user in users])
        logging.info('Banning users with keys: %s' % targets)
        pdb.put(user_counters + users)
def post(self):
    logging.info('UrlFetchWorker started')
    payloads = Payload.deserialize(self.request.get('payload'))
    product_ban_list = Banlist.retrieve(
        _storage=[LOCAL, MEMCACHE, DATASTORE],
        _local_expiration=time_util.minute_expiration(minutes=10)).products
    fetch_targets = list(set([payload.url for payload in payloads]))
    result_dict = UrlFetcher.fetch_urls(fetch_targets)
    urls = []
    counter_targets = []
    for payload in payloads:
        request_url = payload.url
        final_url = result_dict[request_url]
        user_id = payload.user_id
        urls.append(Url(key_name=request_url, final_url=final_url,
                        user_id=user_id))
    for url in urls:
        if url.final_url is not None:
            try:
                product_url = AmazonURLParser.product_url(url.final_url)
                if product_url in product_ban_list:
                    logging.info('Mention creation prevented for banned '
                                 'product url: %s' % product_url)
                    continue  # No action for a banned product
                url.is_product = True  # No exception => valid product reference
                counter_targets.append(Payload(product_url, url.user_id))
            except ParserException:
                pass
    logging.info('UrlFetchWorker finished, counter targets: %s'
                 % counter_targets)
    pdb.put(urls, _storage=[LOCAL, MEMCACHE])  # Urls are stored in cache only
    if len(counter_targets):
        enqueue_counter(Payload.serialize(counter_targets))
def get(self):
    renderers = PRODUCT_RENDERER_BAN_TARGETS.fetch(100)
    products = [Product(key_name=renderer.key_root)
                for renderer in renderers]
    product_counters = []
    for renderer in renderers:
        product_counters.append(ProductCounter(key_name=renderer.key().name(),
                                               is_banned=True,
                                               day=renderer.day,
                                               week=renderer.week,
                                               month=renderer.month,
                                               year=renderer.year))
        renderer.is_ban_synched = True
    targets = [product.key().name() for product in products]
    ban_list = Banlist.retrieve()
    ban_list.products += targets
    ban_list.put(_storage=[MEMCACHE, DATASTORE])
    pdb.put(products + renderers + product_counters,
            _storage=[MEMCACHE, DATASTORE])
def test_cascaded_cache_refresh(self):
    e1 = TestModel()
    k1 = db.put(e1)
    # Memcache refresh from datastore
    pdb.get(k1, _storage=['memcache', 'datastore'])
    e2 = pdb.get(k1, _storage='memcache')
    self.assertEqual(e1.key(), e2.key())
    # Local refresh from datastore
    pdb.get(k1, _storage=['local', 'datastore'])
    e2 = pdb.get(k1, _storage='local')
    self.assertEqual(e1.key(), e2.key())
    # Local refresh from memcache
    e3 = TestModel(key_name='memcache_model')
    k3 = pdb.put(e3, _storage='memcache')
    pdb.get(k3, _storage=['local', 'memcache'])
    e4 = pdb.get(k3, _storage='local')
    self.assertEqual(e3.key(), e4.key())
def test_result_type(self):
    single_result = pdb.get(self.setup_key)
    self.assertTrue(isinstance(single_result, db.Model))

    dict_result = pdb.get(self.setup_key, _result_type='dict')
    self.assertTrue(isinstance(dict_result, dict))
    self.assertEqual(dict_result.keys()[0], str(self.setup_key))
    self.assertEqual(dict_result.values()[0].name, 'test')

    name_dict_result = pdb.get(self.setup_key, _result_type='name_dict')
    self.assertTrue(isinstance(name_dict_result, dict))
    self.assertEqual(name_dict_result.keys()[0], self.setup_key.name())
    self.assertEqual(name_dict_result.values()[0].name, 'test')

    # Check for an integer-based key
    e1 = TestModel(name='integer_test')
    k1 = pdb.put(e1)
    name_dict_result = pdb.get(k1, _result_type='name_dict')
    self.assertTrue(isinstance(name_dict_result, dict))
    self.assertEqual(name_dict_result.keys()[0], str(k1.id()))
    self.assertEqual(name_dict_result.values()[0].name, 'integer_test')
def test_put_memcache(self):
    model = TestModel(key_name='test_key_name', name='test')
    key = pdb.put(model, _storage='memcache')
    entity = _deserialize(memcache.get(str(key)))
    self.assertEqual('test', entity.name)
def test_put_db(self):
    model = TestModel(key_name='test_key_name', name='test')
    key = pdb.put(model, _storage='datastore')
    self.assertEqual('test', db.get(key).name)
def test_put_local(self):
    model = TestModel(key_name='test_key_name', name='test')
    key = pdb.put(model, _storage='local')
    entity = cachepy.get(str(key))
    self.assertEqual('test', entity.name)
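# A sketch combining the three single-layer put tests above into one round
# trip; it reuses only the calls already present in those tests, and the
# test name itself is new.
def test_put_all_layers(self):
    model = TestModel(key_name='test_key_name', name='test')
    key = pdb.put(model, _storage=['local', 'memcache', 'datastore'])
    # Each storage layer should now hold its own copy of the entity.
    self.assertEqual('test', cachepy.get(str(key)).name)
    self.assertEqual('test', _deserialize(memcache.get(str(key))).name)
    self.assertEqual('test', db.get(key).name)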