def test_11_cache(self):
    # cache a plain dict, then read it back directly from redis
    cache.cache("exists", {"key": "value"})

    client = redis.StrictRedis(host=test_host, port=test_port, db=test_db)
    s = client.get("exists")
    obj = json.loads(s)
    assert "key" in obj
    assert obj["key"] == "value"
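For reference, a minimal sketch of what a cache module like the one under test might look like; the connection settings, timeout value, and use of setex are assumptions, not the project's actual implementation. The real module evidently also accepts models.MessageObject instances (see the next example), which this sketch omits.

# hypothetical sketch of the cache module these tests exercise;
# redis_host, redis_port, redis_db, and timeout are assumed values
import json
import redis

redis_host = "localhost"
redis_port = 6379
redis_db = 0
timeout = 7200  # assumed timeout in seconds for cached items

class CacheException(Exception):
    pass

def cache(key, obj):
    # serialise the object; anything json can't handle is an error
    try:
        s = json.dumps(obj)
    except TypeError:
        raise CacheException("object could not be json serialised")
    # store with a timeout so the cache entry expires on its own
    client = redis.StrictRedis(host=redis_host, port=redis_port, db=redis_db)
    client.setex(key, timeout, s)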
def test_11_cache(self):
    # cache a MessageObject wrapper, then read the underlying record
    # back directly from redis
    mo = models.MessageObject(record={"key": "value"})
    cache.cache("exists", mo)

    client = redis.StrictRedis(host=test_host, port=test_port, db=test_db)
    s = client.get("exists")
    obj = json.loads(s)
    assert "key" in obj
    assert obj["key"] == "value"
def _update_cache(record):
    """
    update the cache, and reset the timeout on the cached item
    """
    if 'identifier' not in record:
        raise model_exceptions.LookupException("no identifier in record object")

    if 'canonical' not in record['identifier']:
        raise model_exceptions.LookupException("can't create/update anything in the cache without a canonical id")

    # update or create the cache
    cache.cache(record['identifier']['canonical'], record)
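A usage sketch for this guard logic; the record shape is inferred from the checks above, and the identifier value is hypothetical.

# hypothetical record; only the identifier/canonical structure is
# inferred from the guards in _update_cache
record = {
    "identifier": {
        "canonical": "doi:10.1234/example"
    }
}
_update_cache(record)  # caches the record under "doi:10.1234/example"

try:
    _update_cache({"identifier": {}})  # identifier present, canonical missing
except model_exceptions.LookupException as e:
    print(e)  # can't create/update anything in the cache without a canonical id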
def _update_cache(record):
    """
    update the cache, and reset the timeout on the cached item
    
    arguments:
    record -- an OAG record object, see the module documentation for details
    
    """
    if record.canonical is None:
        raise models.LookupException("can't create/update anything in the cache without a canonical id")
    
    # update or create the cache
    cache.cache(record.canonical, record)
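This version reads the canonical id from an attribute rather than a dict lookup. Below is a minimal sketch of a record object that would satisfy it, assuming canonical is derived from the identifier block as in the dict-based version above; the real OAG models.MessageObject is richer than this.

# minimal stand-in for the OAG record object; the real
# models.MessageObject carries much more than this
class MessageObject(object):
    def __init__(self, record=None):
        self.record = record if record is not None else {}

    @property
    def canonical(self):
        # mirror record['identifier']['canonical'] from the
        # dict-based version of _update_cache
        return self.record.get("identifier", {}).get("canonical")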
def setUp(self):
    self.buffer = config.BUFFERING
    config.BUFFERING = False
    # load all of the bibjson objects into the index
    for bj in bibjson_records:
        models.Record.store(bj)

    # load each object into the cache
    for bj in bibjson_records:
        rec = {
            "bibjson": bj
        }
        key = bj.get("id")
        cache.cache(key, models.MessageObject(record=rec))

    # set a page size which requires all query results to be paged
    # (or at least, when there is more than one result)
    self.old_page_size = invalidate.ES_PAGE_SIZE
    invalidate.ES_PAGE_SIZE = 1
    time.sleep(2)  # need to wait to give ES a chance to index the data
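setUp saves config.BUFFERING and invalidate.ES_PAGE_SIZE before patching them, so a matching tearDown presumably restores them; a sketch, assuming the same attribute names:

def tearDown(self):
    # restore the module-level settings patched in setUp
    config.BUFFERING = self.buffer
    invalidate.ES_PAGE_SIZE = self.old_page_size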
def test_10_cache_not_json(self):
    with self.assertRaises(cache.CacheException):
        cache.cache("exists", self)  # pass in something that won't json serialise