def main():
    """Populate a SQLite database with 5000 records, then time cached loads.

    Creates (after dropping) the ``Record`` table next to this file, inserts
    5000 rows, and loads a random sample of 1000 of them twice through a
    ``DBCacheManager``-cached lookup, logging the elapsed time of each pass
    so the cache's effect is visible.
    """
    db_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'db.db')
    engine = create_engine('sqlite:///' + db_path, echo=False)
    Session = sessionmaker(bind=engine)

    # Establish the default redcache connection used by DBCacheManager below.
    use_connection()

    logging.info('Nuking and creating the table.')
    Base.metadata.drop_all(bind=engine)
    Base.metadata.create_all(bind=engine)

    logging.info('Filling the table with 5000 records.')
    session = Session()
    for i in xrange(1, 5001):
        if i % 500 == 0:
            # Lazy %-args: the string is only formatted if the record is emitted.
            logging.info('%d records created', i)
        session.add(Record(uuid=str(uuid.uuid4()), created_at=datetime.now()))
    session.commit()
    session.close()

    cache_manager = DBCacheManager(key_base=u'sql_ex', ttl=5)

    @cache_manager.cache
    def get_record_by_id(id_, session=None):
        # FIX: the original bound the (by then closed) setup session as the
        # default argument, a latent use-after-close bug. Callers must pass a
        # live session; every call site below does.
        return session.query(Record).filter_by(id=id_).first()

    def load(ids):
        # Load each id through the cached lookup, sharing one session.
        session = Session()
        for id_ in ids:
            logging.debug('Loading record with ID = %d', id_)
            loaded = get_record_by_id(id_, session=session)
            logging.debug('Loaded record = %s', loaded)
        session.close()

    def timed_load(ids):
        # Run one load pass and log the wall-clock time it took.
        start_time = time.time()
        load(ids)
        logging.info('Elapsed time: %.5f seconds', time.time() - start_time)

    sample = random.sample(xrange(1, 5001), 1000)

    logging.info('Loading %d records...', len(sample))
    timed_load(sample)

    # Second pass over the same ids: should hit the cache and be faster.
    logging.info('Loading the same records again...')
    timed_load(sample)
from redcache import CacheManager, get_current_connection, use_connection class JsonCacheManager(CacheManager): def before_save(self, data, **kwargs): return json.dumps(data) def after_load(self, data, **kwargs): return json.loads(data) json_cache_manager = JsonCacheManager() def test_json_cache_manager(): @json_cache_manager.cache def cached(spam, eggs): return {'spam': spam, 'eggs': eggs} result = cached('spam', 'eggs') connection = get_current_connection() print connection.get('cache:cached:spam:eggs') connection.delete('cache:cached:spam:eggs') if __name__ == '__main__': use_connection() test_json_cache_manager()