def test_lazy_context_collection_cycle():
    # we have a cycle in the lazy context calculation: lazy1 -> lazy2 -> lazy3 -> lazy1
    context = LazyContextCollection(lazy1=SumContextFactory('lazy2'),
                                    lazy2=SumContextFactory('lazy3'),
                                    lazy3=SumContextFactory('lazy1'))
    with pytest.raises(KeyError):
        assert not context['lazy1']
    with pytest.raises(KeyError):
        assert not context.get('lazy2')
def test_lazy_context_collection(key, expected):
    context = LazyContextCollection(
        a=2, b=3, c=4,
        sum_a_b=SumContextFactory('a', 'b'),  # lazy context
        sum_a_b_c=SumContextFactory('sum_a_b', 'c'),
        named_value=NamedSumContextFactory('sum_a_b_c'))
    assert context[key] == expected
    assert context.get(key) == expected
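# The SumContextFactory and NamedSumContextFactory helpers used by the tests above are not
# shown in this excerpt. Below is a minimal sketch of what they could look like, assuming only
# the IContextFactory interface demonstrated later in this section (a factory is called with
# the context dict and returns the computed value); the actual test fixtures may differ.
from pypipes.context import IContextFactory


class SumContextFactory(IContextFactory):
    """Hypothetical helper: sums the values of the named context keys."""
    def __init__(self, *keys):
        self.keys = keys

    def __call__(self, context_dict):
        # each lookup may trigger another lazy factory, which is how the cycle test
        # above ends up raising a KeyError
        return sum(context_dict[key] for key in self.keys)


class NamedSumContextFactory(IContextFactory):
    """Hypothetical helper: wraps another context value into a labelled string."""
    def __init__(self, key):
        self.key = key

    def __call__(self, context_dict):
        return 'sum: {}'.format(context_dict[self.key])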
def _wrapped_context_manager(self, injections):
    with super(PipeContextManager, self).context(injections) as additional_injections:
        if additional_injections:
            injections = LazyContextCollection(injections, **additional_injections)
        for context in self._gen_func(injections):
            yield context
def run_processor(self, processor_id, injections):
    processor = self.get_processor(processor_id)
    if not processor:
        logger.error('Processor with ID %s is not registered in program %s',
                     processor_id, self.id)
        return

    # apply global context managers to each processor
    with self.context(injections) as context:
        if context:
            injections = LazyContextCollection(injections, **context)
        processor.process(injections)
# and provides context injections for factory_func parameters
context = {'conn_id': 123, 'extra': {}, 'name': 'my name'}

storage = LazyContextPoolFactory(
    lambda name, conn_id: MyStorageService(name, conn_id))
context_storage = storage(context)
print('\ncontext_storage.wait_all.get_name() = ', context_storage.wait_all.get_name())
print('context_storage.cursor.value = ', context_storage.cursor.value)

# usually a service pool is used as a context item in a LazyContextCollection,
# and sometimes it's useful to be able to specify different initialization factories
# for different pool items. In this case you may want to use the DistributedPool helper
lazy_context = LazyContextCollection(context)

# let's put our pool into the context collection together with a custom `library` storage.
# distributed_pool always checks whether a pool item exists in the context.
# The name of a pool item has to be prefixed with the distributed pool name: <pool_name>.<service_name>
# <pool_name>.default is used when the item is not found in the context.
lazy_context.update({
    'storage': distributed_pool,
    'storage.default': LazyContextPoolFactory(
        lambda name, conn_id: MyStorageService(name, conn_id)),
    'storage.library': LazyContext(
        lambda conn_id: MyStorageService('Custom library service', conn_id))
})
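# A hedged usage sketch of the distributed pool configured above; exactly how DistributedPool
# exposes its items is an assumption here (attribute access by service name, mirroring the
# pool access shown in the other examples), so treat this as illustrative only.
storage_pool = lazy_context['storage']
# 'library' is expected to be built by the custom 'storage.library' factory,
# while any other name should fall back to 'storage.default'
print('library storage name:', storage_pool.library.get_name())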
# this is an example of http_client_context usage.
# http_client_context is a lazy context
# for use in infrastructure setup for pipeline processing.
# It can use the metrics and cache contexts if they are defined,
# so let's create a context collection that will initialize the context for us,
# the way the pipeline infrastructure does it.
enable_cache = Config(http_client={
    'cache': {'enabled': True}  # enable caching in the http_client context
})

context = LazyContextCollection(
    http=http_client_context,
    cache=memory_cache_pool,  # we want to cache responses
    metrics=log_metrics_context,  # we want to collect http_client metrics
    config=Config(enable_cache),
    logger=logger)  # logger is used by log_metrics_context to output metrics

# let's initialize our context
http_client = context['http']
print('HTTP client is: ', http_client)

# http_client inherits all features of requests.Session,
# so let's make a test call. The metrics service should log a metric about the request
response = http_client.get('http://google.com')
print('First call status: ', response.status_code)

# let's call again. No metric should be emitted for this call
# because the response is already in the cache
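# A possible continuation of the example above (an assumption based on the comments):
# the same request is repeated, so the cached response is returned and no new metric is logged.
response = http_client.get('http://google.com')
print('Second call status: ', response.status_code)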
# most pipe processors and context managers expect that the storage is an IContentPool
# see service/example_pool.py for a pool usage example.
# There are several predefined storage pools that you may use in your application.
# This example demonstrates the usage of redis_storage_pool

# let's specify a context for lazy storage initialization
config = Config({'redis': {
    'host': 'localhost',
    'port': 6379,
}})
context = {'config': config,
           'storage': redis_storage_pool}  # redis_storage_pool is a lazy context

# let's initialize the storage the way a pipeline application does it.
storage = LazyContextCollection(context)['storage']

# storage is now a ContextPoolFactory of RedisStorage objects
print('Storage:', storage, type(storage.default))

# all the storages in the pool use the same redis client because the configuration is the same,
# but each has a different key prefix, which is the name of the storage
print('Client is the same:', storage.default.redis == storage.cursor.redis)
print('Prefix:', repr(storage.default._prefix), repr(storage.cursor._prefix))

# if you need physically separated storages,
# you may set a different redis config for some storage name.
# let's change our config a little
context['config'] = Config({
        'limit': 5,
        'hours': 2
    },  # 5 / each 2 full hours, starting from this one
    'day_quota': {
        'limit': 5,
        'days': 1
    },  # 5 / 1 day
    'month_quota': {
        'limit': 5,
        'months': 1
    },  # 5 / 1 month
}}

context = LazyContextCollection(
    quota=quota_pool,
    config=Config(config),
    rate_counter=memory_rate_pool,
)

# let's initialize a quota pool the way the infrastructure does it.
pool = context['quota']
print('Quota pool:', pool)

# we have several quotas configured
print('Get rate_quota:', pool.rate_quota)
print('Get hour_quota:', pool.hour_quota)

# UnlimitedQuota is returned if you try to get a quota that is not configured.
# UnlimitedQuota never expires
print('Get unknown_quota:', pool.unknown_quota)
def process(self, injections):
    with self.context(injections) as context:
        if context:
            injections = LazyContextCollection(injections, **context)
        super(ContextProcessor, self).process(injections)
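# The methods above all share one layering idiom: the current injections mapping is wrapped
# into a new LazyContextCollection together with whatever extra context a context manager
# yielded. A minimal standalone sketch of that idiom, assuming LazyContextCollection behaves
# like a dict built from a base mapping plus keyword overrides (the sample values are made up):
from pypipes.context import LazyContextCollection

injections = {'logger': 'base-logger', 'config': {'debug': False}}

# pretend a context manager yielded these additional injections
additional_injections = {'config': {'debug': True}, 'metrics': 'log-metrics'}

if additional_injections:
    # keyword items are layered on top of the original injections;
    # lazy items (IContextFactory instances) are still resolved only on first access
    injections = LazyContextCollection(injections, **additional_injections)

print(injections['config'])   # overridden by the context manager
print(injections['logger'])   # inherited from the original injections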
# that acts like a regular key-value storage.
# VersionedCursorStorage also uses IStorage as a storage backend, but it saves a separate cursor
# per program version. This allows an easy rollback
# to any previous program version (previous cursor version)
# if a new program version runs into a problem and its cursors can't be used any further.
from __future__ import print_function

from pypipes.service.cursor_storage import cursor_storage_context, versioned_cursor_storage_context
from pypipes.service.storage import memory_storage_pool
from pypipes.context import LazyContextCollection

# we will use LazyContextCollection to emulate context creation by the Infrastructure
context = LazyContextCollection({
    'storage': memory_storage_pool,
    'cursor_storage': cursor_storage_context,
})

cursor_storage = context['cursor_storage']
cursor_storage.save('cursor1', 'cursor1 value')
cursor_storage.save('cursor2', 'cursor2 value')
cursor_storage.save('cursor3', 'cursor3 value')
print('cursor1 =', cursor_storage.get('cursor1'))
print('cursor2 =', cursor_storage.get('cursor2'))
print('cursor3 =', cursor_storage.get('cursor3'))
print('unknown =', cursor_storage.get('unknown'))

# update a cursor value
cursor_storage.save('cursor1', 'updated cursor1 value')
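# illustrative follow-up: reading the cursor back should now return the updated value
print('cursor1 =', cursor_storage.get('cursor1'))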
def test_lazy_context_collection_unknown():
    context = LazyContextCollection()  # empty collection
    assert context.get('unknown') is None
    assert context.get('unknown', 100) == 100
from pypipes.context import (context, IContextFactory, LazyContextCollection,
                             apply_context_to_kwargs)

# LazyContextCollection provides lazy initialization of a context item
# if the context value is an instance of IContextFactory


# this context factory builds the key1 context from key2 and key3
class MyContextFactory(IContextFactory):
    def __call__(self, context_dict):
        return 'MyCustomContext: {}'.format(context_dict['key2'] + context_dict['key3'])


lazy_context = LazyContextCollection(key1=MyContextFactory(), key2=2, key3=3)
print('lazy_context = ', lazy_context)
print('lazy_context.get("key1") = ', LazyContextCollection(lazy_context).get('key1'))
print('lazy_context["key1"] = ', LazyContextCollection(lazy_context)['key1'])

# this works in apply_context_to_kwargs as well
kwargs = dict(key1=context.key1)
print(kwargs, '=', apply_context_to_kwargs(kwargs, LazyContextCollection(lazy_context)))

# if a key is not found in a LazyContextCollection, the KeyError message will contain a list
# of the available context keys for better error tracing.
try:
    LazyContextCollection(lazy_context)['unknown']
except KeyError as error:
    print('KeyError:', error)
# for use in infrastructure setup for pipeline processing.
# It can use the metrics and cache contexts if they are defined,
# so let's create a context collection that will initialize the context for us,
# the way the pipeline infrastructure does it.
API_SERVICE_URL = "http://localhost:8000/api/v1/doc/"

api_config = Config(api={
    'incidents': {
        'url': API_SERVICE_URL
    }
})

context = LazyContextCollection(
    api=api_client_pool,
    cache=memory_cache_pool,  # we plan to cache some responses
    metrics=log_metrics_context,  # we want to collect request metrics
    config=Config(api_config),
    logger=logger)  # logger is used by log_metrics_context to output metrics

# let's initialize our context
api = context['api']
print('API client is: ', api)

# let's call some API operation
directory = {
    'connection_uid': '1',
    'statuses': ['active'],
    'types': ['user', 'group']
}
result = api.incidents.directories.directory_add(directory=directory).result()
def context(self):
    return LazyContextCollection(self._context)
def get_program_context(self, program):
    context = LazyContextCollection(self.context)
    context['infrastructure'] = self
    context['program'] = program
    context['program_id'] = program.id
    return context
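# A hedged usage sketch: `infra` and `program` are assumed stand-ins for a real infrastructure
# instance and a program object; only the three keys set above are taken from the source.
program_context = infra.get_program_context(program)

# items set explicitly are available immediately; everything inherited from infra.context
# is still resolved lazily on first access
assert program_context['infrastructure'] is infra
assert program_context['program_id'] == program.id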