class AppContainer(containers.DeclarativeContainer): wiring_config = containers.WiringConfiguration(modules=[ "api.resources.astro_object.astro_object", "api.resources.classifier.classifier", "api.resources.features.features", "api.resources.magstats.magstats", "api.resources.light_curve.light_curve", "api.resources.probabilities.probabilities", ]) # config config = providers.Configuration() # gateways psql_db = providers.ThreadSafeSingleton(SQLConnection) mongo_db = providers.ThreadSafeSingleton(MongoConnection) database_config = config.DATABASE db_control = providers.ThreadSafeSingleton( DBControl, app_config=database_config.APP_CONFIG, psql_config=database_config.SQL, mongo_config=database_config.MONGO, psql_db=psql_db, mongo_db=mongo_db, ) # views dependencies view_result_handler = providers.Factory(ViewResultHandler) # packages lightcurve_package = providers.Container(LightcurveContainer, psql_db=psql_db, mongo_db=mongo_db)
class ServicesContainer(containers.DeclarativeContainer):
    """DI container wiring replication and heartbeat services.

    All services are thread-safe singletons sharing one logger and one
    NodesState instance.
    """

    config = providers.Configuration()
    logger = providers.ThreadSafeSingleton(get_module_logger)
    nodes_state_provider = providers.ThreadSafeSingleton(NodesState)
    message_service_provider = providers.ThreadSafeSingleton(MessageService)
    replication_sender_provider = providers.ThreadSafeSingleton(
        ReplicationSender,
        logger=logger,
        nodes_state=nodes_state_provider)
    # Heartbeat depends on the same sender singleton declared above.
    heartbeat_service_provider = providers.ThreadSafeSingleton(
        HeartbeatService,
        logger=logger,
        nodes_state=nodes_state_provider,
        replication_sender=replication_sender_provider)
def __init__(self, Connection, port, *args, **kwargs):
    """Create one thread-safe singleton provider per port.

    Args:
        Connection: connection class instantiated once per port.
        port: a single port value or a list of ports.
        *args, **kwargs: forwarded unchanged to each ``Connection``.
    """
    super().__init__()
    # isinstance (not `type(port) is list`) also accepts list subclasses.
    ports = port if isinstance(port, list) else [port]
    # Renamed the loop variable: the original comprehension shadowed the
    # `port` parameter.
    self._singletons = [
        providers.ThreadSafeSingleton(Connection, p, *args, **kwargs)
        for p in ports
    ]
def __init__(self, Device, *args, **kwargs):
    """Lazily wrap *Device* in a thread-safe singleton provider.

    The first positional argument after *Device* is the tracking flag;
    any remaining arguments are forwarded to the Device constructor.
    """
    # Fixed two defects: the original called args.pop(0) on a tuple
    # (tuples have no pop()), and logged self.tracking before it was
    # assigned.
    self.tracking, *args = args
    logger.debug(' Creating device with tracking {}'.format(self.tracking))
    self._singleton = providers.ThreadSafeSingleton(
        Device, *args, **kwargs)
    self._device = None
async def test_concurrent_init():
    """Concurrent awaits on an async singleton must yield one shared object."""

    async def make():
        return object()

    provider = providers.ThreadSafeSingleton(make)

    # Kick off both awaits before either resolves, then gather them.
    first, second = await asyncio.gather(provider(), provider())
    assert first is second
def test_async_mode(self):
    """Awaiting an async-mode singleton twice yields the same object."""
    instance = object()

    async def create_instance():
        return instance

    provider = providers.ThreadSafeSingleton(create_instance)

    instance1 = self._run(provider())
    instance2 = self._run(provider())

    self.assertIs(instance1, instance2)
    # Fixed: the original asserted `instance is instance`, a tautology
    # that never verified the provider actually returned `instance`.
    self.assertIs(instance1, instance)
    self.assertIs(instance2, instance)
async def test_async_mode():
    """Repeated awaits of an async singleton return the original object."""
    expected = object()

    async def factory():
        return expected

    provider = providers.ThreadSafeSingleton(factory)

    first = await provider()
    second = await provider()

    # Identity, not equality: the singleton must hand back the very
    # object produced by the factory, both times.
    assert first is second
    assert first is expected
    assert second is expected
class Container(containers.DeclarativeContainer):
    """Wires a thread-local object and a shared queue into worker threads."""

    # A distinct object per thread.
    thread_local_object = providers.ThreadLocalSingleton(object)
    # One queue shared (thread-safely) by all threads.
    queue_provider = providers.ThreadSafeSingleton(queue.Queue)
    put_in_queue = providers.Callable(
        put_in_queue,
        example_object=thread_local_object,
        queue_object=queue_provider,
    )
    # .provider delegates the callable itself (not its result) as the
    # thread target, so injection happens inside each thread.
    thread_factory = providers.Factory(
        threading.Thread,
        target=put_in_queue.provider,
    )
def my_logger(name='main'):
    """Return the named stdlib logger configured at INFO level.

    Args:
        name: logger name; defaults to 'main'.

    Returns:
        The ``logging.Logger`` registered under *name*.
    """
    # Fixed: the original wrapped logging.getLogger in a freshly created
    # ThreadSafeSingleton on every call, which cached nothing (the
    # provider was discarded immediately). logging.getLogger already
    # memoizes loggers by name, so the plain call is equivalent.
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    return logger
attributes4: Dict[str, Any] = provider4.attributes # Test 5: to check the provided instance interface provider5 = providers.Singleton(Animal) provided5: providers.ProvidedInstance = provider5.provided attr_getter5: providers.AttributeGetter = provider5.provided.attr item_getter5: providers.ItemGetter = provider5.provided['item'] method_caller5: providers.MethodCaller = provider5.provided.method.call( 123, arg=324) # Test 6: to check the DelegatedSingleton provider6 = providers.DelegatedSingleton(Cat) animal6: Animal = provider6(1, 2, 3, b='1', c=2, e=0.0) # Test 7: to check the ThreadSafeSingleton provider7: providers.BaseSingleton[Animal] = providers.ThreadSafeSingleton(Cat) animal7: Animal = provider7() # Test 8: to check the DelegatedThreadSafeSingleton provider8 = providers.DelegatedThreadSafeSingleton(Cat) animal8: Animal = provider8(1, 2, 3, b='1', c=2, e=0.0) # Test 9: to check the ThreadLocalSingleton provider9 = providers.ThreadLocalSingleton(Cat) animal9: Animal = provider9(1, 2, 3, b='1', c=2, e=0.0) # Test 10: to check the DelegatedThreadLocalSingleton provider10 = providers.DelegatedThreadLocalSingleton(Cat) animal10: Animal = provider10(1, 2, 3, b='1', c=2, e=0.0) # Test 11: to check the AbstractSingleton
import threading
import queue

import dependency_injector.providers as providers


def example(example_object, queue_object):
    """Put provided object in the provided queue."""
    queue_object.put(example_object)


# Create thread-local singleton provider for some object (main thread):
thread_local_object = providers.ThreadLocalSingleton(object)

# Create singleton provider for thread-safe queue:
queue_factory = providers.ThreadSafeSingleton(queue.Queue)

# Create callable provider for example(), inject dependencies:
# NOTE: this deliberately rebinds the name `example`, shadowing the
# function above with its injected wrapper.
example = providers.DelegatedCallable(
    example,
    example_object=thread_local_object,
    queue_object=queue_factory,
)

# Create factory for threads that are targeted to execute example():
thread_factory = providers.Factory(threading.Thread, target=example)

if __name__ == '__main__':
    # Create 10 threads for concurrent execution of example():
    threads = []
    # NOTE(review): the loop body is truncated in this chunk; the
    # remainder of the script is not visible here.
    for thread_number in range(10):
class OurOpeners(containers.DeclarativeContainer):
    """DI container for URL openers."""

    # Single shared opener, built lazily and thread-safely on first use.
    opener = providers.ThreadSafeSingleton(_build_opener)
    # Plain alias to the module-level helper (not a provider).
    url_to_file = _url_to_file
class Storage(containers.DeclarativeContainer):
    """DI container for S3-backed storage resources."""

    # NOTE(review): boto3.resource('s3') executes at class-definition
    # (import) time -- only the Bucket construction is deferred by the
    # provider. Confirm eager client creation is acceptable here.
    s3_preview_bucket = providers.ThreadSafeSingleton(
        boto3.resource('s3').Bucket, di_config.aws.preview_bucket)
class Contexts(_BaseContexts): default_translations = providers.ThreadSafeSingleton(init_locale, logger=_BaseContexts.logger) # 'unknown' is a hack not to modify an existing logger execution_context = providers.Factory(ExecutionContext, logger=_BaseContexts.logger)
class OurOpeners(containers.DeclarativeContainer):
    """DI container for URL openers, layered over a base opener."""

    # Base opener, created once.
    _our_opener = providers.ThreadSafeSingleton(_build_opener)
    # Public opener wrapping the base one with a fixed 10-second timeout.
    our_opener = providers.ThreadSafeSingleton(
        MyOpener, opener=_our_opener, timeout=10.0)
    # Plain alias to the module-level helper (not a provider).
    url_to_file = _url_to_file
class ServicesContainer(containers.DeclarativeContainer):
    """Minimal DI container: configuration, logger and message service."""

    config = providers.Configuration()
    # Both are created once, thread-safely, on first access.
    logger = providers.ThreadSafeSingleton(get_module_logger)
    message_service_provider = providers.ThreadSafeSingleton(MessageService)
class SubclassRelationForType(SubclassRelation):
    """Subclass relation restricted to nodes of a given RDF type."""

    def __init__(self, node_class, context=None, graph=None,
                 relation=RDFS.subClassOf):
        """Initialise the relation.

        Args:
            node_class: RDF type both operands must carry.
            context: execution context; resolved per instance when None.
            graph: RDF graph to operate on, if any.
            relation: RDF predicate for the relation (RDFS.subClassOf).
        """
        # Fixed: the previous default `context=Contexts.execution_context()`
        # was evaluated once at import time, so every instance silently
        # shared a single ExecutionContext. Resolve it lazily instead;
        # explicit callers are unaffected.
        if context is None:
            context = Contexts.execution_context()
        super().__init__(context=context, graph=graph, relation=relation)
        self.node_class = node_class

    def check_types(self, graph, src, dst):
        """Return True when both src and dst are typed as self.node_class.

        Logs a translated warning when exactly one operand carries the
        expected type (a likely data error).
        """
        src_ok = (src, RDF.type, self.node_class) in graph
        dst_ok = (dst, RDF.type, self.node_class) in graph
        if src_ok ^ dst_ok:  # exactly one side is typed
            msg = self.context.translations.gettext(
                "Both operands should be of type {type}").format(
                    type=self.node_class)
            self.context.logger.warning(msg)
        return src_ok and dst_ok


# Subclass graph loaded lazily, once, in a thread-safe way.
basic_subclasses_graph = providers.ThreadSafeSingleton(
    Global.load_rdf, filename='core/data/subclasses.ttl')


class SubclassContainers(containers.DeclarativeContainer):
    """DI container exposing the basic subclass relation."""

    # NOTE(review): Contexts.execution_context() runs at import time here;
    # confirm a shared context is intended for this singleton.
    basic_subclasses = providers.ThreadSafeSingleton(
        SubclassRelation,
        context=Contexts.execution_context(),
        graph=basic_subclasses_graph)
class SubclassContainers(containers.DeclarativeContainer):
    """DI container exposing the basic subclass relation."""

    # NOTE(review): Contexts.execution_context() is evaluated at
    # class-definition (import) time, so this singleton shares one
    # context -- confirm that is intended.
    basic_subclasses = providers.ThreadSafeSingleton(
        SubclassRelation,
        context=Contexts.execution_context(),
        graph=basic_subclasses_graph)
class Contexts(containers.DeclarativeContainer):
    """DI container bundling logger, translations and execution context."""

    # Provider returning the configured module logger on each call.
    default_logger = providers.Callable(my_logger)
    # Locale/translations initialised once, thread-safely.
    default_translations = providers.ThreadSafeSingleton(init_locale)
    # New ExecutionContext per call; dependencies are resolved from the
    # providers declared above.
    execution_context = providers.Factory(
        ExecutionContext,
        logger=default_logger,
        translations=default_translations)
def singleton_client(self):
    """Return a new thread-safe singleton provider for the wrapped class.

    NOTE(review): every call builds a *fresh* provider, so providers from
    separate calls do not share an instance -- confirm callers cache the
    result if one shared client is expected.
    """
    return providers.ThreadSafeSingleton(self.__cls, **self.__kwargs)