def __init__(self, name, dist, debug=False, pickler=Pickler, unpickler=Unpickler, serializer=serializer.Dummy, publisher_service=None, **config):
    """Initialization

    In:
      - ``serializer`` -- serializer / deserializer of the states
      - ``pickler`` -- pickler used by the serializer
      - ``unpickler`` -- unpickler used by the serializer
    """
    super(Sessions, self).__init__(
        name, dist,
        debug=debug,
        pickler=pickler, unpickler=unpickler, serializer=serializer,
        **config
    )

    publisher = publisher_service.service
    self.check_concurrence(publisher.has_multi_processes, publisher.has_multi_threads)

    # A string is a reference to resolve; any other value is used as-is
    def resolve(obj):
        return reference.load_object(obj)[0] if isinstance(obj, str) else obj

    pickler_class = resolve(pickler)
    unpickler_class = resolve(unpickler)
    serializer_class = resolve(serializer)

    self.serializer = serializer_class(pickler_class, unpickler_class, debug, self.logger)
def __init__(self, name, dist, emulator_host, emulator_port, max_bytes, max_latency, max_messages, ordering=False, client_options=None, credentials=None, services_service=None, **config):
    """Initialization

    In:
      - ``emulator_host``, ``emulator_port`` -- optional Pub/Sub emulator endpoint
      - ``max_bytes``, ``max_latency``, ``max_messages`` -- publisher batching settings
      - ``ordering`` -- enable message ordering on the publisher
      - ``client_options`` -- reference (or object) for the client options
      - ``credentials`` -- reference to the credentials factory
    """
    # BUG FIX: the original passed the hard-coded defaults
    # (ordering=False, client_options=None, credentials=None) to the plugin
    # instead of forwarding the received arguments.
    services_service(
        plugin.Plugin.__init__, self, name, dist,
        emulator_host=emulator_host, emulator_port=emulator_port,
        max_bytes=max_bytes, max_latency=max_latency, max_messages=max_messages,
        ordering=ordering, client_options=client_options, credentials=credentials,
        **config
    )

    batch_settings = types.BatchSettings(max_bytes=max_bytes, max_latency=max_latency, max_messages=max_messages)
    publisher_options = types.PublisherOptions(enable_message_ordering=ordering)

    settings = {}

    if client_options is not None:
        # A string is a reference to a factory to instantiate through the services
        if isinstance(client_options, (str, type(u''))):
            client_options = services_service(reference.load_object(client_options)[0])
        settings['client_options'] = client_options

    if emulator_host:
        # Talk to the local emulator over an insecure channel
        channel = grpc.insecure_channel('{}:{}'.format(emulator_host, emulator_port))
        transport = PublisherGrpcTransport(channel=channel)
    else:
        transport = None

    if credentials is not None:
        settings['credentials'] = services_service(reference.load_object(credentials)[0])

    # Class attribute: the client is shared by all the instances (proxy pattern)
    self.__class__.proxy_target = PublisherClient(batch_settings, publisher_options, transport=transport, **settings)
def __init__(self, name, dist, http_exception_handler, services_service, **config):
    """Initialization

    In:
      - ``http_exception_handler`` -- reference to the HTTP exception handler to load
    """
    services_service(
        super(ExceptionsService, self).__init__,
        name, dist,
        http_exception_handler=http_exception_handler,
        **config
    )

    handler, _ = reference.load_object(http_exception_handler)
    self.http_exception_handler = handler
def handle_start(self, app):
    """Create the engines/sessions of every activated database configuration.

    In:
      - ``app`` -- the application (unused here)
    """
    engines = {}  # engine cache shared across the configurations

    for db_name, db_config in self.configs.items():
        # Skip configurations explicitly deactivated
        if not db_config.pop('activated'):
            continue

        engine_config = self._configure_session(**db_config)
        populate = self._configure_engine(db_name, engines, **engine_config)
        self.populates.append(reference.load_object(populate)[0])

    configure_mappers(self.collections_class, self.inverse_foreign_keys)
def _configure_session(session, autoflush, autocommit, expire_on_commit, twophases, **engine_config):
    """Configure a SQLAlchemy session and register it for transaction management.

    In:
      - ``session`` -- reference to the session (factory) to load
      - ``autoflush``, ``autocommit``, ``expire_on_commit``, ``twophases`` -- session settings

    Return:
      - the remaining, engine-related configuration
    """
    session_factory = reference.load_object(session)[0]
    session_factory.configure(
        autoflush=autoflush,
        autocommit=autocommit,
        expire_on_commit=expire_on_commit,
        twophase=twophases
    )
    zope.sqlalchemy.register(session_factory)

    return engine_config
def __init__(self, name, dist, bundles=None, output_dir=None, watch=False, reload=False, refresh=False, manifest='', mapping=None, reloader_service=None, services_service=None, **config):
    """Initialization

    In:
      - ``bundles`` -- reference to the assets bundles (dict or factory)
      - ``output_dir`` -- directory where the assets are built
      - ``watch`` -- activate the reloader on assets changes
    """
    services_service(
        super(WebAssets, self).__init__,
        name, dist,
        bundles=bundles, output_dir=output_dir,
        watch=watch, reload=reload, refresh=refresh,
        manifest=manifest, mapping=mapping,
        **config
    )

    self.environment = Env(
        directory=output_dir,
        auto_build=False,
        manifest='json:{}'.format(manifest) if manifest else None,
        url_mapping=mapping or {},
        **config
    )
    self.reload = reload
    self.refresh = refresh
    self.reloader = reloader_service if watch else None

    for assets_filter in (TypeScript, BabelJSX, BabelJS, CompileLess):
        filter.register_filter(assets_filter)

    if bundles:
        bundles = reference.load_object(bundles)[0]
        # The reference can designate a factory to call with the services
        if callable(bundles):
            bundles = services_service(bundles, self)

        for bundle_name, bundle in bundles.items():
            self.environment.register(bundle_name, bundle)
            bundle.get_version()

    self.bundles = bundles
def __init__(self, name, dist, emulator_host, emulator_port, client_options=None, credentials=None, services_service=None, **config):
    """Initialization

    In:
      - ``emulator_host``, ``emulator_port`` -- optional Pub/Sub emulator endpoint
      - ``client_options`` -- reference (or object) for the client options
      - ``credentials`` -- reference to the credentials factory
    """
    services_service(
        super(Subscriber, self).__init__,
        name, dist,
        emulator_host=emulator_host, emulator_port=emulator_port,
        client_options=client_options, credentials=credentials,
        **config
    )

    settings = {}

    if client_options is not None:
        # A string is a reference to a factory to instantiate through the services
        if isinstance(client_options, (str, type(u''))):
            client_options = services_service(reference.load_object(client_options)[0])
        settings['client_options'] = client_options

    if emulator_host:
        # Talk to the local emulator over an insecure channel
        channel = grpc.insecure_channel('{}:{}'.format(emulator_host, emulator_port))
        transport = SubscriberGrpcTransport(channel=channel)
    else:
        transport = None

    if credentials is not None:
        settings['credentials'] = services_service(reference.load_object(credentials)[0])

    # Class attribute: the client is shared by all the instances (proxy pattern)
    self.__class__.proxy_target = SubscriberClient(transport=transport, **settings)
def _configure_engine(self, name, engines, uri, debug, metadata, populate, **config):
    """Bind a metadata object to a (possibly shared) engine.

    In:
      - ``name`` -- name of the database configuration
      - ``engines`` -- cache of already created engines, keyed by settings
      - ``uri`` -- database connection URI
      - ``debug`` -- echo the SQL statements
      - ``metadata`` -- reference to the metadata object to load
      - ``populate`` -- reference to the populate function (returned unchanged)

    Return:
      - the ``populate`` reference
    """
    metadata_obj = reference.load_object(metadata)[0]

    if uri:
        uri = self.convert_uri(uri)

    # Engines with identical settings are shared between configurations.
    # BUG FIX: the original used ``engines.setdefault(key, engine_from_config(...))``
    # which built (and discarded) a brand new engine, with its connection pool,
    # on every cache hit. Only create the engine when it is not cached yet.
    key = (uri, frozenset(config.items()), debug)
    engine = engines.get(key)
    if engine is None:
        engine = engines[key] = engine_from_config(config, '', echo=debug, url=uri)

    metadata_obj.bind = engine
    self.metadatas[name] = metadata_obj

    return populate
def __init__(self, name, dist, collections_class, inverse_foreign_keys, upgrade, **configs):
    """Initialization

    In:
      - ``collections_class`` -- reference (``module:name``) or expression of the
        collections class to use for the relations
      - ``inverse_foreign_keys`` -- create the inverse relations
      - ``upgrade`` -- alembic configuration parameters
    """
    super(Database, self).__init__(
        name, dist,
        collections_class=collections_class,
        inverse_foreign_keys=inverse_foreign_keys,
        upgrade=upgrade,
        **configs
    )

    if ':' in collections_class:
        self.collections_class = reference.load_object(collections_class)[0]
    else:
        # NOTE(review): ``eval`` of a configuration value -- presumably names a
        # builtin such as ``set``/``list``; the configuration must stay trusted.
        self.collections_class = eval(collections_class)

    self.inverse_foreign_keys = inverse_foreign_keys
    # Keep only the alembic parameters which were actually set
    self.alembic_config = {k: v for k, v in upgrade.items() if v is not None}
    self.configs = configs
    self.metadatas = {}
    self.populates = []
def __init__(self, name, dist, mountpoints=None, **config):
    """Initialization

    In:
      - ``mountpoints`` -- ``{route: application reference}`` handlers to register
    """
    super(Statics, self).__init__(name, dist, **config)

    self._mountpoints = []

    for route, app_ref in (mountpoints or {}).items():
        app = reference.load_object(app_ref)[0]
        self.register_handler(route, app)
def __init__(self, name, dist, topics=(), key_deserializer=None, value_deserializer=None, default_offset_commit_callback=None, partition_assignment_strategy=None, consumer_timeout_ms=None, socket_options=None, ssl_context=None, metric_reporters=None, selector=None, **config):
    """Initialization

    The string parameters are references to load; unset parameters fall back
    to the ``kafka.KafkaConsumer`` defaults.
    """
    plugin.Plugin.__init__(
        self, name, dist,
        topics=topics,
        key_deserializer=key_deserializer,
        value_deserializer=value_deserializer,
        default_offset_commit_callback=default_offset_commit_callback,
        partition_assignment_strategy=partition_assignment_strategy,
        consumer_timeout_ms=consumer_timeout_ms,
        socket_options=socket_options,
        ssl_context=ssl_context,
        metric_reporters=metric_reporters,
        selector=selector,
        **config
    )

    consumer_defaults = kafka.KafkaConsumer.DEFAULT_CONFIG

    if key_deserializer:
        key_deserializer = reference.load_object(key_deserializer)[0]

    if value_deserializer:
        value_deserializer = reference.load_object(value_deserializer)[0]

    if default_offset_commit_callback:
        default_offset_commit_callback = reference.load_object(default_offset_commit_callback)[0]
    else:
        default_offset_commit_callback = consumer_defaults['default_offset_commit_callback']

    if partition_assignment_strategy is None:
        partition_assignment_strategy = consumer_defaults['partition_assignment_strategy']
    else:
        partition_assignment_strategy = [
            reference.load_object(strategy)[0]
            for strategy in partition_assignment_strategy
        ]

    if consumer_timeout_ms is None:
        consumer_timeout_ms = consumer_defaults['consumer_timeout_ms']

    if socket_options is None:
        socket_options = consumer_defaults['socket_options']

    if metric_reporters is None:
        metric_reporters = consumer_defaults['metric_reporters']
    else:
        metric_reporters = [
            reference.load_object(reporter)[0]
            for reporter in metric_reporters
        ]

    if selector is None:
        selector = consumer_defaults['selector']
    else:
        selector = reference.load_object(selector)[0]

    kafka.KafkaConsumer.__init__(
        self, *topics,
        key_deserializer=key_deserializer,
        value_deserializer=value_deserializer,
        default_offset_commit_callback=default_offset_commit_callback,
        partition_assignment_strategy=partition_assignment_strategy,
        consumer_timeout_ms=consumer_timeout_ms,
        socket_options=socket_options,
        ssl_context=ssl_context,
        metric_reporters=metric_reporters,
        selector=selector,
        **config
    )
def __init__(self, name, dist, key_serializer=None, value_serializer=None, partitioner=None, socket_options=None, ssl_context=None, metric_reporters=None, selector=None, **config):
    """Initialization

    The string parameters are references to load; unset parameters fall back
    to the ``kafka.KafkaProducer`` defaults.
    """
    plugin.Plugin.__init__(
        self, name, dist,
        key_serializer=key_serializer,
        value_serializer=value_serializer,
        partitioner=partitioner,
        socket_options=socket_options,
        ssl_context=ssl_context,
        metric_reporters=metric_reporters,
        selector=selector,
        **config
    )

    producer_defaults = kafka.KafkaProducer.DEFAULT_CONFIG

    if key_serializer:
        key_serializer = reference.load_object(key_serializer)[0]

    if value_serializer:
        value_serializer = reference.load_object(value_serializer)[0]

    if partitioner is None:
        partitioner = producer_defaults['partitioner']
    else:
        partitioner = reference.load_object(partitioner)[0]

    if socket_options is None:
        socket_options = producer_defaults['socket_options']

    if metric_reporters is None:
        metric_reporters = producer_defaults['metric_reporters']
    else:
        metric_reporters = [
            reference.load_object(reporter)[0]
            for reporter in metric_reporters
        ]

    if selector is None:
        selector = producer_defaults['selector']
    else:
        selector = reference.load_object(selector)[0]

    kafka.KafkaProducer.__init__(
        self,
        key_serializer=key_serializer,
        value_serializer=value_serializer,
        partitioner=partitioner,
        socket_options=socket_options,
        ssl_context=ssl_context,
        metric_reporters=metric_reporters,
        selector=selector,
        **config
    )
def __init__(
    self, name, dist, config_sections, main, on_configure, watch, tasks, services_service, reloader_service=None, **config
):
    """Initialization

    In:
      - ``config_sections`` -- names of the sections kept as Celery configuration
      - ``main`` -- name of the main module, passed to the Celery application
      - ``on_configure`` -- optional reference to a callable receiving the final config
      - ``watch`` -- activate the reloader on files changes
      - ``tasks`` -- names of the task sections to register
    """
    super(_CeleryService, self).__init__(name, dist, **config)

    self.watch = watch
    self.reloader = reloader_service
    self.services = services_service
    self.files = set()

    # The concurrency setting is an expression evaluated with the number of
    # CPUs available (e.g. ``NB_CPUS * 2``). The configuration must stay trusted.
    nb_cpus = multiprocessing.cpu_count()
    config['worker']['concurrency'] = eval(config['worker']['concurrency'], {}, {'NB_CPUS': nb_cpus})

    celery_config = {}
    app_tasks = {name: {} for name in tasks}

    # Split the configuration: dict sections not listed in ``config_sections``
    # are task definitions (with the unset parameters dropped); everything
    # else is Celery configuration.
    for section, parameters in list(config.items()):
        if isinstance(parameters, dict):
            if (section not in config_sections):
                app_tasks[section] = config[section] = {k: v for k, v in parameters.items() if v is not None}
            else:
                celery_config[section] = parameters.copy()
        else:
            celery_config[section] = parameters

    # Each dict sub-section of ``[task]`` (except ``publish_retry_policy``)
    # becomes a route entry: task name -> queue
    celery_config['task']['routes'] = {
        section: celery_config['task'].pop(section)['queue']
        for section, parameters in list(celery_config['task'].items())
        if isinstance(parameters, dict) and (section != 'publish_retry_policy')
    }

    # Each dict sub-section of ``[beat]`` becomes a schedule entry. A crontab
    # built from the set CRONTAB_PARAMS takes precedence over a plain schedule.
    schedules = {}
    for section, parameters in list(celery_config['beat'].items()):
        if isinstance(parameters, dict):
            del celery_config['beat'][section]
            schedule = {'task': parameters['task'], 'args': eval(parameters['args'])}
            if parameters['schedule']:
                schedule['schedule'] = parameters['schedule']
            crontab_params = {param: parameters[param] for param in CRONTAB_PARAMS if parameters[param] is not None}
            if crontab_params:
                schedule['schedule'] = crontab(**crontab_params)
            schedules[section] = schedule

    celery_config = collections.AttributeDict(defaults.flatten(celery_config))
    celery_config['beat_schedule'] = schedules

    if celery_config.pop('result_asynchronous_callbacks'):
        asynchronous.register_drainer('default')(Drainer)

    # Let the application hook tweak the final configuration
    if on_configure:
        reference.load_object(on_configure)[0](celery_config)

    self.celery = self.CELERY_FACTORY(main, log='nagare.services.celery:Logging', config_source=celery_config)

    for task, parameters in app_tasks.items():
        self.register_task(reference.load_object(task)[0], **parameters)