def make(cls, what, **kwargs):
    '''Read configuration; instantiate classes using dependency injection.

    :param what: list of either a class to instantiate or
                 None to get the dependency graph itself.
    :param powers: powers needed by dependencies

    Using :meth:`mods` to get a list of modules that provide bindings
    from classes (or other keys) to objects, create an
    `injector.Injector` and use it to instantiate each of the classes
    in `what`.
    '''
    # _logged(('make', cls, kwargs))
    modules = cls.mods(**kwargs)
    depgraph = injector.Injector(modules)
    it = None
    try:
        return [depgraph.get(it) if it else depgraph
                for it in what]  # noqa
    except TypeError as oops:
        # for debugging:
        # ack senderle Jul '12 https://stackoverflow.com/a/11415140/7963
        # import sys
        # import traceback
        # ex_type, ex, tb = sys.exc_info()
        # traceback.print_tb(tb)
        # import pdb; pdb.set_trace()
        raise TypeError('failed (%s) to instantiate: %s w.r.t. \n%s' % (
            oops, it, '\n'.join([str(m) for m in modules])))
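# A minimal, self-contained sketch of the pattern `make` relies on: build an
# injector.Injector from a list of binding modules, then resolve each requested
# class (or hand back the dependency graph itself for a None entry). The
# GreetingModule/Greeting/Greeter names below are hypothetical illustrations,
# not part of the original project.
import injector


class Greeting(str):
    pass


class GreetingModule(injector.Module):
    def configure(self, binder):
        # Bind a key to a concrete object, the role cls.mods(**kwargs) plays above.
        binder.bind(Greeting, to=Greeting('hello'))


class Greeter:
    @injector.inject
    def __init__(self, greeting: Greeting):
        self.greeting = greeting


modules = [GreetingModule()]
depgraph = injector.Injector(modules)
greeter, graph = [depgraph.get(it) if it else depgraph for it in [Greeter, None]]
assert greeter.greeting == 'hello' and graph is depgraph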
def _setup_dependency_injection(settings: dict, engine: Engine) -> injector.Injector:
    return injector.Injector(
        [
            Db(engine),
            RedisMod(settings["redis.host"]),
            Rq(),
            EventBusMod(),
            Configs(settings),
            Auctions(),
            AuctionsInfrastructure(),
            Shipping(),
            ShippingInfrastructure(),
            CustomerRelationship(),
            Payments(),
            Processes(),
        ],
        auto_bind=False,
    )
def _setup_dependency_injection(
    settings: dict, connection_provider: ThreadlocalConnectionProvider
) -> injector.Injector:
    return injector.Injector(  # type: ignore
        [
            Db(connection_provider),
            RedisMod(settings["redis.host"]),
            Rq(),
            EventBusMod(),
            Configs(settings),
            Auctions(),
            AuctionsInfrastructure(),
            Shipping(),
            ShippingInfrastructure(),
            CustomerRelationship(),
            Payments(),
            Processes(),
        ],
        auto_bind=False,
    )
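# Hedged usage note for the two _setup_dependency_injection variants above:
# because auto_bind=False is passed, the returned Injector only resolves keys
# that the listed modules (Db, Auctions, Payments, ...) bound explicitly, and
# asking for an unbound class raises an error instead of silently
# auto-constructing it. PlacingBidUseCase below is a hypothetical name used
# purely for illustration.
#
# container = _setup_dependency_injection(settings, connection_provider)
# place_bid = container.get(PlacingBidUseCase)  # works only if a module bound it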
def __init__(self):
    self.injector = injector.Injector(self.__class__.configure)
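# Hedged sketch of what the one-liner above relies on: injector.Injector accepts
# a plain callable taking a binder as a module, so a class can ship its own
# bindings in a configure classmethod. Service/Settings are hypothetical names
# for illustration, not taken from the snippet.
import injector


class Settings(dict):
    pass


class Service:
    @classmethod
    def configure(cls, binder):
        # Called by the Injector with the binder, exactly like a module function.
        binder.bind(Settings, to=Settings(debug=True))

    def __init__(self):
        self.injector = injector.Injector(self.__class__.configure)


svc = Service()
assert svc.injector.get(Settings)['debug'] is True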
import logging

import click
import injector
from flask.cli import with_appcontext
from flask_cors import CORS
from flask_injector import FlaskInjector
from flask_sqlalchemy import SQLAlchemy
from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics
from flask_migrate import Migrate

import rhub
from rhub.api.vault import Vault, VaultModule
from rhub.auth.keycloak import KeycloakModule
from rhub.scheduler import SchedulerModule


logger = logging.getLogger(__name__)

di = injector.Injector()
db = SQLAlchemy()
migrate = Migrate()

DEFAULT_PAGE_LIMIT = 20


def init_app():
    logger.info('Starting initialization...')
    from ._setup import setup
    setup()
    logger.info('Initialization finished.')


@click.command('init')
@with_appcontext
from flask import Flask, request
import injector
import os

db_url = os.environ.get("DB_API", "http://chaos.db.openshift:5001")
server_port = int(os.environ.get("SERVER_PORT", 5002))

app = Flask(__name__)
injection_slave = injector.Injector(db_url)


@app.route('/inject_fault', methods=['GET'])
def get_instructions():
    return "send dns and fault name in json object"


@app.route('/inject_fault', methods=['POST'])
def inject_fault():
    dns = request.json['dns']
    fault = request.json['fault']
    output = call_slave(dns, fault)
    return output


def call_slave(dns, fault):
    output = injection_slave.start_experiment(dns, fault)
    return output


if __name__ == '__main__':
    app.run(host='0.0.0.0', port=server_port)
def test_injector():
    return injector.Injector()
def runner() -> None:
    """Bind all and launch the simulation!"""
    ini = time.process_time()
    custom_injector = injector.Injector([Module])
    configuration = custom_injector.get(Configuration)
    config_logging(configuration)
    create_histogram_tables(custom_injector.get(sqlite3.Connection))
    if configuration.get_arg('debug'):
        numpy.random.seed(0)
    simulator = custom_injector.get(Simulation)
    max_runs = configuration.get_arg('max_runs')
    confidence_width = configuration.get_arg('max_confidence_interval_width')
    run = custom_injector.get(profile)(simulator.run)
    logger.info('Parsing done at second %.2f', time.process_time() - ini)
    logger.info('Simulating %d users during %d s (%.1f week(s)).',
                configuration.users_num, configuration.simulation_time,
                configuration.simulation_time / WEEK(1))
    logger.info('User Satisfaction (US) target is %d%%.',
                simulator.target_satisfaction)
    if simulator.timeout[0] < math.inf:
        logger.info(
            'Average global timeout will be %.2f s '
            '(median = %.2f s, std = %.2f s)', *simulator.timeout)
        logger.info(
            'A priori WUS = %.2f%% (median = %.2f%%, std = %.2f p.p.), '
            'US = %.2f%% (median = %.2f%%, std = %.2f p.p.), '
            'RI = %.2f%%.', *simulator.test_timeout)
    logger.info('A priori analysis at second %.2f', time.process_time() - ini)
    if configuration.get_arg('graph_timeouts'):
        simulator.graph_timeouts()
        logger.info('Graph done %.2f', time.process_time() - ini)
    (s, i, t), c = run(), 1
    logger.info('Run 1: US = %.2f%%, RI = %.2f%%, timeout = %.2f', s, i, t)
    if max_runs == 1 or configuration.get_arg('fleet_generator'):
        logger.warning('Only one run, cannot calculate confidence intervals')
    else:
        satisfaction = confidence_interval(s)
        inactivity = confidence_interval(i)
        (xs, ds) = satisfaction.send(None)
        (xi, di) = inactivity.send(None)
        while di > confidence_width or ds > confidence_width or c < 2:
            (s, i, t), c = run(), c + 1
            (xs, ds) = satisfaction.send(s)
            (xi, di) = inactivity.send(i)
            logger.info(
                'Run %d: US = %.2f%% (d = %.3f), '
                'RI = %.2f%% (d = %.3f), timeout = %.2f', c, xs, ds, xi, di, t)
            if c >= max_runs:
                logger.warning('Max runs (%d) reached, stopping.', max_runs)
                break
        logger.info('All runs done (%d).', c)
    logger.info('Runs done at second %.2f', time.process_time() - ini)
    if configuration.get_arg('plot'):
        logger.debug('Storing plots.')
        custom_injector.get(Plot).plot_all()
        logger.info('Plotting done at second %.2f', time.process_time() - ini)
    logger.debug('Process memory footprint: %.2f MiB',
                 memory_profiler.memory_usage()[0])
    logger.info('All done at second %.2f', time.process_time() - ini)
import injector
import tornado.ioloop

from url_shortener.lib.configuration import Configuration
from url_shortener.handlers.module import AppHandlers
from url_shortener.app import (AppModule, App)

if __name__ == '__main__':
    conf = Configuration(mongo_conf={
        'url': 'mongodb://localhost',
        'db': 'url_shortener'
    })
    injection = injector.Injector(AppModule(conf))
    app = injection.get(App)
    server = app.get_server()
    server.start(0)
    # add handlers later to start connection after fork
    app.add_handlers(injection.get(AppHandlers))
    tornado.ioloop.IOLoop.current().start()
    from imports.infrastructure.domains.blog.adapters.identity_adapter_impl import IdentityAdapterImpl
    from imports.infrastructure.domains.blog.adapters.path_adapter_impl import PathAdapterImpl
    from imports.infrastructure.domains.blog.adapters.time_adapter_impl import TimeAdapterImpl
    from imports.infrastructure.domains.blog.adapters.truncate_html_adapter_impl import TruncateHTMLAdapterImpl

    binder.bind(ConfigAdapter, to=ConfigAdapterImpl)
    binder.bind(EnvAdapter, to=EnvAdapterImpl)
    binder.bind(IdentityAdapter, to=IdentityAdapterImpl)
    binder.bind(PathAdapter, to=PathAdapterImpl)
    binder.bind(SlugAdapter, to=SlugAdapterImpl)
    binder.bind(TimeAdapter, to=TimeAdapterImpl)
    binder.bind(TruncateHTMLAdapter, to=TruncateHTMLAdapterImpl)
    binder.bind(SiteInfoRepository, to=SiteInfoRepositoryImpl)
    binder.bind(WebPageRepository, to=WebPageRepositoryImpl)
    binder.bind(AssetRepository, to=AssetRepositoryImpl)
    binder.bind(ArticleRepository, to=ArticleRepositoryImpl)


def mapping_rst_parser_domain(binder):
    from imports.domains.rst_parser.adapters import PathAdapter, TransformRstAdapter
    from imports.infrastructure.domains.rst_parser.adapters.transform_rst_adapter_impl import TransformRstAdapterImpl
    from imports.infrastructure.domains.rst_parser.adapters.path_adapter_impl import PathAdapterImpl

    binder.bind(PathAdapter, to=PathAdapterImpl)
    binder.bind(TransformRstAdapter, to=TransformRstAdapterImpl)


domain_injector = injector.Injector(
    [mapping_blog_domain, mapping_rst_parser_domain])
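# Hedged usage note for the wiring above: each mapping_* function is a module
# callable, so domain_injector.get(<port>) returns the bound adapter
# implementation, assuming its constructor can be satisfied. Illustrative only:
#
# transform = domain_injector.get(TransformRstAdapter)
# isinstance(transform, TransformRstAdapterImpl)  # True, per binder.bind(...)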
    General Blitzortung error class.
    """
    pass


# -----------------------------------------------------------------------------
# Public interface and exports.
# -----------------------------------------------------------------------------

import injector

from . import config
from . import db
from . import geom

INJECTOR = injector.Injector([config.ConfigModule(), db.DbModule()])

__all__ = [
    'builder.Strike', 'builder.Station',
    'data.TimeIntervals', 'data.Timestamp', 'data.NanosecondTimestamp',  # data items
    'db.strike', 'db.station', 'db.stationOffline', 'db.location',  # database access
    'Error',  # custom exceptions
    'files.Raw', 'files.Data',
    'geom.Point',
    print('usage: %s aws:<region>/<dynamo table> <mount point> [mount options]' % argv[0])
    exit(1)

logStream = open('/var/log/dynamo-fuse.log', 'w', 0)
logging.basicConfig(stream=logStream)
logging.getLogger("dynamo-fuse-oper ").setLevel(logging.DEBUG)
logging.getLogger("dynamo-fuse-access").setLevel(logging.INFO)
logging.getLogger("dynamo-fuse-record").setLevel(logging.INFO)
logging.getLogger("dynamo-fuse-file ").setLevel(logging.DEBUG)
logging.getLogger("fuse.log-mixin").setLevel(logging.INFO)
logging.getLogger("dynamo-fuse-lock ").setLevel(logging.DEBUG)
logging.getLogger("dynamo-fuse-master").setLevel(logging.DEBUG)
logging.getLogger("dynamo-fuse-block ").setLevel(logging.DEBUG)

if argv[2] == "cleanup":
    cleanup(argv[1])
elif argv[2] == "createTable":
    DynamoFS(argv[1]).createTable()
else:
    fg = False
    if len(argv) == 4:
        fg = "fg" in argv[3].split(",")
    dynamoFS = DynamoFS(argv[1])
    dynamofuse.ioc = injector.Injector([DynamoFuseInjector(dynamoFS)])
    fuse = FUSE(dynamoFS, argv[2], foreground=fg, nothreads=not MULTITHREADED,
                default_permissions=False, auto_cache=False, hard_remove=True,
                noauto_cache=True, kernel_cache=False, direct_io=True,
                allow_other=True, use_ino=True, attr_timeout=0)