def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    # Configure SQLAlchemy: bind the scoped session and the declarative
    # metadata to the engine built from the 'sqlalchemy.*' settings.
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # Configure ElasticSearch
    configure_es_from_config(settings)

    config = Configurator(settings=settings)
    config.include('cornice')
    config.registry.queue_config = get_queue_config(settings)

    # `asbool` maps a missing value (None) to False, so the previous
    # separate `'noauthorization' in settings` membership test is redundant.
    bypass_auth = asbool(settings.get('noauthorization'))

    if not bypass_auth:
        config.include("pyramid_jwtauth")
        # Intercept request handling to validate token against the database
        config.add_tween('c2corg_api.jwt_database_validation_tween_factory')
        # Inject ACLs
        config.set_root_factory(RootFactory)
    else:
        log.warning('Bypassing authorization')

    configure_caches(settings)

    # Scan MUST be the last call otherwise ACLs will not be set
    # and the permissions would be bypassed
    config.scan(ignore='c2corg_api.tests')
    return config.make_wsgi_app()
def main(argv=sys.argv):
    """Run the syncer daemon: do an initial full sync, then consume the
    queue and apply incremental updates to the ElasticSearch index.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    cli_options = parse_vars(argv[2:])
    setup_logging(config_uri)

    # configure connections for Postgres, ElasticSearch and Redis
    settings = get_appsettings(config_uri, options=cli_options)
    Session = sessionmaker(  # noqa
        bind=engine_from_config(settings, 'sqlalchemy.'))
    configure_es_from_config(settings)
    queue_config = get_queue_config(settings)

    batch_size = int(settings.get('elasticsearch.batch_size.syncer', 1000))

    with queue_config.connection:
        try:
            worker = SyncWorker(
                queue_config.connection, queue_config.queue, batch_size,
                session_factory=Session)
            log.info('Syncer started, running initial sync')
            worker.sync()
            log.info('Waiting on messages')
            worker.run()
        except KeyboardInterrupt:
            log.info('Syncer stopped')
def main(argv=sys.argv):
    """Start the ElasticSearch syncer: initial sync followed by a blocking
    loop that processes change messages from the queue.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    extra_options = parse_vars(argv[2:])
    setup_logging(config_uri)

    # configure connections for Postgres, ElasticSearch and Redis
    settings = get_appsettings(config_uri, options=extra_options)
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    Session = sessionmaker(bind=db_engine)  # noqa
    configure_es_from_config(settings)
    queue_config = get_queue_config(settings)

    batch_size = int(settings.get('elasticsearch.batch_size.syncer', 1000))

    with queue_config.connection:
        try:
            worker = SyncWorker(
                queue_config.connection, queue_config.queue, batch_size,
                session_factory=Session)
            log.info('Syncer started, running initial sync')
            worker.sync()
            log.info('Waiting on messages')
            worker.run()
        except KeyboardInterrupt:
            log.info('Syncer stopped')
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    # Configure SQLAlchemy: bind the scoped session and the declarative
    # metadata to the engine built from the 'sqlalchemy.*' settings.
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)
    Base.metadata.bind = engine

    # Configure ElasticSearch
    configure_es_from_config(settings)

    config = Configurator(settings=settings)
    config.include('cornice')
    config.registry.queue_config = get_queue_config(settings)

    # `asbool` maps a missing value (None) to False, so the previous
    # separate `'noauthorization' in settings` membership test is redundant.
    bypass_auth = asbool(settings.get('noauthorization'))

    if not bypass_auth:
        config.include("pyramid_jwtauth")
        # Intercept request handling to validate token against the database
        config.add_tween('c2corg_api.jwt_database_validation_tween_factory')
        # Inject ACLs
        config.set_root_factory(RootFactory)
    else:
        log.warning('Bypassing authorization')

    # Scan MUST be the last call otherwise ACLs will not be set
    # and the permissions would be bypassed
    config.scan(ignore='c2corg_api.tests')
    return config.make_wsgi_app()
def main(argv=sys.argv):
    """Initialize the ElasticSearch index from the settings in the given
    configuration file.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    cli_vars = parse_vars(argv[2:])
    setup_logging(config_uri)
    app_settings = get_appsettings(config_uri, options=cli_vars)
    configure_es_from_config(app_settings)
    setup_es()
def main(argv=sys.argv):
    """Set up ElasticSearch using the paste-style config file given on the
    command line.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    overrides = parse_vars(argv[2:])
    setup_logging(config_uri)
    settings = get_appsettings(config_uri, options=overrides)
    configure_es_from_config(settings)
    setup_es()
def setup_package():
    """Package-level test setup: rebuild the database schema, load the
    shared fixtures, then recreate the ElasticSearch index.

    The order matters: the schema must exist before fixtures are inserted,
    and the session is removed before the ES work so no transaction is
    left open.
    """
    # set up database
    engine = get_engine()
    DBSession.configure(bind=engine)
    # Drop everything first so each test run starts from a clean schema.
    Base.metadata.drop_all(engine)
    initializedb.setup_db(engine, DBSession)
    # Add test data needed for all tests
    with transaction.manager:
        _add_global_test_data(DBSession)
    DBSession.remove()
    # set up ElasticSearch
    configure_es_from_config(settings)
    initializees.drop_index()
    initializees.setup_es()
def main(argv=sys.argv):
    """Rebuild the ElasticSearch index from the current database content."""
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    extra_vars = parse_vars(argv[2:])
    setup_logging(config_uri)

    # Silence SQLAlchemy's per-statement engine logging during the bulk run.
    logging.getLogger('sqlalchemy.engine').setLevel(logging.ERROR)

    settings = get_appsettings(config_uri, options=extra_vars)
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    session_factory = sessionmaker(extension=ZopeTransactionExtension())  # noqa
    db_session = session_factory(bind=db_engine)
    configure_es_from_config(settings)

    with transaction.manager:
        fill_index(db_session)
def setup_package():
    """Package-level test setup: rebuild the database schema and the
    ElasticSearch index, then load the shared fixtures into both.

    Unlike earlier variants, the ES index is prepared *before* the fixture
    transaction so `fill_index` can push the test data into it.
    """
    # set up database
    engine = get_engine()
    DBSession.configure(bind=engine)
    # Drop everything first so each test run starts from a clean schema.
    Base.metadata.drop_all(engine)
    initializedb.setup_db(engine, DBSession)
    # set up ElasticSearch
    configure_es_from_config(settings)
    initializees.drop_index()
    initializees.setup_es()
    # Add test data needed for all tests
    with transaction.manager:
        _add_global_test_data(DBSession)
        fill_index(DBSession)
    DBSession.remove()
def main(argv=sys.argv):
    """Fill the ElasticSearch index from the database, in batches whose
    size comes from the 'elasticsearch.batch_size.fill_index' setting.
    """
    if len(argv) < 2:
        usage(argv)
    config_uri = argv[1]
    extra_vars = parse_vars(argv[2:])
    setup_logging(config_uri)

    # Silence SQLAlchemy's per-statement engine logging during the bulk run.
    logging.getLogger('sqlalchemy.engine').setLevel(logging.ERROR)

    settings = get_appsettings(config_uri, options=extra_vars)
    db_engine = engine_from_config(settings, 'sqlalchemy.')
    session_factory = sessionmaker()  # noqa
    register(session_factory)
    db_session = session_factory(bind=db_engine)
    configure_es_from_config(settings)

    chunk_size = int(
        settings.get('elasticsearch.batch_size.fill_index', 1000))
    with transaction.manager:
        fill_index(db_session, chunk_size)
def setup_package():
    """Package-level test setup: reset the schema via alembic migrations,
    rebuild the ElasticSearch index, then load the shared fixtures.

    This variant uses alembic (downgrade to 'base', then `setup_db`)
    instead of `Base.metadata.drop_all`, so the migration scripts
    themselves are exercised on every test run.
    """
    # set up database
    engine = get_engine()
    DBSession.configure(bind=engine)
    alembic_config = _get_alembic_config()
    # Revert all migrations to start from an empty schema.
    downgrade(alembic_config, 'base')
    initializedb.setup_db(alembic_config, DBSession)
    # set up ElasticSearch
    configure_es_from_config(settings)
    initializees.drop_index()
    initializees.setup_es()
    # Add test data needed for all tests
    with transaction.manager:
        _add_global_test_data(DBSession)
        fill_index(DBSession)
    DBSession.remove()