def setUp(self):
    """Attach a DEBUG-level AMQPHandler to a fresh logger and build the
    kombu connection, exchange, queue, consumer and producer used to
    observe the messages the handler publishes."""
    self.amqp_handler = logs.AMQPHandler(level=logging.DEBUG)
    self.amqp_handler.set_calc_info(None, None)

    self.log = logging.getLogger(self.LOGGER_NAME)
    self.log.setLevel(logging.DEBUG)
    self.log.addHandler(self.amqp_handler)

    amqp_cfg = config.get_section('amqp')
    self.connection = kombu.BrokerConnection(
        hostname=amqp_cfg.get('host'),
        userid=amqp_cfg['user'],
        password=amqp_cfg['password'],
        virtual_host=amqp_cfg['vhost'])
    self.channel = self.connection.channel()
    self.exchange = kombu.entity.Exchange(
        amqp_cfg['exchange'], type='topic', channel=self.channel)
    # Exclusive queue bound with the test routing key so we only see
    # messages produced by this test case.
    self.queue = kombu.entity.Queue(
        exchange=self.exchange, channel=self.channel,
        routing_key=self.ROUTING_KEY, exclusive=True)
    self.queue.queue_declare()
    self.queue.queue_bind()
    self.consumer = kombu.messaging.Consumer(
        self.channel, self.queue, no_ack=True, auto_declare=False)
    self.producer = kombu.messaging.Producer(
        self.channel, self.exchange, serializer='json')
def setUp(self):
    """Configure a DEBUG logger that publishes through an AMQPHandler and
    prepare kombu consumer/producer objects for inspecting its output."""
    handler = logs.AMQPHandler(level=logging.DEBUG)
    handler.set_calc_info(None, None)
    self.amqp_handler = handler

    logger = logging.getLogger(self.LOGGER_NAME)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    self.log = logger

    cfg = config.get_section('amqp')
    conn = kombu.BrokerConnection(hostname=cfg.get('host'),
                                  userid=cfg['user'],
                                  password=cfg['password'],
                                  virtual_host=cfg['vhost'])
    self.connection = conn
    self.channel = conn.channel()
    self.exchange = kombu.entity.Exchange(cfg['exchange'], type='topic',
                                          channel=self.channel)
    # Exclusive, auto-acked queue: each test sees only its own traffic.
    self.queue = kombu.entity.Queue(exchange=self.exchange,
                                    channel=self.channel,
                                    routing_key=self.ROUTING_KEY,
                                    exclusive=True)
    self.queue.queue_declare()
    self.queue.queue_bind()
    self.consumer = kombu.messaging.Consumer(self.channel, self.queue,
                                             no_ack=True,
                                             auto_declare=False)
    self.producer = kombu.messaging.Producer(self.channel, self.exchange,
                                             serializer='json')
def setUp(self):
    """Open a broker connection/channel, declare the topic exchange and
    create a JSON-serializing producer for the tests."""
    amqp_cfg = config.get_section("amqp")
    conn_params = {
        "hostname": amqp_cfg.get("host"),
        "userid": amqp_cfg["user"],
        "password": amqp_cfg["password"],
        "virtual_host": amqp_cfg["vhost"],
    }
    self.connection = kombu.BrokerConnection(**conn_params)
    self.channel = self.connection.channel()
    self.exchange = kombu.entity.Exchange(
        amqp_cfg["exchange"], type="topic", channel=self.channel)
    self.producer = kombu.messaging.Producer(
        self.channel, exchange=self.exchange, serializer="json")
def test_get_section_merely_calls_get_on_config_data_dict(self):
    """get_section() must simply delegate to ``config.cfg.get``.

    Fix: the original restored the patched ``config.cfg.get`` only on
    success; a failing assertion raised past the restore line and left the
    fake in place, breaking unrelated tests.  Restore it in a ``finally``
    block so the patch is undone unconditionally.
    """
    orig_method = config.cfg.get

    def fake_get(section):
        # Prove the section name is passed through verbatim.
        self.assertEqual("f@k3", section)
        return {"this": "is", "so": "fake"}

    config.cfg.get = fake_get
    try:
        self.assertEqual({"this": "is", "so": "fake"},
                         config.get_section("f@k3"))
    finally:
        # Always undo the monkey-patch, even when the assertion fails.
        config.cfg.get = orig_method
def setUp(self):
    """Create the broker connection, channel, topic exchange and a
    JSON producer used by this test case."""
    cfg = config.get_section('amqp')
    self.connection = kombu.BrokerConnection(
        hostname=cfg.get('host'),
        userid=cfg['user'],
        password=cfg['password'],
        virtual_host=cfg['vhost'])
    self.channel = self.connection.channel()
    self.exchange = kombu.entity.Exchange(
        cfg['exchange'], type='topic', channel=self.channel)
    self.producer = kombu.messaging.Producer(
        self.channel, exchange=self.exchange, serializer="json")
def test_get_section_merely_calls_get_on_config_data_dict(self):
    """get_section() must simply delegate to ``config.cfg.get``.

    Fix: restore the patched ``config.cfg.get`` in a ``finally`` block.
    The original restored it only after the assertion, so a test failure
    left the fake method installed and contaminated subsequent tests.
    """
    orig_method = config.cfg.get

    def fake_get(section):
        # Prove the section name is passed through verbatim.
        self.assertEqual("f@k3", section)
        return {"this": "is", "so": "fake"}

    config.cfg.get = fake_get
    try:
        self.assertEqual({"this": "is", "so": "fake"},
                         config.get_section("f@k3"))
    finally:
        # Always undo the monkey-patch, even when the assertion fails.
        config.cfg.get = orig_method
def amqp_connect():
    """
    Connect to the AMQP broker, using kombu and the default ('amqp')
    configuration section, and return a ``(connection, channel,
    exchange)`` tuple.  The topic exchange is declared before returning.
    """
    amqp_cfg = config.get_section("amqp")
    connection = kombu.BrokerConnection(hostname=amqp_cfg['host'],
                                        userid=amqp_cfg['user'],
                                        password=amqp_cfg['password'],
                                        virtual_host=amqp_cfg['vhost'])
    channel = connection.channel()
    exchange = kombu.entity.Exchange(amqp_cfg['exchange'], type='topic',
                                     channel=channel)
    exchange.declare()
    return connection, channel, exchange
def exchange_and_conn_args():
    """
    Helper to set up the direct exchange used for task communication and
    the keyword arguments needed to create a broker connection.

    :returns: an ``(exchange, conn_args)`` pair where ``conn_args`` is a
        dict of ``kombu.BrokerConnection`` keyword arguments.
    """
    cfg = config.get_section('amqp')
    exchange = kombu.Exchange(cfg['task_exchange'], type='direct')
    conn_args = dict(hostname=cfg['host'],
                     userid=cfg['user'],
                     password=cfg['password'],
                     virtual_host=cfg['vhost'])
    return exchange, conn_args
def get_client(**kwargs):
    """
    Return a redis kvs client connection for general OpenQuake engine
    calculation usage.

    PLEASE NOTE: the 'db' argument is automatically read from
    openquake.cfg; if specified in ``kwargs`` it will be overridden with
    the setting in openquake.cfg.
    """
    global __KVS_CONN_POOL
    if __KVS_CONN_POOL is None:
        # Lazily build a single shared pool the first time a client is
        # requested; the db index always comes from openquake.cfg.
        kvs_cfg = config.get_section("kvs")
        redis_db = int(config.get('kvs', 'redis_db'))
        __KVS_CONN_POOL = redis.ConnectionPool(
            max_connections=1, host=kvs_cfg["host"],
            port=int(kvs_cfg["port"]), db=redis_db)
    kwargs["connection_pool"] = __KVS_CONN_POOL
    return redis.Redis(**kwargs)
def get_client(**kwargs):
    """
    Return a redis kvs client connection for general OpenQuake engine
    calculation usage.

    PLEASE NOTE: the 'db' argument is automatically read from
    openquake.cfg; if specified in ``kwargs`` it will be overridden with
    the setting in openquake.cfg.
    """
    global __KVS_CONN_POOL
    if __KVS_CONN_POOL is None:
        cfg = config.get_section("kvs")
        # The db index always comes from openquake.cfg.
        db = int(config.get('kvs', 'redis_db'))
        pool = redis.ConnectionPool(max_connections=1,
                                    host=cfg["host"],
                                    port=int(cfg["port"]),
                                    db=db)
        __KVS_CONN_POOL = pool
    kwargs.update(connection_pool=__KVS_CONN_POOL)
    return redis.Redis(**kwargs)
# just in the case that are you using oq-engine from sources # with the rest of oq libraries installed into the system (or a # virtual environment) you must set this environment variable if os.environ.get("OQ_ENGINE_USE_SRCDIR"): sys.modules['openquake'].__dict__["__path__"].insert( 0, os.path.join(os.path.dirname(__file__), "openquake")) from openquake.engine.utils import config, get_core_modules from openquake import engine config.abort_if_no_config_available() sys.path.insert(0, os.path.dirname(__file__)) amqp = config.get_section("amqp") # RabbitMQ broker (default) BROKER_URL = 'amqp://%(user)s:%(password)s@%(host)s:%(port)s/%(vhost)s' % \ amqp # Redis broker (works only on Trusty) # BROKER_URL = 'redis://%(host)s:6379/0' % amqp # BROKER_POOL_LIMIT enables a connections pool so Celery can reuse # a single connection to RabbitMQ. Value 10 is the default from # Celery 2.5 where this feature is enabled by default. # Actually disabled because it's not stable in production. # See https://bugs.launchpad.net/oq-engine/+bug/1250402 BROKER_POOL_LIMIT = None # RabbitMQ result backend (default)
# OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. """Django settings for OpenQuake.""" from openquake.engine.utils import config # DEBUG = True DB_SECTION = config.get_section('database') def _db_cfg(db_name): """ Helper method to create db config items for the various roles and schemas. :param db_name: The name of the database configuration. Configurations for this name will be loaded from the site specific config file. If an item doesn't exist in the config file, a default value will be used instead. :returns: Configuration dict, structured like so:: {'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'openquake2', 'USER': '******', 'PASSWORD': '******',
# just in the case that are you using oq-engine from sources # with the rest of oq libraries installed into the system (or a # virtual environment) you must set this environment variable if os.environ.get("OQ_ENGINE_USE_SRCDIR"): sys.modules['openquake'].__dict__["__path__"].insert( 0, os.path.join(os.path.dirname(__file__), "openquake")) from openquake.engine.utils import config, get_core_modules from openquake.engine.calculators import hazard, risk config.abort_if_no_config_available() sys.path.insert(0, os.path.dirname(__file__)) amqp = config.get_section("amqp") BROKER_HOST = amqp.get("host") BROKER_PORT = int(amqp.get("port")) BROKER_USER = amqp.get("user") BROKER_PASSWORD = amqp.get("password") BROKER_VHOST = amqp.get("vhost") CELERY_RESULT_BACKEND = "amqp" # CELERY_ACKS_LATE and CELERYD_PREFETCH_MULTIPLIER settings help evenly # distribute tasks across the cluster. This configuration is intended # make worker processes reserve only a single task at any given time. # (The default settings for prefetching define that each worker process will # reserve 4 tasks at once. For long running calculations with lots of long, # heavy tasks, this greedy prefetching is not recommended and can result in
# # OpenQuake is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with OpenQuake. If not, see <http://www.gnu.org/licenses/>. import os from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS from openquake.engine.utils import config DB_SECTION = config.get_section('database') INSTALLED_APPS = ('openquake.server.db',) OQSERVER_ROOT = os.path.dirname(__file__) DEBUG = True TEMPLATE_DEBUG = DEBUG BASE_DIR = os.path.abspath(os.path.dirname(__file__)) TEMPLATE_CONTEXT_PROCESSORS += ( 'django.contrib.messages.context_processors.messages', 'openquake.server.utils.oq_server_context_processor', ) STATIC_URL = '/static/'