Example #1
def get_jvm_max_mem():
    """
    Determine what the JVM maximum memory size should be.

    :returns: the maximum JVM memory size considering the possible sources in
        the following order:

        * the value of the `OQ_JVM_MAXMEM` environment variable
        * the setting in the config file
        * a fixed default (`768` MB).
    """
    def str2int(a_dict, key):
        """Return `False` unless int(a_dict[key]) yields a valid integer."""
        if not a_dict:
            return False
        val = a_dict.get(key)
        if val is None:
            return False
        val = val.strip()
        try:
            val = int(val)
        except ValueError:
            return False
        else:
            return val

    result = str2int(os.environ, "OQ_JVM_MAXMEM")
    if result:
        return result

    result = str2int(config.get_section("java"), "max_mem")
    if result:
        return result

    return DEFAULT_JVM_MAX_MEM
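
A minimal stand-alone sketch of the same precedence (environment variable, then config file, then fixed default), using only the standard library. The OQ_JVM_MAXMEM name and the 768 MB default come from the docstring above; the config_section stand-in dict is an assumption for illustration.

import os

DEFAULT_JVM_MAX_MEM = 768  # MB, the fixed default mentioned in the docstring


def jvm_max_mem(config_section=None):
    """Sketch: env var first, then config section, then the default."""
    for candidate in (os.environ.get("OQ_JVM_MAXMEM"),
                      (config_section or {}).get("max_mem")):
        try:
            return int(candidate)
        except (TypeError, ValueError):
            continue
    return DEFAULT_JVM_MAX_MEM


print(jvm_max_mem({"max_mem": "1024"}))  # -> 1024 unless OQ_JVM_MAXMEM is set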
Example #2
def _db_cfg(db_name):
    """
    Helper method to create db config items for the various roles and schemas.

    :param db_name: The name of the database configuration. Configurations for
        this name will be loaded from the site specific config file. If an item
        doesn't exist in the config file, a default value will be used instead.

    :returns: Configuration dict, structured like so::

        {'ENGINE': 'django.contrib.gis.db.backends.postgis',
         'NAME': 'openquake',
         'USER': '******',
         'PASSWORD': '******',
         'HOST': 'localhost',
         'PORT': '5432',
        }
    """
    db_section = config.get_section("database")

    return dict(
        ENGINE="django.contrib.gis.db.backends.postgis",
        NAME=db_section.get("name", "openquake"),
        USER=db_section.get("%s_user" % db_name, "openquake"),
        PASSWORD=db_section.get("%s_password" % db_name, ""),
        HOST=db_section.get("host", ""),
        PORT=db_section.get("port", ""),
    )
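
A brief usage sketch: each dict returned by _db_cfg can be dropped straight into Django's DATABASES setting, one entry per database role. The role names used here ('admin', 'job_init') are illustrative assumptions, not taken from the snippet.

# Hypothetical settings.py fragment; role names are placeholders.
DATABASES = {
    'default': _db_cfg('admin'),
    'job_init': _db_cfg('job_init'),
}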
Example #3
File: java.py Project: kpanic/openquake
def get_jvm_max_mem():
    """
    Determine what the JVM maximum memory size should be.

    :returns: the maximum JVM memory size considering the possible sources in
        the following order:

        * the value of the `OQ_JVM_MAXMEM` environment variable
        * the setting in the config file
        * a fixed default (`768` MB).
    """

    def str2int(a_dict, key):
        """Return `False` unless int(a_dict[key]) yields a valid integer."""
        if not a_dict:
            return False
        val = a_dict.get(key)
        if val is None:
            return False
        val = val.strip()
        try:
            val = int(val)
        except ValueError:
            return False
        else:
            return val

    result = str2int(os.environ, "OQ_JVM_MAXMEM")
    if result:
        return result

    result = str2int(config.get_section("java"), "max_mem")
    if result:
        return result

    return DEFAULT_JVM_MAX_MEM
Example #4
    def setUp(self):
        self.amqp_handler = logs.AMQPHandler(level=logging.DEBUG)
        self.amqp_handler.set_job_id(None)

        self.log = logging.getLogger(self.LOGGER_NAME)
        self.log.setLevel(logging.DEBUG)
        self.log.addHandler(self.amqp_handler)

        cfg = config.get_section('amqp')
        self.connection = kombu.BrokerConnection(hostname=cfg.get('host'),
                                                 userid=cfg['user'],
                                                 password=cfg['password'],
                                                 virtual_host=cfg['vhost'])
        self.channel = self.connection.channel()
        self.exchange = kombu.entity.Exchange(cfg['exchange'], type='topic',
                                              channel=self.channel)
        self.queue = kombu.entity.Queue(exchange=self.exchange,
                                        channel=self.channel,
                                        routing_key=self.ROUTING_KEY,
                                        exclusive=True)
        self.queue.queue_declare()
        self.queue.queue_bind()
        self.consumer = kombu.messaging.Consumer(
            self.channel, self.queue, no_ack=True, auto_declare=False)
        self.producer = kombu.messaging.Producer(self.channel, self.exchange,
                                                 serializer='json')
Example #5
    def setUp(self):
        self.amqp_handler = logs.AMQPHandler(level=logging.DEBUG)
        self.amqp_handler.set_job_id(None)

        self.log = logging.getLogger(self.LOGGER_NAME)
        self.log.setLevel(logging.DEBUG)
        self.log.addHandler(self.amqp_handler)

        cfg = config.get_section('amqp')
        self.connection = kombu.BrokerConnection(hostname=cfg.get('host'),
                                                 userid=cfg['user'],
                                                 password=cfg['password'],
                                                 virtual_host=cfg['vhost'])
        self.channel = self.connection.channel()
        self.exchange = kombu.entity.Exchange(cfg['exchange'],
                                              type='topic',
                                              channel=self.channel)
        self.queue = kombu.entity.Queue(exchange=self.exchange,
                                        channel=self.channel,
                                        routing_key=self.ROUTING_KEY,
                                        exclusive=True)
        self.queue.queue_declare()
        self.queue.queue_bind()
        self.consumer = kombu.messaging.Consumer(self.channel,
                                                 self.queue,
                                                 no_ack=True,
                                                 auto_declare=False)
        self.producer = kombu.messaging.Producer(self.channel,
                                                 self.exchange,
                                                 serializer='json')
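
A hedged sketch of how a test built on this fixture might round-trip one log record: publish through the Python logger (whose AMQPHandler routes it to the exchange) and drain a single message from the bound queue. The test name, the one-second timeout and the assertion are assumptions; register_callback(), consume() and drain_events() are ordinary kombu calls.

    def test_log_record_reaches_the_queue(self):
        # Hypothetical test body; relies on the fixture from setUp above.
        received = []
        self.consumer.register_callback(
            lambda body, message: received.append(body))
        self.consumer.consume()

        self.log.info('an informational message')

        self.connection.drain_events(timeout=1)
        self.assertEqual(1, len(received))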
Example #6
def declare_and_bind_queue(job_id, levels, name=''):
    """
    Create an amqp queue for sending/receiving messages and bind it to the
    exchange of the specific job and levels.

    It is safe to call this function more than once.  If the exchange, queue or
    bindings already exist, this function won't create them again.

    :param job_id: the id of the job
    :type job_id: int
    :param levels: the signalling levels, e.g. 'ERROR'
    :type levels: iterable of strings
    :param name: the name for the queue, '' (empty string) to give the queue an
                 automatically generated name
    :type name: string
    :return: the name of the created queue
    :rtype: string
    """
    cfg = config.get_section("amqp")

    conn, chn = connect()

    name, _, _ = chn.queue_declare(queue=name, auto_delete=False)

    for level in levels:
        chn.queue_bind(name, cfg['exchange'],
                       routing_key=generate_routing_key(job_id, level))

    chn.close()
    conn.close()

    return name
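
generate_routing_key() is not shown in these snippets. Judging from the 'log.%s.%s' % (level, job_id) keys bound by the supervisor code in Example #21, a plausible (purely hypothetical) sketch is:

def generate_routing_key(job_id, level):
    """Hypothetical sketch; the real openquake implementation may differ."""
    return 'log.%s.%s' % (level, job_id)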
Example #7
def get_client(**kwargs):
    """Return a redis kvs client connection object."""
    global __KVS_CONN_POOL
    if __KVS_CONN_POOL is None:
        cfg = config.get_section("kvs")
        __KVS_CONN_POOL = redis.ConnectionPool(max_connections=1, host=cfg["host"], port=int(cfg["port"]))
    kwargs.update({"connection_pool": __KVS_CONN_POOL})
    return redis.Redis(**kwargs)
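
A short usage sketch, assuming a Redis server is reachable at the host/port configured in the 'kvs' section; the key name is illustrative. Because the pool is created once at module level, repeated get_client() calls share the same connection pool.

# Usage sketch (assumes a reachable Redis server; the key is illustrative).
client = get_client()
client.set('oq:example:key', 'value')
print(client.get('oq:example:key'))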
Example #8
    def setUp(self):
        cfg = config.get_section("amqp")
        self.connection = kombu.BrokerConnection(
            hostname=cfg.get("host"), userid=cfg["user"], password=cfg["password"], virtual_host=cfg["vhost"]
        )
        self.channel = self.connection.channel()
        self.exchange = kombu.entity.Exchange(cfg["exchange"], type="topic", channel=self.channel)
        self.producer = kombu.messaging.Producer(self.channel, exchange=self.exchange, serializer="json")
Example #9
def exchange_and_conn_args():
    """
    Helper method to set up an exchange for task communication and the args
    needed to create a broker connection.
    """

    exchange = kombu.Exchange(
        config.get_section('hazard')['task_exchange'], type='direct')

    amqp_cfg = config.get_section('amqp')
    conn_args = {
        'hostname': amqp_cfg['host'],
        'userid': amqp_cfg['user'],
        'password': amqp_cfg['password'],
        'virtual_host': amqp_cfg['vhost'],
    }

    return exchange, conn_args
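
A sketch of how the returned pair might be used to publish a task message, reusing only constructs that already appear in these snippets (BrokerConnection keyword arguments, channel(), Producer with a JSON serializer); the routing key and payload are illustrative.

# Usage sketch; the routing key and payload are illustrative values.
exchange, conn_args = exchange_and_conn_args()
connection = kombu.BrokerConnection(**conn_args)
channel = connection.channel()
producer = kombu.messaging.Producer(channel, exchange=exchange,
                                    serializer='json')
producer.publish({'task': 'example'}, routing_key='tasks')
connection.close()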
Example #10
def get_client(**kwargs):
    """Return a redis kvs client connection object."""
    global __KVS_CONN_POOL
    if __KVS_CONN_POOL is None:
        cfg = config.get_section("kvs")
        __KVS_CONN_POOL = redis.ConnectionPool(max_connections=1,
                                               host=cfg["host"],
                                               port=int(cfg["port"]))
    kwargs.update({"connection_pool": __KVS_CONN_POOL})
    return redis.Redis(**kwargs)
Example #11
    def test_get_section_merely_calls_get_on_config_data_dict(self):
        "config.get_section() merely makes use of Config().get()" ""
        orig_method = config.Config().get

        def fake_get(section):
            self.assertEqual("f@k3", section)
            return {"this": "is", "so": "fake"}

        config.Config().get = fake_get
        self.assertEqual({"this": "is", "so": "fake"}, config.get_section("f@k3"))
        config.Config().get = orig_method
Example #12
    def setUp(self):
        cfg = config.get_section('amqp')
        self.connection = kombu.BrokerConnection(hostname=cfg.get('host'),
                                                 userid=cfg['user'],
                                                 password=cfg['password'],
                                                 virtual_host=cfg['vhost'])
        self.channel = self.connection.channel()
        self.exchange = kombu.entity.Exchange(cfg['exchange'], type='topic',
                                              channel=self.channel)
        self.producer = kombu.messaging.Producer(self.channel,
                                                 exchange=self.exchange,
                                                 serializer="json")
Example #13
def amqp_connect():
    """
    Connect to the amqp broker with kombu using the default configuration
    and return connection, channel and exchange as a tuple.
    """
    cfg = config.get_section("amqp")
    connection = kombu.BrokerConnection(
        hostname=cfg["host"], userid=cfg["user"], password=cfg["password"], virtual_host=cfg["vhost"]
    )
    channel = connection.channel()
    exchange = kombu.entity.Exchange(cfg["exchange"], type="topic", channel=channel)
    exchange.declare()
    return connection, channel, exchange
Example #14
    def setUp(self):
        cfg = config.get_section('amqp')
        self.connection = kombu.BrokerConnection(hostname=cfg.get('host'),
                                                 userid=cfg['user'],
                                                 password=cfg['password'],
                                                 virtual_host=cfg['vhost'])
        self.channel = self.connection.channel()
        self.exchange = kombu.entity.Exchange(cfg['exchange'],
                                              type='topic',
                                              channel=self.channel)
        self.producer = kombu.messaging.Producer(self.channel,
                                                 exchange=self.exchange,
                                                 serializer="json")
Example #15
    def test_get_section_merely_calls_get_on_config_data_dict(self):
        "config.get_section() merely makes use of Config().get()" ""
        orig_method = config.Config().get

        def fake_get(section):
            self.assertEqual("f@k3", section)
            return {"this": "is", "so": "fake"}

        config.Config().get = fake_get
        self.assertEqual({
            "this": "is",
            "so": "fake"
        }, config.get_section("f@k3"))
        config.Config().get = orig_method
Example #16
def amqp_connect():
    """
    Connect to the amqp broker with kombu using the default configuration
    and return connection, channel and exchange as a tuple.
    """
    cfg = config.get_section("amqp")
    connection = kombu.BrokerConnection(hostname=cfg['host'],
                                        userid=cfg['user'],
                                        password=cfg['password'],
                                        virtual_host=cfg['vhost'])
    channel = connection.channel()
    exchange = kombu.entity.Exchange(cfg['exchange'], type='topic',
                                     channel=channel)
    exchange.declare()
    return connection, channel, exchange
Example #17
def get_client(**kwargs):
    """
    Return a redis kvs client connection for general OpenQuake engine
    calculation usage.

    PLEASE NOTE: The 'db' argument is read automatically from openquake.cfg
    and set. If 'db' is specified in ``kwargs``, it will be overridden with
    the setting from openquake.cfg.
    """
    global __KVS_CONN_POOL
    if __KVS_CONN_POOL is None:
        cfg = config.get_section("kvs")
        # get the default db from the openquake.cfg:
        db = int(config.get('kvs', 'redis_db'))
        __KVS_CONN_POOL = redis.ConnectionPool(
            max_connections=1, host=cfg["host"], port=int(cfg["port"]), db=db)
    kwargs.update({"connection_pool": __KVS_CONN_POOL})
    return redis.Redis(**kwargs)
Example #18
def signal_job_outcome(job_id, outcome):
    """
    Send an amqp message to publish the outcome of a job.

    :param job_id: the id of the job
    :type job_id: int
    :param outcome: the outcome of the job, 'succeeded' or 'failed'
    :type outcome: string
    """
    cfg = config.get_section("amqp")

    conn, chn = connect()

    chn.basic_publish(amqp.Message(), exchange=cfg['exchange'],
                      routing_key=generate_routing_key(job_id, outcome))

    chn.close()
    conn.close()
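
A short usage sketch combining this with declare_and_bind_queue() from Example #6: a supervisor binds a queue to both outcome levels before the job runner signals one of them. The job id is an illustrative value.

# Usage sketch; the job id and outcome are illustrative values.
outcome_queue = declare_and_bind_queue(42, ('succeeded', 'failed'))
signal_job_outcome(42, 'succeeded')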
Example #19
def connect():
    """
    Create an amqp channel for signalling using the parameters from
    openquake.cfg.

    Create the exchange too if it doesn't exist yet.

    :return: the tuple (connection, channel)
    """
    cfg = config.get_section("amqp")

    conn = amqp.Connection(host=cfg['host'],
                           userid=cfg['user'],
                           password=cfg['password'],
                           virtual_host=cfg['vhost'])
    chn = conn.channel()
    # I use the vhost as a realm, which seems to be an arbitrary string
    chn.access_request(cfg['vhost'], active=True, read=True, write=True)
    chn.exchange_declare(cfg['exchange'], 'topic', auto_delete=False)

    return conn, chn
Example #20
def init_logs_amqp(level):
    """Init Python and Java logging to log to AMQP"""

    logging_level = LEVELS.get(level, 'warn')

    # loggers are organized in a hierarchy with the root logger at the
    # top; by default log messages are handled first by the logger
    # that receives the .info/.warn/etc. call and then in turn by all
    # of its ancestors (up to the root logger)
    #
    # setting .propagate to False prevents log messages coming from
    # amqplib from being propagated up the logger chain to the root
    # logger, which would then try to use the AMQP appender to log and
    # (potentially) cause an infinite loop
    amqp_log = logging.getLogger("amqplib")
    amqp_log.propagate = False

    # initialize Python logging
    found = any(isinstance(hdlr, AMQPHandler) for hdlr in LOG.handlers)

    amqp_cfg = config.get_section("amqp")

    if not found:
        hdlr = AMQPHandler(
            host=amqp_cfg.get("host"),
            username=amqp_cfg.get("user"),
            password=amqp_cfg.get("password"),
            virtual_host=amqp_cfg.get("vhost"),
            exchange=amqp_cfg.get("exchange"),
            routing_key='log.%(loglevel)s.%(job_id)s',
            level=logging.DEBUG)

        hdlr.setFormatter(
            logging.Formatter(LOGGING_AMQP_FORMAT, None))
        LOG.addHandler(hdlr)

    LOG.setLevel(logging_level)
    RISK_LOG.setLevel(logging_level)
    HAZARD_LOG.setLevel(logging_level)
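
LEVELS is not defined in these snippets. A hypothetical sketch of the name-to-level mapping that init_logs_amqp assumes (lower-case keys, matching the 'warn' default above) could be:

import logging

# Hypothetical sketch of the LEVELS mapping used above; the real mapping
# in openquake may differ.
LEVELS = {'debug': logging.DEBUG,
          'info': logging.INFO,
          'warn': logging.WARNING,
          'error': logging.ERROR,
          'critical': logging.CRITICAL}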
Example #21
    def __init__(self, job_id, levels=None, timeout=None):
        """
        :param job_id: the id of the job whose logging messages we are
                       interested in
        :type job_id: int
        :param levels: the logging levels we are interested in
        :type levels: None for all the levels (translated to a '*' in the
                      routing_key) or an iterable of strings
                      (e.g. ['ERROR', 'CRITICAL'])
        :param timeout: the optional timeout in seconds. When it expires the
                        `timeout_callback` will be called.
        :type timeout: None or float
        """

        self.timeout = timeout

        cfg = config.get_section("amqp")

        self.conn = amqp.Connection(host=cfg['host'],
                                    userid=cfg['user'],
                                    password=cfg['password'],
                                    virtual_host=cfg['vhost'])
        self.chn = self.conn.channel()
        # I use the vhost as a realm, which seems to be an arbitrary string
        self.chn.access_request(cfg['vhost'], active=False, read=True)
        self.chn.exchange_declare(cfg['exchange'], 'topic', auto_delete=True)

        self.qname = 'supervisor-%s' % job_id
        self.chn.queue_declare(self.qname)

        if levels is None:
            levels = ('*',)

        for level in levels:
            self.chn.queue_bind(self.qname, cfg['exchange'],
                                routing_key='log.%s.%s' % (level, job_id))
Example #22
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake.  If not, see <http://www.gnu.org/licenses/>.
"""Django settings for OpenQuake."""

from openquake.utils import config

# DEBUG = True
DB_SECTION = config.get_section('database')


def _db_cfg(db_name):
    """
    Helper method to create db config items for the various roles and schemas.

    :param db_name: The name of the database configuration. Configurations for
        this name will be loaded from the site specific config file. If an item
        doesn't exist in the config file, a default value will be used instead.

    :returns: Configuration dict, structured like so::
        {'ENGINE': 'django.db.backends.postgresql_psycopg2',
         'NAME': 'openquake',
         'USER': '******',
         'PASSWORD': '******',
Example #23
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake.  If not, see <http://www.gnu.org/licenses/>.


"""Django settings for OpenQuake."""

from openquake.utils import config


# DEBUG = True
DB_SECTION = config.get_section('database')


def _db_cfg(db_name):
    """
    Helper method to create db config items for the various roles and schemas.

    :param db_name: The name of the database configuration. Configurations for
        this name will be loaded from the site specific config file. If an item
        doesn't exist in the config file, a default value will be used instead.

    :returns: Configuration dict, structured like so::
        {'ENGINE': 'django.db.backends.postgresql_psycopg2',
         'NAME': 'openquake',
         'USER': '******',
         'PASSWORD': '******',
Example #24
"""
Config for all installed OpenQuake binaries and modules.
Should be installed by setup.py into /etc/openquake
eventually.
"""

import os
import sys

from openquake.utils import config


sys.path.insert(0, os.path.dirname(__file__))

amqp = config.get_section("amqp")

BROKER_HOST = amqp.get("host")
BROKER_PORT = int(amqp.get("port"))
BROKER_USER = amqp.get("user")
BROKER_PASSWORD = amqp.get("password")
BROKER_VHOST = amqp.get("vhost")

CELERY_RESULT_BACKEND = "amqp"


CELERY_IMPORTS = (
    "openquake.risk.job", "openquake.hazard.tasks", "tests.utils.tasks")

os.environ["DJANGO_SETTINGS_MODULE"] = "openquake.settings"
Example #25
"""
Config for all installed OpenQuake binaries and modules.
Should be installed by setup.py into /etc/openquake
eventually.
"""

import os
import sys

from openquake.utils import config

config.abort_if_no_config_available()

sys.path.insert(0, os.path.dirname(__file__))

amqp = config.get_section("amqp")

BROKER_HOST = amqp.get("host")
BROKER_PORT = int(amqp.get("port"))
BROKER_USER = amqp.get("user")
BROKER_PASSWORD = amqp.get("password")
BROKER_VHOST = amqp.get("vhost")

CELERY_RESULT_BACKEND = "amqp"

CELERY_IMPORTS = ("openquake.calculators.hazard.classical.core",
                  "openquake.calculators.hazard.disagg.core",
                  "openquake.calculators.hazard.disagg.subsets",
                  "openquake.calculators.hazard.uhs.core",
                  "openquake.calculators.risk.general", "tests.utils.tasks")