def importer_depth_rrd():
    """Record the current import-queue depth into the rrd data set."""
    data_path = ini.get('rrd_data').format(here=HERE)
    graph_path = ini.get('rrd_graphs').format(here=HERE)
    rrd = ImportQueueDepth(data_path, graph_path)
    # Sample the queue size at this instant and persist it.
    rrd.mark(datetime.now(), ImportQueueMgr.size())
    rrd.update()
def count_rrd():
    """Record bookmark/tag totals into the system-counts rrd data set."""
    data_path = ini.get('rrd_data').format(here=HERE)
    graph_path = ini.get('rrd_graphs').format(here=HERE)
    rrd = SystemCounts(data_path, graph_path)
    # Sample total bookmarks, unique bookmarks, and tag count.
    rrd.mark(
        datetime.now(),
        BmarkMgr.count(),
        BmarkMgr.count(distinct=True),
        TagMgr.count())
    rrd.update()
def importer_depth_rrd():
    """Store a fresh sample of the import queue's depth in the rrd."""
    rrd = ImportQueueDepth(
        ini.get('rrd_data').format(here=HERE),
        ini.get('rrd_graphs').format(here=HERE))
    # One data point: (timestamp, pending-import count).
    queue_size = ImportQueueMgr.size()
    rrd.mark(datetime.now(), queue_size)
    rrd.update()
def count_rrd():
    """Store a fresh sample of the system's bookmark/tag counts in the rrd."""
    rrd = SystemCounts(
        ini.get('rrd_data').format(here=HERE),
        ini.get('rrd_graphs').format(here=HERE))
    # Gather each metric up front, then record them as one data point.
    total_bmarks = BmarkMgr.count()
    unique_bmarks = BmarkMgr.count(distinct=True)
    total_tags = TagMgr.count()
    rrd.mark(datetime.now(), total_bmarks, unique_bmarks, total_tags)
    rrd.update()
def generate_importer_depth_rrd():
    """Regenerate the png graph for the import-queue depth rrd."""
    data_path = ini.get('rrd_data').format(here=HERE)
    graph_path = ini.get('rrd_graphs').format(here=HERE)
    # Render the graph image from the stored data points.
    ImportQueueDepth(data_path, graph_path).output()
def generate_count_rrd():
    """Regenerate the png graph for the system-counts rrd."""
    rrd = SystemCounts(
        ini.get('rrd_data').format(here=HERE),
        ini.get('rrd_graphs').format(here=HERE))
    # Render the graph image from the stored data points.
    rrd.output()
HERE = dirname(dirname(dirname(__file__))) if ini is None: from bookie.bcelery.celeryd import load_ini ini = load_ini() bookie.bcelery.celery.conf.update( # List of modules to import when celery starts. CELERY_IMPORTS=("bookie.bcelery.tasks", ), CELERY_ENABLE_UTC=True, ## Result store settings. CELERY_RESULT_BACKEND=ini.get('celery_result_backend'), CELERY_RESULT_DBURI=ini.get('celery_result_dburi'), ## Broker settings. BROKER_TRANSPORT=ini.get('celeryd_broker_transport'), BROKER_HOST=ini.get('celery_broker_host'), # BROKER_URL = "amqp://*****:*****@localhost:5672//" ## Worker settings ## If you're doing mostly I/O you can have more processes, ## but if mostly spending CPU, try to keep it close to the ## number of CPUs on your machine. If not set, the number of CPUs/cores ## available will be used. CELERYD_CONCURRENCY=ini.get('celery_concurrency'), # CELERY_ANNOTATIONS = {"tasks.add": {"rate_limit": "10/s"}}
from bookie.bcelery import celery as mycelery from bookie.bcelery import ini HERE = dirname(dirname(dirname(__file__))) if ini is None: from bookie.bcelery.celeryd import load_ini ini = load_ini() bookie.bcelery.celery.conf.update( # List of modules to import when celery starts. CELERY_IMPORTS=("bookie.bcelery.tasks", ), CELERY_ENABLE_UTC=True, ## Result store settings. CELERY_RESULT_BACKEND=ini.get('celery_result_backend'), CELERY_RESULT_DBURI=ini.get('celery_result_dburi'), ## Broker settings. BROKER_TRANSPORT=ini.get('celeryd_broker_transport'), BROKER_HOST=ini.get('celery_broker_host'), # BROKER_URL = "amqp://*****:*****@localhost:5672//" ## Worker settings ## If you're doing mostly I/O you can have more processes, ## but if mostly spending CPU, try to keep it close to the ## number of CPUs on your machine. If not set, the number of CPUs/cores ## available will be used. CELERYD_CONCURRENCY=ini.get('celery_concurrency'), # CELERY_ANNOTATIONS = {"tasks.add": {"rate_limit": "10/s"}}