Example #1
def get_num_workers():
    # Ask every live worker for its stats; the reply is None when no worker answers.
    stats_dict = ctrl.Control(app).inspect().stats()
    if stats_dict is None:
        return 0
    else:
        # Sum the pool size (max concurrency) reported by each responding worker.
        num_workers = 0
        for instance, stats in stats_dict.iteritems():
            num_workers += stats['pool']['max-concurrency']
    return num_workers
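The helper relies on Inspect.stats() replying with a mapping of worker hostnames to per-worker stats dicts (or None when no worker answers), so it simply sums each pool's 'max-concurrency'. The iteritems() call marks it as Python 2 code; under Python 3 it would be items(). A minimal Python 3 sketch of the same call, assuming ctrl is celery.app.control imported under that alias, a reachable broker (the URL below is a placeholder), and at least one running worker:

from celery import Celery
from celery.app import control as ctrl

app = Celery(broker='redis://localhost:6379/0')  # placeholder broker URL

stats_dict = ctrl.Control(app).inspect().stats()
# Reply shape (values illustrative): {'celery@host1': {'pool': {'max-concurrency': 4, ...}, ...}}
if stats_dict:
    for worker_name, stats in stats_dict.items():
        print(worker_name, stats['pool']['max-concurrency'])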
Example #2
 def wrapper(*args, **kwargs):
     try:
         func(*args, **kwargs)
     except Error:
         # If we receive any database-related exceptions, send SIGTERM--this may not be
         # necessary in every case, but it is safer
         logger.exception("Database error")
         rollbar.report_exc_info()
         _collector.increment("signal-force-shutdown")
         controller = control.Control(app=app_or_default())
         controller.shutdown()
     except Exception:
         logger.exception("Ranch signal error")
         rollbar.report_exc_info()
         _collector.increment("signal_error", value=1)
         raise
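For context, this is the inner function of an error-handling wrapper: database errors trigger a broadcast Control.shutdown() so every worker exits cleanly, while any other exception is logged and re-raised. A rough sketch of how such a decorator could be assembled; the name shutdown_on_db_error and the Error class are stand-ins, not the original project's code:

import functools
import logging

from celery.app import app_or_default, control

logger = logging.getLogger(__name__)


class Error(Exception):
    """Stand-in for the project's database error base class."""


def shutdown_on_db_error(func):  # hypothetical decorator name
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            func(*args, **kwargs)
        except Error:
            logger.exception("Database error")
            # Broadcast a warm shutdown to every worker on the default app's broker.
            control.Control(app=app_or_default()).shutdown()
        except Exception:
            logger.exception("Signal handler error")
            raise
    return wrapper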
Example #3
import logging

from celery import Task as CeleryTask
from celery.app import control
from mongoengine.queryset import DoesNotExist

from pulp.common.constants import RESOURCE_MANAGER_WORKER_NAME, SCHEDULER_WORKER_NAME
from pulp.common import constants, dateutils, tags
from pulp.server.async.celery_instance import celery, RESOURCE_MANAGER_QUEUE, \
    DEDICATED_QUEUE_EXCHANGE
from pulp.server.exceptions import PulpException, MissingResource, \
    NoWorkers, PulpCodedException, error_codes
from pulp.server.config import config
from pulp.server.db.model import Worker, ReservedResource, TaskStatus, \
    ResourceManagerLock, CeleryBeatLock
from pulp.server.managers.repo import _common as common_utils
from pulp.server.managers import factory as managers
from pulp.server.managers.schedule import utils

controller = control.Control(app=celery)
_logger = logging.getLogger(__name__)


class PulpTask(CeleryTask):
    """
    The ancestor of Celery tasks in Pulp. All Celery tasks should inherit from this object.

    It provides behavioral modifications to apply_async and __call__ to serialize and
    deserialize common object types which are not json serializable.
    """
    def _type_transform(self, value):
        """
        Transforms ObjectId types to str type and vice versa.

        Any ObjectId types present are serialized to a str.
Example #4
 def test_control_exchange__setting(self):
     self.app.conf.control_exchange = 'test_exchange'
     c = control.Control(self.app)
     assert c.mailbox.namespace == 'test_exchange'
Example #5
 def test_control_exchange__default(self):
     c = control.Control(self.app)
     assert c.mailbox.namespace == 'celery'
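Both tests pin down the same behaviour: the mailbox used for broadcast commands takes its exchange name from the app's control_exchange setting and falls back to 'celery'. A quick illustration outside the test suite; the broker URL is a placeholder and nothing is actually sent:

from celery import Celery
from celery.app import control

app = Celery(broker='memory://')
app.conf.control_exchange = 'my_control'   # hypothetical exchange name

c = control.Control(app)
print(c.mailbox.namespace)                 # -> 'my_control'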
Example #6
 def test_serializer(self):
     self.app.conf['task_serializer'] = 'test'
     self.app.conf['accept_content'] = ['test']
     assert control.Control(self.app).mailbox.serializer == 'test'
     assert control.Control(self.app).mailbox.accept == ['test']
Example #7
	def main_celery():
		from celery import group
		from celery import subtask
		from celery import Task
		from celery import Celery
		from celery.app import control
		
		njobs = 10
		if 'nt' in os.name:
			njobs = 2
		
		if len(sys.argv) > 1 and int(sys.argv[1]) > 0:
			njobs = int(sys.argv[1])
		if len(sys.argv) > 2:
			ncpus = int(sys.argv[2])

		start_time = time.time()
	# 
		print 'creating celery instance...'
		cw = 'redis://ew_tb_was_3p/'
		celery = Celery(backend=cw, broker=cw, config_source='celeryconfig')

		cc = control.Control()
		pong = cc.ping(timeout=3)
		if len(pong) == 0:
			print 'no worker node is available'
			sys.exit(1)
		
		print 'calling jobs... with workers:', len(pong)

		job = group([subtask('bottest_run', args=[i]) for i in xrange(njobs)])
# 		jobs = group([send_task("bottest_run", args=[i]).subtask() for i in xrange(njobs)])
		
		print 'job', njobs
		
		result = job.apply_async()
# 		result = job()
		print 'result'

		done = 0
		th = int(njobs * 0.01)
		th = max(th, 10)
		total_num = 0
		total_sum = 0.0
		total_avg = 0.0
		for r in result.iterate():
			done += 1
			if done % th == 0:
				print 'processed:', done
			total_num += r['total_num']
			total_sum += r['total_sum']

		results = result.join()
		print 'joining was done'
		
		verbose = True
		verbose = False
		
		ela = time.time() - start_time
		print "Time elapsed: ", ela, "s"
		print 'time per job:', ela / njobs
		if total_num > 0:
			total_avg = total_sum / total_num
		print 'operation total_num:', total_num
		print 'operation total_sum:', total_sum
		print 'operation total_avg:', total_avg
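Control.ping() on its own is a handy liveness probe: it returns one reply per responding worker, so an empty list means nothing is consuming from the broker. The guard used above can be written in a few lines; this is a Python 3 sketch with placeholder broker/backend URLs:

import sys

from celery import Celery
from celery.app import control

app = Celery(backend='redis://localhost/', broker='redis://localhost/')  # placeholder URLs

replies = control.Control(app).ping(timeout=3)
if not replies:
    print('no worker node is available')
    sys.exit(1)
print('workers responding:', len(replies))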