def start_worker(self, json_request):
  """ Starts taskqueue workers if they are not already running.
      A worker can be started on both a master and slave node.

  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  # Validation returns a dict containing an 'error' key on failure.
  request = self.__parse_json_and_validate_tags(json_request,
                                                self.SETUP_WORKERS_TAGS)
  if 'error' in request:
    return json.dumps(request)

  # Strip unsafe characters from the caller-supplied application id.
  app_id = self.__cleanse(request['app_id'])

  hostname = socket.gethostbyname(socket.gethostname())

  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)

  # Load the queue info and generate the celery configuration and
  # worker scripts from it; any malformed queue definition surfaces
  # as a ValueError which is reported back to the caller as JSON.
  try:
    config.load_queues_from_file(app_id)
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
    config.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE)
  except ValueError, value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
def start_worker(self, json_request):
  """ Starts taskqueue workers if they are not already running.
      A worker can be started on both a master and slave node.

  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  # Parse and validate the request; errors come back as a dict with
  # an 'error' key and are echoed to the caller unchanged.
  request = self.__parse_json_and_validate_tags(json_request,
                                                self.SETUP_WORKERS_TAGS)
  if 'error' in request:
    return json.dumps(request)

  # Sanitize the app id before it is used to build file paths/names.
  app_id = self.__cleanse(request['app_id'])

  hostname = socket.gethostbyname(socket.gethostname())

  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)

  # Load the queue info, then emit the celery config and worker
  # scripts; a bad queue definition raises ValueError.
  try:
    config.load_queues_from_file(app_id)
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
    config.create_celery_worker_scripts(
      TaskQueueConfig.QUEUE_INFO_FILE)
  except ValueError, value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
def test_load_queues_from_file(self):
  """ Queue definitions should be parsed from the queue YAML file,
  falling back to the default queue when the file is unreadable. """
  io_mock = flexmock(file_io)
  io_mock.should_receive("read").and_return(sample_queue_yaml)
  io_mock.should_receive("exists").and_return(True)
  io_mock.should_receive("write").and_return(None)
  io_mock.should_receive("mkdir").and_return(None)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  info = config.load_queues_from_file('app_id')
  expected = {'queue': [{'name': 'default', 'rate': '5/s'},
                        {'name': 'foo', 'rate': '10/m'}]}
  self.assertEquals(info, expected)

  # An unreadable file should yield only the default queue.
  flexmock(file_io).should_receive("read") \
    .and_return('blah').and_raise(IOError)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  info = config.load_queues_from_file('app_id')
  self.assertEquals(info, {'queue': [{'name': 'default', 'rate': '5/s'}]})

  # A file without a default queue gets one appended.
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  info = config.load_queues_from_file('app_id')
  expected = {'queue': [{'name': 'foo', 'rate': '10/m'},
                        {'name': 'default', 'rate': '5/s'}]}
  self.assertEquals(info, expected)
def test_load_queues_from_xml_file(self):
  """ Queue definitions should be parsed from an XML queue file. """
  io_mock = flexmock(file_io)
  io_mock.should_receive("read").and_return(sample_queue_xml)
  io_mock.should_receive("exists").and_return(False).and_return(True)
  io_mock.should_receive("write").and_return(None)
  io_mock.should_receive("mkdir").and_return(None)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  info = config.load_queues_from_file('app_id')
  default_queue = {'max_concurrent_requests': '300',
                   'rate': '100/s',
                   'bucket_size': '100',
                   'name': 'default',
                   'retry_parameters': {'task_age_limit': '3d'}}
  mapreduce_queue = {'max_concurrent_requests': '100',
                     'rate': '100/s',
                     'bucket_size': '100',
                     'name': 'mapreduce-workers',
                     'retry_parameters': {'task_age_limit': '3d'}}
  self.assertEquals(info, {'queue': [default_queue, mapreduce_queue]})
def reload_worker(self, json_request):
  """ Reloads taskqueue workers as needed.
      A worker can be started on both a master and slave node.

  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  # Validation failures come back as a dict containing 'error'.
  request = self.__parse_json_and_validate_tags(json_request,
                                                self.SETUP_WORKERS_TAGS)
  logging.info("Reload worker request: {0}".format(request))
  if 'error' in request:
    return json.dumps(request)

  # Strip unsafe characters from the caller-supplied application id.
  app_id = self.__cleanse(request['app_id'])

  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)

  # Index the cached (old) queues by name so they can be diffed
  # against the freshly-loaded definitions below.  Missing cache
  # entries are treated as an empty queue list.
  old_queues = self.__queue_info_cache.get(app_id, {'queue': []})
  old_queue_dict = {}
  for queue in old_queues['queue']:
    old_queue_dict[queue['name']] = queue

  new_queue_dict = {}

  # Load the new queue info; malformed definitions raise ValueError
  # which is reported back to the caller as JSON.
  try:
    new_queues = config.load_queues_from_file(app_id)
    for queue in new_queues['queue']:
      new_queue_dict[queue['name']] = queue
  except ValueError, value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
def test_create_celery_worker_scripts(self):
  """ Worker-script generation should produce the per-app celery
  worker file for both the DB and file queue sources.

  Fixes: the two template files were opened without context managers,
  leaking file handles if a read raised.
  """
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
                '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io) \
    .should_receive("exists").and_return(True)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()

  # Read the real templates with context managers so the handles are
  # closed even if a read fails.
  template_dir = os.path.dirname(os.path.realpath(__file__)) + \
    '/../../templates/'
  with open(template_dir + 'header.py', 'r') as header_template:
    file1 = header_template.read()
  with open(template_dir + 'task.py', 'r') as task_template:
    file2 = task_template.read()

  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).and_return(
    file2)
  self.assertEquals(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_DB),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
  self.assertEquals(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
def test_load_queues_from_xml_file(self):
  """ Verifies queue definitions parsed out of an XML queue file. """
  for method, result in (("read", sample_queue_xml),
                         ("write", None),
                         ("mkdir", None)):
    flexmock(file_io).should_receive(method).and_return(result)
  flexmock(file_io) \
    .should_receive("exists").and_return(False).and_return(True)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  loaded = config.load_queues_from_file('app_id')
  expected_queues = [
    {'max_concurrent_requests': '300',
     'rate': '100/s',
     'bucket_size': '100',
     'name': 'default',
     'retry_parameters': {'task_age_limit': '3d'}},
    {'max_concurrent_requests': '100',
     'rate': '100/s',
     'bucket_size': '100',
     'name': 'mapreduce-workers',
     'retry_parameters': {'task_age_limit': '3d'}},
  ]
  self.assertEquals(loaded, {'queue': expected_queues})
def test_load_queues(self):
  """ Loading queues from file and then from the datastore should
  both complete without raising. """
  io_mock = flexmock(file_io)
  io_mock.should_receive("read").and_return(sample_queue_yaml2)
  io_mock.should_receive("exists").and_return(True)
  io_mock.should_receive("write").and_return(None)
  io_mock.should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").and_return(
    {TaskQueueConfig.QUEUE_INFO:
     '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  config.load_queues_from_file('app_id')
  config.load_queues_from_db()
def test_load_queues_from_file(self):
  """ Covers YAML queue loading: a normal file, an unreadable file
  (default queue only), and a file missing the default queue. """
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml)
  flexmock(file_io).should_receive("exists").and_return(True)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  self.assertEquals(
    config.load_queues_from_file('app_id'),
    {'queue': [{'name': 'default', 'rate': '5/s'},
               {'name': 'foo', 'rate': '10/m'}]})

  # Reads that raise IOError fall back to the default queue alone.
  flexmock(file_io).should_receive("read") \
    .and_return('blah').and_raise(IOError)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  self.assertEquals(
    config.load_queues_from_file('app_id'),
    {'queue': [{'name': 'default', 'rate': '5/s'}]})

  # Files lacking a default queue have one appended after the rest.
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  self.assertEquals(
    config.load_queues_from_file('app_id'),
    {'queue': [{'name': 'foo', 'rate': '10/m'},
               {'name': 'default', 'rate': '5/s'}]})
def test_save_queues_to_db(self):
  """ Saving queues before any have been loaded must raise ValueError;
  after a successful load the save should complete.

  Fixes: the original used a bare `raise` with no active exception to
  fail the "should have raised" path, which raises a confusing
  TypeError in Python 2 instead of a clean test failure.
  assertRaises expresses the intent directly.
  """
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  flexmock(file_io) \
    .should_receive('exists').and_return(True)
  flexmock(datastore).should_receive("Put").\
    and_return()
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  # No queues loaded yet: saving must fail.
  self.assertRaises(ValueError, tqc.save_queues_to_db)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.save_queues_to_db()
def get_task_args(self, request): """ Gets the task args used when making a task web request. Args: request: A taskqueue_service_pb.TaskQueueAddRequest Returns: A dictionary used by a task worker. """ args = {} args['task_name'] = request.task_name() args['url'] = request.url() args['app_id'] = request.app_id() args['queue_name'] = request.queue_name() args['method'] = self.__method_mapping(request.method()) args['body'] = request.body() args['payload'] = request.payload() args['description'] = request.description() # Set defaults. args['max_retries'] = self.DEFAULT_MAX_RETRIES args['expires'] = self.__when_to_expire(request) args['max_retries'] = self.DEFAULT_MAX_RETRIES args['max_backoff_sec'] = self.DEFAULT_MAX_BACKOFF args['min_backoff_sec'] = self.DEFAULT_MIN_BACKOFF args['max_doublings'] = self.DEFAULT_MAX_DOUBLINGS # Load queue info into cache. if request.app_id() not in self.__queue_info_cache: try: config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, request.app_id()) self.__queue_info_cache[ request.app_id()] = config.load_queues_from_file( request.app_id()) except ValueError, value_error: logging.error("Unable to load queues for app id {0} using defaults."\ .format(request.app_id())) except NameError, name_error: logging.error("Unable to load queues for app id {0} using defaults."\ .format(request.app_id()))
def test_create_celery_file(self):
  """ Generating the celery config file for either queue source should
  return the per-app config path without raising. """
  io_mock = flexmock(file_io)
  io_mock.should_receive("read").and_return(sample_queue_yaml2)
  io_mock.should_receive("exists").and_return(True)
  io_mock.should_receive("write").and_return(None)
  io_mock.should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").and_return(
    {TaskQueueConfig.QUEUE_INFO:
     '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  config.load_queues_from_file('app_id')
  config.load_queues_from_db()
  expected_path = TaskQueueConfig.CELERY_CONFIG_DIR + "myapp" + ".py"
  # Making sure it does not throw an exception.
  self.assertEquals(config.create_celery_file(TaskQueueConfig.QUEUE_INFO_DB),
                    expected_path)
  self.assertEquals(config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE),
                    expected_path)
def get_task_args(self, request): """ Gets the task args used when making a task web request. Args: request: A taskqueue_service_pb.TaskQueueAddRequest Returns: A dictionary used by a task worker. """ args = {} args['task_name'] = request.task_name() args['url'] = request.url() args['app_id'] = request.app_id() args['queue_name'] = request.queue_name() args['method'] = self.__method_mapping(request.method()) args['body'] = request.body() args['payload'] = request.payload() args['description'] = request.description() # Set defaults. args['max_retries'] = self.DEFAULT_MAX_RETRIES args['expires'] = self.__when_to_expire(request) args['max_retries'] = self.DEFAULT_MAX_RETRIES args['max_backoff_sec'] = self.DEFAULT_MAX_BACKOFF args['min_backoff_sec'] = self.DEFAULT_MIN_BACKOFF args['max_doublings'] = self.DEFAULT_MAX_DOUBLINGS # Load queue info into cache. if request.app_id() not in self.__queue_info_cache: try: config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, request.app_id()) self.__queue_info_cache[request.app_id()] = config.load_queues_from_file( request.app_id()) except ValueError, value_error: logging.error("Unable to load queues for app id {0} using defaults."\ .format(request.app_id())) except NameError, name_error: logging.error("Unable to load queues for app id {0} using defaults."\ .format(request.app_id()))
def test_create_celery_file(self):
  """ The celery config path should be returned for both the DB and
  file queue sources, with no exception raised. """
  for method, result in (("read", sample_queue_yaml2),
                         ("exists", True),
                         ("write", None),
                         ("mkdir", None)):
    flexmock(file_io).should_receive(method).and_return(result)
  flexmock(datastore).should_receive("Get").and_return(
    {TaskQueueConfig.QUEUE_INFO:
     '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  config.load_queues_from_file('app_id')
  config.load_queues_from_db()
  # Making sure it does not throw an exception.
  expected = TaskQueueConfig.CELERY_CONFIG_DIR + "myapp" + ".py"
  self.assertEquals(
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_DB), expected)
  self.assertEquals(
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE), expected)
def test_create_celery_worker_scripts(self):
  """ Worker-script generation should return the per-app worker path
  for both the DB and file queue sources.

  Fixes: template files were opened with bare open()/close() pairs,
  leaking handles if read() raised; context managers close them
  unconditionally.
  """
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
                '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io) \
    .should_receive("exists").and_return(True)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()

  # Read the real template files; `with` guarantees the handles close.
  templates = os.path.dirname(os.path.realpath(__file__)) + \
    '/../../templates/'
  with open(templates + 'header.py', 'r') as header_template:
    file1 = header_template.read()
  with open(templates + 'task.py', 'r') as task_template:
    file2 = task_template.read()

  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).and_return(file2)
  self.assertEquals(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_DB),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
  self.assertEquals(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')