def start_worker(self, json_request):
  """ Starts taskqueue workers if they are not already running.

  A worker can be started on both a master and slave node.

  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  request = self.__parse_json_and_validate_tags(
    json_request, self.SETUP_WORKERS_TAGS)
  if 'error' in request:
    return json.dumps(request)

  app_id = self.__cleanse(request['app_id'])

  # NOTE(review): hostname is not referenced in the visible portion of this
  # method -- presumably used further down; confirm before removing.
  hostname = socket.gethostbyname(socket.gethostname())

  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)
  # Load the queue info and generate the Celery configuration/worker files.
  try:
    self.__queue_info_cache[app_id] = config.load_queues_from_file(app_id)
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
    config.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE)
  # 'except X as e' is valid on Python 2.6+ and required on Python 3,
  # unlike the legacy 'except X, e' form the original used.
  except ValueError as value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
def start_worker(self, json_request):
  """ Starts taskqueue workers if they are not already running.

  A worker can be started on both a master and slave node.

  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  request = self.__parse_json_and_validate_tags(
    json_request, self.SETUP_WORKERS_TAGS)
  # Lazy %-style args avoid formatting the message when INFO is disabled.
  logging.info("Start worker request: %s", request)
  if 'error' in request:
    return json.dumps(request)

  app_id = self.__cleanse(request['app_id'])

  # NOTE(review): hostname is not referenced in the visible portion of this
  # method -- presumably used further down; confirm before removing.
  hostname = appscale_info.get_private_ip()

  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)
  # Load the queue info and generate the Celery configuration/worker files.
  try:
    self.__queue_info_cache[app_id] = config.load_queues_from_file(app_id)
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
    config.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE)
  # 'except X as e' is valid on Python 2.6+ and required on Python 3,
  # unlike the legacy 'except X, e' form the original used.
  except ValueError as value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
def test_create_celery_worker_scripts(self):
  """ Verifies worker-script generation from both DB- and file-sourced
  queue configuration, with all file/datastore access mocked out. """
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
     and_return({TaskQueueConfig.QUEUE_INFO:
                 '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io) \
     .should_receive("exists").and_return(True)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()

  # Read the real templates before mocking read() again; 'with' guarantees
  # the handles are closed even if read() raises.
  template_dir = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), '..', '..', 'templates')
  with open(os.path.join(template_dir, 'header.py'), 'r') as header_file:
    file1 = header_file.read()
  with open(os.path.join(template_dir, 'task.py'), 'r') as task_file:
    file2 = task_file.read()

  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).and_return(
    file2)
  # assertEqual: assertEquals is a deprecated alias in unittest.
  self.assertEqual(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_DB),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
  self.assertEqual(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
def test_create_celery_worker_scripts(self):
  """ Verifies worker-script generation from both DB- and file-sourced
  queue configuration, with all file/datastore access mocked out. """
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
     and_return({TaskQueueConfig.QUEUE_INFO:
                 '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io) \
     .should_receive("exists").and_return(True)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()

  # Read the real templates before mocking read() again; 'with' guarantees
  # the handles are closed even if read() raises.
  template_dir = os.path.join(
    os.path.dirname(os.path.realpath(__file__)), '..', '..', 'templates')
  with open(os.path.join(template_dir, 'header.py'), 'r') as header_file:
    file1 = header_file.read()
  with open(os.path.join(template_dir, 'task.py'), 'r') as task_file:
    file2 = task_file.read()

  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).and_return(
    file2)
  # assertEqual: assertEquals is a deprecated alias in unittest.
  self.assertEqual(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_DB),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
  self.assertEqual(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')