def test_create_celery_worker_scripts(self):
  flexmock(TaskQueueConfig).should_receive("get_celery_queue_name").\
    and_return("")
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)

  tqc = TaskQueueConfig('myapp')

  header_template = os.path.join(os.path.dirname(__file__), '../../appscale',
                                 'taskqueue', 'templates', 'header.py')
  with open(header_template) as header_template_file:
    file1 = header_template_file.read()

  task_template = os.path.join(os.path.dirname(__file__), '../../appscale',
                               'taskqueue', 'templates', 'task.py')
  with open(task_template) as task_template_file:
    file2 = task_template_file.read()

  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).\
    and_return(file2)

  self.assertEquals(tqc.create_celery_worker_scripts(),
                    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
def test_load_queues_from_file(self):
  self.maxDiff = None
  flexmock(file_io).should_receive("mkdir").and_return(None)
  app_id = 'myapp'

  # Test queue sample.
  flexmock(TaskQueueConfig).should_receive("get_queue_file_location").\
    and_return("/path/to/file")
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml)
  expected_info = [{
    'name': 'default',
    'rate': '5/s'
  }, {
    'name': 'foo',
    'rate': '10/m'
  }]
  expected_queues = {
    info['name']: PushQueue(info, app_id) for info in expected_info
  }
  flexmock(TaskQueueConfig).should_receive('load_queues_from_file').\
    and_return(expected_queues)
  tqc = TaskQueueConfig(app_id)
  self.assertEquals(tqc.queues, expected_queues)

  # Test queue sample 2.
  flexmock(TaskQueueConfig).should_receive("get_queue_file_location").\
    and_return("/path/to/file")
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  expected_info = [{
    'name': 'foo',
    'rate': '10/m'
  }, {
    'name': 'default',
    'rate': '5/s'
  }]
  expected_queues = {
    info['name']: PushQueue(info, app_id) for info in expected_info
  }
  flexmock(TaskQueueConfig).should_receive('load_queues_from_file').\
    and_return(expected_queues)
  tqc = TaskQueueConfig(app_id)
  self.assertEquals(tqc.queues, expected_queues)

  # Test without queues.
  flexmock(TaskQueueConfig).should_receive("get_queue_file_location").\
    and_return("")
  flexmock(file_io).should_receive("read").and_raise(IOError)
  expected_info = [{'name': 'default', 'rate': '5/s'}]
  expected_queues = {
    info['name']: PushQueue(info, app_id) for info in expected_info
  }
  flexmock(TaskQueueConfig).should_receive('load_queues_from_file').\
    and_return(expected_queues)
  tqc = TaskQueueConfig(app_id)
  self.assertEquals(tqc.queues, expected_queues)
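# The fixtures referenced above (sample_queue_yaml, sample_queue_yaml2) are not
# shown in this excerpt. A plausible definition, inferred from the
# expected_info dicts in these tests and the App Engine queue.yaml format,
# could look like the following; the actual fixtures in the test module may
# differ.
sample_queue_yaml = """
queue:
- name: default
  rate: 5/s
- name: foo
  rate: 10/m
"""

sample_queue_yaml2 = """
queue:
- name: foo
  rate: 10/m
- name: default
  rate: 5/s
"""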
def test_create_celery_file(self):
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("exists").and_return(True)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)

  tqc = TaskQueueConfig('myapp')

  # making sure it does not throw an exception
  self.assertEquals(tqc.create_celery_file(),
                    TaskQueueConfig.CELERY_CONFIG_DIR + "myapp" + ".py")
def test_create_celery_file(self):
  flexmock(TaskQueueConfig).should_receive("get_celery_queue_name").\
    and_return("")
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)

  tqc = TaskQueueConfig('myapp')

  # making sure it does not throw an exception
  self.assertEquals(tqc.create_celery_file(),
                    TaskQueueConfig.CELERY_CONFIG_DIR + "myapp" + ".py")
def test_load_queues_from_xml_file(self): flexmock(file_io).should_receive("mkdir").and_return(None) app_id = 'myapp' flexmock(TaskQueueConfig).should_receive("get_queue_file_location").\ and_return("/path/to/file") flexmock(file_io).should_receive("read").and_return(sample_queue_xml) expected_info = [{ 'max_concurrent_requests': '300', 'rate': '100/s', 'bucket_size': '100', 'name': 'default', 'retry_parameters': { 'task_age_limit': '3d' } }, { 'max_concurrent_requests': '100', 'rate': '100/s', 'bucket_size': '100', 'name': 'mapreduce-workers', 'retry_parameters': { 'task_age_limit': '3d' } }] expected_queues = { info['name']: PushQueue(info, app_id) for info in expected_info } flexmock(TaskQueueConfig).should_receive('load_queues_from_file').\ and_return(expected_queues) tqc = TaskQueueConfig(app_id) self.assertEquals(tqc.queues, expected_queues)
def test_load_queues_from_xml_file(self): flexmock(file_io).should_receive("read").and_return(sample_queue_xml) flexmock(file_io).should_receive("exists").and_return(False)\ .and_return(True) flexmock(file_io).should_receive("write").and_return(None) flexmock(file_io).should_receive("mkdir").and_return(None) app_id = 'myapp' tqc = TaskQueueConfig(app_id) expected_info = [{ 'max_concurrent_requests': '300', 'rate': '100/s', 'bucket_size': '100', 'name': 'default', 'retry_parameters': { 'task_age_limit': '3d' } }, { 'max_concurrent_requests': '100', 'rate': '100/s', 'bucket_size': '100', 'name': 'mapreduce-workers', 'retry_parameters': { 'task_age_limit': '3d' } }] expected_queues = { info['name']: PushQueue(info, app_id) for info in expected_info } self.assertEquals(tqc.queues, expected_queues)
def test_load_queues_from_file(self):
  # Queue file with two queues defined.
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml)
  flexmock(file_io).should_receive("exists").and_return(True)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)

  app_id = 'myapp'
  tqc = TaskQueueConfig(app_id)
  expected_info = [{
    'name': 'default',
    'rate': '5/s'
  }, {
    'name': 'foo',
    'rate': '10/m'
  }]
  expected_queues = {
    info['name']: PushQueue(info, app_id) for info in expected_info
  }
  self.assertEquals(tqc.queues, expected_queues)

  # An unreadable queue file falls back to the default queue only.
  flexmock(file_io).should_receive("read").and_raise(IOError)
  tqc = TaskQueueConfig(app_id)
  expected_info = [{'name': 'default', 'rate': '5/s'}]
  expected_queues = {
    info['name']: PushQueue(info, app_id) for info in expected_info
  }
  self.assertEquals(tqc.queues, expected_queues)

  # Second queue sample with the entries in reverse order.
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  tqc = TaskQueueConfig(app_id)
  expected_info = [{
    'name': 'foo',
    'rate': '10/m'
  }, {
    'name': 'default',
    'rate': '5/s'
  }]
  expected_queues = {
    info['name']: PushQueue(info, app_id) for info in expected_info
  }
  self.assertEquals(tqc.queues, expected_queues)
def test_create_celery_worker_scripts(self):
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(file_io).should_receive("exists").and_return(True)

  tqc = TaskQueueConfig('myapp')

  header_template = os.path.join(os.path.dirname(__file__), '../../appscale',
                                 'taskqueue', 'templates', 'header.py')
  with open(header_template) as header_template_file:
    file1 = header_template_file.read()

  task_template = os.path.join(os.path.dirname(__file__), '../../appscale',
                               'taskqueue', 'templates', 'task.py')
  with open(task_template) as task_template_file:
    file2 = task_template_file.read()

  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).\
    and_return(file2)

  self.assertEquals(tqc.create_celery_worker_scripts(),
                    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
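# Both create_celery_worker_scripts tests stub file_io.read to return the
# header.py template followed by the task.py template, which suggests the
# worker module is assembled by concatenating the two. A minimal sketch of that
# idea, assuming simple placeholder substitution: 'APP_ID' appears as a
# placeholder in the header template that follows, while 'QUEUE_NAME' and this
# build_worker_script helper are hypothetical; the real
# TaskQueueConfig.create_celery_worker_scripts is an AppScale internal and may
# work differently.
def build_worker_script(app_id, queue_names, header_src, task_src):
  """Builds one worker module: shared header plus one task stub per queue."""
  script = header_src.replace('APP_ID', app_id)
  for queue_name in queue_names:
    script += task_src.replace('QUEUE_NAME', queue_name)
  return script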
import os
import sys

from celery import Celery
from celery.utils.log import get_task_logger

from appscale.taskqueue.tq_config import TaskQueueConfig
from appscale.taskqueue.tq_lib import TASK_STATES

from google.appengine.runtime import apiproxy_errors
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_distributed
from google.appengine.api import datastore
from google.appengine.ext import db

# appscale_info, constants, and rabbitmq are assumed to be AppScale-internal
# helpers imported elsewhere in the generated worker script.

sys.path.append(TaskQueueConfig.CELERY_CONFIG_DIR)
sys.path.append(TaskQueueConfig.CELERY_WORKER_DIR)

app_id = 'APP_ID'
module_name = TaskQueueConfig.get_celery_worker_module_name(app_id)
celery = Celery(module_name, broker=rabbitmq.get_connection_string(),
                backend='amqp://')
celery.config_from_object('CELERY_CONFIGURATION')

logger = get_task_logger(__name__)

master_db_ip = appscale_info.get_db_master_ip()
connection_str = master_db_ip + ":" + str(constants.DB_SERVER_PORT)
ds_distrib = datastore_distributed.DatastoreDistributed(
  "appscaledashboard", connection_str, require_indexes=False)
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', ds_distrib)
os.environ['APPLICATION_ID'] = "appscaledashboard"

# This template header and tasks can be found in appscale/AppTaskQueue/templates
import logging
import os
import sys

from celery import Celery
from celery.utils.log import get_task_logger

from appscale.taskqueue.tq_config import TaskQueueConfig
from appscale.taskqueue.tq_lib import TASK_STATES

from google.appengine.runtime import apiproxy_errors
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_distributed
from google.appengine.api import datastore
from google.appengine.ext import db

# appscale_info, constants, and rabbitmq are assumed to be AppScale-internal
# helpers imported elsewhere in the generated worker script.

sys.path.append(TaskQueueConfig.CELERY_CONFIG_DIR)
sys.path.append(TaskQueueConfig.CELERY_WORKER_DIR)

app_id = 'APP_ID'
module_name = TaskQueueConfig.get_celery_worker_module_name(app_id)
celery = Celery(module_name, broker=rabbitmq.get_connection_string(),
                backend='amqp://')
celery.config_from_object('CELERY_CONFIGURATION')

logger = get_task_logger(__name__)
logger.setLevel(logging.INFO)

db_proxy = appscale_info.get_db_proxy()
connection_str = '{}:{}'.format(db_proxy, constants.DB_SERVER_PORT)
ds_distrib = datastore_distributed.DatastoreDistributed(
  "appscaledashboard", connection_str, require_indexes=False)
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', ds_distrib)
os.environ['APPLICATION_ID'] = "appscaledashboard"
def test_constructor(self):
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml)
  flexmock(TaskQueueConfig).should_receive('load_queues_from_file')
  TaskQueueConfig('myapp')