def create_celery_worker_scripts(self):
  """ Creates the task worker python script from templates.

  The header template has its APP_ID and CELERY_CONFIGURATION placeholders
  filled in, then one task function is appended per push queue.

  Returns:
    The full path of the worker script.
  """
  header = file_io.read(self.HEADER_LOC).replace("APP_ID", self._app_id)
  task_template = file_io.read(self.TASK_LOC)

  # Accumulate script sections and join once at the end.
  sections = [header.replace("CELERY_CONFIGURATION", self._app_id)]
  for _, queue in self.queues.iteritems():
    # Celery only handles push queues.
    if not isinstance(queue, PushQueue):
      continue

    # The queue name is used as a function name so replace invalid chars.
    function_name = self.get_queue_function_name(queue.name.replace('-', '_'))
    task_code = task_template.replace("QUEUE_NAME", function_name)

    # For tasks generated by mapreduce, or destined to be run by a module,
    # the hostname may have a prefix that corresponds to a different
    # subdomain.
    # AppScale does not support that type of routing currently, so the main
    # loadbalancer IP/hostname is used here for the execution of a task.
    task_code = task_code.replace(
      "PUBLIC_IP", "\"{}\"".format(self.get_public_ip()))
    sections.append(task_code)

  worker_file = self.get_celery_worker_script_path(self._app_id)
  file_io.write(worker_file, '\n'.join(sections) + '\n')
  return worker_file
def __init__(self, persist=False, logs_path=None, request_data=None):
  """Initializer.

  Args:
    persist: For backwards compatability. Has no effect.
    logs_path: A str containing the filename to use for logs storage.
      Defaults to in-memory if unset.
    request_data: A apiproxy_stub.RequestData instance used to look up
      state associated with the request that generated an API call.
  """
  super(LogServiceStub, self).__init__(
    'logservice', request_data=request_data)
  # One pending RequestLog per request id, created lazily.
  self._pending_requests = defaultdict(logging_capnp.RequestLog.new_message)
  self._pending_requests_applogs = {}
  self._log_server = defaultdict(Queue)
  # Get head node private ip from /etc/appscale/head_node_private_ip.
  self._log_server_ip = file_io.read(
    "/etc/appscale/head_node_private_ip").rstrip()
  # ELK forwarding is enabled when the marker file exists on this node.
  self.is_elk_enabled = os.path.exists('/etc/appscale/elk-enabled')
  if self.is_elk_enabled:
    self._requests_logger = RequestsLogger()
    self._requests_logger.start()
  else:
    self._requests_logger = None
def get_connection_string():
  """ Reads from the local FS to get the RabbitMQ location to connect to.

  Returns:
    A string representing the location of RabbitMQ.
  """
  # Only the first line of the location file is the broker host.
  broker_host = file_io.read(RABBITMQ_LOCATION_FILE).split('\n')[0]
  return 'amqp://*****:*****@{host}:{port}//'.format(
    host=broker_host, port=RABBITMQ_PORT)
def __init__(self, persist=False, logs_path=None, request_data=None):
  """Initializer.

  Args:
    persist: For backwards compatability. Has no effect.
    logs_path: A str containing the filename to use for logs storage.
      Defaults to in-memory if unset.
    request_data: A apiproxy_stub.RequestData instance used to look up
      state associated with the request that generated an API call.
  """
  super(LogServiceStub, self).__init__(
    'logservice', request_data=request_data)
  # One pending RequestLog per request id, created on first access.
  self._pending_requests = defaultdict(logging_capnp.RequestLog.new_message)
  self._pending_requests_applogs = {}
  self._log_server = defaultdict(Queue)
  # Get head node private ip from /etc/appscale/head_node_private_ip.
  self._log_server_ip = file_io.read(
    "/etc/appscale/head_node_private_ip").rstrip()
def load_queues_from_file(self):
  """ Translates an application's queue configuration file to queue objects.

  The work is split into four steps: read the raw configuration (falling
  back to the default queue YAML when no file is found), parse it according
  to its format, guarantee a 'default' queue exists, and build the Queue
  objects while discarding invalid entries.

  Returns:
    A dictionary mapping queue names to Queue objects.
  Raises:
    ValueError: If queue_file is unable to get loaded.
  """
  info, queue_file, using_default = self._read_queue_config()
  queue_info = self._parse_queue_config(info, queue_file, using_default)
  queue_info = self._ensure_default_queue(queue_info)

  logger.info('Queue for {}:\n{}'.format(self._app_id, queue_info))

  return self._build_queues(queue_info)

def _read_queue_config(self):
  """ Reads the application's raw queue configuration from disk.

  Falls back to DEFAULT_QUEUE_YAML when the file is missing or the file
  location cannot be determined.

  Returns:
    A tuple of (raw config contents, queue file path, bool indicating
    whether the default YAML is being used).
  """
  using_default = False
  queue_file = ''
  try:
    queue_file = self.get_queue_file_location(self._app_id)
    try:
      info = file_io.read(queue_file)
      logger.info('Found queue file for {} in: {}'.format(
        self._app_id, queue_file))
    except IOError:
      logger.error(
        'No queue file found for {}, using default queue'.format(
          self._app_id))
      info = self.DEFAULT_QUEUE_YAML
      using_default = True
  except apiproxy_errors.ApplicationError as application_error:
    logger.error(application_error.message)
    info = self.DEFAULT_QUEUE_YAML
    using_default = True
  return info, queue_file, using_default

def _parse_queue_config(self, info, queue_file, using_default):
  """ Parses raw queue configuration into a queue info dictionary.

  Args:
    info: A string containing the raw configuration contents.
    queue_file: A string, the path the configuration was read from; its
      extension selects the parser.
    using_default: A bool, True when the default YAML is being used.

  Returns:
    A dictionary of parsed queue information.
  Raises:
    ValueError: If the configuration format is unknown or parsing yields
      nothing.
  """
  # TODO handle bad xml/yaml files.
  if queue_file.endswith('yaml') or using_default:
    queue_info = queueinfo.LoadSingleQueue(info).ToDict()
  elif queue_file.endswith('xml'):
    queue_info = self.parse_queue_xml(info)
  else:
    raise ValueError("Unable to load queue information with %s" % queue_file)

  if not queue_info:
    raise ValueError("Queue information with %s not set" % queue_file)
  return queue_info

def _ensure_default_queue(self, queue_info):
  """ Guarantees that the queue info contains a 'default' queue.

  Args:
    queue_info: A dictionary of parsed queue information.

  Returns:
    The queue info dictionary, with a default queue added when absent.
  """
  # An empty configuration is replaced wholesale with just the default.
  if not queue_info.get('queue'):
    return {'queue': [{'rate': '5/s', 'name': 'default'}]}

  if not any(queue['name'] == 'default' for queue in queue_info['queue']):
    queue_info['queue'].append({'rate': '5/s', 'name': 'default'})
  return queue_info

def _build_queues(self, queue_info):
  """ Builds Queue objects from parsed queue information.

  Invalid queue configurations are logged and discarded rather than
  aborting the whole load.

  Args:
    queue_info: A dictionary of parsed queue information.

  Returns:
    A dictionary mapping queue names to Queue objects.
  """
  queues = {}
  for queue in queue_info['queue']:
    if queue.get('mode') == 'pull':
      try:
        queues[queue['name']] = PullQueue(
          queue, self._app_id, self.db_access)
      except InvalidQueueConfiguration:
        logger.exception('Invalid queue configuration')
    else:
      try:
        queues[queue['name']] = PushQueue(queue, self._app_id)
      except InvalidQueueConfiguration:
        logger.exception('Invalid queue configuration')
  return queues