def install_task(self, task_name, task_code):
    """
    Installs new task code in the execution environment.
    @type task_name: str
    @param task_name: The name of the task. The name must be of the
    form name1.name2.name3, e.g., daimi.imaging.scale.
    @type task_code: str
    @param task_code: The code of the task. The code is validated by the
    Locusts code validator and must adhere to its rules.
    @raise Exception: Raised if the code fails to validate.
    """
    # Check the validity of the task name.
    if not TaskRegistry.valid_task_name(task_name):
        self.__logger.info('task with invalid name given (%s)'%task_name)
        raise Exception('Invalid task name.')

    # Check that the task is not already installed.
    if self.registry.has_task(task_name):
        self.__logger.info('Attempt to re-install task.')
        raise Exception('task %s already installed.'%task_name)

    # Avoid malicious attempts to push __init__.py this way...
    if task_name.endswith('__init__'):
        self.__logger.info('Attempt to hack by pushing __init__.py')
        raise Exception('Stop trying to hack me!')

    # Validate the code.
    v = Validator(task_code)
    try:
        v.validate()
    except ValidationError, error:
        self.__logger.info('Validation error: %s'%error.message)
        raise Exception(error.message)
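
# Usage sketch, assuming a client-side EIPC proxy (here called `client`) that
# exposes the functions registered for IPC in the Jailor below. The task name
# must be dotted (e.g. 'daimi.imaging.scale') and the pushed source must pass
# the Locusts validator; the `client` proxy and the task body shown are purely
# illustrative, not part of the original module.
#
# task_code = '''
# def run(task_input):
#     return {'scaled': task_input}
# '''
# if not client.task_exists('daimi.imaging.scale'):
#     client.install_task('daimi.imaging.scale', task_code)
# execid = client.perform_task('daimi.imaging.scale', {'width': 800})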
class Producer(object):
    def __init__(self, task_registry=None, store_results=True, **storage_kwargs):
        self.storage = self.get_storage(**storage_kwargs)
        self.store_results = store_results

        self.task_registry = TaskRegistry() if task_registry is None else task_registry

    def get_storage(self, **kwargs):
        raise NotImplementedError('Backend for base `Producer` class not specified. '
                                  'Use a subclass instead.')

    def get_consumer(self, **config):
        return Consumer(self, **config)
        
    """ Task Decorators """

    def task(self, blocking=False, retries=0, retry_delay=0, name=None,
             **task_config):
        """
        Return a decorator that wraps a function so that calling it enqueues
        its execution.
        """
        def decorator(f):
            return TaskGenerator(self, f,
                retries=retries,
                retry_delay=retry_delay,
                blocking=blocking,
                name=name,
                **task_config)
        return decorator

    def periodic_task(self, blocking=False, retries=0, retry_delay=0, name=None,
                      ready_handler=None, **task_config):
        """
        Return a decorator that registers a recurring, periodic task.
        """
        def decorator(f):
            return TaskGenerator(self, f,
                retries=retries,
                retry_delay=retry_delay,
                blocking=blocking,
                name=name,
                ready=ready_handler,
                periodic=True,
                **task_config)
        return decorator

    """ Enqueueing, scheduling and backend data handling. """

    def _enqueue(self, task_ser):
        self.storage.enqueue(task_ser)

    def enqueue(self, task):
        if not isinstance(task, QueuedTask):
            raise Exception('`{}` is an invalid task type. `task` must be an '
                            'instance of a subclass of `QueuedTask`.'.format(type(task).__name__))
        self.register_task(type(task))

        task_ser = self.task_registry.serialize_task(task)
        self._enqueue(task_ser)

        return TaskResult(self, task)

    def dequeue(self):
        task_ser = self.storage.dequeue()
        task = self.task_registry.deserialize_task(task_ser)

        return task

    def requeue(self, task):
        task_ser = self.task_registry.serialize_task(task)
        self._enqueue(task_ser)

    def add_to_schedule(self, task, ts):
        task_ser = self.task_registry.serialize_task(task)
        self.storage.add_to_schedule(task_ser, ts)

    def get_from_schedule(self, ts):
        task_list = self.storage.get_from_schedule(ts)
        return [self.task_registry.deserialize_task(t) for t in task_list]

    def restart(self, task):
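        # Restarting a task is a no-op in this base class.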
        pass

    def revoke(self, task):
        task_ser = self.task_registry.serialize_task(task)
        self.storage.unqueue(task_ser)

    def get_data(self, task, preserve=False):
        if preserve:
            return self.storage.get_data(task.task_id)
        return self.storage.pop_data(task.task_id)

    def put_data(self, task, data):
        self.storage.put_data(task.task_id, data)

    """ Task Registry handling. """

    def register_task(self, task_class):
        if task_class.periodic:
            self.task_registry.register_periodic(task_class)
        else:
            self.task_registry.register_task(task_class)

    def unregister_task(self, task_class):
        self.task_registry.unregister_task(task_class)

    def read_periodic_tasks(self):
        return self.task_registry.periodic_tasks()

    def get_ready_tasks(self, ts=None):
        if ts is None:
            ts = datetime.utcnow()
        return self.task_registry.get_ready_tasks(ts)

    """ Task execution handling. """

    def execute(self, task):
        # TODO: Add exception handling
        res = task.execute()
        if not self.store_results:
            return res

        self.put_data(task, res)

    def check_ready(self, task, ts):
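        # The base implementation treats every task as ready, regardless of ts.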
        return True
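
# Usage sketch, all hypothetical: `MemoryStorage` is an assumed storage backend,
# and it is assumed that calling a TaskGenerator-wrapped function enqueues a
# QueuedTask and yields a TaskResult, as the decorator docstrings above describe.
#
# class LocalProducer(Producer):
#     def get_storage(self, **kwargs):
#         return MemoryStorage(**kwargs)   # hypothetical backend
#
# producer = LocalProducer()
#
# @producer.task(retries=3, retry_delay=5)
# def resize(width):
#     return {'width': width // 2}
#
# result = resize(800)                 # enqueue the task
# consumer = producer.get_consumer()   # consumer that dequeues and executes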
class Jailor(EIPCProcess):
    """
    This class manages the communication between the execution environment
    and the outside world.
    """
    
    def __init__(self, pipe, cores, basedir = 'pexecenv', debug = False):
        """
        Constructor.
        @type pipe: EIPC
        @param pipe: The pipe used for IPC.
        @type cores: int
        @param cores: The number of cores/cpu to utilize when scheduling.
        @type basedir: str
        @param basedir: The base directory where task code is stored. 
        """
        # Initialize super class.
        super(Jailor, self).__init__(pipe)
        
        # Set up logging.
        if debug:
            logging.basicConfig(level=logging.DEBUG,
                                format='%(asctime)s - %(levelname)s: %(message)s\n'\
                                    '\t%(filename)s, %(funcName)s, %(lineno)s',
                                datefmt='%m/%d/%y %H:%M:%S')
        else:
            logging.basicConfig(level=logging.ERROR,
                                format='%(asctime)s - %(levelname)s: %(message)s',
                                datefmt='%m/%d/%y %H:%M:%S')


        # Register a logger.
        self.__logger = logging.getLogger('jailor')

        # Create the scheduler and registry.
        self.registry = TaskRegistry(basedir)
        self.scheduler = Scheduler(self, cores, basedir)

        # Register functions for IPC.
        self.register_function(self.perform_task)
        self.register_function(self.task_exists)
        self.register_function(self.install_task)
        self.register_function(self.fetch_task_code)

        self.__logger.info('Jailor initialized.')
    
    def perform_task(self, task_name, task_input):
        """
        Starts performing a named task on behalf of the client.
        @type task_name: str
        @param task_name: The task identifier.
        @type task_input: dict (kwargs), tuple (pos args), or any (single argument).
        @param task_input: The input for the given task.
        @rtype: int
        @return: The execution id of the scheduled task.
        """        
        # Check that the task exists.
        if not self.registry.has_task(task_name):
            self.__logger.info('Call to non-existing task %s'%task_name)
            raise Exception('The named task does not exist.')
        
        # Now start performing the task.
        execid = self.scheduler.schedule(task_name, task_input)
        self.__logger.info('%s scheduled with execid=%i.'%(task_name, execid))
        return execid
    
    def task_exists(self, task_name):
        """
        Checks whether a given task exists.
        @type task_name: str
        @param task_name: The task identifier.
        """
        # Ask the registry whether or not the task is installed.
        return self.registry.has_task(task_name)
        
    def task_callback(self, execution_id, status, args):
        """
        Entry point for task callbacks. This is called by tasks 
        upon completion or when an error occurs.
        @type execution_id: int
        @param execution_id: The id of the task execution. This is used on 
        the client side to identify the responding task.
        @type status: str
        @param status: The status of the execution. This is: 'DONE' if the task 
        has finished its execution, 'ERROR' if an error has occurred, and 'STATUS' if 
        the task is simply returning some status information about its execution.
        @type args: dict
        @param args: Keyword-based arguments. Depending on the value of the 
        status parameter different keyword arguments are expected. 
        """
        # Log the event.
        self.__logger.info('Callback: execid=%i, status=%s'%(execution_id, status))
    
        # Handle the callback.    
        try:
            # Switch out based on status.
            if status == 'DONE':
                # The task has finished its execution. Return its output to 
                # the client.
                try:
                    self._ipc.task_callback('RESULT', execution_id, args['output'])
                except Exception, excep:
                    self.__logger.exception('Error returning result.')
                    self._ipc.task_callback('ERROR', execution_id, 'Error returning result: %s'%excep.message)
            elif status == 'ERROR':
                # The task has encountered an error. Return the 
                # error message to the client.
                self._ipc.task_callback('ERROR', execution_id, args['error'])
            elif status == 'STATUS':
                # The task is relaying status information about its
                # execution.
                self._ipc.task_callback('STATUS', execution_id, args['message'])