def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.

    :param operation_id: ID of the Operation entity to load and launch.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        algorithm = current_operation.algorithm
        algorithm_group = dao.get_algo_group_by_id(algorithm.fk_algo_group)
        LOGGER.debug("Importing Algorithm: " + str(algorithm_group.classname) +
                     " for Operation:" + str(current_operation.id))
        PARAMS = parse_json_parameters(current_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(algorithm_group)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation, adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        # BUG FIX: log the operation id actually being executed. The original used
        # str(sys.argv[1]), which is only correct when this module runs as a script
        # and crashes with IndexError when invoked from the in-process queue.
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        # If the failed operation belongs to a burst, mark the whole burst as failed
        # so the UI does not show it as still running.
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst, error_message=str(excep))
Exemplo n.º 2
0
def do_operation_launch(operation_id):
    """
    Event attached to the local queue for executing an operation, when we will have resources available.

    :param operation_id: ID of the Operation entity to load and launch.
    """
    LOGGER = get_logger('tvb.core.operation_async_launcher')

    try:
        LOGGER.debug("Loading operation with id=%s" % operation_id)
        current_operation = dao.get_operation_by_id(operation_id)
        algorithm = current_operation.algorithm
        algorithm_group = dao.get_algo_group_by_id(algorithm.fk_algo_group)
        LOGGER.debug("Importing Algorithm: " + str(algorithm_group.classname) +
                     " for Operation:" + str(current_operation.id))
        PARAMS = parse_json_parameters(current_operation.parameters)
        adapter_instance = ABCAdapter.build_adapter(algorithm_group)

        ## Un-comment below for profiling an operation:
        ## import cherrypy.lib.profiler as profiler
        ## p = profiler.Profiler("/Users/lia.domide/TVB/profiler/")
        ## p.run(OperationService().initiate_prelaunch, current_operation, adapter_instance, {}, **PARAMS)

        OperationService().initiate_prelaunch(current_operation,
                                              adapter_instance, {}, **PARAMS)
        LOGGER.debug("Successfully finished operation " + str(operation_id))

    except Exception as excep:
        # BUG FIX: log the operation id actually being executed. The original used
        # str(sys.argv[1]), which is only correct when this module runs as a script
        # and crashes with IndexError when invoked from the in-process queue.
        LOGGER.error("Could not execute operation " + str(operation_id))
        LOGGER.exception(excep)
        # If the failed operation belongs to a burst, mark the whole burst as failed.
        parent_burst = dao.get_burst_for_operation_id(operation_id)
        if parent_burst is not None:
            WorkflowService().mark_burst_finished(parent_burst,
                                                  error_message=str(excep))
Exemplo n.º 3
0
 def get_algo_group_by_identifier(self, ident):
     """
     Retrieve Algorithm Group entity by ID.
     Return None, if ID is not found in DB.
     """
     try:
         found_group = dao.get_algo_group_by_id(ident)
     except Exception as excep:
         # Any DB failure (including a missing ID) is logged and mapped to None.
         self.logger.exception(excep)
         return None
     return found_group
Exemplo n.º 4
0
 def get_algo_group_by_identifier(self, ident):
     """
     Retrieve Algorithm Group entity by ID.
     Return None, if ID is not found in DB.
     """
     result = None
     try:
         result = dao.get_algo_group_by_id(ident)
     except Exception as error:
         # On any lookup failure, log the error and fall through to return None.
         self.logger.exception(error)
     return result
Exemplo n.º 5
0
    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operation in the same workflow.
        """
        # Nothing to do without an operation to launch.
        if operation_id is None:
            return
        operation = dao.get_operation_by_id(operation_id)
        if adapter_instance is None:
            # Rebuild the adapter from the operation's stored algorithm group.
            group = dao.get_algo_group_by_id(operation.algorithm.fk_algo_group)
            adapter_instance = ABCAdapter.build_adapter(group)
        launch_params = parse_json_parameters(operation.parameters)
        if send_to_cluster:
            self._send_to_cluster([operation], adapter_instance, operation.user.username)
        else:
            self.initiate_prelaunch(operation, adapter_instance, {}, **launch_params)
Exemplo n.º 6
0
    def launch_operation(self, operation_id, send_to_cluster=False, adapter_instance=None):
        """
        Method exposed for Burst-Workflow related calls.
        It is used for cascading operation in the same workflow.
        """
        if operation_id is not None:
            operation_entity = dao.get_operation_by_id(operation_id)
            if adapter_instance is None:
                # No adapter supplied: rebuild it from the stored algorithm group.
                algo = operation_entity.algorithm
                algo_group = dao.get_algo_group_by_id(algo.fk_algo_group)
                adapter_instance = ABCAdapter.build_adapter(algo_group)
            parsed_params = parse_json_parameters(operation_entity.parameters)

            if send_to_cluster:
                self._send_to_cluster([operation_entity], adapter_instance)
            else:
                self.initiate_prelaunch(operation_entity, adapter_instance, {}, **parsed_params)