Example #1
    def send_message(cls, connection, host, port, message_type, *args):
        """
        Sends a message with the given type and optional args via the given connection.
        If the connection is already established, it is reused; otherwise a new
        connection is created and returned.

        :param connection: The connection to use for sending.
        :type connection: socket.socket
        :param host: The host to send the message to if the connection is not established
        :type host: basestring
        :param port: The port to use for establishing a new connection
        :type port: int
        :param message_type: The type of the message
        :param args: Optional arguments
        :return: The connection that has been used to send the message
        :rtype: socket.socket
        """
        # Build the wire format: the message type's value followed by each
        # optional argument, separated by ';' and terminated with the class
        # end token.
        message = str(message_type.value)
        for arg in args:
            message += ";%s" % arg
        message += cls.JOB_END_TOKEN
        # inform() handles the actual sending and the connection reuse/creation.
        return inform(message, conn=connection, ip_port=(host, port))
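The inform() helper and cls.JOB_END_TOKEN are not part of this excerpt. The sketch below illustrates the connection handling the docstring describes (reuse the given connection if one exists, otherwise connect to the given address and return the new socket); it is an assumption about the helper's shape, not the actual pySPACE implementation.

    import socket

    def inform(message, conn=None, ip_port=None):
        """Hypothetical sketch of an inform()-style helper."""
        if conn is None:
            # No established connection yet: open one to the given (host, port).
            conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            conn.connect(ip_port)
        # Under Python 2, which this codebase targets, message is a plain
        # byte string and can be sent as-is.
        conn.sendall(message)
        # Return the connection so the caller can reuse it for later messages.
        return conn

Given the body of send_message, a call such as send_message(connection, host, port, some_type, 3, "job-42") puts str(some_type.value) + ";3;job-42" followed by JOB_END_TOKEN on the wire and returns whichever connection was used.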
Example #2
File: ll_backend.py  Project: pyspace/test
    def execute(self):
        """ Execute all processes specified in the currently staged operation """
        assert (self.state == "staged")

        self._log("Operation - executing")
        self.state = "executing"

        # The handler that is used remotely for logging
        handler_class = logging.handlers.SocketHandler
        handler_args = {"host": self.host, "port": self.port}
        # the communication properties to talk to LoadLevelerComHandler
        backend_com = (self.SERVER_IP, self.SERVER_PORT)
        print('--> Loadleveler Communication : \n\t\t host:%s, port:%s' %
              (self.host, self.port))
        # Prepare the directory where processes are stored before submitted
        # to LoadLeveler
        self.process_dir = os.sep.join(
            [self.current_operation.result_directory, ".processes"])
        if not os.path.exists(self.process_dir):
            os.mkdir(self.process_dir)
        # create and start server socket thread
        self.listener = LoadLevelerComHandler(
            self.sock,
            self.result_handlers,
            self.progress_bar,
            self.LL_COMMAND_FILE_TEMPLATE,
            operation_dir=self.current_operation.result_directory)
        self.listener.start()
        # create a client socket to talk to server socket thread
        send_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        send_socket.connect((self.SERVER_IP, self.SERVER_PORT))

        # get first process from creation queue
        process = self.current_operation.processes.get()
        process_counter = 0

        # Prepare each process from the queue for remote execution and submit
        # it; the queue delivers False as a sentinel once all processes have
        # been created.
        while process != False:
            process.prepare(pySPACE.configuration, handler_class, handler_args,
                            backend_com)
            # Preparing a process is typically much faster than executing it,
            # so a second queue tracks the processes until they have finished
            # execution.
            self.result_handlers.put(1)
            # pickle the process object
            proc_file_name = os.sep.join(
                [self.process_dir,
                 "process_%d.pickle" % process_counter])
            # binary mode: HIGHEST_PROTOCOL is a binary pickle format
            proc_file = open(proc_file_name, "wb")
            cPickle.dump(process, proc_file, cPickle.HIGHEST_PROTOCOL)
            proc_file.close()
            # fill out LoadLeveler template
            llfile = self.LL_COMMAND_FILE_TEMPLATE % \
                      {"process_file_path": proc_file_name,
                       "server_port": self.SERVER_PORT,
                       "op_result_dir": self.current_operation.result_directory}
            llfilepath = os.path.join(self.current_operation.result_directory,
                                      "ll_call.cmd")
            f = open(llfilepath, 'w')
            f.write(llfile)
            f.close()
            # submit to LoadLeveler, retrying until llsubmit reports no error
            while True:
                outlog, errlog = sub.Popen(["llsubmit", llfilepath],
                                           stdout=sub.PIPE,
                                           stderr=sub.PIPE).communicate()
                if errlog == "":
                    break
                else:
                    self._log("Warning: Job submission to LoadLeveler failed"\
                              " with %s. Job will be resubmitted." % errlog,
                              logging.WARNING)
                    time.sleep(1)
            # parse job_id for monitoring
            loadl_id = outlog.split("\"")[1].split(".")[-1]
            # inform listener that we successfully submitted the job
            send_socket = inform('submitted;%d;%s%s' % \
                                 (process_counter, loadl_id, self.end_token),
                                send_socket, (self.SERVER_IP,self.SERVER_PORT))
            # get next process and update process_counter
            process = self.current_operation.processes.get()
            process_counter += 1

        # send message 'creation finished' to listener
        send_socket = inform('creation finished' + self.end_token, send_socket,
                             (self.SERVER_IP, self.SERVER_PORT))
        # give socket chance to process message
        # time.sleep(0.001)
        self.listener.creation_finished = True
        send_socket.shutdown(socket.SHUT_RDWR)
        send_socket.close()
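The job id parsing above relies on llsubmit echoing the submitted job name in double quotes, along the lines of: The job "node01.example.com.42" has been submitted. (the exact wording is an assumption here; only the quoted, dot-separated name matters). A minimal standalone check of that parsing step:

    # Hypothetical llsubmit output; only the quoted job name is relevant.
    outlog = 'llsubmit: The job "node01.example.com.42" has been submitted.\n'

    # Same two-step split as in execute(): take the quoted job name, then
    # keep the component after the last dot as the numeric job id.
    loadl_id = outlog.split("\"")[1].split(".")[-1]
    print(loadl_id)  # -> 42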