def test_run_process_python(self):
    """Run an inline Python script through Processor and check the
    streamed stdout, the final result record and iterator exhaustion."""
    script = """#! /usr/bin/python
import os
import time
import socket
print 1
os.environ['KEY2'] = socket.gethostname()
print 2
__exit(3)"""
    processor = Processor(os.environ['USER'])
    reply_iter = processor.run(script, 'python', {'KEY1': 'SOME VALUE'})
    # First reply is the live process wrapper around the running script.
    wrapper = next(reply_iter)
    self.assertType(wrapper, PopenWrapper)
    # The script prints "1" and "2" on separate lines, in order.
    for expected_line in ('1\n', '2\n'):
        self.assertEqual(wrapper.stdout.readline(), expected_line)
    # Second reply is the final [user, ret_code, stdout, stderr, env] record:
    # exit code 3 from __exit(3), KEY2 exported with the local hostname.
    self.assertEqual(
        next(reply_iter),
        [LOCAL_USER, 3, '', '', {'KEY2': socket.gethostname()}])
    # The process must have terminated by now.
    self.assertIsNotNone(wrapper.poll())
    # And the reply generator must be exhausted.
    self.assertRaises(StopIteration, next, reply_iter)
def test_run_process_bash(self):
    """Run an inline bash script through Processor and check the
    streamed stdout, the final result record and iterator exhaustion."""
    script = """#! /usr/bin/bash
echo 1
export KEY2=$(python -c "import socket; print socket.gethostname()")
echo 2
__exit 3"""
    processor = Processor(os.environ['USER'])
    reply_iter = processor.run(script, 'bash', {'KEY1': 'SOME VALUE'})
    # First reply is the live process wrapper around the running script.
    wrapper = next(reply_iter)
    expected_host = socket.gethostname()
    self.assertType(wrapper, PopenWrapper)
    # The script echoes "1" and "2" on separate lines, in order.
    for expected_line in ('1\n', '2\n'):
        self.assertEqual(wrapper.stdout.readline(), expected_line)
    # Second reply is the final [user, ret_code, stdout, stderr, env] record:
    # exit code 3 from __exit 3, KEY2 exported with the local hostname.
    self.assertEqual(
        next(reply_iter),
        [LOCAL_USER, 3, '', '', {'KEY2': expected_host}])
    # The process must have terminated by now.
    self.assertIsNotNone(wrapper.poll())
    # And the reply generator must be exhausted.
    self.assertRaises(StopIteration, next, reply_iter)
def job(self, remote_user, request): LOG.info('Recv:: JOB for session %s', self.session_id) command = request['script'] env = request['env'] processor = Processor(remote_user) lang = parser.parse_lang(command) LOG.info("Node[%s]::: User:::[%s] UUID:::[%s] LANG:::[%s]" % (CONFIG.node_id, remote_user, self.session_id, lang)) if isinstance(command, unicode): command = command.encode('utf8') try: atts = request.get('attachments', []) if atts and isinstance(atts, list): for att in atts: for name, content in att.items(): # Save libs if any processor.add_libs(name=name, source=content) # For long-running jobs # self.socket.send_multipart([self.session_id, # StatusCodes.WORKING, # msgpack.packb(dict(stdout=value))]) proc_iter = iter(processor.run(command, lang, env)) proc = next(proc_iter) def _encode(data): try: return msgpack.packb(data) except: return "" if not isinstance(proc, list): proc.set_input_fd(self.queue) running = True while running: to_read = proc.select(.2) if proc.poll() is not None: # We are done with the task LOG.info("Job %s finished" % self.session_id) running = False # Do not break, let consume the streams for fd_type in to_read: if fd_type == proc.TRANSPORT: try: frames = self.queue.recv(0) except: continue if frames and len(frames) == 2: if frames[0] == 'INPUT': try: proc.write(frames[1]) except: continue elif frames[0] == 'TERM': if (len(frames) > 1 and frames[1] == 'kill'): # kill task proc.kill() LOG.info("Job %s killed" % self.session_id) else: # terminate task proc.terminate() LOG.info("Job %s terminated" % self.session_id) continue if fd_type == proc.STDOUT: data = proc.read_out() if data: self._yield_reply(StatusCodes.STDOUT, proc.run_as, data) if fd_type == proc.STDERR: data = proc.read_err() if data: self._yield_reply(StatusCodes.STDERR, proc.run_as, data) run_as, ret_code, stdout, stderr, env = next(proc_iter) else: # Error invoking Popen, get params run_as, ret_code, stdout, stderr, env = proc if stdout: 
self._yield_reply(StatusCodes.STDOUT, run_as, stdout) if stderr: self._yield_reply(StatusCodes.STDERR, run_as, stderr) if '__EXPORT__' in env: try: file_name = env.pop('__EXPORT__') path = os.path.join(processor.session_cwd, file_name) file_size = os.stat(path).st_size if file_size > 4 * 1024 * 1024: raise Exception('Exported file size bigger than' ' limit(4 MB) [%s]' % file_name) with open(path) as exp_f: self._yield_file(file_name, exp_f.read()) except Exception, ex: LOG.error(ex) pass job_result = dict(env=env, ret_code=ret_code, stdout=stdout, stderr=stderr) LOG.info('Job [%s] DONE' % (self.session_id)) self._yield_reply(StatusCodes.FINISHED, run_as, job_result)