def test_run_process_python(self):
    """Run an inline python script via Processor and verify the streamed
    stdout, the final exit-status record (including the exported env)
    and the exhaustion of the reply iterator."""
    script = """#! /usr/bin/python
import os
import time
import socket
print 1
os.environ['KEY2'] = socket.gethostname()
print 2
__exit(3)"""
    replies = Processor(os.environ['USER']).run(
        script, 'python', {'KEY1': 'SOME VALUE'})
    pipe = next(replies)
    self.assertType(pipe, PopenWrapper)
    for expected_line in ('1\n', '2\n'):
        self.assertEqual(pipe.stdout.readline(), expected_line)
    self.assertEqual(
        next(replies),
        [LOCAL_USER, 3, '', '', {'KEY2': socket.gethostname()}])
    self.assertIsNotNone(pipe.poll())
    self.assertRaises(StopIteration, next, replies)
def run_local(self, script_content):
    """Execute *script_content* on this machine as the current OS user.

    The user name is resolved per platform (pwd on POSIX, win32api on
    Windows), the script language is detected from its content, and the
    running pipe is wrapped in an AsyncRespLocal for the caller.
    """
    from cloudrunner.core.process import Processor

    if os.name == 'nt':
        import win32api
        import win32con
        current_user = win32api.GetUserNameEx(win32con.NameSamCompatible)
    else:
        import pwd
        current_user = pwd.getpwuid(os.getuid())[0]

    runner = Processor(current_user)
    detected_lang = parser.parse_lang(script_content)
    pipe = runner.run(script_content, detected_lang, self.env)
    return AsyncRespLocal(self, pipe)
def test_run_process_python(self):
    """Processor should stream the python script's stdout line by line
    and finish with an exit-status record carrying the exported env."""
    runner = Processor(os.environ['USER'])
    source = """#! /usr/bin/python
import os
import time
import socket
print 1
os.environ['KEY2'] = socket.gethostname()
print 2
__exit(3)"""
    replies = runner.run(source, 'python', {'KEY1': 'SOME VALUE'})
    pipe = next(replies)
    self.assertType(pipe, PopenWrapper)
    self.assertEqual(pipe.stdout.readline(), '1\n')
    self.assertEqual(pipe.stdout.readline(), '2\n')
    expected_status = [LOCAL_USER, 3, '', '', {'KEY2': socket.gethostname()}]
    self.assertEqual(next(replies), expected_status)
    self.assertIsNotNone(pipe.poll())
    self.assertRaises(StopIteration, next, replies)
def test_run_process_bash(self):
    """Run an inline bash script via Processor and verify the streamed
    stdout, the final exit-status record (including the exported env)
    and the exhaustion of the reply iterator."""
    script = """#! /usr/bin/bash
echo 1
export KEY2=$(python -c "import socket; print socket.gethostname()")
echo 2
__exit 3"""
    replies = Processor(os.environ['USER']).run(
        script, 'bash', {'KEY1': 'SOME VALUE'})
    pipe = next(replies)
    self.assertType(pipe, PopenWrapper)
    for expected_line in ('1\n', '2\n'):
        self.assertEqual(pipe.stdout.readline(), expected_line)
    self.assertEqual(
        next(replies),
        [LOCAL_USER, 3, '', '', {'KEY2': socket.gethostname()}])
    self.assertIsNotNone(pipe.poll())
    self.assertRaises(StopIteration, next, replies)
def test_run_process_bash(self):
    """Processor should stream the bash script's stdout line by line
    and finish with an exit-status record carrying the exported env."""
    runner = Processor(os.environ['USER'])
    source = """#! /usr/bin/bash
echo 1
export KEY2=$(python -c "import socket; print socket.gethostname()")
echo 2
__exit 3"""
    replies = runner.run(source, 'bash', {'KEY1': 'SOME VALUE'})
    pipe = next(replies)
    host = socket.gethostname()
    self.assertType(pipe, PopenWrapper)
    self.assertEqual(pipe.stdout.readline(), '1\n')
    self.assertEqual(pipe.stdout.readline(), '2\n')
    expected_status = [LOCAL_USER, 3, '', '', {'KEY2': host}]
    self.assertEqual(next(replies), expected_status)
    self.assertIsNotNone(pipe.poll())
    self.assertRaises(StopIteration, next, replies)
def job(self, remote_user, request): LOG.info('Recv:: JOB for session %s', self.session_id) command = request['script'] env = request['env'] processor = Processor(remote_user) lang = parser.parse_lang(command) LOG.info("Node[%s]::: User:::[%s] UUID:::[%s] LANG:::[%s]" % (CONFIG.node_id, remote_user, self.session_id, lang)) if isinstance(command, unicode): command = command.encode('utf8') try: atts = request.get('attachments', []) if atts and isinstance(atts, list): for att in atts: for name, content in att.items(): # Save libs if any processor.add_libs(name=name, source=content) # For long-running jobs # self.socket.send_multipart([self.session_id, # StatusCodes.WORKING, # msgpack.packb(dict(stdout=value))]) proc_iter = iter(processor.run(command, lang, env)) proc = next(proc_iter) def _encode(data): try: return msgpack.packb(data) except: return "" if not isinstance(proc, list): proc.set_input_fd(self.queue) running = True while running: to_read = proc.select(.2) if proc.poll() is not None: # We are done with the task LOG.info("Job %s finished" % self.session_id) running = False # Do not break, let consume the streams for fd_type in to_read: if fd_type == proc.TRANSPORT: try: frames = self.queue.recv(0) except: continue if frames and len(frames) == 2: if frames[0] == 'INPUT': try: proc.write(frames[1]) except: continue elif frames[0] == 'TERM': if (len(frames) > 1 and frames[1] == 'kill'): # kill task proc.kill() LOG.info("Job %s killed" % self.session_id) else: # terminate task proc.terminate() LOG.info("Job %s terminated" % self.session_id) continue if fd_type == proc.STDOUT: data = proc.read_out() if data: self._yield_reply(StatusCodes.STDOUT, proc.run_as, data) if fd_type == proc.STDERR: data = proc.read_err() if data: self._yield_reply(StatusCodes.STDERR, proc.run_as, data) run_as, ret_code, stdout, stderr, env = next(proc_iter) else: # Error invoking Popen, get params run_as, ret_code, stdout, stderr, env = proc if stdout: 
self._yield_reply(StatusCodes.STDOUT, run_as, stdout) if stderr: self._yield_reply(StatusCodes.STDERR, run_as, stderr) if '__EXPORT__' in env: try: file_name = env.pop('__EXPORT__') path = os.path.join(processor.session_cwd, file_name) file_size = os.stat(path).st_size if file_size > 4 * 1024 * 1024: raise Exception('Exported file size bigger than' ' limit(4 MB) [%s]' % file_name) with open(path) as exp_f: self._yield_file(file_name, exp_f.read()) except Exception, ex: LOG.error(ex) pass job_result = dict(env=env, ret_code=ret_code, stdout=stdout, stderr=stderr) LOG.info('Job [%s] DONE' % (self.session_id)) self._yield_reply(StatusCodes.FINISHED, run_as, job_result)
script_content = parser.CRN_SHEBANG.sub("", script_content) sections = parser.split_sections(script_content) if not sections: return first_section = sections[0] if not parser.parse_selectors(first_section.strip())[0]: if first_section.strip(): # has content? # Local run console.green("=" * 80) console.green("Running local script", bold=1) console.green("=" * 80) from cloudrunner.core.process import Processor processor = Processor("@") lang = parser.parse_lang(first_section) proc_iter = iter(processor.run(first_section, lang, self.env)) proc = next(proc_iter) if not isinstance(proc, list): while True: try: to_read = proc.select(.2) if proc.poll() is not None: # We are done with the task break for fd_type in to_read: if fd_type == proc.STDOUT: data = proc.read_out() if data: console.log(data)
def job(self, *args):
    """Execute a job request for this session and stream results back.

    ``args[0]`` is expected to be a JSON-encoded pair ``[remote_user,
    request]``, where ``request`` is a dict with at least ``script`` and
    ``env`` keys and optionally ``libs`` (a list of kwargs dicts for
    ``Processor.add_libs``).

    While the script runs, stdout/stderr chunks are relayed via
    ``self._yield_reply`` and control frames read from ``self.queue``
    allow feeding stdin ('INPUT') or stopping the task ('TERM'
    [, 'kill']).  A FINISHED status carrying the JSON job result is sent
    at the end, after which the session is closed and the current
    process is signalled to trigger cleanup.

    NOTE(review): Python 2 code (``unicode`` checks); several bare
    ``except:`` clauses below deliberately swallow transport/encoding
    errors to keep the stream loop alive.
    """
    LOG.info('Recv:: JOB for session %s', self.session_id)
    try:
        remote_user, request = json.loads(args[0])
    except ValueError:
        # Not valid JSON -- drop the request; nothing sensible to run.
        LOG.error('Malformed request received: %s' % args[0])
        return
    command = request['script']
    env = request['env']
    proc = Processor(remote_user)
    lang = parser.parse_lang(command)
    LOG.info("Node[%s]::: User:::[%s] UUID:::[%s] LANG:::[%s]" %
             (CONFIG.node_id, remote_user, self.session_id, lang))
    # Processor works on byte strings (Python 2).
    if isinstance(command, unicode):
        command = command.encode('utf8')
    incl_header = []
    libs = request.get('libs', [])
    if libs:
        for lib in libs:
            # Save libs if any
            script = proc.add_libs(**lib)
            if script:
                # inline: add_libs returned content to prepend to the job
                if isinstance(script, unicode):
                    script = script.encode('utf8')
                incl_header.append(script)
    # For long-running jobs
    # self.socket.send_multipart([self.session_id,
    #                             StatusCodes.WORKING,
    #                             json.dumps(dict(stdout=value))])
    proc_iter = iter(proc.run(command, lang, env, inlines=incl_header))
    # First item is either a live pipe object or, on Popen failure,
    # a plain list with the final result (see the else branch below).
    proc = next(proc_iter)

    def _encode(data):
        # Best-effort JSON encoding; an unencodable chunk is dropped
        # rather than killing the stream loop.
        try:
            return json.dumps(data)
        except:
            return ""

    if not isinstance(proc, list):
        proc.set_input_fd(self.queue)
        running = True
        while running:
            to_read = proc.select(.2)
            if proc.poll() is not None:
                # We are done with the task
                LOG.info("Job %s finished" % self.session_id)
                running = False
                # Do not break, let consume the streams
            for fd_type in to_read:
                if fd_type == proc.TRANSPORT:
                    # Control frame from the session queue --
                    # presumably [command, payload]; verify producer.
                    try:
                        frames = self.queue.recv(0)
                    except:
                        continue
                    if frames and len(frames) == 2:
                        if frames[0] == 'INPUT':
                            # Forward payload to the task's stdin.
                            try:
                                proc.write(frames[1])
                            except:
                                continue
                        elif frames[0] == 'TERM':
                            if len(frames) > 1 and frames[1] == 'kill':
                                # kill task
                                proc.kill()
                                LOG.info("Job %s killed" % self.session_id)
                            else:
                                # terminate task
                                proc.terminate()
                                LOG.info("Job %s terminated" %
                                         self.session_id)
                            continue
                if fd_type == proc.STDOUT:
                    data = proc.read_out()
                    if data:
                        enc_data = _encode(dict(stdout=data))
                        if enc_data:
                            self._yield_reply(
                                StatusCodes.STDOUT, proc.run_as, enc_data)
                if fd_type == proc.STDERR:
                    data = proc.read_err()
                    if data:
                        enc_data = _encode(dict(stderr=data))
                        if enc_data:
                            self._yield_reply(
                                StatusCodes.STDERR, proc.run_as, enc_data)
        # Second (final) item of the run iterator: the job summary.
        run_as, ret_code, stdout, stderr, env = next(proc_iter)
    else:
        # Error invoking Popen, get params
        run_as, ret_code, stdout, stderr, env = proc
    # Flush any remaining/fallback output that was not streamed above.
    if stdout:
        self._yield_reply(StatusCodes.STDOUT, run_as,
                          json.dumps(dict(stdout=stdout)))
    if stderr:
        self._yield_reply(StatusCodes.STDERR, run_as,
                          json.dumps(dict(stderr=stderr)))
    job_result = json.dumps(dict(env=env, ret_code=ret_code,
                                 stdout=stdout, stderr=stderr))
    LOG.info('Job [%s] DONE' % (self.session_id))
    self._yield_reply(StatusCodes.FINISHED, run_as, job_result)
    self._close()
    # Invoke clean
    if os.name == 'nt':
        import win32api
        import win32con
        win32api.GenerateConsoleCtrlEvent(win32con.CTRL_C_EVENT, 0)
    else:
        # Signal ourselves so the surrounding runtime performs cleanup --
        # NOTE(review): assumes a SIGHUP handler is installed elsewhere.
        os.kill(os.getpid(), signal.SIGHUP)