def _options_string_to_dict(istr):
    ans = {}
    istr = istr.strip()
    if not istr:
        return ans
    if istr[0] == "'" or istr[0] == '"':
        istr = eval(istr)
    tokens = quote_split('[ ]+', istr)
    for token in tokens:
        index = token.find('=')
        if index == -1:
            raise ValueError(
                "Solver options must have the form option=value: '%s'" % istr)
        try:
            val = eval(token[(index + 1):])
        except:
            val = token[(index + 1):]
        ans[token[:index]] = val
    return ans
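# Illustrative sketch (not part of the original module), assuming quote_split
# from pyutilib.misc is available here: values that eval() can interpret
# (numbers, quoted strings) are converted, and anything else is kept as the
# raw token text.
#
#   >>> _options_string_to_dict("mipgap=0.01 threads=4 presolve=aggressive")
#   {'mipgap': 0.01, 'threads': 4, 'presolve': 'aggressive'}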
def X_process_include(cmd, _model, _data, _default):
    if len(cmd) == 1:
        raise IOError("Cannot execute 'include' command without a filename")
    if len(cmd) > 2:
        raise IOError("The 'include' command only accepts a single filename")
    global Filename
    Filename = cmd[1]
    global Lineno
    Lineno = 0

    cmd = ""
    status = True
    INPUT = open(Filename, 'r')
    for line in INPUT:
        Lineno = Lineno + 1
        line = re.sub(":", " :", line)
        line = line.strip()
        if line == "" or line[0] == '#':
            continue
        cmd = cmd + " " + line
        if ';' in cmd:
            #
            # We assume that a ';' indicates an end-of-command declaration.
            # However, the user might have put multiple commands on a single
            # line, so we need to split the line based on these values.
            # BUT, at the end of the line we should see an 'empty' command,
            # which we ignore.
            #
            for item in cmd.split(';'):
                item = item.strip()
                if item != "":
                    _process_data(
                        quote_split("[\t ]+", item), _model, _data, _default,
                        Filename, Lineno)
            cmd = ""
    if cmd != "":
        INPUT.close()
        raise IOError(
            "ERROR: There was unprocessed text at the end of the data file!: "
            "\"" + cmd + "\"")
    INPUT.close()
    return status
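# Illustrative sketch (not part of the original module): X_process_include is
# driven by a command list such as ['include', 'data2.dat'] (the filename here
# is hypothetical).  The named file is read line by line, text is accumulated
# until a terminating ';' is seen, and each complete command is handed to
# _process_data:
#
#   X_process_include(['include', 'data2.dat'], model, data, default)
#
# Note that Filename and Lineno are module-level globals updated as a side
# effect so that downstream error messages can report the file and line
# currently being processed.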
def __init__(self, cmd, stdin=None, stdout=None, stderr=None, env=None,
             bufsize=0, shell=False):
    """
    Setup and launch a subprocess
    """
    self.process = None
    #
    # By default, stderr is mapped to stdout
    #
    #if stderr is None:
    #    stderr=subprocess.STDOUT

    self.stdin = stdin
    if stdin is None:
        stdin_arg = None
    else:
        stdin_arg = subprocess.PIPE
    #
    # We would *really* like to deal with commands in execve form
    #
    if type(cmd) not in (list, tuple):
        cmd = quote_split(cmd.strip())
    #
    # Launch subprocess using a subprocess.Popen object
    #
    if subprocess.mswindows:
        #
        # Launch without console on MSWindows
        #
        startupinfo = subprocess.STARTUPINFO()
        #startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        self.process = subprocess.Popen(
            cmd,
            stdin=stdin_arg,
            stdout=stdout,
            stderr=stderr,
            startupinfo=startupinfo,
            env=env,
            bufsize=bufsize,
            shell=shell)
    elif False:  # subprocess.jython:
        #
        # Launch from Jython
        #
        self.process = subprocess.Popen(
            cmd,
            stdin=stdin_arg,
            stdout=stdout,
            stderr=stderr,
            env=env,
            bufsize=bufsize,
            shell=shell)
    else:
        #
        # Launch on *nix
        #
        self.process = subprocess.Popen(
            cmd,
            stdin=stdin_arg,
            stdout=stdout,
            stderr=stderr,
            preexec_fn=os.setsid,
            env=env,
            bufsize=bufsize,
            shell=shell)
def __init__(self, cmd, stdin=None, stdout=None, stderr=None, env=None,
             bufsize=0, shell=False):
    """
    Setup and launch a subprocess
    """
    self.process = None
    #
    # By default, stderr is mapped to stdout
    #
    #if stderr is None:
    #    stderr=subprocess.STDOUT

    self.stdin = stdin
    if stdin is None:
        stdin_arg = None
    else:
        stdin_arg = subprocess.PIPE
    #
    # We would *really* like to deal with commands in execve form
    #
    if type(cmd) not in (list, tuple):
        cmd = quote_split(cmd.strip())
    #
    # Launch subprocess using a subprocess.Popen object
    #
    if _mswindows:
        #
        # Launch without console on MSWindows
        #
        startupinfo = subprocess.STARTUPINFO()
        #startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        self.process = subprocess.Popen(
            cmd,
            stdin=stdin_arg,
            stdout=stdout,
            stderr=stderr,
            startupinfo=startupinfo,
            env=env,
            bufsize=bufsize,
            shell=shell)
    elif getattr(subprocess, 'jython', False):
        #
        # Launch from Jython
        #
        self.process = subprocess.Popen(
            cmd,
            stdin=stdin_arg,
            stdout=stdout,
            stderr=stderr,
            env=env,
            bufsize=bufsize,
            shell=shell)
    else:
        #
        # Launch on *nix
        #
        self.process = subprocess.Popen(
            cmd,
            stdin=stdin_arg,
            stdout=stdout,
            stderr=stderr,
            preexec_fn=os.setsid,
            env=env,
            bufsize=bufsize,
            shell=shell)
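# Minimal usage sketch (assumptions noted; based only on the constructor
# above): SubprocessMngr wraps subprocess.Popen, splitting a command string
# with quote_split unless a list/tuple is given, and starting the child in its
# own session on *nix via os.setsid.
#
#   import subprocess
#   mngr = SubprocessMngr(['ls', '-l'], stdout=subprocess.PIPE)
#   out, _ = mngr.process.communicate()
#
# The wait()/timelimit behavior used by run_command below is provided by other
# methods of this class that are not shown in this excerpt.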
def run_command(cmd,
                outfile=None,
                cwd=None,
                ostream=None,
                stdin=None,
                stdout=None,
                stderr=None,
                valgrind=False,
                valgrind_log=None,
                valgrind_options=None,
                memmon=False,
                env=None,
                define_signal_handlers=None,
                debug=False,
                verbose=True,
                timelimit=None,
                tee=None,
                ignore_output=False,
                shell=False,
                thread_reader=None):
    #
    # Set the define_signal_handlers based on the global default flag.
    #
    if define_signal_handlers is None:
        define_signal_handlers = GlobalData.DEFINE_SIGNAL_HANDLERS_DEFAULT
    #
    # Move to the specified working directory
    #
    if cwd is not None:
        oldpwd = os.getcwd()
        os.chdir(cwd)

    cmd_type = type(cmd)
    if cmd_type is list:
        # make a private copy of the list
        _cmd = cmd[:]
    elif cmd_type is tuple:
        _cmd = list(cmd)
    else:
        _cmd = quote_split(cmd.strip())
    #
    # Setup memmon
    #
    if memmon:
        memmon = pyutilib.services.registered_executable("memmon")
        if memmon is None:
            raise IOError("Unable to find the 'memmon' executable")
        _cmd.insert(0, memmon.get_path())
    #
    # Setup valgrind
    #
    if valgrind:
        #
        # The valgrind_log option specifies a logfile that is used to store
        # valgrind output.
        #
        valgrind_cmd = pyutilib.services.registered_executable("valgrind")
        if valgrind_cmd is None:
            raise IOError("Unable to find the 'valgrind' executable")
        valgrind_cmd = [valgrind_cmd.get_path()]
        if valgrind_options is None:
            valgrind_cmd.extend(
                ("-v", "--tool=memcheck", "--trace-children=yes"))
        elif type(valgrind_options) in (list, tuple):
            valgrind_cmd.extend(valgrind_options)
        else:
            valgrind_cmd.extend(quote_split(valgrind_options.strip()))
        if valgrind_log is not None:
            valgrind_cmd.append("--log-file-exactly=" + valgrind_log.strip())
        _cmd = valgrind_cmd + _cmd
    #
    # Redirect stdout and stderr
    #
    tmpfile = None
    if ostream is not None:
        stdout_arg = stderr_arg = ostream
        if outfile is not None or stdout is not None or stderr is not None:
            raise ValueError("subprocess.run_command(): ostream, outfile, and "
                             "{stdout, stderr} options are mutually exclusive")
        output = "Output printed to specified ostream"
    elif outfile is not None:
        stdout_arg = stderr_arg = open(outfile, "w")
        if stdout is not None or stderr is not None:
            raise ValueError("subprocess.run_command(): outfile and "
                             "{stdout, stderr} options are mutually exclusive")
        output = "Output printed to file '%s'" % outfile
    elif not (stdout is None and stderr is None):
        stdout_arg = stdout
        stderr_arg = stderr
        output = "Output printed to specified stdout and stderr streams"
    else:
        # Create a temporary file.  The mode is w+, which means that we
        # can read and write.
        # NOTE: the default mode is w+b, but writing to the binary mode
        # seems to cause problems in the _stream_reader function on Python
        # 3.x.
        stdout_arg = stderr_arg = tmpfile = tempfile.TemporaryFile(mode='w+')
        output = ""

    if stdout_arg is stderr_arg:
        try:
            if not tee or (not tee[0] and not tee[1]):
                stderr_arg = STDOUT
        except:
            pass
    #
    # Setup the default environment
    #
    if env is None:
        env = os.environ.copy()
    #
    # Setup signal handler
    #
    if define_signal_handlers:
        handler = verbose_signal_handler if verbose else signal_handler
        if sys.platform[0:3] != "win" and sys.platform[0:4] != 'java':
            GlobalData.original_signal_handlers[signal.SIGHUP] \
                = signal.signal(signal.SIGHUP, handler)
        GlobalData.original_signal_handlers[signal.SIGINT] \
            = signal.signal(signal.SIGINT, handler)
        GlobalData.original_signal_handlers[signal.SIGTERM] \
            = signal.signal(signal.SIGTERM, handler)

    rc = -1
    if debug:
        print("Executing command %s" % (_cmd, ))
    try:
        try:
            simpleCase = not tee
            if stdout_arg is not None:
                stdout_arg.fileno()
            if stderr_arg is not None:
                stderr_arg.fileno()
        except:
            simpleCase = False

        out_th = []
        th = None
        GlobalData.signal_handler_busy = False
        if simpleCase:
            #
            # Redirect IO to the stdout_arg/stderr_arg files
            #
            process = SubprocessMngr(
                _cmd,
                stdin=stdin,
                stdout=stdout_arg,
                stderr=stderr_arg,
                env=env,
                shell=shell)
            GlobalData.current_process = process.process
            rc = process.wait(timelimit)
            GlobalData.current_process = None
        else:
            #
            # Aggressively wait for output from the process, and
            # send this to both the stdout/stderr values, as well
            # as doing a normal 'print'
            #
            out_fd = []
            for fid in (0, 1):
                if fid == 0:
                    s, raw = stdout_arg, sys.stdout
                else:
                    s, raw = stderr_arg, sys.stderr
                try:
                    tee_fid = tee[fid]
                except:
                    tee_fid = tee
                if s is None or s is STDOUT:
                    out_fd.append(s)
                elif not tee_fid:
                    # This catches using StringIO as an output buffer:
                    # Python's subprocess requires the stream objects to
                    # have a "fileno()" attribute, which StringIO does
                    # not have.  We will mock things up by putting a
                    # pipe in between the subprocess and the StringIO
                    # buffer.  <sigh>
                    #
                    #if hasattr(s, 'fileno'):
                    #
                    # Update: in Python 3, StringIO declares a fileno()
                    # method, but that method throws an exception.  So,
                    # we can't just check for the attribute: we *must*
                    # call the method and see if we get an exception.
                    try:
                        s.fileno()
                        out_fd.append(s)
                    except:
                        r, w = os.pipe()
                        out_fd.append(w)
                        out_th.append(((fid, r, s), r, w))
                        #th = Thread(target=thread_reader, args=(r,None,s,fid))
                        #out_th.append((th, r, w))
                else:
                    r, w = os.pipe()
                    out_fd.append(w)
                    out_th.append(((fid, r, raw, s), r, w))
                    #th = Thread( target=thread_reader, args=(r,raw,s,fid) )
                    #out_th.append((th, r, w))
            #
            process = SubprocessMngr(
                _cmd,
                stdin=stdin,
                stdout=out_fd[0],
                stderr=out_fd[1],
                env=env,
                shell=shell)
            GlobalData.current_process = process.process
            GlobalData.signal_handler_busy = False
            #
            # Create a thread to read in stdout and stderr data
            #
            if out_th:
                if thread_reader is not None:
                    reader = thread_reader
                elif len(out_th) == 1:
                    reader = _stream_reader
                elif _peek_available:
                    reader = _merged_reader
                else:
                    reader = _pseudo_merged_reader
                th = Thread(target=reader, args=[x[0] for x in out_th])
                th.daemon = True
                th.start()
            #
            # Wait for process to finish
            #
            rc = process.wait(timelimit)
            GlobalData.current_process = None
            out_fd = None

    except _WindowsError:
        err = sys.exc_info()[1]
        raise ApplicationError(
            "Could not execute the command: '%s'\n\tError message: %s" %
            (' '.join(_cmd), err))
    except OSError:
        #
        # Ignore IOErrors, which are caused by interrupts
        #
        pass
    finally:
        # restore the previous signal handlers, if necessary
        for _sig in list(GlobalData.original_signal_handlers):
            signal.signal(_sig, GlobalData.original_signal_handlers.pop(_sig))
        #
        # Flush stdout/stderr.  Some platforms (notably Matlab, which
        # replaces stdout with a MexPrinter) have stdout/stderr that do not
        # implement flush().  See https://github.com/Pyomo/pyomo/issues/156
        #
        try:
            sys.stdout.flush()
        except AttributeError:
            pass
        try:
            sys.stderr.flush()
        except AttributeError:
            pass

    if out_th:
        #
        # 'Closing' the PIPE to send EOF to the reader.
        #
        for p in out_th:
            os.close(p[2])
        if th is not None:
            # Note, there is a condition where the subprocess can die
            # very quickly (raising an OSError) before the reader
            # threads have a chance to be set up.  Testing for None
            # avoids joining a thread that doesn't exist.
            th.join()
        for p in out_th:
            os.close(p[1])
        if th is not None:
            del th

    if outfile is not None:
        stdout_arg.close()
    elif tmpfile is not None and not ignore_output:
        tmpfile.seek(0)
        output = "".join(tmpfile.readlines())
        tmpfile.close()
    #
    # Move back from the specified working directory
    #
    if cwd is not None:
        os.chdir(oldpwd)
    #
    # Return the output
    #
    return [rc, output]
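# Usage sketch (hedged; only the return convention visible above is assumed):
# run_command returns a [returncode, output] pair, where output is the
# captured text when no explicit stream or file was supplied.
#
#   rc, output = run_command('echo hello', tee=False)
#   if rc != 0:
#       print("command failed:\n" + output)
#
# Passing outfile='log.txt' (a hypothetical filename) would instead write the
# child's stdout/stderr to that file, while tee=True echoes output to the
# console as it is captured.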
def run_command(cmd,
                outfile=None,
                cwd=None,
                ostream=None,
                stdin=None,
                stdout=None,
                stderr=None,
                valgrind=False,
                valgrind_log=None,
                valgrind_options=None,
                memmon=False,
                env=None,
                define_signal_handlers=True,
                debug=False,
                verbose=True,
                timelimit=None,
                tee=None,
                ignore_output=False,
                shell=False,
                thread_reader=None):
    #
    # Move to the specified working directory
    #
    if cwd is not None:
        oldpwd = os.getcwd()
        os.chdir(cwd)

    cmd_type = type(cmd)
    if cmd_type is list:
        # make a private copy of the list
        _cmd = cmd[:]
    elif cmd_type is tuple:
        _cmd = list(cmd)
    else:
        _cmd = quote_split(cmd.strip())
    #
    # Setup memmon
    #
    if memmon:
        memmon = pyutilib.services.registered_executable("memmon")
        if memmon is None:
            raise IOError("Unable to find the 'memmon' executable")
        _cmd.insert(0, memmon.get_path())
    #
    # Setup valgrind
    #
    if valgrind:
        #
        # The valgrind_log option specifies a logfile that is used to store
        # valgrind output.
        #
        valgrind_cmd = pyutilib.services.registered_executable("valgrind")
        if valgrind_cmd is None:
            raise IOError("Unable to find the 'valgrind' executable")
        valgrind_cmd = [valgrind_cmd.get_path()]
        if valgrind_options is None:
            valgrind_cmd.extend(
                ("-v", "--tool=memcheck", "--trace-children=yes"))
        elif type(valgrind_options) in (list, tuple):
            valgrind_cmd.extend(valgrind_options)
        else:
            valgrind_cmd.extend(quote_split(valgrind_options.strip()))
        if valgrind_log is not None:
            valgrind_cmd.append("--log-file-exactly=" + valgrind_log.strip())
        _cmd = valgrind_cmd + _cmd
    #
    # Redirect stdout and stderr
    #
    tmpfile = None
    if ostream is not None:
        stdout_arg = stderr_arg = ostream
        if outfile is not None or stdout is not None or stderr is not None:
            raise ValueError("subprocess.run_command(): ostream, outfile, and "
                             "{stdout, stderr} options are mutually exclusive")
        output = "Output printed to specified ostream"
    elif outfile is not None:
        stdout_arg = stderr_arg = open(outfile, "w")
        if stdout is not None or stderr is not None:
            raise ValueError("subprocess.run_command(): outfile and "
                             "{stdout, stderr} options are mutually exclusive")
        output = "Output printed to file '%s'" % outfile
    elif not (stdout is None and stderr is None):
        stdout_arg = stdout
        stderr_arg = stderr
        output = "Output printed to specified stdout and stderr streams"
    else:
        # Create a temporary file.  The mode is w+, which means that we
        # can read and write.
        # NOTE: the default mode is w+b, but writing to the binary mode
        # seems to cause problems in the _stream_reader function on Python
        # 3.x.
        stdout_arg = stderr_arg = tmpfile = tempfile.TemporaryFile(mode='w+')
        output = ""

    if stdout_arg is stderr_arg:
        try:
            if not tee or (not tee[0] and not tee[1]):
                stderr_arg = STDOUT
        except:
            pass
    #
    # Setup the default environment
    #
    if env is None:
        env = copy.copy(os.environ)
    #
    # Setup signal handler
    #
    if define_signal_handlers:
        if verbose:
            signal.signal(signal.SIGINT, verbose_signal_handler)
            if sys.platform[0:3] != "win" and sys.platform[0:4] != 'java':
                signal.signal(signal.SIGHUP, verbose_signal_handler)
            signal.signal(signal.SIGTERM, verbose_signal_handler)
        else:
            signal.signal(signal.SIGINT, signal_handler)
            if sys.platform[0:3] != "win" and sys.platform[0:4] != 'java':
                signal.signal(signal.SIGHUP, signal_handler)
            signal.signal(signal.SIGTERM, signal_handler)

    rc = -1
    if debug:
        print("Executing command %s" % (_cmd,))
    try:
        try:
            simpleCase = not tee
            if stdout_arg is not None:
                stdout_arg.fileno()
            if stderr_arg is not None:
                stderr_arg.fileno()
        except:
            simpleCase = False

        GlobalData.signal_handler_busy = False
        if simpleCase:
            #
            # Redirect IO to the stdout_arg/stderr_arg files
            #
            process = SubprocessMngr(
                _cmd,
                stdin=stdin,
                stdout=stdout_arg,
                stderr=stderr_arg,
                env=env,
                shell=shell)
            GlobalData.current_process = process.process
            rc = process.wait(timelimit)
            GlobalData.current_process = None
        else:
            #
            # Aggressively wait for output from the process, and
            # send this to both the stdout/stderr values, as well
            # as doing a normal 'print'
            #
            out_fd = []
            out_th = []
            for fid in (0, 1):
                if fid == 0:
                    s, raw = stdout_arg, sys.stdout
                else:
                    s, raw = stderr_arg, sys.stderr
                try:
                    tee_fid = tee[fid]
                except:
                    tee_fid = tee
                if s is None or s is STDOUT:
                    out_fd.append(s)
                elif not tee_fid:
                    # This catches using StringIO as an output buffer:
                    # Python's subprocess requires the stream objects to
                    # have a "fileno()" attribute, which StringIO does
                    # not have.  We will mock things up by putting a
                    # pipe in between the subprocess and the StringIO
                    # buffer.  <sigh>
                    #
                    #if hasattr(s, 'fileno'):
                    #
                    # Update: in Python 3, StringIO declares a fileno()
                    # method, but that method throws an exception.  So,
                    # we can't just check for the attribute: we *must*
                    # call the method and see if we get an exception.
                    try:
                        s.fileno()
                        out_fd.append(s)
                    except:
                        r, w = os.pipe()
                        out_fd.append(w)
                        out_th.append(((fid, r, s), r, w))
                        #th = Thread(target=thread_reader, args=(r,None,s,fid))
                        #out_th.append((th, r, w))
                else:
                    r, w = os.pipe()
                    out_fd.append(w)
                    out_th.append(((fid, r, raw, s), r, w))
                    #th = Thread( target=thread_reader, args=(r,raw,s,fid) )
                    #out_th.append((th, r, w))
            #
            process = SubprocessMngr(
                _cmd,
                stdin=stdin,
                stdout=out_fd[0],
                stderr=out_fd[1],
                env=env,
                shell=shell)
            GlobalData.current_process = process.process
            GlobalData.signal_handler_busy = False
            #
            # Create a thread to read in stdout and stderr data
            #
            if out_th:
                if thread_reader is not None:
                    reader = thread_reader
                elif len(out_th) == 1:
                    reader = _stream_reader
                elif _peek_available:
                    reader = _merged_reader
                else:
                    reader = _pseudo_merged_reader
                th = Thread(target=reader, args=[x[0] for x in out_th])
                th.daemon = True
                th.start()
                #for th in out_th:
                #    th[0].daemon = True
                #    th[0].start()
            #
            # Wait for process to finish
            #
            rc = process.wait(timelimit)
            GlobalData.current_process = None
            out_fd = None
            #
            # 'Closing' the PIPE to send EOF to the reader.
            #
            if out_th:
                for p in out_th:
                    os.close(p[2])
                th.join()
                for p in out_th:
                    os.close(p[1])
                del th
            #for th in reversed(out_th):
            #    os.close(th[2])
            #
            #    #
            #    # Wait for readers to finish up with the data in the pipe.
            #    #
            #    th[0].join()
            #    os.close(th[1])
            #    thread = th[0]
            #    del thread

    except _WindowsError:
        err = sys.exc_info()[1]
        raise ApplicationError(
            "Could not execute the command: '%s'\n\tError message: %s" %
            (' '.join(_cmd), err))
    except OSError:
        #
        # Ignore IOErrors, which are caused by interrupts
        #
        pass

    if outfile is not None:
        stdout_arg.close()
    elif tmpfile is not None and not ignore_output:
        tmpfile.seek(0)
        output = "".join(tmpfile.readlines())
        tmpfile.close()
    sys.stdout.flush()
    sys.stderr.flush()
    #
    # Move back from the specified working directory
    #
    if cwd is not None:
        os.chdir(oldpwd)
    #
    # Return the output
    #
    return [rc, output]
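# Sketch of the StringIO path discussed in the comments above (an assumed
# usage, with io.StringIO and tee disabled).  Because StringIO has no usable
# fileno(), the code inserts an os.pipe() plus a reader thread between the
# child process and the caller's buffer:
#
#   import io
#   buf = io.StringIO()
#   rc, _ = run_command('echo hello', stdout=buf, stderr=buf, tee=False)
#   text = buf.getvalue()
#
# In this case the returned output string is only a placeholder message,
# since the real output was delivered to the caller-supplied stream.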