def _format_ranges(cpus, sep):
    """Collapse a sorted list of ints into a range string, e.g. [0,1,3] -> '0-1,3'.

    Consecutive runs are rendered as 'start-end'; isolated values stand alone.
    Returns "" for an empty list.
    """
    if not cpus:
        return ""
    parts = []
    start = prev = cpus[0]
    for cpu in cpus[1:]:
        if cpu == prev + 1:
            # Still inside a consecutive run.
            prev = cpu
            continue
        parts.append(str(start) if start == prev else "{}-{}".format(start, prev))
        start = prev = cpu
    parts.append(str(start) if start == prev else "{}-{}".format(start, prev))
    return sep.join(parts)


def main():
    """List CPU ids parsed from `lscpu -p`, interleaving NUMA nodes 0 and 1.

    Output is a compressed range string by default, one id per separator with
    --each, or just the count with --total. Returns 0 on success.
    """
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--no-ht", default=False, action="store_true",
                        help="do not use Hyper Threading")
    parser.add_argument("--N0-only", default=False, action="store_true",
                        help="operate within single node")
    parser.add_argument("--each", default=False, action="store_true",
                        help="print each number without ranges")
    parser.add_argument("--total", default=False, action="store_true",
                        help="print total number of selected cpus")
    parser.add_argument("--sep", "-s", default=",", help="output separator")
    parser.add_argument(
        "threads",
        default="all",
        help='number of threads to list. Accepts "all", "half", "/{number}", "{number}" ',
    )
    args = parser.parse_args()

    # Strip lscpu's comment lines but keep the "# CPU" header so pandas can
    # read the remainder as CSV.
    with tempfile.SpooledTemporaryFile(mode="w+") as tmp, Popen(
            "lscpu -p", shell=True, stdout=PIPE) as pipe:
        for line in iter(pipe.stdout.readline, b""):
            decoded_line = line.decode()
            if decoded_line.startswith("#") and not decoded_line.startswith("# CPU"):
                continue
            tmp.write(decoded_line)
        tmp.seek(0)
        df = pd.read_csv(tmp)

    if args.no_ht:
        # Keep one (lowest-numbered) logical CPU per physical core.
        df = df.groupby("Core").min()
    nd0 = df[df["Node"] == 0]["# CPU"].values
    if args.N0_only:
        cpus = list(nd0)
    else:
        # Interleave node 0 and node 1 ids: n0, n1, n0, n1, ...
        # NOTE: zip() truncates to the shorter node's CPU list.
        nd1 = df[df["Node"] == 1]["# CPU"].values
        cpus = [v for p in zip(nd0, nd1) for v in p]

    n = len(cpus)
    if args.threads == "all":
        pass
    elif args.threads == "half":
        n = int(n / 2)
    elif args.threads.startswith("/"):
        n = int(n / int(args.threads[1:]))
    else:
        n = min(int(args.threads), n)
    cpus = cpus[0:n]

    if args.total:
        print(len(cpus))
    elif args.each:
        print(*cpus, sep=args.sep)
    else:
        # BUGFIX: the previous inline range printer merged a completed run
        # into a non-adjacent final element (e.g. [0,1,3] printed as "0-3").
        cpus.sort()
        print(_format_ranges(cpus, args.sep))
    return 0
def test_written_spooled_temp(self):
    """A Django File wrapping a written SpooledTemporaryFile reports its size."""
    payload = b"foo bar baz quux\n"
    with tempfile.SpooledTemporaryFile(max_size=4) as temp:
        temp.write(payload)
        wrapped = File(temp, name="something.txt")
        self.assertEqual(wrapped.size, 17)
break except Exception, e: self._logger.debug(e) continue self.speaker.play(dingdangpath.data('audio', 'beep_lo.wav')) # save the audio data try: stream.stop_stream() stream.close() except Exception, e: self._logger.debug(e) pass with tempfile.SpooledTemporaryFile(mode='w+b') as f: wav_fp = wave.open(f, 'wb') wav_fp.setnchannels(1) wav_fp.setsampwidth(pyaudio.get_sample_size(pyaudio.paInt16)) wav_fp.setframerate(RATE) wav_fp.writeframes(''.join(frames)) wav_fp.close() f.seek(0) frames = [] return self.active_stt_engine.transcribe(f) def say(self, phrase, OPTIONS=" -vdefault+m3 -p 40 -s 160 --stdout > say.wav"): self._logger.info(u"机器人说:%s" % phrase) if self.wxbot is not None:
def get_next(self):
    """Start queued tests up to the concurrency limit, then block until one finishes.

    Returns a (TestResult, testdir, stdout, stderr) tuple for the first job
    that completes. Raises IndexError when nothing is queued or running.
    """
    while self.num_running < self.num_jobs and self.test_list:
        # Add tests
        self.num_running += 1
        test = self.test_list.pop(0)
        portseed = len(self.test_list)
        portseed_arg = ["--portseed={}".format(portseed)]
        # Spool child output in memory (up to 64 KiB) before spilling to disk.
        log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
        log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
        test_argv = test.split()
        testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
        tmpdir_arg = ["--tmpdir={}".format(testdir)]
        self.jobs.append((test,
                          time.time(),
                          subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
                                           universal_newlines=True,
                                           stdout=log_stdout,
                                           stderr=log_stderr),
                          testdir,
                          log_stdout,
                          log_stderr))
    if not self.jobs:
        raise IndexError('pop from empty list')

    # Print remaining running jobs when all jobs have been started.
    if not self.test_list:
        print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))

    dot_count = 0
    while True:
        # Return first proc that finishes
        time.sleep(.5)
        for job in self.jobs:
            (name, start_time, proc, testdir, log_out, log_err) = job
            if proc.poll() is not None:
                log_out.seek(0), log_err.seek(0)
                [stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
                log_out.close(), log_err.close()
                # A passing test must also be silent on stderr.
                if proc.returncode == TEST_EXIT_PASSED and stderr == "":
                    status = "Passed"
                elif proc.returncode == TEST_EXIT_SKIPPED:
                    status = "Skipped"
                else:
                    status = "Failed"
                self.num_running -= 1
                self.jobs.remove(job)
                if self.use_term_control:
                    # Erase the progress dots before returning.
                    clearline = '\r' + (' ' * dot_count) + '\r'
                    print(clearline, end='', flush=True)
                dot_count = 0
                return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
        if self.use_term_control:
            print('.', end='', flush=True)
        dot_count += 1
def _build_offer(self):
    """Build the wormhole offer dict and the file object to transmit.

    Returns (offer, fd_to_send): fd_to_send is None for a text message, an
    open file handle for a single file, or a spooled zipfile for a directory.
    Raises TransferError when the named path does not exist, TypeError when
    it is neither file nor directory.
    """
    offer = {}
    args = self._args
    text = args.text
    if text == "-":
        print(u"Reading text message from stdin..", file=args.stdout)
        text = sys.stdin.read()
    if not text and not args.what:
        text = six.moves.input("Text to send: ")
    if text is not None:
        print(u"Sending text message (%s)" % naturalsize(len(text)), file=args.stdout)
        offer = {"message": text}
        fd_to_send = None
        return offer, fd_to_send

    what = os.path.join(args.cwd, args.what)
    what = what.rstrip(os.sep)
    if not os.path.exists(what):
        raise TransferError("Cannot send: no file/directory named '%s'" % args.what)
    basename = os.path.basename(what)

    if os.path.isfile(what):
        # we're sending a file
        filesize = os.stat(what).st_size
        offer["file"] = {
            "filename": basename,
            "filesize": filesize,
        }
        print(u"Sending %s file named '%s'" % (naturalsize(filesize), basename), file=args.stdout)
        fd_to_send = open(what, "rb")
        return offer, fd_to_send

    if os.path.isdir(what):
        print(u"Building zipfile..", file=args.stdout)
        # We're sending a directory. Create a zipfile in a tempdir and
        # send that.
        fd_to_send = tempfile.SpooledTemporaryFile()
        num_files = 0
        num_bytes = 0
        tostrip = len(what.split(os.sep))
        with zipfile.ZipFile(fd_to_send, "w", zipfile.ZIP_DEFLATED) as zf:
            for path, dirs, files in os.walk(what):
                # path always starts with args.what, then sometimes might
                # have "/subdir" appended. We want the zipfile to contain
                # "" or "subdir"
                localpath = list(path.split(os.sep)[tostrip:])
                for fn in files:
                    archivename = os.path.join(*tuple(localpath + [fn]))
                    localfilename = os.path.join(path, fn)
                    zf.write(localfilename, archivename)
                    num_bytes += os.stat(localfilename).st_size
                    num_files += 1
        # Measure the compressed size by seeking to the end, then rewind so
        # the transfer starts from byte 0.
        fd_to_send.seek(0, 2)
        filesize = fd_to_send.tell()
        fd_to_send.seek(0, 0)
        offer["directory"] = {
            "mode": "zipfile/deflated",
            "dirname": basename,
            "zipsize": filesize,
            "numbytes": num_bytes,
            "numfiles": num_files,
        }
        print(u"Sending directory (%s compressed) named '%s'" % (naturalsize(filesize), basename), file=args.stdout)
        return offer, fd_to_send

    raise TypeError("'%s' is neither file nor directory" % args.what)
def do_create(self, max_size=0, dir=None, pre="", suf=""):
    """Create and return a SpooledTemporaryFile with the given spool size,
    directory, prefix and suffix. Defaults to the system temp directory."""
    target_dir = tempfile.gettempdir() if dir is None else dir
    return tempfile.SpooledTemporaryFile(max_size=max_size, dir=target_dir,
                                         prefix=pre, suffix=suf)
def test_spooled_temporary_file():
    # NOTE(review): pylint functional test — the trailing bracketed marker
    # names the message this exact line must trigger; keep code and marker
    # on one line so the expected-output line numbers stay valid.
    _ = tempfile.SpooledTemporaryFile("r")  # [consider-using-with]
def checkDists(self, tabIndex):
    """Validate distribution-table inputs when the user leaves tab 0.

    Checks that at least one input is variable and all distribution fields
    are valid; for non-uniform/non-sample distributions it drives psuade's
    cdf_lookup over stdin to compute the probability mass between min and
    max, warning the user (with a Yes/No dialog) when that mass is < 0.5.
    """
    if tabIndex == 0 or self.ignoreDistributionCheck:
        self.ignoreDistributionCheck = False
        return

    showMessage = False
    if self.distTable.getNumVariables() == 0:
        showMessage = True
        message = 'All inputs are fixed! One needs to be variable.'
    else:
        valid, error = self.distTable.checkValidInputs()
        if not valid:
            showMessage = True
            message = 'Distribution settings not correct or entirely filled out! %s' % error
        else:
            rowsToWarnAboutMass = []
            for row in range(self.distTable.rowCount()):
                # Columns 3 and 4 hold the min and max values for the row.
                for col in [3, 4]:
                    item = self.distTable.item(row, col)
                    if col == 3:
                        minVal = float(item.text())
                    else:
                        maxVal = float(item.text())
                #### Get distribution parameters
                for col in [6, 7]:
                    cellTable = self.distTable.cellWidget(row, col)
                    if isinstance(cellTable, QComboBox):
                        continue
                    item = None
                    if cellTable is not None:
                        item = cellTable.item(0, 1)
                    if item is not None and item.text():
                        if col == 6:
                            distParam1 = float(item.text())
                        else:
                            distParam2 = float(item.text())
                    else:
                        if col == 6:
                            distParam1 = None
                        else:
                            distParam2 = None
                #### Check mass and warn if below 50%
                # Only collect those that are not fixed to generate inputs
                combobox = self.distTable.cellWidget(row, 5)
                dist = combobox.currentIndex()
                # Create file for psuade input
                if dist not in [Distribution.UNIFORM, Distribution.SAMPLE]:
                    f = tempfile.SpooledTemporaryFile()
                    # Two lookups: one at the min value, one at the max.
                    for i in range(2):
                        f.write(b'cdf_lookup\n')
                        distNum = dist
                        if dist == Distribution.BETA:
                            distNum = 4
                        elif dist == Distribution.WEIBULL:
                            distNum = 5
                        elif dist == Distribution.GAMMA:
                            distNum = 6
                        elif dist == Distribution.EXPONENTIAL:
                            distNum = 7
                        f.write(b'%d\n' % distNum)  # Number of distribution
                        # NOTE(review): distParam1 may be None here, which
                        # would raise TypeError in %-formatting — confirm the
                        # table guarantees a first parameter for these dists.
                        f.write(b'%f\n' % distParam1)  # Parameter 1
                        if distParam2 is not None:
                            f.write(b'%f\n' % distParam2)  # Parameter 2
                        if i == 0:
                            val = minVal
                        else:
                            val = maxVal
                        f.write(b'%f\n' % val)  # Min or max value
                    f.write(b'quit\n')
                    f.seek(0)
                    # invoke psuade
                    psuadePath = LocalExecutionModule.getPsuadePath()
                    if psuadePath is None:
                        return
                    p = subprocess.Popen(psuadePath, stdin=f,
                                         stdout=subprocess.PIPE,
                                         stderr=subprocess.PIPE, shell=True)
                    f.close()
                    # process error
                    out, error = p.communicate()
                    if error:
                        Common.showError(error, out)
                        return None
                    # parse output
                    lines = out.splitlines()
                    vals = []
                    for line in lines:
                        if 'Cumulative probability = ' in line.decode('utf-8'):
                            words = line.split()
                            vals.append(float(words[-1]))
                    # Mass between min and max is CDF(max) - CDF(min).
                    mass = vals[1] - vals[0]
                    if mass < 0.5:
                        rowsToWarnAboutMass.append(row)
            if len(rowsToWarnAboutMass) > 0:
                self.samplingTabs.setCurrentIndex(0)
                for row in rowsToWarnAboutMass:
                    msgbox = QMessageBox()
                    msgbox.setWindowTitle('UQ/Opt GUI Warning')
                    msgbox.setText('Regarding input ' + self.model.getInputNames()[row] + \
                                   ': Min/max range is narrow for its distribution. ' + \
                                   'This could cause sample generation to take more time. Continue?')
                    msgbox.setIcon(QMessageBox.Warning)
                    msgbox.setStandardButtons(QMessageBox.Yes | QMessageBox.No)
                    msgbox.setDefaultButton(QMessageBox.Yes)
                    ret = msgbox.exec_()
                    if ret != QMessageBox.Yes:
                        # User declined: go back to the offending row.
                        self.distTable.selectRow(row)
                        return
                self.ignoreDistributionCheck = True
                self.samplingTabs.setCurrentIndex(1)

    if showMessage:
        self.samplingTabs.setCurrentIndex(0)
        msgbox = QMessageBox()
        msgbox.setWindowTitle('UQ/Opt GUI Warning')
        msgbox.setText(message)
        msgbox.setIcon(QMessageBox.Warning)
        msgbox.exec_()
        return
def GetTempFileQuick():
    """Return a temp file that spools up to 4 MiB in memory before using disk."""
    spool_limit = 1024 * 1024 * 4
    return tempfile.SpooledTemporaryFile(max_size=spool_limit)
def __init__(self, init_data, opts=dict()):
    """Initialize the movie wrapper from ``init_data`` (must carry 'yid'),
    applying any recognized overrides from ``opts``."""
    yid = init_data['yid']  # required key
    if self.rickastley:
        yid = "dQw4w9WgXcQ"  # trolololo
    if not isinstance(yid, str) or len(yid) != 11:
        raise ValueError("yid expected to be valid Youtube movie identifier")  # FIXME

    self.data = tempfile.SpooledTemporaryFile()
    self.fds = set()
    self.closing = False
    self.lock = Lock()  # lock to prevent threads from colliding

    self.avail = range_t()
    self.safe_range = range_t()
    self.processing_range = range_t()

    self.filesize = 4096
    self.disk = 0
    self.extension = ".mp4"  # FIXME

    self.atime = int(time())
    try:
        # convert from iso 8601
        self.ctime = timegm(datetime.strptime(init_data['pub_date'],
                                              "%Y-%m-%dT%H:%M:%S.%fZ").timetuple())
    except KeyError:
        self.ctime = self.atime

    self.r_session = requests.Session()
    self.yid = yid

    # Copy the class-level preferences so other instances stay unaffected,
    # then fold in any overrides supplied by the caller.
    _pref = deepcopy(self.preferences)
    for option in ('audio', 'video', 'format', 'stream', 'get_info_on_init'):
        try:
            _pref[option] = opts[option]
        except KeyError:
            pass
    self.preferences = _pref

    self.ytdl = youtube_dl.YoutubeDL({
        "quiet": True,
        "format": "bestvideo+bestaudio"
    })
    self.ytdl.add_info_extractor(self.ytdl.get_info_extractor("Youtube"))
def _create_temp_file(cls): return tempfile.SpooledTemporaryFile(max_size=512*1024)
def run_script(
        self,
        script,
        arg_str,
        catch_stderr=False,
        with_retcode=False,
        catch_timeout=False,
        # FIXME A timeout of zero or disabling timeouts may not return results!
        timeout=15,
        raw=False,
        popen_kwargs=None,
        log_output=None,
        config_dir=None,
        **kwargs):
    """
    Execute a script with the given argument string

    The ``log_output`` argument is ternary, it can be True, False, or None.
    If the value is boolean, then it forces the results to either be logged
    or not logged. If it is None, then the return code of the subprocess
    determines whether or not to log results.
    """
    import salt.utils.platform

    script_path = self.get_script_path(script)
    if not os.path.isfile(script_path):
        return False
    popen_kwargs = popen_kwargs or {}

    # Put CODE_DIR first on PYTHONPATH so the checkout under test wins.
    python_path_env_var = os.environ.get("PYTHONPATH") or None
    if python_path_env_var is None:
        python_path_entries = [RUNTIME_VARS.CODE_DIR]
    else:
        python_path_entries = python_path_env_var.split(os.pathsep)
        if RUNTIME_VARS.CODE_DIR in python_path_entries:
            python_path_entries.remove(RUNTIME_VARS.CODE_DIR)
        python_path_entries.insert(0, RUNTIME_VARS.CODE_DIR)
    python_path_entries.extend(sys.path[0:])

    if "env" not in popen_kwargs:
        popen_kwargs["env"] = os.environ.copy()
    popen_kwargs["env"]["PYTHONPATH"] = os.pathsep.join(python_path_entries)

    if "cwd" not in popen_kwargs:
        popen_kwargs["cwd"] = RUNTIME_VARS.TMP

    if salt.utils.platform.is_windows():
        cmd = "python "
    else:
        cmd = "python{}.{} ".format(*sys.version_info)
    cmd += "{} --config-dir={} {} ".format(
        script_path, config_dir or RUNTIME_VARS.TMP_CONF_DIR, arg_str)
    if kwargs:
        # late import
        import salt.utils.json

        for key, value in kwargs.items():
            cmd += "'{}={} '".format(key, salt.utils.json.dumps(value))

    # Capture stdout in a spooled temp file rather than a PIPE to avoid
    # blocking on full OS pipe buffers.
    tmp_file = tempfile.SpooledTemporaryFile()

    popen_kwargs = dict(
        {
            "shell": True,
            "stdout": tmp_file,
            "universal_newlines": True
        }, **popen_kwargs)

    if catch_stderr is True:
        popen_kwargs["stderr"] = subprocess.PIPE

    if salt.utils.platform.is_windows():
        # Windows does not support closing FDs
        close_fds = False
    elif salt.utils.platform.is_freebsd() and sys.version_info < (3, 9):
        # Closing FDs in FreeBSD before Py3.9 can be slow
        # https://bugs.python.org/issue38061
        close_fds = False
    else:
        close_fds = True

    popen_kwargs["close_fds"] = close_fds

    if not salt.utils.platform.is_windows():

        def detach_from_parent_group():
            # detach from parent group (no more inherited signals!)
            os.setpgrp()

        popen_kwargs["preexec_fn"] = detach_from_parent_group

    def format_return(retcode, stdout, stderr=None, timed_out=False):
        """
        DRY helper to log script result if it failed, and then return the
        desired output based on whether or not stderr was desired, and
        whether or not a retcode was desired.
        """
        log_func = log.debug
        if timed_out:
            log.error(
                "run_script timed out after %d seconds (process killed)",
                timeout)
            log_func = log.error

        if log_output is True or timed_out or (log_output is None and retcode != 0):
            log_func(
                "run_script results for: %s %s\n"
                "return code: %s\n"
                "stdout:\n"
                "%s\n\n"
                "stderr:\n"
                "%s",
                script,
                arg_str,
                retcode,
                stdout,
                stderr,
            )

        stdout = stdout or ""
        stderr = stderr or ""

        if not raw:
            stdout = stdout.splitlines()
            stderr = stderr.splitlines()

        # Assemble the return tuple in the order callers expect:
        # stdout[, stderr][, retcode][, timed_out].
        ret = [stdout]
        if catch_stderr:
            ret.append(stderr)
        if with_retcode:
            ret.append(retcode)
        if catch_timeout:
            ret.append(timed_out)

        return ret[0] if len(ret) == 1 else tuple(ret)

    log.debug("Running Popen(%r, %r)", cmd, popen_kwargs)
    process = subprocess.Popen(cmd, **popen_kwargs)

    if timeout is not None:
        stop_at = datetime.now() + timedelta(seconds=timeout)

        while True:
            process.poll()
            time.sleep(0.1)
            if datetime.now() <= stop_at:
                # We haven't reached the timeout yet
                if process.returncode is not None:
                    break
            else:
                terminate_process(process.pid, kill_children=True)
                return format_return(process.returncode,
                                     *process.communicate(),
                                     timed_out=True)

    tmp_file.seek(0)

    try:
        out = tmp_file.read().decode(__salt_system_encoding__)
    except (NameError, UnicodeDecodeError):
        # Let's cross our fingers and hope for the best
        out = tmp_file.read().decode("utf-8")

    if catch_stderr:
        _, err = process.communicate()
        # Force closing stderr/stdout to release file descriptors
        if process.stdout is not None:
            process.stdout.close()
        if process.stderr is not None:
            process.stderr.close()
        # pylint: disable=maybe-no-member
        try:
            return format_return(process.returncode, out, err or "")
        finally:
            try:
                if os.path.exists(tmp_file.name):
                    if isinstance(tmp_file.name, str):
                        # tmp_file.name is an int when using SpooledTemporaryFiles
                        # int types cannot be used with os.remove() in Python 3
                        os.remove(tmp_file.name)
                    else:
                        # Clean up file handles
                        tmp_file.close()
                process.terminate()
            except OSError as err:
                # process already terminated
                pass
        # pylint: enable=maybe-no-member

    # TODO Remove this?
    process.communicate()
    if process.stdout is not None:
        process.stdout.close()

    try:
        return format_return(process.returncode, out)
    finally:
        try:
            if os.path.exists(tmp_file.name):
                if isinstance(tmp_file.name, str):
                    # tmp_file.name is an int when using SpooledTemporaryFiles
                    # int types cannot be used with os.remove() in Python 3
                    os.remove(tmp_file.name)
                else:
                    # Clean up file handles
                    tmp_file.close()
            process.terminate()
        except OSError as err:
            # process already terminated
            pass
def getFile(self, site, inner_path, file_size=None, pos_from=0, pos_to=None, streaming=False):
    """Download a file (or byte range) from the peer.

    Requests the content in chunks via "streamFile" or "getFile" commands,
    accumulating into a spooled temp file or BytesIO buffer depending on
    config. Returns the rewound buffer on success, False on error.
    """
    # Larger files get a larger per-request chunk size.
    if file_size and file_size > 5 * 1024 * 1024:
        max_read_size = 1024 * 1024
    else:
        max_read_size = 512 * 1024

    if pos_to:
        read_bytes = min(max_read_size, pos_to - pos_from)
    else:
        read_bytes = max_read_size

    location = pos_from

    if config.use_tempfiles:
        buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b')
    else:
        buff = io.BytesIO()

    s = time.time()
    while True:  # Read in smaller parts
        if config.stream_downloads or read_bytes > 256 * 1024 or streaming:
            # Stream the response body directly into the buffer.
            res = self.request("streamFile", {
                "site": site,
                "inner_path": inner_path,
                "location": location,
                "read_bytes": read_bytes,
                "file_size": file_size
            }, stream_to=buff)
            if not res or "location" not in res:  # Error
                return False
        else:
            self.log("Send: %s" % inner_path)
            res = self.request(
                "getFile", {
                    "site": site,
                    "inner_path": inner_path,
                    "location": location,
                    "read_bytes": read_bytes,
                    "file_size": file_size
                })
            if not res or "location" not in res:  # Error
                return False
            self.log("Recv: %s" % inner_path)
            buff.write(res["body"])
            res["body"] = None  # Save memory
        if res["location"] == res["size"] or res["location"] == pos_to:  # End of file
            break
        else:
            location = res["location"]
            if pos_to:
                read_bytes = min(max_read_size, pos_to - location)

    if pos_to:
        recv = pos_to - pos_from
    else:
        recv = res["location"]

    # Update transfer statistics for this peer and its site.
    self.download_bytes += recv
    self.download_time += (time.time() - s)
    if self.site:
        self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + recv
    self.log("Downloaded: %s, pos: %s, read_bytes: %s" % (inner_path, buff.tell(), read_bytes))
    buff.seek(0)
    return buff
def read_file(self, filepath):
    """Read the specified file over SFTP and return its handle."""
    # Spool up to 10 MiB in memory before falling back to TMP_DIR on disk.
    handle = tempfile.SpooledTemporaryFile(
        max_size=10 * 1024 * 1024,
        dir=dbbackup_settings.TMP_DIR,
    )
    self.sftp.getfo(filepath, handle)
    return handle
def activeListenToAllOptions(self, THRESHOLD=None, LISTEN=True, MUSIC=False):
    """
    Records until a second of silence or times out after 12 seconds

    Returns a list of the matching options or None
    """
    RATE = 16000
    CHUNK = 1024
    LISTEN_TIME = 12

    # check if no threshold provided
    if THRESHOLD is None:
        THRESHOLD = self.fetchThreshold()

    self.speaker.play(jasperpath.data('audio', 'beep_hi.wav'))

    # prepare recording stream
    stream = self._audio.open(format=pyaudio.paInt16,
                              channels=1,
                              rate=RATE,
                              input=True,
                              frames_per_buffer=CHUNK)

    frames = []
    # increasing the range
    # results in longer pause after command
    # generation
    lastN = [THRESHOLD * 1.2 for i in range(30)]

    # NOTE(review): RATE / CHUNK relies on Python 2 integer division; under
    # Python 3 this produces a float and range() would raise — confirm the
    # target interpreter.
    for i in range(0, RATE / CHUNK * LISTEN_TIME):
        data = stream.read(CHUNK)
        frames.append(data)
        score = self.getScore(data)

        # Rolling window of recent loudness scores.
        lastN.pop(0)
        lastN.append(score)

        average = sum(lastN) / float(len(lastN))

        # TODO: 0.8 should not be a MAGIC NUMBER!
        if average < THRESHOLD * 0.8:
            break

    self.speaker.play(jasperpath.data('audio', 'beep_lo.wav'))

    # save the audio data
    stream.stop_stream()
    stream.close()

    # Wrap the raw frames in a WAV container and hand it to the STT engine.
    with tempfile.SpooledTemporaryFile(mode='w+b') as f:
        wav_fp = wave.open(f, 'wb')
        wav_fp.setnchannels(1)
        wav_fp.setsampwidth(pyaudio.get_sample_size(pyaudio.paInt16))
        wav_fp.setframerate(RATE)
        wav_fp.writeframes(''.join(frames))
        wav_fp.close()
        f.seek(0)
        mode = (TranscriptionMode.MUSIC if MUSIC else TranscriptionMode.NORMAL)
        transcribed = self.active_stt_engine.transcribe(f, mode=mode)
    return transcribed
def run_script(
        self,
        script,
        arg_str,
        catch_stderr=False,
        with_retcode=False,
        # FIXME A timeout of zero or disabling timeouts may not return results!
        timeout=15,
        raw=False):
    '''
    Execute a script with the given argument string
    '''
    script_path = self.get_script_path(script)
    if not os.path.isfile(script_path):
        return False

    # Build a shell command that sets PYTHONPATH inline before invoking
    # the script with the interpreter matching the running Python.
    python_path = os.environ.get('PYTHONPATH', None)

    if sys.platform.startswith('win'):
        cmd = 'set PYTHONPATH='
    else:
        cmd = 'PYTHONPATH='

    if python_path is not None:
        cmd += '{0}:'.format(python_path)

    if sys.version_info[0] < 3:
        cmd += '{0} '.format(':'.join(sys.path[1:]))
    else:
        cmd += '{0} '.format(':'.join(sys.path[0:]))

    cmd += 'python{0}.{1} '.format(*sys.version_info)
    cmd += '{0} '.format(script_path)
    cmd += '{0} '.format(arg_str)

    # Capture stdout in a spooled temp file instead of a PIPE to avoid
    # blocking on full OS pipe buffers.
    tmp_file = tempfile.SpooledTemporaryFile()

    popen_kwargs = {
        'shell': True,
        'stdout': tmp_file,
        'universal_newlines': True
    }

    if catch_stderr is True:
        popen_kwargs['stderr'] = subprocess.PIPE

    if not sys.platform.lower().startswith('win'):
        popen_kwargs['close_fds'] = True

        def detach_from_parent_group():
            # detach from parent group (no more inherited signals!)
            os.setpgrp()

        popen_kwargs['preexec_fn'] = detach_from_parent_group

    process = subprocess.Popen(cmd, **popen_kwargs)

    if timeout is not None:
        stop_at = datetime.now() + timedelta(seconds=timeout)
        term_sent = False
        while True:
            process.poll()
            time.sleep(0.1)
            if datetime.now() > stop_at:
                if term_sent is False:
                    # Kill the process group since sending the term signal
                    # would only terminate the shell, not the command
                    # executed in the shell
                    if salt.utils.is_windows():
                        _, alive = win32_kill_process_tree(process.pid)
                        if alive:
                            log.error("Child processes still alive: %s", alive)
                    else:
                        os.killpg(os.getpgid(process.pid), signal.SIGINT)
                    term_sent = True
                    continue

                try:
                    # As a last resort, kill the process group
                    if salt.utils.is_windows():
                        _, alive = win32_kill_process_tree(process.pid)
                        if alive:
                            log.error("Child processes still alive: %s", alive)
                    else:
                        os.killpg(os.getpgid(process.pid), signal.SIGINT)
                except OSError as exc:
                    if exc.errno != errno.ESRCH:
                        # If errno is not "no such process", raise
                        raise

                out = [
                    'Process took more than {0} seconds to complete. '
                    'Process Killed!'.format(timeout)
                ]
                if catch_stderr:
                    err = ['Process killed, unable to catch stderr output']
                    if with_retcode:
                        return out, err, process.returncode
                    else:
                        return out, err
                if with_retcode:
                    return out, process.returncode
                else:
                    return out

            if process.returncode is not None:
                break

    tmp_file.seek(0)

    if sys.version_info >= (3,):
        try:
            out = tmp_file.read().decode(__salt_system_encoding__)
        except (NameError, UnicodeDecodeError):
            # Let's cross our fingers and hope for the best
            out = tmp_file.read().decode('utf-8')
    else:
        out = tmp_file.read()

    if catch_stderr:
        if sys.version_info < (2, 7):
            # On python 2.6, the subprocess'es communicate() method uses
            # select which, is limited by the OS to 1024 file descriptors
            # We need more available descriptors to run the tests which
            # need the stderr output.
            # So instead of .communicate() we wait for the process to
            # finish, but, as the python docs state "This will deadlock
            # when using stdout=PIPE and/or stderr=PIPE and the child
            # process generates enough output to a pipe such that it
            # blocks waiting for the OS pipe buffer to accept more data.
            # Use communicate() to avoid that." <- a catch, catch situation
            #
            # Use this work around were it's needed only, python 2.6
            process.wait()
            err = process.stderr.read()
        else:
            _, err = process.communicate()
        # Force closing stderr/stdout to release file descriptors
        if process.stdout is not None:
            process.stdout.close()
        if process.stderr is not None:
            process.stderr.close()
        # pylint: disable=maybe-no-member
        try:
            if with_retcode:
                if out is not None and err is not None:
                    if not raw:
                        return out.splitlines(), err.splitlines(), process.returncode
                    else:
                        return out, err, process.returncode
                return out.splitlines(), [], process.returncode
            else:
                if out is not None and err is not None:
                    if not raw:
                        return out.splitlines(), err.splitlines()
                    else:
                        return out, err
                if not raw:
                    return out.splitlines(), []
                else:
                    return out, []
        finally:
            try:
                if os.path.exists(tmp_file.name):
                    if isinstance(tmp_file.name, str):
                        # tmp_file.name is an int when using SpooledTemporaryFiles
                        # int types cannot be used with os.remove() in Python 3
                        os.remove(tmp_file.name)
                    else:
                        # Clean up file handles
                        tmp_file.close()
                process.terminate()
            except OSError as err:
                # process already terminated
                pass
        # pylint: enable=maybe-no-member

    # TODO Remove this?
    process.communicate()
    if process.stdout is not None:
        process.stdout.close()

    try:
        if with_retcode:
            if not raw:
                return out.splitlines(), process.returncode
            else:
                return out, process.returncode
        else:
            if not raw:
                return out.splitlines()
            else:
                return out
    finally:
        try:
            if os.path.exists(tmp_file.name):
                if isinstance(tmp_file.name, str):
                    # tmp_file.name is an int when using SpooledTemporaryFiles
                    # int types cannot be used with os.remove() in Python 3
                    os.remove(tmp_file.name)
                else:
                    # Clean up file handles
                    tmp_file.close()
            process.terminate()
        except OSError as err:
            # process already terminated
            pass
def _save(self, name, content):
    """Buffer ``content`` into a spooled temp file and delegate to the parent
    storage's ``_save``.

    BUGFIX: the temp file's position was left at EOF after writing, so the
    parent storage would read zero bytes; rewind before handing it off.
    """
    content.seek(0)
    with tempfile.SpooledTemporaryFile() as tmp:
        tmp.write(content.read())
        tmp.seek(0)  # rewind so super()._save() reads the full payload
        return super()._save(name, tmp)
def get_next(self):
    """Start queued tests up to the concurrency limit, then block until one finishes.

    Returns a (TestResult, testdir, stdout, stderr) tuple for the first job
    that completes. Raises IndexError when nothing is queued or running.
    """
    while self.num_running < self.num_jobs and self.test_list:
        # Add tests
        self.num_running += 1
        t = self.test_list.pop(0)
        portseed = len(self.test_list) + self.portseed_offset
        portseed_arg = ["--portseed={}".format(portseed)]
        # Spool child output in memory (up to 64 KiB) before spilling to disk.
        log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
        log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
        test_argv = t.split()
        testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
        tmpdir_arg = ["--tmpdir={}".format(testdir)]
        self.jobs.append((t,
                          time.time(),
                          subprocess.Popen([self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
                                           universal_newlines=True,
                                           stdout=log_stdout,
                                           stderr=log_stderr),
                          testdir,
                          log_stdout,
                          log_stderr))
    if not self.jobs:
        raise IndexError('pop from empty list')

    # Print remaining running jobs when all jobs have been started.
    if not self.test_list:
        print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))

    dot_count = 0
    while True:
        # Return first proc that finishes
        time.sleep(.5)
        for j in self.jobs:
            (name, time0, proc, testdir, log_out, log_err) = j
            if os.getenv('TRAVIS') == 'true' and int(time.time() - time0) > 20 * 60:
                # In travis, timeout individual tests after 20 minutes (to stop tests hanging and not
                # providing useful output.
                proc.send_signal(signal.SIGINT)
            if proc.poll() is not None:
                log_out.seek(0), log_err.seek(0)
                [stdout, stderr] = [l.read().decode('utf-8') for l in (log_out, log_err)]
                log_out.close(), log_err.close()
                if proc.returncode == TEST_EXIT_PASSED:
                    status = "Passed"
                elif proc.returncode == TEST_EXIT_SKIPPED:
                    status = "Skipped"
                else:
                    status = "Failed"
                self.num_running -= 1
                self.jobs.remove(j)
                # Erase the progress dots before returning the result.
                clearline = '\r' + (' ' * dot_count) + '\r'
                print(clearline, end='', flush=True)
                dot_count = 0
                return TestResult(name, status, int(time.time() - time0)), testdir, stdout, stderr
        print('.', end='', flush=True)
        dot_count += 1
def install_packages(ip, port):
    """Install test packages on a registered environment node.

    Validates the request (digest, signature, registration, JSON body),
    compresses the requested packages into a spooled temp file, and forwards
    them to the node via a signed PATCH /test_sets request, updating the
    local installed-packages cache on success. Responds 204 on success;
    aborts with 4xx/5xx otherwise.
    """
    check_digest_header()
    check_authorization_header(client_key_recoverer, "Digest")
    check_registered(ip, port)
    check_is_json()
    packages = request.json
    memory_storage = get_memory_storage()
    # Hold a reader lock on the repository while packaging and forwarding.
    with rcl.ReaderLock(memory_storage, "repository", 30, 1):
        with tempfile.SpooledTemporaryFile() as f:
            # Can throw ValueError.
            try:
                test_utils.compress_test_packages(f, packages, current_app.config['TESTS_PATH'])
            except ValueError as e:
                abort(400, description=str(e))
            f.seek(0)
            prepared = rq.Request("PATCH",
                                  f"http://{ip}:{port}/test_sets",
                                  files={'packages': f}).prepare()
            # Sign the prepared body: Digest header first, then an
            # Authorization header covering it.
            digest = b64encode(sha256(prepared.body).digest()).decode()
            prepared.headers['Digest'] = f"sha-256={digest}"
            headers = ['Digest']
            signature = signatures.new_signature(
                current_app.config['NODE_SECRET'],
                "PATCH",
                "/test_sets",
                signature_headers=headers,
                header_recoverer=lambda h: prepared.headers.get(h))
            prepared.headers['Authorization'] = (
                signatures.new_authorization_header("C2", signature, headers))
            environment_key = f"environments:{ip}:{port}"
            # Serialize installs per environment.
            with memory_storage.lock(f"{environment_key}:installed:mutex", timeout=30, sleep=1):
                try:
                    resp = rq.Session().send(prepared)
                except rq.exceptions.ConnectionError:
                    abort(504, description="The requested environment could not be reached")
                if resp.status_code == 204:
                    installed_cached = memory_storage.hget(environment_key, "installed_cached")
                    if installed_cached == "1":
                        # Updates cache if it exists.
                        pipe = memory_storage.pipeline()
                        for pack in packages:
                            pipe.hset(environment_key, f"installed:{pack}",
                                      memory_storage.get(f"repository:{pack}"))
                            pipe.zadd(f"{environment_key}:installed_index", {pack: 0})
                        pipe.execute()
                    return Response(status=204, mimetype="application/json")
    # Node rejected a request we built ourselves: treat as our own fault.
    if resp.status_code in {400, 401, 415}:
        abort(500, description="Something went wrong when handling the request")
    abort(502, description=f"Unexpected response from node at {ip}:{port}")