def init_3d():
    """Initialise 3D plots within the IPython notebook, by injecting the
    required javascript libraries.

    Builds an HTML fragment that loads the multinet.js dependencies and a
    marker variable, then hands it to IPython's ``display``.
    """
    library_javascript = StringIO()
    # The leading/trailing markers are HTML comments: a ``/* ... */`` comment
    # is only valid *inside* a <script> block, and the previous version also
    # appended a stray, unmatched </script> at the end (invalid HTML).
    library_javascript.write("""
<p>Loading javascript for 3D plot in browser</p>
<!-- Beginning of javascript injected by multinet.js -->
<script type="text/javascript" src="multinet/static/js/jquery-2.1.4.js"></script>
<script type="text/javascript" src="multinet/static/js/jquery-ui-1.11.4.js"></script>
<script type="text/javascript" src="multinet/static/js/threejs/three-r71.js"></script>
<script type="text/javascript" src="multinet/static/js/threejs/orbitcontrols.js"></script>
<script type="text/javascript" src="multinet/static/js/threejs/stats-r12.min.js"></script>
<script type="text/javascript" src="multinet/static/js/threejs/detector.js"></script>
<script type="text/javascript" src="multinet/static/js/multinet-core.js"></script>
<script type="text/javascript">
    var multinet_javascript_injected = true;
</script>
""")
    library_javascript.write(
        "<!-- End of javascript injected by multinet.js -->\n")
    display(HTML(library_javascript.getvalue()))
def smart_unquote(s):
    """Return *s* with every double-quoted region unquoted.

    Backslash escapes inside a quoted region are honoured (the escaped
    character is kept literally); text outside quotes is copied through
    unchanged. A string with no quoted regions comes back identical.
    """
    pieces = []
    in_quotes = False
    pending_escape = False
    for ch in s:
        if in_quotes:
            if pending_escape:
                # The previous char was a backslash: keep this char as-is.
                pending_escape = False
            elif ch == '"':
                in_quotes = False
                continue
            elif ch == '\\':
                pending_escape = True
                continue
        elif ch == '"':
            in_quotes = True
            continue
        pieces.append(ch)
    return ''.join(pieces)
class _MockLogger(object):
    """Minimal logger stand-in that accumulates message text in memory.

    Objects passed to :meth:`message` are expected to expose a ``.message``
    string attribute; everything written is retrievable via :meth:`value`.
    """

    def __init__(self):
        # In-memory sink for all logged text.
        self._output = StringIO()

    def message(self, msg):
        self._output.write(msg.message)

    def value(self):
        """Return everything logged so far as one string."""
        return self._output.getvalue()
def test_deqatn_6(self): func_str = 'def f(x, y, z):\n' func_str += ' c = 3\n' func_str += ' return x + y + z + c\n' #func = exec(fnc_str) s = StringIO() s.write(s) s.close() exec (func_str) f(1, 2, 3) #func = exec func_str assert f(1, 2, 3) == 9, func(1, 2, 3)
def _report_testsuite(suite_name, tests, xml_document, parentElement,
                      properties):
    """
    Appends the testsuite section to the XML document.

    Counts failures/errors/skips, attaches per-test testcase elements and
    merges captured stdout/stderr into CDATA sections.
    """
    testsuite = xml_document.createElement('testsuite')
    parentElement.appendChild(testsuite)

    testsuite.setAttribute('name', suite_name)
    testsuite.setAttribute('tests', str(len(tests)))
    testsuite.setAttribute(
        'time', '%.3f' % sum(map(lambda e: e.elapsed_time, tests))
    )
    # Use the _TestInfo class constants for every outcome check so all three
    # counters are consistent (previously failures/errors compared against
    # instance attributes while skips used _TestInfo.SKIP).
    failures = filter(lambda e: e.outcome == _TestInfo.FAILURE, tests)
    testsuite.setAttribute('failures', str(len(list(failures))))
    errors = filter(lambda e: e.outcome == _TestInfo.ERROR, tests)
    testsuite.setAttribute('errors', str(len(list(errors))))
    skips = filter(lambda e: e.outcome == _TestInfo.SKIP, tests)
    testsuite.setAttribute('skipped', str(len(list(skips))))

    _XMLTestResult._report_testsuite_properties(
        testsuite, xml_document, properties)

    for test in tests:
        _XMLTestResult._report_testcase(test, testsuite, xml_document)

    systemout = xml_document.createElement('system-out')
    testsuite.appendChild(systemout)
    stdout = StringIO()
    for test in tests:
        # Merge the stdout from the tests in a class
        if test.stdout is not None:
            stdout.write(test.stdout)
    _XMLTestResult._createCDATAsections(
        xml_document, systemout, stdout.getvalue())

    systemerr = xml_document.createElement('system-err')
    testsuite.appendChild(systemerr)
    stderr = StringIO()
    for test in tests:
        # Merge the stderr from the tests in a class
        if test.stderr is not None:
            stderr.write(test.stderr)
    _XMLTestResult._createCDATAsections(
        xml_document, systemerr, stderr.getvalue())

    return testsuite
def test_throw_exception_if_input_data_invalid(self):
    """4.5.5.3 Throw exception if input data is invalid."""
    fake_stdin = StringIO()
    fake_stdin.write("invalid")
    fake_stdin.seek(0)
    self.patch(sys, "stdin", fake_stdin)
    if sys.version_info.major <= 2:
        # Python 2's json raises a plain ValueError for malformed input.
        with ExpectedException(ValueError):
            load.main()
    elif sys.version_info.major >= 3:
        # Python 3 raises the more specific JSONDecodeError subclass.
        with ExpectedException(json.decoder.JSONDecodeError):
            load.main()
def print_ascii_graph(model_):
    """Render a graphviz layout of *model_* as ASCII art on stdout.

    pip install img2txt.py

    The model is deep-copied so graphviz sizing attributes never leak back
    into the caller's graph.
    """
    import io
    import copy
    from PIL import Image
    model = copy.deepcopy(model_)
    assert model is not model_
    # model.graph.setdefault('graph', {})['size'] = '".4,.4"'
    model.graph.setdefault('graph', {})['size'] = '".3,.3"'
    model.graph.setdefault('graph', {})['height'] = '".3,.3"'
    # NOTE(review): ``netx`` must be importable at module scope; the local
    # ``import networkx as netx`` was commented out here — confirm.
    pydot_graph = netx.to_pydot(model)
    # create_png() returns raw PNG *bytes*; PIL needs a BytesIO buffer
    # (the previous text-mode StringIO fails on Python 3).
    png_str = pydot_graph.create_png(prog='dot')
    sio = io.BytesIO()
    sio.write(png_str)
    sio.seek(0)
    pil_img = Image.open(sio)
    print('pil_img.size = %r' % (pil_img.size,))

    def print_ascii_image(pil_img):
        # https://gist.github.com/cdiener/10491632
        SC = 1.0   # overall scale factor
        GCF = 1.0  # gamma correction factor
        WCF = 1.0  # width correction (terminal cells are ~3x taller than wide)
        img = pil_img
        S = (int(round(img.size[0] * SC * WCF * 3)),
             int(round(img.size[1] * SC)))
        # Collapse RGB to a single intensity channel.
        img = np.sum(np.asarray(img.resize(S)), axis=2)
        print('img.shape = %r' % (img.shape,))
        img -= img.min()
        chars = np.asarray(list(' .,:;irsXA253hMHGS#9B&@'))
        # Map normalised, gamma-corrected intensity onto the character ramp.
        img = (1.0 - img / img.max()) ** GCF * (chars.size - 1)
        print("\n".join(("".join(r) for r in chars[img.astype(int)])))

    print_ascii_image(pil_img)
    pil_img.close()
def load_cookies_file(cookies_file):
    """
    Loads the cookies file.

    We pre-pend the file with the special Netscape header because the cookie
    loader is very particular about this string.

    :returns: a StringIO positioned at the start of the combined content.
    """
    cookies = StringIO()
    # The header must sit on its own line: without the trailing newline the
    # first cookie line was previously glued onto the header.
    cookies.write('# Netscape HTTP Cookie File\n')
    # Context manager closes the file handle (previously leaked); 'rU' mode
    # was also removed in Python 3.11, so plain text mode is used.
    with open(cookies_file) as f:
        cookies.write(f.read())
    cookies.flush()
    cookies.seek(0)
    return cookies
def init_3d():
    """Initialise 3D plots within the IPython notebook, by injecting the
    required javascript libraries.
    """
    injected = StringIO()
    injected.write("""
<script type="text/javascript">
/* Beginning of javascript injected by OpenModes */
var openmodes_javascript_injected = true;
""")
    # Inline the needed parts of three.js plus the custom plotting code, so
    # the notebook page requires no external script requests.
    three_js_libraries = ("three.min.js", "OrbitControls.js", "Lut.js",
                          "Detector.js", "CanvasRenderer.js", "Projector.js")
    for library in three_js_libraries:
        with open(osp.join(three_js_dir, library)) as infile:
            injected.write(infile.read())
    # include my custom javascript inline
    with open(osp.join(static_dir, "three_js_plot.js")) as infile:
        injected.write(infile.read())
    injected.write(
        "/* End of javascript injected by OpenModes */\n</script>\n")
    display(HTML(injected.getvalue()))
    logging.info("Javascript injected for 3D interactive WebGL plots")
def _find_snippet_imports(module_data, module_path, strip_comments):
    """
    Given the source of the module, convert it to a Jinja2 template to insert
    module code and return whether it's a new or old style module.

    :returns: ``(rendered_module_source, module_style)`` where style is one
        of 'old', 'new' or 'non_native_want_json'.
    """
    module_style = 'old'
    if REPLACER in module_data:
        module_style = 'new'
    elif REPLACER_WINDOWS in module_data:
        module_style = 'new'
    elif 'from ansible.module_utils.' in module_data:
        module_style = 'new'
    elif 'WANT_JSON' in module_data:
        module_style = 'non_native_want_json'

    output = StringIO()
    lines = module_data.split('\n')
    snippet_names = []

    for line in lines:
        if REPLACER in line:
            output.write(_slurp(os.path.join(_SNIPPET_PATH, "basic.py")))
            snippet_names.append('basic')
        if REPLACER_WINDOWS in line:
            ps_data = _slurp(os.path.join(_SNIPPET_PATH, "powershell.ps1"))
            output.write(ps_data)
            snippet_names.append('powershell')
        elif line.startswith('from ansible.module_utils.'):
            tokens = line.split('.')
            import_error = False
            if len(tokens) != 3:
                import_error = True
            if ' import *' not in line:
                import_error = True
            if import_error:
                raise AnsibleError(
                    "error importing module in %s, expecting format like "
                    "'from ansible.module_utils.basic import *'" % module_path)
            snippet_name = tokens[2].split()[0]
            snippet_names.append(snippet_name)
            output.write(_slurp(
                os.path.join(_SNIPPET_PATH, snippet_name + ".py")))
        else:
            # Fixed: the old code evaluated the condition and then did
            # nothing (``pass``), so comments and blank lines were written
            # out even when stripping was requested; the precedence also
            # grouped as (strip and comment) or blank.
            if strip_comments and (line.startswith('#') or line == ''):
                continue
            output.write(line)
            output.write('\n')

    if not module_path.endswith('.ps1'):
        # Unixy modules
        if len(snippet_names) > 0 and 'basic' not in snippet_names:
            raise AnsibleError("missing required import in %s: from ansible.module_utils.basic import *" % module_path)
    else:
        # Windows modules
        if len(snippet_names) > 0 and 'powershell' not in snippet_names:
            raise AnsibleError("missing required import in %s: # POWERSHELL_COMMON" % module_path)

    return (output.getvalue(), module_style)
def _filter_leading_non_json_lines(self, data):
    '''
    Used to avoid random output from SSH at the top of JSON output, like
    messages from tcagetattr, or where dropbear spews MOTD on every single
    command (which is nuts).

    Only leading lines are filtered, since multiline JSON is valid: once a
    line starting with '{' or '[' is seen, everything after it is kept.
    '''
    kept = StringIO()
    json_started = False
    for current in data.splitlines():
        if not json_started and (current.startswith('{') or
                                 current.startswith('[')):
            json_started = True
        if json_started:
            kept.write(current + '\n')
    return kept.getvalue()
def load_cookies_file(cookies_file):
    """
    Loads the cookies file.

    We pre-pend the file with the special Netscape header because the cookie
    loader is very particular about this string.

    :returns: a StringIO positioned at the start of the combined content.
    """
    logging.debug("Loading cookie file %s into memory.", cookies_file)
    cookies = StringIO()
    # The header must sit on its own line: without the trailing newline the
    # first cookie line was previously glued onto the header.
    cookies.write("# Netscape HTTP Cookie File\n")
    # Context manager closes the file handle (previously leaked); 'rU' mode
    # was also removed in Python 3.11, so plain text mode is used.
    with open(cookies_file) as f:
        cookies.write(f.read())
    cookies.flush()
    cookies.seek(0)
    return cookies
def _consume_stderr(self, chan, call_line_handler_func=False):
    """
    Try to consume stderr data from chan if it's receive ready.

    Drains the paramiko channel's stderr in ``CHUNK_SIZE`` pieces, decodes
    the accumulated bytes, and optionally feeds each non-empty line to the
    registered stderr line-handler callback.

    :return: StringIO buffer with the decoded stderr, rewound to position 0.
    """
    out = bytearray()
    stderr = StringIO()
    if chan.recv_stderr_ready():
        data = chan.recv_stderr(self.CHUNK_SIZE)
        # Paramiko may hand back text on Python 3; normalise to bytes so it
        # can be appended to the bytearray accumulator.
        if six.PY3 and isinstance(data, six.text_type):
            data = data.encode('utf-8')
        out += data
        while data:
            # Only receive while the channel reports data ready; receiving
            # from a non-ready channel would block indefinitely.
            ready = chan.recv_stderr_ready()
            if not ready:
                break
            data = chan.recv_stderr(self.CHUNK_SIZE)
            if six.PY3 and isinstance(data, six.text_type):
                data = data.encode('utf-8')
            out += data
    # Decode once, after the drain loop, to avoid splitting multi-byte
    # characters across chunk boundaries.
    stderr.write(self._get_decoded_data(out))
    if self._handle_stderr_line_func and call_line_handler_func:
        data = strip_shell_chars(stderr.getvalue())
        lines = data.split('\n')
        lines = [line for line in lines if line]
        for line in lines:
            # Note: If this function performs network operating no sleep is
            # needed, otherwise if a long blocking operating is performed,
            # sleep is recommended to yield and prevent from busy looping
            self._handle_stderr_line_func(line=line + '\n')
    stderr.seek(0)
    return stderr
def cmdline_example_to_rst(src, out=None, ref=None):
    """Convert an annotated shell example script into reStructuredText.

    Only the region between ``#% EXAMPLE START`` and ``#% EXAMPLE END`` is
    processed: ``#%`` toggles a code block, ``#``-prefixed lines outside a
    code block become prose, and code lines ending in ``#% SKIP`` are
    omitted.

    :param src: iterable of source lines.
    :param out: optional file-like target; a StringIO is created if None.
    :param ref: optional cross-reference anchor name.
    :returns: the output file-like object.
    :raises RuntimeError: on a non-empty line that matches no rule.
    """
    if out is None:
        from six.moves import StringIO
        out = StringIO()

    # place header
    out.write('.. AUTO-GENERATED FILE -- DO NOT EDIT!\n\n')
    if ref:
        # place cross-ref target
        out.write('.. {0}:\n\n'.format(ref))

    # parser status vars
    inexample = False
    incodeblock = False

    for line in src:
        if line.startswith('#% EXAMPLE START'):
            inexample = True
            incodeblock = False
            continue
        if not inexample:
            # Everything before EXAMPLE START is ignored. (A second,
            # redundant copy of this check was removed.)
            continue
        if line.startswith('#% EXAMPLE END'):
            break
        if line.startswith('#%'):
            incodeblock = not incodeblock
            if incodeblock:
                out.write('\n.. code-block:: sh\n\n')
            continue
        if not incodeblock and line.startswith('#'):
            # Prose comment: strip the leading '# ' marker.
            out.write(line[(min(2, len(line) - 1)):])
            continue
        if incodeblock:
            if not line.rstrip().endswith('#% SKIP'):
                out.write(' %s' % line)
            continue
        if not len(line.strip()):
            continue
        raise RuntimeError("this should not happen")
    return out
def _recover_base64(s):
    """Strip characters that cannot appear in base64 content from *s*.

    Keeps only ``A-Z``, ``a-z``, ``0-9``, ``+`` and ``/``; anything else
    (whitespace, punctuation) is dropped. Runs of valid characters are
    copied as slices to avoid per-character writes.

    NOTE(review): ``=`` padding is also stripped by the Python 3 path —
    confirm callers re-pad before decoding.
    """
    if sys.version_info[0] == 2:
        # Python 2 str.translate deletes characters in one C-level pass.
        # (Replaces the former six.PY2 check with a stdlib equivalent.)
        return s.translate(None, _b64_invalid_chars)
    buf = StringIO()
    chunk_start = 0
    for i, c in enumerate(s):
        if (('A' <= c <= 'Z') or ('a' <= c <= 'z') or
                ('0' <= c <= '9') or c == '+' or c == '/'):
            continue
        # Invalid char: flush the valid run that preceded it.
        buf.write(s[chunk_start:i])
        chunk_start = i + 1
    buf.write(s[chunk_start:len(s)])
    return buf.getvalue()
def generateTransactionsSummary(files, unmatched_only=False):
    """Generate a per-biller-code text summary of transactions.

    :param files: iterable of transaction files. When ``unmatched_only`` is
        False each element is an ``(n, f)`` pair; otherwise a bare file.
        Each file exposes ``transactions.all()``.
    :param unmatched_only: when True only unmatched transactions are listed.
    :returns: dict mapping biller code -> summary text.
    """
    try:
        # Split transactions into biller codes
        biller_codes = {}
        biller_code_emails = {}
        if unmatched_only:
            for f in files:
                for t in f.transactions.all():
                    biller_codes.setdefault(t.biller_code, []).append(t)
        else:
            for n, f in files:
                for t in f.transactions.all():
                    biller_codes.setdefault(t.biller_code, []).append(t)
        # Generate summaries per biller code
        for k, v in biller_codes.items():
            matched = [t for t in v if t.matched]
            unmatched = [t for t in v if not t.matched]
            output = StringIO()
            if not unmatched_only:
                # Matched txns
                output.write('Matched transactions:\n')
                for m in matched:
                    output.write(' CRN: {} Amount: ${}\n'.format(m.crn, m.amount))
            # Unmatched txns
            output.write('\nUnmatched transactions:\n')
            for u in unmatched:
                output.write(' CRN: {} Amount: ${}\n'.format(u.crn, u.amount))
            contents = output.getvalue()
            output.close()
            # Add the biller code email
            biller_code_emails[k] = contents
        # Return only after ALL biller codes are processed.
        return biller_code_emails
    except Exception:
        # Fixed: traceback.print_exc() takes limit/file arguments, not the
        # exception object; passing ``e`` raised a TypeError on Python 3.
        traceback.print_exc()
        raise
def generateTransactionsSummary(files, unmatched_only=False):
    """Generate a per-biller-code text summary of transactions.

    :param files: iterable of transaction files. When ``unmatched_only`` is
        False each element is an ``(n, f)`` pair; otherwise a bare file.
        Each file exposes ``transactions.all()``.
    :param unmatched_only: when True only unmatched transactions are listed.
    :returns: dict mapping biller code -> summary text.
    """
    try:
        # Split transactions into biller codes
        biller_codes = {}
        biller_code_emails = {}
        if unmatched_only:
            for f in files:
                for t in f.transactions.all():
                    biller_codes.setdefault(t.biller_code, []).append(t)
        else:
            for n, f in files:
                for t in f.transactions.all():
                    biller_codes.setdefault(t.biller_code, []).append(t)
        # Generate summaries per biller code
        for k, v in biller_codes.items():
            matched = [t for t in v if t.matched]
            unmatched = [t for t in v if not t.matched]
            output = StringIO()
            if not unmatched_only:
                # Matched txns
                output.write('Matched transactions:\n')
                for m in matched:
                    output.write(' CRN: {} Amount: ${}\n'.format(m.crn, m.amount))
            # Unmatched txns
            output.write('\nUnmatched transactions:\n')
            for u in unmatched:
                output.write(' CRN: {} Amount: ${}\n'.format(u.crn, u.amount))
            contents = output.getvalue()
            output.close()
            # Add the biller code email
            biller_code_emails[k] = contents
        # Return only after ALL biller codes are processed.
        return biller_code_emails
    except Exception:
        # Fixed: traceback.print_exc() takes limit/file arguments, not the
        # exception object; passing ``e`` raised a TypeError on Python 3.
        traceback.print_exc()
        raise
def _askYesNo(question=None):
    """Prompt the user with *question* until they answer yes or no.

    :returns: True for an answer starting with 'y', False for 'n'.
    """
    while True:
        askString = "\r%s? (yes|no): " % (question)
        logging.debug("asking user: %s" % askString)
        # Prompt directly. The previous StringIO-based prompt appended the
        # question to the buffer on every iteration and re-read the whole
        # buffer, so each re-prompt displayed the question one more time.
        raw = raw_input(askString)
        if not len(raw):
            continue
        answer = raw[0].lower()
        logging.debug("user answered read: %s" % (answer))
        if answer not in 'yn':
            continue
        return answer == 'y'
def _askYesNo(question=None):
    """Prompt the user with *question* until they answer yes or no.

    :returns: True for an answer starting with 'y', False for 'n'.
    """
    while True:
        askString = "\r%s? (yes|no): " % (question)
        logging.debug("asking user: %s" % askString)
        # Prompt directly. The previous StringIO-based prompt appended the
        # question to the buffer on every iteration and re-read the whole
        # buffer, so each re-prompt displayed the question one more time.
        raw = raw_input(askString)
        if not len(raw):
            continue
        answer = raw[0].lower()
        logging.debug("user answered read: %s" % (answer))
        if answer not in 'yn':
            continue
        return answer == 'y'
def interactive_plot_init(self):
    """Inject the javascript needed for interactive 3D notebook plots.

    Runs at most once per session, guarded by the
    ``interactive_plotting_loaded`` class flag.
    """
    if not JupyterNotebookManager.interactive_plotting_loaded:
        library_javascript = StringIO()
        library_javascript.write("""
<script type="text/javascript">
var pychaste_javascript_injected = true;
""")
        three_js_libraries = ("three.min.js", "OrbitControls.js",
                              "VRMLLoader.js", "Detector.js")
        # Include three.js
        for library in three_js_libraries:
            with open(os.path.join(JupyterNotebookManager.three_js_dir,
                                   library)) as infile:
                library_javascript.write(infile.read())
        # Include internal plotting functions
        with open(os.path.join(JupyterNotebookManager.three_js_dir,
                               "plotting_script.js")) as infile:
            library_javascript.write(infile.read())
        # Close the <script> tag opened above; previously it was left
        # unterminated, producing invalid HTML.
        library_javascript.write("\n</script>\n")
        JupyterNotebookManager.interactive_plotting_loaded = True
        display(HTML(library_javascript.getvalue()))
def log_point(msg="", levels=None):
    """Return a formatted snapshot of the current call stack.

    :param msg: optional message placed at the top of the output.
    :param levels: number of frames (from the caller outwards) to include;
        defaults to 6, which works in most cases.
    :returns: the formatted stack listing as a string.
    """
    if levels is None:
        # Default to 6, which works in most cases
        levels = 6
    # Drop log_point's own frame, then order outermost-first.
    frames = inspect.stack()[1:]
    frames.reverse()
    buf = StringIO()
    if msg:
        buf.write(ustr(msg) + "\n")
    for frame, filename, line, funcname, lines, unknown in frames[-1 * levels:]:
        if filename.endswith("/unittest.py"):
            # unittest.py code is a boring part of the traceback
            continue
        if filename.startswith("./"):
            filename = filename[2:]
        buf.write("%s:%s in %s:\n" % (filename, line, funcname))
        if lines:
            buf.write(" %s\n" % "".join(lines)[:-1])
    return buf.getvalue()
def _coef_table(self):
    """Build the per-equation coefficient tables as a single text block."""
    model = self.model
    n_eq = model.neqs
    exog_names = self.model.exog_names

    # One row per coefficient: (coef, std err, t, p), stacked equation by
    # equation via the transposed ravel.
    rows = list(zip(model.params.T.ravel(),
                    model.stderr.T.ravel(),
                    model.tvalues.T.ravel(),
                    model.pvalues.T.ravel()))
    header = ('coefficient', 'std. error', 't-stat', 'prob')

    buf = StringIO()
    rows_per_eq = n_eq * model.k_ar + model.k_trend
    for eq in range(n_eq):
        buf.write("Results for equation %s\n" % model.names[eq])
        table = SimpleTable(rows[rows_per_eq * eq:rows_per_eq * (eq + 1)],
                            header, exog_names, title=None,
                            txt_fmt=self.default_fmt)
        buf.write(str(table) + '\n')
        if eq < n_eq - 1:
            # Blank separator between equations (but not after the last).
            buf.write('\n')
    return buf.getvalue()
class EncodedStringIO(object):
    """StringIO adapter that advertises an ``encoding`` attribute.

    Some consumers expect file-like objects to expose ``encoding``; plain
    StringIO does not, so this wrapper adds one (always "ascii") while
    delegating read/write/flush to an internal buffer.
    """

    def __init__(self):
        # Underlying in-memory buffer all operations delegate to.
        self._data = StringIO()
        self.encoding = "ascii"

    def read(self):
        # Reads from the buffer's current position (no implicit rewind).
        return self._data.read()

    def write(self, data):
        # Returns the number of characters written, like StringIO.write.
        return self._data.write(data)

    def flush(self):
        self._data.flush()
class EncodedStringIO(object):
    """StringIO adapter that advertises an ``encoding`` attribute.

    Some consumers expect file-like objects to expose ``encoding``; plain
    StringIO does not, so this wrapper adds one (always "ascii") while
    delegating read/write/flush to an internal buffer.
    """

    def __init__(self):
        # Underlying in-memory buffer all operations delegate to.
        self._data = StringIO()
        self.encoding = "ascii"

    def read(self):
        # Reads from the buffer's current position (no implicit rewind).
        return self._data.read()

    def write(self, data):
        # Returns the number of characters written, like StringIO.write.
        return self._data.write(data)

    def flush(self):
        self._data.flush()
def _test_dynamic1(self):
    """
    xref test for:
     - DLOAD -> DAREA -> NID

    DLOAD take priority
    useful for dynamic nodal forces/disp/vel/acc
    """
    # Small BDF deck exercising DLOAD/RLOAD1/DAREA/DELAY/DPHASE/TABLED1
    # cross-references; SOL 108 is direct frequency response.
    # NOTE(review): exact card-field whitespace reconstructed from a
    # collapsed source — confirm against the original deck.
    msg = """
SOL 108
CEND
SUBCASE 1
    DLOAD = 33
    DISP(PLOT) = ALL
BEGIN BULK
$DLOAD SID S S1 L1 S2 L2
DLOAD, 33, 1.0, 1.0, 35, 1.0, 36
$RLOAD1 SID EXCITEID DELAY DPHASE TC TD TYPE
RLOAD1, 35, 29, 0.2, 5.0, 40, 0.0, 0
RLOAD1, 36, 29, 31, 32, 4.0, 41, 0
$DAREA SID GRID COMP SCALE
DAREA, 29, 30, 1, 5.2
$DELAY SID GRID COMP LAG
DELAY, 31, 30, 1, 0.2
$DPHASE SID GRID COMP ANGLE
DPHASE, 32, 30, 1, 5.0
$TABLED1 TID XAXIS YAXIS
$ x1 y1 x2 y2 x3 y3 x4 y4
TABLED1, 40, LINEAR, LINEAR
,0.0, 4.0, 2.0, 8.0, 6.0, 8.0, ENDT
TABLED1, 41, LINEAR, LINEAR
,0.0, 0.5, 0.6, 0.4, 0.8, 0.7, ENDT
GRID,30
"""
    model = BDF(debug=False)
    # read_bdf expects a file-like object, so feed the deck via StringIO.
    bdf_file = StringIO()
    bdf_file.write(msg)
    bdf_file.seek(0)
    model.read_bdf(bdf_file)
def serialize(self, items):
    """Does the inverse of config parsing by taking parsed values and
    converting them back to a string representing config file contents.

    :param items: mapping of key -> either a nested OrderedDict (rendered
        as a ``[section]`` via recursion) or a ``(value, help)`` pair.
    :returns: config file contents as a string.
    """
    r = StringIO()
    for key, value in items.items():
        if isinstance(value, OrderedDict):
            # Nested mapping -> emit a section header and recurse.
            # (isinstance also accepts OrderedDict subclasses, unlike the
            # previous exact type() comparison.)
            r.write('\n[%s]\n' % key)
            r.write(self.serialize(value))
        else:
            # Renamed from ``help`` to avoid shadowing the builtin.
            value, help_text = value
            if help_text:
                # Help text becomes a comment line above the option.
                r.write('; %s\n' % help_text)
            r.write('%s = %s\n' % (key, value))
    return r.getvalue()
def _report_testsuite(suite_name, tests, xml_document, parentElement, properties):
    """
    Appends the testsuite section to the XML document.

    Counts failures/errors and merges captured stdout/stderr into CDATA
    sections.
    """
    testsuite = xml_document.createElement("testsuite")
    parentElement.appendChild(testsuite)

    testsuite.setAttribute("name", suite_name)
    testsuite.setAttribute("tests", str(len(tests)))
    testsuite.setAttribute("time", "%.3f" % sum(map(lambda e: e.elapsed_time, tests)))

    failures = filter(lambda e: e.outcome == _TestInfo.FAILURE, tests)
    testsuite.setAttribute("failures", str(len(list(failures))))

    errors = filter(lambda e: e.outcome == _TestInfo.ERROR, tests)
    testsuite.setAttribute("errors", str(len(list(errors))))

    _XMLTestResult._report_testsuite_properties(testsuite, xml_document, properties)

    systemout = xml_document.createElement("system-out")
    testsuite.appendChild(systemout)
    stdout = StringIO()
    for test in tests:
        # Merge the stdout from the tests in a class; guard against tests
        # that captured nothing (writing None crashes StringIO.write).
        if test.stdout is not None:
            stdout.write(test.stdout)
    _XMLTestResult._createCDATAsections(xml_document, systemout, stdout.getvalue())

    systemerr = xml_document.createElement("system-err")
    testsuite.appendChild(systemerr)
    stderr = StringIO()
    for test in tests:
        # Merge the stderr from the tests in a class
        if test.stderr is not None:
            stderr.write(test.stderr)
    _XMLTestResult._createCDATAsections(xml_document, systemerr, stderr.getvalue())

    return testsuite
class ClosableOutput(object):
    """In-memory output sink that snapshots its contents on close.

    Usable as a context manager; after :meth:`close` the accumulated text is
    available as the ``value`` attribute. ``str()`` yields the target path.
    """

    # Mirrors the file-object API; no encoding applies to in-memory text.
    encoding = None

    def __init__(self, path):
        self._output = StringIO()
        self._path = path

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def write(self, data):
        self._output.write(data)

    def close(self):
        # Capture the text before releasing the underlying buffer; after
        # this, the buffer can no longer be written to.
        self.value = self._output.getvalue()
        self._output.close()

    def __str__(self):
        return self._path
class ClosableOutput(object):
    """In-memory output sink that snapshots its contents on close.

    Usable as a context manager; after :meth:`close` the accumulated text is
    available as the ``value`` attribute. ``str()`` yields the target path.
    """

    # Mirrors the file-object API; no encoding applies to in-memory text.
    encoding = None

    def __init__(self, path):
        self._output = StringIO()
        self._path = path

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def write(self, data):
        self._output.write(data)

    def close(self):
        # Capture the text before releasing the underlying buffer; after
        # this, the buffer can no longer be written to.
        self.value = self._output.getvalue()
        self._output.close()

    def __str__(self):
        return self._path
def _consume_stderr(self, chan, call_line_handler_func=False):
    """
    Try to consume stderr data from chan if it's receive ready.

    Drains the paramiko channel's stderr in ``CHUNK_SIZE`` pieces, decodes
    the accumulated bytes, and optionally feeds each non-empty line to the
    registered stderr line-handler callback.

    :return: StringIO buffer with the decoded stderr, rewound to position 0.
    """
    out = bytearray()
    stderr = StringIO()
    if chan.recv_stderr_ready():
        data = chan.recv_stderr(self.CHUNK_SIZE)
        out += data
        while data:
            # Only receive while the channel reports data ready; receiving
            # from a non-ready channel would block indefinitely.
            ready = chan.recv_stderr_ready()
            if not ready:
                break
            data = chan.recv_stderr(self.CHUNK_SIZE)
            out += data
    # Decode once, after the drain loop, to avoid splitting multi-byte
    # characters across chunk boundaries.
    stderr.write(self._get_decoded_data(out))
    if self._handle_stderr_line_func and call_line_handler_func:
        data = strip_shell_chars(stderr.getvalue())
        lines = data.split('\n')
        lines = [line for line in lines if line]
        for line in lines:
            # Note: If this function performs network operating no sleep is
            # needed, otherwise if a long blocking operating is performed,
            # sleep is recommended to yield and prevent from busy looping
            self._handle_stderr_line_func(line=line + '\n')
    stderr.seek(0)
    return stderr
class _CollectOutputProtocol(ProcessProtocol):
    """
    Internal helper. Collects all output (stdout + stderr) into
    self.output, and callback's on done with all of it after the
    process exits (for any reason).
    """

    def __init__(self):
        # Fires with the collected output once the process has ended.
        self.done = Deferred()
        # Accumulates interleaved stdout + stderr as received.
        # NOTE(review): Twisted delivers ``data`` as bytes; a StringIO
        # buffer assumes text (Python 2 style) — confirm for Python 3.
        self.output = StringIO()

    def processEnded(self, reason):
        # Resolve ``done`` on termination, unless an earlier errback
        # (from processExited) already fired it.
        if not self.done.called:
            self.done.callback(self.output.getvalue())

    def processExited(self, reason):
        # Any non-clean exit (anything but ProcessDone) becomes an errback.
        if not isinstance(reason.value, ProcessDone):
            self.done.errback(reason)

    def outReceived(self, data):
        self.output.write(data)

    def errReceived(self, data):
        # stderr is echoed for visibility and folded into the same buffer.
        print("ERR: {}".format(data))
        self.output.write(data)
class _MagicTextProtocol(ProcessProtocol):
    """
    Internal helper. Monitors all stdout looking for a magic string,
    and then .callback()s on self.done and .errback's if the process exits
    """

    def __init__(self, magic_text):
        # Fires (with this protocol instance) the first time the magic
        # text is observed in stdout.
        self.magic_seen = Deferred()
        # Fires with None when the process terminates.
        self.exited = Deferred()
        self._magic_text = magic_text
        # Rolling capture of stdout so the magic string is found even if
        # it arrives split across separate outReceived() chunks.
        self._output = StringIO()

    def processEnded(self, reason):
        self.exited.callback(None)

    def outReceived(self, data):
        # Mirror stdout for visibility while also buffering it for search.
        sys.stdout.write(data)
        self._output.write(data)
        if not self.magic_seen.called and self._magic_text in self._output.getvalue(
        ):
            print("Saw '{}' in the logs".format(self._magic_text))
            self.magic_seen.callback(self)

    def errReceived(self, data):
        # stderr is only mirrored, never searched for the magic string.
        sys.stdout.write(data)
def make(self, endog_names=None, exog_names=None):
    """
    Summary of VAR model

    Concatenates the header, statistics, coefficient and residual sections,
    each terminated by a newline. The name arguments are accepted for
    interface compatibility and are not used here.
    """
    sections = (self._header_table(), self._stats_table(),
                self._coef_table(), self._resid_info())
    return ''.join(section + '\n' for section in sections)
def preview_sql(self, url, step, **args):
    """Mocks SQLAlchemy Engine to store all executed calls in a string
    and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`

    :returns: SQL file
    """
    buf = StringIO()
    args['engine_arg_strategy'] = 'mock'
    # The mock engine's executor appends every statement (plus an optional
    # parameter string) to the buffer instead of executing it.
    args['engine_arg_executor'] = lambda s, p = '': buf.write(str(s) + p)

    @with_engine
    def go(url, step, **kw):
        # ``with_engine`` injects an ``engine`` keyword built from ``url``
        # and the ``engine_arg_*`` entries set above.
        engine = kw.pop('engine')
        self.run(engine, step)
        return buf.getvalue()

    return go(url, step, **args)
def preview_sql(self, url, step, **args):
    """Mocks SQLAlchemy Engine to store all executed calls in a string
    and runs :meth:`PythonScript.run <migrate.versioning.script.py.PythonScript.run>`

    :returns: SQL file
    """
    buf = StringIO()
    args['engine_arg_strategy'] = 'mock'
    # The mock engine's executor appends every statement (plus an optional
    # parameter string) to the buffer instead of executing it.
    args['engine_arg_executor'] = lambda s, p='': buf.write(str(s) + p)

    @with_engine
    def go(url, step, **kw):
        # ``with_engine`` injects an ``engine`` keyword built from ``url``
        # and the ``engine_arg_*`` entries set above.
        engine = kw.pop('engine')
        self.run(engine, step)
        return buf.getvalue()

    return go(url, step, **args)
def ActiveAuthors(self):
    '''
    TODO - template for generating the hot files list. Currently format is
    hard coded as HTML ordered list
    '''
    self._printProgress("Calculating Active authors list")
    top_authors = self.svnstats.getActiveAuthors(10)
    html = StringIO()
    html.write("<ol>\n")
    # ``temperatur`` (the activity score) is not shown in the listing.
    for author, temperatur in top_authors:
        html.write("<li>%s</li>\n" % author)
    html.write("</ol>\n")
    return (html.getvalue())
def ActiveFiles(self):
    '''
    TODO - template for generating the hot files list. Currently format is
    hard coded as HTML ordered list
    '''
    self._printProgress("Calculating Active (hot) files list")
    hot_files = self.svnstats.getHotFiles(10)
    html = StringIO()
    html.write("<ol>\n")
    # ``temperatur`` (the activity score) is not shown in the listing.
    for filepath, temperatur, revcount in hot_files:
        html.write(
            "<li>%s (rev count: %d)</li>\n"
            % (self.svnstats.getSearchPathRelName(filepath), revcount))
    html.write("</ol>\n")
    return (html.getvalue())
def run(self, cmd, timeout=None, quote=False, call_line_handler_func=False):
    """
    Run *cmd* over the SSH channel and collect its output.

    Note: This function is based on paramiko's exec_command()
    method.

    :param timeout: How long to wait (in seconds) for the command to
                    finish (optional).
    :type timeout: ``float``

    :param call_line_handler_func: True to call handle_stdout_line_func
                                   function for each line of received
                                   stdout and handle_stderr_line_func for
                                   each line of stderr.
    :type call_line_handler_func: ``bool``

    :return: ``[stdout, stderr, exit_status]``
    :raises SSHCommandTimeoutError: when *timeout* elapses first.
    """
    if quote:
        cmd = quote_unix(cmd)

    extra = {'_cmd': cmd}
    self.logger.info('Executing command', extra=extra)

    # Use the system default buffer size
    bufsize = -1

    transport = self.client.get_transport()
    chan = transport.open_session()

    start_time = time.time()
    if cmd.startswith('sudo'):
        # Note that fabric does this as well. If you set pty, stdout and stderr
        # streams will be combined into one.
        chan.get_pty()
    chan.exec_command(cmd)

    stdout = StringIO()
    stderr = StringIO()

    # Create a stdin file and immediately close it to prevent any
    # interactive script from hanging the process.
    stdin = chan.makefile('wb', bufsize)
    stdin.close()

    # Receive all the output
    # Note #1: This is used instead of chan.makefile approach to prevent
    # buffering issues and hanging if the executed command produces a lot
    # of output.
    #
    # Note #2: If you are going to remove "ready" checks inside the loop
    # you are going to have a bad time. Trying to consume from a channel
    # which is not ready will block for indefinitely.
    exit_status_ready = chan.exit_status_ready()

    if exit_status_ready:
        # Command already finished: drain whatever output is buffered.
        stdout_data = self._consume_stdout(chan=chan,
                                           call_line_handler_func=call_line_handler_func)
        stdout_data = stdout_data.getvalue()

        stderr_data = self._consume_stderr(chan=chan,
                                           call_line_handler_func=call_line_handler_func)
        stderr_data = stderr_data.getvalue()

        stdout.write(stdout_data)
        stderr.write(stderr_data)

    while not exit_status_ready:
        current_time = time.time()
        elapsed_time = (current_time - start_time)

        if timeout and (elapsed_time > timeout):
            # TODO: Is this the right way to clean up?
            chan.close()

            stdout = strip_shell_chars(stdout.getvalue())
            stderr = strip_shell_chars(stderr.getvalue())

            raise SSHCommandTimeoutError(cmd=cmd, timeout=timeout,
                                         stdout=stdout, stderr=stderr)

        stdout_data = self._consume_stdout(chan=chan,
                                           call_line_handler_func=call_line_handler_func)
        stdout_data = stdout_data.getvalue()

        stderr_data = self._consume_stderr(chan=chan,
                                           call_line_handler_func=call_line_handler_func)
        stderr_data = stderr_data.getvalue()

        stdout.write(stdout_data)
        stderr.write(stderr_data)

        # We need to check the exit status here, because the command could
        # print some output and exit during this sleep below.
        exit_status_ready = chan.exit_status_ready()

        if exit_status_ready:
            break

        # Short sleep to prevent busy waiting
        eventlet.sleep(self.SLEEP_DELAY)
    # print('Wait over. Channel must be ready for host: %s' % self.hostname)

    # Receive the exit status code of the command we ran.
    status = chan.recv_exit_status()

    # Note: ``stdout``/``stderr`` are rebound from StringIO buffers to
    # plain strings from here on.
    stdout = strip_shell_chars(stdout.getvalue())
    stderr = strip_shell_chars(stderr.getvalue())

    extra = {'_status': status, '_stdout': stdout, '_stderr': stderr}
    self.logger.debug('Command finished', extra=extra)

    return [stdout, stderr, status]
def __repr__(self):
    """Readable representation: a header line followed by the card text."""
    buf = StringIO()
    buf.write('<PLSOLID object> n=%s\n' % self.n)
    # write_card appends the formatted card lines to the buffer.
    self.write_card(buf)
    return buf.getvalue()
def plot_3d(parts_list, charges, currents, centres, width=700, height=500,
            wireframe=False, skip_webgl=False):
    """Create a 3D plot in the IPython notebook

    Parameters
    ----------
    parts_list : list of Part
        All the parts to plot
    charges : list of ndarray
        The charge distribution on each part
    currents : list of ndarray
        The current distribution on each part
    centres : list of ndarray
        The centre of each triangle, where the current vector is located
    width : integer, optional
        The width of the plot
    height : integer, optional
        The height of the plot
    wireframe : bool, optional
        Whether the plot should initially show a wireframe view
    skip_webgl : bool, optional
        Do not attempt to use webgl rendering, always use slower canvas
        rendering

    Returns
    -------
    H : HTML
        An HTML object containing the necessary HTML, CSS and javascript
        to show the plot
    """
    meshes = [part.mesh for part in parts_list]
    nodes = [part.nodes for part in parts_list]
    full_mesh = combine_mesh(meshes, nodes)  # combine the meshes

    # scale all nodes so that the maximum size is known
    mesh_scale = 100/full_mesh.fast_size()
    full_mesh.nodes = full_mesh.nodes*mesh_scale

    # generate a javascript representation of the object; the random suffix
    # keeps multiple plots on one page from clashing.
    geometry_name = "geometry_"+str(uuid.uuid4()).replace('-', '')
    geometry_javascript = StringIO()
    geometry_javascript.write("var %s = " % geometry_name)

    geometry_tree = {'nodes': full_mesh.nodes.tolist(),
                     'triangles': full_mesh.polygons.tolist()}

    # include the charge information if it is present
    if charges is not None:
        charges = np.hstack(charges)
        geometry_tree['charge'] = {'real': charges.real.tolist(),
                                   'imag': charges.imag.tolist(),
                                   'abs': abs(charges).tolist(),
                                   'phase': np.angle(charges, deg=True).tolist()}

    # Include the current information if it is present. Vectors will be of
    # the form (length, x, y, z), where the 3 cartesian components are scaled
    # to be a normal vector
    if currents is not None:
        currents = np.vstack(currents)
        lengths_real = np.sqrt(np.sum(currents.real**2, axis=1))
        # NOTE(review): normalises in place; a zero-length vector would
        # divide by zero here — confirm inputs are non-degenerate.
        currents.real /= lengths_real[:, None]
        current_real = np.hstack((lengths_real[:, None],
                                  currents.real)).tolist()

        if np.any(np.iscomplex(currents)):
            lengths_imag = np.sqrt(np.sum(currents.imag**2, axis=1))
            currents.imag /= lengths_imag[:, None]
            current_imag = np.hstack((lengths_imag[:, None],
                                      currents.imag)).tolist()
        else:
            # Purely real currents: emit zero imaginary vectors.
            current_imag = np.zeros((currents.shape[0], 4)).tolist()

        geometry_tree['current'] = {'real': current_real,
                                    'imag': current_imag}
        geometry_tree['centres'] = (np.vstack(centres)*mesh_scale).tolist()

    # Serialise the geometry directly into the javascript assignment.
    json.dump(geometry_tree, geometry_javascript)
    geometry_javascript.write(';')

    html_source = template_env.get_template('three_js_plot.html')
    html_generated = html_source.render(
        {'geometry_javascript': geometry_javascript,
         'geometry_name': geometry_name,
         'canvas_width': width,
         'canvas_height': height,
         'initial_wireframe': wireframe,
         'skip_webgl': skip_webgl,
         'current_vector_len': np.median(full_mesh.edge_lens)})
    display(HTML(html_generated))
def format(self, data, args):
    """Render ``stat`` command output rows as Graphite-style metric lines.

    ``data[0]`` is the header row; the remaining rows are formatted
    according to the stat type in ``args.key[0]``. Returns an empty string
    for any other module name.
    """
    buf = StringIO()
    if args._mod_name != "stat":
        return buf.getvalue()
    for row_index, row in enumerate(data[1:]):
        if row_index != 0:
            buf.write("\n")
        stattype = args.key[0]
        if stattype == "hsl:stat":
            # All but the last 3 columns form the dotted metric name; the
            # final name component swaps '-' for '_', earlier components
            # swap ':' for '-'. The value is the third-from-last column.
            for col_index, key in enumerate(row[:len(row) - 3]):
                if col_index != 0:
                    buf.write(".")
                if col_index == len(row) - 4:
                    key = key.replace("-", "_")
                else:
                    key = key.replace(":", "-")
                buf.write(key)
            buf.write(" {}".format(row[len(row) - 3]))
        elif stattype in (
            "system-mem-usage", "system-swap-usage", "system-storage-usage",
            "system-storage-latency", "mail-license-count",
            "mail-queue-count", "mail-quarantine-count"
        ):
            # Simple host/value pair keyed by the stat type itself.
            buf.write("{}.{} {}".format(row[0],
                                        stattype.replace("-", "_"), row[1]))
        else:
            # Generic table: one line per column, named from the header row.
            for col_index, key in enumerate(row[1:]):
                if col_index != 0:
                    buf.write("\n")
                name = data[0][col_index + 1].replace("-", "_")
                buf.write("{}.{}.{} {}".format(row[0], stattype, name, key))
    return buf.getvalue()
class THttpClient(TTransportBase):
    """Http implementation of TTransport base.

    Writes are buffered locally and sent as a single HTTP POST on
    flush(); the response body is then available via read().
    NOTE(review): uses Python 2 APIs (httplib.HTTP/HTTPS, urlparse,
    urllib.quote) -- this module targets Python 2.
    """

    def __init__(self, uri_or_host, port=None, path=None):
        """THttpClient supports two different types constructor parameters.

        THttpClient(host, port, path) - deprecated
        THttpClient(uri)

        Only the second supports https.
        """
        if port is not None:
            # Deprecated (host, port, path) form; always plain http.
            warnings.warn(
                "Please use the THttpClient('http://host:port/path') syntax",
                DeprecationWarning,
                stacklevel=2)
            self.host = uri_or_host
            self.port = port
            assert path
            self.path = path
            self.scheme = 'http'
        else:
            # Single-URI form; the only one that can express https.
            parsed = urlparse.urlparse(uri_or_host)
            self.scheme = parsed.scheme
            assert self.scheme in ('http', 'https')
            if self.scheme == 'http':
                self.port = parsed.port or httplib.HTTP_PORT
            elif self.scheme == 'https':
                self.port = parsed.port or httplib.HTTPS_PORT
            self.host = parsed.hostname
            self.path = parsed.path
            if parsed.query:
                # Preserve the query string as part of the request path.
                self.path += '?%s' % parsed.query
        self.__wbuf = StringIO()        # request-body accumulator, drained by flush()
        self.__http = None              # live httplib connection, created by open()
        self.__timeout = None           # seconds (float) or None; see setTimeout()
        self.__custom_headers = None    # optional dict of extra request headers

    def open(self):
        # Create the connection object; no bytes are sent until flush().
        if self.scheme == 'http':
            self.__http = httplib.HTTP(self.host, self.port)
        else:
            self.__http = httplib.HTTPS(self.host, self.port)

    def close(self):
        self.__http.close()
        self.__http = None

    def isOpen(self):
        return self.__http is not None

    def setTimeout(self, ms):
        # The timeout is applied globally via socket.setdefaulttimeout()
        # around flush() -- see __withTimeout below.
        if not hasattr(socket, 'getdefaulttimeout'):
            raise NotImplementedError
        if ms is None:
            self.__timeout = None
        else:
            self.__timeout = ms / 1000.0  # milliseconds -> seconds

    def setCustomHeaders(self, headers):
        self.__custom_headers = headers

    def read(self, sz):
        # Reads from the response of the most recently flushed request.
        return self.__http.file.read(sz)

    def write(self, buf):
        # Writes only fill the local buffer; nothing hits the wire here.
        self.__wbuf.write(buf)

    def __withTimeout(f):
        # Decorator: run f with socket.setdefaulttimeout(self.__timeout),
        # restoring the previous default afterwards.
        # NOTE(review): the global default is not restored if f raises --
        # confirm this is acceptable for long-lived processes.
        def _f(*args, **kwargs):
            orig_timeout = socket.getdefaulttimeout()
            socket.setdefaulttimeout(args[0].__timeout)
            result = f(*args, **kwargs)
            socket.setdefaulttimeout(orig_timeout)
            return result
        return _f

    def flush(self):
        # One HTTP request per flush: tear down and reopen the connection.
        if self.isOpen():
            self.close()
        self.open()

        # Pull data out of buffer
        data = self.__wbuf.getvalue()
        self.__wbuf = StringIO()

        # HTTP request
        self.__http.putrequest('POST', self.path)

        # Write headers
        self.__http.putheader('Host', self.host)
        self.__http.putheader('Content-Type', 'application/x-thrift')
        self.__http.putheader('Content-Length', str(len(data)))

        if not self.__custom_headers or 'User-Agent' not in self.__custom_headers:
            # Default User-Agent, optionally annotated with the script name.
            user_agent = 'Python/THttpClient'
            script = os.path.basename(sys.argv[0])
            if script:
                user_agent = '%s (%s)' % (user_agent, urllib.quote(script))
            self.__http.putheader('User-Agent', user_agent)

        if self.__custom_headers:
            for key, val in six.iteritems(self.__custom_headers):
                self.__http.putheader(key, val)

        self.__http.endheaders()

        # Write payload
        self.__http.send(data)

        # Get reply to flush the request
        self.code, self.message, self.headers = self.__http.getreply()

    # Decorate if we know how to timeout
    if hasattr(socket, 'getdefaulttimeout'):
        flush = __withTimeout(flush)
def __repr__(self):
    """Readable summary: a '<PCOMP object>' header, then the card itself."""
    card = StringIO()
    card.write('<PCOMP object> n=%s\n' % self.n)
    self.write_card(card)
    return card.getvalue()
def __repr__(self):
    """Summarise this PSOLID as its header line plus the rendered card."""
    stream = StringIO()
    stream.write('<PSOLID object> n=%s\n' % self.n)
    self.write_bdf(stream)
    return stream.getvalue()
def generateParserSummary(files):
    """Build a human-readable summary of one parser run.

    ``files`` is a dict with four keys, each a list of 2-tuples:
      - 'valid':     (file name, parsed object exposing .transactions.all())
      - 'other':     (file name, parsed object) -- succeeded, no transactions
      - 'failed':    (file name, failure reason)
      - 'processed': (file name, parsed object) -- seen in an earlier run

    Returns the assembled summary as one string.
    """
    lines = []
    lines.append('Successful Files with transactions:\n')
    for name, parsed in files['valid']:
        lines.append(' File Name: {}\n'.format(name))
        lines.append(' Transactions:\n')
        # Each transaction is identified by its CRN.
        lines.extend(' CRN: {}\n'.format(txn.crn)
                     for txn in parsed.transactions.all())
    lines.append('\nSuccessful Files without transactions:\n')
    lines.extend(' File Name: {}\n'.format(name)
                 for name, _ in files['other'])
    lines.append('\nFailed Files:\n')
    for name, reason in files['failed']:
        lines.append(' File Name: {}\n'.format(name))
        lines.append(' Reason: {}\n'.format(reason))
    lines.append('\nFiles previously processed:\n')
    lines.extend(' File Name: {}\n'.format(name)
                 for name, _ in files['processed'])
    return ''.join(lines)
def __repr__(self):
    """Printable form: a '<PSHELL object>' header followed by the card."""
    out = StringIO()
    out.write('<PSHELL object> n=%s\n' % self.n)
    self.write_bdf(out)
    return out.getvalue()
class HtmlReport(Plugin):
    """
    Output test results as pretty html.
    """

    name = 'html'
    score = 2000  # high score so output capture wraps the other plugins
    encoding = 'UTF-8'
    report_file = None

    def __init__(self, verbosity=1):
        super(HtmlReport, self).__init__()
        # Two capture layers: a per-test buffer (reset in startTest) and a
        # run-wide buffer that accumulates everything for the raw output
        # section of the report.
        self.global_stdout0 = None
        self.global_stderr0 = None
        self.test_stdout0 = None
        self.test_stderr0 = None
        self.testOutputBuffer = StringIO()
        self.globalOutputBuffer = StringIO()
        self.stdout_redirector = OutputRedirector(sys.stdout)
        self.stderr_redirector = OutputRedirector(sys.stderr)
        self.test_stdout_redirector = OutputRedirector(sys.stdout)
        self.test_stderr_redirector = OutputRedirector(sys.stderr)
        self.verbosity = verbosity

    def startTest(self, test):
        # just one buffer for both stdout and stderr
        self.testOutputBuffer = StringIO()
        self.test_stdout_redirector.fp = self.testOutputBuffer
        self.test_stderr_redirector.fp = self.testOutputBuffer
        # Remember the real streams so complete_test_output can restore them.
        self.test_stdout0 = sys.stdout
        self.test_stderr0 = sys.stderr
        sys.stdout = self.test_stdout_redirector
        sys.stderr = self.test_stderr_redirector
        self.test_start_time = datetime.now()

    def complete_test_output(self, err_msg='', traceback=''):
        """
        Disconnect output redirection and return buffer.
        Safe to call multiple times.
        """
        if self.test_stdout0:
            sys.stdout = self.test_stdout0
            sys.stderr = self.test_stderr0
            self.test_stdout0 = None
            self.test_stderr0 = None
            # Mirror the per-test output (plus any failure text) into the
            # run-wide buffer so the report's raw-output section is complete.
            self.globalOutputBuffer.write(self.testOutputBuffer.getvalue())
            self.globalOutputBuffer.write(err_msg)
            self.globalOutputBuffer.write(traceback)
        return self.testOutputBuffer.getvalue()

    def begin(self):
        # just one buffer for both stdout and stderr
        self.stdout_redirector.fp = self.globalOutputBuffer
        self.stderr_redirector.fp = self.globalOutputBuffer
        self.global_stdout0 = sys.stdout
        self.global_stderr0 = sys.stderr
        sys.stdout = self.stdout_redirector
        sys.stderr = self.stderr_redirector

    def complete_global_output(self):
        """
        Disconnect output redirection and return buffer.
        Safe to call multiple times.
        """
        if self.global_stdout0:
            sys.stdout = self.global_stdout0
            sys.stderr = self.global_stderr0
            self.global_stdout0 = None
            self.global_stderr0 = None
        return self.globalOutputBuffer.getvalue()

    def options(self, parser, env):
        """Sets additional command line options."""
        Plugin.options(self, parser, env)
        parser.add_option(
            '--html-report', action='store',
            dest='html_file', metavar="FILE",
            default=env.get('NOSE_HTML_FILE', 'nosetests.html'),
            help="Path to html file to store the report in. "
            "Default is nosetests.html in the working directory "
            "[NOSE_HTML_FILE]")
        parser.add_option(
            '--html-report-template', action='store',
            dest='html_template', metavar="FILE",
            default=env.get('NOSE_HTML_TEMPLATE_FILE',
                            os.path.join(os.path.dirname(__file__),
                                         "templates", "report2.jinja2")),
            help="Path to html template file in with jinja2 format."
            "Default is report.html in the lib sources"
            "[NOSE_HTML_TEMPLATE_FILE]")

    def configure(self, options, config):
        """Configures the xunit plugin."""
        Plugin.configure(self, options, config)
        self.config = config
        if self.enabled:
            # Jinja environment rooted at the template's directory so that
            # report() can load the template by basename.
            self.jinja = Environment(
                loader=FileSystemLoader(os.path.dirname(options.html_template)),
                trim_blocks=True,
                lstrip_blocks=True
            )
            self.stats = {'errors': 0, 'failures': 0, 'passes': 0, 'skipped': 0}
            self.report_data = defaultdict(Group)
            htmlfile_dirname = os.path.dirname(options.html_file)
            if not os.path.exists(htmlfile_dirname):
                os.makedirs(htmlfile_dirname)
            self.report_file = codecs.open(options.html_file, 'w',
                                           self.encoding, 'replace')
            self.report_template_filename = options.html_template

    def report(self, stream):
        """Writes an Xunit-formatted XML file

        The file includes a report of test errors and failures.
        """
        # Roll up per-group and overall totals before rendering.
        self.stats['total'] = sum(self.stats.values())
        for group in self.report_data.values():
            group.stats['total'] = sum(group.stats.values())
        self.report_file.write(self.jinja.get_template(
            os.path.basename(self.report_template_filename)).render(
            report=self.report_data,
            stats=self.stats,
            rawoutput=self._format_output(self.complete_global_output())
        ))
        self.report_file.close()
        if self.config.verbosity > 1:
            stream.writeln("-" * 70)
            stream.writeln("HTML: %s" % self.report_file.name)

    def addSuccess(self, test):
        name = id_split(test.id())
        group = self.report_data[name[0]]
        self.stats['passes'] += 1
        group.stats['passes'] += 1
        group.tests.append({
            'name': name[-1],
            'failed': False,
            'output': self._format_output(self.complete_test_output()),
            'shortDescription': test.shortDescription(),
            'time': str(datetime.now() - self.test_start_time),
        })

    def addError(self, test, err, capt=None):
        """Add error output to Xunit report.
        """
        exc_type, exc_val, tb = err
        # Normalise exc_val to an exception instance before formatting.
        tb = ''.join(traceback.format_exception(
            exc_type,
            exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val),
            tb
        ))
        name = id_split(test.id())
        group = self.report_data[name[0]]
        # SkipTest arrives through the error path; count it separately.
        if issubclass(err[0], SkipTest):
            type = 'skipped'
            self.stats['skipped'] += 1
            group.stats['skipped'] += 1
        else:
            type = 'error'
            self.stats['errors'] += 1
            group.stats['errors'] += 1
        group.tests.append({
            'name': name[-1],
            'failed': True,
            'type': type,
            'errtype': nice_classname(err[0]),
            'message': exc_message(err),
            'tb': tb,
            'output': self._format_output(self.complete_test_output(exc_message(err), tb)),
            'shortDescription': test.shortDescription(),
            'time': str(datetime.now() - self.test_start_time),
        })

    def addFailure(self, test, err, capt=None):
        """Add failure output to Xunit report.
        """
        exc_type, exc_val, tb = err
        tb = ''.join(traceback.format_exception(
            exc_type,
            exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val),
            tb
        ))
        name = id_split(test.id())
        group = self.report_data[name[0]]
        self.stats['failures'] += 1
        group.stats['failures'] += 1
        group.tests.append({
            'name': name[-1],
            'failed': True,
            'errtype': nice_classname(err[0]),
            'message': exc_message(err),
            'tb': tb,
            'output': self._format_output(self.complete_test_output(exc_message(err), tb)),
            'shortDescription': test.shortDescription(),
            'time': str(datetime.now() - self.test_start_time),
        })

    def _format_output(self, o):
        # NOTE(review): str.decode exists only on Python 2 -- this class
        # appears to target Python 2 byte-string output capture.
        if isinstance(o, str):
            return o.decode('latin-1')
        else:
            return o
def run(self, cmd, timeout=None, quote=False, call_line_handler_func=False):
    """
    Note: This function is based on paramiko's exec_command()
    method.

    :param timeout: How long to wait (in seconds) for the command to finish
                    (optional).
    :type timeout: ``float``

    :param call_line_handler_func: True to call handle_stdout_line_func function
                                   for each line of received stdout and
                                   handle_stderr_line_func for each line of stderr.
    :type call_line_handler_func: ``bool``
    """
    if quote:
        # Shell-escape the command before execution.
        cmd = quote_unix(cmd)

    extra = {'_cmd': cmd}
    self.logger.info('Executing command', extra=extra)

    # Use the system default buffer size
    bufsize = -1

    transport = self.client.get_transport()
    chan = transport.open_session()

    start_time = time.time()

    if cmd.startswith('sudo'):
        # Note that fabric does this as well. If you set pty, stdout and stderr
        # streams will be combined into one.
        # NOTE: If pty is used, every new line character \n will be converted to \r\n which
        # isn't desired. Because of that we sanitize the output and replace \r\n with \n at the
        # bottom of this method
        uses_pty = True
        chan.get_pty()
    else:
        uses_pty = False
    chan.exec_command(cmd)

    stdout = StringIO()
    stderr = StringIO()

    # Create a stdin file and immediately close it to prevent any
    # interactive script from hanging the process.
    stdin = chan.makefile('wb', bufsize)
    stdin.close()

    # Receive all the output
    # Note #1: This is used instead of chan.makefile approach to prevent
    # buffering issues and hanging if the executed command produces a lot
    # of output.
    #
    # Note #2: If you are going to remove "ready" checks inside the loop
    # you are going to have a bad time. Trying to consume from a channel
    # which is not ready will block for indefinitely.
    exit_status_ready = chan.exit_status_ready()

    if exit_status_ready:
        # Command already finished: drain whatever output is pending once.
        stdout_data = self._consume_stdout(
            chan=chan,
            call_line_handler_func=call_line_handler_func)
        stdout_data = stdout_data.getvalue()

        stderr_data = self._consume_stderr(
            chan=chan,
            call_line_handler_func=call_line_handler_func)
        stderr_data = stderr_data.getvalue()

        stdout.write(stdout_data)
        stderr.write(stderr_data)

    while not exit_status_ready:
        current_time = time.time()
        elapsed_time = (current_time - start_time)

        if timeout and (elapsed_time > timeout):
            # TODO: Is this the right way to clean up?
            chan.close()

            stdout = sanitize_output(stdout.getvalue(), uses_pty=uses_pty)
            stderr = sanitize_output(stderr.getvalue(), uses_pty=uses_pty)
            raise SSHCommandTimeoutError(cmd=cmd, timeout=timeout,
                                         stdout=stdout, stderr=stderr)

        stdout_data = self._consume_stdout(
            chan=chan,
            call_line_handler_func=call_line_handler_func)
        stdout_data = stdout_data.getvalue()

        stderr_data = self._consume_stderr(
            chan=chan,
            call_line_handler_func=call_line_handler_func)
        stderr_data = stderr_data.getvalue()

        stdout.write(stdout_data)
        stderr.write(stderr_data)

        # We need to check the exit status here, because the command could
        # print some output and exit during this sleep below.
        exit_status_ready = chan.exit_status_ready()

        if exit_status_ready:
            break

        # Short sleep to prevent busy waiting
        concurrency.sleep(self.SLEEP_DELAY)
    # print('Wait over. Channel must be ready for host: %s' % self.hostname)

    # Receive the exit status code of the command we ran.
    status = chan.recv_exit_status()

    # Strip pty artifacts (\r\n) and any trailing shell noise.
    stdout = sanitize_output(stdout.getvalue(), uses_pty=uses_pty)
    stderr = sanitize_output(stderr.getvalue(), uses_pty=uses_pty)

    extra = {'_status': status, '_stdout': stdout, '_stderr': stderr}
    self.logger.debug('Command finished', extra=extra)

    return [stdout, stderr, status]
def __repr__(self):
    """Render the header line, then delegate card output to write_card."""
    buf = StringIO()
    buf.write('<PSHELL object> n=%s\n' % self.n)
    self.write_card(buf)
    return buf.getvalue()
def plot_3d(parts_list, charges, currents, centres, width=700, height=500,
            wireframe=False, skip_webgl=False):
    """Create a 3D plot in the IPython notebook

    Parameters
    ----------
    parts_list : list of Part
        All the parts to plot
    charges : list of ndarray
        The charge distribution on each part
    currents : list of ndarray
        The current distribution on each part
    centres : list of ndarray
        The centre of each triangle, where the current vector is located
    width : integer, optional
        The width of the plot
    height : integer, optional
        The height of the plot
    wireframe : bool, optional
        Whether the plot should initially show a wireframe view
    skip_webgl : bool, optional
        Do not attempt to use webgl rendering, always use slower canvas
        rendering

    Returns
    -------
    H : HTML
        An HTML object containing the necessary HTML, CSS and javascript
        to show the plot
    """
    meshes = [part.mesh for part in parts_list]
    nodes = [part.nodes for part in parts_list]
    # combine the meshes
    full_mesh = combine_mesh(meshes, nodes)

    # scale all nodes so that the maximum size is known
    mesh_scale = 100 / full_mesh.fast_size()
    full_mesh.nodes = full_mesh.nodes * mesh_scale

    # generate a javascript representation of the object; the uuid-based
    # name avoids collisions when several plots share one notebook page
    geometry_name = "geometry_" + str(uuid.uuid4()).replace('-', '')
    geometry_javascript = StringIO()
    geometry_javascript.write("var %s = " % geometry_name)

    geometry_tree = {
        'nodes': full_mesh.nodes.tolist(),
        'triangles': full_mesh.polygons.tolist()
    }

    # include the charge information if it is present
    if charges is not None:
        charges = np.hstack(charges)
        geometry_tree['charge'] = {
            'real': charges.real.tolist(),
            'imag': charges.imag.tolist(),
            'abs': abs(charges).tolist(),
            'phase': np.angle(charges, deg=True).tolist()
        }

    # Include the current information if it is present. Vectors will be of
    # the form (length, x, y, z), where the 3 cartesian components are scaled
    # to be a normal vector
    if currents is not None:
        currents = np.vstack(currents)
        # NOTE: normalisation mutates `currents` in place (local rebinding,
        # so the caller's arrays are only affected via the vstack copy).
        lengths_real = np.sqrt(np.sum(currents.real**2, axis=1))
        currents.real /= lengths_real[:, None]
        current_real = np.hstack((lengths_real[:, None],
                                  currents.real)).tolist()
        if np.any(np.iscomplex(currents)):
            lengths_imag = np.sqrt(np.sum(currents.imag**2, axis=1))
            currents.imag /= lengths_imag[:, None]
            current_imag = np.hstack(
                (lengths_imag[:, None], currents.imag)).tolist()
        else:
            # Purely real currents: emit zero imaginary vectors so the
            # javascript side always sees both components.
            current_imag = np.zeros((currents.shape[0], 4)).tolist()
        geometry_tree['current'] = {'real': current_real,
                                    'imag': current_imag}
        geometry_tree['centres'] = (np.vstack(centres) * mesh_scale).tolist()

    json.dump(geometry_tree, geometry_javascript)
    geometry_javascript.write(';')

    html_source = template_env.get_template('three_js_plot.html')
    html_generated = html_source.render({
        'geometry_javascript': geometry_javascript,
        'geometry_name': geometry_name,
        'canvas_width': width,
        'canvas_height': height,
        'initial_wireframe': wireframe,
        'skip_webgl': skip_webgl,
        'current_vector_len': np.median(full_mesh.edge_lens)
    })
    display(HTML(html_generated))
class InputFile(object):
    # Request bodies up to this size are buffered in memory; larger ones
    # spill to a temporary file.
    max_buffer_size = 1024 * 1024

    def __init__(self, rfile, length):
        """File-like object used to provide a seekable view of request body data"""
        self._file = rfile
        self.length = length

        # How far into the underlying (non-seekable) stream we have read.
        self._file_position = 0

        if length > self.max_buffer_size:
            self._buf = tempfile.TemporaryFile()
        else:
            self._buf = StringIO()

    @property
    def _buf_position(self):
        # Current read offset inside the replay buffer; it can never be
        # ahead of how much of the underlying stream has been consumed.
        rv = self._buf.tell()
        assert rv <= self._file_position
        return rv

    def read(self, bytes=-1):
        assert self._buf_position <= self._file_position

        if bytes < 0:
            bytes = self.length - self._buf_position
        bytes_remaining = min(bytes, self.length - self._buf_position)

        if bytes_remaining == 0:
            return ""

        if self._buf_position != self._file_position:
            # Serve as much as possible from data already buffered.
            buf_bytes = min(bytes_remaining,
                            self._file_position - self._buf_position)
            old_data = self._buf.read(buf_bytes)
            bytes_remaining -= buf_bytes
        else:
            old_data = ""

        assert bytes_remaining == 0 or self._buf_position == self._file_position, (
            "Before reading buffer position (%i) didn't match file position (%i)" %
            (self._buf_position, self._file_position))
        # Pull the remainder from the underlying stream and remember it so
        # a later seek() can replay it.
        new_data = self._file.read(bytes_remaining)
        self._buf.write(new_data)
        self._file_position += bytes_remaining
        assert bytes_remaining == 0 or self._buf_position == self._file_position, (
            "After reading buffer position (%i) didn't match file position (%i)" %
            (self._buf_position, self._file_position))

        return old_data + new_data

    def tell(self):
        return self._buf_position

    def seek(self, offset):
        if offset > self.length or offset < 0:
            raise ValueError
        if offset <= self._file_position:
            self._buf.seek(offset)
        else:
            # Seeking forward past buffered data: read (and buffer) the gap.
            self.read(offset - self._file_position)

    def readline(self, max_bytes=None):
        if max_bytes is None:
            max_bytes = self.length - self._buf_position

        if self._buf_position < self._file_position:
            # Try to satisfy the line entirely from the replay buffer.
            data = self._buf.readline(max_bytes)
            if data.endswith("\n") or len(data) == max_bytes:
                return data
        else:
            data = ""

        assert self._buf_position == self._file_position

        initial_position = self._file_position
        found = False
        buf = []
        max_bytes -= len(data)
        while not found:
            # Read ahead in tiny chunks, scanning for the newline.
            readahead = self.read(min(2, max_bytes))
            max_bytes -= len(readahead)
            for i, c in enumerate(readahead):
                if c == "\n":
                    buf.append(readahead[:i + 1])
                    found = True
                    break
            if not found:
                buf.append(readahead)
            if not readahead or not max_bytes:
                break
        new_data = "".join(buf)
        data += new_data
        # Rewind so the buffer position sits just past the returned line;
        # any over-read bytes stay buffered for the next call.
        self.seek(initial_position + len(new_data))
        return data

    def readlines(self):
        rv = []
        while True:
            data = self.readline()
            if data:
                rv.append(data)
            else:
                break
        return rv

    def next(self):
        # Python 2 iterator protocol: one line per step.
        data = self.readline()
        if data:
            return data
        else:
            raise StopIteration

    def __iter__(self):
        return self
def __repr__(self):
    """Header naming the card type and count, followed by the card body."""
    sio = StringIO()
    sio.write('<%s object> n=%s\n' % (self.type, self.n))
    self.write_card(sio)
    return sio.getvalue()
def _getInputFromUser(param):
    """
    this private func reads the data from the user
    for the given param

    Prompts (or reads the command-line-supplied value), applies the
    default, processes and validates the value, and stores the result in
    controller.CONF[param.CONF_NAME].  Loops until validation succeeds or
    the user declines a loose-validation override.  Raises Exception with
    ERR_EXP_READ_INPUT_PARAM on unexpected errors; KeyboardInterrupt is
    re-raised untouched.
    """
    loop = True
    userInput = None

    try:
        if param.USE_DEFAULT:
            # No prompting at all: take the declared default.
            logging.debug("setting default value (%s) for key (%s)"
                          % (mask(param.DEFAULT_VALUE), param.CONF_NAME))
            controller.CONF[param.CONF_NAME] = param.DEFAULT_VALUE
        else:
            while loop:
                # If the value was not supplied by the command line flags
                if param.CONF_NAME not in commandLineValues:
                    # Build the prompt: base text, option list (unless a
                    # regexp validator is in play), and the default value.
                    message = StringIO()
                    message.write(param.PROMPT)

                    val_list = param.VALIDATORS or []
                    if (validators.validate_regexp not in val_list
                            and param.OPTION_LIST):
                        message.write(" [%s]" % "|".join(param.OPTION_LIST))

                    if param.DEFAULT_VALUE:
                        message.write(" [%s] " % (str(param.DEFAULT_VALUE)))

                    message.write(": ")
                    message.seek(0)
                    # mask password or hidden fields
                    if (param.MASK_INPUT):
                        userInput = getpass.getpass("%s :" % (param.PROMPT))
                    else:
                        userInput = raw_input(message.read())
                else:
                    userInput = commandLineValues[param.CONF_NAME]

                # If DEFAULT_VALUE is set and user did not input anything
                if userInput == "" and len(str(param.DEFAULT_VALUE)) > 0:
                    userInput = param.DEFAULT_VALUE

                # Param processing
                userInput = process_param_value(param, userInput)

                # If param requires validation
                try:
                    validate_param_value(param, userInput)
                    controller.CONF[param.CONF_NAME] = userInput
                    loop = False
                except ParamValidationError:
                    if param.LOOSE_VALIDATION:
                        # If validation failed but LOOSE_VALIDATION is true, ask user
                        answer = _askYesNo("User input failed validation, "
                                           "do you still wish to use it")
                        loop = not answer
                        if answer:
                            controller.CONF[param.CONF_NAME] = userInput
                            continue
                        else:
                            if param.CONF_NAME in commandLineValues:
                                del commandLineValues[param.CONF_NAME]
                    else:
                        # Delete value from commandLineValues so that we will
                        # prompt the user for input
                        if param.CONF_NAME in commandLineValues:
                            del commandLineValues[param.CONF_NAME]
                        loop = True
    except KeyboardInterrupt:
        # add the new line so messages wont be displayed in the same line
        # as the question
        print("")
        raise
    except Exception:
        # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
        # GeneratorExit; narrowed to Exception, matching the sibling copy of
        # this function elsewhere in the file.
        logging.error(traceback.format_exc())
        raise Exception(output_messages.ERR_EXP_READ_INPUT_PARAM
                        % (param.CONF_NAME))
class CaptureStdOut(object):
    """
    An logger that both prints to stdout and writes to file.

    Used as a context manager: inside the `with` block both sys.stdout and
    sys.stderr are routed through this object, which echoes to the real
    stdout (optionally prefixed) and appends to a file or in-memory log.
    """

    def __init__(self, log_file_path = None, print_to_console = True, prefix = None):
        """
        :param log_file_path: The path to save the records, or None if you just want to keep it in memory
        :param print_to_console: If True, echo every message to the real stdout as well.
        :param prefix: Optional string prepended to each non-newline message.
        """
        self._print_to_console = print_to_console
        if log_file_path is not None:
            # self._log_file_path = os.path.join(base_dir, log_file_path.replace('%T', now))
            make_file_dir(log_file_path)
            self.log = open(log_file_path, 'w')
        else:
            # In-memory capture only.
            self.log = StringIO()
        self._log_file_path = log_file_path
        # Fallback target until __enter__ captures the current sys.stdout.
        self.old_stdout = _ORIGINAL_STDOUT
        self.prefix = None if prefix is None else prefix

    def __enter__(self):
        # Redirect both stdout and stderr through this object.
        self.old_stdout = sys.stdout
        self.old_stderr = sys.stderr

        sys.stdout = self
        sys.stderr = self
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Flush through self (still installed), then restore the originals.
        sys.stdout.flush()
        sys.stderr.flush()
        sys.stdout = self.old_stdout
        sys.stderr = self.old_stderr
        self.close()

    def get_log_file_path(self):
        assert self._log_file_path is not None, "You never specified a path when you created this logger, so don't come back and ask for one now"
        return self._log_file_path

    def write(self, message):
        # Echo to the real stdout (prefixing all but bare newlines) and
        # append to the log, flushing so the file stays current.
        if self._print_to_console:
            self.old_stdout.write(message if self.prefix is None or message=='\n' else self.prefix+message)
        self.log.write(message)
        self.log.flush()

    def close(self):
        # Only close real files; the in-memory StringIO is kept open so
        # read() still works after the context exits.
        if self._log_file_path is not None:
            self.log.close()

    def read(self):
        if self._log_file_path is None:
            return self.log.getvalue()
        else:
            with open(self._log_file_path) as f:
                txt = f.read()
            return txt

    def __getattr__(self, item):
        # Delegate anything else (e.g. fileno, isatty) to the real stdout so
        # code probing the stream keeps working while redirected.
        return getattr(self.old_stdout, item)
def _getInputFromUser(param):
    """
    this private func reads the data from the user
    for the given param

    Prompts (or reads the command-line-supplied value), applies the
    default, processes and validates the value, and stores the result in
    controller.CONF[param.CONF_NAME].  Loops until validation succeeds or
    the user declines a loose-validation override.
    """
    loop = True
    userInput = None

    try:
        if param.USE_DEFAULT:
            # No prompting at all: take the declared default.
            logging.debug("setting default value (%s) for key (%s)"
                          % (mask(param.DEFAULT_VALUE), param.CONF_NAME))
            controller.CONF[param.CONF_NAME] = param.DEFAULT_VALUE
        else:
            while loop:
                # If the value was not supplied by the command line flags
                if param.CONF_NAME not in commandLineValues:
                    # Build the prompt: base text, option list (unless a
                    # regexp validator is in play), and the default value.
                    message = StringIO()
                    message.write(param.PROMPT)

                    val_list = param.VALIDATORS or []
                    if (validators.validate_regexp not in val_list
                            and param.OPTION_LIST):
                        message.write(" [%s]" % "|".join(param.OPTION_LIST))

                    if param.DEFAULT_VALUE:
                        message.write(" [%s] " % (str(param.DEFAULT_VALUE)))

                    message.write(": ")
                    message.seek(0)
                    # mask password or hidden fields
                    if (param.MASK_INPUT):
                        userInput = getpass.getpass("%s :" % (param.PROMPT))
                    else:
                        userInput = raw_input(message.read())
                else:
                    userInput = commandLineValues[param.CONF_NAME]

                # If DEFAULT_VALUE is set and user did not input anything
                if userInput == "" and len(str(param.DEFAULT_VALUE)) > 0:
                    userInput = param.DEFAULT_VALUE

                # Param processing
                userInput = process_param_value(param, userInput)

                # If param requires validation
                try:
                    validate_param_value(param, userInput)
                    controller.CONF[param.CONF_NAME] = userInput
                    loop = False
                except ParamValidationError:
                    if param.LOOSE_VALIDATION:
                        # If validation failed but LOOSE_VALIDATION is true, ask user
                        answer = _askYesNo("User input failed validation, "
                                           "do you still wish to use it")
                        loop = not answer
                        if answer:
                            controller.CONF[param.CONF_NAME] = userInput
                            continue
                        else:
                            if param.CONF_NAME in commandLineValues:
                                del commandLineValues[param.CONF_NAME]
                    else:
                        # Delete value from commandLineValues so that we will
                        # prompt the user for input
                        if param.CONF_NAME in commandLineValues:
                            del commandLineValues[param.CONF_NAME]
                        loop = True
    except KeyboardInterrupt:
        # add the new line so messages wont be displayed in the same line
        # as the question
        print("")
        raise
    except Exception:
        # Log the full traceback, then surface a user-facing error.
        logging.error(traceback.format_exc())
        raise Exception(output_messages.ERR_EXP_READ_INPUT_PARAM
                        % (param.CONF_NAME))
def __repr__(self):
    """Return '<TYPE object> n=N' plus the BDF rendering of this card."""
    text = StringIO()
    text.write('<%s object> n=%s\n' % (self.type, self.n))
    self.write_bdf(text)
    return text.getvalue()
def __repr__(self):
    """Compose the debug header and the card text into one string."""
    writer = StringIO()
    writer.write('<%s object> n=%s\n' % (self.type, self.n))
    self.write_card(writer)
    return writer.getvalue()
def run(args, verbose=False):
    """Run DIALS spot-finding on a single image for the spotfinder server.

    args: dict-like of HTTP query parameters (file_name, frame_number,
    stats, plus arbitrary spot-finding phil overrides).  Returns the
    captured log text on success, or a Sorry message string on failure.
    NOTE(review): Python 2 syntax (print statements).
    """
    from libtbx.utils import Sorry
    try:
        from dials.array_family import flex
    except ImportError:
        return str(Sorry("DIALS is not configured"))

    from iotbx.phil import parse
    import os
    from spotfinder.servers import LoggingFramework
    from dials.array_family import flex
    from dxtbx.model.experiment_list import ExperimentListFactory

    phil_scope = parse("""
  file_name = None
    .type = str
  frame_number = None
    .type = int
  stats = True
    .type = bool
  include scope dials.algorithms.spot_finding.factory.phil_scope
  """, process_includes=True)

    # For the Apache server version, do not allow site, user, or dataset
    # preferences; all parameters are to be passed in through the http:
    # query line
    logfile = LoggingFramework()

    # Each query key=value pair becomes a phil override.
    phil_objects = []
    for key in args.keys():
        arg = "%s=%s" % (key, args.get(key, ""))
        try:
            phil_objects.append(parse(arg))
        except Exception:
            return str(Sorry("Unknown file or keyword: %s" % arg))

    working_params = phil_scope.fetch(sources=phil_objects)
    params = working_params.extract()
    # working_params.show()

    if not os.path.isfile(params.file_name):
        return str(Sorry("%s is not a readable file" % params.file_name))

    print "Image: %s\n" % params.file_name

    try:
        experiments = ExperimentListFactory.from_filenames([params.file_name])
        assert len(experiments) == 1
        if len(experiments[0].imageset) > 0 and params.frame_number is not None:
            # Restrict the imageset to the single requested frame.
            print "Frame number", params.frame_number
            experiments[0].imageset = experiments[0].imageset[
                params.frame_number:params.frame_number + 1]
            experiments[0].scan = experiments[0].imageset.get_scan()
        reflections = flex.reflection_table.from_observations(
            experiments, params)

        if params.stats:
            from dials.algorithms.spot_finding.per_image_analysis import stats_single_image
            print stats_single_image(experiments[0].imageset,
                                     reflections, i=None,
                                     resolution_analysis=True, plot=False)
    except Exception:
        # Capture the traceback into the returned message rather than
        # letting it escape to the server.
        import traceback
        logger = StringIO()
        logger.write("Sorry, can't process %s. Please contact authors.\n"
                     % params.file_name)
        traceback.print_exc(file=logger)
        return str(Sorry(logger.getvalue())) + logfile.getvalue()

    print "Found %d strong reflections" % len(reflections)

    return logfile.getvalue()
def TransformGroup(r, *keys):
    """Formats a [...] grouped list.

    Each group is enclosed in [...]. The first item separator is ':',
    subsequent separators are ','.

      [item1] [item1] ... [item1: item2] ...
      [item1: item2] [item1: item2, item3] ... [item1: item2, item3]

    Args:
      r: A JSON-serializable object.
      *keys: Optional attribute keys to select from the list. Otherwise the
        string value of each list item is selected.

    Returns:
      The [...] grouped formatted list, [] if r is empty.
    """
    if not r:
        return '[]'
    parsed_keys = [_GetParsedKey(key) for key in keys]
    groups = []
    for element in r:
        if not parsed_keys:
            # No keys: render each item whole.
            groups.append('[{0}]'.format(six.text_type(element)))
            continue
        pieces = ['[']
        delim = None
        for parsed_key in parsed_keys:
            # The delimiter precedes every value after the first; it is
            # emitted even when the value itself is None and skipped,
            # matching the original behaviour exactly.
            if delim:
                pieces.append(delim)
                delim = ', '
            else:
                delim = ': '
            value = resource_property.Get(element, parsed_key, None)
            if value is not None:
                pieces.append(six.text_type(value))
        pieces.append(']')
        groups.append(''.join(pieces))
    return ' '.join(groups)