def update(filename='invenio.cfg', silent=True):
    """Update new config.py from conf options.

    The previous config.py is kept in a backup copy.
    """
    d = get_instance_config_object(filename, silent)
    new_config = StringIO()
    keys = set(d.__dict__.keys()) | set(default_keys())
    keys = list(keys)
    keys.sort()

    for key in keys:
        if key != key.upper():
            continue
        value = d.__dict__.get(key, current_app.config[key])
        type_ = type(value)
        prmt = key + ' (' + type_.__name__ + ') [' + pformat(value) + ']: '

        new_value = raw_input(prmt)
        try:
            new_value = ast.literal_eval(new_value)
        except (SyntaxError, ValueError):
            pass

        print('>>>', key, '=', pformat(new_value))
        print(key, '=', pformat(new_value), file=new_config)

    with current_app.open_instance_resource(filename, 'w') as config_file:
        config_file.write(new_config.getvalue())
def request(url, expected_dict):
    buffer = StringIO()
    curl, result = get_curl(buffer, url)

    body = buffer.getvalue()
    rendering_time = "%s;%s;%s;%s;%s" % \
        (curl.getinfo(curl.NAMELOOKUP_TIME),
         curl.getinfo(curl.CONNECT_TIME),
         curl.getinfo(curl.PRETRANSFER_TIME),
         curl.getinfo(curl.STARTTRANSFER_TIME),
         curl.getinfo(curl.TOTAL_TIME))

    response_code = curl.getinfo(pycurl.HTTP_CODE)

    expected_response = expected_dict.get("expected_response", None)
    if expected_response is not None and \
            expected_response != response_code:
        result = "UNEXPECTED (%s != %s)" % (expected_response, response_code)

    expected_text = expected_dict.get("expected_text", None)
    if expected_text is not None and \
            str(expected_text) not in str(body):
        result = "UNEXPECTED (%s not in page content)" % (expected_text)

    curl.close()

    info_list = ('GET', url, response_code, rendering_time, result)
    return info_list
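# A minimal usage sketch for request() above; the URL is hypothetical, and the
# expected_dict keys shown are the ones the function actually reads:
info = request("http://localhost:8080/status",
               {"expected_response": 200, "expected_text": "OK"})
print "%s %s -> %s timings=%s result=%s" % info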
def getSavedFormInputForEdit(self, header=False, delimiter=','):
    """ returns saved input as CSV text """
    sbuf = StringIO()
    writer = csvwriter(sbuf, delimiter=delimiter)
    names = self.getColumnNames()
    titles = self.getColumnTitles()

    if header:
        encoded_titles = []
        for t in titles:
            if isinstance(t, unicode):
                t = t.encode('utf-8')
            encoded_titles.append(t)
        writer.writerow(encoded_titles)

    for row in self.getSavedFormInput():
        def get_data(row, i):
            data = row.get(i, '')
            if self._is_file_data(data):
                return data.filename
            if isinstance(data, unicode):
                return data.encode('utf-8')
            return data
        writer.writerow([get_data(row, i) for i in names])

    res = sbuf.getvalue()
    sbuf.close()
    return res
def dump_table(table, filename, con, std=None, delimiter=',',
               format=None, encoding='utf-8', inspector=None):
    from uliweb.utils.common import str_value
    from StringIO import StringIO
    import csv

    if not std:
        if isinstance(filename, (str, unicode)):
            std = open(filename, 'w')
        else:
            std = filename
    else:
        std = sys.stdout

    # add inspector table columns process, will not use model fields
    # but database fields
    if inspector:
        meta = MetaData()
        table = Table(table.name, meta)
        inspector.reflecttable(table, None)

    result = do_(table.select())
    fields = [x.name for x in table.c]
    if not format:
        print >>std, '#' + ' '.join(fields)
    elif format == 'txt':
        print >>std, '#' + ','.join(fields)
    for r in result:
        if not format:
            print >>std, r
        elif format == 'txt':
            buf = StringIO()
            fw = csv.writer(buf, delimiter=delimiter)
            fw.writerow([str_value(x, encoding=encoding) for x in r])
            print >>std, buf.getvalue().rstrip()
        else:
            raise Exception, "Can't support the text format %s" % format
def write_to_file(locations):
    """
    locations = [
        ('loc_type1', {
            'headers': ['header1', 'header2', ...],
            'rows': [
                {
                    'header1': val1,
                    'header2': val2,
                },
                {...},
            ]
        })
    ]
    """
    outfile = StringIO()
    writer = Excel2007ExportWriter()
    header_table = [(loc_type, [tab['headers']])
                    for loc_type, tab in locations]
    writer.open(header_table=header_table, file=outfile)
    for loc_type, tab in locations:
        headers = tab['headers']
        tab_rows = [[row.get(header, '') for header in headers]
                    for row in tab['rows']]
        writer.write([(loc_type, tab_rows)])
    writer.close()
    return outfile.getvalue()
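# A hypothetical call to write_to_file() showing the `locations` shape the
# docstring describes (Excel2007ExportWriter is assumed to come from the
# surrounding export library; the loc_type and headers here are made up):
workbook_bytes = write_to_file([
    ('district', {
        'headers': ['site_code', 'name'],
        'rows': [
            {'site_code': 'd1', 'name': 'North'},
            {'site_code': 'd2', 'name': 'South'},
        ],
    }),
])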
def loop(self, filename, format):
    original_records = list(SeqIO.parse(open(filename, "rU"), format))
    # now open a connection to load the database
    server = BioSeqDatabase.open_database(driver=DBDRIVER,
                                          user=DBUSER, passwd=DBPASSWD,
                                          host=DBHOST, db=TESTDB)
    db_name = "test_loop_%s" % filename  # new namespace!
    db = server.new_database(db_name)
    count = db.load(original_records)
    self.assertEqual(count, len(original_records))
    server.commit()
    # Now read them back...
    biosql_records = [db.lookup(name=rec.name)
                      for rec in original_records]
    # And check they agree
    self.assertTrue(compare_records(original_records, biosql_records))
    # Now write to a handle...
    handle = StringIO()
    SeqIO.write(biosql_records, handle, "gb")
    # Now read them back...
    handle.seek(0)
    new_records = list(SeqIO.parse(handle, "gb"))
    # And check they still agree
    self.assertEqual(len(new_records), len(original_records))
    for old, new in zip(original_records, new_records):
        # TODO - remove this hack because we don't write these (yet):
        for key in ["comment", "references", "db_source"]:
            if key in old.annotations and key not in new.annotations:
                del old.annotations[key]
        self.assertTrue(compare_record(old, new))
    # Done
    server.close()
def render_POST(self, request):
    text = request.args.get("feedback")
    if text is None:
        raise FeedbackException("No text.")
    if len(text) > 50000:
        raise FeedbackException("Too much text.")

    text = text[0]

    # basic checksum to stop really lame kiddies spamming,
    # see feedback.js for the js version
    checksum = 0
    text = text.decode("utf-8", "ignore")
    for x in text:
        checksum = ((checksum + 1) % 256) ^ (ord(x) % 256)

    sentchecksum = int(request.args.get("c", [0])[0])
    if checksum != sentchecksum:
        raise FeedbackException("Bad checksum: %d vs. %d"
                                % (sentchecksum, checksum))

    msg = MIMEText(text.encode("utf-8"), _charset="utf-8")
    # Twisted's Request spells this getClientIP (the original had a typo).
    msg["Subject"] = "qwebirc feedback from %s" % request.getClientIP()
    msg["From"] = config.feedbackengine["from"]
    msg["To"] = config.feedbackengine["to"]
    email = StringIO(msg.as_string())
    email.seek(0, 0)

    factorytype = SMTPSenderFactory
    factory = factorytype(fromEmail=config.feedbackengine["from"],
                          toEmail=config.feedbackengine["to"],
                          file=email, deferred=defer.Deferred())
    reactor.connectTCP(config.feedbackengine["smtp_host"],
                       config.feedbackengine["smtp_port"], factory)
    self.__hit()
    return "1"
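# The anti-spam checksum from render_POST(), factored out as a standalone
# sketch so the algorithm mirrored in feedback.js is easier to see:
def feedback_checksum(text):
    checksum = 0
    for x in text:
        checksum = ((checksum + 1) % 256) ^ (ord(x) % 256)
    return checksum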
def test_import(self, server_proxy, requests):
    """
    Test import operation
    """
    proxy = MagicMock()
    proxy.ImportInfrastructure.return_value = (True, "newinfid")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("import", options, [get_abs_path("../files/test.radl")], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("New Inf: newinfid", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("import", options, [get_abs_path("../files/test.radl")], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("New Inf: newinfid", output)

    sys.stdout = oldstdout
def test_sshvm_key(self, server_proxy):
    """
    Test sshvm operation
    """
    proxy = MagicMock()
    radl = open(get_abs_path("../files/test_priv.radl"), 'r').read()
    proxy.GetVMInfo.return_value = (True, radl)
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    oldstderr = sys.stderr
    sys.stdout = out
    sys.stderr = out
    res = main("sshvm", options, ["infid", "vmid", "1"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("ssh -p 1022 -i /tmp/", output)
    self.assertIn(" -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no [email protected]", output)
    sys.stdout = oldstdout
    sys.stderr = oldstderr
def test_rebootvm(self, server_proxy, requests):
    """
    Test rebootvm operation
    """
    proxy = MagicMock()
    proxy.RebootVM.return_value = (True, "")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("rebootvm", options, ["infid", "vmid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("VM successfully rebooted", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("rebootvm", options, ["infid", "vmid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("VM successfully rebooted", output)

    sys.stdout = oldstdout
def test_getversion(self, server_proxy, requests):
    """
    Test getversion operation
    """
    proxy = MagicMock()
    proxy.GetVersion.return_value = (True, "1.0")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("getversion", options, [], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("1.0", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("getversion", options, [], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("1.0", output)

    sys.stdout = oldstdout
def test_getstate(self, server_proxy, requests):
    """
    Test getstate operation
    """
    proxy = MagicMock()
    proxy.GetInfrastructureState.return_value = (
        True, {"state": "running", "vm_states": {"vm1": "running"}})
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("getstate", options, ["infid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("The infrastructure is in state: running\nVM ID: vm1 is in state: running.", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("getstate", options, ["infid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("The infrastructure is in state: running\nVM ID: vm1 is in state: running.", output)

    sys.stdout = oldstdout
def test_getinfo(self, server_proxy, requests):
    """
    Test getinfo operation
    """
    proxy = MagicMock()
    proxy.GetVMInfo.return_value = (True, "radltest")
    proxy.GetInfrastructureInfo.return_value = (True, ["vm1"])
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("getinfo", options, ["infid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Info about VM with ID: vm1\nradltest", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("getinfo", options, ["infid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Info about VM with ID: vm1\nradltest", output)

    sys.stdout = oldstdout
def test_addresource(self, server_proxy, requests):
    """
    Test addresource operation
    """
    proxy = MagicMock()
    proxy.AddResource.return_value = (True, ["1"])
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("addresource", options,
               ["infid", get_abs_path("../files/test.radl")], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Resources with IDs: 1 successfully added.", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("addresource", options,
               ["infid", get_abs_path("../files/test.radl")], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Resources with IDs: 1 successfully added.", output)

    sys.stdout = oldstdout
def test_getcontmsg(self, server_proxy, requests):
    """
    Test getcontmsg operation
    """
    proxy = MagicMock()
    proxy.GetInfrastructureContMsg.return_value = (True, "contmsg")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("getcontmsg", options, ["infid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Msg Contextualizator: \n\ncontmsg", output)

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("getcontmsg", options, ["infid"], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Msg Contextualizator: \n\ncontmsg", output)

    sys.stdout = oldstdout
def test_list(self, server_proxy, requests):
    """
    Test list operation
    """
    proxy = MagicMock()
    proxy.GetInfrastructureList.return_value = (True, ["inf1", "inf2"])
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.xmlrpc = "https://localhost:8899"
    options.restapi = None
    options.verify = False
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("list", options, [], parser)
    output = out.getvalue().strip()
    self.assertEquals(res, True)
    self.assertIn("IDs: \n  inf1\n  inf2", output)
    sys.stdout = oldstdout

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("list", options, [], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("IDs: \n  inf1\n  inf2", output)

    sys.stdout = oldstdout
def test_create(self, server_proxy, requests):
    """
    Test create operation
    """
    proxy = MagicMock()
    proxy.CreateInfrastructure.return_value = (True, "inf1")
    server_proxy.return_value = proxy
    options = MagicMock()
    options.auth_file = get_abs_path("../../auth.dat")
    options.restapi = None
    parser = MagicMock()

    out = StringIO()
    oldstdout = sys.stdout
    sys.stdout = out
    res = main("create", options, [get_abs_path("../files/test.radl")], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Infrastructure successfully created with ID: inf1", output)
    sys.stdout = oldstdout

    out = StringIO()
    sys.stdout = out
    options.xmlrpc = None
    options.restapi = "https://localhost:8800"
    requests.side_effect = self.get_response
    res = main("create", options, [get_abs_path("../files/test.radl")], parser)
    self.assertEquals(res, True)
    output = out.getvalue().strip()
    self.assertIn("Infrastructure successfully created with ID: inf1", output)

    sys.stdout = oldstdout
def command_response(self, cmd):
    """Return stdout produced by the specified CLI command"""
    buf = StringIO()
    with redirect_stdout(buf):
        gist.cli.main(argv=shlex.split(cmd), config=self.config)
    return buf.getvalue().splitlines()
def checkRoundtrip(self, t):
    s = StringIO()
    t.write(s)
    s.seek(0)
    t2 = xpt.Typelib.read(s)
    self.assert_(t2 is not None)
    self.assertEqualTypelibs(t, t2)
def deepCopy(obj):
    stream = StringIO()
    p = Pickler(stream, 1)
    p.dump(obj)
    stream.seek(0)
    u = Unpickler(stream)
    return u.load()
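# A small usage sketch for deepCopy(): the clone is structurally equal but
# shares no mutable state with the original.
original = {'a': [1, 2, 3]}
clone = deepCopy(original)
clone['a'].append(4)
assert original['a'] == [1, 2, 3]
assert clone['a'] == [1, 2, 3, 4]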
def browse(self, max_lines=None, headers=None):
    """Try reading specified number of lines from the CSV object.

    Args:
      max_lines: max number of lines to read. If None, the whole file is read.
      headers: a list of strings as column names. If None, it will use
          "col0, col1...".

    Returns:
      A pandas DataFrame with the schema inferred from the data.

    Raises:
      Exception if the csv object cannot be read, there are not enough lines
      to read, or the headers size does not match the columns size.
    """
    if self.path.startswith('gs://'):
        lines = Csv._read_gcs_lines(self.path, max_lines)
    else:
        lines = Csv._read_local_lines(self.path, max_lines)
    if len(lines) == 0:
        return pd.DataFrame(columns=headers)

    columns_size = len(next(csv.reader([lines[0]], delimiter=self._delimiter)))
    if headers is None:
        headers = ['col' + newstr(e) for e in range(columns_size)]
    if len(headers) != columns_size:
        raise Exception('Number of columns in CSV does not match number of headers')

    buf = StringIO()
    for line in lines:
        buf.write(line)
        buf.write('\n')
    buf.seek(0)
    df = pd.read_csv(buf, names=headers, delimiter=self._delimiter)
    for key, col in df.iteritems():
        if self._is_probably_categorical(col):
            df[key] = df[key].astype('category')
    return df
def _export_dashboard(request, dashboard_id):
    """
    Exports a dashboard as a multi-sheet Excel workbook.

    This is a helper method for export_dashboard and export_shared_dashboard
    below. It renders an export without requiring login, so it should not be
    exposed directly via a URL pattern.
    """
    dashboard = Dashboard.objects.get(id=dashboard_id)

    stream = StringIO()
    workbook = xlwt.Workbook()
    for metric in dashboard.metric_set.all():
        metric.export(workbook)
    workbook.save(stream)

    response = HttpResponse(
        stream.getvalue(),
        mimetype='application/vnd.ms-excel'
    )
    response['Content-Disposition'] = 'attachment; filename="%s-%s.xls"' \
        % (slugify(dashboard.name), date.today())
    stream.close()
    return response
def image(path):
    if '..' in path:
        abort(500)
    fd = open(join(app.root_path, "images", path))
    data = fd.read()

    hsize = int(request.args.get("h", 0))
    vsize = int(request.args.get("v", 0))
    if hsize > 1000 or vsize > 1000:
        abort(500)

    if hsize:
        image = Image.open(StringIO(data))
        x, y = image.size
        x1 = hsize
        y1 = int(1.0 * y * hsize / x)
        image.thumbnail((x1, y1), Image.ANTIALIAS)
        output = StringIO()
        image.save(output, "PNG")
        data = output.getvalue()
    if vsize:
        image = Image.open(StringIO(data))
        x, y = image.size
        x1 = int(1.0 * x * vsize / y)
        y1 = vsize
        image.thumbnail((x1, y1), Image.ANTIALIAS)
        output = StringIO()
        image.save(output, "PNG")
        data = output.getvalue()

    response = make_response(data)
    # guess_type() returns a (type, encoding) tuple; only the type string
    # belongs in the header.
    response.headers['content-type'] = mimetypes.guess_type(path)[0]
    return response
def __call(self, method, endpoint, params={}, postfield={},
           urlencode=True, timeout=True):
    c = pycurl.Curl()
    b = StringIO()

    # 24/4-2012 [email protected] avoid crash in libcurl
    # "longjmp causes uninitialized stack frame"
    c.setopt(pycurl.NOSIGNAL, 1)

    c.setopt(pycurl.URL, self.server + endpoint.format(**params))
    c.setopt(pycurl.FOLLOWLOCATION, False)
    c.setopt(pycurl.CONNECTTIMEOUT, 2)
    if timeout:
        c.setopt(pycurl.TIMEOUT, 2)
    c.setopt(pycurl.NOSIGNAL, 1)
    c.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_DIGEST)
    c.setopt(pycurl.USERPWD, self.user + ':' + self.password)
    c.setopt(pycurl.HTTPHEADER, ['X-Requested-Auth: Digest'])

    if (method == 'POST'):
        if urlencode:
            c.setopt(pycurl.POST, 1)
            c.setopt(pycurl.POSTFIELDS, urllib.urlencode(postfield))
        else:
            c.setopt(pycurl.HTTPPOST, postfield)
    c.setopt(pycurl.WRITEFUNCTION, b.write)
    #c.setopt(pycurl.VERBOSE, True)
    try:
        c.perform()
    except:
        raise RuntimeError, 'connect timed out!'
    status_code = c.getinfo(pycurl.HTTP_CODE)
    c.close()
    if status_code != 200:
        logger.error('call error in %s, status code {%r}',
                     self.server + endpoint.format(**params), status_code)
        raise IOError, 'Error in Matterhorn client'
    return b.getvalue()
def test_threading_easy_single(self):
    out = StringIO()
    threading_easy(self.func, self.myiter, 1, ',', out)
    benchmark = set(['mymy', 'namename', 'danieldaniel', 'isis', ''])
    results = set(out.getvalue().split(','))
    self.assertEqual(results, benchmark)
def test_to_html(self):
    # big mixed
    biggie = DataFrame({'A': randn(200),
                        'B': tm.makeStringIndex(200)},
                       index=range(200))

    biggie['A'][:20] = nan
    biggie['B'][:20] = nan
    s = biggie.to_html()

    buf = StringIO()
    retval = biggie.to_html(buf=buf)
    self.assert_(retval is None)
    self.assertEqual(buf.getvalue(), s)

    self.assert_(isinstance(s, basestring))

    biggie.to_html(columns=['B', 'A'], col_space=17)
    biggie.to_html(columns=['B', 'A'],
                   formatters={'A': lambda x: '%.1f' % x})

    biggie.to_html(columns=['B', 'A'], float_format=str)
    biggie.to_html(columns=['B', 'A'], col_space=12, float_format=str)

    frame = DataFrame(index=np.arange(200))
    frame.to_html()
def update_main_config(self, configobj):
    cfile = StringIO()
    configobj.write(cfile)
    cfile.seek(0)
    self.myconfig.readfp(cfile)
    self.save_main_config(self.mainconfigfilename)
    self.load_main_config()
def _queryapi(self, method_url, get, post):
    c = pycurl.Curl()
    if bool(get):
        query_url = method_url + '?' + urlencode(get)
    else:
        query_url = method_url
    c.setopt(c.URL, query_url)

    if bool(post):
        # first clear all fields that are None
        post_cleared = {}
        for i in post:
            if post[i] is not None:
                post_cleared[i] = post[i]
        postfields = urlencode(post_cleared)
        c.setopt(c.POSTFIELDS, postfields)

    buffer = StringIO()
    c.setopt(c.WRITEFUNCTION, buffer.write)
    c.setopt(c.HTTPHEADER, ['PddToken: ' + self.token])
    c.perform()

    http_response_code = c.getinfo(c.RESPONSE_CODE)
    http_response_data = json.loads(buffer.getvalue())
    c.close()

    if 200 != http_response_code:
        # use the status code saved before close() (the original re-queried
        # the closed handle and concatenated an int into the message), and
        # read the API error from the decoded JSON dict
        self.module.fail_json(
            msg='Error querying yandex pdd api, HTTP status=%s error=%s'
                % (http_response_code, http_response_data.get('error', '')))

    return (http_response_code, http_response_data)
def test_to_string(self):
    buf = StringIO()

    s = self.ts.to_string()

    retval = self.ts.to_string(buf=buf)
    self.assert_(retval is None)
    self.assertEqual(buf.getvalue().strip(), s)

    # pass float_format
    format = '%.4f'.__mod__
    result = self.ts.to_string(float_format=format)
    result = [x.split()[1] for x in result.split('\n')]
    expected = [format(x) for x in self.ts]
    self.assertEqual(result, expected)

    # empty string
    result = self.ts[:0].to_string()
    self.assertEqual(result, '')

    result = self.ts[:0].to_string(length=0)
    self.assertEqual(result, '')

    # name and length
    cp = self.ts.copy()
    cp.name = 'foo'
    result = cp.to_string(length=True, name=True)
    last_line = result.split('\n')[-1].strip()
    self.assertEqual(last_line, "Freq: B, Name: foo, Length: %d" % len(cp))
def dump_fixtures(request):
    output = StringIO()
    fixture = request.GET.get('fixture', None)
    try:
        if fixture:
            call_command('dumpdata', fixture, '--indent=2', stdout=output)
        else:
            call_command('dumpdata', '--indent=2', stdout=output)
        data = output.getvalue()
        output.close()

        if fixture:
            file_label = 'fixtures_%s_%s' % (
                fixture, datetime.datetime.now().strftime('%d-%b-%Y_%H-%M'))
        else:
            file_label = 'fixtures_all_%s' % \
                datetime.datetime.now().strftime('%d-%b-%Y_%H-%M')

        response = HttpResponse(data, content_type="application/json")
        response['Content-Disposition'] = 'attachment; filename=%s' % file_label
        return response
    except:
        dest = request.META.get('HTTP_REFERER', '/')
        messages.info(request, 'Fixture name not recognized: %s' % fixture)
        return HttpResponseRedirect(dest)
#!/usr/bin/python
import sys
import pycurl
from StringIO import StringIO
from os import walk

print len(sys.argv)
if len(sys.argv) < 3:
    print "Please use uploader as: uploadTest.py IP_ADDRESS CODE_NAME.BIN"
else:
    buffer = StringIO()
    c = pycurl.Curl()
    c.setopt(c.URL, str(sys.argv[1]) + "/edit")

    f = []
    path = "data\\"
    for (dirpath, dirnames, filenames) in walk(path):
        f.extend(filenames)
        break

    for file in f:
        print path + file
        c.setopt(c.HTTPPOST, [
            ('fileupload', (
                # upload the contents of this file
                c.FORM_FILE, path + file,
            )),
        ])
        c.perform()

    print sys.argv[2]
def main():
    parser = argparse.ArgumentParser(
        description='Reformat changed lines in diff. Without -i '
                    'option just output the diff that would be '
                    'introduced. Use something like: '
                    'git diff master..my-branch | ./sys/clang-format-diff.py -p1 -i')
    parser.add_argument('-i', action='store_true', default=False,
                        help='apply edits to files instead of displaying a diff')
    parser.add_argument('-p', metavar='NUM', default=0,
                        help='strip the smallest prefix containing P slashes')
    parser.add_argument('-regex', metavar='PATTERN', default=None,
                        help='custom pattern selecting file paths to reformat '
                             '(case sensitive, overrides -iregex)')
    parser.add_argument('-iregex', metavar='PATTERN',
                        default=r'.*\.(cpp|cc|c\+\+|cxx|c|cl|h|hpp|m|mm|inc|js|ts|proto'
                                r'|protodevel|java)',
                        help='custom pattern selecting file paths to reformat '
                             '(case insensitive, overridden by -regex)')
    parser.add_argument('-sort-includes', action='store_true', default=False,
                        help='let clang-format sort include blocks')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='be more verbose, ineffective without -i')
    parser.add_argument('--debug', action='store_true', help='debug mode')
    parser.add_argument('-style',
                        help='formatting style to apply (LLVM, Google, Chromium, '
                             'Mozilla, WebKit)')
    parser.add_argument('-binary', default='clang-format',
                        help='location of binary to use for clang-format')
    args = parser.parse_args()

    def debug(s):
        if args.debug:
            sys.stderr.write(str(s) + '\n')

    # Extract changed lines for each file.
    filename = None
    lines_by_file = {}

    input = sys.stdin.read().split('\n')
    for lineidx, line in enumerate(input):
        match = re.search('^\+\+\+\ (.*?/){%s}(\S*)' % args.p, line)
        if match:
            filename = match.group(2)
        if filename is None:
            continue

        if args.regex is not None:
            if not re.match('^%s$' % args.regex, filename):
                continue
        else:
            if not re.match('^%s$' % args.iregex, filename, re.IGNORECASE):
                continue

        match = re.search('^@@.*\+(\d+)(,(\d+))?', line)
        if match:
            start_line = int(match.group(1))
            line_count = 1
            if match.group(3):
                line_count = int(match.group(3))
            if line_count == 0:
                continue
            range_start, range_end = None, None
            range_line = -1
            debug(line_count)
            i = 0
            while True:
                # stop iterating when finding the next diff
                if lineidx + i >= len(input):
                    break
                debug('lineidx : ' + input[lineidx + i])
                # do not count lines that are removed
                if not input[lineidx + i].startswith('-'):
                    range_line += 1
                if input[lineidx + i].startswith('+'):
                    if range_start is None:
                        range_start = start_line + range_line
                        debug('set range_start: ' + str(start_line + range_line))
                elif range_start is not None and range_end is None:
                    range_end = start_line + range_line
                    debug('set range_end: ' + str(start_line + range_line))
                    lines_by_file.setdefault(filename, []).append(
                        [range_start, range_end - 1])
                    range_start, range_end = None, None
                if input[lineidx + i].startswith('diff'):
                    break
                i += 1

    # Reformat files containing changes in place.
    for filename, lines in lines_by_file.items():
        debug('%s: %s' % (filename, lines))
        command = [args.binary, filename]
        if args.sort_includes:
            command.append('-sort-includes')
        if lines:
            s = [('-lines', str(x[0]) + ':' + str(x[1])) for x in lines]
            s = reduce(lambda x, y: x + y, s)
            command.extend(s)
        if args.style:
            command.extend(['-style', args.style])
        p = subprocess.Popen(command, stdout=subprocess.PIPE,
                             stderr=None, stdin=subprocess.PIPE,
                             universal_newlines=True)
        stdout, stderr = p.communicate()
        if p.returncode != 0:
            sys.exit(p.returncode)

        with open(filename) as f:
            code = f.readlines()
        formatted_code = StringIO(stdout).readlines()

        modified_lines = dict()
        if lines:
            for x in lines:
                for i in range(x[0], x[1] + 1):
                    modified_lines[i] = True
        delta = 10

        # handle function definitions/declarations: do not use space before (
        for i, l in enumerate(formatted_code):
            if modified_lines and not any(map(
                    lambda x: x in modified_lines,
                    range(i + 1 - delta, i + 1 + delta))):
                continue
            debug('formatted_code: ' + formatted_code[i])
            if (formatted_code[i].startswith('R_API ')
                    or formatted_code[i].startswith('static ')
                    or formatted_code[i].startswith('R_IPI ')):
                formatted_code[i] = formatted_code[i].replace(' (', '(')
                formatted_code[i] = formatted_code[i].replace('Elf_ (', 'Elf_(')
            while ' ? ' in formatted_code[i] and ' : ' in formatted_code[i]:
                pos_q = formatted_code[i].index(' ? ')
                pos_c = formatted_code[i].index(' : ')
                if pos_q >= pos_c:
                    break
                formatted_code[i] = formatted_code[i].replace(' ? ', '? ', 1)
                formatted_code[i] = formatted_code[i].replace(' : ', ': ', 1)

        diff = difflib.unified_diff(code, formatted_code,
                                    filename, filename,
                                    '(before formatting)', '(after formatting)')
        diff_string = ''.join(diff)
        if len(diff_string) > 0:
            if args.i:
                f = tempfile.NamedTemporaryFile(delete=False)
                f.write(diff_string.encode())
                f.close()
                os.system('git apply -p0 < "%s"' % (f.name))
                os.unlink(f.name)
            else:
                sys.stdout.write(diff_string)
def exception_message():
    f = StringIO()
    traceback.print_exc(file=f)
    s = f.getvalue()
    f.close()
    return s
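# Typical call site for exception_message(): capture the current traceback as
# a string from inside an except block, e.g. for logging.
try:
    1 / 0
except ZeroDivisionError:
    log_text = exception_message()
    # log_text now holds the full "Traceback (most recent call last): ..." text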
def add_publications(generator):
    """
    Populates context with a list of BibTeX publications.

    Configuration
    -------------
    generator.settings['PUBLICATIONS_SRC']:
        local path to the BibTeX file to read.

    Output
    ------
    generator.context['publications']:
        List of dicts with keys (key, year, text, url, XEmember,
        XEcategory, XEProject). See Readme.md for more details.
    """
    if 'PUBLICATIONS_SRC' not in generator.settings:
        return
    try:
        from StringIO import StringIO
    except ImportError:
        from io import StringIO
    try:
        from pybtex.database.input.bibtex import Parser
        from pybtex.database.output.bibtex import Writer
        from pybtex.database import BibliographyData, PybtexError
        from pybtex.backends import html
        from pybtex.style.formatting import plain
    except ImportError:
        logger.warn('`pelican_bibtex` failed to load dependency `pybtex`')
        return

    refs_file = generator.settings['PUBLICATIONS_SRC']
    try:
        bibdata_all = Parser().parse_file(refs_file)
    except PybtexError as e:
        logger.error('`pelican_bibtex` failed to parse file %s: %s' % (
            refs_file, str(e)))
        exit(1)
        return

    publications = []

    # format entries
    plain_style = plain.Style()
    html_backend = html.Backend()
    formatted_entries = plain_style.format_entries(bibdata_all.entries.values())

    for formatted_entry in formatted_entries:
        key = formatted_entry.key
        entry = bibdata_all.entries[key]
        year = entry.fields.get('year')
        XEcategory = entry.fields.get('XEcategory')
        XEmember = entry.fields.get('XEmember')
        XEProject = entry.fields.get('XEProject')
        url = entry.fields.get('XEurl')

        # render the bibtex string for the entry
        bib_buf = StringIO()
        bibdata_this = BibliographyData(entries={key: entry})
        Writer().write_stream(bibdata_this, bib_buf)
        text = formatted_entry.text.render(html_backend)

        publications.append({'key': key,
                             'year': year,
                             'text': text,
                             'url': url,
                             'XEmember': XEmember,
                             'XEcategory': XEcategory,
                             'XEProject': XEProject})

    generator.context['publications'] = publications
def test_getPAZ(self):
    """
    Test extracting poles and zeros information
    """
    filename = os.path.join(self.path, 'arclink_full.seed')
    sp = Parser(filename)
    paz = sp.getPAZ('BHE')
    self.assertEqual(paz['gain'], +6.00770e+07)
    self.assertEqual(paz['zeros'], [0j, 0j])
    self.assertEqual(
        paz['poles'],
        [(-3.70040e-02 + 3.70160e-02j),
         (-3.70040e-02 - 3.70160e-02j),
         (-2.51330e+02 + 0.00000e+00j),
         (-1.31040e+02 - 4.67290e+02j),
         (-1.31040e+02 + 4.67290e+02j)])
    self.assertEqual(paz['sensitivity'], +7.86576e+08)
    self.assertEqual(paz['seismometer_gain'], +1.50000E+03)
    # Raise exception for undefined channels
    self.assertRaises(SEEDParserException, sp.getPAZ, 'EHE')
    #
    # Do the same for another dataless file
    #
    filename = os.path.join(self.path, 'dataless.seed.BW_FURT')
    sp = Parser(filename)
    paz = sp.getPAZ('EHE')
    self.assertEqual(paz['gain'], +1.00000e+00)
    self.assertEqual(paz['zeros'], [0j, 0j, 0j])
    self.assertEqual(paz['poles'], [(-4.44400e+00 + 4.44400e+00j),
                                    (-4.44400e+00 - 4.44400e+00j),
                                    (-1.08300e+00 + 0.00000e+00j)])
    self.assertEqual(paz['sensitivity'], +6.71140E+08)
    self.assertEqual(paz['seismometer_gain'], 4.00000E+02)
    # Raise exception for undefined channels
    self.assertRaises(SEEDParserException, sp.getPAZ, 'BHE')
    # Raise UserWarning if not a Laplacian transfer function ('A').
    # Modify transfer_function_type on the fly
    for blk in sp.blockettes[53]:
        blk.transfer_function_types = 'X'
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("error", UserWarning)
        self.assertRaises(UserWarning, sp.getPAZ, 'EHE')
    #
    # And the same for yet another dataless file
    #
    filename = os.path.join(self.path, 'nied.dataless.gz')
    f = StringIO(gzip.open(filename).read())
    sp = Parser(f)
    gain = [+3.94857E+03, +4.87393E+04, +3.94857E+03]
    zeros = [[+0.00000E+00 + 0.00000E+00j,
              +0.00000E+00 + 0.00000E+00j],
             [+0.00000E+00 + 0.00000E+00j,
              +0.00000E+00 + 0.00000E+00j,
              -6.32511E+02 + 0.00000E+00j],
             [+0.00000E+00 + 0.00000E+00j,
              +0.00000E+00 + 0.00000E+00j]]
    poles = [[-1.23413E-02 + 1.23413E-02j,
              -1.23413E-02 - 1.23413E-02j,
              -3.91757E+01 + 4.91234E+01j,
              -3.91757E+01 - 4.91234E+01j],
             [-3.58123E-02 - 4.44766E-02j,
              -3.58123E-02 + 4.44766E-02j,
              -5.13245E+02 + 0.00000E+00j,
              -6.14791E+04 + 0.00000E+00j],
             [-1.23413E-02 + 1.23413E-02j,
              -1.23413E-02 - 1.23413E-02j,
              -3.91757E+01 + 4.91234E+01j,
              -3.91757E+01 - 4.91234E+01j]]
    sensitivity = [+4.92360E+08, +2.20419E+06, +9.84720E+08]
    seismometer_gain = [+2.29145E+03, +1.02583E+01, +2.29145E+03]
    for i, channel in enumerate(['BHZ', 'BLZ', 'LHZ']):
        paz = sp.getPAZ(channel)
        self.assertEqual(paz['gain'], gain[i])
        self.assertEqual(paz['zeros'], zeros[i])
        self.assertEqual(paz['poles'], poles[i])
        self.assertEqual(paz['sensitivity'], sensitivity[i])
        self.assertEqual(paz['seismometer_gain'], seismometer_gain[i])
    sp = Parser(os.path.join(self.path, 'dataless.seed.BW_RJOB'))
    paz = sp.getPAZ("BW.RJOB..EHZ", UTCDateTime("2007-01-01"))
    result = {'gain': 1.0,
              'poles': [(-4.444 + 4.444j), (-4.444 - 4.444j),
                        (-1.083 + 0j)],
              'seismometer_gain': 400.0,
              'sensitivity': 671140000.0,
              'zeros': [0j, 0j, 0j],
              'digitizer_gain': 1677850.0}
    self.assertEqual(paz, result)
    paz = sp.getPAZ("BW.RJOB..EHZ", UTCDateTime("2010-01-01"))
    result = {'gain': 60077000.0,
              'poles': [(-0.037004000000000002 + 0.037016j),
                        (-0.037004000000000002 - 0.037016j),
                        (-251.33000000000001 + 0j),
                        (-131.03999999999999 - 467.29000000000002j),
                        (-131.03999999999999 + 467.29000000000002j)],
              'seismometer_gain': 1500.0,
              'sensitivity': 2516800000.0,
              'zeros': [0j, 0j],
              'digitizer_gain': 1677850.0}
    self.assertEqual(sorted(paz.items()), sorted(result.items()))
    # last test again, check arg name changed in [3722]
    result = {'gain': 60077000.0,
              'poles': [(-0.037004000000000002 + 0.037016j),
                        (-0.037004000000000002 - 0.037016j),
                        (-251.33000000000001 + 0j),
                        (-131.03999999999999 - 467.29000000000002j),
                        (-131.03999999999999 + 467.29000000000002j)],
              'seismometer_gain': 1500.0,
              'sensitivity': 2516800000.0,
              'zeros': [0j, 0j],
              'digitizer_gain': 1677850.0}
    with warnings.catch_warnings(record=True) as w:
        paz = sp.getPAZ(channel_id="BW.RJOB..EHZ",
                        datetime=UTCDateTime("2010-01-01"))
        self.assertEqual(len(w), 1)
        self.assertEqual(w[0].category, DeprecationWarning)
        self.assertEqual(sorted(paz.items()), sorted(result.items()))
    paz = sp.getPAZ(seed_id="BW.RJOB..EHZ",
                    datetime=UTCDateTime("2010-01-01"))
    self.assertEqual(sorted(paz.items()), sorted(result.items()))
def parse_after_page_find(self, response):
    """ level 2 data struct
    {
        'date':
        'stockid':
        'stocknm':
        'traderlist': [
            {
                'index':
                'traderid':
                'tradernm':
                'price':
                'buyvolume':
                'sellvolume':
            },
            ...
        ].sort(buyvolume).limit(30)
    }
    """
    log.msg("URL: %s" % (response.url), level=log.DEBUG)
    item = response.meta['item']
    item['traderlist'] = []
    # populate top content
    item['url'] = response.url
    item['date'] = item['date']
    item['stockid'], item['stocknm'] = item['stockid'], ""
    item['open'] = 0
    item['high'] = 0
    item['low'] = 0
    item['close'] = 0
    item['volume'] = 0
    # parse the body as a pandas frame, then convert to dicts
    try:
        frame = pd.read_csv(
            StringIO(response.body), delimiter=',', na_values=['--'],
            header=None, skiprows=[0, 1, 2], encoding=None, dtype=np.object)
        if frame.empty:
            log.msg("fetch %s empty" % (item['stockid']), log.INFO)
            return
    except:
        log.msg("fetch %s fail" % (item['stockid']), log.INFO)
        return
    # divide into left and right frames
    fm0, fm1 = frame.ix[:, 0:5], frame.ix[:, 6:]
    for fm in [fm0, fm1]:
        for elem in fm.T.to_dict().values():
            nwelem = [
                str(elem[it]).strip(string.whitespace).replace(',', '')
                for it in sorted(elem.keys())
            ]
            sub = {}
            m = re.search(
                r'([0-9a-zA-Z]+)(\W+)?',
                nwelem[1].decode('cp950').replace(u'\u3000', u'').replace(u' ', u''))
            sub.update({
                'index': nwelem[0] if nwelem[0] else -1,
                'traderid': m.group(1) if m and m.group(1) else None,
                'tradernm': m.group(2) if m and m.group(2) else "",
                'price': nwelem[2] if nwelem[2] else 0,
                'buyvolume': nwelem[3] if nwelem[3] else 0,
                'sellvolume': nwelem[4] if nwelem[4] else 0
            })
            item['traderlist'].append(sub)
    log.msg("fetch %s pass" % (item['stockid']), log.INFO)
    log.msg("item[0] %s ..." % (item['traderlist'][0]), level=log.DEBUG)
    yield item
def _getInputFromUser(param):
    """
    this private func reads the data from the user
    for the given param
    """
    loop = True
    userInput = None

    try:
        if param.USE_DEFAULT:
            logging.debug("setting default value (%s) for key (%s)"
                          % (mask(param.DEFAULT_VALUE), param.CONF_NAME))
            controller.CONF[param.CONF_NAME] = param.DEFAULT_VALUE
        else:
            while loop:
                # If the value was not supplied by the command line flags
                if param.CONF_NAME not in commandLineValues:
                    message = StringIO()
                    message.write(param.PROMPT)

                    val_list = param.VALIDATORS or []
                    if(validators.validate_regexp not in val_list
                       and param.OPTION_LIST):
                        message.write(" [%s]" % "|".join(param.OPTION_LIST))

                    if param.DEFAULT_VALUE:
                        message.write(" [%s] " % (str(param.DEFAULT_VALUE)))

                    message.write(": ")
                    message.seek(0)

                    # mask password or hidden fields
                    if (param.MASK_INPUT):
                        userInput = getpass.getpass("%s :" % (param.PROMPT))
                    else:
                        userInput = raw_input(message.read())
                else:
                    userInput = commandLineValues[param.CONF_NAME]

                # If DEFAULT_VALUE is set and user did not input anything
                if userInput == "" and len(str(param.DEFAULT_VALUE)) > 0:
                    userInput = param.DEFAULT_VALUE

                # Param processing
                userInput = process_param_value(param, userInput)

                # If param requires validation
                try:
                    validate_param_value(param, userInput)
                    controller.CONF[param.CONF_NAME] = userInput
                    loop = False
                except ParamValidationError:
                    if param.LOOSE_VALIDATION:
                        # If validation failed but LOOSE_VALIDATION is true,
                        # ask the user
                        answer = _askYesNo("User input failed validation, "
                                           "do you still wish to use it")
                        loop = not answer
                        if answer:
                            controller.CONF[param.CONF_NAME] = userInput
                            continue
                        else:
                            if param.CONF_NAME in commandLineValues:
                                del commandLineValues[param.CONF_NAME]
                    else:
                        # Delete value from commandLineValues so that we will
                        # prompt the user for input
                        if param.CONF_NAME in commandLineValues:
                            del commandLineValues[param.CONF_NAME]
                        loop = True
    except KeyboardInterrupt:
        # add the new line so messages won't be displayed on the same line
        # as the question
        print("")
        raise
    except:
        logging.error(traceback.format_exc())
        raise Exception(output_messages.ERR_EXP_READ_INPUT_PARAM
                        % (param.CONF_NAME))
def commentNode(node):
    buf = StringIO()
    node.show(buf=buf)
    genComment(buf.getvalue())
def __init__(self):
    StringIO.__init__(self)
    self.stdout = sys.stdout
    self.logger = logging.getLogger('root')
    sys.stdout = self
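# The __init__ above belongs to a stdout tee; a minimal sketch of how the
# rest of such a class might look (the write() method below is an assumption,
# not part of the original snippet):
class TeeStdout(StringIO):
    def __init__(self):
        StringIO.__init__(self)
        self.stdout = sys.stdout
        self.logger = logging.getLogger('root')
        sys.stdout = self

    def write(self, data):
        # keep a copy in the buffer, echo to the real console, and log it
        StringIO.write(self, data)
        self.stdout.write(data)
        if data.strip():
            self.logger.info(data.strip())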
def run_desktop_test(self, suite=None, test_file=None, debugger=None,
                     debugger_args=None, shuffle=False, keep_open=False,
                     rerun_failures=False, no_autorun=False, repeat=0,
                     run_until_failure=False, slow=False, chunk_by_dir=0,
                     total_chunks=None, this_chunk=None):
    """Runs a mochitest.

    test_file is a path to a test file. It can be a relative path from the
    top source directory, an absolute filename, or a directory containing
    test files.

    suite is the type of mochitest to run. It can be one of ('plain',
    'chrome', 'browser', 'metro', 'a11y').

    debugger is a program name or path to a binary (presumably a debugger)
    to run the test in. e.g. 'gdb'

    debugger_args are the arguments passed to the debugger.

    shuffle is whether test order should be shuffled (defaults to false).

    keep_open denotes whether to keep the browser open after tests
    complete.
    """
    if rerun_failures and test_file:
        print('Cannot specify both --rerun-failures and a test path.')
        return 1

    # Need to call relpath before os.chdir() below.
    test_path = ''
    if test_file:
        test_path = self._wrap_path_argument(test_file).relpath()

    failure_file_path = os.path.join(self.statedir, 'mochitest_failures.json')

    if rerun_failures and not os.path.exists(failure_file_path):
        print('No failure file present. Did you run mochitests before?')
        return 1

    from StringIO import StringIO
    from automation import Automation

    # runtests.py is ambiguous, so we load the file/module manually.
    if 'mochitest' not in sys.modules:
        import imp
        path = os.path.join(self.mochitest_dir, 'runtests.py')
        with open(path, 'r') as fh:
            imp.load_module('mochitest', fh, path,
                            ('.py', 'r', imp.PY_SOURCE))

    import mochitest

    # This is required to make other components happy. Sad, isn't it?
    os.chdir(self.topobjdir)

    automation = Automation()

    # Automation installs its own stream handler to stdout. Since we want
    # all logging to go through us, we just remove their handler.
    remove_handlers = [l for l in logging.getLogger().handlers
                       if isinstance(l, logging.StreamHandler)]
    for handler in remove_handlers:
        logging.getLogger().removeHandler(handler)

    runner = mochitest.Mochitest(automation)

    opts = mochitest.MochitestOptions(automation)
    options, args = opts.parse_args([])

    appname = ''
    if sys.platform.startswith('darwin'):
        appname = os.path.join(self.distdir, self.substs['MOZ_MACBUNDLE_NAME'],
                               'Contents', 'MacOS',
                               'webapprt-stub' + automation.BIN_SUFFIX)
    else:
        appname = os.path.join(self.distdir, 'bin',
                               'webapprt-stub' + automation.BIN_SUFFIX)

    # Need to set the suite options before verifyOptions below.
    if suite == 'plain':
        # Don't need additional options for plain.
        pass
    elif suite == 'chrome':
        options.chrome = True
    elif suite == 'browser':
        options.browserChrome = True
    elif suite == 'metro':
        options.immersiveMode = True
        options.browserChrome = True
    elif suite == 'a11y':
        options.a11y = True
    elif suite == 'webapprt-content':
        options.webapprtContent = True
        options.app = appname
    elif suite == 'webapprt-chrome':
        options.webapprtChrome = True
        options.app = appname
        options.browserArgs.append("-test-mode")
    else:
        raise Exception('None or unrecognized mochitest suite type.')

    options.autorun = not no_autorun
    options.closeWhenDone = not keep_open
    options.shuffle = shuffle
    options.consoleLevel = 'INFO'
    options.repeat = repeat
    options.runUntilFailure = run_until_failure
    options.runSlower = slow
    options.testingModulesDir = os.path.join(self.tests_dir, 'modules')
    options.extraProfileFiles.append(os.path.join(self.distdir, 'plugins'))
    options.symbolsPath = os.path.join(self.distdir, 'crashreporter-symbols')
    options.chunkByDir = chunk_by_dir
    options.totalChunks = total_chunks
    options.thisChunk = this_chunk

    options.failureFile = failure_file_path

    if test_path:
        test_root = runner.getTestRoot(options)
        test_root_file = mozpack.path.join(self.mochitest_dir, test_root,
                                           test_path)
        if not os.path.exists(test_root_file):
            print('Specified test path does not exist: %s' % test_root_file)
            print('You may need to run |mach build| to build the test files.')
            return 1

        options.testPath = test_path

    if rerun_failures:
        options.testManifest = failure_file_path

    if debugger:
        options.debugger = debugger

    if debugger_args:
        if options.debugger == None:
            print("--debugger-args passed, but no debugger specified.")
            return 1
        options.debuggerArgs = debugger_args

    options = opts.verifyOptions(options, runner)

    if options is None:
        raise Exception('mochitest option validator failed.')

    automation.setServerInfo(options.webServer, options.httpPort,
                             options.sslPort, options.webSocketPort)

    # We need this to enable colorization of output.
    self.log_manager.enable_unstructured()

    # Output processing is a little funky here. The old make targets
    # grepped the log output from TEST-UNEXPECTED-* and printed these lines
    # after test execution. Ideally the test runner would expose a Python
    # API for obtaining test results and we could just format failures
    # appropriately. Unfortunately, it doesn't yet do that. So, we capture
    # all output to a buffer then "grep" the buffer after test execution.
    # Bug 858197 tracks a Python API that would facilitate this.
    test_output = StringIO()
    handler = logging.StreamHandler(test_output)
    handler.addFilter(UnexpectedFilter())
    handler.setFormatter(StructuredHumanFormatter(0, write_times=False))
    logging.getLogger().addHandler(handler)

    result = runner.runTests(options)

    # Need to remove our buffering handler before we echo failures or else
    # it will catch them again!
    logging.getLogger().removeHandler(handler)
    self.log_manager.disable_unstructured()

    if test_output.getvalue():
        result = 1
        for line in test_output.getvalue().splitlines():
            self.log(logging.INFO, 'unexpected', {'msg': line}, '{msg}')

    return result
def getStacktrace():
    stream = StringIO()
    traceback.print_exc(file=stream)
    return stream.getvalue()
try:
    from StringIO import StringIO  # Python 2
except ImportError:
    from io import StringIO  # Python 3

# Graphite historically has an install prefix set in setup.cfg. Being in a
# configuration file, it's not easy to override it or unset it (for installing
# graphite in a virtualenv for instance).
# The prefix is now set by ``setup.py`` and *unset* if an environment variable
# named ``GRAPHITE_NO_PREFIX`` is present.
# While ``setup.cfg`` doesn't contain the prefix anymore, the *unset* step is
# required for installations from a source tarball because running
# ``python setup.py sdist`` will re-add the prefix to the tarball's
# ``setup.cfg``.
with open('setup.cfg', 'r') as f:
    orig_setup_cfg = f.read()

cf = ConfigParser()
cf.readfp(StringIO(orig_setup_cfg), 'setup.cfg')

if os.environ.get('GRAPHITE_NO_PREFIX') or os.environ.get('READTHEDOCS'):
    cf.remove_section('install')
else:
    try:
        cf.add_section('install')
    except DuplicateSectionError:
        pass
    if not cf.has_option('install', 'prefix'):
        cf.set('install', 'prefix', '/opt/graphite')
    if not cf.has_option('install', 'install-lib'):
        cf.set('install', 'install-lib', '%(prefix)s/webapp')

with open('setup.cfg', 'w') as f:
    cf.write(f)
def setUp(self):
    """define some top-level data"""
    self.tree = DndParser(StringIO(TEST_TREE), constructor=PhyloNode)
def _serialize(obj, keypos):
    if keypos:
        if isinstance(obj, (int, long, float, bool)):
            return ('i:%i;' % obj).encode('latin1')
        if isinstance(obj, basestring):
            encoded_obj = obj
            if isinstance(obj, unicode):
                encoded_obj = obj.encode(charset, errors)
            s = BytesIO()
            s.write(b's:')
            s.write(str(len(encoded_obj)).encode('latin1'))
            s.write(b':"')
            s.write(encoded_obj)
            s.write(b'";')
            return s.getvalue()
        if obj is None:
            return b's:0:"";'
        raise TypeError('can\'t serialize %r as key' % type(obj))
    else:
        if obj is None:
            return b'N;'
        if isinstance(obj, bool):
            return ('b:%i;' % obj).encode('latin1')
        if isinstance(obj, (int, long)):
            return ('i:%s;' % obj).encode('latin1')
        if isinstance(obj, float):
            return ('d:%s;' % obj).encode('latin1')
        if isinstance(obj, basestring):
            encoded_obj = obj
            if isinstance(obj, unicode):
                encoded_obj = obj.encode(charset, errors)
            s = BytesIO()
            s.write(b's:')
            s.write(str(len(encoded_obj)).encode('latin1'))
            s.write(b':"')
            s.write(encoded_obj)
            s.write(b'";')
            return s.getvalue()
        if isinstance(obj, (list, tuple, dict)):
            out = []
            if isinstance(obj, dict):
                iterable = obj.items()
            else:
                iterable = enumerate(obj)
            for key, value in iterable:
                out.append(_serialize(key, True))
                out.append(_serialize(value, False))
            return b''.join([
                b'a:',
                str(len(obj)).encode('latin1'),
                b':{',
                b''.join(out),
                b'}'
            ])
        if isinstance(obj, phpobject):
            return b'O' + _serialize(obj.__name__, True)[1:-1] + \
                   _serialize(obj.__php_vars__, False)[1:]
        if object_hook is not None:
            return _serialize(object_hook(obj), False)
        raise TypeError('can\'t serialize %r' % type(obj))
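# Sketch of what _serialize() emits, assuming the enclosing serializer's
# default charset and no object_hook; PHP's wire format tags each value with
# a type letter and, for strings and arrays, an explicit length:
#
#   _serialize('php', False)     -> b's:3:"php";'
#   _serialize(True, False)      -> b'b:1;'
#   _serialize([1, 2], False)    -> b'a:2:{i:0;i:1;i:1;i:2;}'
#   _serialize({'a': 1}, False)  -> b'a:1:{s:1:"a";i:1;}'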
def get_cluster_config():
    cfg = StringIO()
    fab.get("~/.cstar_perf/cluster_config.json", cfg)
    cfg.seek(0)
    return json.load(cfg)
def generate_file_rst(fname, target_dir, src_dir, root_dir, plot_gallery):
    """ Generate the rst file for a given example.
    """
    base_image_name = os.path.splitext(fname)[0]
    image_fname = '%s_%%s.png' % base_image_name

    this_template = rst_template
    last_dir = os.path.split(src_dir)[-1]
    # to avoid leading . in file names, and wrong names in links
    if last_dir == '.' or last_dir == 'examples':
        last_dir = ''
    else:
        last_dir += '_'
    short_fname = last_dir + fname
    src_file = os.path.join(src_dir, fname)
    example_file = os.path.join(target_dir, fname)
    shutil.copyfile(src_file, example_file)

    # The following is a list containing all the figure names
    figure_list = []

    image_dir = os.path.join(target_dir, 'images')
    thumb_dir = os.path.join(image_dir, 'thumb')
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)
    if not os.path.exists(thumb_dir):
        os.makedirs(thumb_dir)
    image_path = os.path.join(image_dir, image_fname)
    stdout_path = os.path.join(image_dir, 'stdout_%s.txt' % base_image_name)
    time_path = os.path.join(image_dir, 'time_%s.txt' % base_image_name)
    thumb_file = os.path.join(thumb_dir, fname[:-3] + '.png')
    time_elapsed = 0
    time_m = 0
    time_s = 0
    if plot_gallery and fname.startswith('plot'):
        # generate the plot as png image if file name
        # starts with plot and if it is more recent than an
        # existing image.
        first_image_file = image_path % 1
        if os.path.exists(stdout_path):
            stdout = open(stdout_path).read()
        else:
            stdout = ''
        if os.path.exists(time_path):
            time_elapsed = float(open(time_path).read())

        if not os.path.exists(first_image_file) or \
           os.stat(first_image_file).st_mtime <= os.stat(src_file).st_mtime:
            # We need to execute the code
            print 'plotting %s' % fname
            t0 = time()
            import matplotlib.pyplot as plt
            plt.close('all')
            cwd = os.getcwd()
            try:
                # First CD in the original example dir, so that any file
                # created by the example get created in this directory
                orig_stdout = sys.stdout
                os.chdir(os.path.dirname(src_file))
                my_buffer = StringIO()
                my_stdout = Tee(sys.stdout, my_buffer)
                sys.stdout = my_stdout
                my_globals = {'pl': plt}
                execfile(os.path.basename(src_file), my_globals)
                time_elapsed = time() - t0
                sys.stdout = orig_stdout
                my_stdout = my_buffer.getvalue()

                # get variables so we can later add links to the documentation
                example_code_obj = {}
                for var_name, var in my_globals.iteritems():
                    if not hasattr(var, '__module__'):
                        continue
                    if not isinstance(var.__module__, basestring):
                        continue
                    if var.__module__.split('.')[0] not in DOCMODULES:
                        continue

                    # get the type as a string with other things stripped
                    tstr = str(type(var))
                    tstr = (tstr[tstr.find('\'') + 1:
                                 tstr.rfind('\'')].split('.')[-1])
                    # get shortened module name
                    module_short = get_short_module_name(var.__module__, tstr)
                    cobj = {'name': tstr, 'module': var.__module__,
                            'module_short': module_short,
                            'obj_type': 'object'}
                    example_code_obj[var_name] = cobj

                # find functions so we can later add links to the documentation
                funregex = re.compile('[\w.]+\(')
                with open(src_file, 'rt') as fid:
                    for line in fid.readlines():
                        if line.startswith('#'):
                            continue
                        for match in funregex.findall(line):
                            fun_name = match[:-1]
                            try:
                                exec('this_fun = %s' % fun_name, my_globals)
                            except Exception as err:
                                # Here, we were not able to execute the
                                # previous statement, either because the
                                # fun_name was not a function but a statement
                                # (print), or because the regexp didn't
                                # catch the whole function name:
                                # eg:
                                #  X = something().blah()
                                # will work for something, but not blah.
                                continue
                            this_fun = my_globals['this_fun']
                            if not callable(this_fun):
                                continue
                            if not hasattr(this_fun, '__module__'):
                                continue
                            if not isinstance(this_fun.__module__, basestring):
                                continue
                            if (this_fun.__module__.split('.')[0]
                                    not in DOCMODULES):
                                continue

                            # get shortened module name
                            fun_name_short = fun_name.split('.')[-1]
                            module_short = get_short_module_name(
                                this_fun.__module__, fun_name_short)
                            cobj = {'name': fun_name_short,
                                    'module': this_fun.__module__,
                                    'module_short': module_short,
                                    'obj_type': 'function'}
                            example_code_obj[fun_name] = cobj
                    fid.close()

                if len(example_code_obj) > 0:
                    # save the dictionary, so we can later add hyperlinks
                    codeobj_fname = example_file[:-3] + '_codeobj.pickle'
                    with open(codeobj_fname, 'wb') as fid:
                        cPickle.dump(example_code_obj, fid,
                                     cPickle.HIGHEST_PROTOCOL)
                    fid.close()

                if '__doc__' in my_globals:
                    # The __doc__ is often printed in the example, we
                    # don't want to echo it
                    my_stdout = my_stdout.replace(my_globals['__doc__'], '')
                my_stdout = my_stdout.strip()
                if my_stdout:
                    stdout = """**Script output**:

.. rst-class:: max_height

  ::

    %s

""" % ('\n    '.join(my_stdout.split('\n')))
                os.chdir(cwd)
                open(stdout_path, 'w').write(stdout)
                open(time_path, 'w').write('%f' % time_elapsed)

                # In order to save every figure we have two solutions:
                # * iterate from 1 to infinity and call plt.fignum_exists(n)
                #   (this requires the figures to be numbered
                #    incrementally: 1, 2, 3 and not 1, 2, 5)
                # * iterate over [fig_mngr.num for fig_mngr in
                #   matplotlib._pylab_helpers.Gcf.get_all_fig_managers()]
                for fig_num in (fig_mngr.num for fig_mngr in
                        matplotlib._pylab_helpers.Gcf.get_all_fig_managers()):
                    # Set the fig_num figure as the current figure as we can't
                    # save a figure that's not the current figure.
                    fig = plt.figure(fig_num)
                    kwargs = {}
                    to_rgba = matplotlib.colors.colorConverter.to_rgba
                    for attr in ['facecolor', 'edgecolor']:
                        fig_attr = getattr(fig, 'get_' + attr)()
                        default_attr = matplotlib.rcParams['figure.' + attr]
                        if to_rgba(fig_attr) != to_rgba(default_attr):
                            kwargs[attr] = fig_attr

                    fig.savefig(image_path % fig_num, **kwargs)
                    figure_list.append(image_fname % fig_num)
            except:
                print 80 * '_'
                print '%s is not compiling:' % fname
                traceback.print_exc()
                print 80 * '_'
            finally:
                os.chdir(cwd)
                sys.stdout = orig_stdout

            print " - time elapsed : %.2g sec" % time_elapsed
        else:
            figure_list = [f[len(image_dir):]
                           for f in glob.glob(image_path % '[1-9]')]
                           #for f in glob.glob(image_path % '*')]

        # Cater for the fact that there can be more than 10 images
        if len(figure_list) >= 9:
            figure_list.extend([f[len(image_dir):]
                                for f in glob.glob(image_path % '1[0-9]')])

        # generate thumb file
        this_template = plot_rst_template
        car_thumb_path = os.path.join(os.path.split(root_dir)[0],
                                      '_build/html/_images/')
        # Note: normally, make_thumbnail is used to write to the path
        # contained in `thumb_file`, which is within
        # `auto_examples/../images/thumbs` depending on the example.
        # Because the carousel has different dimensions than those of the
        # examples gallery, I did not simply reuse them all as some contained
        # whitespace due to their default gallery thumbnail size. Below, for
        # a few cases, separate thumbnails are created (the originals can't
        # just be overwritten with the carousel dimensions as it messes up
        # the examples gallery layout).
        # The special carousel thumbnails are written directly to
        # _build/html/stable/_images/, as for some reason unknown to me,
        # Sphinx refuses to copy my 'extra' thumbnails from the auto examples
        # gallery to the _build folder. This works fine as is, but it would
        # be cleaner to have it happen with the rest. Ideally they should be
        # written to 'thumb_file' as well, and then copied to the _images
        # folder during the `Copying Downloadable Files` step like the rest.
        if not os.path.exists(car_thumb_path):
            os.makedirs(car_thumb_path)
        if os.path.exists(first_image_file):
            # We generate extra special thumbnails for the carousel
            carousel_tfile = os.path.join(car_thumb_path,
                                          fname[:-3] + '_carousel.png')
            first_img = image_fname % 1
            if first_img in carousel_thumbs:
                make_thumbnail((image_path % carousel_thumbs[first_img][0]),
                               carousel_tfile,
                               carousel_thumbs[first_img][1], 190)
            make_thumbnail(first_image_file, thumb_file, 400, 280)

    if not os.path.exists(thumb_file):
        # create something to replace the thumbnail
        make_thumbnail('images/no_image.png', thumb_file, 200, 140)

    docstring, short_desc, end_row = extract_docstring(example_file)

    # Depending on whether we have one or more figures, we're using a
    # horizontal list or a single rst call to 'image'.
    if len(figure_list) == 1:
        figure_name = figure_list[0]
        image_list = SINGLE_IMAGE % figure_name.lstrip('/')
    else:
        image_list = HLIST_HEADER
        for figure_name in figure_list:
            image_list += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/')

    time_m, time_s = divmod(time_elapsed, 60)
    f = open(os.path.join(target_dir, fname[:-2] + 'rst'), 'w')
    f.write(this_template % locals())
    f.flush()
def s3_csv_to_df(bucket_name, s3_filename):
    # fetch the raw CSV bytes from S3 and parse them into a DataFrame
    raw = read_s3_file(bucket_name, s3_filename)
    buf = StringIO(raw.decode("utf-8"))
    return pd.read_csv(buf)
def put_cluster_config():
    cfg = StringIO()
    json.dump(cluster_config, cfg, indent=2)
    fab.put(cfg, "~/.cstar_perf/cluster_config.json")
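# A hypothetical round trip with the two fabric helpers above, assuming the
# module-level `cluster_config` that put_cluster_config() serializes:
cluster_config = get_cluster_config()
cluster_config['name'] = 'perf-test'
put_cluster_config()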
def do(self):
    ############################################################
    # Imports.
    script = self.script
    from mayavi.sources.vtk_file_reader import VTKFileReader
    from mayavi.modules.outline import Outline
    from mayavi.modules.iso_surface import IsoSurface
    from mayavi.modules.contour_grid_plane import ContourGridPlane
    from mayavi.modules.scalar_cut_plane import ScalarCutPlane

    ############################################################
    # Create a new scene and set up the visualization.
    s = self.new_scene()

    # Read a VTK (old style) data file.
    r = VTKFileReader()
    r.initialize(get_example_data('heart.vtk'))
    script.add_source(r)

    # Create an outline for the data.
    o = Outline()
    script.add_module(o)

    # Create one ContourGridPlane normal to the 'x' axis.
    cgp1 = ContourGridPlane()
    script.add_module(cgp1)
    # Set the position to the middle of the data.
    cgp1.grid_plane.position = 15

    # Another with filled contours normal to 'y' axis.
    cgp2 = ContourGridPlane()
    cgp2.contour.filled_contours = True
    # Set the axis and position to the middle of the data.
    cgp2.grid_plane.axis = 'y'
    cgp2.grid_plane.position = 15
    script.add_module(cgp2)

    # An isosurface module.
    iso = IsoSurface(compute_normals=True)
    script.add_module(iso)
    iso.contour.contours = [200.0]

    # An interactive scalar cut plane.
    cp = ScalarCutPlane()
    script.add_module(cp)
    ip = cp.implicit_plane
    ip.normal = 0, 0, 1
    ip.origin = 0, 0, 5
    ip.widget.enabled = False

    # Set the scene to an isometric view.
    s.scene.isometric_view()

    # Now test.
    self.check()

    ############################################################
    # Test if the modules respond correctly when the components
    # are changed.
    ctr = cgp2.contour
    cgp2.contour = ctr.__class__()
    cgp2.contour = ctr
    cgp2.actor = cgp2.actor.__class__()

    iso.contour = iso.contour.__class__()
    iso.contour.contours = [200.0]
    iso.actor = iso.actor.__class__()
    iso.normals = iso.normals.__class__()

    ip = cp.implicit_plane
    cp.implicit_plane = cp.implicit_plane.__class__()
    cp.implicit_plane = ip
    ip.widget.enabled = False
    cp.contour = cp.contour.__class__()
    cp.cutter = cp.cutter.__class__()
    cp.actor = cp.actor.__class__()

    s.render()
    # Now check.
    self.check()

    ############################################################
    # Test if saving a visualization and restoring it works.

    # Save visualization.
    f = StringIO()
    f.name = abspath('test.mv2')  # We simulate a file.
    script.save_visualization(f)
    f.seek(0)  # So we can read this saved data.

    # Remove existing scene.
    engine = script.engine
    engine.close_scene(s)

    # Load visualization
    script.load_visualization(f)
    s = engine.current_scene

    self.check()

    ############################################################
    # Test if the MayaVi2 visualization can be deep-copied.

    # Pop the source object.
    source = s.children.pop()
    # Add it back to see if that works without error.
    s.children.append(source)
    # Now set the enabled status of the widget, this is impossible
    # to get correctly.
    cp = source.children[0].children[-1]
    cp.implicit_plane.widget.enabled = False

    self.check()

    # Now deepcopy the source and replace the existing one with
    # the copy.  This basically simulates cutting/copying the
    # object from the UI via the right-click menu on the tree
    # view, and pasting the copy back.
    source1 = copy.deepcopy(source)
    s.children[0] = source1
    cp = source1.children[0].children[-1]
    cp.implicit_plane.widget.enabled = False
    self.check()
def df_to_s3(bucket_name, df, s3fn, index=False):
    """Write a pandas DataFrame to S3 as a CSV file."""
    s = StringIO()
    df.to_csv(s, index=index)
    write_s3_file(bucket_name, s3fn, content=s.getvalue())
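read_s3_file and write_s3_file are assumed by the two S3 helpers above but are not defined in this collection. A minimal boto3-based sketch of what they might look like; the helper names and module-level client are assumptions, only get_object and put_object are standard boto3 calls.

import boto3

s3 = boto3.client('s3')

def read_s3_file(bucket_name, s3_filename):
    # Hypothetical helper assumed above: return the object body as bytes.
    return s3.get_object(Bucket=bucket_name, Key=s3_filename)['Body'].read()

def write_s3_file(bucket_name, s3_filename, content):
    # Hypothetical helper assumed above: upload a string as the object body.
    s3.put_object(Bucket=bucket_name, Key=s3_filename, Body=content)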
def test_export_course_stdout(self):
    output = self.run_export_course('-')
    with tarfile.open(fileobj=StringIO(output)) as tar_file:
        self.check_export_file(tar_file)
def login(browser, username, password, anticaptcha=False,
          anticaptcha_max_tries=3):
    """Display the CAPTCHA and log in."""
    captcha_tries = 0
    while 1:
        try:
            if not password:
                password = getpass.getpass('Xtrazone password: ')
            # Load the login page to initialize the session.
            browser.open('https://xtrazone.sso.bluewin.ch/index.html')
            browser.addheaders = [
                ('X-Requested-With', 'XMLHttpRequest'),
                ('X-Header-XtraZone', 'XtraZone'),
                ('Referer', 'https://xtrazone.sso.bluewin.ch/index.html'),
            ]
            url = 'https://xtrazone.sso.bluewin.ch/index.php/20,53,ajax,,,283/' \
                  '?route=%2Flogin%2Fgetcaptcha'
            data = {'action': 'getCaptcha',
                    'do_sso_login': 0,
                    'passphrase': '',
                    'sso_password': password,
                    'sso_user': username,
                    'token': '',
                    }
            browser.open(url, urllib.urlencode(data))
            resp = json.loads(browser.response().read())  # Convert response to dict
            captcha_token = resp['content']['messages']['operation']['token']
            captcha = ''
            captcha_tries += 1

            # Try to crack CAPTCHA automatically (Service by gorrion.ch)
            if anticaptcha and captcha_tries <= anticaptcha_max_tries:
                if captcha_tries == 1:
                    print 'Trying to crack CAPTCHA...'
                try:
                    captcha = gorrion.get_captcha(captcha_token)
                except Exception as e:
                    anticaptcha = False
                    print 'Error, cracking CAPTCHA failed (%s)' % str(e)
            # User has to enter CAPTCHA manually
            else:
                if anticaptcha and captcha_tries == anticaptcha_max_tries + 1:
                    print 'Automatically cracking CAPTCHA failed. :('
                captcha_url = 'http:%s' % resp['content']['messages']['operation']['imgUrl']

                # Display CAPTCHA in a new window
                tk_root = Tkinter.Tk(className='CAPTCHA')
                img = ImageTk.PhotoImage(
                    Image.open(
                        StringIO(
                            urllib.urlopen(captcha_url).read()
                        )
                    )
                )
                captcha_label = Tkinter.Label(tk_root, image=img)
                captcha_label.pack()

                # Get CAPTCHA text
                while not captcha:
                    captcha = raw_input('Please enter CAPTCHA: ').strip()

                # Destroy CAPTCHA window
                try:
                    tk_root.destroy()
                except Tkinter.TclError:
                    pass

            # Log in
            browser.addheaders = [
                ('X-Requested-With', 'XMLHttpRequest'),
                ('X-Header-XtraZone', 'XtraZone'),
                ('Referer', 'https://xtrazone.sso.bluewin.ch/index.html'),
            ]
            url = 'https://xtrazone.sso.bluewin.ch/index.php/22,39,ajax_json,,,157/'
            data = {'action': 'ssoLogin',
                    'do_sso_login': 1,
                    'passphrase': captcha,
                    'sso_password': password,
                    'sso_user': username,
                    'token': captcha_token,
                    }
            browser.open(url, urllib.urlencode(data))
            resp = json.loads(browser.response().read())
            if resp['status'] == 'captcha_failed':
                raise RuntimeError('CAPTCHA failed: %s' % resp['message'])
            if resp['status'] != 'login_ok':
                raise RuntimeError('Login failed: %s' % resp['message'])

            # Everything worked fine :)
            if anticaptcha and captcha_tries <= anticaptcha_max_tries:
                if captcha:
                    # Report successful CAPTCHAs to gorrion
                    try:
                        gorrion.report(captcha, 1)
                    except gorrion.GorrionError as e:
                        print 'Anticaptcha reporting: %s' % str(e)
            break
        except RuntimeError as e:
            if anticaptcha and captcha_tries <= anticaptcha_max_tries:
                if captcha:
                    pass  # Possibly report to gorrion
            if captcha_tries > anticaptcha_max_tries:
                print 'Wrong CAPTCHA. Try again.'
def assert_parsed_results_equals(self, xml_text, expected_results):
    results_reader = results.ResultsReader(StringIO(xml_text))
    actual_results = [x for x in results_reader]
    self.assertEquals(expected_results, actual_results)
def call_command(self, name, *args, **kwargs):
    """Call management command and return output"""
    out = StringIO()  # To capture the output of the command
    call_command(name, *args, stdout=out, **kwargs)
    out.seek(0)
    return out.read()
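Typical use from a Django test, assuming the helper above is defined on (or mixed into) the TestCase; the 'send_reminders' command and its 'Done' output are illustrative assumptions, not part of the snippet.

from django.test import TestCase

class SendRemindersTest(TestCase):
    def test_command_output(self):
        # The command name and expected output below are hypothetical.
        output = self.call_command('send_reminders', verbosity=0)
        self.assertIn('Done', output)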
def _startCapture(self):
    self._capture_stack.append((sys.stdout, sys.stderr))
    self._currentStdout = StringIO()
    self._currentStderr = StringIO()
    sys.stdout = Tee(self.encoding, self._currentStdout, sys.stdout)
    sys.stderr = Tee(self.encoding, self._currentStderr, sys.stderr)
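Tee is referenced above but not defined in this snippet; nose ships its own implementation. A minimal illustrative stand-in that fans every write out to both the capture buffer and the real stream, with the encoding handling guessed:

class Tee(object):
    # Illustrative stand-in for the Tee used above, not nose's actual class:
    # each write goes to all underlying streams.
    def __init__(self, encoding, *streams):
        self._encoding = encoding
        self._streams = streams

    def write(self, data):
        if isinstance(data, unicode):
            data = data.encode(self._encoding)
        for stream in self._streams:
            stream.write(data)

    def flush(self):
        for stream in self._streams:
            if hasattr(stream, 'flush'):
                stream.flush()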
def test_emergency_error(self):
    sio = StringIO()
    emergency_error(sio, "Testing emergency error facility")
    self.assertEqual(rpartition(sio.getvalue(), ":")[2].strip(),
                     "Testing emergency error facility")
def parse_text(self, message_text, body_parsing=True):
    self.body_parsing = body_parsing
    self.msg = message_text
    stream = StringIO(self.msg)
    self.__parse(stream)
def setUp(self):
    """standard files"""
    self.big_interleaved = StringIO("""10 705 I
Cow ATGGCATATCCCATACAACTAGGATTCCAAGATGCAACATCACCAATCATAGAAGAACTA
Carp ATGGCACACCCAACGCAACTAGGTTTCAAGGACGCGGCCATACCCGTTATAGAGGAACTT
Chicken ATGGCCAACCACTCCCAACTAGGCTTTCAAGACGCCTCATCCCCCATCATAGAAGAGCTC
Human ATGGCACATGCAGCGCAAGTAGGTCTACAAGACGCTACTTCCCCTATCATAGAAGAGCTT
Loach ATGGCACATCCCACACAATTAGGATTCCAAGACGCGGCCTCACCCGTAATAGAAGAACTT
Mouse ATGGCCTACCCATTCCAACTTGGTCTACAAGACGCCACATCCCCTATTATAGAAGAGCTA
Rat ATGGCTTACCCATTTCAACTTGGCTTACAAGACGCTACATCACCTATCATAGAAGAACTT
Seal ATGGCATACCCCCTACAAATAGGCCTACAAGATGCAACCTCTCCCATTATAGAGGAGTTA
Whale ATGGCATATCCATTCCAACTAGGTTTCCAAGATGCAGCATCACCCATCATAGAAGAGCTC
Frog ATGGCACACCCATCACAATTAGGTTTTCAAGACGCAGCCTCTCCAATTATAGAAGAATTA

CTTCACTTTCATGACCACACGCTAATAATTGTCTTCTTAATTAGCTCATTAGTACTTTAC
CTTCACTTCCACGACCACGCATTAATAATTGTGCTCCTAATTAGCACTTTAGTTTTATAT
GTTGAATTCCACGACCACGCCCTGATAGTCGCACTAGCAATTTGCAGCTTAGTACTCTAC
ATCACCTTTCATGATCACGCCCTCATAATCATTTTCCTTATCTGCTTCCTAGTCCTGTAT
CTTCACTTCCATGACCATGCCCTAATAATTGTATTTTTGATTAGCGCCCTAGTACTTTAT
ATAAATTTCCATGATCACACACTAATAATTGTTTTCCTAATTAGCTCCTTAGTCCTCTAT
ACAAACTTTCATGACCACACCCTAATAATTGTATTCCTCATCAGCTCCCTAGTACTTTAT
CTACACTTCCATGACCACACATTAATAATTGTGTTCCTAATTAGCTCATTAGTACTCTAC
CTACACTTTCACGATCATACACTAATAATCGTTTTTCTAATTAGCTCTTTAGTTCTCTAC
CTTCACTTCCACGACCATACCCTCATAGCCGTTTTTCTTATTAGTACGCTAGTTCTTTAC

ATTATTTCACTAATACTAACGACAAAGCTGACCCATACAAGCACGATAGATGCACAAGAA
ATTATTACTGCAATGGTATCAACTAAACTTACTAATAAATATATTCTAGACTCCCAAGAA
CTTCTAACTCTTATACTTATAGAAAAACTATCA---TCAAACACCGTAGATGCCCAAGAA
GCCCTTTTCCTAACACTCACAACAAAACTAACTAATACTAACATCTCAGACGCTCAGGAA
GTTATTATTACAACCGTCTCAACAAAACTCACTAACATATATATTTTGGACTCACAAGAA
ATCATCTCGCTAATATTAACAACAAAACTAACACATACAAGCACAATAGATGCACAAGAA
ATTATTTCACTAATACTAACAACAAAACTAACACACACAAGCACAATAGACGCCCAAGAA
ATTATCTCACTTATACTAACCACGAAACTCACCCACACAAGTACAATAGACGCACAAGAA
ATTATTACCCTAATGCTTACAACCAAATTAACACATACTAGTACAATAGACGCCCAAGAA
ATTATTACTATTATAATAACTACTAAACTAACTAATACAAACCTAATGGACGCACAAGAG

GTAGAGACAATCTGAACCATTCTGCCCGCCATCATCTTAATTCTAATTGCTCTTCCTTCT
ATCGAAATCGTATGAACCATTCTACCAGCCGTCATTTTAGTACTAATCGCCCTGCCCTCC
GTTGAACTAATCTGAACCATCCTACCCGCTATTGTCCTAGTCCTGCTTGCCCTCCCCTCC
ATAGAAACCGTCTGAACTATCCTGCCCGCCATCATCCTAGTCCTCATCGCCCTCCCATCC
ATTGAAATCGTATGAACTGTGCTCCCTGCCCTAATCCTCATTTTAATCGCCCTCCCCTCA
GTTGAAACCATTTGAACTATTCTACCAGCTGTAATCCTTATCATAATTGCTCTCCCCTCT
GTAGAAACAATTTGAACAATTCTCCCAGCTGTCATTCTTATTCTAATTGCCCTTCCCTCC
GTGGAAACGGTGTGAACGATCCTACCCGCTATCATTTTAATTCTCATTGCCCTACCATCA
GTAGAAACTGTCTGAACTATCCTCCCAGCCATTATCTTAATTTTAATTGCCTTGCCTTCA
ATCGAAATAGTGTGAACTATTATACCAGCTATTAGCCTCATCATAATTGCCCTTCCATCC

TTACGAATTCTATACATAATAGATGAAATCAATAACCCATCTCTTACAGTAAAAACCATA
CTACGCATCCTGTACCTTATAGACGAAATTAACGACCCTCACCTGACAATTAAAGCAATA
CTCCAAATCCTCTACATAATAGACGAAATCGACGAACCTGATCTCACCCTAAAAGCCATC
CTACGCATCCTTTACATAACAGACGAGGTCAACGATCCCTCCCTTACCATCAAATCAATT
CTACGAATTCTATATCTTATAGACGAGATTAATGACCCCCACCTAACAATTAAGGCCATG
CTACGCATTCTATATATAATAGACGAAATCAACAACCCCGTATTAACCGTTAAAACCATA
CTACGAATTCTATACATAATAGACGAGATTAATAACCCAGTTCTAACAGTAAAAACTATA
TTACGAATCCTCTACATAATGGACGAGATCAATAACCCTTCCTTGACCGTAAAAACTATA
TTACGGATCCTTTACATAATAGACGAAGTCAATAACCCCTCCCTCACTGTAAAAACAATA
CTTCGTATCCTATATTTAATAGATGAAGTTAATGATCCACACTTAACAATTAAAGCAATC

GGACATCAGTGATACTGAAGCTATGAGTATACAGATTATGAGGACTTAAGCTTCGACTCC
GGACACCAATGATACTGAAGTTACGAGTATACAGACTATGAAAATCTAGGATTCGACTCC
GGACACCAATGATACTGAACCTATGAATACACAGACTTCAAGGACCTCTCATTTGACTCC
GGCCACCAATGGTACTGAACCTACGAGTACACCGACTACGGCGGACTAATCTTCAACTCC
GGGCACCAATGATACTGAAGCTACGAGTATACTGATTATGAAAACTTAAGTTTTGACTCC
GGGCACCAATGATACTGAAGCTACGAATATACTGACTATGAAGACCTATGCTTTGATTCA
GGACACCAATGATACTGAAGCTATGAATATACTGACTATGAAGACCTATGCTTTGACTCC
GGACATCAGTGATACTGAAGCTATGAGTACACAGACTACGAAGACCTGAACTTTGACTCA
GGTCACCAATGATATTGAAGCTATGAGTATACCGACTACGAAGACCTAAGCTTCGACTCC
GGCCACCAATGATACTGAAGCTACGAATATACTAACTATGAGGATCTCTCATTTGACTCT

TACATAATTCCAACATCAGAATTAAAGCCAGGGGAGCTACGACTATTAGAAGTCGATAAT
TATATAGTACCAACCCAAGACCTTGCCCCCGGACAATTCCGACTTCTGGAAACAGACCAC
TACATAACCCCAACAACAGACCTCCCCCTAGGCCACTTCCGCCTACTAGAAGTCGACCAT
TACATACTTCCCCCATTATTCCTAGAACCAGGCGACCTGCGACTCCTTGACGTTGACAAT
TACATAATCCCCACCCAGGACCTAACCCCTGGACAATTCCGGCTACTAGAGACAGACCAC
TATATAATCCCAACAAACGACCTAAAACCTGGTGAACTACGACTGCTAGAAGTTGATAAC
TACATAATCCCAACCAATGACCTAAAACCAGGTGAACTTCGTCTATTAGAAGTTGATAAT
TATATGATCCCCACACAAGAACTAAAGCCCGGAGAACTACGACTGCTAGAAGTAGACAAT
TATATAATCCCAACATCAGACCTAAAGCCAGGAGAACTACGATTATTAGAAGTAGATAAC
TATATAATTCCAACTAATGACCTTACCCCTGGACAATTCCGGCTGCTAGAAGTTGATAAT

CGAGTTGTACTACCAATAGAAATAACAATCCGAATGTTAGTCTCCTCTGAAGACGTATTA
CGAATAGTTGTTCCAATAGAATCCCCAGTCCGTGTCCTAGTATCTGCTGAAGACGTGCTA
CGCATTGTAATCCCCATAGAATCCCCCATTCGAGTAATCATCACCGCTGATGACGTCCTC
CGAGTAGTACTCCCGATTGAAGCCCCCATTCGTATAATAATTACATCACAAGACGTCTTG
CGAATGGTTGTTCCCATAGAATCCCCTATTCGCATTCTTGTTTCCGCCGAAGATGTACTA
CGAGTCGTTCTGCCAATAGAACTTCCAATCCGTATATTAATTTCATCTGAAGACGTCCTC
CGGGTAGTCTTACCAATAGAACTTCCAATTCGTATACTAATCTCATCCGAAGACGTCCTG
CGAGTAGTCCTCCCAATAGAAATAACAATCCGCATACTAATCTCATCAGAAGATGTACTC
CGAGTTGTCTTACCTATAGAAATAACAATCCGAATATTAGTCTCATCAGAAGACGTACTC
CGAATAGTAGTCCCAATAGAATCTCCAACCCGACTTTTAGTTACAGCCGAAGACGTCCTC

CACTCATGAGCTGTGCCCTCTCTAGGACTAAAAACAGACGCAATCCCAGGCCGTCTAAAC
CATTCTTGAGCTGTTCCATCCCTTGGCGTAAAAATGGACGCAGTCCCAGGACGACTAAAT
CACTCATGAGCCGTACCCGCCCTCGGGGTAAAAACAGACGCAATCCCTGGACGACTAAAT
CACTCATGAGCTGTCCCCACATTAGGCTTAAAAACAGATGCAATTCCCGGACGTCTAAAC
CACTCCTGGGCCCTTCCAGCCATGGGGGTAAAGATAGACGCGGTCCCAGGACGCCTTAAC
CACTCATGAGCAGTCCCCTCCCTAGGACTTAAAACTGATGCCATCCCAGGCCGACTAAAT
CACTCATGAGCCATCCCTTCACTAGGGTTAAAAACCGACGCAATCCCCGGCCGCCTAAAC
CACTCATGAGCCGTACCGTCCCTAGGACTAAAAACTGATGCTATCCCAGGACGACTAAAC
CACTCATGGGCCGTACCCTCCTTGGGCCTAAAAACAGATGCAATCCCAGGACGCCTAAAC
CACTCGTGAGCTGTACCCTCCTTGGGTGTCAAAACAGATGCAATCCCAGGACGACTTCAT

CAAACAACCCTTATATCGTCCCGTCCAGGCTTATATTACGGTCAATGCTCAGAAATTTGC
CAAGCCGCCTTTATTGCCTCACGCCCAGGGGTCTTTTACGGACAATGCTCTGAAATTTGT
CAAACCTCCTTCATCACCACTCGACCAGGAGTGTTTTACGGACAATGCTCAGAAATCTGC
CAAACCACTTTCACCGCTACACGACCGGGGGTATACTACGGTCAATGCTCTGAAATCTGT
CAAACCGCCTTTATTGCCTCCCGCCCCGGGGTATTCTATGGGCAATGCTCAGAAATCTGT
CAAGCAACAGTAACATCAAACCGACCAGGGTTATTCTATGGCCAATGCTCTGAAATTTGT
CAAGCTACAGTCACATCAAACCGACCAGGTCTATTCTATGGCCAATGCTCTGAAATTTGC
CAAACAACCCTAATAACCATACGACCAGGACTGTACTACGGTCAATGCTCAGAAATCTGT
CAAACAACCTTAATATCAACACGACCAGGCCTATTTTATGGACAATGCTCAGAGATCTGC
CAAACATCATTTATTGCTACTCGTCCGGGAGTATTTTACGGACAATGTTCAGAAATTTGC

GGGTCAAACCACAGTTTCATACCCATTGTCCTTGAGTTAGTCCCACTAAAGTACTTTGAA
GGAGCTAATCACAGCTTTATACCAATTGTAGTTGAAGCAGTACCTCTCGAACACTTCGAA
GGAGCTAACCACAGCTACATACCCATTGTAGTAGAGTCTACCCCCCTAAAACACTTTGAA
GGAGCAAACCACAGTTTCATGCCCATCGTCCTAGAATTAATTCCCCTAAAAATCTTTGAA
GGAGCAAACCACAGCTTTATACCCATCGTAGTAGAAGCGGTCCCACTATCTCACTTCGAA
GGATCTAACCATAGCTTTATGCCCATTGTCCTAGAAATGGTTCCACTAAAATATTTCGAA
GGCTCAAATCACAGCTTCATACCCATTGTACTAGAAATAGTGCCTCTAAAATATTTCGAA
GGTTCAAACCACAGCTTCATACCTATTGTCCTCGAATTGGTCCCACTATCCCACTTCGAG
GGCTCAAACCACAGTTTCATACCAATTGTCCTAGAACTAGTACCCCTAGAAGTCTTTGAA
GGAGCAAACCACAGCTTTATACCAATTGTAGTTGAAGCAGTACCGCTAACCGACTTTGAA

AAATGATCTGCGTCAATATTA---------------------TAA
AACTGATCCTCATTAATACTAGAAGACGCCTCGCTAGGAAGCTAA
GCCTGATCCTCACTA------------------CTGTCATCTTAA
ATA---------------------GGGCCCGTATTTACCCTATAG
AACTGGTCCACCCTTATACTAAAAGACGCCTCACTAGGAAGCTAA
AACTGATCTGCTTCAATAATT---------------------TAA
AACTGATCAGCTTCTATAATT---------------------TAA
AAATGATCTACCTCAATGCTT---------------------TAA
AAATGATCTGTATCAATACTA---------------------TAA
AACTGATCTTCATCAATACTA---GAAGCATCACTA------AGA
""")
    self.space_interleaved = StringIO(""" 5 176 I
cox2_leita MAFILSFWMI FLLDSVIVLL SFVCFVCVWI CALLFSTVLL VSKLNNIYCT
cox2_crifa MAFILSFWMI FLIDAVIVLL SFVCFVCIWI CSLFFSSFLL VSKINNVYCT
cox2_bsalt MSFIISFWML FLIDSLIVLL SGAIFVCIWI CSLFFLCILF ICKLDYIFCS
cox2_trybb MSFILTFWMI FLMDSIIVLI SFSIFLSVWI CALIIATVLT VTKINNIYCT
cox2_tborr MLFFINQLLL LLVDTFVILE IFSLFVCVFI IVMYILFINY NIFLKNINVY

WDFTASKFID VYWFTIGGMF SLGLLLRLCL LLYFGHLNFV SFDLCKVVGF
WDFTASKFID AYWFTIGGMF VLCLLLRLCL LLYFGCLNFV SFDLCKVVGF
WDFISAKFID LYWFTLGCLF IVCLLIRLCL LLYFSCLNFV CFDLCKCIGF
WDFISSKFID TYWFVLGMMF ILCLLLRLCL LLYFSCINFV SFDLCKVIGF
LDFIGSKYLD LYWFLIGIFF VIVLLIRLCL LLYYSWISLL IFDLCKIMGF

QWYWVYFIFG ETTIFSNLIL ESDYMIGDLR LLQCNHVLTL LSLVIYKLWL
QWYWVYFIFG ETTIFSNLIL ESDYLIGDLR LLQCNHVLTL LSLVIYKLWL
QWYWVYFIFG ETTIFSNLIL ESDYLIGDLR LLQCNHVLTL LSLVIYKVWL
QWYWVYFLFG ETTIFSNLIL ESDYLIGDLR ILQCNHVLTL LSLVIYKLWV
QWYWIFFVFK ENVIFSNLLI ESDYWIGDLR LLQCNNTFNL ICLVVYKIWV

SAVDVIHSFA ISSLGVKVEN LVAVMK
SAVDVIHSFA VSSLGIKVDC IPGRCN
SAIDVIHSFT LANLGIKVD? ?PGRCN
SAVDVIHSFT ISSLGIKVEN PGRCNE
TSIDVIHSFT ISTLGIKIDC IPGRCN
""")
    self.interleaved_little = StringIO(""" 6 39 I
Archaeopt CGATGCTTAC CGCCGATGCT
HesperorniCGTTACTCGT TGTCGTTACT
BaluchitheTAATGTTAAT TGTTAATGTT
B. virginiTAATGTTCGT TGTTAATGTT
BrontosaurCAAAACCCAT CATCAAAACC
B.subtilisGGCAGCCAAT CACGGCAGCC

TACCGCCGAT GCTTACCGC
CGTTGTCGTT ACTCGTTGT
AATTGTTAAT GTTAATTGT
CGTTGTTAAT GTTCGTTGT
CATCATCAAA ACCCATCAT
AATCACGGCA GCCAATCAC
""")
    self.empty = []
    self.noninterleaved_little = StringIO(""" 6 20
Archaeopt CGATGCTTAC CGCCGATGCT
HesperorniCGTTACTCGT TGTCGTTACT
BaluchitheTAATGTTAAT TGTTAATGTT
B. virginiTAATGTTCGT TGTTAATGTT
BrontosaurCAAAACCCAT CATCAAAACC
B.subtilisGGCAGCCAAT CACGGCAGCC
""")
    self.noninterleaved_big = StringIO("""10 297
Rhesus tgtggcacaaatactcatgccagctcattacagcatgagaac---agtttgttactcact
aaagacagaatgaatgtagaaaaggctgaattctgtaataaaagcaaacagcctggcttg
gcaaggagccaacataacagatggactggaagtaaggaaacatgtaatgataggcagact
cccagcacagagaaaaaggtagatctgaatgctaatgccctgtatgagagaaaagaatgg
aataagcaaaaactgccatgctctgagaatcctagagacactgaagatgttccttgg
Manatee tgtggcacaaatactcatgccagctcattacagcatgagaatagcagtttattactcact
aaagacagaatgaatgtagaaaaggctgaattctgtcataaaagcaaacagcctggctta
acaaggagccagcagagcagatgggctgaaagtaaggaaacatgtaatgataggcagact
cctagcacagagaaaaaggtagatatgaatgctaatccattgtatgagagaaaagaagtg
aataagcagaaacctccatgctccgagagtgttagagatacacaagatattccttgg
Pig tgtggcacagatactcatgccagctcgttacagcatgagaacagcagtttattactcact
aaagacagaatgaatgtagaaaaggctgaattttgtaataaaagcaagcagcctgtctta
gcaaagagccaacagagcagatgggctgaaagtaagggcacatgtaatgataggcagact
cctaacacagagaaaaaggtagttctgaatactgatctcctgtatgggagaaacgaactg
aataagcagaaacctgcgtgctctgacagtcctagagattcccaagatgttccttgg
""")
def test_position():
    """ Test positioned progress bars """
    if nt_and_no_colorama:
        raise SkipTest

    # Use regexp because the it rates can change
    RE_pos = re.compile(r'((\x1b\[A|\r|\n)+((pos\d+) bar:\s+\d+%|\s{3,6})?)')  # NOQA

    # Artificially test nested loop printing
    # Without leave
    our_file = StringIO()
    t = tqdm(total=2, file=our_file, miniters=1, mininterval=0, maxinterval=0,
             desc='pos2 bar', leave=False, position=2)
    t.update()
    t.close()
    our_file.seek(0)
    out = our_file.read()
    res = [m[0] for m in RE_pos.findall(out)]
    exres = ['\n\n\rpos2 bar: 0%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 50%',
             '\x1b[A\x1b[A\n\n\r ',
             '\x1b[A\x1b[A']
    if res != exres:
        raise AssertionError("\nExpected:\n{0}\nGot:\n{1}\nRaw:\n{2}\n".format(
            str(exres), str(res), str([out])))

    # Test iteration-based tqdm positioning
    our_file = StringIO()
    for i in trange(2, file=our_file, miniters=1, mininterval=0,
                    maxinterval=0, desc='pos0 bar', position=0):
        for j in trange(2, file=our_file, miniters=1, mininterval=0,
                        maxinterval=0, desc='pos1 bar', position=1):
            for k in trange(2, file=our_file, miniters=1, mininterval=0,
                            maxinterval=0, desc='pos2 bar', position=2):
                pass
    our_file.seek(0)
    out = our_file.read()
    res = [m[0] for m in RE_pos.findall(out)]
    exres = ['\rpos0 bar: 0%',
             '\n\rpos1 bar: 0%',
             '\x1b[A\n\n\rpos2 bar: 0%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 50%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 100%',
             '\x1b[A\x1b[A\n\n\x1b[A\x1b[A\n\rpos1 bar: 50%',
             '\x1b[A\n\n\rpos2 bar: 0%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 50%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 100%',
             '\x1b[A\x1b[A\n\n\x1b[A\x1b[A\n\rpos1 bar: 100%',
             '\x1b[A\n\x1b[A\rpos0 bar: 50%',
             '\n\rpos1 bar: 0%',
             '\x1b[A\n\n\rpos2 bar: 0%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 50%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 100%',
             '\x1b[A\x1b[A\n\n\x1b[A\x1b[A\n\rpos1 bar: 50%',
             '\x1b[A\n\n\rpos2 bar: 0%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 50%',
             '\x1b[A\x1b[A\n\n\rpos2 bar: 100%',
             '\x1b[A\x1b[A\n\n\x1b[A\x1b[A\n\rpos1 bar: 100%',
             '\x1b[A\n\x1b[A\rpos0 bar: 100%',
             '\n']
    if res != exres:
        raise AssertionError("\nExpected:\n{0}\nGot:\n{1}\nRaw:\n{2}\n".format(
            str(exres), str(res), str([out])))

    # Test manual tqdm positioning
    our_file = StringIO()
    t1 = tqdm(total=2, file=our_file, miniters=1, mininterval=0,
              maxinterval=0, desc='pos0 bar', position=0)
    t2 = tqdm(total=2, file=our_file, miniters=1, mininterval=0,
              maxinterval=0, desc='pos1 bar', position=1)
    t3 = tqdm(total=2, file=our_file, miniters=1, mininterval=0,
              maxinterval=0, desc='pos2 bar', position=2)
    for i in _range(2):
        t1.update()
        t3.update()
        t2.update()
    our_file.seek(0)
    out = our_file.read()
    res = [m[0] for m in RE_pos.findall(out)]
    exres = ['\rpos0 bar: 0%',
             '\n\rpos1 bar: 0%',
             '\x1b[A\n\n\rpos2 bar: 0%',
             '\x1b[A\x1b[A\rpos0 bar: 50%',
             '\n\n\rpos2 bar: 50%',
             '\x1b[A\x1b[A\n\rpos1 bar: 50%',
             '\x1b[A\rpos0 bar: 100%',
             '\n\n\rpos2 bar: 100%',
             '\x1b[A\x1b[A\n\rpos1 bar: 100%',
             '\x1b[A']
    if res != exres:
        raise AssertionError("\nExpected:\n{0}\nGot:\n{1}\nRaw:\n{2}\n".format(
            str(exres), str(res), str([out])))
    t1.close()
    t2.close()
    t3.close()

    # Test auto repositioning of bars when a bar is prematurely closed
    # tqdm._instances.clear()  # reset number of instances
    with closing(StringIO()) as our_file:
        t1 = tqdm(total=10, file=our_file, desc='pos0 bar', mininterval=0)
        t2 = tqdm(total=10, file=our_file, desc='pos1 bar', mininterval=0)
        t3 = tqdm(total=10, file=our_file, desc='pos2 bar', mininterval=0)
        res = [m[0] for m in RE_pos.findall(our_file.getvalue())]
        exres = ['\rpos0 bar: 0%',
                 '\n\rpos1 bar: 0%',
                 '\x1b[A\n\n\rpos2 bar: 0%',
                 '\x1b[A\x1b[A']
        if res != exres:
            raise AssertionError(
                "\nExpected:\n{0}\nGot:\n{1}\n".format(str(exres), str(res)))

        t2.close()
        t4 = tqdm(total=10, file=our_file, desc='pos3 bar', mininterval=0)
        t1.update(1)
        t3.update(1)
        t4.update(1)
        res = [m[0] for m in RE_pos.findall(our_file.getvalue())]
        exres = ['\rpos0 bar: 0%',
                 '\n\rpos1 bar: 0%',
                 '\x1b[A\n\n\rpos2 bar: 0%',
                 '\x1b[A\x1b[A\n\x1b[A\n\n\rpos3 bar: 0%',
                 '\x1b[A\x1b[A\rpos0 bar: 10%',
                 '\n\rpos2 bar: 10%',
                 '\x1b[A\n\n\rpos3 bar: 10%',
                 '\x1b[A\x1b[A']
        if res != exres:
            raise AssertionError(
                "\nExpected:\n{0}\nGot:\n{1}\n".format(str(exres), str(res)))
        t4.close()
        t3.close()
        t1.close()
class Xunit(Plugin):
    """This plugin provides test results in the standard XUnit XML format."""
    name = 'xunit'
    score = 1500
    encoding = 'UTF-8'
    error_report_file = None

    def __init__(self):
        super(Xunit, self).__init__()
        self._capture_stack = []
        self._currentStdout = None
        self._currentStderr = None

    def _timeTaken(self):
        if hasattr(self, '_timer'):
            taken = time() - self._timer
        else:
            # test died before it ran (probably error in setup())
            # or success/failure added before test started probably
            # due to custom TestResult munging
            taken = 0.0
        return taken

    def _quoteattr(self, attr):
        """Escape an XML attribute. Value can be unicode."""
        attr = xml_safe(attr)
        return saxutils.quoteattr(attr)

    def options(self, parser, env):
        """Sets additional command line options."""
        Plugin.options(self, parser, env)
        parser.add_option(
            '--xunit-file', action='store',
            dest='xunit_file', metavar="FILE",
            default=env.get('NOSE_XUNIT_FILE', 'nosetests.xml'),
            help=("Path to xml file to store the xunit report in. "
                  "Default is nosetests.xml in the working directory "
                  "[NOSE_XUNIT_FILE]"))

    def configure(self, options, config):
        """Configures the xunit plugin."""
        Plugin.configure(self, options, config)
        self.config = config
        if self.enabled:
            self.stats = {'errors': 0,
                          'failures': 0,
                          'passes': 0,
                          'skipped': 0}
            self.errorlist = []
            self.error_report_file_name = options.xunit_file

    def report(self, stream):
        """Writes an Xunit-formatted XML file

        The file includes a report of test errors and failures.
        """
        self.error_report_file = codecs.open(self.error_report_file_name, 'w',
                                             self.encoding, 'replace')
        self.stats['encoding'] = self.encoding
        self.stats['total'] = (self.stats['errors'] + self.stats['failures']
                               + self.stats['passes'] + self.stats['skipped'])
        self.error_report_file.write(
            u'<?xml version="1.0" encoding="%(encoding)s"?>'
            u'<testsuite name="nosetests" tests="%(total)d" '
            u'errors="%(errors)d" failures="%(failures)d" '
            u'skip="%(skipped)d">' % self.stats)
        self.error_report_file.write(u''.join(
            [force_unicode(e, self.encoding) for e in self.errorlist]))
        self.error_report_file.write(u'</testsuite>')
        self.error_report_file.close()
        if self.config.verbosity > 1:
            stream.writeln("-" * 70)
            stream.writeln("XML: %s" % self.error_report_file.name)

    def _startCapture(self):
        self._capture_stack.append((sys.stdout, sys.stderr))
        self._currentStdout = StringIO()
        self._currentStderr = StringIO()
        sys.stdout = Tee(self.encoding, self._currentStdout, sys.stdout)
        sys.stderr = Tee(self.encoding, self._currentStderr, sys.stderr)

    def startContext(self, context):
        self._startCapture()

    def beforeTest(self, test):
        """Initializes a timer before starting a test."""
        self._timer = time()
        self._startCapture()

    def _endCapture(self):
        if self._capture_stack:
            sys.stdout, sys.stderr = self._capture_stack.pop()

    def afterTest(self, test):
        self._endCapture()
        self._currentStdout = None
        self._currentStderr = None

    def finalize(self, test):
        while self._capture_stack:
            self._endCapture()

    def _getCapturedStdout(self):
        if self._currentStdout:
            value = self._currentStdout.getvalue()
            if value:
                return '<system-out><![CDATA[%s]]></system-out>' % escape_cdata(value)
        return ''

    def _getCapturedStderr(self):
        if self._currentStderr:
            value = self._currentStderr.getvalue()
            if value:
                return '<system-err><![CDATA[%s]]></system-err>' % escape_cdata(value)
        return ''

    def addError(self, test, err, capt=None):
        """Add error output to Xunit report."""
        taken = self._timeTaken()
        if issubclass(err[0], SkipTest):
            type = 'skipped'
            self.stats['skipped'] += 1
        else:
            type = 'error'
            self.stats['errors'] += 1
        tb = format_exception(err, self.encoding)
        id = test.id()
        self.errorlist.append(
            u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
            u'<%(type)s type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
            u'</%(type)s>%(systemout)s%(systemerr)s</testcase>' %
            {'cls': self._quoteattr(id_split(id)[0]),
             'name': self._quoteattr(id_split(id)[-1]),
             'taken': taken,
             'type': type,
             'errtype': self._quoteattr(nice_classname(err[0])),
             'message': self._quoteattr(exc_message(err)),
             'tb': escape_cdata(tb),
             'systemout': self._getCapturedStdout(),
             'systemerr': self._getCapturedStderr(),
             })

    def addFailure(self, test, err, capt=None, tb_info=None):
        """Add failure output to Xunit report."""
        taken = self._timeTaken()
        tb = format_exception(err, self.encoding)
        self.stats['failures'] += 1
        id = test.id()
        self.errorlist.append(
            u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
            u'<failure type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
            u'</failure>%(systemout)s%(systemerr)s</testcase>' %
            {'cls': self._quoteattr(id_split(id)[0]),
             'name': self._quoteattr(id_split(id)[-1]),
             'taken': taken,
             'errtype': self._quoteattr(nice_classname(err[0])),
             'message': self._quoteattr(exc_message(err)),
             'tb': escape_cdata(tb),
             'systemout': self._getCapturedStdout(),
             'systemerr': self._getCapturedStderr(),
             })

    def addSuccess(self, test, capt=None):
        """Add success output to Xunit report."""
        taken = self._timeTaken()
        self.stats['passes'] += 1
        id = test.id()
        self.errorlist.append(
            '<testcase classname=%(cls)s name=%(name)s '
            'time="%(taken).3f">%(systemout)s%(systemerr)s</testcase>' %
            {'cls': self._quoteattr(id_split(id)[0]),
             'name': self._quoteattr(id_split(id)[-1]),
             'taken': taken,
             'systemout': self._getCapturedStdout(),
             'systemerr': self._getCapturedStderr(),
             })