def test_emptyFile():
    # No defaults
    simple = makeSimple()
    assert_equals(simple.int(), None)
    assert_equals(simple.bool(), None)
    assert_equals(simple.float(), None)
    # Preserved by reading empty file?
    simple.readfp(sio())
    assert_equals(simple.int(), None)
    assert_equals(simple.bool(), None)
    assert_equals(simple.float(), None)
    # Preserved by reading one-comment file?
    simple.readfp(sio('#!/usr/bin/env finitd\n'))
    assert_equals(simple.int(), None)
    assert_equals(simple.bool(), None)
    assert_equals(simple.float(), None)
    # Set values and try again
    simple.int.set(1)
    simple.bool.set(True)
    simple.float.set(1.0)
    assert_equals(simple.int(), 1)
    assert_equals(simple.bool(), True)
    assert_equals(simple.float(), 1.0)
    # Preserved by reading empty file?
    simple.readfp(sio())
    assert_equals(simple.int(), 1)
    assert_equals(simple.bool(), True)
    assert_equals(simple.float(), 1.0)
    # Preserved by reading one-comment file?
    simple.readfp(sio('#!/usr/bin/env finitd\n'))
    assert_equals(simple.int(), 1)
    assert_equals(simple.bool(), True)
    assert_equals(simple.float(), 1.0)
def _sio(s=None):
    if not s:
        return sio()
    if PY3K:
        return sio(bytes(s, "ascii"))
    else:
        return sio(s)
def update_ipk():
    def _sio(s=None):
        if not s:
            return sio()
        if PY3K:
            return sio(bytes(s, "ascii"))
        else:
            return sio(s)

    def flen(fobj):
        pos = fobj.tell()
        fobj.seek(0)
        _ = len(fobj.read())
        fobj.seek(pos)
        return _

    def add_to_tar(tar, name, sio_obj, mode=33279):
        info = tarfile.TarInfo(name=name)
        info.size = flen(sio_obj)
        info.mode = mode
        sio_obj.seek(0)
        tar.addfile(info, sio_obj)

    if os.path.exists(ipk_file):
        os.remove(ipk_file)
    ipk_fobj = tarfile.open(name=ipk_file, mode='w:gz')

    data_stream = sio()
    data_fobj = tarfile.open(fileobj=data_stream, mode='w:gz')
    data_content = open(shell_file, 'rb')
    add_to_tar(data_fobj, './bin/swjsq', data_content)
    data_fobj.close()
    add_to_tar(ipk_fobj, './data.tar.gz', data_stream)
    data_stream.close()

    control_stream = sio()
    control_fobj = tarfile.open(fileobj=control_stream, mode='w:gz')
    control_content = _sio('''Package: swjsq
Version: 0.0.1
Depends: libc
Source: none
Section: net
Maintainer: fffonion
Architecture: all
Installed-Size: %d
Description: Xunlei Fast Dick
''' % flen(data_content))
    add_to_tar(control_fobj, './control', control_content)
    control_fobj.close()
    add_to_tar(ipk_fobj, './control.tar.gz', control_stream)
    control_stream.close()

    data_content.close()
    control_content.close()

    debian_binary_stream = _sio('2.0\n')
    add_to_tar(ipk_fobj, './debian-binary', debian_binary_stream)
    debian_binary_stream.close()
    ipk_fobj.close()
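# The following is an illustrative, self-contained sketch (not part of the original
# project) of the add_to_tar technique used above: an in-memory payload is packed into
# a tar archive by filling in a tarfile.TarInfo by hand. All names below are
# hypothetical; only the standard library is used. Note that mode=33279 above is
# 0o100777, i.e. the regular-file type bit plus rwxrwxrwx permissions.
import io
import tarfile

def pack_in_memory(archive_path, member_name, payload_bytes):
    """Write payload_bytes into archive_path as member_name, without touching disk for the payload."""
    with tarfile.open(archive_path, mode='w:gz') as tar:
        buf = io.BytesIO(payload_bytes)
        info = tarfile.TarInfo(name=member_name)
        info.size = len(payload_bytes)  # size must be set before addfile()
        info.mode = 0o755               # permission bits; the snippet above also sets the file-type bit
        tar.addfile(info, buf)

# Example: pack_in_memory('demo.tar.gz', './control', b'Package: demo\n')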
def test_writefp_annotate():
    config = hieropt.Group('config')
    config.register(hieropt.Int('x', default=1, comment='config int x'))
    fp = sio()
    config.writefp(fp)
    assert_equals(fp.getvalue().strip(), """
# config int x
# config.x: 1
""".strip())
    fp = sio()
    config.writefp(fp, annotate=False)
    assert_equals(fp.getvalue().strip(), """
# config.x: 1
""".strip())
    config.x.set(2)
    fp = sio()
    config.writefp(fp)
    assert_equals(fp.getvalue().strip(), """
# config int x
config.x: 2
""".strip())
    fp = sio()
    config.writefp(fp, annotate=False)
    assert_equals(fp.getvalue().strip(), """
config.x: 2
""".strip())
def update_ipk():
    # FIXME: 3.X compatibility
    def get_sio(tar, name):
        return sio(tar.extractfile(name).read())

    def flen(fobj):
        pos = fobj.tell()
        fobj.seek(0)
        _ = len(fobj.read())
        fobj.seek(pos)
        return _

    def add_to_tar(tar, name, sio_obj, mode=33279):
        info = tarfile.TarInfo(name=name)
        info.size = flen(sio_obj)
        info.mode = mode
        sio_obj.seek(0)
        tar.addfile(info, sio_obj)

    if os.path.exists(ipk_file):
        os.remove(ipk_file)
    ipk_fobj = tarfile.open(name=ipk_file, mode="w:gz")

    data_stream = sio()
    data_fobj = tarfile.open(fileobj=data_stream, mode="w:gz")
    data_content = open(shell_file, "r")
    add_to_tar(data_fobj, "./bin/swjsq", data_content)
    data_fobj.close()
    add_to_tar(ipk_fobj, "./data.tar.gz", data_stream)
    data_stream.close()

    control_stream = sio()
    control_fobj = tarfile.open(fileobj=control_stream, mode="w:gz")
    control_content = sio(
        """Package: swjsq
Version: 0.0.1
Depends: libc
Source: none
Section: net
Maintainer: fffonion
Architecture: all
Installed-Size: %d
Description: Xunlei Fast Dick
"""
        % flen(data_content)
    )
    add_to_tar(control_fobj, "./control", control_content)
    control_fobj.close()
    add_to_tar(ipk_fobj, "./control.tar.gz", control_stream)
    control_stream.close()

    data_content.close()
    control_content.close()

    debian_binary_stream = sio("2.0\n")
    add_to_tar(ipk_fobj, "./debian-binary", debian_binary_stream)
    debian_binary_stream.close()
    ipk_fobj.close()
def test_read_sg_005_edges(self):
    """Just an edge"""
    g = SizeGraph.readsg(sio("""4
MS 0.1 0.2 0.3 0.4
1
0 1
"""))
    self.assertEqual(4, len(g.nodes))
    C = g.get_connections()
    self.assertEqual(1, len(C))
    c = C.pop()
    self._assert_connection(c, 0.1, 0.2)
    g = SizeGraph.readsg(sio("""4
1
0 1
"""), ms=[.1, .2, .3, .4])
    self.assertEqual(4, len(g.nodes))
    C = g.get_connections()
    self.assertEqual(1, len(C))
    c = C.pop()
    self._assert_connection(c, 0.1, 0.2)

    """More edges"""
    g = SizeGraph.readsg(sio("""4
MS 0.1 0.2 0.3 0.4
4
0 1
1 2
2 3
3 0
"""))
    self.assertEqual(4, len(g.nodes))
    C = g.get_connections()
    self.assertEqual(4, len(C))
    ii = [.1, .2, .3, .4, .1]
    for i in xrange(4):
        self._assert_in_connection(C, ii[i], ii[i + 1])
    g = SizeGraph.readsg(sio("""4
4
0 1
1 2
2 3
3 0
"""), ms=[.1, .2, .3, .4])
    self.assertEqual(4, len(g.nodes))
    C = g.get_connections()
    self.assertEqual(4, len(C))
    ii = [.1, .2, .3, .4, .1]
    for i in xrange(4):
        self._assert_in_connection(C, ii[i], ii[i + 1])
def test_nonstrict():
    s = """
simple.int: 1
simple.float: 2.0
notsimple.somethingelse: True
""".strip()
    simple = makeSimple()
    assert_raises(hieropt.UnregisteredName, simple.readfp, sio(s))
    simple = makeSimple(strict=False)
    simple.readfp(sio(s))
    assert_equals(simple.int(), 1)
    assert_equals(simple.float(), 2.0)
def test_protocol_failing(self):
    """
    Test that a failure in the process termination is correctly
    propagated to the finished deferred.
    """
    s = sio()
    sa = sio()
    ac = self._makeConnector(s, sa)
    ac.finished.addCallback(_raise)
    fail = failure.Failure(error.ProcessTerminated())
    self.assertFailure(ac.finished, error.ProcessTerminated)
    ac.processEnded(fail)
def test_protocol(self):
    """
    Test that outReceived writes to AMP and that it triggers the
    finished deferred once the process ended.
    """
    s = sio()
    sa = sio()
    ac = self._makeConnector(s, sa)
    for x in xrange(99):
        ac.childDataReceived(4, str(x))
    ac.processEnded(failure.Failure(error.ProcessDone(0)))
    return ac.finished.addCallback(
        lambda _: self.assertEqual(sa.getvalue(),
                                   "".join(str(x) for x in xrange(99))))
def write_document(tree, stream=None, encoding=None):
    """
    If a stream is given, the result is encoded (encoding defaults to
    sys.getfilesystemencoding) and written to the stream. If no stream is
    given, the result is returned, either as a unicode string or encoded
    using the requested encoding.
    """
    if stream is None and encoding is None:
        _stream = utils._ListStream()
    elif stream is None:
        try:
            from cStringIO import StringIO as sio
        except ImportError:
            from StringIO import StringIO as sio
        _stream = utils.StreamWriteEncoder(sio(), encoding)
    else:
        if encoding is None:
            encoding = sys.getfilesystemencoding()
        _stream = utils.StreamWriteEncoder(stream, encoding)
    if encoding is not None and isinstance(tree, ast.Document):
        _stream.write('<?xml version="1.0" encoding="%s"?>' % encoding)
    tree.write_xml(_stream)
    if stream is None:
        res = _stream.getvalue()
        _stream.close()
        return res
def _hostmaskPatternEqual(pattern, hostmask):
    try:
        return _patternCache[pattern](hostmask) is not None
    except KeyError:
        # We make our own regexps, rather than use fnmatch, because fnmatch's
        # case-insensitivity is not IRC's case-insensitivity.
        fd = sio()
        for c in pattern:
            if c == '*':
                fd.write('.*')
            elif c == '?':
                fd.write('.')
            elif c in '[{':
                fd.write('[[{]')
            elif c in '}]':
                fd.write(r'[}\]]')
            elif c in '|\\':
                fd.write(r'[|\\]')
            elif c in '^~':
                fd.write('[~^]')
            else:
                fd.write(re.escape(c))
        fd.write('$')
        f = re.compile(fd.getvalue(), re.I).match
        _patternCache[pattern] = f
        return f(hostmask) is not None
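# Standalone sketch (illustrative, not supybot's API) of the translation performed by
# _hostmaskPatternEqual above: an IRC-style wildcard mask is compiled into a regex,
# with '*' -> '.*' and '?' -> '.', then matched case-insensitively. This simplified
# version drops the special handling of []{}|\^~ that IRC case-folding requires.
import re
from io import StringIO

def mask_to_regex(pattern):
    fd = StringIO()
    for c in pattern:
        if c == '*':
            fd.write('.*')
        elif c == '?':
            fd.write('.')
        else:
            fd.write(re.escape(c))
    fd.write('$')
    return re.compile(fd.getvalue(), re.I)

# Example: mask_to_regex('*!*@*.example.com').match('Nick!user@host.example.com') matches.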
def test_env_setting(self):
    """
    Test that an environment variable passed to the process starter is
    correctly passed to the child process.
    """
    s = sio()
    a = FakeAMP(s)
    STRING = "ciao"
    BOOT = """\
import sys, os
def main():
    os.write(4, os.getenv("FOOBAR"))
main()
"""
    starter = main.ProcessStarter(bootstrap=BOOT,
                                  packages=("twisted", "ampoule"),
                                  env={"FOOBAR": STRING})
    amp, finished = starter.startPythonProcess(main.AMPConnector(a), "I'll be ignored")

    def _eb(reason):
        print reason
    finished.addErrback(_eb)
    return finished.addCallback(
        lambda _: self.assertEquals(s.getvalue(), STRING))
def writeto(self, f):
    for i, ar in enumerate(self.areas):
        if hasattr(ar, 'contours'):
            continue
        ar.contours = []
        for w in ar.walls:
            cont = []
            while not hasattr(w, 'seen'):
                w.seen = True
                cont.append(w)
                w = w.point2
            if cont:
                ar.contours.append(cont)
        ar.idx = i
    f.writeu16(len(self.areas))
    ncont = nwalls = nparts = nlinks = nactors = 0
    for ar in self.areas:
        timer.progress()
        f.writestring(ar.id, usetbl=False)
        ar.ceil.writeto(f)
        ar.floor.writeto(f)
        f.writeu8(len(ar.contours))
        for c in ar.contours:
            ncont += 1
            f.writeu8(len(c))
            for w in c:
                nwalls += 1
                f.writef(w.x1, w.y1)
                f.writeu8(len(w.links))
                for oa, flags in w.links.iteritems():
                    nlinks += 1
                    f.writeu16(oa.idx)
                    f.writeu8(flags)
                parts = [p for p in w.parts if p.tex]
                f.writeu8(len(parts))
                for p in parts:
                    nparts += 1
                    p.writeto(f)
        actors = [a for a in ar.actors if a.clas]
        f.writeu16(len(actors))
        for a in actors:
            nactors += 1
            f.writestring(a.clas)
            f.writef(a.x, a.y, a.z, a.ang)
            nf = sio()
            nds = datastream(nf)
            a.extra(nds)
            extra = nf.getvalue()
            f.writeu16(len(extra))
            f.write(extra)
    return len(self.areas), ncont, nwalls, nlinks, nparts, nactors
def test_startProcess(self):
    """
    Test that startProcess actually starts a subprocess and that it
    receives data back from the process through AMP.
    """
    s = sio()
    a = FakeAMP(s)
    STRING = "ciao"
    BOOT = """\
import sys, os
def main(arg):
    os.write(4, arg)
main(sys.argv[1])
"""
    starter = main.ProcessStarter(bootstrap=BOOT, args=(STRING,),
                                  packages=("twisted", "ampoule"))
    amp, finished = starter.startPythonProcess(main.AMPConnector(a))

    def _eb(reason):
        print reason
    finished.addErrback(_eb)
    return finished.addCallback(
        lambda _: self.assertEquals(s.getvalue(), STRING))
def load(self, key, silent=False):
    """Load from location ``key``

    :param key: location
    :type key: str
    :param silent: whether load fails silently
    :type silent: bool
    :return: loaded object
    :rtype: object
    """
    # Load a pickle object from s3 bucket
    try:
        #buffer = sio.StringIO()
        buffer = sio()
        load_key = key
        if self.aws_prefix:
            load_key = '/'.join([self.aws_prefix, key])
        self.bucket.download_fileobj(load_key, buffer)
        # buffer has been filled, offset is at the end, seek to beginning for unpickling
        buffer.seek(0)
        obj = pickle.load(buffer)
        if self.verbose > 1:
            print("[{}: log] Loaded file: {}".format(self.pp, load_key))
        return obj
    except Exception as e:
        if self.verbose > 1 and not silent:
            err_msg = "[{}: error ({}: {})] Could not load object with key: {}"
            print(err_msg.format(self.pp, type(e), e, load_key))
def test_flexible_group():
    config = hieropt.Group('config')
    config.register(hieropt.Group('strings', Child=hieropt.Value))
    config.register(hieropt.Group('ints', Child=hieropt.Int))
    assert_write_then_read_equivalence(config)
    config.strings.s1.set('foo')
    assert_write_then_read_equivalence(config)
    assert_equals(config.strings.s1(), 'foo')
    config.strings.s2.set('bar')
    assert_write_then_read_equivalence(config)
    assert_equals(config.strings.s2(), 'bar')
    config.ints.x.set(1)
    assert_equals(config.ints.x(), 1)
    assert_write_then_read_equivalence(config)
    config.ints.y.set(2)
    assert_equals(config.ints.y(), 2)
    assert_write_then_read_equivalence(config)
    fp = sio("""
config.strings.s3: baz
config.ints.z: 3
""")
    config.readfp(fp)
    assert_equals(config.strings.s3(), 'baz')
    assert_equals(config.ints.z(), 3)
    assert_write_then_read_equivalence(config)
def load(self, key, silent=False):
    """Load from location ``key``

    :param key: location
    :type key: str
    :param silent: whether load fails silently
    :type silent: bool
    :return: loaded object
    :rtype: object
    """
    # Load a pickle object from s3 bucket
    try:
        #buffer = sio.StringIO()
        buffer = sio()
        load_key = key
        if self.aws_prefix:
            load_key = '/'.join([self.aws_prefix, key])
        # Define a Callback function and print out based on verbose level?
        # This can hang if load_key is not found?
        #self.bucket.download_fileobj(load_key, buffer)
        resp = self.s3.Object(bucket_name=self.bucket_name, key=load_key)
        # Try to access 'content_length' to generate a 404 error if not found
        if resp.content_length == 0:
            return None
        resp.download_fileobj(buffer)
        # buffer has been filled, offset is at the end, seek to beginning for unpickling
        buffer.seek(0)
        obj = pickle.load(buffer)
        if self.verbose > 2:
            print("[{}: log] Loaded file: {}".format(self.pp, load_key))
        return obj
    except Exception as e:
        if self.verbose > 1 and not silent:
            err_msg = "[{}: error ({}: {})] Could not load object with key: {}"
            print(err_msg.format(self.pp, type(e), e, load_key))
def dispPareto(dFrame):
    dFrame.amount = dFrame.amount.astype('float')
    dFrame = dFrame.groupby('cat').sum().sort_values('amount')
    plot = dFrame.plot.bar(color='grey')
    fig = plot.get_figure()
    buf = sio()
    fig.savefig(buf, format='png')
    return buf
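# Hedged, self-contained sketch of the buffer pattern used by dispPareto (and
# dispMonthPareto below): render a matplotlib figure into an in-memory PNG instead of
# a file on disk. The DataFrame and its column names here are made up for illustration.
import matplotlib
matplotlib.use('Agg')  # non-interactive backend; rendering goes straight to the buffer
import pandas as pd
from io import BytesIO

def figure_to_png_bytes(df):
    plot = df.groupby('cat')['amount'].sum().sort_values().plot.bar(color='grey')
    fig = plot.get_figure()
    buf = BytesIO()
    fig.savefig(buf, format='png')  # savefig accepts any writable binary file-like object
    buf.seek(0)                     # rewind so callers can read the PNG bytes
    return buf

# Example:
# demo = pd.DataFrame({'cat': ['food', 'rent', 'food'], 'amount': [10.0, 500.0, 5.0]})
# png_bytes = figure_to_png_bytes(demo).read()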
def test_read_sf_from_file_dump_dual(self):
    sf = SF.SizeFunction()
    sf.new_ssf(-10, 10, [(-3, 2), (-5, -2), (2, 9), (-1, 1)])
    sf.new_ssf(-3, 12, [(-1, 2), (2, 10), (2, 11), (-1, 3)])
    f = sio()
    sf.dump(f)
    f.seek(0)
    self.assertEqual(sf, readsf(f))
def test_inflexible_group():
    config = hieropt.Group('config')
    config.register(hieropt.Int('x', default=1))
    assert_raises(AttributeError, getattr, config, 'y')
    fp = sio("""
config.x: 2
config.y: 2
""")
    assert_raises(hieropt.UnregisteredName, config.readfp, fp)
def dispMonthPareto(dFrame, cat):
    dFrame.amount = dFrame.amount.astype('float')
    dFrame = dFrame.loc[dFrame['cat'] == cat]
    dFrame = dFrame.groupby([
        dFrame['date'].dt.month,
        dFrame['date'].dt.year
    ]).sum().sort_values('amount')
    plot = dFrame.plot.bar(color='grey')
    fig = plot.get_figure()
    buf = sio()
    fig.savefig(buf, format='png')
    return buf
def test_xxx_dump_read_sg_dual(self):
    g = SG()
    N = [g.add_node(.1),
         g.add_node(.2),
         g.add_node(.3),
         g.add_node(.4)]
    N[0].connect(N[1])
    N[0].connect(N[2])
    N[1].connect(N[3])
    f = sio()
    g.dump(f)
    f.seek(0)
    self.assertSameGraph(g, SizeGraph.readsg(f))
    print f.getvalue()
def test_equals_works_as_separator():
    simple = makeSimple()
    fp = sio("""
simple.int: 1
simple.bool = True
""")
    assert_equals(simple.int(), None)
    assert_equals(simple.bool(), None)
    simple.readfp(fp)
    assert_equals(simple.int(), 1)
    assert_equals(simple.bool(), True)
def integrate_symbol_zip_from_url(self, symbol_zip_url):
    if self.have_integrated(symbol_zip_url):
        return
    LOG.info("Retrieving symbol zip from {symbol_zip_url}...".format(
        symbol_zip_url=symbol_zip_url))
    try:
        io = urlopen(symbol_zip_url, None, 30)
        with zipfile.ZipFile(sio(io.read())) as zf:
            self.integrate_symbol_zip(zf)
        self._create_file_if_not_exists(self._marker_file(symbol_zip_url))
    except IOError:
        LOG.info("Symbol zip request failed.")
def test_writeComment():
    fp = sio()
    L = []
    for i in xrange(100):
        fp.seek(0)
        comment = ' '.join(L)
        hieropt.writeComment(fp, comment)
        fp.seek(0)
        for line in fp:
            assert line.startswith('#')
            assert len(line) < 80, 'Line longer than 80 chars: %r' % line
        L.append('xyz')
def load_djh():
    s = """\
tc tc_err
2457898.54880 0.00459
2457906.46919 0.00569
2457914.39057 0.00674
2457922.31341 0.00776
2457930.23357 0.00881
2457938.15467 0.00981
2457946.07734 0.01071
2457953.99728 0.01167
2457961.91812 0.01261
"""
    s = pd.read_table(sio(s), sep='\s*')
    s['tc'] -= bjd0
    s['i_epoch'] = s.index
    s['i_epoch'] += 137
    s['i_planet'] = 1
    times_djh = s.copy()

    s = """\
tc tc_err
2457900.04646 0.02629
2457911.94202 0.03194
2457923.83491 0.03837
2457935.73184 0.04370
2457947.62592 0.04959
2457959.52420 0.05443
2457971.41940 0.05959
"""
    s = pd.read_table(sio(s), sep='\s*')
    s['tc'] -= bjd0
    s['i_epoch'] = s.index
    s['i_epoch'] += 91
    s['i_planet'] = 2
    times_djh = times_djh.append(s)
    times_djh.index = times_djh.i_planet
    return times_djh
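# Minimal, self-contained sketch of the pattern load_djh relies on: parsing a
# whitespace-separated table embedded in a string via an in-memory buffer. The two
# sample rows are taken from the snippet above; pd.read_csv with a regex separator
# (r'\s+') is the more common modern spelling of pd.read_table(..., sep='\s*').
from io import StringIO
import pandas as pd

sample = """\
tc tc_err
2457898.54880 0.00459
2457906.46919 0.00569
"""
df = pd.read_csv(StringIO(sample), sep=r'\s+')
# df['tc'] can then be shifted by a reference epoch, as load_djh does with bjd0.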
def test_xxx_dump_options(self):
    g = SG()
    N = [g.add_node(.1),
         g.add_node(.2),
         g.add_node(.3),
         g.add_node(.4)]
    N[0].connect(N[1])
    N[0].connect(N[2])
    N[1].connect(N[3])

    f = sio()
    g.dump(f, legacy=True)
    f.seek(0)
    print """LEGACY
########################
""" + f.getvalue()
    self.assertSameGraph(g, SizeGraph.readsg(f, ms=[.1, .2, .3, .4]))
    self.assertNotIn("#", f.getvalue())
    self.assertNotIn("MS", f.getvalue())

    f = sio()
    g.dump(f, comments=False)
    f.seek(0)
    print """COMMENTS
########################
""" + f.getvalue()
    self.assertSameGraph(g, SizeGraph.readsg(f))
    self.assertNotIn("#", f.getvalue())
    self.assertIn("MS", f.getvalue())
def save(self, key, obj):
    """Save object ``obj`` at location ``key``

    :param key: location to save
    :type key: str
    :param obj: object to save, will be pickled.
    :type obj: object
    """
    # Pickle and save to s3 bucket
    #buffer = sio.StringIO(pickle.dumps(obj))
    buffer = sio(pickle.dumps(obj))
    save_key = key
    if self.aws_prefix:
        save_key = '/'.join([self.aws_prefix, key])
    self.bucket.upload_fileobj(buffer, save_key)
    if self.verbose > 1:
        print("[{}: log] Saved file: {}".format(self.pp, save_key))
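# Illustrative sketch of the in-memory pickle buffer pattern behind save/load above
# (assumption: a boto3-style bucket object with upload_fileobj/download_fileobj, as
# those methods appear to use). The round trip below runs without S3.
import pickle
from io import BytesIO

obj = {'answer': 42}

out_buf = BytesIO(pickle.dumps(obj))   # what save() hands to bucket.upload_fileobj
# bucket.upload_fileobj(out_buf, 'some/key')   # hypothetical S3 call

in_buf = BytesIO()
in_buf.write(out_buf.getvalue())       # stands in for download_fileobj filling the buffer
in_buf.seek(0)                         # rewind before unpickling, as load() does
assert pickle.load(in_buf) == obj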
def regenerate_human_readable(iris_data, outfile):
    """
    Generate human readable, tabular version of the IRIS database.

    :param iris_data: String containing result string returned from IRIS query (without data errors).
    :type iris_data: str
    :param outfile: Output text file name
    :type outfile: str
    """
    print("Generating human readable version...")
    from seismic.inventory.pdconvert import inventory_to_dataframe
    import pandas as pd
    from obspy import read_inventory
    if sys.version_info[0] < 3:
        from cStringIO import StringIO as sio  # pylint: disable=import-error, unresolved-import
    else:
        from io import BytesIO as sio
    # end if

    iris_str = iris_data.encode('utf-8')
    print(" Ingesting query response into obspy...")
    obspy_input = sio(iris_str)
    try:
        station_inv = read_inventory(obspy_input)
    except:
        dumpfile = 'fdsn_stn_inv_dump.xml'
        print("FAILED ingesting server response into obspy, dumping server response string to " + dumpfile)
        with open(dumpfile, 'w') as f:
            f.write(iris_str.decode('utf-8'))
        raise
    # end try

    print(" Converting to dataframe...")
    inv_df = inventory_to_dataframe(station_inv)

    with pd.option_context("display.max_rows", None, "display.max_columns", None, "display.width", 1000):
        print(" Converting to tabular text file " + outfile)
        inv_str = str(inv_df)
        with open(outfile, "w") as f:
            f.write(inv_str)
def _checkForAnnouncements(self, irc):
    start = time.time()
    self.log.info('Checking mailbox for announcements.')
    pop = self._getPop(irc)
    i = None
    for (i, msg) in self._getMsgs(pop):
        message = rfc822.Message(sio(msg))
        frm = message.get('From')
        if not frm:
            self.log.warning('Received message without From header.')
            continue
        else:
            frm = frm.rstrip()
        subject = message.get('Subject', '').rstrip()
        content = message.fp.read()
        self.log.info('Received message with subject %q from %q.',
                      subject, frm)
        if subject == 'all':
            channels = list(irc.state.channels)
        else:
            channels = subject.split()
            if not channels or not all(irc.isChannel, channels):
                channels = list(self.registryValue('defaultChannels'))
                if subject:
                    content = '%s: %s' % (subject, content)
        if not channels:
            self.log.info('Received message with improper subject '
                          'line from %s.', frm)
            continue
        prefix = self.registryValue('prefix')
        content = utils.str.normalizeWhitespace(content)
        self.log.info('Making announcement to %L.', channels)
        chunks = textwrap.wrap(content, 350)
        for channel in channels:
            if channel in irc.state.channels:
                maximum = self.registryValue('limit', channel)
                for chunk in chunks[:maximum]:
                    s = self._formatChunk(
                        self._formatPrefix(prefix + " ") + chunk)
                    irc.queueMsg(ircmsgs.privmsg(channel, s))
            prefix = ''
    self._quit(pop)
    self.log.info('Finished checking mailbox, time elapsed: %s',
                  utils.timeElapsed(time.time() - start))
def test_read_sg_000_nodes(self):
    g = SizeGraph.readsg(sio())
    self.assertIsInstance(g, SG)

    """Just a Node"""
    g = SizeGraph.readsg(sio("1"))
    self.assertEqual(1, len(g.nodes))
    g = SizeGraph.readsg(sio("""1"""), [0.2])
    self.assertEqual(1, len(g.nodes))
    self.assertEqual(0.2, g.nodes.pop().phy)
    g = SizeGraph.readsg(sio("""1
MS 0.5"""))
    self.assertEqual(1, len(g.nodes))
    self.assertEqual(0.5, g.nodes.pop().phy)

    """Just Nodes"""
    g = SizeGraph.readsg(sio("4"))
    self.assertEqual(4, len(g.nodes))
    g = SizeGraph.readsg(sio("""4"""), [1, 2, 3, 4])
    self.assertEqual(4, len(g.nodes))
    for i in xrange(1, 5):
        self.assertIn(i, [n.phy for n in g.nodes])
    g = SizeGraph.readsg(sio("""4
MS 0.1 0.2 0.3 0.4"""))
    self.assertEqual(4, len(g.nodes))
    for i in range(1, 5):
        self.assertIn(i / 10.0, [n.phy for n in g.nodes])

    """ms instead of file value"""
    g = SizeGraph.readsg(sio("""4
MS 0.1 0.2 0.3 0.4"""), [1, 2, 3, 4])
    self.assertEqual(4, len(g.nodes))
    for i in range(1, 5):
        self.assertIn(i, [n.phy for n in g.nodes])
def assert_write_then_read_equivalence(config):
    fp = sio()
    config.writefp(fp)
    initial = fp.getvalue()
    initial_defaults = [(name, value.expectsValue() and value.isSet() and value.isDefault())
                        for (name, value) in config]
    fp.seek(0)
    config.readfp(fp)
    fp.seek(0)
    config.writefp(fp)
    subsequent = fp.getvalue()
    subsequent_defaults = [(name, value.expectsValue() and value.isSet() and value.isDefault())
                           for (name, value) in config]
    assert_equals(initial, subsequent)
    assert_equals(initial_defaults, subsequent_defaults)
def test_simple_defaults_commented_out():
    simple = makeSimpleWithDefaults()
    fp = sio()
    simple.writefp(fp)
    fp.seek(0)
    for line in fp:
        line = line.strip()
        if line:
            assert line.startswith('#'), 'Default is not commented out: %r' % line
    simple.int.set(0)
    simple.bool.set(False)
    simple.float.set(0.0)
    fp.seek(0)
    simple.writefp(fp)
    fp.seek(0)
    for line in fp:
        line = line.strip()
        if line:
            assert not line.startswith('#'), 'Non-default commented out: %r' % line
    assert_write_then_read_equivalence(simple)
def test_failing_deferToProcess(self):
    """
    Test failing subprocesses and the way they terminate and preserve
    failing information.
    """
    s = sio()
    a = FakeAMP(s)
    STRING = "ciao"
    BOOT = """\
import sys
def main(arg):
    raise Exception(arg)
main(sys.argv[1])
"""
    starter = main.ProcessStarter(bootstrap=BOOT, args=(STRING,),
                                  packages=("twisted",))
    ready, finished = starter.startPythonProcess(main.AMPConnector(a), "I'll be ignored")

    self.assertFailure(finished, error.ProcessTerminated)
    finished.addErrback(lambda reason: self.assertEquals(reason.getMessage(), STRING))
    return finished
def serialize(self, obj, stream=None, encoding=None):
    """
    Serialize an object to XML using this serializer's format string.

    If a stream is given, the result is encoded (encoding defaults to
    sys.getfilesystemencoding) and written to the stream. If no stream is
    given, the result is returned, either as a unicode string or encoded
    using the requested encoding.

    In any case, if the result is encoded, it is prefixed with an xml
    version tag containing the encoding, e.g. for UTF-8:

        <?xml version="1.0" encoding="UTF-8" ?>
    """
    if stream is None and encoding is None:
        _stream = _ListStream()
    elif stream is None:
        try:
            from cStringIO import StringIO as sio
        except ImportError:
            from StringIO import StringIO as sio
        _stream = StreamWriteEncoder(sio(), encoding)
    else:
        if encoding is None:
            encoding = sys.getfilesystemencoding()
        _stream = StreamWriteEncoder(stream, encoding)
    try:
        idx, tree = self._tag(0, None, obj)
    except IndexError:
        raise SerializationFormatError("Unexpected end of format", self.fmt, len(self.fmt))
    if idx != len(self.fmt):
        raise SerializationFormatError("Trailing format characters", self.fmt, idx)
    if encoding is not None:
        _stream.write('<?xml version="1.0" encoding="%s"?>' % encoding)
    self._write(_stream, tree)
    if stream is None:
        res = _stream.getvalue()
        _stream.close()
        return res
def test_startProcess(self):
    """
    Test that startProcess actually starts a subprocess and that it
    receives data back from the process through AMP.
    """
    s = sio()
    a = FakeAMP(s)
    STRING = "ciao"
    BOOT = """\
import sys, os
def main(arg):
    os.write(4, arg)
main(sys.argv[1])
"""
    starter = main.ProcessStarter(bootstrap=BOOT, args=(STRING,),
                                  packages=("twisted",))
    amp, finished = starter.startPythonProcess(main.AMPConnector(a))

    def _eb(reason):
        print reason
    finished.addErrback(_eb)
    return finished.addCallback(lambda _: self.assertEquals(s.getvalue(), STRING))
def __init__(self, s):
    self.fd = sio(s)
    self.last = None
def get_sio(tar, name):
    return sio(tar.extractfile(name).read())