def test_buffer_tail():
  # With infinite=False, EOF also terminates the current line, so partial data
  # becomes visible on the very next poll.
  writer = Compatibility.StringIO()
  buf = Buffer(writer, infinite=False)
  assert buf.next() is None
  write_and_rewind(writer, '1234')
  assert buf.next() == '1234'

  # With infinite=True (tail -f semantics), only a newline terminates a line:
  # partial data stays buffered until the '\n' arrives.
  writer = Compatibility.StringIO()
  buf = Buffer(writer, infinite=True)
  assert buf.next() is None
  write_and_rewind(writer, '1234')
  assert buf.next() is None
  write_and_rewind(writer, '\n')
  assert buf.next() == '1234'
def _generate_coverage_config(self, source_mappings):
  """Build the coverage config for this run.

  Seeds a parser with DEFAULT_COVERAGE_CONFIG, then adds a [paths] section so the
  `combine` coverage command can transform paths in coverage data files into
  canonical form, and [run] debug options when self._debug is set.

  :param source_mappings: dict mapping a canonical source root to an iterable of
    alternate paths treated as equivalent for coverage reporting.
  :returns: the populated config parser.
  """
  # Pass state explicitly instead of closing over loop variables that do not yet
  # exist when the helper is defined (the original relied on late binding).
  def add_realpath(realpaths, canonical, alternates, path):
    # For the benefit of macos testing, add the 'real' path to the directory as an equivalent.
    realpath = os.path.realpath(path)
    # Skip paths already covered by the canonical root or the alternates list.
    if realpath != canonical and realpath not in alternates:
      realpaths.add(realpath)

  cp = configparser.SafeConfigParser()
  cp.readfp(Compatibility.StringIO(self.DEFAULT_COVERAGE_CONFIG))

  # We use the source_mappings to setup the `combine` coverage command to transform paths in
  # coverage data files into canonical form.
  # See the "[paths]" entry here: http://nedbatchelder.com/code/coverage/config.html for details.
  cp.add_section('paths')
  for canonical, alternates in source_mappings.items():
    key = canonical.replace(os.sep, '.')
    realpaths = set()
    add_realpath(realpaths, canonical, alternates, canonical)
    for path in alternates:
      add_realpath(realpaths, canonical, alternates, path)
    cp.set('paths', key,
           self._format_string_list([canonical] + list(alternates) + list(realpaths)))

  # See the debug options here: http://nedbatchelder.com/code/coverage/cmd.html#cmd-run-debug
  if self._debug:
    debug_options = self._format_string_list([
        # Dumps the coverage config realized values.
        'config',
        # Logs which files are skipped or traced and why.
        'trace'])
    cp.set('run', 'debug', debug_options)
  return cp
def test_stream():
  """Exercise Stream over a finite fixture and then in tailing (infinite) mode."""
  stream = Stream(sio(), (GlogLine,))
  lines = read_all(stream, terminator=Stream.EOF)
  assert len(lines) == 3
  last_line = lines[-1]
  # does assembly of trailing non-GlogLines work properly?
  assert last_line.raw.startswith('I1101')
  assert TEST_GLOG_LINES[-len(last_line.raw):] == last_line.raw

  # test tailed logs
  writer = Compatibility.StringIO()
  stream = Stream(writer, (GlogLine,), infinite=True)
  assert stream.next() is None
  write_and_rewind(writer, lines[0].raw)
  assert stream.next() is None
  write_and_rewind(writer, '\n')
  # this is somewhat counterintuitive behavior -- we need to see two log lines in order
  # to print one, simply because otherwise we don't know if the current line is finished.
  # you could imagine a scenario, however, when you'd want (after a certain duration)
  # to print out whatever is in the buffers regardless. this should probably be the
  # default behavior in infinite=True, but it will add a lot of complexity to the
  # implementation.
  assert stream.next() is None
  write_and_rewind(writer, lines[1].raw)
  assert stream.next() == lines[0]
  assert stream.next() is None
  write_and_rewind(writer, '\n')
  # Fixed: was `== None`; use the identity check like every other assert here.
  assert stream.next() is None
  write_and_rewind(writer, lines[2].raw)
  assert stream.next() == lines[1]
def generate_coverage_config(target):
  """Create a coverage config for ``target`` whose [html] report directory lives
  under pants_distdir/coverage, mirroring the target's buildfile location."""
  config = configparser.ConfigParser()
  config.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))

  distdir = Config.load().getdefault('pants_distdir')
  buildfile_dir = os.path.dirname(target.address.buildfile.relpath)
  report_dir = os.path.join(distdir, 'coverage', buildfile_dir, target.name)
  safe_mkdir(report_dir)

  config.add_section('html')
  config.set('html', 'directory', report_dir)
  return config
def test_dump(self):
  """Properties.dump should escape spaces, ':', '=' and embedded newlines, and
  terminate each entry with a newline."""
  props = OrderedDict()
  props['a'] = 1
  # Value containing a trailing newline -- dumped as a backslash-continued line.
  props['b'] = '''2
'''
  props['c'] = ' 3 : ='
  out = Compatibility.StringIO()
  Properties.dump(props, out)
  # assertEquals is a deprecated alias; use assertEqual.
  self.assertEqual('a=1\nb=2\\\n\nc=\\ 3\\ \\:\\ \\=\n', out.getvalue())
def synthesize_thread_stacks():
  """Render the stacks of all live threads into a single string.

  Returns '' when the process looks single-threaded (only the current frame is
  running), otherwise an 'All threads:' dump produced via
  BasicExceptionHandler.print_stack.
  """
  threads = dict((th.ident, th) for th in threading.enumerate())
  ostr = Compatibility.StringIO()
  # _current_frames not yet implemented on pypy and not guaranteed anywhere but
  # cpython in practice.
  if hasattr(sys, '_current_frames'):
    # Hoisted: take one snapshot instead of three separate (and potentially
    # inconsistent) _current_frames() calls.
    frames = sys._current_frames()
    # Fixed for Python 3: dict views are not subscriptable, so .values()[0]
    # would raise TypeError; next(iter(...)) works on both 2 and 3.
    if len(frames) > 1 or next(iter(frames.values())) != inspect.currentframe():
      # Multi-threaded
      ostr.write('\nAll threads:\n')
      for thread_id, stack in frames.items():
        # NOTE(review): threads[thread_id] can KeyError for threads not visible
        # to threading.enumerate() (e.g. foreign/dummy threads) -- confirm whether
        # print_stack should tolerate a None thread before changing to .get().
        BasicExceptionHandler.print_stack(thread_id, threads[thread_id], stack, ostr, indent=2)
  return ostr.getvalue()
def generate_coverage_config(targets):
  """Create a coverage config for ``targets`` with an [html] report directory
  under pants_distdir/coverage; a single target gets a path mirroring its
  buildfile location, multiple targets get a synthesized identity."""
  config = configparser.ConfigParser()
  config.readfp(Compatibility.StringIO(DEFAULT_COVERAGE_CONFIG))
  config.add_section('html')

  if len(targets) == 1:
    only_target = targets[0]
    buildfile_dir = os.path.dirname(only_target.address.buildfile.relpath)
    relpath = os.path.join(buildfile_dir, only_target.name)
  else:
    relpath = Target.maybe_readable_identify(targets)

  report_dir = os.path.join(Config.load().getdefault('pants_distdir'), 'coverage', relpath)
  safe_mkdir(report_dir)
  config.set('html', 'directory', report_dir)
  return config
def test_tracing_filter():
  """The Tracer predicate gates which log() calls reach the output stream."""
  # No predicate: everything is emitted.
  out = Compatibility.StringIO()
  unfiltered = Tracer(output=out)
  unfiltered.log('hello world')
  assert out.getvalue() == 'hello world\n'

  # Threshold predicate: only messages at verbosity >= 1 are emitted.
  out = Compatibility.StringIO()
  thresholded = Tracer(output=out, predicate=lambda verbosity: verbosity >= 1)
  thresholded.log('hello world')
  assert out.getvalue() == ''
  thresholded.log('hello world', V=1)
  assert out.getvalue() == 'hello world\n'
  thresholded.log('ehrmagherd', V=2)
  assert out.getvalue() == 'hello world\nehrmagherd\n'

  # Non-monotonic predicate: only even verbosity levels are emitted.
  out = Compatibility.StringIO()
  evens_only = Tracer(output=out, predicate=lambda verbosity: (verbosity % 2 == 0))
  evens_only.log('hello world', V=0)
  assert out.getvalue() == 'hello world\n'
  evens_only.log('morf gorf', V=1)
  assert out.getvalue() == 'hello world\n'
  evens_only.log('ehrmagherd', V=2)
  assert out.getvalue() == 'hello world\nehrmagherd\n'
def test_download_listener(self):
  # Record phase: enumerate the chunks the mocked GET will yield, expecting the
  # wrapped listener to receive each chunk in order and then be finished().
  downloaded = ''
  for chunk in self.expect_get('http://foo', chunk_size_bytes=1048576, timeout_secs=3600):
    self.listener.recv_chunk(chunk)
    downloaded += chunk
  self.listener.finished()
  self.response.close()
  self.mox.ReplayAll()
  # Replay phase: DownloadListener should tee every chunk into fp while still
  # forwarding events to the underlying (mocked) listener.
  with closing(Compatibility.StringIO()) as fp:
    self.fetcher.fetch('http://foo',
                       Fetcher.DownloadListener(fp).wrap(self.listener),
                       chunk_size=Amount(1, Data.MB),
                       timeout=Amount(1, Time.HOURS))
    self.assertEqual(downloaded, fp.getvalue())
def test_tracing_timed():
  sio = Compatibility.StringIO()
  clock = ThreadedClock()
  final_trace = []

  # Intercept print_trace so we can inspect the finished parent trace object
  # directly instead of parsing formatted output.
  class PrintTraceInterceptor(Tracer):
    def print_trace(self, *args, **kw):
      final_trace.append(self._local.parent)

  # predicate rejects every verbosity, so nothing should be written to sio.
  tracer = PrintTraceInterceptor(output=sio, clock=clock, predicate=lambda v: False)
  assert not hasattr(tracer._local, 'parent')

  # Nested timed() sections, each advancing the fake clock by 1 second.
  with tracer.timed('hello'):
    clock.tick(1.0)
    with tracer.timed('world 1'):
      clock.tick(1.0)
    with tracer.timed('world 2'):
      clock.tick(1.0)

  # Only the outermost timed() completes a trace and triggers print_trace.
  assert len(final_trace) == 1
  final_trace = final_trace[0]
  assert final_trace._start == 0
  assert final_trace._stop == 3
  assert final_trace.duration() == 3
  assert final_trace.msg == 'hello'

  # The two inner sections become ordered children of the root trace.
  assert len(final_trace.children) == 2
  child = final_trace.children[0]
  assert child._start == 1
  assert child._stop == 2
  assert child.parent is final_trace
  assert child.msg == 'world 1'
  child = final_trace.children[1]
  assert child._start == 2
  assert child._stop == 3
  assert child.parent is final_trace
  assert child.msg == 'world 2'

  # should not log if verbosity low
  assert sio.getvalue() == ''
def iter_tokens(cls, blob):
  """Tokenize python source held in a string.

  :param blob: Input string with python file contents
  :return: iterator of tokens as produced by tokenize.generate_tokens
  """
  readline = Compatibility.StringIO(blob).readline
  return tokenize.generate_tokens(readline)
def format(exctype, value, tb):
  """Render an uncaught exception -- traceback plus all thread stacks -- as one string."""
  sections = ['Uncaught exception:\n']
  sections.append(''.join(traceback.format_exception(exctype, value, tb)))
  sections.append(BasicExceptionHandler.synthesize_thread_stacks())
  return ''.join(sections)
def sio():
  # Fresh in-memory stream over the canned glog fixture, positioned at offset 0.
  return Compatibility.StringIO(TEST_GLOG_LINES)
def iter_tokens(cls, blob):
  """Tokenize python source held in a string.

  :param blob: Input string with python file contents
  :return: iterator of tokens as produced by tokenize.generate_tokens
  """
  return tokenize.generate_tokens(Compatibility.StringIO(blob).readline)