def _run_complexity_analysis(on_ci):
    """Generates cyclomatic complexity reports for the package

    :param bool on_ci: Indicates whether an automated tool is running this
        operation. Output will be customized for machine readability.
    """
    modlog.debug("Running complexity analysis")

    # Radon is imported lazily so the rest of the module loads without it.
    from radon.cli import cc

    # generate cyclomatic complexities for source files in XML format for
    # integration with external tools
    source_path = os.path.join(os.getcwd(), "pyjen")

    # TODO: output in XML format when running on CI
    captured = StringIO()
    with redirect_stdout(captured):
        modlog.debug("Calling radon.cc")
        cc(paths=[source_path],
           show_complexity=True,
           show_closures=True,
           total_average=True,
           xml=on_ci)

    modlog.debug("Writing report to disk")
    report_path = os.path.join(log_folder, "radon_complexity.xml")
    with open(report_path, "w") as report_handle:
        report_handle.write(captured.getvalue())
    captured.close()

    modlog.info("Cyclomatic complexity analysis complete. See " +
                os.path.relpath(report_path))
def test_simple_json_output():
    """A single pylint message should serialize to the expected JSON record."""
    output = StringIO()

    reporter = JSONReporter()
    linter = PyLinter(reporter=reporter)
    checkers.initialize(linter)

    # Disable persistence so no stats file is written during the test.
    linter.config.persistent = 0
    linter.reporter.set_output(output)
    linter.open()
    linter.set_current_module("0123")
    linter.add_message("line-too-long", line=1, args=(1, 2))

    # we call this method because we didn't actually run the checkers
    reporter.display_messages(None)

    expected_result = [
        [
            ("column", 0),
            ("line", 1),
            ("message", "Line too long (1/2)"),
            ("message-id", "C0301"),
            ("module", "0123"),
            ("obj", ""),
            ("path", "0123"),
            ("symbol", "line-too-long"),
            ("type", "convention"),
        ]
    ]
    report_result = json.loads(output.getvalue())
    # Sort each record's items so the comparison is key-order independent.
    report_result = [sorted(report_result[0].items(), key=lambda item: item[0])]
    assert report_result == expected_result
def test_lazy_load_index():
    """LazyJSON should record per-key offsets and sizes from a dumped stream."""
    f = StringIO()
    dump({'wakka': 42}, f)
    f.seek(0)
    lj = LazyJSON(f)
    # NOTE(review): the literal numbers (offset 10, size 2, total 14) encode
    # the on-disk layout produced by dump(); confirm against the LazyJSON
    # format spec before changing either side.
    assert_equal({'wakka': 10, '__total__': 0}, lj.offsets)
    assert_equal({'wakka': 2, '__total__': 14}, lj.sizes)
def excerpt(mass, note_indices):
    """Return a MusicXML excerpt of ``mass`` covering the given notes,
    with the selected notes coloured red.

    :param mass: mass name; resolved to ``PALESTRINA_PATH + mass + '.mid.xml'``
    :param note_indices: comma-separated indices into the flattened point set,
        e.g. ``"3,5,9"`` — assumed sorted ascending (TODO confirm with callers)
    :return: flask Response with ``application/xml`` content
    """
    from patternfinder.geometric_helsinki.geometric_notes import NotePointSet
    score = music21.converter.parse(PALESTRINA_PATH + mass + '.mid.xml')
    pointset = list(NotePointSet(score).flat.notes)

    pointset_indices = [int(i) for i in note_indices.split(',')]
    score_note_ids = [pointset[i].original_note_id for i in pointset_indices]

    # Get stream excerpt
    _, start_measure = score.beatAndMeasureFromOffset(
        pointset[pointset_indices[0]].offset)
    # NOTE(review): the end offset uses pointset[-1] (last note of the whole
    # piece), not pointset[pointset_indices[-1]] — possibly intentional
    # padding, possibly a bug; confirm.
    _, end_measure = score.beatAndMeasureFromOffset(
        pointset[pointset_indices[-1]].offset +
        pointset[-1].duration.quarterLength - 1)
    excerpt = score.measures(numberStart=start_measure.number,
                             numberEnd=end_measure.number)

    # Colour notes
    for note in excerpt.flat.notes:
        if note.id in score_note_ids:
            note.style.color = 'red'

    # Delete part names (midi files have bad data)
    for part in excerpt:
        part.partName = ''

    sx = music21.musicxml.m21ToXml.ScoreExporter(excerpt)
    musicxml = sx.parse()

    # sx.dump() writes to stdout, so capture it by swapping the stream;
    # restored immediately afterwards.
    from io import StringIO
    import sys
    bfr = StringIO()
    sys.stdout = bfr
    sx.dump(musicxml)
    output = bfr.getvalue()
    sys.stdout = sys.__stdout__

    return Response(output, mimetype='application/xml')
def stylesxml(self):
    """
    Generates the styles.xml file
    @return valid XML code as a unicode string
    """
    out = StringIO()
    out.write(_XMLPROLOGUE)

    root = DocumentStyles()
    root.write_open_tag(0, out)
    if self.fontfacedecls.hasChildNodes():
        self.fontfacedecls.toXml(1, out)
    self.styles.toXml(1, out)

    # Emit only the automatic styles actually referenced by master styles.
    autostyles = AutomaticStyles()
    autostyles.write_open_tag(1, out)
    for style in self._used_auto_styles([self.masterstyles]):
        style.toXml(2, out)
    autostyles.write_close_tag(1, out)

    if self.masterstyles.hasChildNodes():
        self.masterstyles.toXml(1, out)
    root.write_close_tag(0, out)

    result = out.getvalue()
    assert(type(result)==type(u""))
    return result
def run(self):
    """Load the corpus (XML or plain text), run the analyser over it, and
    store the per-line results in ``self.result``.

    :return: 0 on completion; ``self.timer`` holds the analyser wall time
    """
    try:
        # Try parsing as XML
        root = etree.parse(self.fn)
        ns = "{%s}" % schemas['corpus']
        out = StringIO()
        for i in root.getiterator(ns + "entry"):
            out.write(i.text + "\n")
        self.corpus = out.getvalue()
        del out
    except Exception:
        # Turns out it's not XML.
        # NOTE(review): this branch stores a *file object* while the XML
        # branch stores a string; downstream apparently handles both —
        # confirm before unifying the types.
        self.corpus = open(self.fn, 'r')

    # Test existence/readability of the dictionary, closing the handle
    # immediately instead of leaking it.
    try:
        with open(self.dct):
            pass
    except IOError:
        raise  # TODO: wrap error for output

    if not self.result:
        delim = re.compile(r"\$[^^]*\^")
        with open(self.fn, 'r') as f:
            data = f.read()
        output = destxt(data).encode('utf-8')

        timing_begin = time.time()
        proc = Popen([self.app] + self.app_args + [self.dct],
                     stdin=PIPE, stdout=PIPE, close_fds=True)
        output = str(proc.communicate(output)[0].decode('utf-8'))
        self.timer = time.time() - timing_begin

        output = retxt(output)
        # Re-split analyses at "$...^" boundaries, one per line.
        output = delim.sub("$\n^", output)
        self.result = output.split('\n')
    return 0
def test_low_mapq(self):
    """ We no longer fail reads because of low mapq.

    When we use more than one reference, reads can receive low mapq if
    they are in a conserved region that matches more than one reference.
    """
    # Example_read_2's forward mate has mapq 8 (low); both reads must
    # still align and nothing may land in the failed CSV.
    remap_file = StringIO("""\
qname,flag,rname,pos,mapq,cigar,rnext,pnext,tlen,seq,qual
Example_read_1,99,V3LOOP,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Example_read_1,147,V3LOOP,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Example_read_2,99,INT,1,8,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Example_read_2,147,INT,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
""")
    expected_aligned_csv = """\
refname,qcut,rank,count,offset,seq
INT,15,0,1,0,TGTACAAGACCCAACAACAATACAAGAAAAAG
V3LOOP,15,0,1,0,TGTACAAGACCCAACAACAATACAAGAAAAAG
"""
    # Header only: no read should be recorded as failed.
    expected_failed_csv = """\
qname,cause
"""
    actual_aligned_csv = StringIO()
    actual_failed_csv = StringIO()
    sam2aln(remap_file, actual_aligned_csv, failed_csv=actual_failed_csv)

    self.assertMultiLineEqual(expected_aligned_csv,
                              actual_aligned_csv.getvalue())
    self.assertMultiLineEqual(expected_failed_csv,
                              actual_failed_csv.getvalue())
def _open(self):
    # NOTE(review): Python 2-era code — xrange, str-typed binary reads and
    # integer "/" division; it will not run unmodified on Python 3.
    # Parse the 24-byte FTEX header: magic, version, width, height,
    # mipmap count and format count.
    header = StringIO(self.fp.read(24))
    magic = header.read(4)
    if magic != "FTEX":
        raise ValueError("not a FTEX file")
    version = unpack("i", header.read(4))
    self.size = unpack("ii", header.read(8))
    # Bytes per row of DXT1 blocks: one 8-byte block per 4 pixels of width.
    linesize = (self.size[0] + 3) / 4 * 8
    mipmap_count, format_count = unpack("ii", header.read(8))

    self.mode = "RGB"
    self.tile = []

    # Each format record is (format id, file offset of the pixel data).
    for i in range(format_count):
        format, where = unpack("ii", self.fp.read(8))
        if format == 0:
            # DXT1-compressed: decode row-blocks and hand the raw RGB
            # data to the image core directly.
            data = []
            self.fp.seek(where)
            size, = unpack("i", self.fp.read(4))
            for yb in xrange((self.size[1] + 3) / 4):
                decoded = dxtc.decodeDXT1(self.fp.read(linesize))
                for d in decoded:
                    data.append(d)
            data = "".join(data[:self.size[1]])
            self.im = Image.core.new(self.mode, self.size)
            return self.fromstring(data)
        elif format == 1:
            # Uncompressed RGB
            self.tile.append(("raw", (0, 0) + self.size, where+4, (self.mode, 0, 1)))
        else:
            raise ValueError("Invalid texture format (expected 0 or 1, got %i)" % (format))
def cmd(self, *args, **kw):
    """Run the archiver with ``args`` and return its captured output.

    Keyword args:
        exit_code: expected return code (default 0); a mismatch fails the test.
        fork: when True, run in a subprocess instead of in-process.
    """
    exit_code = kw.get('exit_code', 0)
    fork = kw.get('fork', False)
    if fork:
        try:
            output = subprocess.check_output(
                (sys.executable, '-m', 'borg.archiver') + args)
            ret = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            ret = e.returncode
        output = os.fsdecode(output)
        if ret != exit_code:
            print(output)
        self.assert_equal(exit_code, ret)
        return output
    args = list(args)
    stdin, stdout, stderr = sys.stdin, sys.stdout, sys.stderr
    try:
        # Run in-process with stdin/stdout/stderr swapped to StringIO.
        sys.stdin = StringIO()
        output = StringIO()
        sys.stdout = sys.stderr = output
        ret = self.archiver.run(args)
        # Restore the real streams *before* the diagnostic print so the
        # failure output is visible; the finally clause repeats this to
        # cover the exception path.
        sys.stdin, sys.stdout, sys.stderr = stdin, stdout, stderr
        if ret != exit_code:
            print(output.getvalue())
        self.assert_equal(exit_code, ret)
        return output.getvalue()
    finally:
        sys.stdin, sys.stdout, sys.stderr = stdin, stdout, stderr
def flush(self):
    """Flush the wrapped file and this buffer, then drain any queued text
    to the inter-process queue tagged with the current pid."""
    self.file.flush()
    # Flush our own StringIO side as well (explicit base call, no super).
    StringIO.flush(self)
    if self.queue is not None:
        pending = ''.join(self.queue_buffer)
        self.queue.put((current_process().pid, pending))
        self.queue_buffer = []
def test_import_loop(self):
    """collectstatic must abort with a clear error when post-processing
    never converges (an import loop between processed assets)."""
    # Clear the cached finder so this test sees a fresh configuration.
    finders.get_finder.cache_clear()
    err = StringIO()
    with self.assertRaisesMessage(RuntimeError, 'Max post-process passes exceeded'):
        call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
    self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue())
    self.assertPostCondition()
def __repr__(self):
    """Render this node via __call__; detached nodes use the default repr."""
    if self._parent is None:
        # No parent: nothing meaningful to render, fall back to object repr.
        return super().__repr__()
    buf = StringIO()
    self.__call__(out=buf)
    return buf.getvalue()
def call_command_returns(*args, **kwargs):
    """Invoke Django's call_command and return its captured stdout, stripped."""
    from django.core.management import call_command
    captured = StringIO()
    kwargs['stdout'] = captured
    call_command(*args, **kwargs)
    return captured.getvalue().strip()
def scenario_parse_args_exits(self, argv):
    """Parse ``argv`` expecting the parser to exit.

    :param argv: argument vector handed to ``args.parse``
    :return: ``(stdout_text, stderr_text, exit_code)`` captured from the run
    """
    with self.assertRaises(SystemExit) as cm:
        stdout = StringIO()
        stderr = StringIO()
        with Redirect(stdout=stdout, stderr=stderr):
            # Return value intentionally discarded: parse() is expected to
            # raise SystemExit before producing a result.
            args.parse(argv)
    return (stdout.getvalue(), stderr.getvalue(), cm.exception.code)
def export_set(dataset):
    """HTML representation of a Dataset."""
    page = markup.page()
    page.table.open()

    if dataset.headers is not None:
        # None headers become empty strings so the markup stays valid.
        cleaned_headers = ['' if item is None else item
                           for item in dataset.headers]
        page.thead.open()
        page.tr(markup.oneliner.th(cleaned_headers))
        page.thead.close()

    for row in dataset:
        cells = ['' if item is None else item for item in row]
        page.tr(markup.oneliner.td(cells))

    page.table.close()

    stream = StringIO()
    stream.writelines(str(page))
    return stream.getvalue()
def results(id):
    """Run the stored query with the given id; render a preview or stream a
    CSV download when ``?download=`` is set."""
    # Look up the stored query's metadata (parameterized — safe from injection).
    select_stmt = "SELECT q.name, d.name as department, q.description, q.query " \
        "FROM query q JOIN departments d ON q.department_id = d.id " \
        "WHERE q.id=%s;"
    with RealDictConnection(dsn=local) as conn:
        with conn.cursor() as cursor:
            cursor.execute(select_stmt, (str(id), ))
            res = cursor.fetchone()
    if res:
        # Execute the stored query text itself on a fresh connection.
        # NOTE(review): res['query'] is executed verbatim — acceptable only
        # if stored queries are admin-authored/trusted; confirm.
        with RealDictConnection(dsn=local) as conn:
            with conn.cursor() as cursor:
                cursor.execute(res['query'])
                result = cursor.fetchall()
        header = result[0].keys()
        if request.args.get('download', '').strip():
            # CSV download path: serialize all rows.
            si = StringIO()
            f = csv.writer(si)
            f.writerow(header)
            f.writerows([row.values() for row in result])
            output = make_response(si.getvalue())
            output.headers["Content-Disposition"] = "attachment; filename=%s.csv" \
                % str(res['name'])
            output.headers["Content-type"] = "text/csv"
            return output
        else:
            # Preview path: only the first five rows are shown.
            return render_template('results.html', details=res, rows=result[0:5],
                                   id=id, header=header)
    else:
        return 'Query with id %s does not exist!' % str(id)
def assertBlock(self, python, java):
    """Transpile ``python`` source to Java bytecode, read it back, and
    compare the disassembly against the expected ``java`` listing."""
    self.maxDiff = None
    dump = False  # flip to True to print the disassembly while debugging

    py_block = PyBlock(parent=PyModule('test', 'test.py'))
    if python:
        python = adjust(python)
        code = compile(python, '<test>', 'exec')
        py_block.extract(code, debug=dump)

    java_code = py_block.transpile()

    # Serialize the transpiled code into a class-file byte stream.
    out = BytesIO()
    constant_pool = ConstantPool()
    java_code.resolve(constant_pool)

    constant_pool.add(Utf8('test'))
    constant_pool.add(Utf8('Code'))
    constant_pool.add(Utf8('LineNumberTable'))

    writer = ClassFileWriter(out, constant_pool)
    java_code.write(writer)

    # Read the bytes back, dumping the disassembly into `debug`.
    debug = StringIO()
    reader = ClassFileReader(BytesIO(out.getbuffer()), constant_pool, debug=debug)
    JavaCode.read(reader, dump=0)

    if dump:
        print(debug.getvalue())

    java = adjust(java)
    # java[1:] — presumably drops a leading newline left by adjust();
    # TODO confirm against adjust()'s behaviour.
    self.assertEqual(debug.getvalue(), java[1:])
class DistantInteractiveConsole(InteractiveConsole):
    """InteractiveConsole driven over an IPC channel: stdout/stderr are
    captured into a buffer between prompts and shipped to the peer.

    WARNING: set_buffer/unset_buffer mutate the process-global sys.stdout
    and sys.stderr, so only one instance should be active at a time.
    """

    def __init__(self, ipc):
        InteractiveConsole.__init__(self, globals())
        self.ipc = ipc
        self.set_buffer()

    def set_buffer(self):
        # Start capturing: both streams go into a fresh in-memory buffer.
        self.out_buffer = StringIO()
        sys.stdout = sys.stderr = self.out_buffer

    def unset_buffer(self):
        # Stop capturing: restore the real streams and return what was
        # captured; the buffer is closed and must be re-created.
        sys.stdout, sys.stderr = sys.__stdout__, sys.__stderr__
        value = self.out_buffer.getvalue()
        self.out_buffer.close()
        return value

    def raw_input(self, prompt=""):
        # Send accumulated output plus the prompt, block for the next
        # command from the peer, then resume capturing.
        output = self.unset_buffer()
        # payload format: 'prompt' ? '\n' 'output'
        self.ipc.send('\n'.join((prompt, output)))
        cmd = self.ipc.recv()
        self.set_buffer()
        return cmd
def test_insertion(self):
    """A 6-base insertion (12M6I14M) must be clipped from the aligned
    sequence and reported separately in the insertion CSV for both mates."""
    remap_file = StringIO("""\
qname,flag,rname,pos,mapq,cigar,rnext,pnext,tlen,seq,qual
Example_read_1,99,V3LOOP,1,44,12M6I14M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Example_read_1,147,V3LOOP,1,44,12M6I14M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
""")
    # Aligned sequence is the read minus the 6 inserted bases.
    expected_aligned_csv = """\
refname,qcut,rank,count,offset,seq
V3LOOP,15,0,1,0,TGTACAAGACCCAATACAAGAAAAAG
"""
    # One insertion record per mate (forward and reverse), at position 12.
    expected_insert_csv = """\
qname,fwd_rev,refname,pos,insert,qual
Example_read_1,F,V3LOOP,12,AACAAC,AAAAAA
Example_read_1,R,V3LOOP,12,AACAAC,AAAAAA
"""
    actual_aligned_csv = StringIO()
    actual_insert_csv = StringIO()
    sam2aln(remap_file, actual_aligned_csv, actual_insert_csv)

    self.assertMultiLineEqual(expected_aligned_csv,
                              actual_aligned_csv.getvalue())
    self.assertMultiLineEqual(expected_insert_csv,
                              actual_insert_csv.getvalue())
class ContentState( State ):
    """
    This state records every line it meets as content until it reaches a line
    notifying a deposit reference or a publication reference.

    Can return ``self`` or :class:`.PublicationState` or :class:`.DepositState`.
    """

    def __init__( self, store ):
        State.__init__( self, store )
        self.textIO = StringIO()

    def process_line( self, line ):
        # A publication or deposit marker terminates the content block:
        # flush what we have and delegate the line to the next state.
        if publication_line_re.match( line ):
            self.store_content()
            return PublicationState( self.store ).process_line( line )
        if deposit_line_re.match( line ):
            self.store_content()
            return DepositState( self.store ).process_line( line )
        # Ordinary content: accumulate it and stay in this state.
        self.add_content_line( line )
        globalOutputter.writeTagLine( 'TXT', line )
        return self

    def add_content_line( self, line ):
        # Lines are stored newline-terminated.
        self.textIO.write( line )
        self.textIO.write( '\n' )

    def store_content( self ):
        # Publish the accumulated text onto the current article record.
        self.store['current_article']['content'] = self.textIO.getvalue()
def test_low_read_quality(self):
    """A read pair whose quality string is mostly '0' must be rejected
    with cause 'manyNs', while the good pair still aligns."""
    # Example_read_1 has 18 low-quality positions; Example_read_2 is clean.
    remap_file = StringIO("""\
qname,flag,rname,pos,mapq,cigar,rnext,pnext,tlen,seq,qual
Example_read_1,99,V3LOOP,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,000000000000000000AAAAAAAAAAAAAA
Example_read_1,147,V3LOOP,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,000000000000000000AAAAAAAAAAAAAA
Example_read_2,99,INT,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Example_read_2,147,INT,1,44,32M,=,1,-32,TGTACAAGACCCAACAACAATACAAGAAAAAG,AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
""")
    expected_aligned_csv = """\
refname,qcut,rank,count,offset,seq
INT,15,0,1,0,TGTACAAGACCCAACAACAATACAAGAAAAAG
"""
    expected_failed_csv = """\
qname,cause
Example_read_1,manyNs
"""
    actual_aligned_csv = StringIO()
    actual_failed_csv = StringIO()
    sam2aln(remap_file, actual_aligned_csv, failed_csv=actual_failed_csv)

    self.assertMultiLineEqual(expected_aligned_csv,
                              actual_aligned_csv.getvalue())
    self.assertMultiLineEqual(expected_failed_csv,
                              actual_failed_csv.getvalue())
def test_opened_file(self):
    """open_if_filename() must pass an already-open file object through
    unchanged and signal that the caller should not close it."""
    buf = StringIO()
    buf.write('test_data')
    buf.seek(0)
    file, close = open_if_filename(buf)
    # Existing file objects are not owned by the helper.
    assert not close
    eq_('test_data', file.read())
def contentxml(self):
    """
    Generates the content.xml file
    @return a bytestream in UTF-8 encoding
    """
    out = StringIO()
    out.write(_XMLPROLOGUE)

    root = DocumentContent()
    root.write_open_tag(0, out)
    if self.scripts.hasChildNodes():
        self.scripts.toXml(1, out)
    if self.fontfacedecls.hasChildNodes():
        self.fontfacedecls.toXml(1, out)

    # Automatic styles actually referenced anywhere in the document; an
    # empty element is emitted when none are in use.
    autostyles = AutomaticStyles()
    used_styles = self._used_auto_styles([self.styles, self.automaticstyles, self.body])
    if used_styles:
        autostyles.write_open_tag(1, out)
        for style in used_styles:
            style.toXml(2, out)
        autostyles.write_close_tag(1, out)
    else:
        autostyles.toXml(1, out)

    self.body.toXml(1, out)
    root.write_close_tag(0, out)
    return out.getvalue().encode("utf-8")
def test_with_thumbor_disabled(self) -> None:
    """With THUMBOR_URL unset, /thumbnail must redirect (302) instead of
    thumbnailing: local uploads to their own URL, http external images to
    the camo CDN URL."""
    self.login(self.example_email("hamlet"))
    fp = StringIO("zulip!")
    fp.name = "zulip.jpeg"

    # Upload a file so we have a local /user_uploads/ URI to request.
    result = self.client_post("/json/user_uploads", {'file': fp})
    self.assert_json_success(result)
    json = ujson.loads(result.content)
    self.assertIn("uri", json)
    uri = json["uri"]
    base = '/user_uploads/'
    self.assertEqual(base, uri[:len(base)])

    # Local upload: redirected straight back to the upload URI.
    quoted_uri = urllib.parse.quote(uri[1:], safe='')
    with self.settings(THUMBOR_URL=''):
        result = self.client_get("/thumbnail?url=%s&size=original" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    self.assertEqual(uri, result.url)

    # External https image: redirected to the original URL unchanged.
    uri = 'https://www.google.com/images/srpr/logo4w.png'
    quoted_uri = urllib.parse.quote(uri, safe='')
    with self.settings(THUMBOR_URL=''):
        result = self.client_get("/thumbnail?url=%s&size=original" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    self.assertEqual(uri, result.url)

    # External http image: redirected to the proxied CDN URL instead.
    uri = 'http://www.google.com/images/srpr/logo4w.png'
    quoted_uri = urllib.parse.quote(uri, safe='')
    with self.settings(THUMBOR_URL=''):
        result = self.client_get("/thumbnail?url=%s&size=original" % (quoted_uri))
    self.assertEqual(result.status_code, 302, result)
    base = 'https://external-content.zulipcdn.net/7b6552b60c635e41e8f6daeb36d88afc4eabde79/687474703a2f2f7777772e676f6f676c652e636f6d2f696d616765732f737270722f6c6f676f34772e706e67'
    self.assertEqual(base, result.url)
def testYAMLConfigFileParser_All(self):
    """Round-trip a YAML config (scalar + list args) through
    YAMLConfigFileParser.parse and .serialize."""
    try:
        import yaml
    except ImportError:
        # PyYAML is an optional dependency: skip, don't fail.  Catching
        # ImportError specifically (rather than a bare except) lets real
        # errors such as KeyboardInterrupt propagate.
        logging.warning("WARNING: PyYAML not installed. "
                        "Couldn't test YAMLConfigFileParser")
        return

    p = configargparse.YAMLConfigFileParser()

    # test the all syntax case
    config_lines = [
        "a: '3'",
        "list_arg:",
        "- 1",
        "- 2",
        "- 3",
    ]

    # test parse
    input_config_str = StringIO("\n".join(config_lines)+"\n")
    parsed_obj = p.parse(input_config_str)

    # test serialize: must reproduce the original document exactly
    output_config_str = p.serialize(parsed_obj)
    self.assertEqual(input_config_str.getvalue(), output_config_str)

    self.assertDictEqual(parsed_obj, dict([
        ('a', '3'),
        ('list_arg', [1,2,3]),
    ]))
def test_run_api_500_error():
    """Runner should exit with code 1 and surface the server error text
    when the coverage API responds with HTTP 500."""
    os.environ["CODECLIMATE_API_HOST"] = "http://example.com"
    api_mock = ApiMock()
    api_mock.setup(500)

    err = StringIO()
    runner = Runner(["--file", "./coverage.txt", "--token", "token"], err=err)

    # The reporter needs a git repository with at least one commit.
    orig_dir = os.getcwd()
    os.chdir("./tests/fixtures")
    subprocess.call(["git", "init"])
    subprocess.call(["git", "config", "user.name", "Test User"])
    subprocess.call(["git", "config", "user.email", "*****@*****.**"])
    subprocess.call(["git", "commit", "--allow-empty", "--message", "init"])

    try:
        return_code = runner.run()
        assert(return_code == 1)
        assert("500 Server Error" in err.getvalue().strip())
    finally:
        # Always undo the env var, cwd change, and throwaway git repo.
        del os.environ["CODECLIMATE_API_HOST"]
        os.chdir(orig_dir)
        shutil.rmtree("./tests/fixtures/.git")
        api_mock.cleanup()
def get_tokens(self, text, unfiltered=False):
    """
    Return an iterable of (tokentype, value) pairs generated from
    `text`. If `unfiltered` is set to `True`, the filtering mechanism
    is bypassed even if filters are defined.

    Also preprocess the text, i.e. expand tabs and strip it if
    wanted and applies registered filters.
    """
    if isinstance(text, str):
        if self.stripall:
            text = text.strip()
        elif self.stripnl:
            text = text.strip('\n')

    # NOTE(review): Python 2 compatibility shim — on Python 3 the first
    # branch is dead code since sys.version_info[0] >= 3.
    if sys.version_info[0] < 3 and isinstance(text, str):
        text = StringIO(text.encode('utf-8'))
        self.encoding = 'utf-8'
    else:
        text = StringIO(text)

    def streamer():
        # Drop the position element; callers only want (type, value).
        for i, t, v in self.get_tokens_unprocessed(text):
            yield t, v
    stream = streamer()
    if not unfiltered:
        stream = apply_filters(stream, self.filters, self)
    return stream
def correct_INCA_format(fp):
    """Normalise an INCA-exported parameter file into tab-separated
    key/value lines.

    :param fp: open, seekable text file object
    :return: ``fp`` rewound to the start, or a new StringIO holding the
        rewritten lines when the file used the "(MLX:: ...)" dialect
    """
    fp_list = list()
    fp.seek(0)
    # Only files whose first line contains '(' need rewriting; anything
    # else is returned untouched (just rewound).
    if '(' in fp.readline():
        for line in fp:
            # Strip the MLX wrapper and turn the various separator forms
            # into single tabs; ')' marks end-of-record.
            line = line.replace(
                "(MLX::", "").replace(
                " : ", "\t").replace(
                " :", "\t").replace(
                " ", "\t").lower().strip().replace(
                ")", "\n")
            if "record-by" in line:
                # Canonicalise the three known record-by values.
                if "image" in line:
                    line = "record-by\timage"
                if "vector" in line:
                    line = "record-by\tvector"
                if "dont-care" in line:
                    line = "record-by\tdont-care"
            fp_list.append(line)
        fp = StringIO()
        fp.writelines(fp_list)
    fp.seek(0)
    return fp
def test_no_inherit_future(self):
    """Code run through exec_in() must not inherit this module's
    ``from __future__ import print_function`` (Python 2 semantics: the
    template's ``print >> f`` is a print *statement*)."""
    # This file has from __future__ import print_function...
    f = StringIO()
    print('hello', file=f)
    # ...but the template doesn't
    exec_in('print >> f, "world"', dict(f=f))
    self.assertEqual(f.getvalue(), 'hello\nworld\n')
def write_with_template(self, fname, tname, data):
    """Render EZT template ``tname`` with ``data`` and write the result to
    ``fname`` only if the content actually changed."""
    template = ezt.Template(compress_whitespace = 0)
    template.parse_file(os.path.join('build', 'generator', tname))
    rendered = StringIO()
    template.generate(rendered, data)
    self.write_file_if_changed(fname, rendered.getvalue())