def test_truncate():
    s = u"1234567890"
    sio = StringIO(s)

    raises(ValueError, sio.truncate, -1)
    sio.seek(6)
    res = sio.truncate()
    assert res == 6
    assert sio.getvalue() == s[:6]
    res = sio.truncate(4)
    assert res == 4
    assert sio.getvalue() == s[:4]
    # truncate() accepts long objects
    res = sio.truncate(4L)
    assert res == 4
    assert sio.getvalue() == s[:4]
    assert sio.tell() == 6
    sio.seek(0, 2)
    sio.write(s)
    assert sio.getvalue() == s[:4] + s
    pos = sio.tell()
    res = sio.truncate(None)
    assert res == pos
    assert sio.tell() == pos
    raises(TypeError, sio.truncate, '0')
    sio.close()
    raises(ValueError, sio.truncate, 0)
def test3get_match_element_valid_match(self):
    """Parse data and check if the MatchContext was not changed."""
    old_stderr = sys.stderr
    output = StringIO()
    sys.stderr = output
    debug_model_element = DebugModelElement(self.id_)
    self.assertEqual(output.getvalue(), "DebugModelElement %s added\n" % self.id_)

    output.seek(0)
    output.truncate(0)
    data = b"some data"
    match_context = DummyMatchContext(data)
    match_element = debug_model_element.get_match_element(self.path, match_context)
    self.assertEqual(
        output.getvalue(), 'DebugModelElement path = "%s", unmatched = "%s"\n' % (
            match_element.get_path(), repr(match_context.match_data)))
    self.compare_match_results(data, match_element, match_context, self.id_, self.path, b"", b"", None)

    output.seek(0)
    output.truncate(0)
    data = b"123 0x2a. [\"abc\"]:"
    match_context = DummyMatchContext(data)
    match_element = debug_model_element.get_match_element(self.path, match_context)
    self.assertEqual(
        output.getvalue(), 'DebugModelElement path = "%s", unmatched = "%s"\n' % (
            match_element.get_path(), repr(match_context.match_data)))
    self.compare_match_results(data, match_element, match_context, self.id_, self.path, b"", b"", None)
    sys.stderr = old_stderr
def LoadPlugin(self, plugin):
    '''LoadPlugin(self, plugin) --> None

    Loads a data handler. Note that there is no UnLoad function,
    since only one DataHandler can be plugged in at a time.
    '''
    # Unload the plugins
    names = self.plugin_handler.loaded_plugins.copy()
    try:
        [self.plugin_handler.unload_plugin(pl) for pl in names]
        self.parent.SetStatusText('Unloaded data loader %s' % list(names.keys())[0])
    except Exception:
        # outp = StringIO.StringIO()
        outp = StringIO()
        traceback.print_exc(200, outp)
        tbtext = outp.getvalue()
        outp.close()
        ShowErrorDialog(self.parent, 'Can NOT unload plugin object ' +
                        list(names.keys())[0] +
                        '\nPython traceback below:\n\n' + tbtext)
    try:
        self.plugin_handler.load_plugin(plugin)
        self.parent.SetStatusText('Loaded data loader: %s' % plugin)
    except Exception:
        outp = StringIO()
        traceback.print_exc(200, outp)
        tbtext = outp.getvalue()
        outp.close()
        ShowErrorDialog(self.parent, 'Can NOT load plugin ' + plugin +
                        '\nPython traceback below:\n\n' + tbtext)
def test_simplest_create(self):
    print("-- test_simplest_create")
    ucisdb = "file.ucis"
    db = MemFactory.create()
    testnode = db.createHistoryNode(
        None,
        "logicalName",
        ucisdb,
        UCIS_HISTORYNODE_TEST)
    td = TestData(
        teststatus=UCIS_TESTSTATUS_OK,
        toolcategory="UCIS:simulator",
        date="20200202020")
    testnode.setTestData(td)

    file = db.createFileHandle("dummy", os.getcwd())
    srcinfo = SourceInfo(file, 0, 0)
    du = db.createScope(
        "foo.bar",
        srcinfo,
        1,  # weight
        UCIS_OTHER,
        UCIS_DU_MODULE,
        UCIS_ENABLED_STMT | UCIS_ENABLED_BRANCH | UCIS_ENABLED_COND
        | UCIS_ENABLED_EXPR | UCIS_ENABLED_FSM | UCIS_ENABLED_TOGGLE
        | UCIS_INST_ONCE | UCIS_SCOPE_UNDER_DU)
    instance = db.createInstance(
        "dummy",
        None,  # sourceinfo
        1,  # weight
        UCIS_OTHER,
        UCIS_INSTANCE,
        du,
        UCIS_INST_ONCE)
    cg = instance.createCovergroup(
        "cg",
        SourceInfo(file, 3, 0),
        1,  # weight
        UCIS_OTHER)
    cp = cg.createCoverpoint(
        "t",
        SourceInfo(file, 4, 0),
        1,  # weight
        UCIS_VLOG)
    cp.setComment("Hello There")
    cp.createBin("auto[a]", SourceInfo(file, 4, 0), 1, 4, "a")

    out = StringIO()
    writer = XmlWriter()
    writer.write(out, db)
    print("XML Output:\n" + out.getvalue())
    input = StringIO(out.getvalue())
    validate_ucis_xml(input)
def test_roundtrip_translation():
    sio1 = StringIO(u'a\nb', newline='\r\n')
    pos = sio1.seek(1)
    assert sio1.getvalue() == u'a\r\nb'
    state = sio1.__getstate__()
    sio2 = StringIO()
    sio2.__setstate__(state)
    assert sio2.getvalue() == u'a\r\nb'
    assert sio2.tell() == pos
def test_pprintInterface(self):
    u = ExampleWithArrayAxi3Lite()
    o = StringIO()
    self.prepareUnit(u)

    pprintInterface(u.clk, file=o)
    self.assertEqual(o.getvalue(), "'clk'\n")

    o = StringIO()
    pprintInterface(u.axi, file=o)
    self.assertEqual(o.getvalue(), axi_str)
def test_roundtrip_state():
    s = u'12345678'
    sio1 = StringIO(s)
    sio1.foo = 42
    sio1.seek(2)
    assert sio1.getvalue() == s
    state = sio1.__getstate__()
    sio2 = StringIO()
    sio2.__setstate__(state)
    assert sio2.getvalue() == s
    assert sio2.foo == 42
    assert sio2.tell() == 2
def test_simple_dump(self):
    @vsc.covergroup
    class my_covergroup(object):
        def __init__(self):
            self.with_sample(dict(
                a=vsc.uint8_t(),
                b=vsc.uint8_t()
            ))
            self.a_cp = vsc.coverpoint(self.a, bins=dict(
                a_bins=vsc.bin_array([], [1, 8])
            ))
            self.b_cp = vsc.coverpoint(self.b, bins=dict(
                b_bins=vsc.bin_array([], [1, 8])
            ))

    cg_1 = my_covergroup()
    cg_1.sample(1, 2)
    cg_1.sample(2, 1)
    cg_1.sample(4, 2)

    cg_2 = my_covergroup()
    cg_2.sample(5, 4)
    cg_2.sample(6, 2)
    cg_2.sample(7, 8)

    db = MemFactory.create()
    v = CoverageSaveVisitor(db)
    td = TestData(
        teststatus=UCIS_TESTSTATUS_OK,
        toolcategory="UCIS:simulator",
        date="20200101132000")
    v.save(td, CoverageRegistry.inst().covergroup_types())
    db.close()

    out = StringIO()
    writer = XmlWriter()
    writer.write(out, db)
    print("Output:\n" + out.getvalue())

    xmlin = StringIO(out.getvalue())
    XmlReader.validate(xmlin)
def test_overseek():
    s = u"1234567890"
    sio = StringIO(s)

    res = sio.seek(11)
    assert res == 11
    res = sio.read()
    assert res == u""
    assert sio.tell() == 11
    assert sio.getvalue() == s
    sio.write(u"")
    assert sio.getvalue() == s
    sio.write(s)
    assert sio.getvalue() == s + u"\0" + s
def test1event_generation_match_action(self):
    """This test case checks if events are generated and pushed to all event handlers."""
    description = "Test1Rules"
    output_stream2 = StringIO()
    message = 'This message was generated, when the unit were successful.'
    match_context = MatchContext(b'25537')
    decimal_integer_value_me = DecimalIntegerValueModelElement(
        'd1', DecimalIntegerValueModelElement.SIGN_TYPE_NONE, DecimalIntegerValueModelElement.PAD_TYPE_NONE)
    match_element = decimal_integer_value_me.get_match_element('match', match_context)
    stream_printer_event_handler2 = StreamPrinterEventHandler(self.analysis_context, output_stream2)

    t = time()
    event_generation_match_action = EventGenerationMatchAction(
        'Test.%s' % self.__class__.__name__, message,
        [self.stream_printer_event_handler, stream_printer_event_handler2])
    self.analysis_context.register_component(event_generation_match_action, description)
    log_atom = LogAtom(match_context.match_data, ParserMatch(match_element), t, event_generation_match_action)
    event_generation_match_action.match_action(log_atom)

    self.assertEqual(self.output_stream.getvalue(), output_stream2.getvalue())
    self.assertEqual(
        self.output_stream.getvalue(), self.__expected_string % (
            datetime.fromtimestamp(t).strftime("%Y-%m-%d %H:%M:%S"),
            event_generation_match_action.__class__.__name__, description, 1,
            log_atom.parser_match.match_element.annotate_match('')))
class TestModuleName(unittest.TestCase):

    def __init__(self, methodName='runTest'):  # noqa: H803
        unittest.TestCase.__init__(self, methodName)
        self.stream = None
        self.handler = None
        self.log = None

    def setUp(self):
        self.stream = StringIO()
        self.handler = logging.StreamHandler(self.stream)
        self.log = logging.getLogger(module.__name__)
        for handler in self.log.handlers:
            self.log.removeHandler(handler)
        self.log.addHandler(self.handler)

    def test_wrong_name(self):
        mod_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "modules", "mod3")
        module.Module(project=mock.Mock(), path=mod_dir)
        self.handler.flush()
        assert ("The name in the module file (mod1) does not match the directory name (mod3)"
                in self.stream.getvalue().strip())

    def tearDown(self):
        self.log.removeHandler(self.handler)
        self.handler.close()
def output_func(col):
    '''
    This function outputs the result of the reducer, namely the tf-idf
    vector for every abstract.
    '''
    s = StringIO()
    col.to_json(s)
    print('%s\t%s' % (col.name, s.getvalue()))
def main():
    parser = ArgumentParser()
    parser.add_argument('--spark', dest='spark', action='store_true', default=False)
    args = parser.parse_args()

    doc_freq = read_json('../../data/doc_freq.json', typ='series')
    N = doc_freq['TOTAL_DOCS']
    idf_vector = np.log10(N / doc_freq)

    for line in sys.stdin:
        url, tf_vector = line.split()
        tf_vector = read_json(tf_vector, typ='series')

        # Calculate tf-idf from the tf vector and the doc_freq vector.
        tf_idf_vector = tf_vector.multiply(idf_vector, fill_value=0)
        # Normalize the tf-idf vector; this is important for the clustering
        # done later on.
        tf_idf_vector = tf_idf_vector / tf_idf_vector.sum()

        if NONZERO_ONLY:
            # Keep only the tf-idf terms that actually occur in this document,
            # since this is going to be a sparse vector.
            output_vector = tf_idf_vector[tf_idf_vector.nonzero()[0]]
        else:
            output_vector = tf_idf_vector

        s = StringIO()
        output_vector.to_json(s)
        if args.spark:
            indices_zip = list(zip(tf_idf_vector.nonzero()[0].tolist(), output_vector.tolist()))
            print('%s\t%s' % (url, indices_zip))
        else:
            print('%s\t%s' % (url, s.getvalue()))
def output_func(col):
    '''
    This function outputs the result of the reducer, namely the tf-idf
    vector for every abstract.
    '''
    s = StringIO()
    col.to_json(s)
    print('%s\t%s' % (col.name, s.getvalue()))
def test_stringio():
    sio = StringIO()
    sio.write(u'Hello ')
    sio.write(u'world')
    assert sio.getvalue() == u'Hello world'

    assert StringIO(u"hello").read() == u'hello'
def build(cls, unit: Unit, unique_name: str, build_dir: Optional[str],
          target_platform=DummyPlatform(),
          do_compile=True) -> "BasicRtlSimulatorVcd":
    """
    Create a pycocotb.basic_hdl_simulator based simulation model
    for specified unit and load it to python

    :param unit: interface level unit which you want to prepare for simulation
    :param unique_name: unique name for build directory and python module with simulator
    :param target_platform: target platform for this synthesis
    :param build_dir: directory to store sim model build files,
        if None sim model will be constructed only in memory
    """
    if unique_name is None:
        unique_name = unit._getDefaultName()

    _filter = SerializerFilterDoNotExclude()
    if build_dir is None or not do_compile:
        buff = StringIO()
        store_man = SaveToStream(SimModelSerializer, buff, _filter=_filter)
    else:
        if not os.path.isabs(build_dir):
            build_dir = os.path.join(os.getcwd(), build_dir)
        build_private_dir = os.path.join(build_dir, unique_name)
        store_man = SaveToFilesFlat(SimModelSerializer,
                                    build_private_dir,
                                    _filter=_filter)
        store_man.module_path_prefix = unique_name

    to_rtl(unit,
           name=unique_name,
           target_platform=target_platform,
           store_manager=store_man)

    if build_dir is not None:
        d = build_dir
        dInPath = d in sys.path
        if not dInPath:
            sys.path.insert(0, d)
        if unique_name in sys.modules:
            del sys.modules[unique_name]
        simModule = importlib.import_module(
            unique_name + "." + unique_name,
            package='simModule_' + unique_name)

        if not dInPath:
            sys.path.pop(0)
    else:
        simModule = ModuleType('simModule_' + unique_name)
        # python supports only ~100 opened brackets
        # if exceeded it throws MemoryError: s_push: parser stack overflow
        exec(buff.getvalue(), simModule.__dict__)

    model_cls = simModule.__dict__[unit._name]
    # can not use just function as it would get bounded to class
    return cls(model_cls, unit)
def test2get_child_elements(self):
    """Test if get_child_elements returns None."""
    old_stderr = sys.stderr
    output = StringIO()
    sys.stderr = output
    debug_me = DebugModelElement(self.id_)
    self.assertEqual(debug_me.get_child_elements(), None)
    self.assertEqual("DebugModelElement %s added\n" % self.id_, output.getvalue())
    sys.stderr = old_stderr
def test1get_id(self):
    """Test if get_id works properly."""
    old_stderr = sys.stderr
    output = StringIO()
    sys.stderr = output
    debug_me = DebugModelElement(self.id_)
    self.assertEqual(debug_me.get_id(), self.id_)
    self.assertEqual("DebugModelElement %s added\n" % self.id_, output.getvalue())
    sys.stderr = old_stderr
def disabled_test_lib_dump(self):
    LibFactory.load_ucis_library("libucis.so")
    db = LibFactory.create("file.ucis")

    out = StringIO()
    writer = XmlWriter()
    writer.write(out, db)
    input = StringIO(out.getvalue())
def wrapper(*args, **kwargs):
    m = StringIO()
    temp_func = memory_profiler.profile(func=function, stream=m, precision=4)
    output = temp_func(*args, **kwargs)
    print(m.getvalue())
    m.close()
    return output
def packtabs(self, s):
    from _io import StringIO
    sb = StringIO()
    for i in range(0, len(s), 8):
        c = s[i:i + 8]
        cr = c.rstrip(" ")
        if c != cr:
            sb.write(cr + "\t")  ## Spaces at the end of a section
        else:
            sb.write(c)
    return sb.getvalue()
def get_coverage_report(details=False) -> str:
    """Returns a textual coverage report of all covergroups"""
    model = get_coverage_report_model()
    out = StringIO()
    formatter = TextCoverageReportFormatter(model, out)
    formatter.details = details
    formatter.report()
    return out.getvalue()
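# A minimal usage sketch for get_coverage_report() above (an illustrative assumption,
# not part of the original snippet): it presumes some covergroups have already been
# created and sampled elsewhere, so the formatter has data to report on.
if __name__ == "__main__":
    # Short summary first, then the per-bin details.
    print(get_coverage_report())
    print(get_coverage_report(details=True))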
def test1start_debugging(self):
    """This test case checks if the DebugModelElement was initiated and the output was correct."""
    output = StringIO()
    sys.stderr = output
    debug_model_element = DebugModelElement('debug')
    self.assertEqual(output.getvalue(), 'DebugModelElement %s added\n' % debug_model_element.element_id)

    output.seek(0)
    output.truncate(0)
    matchContext = MatchContext(b'some data')
    matchElement = debug_model_element.get_match_element('debugMatch', matchContext)
    self.assertEqual(
        output.getvalue(), 'DebugModelElement path = "%s", unmatched = "%s"\n' % (
            matchElement.get_path(), matchContext.match_data))
class FormatTest(unittest.TestCase):
    """
    Test if different format specifiers are correctly printed even if the
    desired data has non-numerical columns
    """

    fstrings = [
        "%.1f", "%.3f", "%.5f", "%.9f",
        "%16.5f", "%21.5f", "%28.5f", "%12.5f",
        "%9.5g", "%12.1g", "%9g",
    ]

    def setUp(self):
        """Redirect stdout"""
        self.out = StringIO()
        sys.stdout = self.out
        self.ev = IPETEvaluation(index="stringA stringB", indexsplit=1)
        self.numericcolumn = IPETEvaluationColumn(origcolname="numeric")
        self.ev.addColumn(self.numericcolumn)
        self.ev.addColumn(IPETEvaluationColumn(origcolname="Status", active="True", formatstr="%20s"))
        self.ev.addFilterGroup(IPETFilterGroup(name="FilterGroup"))

    def tearDown(self):
        """Close the String IO object"""
        self.out.close()
        sys.stdout = sys.__stdout__

    def testFormat(self):
        """Test all fstrings"""
        for f in self.fstrings:
            self.numericcolumn.editAttribute("formatstr", f)
            ret, _ = self.ev.evaluate(HelperExperiment())
            self.ev.streamDataFrame(ret, "Test", "stdout")

            # scan output and check if the formatted value is in there
            container = self.out.getvalue()
            mem = " {} ".format(f % val)
            msg = "Expected formatted number '{}' in output \n{}\n".format(mem, container)
            self.assertIn(mem, container, msg)
def wrapper(*args, **kwargs):
    m = StringIO()
    pr = cProfile.Profile()
    pr.enable()
    temp_func = memory_profiler.profile(func=function, stream=m, precision=4)
    output = temp_func(*args, **kwargs)
    print(m.getvalue())
    pr.disable()
    ps = pstats.Stats(pr)
    ps.sort_stats('cumulative').print_stats('(?!.*memory_profiler.*)(^.*$)', 20)
    m.close()
    return output
def expandtabs(s):
    from _io import StringIO
    if '\t' in s:
        sb = StringIO()
        pos = 0
        for c in s:
            if c == '\t':  ## tab is seen
                sb.write(" " * (8 - pos % 8))  ## replace by space
                pos += 8 - pos % 8
            else:
                sb.write(c)
                pos += 1
        return sb.getvalue()
    else:
        return s
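# A few hedged sanity checks for expandtabs() above (illustrative only, not from
# the original source): tab stops fall every 8 columns, and strings without tabs
# are returned unchanged.
assert expandtabs("a\tb") == "a" + " " * 7 + "b"
assert expandtabs("12345678\tx") == "12345678" + " " * 8 + "x"
assert expandtabs("no tabs here") == "no tabs here"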
def func4():
    """
    StringIO, as its name suggests, reads and writes str in memory.
    """
    f = StringIO("可以这样初始化#\t#\t")  # can be initialized with a string like this
    # f = StringIO()
    f.write("HelloWorld!")  # writing afterwards overwrites the initial content
    print(f.getvalue())  # getvalue() returns the str written so far

    """
    StringIO can only operate on str; to work with binary data, use BytesIO instead.
    """
    fb = BytesIO()
    # fb = BytesIO(b'\xe4\xb8\xad\xe6\x96\x87')  # can also be initialized like this
    fb.write("测试中文".encode(encoding='utf_8'))
    print(fb.getvalue())
def expandtabs(s):
    from _io import StringIO
    if "\t" in s:
        sb = StringIO()
        pos = 0
        for c in s:
            if c == "\t":
                sb.write(" " * (8 - pos % 8))
                pos += 8 - pos % 8
            else:
                sb.write(c)
                pos += 1
        return sb.getvalue()
    else:
        return s
def getTagAcessorias(self):
    """
    Returns a formatted string with the accessory tags to be included in the
    elements. This default implementation can likely be reused by all
    descendant widgets.
    """
    result = StringIO()
    if self.autofocus is not None:
        result.write("autofocus ")
    if self.title is not None:
        result.write("title='%s' " % self.title)
    if self.hidden is not None:
        result.write("hidden ")
    if self.tabindex is not None:
        result.write("tabindex='%d' " % self.tabindex)
    event_tag = result.getvalue()
    result.close()
    return event_tag
class TestModuleName(unittest.TestCase):

    def __init__(self, methodName="runTest"):  # noqa: N803
        unittest.TestCase.__init__(self, methodName)
        self.stream = None
        self.handler = None
        self.log = None

    def setUp(self):
        self.stream = StringIO()
        self.handler = logging.StreamHandler(self.stream)
        self.log = logging.getLogger(module.__name__)
        for handler in self.log.handlers:
            self.log.removeHandler(handler)
        self.log.addHandler(self.handler)

    def test_wrong_name(self):
        mod_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data", "modules", "mod3")
        module.ModuleV1(project=mock.Mock(), path=mod_dir)
        self.handler.flush()
        assert ("The name in the module file (mod1) does not match the directory name (mod3)"
                in self.stream.getvalue().strip())

    def test_non_matching_name_v2_module(self) -> None:
        """
        Make sure the warning regarding directory name does not trigger for v2 modules,
        as it is not relevant there.
        """
        template_dir: str = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "data", "modules_v2", "minimalv2module")
        with tempfile.TemporaryDirectory() as tmpdir:
            mod_dir: str = os.path.join(tmpdir, "not-the-module-name")
            module_from_template(template_dir, mod_dir)
            module.ModuleV2(project=module.DummyProject(), path=mod_dir)
            self.handler.flush()
            assert self.stream.getvalue().strip() == ""

    def tearDown(self):
        self.log.removeHandler(self.handler)
        self.handler.close()
def main():
    parser = ArgumentParser()
    parser.add_argument('--spark', dest='spark', action='store_true', default=False)
    args = parser.parse_args()

    doc_freq = read_json('../../data/doc_freq.json', typ='series')
    N = doc_freq['TOTAL_DOCS']
    idf_vector = np.log10(N / doc_freq)

    for line in sys.stdin:
        url, tf_vector = line.split()
        tf_vector = read_json(tf_vector, typ='series')

        # Calculate tf-idf from the tf vector and the doc_freq vector.
        tf_idf_vector = tf_vector.multiply(idf_vector, fill_value=0)
        # Normalize the tf-idf vector; this is important for the clustering
        # done later on.
        tf_idf_vector = tf_idf_vector / tf_idf_vector.sum()

        if NONZERO_ONLY:
            # Keep only the tf-idf terms that actually occur in this document,
            # since this is going to be a sparse vector.
            output_vector = tf_idf_vector[tf_idf_vector.nonzero()[0]]
        else:
            output_vector = tf_idf_vector

        s = StringIO()
        output_vector.to_json(s)
        if args.spark:
            indices_zip = list(zip(tf_idf_vector.nonzero()[0].tolist(), output_vector.tolist()))
            print('%s\t%s' % (url, indices_zip))
        else:
            print('%s\t%s' % (url, s.getvalue()))
def get_query_texts(self, file_basenames):
    '''
    Read all queries in files within Queries. Return a dict
    {table_name : "the query text"}.
    Leave out lines with sharp char (comment) at the start.

    @param file_basenames: names of query file names in Queries (not full paths)
    @type file_basenames: [str]
    @return: dictionary mapping table names to the SQL text that creates them
    @rtype: {str : str}
    '''
    full_query_paths = [os.path.join(self.query_dir, file_name)
                        for file_name in file_basenames
                        if file_name.endswith('.sql')]
    text_dict = {}
    for query_path in full_query_paths:
        # Table name is name of file without extension:
        table_name = os.path.splitext(os.path.basename(query_path))[0]
        with open(query_path, 'r') as fd:
            in_buf = StringIO(fd.read())
        # Discard comments with hash char at start of line:
        out_buf = StringIO()
        for line in in_buf:
            if line[0] == '#':
                continue
            out_buf.write(line)
        # Store the entire query file content
        # in the value of the table dict:
        text_dict[table_name] = out_buf.getvalue()
    return text_dict
def get_config(self):
    outstr = StringIO()
    outstr.write("======================================\n")
    outstr.write(" Print Configuration \n")
    outstr.write("======================================\n")
    if hasattr(self, 'labels_per_row'):
        outstr.write(" Labels per row : ")
        outstr.write(str(self.labels_per_row))
        outstr.write("\n")
    if hasattr(self, 'paper_width_mm'):
        outstr.write(" Paper width mm : ")
        outstr.write(str(self.paper_width_mm))
        outstr.write("\n")
    if hasattr(self, 'label_x_offset_mm'):
        outstr.write(" x offset : ")
        outstr.write(str(self.label_x_offset_mm))
        outstr.write("\n")
    if hasattr(self, 'label_y_offset_mm'):
        outstr.write(" y offset : ")
        outstr.write(str(self.label_y_offset_mm))
        outstr.write("\n")
    if hasattr(self, 'label_width_mm') and hasattr(self, 'label_height_mm'):
        outstr.write(" Labels size mm : ")
        outstr.write(str(self.label_width_mm))
        outstr.write(" x ")
        outstr.write(str(self.label_height_mm))
        outstr.write("\n")
    if hasattr(self, 'label_x_gap_mm'):
        outstr.write(" x gap mm : ")
        outstr.write(str(self.label_x_gap_mm))
        outstr.write("\n")
    if hasattr(self, 'label_y_gap_mm'):
        outstr.write(" y gap mm : ")
        outstr.write(str(self.label_y_gap_mm))
        outstr.write("\n")
    outstr.write("======================================\n")
    return outstr.getvalue()
def convert(self, fileName):
    logging.info("PDFConverter.convert STARTS")
    resourceManager = PDFResourceManager()
    retstr = StringIO()
    codec = 'utf-8'
    laparams = LAParams()
    device = TextConverter(resourceManager, retstr, codec=codec, laparams=laparams)
    filename = os.path.abspath(__file__ + '/../../../../../temp/' + fileName)
    fp = open(filename, "rb")
    interpreter = PDFPageInterpreter(resourceManager, device)
    password = ""
    maxpages = 0
    caching = True
    pagenos = set()

    for page in PDFPage.get_pages(fp, pagenos, maxpages=maxpages, password=password,
                                  caching=caching, check_extractable=True):
        interpreter.process_page(page)

    text = retstr.getvalue()

    fp.close()
    device.close()
    retstr.close()
    os.remove(filename)
    logging.info("PDFConverter.convert ENDS")
    return text
def test_smoke(self):
    print("-- test_smoke")
    ucisdb = "file.ucis"
    db = MemFactory.create()
    testnode = db.createHistoryNode(
        None,
        "logicalName",
        ucisdb,
        UCIS_HISTORYNODE_TEST)
    td = TestData(
        teststatus=UCIS_TESTSTATUS_OK,
        toolcategory="UCIS:simulator",
        date="20200202020")
    testnode.setTestData(td)

    file = db.createFileHandle("dummy", os.getcwd())
    srcinfo = SourceInfo(file, 0, 0)
    du = db.createScope(
        "foo.bar",
        srcinfo,
        1,  # weight
        UCIS_OTHER,
        UCIS_DU_MODULE,
        UCIS_ENABLED_STMT | UCIS_ENABLED_BRANCH | UCIS_ENABLED_COND
        | UCIS_ENABLED_EXPR | UCIS_ENABLED_FSM | UCIS_ENABLED_TOGGLE
        | UCIS_INST_ONCE | UCIS_SCOPE_UNDER_DU)
    instance = db.createInstance(
        "my_inst_scope",
        None,  # sourceinfo
        1,  # weight
        UCIS_OTHER,
        UCIS_INSTANCE,
        du,
        UCIS_INST_ONCE)
    cg = instance.createCovergroup(
        "cg",
        SourceInfo(file, 3, 0),
        1,  # weight
        UCIS_OTHER)
    cp1 = cg.createCoverpoint(
        "cp1",
        SourceInfo(file, 4, 0),
        1,  # weight
        UCIS_VLOG)
    cp1.createBin("v[1]", SourceInfo(file, 4, 0), 1, 4, "v")
    cp1.createBin("v[2]", SourceInfo(file, 4, 0), 1, 4, "v")
    cp2 = cg.createCoverpoint(
        "cp2",
        SourceInfo(file, 4, 0),
        1,  # weight
        UCIS_VLOG)
    cp2.createBin("v2[1]", SourceInfo(file, 4, 0), 1, 4, "v2")
    cp2.createBin("v2[2]", SourceInfo(file, 4, 0), 1, 4, "v2")

    cr = cg.createCross(
        "cr",
        SourceInfo(file, 4, 0),
        1,
        UCIS_VLOG,
        [cp1, cp2])
    cr.createBin("<v1[1],v2[1]>", SourceInfo(file, 4, 0), 1, 4, "v1,v2")
    cr.createBin("<v1[2],v2[1]>", SourceInfo(file, 4, 0), 1, 4, "v1,v2")
    cr.createBin("<v1[1],v2[2]>", SourceInfo(file, 4, 0), 1, 4, "v1,v2")
    cr.createBin("<v1[2],v2[2]>", SourceInfo(file, 4, 0), 1, 4, "v1,v2")

    out = StringIO()
    writer = XmlWriter()
    writer.write(out, db)
    print("XML Output:\n" + out.getvalue())

    input = StringIO(out.getvalue())
    validate_ucis_xml(input)

    input = StringIO(out.getvalue())
    reader = XmlReader()
    db2 = reader.read(input)

    out2 = StringIO()
    writer = XmlWriter()
    writer.write(out2, db2)
    print("XML Output2:\n" + out2.getvalue())

    input = StringIO(out2.getvalue())
    validate_ucis_xml(input)
def on_tab_switch(self, change):
    # Clean up any previous messages
    with self.machine_messages_display:
        clear_output()
    with self.machine_failure_display:
        clear_output()

    if self.editor_tabs.get_title(change['new']) == 'Animate':
        with self.machine_messages_display:
            display(self.machine_messages_text)
        # Clear the last displayed machine
        with self.animated_machine_display:
            clear_output()
        # Generate the machine and display its animator
        jove_error = StringIO()
        machine = None
        if self.machine_toggle.value == 'DFA':
            if len(self.dfa_editor.value.strip()) == 0:
                self.display_animate_error('DFA', 'No machine description in editor')
                return
            check_for_dict = self.dfa_editor.value.strip()
            if check_for_dict[0] == '{' and check_for_dict[-1] == '}':
                try:
                    machine = ast.literal_eval(check_for_dict)
                    if {'Q', 'Sigma', 'Delta', 'q0', 'F'} != machine.keys():
                        self.display_animate_error('DFA', 'Badly formed machine description in editor')
                        return
                except Exception as e:
                    self.display_animate_error('DFA', str(e))
                    return
            else:
                try:
                    with redirect_stdout(jove_error):
                        machine = md2mc('DFA\n{}'.format(self.dfa_editor.value))
                except Exception as e:
                    message = 'Jove error message:\n{}\n\nPython error message: {}'.format(jove_error.getvalue(), str(e))
                    self.display_animate_error('DFA', message)
                    return
            with self.animated_machine_display:
                display(AnimateDFA(machine,
                                   FuseEdges=self.fuse_option.value,
                                   pick_start=self.alt_start_option.value,
                                   max_width=self.max_draw_size.value,
                                   accept_color=self.accept_colorpicker.value,
                                   reject_color=self.reject_colorpicker.value,
                                   neutral_color=self.transit_colorpicker.value))
        elif self.machine_toggle.value == 'NFA':
            if len(self.nfa_editor.value.strip()) == 0:
                self.display_animate_error('NFA', 'No machine description in editor')
                return
            check_for_dict = self.nfa_editor.value.strip()
            if check_for_dict[0] == '{' and check_for_dict[-1] == '}':
                try:
                    machine = ast.literal_eval(check_for_dict)
                    if {'Q', 'Sigma', 'Delta', 'Q0', 'F'} != machine.keys():
                        self.display_animate_error('NFA', 'Badly formed machine description in editor')
                        return
                except Exception as e:
                    self.display_animate_error('NFA', str(e))
                    return
            else:
                try:
                    with redirect_stdout(jove_error):
                        machine = md2mc('NFA\n{}'.format(self.nfa_editor.value))
                except Exception as e:
                    message = 'Jove error message:\n{}\n\nPython error message: {}'.format(jove_error.getvalue(), str(e))
                    self.display_animate_error('NFA', message)
                    return
            with self.animated_machine_display:
                display(AnimateNFA(machine,
                                   FuseEdges=self.fuse_option.value,
                                   pick_start=self.alt_start_option.value,
                                   max_width=self.max_draw_size.value,
                                   accept_color=self.accept_colorpicker.value,
                                   reject_color=self.reject_colorpicker.value,
                                   neutral_color=self.transit_colorpicker.value))
        elif self.machine_toggle.value == 'PDA':
            if len(self.pda_editor.value.strip()) == 0:
                self.display_animate_error('PDA', 'No machine description in editor')
                return
            check_for_dict = self.pda_editor.value.strip()
            if check_for_dict[0] == '{' and check_for_dict[-1] == '}':
                try:
                    machine = ast.literal_eval(check_for_dict)
                    if {'Q', 'Sigma', 'Gamma', 'Delta', 'q0', 'z0', 'F'} != machine.keys():
                        self.display_animate_error('PDA', 'Badly formed machine description in editor')
                        return
                except Exception as e:
                    self.display_animate_error('PDA', str(e))
                    return
            else:
                try:
                    with redirect_stdout(jove_error):
                        machine = md2mc('PDA\n{}'.format(self.pda_editor.value))
                except Exception as e:
                    message = 'Jove error message:\n{}\n\nPython error message: {}'.format(jove_error.getvalue(), str(e))
                    self.display_animate_error('PDA', message)
                    return
            with self.animated_machine_display:
                display(AnimatePDA(machine,
                                   FuseEdges=self.fuse_option.value,
                                   max_stack=self.max_stack_size.value,
                                   max_width=self.max_draw_size.value,
                                   accept_color=self.accept_colorpicker.value,
                                   reject_color=self.reject_colorpicker.value,
                                   neutral_color=self.transit_colorpicker.value))
        elif self.machine_toggle.value == 'TM':
            if len(self.tm_editor.value.strip()) == 0:
                self.display_animate_error('TM', 'No machine description in editor')
                return
            check_for_dict = self.tm_editor.value.strip()
            if check_for_dict[0] == '{' and check_for_dict[-1] == '}':
                try:
                    machine = ast.literal_eval(check_for_dict)
                    if {'Q', 'Sigma', 'Gamma', 'Delta', 'q0', 'B', 'F'} != machine.keys():
                        self.display_animate_error('TM', 'Badly formed machine description in editor')
                        return
                except Exception as e:
                    self.display_animate_error('TM', str(e))
                    return
            else:
                try:
                    with redirect_stdout(jove_error):
                        machine = md2mc('TM\n{}'.format(self.tm_editor.value))
                except Exception as e:
                    message = 'Jove error message:\n{}\n\nPython error message: {}'.format(jove_error.getvalue(), str(e))
                    self.display_animate_error('TM', message)
                    return
            with self.animated_machine_display:
                display(AnimateTM(machine,
                                  FuseEdges=self.fuse_option.value,
                                  show_rejected=self.show_reject_option.value,
                                  max_width=self.max_draw_size.value,
                                  accept_color=self.accept_colorpicker.value,
                                  reject_color=self.reject_colorpicker.value,
                                  neutral_color=self.transit_colorpicker.value))
        # Translation is not implemented yet
        elif self.machine_toggle.value == 'Translate':
            self.display_animate_error('Translation', 'Translate is not Implemented yet')
            return
        with self.machine_messages_display:
            clear_output()
    elif self.editor_tabs.get_title(change['new']) == 'Edit':
        with self.animated_machine_display:
            clear_output()
        with self.machine_failure_display:
            clear_output()
        with self.machine_messages_display:
            clear_output()
def _test_pprintAgent(self, intf, expectedStr):
    pointerRe = re.compile("0x[a-f0-9]*")
    o = StringIO()
    pprintAgents(intf, file=o)
    # Strip pointer addresses before comparing, as they change between runs.
    self.assertEqual(pointerRe.sub("", o.getvalue()),
                     pointerRe.sub("", expectedStr))