class TestBase(unittest.TestCase):
    """Shared fixture for aminer unit tests.

    Loads the unit-test configuration, wires a StreamPrinterEventHandler to an
    in-memory stream, and guarantees an empty persistence directory before and
    after every test.
    """

    __configFilePath = os.getcwd() + '/unit/testutilities/config.py'

    def setUp(self):
        """Build a fresh analysis context and wipe the persistence directory."""
        self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
        self.analysis_context = AnalysisContext(self.aminer_config)
        self.output_stream = StringIO()
        self.stream_printer_event_handler = StreamPrinterEventHandler(
            self.analysis_context, self.output_stream)
        self.__reset_persistence_dir()

    def tearDown(self):
        """Reload the config and leave behind an empty persistence directory."""
        self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
        self.__reset_persistence_dir()

    def __reset_persistence_dir(self):
        # Remove any state left by a previous test, then recreate the
        # (now empty) persistence directory.
        persistence_dir = AMinerConfig.build_persistence_file_name(self.aminer_config)
        if os.path.exists(persistence_dir):
            shutil.rmtree(persistence_dir)
        if not os.path.exists(persistence_dir):
            os.makedirs(persistence_dir)

    def reset_output_stream(self):
        """Discard everything captured by the event handler so far."""
        self.output_stream.seek(0)
        self.output_stream.truncate(0)
def output_func(col):
    """Emit one reducer result on stdout.

    Prints the tf-idf vector for a single abstract as
    ``<series name> TAB <JSON-serialized series>``.
    """
    s = StringIO()
    col.to_json(s)
    serialized = s.getvalue()
    print('%s\t%s' % (col.name, serialized))
def _runTest(self, text):
    """Parse *text*; on syntax errors dump the source with line numbers,
    then resolve links and verify nothing was left unresolved."""
    parser = CUParser(InputStream(text), unittest.TestCase.id)
    cu = parser.parse()
    if len(cu.markers) > 0:
        # Show the failing input with 1-based line numbers for debugging.
        print("Test Failed:")
        for lineno, raw_line in enumerate(StringIO(text), 1):
            print("%3d: %s" % (lineno, raw_line[:-1]))
    self.assertEqual(len(cu.markers), 0, "Syntax Errors")
    # Resolve cross-references between compilation units.
    linker = LinkVisitor([cu])
    linker.link()
    # Run the link checker to ensure we didn't miss resolving anything.
    checker = TestLinker.LinkCheckVisitor()
    cu.accept(checker)
def mails(self):
    """Assemble the status-notification mail and send it when any problem
    section (missing 7z files, unconfigured SCS, bitbake recovery) applies."""
    self.mail_content = StringIO()
    self.mail_content.write("*This is an automatically generated email. Do not reply.*\n\n\n")
    self.is_need_mail = False

    def append_section(banner, body):
        # One problem section: mark the mail as required, write the banner,
        # the section body, and a blank separator.
        self.is_need_mail = True
        self.mail_content.write(banner)
        self.mail_content.write(body)
        self.mail_content.write("\n\n")

    if not self.is_all_7zfiles_exist:
        append_section("**************************Missing 7z files**************************\n",
                       self.mail_content_7zfiles.getvalue())
    if not self.is_all_scs_configed:
        append_section("**************************Scs not configed**************************\n",
                       self.mail_content_scs)
    if not self.is_recovered_bitbake:
        append_section("**************************Recover bitbake**************************\n",
                       self.mail_content_recover.getvalue())
    elif not self.is_bitbake_content_fine:
        append_section("**************************Recover bitbake**************************\n",
                       self.mail_content_recover.getvalue())
    if self.is_need_mail:
        mail = Mail(self.config.get("mail_from"), self.config.get("mail_to"))
        mail.create(self.config.get("mail_subject"), self.mail_content.getvalue())
        mail.send()
def main():
    """Map each stdin line ``url TAB tf-json`` to its normalized tf-idf vector.

    With ``--spark`` the output is a list of (term-index, value) pairs,
    otherwise the JSON serialization of the (possibly sparse) vector.
    """
    parser = ArgumentParser()
    parser.add_argument('--spark', dest='spark', action='store_true', default=False)
    args = parser.parse_args()
    doc_freq = read_json('../../data/doc_freq.json', typ='series')
    total_docs = doc_freq['TOTAL_DOCS']
    idf_vector = np.log10(total_docs / doc_freq)
    for line in sys.stdin:
        url, tf_json = line.split()
        tf_vector = read_json(tf_json, typ='series')
        # tf-idf is the element-wise product of tf and idf; missing terms count as 0.
        tf_idf_vector = tf_vector.multiply(idf_vector, fill_value=0)
        # Normalize the vector — important for the clustering done later on.
        tf_idf_vector = tf_idf_vector / tf_idf_vector.sum()
        if NONZERO_ONLY:
            # Keep only terms that actually occur in this document (sparse output).
            output_vector = tf_idf_vector[tf_idf_vector.nonzero()[0]]
        else:
            output_vector = tf_idf_vector
        buf = StringIO()
        output_vector.to_json(buf)
        if args.spark:
            indices_zip = list(zip(tf_idf_vector.nonzero()[0].tolist(), output_vector.tolist()))
            print('%s\t%s' % (url, indices_zip))
        else:
            print('%s\t%s' % (url, buf.getvalue()))
def test1event_generation_match_action(self):
    """This test case checks if events are generated and pushed to all event handlers."""
    description = "Test1Rules"
    second_stream = StringIO()
    message = 'This message was generated, when the unit were successful.'
    match_context = MatchContext(b'25537')
    decimal_integer_value_me = DecimalIntegerValueModelElement(
        'd1', DecimalIntegerValueModelElement.SIGN_TYPE_NONE,
        DecimalIntegerValueModelElement.PAD_TYPE_NONE)
    match_element = decimal_integer_value_me.get_match_element('match', match_context)
    second_handler = StreamPrinterEventHandler(self.analysis_context, second_stream)
    timestamp = time()
    event_generation_match_action = EventGenerationMatchAction(
        'Test.%s' % self.__class__.__name__, message,
        [self.stream_printer_event_handler, second_handler])
    self.analysis_context.register_component(event_generation_match_action, description)
    log_atom = LogAtom(match_context.match_data, ParserMatch(match_element),
                       timestamp, event_generation_match_action)
    event_generation_match_action.match_action(log_atom)
    # Both handlers must have received exactly the same event text.
    self.assertEqual(self.output_stream.getvalue(), second_stream.getvalue())
    self.assertEqual(
        self.output_stream.getvalue(), self.__expected_string % (
            datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d %H:%M:%S"),
            event_generation_match_action.__class__.__name__, description, 1,
            log_atom.parser_match.match_element.annotate_match('')))
def output_func(col):
    """Write one reducer record — the tf-idf vector for one abstract —
    to stdout as ``name TAB json``."""
    json_buf = StringIO()
    col.to_json(json_buf)
    record = '%s\t%s' % (col.name, json_buf.getvalue())
    print(record)
def LoadPlugin(self, plugin):
    '''LoadPlugin(self, plugin) --> None

    Loads a data handler. Note that there is no UnLoad function, since only
    one DataHandler can be plugged in at a time.  Any previously loaded
    plugins are unloaded first; failures in either step are reported via an
    error dialog with the traceback attached.
    '''
    # Unload the currently loaded plugins first.
    names = self.plugin_handler.loaded_plugins.copy()
    try:
        # A plain for-loop: the original abused a list comprehension for
        # its side effects only.
        for pl in names:
            self.plugin_handler.unload_plugin(pl)
        self.parent.SetStatusText('Unloaded data loader %s' % list(names.keys())[0])
    except Exception:
        # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit propagate.
        outp = StringIO()
        traceback.print_exc(200, outp)
        tbtext = outp.getvalue()
        outp.close()
        ShowErrorDialog(self.parent, 'Can NOT unload plugin object' +
                        list(names.keys())[0] +
                        '\nPython traceback below:\n\n' + tbtext)
    try:
        self.plugin_handler.load_plugin(plugin)
        self.parent.SetStatusText('Loaded data loader: %s' % plugin)
    except Exception:
        outp = StringIO()
        traceback.print_exc(200, outp)
        tbtext = outp.getvalue()
        outp.close()
        ShowErrorDialog(self.parent, 'Can NOT load plugin ' + plugin +
                        '\nPython traceback below:\n\n' + tbtext)
def test_iterator():
    """A StringIO is its own line iterator; iterating past EOF yields
    nothing and a closed object raises ValueError."""
    s = u"1234567890\n"
    sio = StringIO(s * 10)
    assert iter(sio) is sio
    assert hasattr(sio, "__iter__")
    assert hasattr(sio, "next")
    # Two full passes over the same content yield identical lines.
    assert [line for line in sio] == [s] * 10
    sio.seek(0)
    assert [line for line in sio] == [s] * 10
    # Position past the end: nothing left to yield.
    sio.seek(len(s) * 10 + 1)
    assert list(sio) == []
    sio = StringIO(s * 2)
    sio.close()
    raises(ValueError, next, sio)
def envfrom(self, f, *args):
    """Milter MAIL FROM callback: (re)initialize per-message state and
    record the sender.

    Bug fix: the vararg was originally named ``str``, shadowing the builtin,
    so the ``str(log)`` call in the exception handler would have raised
    ``TypeError: 'tuple' object is not callable``.  The bare ``except:`` was
    also narrowed to ``except Exception``.

    :param f: envelope sender string (possibly ``<addr>``-wrapped)
    :return: a milter disposition code; CONTINUE on any error so the whole
        email is still gathered.
    """
    try:
        if self.CUSTOMFROM != "":
            # If anything is left over from the last email, re-initialize.
            self.__init__()
        self.startTime = time.time()
        self.startTimeDT = datetime.datetime.now()
        if self.milterConfig.mode == "shutdown":
            log = self.uuid + " milter in Maint mode, returning [Sender:" + ''.join(f) + "]"
            self.logger.writeLog(syslog.LOG_DEBUG, "%s" % log)
            return self.milterConfig.dispositionModes["InMaintMode".lower()]
        log = self.uuid + " envFrom: "
        log += ''.join(f)
        self.logger.writeLog(syslog.LOG_DEBUG, "%s" % log)
        # Strip the angle brackets from the envelope sender.
        self.CUSTOMFROM = f
        self.CUSTOMFROM = self.CUSTOMFROM.replace("<", "")
        self.CUSTOMFROM = self.CUSTOMFROM.replace(">", "")
        self.sender = self.CUSTOMFROM
        # Fresh buffers for header and body of this message.
        self.fph = StringIO.StringIO()
        self.fpb = StringIO.StringIO()
    except Exception:
        log = self.uuid + " Uncaught Exception in EnvFrom"
        self.logger.writeLog(syslog.LOG_ERR, "%s" % str(log))
    return milter.CONTINUE  # ALWAYS continue to gather the entire email
def build(cls, unit: Unit, unique_name: str, build_dir: Optional[str],
          target_platform=DummyPlatform(),
          do_compile=True) -> "BasicRtlSimulatorVcd":
    """
    Create a pycocotb.basic_hdl_simulator based simulation model
    for specified unit and load it to python

    :param unit: interface level unit which you wont prepare for simulation
    :param unique_name: unique name for build directory and python module with simulator
    :param target_platform: target platform for this synthesis
    :param build_dir: directory to store sim model build files,
        if None sim model will be constructed only in memory
    """
    if unique_name is None:
        unique_name = unit._getDefaultName()
    _filter = SerializerFilterDoNotExclude()
    if build_dir is None or not do_compile:
        # In-memory build: serialize the generated model into a string buffer.
        buff = StringIO()
        store_man = SaveToStream(SimModelSerializer, buff, _filter=_filter)
    else:
        # On-disk build: write the model under <build_dir>/<unique_name>.
        if not os.path.isabs(build_dir):
            build_dir = os.path.join(os.getcwd(), build_dir)
        build_private_dir = os.path.join(build_dir, unique_name)
        store_man = SaveToFilesFlat(SimModelSerializer,
                                    build_private_dir,
                                    _filter=_filter)
        store_man.module_path_prefix = unique_name
    # Run synthesis/serialization through the chosen store manager.
    to_rtl(unit, name=unique_name, target_platform=target_platform,
           store_manager=store_man)
    if build_dir is not None:
        d = build_dir
        dInPath = d in sys.path
        # Temporarily extend sys.path so the generated package is importable.
        if not dInPath:
            sys.path.insert(0, d)
        if unique_name in sys.modules:
            # Drop any previously imported simulator so the fresh build is loaded.
            del sys.modules[unique_name]
        simModule = importlib.import_module(unique_name + "." + unique_name,
                                            package='simModule_' + unique_name)
        if not dInPath:
            sys.path.pop(0)
    else:
        # No files were written: exec the serialized model into a synthetic module.
        simModule = ModuleType('simModule_' + unique_name)
        # python supports only ~100 opened brackets
        # if exceeded it throws MemoryError: s_push: parser stack overflow
        exec(buff.getvalue(), simModule.__dict__)
    model_cls = simModule.__dict__[unit._name]
    # can not use just function as it would get bounded to class
    return cls(model_cls, unit)
def recover_bitbake(self):
    """Rebuild the bitbake workspace in a fresh ``recover`` directory,
    but only when every required 7z file is available."""
    self.is_recovered_bitbake = True
    self.mail_content_recover = StringIO()
    if not self.is_all_7zfiles_exist:
        # Nothing to recover from; keep the (empty) recovery report.
        return
    os.mkdir("recover")
    os.chdir("recover")
    self.prepare_workspace()
    os.chdir("..")
def test_newline_crlf():
    """With newline set to CRLF, every LF in the initial value is translated
    to CRLF, both for read() and for line iteration."""
    sio = StringIO(u"a\nb\r\nc\rd", newline="\r\n")
    assert sio.read() == u"a\r\nb\r\r\nc\rd"
    sio.seek(0)
    assert [ln for ln in sio] == [u"a\r\n", u"b\r\r\n", u"c\rd"]
def captured_output():
    """Generator-based context manager that swaps sys.stdout/sys.stderr for
    in-memory buffers and yields them; the originals are always restored."""
    buf_out, buf_err = StringIO(), StringIO()
    saved_out, saved_err = sys.stdout, sys.stderr
    try:
        sys.stdout, sys.stderr = buf_out, buf_err
        yield sys.stdout, sys.stderr
    finally:
        # Restore even if the body raised.
        sys.stdout, sys.stderr = saved_out, saved_err
def do_print(self, m, print_values=False):
    """Render *m* through this visitor and return the accumulated text.

    Resets the indentation and output buffer, records whether values should
    be printed, then lets *m* drive the visitor via its accept() hook.
    """
    self.print_values = print_values
    self.ind = ""
    self.out = StringIO()
    m.accept(self)
    return self.out.getvalue()
def test_simplest_create(self):
    """Build a minimal in-memory UCIS database (one DU scope, one instance,
    one covergroup with a single coverpoint/bin), serialize it to XML, and
    validate the XML against the UCIS schema."""
    print("-- test_simplest_create")
    ucisdb = "file.ucis"
    db = MemFactory.create()
    # History node records the test run that produced this coverage data.
    testnode = db.createHistoryNode(None, "logicalName", ucisdb, UCIS_HISTORYNODE_TEST)
    td = TestData(teststatus=UCIS_TESTSTATUS_OK,
                  toolcategory="UCIS:simulator",
                  date="20200202020")
    testnode.setTestData(td)
    file = db.createFileHandle("dummy", os.getcwd())
    srcinfo = SourceInfo(file, 0, 0)
    # Design-unit scope with all coverage kinds enabled.
    du = db.createScope(
        "foo.bar",
        srcinfo,
        1,  # weight
        UCIS_OTHER,
        UCIS_DU_MODULE,
        UCIS_ENABLED_STMT | UCIS_ENABLED_BRANCH | UCIS_ENABLED_COND
        | UCIS_ENABLED_EXPR | UCIS_ENABLED_FSM | UCIS_ENABLED_TOGGLE
        | UCIS_INST_ONCE | UCIS_SCOPE_UNDER_DU)
    instance = db.createInstance(
        "dummy",
        None,  # sourceinfo
        1,  # weight
        UCIS_OTHER,
        UCIS_INSTANCE,
        du,
        UCIS_INST_ONCE)
    cg = instance.createCovergroup(
        "cg",
        SourceInfo(file, 3, 0),
        1,  # weight
        UCIS_OTHER)
    cp = cg.createCoverpoint(
        "t",
        SourceInfo(file, 4, 0),
        1,  # weight
        UCIS_VLOG)
    cp.setComment("Hello There")
    cp.createBin("auto[a]", SourceInfo(file, 4, 0), 1, 4, "a")
    # Serialize to XML and ensure the result validates against the schema.
    out = StringIO()
    writer = XmlWriter()
    writer.write(out, db)
    print("XML Output:\n" + out.getvalue())
    input = StringIO(out.getvalue())
    validate_ucis_xml(input)
def disabled_test_lib_dump(self):
    """Round-trip a libucis-backed database through the XML writer
    (currently disabled, hence the name prefix)."""
    LibFactory.load_ucis_library("libucis.so")
    db = LibFactory.create("file.ucis")
    xml_out = StringIO()
    writer = XmlWriter()
    writer.write(xml_out, db)
    xml_in = StringIO(xml_out.getvalue())
def test1get_id(self):
    """Test if get_id works properly."""
    saved_stderr = sys.stderr
    captured = StringIO()
    # DebugModelElement announces itself on stderr at construction time.
    sys.stderr = captured
    debug_me = DebugModelElement(self.id_)
    self.assertEqual(debug_me.get_id(), self.id_)
    self.assertEqual("DebugModelElement %s added\n" % self.id_, captured.getvalue())
    sys.stderr = saved_stderr
def test_read_binary():
    """Reading back a string of raw byte-valued characters must be lossless.

    The data is taken from a test_imghdr test for a GIF file.
    """
    buf_in = (u'\x47\x49\x46\x38\x39\x61\x10\x00\x10\x00\xf6\x64\x00\xeb'
              u'\xbb\x18\xeb\xbe\x21\xf3\xc1\x1a\xfa\xc7\x19\xfd\xcb\x1b'
              u'\xff\xcc\x1c\xeb')
    assert len(buf_in) == 32
    assert StringIO(buf_in).read(32) == buf_in
def wrapper(*args, **kwargs):
    """Run the wrapped *function* under memory_profiler, print the captured
    profile, and return the function's result unchanged."""
    profile_stream = StringIO()
    profiled = memory_profiler.profile(func=function, stream=profile_stream, precision=4)
    result = profiled(*args, **kwargs)
    print(profile_stream.getvalue())
    profile_stream.close()
    return result
def test2get_child_elements(self):
    """Test if get_child_elements returns None."""
    saved_stderr = sys.stderr
    captured = StringIO()
    # Construction logs to stderr; capture it for the assertion below.
    sys.stderr = captured
    debug_me = DebugModelElement(self.id_)
    self.assertEqual(debug_me.get_child_elements(), None)
    self.assertEqual("DebugModelElement %s added\n" % self.id_, captured.getvalue())
    sys.stderr = saved_stderr
def setUp(self):
    """Route the module's logger into an in-memory stream for assertions.

    Any handlers left over from previous tests are removed so only the
    fresh StreamHandler receives records.
    """
    self.stream = StringIO()
    self.handler = logging.StreamHandler(self.stream)
    self.log = logging.getLogger(module.__name__)
    # Iterate over a copy: removeHandler() mutates log.handlers, and the
    # original `for handler in self.log.handlers` skipped every other
    # handler whenever more than one was attached.
    for handler in list(self.log.handlers):
        self.log.removeHandler(handler)
    self.log.addHandler(self.handler)
def setUp(self):
    """Redirect stdout"""
    self.out = StringIO()
    sys.stdout = self.out
    # Evaluation keyed on two string columns, split after the first.
    evaluation = IPETEvaluation(index="stringA stringB", indexsplit=1)
    self.numericcolumn = IPETEvaluationColumn(origcolname="numeric")
    evaluation.addColumn(self.numericcolumn)
    evaluation.addColumn(IPETEvaluationColumn(origcolname="Status", active="True", formatstr="%20s"))
    evaluation.addFilterGroup(IPETFilterGroup(name="FilterGroup"))
    self.ev = evaluation
def get_coverage_report(details=False) -> str:
    """Returns a textual coverage report of all covergroups"""
    buf = StringIO()
    formatter = TextCoverageReportFormatter(get_coverage_report_model(), buf)
    formatter.details = details
    formatter.report()
    return buf.getvalue()
def test_pprintInterface(self):
    """pprintInterface output for a bare clock signal and a full AXI bundle."""
    unit = ExampleWithArrayAxi3Lite()
    self.prepareUnit(unit)
    clk_out = StringIO()
    pprintInterface(unit.clk, file=clk_out)
    self.assertEqual(clk_out.getvalue(), "'clk'\n")
    axi_out = StringIO()
    pprintInterface(unit.axi, file=axi_out)
    self.assertEqual(axi_out.getvalue(), axi_str)
def traverse_recipes(self):
    """Walk the 'enb' package recipes: check 7z archives for target recipes
    and drop source recipes from the SCS bookkeeping."""
    self.is_all_7zfiles_exist = True
    self.mail_content_7zfiles = StringIO()
    self.all_7zfiles = []
    enb_packages = (p for p in self.bitbake_recipes if p["package_name"] == "enb")
    for package in enb_packages:
        for recipe in package["recipes"]:
            kind = recipe["type"]
            if kind == "target":
                # Archive names are lower-case with underscores stripped.
                self.check_all_7zfiles_exist(recipe['name'].lower().replace("_", ""))
            elif kind == "source":
                self.remove_from_scs(recipe['name'])
def test_getstate():
    """__getstate__ yields a 4-tuple (buffer, readnl, pos, dict) and fails
    once the object is closed."""
    f = StringIO()
    state = f.__getstate__()
    assert len(state) == 4
    buf, readnl, pos, extra = state
    assert isinstance(buf, unicode)
    assert isinstance(readnl, str)
    assert isinstance(pos, int)
    assert extra is None or isinstance(extra, dict)
    f.close()
    raises(ValueError, f.__getstate__)
def setUp(self):
    """Load the aminer test configuration, wire a StreamPrinterEventHandler
    to an in-memory stream, and start with an empty persistence directory."""
    self.aminer_config = AMinerConfig.load_config(self.__configFilePath)
    self.analysis_context = AnalysisContext(self.aminer_config)
    self.output_stream = StringIO()
    self.stream_printer_event_handler = StreamPrinterEventHandler(
        self.analysis_context, self.output_stream)
    # Wipe any state a previous test run left behind, then recreate the dir.
    persistence_dir = AMinerConfig.build_persistence_file_name(self.aminer_config)
    if os.path.exists(persistence_dir):
        shutil.rmtree(persistence_dir)
    if not os.path.exists(persistence_dir):
        os.makedirs(persistence_dir)
def downloadprepros(masukan):
    """Serve the given rows (one string per line) as a downloadable CSV
    attachment named test.csv."""
    buf = StringIO()
    for row in masukan:
        print('manja', row)
        buf.write(row)
        buf.write('\n')
    buf.flush()
    # Rewind so the wrapper streams the file from the beginning.
    buf.seek(0)
    response = HttpResponse(FileWrapper(buf), content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=test.csv'
    return response
class FormatTest(unittest.TestCase):
    """
    Test if different format specifiers are correctly printed
    even if the desired data has non-numerical columns
    """

    # Format strings exercised one by one in testFormat.
    fstrings = ["%.1f", "%.3f", "%.5f", "%.9f",
                "%16.5f", "%21.5f", "%28.5f", "%12.5f",
                "%9.5g", "%12.1g", "%9g",
                ]

    def setUp(self):
        """Redirect stdout"""
        self.out = StringIO()
        sys.stdout = self.out
        # Evaluation keyed on two string columns, split after the first;
        # one numeric column plus a string-formatted Status column.
        self.ev = IPETEvaluation(index="stringA stringB", indexsplit=1)
        self.numericcolumn = IPETEvaluationColumn(origcolname="numeric")
        self.ev.addColumn(self.numericcolumn)
        self.ev.addColumn(IPETEvaluationColumn(origcolname="Status", active="True", formatstr="%20s"))
        self.ev.addFilterGroup(IPETFilterGroup(name="FilterGroup"))

    def tearDown(self):
        """Close the String IO object"""
        self.out.close()
        sys.stdout = sys.__stdout__

    def testFormat(self):
        """Test all fstrings """
        for f in self.fstrings:
            self.numericcolumn.editAttribute("formatstr", f)
            ret, _ = self.ev.evaluate(HelperExperiment())
            self.ev.streamDataFrame(ret, "Test", "stdout")
            # scan output and check if the formatted value is in there
            container = self.out.getvalue()
            # NOTE(review): 'val' is not defined in this method or class as
            # shown here — presumably a module-level constant holding the
            # numeric test value; verify it exists at module scope.
            mem = " {} ".format(f % val)
            msg = "Expected formatted number '{}' in output \n{}\n".format(mem, container)
            self.assertIn(mem, container, msg)
def useStrIO():
    """Write a few greeting lines into an in-memory buffer and hand the
    buffer to printStrIO.  (Comments translated from Chinese.)"""
    buf = StringIO()
    buf.write('hello,world!')
    buf.write('\n')
    buf.write('hello,BeiJing!')
    # print(buf.getvalue())  # would fetch everything written so far
    printStrIO(buf)
def test_simple_dump(self):
    """Sample two instances of a two-coverpoint covergroup, save the
    collected coverage into an in-memory UCIS database, serialize it to
    XML, and validate the XML against the UCIS schema."""
    @vsc.covergroup
    class my_covergroup(object):

        def __init__(self):
            # Sampling signature: two 8-bit values a and b.
            self.with_sample(dict(
                a=vsc.uint8_t(),
                b=vsc.uint8_t()
            ))
            # One auto-partitioned bin array over [1,8] per coverpoint.
            self.a_cp = vsc.coverpoint(self.a, bins=dict(
                a_bins = vsc.bin_array([], [1,8])
            ))
            self.b_cp = vsc.coverpoint(self.b, bins=dict(
                b_bins = vsc.bin_array([], [1,8])
            ))

    cg_1 = my_covergroup()
    cg_1.sample(1, 2)
    cg_1.sample(2, 1)
    cg_1.sample(4, 2)
    cg_2 = my_covergroup()
    cg_2.sample(5, 4)
    cg_2.sample(6, 2)
    cg_2.sample(7, 8)
    # Persist all registered covergroup types into an in-memory UCIS DB.
    db = MemFactory.create()
    v = CoverageSaveVisitor(db)
    td = TestData(
        teststatus=UCIS_TESTSTATUS_OK,
        toolcategory="UCIS:simulator",
        date="20200101132000")
    v.save(td, CoverageRegistry.inst().covergroup_types())
    db.close()
    # Serialize to XML and check that the output validates.
    out = StringIO()
    writer = XmlWriter()
    writer.write(out, db)
    print("Output:\n" + out.getvalue())
    xmlin = StringIO(out.getvalue())
    XmlReader.validate(xmlin)
def test_stringio():
    """Basic write/getvalue behaviour, and read() of an initial value."""
    buf = StringIO()
    buf.write(u'Hello ')
    buf.write(u'world')
    assert buf.getvalue() == u'Hello world'
    assert StringIO(u"hello").read() == u'hello'
def packtabs(self, s):
    """Compress trailing spaces in each 8-column cell of *s* into one tab.

    The string is processed in fixed 8-character cells; a cell that ends in
    one or more spaces is replaced by its stripped text plus a single tab,
    any other cell is copied verbatim.
    """
    from _io import StringIO
    out = StringIO()
    for start in range(0, len(s), 8):
        cell = s[start:start + 8]
        stripped = cell.rstrip(" ")
        if cell != stripped:
            # Spaces at the end of a cell: collapse the run into one tab.
            out.write(stripped + "\t")
        else:
            out.write(cell)
    return out.getvalue()
def expandtabs(s):
    """Replace each TAB in *s* with spaces up to the next 8-column tab stop.

    Unlike str.expandtabs, the column counter is not reset at newlines.
    """
    from _io import StringIO
    if '\t' not in s:
        # Fast path: nothing to expand.
        return s
    out = StringIO()
    col = 0
    for ch in s:
        if ch == '\t':
            pad = 8 - col % 8
            out.write(" " * pad)
            col += pad
        else:
            out.write(ch)
            col += 1
    return out.getvalue()
def expandtabs(s):
    """Expand TAB characters to 8-column tab stops.

    Columns are counted from the start of the whole string (no reset on
    newlines); strings without tabs are returned unchanged.
    """
    from _io import StringIO
    if "\t" in s:
        buf = StringIO()
        pos = 0
        for ch in s:
            if ch == "\t":
                # Fill up to the next multiple-of-8 column.
                fill = 8 - pos % 8
                buf.write(" " * fill)
                pos += fill
            else:
                buf.write(ch)
                pos += 1
        return buf.getvalue()
    return s
def getTagAcessorias(self):
    """Return a formatted string with the accessory tag attributes to be
    included in elements.

    This default implementation can likely be reused by all descendant
    widgets.  (Docstring translated from Portuguese.)
    """
    result = StringIO()
    # Idiom fix: identity comparison with None (PEP 8) instead of `!= None`.
    if self.autofocus is not None:
        result.write("autofocus ")
    if self.title is not None:
        result.write("title='%s' " % self.title)
    if self.hidden is not None:
        result.write("hidden ")
    if self.tabindex is not None:
        result.write("tabindex='%d' " % self.tabindex)
    event_tag = result.getvalue()
    result.close()
    return event_tag
def _test_pprintAgent(self, intf, expectedStr):
    """Compare pprintAgents output for *intf* with *expectedStr*, ignoring
    object addresses.

    Bug fix: the original called ``pointerRe.sub(o.getvalue(), "")`` —
    ``Pattern.sub(repl, string)`` takes the replacement first, so the
    captured text was used as the replacement on an empty string and both
    sides of the assertion were always ``""``.  The hex addresses are now
    actually stripped from both strings before comparison.
    """
    pointer_re = re.compile("0x[a-f0-9]*")
    buf = StringIO()
    pprintAgents(intf, file=buf)
    self.assertEqual(pointer_re.sub("", buf.getvalue()),
                     pointer_re.sub("", expectedStr))