def test_newline_none():
    sio = StringIO(u"a\nb\r\nc\rd", newline=None)
    res = list(sio)
    assert res == [u"a\n", u"b\n", u"c\n", u"d"]
    sio.seek(0)
    res = sio.read(1)
    assert res == u"a"
    res = sio.read(2)
    assert res == u"\nb"
    res = sio.read(2)
    assert res == u"\nc"
    res = sio.read(1)
    assert res == u"\n"

    sio = StringIO(newline=None)
    res = sio.write(u"a\n")
    assert res == 2
    res = sio.write(u"b\r\n")
    assert res == 3
    res = sio.write(u"c\rd")
    assert res == 3
    sio.seek(0)
    res = sio.read()
    assert res == u"a\nb\nc\nd"

    sio = StringIO(u"a\r\nb", newline=None)
    res = sio.read(3)
    assert res == u"a\nb"
def test_stringio():
    sio = StringIO()
    sio.write(u'Hello ')
    sio.write(u'world')
    assert sio.getvalue() == u'Hello world'
    assert StringIO(u"hello").read() == u'hello'
def test_newline_empty():
    sio = StringIO(u"a\nb\r\nc\rd", newline="")
    res = list(sio)
    assert res == [u"a\n", u"b\r\n", u"c\r", u"d"]
    sio.seek(0)
    res = sio.read(4)
    assert res == u"a\nb\r"
    res = sio.read(2)
    assert res == u"\nc"
    res = sio.read(1)
    assert res == u"\r"

    sio = StringIO(newline="")
    res = sio.write(u"a\n")
    assert res == 2
    res = sio.write(u"b\r")
    assert res == 2
    res = sio.write(u"\nc")
    assert res == 2
    res = sio.write(u"\rd")
    assert res == 2
    sio.seek(0)
    res = list(sio)
    assert res == [u"a\n", u"b\r\n", u"c\r", u"d"]
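# Illustrative sketch (not part of the tests above), assuming Python 3's
# io.StringIO: newline=None translates "\r" and "\r\n" to "\n" when reading,
# while newline="" keeps the text untranslated -- the behaviour the two
# tests above assert on.
import io

sio = io.StringIO("a\nb\r\nc\rd", newline=None)
assert sio.read() == "a\nb\nc\nd"        # universal-newline translation

sio = io.StringIO("a\nb\r\nc\rd", newline="")
assert sio.read() == "a\nb\r\nc\rd"      # newlines preserved as written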
def loadFile(self):
    # path = os.path.join('/docs/github/Opal/src/ui/view/opalview', 'bookInfo.html')
    out = StringIO()
    htmlhandler = rt.RichTextHTMLHandler()
    buffer = self.rtc.GetBuffer()
    # htmlhandler.SetFlags(rt.RICHTEXT_HANDLER_SAVE_IMAGES_TO_MEMORY)
    htmlhandler.SetFontSizeMapping([7, 9, 11, 12, 14, 22, 100])
    logger.debug('canload: %s', htmlhandler.CanLoad())
    logger.debug('cansave: %s', htmlhandler.CanSave())
    logger.debug('CanHandle: %s', htmlhandler.CanHandle('bookInfo.html'))
    rt.RichTextBuffer.AddHandler(htmlhandler)
    # buffer.AddHandler(htmlhandler)
    try:
        if self.book is not None:
            out.write(self.book.bookDescription)
            out.seek(0)
    except Exception as e:
        logger.error(e)
    # htmlhandler.LoadStream(buffer, out)
    # htmlhandler.LoadFile(path, 'text')
    if self.book is not None and self.book.bookDescription is not None:
        self.rtc.AppendText(self.book.bookDescription)
    # htmlhandler.LoadStream(buffer, out.getvalue())
    self.rtc.Refresh()
def test_truncate():
    s = u"1234567890"
    sio = StringIO(s)

    raises(ValueError, sio.truncate, -1)
    sio.seek(6)
    res = sio.truncate()
    assert res == 6
    assert sio.getvalue() == s[:6]
    res = sio.truncate(4)
    assert res == 4
    assert sio.getvalue() == s[:4]
    # truncate() accepts long objects
    res = sio.truncate(4L)
    assert res == 4
    assert sio.getvalue() == s[:4]
    assert sio.tell() == 6
    sio.seek(0, 2)
    sio.write(s)
    assert sio.getvalue() == s[:4] + s
    pos = sio.tell()
    res = sio.truncate(None)
    assert res == pos
    assert sio.tell() == pos
    raises(TypeError, sio.truncate, '0')
    sio.close()
    raises(ValueError, sio.truncate, 0)
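# Illustrative sketch (assumes Python 3's io.StringIO, where the same
# semantics apply minus the long-integer case): truncate() cuts the buffer
# at the given size but does not move the stream position.
import io

sio = io.StringIO("1234567890")
sio.seek(6)
assert sio.truncate() == 6        # truncate at the current position
assert sio.getvalue() == "123456"
assert sio.truncate(4) == 4       # truncate at an explicit size
assert sio.getvalue() == "1234"
assert sio.tell() == 6            # position is left untouched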
def packtabs(self, s):
    from _io import StringIO
    sb = StringIO()
    for i in range(0, len(s), 8):
        c = s[i:i + 8]
        cr = c.rstrip(" ")
        if c != cr:
            sb.write(cr + "\t")  ## Spaces at the end of a section
        else:
            sb.write(c)
    return sb.getvalue()
def downloadprepros(masukan):
    f = StringIO()
    for a in masukan:
        print('manja', a)
        f.write(a)
        f.write('\n')
    f.flush()
    f.seek(0)
    response = HttpResponse(FileWrapper(f), content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=test.csv'
    return response
def test_overseek():
    s = u"1234567890"
    sio = StringIO(s)

    res = sio.seek(11)
    assert res == 11
    res = sio.read()
    assert res == u""
    assert sio.tell() == 11
    assert sio.getvalue() == s
    sio.write(u"")
    assert sio.getvalue() == s
    sio.write(s)
    assert sio.getvalue() == s + u"\0" + s
def expandtabs(s):
    from _io import StringIO
    if '\t' in s:
        sb = StringIO()
        pos = 0
        for c in s:
            if c == '\t':  ## tab is seen
                sb.write(" " * (8 - pos % 8))  ## replace by space
                pos += 8 - pos % 8
            else:
                sb.write(c)
                pos += 1
        return sb.getvalue()
    else:
        return s
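# Illustrative usage sketch for the expandtabs() helper above (assumes the
# function is in scope). For input without newlines it agrees with
# str.expandtabs(8); unlike str.expandtabs, the helper does not reset its
# column counter at "\n" or "\r".
text = "a\tbb\tccc\td"
assert expandtabs(text) == text.expandtabs(8)
print(repr(expandtabs(text)))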
def submitWait(self, jcl, wait=30):
    """
    :param jcl: dataset or pds(member) containing JCL to submit
    :param wait: wait time in seconds until function is to return
    :return Job: Job object containing information on Job submitted or None

    >>> fz=ftptoolz.Ftpzos('zibm','mm','pw',verbose=2)
    >>> j=fz.submitWait("'mm.jobs(copyy)'")
    >>> j.cc
    'AE37'
    """
    j = None
    f = StringIO()  # py2/3
    for line in self.getlines(jcl):
        f.write(line + '\n')
    f.seek(0)
    self.ftp.sendcmd('SITE file=jes')  # switch to Spool mode
    try:
        if PY3:
            # convert to latin1 (iso-8859-1) byte string
            f = BytesIO(f.read().encode('latin1'))
        fresp = self.ftp.storlines('STOR myjob.seq', f)
        if fresp.startswith('250-'):
            jobid = fresp.split()[6]
            if jobid.startswith('JOB') and len(jobid) == 8:
                j = Job(jobid, jcl)
    finally:
        self.ftp.sendcmd('SITE file=seq')  # switch to File mode
    if not j:
        return j
    for i in range(wait):
        js, sp = self.listjob(j.jobid)
        j.status = js.status
        j.jobstatus = js
        if js.status == 'OUTPUT':
            j.cc = js.cc
            j.spoolfiles = sp
            break
        elif js.status == 'ACTIVE':
            j.cputime = js.cputime
            j.elapsed = js.elapsed
        time.sleep(1.)
    return j
def expandtabs(s):
    from _io import StringIO
    if "\t" in s:
        sb = StringIO()
        pos = 0
        for c in s:
            if c == "\t":
                sb.write(" " * (8 - pos % 8))
                pos += 8 - pos % 8
            else:
                sb.write(c)
                pos += 1
        return sb.getvalue()
    else:
        return s
def func4():
    """
    StringIO, as the name suggests, reads and writes str in memory.
    """
    f = StringIO("可以这样初始化#\t#\t")  # initial value ("can be initialized like this")
    # f = StringIO()
    f.write("HelloWorld!")  # later writes overwrite the initial content from position 0
    print(f.getvalue())  # getvalue() returns the str written so far

    """
    StringIO only handles str; to work with binary data, use BytesIO instead.
    """
    fb = BytesIO()
    # fb = BytesIO(b'\xe4\xb8\xad\xe6\x96\x87')  # can also be initialized like this
    fb.write("测试中文".encode(encoding='utf_8'))  # encode Chinese text as UTF-8
    print(fb.getvalue())
def useStrIO():
    s = StringIO()
    s.write('hello,world!')
    s.write('\n')
    s.write('hello,BeiJing!')
    # print(s.getvalue())  # get everything written so far
    printStrIO(s)
def get_query_texts(self, file_basenames):
    '''
    Read all queries in files within Query. Return
    a dict {table_name : "the query text"}
    Leave out lines with sharp char (comment) at the start

    @param file_basenames: names of query file names in Queries (not full paths)
    @type file_basenames: [str]
    @return: dictionary mapping table names to the SQL text that creates them
    @rtype: {str : str}
    '''
    full_query_paths = [os.path.join(self.query_dir, file_name)
                        for file_name in file_basenames
                        if file_name.endswith('.sql')]
    text_dict = {}
    for query_path in full_query_paths:
        # Table name is name of file without extension:
        table_name = os.path.splitext(os.path.basename(query_path))[0]
        with open(query_path, 'r') as fd:
            in_buf = StringIO(fd.read())
        # Discard comments with hash char at start of line:
        out_buf = StringIO()
        for line in in_buf:
            if line[0] == '#':
                continue
            out_buf.write(line)
        # Store the entire query file content
        # in the value of the table dict:
        text_dict[table_name] = out_buf.getvalue()
    return text_dict
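# Illustrative sketch of the comment-stripping idea above in isolation
# (made-up SQL text): a StringIO can be iterated line by line just like a
# file object.
from io import StringIO

sql_text = "# header comment\nSELECT *\nFROM t\n"
out_buf = StringIO()
for line in StringIO(sql_text):
    if line[0] == '#':      # drop whole-line comments
        continue
    out_buf.write(line)
assert out_buf.getvalue() == "SELECT *\nFROM t\n"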
def getTagAcessorias(self):
    """
    Returns a formatted string with the accessory tags to be included in
    the elements. This default implementation can likely be reused by all
    descendant widgets.
    """
    result = StringIO()
    if self.autofocus is not None:
        result.write("autofocus ")
    if self.title is not None:
        result.write("title='%s' " % self.title)
    if self.hidden is not None:
        result.write("hidden ")
    if self.tabindex is not None:
        result.write("tabindex='%d' " % self.tabindex)
    event_tag = result.getvalue()
    result.close()
    return event_tag
def test_newline_property():
    sio = StringIO(newline=None)
    assert sio.newlines is None
    sio.write(u"a\n")
    assert sio.newlines == "\n"
    sio.write(u"b\r\n")
    assert sio.newlines == ("\n", "\r\n")
    sio.write(u"c\rd")
    assert sio.newlines == ("\r", "\n", "\r\n")
def test_newline_property():
    sio = StringIO(newline=None)
    assert sio.newlines is None
    sio.write(u"a\n")
    assert sio.newlines == "\n"
    sio.write(u"b\r\n")
    assert sio.newlines == ("\n", "\r\n")
    sio.write(u"c\rd")
    assert sio.newlines == ("\r", "\n", "\r\n")
    exc = raises(TypeError, StringIO, newline=b'\n')
    assert 'bytes' in str(exc.value)
def hasilCSV(request):
    if request.method == 'POST':
        name = request.FILES['fileInput'].name
        typeFile = name.split('.')[1]
        if typeFile == 'txt':
            reader = TextIOWrapper(request.FILES['fileInput'].file, encoding='utf-8')
        elif typeFile == 'csv':
            try:
                text = TextIOWrapper(request.FILES['fileInput'].file, encoding='utf-8')
                reader = csv.reader(text)
            except:
                text = StringIO(request.FILES['fileInput'].file.read().decode())
                reader = csv.reader(text)
        arrData = []
        for line in reader:
            line = ''.join(line)
            arrData.append(line)
        myfile = StringIO()
        metode = request.POST['metode']
        statusFormalisasi = request.POST.get('formalisasi', False)
        if metode == 'EDR':
            for line in arrData:
                hasil = F_EDR(line)
                myfile.write(hasil + os.linesep)
        elif metode == 'ED':
            for line in arrData:
                hasil = F_ED(line)
                myfile.write(hasil + os.linesep)
        elif metode == 'BG':
            for line in arrData:
                hasil = F_BG(line)
                myfile.write(hasil + os.linesep)
        myfile.flush()
        myfile.seek(0)
        response = HttpResponse(FileWrapper(myfile), content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename=hasil.csv'
        return response
    else:
        return render(request, 'index_preprocess.html', {})
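# Minimal sketch of the download pattern used above (assumes Django's
# HttpResponse; the view name and CSV content are made up for illustration).
# Building the body in a StringIO and handing its value to the response is
# usually enough for small files.
from io import StringIO
from django.http import HttpResponse

def download_csv_sketch(request):
    buf = StringIO()
    buf.write("name,score\n")
    buf.write("alice,10\n")
    response = HttpResponse(buf.getvalue(), content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=result.csv'
    return response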
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Time    : 2018-04-07 21:52
# Author  : MrFiona
# File    : summary_StringIO.py
# Software: PyCharm

try:
    from _io import StringIO
except ImportError:
    from io import StringIO

f = StringIO()
f.write('hello world!!!')
print(f.getvalue(), len(f.getvalue()))
f.write('hello world!!!')
print(f.getvalue(), len(f.getvalue()))

f.truncate(0)
f.write('start write!')
print(f.tell())
print("next:\t", f.getvalue(), len(f.getvalue()))

f_test = StringIO('Hello! Hi! Goodbye!')
print("1:\t", f_test.read())
print(f_test.getvalue())
print(f_test.getvalue())
print("2:\t", f_test.read())
print("3:\t", f_test.read())
print("4:\t", f_test.read())
print("next:\t", f_test.getvalue(), len(f_test.getvalue()))
def get_config(self):
    outstr = StringIO()
    outstr.write("======================================\n")
    outstr.write(" Print Configuration \n")
    outstr.write("======================================\n")
    if hasattr(self, 'labels_per_row'):
        outstr.write(" Labels per row : ")
        outstr.write(str(self.labels_per_row))
        outstr.write("\n")
    if hasattr(self, 'paper_width_mm'):
        outstr.write(" Paper width mm : ")
        outstr.write(str(self.paper_width_mm))
        outstr.write("\n")
    if hasattr(self, 'label_x_offset_mm'):
        outstr.write(" x offset : ")
        outstr.write(str(self.label_x_offset_mm))
        outstr.write("\n")
    if hasattr(self, 'label_y_offset_mm'):
        outstr.write(" y offset : ")
        outstr.write(str(self.label_y_offset_mm))
        outstr.write("\n")
    if hasattr(self, 'label_width_mm') and hasattr(self, 'label_height_mm'):
        outstr.write(" Labels size mm : ")
        outstr.write(str(self.label_width_mm))
        outstr.write(" x ")
        outstr.write(str(self.label_height_mm))
        outstr.write("\n")
    if hasattr(self, 'label_x_gap_mm'):
        outstr.write(" x gap mm : ")
        outstr.write(str(self.label_x_gap_mm))
        outstr.write("\n")
    if hasattr(self, 'label_y_gap_mm'):
        outstr.write(" y gap mm : ")
        outstr.write(str(self.label_y_gap_mm))
        outstr.write("\n")
    outstr.write("======================================\n")
    return outstr.getvalue()
from _io import StringIO, BytesIO

f = open('E:/a.txt', 'r')
for line in f.readlines():
    print(line.strip())
f.close()

# readline and readlines
# Objects like the one returned by open() that have a read() method
# are collectively called file-like objects in Python.
with open('E:/a.txt', 'w') as f:
    f.write("en")

f = StringIO()
f.write("hello")
f.write(" ")
f.write("world")
print(f.getvalue())

b = BytesIO()
b.write("中文".encode(encoding='utf_8', errors='strict'))
data = b.getvalue()
print(data)
def touchmembers(self, pds, membertimes, touchtemplate):
    """Submit TOUCH job to set modification times in members of
    a partitioned dataset.

    :param pds: partitioned dataset name
    :param membertimes: list of (membername, modtime, uid, size) tuples
        modtime is of datetime type or of string 'yyyymmdd.HHMMSS'
    :param touchtemplate: Touch template job skeleton
        (see touchtemplate_sample for further details)
    """
    if len(membertimes) == 0:
        return
    f = StringIO()  # py2/3
    f.write(touchtemplate % pds.upper().strip())
    for m, t, u, s in membertimes:
        # if touchuid/touchdate given as parameter asmdate will only count lines
        if self.verbose:
            print(m, t, u)
        if t:
            if isinstance(t, datetime.datetime):
                ttime = t.strftime('%Y%m%d.%H%M%S')
            else:
                ttime = t  # 'yyyymmdd.HHMMSS'
            f.write('SET DATE=%s\n' % ttime)
        if u:
            f.write('SET USER=%s\n' % u.upper())
        if s:
            f.write('SET LINES=%d\n' % s)
        f.write(m.upper() + '\n')
    f.write('//\n')  # end of job
    f.seek(0)  # rewind
    if self.test:
        print('\nThe following generated TOUCH job is not submitted in test mode:')
        for line in f:
            print(line[:-1])
    else:
        if PY3:
            # convert to latin1 (iso-8859-1) byte string
            f = BytesIO(f.read().encode('latin1'))
        self.ftp.sendcmd('SITE file=jes')  # switch to Spool mode
        self.ftp.storlines('STOR touch.seq', f)
        self.ftp.sendcmd('SITE file=seq')  # switch to File mode
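# Illustrative sketch of the py2/py3 trick used above: text is assembled in a
# StringIO, then wrapped as a latin-1 encoded BytesIO because
# ftplib.FTP.storlines() expects a binary file object on Python 3. No FTP
# connection is opened here; the JCL line is made up for illustration only.
from io import StringIO, BytesIO

text = StringIO()
text.write('//TOUCHJOB JOB (ACCT)\n')   # made-up JCL card
text.write('//\n')
text.seek(0)

binary = BytesIO(text.read().encode('latin1'))
assert binary.read().startswith(b'//TOUCHJOB')
# binary could now be passed to ftp.storlines('STOR touch.seq', binary)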
class BitbakePostCheck(object):
    '''
    classdocs
    '''

    def __init__(self, baseline):
        '''
        Constructor
        '''
        self.logger = Log().getLogger('BitbakePostCheck')
        self.baseline = baseline

    def check_all_7zfiles_exist(self, target_name):
        bitbake_location = self.config.get("bitbakePath")
        bitbake_7zname = self.baseline + "-bs10k-" + target_name + ".7z"
        if not os.path.isfile(os.path.join(bitbake_location, bitbake_7zname)):
            self.logger.error("7z file {} not exist in {}".format(bitbake_7zname, bitbake_location))
            self.mail_content_7zfiles.write("7z file {} not exist in {} \n".format(bitbake_7zname, bitbake_location))
            self.is_all_7zfiles_exist = False
        else:
            self.all_7zfiles.append(os.path.join(bitbake_location, bitbake_7zname))

    def traverse_recipes(self):
        self.is_all_7zfiles_exist = True
        self.mail_content_7zfiles = StringIO()
        self.all_7zfiles = []
        for package in self.bitbake_recipes:
            if package["package_name"] == "enb":
                for recipe in package["recipes"]:
                    if recipe["type"] == "target":
                        self.check_all_7zfiles_exist(recipe['name'].lower().replace("_", ""))
                    elif recipe["type"] == "source":
                        self.remove_from_scs(recipe['name'])

    def remove_from_scs(self, sc_name):
        index = 0
        for sc in self.scs:
            if sc == sc_name:
                del self.scs[index]
                break
            index = index + 1

    def check_all_scs_configed(self):
        self.is_all_scs_configed = True
        ignore_list = self.config.get("ignoreSC", "").split(",")
        for sc in ignore_list:
            self.remove_from_scs(sc)
        if len(self.scs) > 0:
            self.is_all_scs_configed = False
            self.mail_content_scs = "These System Components {} are not configed in bitbake recipes.json".format(self.scs)

    def mails(self):
        self.mail_content = StringIO()
        self.mail_content.write("*This is an automatically generated email. Do not reply.*\n\n\n")
        self.is_need_mail = False
        if not self.is_all_7zfiles_exist:
            self.is_need_mail = True
            self.mail_content.write("**************************Missing 7z files**************************\n")
            self.mail_content.writelines(self.mail_content_7zfiles.getvalue())
            self.mail_content.write("\n\n")
        if not self.is_all_scs_configed:
            self.is_need_mail = True
            self.mail_content.write("**************************Scs not configed**************************\n")
            self.mail_content.write(self.mail_content_scs)
            self.mail_content.write("\n\n")
        if not self.is_recovered_bitbake:
            self.is_need_mail = True
            self.mail_content.write("**************************Recover bitbake**************************\n")
            self.mail_content.writelines(self.mail_content_recover.getvalue())
            self.mail_content.write("\n\n")
        elif not self.is_bitbake_content_fine:
            self.is_need_mail = True
            self.mail_content.write("**************************Recover bitbake**************************\n")
            self.mail_content.writelines(self.mail_content_recover.getvalue())
            self.mail_content.write("\n\n")
        if self.is_need_mail:
            mail = Mail(self.config.get("mail_from"), self.config.get("mail_to"))
            mail.create(self.config.get("mail_subject"), self.mail_content.getvalue())
            mail.send()

    def bitbake_content_check(self):
        self.is_bitbake_content_fine = True
        self.mail_content_bitbake_content = StringIO()
        if self.config.get("bitbakeContentCheck", False):
            package_path = self.config.get("packagePath", "")
            if package_path == "":
                self.logger.warning("packagePath not configed, ignore bitbake content check for {}".format(self.baseline))
                return
            official_hashContainer = os.path.join(package_path, "HashContainer_{}.txt".format(self.baseline))
            self.copy_official_hashContainer_to_current_folder(official_hashContainer)
            self.remove_ignore_lines("HashContainer_{}.txt".format(self.baseline))
            current_hashContainer = "lteDo/package/btssm/bts_sw/HashContainer_{}.txt".format(self.baseline)
            self.remove_ignore_lines(current_hashContainer)
            result = Shell().execute("diff {} {}".format("HashContainer_{}.txt".format(self.baseline), current_hashContainer), errorOuput=True)
            if isinstance(result, list) and result[0] != 0:
                self.is_bitbake_content_fine = False
                self.mail_content_bitbake_content.write(result[1])

    def copy_official_hashContainer_to_current_folder(self, official_path):
        Shell().execute("cp {} .".format(official_path))

    def remove_ignore_lines(self, file):
        Shell().execute("sed -i '/TargetBD/d' {}".format(file))

    def recover_bitbake(self):
        self.is_recovered_bitbake = True
        self.mail_content_recover = StringIO()
        if self.is_all_7zfiles_exist:
            os.mkdir("recover")
            os.chdir("recover")
            self.prepare_workspace()
            os.chdir("..")

    def prepare_workspace(self):
        prepare_script = os.path.join(self.config.get("bbScriptRepo"), "prepare-enb-ws.sh")
        SVN().export(prepare_script)
        if os.system("sh prepare-enb-ws.sh --enb {} --src enb".format(self.baseline)) == 0:
            self.logger.info("Successfully to prepare work space")
            self.package()
        else:
            self.logger.error("Error to recover bitbake with command 'prepare-enb-ws.sh --enb {} --src enb'".format(self.baseline))
            self.is_recovered_bitbake = False
            self.mail_content_recover.write("Error to recover bitbake with below command\n")
            self.mail_content_recover.write("prepare-enb-ws.sh --enb {} --src enb".format(self.baseline))

    def package(self):
        self.extract_7zfiles()
        if Shell().execute("source .property && make linsup.core -j24"):
            self.logger.info("Successfully package with bitbake result")
            self.bitbake_content_check()
        else:
            self.logger.error("Error to package with bitbake result, command is make linsup.core")
            self.is_recovered_bitbake = False
            self.mail_content_recover.write("Error to package with bitbake result for package {}\n".format(self.baseline))
            self.mail_content_recover.write("Package command is make linsup.core")

    def extract_7zfiles(self):
        for file in self.all_7zfiles:
            Shell().execute("7za x {} -yo'{}'".format(file, "lteDo"))
            self.logger.info("Extract {} done".format(file))

    def start(self):
        parse_config = ParseConfig(self.baseline)
        self.bitbake_recipes = parse_config.fetch_bitbake_config()
        self.config = parse_config.fetch_config()
        self.scs = parse_config.fetch_sc_from_package_config()
        self.externals = parse_config.get_externals()
        self.traverse_recipes()
        self.check_all_scs_configed()
        self.recover_bitbake()
        self.mails()
class ModelPrettyPrinter(ModelVisitor):

    def __init__(self):
        self.out = StringIO()
        self.ind = ""
        self.print_values = False

    def do_print(self, m, print_values=False):
        self.ind = ""
        self.print_values = print_values
        self.out = StringIO()
        m.accept(self)
        return self.out.getvalue()

    @staticmethod
    def print(m, print_values=False):
        p = ModelPrettyPrinter()
        return p.do_print(m, print_values)

    def write(self, s):
        self.out.write(s)

    def writeln(self, l):
        self.out.write(self.ind + l + "\n")

    def inc_indent(self):
        self.ind += " " * 4

    def dec_indent(self):
        self.ind = self.ind[4:]

    def visit_constraint_block(self, c: vm.ConstraintBlockModel):
        self.writeln("constraint " + c.name + " {")
        self.inc_indent()
        for stmt in c.constraint_l:
            stmt.accept(self)
        self.dec_indent()
        self.writeln("}")

    def visit_constraint_expr(self, c: ConstraintExprModel):
        self.write(self.ind)
        c.e.accept(self)
        self.write(";\n")

    def visit_constraint_foreach(self, f: ConstraintForeachModel):
        self.write(self.ind + "foreach (")
        f.lhs.accept(self)
        self.write("[i]) {\n")
        self.inc_indent()
        for s in f.constraint_l:
            s.accept(self)
        self.dec_indent()
        self.writeln("}")

    def visit_constraint_if_else(self, c: vm.ConstraintIfElseModel):
        self.write(self.ind + "if (")
        c.cond.accept(self)
        self.write(") {\n")
        self.inc_indent()
        c.true_c.accept(self)
        self.dec_indent()
        if c.false_c is not None:
            self.writeln("} else {")
            self.inc_indent()
            c.false_c.accept(self)
            self.dec_indent()
        self.writeln("}")

    def visit_constraint_implies(self, c: vm.ConstraintImpliesModel):
        self.write(self.ind)
        c.cond.accept(self)
        self.write(" -> {")
        for sc in c.constraint_l:
            sc.accept(self)
        self.write("}\n")

    def visit_expr_array_subscript(self, s):
        s.lhs.accept(self)
        self.write("[")
        s.rhs.accept(self)
        self.write("]")

    def visit_expr_array_sum(self, s):
        ModelVisitor.visit_expr_array_sum(self, s)

    def visit_expr_bin(self, e: vm.ExprBinModel):
        if e.lhs is None or e.rhs is None:
            print("op: " + str(e.op))
        self.write("(")
        e.lhs.accept(self)
        self.write(" " + vm.BinExprType.toString(e.op) + " ")
        e.rhs.accept(self)
        self.write(")")

    def visit_expr_in(self, e: vm.ExprInModel):
        e.lhs.accept(self)
        self.write(" in [")
        for i, r in enumerate(e.rhs.rl):
            r.accept(self)
            if i + 1 < len(e.rhs.rl):
                self.write(", ")
        self.write("]")

    def visit_expr_literal(self, e: vm.ExprLiteralModel):
        self.write(str(int(e.val())))

    def visit_expr_fieldref(self, e: vm.ExprFieldRefModel):
        if self.print_values and hasattr(e.fm, "is_used_rand") and not e.fm.is_used_rand:
            if isinstance(e.fm, FieldArrayModel):
                self.write("[")
                for i, f in enumerate(e.fm.field_l):
                    if i > 0:
                        self.write(", ")
                    self.write(str(int(f.get_val())))
                self.write("]")
            else:
                self.write(str(int(e.fm.get_val())))
        else:
            self.write(e.fm.fullname)

    def visit_expr_unary(self, e: vm.ExprUnaryModel):
        print("PrettyPrinter::visit_expr_unary")
        self.write(UnaryExprType.toString(e.op))
        self.write("(")
        e.expr.accept(self)
        self.write(")")

    def visit_scalar_field(self, f: FieldScalarModel):
        self.write(f.name)
class RuleModelFormatter(RuleModelVisitor):
    """
    Allows to create textual representation of the rules in a `RuleModel`.
    """

    def __init__(self, attributes: List[Attribute], labels: List[Attribute], print_feature_names: bool,
                 print_label_names: bool, print_nominal_values: bool):
        """
        :param attributes: A list that contains the attributes
        :param labels: A list that contains the labels
        :param print_feature_names: True, if the names of features should be printed, False otherwise
        :param print_label_names: True, if the names of labels should be printed, False otherwise
        :param print_nominal_values: True, if the values of nominal values should be printed, False otherwise
        """
        self.print_feature_names = print_feature_names
        self.print_label_names = print_label_names
        self.print_nominal_values = print_nominal_values
        self.attributes = attributes
        self.labels = labels
        self.text = StringIO()

    def visit_empty_body(self, _: EmptyBody):
        self.text.write('{}')

    def __format_conditions(self, num_conditions: int, indices: np.ndarray, thresholds: np.ndarray,
                            operator: str) -> int:
        result = num_conditions
        if indices is not None and thresholds is not None:
            text = self.text
            attributes = self.attributes
            print_feature_names = self.print_feature_names
            print_nominal_values = self.print_nominal_values
            for i in range(indices.shape[0]):
                if result > 0:
                    text.write(' & ')
                feature_index = indices[i]
                threshold = thresholds[i]
                attribute = attributes[feature_index] if len(attributes) > feature_index else None
                if print_feature_names and attribute is not None:
                    text.write(attribute.attribute_name)
                else:
                    text.write(str(feature_index))
                text.write(' ')
                text.write(operator)
                text.write(' ')
                if attribute is not None and attribute.nominal_values is not None:
                    nominal_value = int(threshold)
                    if print_nominal_values and len(attribute.nominal_values) > nominal_value:
                        text.write('"' + attribute.nominal_values[nominal_value] + '"')
                    else:
                        text.write(str(nominal_value))
                else:
                    text.write(str(threshold))
                result += 1
        return result

    def visit_conjunctive_body(self, body: ConjunctiveBody):
        text = self.text
        text.write('{')
        num_conditions = self.__format_conditions(0, body.leq_indices, body.leq_thresholds, '<=')
        num_conditions = self.__format_conditions(num_conditions, body.gr_indices, body.gr_thresholds, '>')
        num_conditions = self.__format_conditions(num_conditions, body.eq_indices, body.eq_thresholds, '==')
        self.__format_conditions(num_conditions, body.neq_indices, body.neq_thresholds, '!=')
        text.write('}')

    def visit_complete_head(self, head: CompleteHead):
        text = self.text
        print_label_names = self.print_label_names
        labels = self.labels
        scores = head.scores
        text.write(' => (')
        for i in range(scores.shape[0]):
            if i > 0:
                text.write(', ')
            if print_label_names and len(labels) > i:
                text.write(labels[i].attribute_name)
            else:
                text.write(str(i))
            text.write(' = ')
            text.write('{0:.2f}'.format(scores[i]))
        text.write(')\n')

    def visit_partial_head(self, head: PartialHead):
        text = self.text
        print_label_names = self.print_label_names
        labels = self.labels
        indices = head.indices
        scores = head.scores
        text.write(' => (')
        for i in range(indices.shape[0]):
            if i > 0:
                text.write(', ')
            label_index = indices[i]
            if print_label_names and len(labels) > label_index:
                text.write(labels[label_index].attribute_name)
            else:
                text.write(str(label_index))
            text.write(' = ')
            text.write('{0:.2f}'.format(scores[i]))
        text.write(')\n')

    def get_text(self) -> str:
        """
        Returns the textual representation that has been created via the `format` method.

        :return: The textual representation
        """
        return self.text.getvalue()
def downloadfile1(allfile, facebookdata):
    panjang = len(allfile)
    if panjang == 3:
        ## make csv and xlsx
        f = StringIO()
        wb = Workbook()
        ws = wb.active
        for row in facebookdata:
            f.write(row.name + ',' + row.status + ',' + str(row.like) + ',' +
                    str(row.comment) + ',' + str(row.share))
            f.write('\n')
            ws.append([row.name, row.status, row.like, row.comment, row.share])
        f.flush()
        f.seek(0)
        ## make json
        g = StringIO()
        data = []
        for row in facebookdata:
            data1 = {}
            data1["NAME"] = row.name
            data1["STATUS"] = row.status
            data1["LIKE"] = row.like
            data1["COMMENT"] = row.comment
            data1["SHARE"] = row.share
            data.append(data1)
        json.dump(data, g, indent=3)
        g.flush()
        g.seek(0)
        output = BytesIO()
        zip = ZipFile(output, 'w')
        zip.writestr("Data_facebook.csv", f.getvalue())
        zip.writestr("Data_facebook.JSON", g.getvalue())
        zip.writestr("Data_facebook.xlsx", save_virtual_workbook(wb))
        zip.close()
        response = HttpResponse(output.getvalue(), content_type='application/octet-stream')
        response['Content-Disposition'] = 'attachment; filename=Data_facebook.zip'
        return response
    elif panjang == 2:
        ## read list
        if '0' in allfile and '1' in allfile:
            ## return csv and json
            print('csv and jsn')
            # make csv
            f = StringIO()
            for row in facebookdata:
                f.write(row.name + ',' + row.status + ',' + str(row.like) + ',' +
                        str(row.comment) + ',' + str(row.share))
                f.write('\n')
            f.flush()
            f.seek(0)
            ## make json
            g = StringIO()
            data = []
            for row in facebookdata:
                data1 = {}
                data1["NAME"] = row.name
                data1["STATUS"] = row.status
                data1["LIKE"] = row.like
                data1["COMMENT"] = row.comment
                data1["SHARE"] = row.share
                data.append(data1)
            json.dump(data, g, indent=3)
            g.flush()
            g.seek(0)
            output = BytesIO()
            zip = ZipFile(output, 'w')
            zip.writestr("Data_facebook.csv", f.getvalue())
            zip.writestr("Data_facebook.JSON", g.getvalue())
            zip.close()
            response = HttpResponse(output.getvalue(), content_type='application/octet-stream')
            response['Content-Disposition'] = 'attachment; filename=Data_facebook.zip'
            return response
        elif '0' in allfile and '2' in allfile:
            # return csv and xls
            print('csv and xls')
            f = StringIO()
            for row in facebookdata:
                f.write(row.name + ',' + row.status + ',' + str(row.like) + ',' +
                        str(row.comment) + ',' + str(row.share))
                f.write('\n')
            f.flush()
            f.seek(0)
            g = StringIO()
            wb = Workbook()
            ws = wb.active
            for row in facebookdata:
                ws.append([row.name, row.status, row.like, row.comment, row.share])
            output = BytesIO()
            zip = ZipFile(output, 'w')
            zip.writestr("Data_facebook.csv", f.getvalue())
            zip.writestr("Data_facebook.xlsx", save_virtual_workbook(wb))
            zip.close()
            response = HttpResponse(output.getvalue(), content_type='application/octet-stream')
            response['Content-Disposition'] = 'attachment; filename=Data_facebook.zip'
            return response
        else:
            # return json and xls
            # make json file
            f = StringIO()
            data = []
            for row in facebookdata:
                data1 = {}
                data1["NAME"] = row.name
                data1["TWEET"] = row.tweet
                data1["RETWEET_USER"] = row.Retweet_user
                data1["HASHTAGS"] = row.hashtag
                data1["DATE"] = str(row.date)
                data.append(data1)
            json.dump(data, f, indent=3)
            f.flush()
            f.seek(0)
            # make xls file
            g = StringIO()
            wb = Workbook()
            ws = wb.active
            for row in facebookdata:
                ws.append([row.name, row.status, row.like, row.comment, row.share])
            output = BytesIO()
            zip = ZipFile(output, 'w')
            zip.writestr("Data_facebook.csv", f.getvalue())
            zip.writestr("Data_facebook.xlsx", save_virtual_workbook(wb))
            zip.close()
            response = HttpResponse(output.getvalue(), content_type='application/octet-stream')
            response['Content-Disposition'] = 'attachment; filename=Data_facebook.zip'
            return response
    elif panjang == 0:
        ## asli (nothing selected)
        print('do nothing')
        return HttpResponseRedirect('../search2')
    else:
        if '0' in allfile:
            f = StringIO()
            writer = csv.writer(f)
            for row in facebookdata:
                writer.writerow([row.name, row.status, row.like, row.comment, row.share])
            f.flush()
            f.seek(0)
            response = HttpResponse(FileWrapper(f), content_type='text/csv')
            response['Content-Disposition'] = 'attachment; filename=Data_facebook.csv'
            return response
        elif '1' in allfile:
            f = StringIO()
            data = []
            for row in facebookdata:
                data1 = {}
                data1["NAME"] = row.name
                data1["STATUS"] = row.status
                data1["LIKE"] = row.like
                data1["COMMENT"] = row.comment
                data1["SHARE"] = row.share
                data.append(data1)
            json.dump(data, f, indent=3)
            f.flush()
            f.seek(0)
            response = HttpResponse(FileWrapper(f), content_type='js/json')
            response['Content-Disposition'] = 'attachment; filename=Data_facebook.json'
            return response
        else:
            f = StringIO()
            wb = Workbook()
            ws = wb.active
            for row in facebookdata:
                ws.append([row.name, row.status, row.like, row.comment, row.share])
            response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.ms-excel')
            response['Content-Disposition'] = 'attachment; filename=Data_facebook.xlsx'
            return response
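# Minimal sketch of the in-memory zip pattern used above, without the Django
# and openpyxl parts (member names and contents are made up): text members
# are built in StringIO, the archive itself in BytesIO, and
# ZipFile.writestr() stores each member from a string.
from io import BytesIO, StringIO
from zipfile import ZipFile

csv_buf = StringIO()
csv_buf.write("name,likes\nalice,10\n")

archive = BytesIO()
with ZipFile(archive, 'w') as zf:
    zf.writestr("data.csv", csv_buf.getvalue())
    zf.writestr("data.json", '[{"name": "alice", "likes": 10}]')

payload = archive.getvalue()   # bytes ready to send as a download
assert payload[:2] == b'PK'    # zip magic number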
class ModelPrettyPrinter(ModelVisitor):

    def __init__(self):
        self.out = StringIO()
        self.ind = ""
        self.print_values = False

    def do_print(self, m, print_values=False, show_exp=False):
        self.ind = ""
        self.print_values = print_values
        self.show_exp = show_exp
        self.out = StringIO()
        m.accept(self)
        return self.out.getvalue()

    @staticmethod
    def print(m, print_values=False, show_exp=False):
        p = ModelPrettyPrinter()
        return p.do_print(m, print_values, show_exp)

    def write(self, s):
        self.out.write(s)

    def writeln(self, l):
        self.out.write(self.ind + l + "\n")

    def inc_indent(self):
        self.ind += " " * 4

    def dec_indent(self):
        self.ind = self.ind[4:]

    def visit_constraint_block(self, c: vm.ConstraintBlockModel):
        self.writeln("constraint " + c.name + " {")
        self.inc_indent()
        for stmt in c.constraint_l:
            stmt.accept(self)
        self.dec_indent()
        self.writeln("}")

    def visit_constraint_dist(self, d: ConstraintDistModel):
        self.write(self.ind)
        d.lhs.accept(self)
        self.write(" dist { ")
        for i in range(len(d.weights)):
            if i > 0:
                self.write(", ")
            d.weights[i].accept(self)
        self.write("}\n")

    def visit_dist_weight(self, w: DistWeightExprModel):
        if w.rng_rhs is not None:
            self.write("[")
            w.rng_lhs.accept(self)
            self.write(":")
            w.rng_rhs.accept(self)
            self.write("]")
        else:
            w.rng_lhs.accept(self)
        self.write(" : ")
        w.weight.accept(self)

    def visit_constraint_expr(self, c: ConstraintExprModel):
        self.write(self.ind)
        c.e.accept(self)
        self.write(";\n")

    def visit_constraint_foreach(self, f: ConstraintForeachModel):
        self.write(self.ind + "foreach (")
        f.lhs.accept(self)
        self.write("[i]) {\n")
        self.inc_indent()
        for s in f.constraint_l:
            s.accept(self)
        self.dec_indent()
        self.writeln("}")

    def visit_constraint_if_else(self, c: vm.ConstraintIfElseModel):
        self.write(self.ind + "if (")
        c.cond.accept(self)
        self.write(") {\n")
        self.inc_indent()
        c.true_c.accept(self)
        self.dec_indent()
        if c.false_c is not None:
            self.writeln("} else {")
            self.inc_indent()
            c.false_c.accept(self)
            self.dec_indent()
        self.writeln("}")

    def visit_constraint_implies(self, c: vm.ConstraintImpliesModel):
        self.write(self.ind)
        c.cond.accept(self)
        self.write(" -> {")
        for sc in c.constraint_l:
            sc.accept(self)
        self.write("}\n")

    def visit_covergroup(self, cg: CovergroupModel):
        self.writeln("covergroup " + cg.name)
        self.inc_indent()
        for cp in cg.coverpoint_l:
            cp.accept(self)
        self.dec_indent()

    def visit_coverpoint(self, cp: CoverpointModel):
        self.writeln("coverpoint " + cp.name)
        self.inc_indent()
        for b in cp.bin_model_l:
            b.accept(self)
        self.dec_indent()

    def visit_coverpoint_bin_array(self, bn: CoverpointBinArrayModel):
        self.writeln("bin_array " + bn.name)

    def visit_coverpoint_bin_collection(self, bn: CoverpointBinCollectionModel):
        self.writeln("bin_collection " + bn.name)
        self.inc_indent()
        for b in bn.bin_l:
            b.accept(self)
        self.dec_indent()

    def visit_coverpoint_bin_single_range(self, bn: CoverpointBinSingleRangeModel):
        self.writeln("bin_single_range " + bn.name + " " +
                     str(bn.target_val_low) + " .. " + str(bn.target_val_high))

    def visit_expr_array_subscript(self, s):
        s.lhs.accept(self)
        self.write("[")
        s.rhs.accept(self)
        self.write("]")

    def visit_expr_array_sum(self, s):
        if self.show_exp:
            s.expr().accept(self)
        else:
            self.write(s.arr.fullname)
            self.write(".sum")

    def visit_expr_bin(self, e: vm.ExprBinModel):
        if e.lhs is None or e.rhs is None:
            print("op: " + str(e.op))
        self.write("(")
        e.lhs.accept(self)
        self.write(" " + vm.BinExprType.toString(e.op) + " ")
        e.rhs.accept(self)
        self.write(")")

    def visit_expr_in(self, e: vm.ExprInModel):
        e.lhs.accept(self)
        self.write(" in [")
        for i, r in enumerate(e.rhs.rl):
            if i > 0:
                self.write(", ")
            r.accept(self)
        self.write("]")

    def visit_expr_literal(self, e: vm.ExprLiteralModel):
        self.write(str(int(e.val())))

    def visit_expr_fieldref(self, e: vm.ExprFieldRefModel):
        if self.print_values and hasattr(e.fm, "is_used_rand") and not e.fm.is_used_rand:
            if isinstance(e.fm, FieldArrayModel):
                self.write("[")
                for i, f in enumerate(e.fm.field_l):
                    if i > 0:
                        self.write(", ")
                    self.write(str(int(f.get_val())))
                self.write("]")
            else:
                self.write(str(int(e.fm.get_val())))
        else:
            self.write(e.fm.fullname)

    def visit_expr_unary(self, e: vm.ExprUnaryModel):
        self.write(UnaryExprType.toString(e.op))
        self.write("(")
        e.expr.accept(self)
        self.write(")")

    def visit_expr_range(self, r):
        r.lhs.accept(self)
        self.write("..")
        r.rhs.accept(self)

    def visit_rangelist(self, r: RangelistModel):
        for re in r.range_l:
            if re[0] == re[1]:
                self.write(str(re[0]))
            else:
                self.write(str(re[0]) + ".." + str(re[1]))

    def visit_scalar_field(self, f: FieldScalarModel):
        self.write(f.name)
def write(self, stream: StringIO = sys.stdout) -> None:
    """write changelog to stream"""
    stream.write(self.format_title())
    for entry in self.entries:
        entry.write(stream)
def write(self, stream: StringIO = sys.stdout) -> None:
    """write the entry to file """
    stream.write("%s -- %s\n" % (self.date or "", self.version or ""))
    for msg, sub_msgs in self.messages:
        stream.write("%s%s %s\n" % (INDENT, BULLET, msg[0]))
        stream.write("".join(msg[1:]))
        if sub_msgs:
            stream.write("\n")
        for sub_msg in sub_msgs:
            stream.write("%s%s %s\n" % (INDENT * 2, SUBBULLET, sub_msg[0]))
            stream.write("".join(sub_msg[1:]))
        stream.write("\n")
    stream.write("\n\n")
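# Illustrative sketch of why these write() methods take a stream argument:
# anything with a write() method works, so the same code can print to the
# console or render into an in-memory StringIO. The class below is a
# made-up stand-in, not the changelog classes above.
import sys
from io import StringIO

class Greeting:
    def write(self, stream=sys.stdout):
        stream.write("hello\n")

buf = StringIO()
Greeting().write(buf)          # capture the output in memory
assert buf.getvalue() == "hello\n"
Greeting().write()             # or write straight to stdout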
'''
@author: xilh
@since: 20200128
'''
from _io import StringIO

from demo.tools.tool import pline

# create the object
f = StringIO()
f.write('hello')
f.write(' ')
f.write('world')
ret = f.getvalue()
print(ret)

pline()

f = StringIO("abcdefghijk")
print(f.read(2))
print(f.readline())
import os
from io import StringIO

try:
    f = open('C:/Users/bingw/Desktop/mygit/gitTest.txt', 'r')
    print(f.read())
    # If the file is small, a single read() is most convenient; if the size is
    # unknown, calling read(size) repeatedly is safer; for config files,
    # readlines() is the most convenient.
finally:
    if f:
        f.close()

# The with statement calls close() for us automatically
with open('C:/Users/bingw/Desktop/mygit/gitTest.txt', 'r') as f:
    print(f.read())

# Read the file content with readlines()
with open('C:/Users/bingw/Desktop/mygit/gitTest.txt', 'r') as f:
    for line in f.readlines():
        print(line.strip())

# StringIO reads and writes str in memory; for binary data use BytesIO,
# whose methods are similar to StringIO's.
s = StringIO()
s.write('hello')
print(s.getvalue())

# Reading from a StringIO
f = StringIO('hello,world')
while True:
    str1 = f.readline()
    if str1 == '':
        break
    print(str1.strip())

# Basic features of the os module
print(os.name)  # OS type: 'posix' means Linux, Unix or Mac OS X; 'nt' means Windows
print(os.environ)  # inspect the environment variables
print(os.environ.get('JAVA_HOME'))  # get the value of a specific environment variable