def writeNodeAndTree(c, word, header_style, level, maxlevel=3, usesections=1, sectionhead="", vnode=None):
    """
    Write a node and its children to Word.

    c: Leo commander. word: the Word automation object.
    header_style: base name of the Word heading style.
    level: current outline depth; capped at maxlevel for styling.
    usesections: when truthy, prefix headlines with dotted section numbers.
    sectionhead: section-number prefix accumulated from ancestors.
    vnode: node to write; defaults to the current node.
    """
    if vnode is None:
        vnode = c.currentVnode()
    # Fix: renamed `dict` -> `d` (shadowed the builtin).
    d = c.scanAllDirectives(p=vnode)
    encoding = d.get("encoding", None)
    # Fix: use `is None` (identity), not `== None`.
    if encoding is None:
        encoding = sys.getdefaultencoding()
    s = vnode.b
    s = g.toEncodedString(s, encoding, reportErrors=True)
    doPara(word, s)
    for i in range(vnode.numberOfChildren()):
        if usesections:
            thishead = "%s%d." % (sectionhead, i + 1)
        else:
            thishead = ""
        child = vnode.nthChild(i)
        h = child.h
        h = g.toEncodedString(h, encoding, reportErrors=True)
        doPara(word, "%s %s" % (thishead, h), "%s %d" % (header_style, min(level, maxlevel)))
        # Recurse, deepening the level and extending the section prefix.
        writeNodeAndTree(c, word, header_style, level + 1, maxlevel, usesections, thishead, child)
def prettyPrintNode(self, p): '''The driver for beautification: beautify a single node.''' # c = self.c if not should_beautify(p): # @nobeautify is in effect. return if not p.b: # Pretty printing might add text! return if not p.b.strip(): # Do this *after* we are sure @beautify is in effect. self.replace_body(p, '') return t1 = time.time() # Replace Leonine syntax with special comments. comment_string, s0 = comment_leo_lines(p) try: s1 = g.toEncodedString(s0) node1 = ast.parse(s1, filename='before', mode='exec') except IndentationError: self.skip_message('IndentationError', p) return except SyntaxError: self.skip_message('SyntaxError', p) return except Exception: g.es_exception() self.skip_message('Exception', p) return t2 = time.time() readlines = g.ReadLinesClass(s0).next tokens = list(tokenize.generate_tokens(readlines)) t3 = time.time() s2 = self.run(tokens) t4 = time.time() try: s2_e = g.toEncodedString(s2) node2 = ast.parse(s2_e, filename='before', mode='exec') ok = compare_ast(node1, node2) except Exception: g.es_exception() g.trace('Error in %s...\n%s' % (p.h, s2_e)) self.skip_message('BeautifierError', p) return if not ok: self.skip_message('BeautifierError', p) return t5 = time.time() # Restore the tags after the compare s3 = uncomment_leo_lines(comment_string, p, s2) self.replace_body(p, s3) # Update the stats self.n_input_tokens += len(tokens) self.n_output_tokens += len(self.code_list) self.n_strings += len(s3) self.parse_time += (t2 - t1) self.tokenize_time += (t3 - t2) self.beautify_time += (t4 - t3) self.check_time += (t5 - t4) self.total_time += (t5 - t1)
def writeNodeAndTree(c, word, header_style, level, maxlevel=3, usesections=1, sectionhead="", vnode=None):
    """Write a node and its children to Word"""
    if vnode is None:
        vnode = c.currentVnode()
    directives = c.scanAllDirectives(p=vnode)
    encoding = directives.get("encoding", None)
    if encoding is None:
        encoding = sys.getdefaultencoding()
    # Emit this node's body as a paragraph.
    body = g.toEncodedString(vnode.b, encoding, reportErrors=True)
    doPara(word, body)
    # Emit each child headline as a (possibly numbered) heading, then recurse.
    for index in range(vnode.numberOfChildren()):
        thishead = "%s%d." % (sectionhead, index + 1) if usesections else ""
        child = vnode.nthChild(index)
        headline = g.toEncodedString(child.h, encoding, reportErrors=True)
        doPara(word, "%s %s" % (thishead, headline), "%s %d" % (header_style, min(level, maxlevel)))
        writeNodeAndTree(c, word, header_style, level + 1, maxlevel, usesections, thishead, child)
def writeTreeAsBibTex(bibFile, vnode, c):
    """
    Write the tree under vnode to the file bibFile as BibTeX.

    @string declarations are collected first and written before the entries.
    """
    # body text of @bibtex node is ignored
    # Fix: renamed `dict` -> `d` (shadowed the builtin).
    d = c.scanAllDirectives(p=vnode)
    encoding = d.get("encoding", None)
    # Fix: use `is None` (identity), not `== None`.
    if encoding is None:
        encoding = g.app.config.default_derived_file_encoding
    strings = ''
    entries = ''
    # iterate over nodes in this tree
    for v in vnode.subtree():
        h = v.h
        h = g.toEncodedString(h, encoding, reportErrors=True)
        if h.lower() == '@string':
            typestring = '@string'
        else:
            # Entry type is the headline text up to the first space.
            typestring = h[:h.find(' ')].lower()
        if typestring in entrytypes:
            s = v.b
            s = g.toEncodedString(s, encoding, reportErrors=True)
            if h == '@string':
                # store string declarations in strings
                for i in s.split('\n'):
                    if i and (not i.isspace()):
                        strings = strings + '@string{' + i + '}\n'
            else:
                # store other stuff in entries; the cite key follows the space.
                entries = entries + typestring + '{' + h[h.find(' ') + 1:] + ',\n' + s + '}\n\n'
    if strings:
        bibFile.write(strings + '\n\n')
    bibFile.write(entries)
def writeFileFromNode(self, event=None):
    '''If node starts with @read-file-into-node, use the full path name
    in the headline. Otherwise, prompt for a file name.
    '''
    c = self
    p = c.p
    c.endEditing()
    h = p.h.rstrip()
    s = p.b
    tag = '@read-file-into-node'
    if h.startswith(tag):
        # The rest of the headline is the file name.
        fileName = h[len(tag):].strip()
    else:
        fileName = None
    if not fileName:
        filetypes = [("All files", "*"), ("Python files", "*.py"), ("Leo files", "*.leo"),]
        fileName = g.app.gui.runSaveFileDialog(c,
            initialfile=None,
            title='Write File From Node',
            filetypes=filetypes,
            defaultextension=None)
    if fileName:
        try:
            with open(fileName, 'w') as f:
                g.chdir(fileName)
                # Strip a leading @nocolor directive from the body.
                if s.startswith('@nocolor\n'):
                    s = s[len('@nocolor\n'):]
                if not g.isPython3:  # 2010/08/27
                    s = g.toEncodedString(s, reportErrors=True)
                f.write(s)
                f.flush()
            g.blue('wrote:', fileName)
        except IOError:
            # Fix: interpolate the file name; the original passed the format
            # string and the name as two separate arguments, so '%s' was
            # printed literally.
            g.error('can not write %s' % fileName)
def writeAbbreviations(self, event):
    '''Write abbreviations to a file.'''
    c = self.c
    fileName = g.app.gui.runSaveFileDialog(c,
        initialfile=None,
        title='Write Abbreviations',
        filetypes=[("Text", "*.txt"), ("All files", "*")],
        defaultextension=".txt")
    if not fileName:
        return
    try:
        d = self.abbrevs
        # Fix: use a with-statement so the file is closed even if a write fails.
        with open(fileName, 'w') as f:
            for name in sorted(d.keys()):
                val, tag = d.get(name)
                val = val.replace('\n', '\\n')
                # Fix bug #236: write continuations in same format as in
                # @data abbreviations nodes
                val = ''.join([': %s' % (z) for z in g.splitLines(val)])
                s = '%s=%s\n' % (name, val)
                if not g.isPython3:
                    s = g.toEncodedString(s, reportErrors=True)
                f.write(s)
        g.es_print('wrote: %s' % fileName)
    except IOError:
        g.es('can not create', fileName)
def write(self, root):
    '''Write an @auto tree containing imported rST code.'''
    trace = False and not g.unitTesting  # Debug switch; always False outside debugging.
    root_level = root.level()
    if trace: g.trace('='*20, root.h)
    for p in root.subtree():
        if hasattr(self.at, 'force_sentinels'):
            # Emit the node sentinel as an rST comment.
            self.put_node_sentinel(p, '.. ')
        ch = self.underline_char(p, root_level)
        # Put the underlined headline
        self.put(p.h)
        # Fix #242: @auto-rst open/save error.
        # The underline must be at least 4 chars and cover the encoded headline.
        n = max(4, len(g.toEncodedString(p.h, reportErrors=False)))
        self.put(ch * n)
        # Ensure that every section ends with exactly two newlines.
        s = p.b.rstrip() + '\n\n'
        lines = s.splitlines(False)
        if trace: g.printList(lines)
        if lines and lines[0].strip():
            # Separate the underline from a non-blank first body line.
            self.put('')
        # Put the body.
        for s in lines:
            self.put(s)
    root.setVisited()
    return True
def write_docutils_files(self, fn, p, source):
    """Write source to the intermediate file and write the output from docutils.."""
    junk, ext = g.os_path_splitext(fn)
    ext = ext.lower()
    fn = self.computeOutputFileName(fn)
    ok = self.createDirectoryForFile(fn)
    if not ok:
        return
    # Write the intermediate file.
    if self.write_intermediate_file:
        self.writeIntermediateFile(fn, p, source)
    # Should we call docutils?
    if not self.call_docutils:
        return
    if ext not in ('.htm', '.html', '.tex', '.pdf', '.s5', '.odt'):
        # #1884: test now.
        return
    # Write the result from docutils.
    s = self.writeToDocutils(p, source, ext)
    if s and ext in ('.html', '.htm'):
        s = self.addTitleToHtml(s)
    if not s:
        return
    # Encode and write the docutils output in binary mode.
    s = g.toEncodedString(s, 'utf-8')
    with open(fn, 'wb') as f:
        f.write(s)
    self.n_docutils += 1
    self.report(fn, p)
def writeSlideTitle(self, title, n, n_tot):
    """Write the title, underlined with the '#' character."""
    # Append a part counter to all slides except the first.
    if n != 1:
        title = f"{title} ({n} of {n_tot})"
    encoded = g.toEncodedString(title, encoding=self.encoding, reportErrors=False)
    # Underline must be at least 4 characters wide.
    width = len(encoded)
    if width < 4:
        width = 4
    underline = '#' * width
    self.result_list.append(f"{title}\n{underline}")
def export_outline(self, root, fn=None):
    '''
    Entry point for export-jupyter-notebook
    Export the given .ipynb file.
    '''
    self.root = root
    # Prompt for a file name when none was supplied.
    fn = fn or self.get_file_name()
    if not fn:
        return False
    try:
        notebook = self.make_notebook()
        contents = self.convert_notebook(notebook)
    except Exception:
        g.es_exception()
        return False
    if not contents:
        return False
    encoded = g.toEncodedString(contents, encoding='utf-8', reportErrors=True)
    try:
        with open(fn, 'wb') as f:
            f.write(encoded)
        g.es_print('wrote: %s' % fn)
    except IOError:
        g.es_print('can not open: %s' % fn)
    return True
def update_svg (self,s,keywords):
    '''Update the SVG rendering pane: s is either raw SVG (xml) source or
    text containing a file name.'''
    pc = self
    # Recreate the rendering widget if the current one has the wrong class.
    if pc.must_change_widget(pc.svg_class):
        w = pc.svg_class()
        pc.embed_widget(w)
        assert (w == pc.w)
    else:
        w = pc.w
    if s.strip().startswith('<'):
        # Assume it is the svg (xml) source.
        s = g.adjustTripleString(s,pc.c.tab_width).strip()
            # Sensitive to leading blank lines.
        s = g.toEncodedString(s)
        pc.show()
        # Load the raw xml bytes directly into the widget.
        w.load(QtCore.QByteArray(s))
        w.show()
    else:
        # Get a filename from the headline or body text.
        ok,path = pc.get_fn(s,'@svg')
        if ok:
            pc.show()
            w.load(path)
            w.show()
def create_temp_file(self, c, ext, p):
    '''
    Create the temp file used by open-with if necessary.
    Add the corresponding ExternalFile instance to self.files

    Returns the path of the temp file, or None on write failure.
    '''
    path = self.temp_file_path(c, p, ext)
    exists = g.os_path_exists(path)
    # Compute encoding and s.
    d2 = c.scanAllDirectives(p)
    encoding = d2.get('encoding', None)
    if encoding is None:
        encoding = c.config.default_derived_file_encoding
    s = g.toEncodedString(p.b, encoding, reportErrors=True)
    # Write the file *only* if it doesn't exist.
    # No need to read the file: recomputing s above suffices.
    if not exists:
        try:
            # Fix: with-statement closes the file even if write/flush raises.
            with open(path, 'wb') as f:
                f.write(s)
                f.flush()
        except IOError:
            g.error('exception creating temp file: %s' % path)
            g.es_exception()
            return None
    # Add or update the external file entry.
    time = self.get_mtime(path)
    self.files = [z for z in self.files if z.path != path]
    self.files.append(ExternalFile(c, ext, p, path, time))
    return path
def writeAbbreviations(self, event):
    '''Write abbreviations to a file.'''
    c = self.c
    fileName = g.app.gui.runSaveFileDialog(c,
        initialfile=None,
        title='Write Abbreviations',
        filetypes=[("Text", "*.txt"), ("All files", "*")],
        defaultextension=".txt")
    if not fileName:
        return
    try:
        d = self.abbrevs
        # Fix: with-statement closes the file even if a write fails mid-loop.
        with open(fileName, 'w') as f:
            for name in sorted(d.keys()):
                val, tag = d.get(name)
                val = val.replace('\n', '\\n')
                # Fix bug #236: write continuations in same format as in
                # @data abbreviations nodes
                val = ''.join([': %s' % (z) for z in g.splitLines(val)])
                s = '%s=%s\n' % (name, val)
                if not g.isPython3:
                    s = g.toEncodedString(s, reportErrors=True)
                f.write(s)
        g.es_print('wrote: %s' % fileName)
    except IOError:
        g.es('can not create', fileName)
def create_temp_file(self, c, ext, p):
    '''
    Create the file used by open-with if necessary.
    Add the corresponding ExternalFile instance to self.files
    '''
    path = self.compute_temp_file_path(c, p, ext)
    already_exists = g.os_path_exists(path)
    # Resolve the encoding from the node's directives.
    directives = c.scanAllDirectives(p)
    encoding = directives.get('encoding', None)
    if encoding is None:
        encoding = c.config.default_derived_file_encoding
    contents = g.toEncodedString(p.b, encoding, reportErrors=True)
    # Only write when the file is missing; recomputing contents suffices
    # otherwise, so the file need not be read back.
    if not already_exists:
        try:
            with open(path, 'wb') as f:
                f.write(contents)
                f.flush()
        except IOError:
            g.error(f"exception creating temp file: {path}")
            g.es_exception()
            return None
    # Replace any stale entry for this path, then record the new one.
    time = self.get_mtime(path)
    self.files = [z for z in self.files if z.path != path]
    self.files.append(ExternalFile(c, ext, p, path, time))
    return path
def write(self, root):
    '''
    Export_IPYNB: entry point for @auto writes.
    Signature must match signature of BaseWriter.write().
    '''
    at = self.c.atFileCommands
    if not root:
        g.trace('can not happen: no root')
        return False
    # Fix: the original test was `if not at and at.outputFile:`, which can
    # never be True when at exists and raises AttributeError when it doesn't.
    # The intent is to bail out when either at or at.outputFile is missing.
    if not at or not at.outputFile:
        g.trace('can not happen: no at.outputFile')
        return False
    # Write the text to at.outputFile.
    self.root = root
    try:
        nb = self.make_notebook()
        s = self.convert_notebook(nb)
    except Exception:
        g.es_exception()
        return False
    if not s:
        return False
    # at.outputFile expects encoded bytes.
    if g.isUnicode(s):
        s = g.toEncodedString(s, encoding='utf-8', reportErrors=True)
    at.outputFile.write(s)
    return True
def run(self, files):
    """Process all files"""
    self.files = files
    # Fix: time.clock() was removed in Python 3.8; perf_counter is the
    # modern wall-clock replacement.
    t1 = time.perf_counter()
    for fn in files:
        s, e = g.readFileIntoString(fn)
        if s:
            self.tot_s += len(s)
            g.trace("%8s %s" % ("{:,}".format(len(s)), g.shortFileName(fn)))
                # Print len(s), with commas.
            # Fast, accurate:
            # 1.9 sec for parsing.
            # 2.5 sec for Null AstFullTraverer traversal.
            # 2.7 sec to generate all strings.
            # 3.8 sec to generate all reports.
            s1 = g.toEncodedString(s)
            self.tot_lines += len(g.splitLines(s))
                # Adds less than 0.1 sec.
            node = ast.parse(s1, filename="before", mode="exec")
            ShowDataTraverser(self, fn).visit(node)
        else:
            g.trace("skipped", g.shortFileName(fn))
    t2 = time.perf_counter()
        # Get the time exlusive of print time.
    self.show_results()
    g.trace("done: %4.1f sec." % (t2 - t1))
def run(self, files):
    '''Process all files'''
    self.files = files
    # Fix: time.clock() was removed in Python 3.8; use perf_counter instead.
    t1 = time.perf_counter()
    for fn in files:
        s, e = g.readFileIntoString(fn)
        if s:
            self.tot_s += len(s)
            g.trace('%8s %s' % ("{:,}".format(len(s)), g.shortFileName(fn)))
                # Print len(s), with commas.
            # Fast, accurate:
            # 1.9 sec for parsing.
            # 2.5 sec for Null AstFullTraverer traversal.
            # 2.7 sec to generate all strings.
            # 3.8 sec to generate all reports.
            s1 = g.toEncodedString(s)
            self.tot_lines += len(g.splitLines(s))
                # Adds less than 0.1 sec.
            node = ast.parse(s1, filename='before', mode='exec')
            ShowDataTraverser(self, fn).visit(node)
        else:
            g.trace('skipped', g.shortFileName(fn))
    t2 = time.perf_counter()
        # Get the time exlusive of print time.
    self.show_results()
    g.trace('done: %4.1f sec.' % (t2 - t1))
def initFileDB (self,fn):
    '''Create self.db, a per-outline PickleShareDB keyed by the full path fn.'''
    trace = False and not g.unitTesting  # Debug switch; always False outside debugging.
    if trace: g.trace('inited',self.inited,repr(fn))
    if not fn: return
    pth, bname = split(fn)
    if pth and bname: ### and g.enableDB:
        fn = fn.lower()
        fn = g.toEncodedString(fn) # Required for Python 3.x.
        # Important: this creates a top-level directory of the form x_y.
        # x is a short file name, included for convenience.
        # y is a key computed by the *full* path name fn.
        # Thus, there will a separate top-level directory for every path.
        self.dbdirname = dbdirname = join(g.app.homeLeoDir,'db',
            '%s_%s' % (bname,hashlib.md5(fn).hexdigest()))
        self.db = PickleShareDB(dbdirname)
        # Fixes bug 670108.
        self.c.db = self.db
        self.inited = True
        if trace: g.trace('self.c.db',self.db)
def write(self, root):
    '''Write an @auto tree containing imported rST code.'''
    trace = False and not g.unitTesting  # Debug switch; always False outside debugging.
    root_level = root.level()
    if trace: g.trace('=' * 20, root.h)
    # Write the root node itself before its subtree.
    self.write_root(root)
    for p in root.subtree():
        if hasattr(self.at, 'force_sentinels'):
            # Emit the node sentinel as an rST comment.
            self.put_node_sentinel(p, '.. ')
        ch = self.underline_char(p, root_level)
        # Put the underlined headline
        self.put(p.h)
        # Fix #242: @auto-rst open/save error.
        # The underline must be at least 4 chars and cover the encoded headline.
        n = max(4, len(g.toEncodedString(p.h, reportErrors=False)))
        self.put(ch * n)
        # Ensure that every section ends with exactly two newlines.
        s = p.b.rstrip() + '\n\n'
        lines = s.splitlines(False)
        if trace: g.printList(lines)
        if lines and lines[0].strip():
            # Separate the underline from a non-blank first body line.
            self.put('')
        # Put the body.
        for s in lines:
            self.put(s)
    root.setVisited()
    return True
def initFileDB (self,fn):
    '''Create self.db, a per-outline PickleShareDB keyed by the full path fn.'''
    trace = False and not g.unitTesting  # Debug switch; always False outside debugging.
    if trace: g.trace('inited',self.inited,repr(fn))
    if not fn: return
    pth, bname = split(fn)
    if pth and bname:
        fn = fn.lower()
        fn = g.toEncodedString(fn) # Required for Python 3.x.
        # Important: this creates a top-level directory of the form x_y.
        # x is a short file name, included for convenience.
        # y is a key computed by the *full* path name fn.
        # Thus, there will a separate top-level directory for every path.
        self.dbdirname = dbdirname = join(g.app.homeLeoDir,'db',
            '%s_%s' % (bname,hashlib.md5(fn).hexdigest()))
        self.db = PickleShareDB(dbdirname)
        # Fixes bug 670108.
        self.c.db = self.db
        self.inited = True
        if trace: g.trace('self.c.db',self.db)
def fileKey(self, s, content, requireEncodedString=False):
    '''
    Compute the hash of s (usually a headline) and content.
    s may be unicode, content must be bytes (or plain string in Python 2.x)
    '''
    # Normalize both inputs to encoded bytes before hashing.
    if g.isUnicode(s):
        s = g.toEncodedString(s)
    if g.isUnicode(content):
        if requireEncodedString:
            g.internalError('content arg must be str/bytes')
        content = g.toEncodedString(content)
    digest = hashlib.md5()
    digest.update(s)
    digest.update(content)
    return "fcache/" + digest.hexdigest()
def trace(self):
    """Print this object's name and encoded value, but only when tracing."""
    if not self.tracing:
        return
    g.trace("%10s: %s" % (self.name, repr(g.toEncodedString(self.val))))
def fileKey(self, s, content, requireEncodedString=False):
    '''
    Compute the hash of s (usually a headline) and content.
    s may be unicode, content must be bytes (or plain string in Python 2.x)
    '''
    trace = False and not g.unitTesting  # Debug switch; always False outside debugging.
    m = hashlib.md5()
    # Normalize both inputs to encoded bytes before hashing.
    if g.isUnicode(s):
        s = g.toEncodedString(s)
    if g.isUnicode(content):
        if requireEncodedString:
            # Caller promised content was already encoded; report the violation.
            g.internalError('content arg must be str/bytes')
        content = g.toEncodedString(content)
    m.update(s)
    m.update(content)
    if trace: g.trace(m.hexdigest())
    return "fcache/" + m.hexdigest()
def color(self, text, dark=False):
    """make a consistent light background color for text"""
    encoded = g.toEncodedString(text, 'utf-8')
    # Derive six hex digits deterministically from the text.
    digest = hashlib.md5(encoded).hexdigest()[-6:]
    add = int('33', 16) if dark else int('bb', 16)
    channels = []
    for i in range(3):
        channels.append(int(digest[2 * i:2 * i + 2], 16) // 4 + add)
    return '%02x%02x%02x' % tuple(channels)
def unpickle(self, s):
    """Unhexlify and unpickle string s into p."""
    try:
        # unhexlify raises TypeError if s is not a hex string.
        raw = binascii.unhexlify(g.toEncodedString(s))
        return pickle.loads(raw)
    except Exception:
        g.es_exception()
        return None
def unpickle(self, s):
    '''Unhexlify and unpickle string s into p.'''
    try:
        encoded = g.toEncodedString(s)
        # unhexlify raises TypeError for non-hex input.
        data = binascii.unhexlify(encoded)
        return pickle.loads(data)
    except Exception:
        g.es_exception()
        return None
def color(self, text):
    """make a consistent light background color for text"""
    # In Python 3, md5 requires bytes, not str.
    if g.isPython3:
        text = g.toEncodedString(text,'utf-8')
    digest = hashlib.md5(text).hexdigest()[-6:]
    base = int('bb',16)
    rgb = tuple(int(digest[2*i:2*i+2], 16)//4 + base for i in range(3))
    return '%02x%02x%02x' % rgb
def color(self, text, dark=False):
    """make a consistent light background color for text"""
    # In Python 3, md5 requires bytes, not str.
    if g.isPython3:
        text = g.toEncodedString(text, "utf-8")
    digest = hashlib.md5(text).hexdigest()[-6:]
    if dark:
        add = int("33", 16)
    else:
        add = int("bb", 16)
    parts = [int(digest[2 * i : 2 * i + 2], 16) // 4 + add for i in range(3)]
    return "%02x%02x%02x" % tuple(parts)
def executeSubprocess(self, event, command):
    '''Execute a command in a separate process.'''
    k = self.c.k
    try:
        # Split the command shell-style, then block until it finishes.
        args = shlex.split(g.toEncodedString(command))
        subprocess.Popen(args).wait()
    except Exception:
        g.es_exception()
    k.keyboardQuit()
        # Inits vim mode too.
    g.es('Done: %s' % command)
def show(self, s):
    """Write s to the output file if open, else the log pane, else stdout."""
    if self.outputFile:
        # self.outputFile is opened in 'wb' mode.
        self.outputFile.write(g.toEncodedString(s + '\n'))
    elif self.c:
        g.es(s)
    else:
        g.pr(s)
        g.pr('')
def color(self, text):
    """make a consistent light background color for text"""
    # In Python 3, md5 requires bytes, not str.
    if g.isPython3:
        text = g.toEncodedString(text, 'utf-8')
    digest = hashlib.md5(text).hexdigest()[-6:]
    channels = []
    for i in range(3):
        channels.append(int(digest[2 * i:2 * i + 2], 16) // 4 + int('bb', 16))
    return '%02x%02x%02x' % tuple(channels)
def fileKey(self, fileName, content, requireEncodedString=False):
    '''
    Compute the hash of fileName and content. fileName may be unicode,
    content must be bytes (or plain string in Python 2.x).
    '''
    # Normalize the inputs to encoded bytes before hashing.
    if g.isUnicode(fileName):
        fileName = g.toEncodedString(fileName)
    if g.isUnicode(content):
        if requireEncodedString:
            g.internalError('content arg must be str/bytes')
        content = g.toEncodedString(content)
    # New in Leo 5.6: Use the git branch name in the key.
    branch = g.gitBranchName()
    branch = g.toEncodedString(branch)
        # Fix #475.
    hasher = hashlib.md5()
    hasher.update(branch)
    hasher.update(fileName)
    hasher.update(content)
    return "fcache/" + hasher.hexdigest()
def trace_status(self, line, new_state, prev_state, stack, top):
    '''Print everything important in the i.gen_lines loop.'''
    print('')
    try:
        g.trace('===== %r' % line)
    except Exception:
        # Fall back to the top node's headline if the line can't be printed.
        g.trace(' top.p: %s' % g.toEncodedString(top.p.h))
    # print('len(stack): %s' % len(stack))
    print(' new_state: %s' % new_state)
    print('prev_state: %s' % prev_state)
    # print(' top.state: %s' % top.state)
    g.printList(stack)
def initiate_sending(self):
    '''Flatten the buffered output into bytes and prepare the socket for sending.'''
    ### Create a bytes string.
    aList = [g.toEncodedString(z) for z in self.buffer]
    self.out_buffer = b''.join(aList)
    # The buffer is consumed; everything to send is now in out_buffer.
    del self.buffer
    self.set_socket(self.socket, None)
    # Switch the socket to non-blocking mode before sending.
    self.socket.setblocking(0)
    self.connected = 1
    try:
        self.addr = self.socket.getpeername()
    except socket.error:
        # The addr isn't crucial
        pass
def dumpCompareNodes(self, fileName1, fileName2, inserted, deleted, changed):
    """Print the inserted, deleted and changed node dicts from a compare."""
    table = (
        (inserted, 'inserted (only in %s)' % (fileName1)),
        (deleted, 'deleted (only in %s)' % (fileName2)),
        (changed, 'changed'),
    )
    for d, kind in table:
        g.pr('\n', kind)
        for key in d:
            p = d.get(key)
            if g.isPython3:
                g.pr('%-32s %s' % (key, p.h))
            else:
                # Python 2: headlines must be encoded before printing.
                g.pr('%-32s %s' % (key, g.toEncodedString(p.h, 'ascii')))
def beautify(options, path):
    '''Beautify the file with the given path.'''
    fn = g.shortFileName(path)
    s, e = g.readFileIntoString(path)
    if not s:
        return
    print('beautifying %s' % fn)
    s1 = g.toEncodedString(s)
    node1 = ast.parse(s1, filename='before', mode='exec')
    readlines = g.ReadLinesClass(s).next
    tokens = list(tokenize.generate_tokens(readlines))
    beautifier = PythonTokenBeautifier(c=None)
    beautifier.delete_blank_lines = not options.keep
    s2 = beautifier.run(tokens)
    s2_e = g.toEncodedString(s2)
    # Fix: label the beautified parse 'after' (was 'before', a copy-paste
    # slip) so syntax errors point at the correct version of the text.
    node2 = ast.parse(s2_e, filename='after', mode='exec')
    # Only overwrite the file when the beautified AST matches the original.
    if compare_ast(node1, node2):
        # Fix: with-statement closes the file even if the write raises.
        with open(path, 'wb') as f:
            f.write(s2_e)
    else:
        print('failed to beautify %s' % fn)
def underline (self,s,p):
    '''Return the underlining string to be used at the given level for string s.'''
    trace = False and not g.unitTesting  # Debug switch; always False outside debugging.
    # The user is responsible for top-level overlining.
    u = self.getOption('underline_characters') #  '''#=+*^~"'`-:><_'''
    # Map the node's outline depth to an index into the underline characters.
    level = max(0,p.level()-self.topLevel)
    level = min(level+1,len(u)-1)
        # Reserve the first character for explicit titles.
    ch = u [level]
    if trace: g.trace(self.topLevel,p.level(),level,repr(ch),p.h)
    # The underline must be at least 4 chars and cover the encoded headline.
    n = max(4,len(g.toEncodedString(s,encoding=self.encoding,reportErrors=False)))
    return '%s\n%s\n\n' % (p.h.strip(),ch*n)
def fileKey(self, fileName, content, requireEncodedString=False):
    '''
    Compute the hash of fileName and content. fileName may be unicode,
    content must be bytes (or plain string in Python 2.x).

    Returns a cache key of the form "fcache/<md5 hexdigest>".
    '''
    # Cleanup: removed the dead `trace` scaffolding (always False) and the
    # commented-out g.trace call.
    m = hashlib.md5()
    if g.isUnicode(fileName):
        fileName = g.toEncodedString(fileName)
    if g.isUnicode(content):
        if requireEncodedString:
            g.internalError('content arg must be str/bytes')
        content = g.toEncodedString(content)
    # New in Leo 5.6: Use the git branch name in the key.
    branch = g.gitBranchName()
    branch = g.toEncodedString(branch)
        # Fix #475.
    m.update(branch)
    m.update(fileName)
    m.update(content)
    return "fcache/" + m.hexdigest()
def show_error_lines(self, lines, fileName):
    '''Print each line of the failing text, numbered, to the log.'''
    for i, line in enumerate(lines):
        g.es_print('%3s %s' % (i, repr(line)))
    if False:  # Only for major debugging.
        # Deliberately disabled: dumps the repr of every line to fileName.
        try:
            f1 = open(fileName, "w")
            for s in lines:
                if not g.isPython3:  # 2010/08/27
                    s = g.toEncodedString(s, encoding='utf-8', reportErrors=True)
                f1.write(repr(s))
            f1.close()
        except IOError:
            g.es_exception()
            g.es_print('can not open', fileName)
def trace_status(self, line, new_state, prev_state, stack, top):
    '''Print everything important in the i.gen_lines loop.'''
    # Skip blank lines and elisp comment lines to reduce noise.
    if line.isspace() or line.strip().startswith(';'):
        return # for elisp
    print('')
    try:
        g.trace('===== %r' % line)
    except Exception:
        # Fall back to the top node's headline if the line can't be printed.
        g.trace(' top.p: %s' % g.toEncodedString(top.p.h))
    # print('len(stack): %s' % len(stack))
    print(' new_state: %s' % new_state)
    print('prev_state: %s' % prev_state)
    # print(' top.state: %s' % top.state)
    g.printList(stack)
def paste_outline(self, fn, s):
    '''
    Paste an outline into the present outline from the s.
    Nodes do *not* retain their original identify.
    Similar to fc.getLeoOutlineFromClipboard
    '''
    c, fc = self.c, self.c.fileCommands
    parent = self.file_node
    fc.initReadIvars()
    # Save... (restored after the read so the hidden root and gnx map
    # are not polluted by the clipboard read).
    children = c.hiddenRootNode.children
    oldGnxDict = fc.gnxDict
    fc.gnxDict = {}
    try:
        fc.usingClipboard = True
        # This encoding must match the encoding used in putLeoOutline.
        s = g.toEncodedString(s, fc.leo_file_encoding, reportErrors=True)
        # readSaxFile modifies the hidden root.
        v = fc.readSaxFile(
            theFile=None,
            fileName=fn,
            silent=True, # don't tell about stylesheet elements.
            inClipboard=True,
            reassignIndices=True,
            s=s)
        if not v:
            g.es("invalid external file", color="blue")
            return None
    finally:
        fc.usingClipboard = False
    # Restore...
    c.hiddenRootNode.children = children
        # Unlink v from the hidden root.
    v.parents.remove(c.hiddenRootNode)
    p = leoNodes.Position(v)
    # Link the pasted tree as the last child of parent.
    n = parent.numberOfChildren()
    p._linkAsNthChild(parent, n, adjust=False)
        # Do *not* adjust links when linking v.
        # The read code has already done that.
    # Reassign indices.
    fc.gnxDict = oldGnxDict
    ni = g.app.nodeIndices
    for p2 in p.self_and_subtree():
        ni.getNewIndex(p2.v)
    # Clean up.
    fc.initReadIvars()
    c.validateOutline()
    c.setCurrentPosition(p)
        # c.selectPosition causes flash(!)
    return p
def __init__(self, conn, addr, server):
    '''Initialize the async http request handler for one connection.'''
    self.leo_actions = LeoActions(self)
    asynchat.async_chat.__init__(self, conn)
    self.client_address = addr
    self.connection = conn
    self.server = server
    # Responses are buffered and sent through the socket lazily.
    self.wfile = delayedSocketStream(self.socket)
    # Sets the terminator. When it is received, this means that the
    # http request is complete, control will be passed to self.found_terminator
    self.term = g.toEncodedString('\r\n\r\n')
    self.set_terminator(self.term)
    self.buffer = BytesIO()
    ### Set self.use_encoding and self.encoding.
    ### This is used by asyn_chat.
    self.use_encoding = True
    self.encoding = 'utf-8'
def replaceFileWithString(self, fn, s):
    '''Replace the file with s if s is different from theFile's contents.
    Return True if theFile was changed.
    '''
    # Cleanup: removed the dead trace/verbose scaffolding (always False).
    x = self
    exists = g.os_path_exists(fn)
    if exists:
        # Read the file. Return if it is the same.
        s2, e = g.readFileIntoString(fn)
        if s2 is None:
            return False
        if s == s2:
            if not g.unitTesting:
                g.es('unchanged:', fn)
            return False
    # Issue warning if directory does not exist.
    theDir = g.os_path_dirname(fn)
    if theDir and not g.os_path_exists(theDir):
        if not g.unitTesting:
            x.error('not written: %s directory not found' % fn)
        return False
    # Replace the file.
    try:
        # Fix: with-statement closes the file even if the write raises.
        with open(fn, 'wb') as f:
            # 2011/09/09: Use self.encoding.
            f.write(g.toEncodedString(s, encoding=self.encoding))
        if not g.unitTesting:
            if exists:
                g.es('wrote:', fn)
            else:
                g.es('created:', fn)
        return True
    except IOError:
        x.error('unexpected exception writing file: %s' % (fn))
        g.es_exception()
        return False
def create_temp_file(self, c, d, p):
    '''
    Create the temp file used by open-with if necessary.
    Add the corresponding ExternalFile instance to self.files

    d is a dictionary created from an @openwith settings node.

    'args':     the command-line arguments to be used to open the file.
    'ext':      the file extension.
    'kind':     the method used to open the file, such as subprocess.Popen.
    'name':     menu label (used only by the menu code).
    'shortcut': menu shortcut (used only by the menu code).

    Returns the path of the temp file, or None on write failure.
    '''
    # Cleanup: removed the dead `trace` scaffolding (always False).
    assert isinstance(d, dict), d
    ext = d.get('ext')
    path = self.temp_file_path(c, p, ext)
    exists = g.os_path_exists(path)
    # Compute encoding and s.
    d2 = c.scanAllDirectives(p)
    encoding = d2.get('encoding', None)
    if encoding is None:
        encoding = c.config.default_derived_file_encoding
    s = g.toEncodedString(p.b, encoding, reportErrors=True)
    # Write the file *only* if it doesn't exist.
    # No need to read the file: recomputing s above suffices.
    if not exists:
        try:
            # Fix: with-statement closes the file even if write/flush raises.
            with open(path, 'wb') as f:
                f.write(s)
                f.flush()
        except IOError:
            g.error('exception creating temp file: %s' % path)
            g.es_exception()
            return None
    # Add or update the external file entry.
    time = self.get_mtime(path)
    self.files = [z for z in self.files if z.path != path]
    self.files.append(ExternalFile(c, ext, p, path, time))
    return path