def files(self, only_unlocked=False):
    """Find and return the history files. Optionally locked files may be
    excluded.

    This is sorted by the last closed time. Returns a list of
    (timestamp, number of cmds, file name, file size) tuples.
    """
    env = XSH.env
    if env is None:
        return []
    xonsh_debug = env.get("XONSH_DEBUG", 0)
    boot = uptime.boottime()
    fs = _xhj_get_history_files(sort=False)
    files = []
    time_start = time.time()
    for f in fs:
        try:
            cur_file_size = os.path.getsize(f)
            if cur_file_size == 0:
                # collect empty files (for gc)
                files.append((os.path.getmtime(f), 0, f, cur_file_size))
                continue
            lj = xlj.LazyJSON(f, reopen=False)
            if lj.get("locked", False) and lj["ts"][0] < boot:
                # computer was rebooted between when this history was created
                # and now and so this history should be unlocked.
                hist = lj.load()
                lj.close()
                hist["locked"] = False
                with open(f, "w", newline="\n") as fp:
                    xlj.ljdump(hist, fp, sort_keys=True)
                lj = xlj.LazyJSON(f, reopen=False)
            if only_unlocked and lj.get("locked", False):
                continue
            # info: closing timestamp, number of commands, filename, file size
            ts = lj.get("ts", (0.0, None))
            files.append(
                (ts[1] or ts[0], len(lj.sizes["cmds"]) - 1, f, cur_file_size)
            )
            lj.close()
            if xonsh_debug:
                time_lag = time.time() - time_start
                print(
                    f"[history.{json.__name__}] Enumerated {len(files):,d} history files for {time_lag:0.4f}s.\r",
                    end="",
                    file=sys.stderr,
                )
        except (OSError, ValueError):
            continue
    files.sort()  # sorts by the first tuple element (a timestamp), so oldest first
    return files
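# Illustrative sketch (not from the original source): one way a garbage
# collector could consume the (timestamp, number of cmds, file name, file size)
# tuples returned above. The helper name and the byte budget are hypothetical;
# the real xonsh GC applies its own size and command-count limits.
def _files_to_remove(files, max_bytes):
    """Given tuples sorted oldest-first, pick the oldest files to delete so
    that the remaining files fit within ``max_bytes``."""
    total = sum(fsize for _, _, _, fsize in files)
    doomed = []
    for _, _, fname, fsize in files:
        if total <= max_bytes:
            break
        doomed.append(fname)
        total -= fsize
    return doomed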
def __getitem__(self, key):
    if not self.hist.remember_history:
        return ""
    size = len(self)
    if isinstance(key, slice):
        return [self[i] for i in range(*key.indices(size))]
    elif not isinstance(key, int):
        raise IndexError("JsonCommandField may only be indexed by int or slice.")
    elif size == 0:
        raise IndexError("JsonCommandField is empty.")
    # now we know we have an int
    key = size + key if key < 0 else key  # ensure key is non-negative
    bufsize = len(self.hist.buffer)
    if size - bufsize <= key:  # key is in buffer
        return self.hist.buffer[key + bufsize - size].get(self.field, self.default)
    # now we know we have to go into the file
    queue = self.hist._queue
    queue.append(self)
    with self.hist._cond:
        self.hist._cond.wait_for(self.i_am_at_the_front)
        with open(self.hist.filename, "r", newline="\n") as f:
            lj = xlj.LazyJSON(f, reopen=False)
            rtn = lj["cmds"][key].get(self.field, self.default)
            if isinstance(rtn, xlj.LJNode):
                rtn = rtn.load()
        queue.popleft()
    return rtn
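# Usage sketch (an assumption, not shown in the original): in xonsh's JSON
# backend the session history object exposes command fields such as ``inps``,
# ``rtns`` and ``tss`` that are indexed through __getitem__ above, e.g.:
#
#     hist = __xonsh__.history      # the active JSON history session
#     last_input = hist.inps[-1]    # negative index resolved against len(hist)
#     first_three = hist.inps[0:3]  # a slice recurses into __getitem__ per index
#
# Buffered (not yet dumped) commands are answered from memory; older ones are
# read back lazily from the session file under the condition variable.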
def all_items(self, **kwargs):
    """
    Returns all history as found in XONSH_DATA_DIR.

    yield format: {'inp': cmd, 'rtn': 0, ...}
    """
    while self.gc and self.gc.is_alive():
        time.sleep(0.011)  # gc sleeps for 0.01 secs, sleep a beat longer
    for f in _xhj_get_history_files():
        try:
            json_file = xlj.LazyJSON(f, reopen=False)
        except ValueError:
            # Invalid json file
            continue
        try:
            commands = json_file.load()['cmds']
        except json.decoder.JSONDecodeError:
            # file is corrupted somehow
            if builtins.__xonsh_env__.get('XONSH_DEBUG') > 0:
                msg = 'xonsh history file {0!r} is not valid JSON'
                print(msg.format(f), file=sys.stderr)
            continue
        for c in commands:
            yield {'inp': c['inp'].rstrip(), 'ts': c['ts'][0]}
    # all items should also include session items
    yield from self.items()
def dump(self):
    """Write the cached history to external storage."""
    opts = builtins.__xonsh__.env.get("HISTCONTROL")
    last_inp = None
    cmds = []
    for cmd in self.buffer:
        if "ignoredups" in opts and cmd["inp"] == last_inp:
            # Skipping dup cmd
            if self.skip is not None:
                self.skip(1)
            continue
        if "ignoreerr" in opts and cmd["rtn"] != 0:
            # Skipping failed cmd
            if self.skip is not None:
                self.skip(1)
            continue
        cmds.append(cmd)
        last_inp = cmd["inp"]
    with open(self.filename, "r", newline="\n") as f:
        hist = xlj.LazyJSON(f).load()
    load_hist_len = len(hist["cmds"])
    hist["cmds"].extend(cmds)
    if self.at_exit:
        hist["ts"][1] = time.time()  # apply end time
        hist["locked"] = False
    if not builtins.__xonsh__.env.get("XONSH_STORE_STDOUT", False):
        [cmd.pop("out") for cmd in hist["cmds"][load_hist_len:] if "out" in cmd]
    with open(self.filename, "w", newline="\n") as f:
        xlj.ljdump(hist, f, sort_keys=True)
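# For context: the ``HISTCONTROL`` options checked above map to the xonsh
# environment variable of the same name. A minimal illustration (xonsh syntax,
# not taken from the original) that skips consecutive duplicates and failed
# commands when the buffer is dumped:
#
#     $HISTCONTROL = 'ignoredups,ignoreerr'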
def __getitem__(self, key):
    size = len(self)
    if isinstance(key, slice):
        return [self[i] for i in range(*key.indices(size))]
    elif not isinstance(key, int):
        raise IndexError('CommandField may only be indexed by int or slice.')
    # now we know we have an int
    key = size + key if key < 0 else key  # ensure key is non-negative
    bufsize = len(self.hist.buffer)
    if size - bufsize <= key:  # key is in buffer
        return self.hist.buffer[key + bufsize - size].get(self.field, self.default)
    # now we know we have to go into the file
    queue = self.hist._queue
    queue.append(self)
    with self.hist._cond:
        self.hist._cond.wait_for(self.i_am_at_the_front)
        with open(self.hist.filename, 'r', newline='\n') as f:
            lj = lazyjson.LazyJSON(f, reopen=False)
            rtn = lj['cmds'][key].get(self.field, self.default)
            if isinstance(rtn, lazyjson.Node):
                rtn = rtn.load()
        queue.popleft()
    return rtn
def all_items(self, newest_first=False, **kwargs):
    """
    Returns all history as found in XONSH_DATA_DIR.

    yield format: {'inp': cmd, 'rtn': 0, ...}
    """
    while self.gc and self.gc.is_alive():
        time.sleep(0.011)  # gc sleeps for 0.01 secs, sleep a beat longer
    for f in _xhj_get_history_files(newest_first=newest_first):
        try:
            json_file = xlj.LazyJSON(f, reopen=False)
        except ValueError:
            # Invalid json file
            continue
        try:
            commands = json_file.load()["cmds"]
        except json.decoder.JSONDecodeError:
            # file is corrupted somehow
            if builtins.__xonsh__.env.get("XONSH_DEBUG") > 0:
                msg = "xonsh history file {0!r} is not valid JSON"
                print(msg.format(f), file=sys.stderr)
            continue
        if newest_first:
            commands = reversed(commands)
        for c in commands:
            yield {"inp": c["inp"].rstrip(), "ts": c["ts"][0]}
    # all items should also include session items
    yield from self.items()
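# Consumption sketch (hypothetical caller, not in the original): print the ten
# most recent commands across all sessions, assuming ``hist`` is a history
# object providing the all_items() generator shown above.
#
#     for i, item in enumerate(hist.all_items(newest_first=True)):
#         if i >= 10:
#             break
#         print(item["ts"], item["inp"])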
def files(self, only_unlocked=False):
    """Find and return the history files. Optionally locked files may be
    excluded.

    This is sorted by the last closed time. Returns a list of
    (timestamp, number of cmds, file name) tuples.
    """
    # pylint: disable=no-member
    env = getattr(builtins, '__xonsh_env__', None)
    if env is None:
        return []
    fs = _xhj_get_history_files(sort=False)
    files = []
    for f in fs:
        try:
            if os.path.getsize(f) == 0:
                # collect empty files (for gc)
                files.append((time.time(), 0, f))
                continue
            lj = xlj.LazyJSON(f, reopen=False)
            if only_unlocked and lj['locked']:
                continue
            # info: closing timestamp, number of commands, filename
            files.append((lj['ts'][1] or time.time(),
                          len(lj.sizes['cmds']) - 1,
                          f))
            lj.close()
        except (IOError, OSError, ValueError):
            continue
    files.sort()
    return files
def run(self):
    try:
        import readline
    except ImportError:
        return
    hist = builtins.__xonsh_history__
    while self.wait_for_gc and hist.gc.is_alive():
        time.sleep(0.011)  # gc sleeps for 0.01 secs, sleep a beat longer
    files = hist.gc.unlocked_files()
    i = 1
    for _, _, f in files:
        try:
            lj = lazyjson.LazyJSON(f, reopen=False)
            for cmd in lj['cmds']:
                inp = cmd['inp'].splitlines()
                for line in inp:
                    if line == 'EOF':
                        continue
                    readline.add_history(line)
                    if RL_LIB is not None:
                        RL_LIB.history_set_pos(i)
                    i += 1
            lj.close()
        except (IOError, OSError):
            continue
def dump(self):
    """Write the cached history to external storage."""
    opts = builtins.__xonsh_env__.get('HISTCONTROL')
    last_inp = None
    cmds = []
    for cmd in self.buffer:
        if 'ignoredups' in opts and cmd['inp'] == last_inp:
            # Skipping dup cmd
            continue
        if 'ignoreerr' in opts and cmd['rtn'] != 0:
            # Skipping failed cmd
            continue
        cmds.append(cmd)
        last_inp = cmd['inp']
    with open(self.filename, 'r', newline='\n') as f:
        hist = xlj.LazyJSON(f).load()
    load_hist_len = len(hist['cmds'])
    hist['cmds'].extend(cmds)
    if self.at_exit:
        hist['ts'][1] = time.time()  # apply end time
        hist['locked'] = False
    if not builtins.__xonsh_env__.get('XONSH_STORE_STDOUT', False):
        [cmd.pop('out') for cmd in hist['cmds'][load_hist_len:] if 'out' in cmd]
    with open(self.filename, 'w', newline='\n') as f:
        xlj.ljdump(hist, f, sort_keys=True)
def files(self, only_unlocked=False):
    """Find and return the history files. Optionally locked files may be
    excluded.

    This is sorted by the last closed time. Returns a list of
    (timestamp, number of cmds, file name) tuples.
    """
    _ = self  # this could be a function but is intimate to this class
    # pylint: disable=no-member
    xdd = os.path.expanduser(builtins.__xonsh_env__.get('XONSH_DATA_DIR'))
    xdd = os.path.abspath(xdd)
    fs = [f for f in iglob(os.path.join(xdd, 'xonsh-*.json'))]
    files = []
    for f in fs:
        try:
            lj = lazyjson.LazyJSON(f, reopen=False)
            if only_unlocked and lj['locked']:
                continue
            # info: closing timestamp, number of commands, filename
            files.append((lj['ts'][1] or time.time(),
                          len(lj.sizes['cmds']) - 1,
                          f))
            lj.close()
        except (IOError, OSError, ValueError):
            continue
    files.sort()
    return files
def dump(self):
    """Write the cached history to external storage."""
    with open(self.filename, 'r', newline='\n') as f:
        hist = lazyjson.LazyJSON(f).load()
    hist['cmds'].extend(self.buffer)
    if self.at_exit:
        hist['ts'][1] = time.time()  # apply end time
        hist['locked'] = False
    with open(self.filename, 'w', newline='\n') as f:
        lazyjson.dump(hist, f, sort_keys=True)
def dump(self):
    """Write the cached history to external storage."""
    with open(self.filename, 'r', newline='\n') as f:
        hist = xlj.LazyJSON(f).load()
    hist['cmds'].extend(self.buffer)
    if self.at_exit:
        hist['ts'][1] = time.time()  # apply end time
        hist['locked'] = False
    with open(self.filename, 'w', newline='\n') as f:
        xlj.ljdump(hist, f, sort_keys=True)
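# Round-trip sketch (assumes only the ``LazyJSON``/``ljdump`` pair from
# xonsh.lazyjson; the file name and data are made up): the dump() variants
# above write an index-prefixed JSON document with ljdump and the readers
# fetch individual keys back lazily with LazyJSON.
import time
from xonsh import lazyjson as xlj

data = {"locked": True, "ts": [time.time(), None], "cmds": []}
with open("xonsh-demo.json", "w", newline="\n") as fp:
    xlj.ljdump(data, fp, sort_keys=True)  # writes offset/size index plus data
with open("xonsh-demo.json", "r", newline="\n") as fp:
    lj = xlj.LazyJSON(fp, reopen=False)
    print(lj["locked"])                   # -> True, fetched via the index
    print(lj.load()["cmds"])              # -> [], full parse only on demand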
def __init__(self, afile, bfile, reopen=False, verbose=False):
    """
    Parameters
    ----------
    afile : file handle or str
        The first file to diff
    bfile : file handle or str
        The second file to diff
    reopen : bool, optional
        Whether or not to reopen the file handles each time. The default here
        is opposite from the LazyJSON default because we know that we will be
        doing a lot of reading so it is best to keep the handles open.
    verbose : bool, optional
        Whether to print a verbose amount of information.
    """
    self.a = lazyjson.LazyJSON(afile, reopen=reopen)
    self.b = lazyjson.LazyJSON(bfile, reopen=reopen)
    self.verbose = verbose
    self.sm = SequenceMatcher(autojunk=False)
def __init__(self, f, reopen=True):
    """
    Parameters
    ----------
    f : file handle or str
        Path to xonsh history file.
    reopen : bool, optional
        Whether a new file handle should be opened for each load, passed
        directly into the LazyJSON class.
    """
    self._lj = lazyjson.LazyJSON(f, reopen=reopen)
def files(self, only_unlocked=False):
    """Find and return the history files. Optionally locked files may be
    excluded.

    This is sorted by the last closed time. Returns a list of
    (timestamp, number of cmds, file name) tuples.
    """
    # pylint: disable=no-member
    env = getattr(builtins, '__xonsh_env__', None)
    if env is None:
        return []
    boot = uptime.boottime()
    fs = _xhj_get_history_files(sort=False)
    files = []
    for f in fs:
        try:
            if os.path.getsize(f) == 0:
                # collect empty files (for gc)
                files.append((time.time(), 0, f))
                continue
            lj = xlj.LazyJSON(f, reopen=False)
            if lj['locked'] and lj['ts'][0] < boot:
                # computer was rebooted between when this history was created
                # and now and so this history should be unlocked.
                hist = lj.load()
                lj.close()
                hist['locked'] = False
                with open(f, 'w', newline='\n') as fp:
                    xlj.ljdump(hist, fp, sort_keys=True)
                lj = xlj.LazyJSON(f, reopen=False)
            if only_unlocked and lj['locked']:
                continue
            # info: closing timestamp, number of commands, filename
            files.append((lj['ts'][1] or lj['ts'][0],
                          len(lj.sizes['cmds']) - 1,
                          f))
            lj.close()
        except (IOError, OSError, ValueError):
            continue
    files.sort()
    return files
def unlocked_files(self):
    """Find the history files and return the ones that are unlocked,
    sorted by the last closed time. Returns a list of
    (timestamp, number of cmds, file name) tuples.
    """
    xdd = os.path.abspath(builtins.__xonsh_env__.get('XONSH_DATA_DIR'))
    fs = [f for f in iglob(os.path.join(xdd, 'xonsh-*.json'))]
    files = []
    for f in fs:
        try:
            lj = lazyjson.LazyJSON(f, reopen=False)
            if lj['locked']:
                continue
            # info: closing timestamp, number of commands, filename
            files.append((lj['ts'][1], len(lj.sizes['cmds']) - 1, f))
            lj.close()
        except (IOError, OSError, ValueError):
            continue
    files.sort()
    return files
def all_items(self, **kwargs):
    """
    Returns all history as found in XONSH_DATA_DIR.

    yield format: {'inp': cmd, 'ts': start_time, 'ind': index}
    """
    while self.gc and self.gc.is_alive():
        time.sleep(0.011)  # gc sleeps for 0.01 secs, sleep a beat longer
    ind = 0
    for f in _xhj_get_history_files():
        try:
            json_file = xlj.LazyJSON(f, reopen=False)
        except ValueError:
            # Invalid json file
            continue
        commands = json_file.load()['cmds']
        for c in commands:
            yield {'inp': c['inp'].rstrip(), 'ts': c['ts'][0], 'ind': ind}
            ind += 1
def all_items(self, **kwargs):
    """
    Returns all history as found in XONSH_DATA_DIR.

    yield format: {'inp': cmd, 'rtn': 0, ...}
    """
    while self.gc and self.gc.is_alive():
        time.sleep(0.011)  # gc sleeps for 0.01 secs, sleep a beat longer
    for f in _xhj_get_history_files():
        try:
            json_file = xlj.LazyJSON(f, reopen=False)
        except ValueError:
            # Invalid json file
            continue
        commands = json_file.load()['cmds']
        for c in commands:
            yield {'inp': c['inp'].rstrip(), 'ts': c['ts'][0]}
    # all items should also include session items
    yield from self.items()
def run(self):
    hist = builtins.__xonsh_history__
    buf = None
    ptkhist = self.ptkhist
    while self.wait_for_gc and hist.gc.is_alive():
        time.sleep(0.011)  # gc sleeps for 0.01 secs, sleep a beat longer
    files = hist.gc.files()
    for _, _, f in files:
        try:
            lj = lazyjson.LazyJSON(f, reopen=False)
            for cmd in lj['cmds']:
                line = cmd['inp'].rstrip()
                if line == 'EOF':
                    continue
                if len(ptkhist) == 0 or line != ptkhist[-1]:
                    ptkhist.append(line)
                    if buf is None:
                        buf = self._buf()
                        if buf is None:
                            continue
                    buf.reset(initial_document=buf.document)
            lj.close()
        except (IOError, OSError):
            continue