def dump(self): """Write the cached history to external storage.""" opts = builtins.__xonsh_env__.get('HISTCONTROL') last_inp = None cmds = [] for cmd in self.buffer: if 'ignoredups' in opts and cmd['inp'] == last_inp: # Skipping dup cmd continue if 'ignoreerr' in opts and cmd['rtn'] != 0: # Skipping failed cmd continue cmds.append(cmd) last_inp = cmd['inp'] with open(self.filename, 'r', newline='\n') as f: hist = xlj.LazyJSON(f).load() load_hist_len = len(hist['cmds']) hist['cmds'].extend(cmds) if self.at_exit: hist['ts'][1] = time.time() # apply end time hist['locked'] = False if not builtins.__xonsh_env__.get('XONSH_STORE_STDOUT', False): [cmd.pop('out') for cmd in hist['cmds'][load_hist_len:] if 'out' in cmd] with open(self.filename, 'w', newline='\n') as f: xlj.ljdump(hist, f, sort_keys=True)
def test_lazy_load_index():
    f = StringIO()
    ljdump({'wakka': 42}, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal({'wakka': 10, '__total__': 0}, lj.offsets)
    assert_equal({'wakka': 2, '__total__': 14}, lj.sizes)
def dump(self): """Write the cached history to external storage.""" opts = builtins.__xonsh_env__.get('HISTCONTROL') last_inp = None cmds = [] for cmd in self.buffer: if 'ignoredups' in opts and cmd['inp'] == last_inp: # Skipping dup cmd continue if 'ignoreerr' in opts and cmd['rtn'] != 0: # Skipping failed cmd continue cmds.append(cmd) last_inp = cmd['inp'] with open(self.filename, 'r', newline='\n') as f: hist = xlj.LazyJSON(f).load() load_hist_len = len(hist['cmds']) hist['cmds'].extend(cmds) if self.at_exit: hist['ts'][1] = time.time() # apply end time hist['locked'] = False if not builtins.__xonsh_env__.get('XONSH_STORE_STDOUT', False): [ cmd.pop('out') for cmd in hist['cmds'][load_hist_len:] if 'out' in cmd ] with open(self.filename, 'w', newline='\n') as f: xlj.ljdump(hist, f, sort_keys=True)
def dump(self): """Write the cached history to external storage.""" opts = builtins.__xonsh__.env.get("HISTCONTROL") last_inp = None cmds = [] for cmd in self.buffer: if "ignoredups" in opts and cmd["inp"] == last_inp: # Skipping dup cmd continue if "ignoreerr" in opts and cmd["rtn"] != 0: # Skipping failed cmd continue cmds.append(cmd) last_inp = cmd["inp"] with open(self.filename, "r", newline="\n") as f: hist = xlj.LazyJSON(f).load() load_hist_len = len(hist["cmds"]) hist["cmds"].extend(cmds) if self.at_exit: hist["ts"][1] = time.time() # apply end time hist["locked"] = False if not builtins.__xonsh__.env.get("XONSH_STORE_STDOUT", False): [cmd.pop("out") for cmd in hist["cmds"][load_hist_len:] if "out" in cmd] with open(self.filename, "w", newline="\n") as f: xlj.ljdump(hist, f, sort_keys=True)
def test_lazy_load_index(): f = StringIO() ljdump({"wakka": 42}, f) f.seek(0) lj = LazyJSON(f) assert {"wakka": 10, "__total__": 0} == lj.offsets assert {"wakka": 2, "__total__": 14} == lj.sizes
def __init__(self, filename=None, sessionid=None, buffersize=100, gc=True, **meta):
    """Represents a xonsh session's history as an in-memory buffer that is
    periodically flushed to disk.

    Parameters
    ----------
    filename : str, optional
        Location of history file, defaults to
        ``$XONSH_DATA_DIR/history_json/xonsh-{sessionid}.json``.
    sessionid : int, uuid, str, optional
        Current session identifier, will generate a new sessionid if not set.
    buffersize : int, optional
        Maximum buffersize in memory.
    meta : optional
        Top-level metadata to store along with the history. The kwargs
        'cmds' and 'sessionid' are not allowed and will be overwritten.
    gc : bool, optional
        Run garbage collector flag.
    """
    super().__init__(sessionid=sessionid, **meta)
    if filename is None:
        # pylint: disable=no-member
        data_dir = _xhj_get_data_dir()
        self.filename = os.path.join(
            data_dir, "xonsh-{0}.json".format(self.sessionid))
    else:
        self.filename = filename

    if self.filename and not os.path.exists(os.path.expanduser(self.filename)):
        meta["cmds"] = []
        meta["sessionid"] = str(self.sessionid)
        with open(self.filename, "w", newline="\n") as f:
            xlj.ljdump(meta, f, sort_keys=True)
        try:
            os.chmod(self.filename, 0o600)
        except Exception:  # pylint: disable=broad-except
            pass

    self.buffer = []
    self.buffersize = buffersize
    self._queue = collections.deque()
    self._cond = threading.Condition()
    self._len = 0
    self._skipped = 0
    self.last_cmd_out = None
    self.last_cmd_rtn = None
    self.gc = JsonHistoryGC() if gc else None
    # command fields that are known
    self.tss = JsonCommandField("ts", self)
    self.inps = JsonCommandField("inp", self)
    self.outs = JsonCommandField("out", self)
    self.rtns = JsonCommandField("rtn", self)
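The constructor above derives the default history path from the session id using the pattern xonsh-{sessionid}.json under the data directory (newer versions place it in a history_json subdirectory, per the docstring). A minimal sketch of that naming scheme, detached from xonsh's environment handling; the directory shown is an assumed stand-in for $XONSH_DATA_DIR:

# Illustrative sketch only: the data directory below is an assumed stand-in for
# $XONSH_DATA_DIR; real sessions resolve it through the xonsh environment.
import os
import uuid

sessionid = uuid.uuid4()  # generated when no sessionid is passed in
data_dir = os.path.expanduser("~/.local/share/xonsh")
filename = os.path.join(data_dir, "xonsh-{0}.json".format(sessionid))
# e.g. /home/user/.local/share/xonsh/xonsh-<uuid4>.json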
def dump(self): """Write the cached history to external storage.""" opts = builtins.__xonsh__.env.get("HISTCONTROL") last_inp = None cmds = [] for cmd in self.buffer: if "ignoredups" in opts and cmd["inp"] == last_inp: # Skipping dup cmd if self.skip is not None: self.skip(1) continue if "ignoreerr" in opts and cmd["rtn"] != 0: # Skipping failed cmd if self.skip is not None: self.skip(1) continue cmds.append(cmd) last_inp = cmd["inp"] with open(self.filename, "r", newline="\n") as f: hist = xlj.LazyJSON(f).load() load_hist_len = len(hist["cmds"]) hist["cmds"].extend(cmds) if self.at_exit: hist["ts"][1] = time.time() # apply end time hist["locked"] = False if not builtins.__xonsh__.env.get("XONSH_STORE_STDOUT", False): [ cmd.pop("out") for cmd in hist["cmds"][load_hist_len:] if "out" in cmd ] with open(self.filename, "w", newline="\n") as f: xlj.ljdump(hist, f, sort_keys=True)
def test_lazy_load_index():
    f = StringIO()
    ljdump({'wakka': 42}, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert {'wakka': 10, '__total__': 0} == lj.offsets
    assert {'wakka': 2, '__total__': 14} == lj.sizes
def test_lazy_list_empty():
    x = []
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert 0 == len(lj)
    assert x == lj.load()
def test_lazy_list_empty():
    x = []
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal(0, len(lj))
    assert_equal(x, lj.load())
def test_lazy_dict_empty():
    x = {}
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal(0, len(lj))
    assert_equal(x, lj.load())
def test_lazy_dict():
    f = StringIO()
    ljdump({'wakka': 42}, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal(['wakka'], list(lj.keys()))
    assert_equal(42, lj['wakka'])
    assert_equal(1, len(lj))
    assert_equal({'wakka': 42}, lj.load())
def test_lazy_dict(): f = StringIO() ljdump({"wakka": 42}, f) f.seek(0) lj = LazyJSON(f) assert ["wakka"] == list(lj.keys()) assert 42 == lj["wakka"] assert 1 == len(lj) assert {"wakka": 42} == lj.load()
def test_lazy_list_str():
    x = ['I', 'have', 'seen', 'the', 'wind', 'blow']
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert 'the' == lj[3]
    assert x[:2:-2] == lj[:2:-2]
    assert x == [_ for _ in lj]
    assert x == lj.load()
def test_lazy_list_list_ints():
    x = [[0, 1], [6, 28], [496, 8128]]
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert isinstance(lj[1], LJNode)
    assert 28 == lj[1][1]
    assert [6, 28] == lj[1].load()
    assert x == lj.load()
def test_lazy_list_str(): x = ["I", "have", "seen", "the", "wind", "blow"] f = StringIO() ljdump(x, f) f.seek(0) lj = LazyJSON(f) assert "the" == lj[3] assert x[:2:-2] == lj[:2:-2] assert x == [_ for _ in lj] assert x == lj.load()
def test_lazy_list_str():
    x = ['I', 'have', 'seen', 'the', 'wind', 'blow']
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal('the', lj[3])
    assert_equal(x[:2:-2], lj[:2:-2])
    assert_equal(x, [_ for _ in lj])
    assert_equal(x, lj.load())
def test_lazy_list_list_ints():
    x = [[0, 1], [6, 28], [496, 8128]]
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_is_instance(lj[1], LJNode)
    assert_equal(28, lj[1][1])
    assert_equal([6, 28], lj[1].load())
    assert_equal(x, lj.load())
def test_lazy_list_ints():
    x = [0, 1, 6, 28, 496, 8128]
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal(28, lj[3])
    assert_equal(x[:2:-2], lj[:2:-2])
    assert_equal(x, [_ for _ in lj])
    assert_equal(x, lj.load())
def test_lazy_list_ints():
    x = [0, 1, 6, 28, 496, 8128]
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert 28 == lj[3]
    assert x[:2:-2] == lj[:2:-2]
    assert x == [_ for _ in lj]
    assert x == lj.load()
def dump(self): """Write the cached history to external storage.""" with open(self.filename, 'r', newline='\n') as f: hist = xlj.LazyJSON(f).load() hist['cmds'].extend(self.buffer) if self.at_exit: hist['ts'][1] = time.time() # apply end time hist['locked'] = False with open(self.filename, 'w', newline='\n') as f: xlj.ljdump(hist, f, sort_keys=True)
def dump(self): """Write the cached history to external storage.""" with open(self.filename, 'r', newline='\n') as f: hist = LazyJSON(f).load() hist['cmds'].extend(self.buffer) if self.at_exit: hist['ts'][1] = time.time() # apply end time hist['locked'] = False with open(self.filename, 'w', newline='\n') as f: ljdump(hist, f, sort_keys=True)
def files(self, only_unlocked=False): """Find and return the history files. Optionally locked files may be excluded. This is sorted by the last closed time. Returns a list of (file_size, timestamp, number of cmds, file name) tuples. """ env = XSH.env if env is None: return [] xonsh_debug = env.get("XONSH_DEBUG", 0) boot = uptime.boottime() fs = _xhj_get_history_files(sort=False) files = [] time_start = time.time() for f in fs: try: cur_file_size = os.path.getsize(f) if cur_file_size == 0: # collect empty files (for gc) files.append((os.path.getmtime(f), 0, f, cur_file_size)) continue lj = xlj.LazyJSON(f, reopen=False) if lj.get("locked", False) and lj["ts"][0] < boot: # computer was rebooted between when this history was created # and now and so this history should be unlocked. hist = lj.load() lj.close() hist["locked"] = False with open(f, "w", newline="\n") as fp: xlj.ljdump(hist, fp, sort_keys=True) lj = xlj.LazyJSON(f, reopen=False) if only_unlocked and lj.get("locked", False): continue # info: file size, closing timestamp, number of commands, filename ts = lj.get("ts", (0.0, None)) files.append( (ts[1] or ts[0], len(lj.sizes["cmds"]) - 1, f, cur_file_size)) lj.close() if xonsh_debug: time_lag = time.time() - time_start print( f"[history.{json.__name__}] Enumerated {len(files):,d} history files for {time_lag:0.4f}s.\r", end="", file=sys.stderr, ) except (OSError, ValueError): continue files.sort() # this sorts by elements of the tuple, # the first of which just happens to be file mod time. # so sort by oldest first. return files
def test_lazy_dict_dict_int(): x = {"wakka": {"jawaka": 42}} f = StringIO() ljdump(x, f) f.seek(0) lj = LazyJSON(f) assert ["wakka"] == list(lj.keys()) assert isinstance(lj["wakka"], LJNode) assert 42 == lj["wakka"]["jawaka"] assert 1 == len(lj) assert x == lj.load()
def test_lazy_dict_dict_int():
    x = {'wakka': {'jawaka': 42}}
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal(['wakka'], list(lj.keys()))
    assert_is_instance(lj['wakka'], LJNode)
    assert_equal(42, lj['wakka']['jawaka'])
    assert_equal(1, len(lj))
    assert_equal(x, lj.load())
def test_lazy_dict_dict_int():
    x = {'wakka': {'jawaka': 42}}
    f = StringIO()
    ljdump(x, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert ['wakka'] == list(lj.keys())
    assert isinstance(lj['wakka'], LJNode)
    assert 42 == lj['wakka']['jawaka']
    assert 1 == len(lj)
    assert x == lj.load()
def __init__(self, filename=None, sessionid=None, buffersize=100, gc=True, **meta):
    """Represents a xonsh session's history as an in-memory buffer that is
    periodically flushed to disk.

    Parameters
    ----------
    filename : str, optional
        Location of history file, defaults to
        ``$XONSH_DATA_DIR/xonsh-{sessionid}.json``.
    sessionid : int, uuid, str, optional
        Current session identifier, will generate a new sessionid if not set.
    buffersize : int, optional
        Maximum buffersize in memory.
    meta : optional
        Top-level metadata to store along with the history. The kwargs
        'cmds' and 'sessionid' are not allowed and will be overwritten.
    gc : bool, optional
        Run garbage collector flag.
    """
    super().__init__(sessionid=sessionid, **meta)
    if filename is None:
        # pylint: disable=no-member
        data_dir = builtins.__xonsh_env__.get('XONSH_DATA_DIR')
        data_dir = os.path.expanduser(data_dir)
        self.filename = os.path.join(
            data_dir, 'xonsh-{0}.json'.format(self.sessionid))
    else:
        self.filename = filename
    self.buffer = []
    self.buffersize = buffersize
    self._queue = collections.deque()
    self._cond = threading.Condition()
    self._len = 0
    self.last_cmd_out = None
    self.last_cmd_rtn = None
    meta['cmds'] = []
    meta['sessionid'] = str(self.sessionid)
    with open(self.filename, 'w', newline='\n') as f:
        xlj.ljdump(meta, f, sort_keys=True)
    self.gc = JsonHistoryGC() if gc else None
    # command fields that are known
    self.tss = JsonCommandField('ts', self)
    self.inps = JsonCommandField('inp', self)
    self.outs = JsonCommandField('out', self)
    self.rtns = JsonCommandField('rtn', self)
def __init__(self, filename=None, sessionid=None, buffersize=100, gc=True, **meta):
    """Represents a xonsh session's history as an in-memory buffer that is
    periodically flushed to disk.

    Parameters
    ----------
    filename : str, optional
        Location of history file, defaults to
        ``$XONSH_DATA_DIR/xonsh-{sessionid}.json``.
    sessionid : int, uuid, str, optional
        Current session identifier, will generate a new sessionid if not set.
    buffersize : int, optional
        Maximum buffersize in memory.
    meta : optional
        Top-level metadata to store along with the history. The kwargs
        'cmds' and 'sessionid' are not allowed and will be overwritten.
    gc : bool, optional
        Run garbage collector flag.
    """
    self.sessionid = sid = uuid.uuid4() if sessionid is None else sessionid
    if filename is None:
        # pylint: disable=no-member
        data_dir = builtins.__xonsh_env__.get('XONSH_DATA_DIR')
        data_dir = os.path.expanduser(data_dir)
        self.filename = os.path.join(
            data_dir, 'xonsh-{0}.json'.format(sid))
    else:
        self.filename = filename
    self.buffer = []
    self.buffersize = buffersize
    self._queue = collections.deque()
    self._cond = threading.Condition()
    self._len = 0
    self.last_cmd_out = None
    self.last_cmd_rtn = None
    meta['cmds'] = []
    meta['sessionid'] = str(sid)
    with open(self.filename, 'w', newline='\n') as f:
        ljdump(meta, f, sort_keys=True)
    self.gc = HistoryGC() if gc else None
    # command fields that are known
    self.tss = CommandField('ts', self)
    self.inps = CommandField('inp', self)
    self.outs = CommandField('out', self)
    self.rtns = CommandField('rtn', self)
def files(self, only_unlocked=False): """Find and return the history files. Optionally locked files may be excluded. This is sorted by the last closed time. Returns a list of (timestamp, number of cmds, file name) tuples. """ # pylint: disable=no-member env = getattr(builtins, '__xonsh_env__', None) if env is None: return [] boot = uptime.boottime() fs = _xhj_get_history_files(sort=False) files = [] for f in fs: try: if os.path.getsize(f) == 0: # collect empty files (for gc) files.append((time.time(), 0, f)) continue lj = xlj.LazyJSON(f, reopen=False) if lj['locked'] and lj['ts'][0] < boot: # computer was rebooted between when this history was created # and now and so this history should be unlocked. hist = lj.load() lj.close() hist['locked'] = False with open(f, 'w', newline='\n') as fp: xlj.ljdump(hist, fp, sort_keys=True) lj = xlj.LazyJSON(f, reopen=False) if only_unlocked and lj['locked']: continue # info: closing timestamp, number of commands, filename files.append((lj['ts'][1] or lj['ts'][0], len(lj.sizes['cmds']) - 1, f)) lj.close() except (IOError, OSError, ValueError): continue files.sort() return files
def test_lazy_int():
    f = StringIO()
    ljdump(42, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal(42, lj.load())
def test_lazy_str():
    f = StringIO()
    ljdump('wakka', f)
    f.seek(0)
    lj = LazyJSON(f)
    assert 'wakka' == lj.load()
def test_lazy_str(): f = StringIO() ljdump("wakka", f) f.seek(0) lj = LazyJSON(f) assert "wakka" == lj.load()
def test_lazy_str():
    f = StringIO()
    ljdump('wakka', f)
    f.seek(0)
    lj = LazyJSON(f)
    assert_equal('wakka', lj.load())
def test_lazy_int():
    f = StringIO()
    ljdump(42, f)
    f.seek(0)
    lj = LazyJSON(f)
    assert 42 == lj.load()
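The tests above round-trip values through StringIO; the sketch below applies the same ljdump/LazyJSON pair to an on-disk, history-shaped file and pulls a single nested field lazily instead of calling load() on the whole document. The file path, the sample data, and the import location are assumptions for illustration and may differ between xonsh versions.

# Illustrative sketch only: sample data and paths are assumptions; the import
# location of LazyJSON/ljdump may differ between xonsh versions.
import os
import tempfile
from xonsh.lazyjson import LazyJSON, ljdump

sample = {"sessionid": "demo",
          "cmds": [{"inp": "ls", "rtn": 0}, {"inp": "echo hi", "rtn": 0}]}
path = os.path.join(tempfile.mkdtemp(), "xonsh-demo.json")
with open(path, "w", newline="\n") as f:
    ljdump(sample, f, sort_keys=True)

with open(path, "r", newline="\n") as f:
    lj = LazyJSON(f)
    # Index-driven access parses only the requested node, not the whole file.
    assert lj["cmds"][1]["inp"] == "echo hi"
    assert lj["cmds"][1].load() == {"inp": "echo hi", "rtn": 0}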