def test_load(tmpdir):
    with jfile.open(tmpdir / "a.json", jfile.OPEN_RW) as fh:
        data = jfile.load(fh, ["missing"])
    assert data == ["missing"]
    with open(tmpdir / "a.json", "w") as fh:
        fh.write('{"present": true}\n')
    with jfile.open(tmpdir / "a.json", jfile.OPEN_RW) as fh:
        data = jfile.load(fh, ["missing"])
    assert data == {"present": True}
def _node_update(nodes_json: str, real_path: str) -> bool:
    # open r/o so that we don't initially open for write. we do a probe and
    # decide if anything needs to be updated. if we are wrong, it's not a
    # problem, we'll "time out" and reprobe later
    with jfile.open(nodes_json, jfile.OPEN_RO) as fh:
        jfile.flock(fh)
        json_data = jfile.load(fh, {})
        _, test_chg_nodes, test_need_reload = _node_update_check(
            json_data, nodes_json, real_path
        )
        if not test_chg_nodes and not test_need_reload:
            _logger.info("examined nodes state - no changes")
            return False
    # we probably need to make a change. but we recheck our state again
    # under lock, with the data file open r/w
    # update the nodes file and make changes to ctdb
    with jfile.open(nodes_json, jfile.OPEN_RW) as fh:
        jfile.flock(fh)
        json_data = jfile.load(fh, {})
        ctdb_nodes, chg_nodes, need_reload = _node_update_check(
            json_data, nodes_json, real_path
        )
        if not chg_nodes and not need_reload:
            _logger.info("reexamined nodes state - no changes")
            return False
        _logger.info("writing updates to ctdb nodes file")
        new_ctdb_nodes = list(ctdb_nodes)
        for entry in chg_nodes:
            pnn = entry["pnn"]
            expected_line = _entry_to_node(ctdb_nodes, entry)
            if _node_line(new_ctdb_nodes, pnn) == expected_line:
                continue
            if entry["state"] == NodeState.NEW:
                if pnn != len(new_ctdb_nodes):
                    raise ValueError(
                        f"unexpected pnn in new entry {entry}:"
                        f" nodes: {new_ctdb_nodes}"
                    )
                new_ctdb_nodes.append(expected_line)
            else:
                new_ctdb_nodes[pnn] = expected_line
        with open(real_path, "w") as nffh:
            write_nodes_file(nffh, new_ctdb_nodes)
            nffh.flush()
            os.fsync(nffh)
        _logger.info("running: ctdb reloadnodes")
        subprocess.check_call(list(samba_cmds.ctdb["reloadnodes"]))
        for entry in need_reload:
            entry["state"] = next_state(entry["state"])
            _logger.debug(
                "setting node identity=[{}] pnn={} to {}".format(
                    entry["identity"],
                    entry["pnn"],
                    entry["state"],
                )
            )
        jfile.dump(json_data, fh)
        fh.flush()
        os.fsync(fh)
    return True
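# Illustrative sketch (not part of the original module): the probe-then-commit
# pattern that _node_update follows, reduced to its jfile calls. A cheap check
# runs under a read-only handle first; the decision is re-verified under the
# read-write handle before anything is written, so a stale first read only
# costs an extra probe. The predicate `_needs_change` is hypothetical; only
# jfile.open/flock/load/dump mirror the real code.
def _example_probe_then_commit(path: str) -> bool:
    with jfile.open(path, jfile.OPEN_RO) as fh:
        jfile.flock(fh)
        if not _needs_change(jfile.load(fh, {})):  # hypothetical predicate
            return False
    with jfile.open(path, jfile.OPEN_RW) as fh:
        jfile.flock(fh)
        data = jfile.load(fh, {})
        if not _needs_change(data):  # re-check under the write lock
            return False
        data["updated"] = True  # stand-in for the real mutation
        jfile.dump(data, fh)
    return True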
def test_dump(tmpdir):
    with jfile.open(tmpdir / "a.json", jfile.OPEN_RW) as fh:
        jfile.dump({"something": "good"}, fh)
    with jfile.open(tmpdir / "a.json", jfile.OPEN_RO) as fh:
        data = jfile.load(fh)
    assert data == {"something": "good"}
    with jfile.open(tmpdir / "a.json", jfile.OPEN_RW) as fh:
        jfile.dump({"something": "better"}, fh)
    with jfile.open(tmpdir / "a.json", jfile.OPEN_RO) as fh:
        data = jfile.load(fh)
    assert data == {"something": "better"}
def refresh_node_in_statefile(
    identity: str, node: str, pnn: int, path: str
) -> None:
    """Assuming the node is already in the statefile, update the state in
    the case that the node (IP) has changed.
    """
    with jfile.open(path, jfile.OPEN_RW) as fh:
        jfile.flock(fh)
        data = jfile.load(fh, {})
        _refresh_statefile(data, identity, node, pnn)
        jfile.dump(data, fh)
def pnn_in_nodes(pnn: int, nodes_json: str, real_path: str) -> bool:
    """Returns true if the specified pnn has an entry in the nodes json
    file and the node is already added to the ctdb nodes file.
    """
    with jfile.open(nodes_json, jfile.OPEN_RO) as fh:
        jfile.flock(fh)
        json_data = jfile.load(fh, {})
        current_nodes = json_data.get("nodes", [])
        for entry in current_nodes:
            if pnn == entry["pnn"] and _get_state_ok(entry):
                return True
    return False
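# Illustrative sketch (assumed layout, not from the original module): the
# state file read by pnn_in_nodes and the other helpers appears to be a JSON
# object with a "nodes" list, each entry carrying at least the keys accessed
# in this file ("identity", "node", "pnn", "state"). The concrete values
# below, including the serialized state string, are hypothetical.
_EXAMPLE_STATE = {
    "nodes": [
        {
            "identity": "example-node-0",  # hypothetical identity string
            "node": "192.0.2.10",          # node (IP) address
            "pnn": 0,                      # CTDB node number
            "state": "ready",              # assumed NodeState serialization
        },
    ],
}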
def add_node_to_statefile(
    identity: str, node: str, pnn: int, path: str, in_nodes: bool = False
) -> None:
    """Add the given node's identity, (node) IP, and PNN to the JSON based
    state file, located at `path`. If in_nodes is true, the state file will
    reflect that the node is already added to the CTDB nodes file.
    """
    with jfile.open(path, jfile.OPEN_RW) as fh:
        jfile.flock(fh)
        data = jfile.load(fh, {})
        _update_statefile(data, identity, node, pnn, in_nodes=in_nodes)
        jfile.dump(data, fh)
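# Illustrative usage sketch (not from the original module) tying together the
# state-file helpers above: add_node_to_statefile, pnn_in_nodes, and
# refresh_node_in_statefile. All identities, addresses, and paths are
# hypothetical; whether pnn_in_nodes reports the entry depends on
# _get_state_ok and the entry's current state.
def _example_statefile_round_trip(statefile: str, nodes_path: str) -> None:
    # record a brand new node in the JSON state file
    add_node_to_statefile("example-node-0", "192.0.2.10", 0, statefile)
    # query whether pnn 0 is known and in an acceptable state
    known = pnn_in_nodes(0, statefile, nodes_path)
    _logger.info("pnn 0 known to state file: %r", known)
    # if the node's IP changed, update the existing entry in place
    refresh_node_in_statefile("example-node-0", "192.0.2.20", 0, statefile)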
def _node_check(pnn: int, nodes_json: str, real_path: str) -> bool:
    with jfile.open(nodes_json, jfile.OPEN_RO) as fh:
        jfile.flock(fh)
        desired = jfile.load(fh, {}).get("nodes", [])
    ctdb_nodes = read_ctdb_nodes(real_path)
    # first: check to see if the current node is in the nodes file
    try:
        my_desired = [e for e in desired if e.get("pnn") == pnn][0]
    except IndexError:
        # no entry found for this node
        _logger.warning(f"PNN {pnn} not found in json state file")
        return False
    if my_desired["node"] not in ctdb_nodes:
        # this current node is not in the nodes file.
        # it is ineligible to make changes to the nodes file
        return False
    # this node is already in the nodes file!
    return True
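# Illustrative sketch (not from the original module) of the eligibility check
# _node_check performs: the node's address recorded in the JSON state file
# must already appear in the real CTDB nodes file. It assumes read_ctdb_nodes
# yields one address per line of that file (the usual CTDB nodes format) and
# that _update_statefile stores the address under the "node" key, as the
# accessors above suggest. Paths and addresses are hypothetical.
def _example_node_check(statefile: str, nodes_path: str) -> bool:
    add_node_to_statefile("example-node-0", "192.0.2.10", 0, statefile)
    with open(nodes_path, "w") as nffh:
        nffh.write("192.0.2.10\n")  # CTDB nodes file: one address per line
    # expected to be True under the assumptions above: pnn 0's address is
    # present in the nodes file
    return _node_check(0, statefile, nodes_path)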
def test_flock(tmpdir):
    import time
    import threading

    def sleepy_update(path):
        with jfile.open(path, jfile.OPEN_RW) as fh:
            jfile.flock(fh)
            data = jfile.load(fh, [0])
            time.sleep(0.2)
            data.append(data[-1] + 1)
            jfile.dump(data, fh)

    fpath = tmpdir / "a.json"
    t1 = threading.Thread(target=sleepy_update, args=(fpath,))
    t1.start()
    t2 = threading.Thread(target=sleepy_update, args=(fpath,))
    t2.start()
    t1.join()
    t2.join()
    with jfile.open(fpath, jfile.OPEN_RW) as fh:
        jfile.flock(fh)
        data = jfile.load(fh)
        assert data == [0, 1, 2]
def test_open(tmpdir):
    with pytest.raises(FileNotFoundError):
        jfile.open(tmpdir / "a.json", jfile.OPEN_RO)
    fh = jfile.open(tmpdir / "a.json", jfile.OPEN_RW)
    assert fh is not None
    fh.close()