def test_call_sync_a_job_with_lock():
    """A service can `call_sync` another service's locked job and wait for its result."""
    with mock("test.test1", """
        from middlewared.service import job

        def mock(self):
            return self.middleware.call_sync("test.test2").wait_sync()
    """), mock("test.test2", """
        from middlewared.service import job

        @job(lock="test")
        def mock(self, job, *args):
            return 42
    """):
        # test1 blocks on test2's job; the final result must propagate back.
        assert call("test.test1") == 42
def test_client_job_callback():
    """Client-side job callbacks observe the RUNNING and final SUCCESS states."""
    with mock(
        "test.test1",
        """
        from middlewared.service import job

        @job()
        def mock(self, job, *args):
            import time
            time.sleep(2)
            return 42
        """,
    ):
        with client() as c:
            snapshots = []
            c.call("test.test1", job=True, callback=lambda job: snapshots.append(job.copy()))
            # The callback runs on a separate thread; give it time to settle.
            time.sleep(2)
            assert len(snapshots) == 2, pprint.pformat(snapshots, indent=2)
            assert snapshots[0]['state'] == 'RUNNING'
            assert snapshots[1]['state'] == 'SUCCESS'
            assert snapshots[1]['result'] == 42
def test_successful_job_events():
    """A successful job emits ADDED/WAITING then CHANGED/RUNNING then CHANGED/SUCCESS."""
    with mock(
        "test.test1",
        """
        from middlewared.service import job

        @job()
        def mock(self, job, *args):
            return 42
        """,
    ):
        with client() as c:
            received = []

            def on_event(type, **message):
                # Record every core.get_jobs event for later inspection.
                received.append((type, message))

            c.subscribe("core.get_jobs", on_event, sync=True)
            c.call("test.test1", job=True)

            assert len(received) == 3, pprint.pformat(received, indent=2)
            assert received[0][0] == "ADDED"
            assert received[0][1]["fields"]["state"] == "WAITING"
            assert received[1][0] == "CHANGED"
            assert received[1][1]["fields"]["state"] == "RUNNING"
            assert received[2][0] == "CHANGED"
            assert received[2][1]["fields"]["state"] == "SUCCESS"
            assert received[2][1]["fields"]["result"] == 42
def test_systemdataset_migrate_error(request):
    """
    Exporting a pool that holds the system dataset surfaces the system dataset
    reconfiguration error to the caller.

    On HA this test will fail with the error below if failover is enabled:
    [ENOTSUP] Disable failover before exporting last pool on system.
    """
    # NOTE(fix): this explanatory string previously appeared *after* the
    # depends() call, which made it a no-op expression statement rather than
    # the function docstring; it has been moved into docstring position.
    depends(request, ["pool_04"], scope="session")

    # Disable Failover
    if ha is True:
        results = PUT('/failover/', {"disabled": True, "master": True})
        assert results.status_code == 200, results.text

    pool = call("pool.query", [["name", "=", pool_name]], {"get": True})
    with mock(
        "systemdataset.update",
        """\
        from middlewared.service import job, CallError

        @job()
        def mock(self, job, *args):
            raise CallError("Test error")
        """,
    ):
        # The mocked systemdataset.update failure must abort the export with
        # a clear, chained error message.
        with pytest.raises(ClientException) as e:
            call("pool.export", pool["id"], job=True)

        assert e.value.error == (
            "[EFAULT] This pool contains system dataset, but its reconfiguration failed: [EFAULT] Test error"
        )

    # Enable back Failover.
    if ha is True:
        results = PUT('/failover/', {"disabled": False, "master": True})
        assert results.status_code == 200, results.text
def test_lock_queue_size():
    """With lock_queue_size=1, surplus submissions collapse into the one queued job."""
    try:
        with mock("test.test1", """
            from middlewared.service import job

            @job(lock="test", lock_queue_size=1)
            def mock(self, job, *args):
                with open("/tmp/test", "a") as f:
                    f.write("a\\n")
                import time
                time.sleep(5)
        """):
            # Submit four jobs while only one may run and one may queue.
            job_ids = [call("test.test1") for _ in range(4)]
            for job_id in job_ids:
                call("core.job_wait", job_id, job=True)

            # Only two jobs actually executed (the runner and the single queued one).
            assert ssh("cat /tmp/test") == "a\na\n"
            # The third and fourth submissions were deduplicated onto the queued job.
            assert job_ids[2] == job_ids[1]
            assert job_ids[3] == job_ids[1]
    finally:
        with contextlib.suppress(FileNotFoundError):
            os.unlink("/tmp/test")
def test__does_not_emit_alert(request):
    """50% pool capacity is below the warning threshold, so no alert is raised."""
    depends(request, ["pool_04"], scope="session")
    # Fake a single pool sitting at 50% capacity.
    fake_pool = {
        "name": pool,
        "properties": {
            "capacity": {
                "parsed": "50",
            }
        },
    }
    with mock("zfs.pool.query", return_value=[fake_pool]):
        assert call("alert.run_source", "ZpoolCapacity") == []
def write_to_log(string):
    """Helper: have middlewared log `string` and verify it lands in the log file."""
    # Precondition: the marker must not already be present.
    assert string not in read_log()

    with mock(
        "test.test1",
        f"""
        from middlewared.service import lock

        async def mock(self, *args):
            self.logger.debug({string!r})
        """,
    ):
        call("test.test1")
        assert string in read_log()
def test_private_params_do_not_leak_to_core_get_jobs():
    """Arguments marked private=True appear redacted in core.get_jobs output."""
    with mock(
        "test.test1",
        """
        from middlewared.service import accepts, job
        from middlewared.schema import Dict, Str

        @accepts(Dict("test", Str("password", private=True)))
        @job()
        async def mock(self, job, args):
            return 42
        """,
    ):
        submitted = call("test.test1", {"password": "******"})
        stored = call("core.get_jobs", [["id", "=", submitted]], {"get": True})
        # The stored job record must only ever contain the redacted value.
        assert stored["arguments"] == [{"password": "******"}]
def test__does_not_flap_alert(request):
    """79% capacity is in the hysteresis band: the checker reports unavailable instead of flapping."""
    depends(request, ["pool_04"], scope="session")
    # Fake a single pool sitting at 79% capacity.
    fake_pool = {
        "name": pool,
        "properties": {
            "capacity": {
                "parsed": "79",
            }
        },
    }
    with mock("zfs.pool.query", return_value=[fake_pool]):
        with pytest.raises(CallError) as e:
            call("alert.run_source", "ZpoolCapacity")

        assert e.value.errno == CallError.EALERTCHECKERUNAVAILABLE
def test__emits_alert(request):
    """85% pool capacity crosses the warning threshold and emits ZpoolCapacityWarning."""
    depends(request, ["pool_04"], scope="session")
    # Fake a single pool sitting at 85% capacity.
    fake_pool = {
        "name": pool,
        "properties": {
            "capacity": {
                "parsed": "85",
            }
        },
    }
    with mock("zfs.pool.query", return_value=[fake_pool]):
        emitted = call("alert.run_source", "ZpoolCapacity")
        assert len(emitted) == 1
        assert emitted[0]["klass"] == "ZpoolCapacityWarning"
        assert emitted[0]["key"] == f'["{pool}"]'
        assert emitted[0]["args"] == {"volume": pool, "capacity": 85}
def test_no_lock():
    """Two calls to an unlocked async method overlap instead of serializing."""
    with mock(
        "test.test1",
        """
        from middlewared.service import lock

        async def mock(self, *args):
            import asyncio
            await asyncio.sleep(5)
        """,
    ):
        start = time.monotonic()
        with client() as c:
            handles = [c.call("test.test1", background=True) for _ in range(2)]
            for handle in handles:
                c.wait(handle)
        # Two 5-second sleeps running concurrently must finish in under 6 seconds.
        assert time.monotonic() - start < 6
def test_private_params_do_not_leak_to_logs():
    """Arguments marked private=True are redacted in the middlewared log on exceptions."""
    with mock(
        "test.test1",
        """
        from middlewared.service import accepts
        from middlewared.schema import Dict, Str

        @accepts(Dict("test", Str("password", private=True)))
        async def mock(self, args):
            raise Exception()
        """,
    ):
        before = ssh("cat /var/log/middlewared.log")
        with client(py_exceptions=False) as c:
            with pytest.raises(Exception):
                c.call("test.test1", {"password": "******"})

        # Only examine the log lines produced by this call.
        new_lines = ssh("cat /var/log/middlewared.log")[len(before):]
        assert "Exception while calling test.test1(*[{'password': '******'}])" in new_lines
def test_snapshot_count_alert(request):
    """Exceeding the per-task snapshot limit produces the SnapshotCount alert."""
    depends(request, ["pool_04"], scope="session")
    with dataset("snapshot_count") as ds:
        with mock("pool.snapshottask.max_count", return_value=10):
            # Fill the dataset right up to the limit; no alert yet.
            for n in range(10):
                call("zfs.snapshot.create", {
                    "dataset": ds,
                    "name": f"snap-{n}"
                })

            assert call("alert.run_source", "SnapshotCount") == []

            # One more snapshot pushes us over the limit.
            call("zfs.snapshot.create", {"dataset": ds, "name": "snap-10"})

            alert = call("alert.run_source", "SnapshotCount")[0]
            assert alert["text"] % alert["args"] == (
                "Dataset tank/snapshot_count has more snapshots (11) than recommended (10). Performance or "
                "functionality might degrade.")
def test_cputemp():
    """Mocked CPU temperatures eventually show up in the cputemp reporting graph."""
    with mock("reporting.cpu_temperatures", return_value={0: 55, 1: 50}):
        for attempt in range(10):
            # collectd collects data every 10 seconds, but we might need to wait longer, as values might behave weird
            # when going from NaN to values for a few points (looks like an interpolation polynomial oscillation at the
            # edges of the interval)
            time.sleep(11)

            now = int(time.time())
            result = call("reporting.get_data", [{"name": "cputemp"}], {"start": now - 3600, "end": now})
            points = result[0]["data"]
            # The newest point may not be filled in yet; ignore it if so.
            if points[-1] == [None, None]:
                points.pop()
            if points[-1] == [55, 50]:
                break
        else:
            # Never saw the mocked values within the retry budget.
            assert False, result
def test_threading_lock():
    """Two calls to a @lock-decorated threaded method run strictly one after the other."""
    with mock(
        "test.test1",
        """
        from middlewared.service import lock

        @lock("test")
        def mock(self, *args):
            import time
            time.sleep(5)
        """,
    ):
        start = time.monotonic()
        with client() as c:
            handles = [c.call("test.test1", background=True) for _ in range(2)]
            for handle in handles:
                c.wait(handle)
        # Serialized execution of two 5-second sleeps takes at least 10 seconds.
        assert time.monotonic() - start >= 10
def test_snapshot_total_count_alert(request):
    """Exceeding the system-wide snapshot limit produces the SnapshotCount alert."""
    depends(request, ["pool_04"], scope="session")
    with dataset("snapshot_count") as ds:
        # Limits are relative to however many snapshots already exist.
        base = call("zfs.snapshot.query", [], {"count": True})
        with mock("pool.snapshottask.max_total_count", return_value=base + 10):
            # Fill right up to the limit; no alert yet.
            for n in range(10):
                call("zfs.snapshot.create", {
                    "dataset": ds,
                    "name": f"snap-{n}"
                })

            assert call("alert.run_source", "SnapshotCount") == []

            # One more snapshot pushes us over the system-wide limit.
            call("zfs.snapshot.create", {"dataset": ds, "name": "snap-10"})

            alert = call("alert.run_source", "SnapshotCount")[0]
            assert alert["text"] % alert["args"] == (
                f"Your system has more snapshots ({base + 11}) than recommended ({base + 10}). Performance or "
                "functionality might degrade.")
def test_lock_with_argument():
    """Argument-derived lock names serialize same-argument jobs but let others run in parallel."""
    with mock("test.test1", """
        from middlewared.service import job

        @job(lock=lambda args: f"test.{args[0]}")
        def mock(self, job, s):
            import time
            time.sleep(5)
    """):
        start = time.monotonic()
        # "a" jobs share a lock; the "b" job runs concurrently with them.
        job_ids = [
            call("test.test1", "a"),
            call("test.test1", "b"),
            call("test.test1", "a"),
        ]
        for job_id in job_ids:
            call("core.job_wait", job_id, job=True)

        # Two serialized "a" jobs dominate: ~10s total, well under 15s.
        assert 10 <= time.monotonic() - start < 15
def test_jobs_execute_sequentially_when_there_is_a_lock():
    """Jobs sharing a lock name run one at a time."""
    with mock("test.test1", """
        from middlewared.service import job

        @job(lock="test")
        def mock(self, job, *args):
            import time
            time.sleep(5)
    """):
        start = time.monotonic()
        job_ids = [call("test.test1") for _ in range(3)]
        for job_id in job_ids:
            call("core.job_wait", job_id, job=True)

        # Three serialized 5-second jobs take at least 15 seconds.
        assert time.monotonic() - start >= 15
def test_jobs_execute_in_parallel():
    """Jobs without a lock run concurrently."""
    with mock("test.test1", """
        from middlewared.service import job

        @job()
        def mock(self, job, *args):
            import time
            time.sleep(5)
    """):
        start = time.monotonic()
        job_ids = [call("test.test1") for _ in range(3)]
        for job_id in job_ids:
            call("core.job_wait", job_id, job=True)

        # Three concurrent 5-second jobs finish in under 6 seconds.
        assert time.monotonic() - start < 6
def test_block_hooks(block):
    """block_hooks() suppresses a registered hook exactly when it is named."""
    # Unique hook name so runs cannot interfere with each other.
    name = str(uuid.uuid4())
    with mock("test.test1", """
        async def mock(self, hook_name, blocked_hooks):
            from pathlib import Path
            sentinel = Path("/tmp/block_hooks_sentinel")

            async def hook(middleware):
                sentinel.write_text("")

            self.middleware.register_hook(hook_name, hook, blockable=True, sync=True)

            sentinel.unlink(missing_ok=True)
            with self.middleware.block_hooks(*blocked_hooks):
                await self.middleware.call_hook(hook_name)

            return sentinel.exists()
    """):
        with client() as c:
            blocked = [name] if block else []
            # The sentinel exists iff the hook was allowed to fire.
            assert c.call("test.test1", name, blocked) == (not block)