def test_run_performance_job(auth0_id, nocommit_transaction, mockup_modelchain):
    """Running the performance job saves three results: a monthly summary
    first and the full performance series last."""
    store = storage.StorageInterface(user=auth0_id)
    stored_job, save, df = mockup_modelchain

    compute.run_performance_job(stored_job, store)

    # everything is saved in one call, as a list of three result objects
    assert save.call_count == 1
    saved = save.call_args[0][1]
    assert len(saved) == 3

    performance = saved[-1]
    assert performance.type == "performance data"
    buf = BytesIO(performance.data)
    buf.seek(0)
    perf_df = pd.read_feather(buf).set_index("time")
    # sum of 2 inverters
    assert perf_df.loc[df.index[0], "performance"] == 2.0
    pd.testing.assert_index_equal(
        perf_df.index,
        stored_job.definition.parameters.time_parameters._time_range,
    )

    monthly = saved[0]
    assert monthly.type == "monthly summary"
    buf = BytesIO(monthly.data)
    buf.seek(0)
    month_df = pd.read_feather(buf)
    assert len(month_df.index) == 12
    first_row = month_df.iloc[0]
    assert len(first_row) == 5
    assert first_row.loc["month"] == "January"
    assert abs(first_row.loc["total_energy"] - 2.0) < 1e-8
    assert abs(first_row.loc["plane_of_array_insolation"] - 1.0) < 1e-8
def test_generate_job_weather_data_inverter_multi_array(stored_job, auth0_id, mocker):
    """With 'inverter' weather granularity, each inverter's weather data is
    yielded once per array of that inverter."""
    si = storage.StorageInterface(user=auth0_id)

    def fake_get_data(job_id, data_id, si):
        return (job_id, data_id)

    mocker.patch("solarperformanceinsight_api.compute._get_data", new=fake_get_data)

    template = stored_job.data_objects[0]
    data_objects = []
    expected_ids = []
    for i in range(3):
        dobj = deepcopy(template)
        dobj.object_id = uuid1()
        expected_ids.append((stored_job.object_id, dobj.object_id))
        dobj.definition.schema_path = f"/inverters/{i}"
        data_objects.append(dobj)

    inverter = deepcopy(stored_job.definition.system_definition.inverters[0])
    inverter.arrays = [inverter.arrays[0], inverter.arrays[0]]
    stored_job.definition.system_definition.inverters = [inverter] * 3
    stored_job.definition.parameters.weather_granularity = "inverter"
    stored_job.data_objects = data_objects

    gen = compute.generate_job_weather_data(stored_job, si)
    assert str(type(gen)) == "<class 'generator'>"
    # each inverter's (job_id, data_id) pair is repeated per array
    assert list(gen) == [[i, i] for i in expected_ids]
def test_generate_job_weather_data_array(stored_job, auth0_id, mocker):
    """With 'array' weather granularity, data is yielded per array, grouped
    by inverter."""
    si = storage.StorageInterface(user=auth0_id)

    def fake_get_data(job_id, data_id, si):
        return (job_id, data_id)

    mocker.patch("solarperformanceinsight_api.compute._get_data", new=fake_get_data)

    template = stored_job.data_objects[0]
    data_objects = []
    expected = []
    for i in range(3):
        per_inverter = []
        for j in range(2):
            dobj = deepcopy(template)
            dobj.object_id = f"{i}_{j}"
            per_inverter.append((stored_job.object_id, dobj.object_id))
            dobj.definition.schema_path = f"/inverters/{i}/arrays/{j}"
            data_objects.append(dobj)
        expected.append(per_inverter)

    inverter = deepcopy(stored_job.definition.system_definition.inverters[0])
    array = deepcopy(inverter.arrays[0])
    inverter.arrays = [array, array]
    stored_job.definition.system_definition.inverters = [inverter] * 3
    stored_job.definition.parameters.weather_granularity = "array"
    stored_job.data_objects = data_objects

    gen = compute.generate_job_weather_data(stored_job, si)
    assert str(type(gen)) == "<class 'generator'>"
    genlist = list(gen)
    assert len(genlist[0]) == 2
    assert genlist[0][0][1] == "0_0"
    assert genlist[0][1][1] == "0_1"
    assert genlist[1][1][1] == "1_1"
    # one group of (job_id, data_id) pairs per inverter
    assert genlist == expected
def test_create_upload_delete_compute(
    client, nocommit_transaction, new_job, weather_df, async_queue, mocker, auth0_id
):
    """Deleting a job after its compute has been queued must not make the
    worker log an error when it later picks the job up."""
    create_resp = client.post("/jobs/", data=new_job.json())
    assert create_resp.status_code == 201
    job_id = create_resp.json()["object_id"]

    job_resp = client.get(f"/jobs/{job_id}")
    assert job_resp.status_code == 200
    stored_job = job_resp.json()
    assert len(stored_job["data_objects"]) == 1
    data_id = stored_job["data_objects"][0]["object_id"]

    # upload weather data as an arrow/feather file
    buf = BytesIO()
    weather_df.to_feather(buf)
    buf.seek(0)
    upload_resp = client.post(
        f"/jobs/{job_id}/data/{data_id}",
        files={"file": ("test.arrow", buf, "application/vnd.apache.arrow.file")},
    )
    assert upload_resp.status_code == 200
    assert client.get(f"/jobs/{job_id}/status").json()["status"] == "prepared"

    compute_resp = client.post(f"/jobs/{job_id}/compute")
    assert compute_resp.status_code == 202
    assert client.get(f"/jobs/{job_id}/status").json()["status"] == "queued"

    # delete the job before the worker has a chance to run it
    with storage.StorageInterface(user=auth0_id).start_transaction() as st:
        st.delete_job(job_id)

    worker = SimpleWorker([async_queue], connection=async_queue.connection)
    log = mocker.spy(worker, "log")
    worker.work(burst=True)
    # worker logs error when exception raised in job
    assert log.error.call_count == 0
def test_start_transaction_commit(mocker):
    """A transaction block that exits cleanly commits the connection."""
    si = storage.StorageInterface()
    mock_conn = mocker.MagicMock()
    mocker.patch.object(storage.engine, "connect", return_value=mock_conn)
    with si.start_transaction() as st:
        st.cursor.execute("select 1")
    mock_conn.commit.assert_called()
def test_start_transaction_rollback(mocker, err):
    """An exception raised inside the transaction rolls back the connection
    and never commits it."""
    si = storage.StorageInterface()
    mock_conn = mocker.MagicMock()
    mocker.patch.object(storage.engine, "connect", return_value=mock_conn)
    with pytest.raises(err):
        with si.start_transaction():
            raise err(400)
    mock_conn.rollback.assert_called()
    mock_conn.commit.assert_not_called()
def test_run_job_job_fail(job_id, auth0_id, mocker, msg, nocommit_transaction):
    """A compute function that raises stores a single 'error message' result
    for the job."""
    if msg:
        failing = mocker.MagicMock(side_effect=ValueError("message"))
    else:
        failing = mocker.MagicMock(side_effect=ValueError)
    mocker.patch.object(compute, "lookup_job_compute_function", return_value=failing)

    compute.run_job(job_id, auth0_id)
    assert failing.call_count == 1

    with storage.StorageInterface(user=auth0_id).start_transaction() as st:
        results = st.list_job_results(job_id)
        assert len(results) == 1
        assert results[0].definition.type == "error message"
def test_save_results_to_db(job_id, nocommit_transaction, auth0_id):
    """save_results_to_db stores each DBResult and marks the job complete."""
    si = storage.StorageInterface(user=auth0_id)
    with si.start_transaction() as st:
        # no results exist before saving
        assert len(st.list_job_results(job_id)) == 0

    index = pd.date_range(
        "2020-01-01T00:00Z", freq="10min", periods=3, name="time"
    )
    df = pd.DataFrame({"a": 0.0}, index=index)
    db_results = [
        compute.DBResult(schema_path="/", type="performance data", data=df),
        compute.DBResult(schema_path="/", type="weather data", data=df),
    ]
    compute.save_results_to_db(job_id, db_results, si)

    with si.start_transaction() as st:
        new_results = st.list_job_results(job_id)
        status = st.get_job_status(job_id)
        assert len(new_results) == 2
        assert status.status == "complete"
def test_generate_job_weather_data_system_multi_array(stored_job, auth0_id, mocker):
    """System-level weather for an inverter with two arrays is repeated
    once per array."""
    si = storage.StorageInterface(user=auth0_id)
    first_array = stored_job.definition.system_definition.inverters[0].arrays[0]
    stored_job.definition.system_definition.inverters[0].arrays = [
        first_array,
        first_array,
    ]

    def fake_get_data(job_id, data_id, si):
        return pd.DataFrame({"jid": job_id, "did": data_id}, index=[0])

    mocker.patch("solarperformanceinsight_api.compute._get_data", new=fake_get_data)

    dobj = deepcopy(stored_job.data_objects[0])
    dobj.object_id = uuid1()
    dobj.definition.schema_path = "/"
    stored_job.definition.parameters.weather_granularity = "system"
    stored_job.data_objects = [dobj]

    gen = compute.generate_job_weather_data(stored_job, si)
    assert str(type(gen)) == "<class 'generator'>"
    genlist = list(gen)
    assert len(genlist) == 1
    assert len(genlist[0]) == 2
def test_compare_expected_and_actual(mockup_modelchain, auth0_id, nocommit_transaction):
    """compare_expected_and_actual saves a monthly actual-vs-expected summary.

    Bug fix: the float-tolerance assertions previously compared
    ``(x - y) < eps`` without ``abs()``, which passes for *any* value of
    ``x`` far below ``y`` — e.g. ``expected_energy == -100`` would have
    slipped through. They now use ``abs(...) < eps``, consistent with
    ``test_run_performance_job``.
    """
    si = storage.StorageInterface(user=auth0_id)
    stored_job, save, df = mockup_modelchain

    compute.compare_expected_and_actual(stored_job, si)
    assert save.call_count == 1
    reslist = save.call_args[0][1]
    assert len(reslist) == 4

    summary = reslist[-1]
    assert summary.type == "actual vs expected energy"
    iob = BytesIO(summary.data)
    iob.seek(0)
    summary_df = pd.read_feather(iob)
    assert len(summary_df.index) == 12
    ser = summary_df.iloc[0]
    assert len(ser) == 5
    assert ser.loc["month"] == "January"
    # abs() so that values far *below* the target also fail the test
    assert abs(ser.loc["expected_energy"] - 2.0) < 1e-7
    assert ser.loc["actual_energy"] == 1.0
    assert abs(ser.loc["difference"] - -1.0) < 1e-7
    assert abs(ser.loc["ratio"] - 1.0 / 2.0) < 1e-7
def test_generate_job_weather_data_system(stored_job, auth0_id, mocker):
    """System-level weather granularity yields a single group containing the
    one system-wide dataframe."""
    si = storage.StorageInterface(user=auth0_id)

    def fake_get_data(job_id, data_id, si):
        return pd.DataFrame({"jid": job_id, "did": data_id}, index=[0])

    mocker.patch("solarperformanceinsight_api.compute._get_data", new=fake_get_data)

    dobj = deepcopy(stored_job.data_objects[0])
    dobj.object_id = uuid1()
    dobj.definition.schema_path = "/"
    stored_job.definition.parameters.weather_granularity = "system"
    stored_job.data_objects = [dobj]

    gen = compute.generate_job_weather_data(stored_job, si)
    assert str(type(gen)) == "<class 'generator'>"
    genlist = list(gen)
    assert len(genlist) == 1
    # returns list of dataframes for each item
    pd.testing.assert_frame_equal(
        genlist[0][0],
        pd.DataFrame({"jid": stored_job.object_id, "did": dobj.object_id}, index=[0]),
    )
def test_generate_job_weather_data_fail(stored_job, auth0_id, mocker):
    """An unrecognized weather granularity raises ValueError when the
    generator is consumed."""
    interface = storage.StorageInterface(user=auth0_id)
    stored_job.definition.parameters.weather_granularity = "unknown"
    with pytest.raises(ValueError):
        list(compute.generate_job_weather_data(stored_job, interface))
def test_get_data_bad(job_id, job_data_ids, auth0_id):
    """_get_data raises TypeError for this job/data-id combination."""
    interface = storage.StorageInterface(user=auth0_id)
    with pytest.raises(TypeError):
        compute._get_data(job_id, job_data_ids[0], interface)
def test_get_data(complete_job_id, complete_job_data_id, auth0_id):
    """_get_data returns a DataFrame for a complete job's data object."""
    interface = storage.StorageInterface(user=auth0_id)
    data = compute._get_data(complete_job_id, complete_job_data_id, interface)
    assert isinstance(data, pd.DataFrame)
def storage_interface(auth0_id):
    """Build a StorageInterface for *auth0_id* with commits disabled."""
    interface = storage.StorageInterface()
    interface.commit = False
    interface.user = auth0_id
    return interface