def test_gen_turb_con():
    """mean & std of iec turbulence, constrained turbulence is regenerated, correct columns"""
    # given -- constraining points
    con_spat_df = pd.DataFrame([[0, 0, 0, 70]], columns=_spat_rownames).T
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': 70, 'T': 300,
              'dt': 0.5, 'seed': 1337}
    coh_model = 'iec'
    con_turb_df = gen_turb(con_spat_df, coh_model=coh_model, **kwargs)
    con_tc = TimeConstraint().from_con_data(con_spat_df=con_spat_df.T,
                                            con_turb_df=con_turb_df)  # old-style con_spat_df was transposed
    # given -- simulated, constrained turbulence
    y, z = 0, [70, 72]
    spat_df = gen_spat_grid(y, z)
    wsp_func, sig_func, spec_func = power_profile, iec_sig, kaimal_spectrum
    sig_theo = np.tile([1.834, 1.4672, 0.917], 2)  # sig_u, sig_v, sig_w
    u_theo = np.array([10, 0, 0, 10.05650077210035, 0, 0])  # U1, ..., U2, ...
    theo_cols = [f'{"uvw"[ic]}_p{ip}' for ip in range(2) for ic in range(3)]
    # when
    sim_turb_df = gen_turb(spat_df, con_tc=con_tc, wsp_func=wsp_func, sig_func=sig_func,
                           spec_func=spec_func, coh_model=coh_model, **kwargs)
    # then (std dev, mean, and regenerated time series should be close; correct column names)
    pd.testing.assert_index_equal(sim_turb_df.columns, pd.Index(theo_cols))
    np.testing.assert_allclose(sig_theo, sim_turb_df.std(axis=0), atol=0.01, rtol=0.50)
    np.testing.assert_allclose(u_theo, sim_turb_df.mean(axis=0), atol=0.01)
    np.testing.assert_allclose(con_turb_df.u_p0, sim_turb_df.u_p0, atol=0.01)
def test_gen_spat_grid():
    """verify column names and entries of spat grid"""
    # given
    y, z, comps = 0, 0, [0, 2]
    theo_df = pd.DataFrame(np.zeros((4, 2)), index=_spat_rownames, columns=['u_p0', 'w_p0'])
    theo_df.iloc[0, :] = [0, 2]
    # when
    spat_df = utils.gen_spat_grid(y, z, comps=comps)
    # then
    pd.testing.assert_frame_equal(theo_df, spat_df, check_dtype=False)
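# For reference (a sketch, not part of the test): gen_spat_grid returns a 4-row DataFrame
# indexed by _spat_rownames (assumed to be k, x, y, z) with one column per simulated
# component/point, e.g. for gen_spat_grid(0, [70, 80]) the frame looks roughly like:
#
#        u_p0  v_p0  w_p0  u_p1  v_p1  w_p1
#   k     0.0   1.0   2.0   0.0   1.0   2.0
#   x     0.0   0.0   0.0   0.0   0.0   0.0
#   y     0.0   0.0   0.0   0.0   0.0   0.0
#   z    70.0  70.0  70.0  80.0  80.0  80.0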
def test_gen_turb_bad_con_tc():
    """verify an error is thrown when con_tc's dt doesn't match the simulation dt"""
    # given
    y, z = 0, [70, 80]
    spat_df = gen_spat_grid(y, z)
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': 75, 'T': 1, 'dt': 0.5}
    con_spat_df = pd.DataFrame([[0, 0, 0, 0, 75]], columns=['k', 'p_id', 'x', 'y', 'z'])
    con_turb_df = pd.DataFrame([[9], [11]], index=[0, 5], columns=['u_p0'])
    con_tc = TimeConstraint().from_con_data(con_spat_df=con_spat_df, con_turb_df=con_turb_df)
    # when and then
    with pytest.raises(ValueError):  # con_tc time step incompatible with sim dt
        gen_turb(spat_df, con_tc=con_tc, **kwargs)
def test_iec_turb_mn_std_dev():
    """test that iec turbulence has correct mean and std deviation"""
    # given
    y, z = 0, [70, 80]
    spat_df = gen_spat_grid(y, z)
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': 70, 'T': 300, 'dt': 1}
    sig_theo = np.array([1.834, 1.4672, 0.917, 1.834, 1.4672, 0.917])
    u_theo = np.array([10, 0, 0, 10.27066087, 0, 0])
    # when
    turb_df = gen_turb(spat_df, **kwargs)
    # then
    np.testing.assert_allclose(sig_theo, turb_df.std(axis=0), atol=0.01, rtol=0.50)
    np.testing.assert_allclose(u_theo, turb_df.mean(axis=0), atol=0.01)
def test_pctdf_to_h2turb():
    """save PyConTurb dataframe as binary file and load again"""
    # given
    path = '.'
    spat_df = utils.gen_spat_grid(0, [50, 70])
    turb_df = pd.DataFrame(np.random.rand(100, 6),
                           columns=[f'{c}_p{i}' for i in range(2) for c in 'uvw'])
    # when
    utils.df_to_h2turb(turb_df, spat_df, path)
    test_df = utils.h2turb_to_df(spat_df, path)
    for c in 'uvw':
        os.remove(os.path.join(path, f'{c}.bin'))
    # then
    pd.testing.assert_frame_equal(turb_df, test_df, check_dtype=False)
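# Side note (assumption, not asserted by the test): df_to_h2turb is expected to write one
# HAWC2/Mann-style binary box per component (u.bin, v.bin, w.bin) as raw 32-bit floats,
# so a file written above could also be inspected directly with something like:
#
#     u_box = np.fromfile(os.path.join(path, 'u.bin'), dtype=np.float32)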
def test_gen_turb_warnings():
    """verify a deprecation warning is thrown when the old con_data option is used"""
    # given
    con_spat_df = pd.DataFrame([[0, 0, 0, 0, 75]], columns=['k', 'p_id', 'x', 'y', 'z'])
    con_turb_df = pd.DataFrame([[9], [11]], index=[0, 1], columns=['u_p0'])
    con_data = {'con_spat_df': con_spat_df, 'con_turb_df': con_turb_df}
    y, z = 0, [70, 80]
    spat_df = gen_spat_grid(y, z)
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': 75, 'T': 2, 'dt': 1,
              'con_data': con_data}
    # when and then
    with pytest.warns(DeprecationWarning):
        gen_turb(spat_df, **kwargs)
def test_gen_turb_bad_interp():
    """verify the errors are thrown for bad interp_data options"""
    # given
    y, z = 0, [70, 80]
    spat_df = gen_spat_grid(y, z)
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': 75, 'T': 2, 'dt': 1}
    con_spat_df = pd.DataFrame([[0, 0, 0, 0, 75]], columns=['k', 'p_id', 'x', 'y', 'z'])
    con_turb_df = pd.DataFrame([[0], [1]], index=[9, 11], columns=['u_p0'])
    con_tc = TimeConstraint().from_con_data(con_spat_df=con_spat_df, con_turb_df=con_turb_df)
    # when and then
    with pytest.raises(ValueError):  # no con_tc given
        gen_turb(spat_df, interp_data='all', **kwargs)
    with pytest.raises(ValueError):  # bad string
        gen_turb(spat_df, interp_data='dog', con_tc=con_tc, **kwargs)
    with pytest.raises(ValueError):  # bad string in list
        gen_turb(spat_df, interp_data=['dog'], con_tc=con_tc, **kwargs)
def test_verify_iec_sim_coherence():
    """check that the simulated box has the right coherence"""
    # given
    y, z = [0], [70, 80]
    spat_df = gen_spat_grid(y, z)
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': 70, 'T': 300, 'dt': 100}
    coh_model = 'iec'
    n_real = 1000  # number of realizations in ensemble
    coh_thresh = 0.12  # coherence threshold
    # get theoretical coherence
    idcs = np.triu_indices(spat_df.shape[1], k=1)
    coh_theo = get_coh_mat(1 / kwargs['T'], spat_df, coh_model=coh_model,
                           **kwargs)[idcs].flatten()
    # when
    ii_jj = list(itertools.combinations(np.arange(spat_df.shape[1]), 2))  # pairwise indices
    ii, jj = [tup[0] for tup in ii_jj], [tup[1] for tup in ii_jj]
    turb_ens = np.empty((int(np.ceil(kwargs['T'] / kwargs['dt'])),
                         3 * len(y) * len(z), n_real))
    for i_real in range(n_real):
        turb_ens[:, :, i_real] = gen_turb(spat_df, coh_model=coh_model, **kwargs)
    turb_fft = np.fft.rfft(turb_ens, axis=0)
    x_ii, x_jj = turb_fft[1, ii, :], turb_fft[1, jj, :]
    coh = np.mean((x_ii * np.conj(x_jj))
                  / (np.sqrt(x_ii * np.conj(x_ii)) * np.sqrt(x_jj * np.conj(x_jj))),
                  axis=-1)
    max_coh_diff = np.abs(coh - coh_theo).max()
    # then
    assert max_coh_diff < coh_thresh
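# For reference, a minimal sketch of the exponential coherence model from IEC 61400-1
# Ed. 3 that the 'iec' coh_model is checked against above (get_coh_mat's actual
# signature and implementation live in PyConTurb's coherence module):
def _iec_coh_sketch(freq, sep_r, u_ref, l_c):
    """Coherence of u at two points separated by sep_r [m] at frequency freq [Hz]."""
    return np.exp(-12 * np.sqrt((freq * sep_r / u_ref) ** 2 + (0.12 * sep_r / l_c) ** 2))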
def test_make_hawc2_input():
    """verify correct strings for hawc2 input"""
    # given
    turb_dir = '.'
    spat_df = utils.gen_spat_grid([-10, 10], [109, 129])
    kwargs = {'z_ref': 119, 'T': 600, 'dt': 1, 'u_ref': 10}
    str_cntr_theo = ' center_pos0 0.0 0.0 -119.0 ; hub height\n'
    str_mann_theo = (' begin mann ;\n filename_u ./u.bin ; \n'
                     + ' filename_v ./v.bin ; \n filename_w ./w.bin ; \n'
                     + ' box_dim_u 600 10.0 ; \n box_dim_v 2 20.0 ; \n'
                     + ' box_dim_w 2 20.0 ; \n dont_scale 1 ; \n end mann ')
    str_output_theo1 = ' wind free_wind 1 10.0 0.0 -109.0 # wind_p0 ; '
    str_output_theo2 = ' wind free_wind 1 10.0 0.0 -129.0 # wind_p1 ; '
    # when
    str_cntr, str_mann, str_output = utils.make_hawc2_input(turb_dir, spat_df, **kwargs)
    # then
    assert str_cntr == str_cntr_theo
    assert str_mann == str_mann_theo
    assert str_output.split('\n')[0] == str_output_theo1
    assert str_output.split('\n')[1] == str_output_theo2
def test_binary_thru_hawc2():
    """create binary turbulence, run through hawc2, and reload from h2 output"""
    # turbulence inputs
    z_hub, l_blade = 119, 90  # hub height, blade length
    y = [-l_blade, l_blade]  # y-components (lateral) of turb grid
    z = [z_hub - l_blade, z_hub, z_hub + l_blade]  # z-components of turb grid
    kwargs = {'u_ref': 10, 'turb_class': 'B', 'l_c': 340.2, 'z_ref': z_hub, 'T': 50, 'dt': 1.}
    coh_model = 'iec'
    spat_df = gen_spat_grid(y, z)
    # paths, directories, and file names
    test_dir = os.path.dirname(__file__)  # test directory
    testdata_dir = os.path.join(test_dir, 'data')  # data directory
    tmp_dir = os.path.join(test_dir, 'tmp')  # temporary directory for test
    htc_name = 'load_save_turb.htc'  # hawc2 simulation template
    htc_path = os.path.join(testdata_dir, htc_name)  # path to htc template
    new_htc_path = os.path.join(tmp_dir, htc_name)  # htc file in tmp/
    csv_path = os.path.join(tmp_dir, 'turb_df.csv')  # save pandas turb here
    bat_path = os.path.join(tmp_dir, 'run_hawc2.bat')  # bat file to run h2
    hawc2_exe = 'C:/Users/rink/Documents/hawc2/HAWC2_all_12-5/HAWC2MB.exe'  # NOT 12.6!!!
    if not os.path.isfile(hawc2_exe):
        warnings.warn('***HAWC2 executable not found!!!***')
    # 1. create temp directory
    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)
    # 2. copy htc file there, replacing values
    T, dt, wsp = kwargs['T'], kwargs['dt'], kwargs['u_ref']  # referenced in the htc template
    str_cntr_pos0, str_mann, str_output = make_hawc2_input(tmp_dir, spat_df, **kwargs)
    with open(htc_path, 'r') as old_fid, open(new_htc_path, 'w') as new_fid:
        for line in old_fid:
            # evaluate each template line as an f-string so placeholders like
            # {T}, {dt}, {wsp}, {str_mann}, etc. are substituted
            new_line = eval('f\'' + line.rstrip() + '\'') + '\n'
            new_fid.write(new_line)
    # 3. generate turbulence files and save to csv
    turb_df = gen_turb(spat_df, coh_model=coh_model, wsp_func=constant_profile, **kwargs)
    df_to_h2turb(turb_df, spat_df, tmp_dir)
    turb_df.reset_index().to_csv(csv_path, index=False)
    del turb_df
    # 4. run HAWC2 on htc file
    with open(bat_path, 'w') as bat_fid:
        bat_fid.write(f'cd {tmp_dir}\n' + f'"{hawc2_exe}" {htc_name}')
    out = subprocess.call(f'{bat_path}', shell=True)
    if out:
        raise ValueError('Error running HAWC2!')
    # 5. load results
    turb_df = pd.read_csv(csv_path).set_index('index')  # simulated results
    dat_df = dat_to_df(new_htc_path).set_index('time')  # hawc2 results
    # 6. compare results
    time_vec = np.arange(4, 10)
    turb_tuples = [('u_p0', 1, 'vyg_p0', 1),   # u is along yg
                   ('v_p0', 1, 'vxg_p0', 1),   # v is along xg
                   ('w_p0', 1, 'vzg_p0', -1)]  # w is along -zg
    for py_key, py_sign, h2_key, h2_sign in turb_tuples:
        py_turb = np.interp(time_vec, turb_df.index, py_sign * turb_df[py_key])
        h2_turb = np.interp(time_vec, dat_df.index, h2_sign * dat_df[h2_key])
        np.testing.assert_allclose(py_turb, h2_turb, atol=1e-3)
    # 7. delete temp directory
    shutil.rmtree(tmp_dir)