Code example #1
def write_mesh_l12(mesh, hdf5_data):
    """
    Write the l12 data to hdf5 from the mesh
    Args:
        mesh: object, the mesh
        hdf5_data: object, the hdf5 opened file
    """
    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L12_COUNT, (2, ),
                                   dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L12_COUNT_ATTR)
    dset[0] = 2
    dset[1] = int(mesh.i_sym)

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L12_X,
                                   mesh.x.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L12_X_ATTR)
    dset[:, :] = mesh.x

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L12_P,
                                   mesh.p.shape,
                                   dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L12_P_ATTR)
    dset[:, :] = mesh.p + 1
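
Note: the writers in these examples rely on two helpers from the project's utility module. A minimal sketch of what they could look like, assuming utility.require_dataset wraps h5py dataset creation (recreating the dataset when the shape or dtype changed) and utility.set_hdf5_attributes copies a metadata dict onto the dataset; the real OpenWARP implementations may differ.

# Hypothetical helper sketches; not the actual OpenWARP utility module.
def require_dataset(hdf5_db, path, shape, dtype='f', maxshape=None):
    """Return a dataset at `path`, recreating it if shape or dtype changed."""
    if path in hdf5_db:
        dset = hdf5_db[path]
        if dset.shape == shape and dset.dtype == dtype:
            return dset
        del hdf5_db[path]  # shape/dtype mismatch: drop and recreate
    return hdf5_db.create_dataset(path, shape=shape, dtype=dtype,
                                  maxshape=maxshape)

def set_hdf5_attributes(dset, attributes):
    """Copy a dict of descriptive attributes onto the HDF5 dataset."""
    for key, value in attributes.items():
        dset.attrs[key] = value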
Code example #2
File: preprocessor.py Project: Eronana/OpenWARP
def write_mesh_l10(mesh, hdf5_data):
    """
    Write the l10 data to hdf5 from the mesh
    Args:
        mesh: object, the mesh
        hdf5_data: object, the hdf5 opened file
    """
    dset = utility.require_dataset(hdf5_data, structure.H5_L10_COUNT, (4, ), dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L10_COUNT_ATTR)

    dset[0] = mesh.i_sym
    dset[1] = mesh.n_points
    dset[2] = mesh.n_panels
    dset[3] = mesh.n_bodies

    dset = utility.require_dataset(hdf5_data, structure.H5_L10_CPANEL, mesh.c_panel.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L10_CPANEL_ATTR)
    dset[:] = mesh.c_panel + 1

    dset = utility.require_dataset(hdf5_data, structure.H5_L10_XM, mesh.xm.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L10_XM_ATTR)
    dset[:, :] = mesh.xm

    dset = utility.require_dataset(hdf5_data, structure.H5_L10_N, mesh.n.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L10_N_ATTR)
    dset[:, :] = mesh.n

    dset = utility.require_dataset(hdf5_data, structure.H5_L10_A, mesh.a.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L10_A_ATTR)
    dset[:] = mesh.a
Code example #3
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    signature = __name__ + '.solve(custom_config)'
    logger = logging.getLogger(__name__)
    utility.log_entrance(logger, signature, {'custom_config': custom_config})

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')
    utility.check_is_file(hdf5_file,
                          'The path to the hdf5 file configured by HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX,
                                       custom_config, 'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ,
                                       custom_config, 'GREEN_TABULATION_NUMZ')

    n_points_simpson = utility.get_setting(
        settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
        'GREEN_TABULATION_SIMPSON_NPOINTS')

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ),
                dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ),
                dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ),
                dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)
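
A minimal usage sketch for the solver entry point above; the keys mirror the settings the function reads, and the HDF5 path and tabulation sizes are placeholder values.

# Hypothetical invocation; 'db.hdf5' and the numeric values are placeholders.
custom_config = {
    'HDF5_FILE': 'db.hdf5',
    'GREEN_TABULATION_NUMX': 328,
    'GREEN_TABULATION_NUMZ': 46,
    'GREEN_TABULATION_SIMPSON_NPOINTS': 251,
}
print(solve(custom_config))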
Code example #4
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX,
                                       custom_config, 'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ,
                                       custom_config, 'GREEN_TABULATION_NUMZ')

    n_points_simpson = utility.get_setting(
        settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
        'GREEN_TABULATION_SIMPSON_NPOINTS')

    utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ),
                dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ),
                dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ),
                dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)
Code example #5
File: solver.py Project: NREL/OpenWARP
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    signature = __name__ + '.solve(custom_config)'
    logger = logging.getLogger(__name__)
    utility.log_entrance(logger, signature,
                        {'custom_config': custom_config})

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')
    utility.check_is_file(hdf5_file, 'The path to the hdf5 file configured by HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX, custom_config,
                                       'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ, custom_config,
                                       'GREEN_TABULATION_NUMZ')

    

    n_points_simpson = utility.get_setting(settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
                                           'GREEN_TABULATION_SIMPSON_NPOINTS')

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ), dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ), dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ), dtype='i')
            dset[:] = n_points_simpson

        return run(hdf5_db)
Code example #6
def write_mesh_l10(mesh, hdf5_data):
    """
    Write the l10 data to hdf5 from the mesh
    Args:
        mesh: object, the mesh
        hdf5_data: object, the hdf5 opened file
    """
    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L10_COUNT, (4, ),
                                   dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L10_COUNT_ATTR)

    dset[0] = mesh.i_sym
    dset[1] = mesh.n_points
    dset[2] = mesh.n_panels
    dset[3] = mesh.n_bodies

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L10_CPANEL,
                                   mesh.c_panel.shape,
                                   dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L10_CPANEL_ATTR)
    dset[:] = mesh.c_panel + 1

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L10_XM,
                                   mesh.xm.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L10_XM_ATTR)
    dset[:, :] = mesh.xm

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L10_N,
                                   mesh.n.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L10_N_ATTR)
    dset[:, :] = mesh.n

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_L10_A,
                                   mesh.a.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L10_A_ATTR)
    dset[:] = mesh.a
Code example #7
File: solver.py Project: Eronana/OpenWARP
def solve(custom_config):
    """
    Configure and then run the solver

    Args:
        custom_config, dict The custom configuration dictionary
    Returns:
        the output of the fortran function as string if successful
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')

    n_tabulatedx = utility.get_setting(settings.GREEN_TABULATION_NUMX, custom_config,
                                       'GREEN_TABULATION_NUMX')

    n_tabulatedz = utility.get_setting(settings.GREEN_TABULATION_NUMZ, custom_config,
                                       'GREEN_TABULATION_NUMZ')

    

    n_points_simpson = utility.get_setting(settings.GREEN_TABULATION_SIMPSON_NPOINTS, custom_config,
                                           'GREEN_TABULATION_SIMPSON_NPOINTS')

    utility.validate_file(hdf5_file, 'HDF5_FILE')
    with h5py.File(hdf5_file, "a") as hdf5_db:
        if n_tabulatedx and n_tabulatedx > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMX, (1, ), dtype='i')
            dset[:] = n_tabulatedx

        if n_tabulatedz and n_tabulatedz > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_NUMZ, (1, ), dtype='i')
            dset[:] = n_tabulatedz

        if n_points_simpson and n_points_simpson > 0:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_GREEN_TABULATION_SIMPSON_NPOINTS, (1, ), dtype='i')
            dset[:] = n_points_simpson



        return run(hdf5_db)
Code example #8
File: preprocessor.py Project: Eronana/OpenWARP
def write_mesh_l12(mesh, hdf5_data):
    """
    Write the l12 data to hdf5 from the mesh
    Args:
        mesh: object, the mesh
        hdf5_data: object, the hdf5 opened file
    """
    dset = utility.require_dataset(hdf5_data, structure.H5_L12_COUNT, (2, ), dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L12_COUNT_ATTR)
    dset[0] = 2
    dset[1] = int(mesh.i_sym)

    dset = utility.require_dataset(hdf5_data, structure.H5_L12_X, mesh.x.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_L12_X_ATTR)
    dset[:, :] = mesh.x

    dset = utility.require_dataset(hdf5_data, structure.H5_L12_P, mesh.p.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_L12_P_ATTR)
    dset[:, :] = mesh.p + 1
Code example #9
def preprocess(custom_config):
    """
    Configure and then run the preprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config,
                                    'HDF5_FILE')
    nemoh_cal = utility.get_setting(settings.NEMOH_CALCULATIONS_FILE,
                                    custom_config, 'NEMOH_CALCULATIONS_FILE')
    input_file = utility.get_setting(settings.NEMOH_INPUT_FILE, custom_config,
                                     'NEMOH_INPUT_FILE')
    utility.validate_string(hdf5_file, 'HDF5_FILE')
    if not nemoh_cal and not input_file:
        utility.validate_file(hdf5_file, 'HDF5_FILE')

    utility.mkdir_p(os.path.abspath(os.path.dirname(hdf5_file)))

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if nemoh_cal:
            utility.convert_calculations(nemoh_cal, hdf5_db)

        if input_file:
            utility.convert_input(input_file, hdf5_db)

        remove_irregular_frequencies = utility.get_setting(
            settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
            'REMOVE_IRREGULAR_FREQUENCIES')
        if remove_irregular_frequencies is not None:
            dset = utility.require_dataset(
                hdf5_db,
                structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES, (1, ),
                dtype='i')
            utility.set_hdf5_attributes(
                dset, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR)
            dset[:] = int(remove_irregular_frequencies)
        else:
            settings.REMOVE_IRREGULAR_FREQUENCIES = hdf5_db.get(
                structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0]

        run(hdf5_db, custom_config)
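
A usage sketch for the preprocessor entry point above; the configuration keys match the settings read in the function, and all file paths are placeholders.

# Hypothetical invocation; the paths are placeholders.
custom_config = {
    'HDF5_FILE': 'db.hdf5',
    'NEMOH_CALCULATIONS_FILE': 'Nemoh.cal',
    'NEMOH_INPUT_FILE': 'input.txt',
    'REMOVE_IRREGULAR_FREQUENCIES': False,
}
preprocess(custom_config)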
Code example #10
File: preprocessor.py Project: Eronana/OpenWARP
def preprocess(custom_config):
    """
    Configure and then run the preprocessor

    Args:
        custom_config, dict The custom configuration dictionary
    """

    if not custom_config:
        custom_config = {}

    hdf5_file = utility.get_setting(settings.HDF5_FILE, custom_config, 'HDF5_FILE')
    nemoh_cal = utility.get_setting(settings.NEMOH_CALCULATIONS_FILE, custom_config, 'NEMOH_CALCULATIONS_FILE')
    input_file = utility.get_setting(settings.NEMOH_INPUT_FILE, custom_config, 'NEMOH_INPUT_FILE')
    utility.validate_string(hdf5_file, 'HDF5_FILE')
    if not nemoh_cal and not input_file:
        utility.validate_file(hdf5_file, 'HDF5_FILE')

    utility.mkdir_p(os.path.abspath(os.path.dirname(hdf5_file)))

    with h5py.File(hdf5_file, "a") as hdf5_db:
        if nemoh_cal:
            utility.convert_calculations(nemoh_cal, hdf5_db)

        if input_file:
            utility.convert_input(input_file, hdf5_db)

        remove_irregular_frequencies = utility.get_setting(settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
                                       'REMOVE_IRREGULAR_FREQUENCIES')
        if remove_irregular_frequencies is not None:
            dset = utility.require_dataset(hdf5_db, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES, (1, ), dtype='i')
            utility.set_hdf5_attributes(dset, structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR)
            dset[:] = int(remove_irregular_frequencies)
        else:
            settings.REMOVE_IRREGULAR_FREQUENCIES = hdf5_db.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0]



        run(hdf5_db, custom_config)
Code example #11
def run(hdf5_data, custom_config):
    """
    This function runs the postprocessor
    Args:
        hdf5_data: object, the hdf5 opened file
        custom_config, dict The custom configuration dictionary
    """
    print('\n  -> Initialisation ...')

    try:
        environment = utility.read_environment(hdf5_data)
    except Exception as e:
        print('It looks like your hdf5 file is not correct. Please run ',
        'the preprocessor and the solver before running the postprocessor')
        sys.exit(1)

    result = read_results(hdf5_data)

    print('. Initialisation Done !\n')

    # Saving to hdf5 file
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS, result.added_mass.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_ATTR)
    dset[:, :, :] = result.added_mass

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_RADIATION_DAMPING, result.radiation_damping.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_RADIATION_DAMPING_ATTR)
    dset[:, :, :] = result.radiation_damping

    excitation_forces = result.diffraction_force + result.froudkrylov_force
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_EXCITATION_FORCES, excitation_forces.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_EXCITATION_FORCES_ATTR)
    dset[:, :, :] = excitation_forces
    

    tec_file = utility.get_setting(settings.RADIATION_COEFFICIENTS_TEC_FILE, custom_config,
                                   'RADIATION_COEFFICIENTS_TEC_FILE')
    if tec_file:
        save_radiation_coefficients(result, tec_file)
        print('Radiation coefficients successfully saved.\n')

    tec_file = utility.get_setting(settings.DIFFRACTION_FORCE_TEC_FILE, custom_config,
                                   'DIFFRACTION_FORCE_TEC_FILE')
    if tec_file:
        save_diffraction_force(result, tec_file)
        print('Diffraction forces successfully saved.\n')

    tec_file = utility.get_setting(settings.EXCITATION_FORCE_TEC_FILE, custom_config,
                                   'EXCITATION_FORCE_TEC_FILE')
    if tec_file:
        save_excitation_force(result, tec_file)
        print('Excitation forces successfully saved.\n')

    
    irf = get_irf(hdf5_data, result)
    if not irf:
        print('It looks like your hdf5 file is not correct. Please run ',
        'the preprocessor and the solver before running the postprocessor')
        sys.exit(1)
    if irf.switch == 1:
        irf = compute_irf(result, irf)
        # Saving to hdf5 file
        dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS_INFINITE, irf.added_mass.shape, dtype='f')
        utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_INFINITE_ATTR)
        dset[:, :] = irf.added_mass

        tec_file = utility.get_setting(settings.IRF_TEC_FILE, custom_config,
                                       'IRF_TEC_FILE')
        if tec_file:
            save_irf(irf, tec_file)
            print('IRF successfully saved.\n')

    raos = np.zeros((result.n_integration, result.n_w, result.n_beta), dtype='F')
    raos = compute_raos(raos, result)

    

    tec_file = utility.get_setting(settings.WAVE_FIELD_TEC_FILE, custom_config,
                                   'WAVE_FIELD_TEC_FILE')
    if tec_file and hdf5_data.get(structure.H5_SOLVER_USE_HIGHER_ORDER)[0] != 1 and hdf5_data.get(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)[0] != 1 and hdf5_data.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)[0] != 1:
        res = compute_wave_elevation(hdf5_data, environment, 0, 0, raos, result)
        save_wave_elevation(res['w'], res['etai'], res["etap"], res["eta"], res["x"], res["y"],
                            tec_file)
        print('Wave elevation successfully saved.\n')

    print(' -> All results successfully saved.\n')
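
The postprocessing run() above expects an already opened HDF5 database; a minimal driver sketch, with placeholder paths for the database and the optional Tecplot outputs:

# Hypothetical driver; paths are placeholders.
import h5py

with h5py.File('db.hdf5', 'a') as hdf5_db:
    run(hdf5_db, {
        'RADIATION_COEFFICIENTS_TEC_FILE': 'RadiationCoefficients.tec',
        'EXCITATION_FORCE_TEC_FILE': 'ExcitationForce.tec',
    })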
Code example #12
File: solver.py Project: NREL/OpenWARP
def write_result(hdf5_data, data):
    """
    Write the result from nemoh fortran to the hdf5
    Args:
        hdf5_data: object the hdf5 opened data
        data: the data sent from nemoh fortran
    """
    signature = __name__ + '.write_result(hdf5_data, data)'
    logger = logging.getLogger(__name__)
    # data is too huge for logging
    utility.log_entrance(logger, signature,
                        {'hdf5_data': hdf5_data})

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FORCES, data["line"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FORCES_ATTR)
    dset[:, :] = data["line"].astype(copy=False, dtype='f')

    temp = np.array(data["out_potential"], dtype='f')
    count_skip = 0
    for i in range(data["n_problems"]):
        if data["bc_switch_potential"][i] != 1:
            temp[i, :] = 0
            count_skip += 1
    if count_skip == data["n_problems"]:
        temp = np.zeros((0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_POTENTIAL, temp.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_POTENTIAL_ATTR)
    dset[:, :] = temp

    kochin = np.zeros((data["n_theta"], 3, data["n_problems"]), dtype='f')
    count_skip = 0

    for i in range(data["n_problems"]):
        if data["bc_switch_kochin"][i] == 1:
            for j in range(data["n_theta"]):

                kochin[j, 0, i] = data["theta"][j]
                kochin[j, 1, i] = np.abs(data["out_hkochin"][i, j])
                kochin[j, 2, i] = np.arctan2(np.imag(data["out_hkochin"][i, j]), np.real(data["out_hkochin"][i, j]))
        else:
            count_skip += 1

    if count_skip ==  data["n_problems"]:
        kochin = np.zeros((0, 0, 0), dtype='f')


    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_KOCHIN, kochin.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_KOCHIN_ATTR)
    dset[:, :, :] = kochin


    temp = np.zeros((data["nfs_points"], 6, data["n_problems"]), dtype='f')
    count_skip = 0

    for i in range(data["n_problems"]):
        if data["bc_switch_freesurface"][i] == 1:
            for j in range(data["nfs_points"]):

                temp[j, 0, i] = data["meshfs_x"][0, j]
                temp[j, 1, i] = data["meshfs_x"][1, j]
                temp[j, 2, i] = np.abs(data["out_phi"][i, j])
                temp[j, 3, i] = np.arctan2(np.imag(data["out_phi"][i, j]), np.real(data["out_phi"][i, j]))
                temp[j, 4, i] = -np.imag(data["out_phi"][i, j])
                temp[j, 5, i] = -np.real(data["out_phi"][i, j])
        else:
            count_skip += 1

    if count_skip ==  data["n_problems"]:
        temp = np.zeros((0, 0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FREE_SURFACE_POINTS, temp.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FREE_SURFACE_POINTS_ATTR)
    dset[:, :, :] = temp

    temp = np.zeros((data["nfs_panels"], 4, data["n_problems"]), dtype='f')
    count_skip = 0

    for i in range(data["n_problems"]):
        if data["bc_switch_freesurface"][i] == 1:
            for j in range(data["nfs_panels"]):
                temp[j,:, i ] = data["meshfs_p"]
        else:
            count_skip += 1
    if count_skip ==  data["n_problems"]:
        temp = np.zeros((0, 0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FREE_SURFACE_PANEL, temp.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FREE_SURFACE_PANEL_ATTR)
    dset[:, :, :] = temp


    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_DRIFT_FORCES, data["drift_forces"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_DRIFT_FORCES_ATTR)
    dset[:, :, :] = data["drift_forces"]

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_YAW_MOMENT, data["yaw_moment"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_YAW_MOMENT_ATTR)
    dset[:, :] = data["yaw_moment"]

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CENTER_BUOYANCY, data["center_buoyancy"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CENTER_BUOYANCY_ATTR)
    dset[:, :] = data["center_buoyancy"]

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_VOLUME_DISPLACEMENT, data["displacement"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_VOLUME_DISPLACEMENT_ATTR)
    dset[:] = data["displacement"]

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_WATER_PLANE_AREA, data["waterplane_area"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_WATER_PLANE_AREA_ATTR)
    dset[:] = data["waterplane_area"]

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_STIFNESS, data["stifness"].shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_STIFNESS_ATTR)
    dset[:, :, :] = data["stifness"]

    utility.log_exit(logger, signature, [None])
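
write_result repeatedly stores complex quantities (Kochin functions, free-surface potentials) as amplitude and phase; the decomposition is just np.abs together with np.arctan2 of the imaginary and real parts. A tiny illustrative sketch:

# Illustrative only.
import numpy as np

z = np.array([1 + 1j, -2j, 3.0])
amplitude = np.abs(z)
phase = np.arctan2(np.imag(z), np.real(z))  # phase in radians, in (-pi, pi]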
Code example #13
def read_mesh(hdf5_data, custom_config):
    """
    Read the mesh data from the hdf5 file
    Args:
        hdf5_data: object, the hdf5 opened file

    Return:
        the mesh data
    """
    n_points = 0
    n_panels = 0
    bodies = hdf5_data.get(structure.H5_BODIES).values()
    n_bodies = len(bodies)

    interior_mesh_points = np.empty((3, 0))
    interior_mesh_panels = np.empty((4, 0))
    interior_c_panels = np.empty((0))
    interior_n_points = 0
    interior_n_panels = 0
    remove_irregular_frequencies = utility.get_setting(
        settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
        'REMOVE_IRREGULAR_FREQUENCIES')
    for c in range(n_bodies):
        body = bodies[c]
        n_points += body.get(structure.H5_BODY_NUM_POINTS)[0]
        n_panels += body.get(structure.H5_BODY_NUM_PANELS)[0]

    mesh = TMesh(n_points=n_points, n_panels=n_panels, n_bodies=n_bodies)

    n_points = 0
    n_panels = 0

    for c in range(n_bodies):
        body = bodies[c]

        mesh_arr = body.get(structure.H5_BODY_MESH)

        n = mesh_arr[0, 1]

        if c > 0 and (n != mesh.i_sym):
            print(
                ' Error: there is an inconsistency in the mesh files regarding the xOz symmetries'
            )
            sys.exit()
        else:
            mesh.i_sym = int(n)

        m = body.get(structure.H5_BODY_NUM_POINTS)[0]
        n = body.get(structure.H5_BODY_NUM_PANELS)[0]

        for i in range(m):
            mesh.x[:, n_points + i] = np.array(mesh_arr[i + 1, 1:4])

        if remove_irregular_frequencies:
            # If we have to remove frequencies, then we need to discretize the free surface
            int_mesh = generate_mesh(np.asarray(mesh_arr[1:m, 1:4]))
            interior_mesh_points = np.concatenate(
                (interior_mesh_points, int_mesh["x"]), axis=1)
            interior_mesh_panels = np.concatenate(
                (interior_mesh_panels,
                 int_mesh["p"] + mesh.n_points + interior_n_points),
                axis=1)
            interior_c_panels = np.concatenate(
                (interior_c_panels, c * np.ones(int_mesh["n_panels"])), axis=0)
            interior_n_points += int_mesh["n_points"]
            interior_n_panels += int_mesh["n_panels"]

        for i in range(m, m + n):
            mesh.p[:, n_panels + i - m] = np.array(mesh_arr[i + 1, 0:4]) - 1
            for j in range(4):
                mesh.p[j, n_panels + i - m] += n_points
            mesh.c_panel[n_panels + i - m] = c

        n_points += m
        n_panels += n
        mesh.last_panel[c] = n_panels

    if remove_irregular_frequencies:
        # If we have to remove frequencies, then we need to extend the mesh so
        # that it contains the panels of the free surface too
        mesh_interior = TMesh(n_points=n_points + interior_n_points,
                              n_panels=n_panels + interior_n_panels,
                              n_bodies=n_bodies)
        mesh_interior.x[:, 0:n_points] = mesh.x
        mesh_interior.x[:, n_points:] = interior_mesh_points
        mesh_interior.p[:, 0:n_panels] = mesh.p
        mesh_interior.p[:, n_panels:] = interior_mesh_panels
        mesh_interior.last_panel = mesh.last_panel
        mesh_interior.c_panel[0:n_panels] = mesh.c_panel
        mesh_interior.c_panel[n_panels:] = interior_c_panels
        mesh_interior.i_sym = mesh.i_sym
        mesh = mesh_interior

        is_interior_domain = np.zeros((n_panels + interior_n_panels))
        is_interior_domain[n_panels:] = 1

        dset = utility.require_dataset(hdf5_data,
                                       structure.H5_SOLVER_IS_INTERIOR_DOMAIN,
                                       is_interior_domain.shape,
                                       dtype='i')
        utility.set_hdf5_attributes(
            dset, structure.H5_SOLVER_IS_INTERIOR_DOMAIN_ATTR)
        dset[:] = is_interior_domain

        n_panels += interior_n_panels
        n_points += interior_n_points

    for i in range(mesh.n_panels):
        u = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[0, i]]
        v = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[1, i]]
        w1 = np.cross(u, v)
        a1 = 0.5 * np.linalg.norm(w1)

        u = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[2, i]]
        v = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[2, i]]
        w2 = np.cross(u, v)
        a2 = 0.5 * np.linalg.norm(w2)

        mesh.a[i] = a1 + a2

        if mesh.a[i] < utility.EPS:
            print('Error: surface of panel ' + str(i) + ' is too small (' +
                  str(mesh.a[i]) + ')')
            sys.exit()

        mesh.xm[:, i] = (1. / 3) * (mesh.x[:, mesh.p[0, i]] +
                                    mesh.x[:, mesh.p[1, i]] +
                                    mesh.x[:, mesh.p[3, i]]) * a1 / mesh.a[i]

        mesh.xm[:, i] += (1. / 3) * (mesh.x[:, mesh.p[1, i]] +
                                     mesh.x[:, mesh.p[2, i]] +
                                     mesh.x[:, mesh.p[3, i]]) * a2 / mesh.a[i]

        u = w1 + w2

        mesh.n[:, i] = u / np.linalg.norm(u)

    return mesh
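
The geometry loop at the end of read_mesh splits each quadrilateral panel into the two triangles (p0, p1, p3) and (p1, p2, p3): cross products give the triangle areas, the panel centre is the area-weighted mean of the triangle centroids, and the normal is the normalised sum of the two cross products. A standalone sketch of that computation for a single panel (illustrative only); called with the four corner coordinates as length-3 numpy arrays, it returns the quantities the loop stores in mesh.a, mesh.xm and mesh.n.

# Illustrative helper; mirrors the per-panel geometry above.
import numpy as np

def panel_geometry(p0, p1, p2, p3):
    w1 = np.cross(p1 - p0, p3 - p1)          # first triangle (p0, p1, p3)
    w2 = np.cross(p3 - p2, p1 - p2)          # second triangle (p1, p2, p3)
    a1, a2 = 0.5 * np.linalg.norm(w1), 0.5 * np.linalg.norm(w2)
    area = a1 + a2
    centre = ((p0 + p1 + p3) * a1 + (p1 + p2 + p3) * a2) / (3.0 * area)
    normal = (w1 + w2) / np.linalg.norm(w1 + w2)
    return area, centre, normal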
Code example #14
File: preprocessor.py Project: Eronana/OpenWARP
def read_mesh(hdf5_data, custom_config):
    """
    Read the mesh data from the hdf5 file
    Args:
        hdf5_data: object, the hdf5 opened file

    Return:
        the mesh data
    """
    n_points=0
    n_panels=0
    bodies = hdf5_data.get(structure.H5_BODIES).values()
    n_bodies = len(bodies)

    interior_mesh_points = np.empty((3, 0))
    interior_mesh_panels = np.empty((4, 0))
    interior_c_panels = np.empty((0))
    interior_n_points = 0
    interior_n_panels = 0
    remove_irregular_frequencies = utility.get_setting(settings.REMOVE_IRREGULAR_FREQUENCIES, custom_config,
                                       'REMOVE_IRREGULAR_FREQUENCIES')
    for c in range(n_bodies):
        body = bodies[c]
        n_points += body.get(structure.H5_BODY_NUM_POINTS)[0]
        n_panels += body.get(structure.H5_BODY_NUM_PANELS)[0]

    mesh = TMesh(n_points=n_points, n_panels=n_panels, n_bodies=n_bodies)

    n_points = 0
    n_panels = 0

    for c in range(n_bodies):
        body = bodies[c]

        mesh_arr = body.get(structure.H5_BODY_MESH)

        n = mesh_arr[0, 1]

        if c > 0 and (n != mesh.i_sym):
            print(' Error: there is an inconsistency in the mesh files regarding the xOz symmetries')
            sys.exit()
        else:
            mesh.i_sym = int(n)

        m = body.get(structure.H5_BODY_NUM_POINTS)[0]
        n = body.get(structure.H5_BODY_NUM_PANELS)[0]

        for i in range(m):
            mesh.x[:, n_points + i] = np.array(mesh_arr[i + 1, 1:4])

        if remove_irregular_frequencies:
            # If we have to remove frequencies, then we need to discretize the free surface
            int_mesh = generate_mesh(np.asarray(mesh_arr[1:m, 1:4]))
            interior_mesh_points = np.concatenate((interior_mesh_points, int_mesh["x"]), axis=1)
            interior_mesh_panels = np.concatenate((interior_mesh_panels, int_mesh["p"]+mesh.n_points+interior_n_points), axis=1)
            interior_c_panels = np.concatenate((interior_c_panels, c*np.ones(int_mesh["n_panels"])), axis=0)
            interior_n_points += int_mesh["n_points"]
            interior_n_panels += int_mesh["n_panels"]

        for i in range(m, m+n):
            mesh.p[:, n_panels+i-m] = np.array(mesh_arr[i + 1, 0:4]) - 1
            for j in range(4):
                mesh.p[j, n_panels + i-m] += n_points
            mesh.c_panel[n_panels+i-m] = c

        n_points += m
        n_panels += n
        mesh.last_panel[c] = n_panels

    if remove_irregular_frequencies:
        # If we have to remove frequencies, then we need to extend the mesh so
        # that it contains the panels of the free surface too
        mesh_interior = TMesh(n_points=n_points +interior_n_points , n_panels=n_panels + interior_n_panels, n_bodies=n_bodies)
        mesh_interior.x[:, 0:n_points] = mesh.x
        mesh_interior.x[:, n_points:] = interior_mesh_points
        mesh_interior.p[:, 0:n_panels] = mesh.p
        mesh_interior.p[:, n_panels:] = interior_mesh_panels
        mesh_interior.last_panel = mesh.last_panel
        mesh_interior.c_panel[0:n_panels] = mesh.c_panel
        mesh_interior.c_panel[n_panels: ] = interior_c_panels
        mesh_interior.i_sym = mesh.i_sym
        mesh = mesh_interior


        is_interior_domain = np.zeros((n_panels + interior_n_panels))
        is_interior_domain[n_panels:] = 1

        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_IS_INTERIOR_DOMAIN, is_interior_domain.shape, dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_IS_INTERIOR_DOMAIN_ATTR)
        dset[:] = is_interior_domain

        n_panels += interior_n_panels
        n_points += interior_n_points




    for i in range(mesh.n_panels):
        u = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[0, i]]
        v = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[1, i]]
        w1 = np.cross(u, v)
        a1 = 0.5*np.linalg.norm(w1)

        u = mesh.x[:, mesh.p[3, i]] - mesh.x[:, mesh.p[2, i]]
        v = mesh.x[:, mesh.p[1, i]] - mesh.x[:, mesh.p[2, i]]
        w2 = np.cross(u, v)
        a2 = 0.5*np.linalg.norm(w2)

        mesh.a[i]= a1+a2

        if mesh.a[i] < utility.EPS:
            print('Error: surface of panel ' + str(i) + ' is too small (' + str(mesh.a[i]) + ')')
            sys.exit()

        mesh.xm[:, i] = (1./3)*(mesh.x[:, mesh.p[0, i]] + mesh.x[:, mesh.p[1, i]] + mesh.x[:, mesh.p[3, i]])*a1/mesh.a[i]

        mesh.xm[:, i] += (1./3)*(mesh.x[:, mesh.p[1, i]] + mesh.x[:, mesh.p[2, i]] + mesh.x[:, mesh.p[3, i]])*a2/mesh.a[i]

        u = w1 + w2

        mesh.n[:, i] = u/np.linalg.norm(u)

    return mesh
Code example #15
File: preprocessor.py Project: Eronana/OpenWARP
def run(hdf5_data, custom_config):
    """
    This function runs the preprocessor
    Args:
        hdf5_data: object, the hdf5 opened file
        custom_config, dict The custom configuration dictionary
    """
    n_radiation = 0
    n_integration = 0

    bodies = hdf5_data.get(structure.H5_BODIES)

    if not bodies:
        print('The bodies dataset is not found. It looks like your hdf5 file is not correct. Please set ',
        'NEMOH_CALCULATIONS_FILE and NEMOH_INPUT_FILE to a valid value prior to running the preprocessor ',
        'Alternatively, you could manually add the input')
        sys.exit(1)
    bodies = bodies.values()

    for body in bodies:
        n_radiation += body.get(structure.H5_FREEDOM_DEGREE).shape[0]
        n_integration += body.get(structure.H5_GENERALISED_FORCES).shape[0]

    n_w = hdf5_data.get(structure.H5_NUM_WAVE_FREQUENCIES)[0]
    w_min = hdf5_data.get(structure.H5_MIN_WAVE_FREQUENCIES)[0]
    w_max = hdf5_data.get(structure.H5_MAX_WAVE_FREQUENCIES)[0]
    w = np.zeros(n_w, settings.NEMOH_FLOAT)
    if n_w > 1:
        for j in range(n_w):
            w[j] = w_min+(w_max-w_min)*j/(n_w-1)
    else:
        w[0] = w_min

    n_beta = hdf5_data.get(structure.H5_NUM_WAVE_DIRECTIONS)[0]
    beta_min = hdf5_data.get(structure.H5_MIN_WAVE_DIRECTIONS)[0]
    beta_max = hdf5_data.get(structure.H5_MAX_WAVE_DIRECTIONS)[0]

    beta = np.zeros(n_beta, settings.NEMOH_FLOAT)

    if n_beta > 1:
        for j in range(n_beta):
            beta[j] = (beta_min+(beta_max-beta_min)*j/(n_beta-1))*math.pi/180.
    else:
        beta[0] = beta_min * math.pi/180.

    switch_potential = hdf5_data.get(structure.H5_SHOW_PRESSURE)[0] >= 1
    n_theta = hdf5_data.get(structure.H5_KOCHIN_NUMBER)[0]
    theta_min = hdf5_data.get(structure.H5_KOCHIN_MIN)[0]
    theta_max = hdf5_data.get(structure.H5_KOCHIN_MAX)[0]
    switch_kochin = n_theta > 0

    n_x = hdf5_data.get(structure.H5_FREE_SURFACE_POINTS_X)[0]
    n_y = hdf5_data.get(structure.H5_FREE_SURFACE_POINTS_Y)[0]
    l_x = hdf5_data.get(structure.H5_FREE_SURFACE_DIMENSION_X)[0]
    l_y = hdf5_data.get(structure.H5_FREE_SURFACE_DIMENSION_Y)[0]

    switch_free_surface = n_x > 0

    rad_case = [TCase() for x in range(n_radiation)]
    int_case = [TCase() for x in range(n_integration)]
    j_rad = 0
    j_int = 0

    for c in range(len(bodies)):
        body = bodies[c]
        freedom_degree = body.get(structure.H5_FREEDOM_DEGREE)
        m = freedom_degree.len()
        for i in range(m):
            case = TCase()
            case.i_case = freedom_degree[i, 0]
            case.direction = np.array(freedom_degree[i, 1:4])
            case.axis = np.array(freedom_degree[i, 4:7])
            case.i_body = c
            case.mode = i
            rad_case[j_rad + i] = case
        j_rad += m

        generalised_forces = body.get(structure.H5_GENERALISED_FORCES)
        m = generalised_forces.len()
        for i in range(m):
            case = TCase()
            case.i_case = generalised_forces[i, 0]
            case.direction = np.array(generalised_forces[i, 1:4])
            case.axis = np.array(generalised_forces[i, 4:7])
            case.i_body = c
            case.mode = i
            int_case[j_int + i] = case

        j_int += m

    print('')
    print('Summary of calculation')

    depth = hdf5_data.get(structure.H5_ENV_DEPTH)[0]
    if depth > 0:
        print('  ->  Water depth = ' + str(depth) + ' m')
    else:
        print('  ->  Infinite water depth')

    print('  -> ' + str(n_w) + ' wave frequencies from ' + str(w[0]) + ' to ' + str(w[n_w-1]))
    print('  -> ' + str(n_beta) + str(' wave directions from  ') + str(beta[0]) + ' to ' + str(beta[n_beta-1]))
    print('  -> ' + str(n_radiation) + ' radiation problems')
    print('  -> ' + str(n_integration) + ' forces')
    print('')

    mesh = read_mesh(hdf5_data, custom_config)
    write_mesh_l12(mesh, hdf5_data)
    write_mesh_l10(mesh, hdf5_data)

    mesh_tec_file = utility.get_setting(settings.MESH_TEC_FILE, custom_config, 'MESH_TEC_FILE')

    if mesh_tec_file:
        write_mesh_tec(mesh, mesh_tec_file)

    fnds = np.zeros((n_integration, mesh.n_panels*2**mesh.i_sym), settings.NEMOH_FLOAT)

    for j in range(n_integration):
        fnds[j, :] = compute_nds(mesh, int_case[j].body, int_case[j].i_case, int_case[j].direction, int_case[j].axis)

    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_INTEGRATION, fnds.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_INTEGRATION_ATTR)
    dset[:, :] = fnds

    environment = utility.read_environment(hdf5_data)

    normal_velocity = np.zeros((mesh.n_panels*2**mesh.i_sym, (n_beta+n_radiation)*n_w), settings.NEMOH_COMPLEX)
    fk_force = np.zeros((n_w, n_beta, n_integration), settings.NEMOH_COMPLEX)

    for i in range(n_w):
        for j in range(n_beta):

            result = compute_wave(mesh, w[i], beta[j], environment)
            pressure = result["pressure"]
            n_vel = result["n_vel"]
            normal_velocity[:, j+ i*(n_beta+n_radiation)] = n_vel
            # Calculate the corresponding FK forces
            for k in range(n_integration):
                #for c in range(mesh.n_panels*2**mesh.i_sym):
                    #fk_force[i, j, k] +=  pressure[c]*fnds[k, c]

                fk_force[i, j, k] = np.sum(pressure.flatten()*fnds[k, :].flatten())

        for j in range(n_radiation):
            n_vel = compute_radiation_condition(mesh, rad_case[j].body, rad_case[j].i_case, rad_case[j].direction,
                                        rad_case[j].axis)

            normal_velocity[:, j + n_beta + i*(n_beta+n_radiation)] = n_vel

    # Save body conditions
    n_problems = n_w*(n_radiation+n_beta)
    bc_omega = w.repeat(n_beta + n_radiation)
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_W, bc_omega.shape, dtype='f', maxshape=(None))
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_W_ATTR)
    dset[:] = bc_omega

    bc_switch_type = -np.ones(n_problems, dtype='f')
    bc_switch_type[0:bc_switch_type.shape[0]:n_beta + n_radiation] = beta
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_BETA, bc_switch_type.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_BETA_ATTR)
    dset[:] = bc_switch_type


    temp = int(switch_potential)*np.ones(n_problems, dtype='i')
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL, temp.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_POTENTIAL_ATTR)
    dset[:] = temp

    temp = int(switch_free_surface)*np.ones(n_problems, dtype='i')
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE, temp.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_FREE_SURFACE_ATTR)
    dset[:] = temp

    temp = int(switch_kochin)*np.ones(n_problems, dtype='i')
    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN, temp.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_SWITCH_KOCHIN_ATTR)
    dset[:] = temp

    dset = utility.require_dataset(hdf5_data, structure.H5_NORMAL_VELOCITY_VELOCITIES, normal_velocity.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_NORMAL_VELOCITY_VELOCITIES_ATTR)
    dset[:, :] = normal_velocity


    #fk_force_f = fk_force.flatten()
    #fk_force_o = np.vstack((np.abs(fk_force_f), np.arctan2(np.imag(fk_force_f), np.real(fk_force_f)))).transpose()
    fk_force_o = np.zeros((n_integration*n_w, 2*n_beta+2*n_radiation), dtype='f')
    idx = 0
    for k in range(n_integration):
        for i in range(n_w):
            for c in range(n_beta):
                fk_force_o[idx, 2*c] = np.abs(fk_force[i, c, k])
                fk_force_o[idx, 2*c+1] = np.arctan2(np.imag(fk_force[i, c, k]), np.real(fk_force[i, c, k]))

            for c in range(2*n_radiation):
                fk_force_o[idx, 2*n_beta + c] = 0
            idx += 1


    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FK_FORCES, fk_force_o.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FK_FORCES_ATTR)
    dset[:, :] = fk_force_o

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_FK_FORCES_RAW, fk_force.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FK_FORCES_RAW_ATTR)
    dset[:, :, :] = fk_force

    fk_force_tec_file = utility.get_setting(settings.FK_FORCE_TEC_FILE, custom_config, 'FK_FORCE_TEC_FILE')
    if fk_force_tec_file:
        write_fk_force_tec(int_case, fk_force, w, beta, fk_force_tec_file)

    #free_surface_v = [[-0.5*l_x+l_x*i/(n_x-1), -0.5*l_y+l_y*j/(n_y-1), 0.] for i in range(n_x) for j in range(
    #    n_y)]
    free_surface_v = np.zeros((3, n_x*n_y))
    k = 0
    for i in range(n_x):
        for j in range(n_y):
            free_surface_v[0, k] = -0.5*l_x+l_x*i/(n_x-1)
            free_surface_v[1, k] = -0.5*l_y+l_y*j/(n_y-1)
            free_surface_v[2, k] = 0.
            k += 1

    #free_surface_v = np.array(free_surface_v)
    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_FREE_SURFACE_VECTORS, free_surface_v.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_FREE_SURFACE_VECTORS_ATTR)
    dset[:, :] = free_surface_v

    free_surface_v = np.zeros((0, 0))

    if (n_x-1) > 0 and (n_y-1) >0:
        #free_surface_v = [[j+i*n_y, j+1+i*n_y, j+1+(i+1)*n_y, j+(i+1)*n_y] for i in range(n_x-1) for j in
                        #range(n_y-1)]
        free_surface_v = np.zeros((4, (n_x-1)*(n_y-1)))
        k = 0
        for i in range(n_x-1):
            for j in range(n_y-1):
                free_surface_v[0, k] = j+i*n_y
                free_surface_v[1, k] = j+1+i*n_y
                free_surface_v[2, k] = j+1+(i+1)*n_y
                free_surface_v[3, k] = j+(i+1)*n_y
                k += 1
    #free_surface_v = np.array(free_surface_v)
    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_FREE_SURFACE_INDEX, free_surface_v.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_FREE_SURFACE_INDEX_ATTR)
    dset[:, :] = free_surface_v


    # Generate Kochin
    kochin = np.array([])
    if n_theta > 0:
        if n_theta > 1:
            kochin = [(theta_min+(theta_max-theta_min)*j/(n_theta-1))*np.pi/180. for j in range(n_theta)]
        else:
            kochin = [theta_min*np.pi/180.]


    kochin = np.array(kochin)
    dset = utility.require_dataset(hdf5_data, structure.H5_MESH_KOCHIN, kochin.shape, dtype='f', maxshape=(None, ))
    utility.set_hdf5_attributes(dset, structure.H5_MESH_KOCHIN_ATTR)
    dset[:] = kochin

    # Save index of cases

    out = np.array([[k+1, int_case[k].body+1, int_case[k].mode+1] for k in range(n_integration)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_FORCE, out.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_FORCE_ATTR)
    dset[:, :] = out

    out = np.array([[k+1, rad_case[k].body+1, rad_case[k].mode+1] for k in range(n_radiation)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_MOTION, out.shape, dtype='i')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_MOTION_ATTR)
    dset[:, :] = out

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_BETA, beta.shape, dtype='f', maxshape=(None))
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_BETA_ATTR)
    dset[:] = beta

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_W, w.shape, dtype='f', maxshape=(None))
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_W_ATTR)
    dset[:] = w

    out = np.array([(theta_min+(theta_max-theta_min)*k/(n_theta-1))*np.pi/180. for k in range(n_theta)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_THETA, out.shape, dtype='f', maxshape=(None))
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_THETA_ATTR)
    dset[:] = out

    # Save radiation cases

    out = np.array([[rad_case[k].body+1, rad_case[k].i_case+1,  rad_case[k].direction[0], rad_case[k].direction[1], rad_case[k].direction[2],  rad_case[k].axis[0],  rad_case[k].axis[1] ,  rad_case[k].axis[2]] for k in range(n_radiation)])
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_RADIATION, out.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_RADIATION_ATTR)
    dset[:, :] = out

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_CASE_BETA, beta.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_CASE_BETA_ATTR)
    dset[:] = beta

    switch_ode_influence = utility.get_setting(settings.USE_ODE_INFLUENCE_COEFFICIENTS, custom_config,
                                       'USE_ODE_INFLUENCE_COEFFICIENTS')

    use_higher_order = utility.get_setting(settings.USE_HIGHER_ORDER, custom_config,
                                       'USE_HIGHER_ORDER')

    num_panel_higher_order = utility.get_setting(settings.NUM_PANEL_HIGHER_ORDER, custom_config,
                                       'NUM_PANEL_HIGHER_ORDER')

    b_spline_order = utility.get_setting(settings.B_SPLINE_ORDER, custom_config,
                                       'B_SPLINE_ORDER')

    use_dipoles_implementation = utility.get_setting(settings.USE_DIPOLES_IMPLEMENTATION, custom_config,
                                       'USE_DIPOLES_IMPLEMENTATION')

    compute_yaw_moment = utility.get_setting(settings.COMPUTE_YAW_MOMENT, custom_config,
                                       'COMPUTE_YAW_MOMENT')

    compute_drift_forces = utility.get_setting(settings.COMPUTE_DRIFT_FORCES, custom_config,
                                       'COMPUTE_DRIFT_FORCES')

    thin_panels = utility.get_setting(settings.THIN_PANELS, custom_config,
                                       'THIN_PANELS')

    if num_panel_higher_order is not None and num_panel_higher_order > 0:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_NUM_PANEL_HIGHER_ORDER_ATTR)
        dset[:] = int(num_panel_higher_order)

    if b_spline_order is not None and b_spline_order > 0:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_B_SPLINE_ORDER, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_B_SPLINE_ORDER_ATTR)
        dset[:] = int(b_spline_order)

    if use_higher_order is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_USE_HIGHER_ORDER, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR)
        dset[:] = int(use_higher_order)


    if switch_ode_influence is not None:
        temp = int(switch_ode_influence)*np.ones(n_problems, dtype='i')
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_SWITCH_ODE_INFLUENCE, temp.shape, dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_SWITCH_ODE_INFLUENCE_ATTR)
        dset[:] = temp

    if use_dipoles_implementation is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR)
        dset[:] = int(use_dipoles_implementation)

    if compute_yaw_moment is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_COMPUTE_YAW_MOMENT, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_COMPUTE_YAW_MOMENT_ATTR)
        dset[:] = int(compute_yaw_moment)

    if compute_drift_forces is not None:
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_COMPUTE_DRIFT_FORCES, (1, ), dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_COMPUTE_DRIFT_FORCES_ATTR)
        dset[:] = int(compute_drift_forces)

    if thin_panels is not None:
        temp = np.zeros(mesh.n_panels, dtype='i')
        for idx in thin_panels:
            if idx == -1:
                temp = np.ones(mesh.n_panels, dtype='i')
                break
            elif idx >= 0:
                temp[idx] = 1
        dset = utility.require_dataset(hdf5_data, structure.H5_SOLVER_THIN_PANELS, temp.shape, dtype='i')
        utility.set_hdf5_attributes(dset, structure.H5_SOLVER_THIN_PANELS_ATTR)
        dset[:] = temp
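
The frequency and heading vectors built element by element near the top of run() are plain linear spacings (with the headings converted from degrees to radians); an equivalent vectorised sketch, using placeholder bounds and counts:

# Illustrative only; bounds and counts are placeholders.
import numpy as np

def linear_spacing(v_min, v_max, n):
    return np.linspace(v_min, v_max, n) if n > 1 else np.array([v_min], dtype=float)

w = linear_spacing(0.1, 2.0, 10)                  # wave frequencies, rad/s
beta = np.deg2rad(linear_spacing(0.0, 90.0, 3))   # wave directions, rad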
Code example #16
    mesh_tec_file = utility.get_setting(settings.MESH_TEC_FILE, custom_config,
                                        'MESH_TEC_FILE')

    if mesh_tec_file:
        write_mesh_tec(mesh, mesh_tec_file)

    fnds = np.zeros((n_integration, mesh.n_panels * 2**mesh.i_sym),
                    settings.NEMOH_FLOAT)

    for j in range(n_integration):
        fnds[j, :] = compute_nds(mesh, int_case[j].body, int_case[j].i_case,
                                 int_case[j].direction, int_case[j].axis)

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_MESH_INTEGRATION,
                                   fnds.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_MESH_INTEGRATION_ATTR)
    dset[:, :] = fnds

    environment = utility.read_environment(hdf5_data)

    normal_velocity = np.zeros(
        (mesh.n_panels * 2**mesh.i_sym, (n_beta + n_radiation) * n_w),
        settings.NEMOH_COMPLEX)
    fk_force = np.zeros((n_w, n_beta, n_integration), settings.NEMOH_COMPLEX)

    for i in range(n_w):
        for j in range(n_beta):

            result = compute_wave(mesh, w[i], beta[j], environment)
Code example #17
def write_result(hdf5_data, data):
    """
    Write the result from nemoh fortran to the hdf5
    Args:
        hdf5_data: object the hdf5 opened data
        data: the data sent from nemoh fortran
    """
    signature = __name__ + '.write_result(hdf5_data, data)'
    logger = logging.getLogger(__name__)
    # data is too huge for logging
    utility.log_entrance(logger, signature, {'hdf5_data': hdf5_data})

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_FORCES,
                                   data["line"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_FORCES_ATTR)
    dset[:, :] = data["line"].astype(copy=False, dtype='f')

    temp = np.array(data["out_potential"], dtype='f')
    count_skip = 0
    for i in range(data["n_problems"]):
        if data["bc_switch_potential"][i] != 1:
            temp[i, :] = 0
            count_skip += 1
    if count_skip == data["n_problems"]:
        temp = np.zeros((0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_POTENTIAL,
                                   temp.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_POTENTIAL_ATTR)
    dset[:, :] = temp

    kochin = np.zeros((data["n_theta"], 3, data["n_problems"]), dtype='f')
    count_skip = 0

    for i in range(data["n_problems"]):
        if data["bc_switch_kochin"][i] == 1:
            for j in range(data["n_theta"]):

                kochin[j, 0, i] = data["theta"][j]
                kochin[j, 1, i] = np.abs(data["out_hkochin"][i, j])
                kochin[j, 2,
                       i] = np.arctan2(np.imag(data["out_hkochin"][i, j]),
                                       np.real(data["out_hkochin"][i, j]))
        else:
            count_skip += 1

    if count_skip == data["n_problems"]:
        kochin = np.zeros((0, 0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_KOCHIN,
                                   kochin.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_KOCHIN_ATTR)
    dset[:, :, :] = kochin

    temp = np.zeros((data["nfs_points"], 6, data["n_problems"]), dtype='f')
    count_skip = 0

    for i in range(data["n_problems"]):
        if data["bc_switch_freesurface"][i] == 1:
            for j in range(data["nfs_points"]):

                temp[j, 0, i] = data["meshfs_x"][0, j]
                temp[j, 1, i] = data["meshfs_x"][1, j]
                temp[j, 2, i] = np.abs(data["out_phi"][i, j])
                temp[j, 3, i] = np.arctan2(np.imag(data["out_phi"][i, j]),
                                           np.real(data["out_phi"][i, j]))
                temp[j, 4, i] = -np.imag(data["out_phi"][i, j])
                temp[j, 5, i] = -np.real(data["out_phi"][i, j])
        else:
            count_skip += 1

    if count_skip == data["n_problems"]:
        temp = np.zeros((0, 0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_FREE_SURFACE_POINTS,
                                   temp.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_FREE_SURFACE_POINTS_ATTR)
    dset[:, :, :] = temp

    temp = np.zeros((data["nfs_panels"], 4, data["n_problems"]), dtype='f')
    count_skip = 0

    for i in range(data["n_problems"]):
        if data["bc_switch_freesurface"][i] == 1:
            for j in range(data["nfs_panels"]):
                temp[j, :, i] = data["meshfs_p"]
        else:
            count_skip += 1
    if count_skip == data["n_problems"]:
        temp = np.zeros((0, 0, 0), dtype='f')

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_FREE_SURFACE_PANEL,
                                   temp.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_FREE_SURFACE_PANEL_ATTR)
    dset[:, :, :] = temp

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_DRIFT_FORCES,
                                   data["drift_forces"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_DRIFT_FORCES_ATTR)
    dset[:, :, :] = data["drift_forces"]

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_YAW_MOMENT,
                                   data["yaw_moment"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_YAW_MOMENT_ATTR)
    dset[:, :] = data["yaw_moment"]

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_CENTER_BUOYANCY,
                                   data["center_buoyancy"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_CENTER_BUOYANCY_ATTR)
    dset[:, :] = data["center_buoyancy"]

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_VOLUME_DISPLACEMENT,
                                   data["displacement"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_VOLUME_DISPLACEMENT_ATTR)
    dset[:] = data["displacement"]

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_WATER_PLANE_AREA,
                                   data["waterplane_area"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_WATER_PLANE_AREA_ATTR)
    dset[:] = data["waterplane_area"]

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_STIFNESS,
                                   data["stifness"].shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_STIFNESS_ATTR)
    dset[:, :, :] = data["stifness"]

    utility.log_exit(logger, signature, [None])
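For reference, the Kochin and free-surface blocks above split each complex sample into an amplitude and a phase. Below is a minimal standalone sketch of that decomposition using plain numpy; the sample values are hypothetical and not taken from the example above.

import numpy as np

# hypothetical complex samples standing in for out_hkochin or out_phi
samples = np.array([1.0 + 1.0j, -2.0 + 0.5j, 0.3 - 0.7j])

amplitude = np.abs(samples)                         # |H|, stored in column 1 above
phase = np.arctan2(samples.imag, samples.real)      # arg(H) in (-pi, pi], stored in column 2

print(amplitude)
print(phase)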
Code example #18
def run(hdf5_data, custom_config):
    """
    This function runs the postprocessor
    Args:
        hdf5_data: object, the hdf5 opened file
        custom_config, dict The custom configuration dictionary
    """

    logger = logging.getLogger(__name__)
    signature = __name__ + '.run(hdf5_data, custom_config)'
    # No need to log the parameters of the method here as they would only be duplicates.
    # This function is never called directly by the user; it is always called from the postprocess
    # function, which already logs the configuration.
    utility.log_entrance(logger, signature, {})

    logger.info('Initialising the post processing steps')

    logger.info('Reading environment data ...')
    environment = utility.read_environment(hdf5_data)
    logger.info('Read environment data ' + str(environment))

    logger.info('Reading simulation results')
    result = read_results(hdf5_data)
    logger.info('Read solver result ' + str(result))

    logger.info('Post processing initialisation done!')

    # Saving to hdf5 file
    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_ADDED_MASS,
                                   result.added_mass.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_ATTR)
    dset[:, :, :] = result.added_mass
    logger.info('Saved ' +
                str(structure.H5_RESULTS_ADDED_MASS_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_ADDED_MASS +
                ' with characteristics ' + str(dset))

    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_RADIATION_DAMPING,
                                   result.radiation_damping.shape,
                                   dtype='f')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_RADIATION_DAMPING_ATTR)
    dset[:, :, :] = result.radiation_damping
    logger.info(
        'Saved ' +
        str(structure.H5_RESULTS_RADIATION_DAMPING_ATTR['description']) +
        ' at ' + structure.H5_RESULTS_RADIATION_DAMPING +
        ' with characteristics ' + str(dset))

    excitation_forces = result.diffraction_force + result.froudkrylov_force
    dset = utility.require_dataset(hdf5_data,
                                   structure.H5_RESULTS_EXCITATION_FORCES,
                                   excitation_forces.shape,
                                   dtype='F')
    utility.set_hdf5_attributes(dset,
                                structure.H5_RESULTS_EXCITATION_FORCES_ATTR)
    dset[:, :, :] = excitation_forces
    logger.info(
        'Saved ' +
        str(structure.H5_RESULTS_EXCITATION_FORCES_ATTR['description']) +
        ' at ' + structure.H5_RESULTS_EXCITATION_FORCES +
        ' with characteristics ' + str(dset))

    tec_file = utility.get_setting(settings.RADIATION_COEFFICIENTS_TEC_FILE,
                                   custom_config,
                                   'RADIATION_COEFFICIENTS_TEC_FILE')
    if tec_file:
        save_radiation_coefficients(result, tec_file)
        logger.info(
            'Radiation coefficients successfully saved in tecplot format at ' +
            str(tec_file))
    else:
        logger.info(
            'Radiation coefficients tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.DIFFRACTION_FORCE_TEC_FILE,
                                   custom_config, 'DIFFRACTION_FORCE_TEC_FILE')

    if tec_file:
        save_diffraction_force(result, tec_file)
        logger.info(
            'Diffraction forces successfully saved in tecplot format at ' +
            str(tec_file))
    else:
        logger.info('Diffraction forces tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.EXCITATION_FORCE_TEC_FILE,
                                   custom_config, 'EXCITATION_FORCE_TEC_FILE')
    if tec_file:
        save_excitation_force(result, tec_file)
        logger.info(
            'Excitation forces successfully saved in tecplot format at ' +
            str(tec_file))
    else:
        logger.info('Excitation forces tecplot format generation is disabled')

    irf = get_irf(hdf5_data, result)

    if irf.switch == 1:
        irf = compute_irf(result, irf)
        # Saving to hdf5 file
        dset = utility.require_dataset(
            hdf5_data,
            structure.H5_RESULTS_ADDED_MASS_INFINITE,
            irf.added_mass.shape,
            dtype='f')
        utility.set_hdf5_attributes(
            dset, structure.H5_RESULTS_ADDED_MASS_INFINITE_ATTR)
        dset[:, :] = irf.added_mass

        tec_file = utility.get_setting(settings.IRF_TEC_FILE, custom_config,
                                       'IRF_TEC_FILE')
        if tec_file:
            save_irf(irf, tec_file)
            logger.info('IRF successfully saved in tecplot format at ' +
                        str(tec_file))
        else:
            logger.info('IRF tecplot format generation is disabled')
    else:
        logger.info('IRF computation is disabled')

    raos = np.zeros((result.n_integration, result.n_w, result.n_beta),
                    dtype='F')
    raos = compute_raos(raos, result)

    tec_file = utility.get_setting(settings.WAVE_FIELD_TEC_FILE, custom_config,
                                   'WAVE_FIELD_TEC_FILE')

    dset = hdf5_data.get(structure.H5_SOLVER_USE_HIGHER_ORDER)
    utility.check_dataset_type(
        dset,
        name=str(structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR['description']),
        location=structure.H5_SOLVER_USE_HIGHER_ORDER)
    use_higher_order = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    utility.check_dataset_type(
        dset,
        name=str(structure.
                 H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR['description']),
        location=structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    use_dipoles_implementation = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    utility.check_dataset_type(
        dset,
        name=str(structure.
                 H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR['description']),
        location=structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    remove_irregular_frequencies = dset[0]

    if tec_file:
        if use_higher_order != 1 and use_dipoles_implementation != 1 and remove_irregular_frequencies != 1:
            res = compute_wave_elevation(hdf5_data, environment, 0, 0, raos,
                                         result)
            save_wave_elevation(res['w'], res['etai'], res["etap"], res["eta"],
                                res["x"], res["y"], tec_file)
            logger.info(
                'Wave elevation successfully saved in tecplot format at ' +
                str(tec_file))
        else:
            logger.info(
                'Wave elevation computation is not supported when higher order panels, '
                'the dipoles implementation or irregular frequency removal are enabled.'
                ' Disabling it.')
    else:
        logger.info('Wave elevation tecplot format generation is disabled')
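A hypothetical driver for the run() function above, shown only as an illustration: the file name is a placeholder and the empty dictionary falls back to the default settings, mirroring the signature run(hdf5_data, custom_config).

import h5py

# 'results.hdf5' is a placeholder path; the file must already contain the
# solver output that read_results() expects.
with h5py.File('results.hdf5', 'a') as hdf5_db:
    run(hdf5_db, {})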
Code example #19
File: postprocessor.py Project: binit92/OpenWARP
def run(hdf5_data, custom_config):
    """
    This function runs the postprocessor
    Args:
        hdf5_data: object, the hdf5 opened file
        custom_config, dict The custom configuration dictionary
    """

    logger = logging.getLogger(__name__)
    signature = __name__ + '.run(hdf5_data, custom_config)'
    # No need to log the parameters of the method here as they would only be duplicates.
    # This function is never called directly by the user; it is always called from the postprocess
    # function, which already logs the configuration.
    utility.log_entrance(logger, signature, {})

    logger.info('Initialising the post processing steps')

    logger.info('Reading environment data ...')
    environment = utility.read_environment(hdf5_data)
    logger.info('Read environment data ' + str(environment))

    logger.info('Reading simulation results')
    result = read_results(hdf5_data)
    logger.info('Read solver result ' + str(result))

    logger.info('Post processing initialisation done!')

    # Saving to hdf5 file
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS, result.added_mass.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_ATTR)
    dset[:, :, :] = result.added_mass
    logger.info('Saved ' + str(structure.H5_RESULTS_ADDED_MASS_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_ADDED_MASS + ' with characteristics ' +
                str(dset))

    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_RADIATION_DAMPING, result.radiation_damping.shape, dtype='f')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_RADIATION_DAMPING_ATTR)
    dset[:, :, :] = result.radiation_damping
    logger.info('Saved ' + str(structure.H5_RESULTS_RADIATION_DAMPING_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_RADIATION_DAMPING + ' with characteristics ' +
                str(dset))

    excitation_forces = result.diffraction_force + result.froudkrylov_force
    dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_EXCITATION_FORCES, excitation_forces.shape, dtype='F')
    utility.set_hdf5_attributes(dset, structure.H5_RESULTS_EXCITATION_FORCES_ATTR)
    dset[:, :, :] = excitation_forces
    logger.info('Saved ' + str(structure.H5_RESULTS_EXCITATION_FORCES_ATTR['description']) +
                ' at ' + structure.H5_RESULTS_EXCITATION_FORCES + ' with characteristics ' +
                str(dset))

    tec_file = utility.get_setting(settings.RADIATION_COEFFICIENTS_TEC_FILE, custom_config,
                                   'RADIATION_COEFFICIENTS_TEC_FILE')
    if tec_file:
        save_radiation_coefficients(result, tec_file)
        logger.info('Radiation coefficients successfully saved in tecplot format at ' +
                    str(tec_file))
    else:
        logger.info('Radiation coefficients tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.DIFFRACTION_FORCE_TEC_FILE, custom_config,
                                   'DIFFRACTION_FORCE_TEC_FILE')

    if tec_file:
        save_diffraction_force(result, tec_file)
        logger.info('Diffraction forces successfully saved in tecplot format at ' +
                    str(tec_file))
    else:
        logger.info('Diffraction forces tecplot format generation is disabled')

    tec_file = utility.get_setting(settings.EXCITATION_FORCE_TEC_FILE, custom_config,
                                   'EXCITATION_FORCE_TEC_FILE')
    if tec_file:
        save_excitation_force(result, tec_file)
        logger.info('Excitation forces successfully saved in tecplot format at ' +
                    str(tec_file))
    else:
        logger.info('Excitation forces tecplot format generation is disabled')

    irf = get_irf(hdf5_data, result)

    if irf.switch == 1:
        irf = compute_irf(result, irf)
        # Saving to hdf5 file
        dset = utility.require_dataset(hdf5_data, structure.H5_RESULTS_ADDED_MASS_INFINITE, irf.added_mass.shape, dtype='f')
        utility.set_hdf5_attributes(dset, structure.H5_RESULTS_ADDED_MASS_INFINITE_ATTR)
        dset[:, :] = irf.added_mass

        tec_file = utility.get_setting(settings.IRF_TEC_FILE, custom_config,
                                       'IRF_TEC_FILE')
        if tec_file:
            save_irf(irf, tec_file)
            logger.info('IRF successfully saved in tecplot format at ' +
                        str(tec_file))
        else:
            logger.info('IRF tecplot format generation is disabled')
    else:
        logger.info('IRF computation is disabled')

    raos = np.zeros((result.n_integration, result.n_w, result.n_beta), dtype='F')
    raos = compute_raos(raos, result)

    tec_file = utility.get_setting(settings.WAVE_FIELD_TEC_FILE, custom_config,
                                   'WAVE_FIELD_TEC_FILE')

    dset = hdf5_data.get(structure.H5_SOLVER_USE_HIGHER_ORDER)
    utility.check_dataset_type(dset,
                               name=str(structure.H5_SOLVER_USE_HIGHER_ORDER_ATTR['description']),
                               location=structure.H5_SOLVER_USE_HIGHER_ORDER)
    use_higher_order = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    utility.check_dataset_type(dset,
                               name=str(structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION_ATTR['description']),
                               location=structure.H5_SOLVER_USE_DIPOLES_IMPLEMENTATION)
    use_dipoles_implementation = dset[0]

    dset = hdf5_data.get(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    utility.check_dataset_type(dset,
                               name=str(structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES_ATTR['description']),
                               location=structure.H5_SOLVER_REMOVE_IRREGULAR_FREQUENCIES)
    remove_irregular_frequencies = dset[0]

    # Wave Elevation computation and tec generation
    if result.n_theta < 1:
        tec_file = None
        logger.info('Wave elevation tecplot format generation is disabled because there are no Kochin directions')

    if tec_file:
        if use_higher_order != 1 and use_dipoles_implementation != 1 and remove_irregular_frequencies != 1:
            res = compute_wave_elevation(hdf5_data, environment, 0, 0, raos, result)
            save_wave_elevation(res['w'], res['etai'], res["etap"], res["eta"], res["x"], res["y"],
                                tec_file)
            logger.info('Wave elevation successfully saved in tecplot format at ' +
                        str(tec_file))
        else:
            logger.info('Wave elevation computation is not supported when higher order panels, ' +
                        'the dipoles implementation or irregular frequency removal are enabled.' +
                        ' Disabling it.')
    else:
        logger.info('Wave elevation tecplot format generation is disabled')
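The examples above rely on the project's utility.require_dataset and utility.set_hdf5_attributes helpers, whose implementation is not shown here. The following is a rough, hypothetical approximation of that create-or-reuse-and-tag pattern using h5py directly; the function bodies, file name, dataset path and attribute dictionary are placeholders, not the OpenWARP implementation.

import h5py
import numpy as np


def require_dataset(h5_group, path, shape, dtype):
    # create the dataset if it is missing, or reuse it when shape and dtype are compatible
    return h5_group.require_dataset(path, shape, dtype=dtype, exact=False)


def set_hdf5_attributes(dset, attributes):
    # attach descriptive metadata to the dataset
    for key, value in attributes.items():
        dset.attrs[key] = value


with h5py.File('example.hdf5', 'w') as f:
    added_mass = np.zeros((2, 3, 4), dtype='f')
    dset = require_dataset(f, '/results/added_mass', added_mass.shape, 'f')
    set_hdf5_attributes(dset, {'description': 'Added mass coefficients'})
    dset[:, :, :] = added_mass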