def load_ss(self, refresh=None, path=None):
    if path is None:
        path = self.path_to_sys['ss']

    try:
        with h5.File(path, 'r') as f:
            data = h5utils.load_h5_in_dict(f)
        self.ss = libss.StateSpace(data['a'], data['b'], data['c'], data['d'],
                                   dt=data.get('dt', None))
    except EnvironmentError:
        # try and load from the pickle
        print('Unable to load from h5 at {:s}, reverting to pickle'.format(path))
        try:
            pickle_dir = self.path + '/' + self.path.split('/')[-1] + '.pkl'
            with open(pickle_dir, 'rb') as f:
                data = pickle.load(f)
            self.ss = data.linear.linear_system.ss
        except OSError:
            print('Could not find pickle at {:s}'.format(pickle_dir))
            return None
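
# Hedged sketch (not part of SHARPy): load_ss above expects an HDF5 file containing
# datasets named 'a', 'b', 'c', 'd' and optionally 'dt'. Something along these lines
# would produce a compatible file with plain h5py; the function name and the example
# system below are illustrative, and the actual SHARPy writer may store extra metadata.
import h5py
import numpy as np

def save_ss_example(path, a, b, c, d, dt=None):
    """Write state-space matrices to an HDF5 layout readable by load_ss (illustrative only)."""
    with h5py.File(path, 'w') as f:
        f.create_dataset('a', data=a)
        f.create_dataset('b', data=b)
        f.create_dataset('c', data=c)
        f.create_dataset('d', data=d)
        if dt is not None:
            # discrete-time systems carry a time step; continuous-time ones omit it
            f.create_dataset('dt', data=dt)

# Example usage with a trivial one-state discrete-time system (hypothetical case):
# save_ss_example('sys.ss.h5', np.array([[0.9]]), np.array([[1.0]]),
#                 np.array([[1.0]]), np.array([[0.0]]), dt=0.1)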
def read_files(self):
    # open aero file
    # first, file names
    self.aero_file_name = self.data.case_route + '/' + self.data.case_name + '.aero.h5'
    # then check that the file exists
    h5utils.check_file_exists(self.aero_file_name)
    # read and store the hdf5 file
    with h5.File(self.aero_file_name, 'r') as aero_file_handle:
        # store files in dictionary
        self.aero_data_dict = h5utils.load_h5_in_dict(aero_file_handle)
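
# Hedged sketch (assumption, not the SHARPy implementation): the readers above rely on
# h5utils.load_h5_in_dict to turn an open HDF5 handle into a plain dictionary. A rough
# conceptual equivalent, assuming it recursively copies datasets and groups into nested
# dicts, could look like this:
import h5py

def load_h5_in_dict_sketch(handle):
    out = dict()
    for key, item in handle.items():
        if isinstance(item, h5py.Dataset):
            out[key] = item[()]                          # read the dataset into memory
        elif isinstance(item, h5py.Group):
            out[key] = load_h5_in_dict_sketch(item)      # recurse into subgroups
    return out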
def read_files(self):
    # open fem file
    # first, file names
    self.fem_file_name = self.data.case_route + '/' + self.data.case_name + '.fem.h5'
    if self.settings['unsteady']:
        self.dyn_file_name = self.data.case_route + '/' + self.data.case_name + '.dyn.h5'
    # then check that the files exist
    h5utils.check_file_exists(self.fem_file_name)
    if self.settings['unsteady']:
        h5utils.check_file_exists(self.dyn_file_name)
    # read and store the hdf5 files
    with h5.File(self.fem_file_name, 'r') as fem_file_handle:
        # store files in dictionary
        self.fem_data_dict = h5utils.load_h5_in_dict(fem_file_handle)
    # TODO implement fem file validation
    # self.validate_fem_file()
    if self.settings['unsteady']:
        with h5.File(self.dyn_file_name, 'r') as dyn_file_handle:
            # store files in dictionary
            self.dyn_data_dict = h5utils.load_h5_in_dict(dyn_file_handle)

def read_files(self):
    # open fem file
    # first, file names
    self.fem_file_name = self.data.case_route + '/' + self.data.case_name + '.fem.h5'
    if self.settings['unsteady']:
        self.dyn_file_name = self.data.case_route + '/' + self.data.case_name + '.dyn.h5'
    # then check that the files exist
    h5utils.check_file_exists(self.fem_file_name)
    if self.settings['unsteady']:
        try:
            h5utils.check_file_exists(self.dyn_file_name)
        except FileNotFoundError:
            # no dyn.h5 available: fall back to a steady simulation
            self.settings['unsteady'] = False
    # read and store the hdf5 files
    with h5.File(self.fem_file_name, 'r') as fem_file_handle:
        # store files in dictionary
        self.fem_data_dict = h5utils.load_h5_in_dict(fem_file_handle)
    # TODO implement fem file validation
    # self.validate_fem_file()
    if self.settings['unsteady']:
        with h5.File(self.dyn_file_name, 'r') as dyn_file_handle:
            # store files in dictionary
            self.dyn_data_dict = h5utils.load_h5_in_dict(dyn_file_handle)
        # TODO implement dyn file validation
        # self.validate_dyn_file()

    # Multibody information
    self.mb_file_name = self.data.case_route + '/' + self.data.case_name + '.mb.h5'
    if os.path.isfile(self.mb_file_name):
        # h5utils.check_file_exists(self.mb_file_name)
        with h5.File(self.mb_file_name, 'r') as mb_file_handle:
            self.mb_data_dict = h5utils.load_h5_in_dict(mb_file_handle)

        # h5py returns fixed-length strings as bytes (b'...'), so decode them back
        # into regular strings
        for iconstraint in range(self.mb_data_dict['num_constraints']):
            self.mb_data_dict["constraint_%02d" % iconstraint]['behaviour'] = \
                self.mb_data_dict["constraint_%02d" % iconstraint]['behaviour'].decode()
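
# Hedged sketch (illustrative, not the SHARPy case generator): the decode loop above is
# needed because HDF5 string datasets come back as bytes. A minimal .mb.h5 file matching
# the layout read here could be written as follows; the constraint name and any dataset
# not referenced above are assumptions made purely for illustration.
import h5py

def write_mb_example(path):
    with h5py.File(path, 'w') as f:
        f.create_dataset('num_constraints', data=1)
        constraint = f.create_group('constraint_00')
        # stored as a byte string by HDF5, hence the .decode() when reading back
        constraint.create_dataset('behaviour', data='hinge_node_FoR'.encode('ascii'))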
def read_files(self):
    self.input_file_name = self.data.settings['SHARPy']['route'] + '/' + \
                           self.data.settings['SHARPy']['case'] + '.lininput.h5'

    # Check that the file exists
    try:
        h5utils.check_file_exists(self.input_file_name)
        # Read and store
        with h5.File(self.input_file_name, 'r') as input_file_handle:
            self.input_data_dict = h5utils.load_h5_in_dict(input_file_handle)
    except FileNotFoundError:
        pass

def load_bode(self, refresh=False, path=None):
    print('Loading frequency data...')
    if path is None:
        try:
            path = self.path_to_sys['freqresp']
        except KeyError:
            path = glob.glob(self.path + 'frequencyresponse/{}.freqresp.h5'.format(self.system))[0]

    try:
        with h5.File(path, 'r') as freq_file_handle:
            # store files in dictionary
            freq_dict = h5utils.load_h5_in_dict(freq_file_handle)
    except OSError:
        print('No frequency data - %s' % path)
        return

    # Could create a Bode object with ss gain, max gain etc
    self.bode = Bode(wv=freq_dict['frequency'], yfreq=freq_dict['response'])
    print('...loaded frequency data from {:s}'.format(path))
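
# Hedged sketch (assumption): load_bode only needs the 'frequency' and 'response'
# datasets from the .freqresp.h5 file, and wraps them in a Bode container that is not
# shown in this excerpt. A minimal stand-in with the same constructor signature, purely
# for illustration, could be:
class BodeSketch:
    """Illustrative stand-in for the Bode container used above."""
    def __init__(self, wv, yfreq):
        self.wv = wv          # frequency vector
        self.yfreq = yfreq    # complex frequency response; shape depends on how it was saved

# bode = BodeSketch(wv=freq_dict['frequency'], yfreq=freq_dict['response'])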

def main(args=None, sharpy_input_dict=None):
    """
    Main ``SHARPy`` routine

    This is the main ``SHARPy`` routine. It starts the solution process by reading
    the settings in the ``.sharpy`` file passed as an argument, or in an equivalent
    dictionary given as ``sharpy_input_dict``.

    It reads the solver-specific settings and runs the solvers in order.

    Args:
        args (str): ``.sharpy`` file with the problem information and settings
        sharpy_input_dict (dict): ``dict`` with the same contents as the ``solver.txt`` file would have.

    Returns:
        sharpy.presharpy.presharpy.PreSharpy: object containing the simulation results.
    """
    import time
    import argparse
    import logging
    import os
    import pickle
    import h5py

    import sharpy.utils.input_arg as input_arg
    import sharpy.utils.solver_interface as solver_interface
    import sharpy.utils.cout_utils as cout
    import sharpy.utils.h5utils as h5utils
    from sharpy.presharpy.presharpy import PreSharpy
    from sharpy.utils.cout_utils import start_writer, finish_writer

    # Loading solvers and postprocessors
    import sharpy.solvers
    import sharpy.postproc
    import sharpy.generators
    import sharpy.controllers
    # ------------

    try:
        # output writer
        start_writer()
        # timing
        t = time.process_time()
        t0_wall = time.perf_counter()

        if sharpy_input_dict is None:
            parser = argparse.ArgumentParser(
                prog='SHARPy',
                description="""This is the executable for Simulation of High Aspect Ratio Planes.\n
                Imperial College London 2020""")
            parser.add_argument('input_filename', help='path to the *.sharpy input file',
                                type=str, default='')
            parser.add_argument('-r', '--restart', help='restart the solution with a given snapshot',
                                type=str, default=None)
            parser.add_argument('-d', '--docs',
                                help='generates the solver documentation in the specified location. '
                                     'Code does not execute if running this flag',
                                action='store_true')
            if args is not None:
                args = parser.parse_args(args[1:])
            else:
                args = parser.parse_args()

            if args.docs:
                import subprocess
                import sharpy.utils.docutils as docutils
                import sharpy.utils.sharpydir as sharpydir
                docutils.generate_documentation()
                # run make
                cout.cout_wrap('Running make html in sharpy/docs')
                subprocess.Popen(['make', 'html'],
                                 stdout=None,
                                 cwd=sharpydir.SharpyDir + '/docs')
                return 0

            if args.input_filename == '':
                parser.error('input_filename is a required argument of SHARPy.')
            settings = input_arg.read_settings(args)

            if args.restart is None:
                # run preSHARPy
                data = PreSharpy(settings)
            else:
                try:
                    with open(args.restart, 'rb') as restart_file:
                        data = pickle.load(restart_file)
                except FileNotFoundError:
                    raise FileNotFoundError('The file specified for the snapshot restart (-r) '
                                            'does not exist. Please check.')

                # update the settings
                data.update_settings(settings)

                # Read again the dyn.h5 file
                data.structure.dynamic_input = []
                dyn_file_name = data.case_route + '/' + data.case_name + '.dyn.h5'
                if os.path.isfile(dyn_file_name):
                    fid = h5py.File(dyn_file_name, 'r')
                    data.structure.dyn_dict = h5utils.load_h5_in_dict(fid)
                # for it in range(self.num_steps):
                #     data.structure.dynamic_input.append(dict())

        # Loop for the solvers specified in *.sharpy['SHARPy']['flow']
        for solver_name in settings['SHARPy']['flow']:
            solver = solver_interface.initialise_solver(solver_name)
            solver.initialise(data)
            data = solver.run()

        cpu_time = time.process_time() - t
        wall_time = time.perf_counter() - t0_wall
        cout.cout_wrap('FINISHED - Elapsed time = %.6f seconds' % wall_time, 2)
        cout.cout_wrap('FINISHED - CPU process time = %.6f seconds' % cpu_time, 2)
        finish_writer()

    except Exception as e:
        try:
            logdir = settings['SHARPy']['log_folder']
        except (KeyError, NameError):
            logdir = './'
        logdir = os.path.abspath(logdir)
        cout.cout_wrap('Exception raised, writing error log in %s/error.log' % logdir, 4)
        logging.basicConfig(filename='%s/error.log' % logdir,
                            filemode='w',
                            format='%(asctime)s-%(levelname)s-%(message)s',
                            datefmt='%d-%b-%y %H:%M:%S',
                            level=logging.INFO)
        logging.info('SHARPy Error Log')
        logging.error("Exception occurred", exc_info=True)
        raise e

    return data
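
# Usage sketch: main() mirrors the command-line interface, so it can be driven from a
# shell (assuming the sharpy entry point is on the PATH) or programmatically. The case
# and snapshot file names below are hypothetical.
#
#   sharpy case.sharpy                    # run a case
#   sharpy case.sharpy -r snapshot.pkl    # restart from a pickled snapshot
#   sharpy case.sharpy -d                 # generate the solver documentation only
#
# From Python, the first element of args is discarded (argv[0]-style), so:
# data = main(['sharpy', 'case.sharpy'])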