def test_read_write_read_given_data2():
    """Round-trip sets 4-6 of the TestLab export one at a time.

    For each set index: read it, write it to a temporary file, read it back,
    then compare every numeric field (almost-equal) and every string field.
    """
    datasets = [4, 5, 6]
    for ds in datasets:
        uff_read = pyuff.UFF('./data/TestLab 161 164 18 15 82.uff')
        a = uff_read.read_sets(ds)
        #write to file
        save_to_file = './data/trace_lines.uff'
        if os.path.exists(save_to_file):
            os.remove(save_to_file)
        _ = pyuff.UFF(save_to_file)
        _.write_sets(a, 'add')
        #read back
        uff_read = pyuff.UFF(save_to_file)
        b = uff_read.read_sets(0)
        if os.path.exists(save_to_file):
            os.remove(save_to_file)
        # 'id' is the only text field in this set type; everything else is numeric
        string_keys = ['id']
        numeric_keys = list(set(a.keys()) - set(string_keys))
        for k in numeric_keys:
            print('Testing: ', k)
            np.testing.assert_array_almost_equal(a[k], b[k])
        for k in string_keys:
            np.testing.assert_string_equal(a[k], b[k])
def test_write_read():
    """Write a hand-built dataset 15 (nodes) to disk and verify it reads back.

    All fields of dataset 15 are numeric, so every key is compared with
    almost-equal semantics.
    """
    target = './data/nodes.uff'
    original = pyuff.prepare_15(
        node_nums=[16, 17, 18, 19, 20],
        def_cs=[11, 11, 11, 12, 12],
        disp_cs=[16, 16, 17, 18, 19],
        color=[1, 3, 4, 5, 6],
        x=[0.0, 1.53, 0.0, 1.53, 0.0],
        y=[0.0, 0.0, 3.84, 3.84, 0.0],
        z=[0.0, 0.0, 0.0, 0.0, 1.83])
    # Copy before writing, in case the writer mutates the dict.
    expected = original.copy()
    if target:
        if os.path.exists(target):
            os.remove(target)
        writer = pyuff.UFF(target)
        writer._write_set(original, 'add')
    read_back = pyuff.UFF(target).read_sets()
    if os.path.exists(target):
        os.remove(target)
    for key in expected.keys():
        print('Testing: ', key)
        np.testing.assert_array_almost_equal(expected[key], read_back[key])
def test_read_write_read_given_data():
    """Read set 1 of the Artemis geometry export, rewrite it, verify round trip."""
    # read from file
    source = pyuff.UFF(
        './data/Artemis export - Geometry RPBC_setup_05_14102016_105117.uff')
    original = source.read_sets(1)
    # write to a scratch file
    tmp = './data/trace_lines.uff'
    if os.path.exists(tmp):
        os.remove(tmp)
    pyuff.UFF(tmp).write_sets(original, 'add')
    # read back and clean up
    rewritten = pyuff.UFF(tmp).read_sets(0)
    if os.path.exists(tmp):
        os.remove(tmp)
    # 'id' is text; every other field is numeric
    text_keys = ['id']
    for key in set(original.keys()) - set(text_keys):
        print('Testing: ', key)
        np.testing.assert_array_almost_equal(original[key], rewritten[key])
    for key in text_keys:
        np.testing.assert_string_equal(original[key], rewritten[key])
def test_write_read_test_data():
    """Write a hand-built dataset 82 (trace lines) and verify it reads back."""
    target = './data/trace_lines.uff'
    original = pyuff.prepare_82(
        trace_num=2,
        n_nodes=7,
        color=30,
        id='Identification line',
        nodes=np.array([0, 10, 13, 14, 15, 16, 17]))
    # Copy before writing, in case the writer mutates the dict.
    expected = original.copy()
    if target:
        if os.path.exists(target):
            os.remove(target)
        pyuff.UFF(target)._write_set(original, 'add')
    read_back = pyuff.UFF(target).read_sets()
    if os.path.exists(target):
        os.remove(target)
    # 'id' is text; every other field is numeric
    text_keys = ['id']
    for key in set(expected.keys()) - set(text_keys):
        print('Testing: ', key)
        np.testing.assert_array_almost_equal(expected[key], read_back[key])
    for key in text_keys:
        np.testing.assert_string_equal(expected[key], read_back[key])
def test_write_read_58():
    """Write three artificial dataset-58 FRFs (mixed ascii/binary) and verify
    that every field survives the write/read round trip."""
    save_to_file = './data/measurement.uff'
    if save_to_file:
        if os.path.exists(save_to_file):
            os.remove(save_to_file)
    uff_datasets = []
    binary = [0, 1, 0]  # ascii of binary
    frequency = np.arange(10)
    np.random.seed(0)  # deterministic artificial FRF data
    for i, b in enumerate(binary):
        print('Adding point {}'.format(i + 1))
        response_node = 1
        response_direction = 1
        reference_node = i + 1
        reference_direction = 1
        # this is an artificial 'frf'
        acceleration_complex = np.random.normal(size=len(frequency)) + 1j * np.random.normal(size=len(frequency))
        name = 'TestCase'
        data=pyuff.prepare_58(
            binary=binary[i],
            func_type=4,
            rsp_node=response_node,
            rsp_dir=response_direction,
            ref_dir=reference_direction,
            ref_node=reference_node,
            data=acceleration_complex,
            x=frequency,
            id1='id1',
            rsp_ent_name=name,
            ref_ent_name=name,
            abscissa_spacing=1,
            abscissa_spec_data_type=18,
            ordinate_spec_data_type=12,
            orddenom_spec_data_type=13)
        # Keep a copy of what was written, then append the set to the file.
        uff_datasets.append(data.copy())
        if save_to_file:
            uffwrite = pyuff.UFF(save_to_file)
            uffwrite._write_set(data, 'add')
    uff_dataset_origin = uff_datasets
    uff_read = pyuff.UFF(save_to_file)
    uff_dataset_read = uff_read.read_sets()
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    # Text fields compare exactly; all remaining fields numerically.
    string_keys = ['id1', 'rsp_ent_name', 'ref_ent_name']
    numeric_keys = list(set(uff_dataset_origin[0].keys()) - set(string_keys))
    for a, b in zip(uff_dataset_origin, uff_dataset_read):
        for k in numeric_keys:
            print('Testing: ', k)
            np.testing.assert_array_almost_equal(a[k], b[k])
        for k in string_keys:
            np.testing.assert_string_equal(a[k], b[k])
def test_write_read_55():
    """Write three dataset-55 mode shapes and verify the write/read round trip."""
    save_to_file = './data/measurement.uff'
    if save_to_file:
        if os.path.exists(save_to_file):
            os.remove(save_to_file)
    uff_datasets = []
    modes = [1, 2, 3]
    node_nums = [1, 2, 3, 4]
    freqs = [10.0, 12.0, 13.0]
    for i, b in enumerate(modes):
        # Random shape reused for all three response directions r1..r3.
        mode_shape = np.random.normal(size=len(node_nums))
        name = 'TestCase'
        data = pyuff.prepare_55(
            model_type=1,
            id1='NONE',
            id2='NONE',
            id3='NONE',
            id4='NONE',
            id5='NONE',
            analysis_type=2,
            data_ch=2,
            spec_data_type=8,
            data_type=2,
            r1=mode_shape,
            r2=mode_shape,
            r3=mode_shape,
            n_data_per_node=3,
            node_nums=[1, 2, 3, 4],
            load_case=1,
            mode_n=i + 1,
            modal_m=0,
            freq=freqs[i],
            modal_damp_vis=0.,
            modal_damp_his=0.)
        # Keep a copy of what was written, then append the set to the file.
        uff_datasets.append(data.copy())
        if save_to_file:
            uffwrite = pyuff.UFF(save_to_file)
            uffwrite._write_set(data, 'add')
    uff_dataset_origin = uff_datasets
    uff_read = pyuff.UFF(save_to_file)
    uff_dataset_read = uff_read.read_sets()
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    # Text fields compare exactly; numeric fields to 5 decimals.
    string_keys = ['id1', 'id2', 'id3', 'id4', 'id5']
    numeric_keys = list(set(uff_dataset_origin[0].keys()) - set(string_keys))
    for a, b in zip(uff_dataset_origin, uff_dataset_read):
        for k in numeric_keys:
            print('Testing: ', k)
            np.testing.assert_array_almost_equal(a[k], b[k], decimal=5)
        for k in string_keys:
            np.testing.assert_string_equal(a[k], b[k])
def test_read_58b_binary_vs_58_ascii():
    """The same measurement read from ASCII 58 and binary 58b must agree."""
    ascii_set = pyuff.UFF('./data/Sample_UFF58_ascii.uff').read_sets(0)
    binary_set = pyuff.UFF('./data/Sample_UFF58b_bin.uff').read_sets(0)
    np.testing.assert_string_equal(ascii_set['id1'], binary_set['id1'])
    np.testing.assert_array_equal(ascii_set['rsp_dir'], binary_set['rsp_dir'])
    np.testing.assert_array_equal(ascii_set['x'], binary_set['x'])
    # Ordinate values may differ in the last ASCII digits, hence almost-equal.
    np.testing.assert_array_almost_equal(ascii_set['data'], binary_set['data'])
def test_read_write_read_given_data_base(file='', data_at_the_end=None):
    """Round-trip the first dataset 58 found in *file* and compare fields.

    :param file: path to a UFF file; an empty string turns the call into a
        no-op so the bare, unparametrized invocation does not fail.
    :param data_at_the_end: optional array-like; when given, the tail of the
        re-read ordinate data must match it.
    """
    if file == '':
        return
    #read from file
    uff_read = pyuff.UFF(file)
    a = uff_read.read_sets()
    if type(a) == list:
        # keep only dataset-58 entries and test the first one
        a = [_ for _ in a if _['type'] == 58]
        a = a[0]
    #write to file
    save_to_file = './data/temp58.uff'
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    _ = pyuff.UFF(save_to_file)
    _.write_sets(a, 'add')
    #read back
    uff_read = pyuff.UFF(save_to_file)
    b = uff_read.read_sets(0)
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    # Keys ending in these suffixes hold text labels, not numbers.
    labels = [
        _ for _ in a.keys()
        if any(_[-len(w):] == w for w in ['_lab', '_name', '_description'])
    ]
    string_keys = ['id1', 'id2', 'id3', 'id4', 'id5']
    # Excluded keys are compared neither as text nor numerically.
    exclude_keys = [
        'orddenom_spec_data_type', 'abscissa_spec_data_type', 'spec_data_type',
        'units_description', 'version_num'
    ]
    string_keys = list(
        set(string_keys).union(set(labels)).difference(set(exclude_keys)))
    numeric_keys = list((set(a.keys()).difference(set(string_keys)).difference(
        set(exclude_keys))))
    #print(a['n_bytes'], b['n_bytes'])
    for k in numeric_keys:
        print('Testing: ', k)
        np.testing.assert_array_almost_equal(a[k], b[k], decimal=3)
    for k in string_keys:
        print('Testing string: ', k, a[k])
        np.testing.assert_string_equal(a[k], b[k])
    print('Testing data: ')
    np.testing.assert_array_almost_equal(a['data'], b['data'])
    if data_at_the_end is not None:
        print('Testing last data line: ')
        np.testing.assert_array_almost_equal(a['data'][-len(data_at_the_end):],
                                             data_at_the_end)
def uff_load(file):
    """Read every data set from a UFF file.

    :param file: path to the UFF/UNV file.
    :return: the list (or single dict) of parsed sets from ``pyuff``.
    :raises RuntimeError: when the optional ``pyuff`` dependency is missing.
    """
    if pyuff is None:
        raise RuntimeError("In order to load UFF files you need the 'pyuff' "
                           "package.")
    return pyuff.UFF(file).read_sets()
    def __init__(self, fname='../../unvread/data/shield.uff', maxkey=100):
        '''
        Constructor.

        :param fname: path to the UFF/UNV file to open.
        :param maxkey: NOTE(review): not used inside this constructor as far
            as visible here -- presumably consumed elsewhere; confirm.
        '''
        self.uff_object = uff.UFF(fname)
        # Type number of every set in the file, in file order.
        self.uff_types = self.uff_object.get_set_types()
        # print(self.uff_types)
        # Models
        self.models = dict()
        # Tables
        self.tables = dict()
        # Coordinate-system tables
        self.localcs = pd.DataFrame(columns=['model_id', 'uffidcs', 'node_nums',
                                             'x1', 'x2', 'x3',
                                             'y1', 'y2', 'y3',
                                             'z1', 'z2', 'z3'])
        self.localeul = pd.DataFrame(columns=['model_id', 'uffidcs', 'node_nums',
                                              'thx', 'thy', 'thz'])
        # File structure: one "<set type number> <type name>" row per set.
        # NOTE(review): `types` comes from an enclosing scope (a mapping of
        # type number -> display name) -- not defined in this block; confirm.
        self.file_structure = ['%5d %-10s' % (field, types[field])
                               for field in self.uff_types]
        self.create_model()
def test_read_2412():
    """First and last quad connectivities of dataset 2412 match the reference."""
    mesh = pyuff.UFF('./data/mesh_Oros-modal_uff15_uff2412.uff').read_sets(2)
    quads = mesh['quad']['nodes_nums']
    np.testing.assert_array_equal(quads[0], np.array([1, 25, 48, 24]))
    np.testing.assert_array_equal(quads[-1], np.array([50, 74, 73, 49]))
def test_write_read_151():
    """Round-trip a prepared dataset 151 (file header) and compare all fields."""
    target = './data/test.uff'
    written = pyuff.prepare_test_151(save_to_file=target)
    read_back = pyuff.UFF(target).read_sets()
    if os.path.exists(target):
        os.remove(target)
    # Keys ending in these suffixes hold text labels, not numbers.
    suffixes = ['_lab', '_name', '_description', '_created', '_saved',
                '_written', 'db_app', '_name']
    labels = [key for key in written.keys()
              if any(key.endswith(s) for s in suffixes)]
    skipped = set(['date_db_written', 'time_db_written'])
    text_keys = set(['program', 'description']).union(labels) - skipped
    for key in set(written.keys()) - text_keys - skipped:
        print('Testing: ', key)
        np.testing.assert_array_almost_equal(written[key], read_back[key])
    for key in text_keys:
        np.testing.assert_string_equal(written[key], read_back[key])
    def set_modeset_file(self, modeset_file: str):
        """
        Sets the mode shapes file for the experiment.

        Opens *modeset_file* with pyuff and stores the handle on the instance;
        the file contents are not read here.

        :param modeset_file: str, path to modeset.unv file.
        :return: None.
        """
        self.mode_shapes_file: pyuff.UFF = pyuff.UFF(modeset_file)
def test_write_read_151():
    """Write a hand-built dataset 151 (file header) and verify the round trip."""
    save_to_file = './data/test.uff'
    dataset = pyuff.prepare_151(
        model_name='Model file name',
        description='Model file description',
        db_app='Program which created DB',
        date_db_created='27-Jan-16',
        time_db_created='14:38:15',
        version_db1=1,
        version_db2=2,
        file_type=0,
        date_db_saved='28-Jan-16',
        time_db_saved='14:38:16',
        program='OpenModal',
        date_db_written='29-Jan-16',
        time_db_written='14:38:17')
    # Copy before writing, in case the writer mutates the dict.
    dataset_out = dataset.copy()
    if save_to_file:
        if os.path.exists(save_to_file):
            os.remove(save_to_file)
        uffwrite = pyuff.UFF(save_to_file)
        uffwrite._write_set(dataset, 'add')
    a = dataset_out
    uff_read = pyuff.UFF(save_to_file)
    b = uff_read.read_sets()
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    # Keys ending in these suffixes hold text labels, not numbers.
    labels = [_ for _ in a.keys() if \
        any(_[-len(w):]==w for w in ['_lab', '_name', '_description',\
                                     '_created', '_saved', '_written',\
                                     'db_app', '_name'])]
    string_keys = ['program', 'description']
    # The *_written stamps are excluded -- presumably regenerated at write
    # time, so they cannot round-trip; confirm against pyuff's writer.
    exclude_keys = ['date_db_written', 'time_db_written']
    string_keys = list(
        set(string_keys).union(set(labels)).difference(set(exclude_keys)))
    numeric_keys = list((set(a.keys()).difference(set(string_keys)).difference(
        set(exclude_keys))))
    for k in numeric_keys:
        print('Testing: ', k)
        np.testing.assert_array_almost_equal(a[k], b[k])
    for k in string_keys:
        np.testing.assert_string_equal(a[k], b[k])
def test_non_ascii_in_header():
    """A dataset 58 with non-ASCII bytes in its header must still parse."""
    dataset = pyuff.UFF(
        './data/Artemis export - data and dof 05_14102016_105117.uff'
    ).read_sets(0)
    np.testing.assert_string_equal(dataset['id1'], 'Channel 1 [20, -X]')
    np.testing.assert_almost_equal(dataset['x'][-1], 1.007999609375e+03)
    np.testing.assert_almost_equal(np.sum(dataset['data']), 172027.83668466809)
    np.testing.assert_string_equal(dataset['ordinate_axis_units_lab'][:3], 'm/s')
    # The declared point count must match the abscissa length.
    np.testing.assert_equal(dataset['num_pts'], len(dataset['x']))
def test_read_write_2412_mixed():
    """Round-trip a mixed (triangle + quad) dataset 2412."""
    # Read dataset 2412 from the test file and spot-check connectivities.
    source = pyuff.UFF('./data/mesh_test_uff2412_mixed.uff').read_sets(2)
    np.testing.assert_array_equal(source['triangle']['nodes_nums'][-1],
                                  np.array([3, 6, 11]))
    np.testing.assert_array_equal(source['quad']['nodes_nums'][-1],
                                  np.array([3, 4, 5, 6]))
    # Write the set out, then read it back in.
    pyuff.UFF('./data/tmp.uff')._write_set(source, 'overwrite')
    round_trip = pyuff.UFF('./data/tmp.uff').read_sets(0)
    # Full connectivity tables must survive unchanged.
    np.testing.assert_array_equal(source['triangle']['nodes_nums'],
                                  round_trip['triangle']['nodes_nums'])
    np.testing.assert_array_equal(source['quad']['nodes_nums'],
                                  round_trip['quad']['nodes_nums'])
def test_write_2414():
    """Round-trip a dataset 2414 (complex displacement results)."""
    # Read dataset 2414 from the test file and spot-check values.
    source = pyuff.UFF('./data/DS2414_disp_file.uff').read_sets(3)
    np.testing.assert_array_equal(source['frequency'], np.array([100]))
    np.testing.assert_array_equal(source['x'][3],
                                  np.array([3.33652e-09 - 1j * 9.17913e-13]))
    # Write the set out, then read it back in.
    pyuff.UFF('./data/tmp.uff')._write_set(source, 'overwrite')
    round_trip = pyuff.UFF('./data/tmp.uff').read_sets(0)
    np.testing.assert_array_equal(source['frequency'], round_trip['frequency'])
    np.testing.assert_array_equal(source['z'], round_trip['z'])
def test_read_write_read_given_data_base(file=''):
    """Round-trip the first dataset 151 found in *file* and compare all fields.

    :param file: path to a UFF file; an empty string turns the call into a
        no-op so the bare, unparametrized invocation does not fail.
    """
    if file == '':
        return
    #read from file
    uff_read = pyuff.UFF(file)
    a = uff_read.read_sets()
    if type(a) == list:
        # pick the first set of type 151 (file header)
        types = np.array([_['type'] for _ in a])
        a = a[np.argwhere(types == 151)[0][0]]
    #write to file
    save_to_file = './data/temp.uff'
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    _ = pyuff.UFF(save_to_file)
    _.write_sets(a, 'add')
    #read back
    uff_read = pyuff.UFF(save_to_file)
    b = uff_read.read_sets(0)
    if os.path.exists(save_to_file):
        os.remove(save_to_file)
    # Keys ending in these suffixes hold text labels, not numbers.
    labels = [_ for _ in a.keys() if \
        any(_[-len(w):]==w for w in ['_lab', '_name', '_description',\
                                     '_created', '_saved', '_written',\
                                     'db_app', '_name'])]
    string_keys = ['program', 'description']
    exclude_keys = []
    string_keys = list(
        set(string_keys).union(set(labels)).difference(set(exclude_keys)))
    numeric_keys = list((set(a.keys()).difference(set(string_keys)).difference(
        set(exclude_keys))))
    for k in numeric_keys:
        print('Testing: ', k)
        np.testing.assert_array_almost_equal(a[k], b[k], decimal=3)
    for k in string_keys:
        print('Testing string: ', k, a[k])
        np.testing.assert_string_equal(a[k], b[k])
def write(file,mask,fs,data,point,ordinate_axis_units_lab,abscissa_axis_lab='Time',abscissa_axis_units_lab='s',binary=0):
    """Fill a dataset-58 template *mask* with a time series and append it to *file*.

    :param file: path of the UFF file to append to.
    :param mask: template dict for a dataset 58; mutated in place (aliased below).
    :param fs: sampling frequency; the abscissa increment is 1/fs.
    :param data: ordinate samples to store.
    :param point: measurement-point label; reused for id2, ordinate axis label
        and response entity name.
    :param ordinate_axis_units_lab: units label of the ordinate axis.
    :param abscissa_axis_lab: abscissa axis label (default 'Time').
    :param abscissa_axis_units_lab: abscissa units label (default 's').
    :param binary: passed through as the dataset's 'binary' flag.

    Commented keys below are dataset-58 fields deliberately left at whatever
    the caller's *mask* already contains.
    """
    d=mask  # NOTE(review): alias, not a copy -- the caller's dict is modified
    d['abscissa_axis_lab'] = abscissa_axis_lab
    d['abscissa_axis_units_lab'] = abscissa_axis_units_lab
    # d['abscissa_force_unit_exp']
    d['abscissa_inc'] = 1/fs
    # d['abscissa_len_unit_exp']
    # d['abscissa_min']
    # d['abscissa_spacing']
    # d['abscissa_spec_data_type']
    # d['abscissa_temp_unit_exp']
    d['binary'] = binary
    d['data'] = data
    # d['func_id']
    # d['func_type']
    # d['id1']
    d['id2'] = 'Pt='+point+';'
    # d['id3']
    # d['id4']
    # d['id5']
    # d['load_case_id']
    d['num_pts'] = len(data)
    # d['ord_data_type']
    # d['orddenom_axis_lab']
    # d['orddenom_axis_units_lab']
    # d['orddenom_force_unit_exp']
    # d['orddenom_len_unit_exp']
    # d['orddenom_spec_data_type']
    # d['orddenom_temp_unit_exp']
    d['ordinate_axis_lab'] = point
    d['ordinate_axis_units_lab'] = ordinate_axis_units_lab
    # d['ordinate_force_unit_exp']
    # d['ordinate_len_unit_exp']
    # d['ordinate_spec_data_type']
    # d['ordinate_temp_unit_exp']
    # d['ref_dir']
    # d['ref_ent_name']
    # d['ref_node']
    # d['rsp_dir']
    d['rsp_ent_name'] = point
    # d['rsp_node']
    # d['type']
    # d['ver_num']
    # Time axis: num_pts samples starting at 0, step 1/fs, end point excluded.
    d['x'] = np.linspace(0, len(data)/fs, len(data), endpoint=False)
    # d['z_axis_axis_lab']
    # d['z_axis_axis_units_lab']
    # d['z_axis_force_unit_exp']
    # d['z_axis_len_unit_exp']
    # d['z_axis_spec_data_type']
    # d['z_axis_temp_unit_exp']
    # d['z_axis_value']
    uffwrite = pyuff.UFF(file)
    uffwrite._write_set(d,'add')
def test_read():
    """Geometry (dataset 15) values from the Artemis export match references."""
    geometry = pyuff.UFF(
        './data/Artemis export - Geometry RPBC_setup_05_14102016_105117.uff'
    ).read_sets(0)
    np.testing.assert_array_equal(geometry['node_nums'][:4],
                                  [16.0, 17.0, 18.0, 19.0])
    # Compare a leading slice of each coordinate axis against known values.
    expected = {
        'x': [0.0, 1.53, 0.0, 1.53, 0.0, 1.53, 0.0, 1.53, 4.296, 5.616, 4.296],
        'y': [0.0, 0.0, 3.84, 3.84, 0.0, 0.0, 3.84, 3.84, 0.382, 0.382, 1.102],
        'z': [0.0, 0.0, 0.0, 0.0, 1.83, 1.83, 1.83, 1.83, 0.4],
    }
    for axis, reference in expected.items():
        np.testing.assert_array_equal(geometry[axis][:len(reference)], reference)
def get_measured_accelerance(show=False):
    """Load measured FRFs (dataset 58) from ``paket.uff``.

    :param show: when True, plot the FRF magnitudes on a semilog-y axis.
    :return: tuple ``(freq, H)`` where ``freq`` is the frequency axis of the
        first set and ``H`` is a complex array of shape (n_freq, n_sets) with
        one measured FRF per column.
    """
    uf = pyuff.UFF(r'paket.uff')
    uffdataset58 = uf.read_sets()
    # BUG FIX: the original assigned the whole first data set (a dict) to
    # ``freq``; the frequency axis is the set's abscissa vector 'x'.
    freq = uffdataset58[0]['x']
    H = np.zeros((len(uffdataset58[0]['data']), len(uffdataset58)),
                 dtype='complex')
    for j in range(len(uffdataset58)):
        H[:, j] = uffdataset58[j]['data']
    if show:
        plt.semilogy(freq, np.abs(H[:, :]))
        plt.show()
    return freq, H
def test_read_58_ascii():
    """An ASCII dataset 58 parses with correct header, time axis and data."""
    measurement = pyuff.UFF('./data/Sample_UFF58_ascii.uff').read_sets(0)
    np.testing.assert_string_equal(measurement['id1'], 'Mic 01.0Scalar')
    np.testing.assert_array_equal(measurement['rsp_dir'], 1)
    # The abscissa must be an even time grid: n points spaced by x[1].
    n = measurement['num_pts']
    step = measurement['x'][1]
    np.testing.assert_array_equal(measurement['x'], np.arange(n) * step)
    np.testing.assert_array_equal(len(measurement['data']), n)
    edges = np.array([measurement['data'][0], measurement['data'][-1]])
    np.testing.assert_array_equal(edges,
                                  np.array([-1.47553E-02, -4.31469E-03]))
def test_read_58_non_ascii():
    """A dataset 58 whose header holds non-ASCII bytes still parses correctly."""
    measurement = pyuff.UFF('./data/non_ascii_header.uff').read_sets(0)
    np.testing.assert_string_equal(measurement['id1'], 'ref6_23_Mar')
    np.testing.assert_array_equal(measurement['rsp_dir'], 0)
    # The abscissa must be an even grid: n points spaced by x[1].
    n = measurement['num_pts']
    step = measurement['x'][1]
    np.testing.assert_array_equal(measurement['x'], np.arange(n) * step)
    np.testing.assert_array_equal(len(measurement['data']), n)
    edges = np.array([measurement['data'][0], measurement['data'][-1]])
    np.testing.assert_array_equal(
        edges, np.array([0.407994 + 0.j, 3.75037 + 2.93363j]))
def test_write_read():
    """prepare_test_15 writes a node file that reads back numerically equal."""
    target = './data/nodes.uff'
    written = pyuff.prepare_test_15(save_to_file=target)
    read_back = pyuff.UFF(target).read_sets()
    if os.path.exists(target):
        os.remove(target)
    # Dataset 15 has only numeric fields; compare every key.
    for key in written.keys():
        print('Testing: ', key)
        np.testing.assert_array_almost_equal(written[key], read_back[key])
    def __init__(self, raw_file, modeset_file=None, geometry=None):
        """
        Initialization of Experiment instance. Takes the data of LDV experiment and modal data of this
        experiment. Experiment should be performed with equidistant regular rectangular grid of measuring points.

        :param raw_file: str, Path to raw .uff experimental file.
        :param modeset_file: str, Path to Simcenter Testlab .unv mode shape file.
        :param geometry: tuple of ints, dimensions of the experimental grid (optional).
        """
        print('Reading experimental file...')
        self.raw_data_file: pyuff.UFF = pyuff.UFF(raw_file)
        print('Experimental file has been read.')
        # Placeholders filled in by the extract_*/construct_* steps below.
        self.raw_data: np.array = None
        self.velocities: np.array = None
        self.exp_freqs: list = []
        self.eigenfreqs: list = []
        self.DBRs: dict = {}
        self.mode_shapes: np.array = None
        self.xs: list = []
        self.ys: list = []
        self.rs: np.array = None
        self.x_length: int = 0
        self.y_length: int = 0
        # Grid dimensions: use the caller's tuple when given, otherwise infer.
        if geometry:
            self.extract_geometry(geometry)
        else:
            self.extract_geometry()
        self.extract_data_blocks()
        # Modal data is optional; only processed when a modeset file is given.
        if modeset_file:
            print('Reading modeset file...')
            self.mode_shapes_file: pyuff.UFF = pyuff.UFF(modeset_file)
            print('Experimental file has been read.')
            self.extract_eigenfreqs()
            self.construct_mode_shapes()
        # Results of later processing steps (set elsewhere).
        self.coeffs: np.array = None
        self.WDs: np.array = None
        self.WBP: np.array = None
        print("Experimental data processed successfully.")
def test_write_read_58():
    """prepare_test_58 writes FRF sets that read back field-by-field equal."""
    target = './data/measurement.uff'
    written_sets = pyuff.prepare_test_58(save_to_file=target)
    read_sets = pyuff.UFF(target).read_sets()
    if os.path.exists(target):
        os.remove(target)
    # Text fields compare exactly; all remaining fields numerically.
    text_keys = ['id1', 'rsp_ent_name', 'ref_ent_name']
    value_keys = set(written_sets[0].keys()) - set(text_keys)
    for written, read_back in zip(written_sets, read_sets):
        for key in value_keys:
            print('Testing: ', key)
            np.testing.assert_array_almost_equal(written[key], read_back[key])
        for key in text_keys:
            np.testing.assert_string_equal(written[key], read_back[key])
def test_write_read_test_data():
    """prepare_test_82 writes trace lines that read back field-by-field equal."""
    target = './data/trace_lines.uff'
    written = pyuff.prepare_test_82(save_to_file=target)
    read_back = pyuff.UFF(target).read_sets()
    if os.path.exists(target):
        os.remove(target)
    # 'id' is text; every other field is numeric.
    text_keys = ['id']
    for key in set(written.keys()) - set(text_keys):
        print('Testing: ', key)
        np.testing.assert_array_almost_equal(written[key], read_back[key])
    for key in text_keys:
        np.testing.assert_string_equal(written[key], read_back[key])
def get_data(self): """Return mesh data (nodes and elements) from a .unv file Parameters ---------- self : ImportData An ImportData object Returns ------- nodes: ndarray The nodes id and coordinates (one line = id, 3 coordinates) elements: dict The elements id and connectivity per element type (one line = id, n node ids) """ # Import data from .unv file uff_ascii = pyuff.UFF(self.file_path) datasets = uff_ascii.read_sets() # Scan datasets for dataset in datasets: # If nodes dataset if dataset["type"] == 15: nodes = vstack(( [int(x) for x in dataset["node_nums"]], dataset["x"], dataset["y"], dataset["z"], )).T # If element dataset elif dataset["type"] == 2412: # Store connectivities elements = dict() for elt_type, elt_dict in dataset.items(): if elt_type != "type": elements[elt_type] = vstack( (elt_dict["element_nums"], np_array(elt_dict["nodes_nums"]).T)).T return nodes, elements
def displacements_from_exp_unv(file):
    """Extract the measurement grid and modal displacements from a .unv file.

    :param file: path (relative to the current working directory) of the file.
    :return: tuple ``(x, y, freqs, values)`` -- swapped/scaled grid
        coordinates, one frequency per dataset 55, and the imaginary part of
        each mode's r3 displacements (one row per mode).

    NOTE(review): indentation reconstructed from a collapsed source; the
    coordinate scaling and x/y swap are assumed to run once, after the scan
    loop -- confirm against upstream history.
    """
    unv_file = pyuff.UFF(os.path.join(os.getcwd(), file))
    types = unv_file.get_set_types()
    indexes = []
    for idx, each in enumerate(types):
        # Dataset 15/2411 carries node coordinates; shift both axes to zero.
        if each == 15 or each == 2411:
            coor_dict = unv_file.read_sets(idx)
            x = np.array(coor_dict['x'])
            y = np.array(coor_dict['y'])
            x = x - np.min(x)
            y = y - np.min(y)
        # Dataset 55 carries one mode shape; remember where they all are.
        if each == 55:
            indexes.append(idx)
    # Scale to millimetres (presumably -- units not recorded here) and swap axes.
    x = x * 1000
    y = y * 1000
    hold = x
    x = y
    y = hold
    values = np.zeros((0, len(x)))
    nums = unv_file.read_sets(indexes[0])['node_nums']
    deect = {'Node_numbers': nums}
    freqs = np.empty(0, dtype=np.float32)
    for each in indexes:
        data = unv_file.read_sets(each)
        deect.update({data['id4']: data['r3']})
        # Parse the frequency out of the id4 text, which is assumed to look
        # like '<...>,<...> <freq>(<...>' -- TODO confirm the exact format.
        _, test, _ = data['id4'].split(",")
        _, _, test = test.split(" ")
        test, _ = test.split("(")
        freqs = np.append(freqs, float(test))
    # One column per mode, rows sorted by node number; drop the helper column.
    df = pd.DataFrame(deect).sort_values(by='Node_numbers')
    df = df.drop(['Node_numbers'], axis=1).reset_index().drop(['index'], axis=1)
    for each in df.columns:
        value = np.array(df[each]).imag
        values = np.append(values, [value], axis=0)
    return x, y, freqs, values
def test_read_2414():
    """Dataset 2414 (analysis data) parses frequency and displacement values."""
    result = pyuff.UFF('./data/DS2414_disp_file.uff').read_sets(3)
    np.testing.assert_array_equal(result['frequency'], np.array([100]))
    np.testing.assert_array_equal(result['x'][3],
                                  np.array([3.33652e-09 - 1j * 9.17913e-13]))