def test_nastran_to_ugrid_01(self):
    bdf_filename = os.path.join(model_path, "solid_bending", "solid_bending.bdf")
    size = 8
    debug = True
    model = read_bdf(bdf_filename, log=None, debug=debug)
    log = model.log
    # model.skin_solid_elements()
    skin_bdf_filename = os.path.join(model_path, "solid_bending", "solid_skin.bdf")
    model.write_skin_solid_faces(
        skin_bdf_filename, write_solids=True, write_shells=True,
        size=size, is_double=False, encoding=None)

    bdf_model = read_bdf(skin_bdf_filename, log=log, debug=debug)
    ugrid_filename_out = os.path.join(model_path, "solid_bending", "solid_skin.b8.ugrid")
    nastran_to_ugrid(bdf_model, ugrid_filename_out, properties=None,
                     check_shells=True, check_solids=True)
    ugrid = read_ugrid(ugrid_filename_out, encoding=None, log=log, debug=debug)

    skin_bdf_filename2 = os.path.join(model_path, "solid_bending", "solid_skin2.bdf")
    ugrid.write_bdf(
        skin_bdf_filename2, include_shells=True, include_solids=True,
        convert_pyram_to_penta=True, encoding=None,
        size=size, is_double=False)
    model2 = read_bdf(skin_bdf_filename2, log=log, debug=debug)

    os.remove(ugrid_filename_out)
    os.remove(skin_bdf_filename)
    os.remove(skin_bdf_filename2)
def _test_convert_bwb(self):
    """converts a bwb model"""
    bdf_filename = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_saero.bdf')
    bdf_filename_out = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_modes.bdf')
    bdf_filename_out2 = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_modes_converted.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)
    model.sol = 103

    lines = [
        'ECHO = NONE',
        'SUBCASE 1',
        ' DISPLACEMENT(PLOT) = ALL',
        ' MPC = 1',
        ' SPC = 100',
        ' SUPORT1 = 1',
        ' METHOD = 42',
    ]
    card_lines = ['EIGRL', 42, None, None, 20]
    model.add_card(card_lines, 'EIGRL')
    model.case_control_deck = CaseControlDeck(lines, log=log)
    model.write_bdf(bdf_filename_out)

    units_from = ['in', 'lbm', 's']
    #units_from = ['mm', 'Mg', 's']
    units_to = ['m', 'kg', 's']

    convert(model, units_to, units=units_from)
    model.write_bdf(bdf_filename_out2)
    os.remove(bdf_filename_out)
    os.remove(bdf_filename_out2)
def nastran_to_stl(bdf_filename, stl_filename, is_binary=False, log=None):
    """
    Converts a Nastran model to an STL

    Parameters
    ----------
    bdf_filename : varies
        str : the path to a BDF input file
        BDF() : a BDF() model object
    stl_filename : str
        the output STL path
    is_binary : bool; default=False
        should the output file be binary
    log : Log(); default=None
        a python logging object
    """
    if isinstance(bdf_filename, str):
        model = read_bdf(bdf_filename, log=log)
    else:
        model = bdf_filename

    #log.info('card_count = %s' % model.card_count)
    nnodes = len(model.nodes)
    nodes = zeros((nnodes, 3), dtype='float64')
    elements = []

    i = 0
    nodeid_to_i_map = {}
    for node_id, node in sorted(iteritems(model.nodes)):
        xyz = node.get_position()
        nodes[i, :] = xyz
        nodeid_to_i_map[node_id] = i
        i += 1
    assert len(model.nodes) == i, 'model.nodes=%s i=%s' % (len(model.nodes), i)

    for eid, element in sorted(iteritems(model.elements)):
        if element.type in ['CQUADR']:
            continue
        elif element.type in ['CBAR', 'CBEAM', 'CONM2', 'RBE2', 'RBE3',
                              'CBUSH', 'CBUSH1D', 'CBUSH2D', 'CONROD', 'CROD',
                              'CELAS1', 'CELAS2', 'CELAS3', 'CELAS4',
                              'CDAMP1', 'CDAMP2', 'CDAMP3', 'CDAMP4',]:
            continue
        elif element.type in ['CQUAD4']:
            n1, n2, n3, n4 = element.node_ids
            i1, i2, i3, i4 = (nodeid_to_i_map[n1], nodeid_to_i_map[n2],
                              nodeid_to_i_map[n3], nodeid_to_i_map[n4])
            elements.append([i1, i2, i3])
            elements.append([i3, i4, i1])
        elif element.type in ['CTRIA3', 'CTRIAR']:
            n1, n2, n3 = element.node_ids
            i1, i2, i3 = nodeid_to_i_map[n1], nodeid_to_i_map[n2], nodeid_to_i_map[n3]
            elements.append([i1, i2, i3])
        else:
            print(element.type)
    elements = array(elements, dtype='int32')

    stl = STL()
    stl.nodes = nodes
    stl.elements = elements
    stl.write_stl(stl_filename, is_binary=is_binary)
    return stl
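# Usage sketch (not part of the original source): nastran_to_stl() accepts either a
# BDF path or an already-loaded BDF() object, so both call styles below are valid.
# The file paths are placeholders.
def _example_nastran_to_stl():  # hypothetical helper for illustration
    from pyNastran.bdf.bdf import read_bdf
    stl = nastran_to_stl('solid_bending.bdf', 'solid_bending.stl', is_binary=True)
    model = read_bdf('solid_bending.bdf')
    unused_stl_ascii = nastran_to_stl(model, 'solid_bending_ascii.stl', is_binary=False)
    return stl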
def test_fix_bad_quads(self):
    msg = [
        'SOL 101',
        'CEND',
        'BEGIN BULK',
        'GRID,1,,0.,0.,0.',
        'GRID,2,,1.,0.,0.',
        'GRID,3,,1.,1.,0.',
        'GRID,4,,0.,1.,0.',
        'GRID,5,,1.,1.,0.00001',
        'GRID,6,,0.,0.,0.00001',
        'CQUAD4,1, 2, 1,2,3,4',
        'CQUAD4,2, 2, 1,2,3,5',
        'CQUAD4,3, 2, 1,6,3,5',
    ]
    bdf_filename = 'fix_bad_quads.bdf'
    with open(bdf_filename, 'w') as bdf_file:
        bdf_file.write('\n'.join(msg))
    model = read_bdf(bdf_filename, xref=False, debug=False)
    model.cross_reference(xref=True, xref_elements=False,
                          xref_nodes_with_elements=False,
                          xref_properties=False,
                          xref_masses=False,
                          xref_materials=False,
                          xref_loads=False,
                          xref_constraints=False,
                          xref_aero=False,
                          xref_sets=False,
                          xref_optimization=False)
    convert_bad_quads_to_tris(model, min_edge_length=0.01)
    #for eid, elem in sorted(iteritems(model.elements)):
        #print(elem)
    assert model.card_count['CQUAD4'] == 2, model.card_count
    assert model.card_count['CTRIA3'] == 1, model.card_count
    os.remove(bdf_filename)
def test_fsd_01(self):
    bdf_filename = os.path.abspath(os.path.join(
        test_path, '..', 'models', 'sol_101_elements', 'static_solid_shell_bar.bdf'))
    model = read_bdf(bdf_filename)

    oid = 1
    label = 'STRESS'
    response_type = 'STRESS'
    property_type = 'PSHELL'
    region = 'STRESS'
    atta = None
    attb = None
    atti = []
    model.dresps[oid] = DRESP1(oid, label, response_type, property_type, region,
                               atta, attb, atti)

    keywords = {
        'scr' : 'yes',
        'bat' : 'no',
        'old' : 'no',
    }
    max_stress = 1.033589E+04
    target_stress = 5.0E+3
    regions = {
        4 : [0.0001, 0.7, -target_stress, target_stress],
    }
    regions2 = fully_stressed_design(model, keywords=keywords)
    self.assertTrue(isinstance(regions2, dict), 'regions2=%s' % (regions2, ))
def test_include_05(self):
    with codec_open('include5.bdf', 'w') as bdf_file:
        bdf_file.write('$ pyNastran: punch=True\n')
        bdf_file.write('$ pyNastran: dumplines=True\n')
        bdf_file.write("INCLUDE 'include5b.inc'\n\n")

    with codec_open('include5b.inc', 'w') as bdf_file:
        bdf_file.write('ECHOON\n')
        bdf_file.write('$ GRID comment\n')
        bdf_file.write('GRID,2,,2.0\n')
        bdf_file.write('ECHOOFF\n')
        bdf_file.write('GRID,3,,3.0\n')
        bdf_file.write('grid,4,,4.0\n')
        bdf_file.write('grid ,5,,5.0\n')

    model = BDF(log=log, debug=False)
    model.read_bdf('include5.bdf')
    assert model.echo is False, model.echo
    #model.write_bdf('include5.out.bdf')
    # os.remove('c.bdf')
    # os.remove('executive_control.inc')
    # os.remove('case_control.inc')

    self.assertEqual(len(model.nodes), 4)
    self.assertEqual(model.nnodes, 4, 'nnodes=%s' % model.nnodes)

    model2 = read_bdf(bdf_filename='include5.bdf', xref=True, punch=False,
                      log=log, encoding=None)
    self.assertEqual(len(model2.nodes), 4)
    self.assertEqual(model2.nnodes, 4, 'nnodes=%s' % model2.nnodes)

    os.remove('include5.bdf')
    #os.remove('include5.out.bdf')
    os.remove('include5b.inc')
def test_convert_sine(self):
    """converts a sine model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'freq_sine')
    bdf_filename = os.path.join(model_path, 'good_sine.dat')
    bdf_filename_out = os.path.join(model_path, 'sine_modes.bdf')
    bdf_filename_out2 = os.path.join(model_path, 'sine_converted.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)
    model.sol = 103

    lines = [
        'ECHO = NONE',
        'SUBCASE 1',
        ' DISPLACEMENT(PLOT) = ALL',
        #'$ SPC = 100',
        ' METHOD = 42',
    ]
    card_lines = ['EIGRL', 42, None, None, 20]
    model.add_card(card_lines, 'EIGRL')
    model.case_control_deck = CaseControlDeck(lines, log=log)
    model.params['GRDPNT'] = PARAM('GRDPNT', 0)
    #del model.params['WTMASS']
    model.write_bdf(bdf_filename_out)

    #units_from = ['in', 'lbm', 's']
    units_from = ['mm', 'Mg', 's']
    units_to = ['m', 'kg', 's']

    convert(model, units_to, units=units_from)
    model.write_bdf(bdf_filename_out2)
    os.remove(bdf_filename_out)
    os.remove(bdf_filename_out2)
def test_remove_bar(self):
    """removes unused data from the bar model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'beam_modes')
    bdf_filename = os.path.join(model_path, 'beam_modes.dat')
    bdf_filename_out = os.path.join(model_path, 'beam_modes_temp.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)
    remove_unused(model)
def test_renumber_01(self):
    log = SimpleLogger(level='warning')
    bdf_filename = os.path.abspath(
        os.path.join(pkg_path, '..', 'models', 'bwb', 'BWB_saero.bdf'))
    bdf_filename_out1 = os.path.abspath(
        os.path.join(pkg_path, '..', 'models', 'bwb', 'BWB_saero1.out'))
    bdf_filename_out2 = os.path.abspath(
        os.path.join(pkg_path, '..', 'models', 'bwb', 'BWB_saero2.out'))
    bdf_filename_out3 = os.path.abspath(
        os.path.join(pkg_path, '..', 'models', 'bwb', 'BWB_saero3.out'))
    model = bdf_renumber(bdf_filename, bdf_filename_out1, size=8,
                         is_double=False, starting_id_dict=None,
                         round_ids=False, cards_to_skip=None)

    model = read_bdf(bdf_filename, log=log)
    bdf_renumber(model, bdf_filename_out2, size=16, is_double=False,
                 starting_id_dict={
                     'eid' : 1000, 'pid' : 2000, 'mid' : 3000,
                     'spc_id' : 4000,},
                 round_ids=False, cards_to_skip=None)
    bdf_renumber(bdf_filename, bdf_filename_out3, size=8,
                 is_double=False, starting_id_dict=None, round_ids=True,
                 cards_to_skip=None)
    read_bdf(bdf_filename_out1, log=log)
    read_bdf(bdf_filename_out2, log=log)
    read_bdf(bdf_filename_out3, log=log)
def test_include_stop(self):
    with codec_open('a.bdf', 'w') as bdf_file:
        bdf_file.write('CEND\n')
        bdf_file.write('BEGIN BULK\n')
        bdf_file.write("INCLUDE 'b.bdf'\n\n")
        bdf_file.write('GRID,1,,1.0\n')

    model = BDF(log=log, debug=False)
    with self.assertRaises(IOError):
        model.read_bdf(bdf_filename='a.bdf', xref=True, punch=False,
                       read_includes=True, encoding=None)
    with self.assertRaises(IOError):
        read_bdf(bdf_filename='a.bdf', xref=True, punch=False,
                 encoding=None, log=log)
    model.read_bdf(bdf_filename='a.bdf', xref=True, punch=False,
                   read_includes=False, encoding=None)
    model.write_bdf('out.bdf')
    os.remove('a.bdf')
    os.remove('out.bdf')
def _test_remove_bwb(self):
    """removes unused data from the bwb model"""
    bdf_filename = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_saero.bdf')
    bdf_filename_out = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_modes.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)
    remove_unused(model)
    model.write_bdf(bdf_filename_out)
    os.remove(bdf_filename_out)
def test_remove_sine(self):
    """removes unused data from the sine model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'freq_sine')
    bdf_filename = os.path.join(model_path, 'good_sine.dat')
    bdf_filename_out = os.path.join(model_path, 'sine_modes.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)
    remove_unused(model)
    model.write_bdf(bdf_filename_out)
    os.remove(bdf_filename_out)
def _test_remove_isat(self):
    """removes unused data from the isat model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'isat')
    bdf_filename = os.path.join(model_path, 'ISat_Dploy_Sm.dat')
    bdf_filename_out = os.path.join(model_path, 'isat.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)
    remove_unused(model)
    model.write_bdf(bdf_filename_out)
    os.remove(bdf_filename_out)
def test_mirror(self):
    """tests bdf mirroring"""
    pid_pshell = 10
    pid_psolid = 11
    mid1 = 100
    model = BDF(log=log) # (log=log)
    model.add_grid(1, [10., 10., 10.])
    model.add_grid(2, [11., 10., 10.])
    model.add_grid(3, [11., 11., 10.])
    model.add_grid(4, [10., 11., 10.])

    model.add_grid(5, [10., 10., 11.])
    model.add_grid(6, [11., 10., 11.])
    model.add_grid(7, [11., 11., 11.])
    model.add_grid(8, [10., 11., 11.])

    model.add_cquad4(1, pid_pshell, [1, 2, 3, 4]) # mass=1
    model.add_ctria3(2, pid_pshell, [1, 2, 3]) # mass=0.5
    model.add_conrod(3, mid1, [1, 3], A=1.0, j=0.0, c=0.0, nsm=0.0)

    #model.add_ctetra(4, pid_psolid, [1, 2, 3, 5])
    # penta
    # pyram
    #model.add_chexa(7, pid_psolid, [1, 2, 3, 4, 5, 6, 7, 8])

    model.add_pshell(pid_pshell, mid1=mid1, t=1.)
    model.add_psolid(pid_psolid, mid1)
    E = 1.0
    G = None
    nu = 0.3
    model.add_mat1(mid1, E, G, nu, rho=1.0)
    model.validate()

    model.cross_reference()
    mass1, cg1, inertia1 = model.mass_properties()

    out_filename = 'sym.bdf'
    write_bdf_symmetric(model, out_filename=out_filename, encoding=None,
                        size=8, is_double=False,
                        enddata=None, close=True, plane='xz') # +y/-y
    model2 = read_bdf(out_filename, log=log)
    assert len(model2.nodes) == 16, model2.nodes
    mass2, cg2, inertia2 = model2.mass_properties()
    #print('cg1=%s cg2=%s' % (cg1, cg2))
    assert np.allclose(mass1 * 2, mass2), 'mass1=%s mass2=%s' % (mass1, mass2)
    assert np.allclose(cg2[1], 0.), 'cg2=%s stats=%s' % (cg2, model2.get_bdf_stats())
    os.remove('sym.bdf')
def test_merge_01(self):
    """merges multiple bdfs into a single deck"""
    log = SimpleLogger(level='error')
    bdf_filename1 = os.path.join(MODEL_PATH, 'bwb', 'bwb_saero.bdf')
    bdf_filename2 = os.path.join(MODEL_PATH, 'sol_101_elements', 'static_solid_shell_bar.bdf')
    bdf_filename3 = os.path.join(MODEL_PATH, 'solid_bending', 'solid_bending.bdf')
    bdf_filename4 = os.path.join(MODEL_PATH, 'iSat', 'ISat_Dploy_Sm.dat')
    bdf_filename_out1 = os.path.join(MODEL_PATH, 'bwb', 'BWBsaero_staticbar_8.out')
    bdf_filename_out2 = os.path.join(MODEL_PATH, 'bwb', 'BWBsaero_static_bar_16.out')
    bdf_filename_out3 = os.path.join(MODEL_PATH, 'bwb', 'BWBsaero_staticbar_isat.out')

    bdf_filenames1 = [bdf_filename1, bdf_filename2]
    bdf_filenames2 = [bdf_filename1, bdf_filename2, bdf_filename3, bdf_filename4]
    bdf_merge(bdf_filenames1, bdf_filename_out=bdf_filename_out1,
              renumber=True, encoding=None, size=8, is_double=False,
              cards_to_skip=None, log=log)
    bdf_merge(bdf_filenames1, bdf_filename_out=bdf_filename_out2,
              renumber=False, encoding=None, size=16, is_double=False,
              cards_to_skip=None, log=log)
    bdf_merge(bdf_filenames2, bdf_filename_out=bdf_filename_out3,
              renumber=False, encoding=None, size=16, is_double=False,
              cards_to_skip=None, log=log)
    read_bdf(bdf_filename_out1, log=log)
    read_bdf(bdf_filename_out2, log=log)
    read_bdf(bdf_filename_out3, log=log)
def test_nastran_to_tecplot(self):
    """tests a large number of elements and results in SOL 101"""
    bdf_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.bdf')
    tecplot_filename = os.path.join(MODEL_PATH, 'elements', 'static_elements.plt')
    tecplot_filename2 = os.path.join(MODEL_PATH, 'elements', 'static_elements2.plt')
    log = get_logger(log=None, level='warning', encoding='utf-8')
    model = read_bdf(bdf_filename, log=log)
    with self.assertRaises(RuntimeError):
        nastran_to_tecplot(model)
    nastran_to_tecplot_filename(bdf_filename, tecplot_filename, log=log)

    argv = ['format_converter', 'nastran', bdf_filename, 'tecplot', tecplot_filename2]
    with self.assertRaises(RuntimeError):
        cmd_line_format_converter(argv=argv, quiet=True)
def test_tecplot_03(self):
    log = SimpleLogger(level='warning')
    nastran_filename = os.path.join(NASTRAN_MODEL_PATH, 'elements', 'static_elements.bdf')
    tecplot_filename = os.path.join(NASTRAN_MODEL_PATH, 'elements', 'static_elements.plt')
    unused_tecplot = nastran_to_tecplot_filename(nastran_filename, tecplot_filename, log=log)
    #tecplot2 = read_tecplot(tecplot_filename)

    bdf_model = read_bdf(nastran_filename, log=log)
    with self.assertRaises(RuntimeError):
        unused_tecplot = nastran_to_tecplot(bdf_model)
def test_dmig_11(self):
    pch_filename = os.path.join(TEST_PATH, 'dmig.pch')
    model = read_bdf(pch_filename, debug=False, punch=True)
    vax = model.dmigs['VAX']
    vax_array, vax_dict_row, vax_dict_col = vax.get_matrix()
    assert vax_array.shape == (15, 1), vax_array

    vax_dict_row_expected = {
        0: (101, 1), 1: (102, 1), 2: (105, 1), 3: (106, 1), 4: (107, 1),
        5: (108, 1), 6: (109, 1), 7: (201, 1), 8: (301, 1), 9: (302, 1),
        10: (305, 1), 11: (306, 1), 12: (307, 1), 13: (308, 1), 14: (309, 1),
    }
    vax_dict_col_expected = {0: (1, 0)}

    assert list(sorted(vax_dict_col)) == list(sorted(vax_dict_col_expected)), \
        'vax_dict_col=%s vax_dict_col_expected=%s' % (vax_dict_col, vax_dict_col_expected)
    assert list(sorted(vax_dict_row)) == list(sorted(vax_dict_row_expected)), \
        'vax_dict_row=%s vax_dict_row_expected=%s' % (vax_dict_row, vax_dict_row_expected)
def test_read_bad_02(self):
    """tests when users don't add punch=True to read_bdf(...)"""
    lines = [
        'GRID 1000177 0 1. 0. 0. 0\n',
        'GRID 1000178 0 0. 1. 0. 0\n',
        'GRID 1000186 0 0. 0. 1. 0\n',
        'GRID 1000187 0 1. 1. 1. 0\n',
        'GRID 15000014 0 2. 1. 1. 0\n',
        'RBE2 1500002215000014 123456 1000177 1000178 1000186 1000187\n',
    ]
    bdf_filename = 'xref_test.bdf'
    with open(bdf_filename, 'w') as bdf_file:
        bdf_file.writelines(lines)

    with self.assertRaises(RuntimeError):
        read_bdf(bdf_filename, validate=False, xref=False,
                 punch=False, encoding=None,
                 log=log, debug=True, mode='msc')
    os.remove(bdf_filename)
def test_isat_files(self):
    """read/writes the isat model with the file structure"""
    bdf_filename = os.path.join(MODEL_PATH, 'iSat', 'iSat_launch_100Hz.dat')
    model = read_bdf(bdf_filename, validate=True, xref=False, punch=False,
                     save_file_structure=True, skip_cards=None, read_cards=None,
                     encoding=None, log=None, debug=True, mode='msc')
    assert len(model.include_filenames) == 1, len(model.include_filenames)
    assert len(model.include_filenames[0]) == 2, len(model.include_filenames[0])

    out_filenames = {}
    out_filenames2 = {}
    for i, fname in enumerate(model.active_filenames):
        dirname = os.path.dirname(fname)
        basename = os.path.basename(fname)
        out_filenames[fname] = os.path.join(dirname, 'out_' + basename)
        if 'antenna_pressure' not in fname:
            out_filenames2[fname] = os.path.join(dirname, 'out2_' + basename)
    #print(bdf_filename2)
    #print('out_filenames =', out_filenames)

    all_lines, ilines = model.include_zip(bdf_filename, encoding=None, make_ilines=True)
    #for (ifile, iline), line in zip(ilines, all_lines):
        #if iline > 100:
            #continue
        #print(ifile, iline, line.rstrip())

    assert len(model.include_filenames) == 1, len(model.include_filenames)
    assert len(model.include_filenames[0]) == 2, len(model.include_filenames[0])
    model.log.info('saving model')
    model.write_bdfs(out_filenames, relative_dirname='')
    model.log.info('saving new model')
    model.write_bdfs(out_filenames2, relative_dirname='')

    bdf_filename = os.path.abspath(bdf_filename)
    read_bdf(out_filenames[bdf_filename])
    read_bdf(out_filenames2[bdf_filename])
def test_ugrid_01(self):
    """tests solid_bending.bdf"""
    nastran_filename1 = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending.bdf')
    ugrid_filename = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending.b8.ugrid')
    log = get_logger(level='warning')
    unused_ugrid_model = nastran_to_ugrid(
        nastran_filename1, ugrid_filename_out=ugrid_filename,
        properties=None, check_shells=False, check_solids=True, log=log)
    assert os.path.exists(ugrid_filename), ugrid_filename

    nastran_filename2 = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending2.bdf')
    ugrid_model = ugrid3d_to_nastran(
        ugrid_filename, nastran_filename2,
        include_shells=True, include_solids=True,
        convert_pyram_to_penta=False,
        encoding=None, size=16,
        is_double=False, log=log)

    nastran_filename3 = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending3.bdf')
    tris, quads = ugrid_model.skin_solids()
    ugrid_model.tris = tris
    ugrid_model.quads = quads
    ugrid_model.pids = np.ones(len(tris) + len(quads))
    ugrid_model.write_bdf(nastran_filename3)

    bdf_model = read_bdf(nastran_filename3)
    #print(bdf_model.get_bdf_stats())
    assert os.path.exists(nastran_filename3), nastran_filename3

    #tecplot_filename1 = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending.plt')
    #ugrid3d_to_tecplot_filename(model, tecplot_filename1)
    #assert os.path.exists(tecplot_filename1), tecplot_filename1

    tecplot_filename2 = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending2.plt')
    tecplot = ugrid_to_tecplot(ugrid_model)
    tecplot.write_tecplot(tecplot_filename2, res_types=None,
                          is_points=True, adjust_nids=True)
    assert os.path.exists(tecplot_filename2), tecplot_filename2

    ugrid_filename_out = os.path.join(NASTRAN_PATH, 'solid_bending', 'solid_bending.b8.ugrid_out')
    pshell_pids_to_remove = []
    merge_ugrid3d_and_bdf_to_ugrid3d_filename(
        ugrid_filename, nastran_filename3, ugrid_filename_out,
        pshell_pids_to_remove,
        update_equivalence=True, tol=0.01)
    assert os.path.exists(ugrid_filename_out), ugrid_filename_out
def test_bdf_superelement_5(self):
    """checks flyswatter.bdf"""
    from pyNastran.bdf.mesh_utils.bdf_renumber import superelement_renumber
    model_path = os.path.join(MODEL_PATH, 'superelements', 'flyswatter')
    bdf_filename = os.path.join(model_path, 'flyswatter.bdf')
    bdf_filename_out = os.path.join(model_path, 'flyswatter.re.bdf')
    #log = get_logger(log=None, level='error', encoding='utf-8')
    fem1 = read_bdf(bdf_filename, validate=True, xref=True, punch=False,
                    save_file_structure=False, skip_cards=None, read_cards=None,
                    encoding=None, log=None, debug=True, mode='msc')
    superelement_renumber(
        fem1, bdf_filename_out=bdf_filename_out, starting_id_dict=None)
def end_checks(model):
    """various checks"""
    model.validate()
    model._verify_bdf(xref=False)
    model.cross_reference()
    model._verify_bdf(xref=True)
    model.uncross_reference()
    model.cross_reference()
    model.pop_xref_errors()
    mass, cg, inertia = model.mass_properties()
    assert mass > 0, 'mass=%s, cg=%s, inertia=%s' % (mass, cg, inertia)

    bdf_filename = 'solid_test.bdf'
    model.write_bdf(bdf_filename)
    model2 = read_bdf(bdf_filename, debug=False)
    os.remove(bdf_filename)
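# Usage sketch (not part of the original source): end_checks() needs a model with
# nonzero mass, so a minimal CONROD + MAT1 model (mirroring test_mirror above) is
# enough to exercise it. All ids and property values are illustrative.
def _example_end_checks():  # hypothetical helper for illustration
    from pyNastran.bdf.bdf import BDF
    model = BDF()
    model.add_grid(1, [0., 0., 0.])
    model.add_grid(2, [1., 0., 0.])
    model.add_mat1(100, 3.0e7, None, 0.3, rho=0.1)
    model.add_conrod(3, 100, [1, 2], A=1.0, j=0.0, c=0.0, nsm=0.0)
    end_checks(model)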
def _test_bdf_slash(self):
    """tests a / in a deck"""
    lines = [
        '$ DEC/CMS REPLACEMENT HISTORY, Element D10912R.DAT',
        '$ *1 15-JUN-1990 17:41:35 CMSMGR "66B PLUS/G 66B/ Initial installation of TPL test problems"',
        '$ DEC/CMS REPLACEMENT HISTORY, Element D10912R.DAT',
        'RESTART VERSION=LAST,KEEP $ RESTART FROM D10911R DBS=D10911D',
        'ID EDS, D10912R $',
        '$ID EDS, D2712R $',
        '$ID EDS,D27D12R',
        'SOL 109 $',
        '$SOL 27,0',
        '$DIAG 8,14',
        'TIME 5',
        '$READ 10 $ D27D11',
        'CEND',
        'TITLE=NEW RIGID FORMATS - CANTILEVER BEAM D10912R',
        'SUBTITLE=DIRECT TRANSIENT',
        'SET 1000=10,30,40',
        'SET 2000=111,200',
        'METHOD=1',
        'DISP(SORT2)=2000',
        'SPC=200',
        'MPC=100',
        'TSTEP=100',
        'FORCE=1000',
        'SUBCASE 1',
        'DLOAD=10',
        'BEGIN BULK',
        '/ 47',
        'TLOAD1,10,2,0,0,10',
        'ENDDATA',
    ]
    bdf_file = StringIO()
    bdf_file.writelines(lines)
    bdf_file.seek(0)
    #with self.assertRaises(NotImplementedError):
    unused_model = read_bdf(bdf_file, validate=True, xref=True,
                            punch=False, skip_cards=None,
                            read_cards=None, encoding=None,
                            log=None, debug=True, mode='msc')
def test_springs_03(self):
    """tests CELAS1, CELAS2, PELAS, PELAST"""
    model = BDF(debug=False)
    eid = 1
    pid = 2
    nids = [3, 4]
    c1 = 1
    c2 = 1
    celas1 = model.add_celas1(eid, pid, nids, c1, c2, comment='celas1')
    celas1.raw_fields()
    celas1.write_card(size=8, is_double=False)

    k = 1.0e7
    ge = 0.0
    s = 0.
    pelas = model.add_pelas(pid, k, ge, s, comment='pelas')
    pelas.raw_fields()
    pelas.write_card(size=8, is_double=False)

    tkid = 10
    tgeid = 10
    tknid = 10
    pelast = model.add_pelast(pid, tkid, tgeid, tknid, comment='pelast')
    pelast.raw_fields()
    pelast.write_card(size=8, is_double=False)

    eid = 5
    celas2 = model.add_celas2(eid, k, nids, c1=0, c2=0, ge=0., s=0., comment='celas2')
    celas2.raw_fields()
    celas2.write_card(size=8, is_double=False)

    model.add_grid(3, xyz=[0., 0., 0.])
    model.add_grid(4, xyz=[0., 0., 0.])
    model.validate()
    model._verify_bdf(xref=False)
    model.cross_reference()
    model._verify_bdf(xref=True)

    model.write_bdf('spring.bdf')
    model2 = read_bdf('spring.bdf', debug=False)
    os.remove('spring.bdf')
def test_opt_1(self):
    """tests SOL 200"""
    log = get_logger(level='warning')
    bdf_filename = os.path.join(MODEL_PATH, 'sol200', 'model_200.bdf')
    unused_model = read_bdf(bdf_filename, xref=True, debug=False)

    op2_filename = os.path.join(MODEL_PATH, 'sol200', 'model_200.op2')
    #bdf, op2 = run_model(bdf_filename, op2_filename,
                         #f06_has_weight=False, vectorized=True,
                         #encoding='utf-8')
    op2 = read_op2(op2_filename, log=log, debug=True, debug_file='temp.debug')
    unused_subcase_ids = op2.subcase_key.keys()
    #for subcase_id in subcase_ids:
        #assert isinstance(subcase_id, integer_types), subcase_id
        #for key, dresp in sorted(model.dresps.items()):
            #print(dresp)
            #dresp.calculate(op2, subcase_id)
    os.remove('temp.debug')
def get_model(bdf_filename, log=None, debug=True):
    """helper method"""
    if isinstance(bdf_filename, BDF):
        model = bdf_filename
    else:  # str, StringIO
        model = read_bdf(bdf_filename, validate=True, xref=True, punch=False,
                         skip_cards=None, read_cards=None, encoding=None,
                         log=log, debug=debug, mode='msc')
    return model
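# Usage sketch (not part of the original source): get_model() is a pass-through when
# it is handed a BDF() object and otherwise defers to read_bdf(); the path below is
# a placeholder.
def _example_get_model():  # hypothetical helper for illustration
    from pyNastran.bdf.bdf import read_bdf
    model_from_path = get_model('plate.bdf')
    model_in_memory = get_model(read_bdf('plate.bdf'))  # returned unchanged
    return model_from_path, model_in_memory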
def test_cut_plate_eids(self):
    """recover element ids"""
    log = SimpleLogger(level='warning', encoding='utf-8', log_func=None)
    bdf_filename = os.path.join(MODEL_PATH, 'plate_py', 'plate_py.dat')
    model = read_bdf(bdf_filename, log=log)
    nnodes = len(model.nodes)
    nodal_result = np.ones(nnodes)

    coord = CORD2R(1, rid=0, origin=[0., 0., 0.], zaxis=[0., 0., 1],
                   xzplane=[1., 0., 0.], comment='')
    model.coords[1] = coord
    ytol = 2.

    unique_geometry_array, unique_results_array, unused_rods = cut_face_model_by_coord(
        bdf_filename, coord, ytol,
        nodal_result, plane_atol=1e-5, skip_cleanup=True,
        csv_filename='cut_face.csv',
        plane_bdf_filename='plane_face.bdf',
    )
    #print(unique_geometry_array)
    #print(unique_results_array)
    unique_geometry_array = np.array(unique_geometry_array)
    unique_results_array = np.array(unique_results_array)
    assert unique_geometry_array.shape == (1, 40, 4), unique_geometry_array.shape
    assert unique_results_array.shape == (1, 40, 7), unique_results_array.shape
    unique_geometry_array = unique_geometry_array[0, :, :]
    unique_results_array = unique_results_array[0, :, :]
    assert unique_geometry_array.shape == (40, 4), unique_geometry_array.shape
    assert unique_results_array.shape == (40, 7), unique_results_array.shape
    #print(unique_geometry_array)
    #print(unique_results_array)
    os.remove('cut_face.csv')
    os.remove('plane_face.bdf')
def test_quad_180_01(self):
    r"""
    Identify a 180+ degree quad

    y
    ^         4
    |        /|
    |       / |
    |      /  |
    |     /   |
    |    /    |
    1------2  |----> x
            \ |
             \|
              3
    """
    msg = ('CEND\n'
           'BEGIN BULK\n'
           'GRID,1,,0.,0.,0.\n'
           'GRID,2,,1.,0.,0.\n'
           'GRID,3,,2.,-1.,0.\n'
           'GRID,4,,2., 1.,0.\n'

           'CQUAD4,100,1, 1,2,3,4\n'
           'PSHELL,1,1,0.1\n'
           'MAT1,1,3.0,, 0.3\n'
           'ENDDATA')
    bdf_filename = 'cquad4.bdf'
    with codec_open(bdf_filename, 'w') as bdf_file:
        bdf_file.write(msg)

    model = read_bdf(bdf_filename, log=log, xref=True)
    xyz_cid0 = model.get_xyz_in_coord(cid=0, fdtype='float32')
    nid_map = {}
    for i, (nid, node) in enumerate(sorted(iteritems(model.nodes))):
        #xyz = node.get_position()
        #xyz_cid0[i, :] = xyz
        nid_map[nid] = i
    eids_to_delete = get_bad_shells(model, xyz_cid0, nid_map, max_theta=180.,
                                    max_skew=1000., max_aspect_ratio=1000.)
    assert eids_to_delete == [100], eids_to_delete
    os.remove(bdf_filename)
def run_map_deflections(node_list, bdf_filename, out_filename, cart3d, cart3d2, log=None):
    """
    Runs the spline deflection mapping method to morph the Cart3d model
    to a deformed mesh.

    Parameters
    ----------
    node_list : List[int]
        the list of nodes from the BDF to spline???
        is this just the SPLINE1 card???
    bdf_filename : str
        the name of the undeformed BDF model
    out_filename : str
        the name of the deformed result (OP2) model
    cart3d : str
        the name of the undeformed Cart3d model
    cart3d2 : str
        the name of the deformed Cart3d model
    log : Log(); default=None
        a python logging object
    """
    fbase, ext = os.path.splitext(out_filename)
    if ext == '.op2':
        deflections = read_op2(out_filename, log=log)
    #elif ext == '.f06':
        #deflections = read_f06(out_filename)
    else:
        raise NotImplementedError('out_filename = %r' % out_filename)

    mesh = read_bdf(bdf_filename, xref=True, punch=False, log=log, debug=True)

    node_list = remove_duplicate_nodes(node_list, mesh, log=log)
    C = getC_matrix(node_list, mesh, log=log)
    wS = get_WS(node_list, deflections, log=log)
    del deflections

    aero_points = read_half_cart3d_points(cart3d, log=log)
    wA = get_WA(node_list, C, wS, mesh, aero_points, log=log)
    del C
    del mesh

    write_new_cart3d_mesh(cart3d, cart3d2, wA, log=log)
    return (wA, wS)
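# Usage sketch (not part of the original source): the node list holds the structural
# grid ids used to build the spline, and the OP2 supplies the deflections that get
# mapped onto the Cart3d surface. All ids and file names are placeholders.
def _example_run_map_deflections():  # hypothetical helper for illustration
    node_list = [101, 102, 103, 104]
    wA, wS = run_map_deflections(
        node_list, 'wing.bdf', 'wing.op2',
        'wing_undeformed.tri', 'wing_deformed.tri')
    return wA, wS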
def test_export_mcids(self):
    """creates material coordinate systems"""
    log = SimpleLogger(level='error')
    bdf_filename = os.path.join(MODEL_PATH, 'bwb', 'bwb_saero.bdf')
    csv_filename = os.path.join(MODEL_PATH, 'bwb', 'mcids.csv')
    export_mcids(bdf_filename, csv_filename,
                 export_xaxis=True, export_yaxis=True,
                 iply=9, log=log, debug=False)

    model = read_bdf(bdf_filename, xref=False, debug=False)
    model.safe_cross_reference()
    #os.remove('mcids.csv')

    argv = ['bdf', 'export_mcids', bdf_filename, '-o', csv_filename,
            '--iplies', '0,1,2,3,4,5,6,7,8,9,10', '--no_x', '--no_y']
    with self.assertRaises(DocoptExit):
        # can't define both --no_x and --no_y
        cmd_line(argv=argv, quiet=True)

    argv = ['bdf', 'export_mcids', bdf_filename, '-o', csv_filename,
            '--iplies', '0,1,2,3,4,5,6,7,8,9', '--no_x']
    cmd_line(argv=argv, quiet=True)

    eids = [1204, 1211]
    export_mcids(model, csv_filename=None, eids=eids,
                 export_xaxis=True, export_yaxis=True,
                 iply=9, log=log, debug=False)
    export_mcids(model, csv_filename=None, eids=eids,
                 export_xaxis=True, export_yaxis=False,
                 iply=9, log=log, debug=False)
    export_mcids(model, csv_filename=None, eids=eids,
                 export_xaxis=False, export_yaxis=True,
                 iply=9, log=log, debug=False)

    with self.assertRaises(AssertionError):
        # export_xaxis and export_yaxis can't both be False
        export_mcids(model, csv_filename=None, eids=eids,
                     export_xaxis=False, export_yaxis=False, iply=9)

    with self.assertRaises(RuntimeError):
        # no iply=10
        export_mcids(model, csv_filename, eids=eids,
                     export_xaxis=True, export_yaxis=True, iply=10)
def main():  # pragma: no cover
    app = QApplication(sys.argv)

    import pyNastran
    PKG_PATH = pyNastran.__path__[0]
    MODEL_PATH = os.path.join(PKG_PATH, '..', 'models')
    bdf_filename = os.path.join(MODEL_PATH, 'bwb', 'bwb_saero.bdf')
    #bdf_filename = os.path.join(MODEL_PATH, 'aero', 'bah_plane', 'bah_plane.bdf')

    from pyNastran.bdf.bdf import read_bdf
    model = read_bdf(bdf_filename)
    print(model.get_bdf_stats())
    unused_name = 'name'
    #res_widget.update_results(form, name)
    #--------------------------------------------
    m = ModelSidebar(app)
    m.set_model(model)
    sys.exit(app.exec_())
def test_convert_02(self):
    """converts a full model units"""
    log = SimpleLogger(level='error')
    bdf_filename = os.path.abspath(
        os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_saero.bdf'))
    bdf_filename_out = os.path.abspath(
        os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_saero.out'))
    model = read_bdf(bdf_filename, log=log)

    units_to = ['m', 'kg', 's']
    units_from = ['in', 'lbm', 's']
    #units_to = units_from
    convert(model, units_to, units_from)
    model.write_bdf(bdf_filename_out)

    caero_bdf_filename = 'caero.bdf'
    export_caero_mesh(model, caero_bdf_filename=caero_bdf_filename)
    os.remove(bdf_filename_out)
    os.remove(caero_bdf_filename)
def test_convert_isat(self):
    """converts an isat model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'iSat')
    bdf_filename = os.path.join(model_path, 'ISat_Dploy_Sm.dat')
    bdf_filename_out = os.path.join(model_path, 'isat.bdf')
    bdf_filename_out2 = os.path.join(model_path, 'isat_converted.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)

    #card_lines = ['EIGRL', 42, None, None, 20]
    #model.add_card(card_lines, 'EIGRL')
    #model.case_control_deck = CaseControlDeck(lines)
    model.write_bdf(bdf_filename_out)

    #units_from = ['in', 'lbm', 's']
    units_from = ['mm', 'Mg', 's']
    units_to = ['m', 'kg', 's']

    convert(model, units_to, units=units_from)
    model.write_bdf(bdf_filename_out2)
    os.remove(bdf_filename_out)
    os.remove(bdf_filename_out2)
def test_tecplot_02(self):
    log = get_logger(level='warning')
    nastran_filename1 = os.path.join(NASTRAN_MODEL_PATH, 'solid_bending', 'solid_bending.bdf')
    nastran_filename2 = os.path.join(NASTRAN_MODEL_PATH, 'solid_bending', 'solid_bending2.bdf')
    tecplot_filename = os.path.join(NASTRAN_MODEL_PATH, 'solid_bending', 'solid_bending.plt')
    tecplot = nastran_to_tecplot_filename(nastran_filename1, tecplot_filename, log=log)
    #tecplot.write_tecplot(tecplot_filename)
    tecplot_to_nastran_filename(tecplot_filename, nastran_filename2, log=log)
    #os.remove(nastran_filename2)
    #os.remove(tecplot_filename)

    bdf_model = read_bdf(nastran_filename1, log=log)
    unused_tecplot = nastran_to_tecplot(bdf_model)
def test_convert_bar(self):
    """converts a bar model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'beam_modes')
    bdf_filename = os.path.join(model_path, 'beam_modes.dat')
    bdf_filename_out = os.path.join(model_path, 'beam_modes_temp.bdf')
    bdf_filename_out2 = os.path.join(model_path, 'beam_modes_converted.bdf')
    model = read_bdf(bdf_filename, log=log, validate=False)

    #card_lines = ['EIGRL', 42, None, None, 20]
    #model.add_card(card_lines, 'EIGRL')
    #model.case_control_deck = CaseControlDeck(lines)
    model.write_bdf(bdf_filename_out)

    #units_from = ['in', 'lbm', 's']
    units_from = ['mm', 'Mg', 's']
    units_to = ['m', 'kg', 's']

    convert(model, units_to, units=units_from)
    del model.params['WTMASS']
    model.write_bdf(bdf_filename_out2)
    os.remove(bdf_filename_out)
    os.remove(bdf_filename_out2)
def test_damper_03(self):
    model = BDF(debug=False)
    eid = 1
    pid = 2
    s1 = 3
    s2 = 4
    cdamp3 = model.add_cdamp3(eid, pid, [s1, s2], comment='cdamp3')
    cdamp3.raw_fields()
    cdamp3.write_card(size=8, is_double=False)

    b = 1.0e7
    pdamp = model.add_pdamp(pid, b, comment='pdamp')
    spoints = model.add_spoint([3, 4], comment='spoints')
    spoints.raw_fields()
    spoints.write_card()

    bdf_file = StringIO()
    model.write_bdf(bdf_file, close=False)
    bdf_file.seek(0)
    model2 = read_bdf(bdf_file, punch=True, debug=False)
def test_isat_02():
    """vectorized vs. standard test on ISat_Launch_Sm_4pt.dat"""
    log = SimpleLogger(level='error')
    bdf_filename = os.path.join(test_path, 'iSat', 'ISat_Launch_Sm_4pt.dat')
    bdf_filename_outv = os.path.join(test_path, 'iSat', 'ISat_Launch_Sm_4ptv.dat')
    bdf_filename_out = os.path.join(test_path, 'iSat', 'ISat_Launch_Sm_4pt2.dat')
    vmodel = read_bdfv(bdf_filename)
    vmodel.write_bdf(bdf_filename_outv)

    model = read_bdf(bdf_filename, log=log)
    model.write_bdf(bdf_filename_out)
    run_and_compare_fems(
        bdf_filename, bdf_filename_outv, debug=False, xref=True, check=True,
        punch=False, cid=None, mesh_form=None,
        print_stats=False, encoding=None,
        sum_load=False, size=8, is_double=False,
        stop=False, nastran='', post=-1, dynamic_vars=None,
        quiet=False, dumplines=False, dictsort=False,
        nerrors=0, dev=False, crash_cards=None,
    )
    os.remove(bdf_filename_out)
def cmd_line_export_caero_mesh():  # pragma: no cover
    """command line interface to export_caero_mesh"""
    from docopt import docopt
    import pyNastran
    msg = (
        'Usage:\n'
        ' bdf export_caero_mesh IN_BDF_FILENAME [-o OUT_BDF_FILENAME]\n'
        ' bdf export_caero_mesh -h | --help\n'
        ' bdf export_caero_mesh -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        ' IN_BDF_FILENAME path to input BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        ' -o OUT, --output OUT_CAERO_BDF_FILENAME path to output BDF file\n'
        '\n'
        'Info:\n'
        ' -h, --help show this help message and exit\n'
        " -v, --version show program's version number and exit\n"
    )
    if len(sys.argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    #type_defaults = {
    #    '--nerrors' : [int, 100],
    #}
    data = docopt(msg, version=ver)
    print(data)

    size = 16
    bdf_filename = data['IN_BDF_FILENAME']
    caero_bdf_filename = data['--output']
    if caero_bdf_filename is None:
        caero_bdf_filename = 'caero.bdf'

    from pyNastran.bdf.bdf import read_bdf
    model = read_bdf(bdf_filename)
    model.write_caero_model(caero_bdf_filename)
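# Usage sketch (not part of the original source): the same export can be driven from
# Python without docopt by loading the model and calling the method used above; the
# input path is a placeholder.
def _example_export_caero_mesh():  # hypothetical helper for illustration
    from pyNastran.bdf.bdf import read_bdf
    model = read_bdf('bwb_saero.bdf')
    model.write_caero_model('caero.bdf')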
def test_cut_plate(self):
    """mode 10 is a sine wave"""
    log = SimpleLogger(level='warning', encoding='utf-8', log_func=None)
    bdf_filename = os.path.join(MODEL_PATH, 'plate_py', 'plate_py.dat')
    op2_filename = os.path.join(MODEL_PATH, 'plate_py', 'plate_py.op2')
    model = read_bdf(bdf_filename, log=log)
    op2_model = read_op2_geom(op2_filename, log=log)

    title = 'Mode 10 Eigenvector'
    p1 = None
    p2 = None
    zaxis = None
    coord = CORD2R(1, rid=0, origin=[0., 0., 0.], zaxis=[0., 0., 1],
                   xzplane=[1., 0., 0.], comment='')
    model.coords[1] = coord
    ytol = 2.

    # no result
    nodal_result = None
    cut_and_plot_model(title, p1, p2, zaxis, model, coord, nodal_result, model.log,
                       ytol, plane_atol=1e-5, csv_filename=None, invert_yaxis=False,
                       cut_type='edge', plot=False, show=False)

    # real
    nodal_result = op2_model.eigenvectors[1].data[9, :, 2]
    cut_and_plot_model(title, p1, p2, zaxis, model, coord, nodal_result, model.log,
                       ytol, plane_atol=1e-5, csv_filename='real_result.csv',
                       invert_yaxis=False,
                       cut_type='edge', plot=IS_MATPLOTLIB, show=False)

    # complex
    nodal_result2 = np.asarray(nodal_result, dtype='complex64')
    nodal_result2.imag = -nodal_result.real
    cut_and_plot_model(title, p1, p2, zaxis, model, coord, nodal_result2, model.log,
                       ytol, plane_atol=1e-5, csv_filename='complex_result.csv',
                       invert_yaxis=True,
                       cut_type='edge', plot=IS_MATPLOTLIB, show=False)
    os.remove('real_result.csv')
    os.remove('complex_result.csv')
def split_elements(bdf_filename): model = read_bdf(bdf_filename, xref=True) for eid, elem in iteritems(model.elements): if elem.type == 'CTRIA3': # # 3 # /|\ # / | \ # / | \ # / 4 \ # / / \ \ # / / \ \ # 1-------------2 # p1, p2, p3 = elem.get_node_positions() centroid = (p1 + p2 + p3) / 3. # # 3 # /|\ # / | \ # / | \ # / | \ # 1----4----2 # elif elem.type == 'CQUAD4': # # # 4---------3 # | \ / | # | \ / | # | 5 | # | / \ | # |/ \| # 1---------2 # # the same thing shown in a rotated view # 4 # /| \ # / | \ # / | \ # / | \ # 1---------5---------3 # \ | / # \ | / # \ | / # \ | / # 2 # # max_area, taper_ratio, area_ratio # 4----7----3 # | | | # | | | # 8----9----6 # | | | # | | | # 1----4----2 # # max_interior_angle # 4---------3 # / \ / # / \ / # / \ / # / \ / # 1---------2 # # taper_ratio # 4--6--3 # / | \ # / | \ # / | \ # 1------5------2 # # taper_ratio # 4------3 # / \ / \ # / \ / \ # / \/ \ # 1-------5------2 # # taper_ratio # 4------3 # / \ \ # / \ \ # / \ \ # 1-------5------2 pass
def fully_stressed_design(bdf_filename, keywords=None,
                          niterations_max=2, alpha=0.9):
    """
    Optimizes shell thickness for minimum weight (ONLY shells)

    Parameters
    ----------
    bdf_filename : str; BDF()
        the BDF filename or model

    Returns
    -------
    desvars : dict[id]=values
        the "optimization" history of the design variables
    """
    force = True
    iteration = 0
    niterations_max = 10

    if isinstance(bdf_filename, str):
        model = read_bdf(bdf_filename)
    elif isinstance(bdf_filename, BDF):
        model = bdf_filename
        bdf_filename = model.bdf_filename
    else:
        raise TypeError(bdf_filename)

    doptparm = model.doptprm
    if doptparm is not None:
        if 'FSDALP' in doptparm.params:
            alpha = doptparm.params['FSDALP']
        else:
            alpha = doptparm.defaults['FSDALP']
        if not isinstance(alpha, float):
            msg = 'FSDALP on DOPTPARM must be a float; FSDALP=%r' % (alpha)
            raise TypeError(msg)
        if not (0. < alpha <= 1.):
            msg = 'FSDALP on DOPTPARM must be between (0. < n <= 1.0); FSDALP=%s' % (alpha)
            raise ValueError(msg)

        if 'FSDMAX' in doptparm.params:
            niterations_max = doptparm.params['FSDMAX']
        else:
            niterations_max = doptparm.defaults['FSDMAX']
        if not isinstance(niterations_max, int):
            msg = 'FSDMAX on DOPTPARM must be an integer; FSDMAX=%r' % (niterations_max)
            raise TypeError(msg)
        if niterations_max <= 0:
            msg = 'FSDMAX on DOPTPARM must be > 0; FSDMAX=%s' % (niterations_max)
            raise ValueError(msg)
    else:
        niterations_max = 2
        alpha = 0.9

    dresps_to_consider, desvars_to_consider, dvprels_to_consider = get_inputs(model)

    pid_to_eid = model.get_property_id_to_element_ids_map()
    bdf_filename2 = 'fem_baseline.bdf'
    op2_filename2 = 'fem_baseline.op2'
    try:
        shutil.copyfile(bdf_filename, bdf_filename2)
    except TypeError:
        msg = 'cannot copy %r to %r' % (bdf_filename, bdf_filename2)
        raise TypeError(msg)

    regions2 = {}  # pid -> [tnew, peak_stress, stress_max, eid_max, stress_min, eid_min]
    while iteration < niterations_max:
        if not os.path.exists(op2_filename2) or force:
            run_nastran(bdf_filename2, keywords=keywords)
        results = read_op2(op2_filename2, combine=True, log=None,
                           debug=False, debug_file=None, build_dataframe=False,
                           skip_undefined_matrices=True, mode='msc')

        isubcase = 1
        itime = 0

        stress_per_region = {}
        nopt = 0
        # regions maps pid -> [tmin, tmax, ovm_min, ovm_max]
        for pid, region in iteritems(regions):
            print('pid=%s region=%s' % (pid, region))
            (tmin, tmax, ovm_min, ovm_max) = region
            prop = model.properties[pid]
            told = prop.t
            eids_requested = pid_to_eid[pid]
            print('eids_requested[pid=%s] = %s' % (pid, eids_requested))

            #def compute_critical_stress(results, subcases)
            #def compute_critical_stress(results, subcases):
            stress = []
            eid_node = []
            for res in [results.cquad4_stress, results.ctria3_stress]:
                resi = res[isubcase]
                eid_nodei = resi.element_node
                #print('eid_nodei = %s' % (eid_nodei))
                eid = eid_nodei[:, 0]
                stress_data = resi.data
                eid_node.append(eid_nodei)

                # A
                #i = np.where(eid == eids_requested)

                # B
                #j = np.searchsorted(eid, eids_requested)
                #i = np.where(eid[j] == eids_requested)
                #j = np.in1d(eids_requested, eid)  # A in B
                j = np.in1d(eid, eids_requested)  # A in B
                i = np.where(j)
                #print('i = %s' % i)  #[0]
                #print('j = %s' % j)  #[0]
                #print('eid = %s' % eid)  #[0]
                #print('eids_requested = %s' % eids_requested)  #[0]
                if len(i) == 0:
                    continue
                #print('i=%s; ni=%s' % (i, len(i)))
                stress_datai = stress_data[itime, i, 7]
                #print('eids = %s' % eid[i])
                #print('stress_datai = %s' % stress_datai)
                stress.append(stress_datai)

            #print('stressA = %s' % stress)
            stress = np.hstack(stress)
            #print('stressB = %s' % stress)

            # PROD area
            # PSHELL/PCOMP thickness
            # PSHEAR thickness
            eid_node = np.vstack(eid_node)
            stress_max = stress.max()
            stress_min = stress.min()
            print('stress_min=%s' % stress_min)
            print('stress_max=%s' % stress_max)
            istress_max = np.where(stress == stress_max)[0]
            istress_min = np.where(stress == stress_min)[0]
            eid_max = eid_node[istress_max, 0]
            eid_min = eid_node[istress_min, 0]
            peak_stress = max(abs(stress_max), abs(stress_min))
            tnew = told * stress_max / ovm_max
            tnew = min(tmax, tnew)
            tnew = max(tmin, tnew)
            #tnew = (oi/omax)**alpha * ti_old

            tratio = tnew / told
            if np.allclose(tratio, 1.):
                continue
            nopt += 1
            stress_per_region = [stress_min, stress_max,
                                 eid_min, eid_max]
            print('pid=%s' % pid)
            print('  stress_per_region (ovm_min, ovm_max, eid_min, eid_max) => %s' % stress_per_region)
            print('  told=%s tnew=%s tratio=%s\n' % (told, tnew, tratio))

            prop.t *= tratio
            prop.z1 *= tratio
            prop.z2 *= tratio
            regions2[pid] = [tnew, peak_stress, stress_max, eid_max, stress_min, eid_min]  # t_new, ovm_new
        if nopt == 0:
            break

        #eid_node = np.hstack(eid_node)
        #s = np.hstack(eid_node)

        iteration += 1
        bdf_filename2 = 'fem_%i.bdf' % iteration
        op2_filename2 = 'fem_%i.op2' % iteration
        model.write_bdf(bdf_filename2)

    print('regions2 = %s' % regions2)
    return regions2
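# Usage sketch (not part of the original source): mirrors test_fsd_01 above; the
# nastran keywords and the input deck name are illustrative placeholders.
def _example_fully_stressed_design():  # hypothetical helper for illustration
    keywords = {'scr': 'yes', 'bat': 'no', 'old': 'no'}
    regions2 = fully_stressed_design('static_solid_shell_bar.bdf', keywords=keywords)
    return regions2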
def cmd_line_mirror():  # pragma: no cover
    """command line interface to write_bdf_symmetric"""
    import sys
    from docopt import docopt
    import pyNastran
    msg = "Usage:\n"
    msg += " bdf mirror IN_BDF_FILENAME [-o OUT_BDF_FILENAME] [--plane PLANE] [--tol TOL]\n"
    msg += ' bdf mirror -h | --help\n'
    msg += ' bdf mirror -v | --version\n'
    msg += '\n'

    msg += "Positional Arguments:\n"
    msg += " IN_BDF_FILENAME path to input BDF/DAT/NAS file\n"
    #msg += " OUT_BDF_FILENAME path to output BDF/DAT/NAS file\n"
    msg += '\n'

    msg += 'Options:\n'
    msg += " -o OUT, --output OUT_BDF_FILENAME path to output BDF/DAT/NAS file\n\n"
    msg += " --plane PLANE the symmetry plane (xz, ???)\n\n"
    msg += " --tol TOL the spherical equivalence tolerance (default=0.000001)\n\n"

    msg += 'Info:\n'
    msg += ' -h, --help show this help message and exit\n'
    msg += " -v, --version show program's version number and exit\n"

    if len(sys.argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    #type_defaults = {
    #    '--nerrors' : [int, 100],
    #}
    data = docopt(msg, version=ver)
    if data['--tol'] is None:
        data['--tol'] = 0.000001
    print(data)
    size = 16
    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'mirrored.bdf'

    from pyNastran.bdf.bdf import read_bdf
    from pyNastran.bdf.mesh_utils.bdf_equivalence import bdf_equivalence_nodes
    model = read_bdf(bdf_filename)
    size = 16
    bdf_filename_temp = '__temp.bdf__'
    model.write_bdf_symmetric(bdf_filename_temp, encoding=None, size=size,
                              is_double=False,
                              enddata=None, close=True, plane='xz')
    tol = 0.000001
    bdf_equivalence_nodes(bdf_filename_temp, bdf_filename_out, tol,
                          renumber_nodes=False, neq_max=10, xref=True,
                          node_set=None, size=size, is_double=False,
                          remove_collapsed_elements=False,
                          avoid_collapsed_elements=False,
                          crash_on_collapse=False, debug=True)
    os.remove(bdf_filename_temp)
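# Usage sketch (not part of the original source): the same mirror-then-equivalence
# flow can be run directly from Python; the paths are placeholders and the tolerance
# matches the CLI default above.
def _example_mirror():  # hypothetical helper for illustration
    from pyNastran.bdf.bdf import read_bdf
    from pyNastran.bdf.mesh_utils.bdf_equivalence import bdf_equivalence_nodes
    model = read_bdf('fem.bdf')
    model.write_bdf_symmetric('fem_half.bdf', size=16, plane='xz')
    bdf_equivalence_nodes('fem_half.bdf', 'fem_mirrored.bdf', 0.000001,
                          renumber_nodes=False, neq_max=10, xref=True)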
def cmd_line_bin():  # pragma: no cover
    import sys
    from docopt import docopt
    import pyNastran
    msg = "Usage:\n"
    #msg += " bdf bin IN_BDF_FILENAME AXIS1 AXIS2 [--cid CID] [--step SIZE]\n"
    msg += " bdf bin IN_BDF_FILENAME AXIS1 AXIS2 [--cid CID] [--nbins NBINS]\n"
    msg += ' bdf bin -h | --help\n'
    msg += ' bdf bin -v | --version\n'
    msg += '\n'

    msg += "Positional Arguments:\n"
    msg += " IN_BDF_FILENAME path to input BDF/DAT/NAS file\n"
    msg += " AXIS1 axis to loop over\n"
    msg += " AXIS2 axis to bin\n"
    msg += '\n'

    msg += 'Options:\n'
    msg += " --cid CID the coordinate system to bin (default:0)\n"
    #msg += " --step SIZE the step size for binning\n\n"
    msg += " --nbins NBINS the number of bins\n\n"

    msg += 'Info:\n'
    msg += ' -h, --help show this help message and exit\n'
    msg += " -v, --version show program's version number and exit\n\n"

    msg += 'Plot z (2) as a function of y (1) in y-stepsizes of 0.1:\n'
    msg += ' bdf bin fem.bdf 1 2 --cid 0 --step 0.1\n\n'

    msg += 'Plot z (2) as a function of y (1) with 50 bins:\n'
    msg += ' bdf bin fem.bdf 1 2 --cid 0 --nbins 50'

    if len(sys.argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    #type_defaults = {
    #    '--nerrors' : [int, 100],
    #}
    data = docopt(msg, version=ver)
    bdf_filename = data['IN_BDF_FILENAME']
    axis1 = int(data['AXIS1'])
    axis2 = int(data['AXIS2'])
    cid = 0
    if data['--cid']:
        cid = int(data['--cid'])
    #stepsize = 0.1
    #if data['--step']:
        #stepsize = float(data['--step'])
    nbins = 10
    if data['--nbins']:
        nbins = int(data['--nbins'])
    assert nbins >= 2, nbins
    print(data)
    #asdf

    import numpy as np
    import matplotlib.pyplot as plt
    from pyNastran.bdf.bdf import read_bdf

    model = read_bdf(bdf_filename)
    xyz_cid = model.get_xyz_in_coord(cid=cid, dtype='float64')
    y = xyz_cid[:, axis1]
    z = xyz_cid[:, axis2]

    plt.figure(1)
    #n, bins, patches = plt.hist( [x0,x1,x2], 10, weights=[w0, w1, w2], histtype='bar')
    ys = []
    zs = []
    zs_min = []
    zs_max = []
    y0 = y.min()
    y1 = y.max()
    dy = (y1 - y0) / nbins
    y0i = y0
    y1i = y0 + dy
    for i in range(nbins):
        j = np.where((y0i <= y) & (y <= y1i))[0]
        if not len(j):
            continue
        ys.append(y[j].mean())
        zs_min.append(z[j].min())
        zs_max.append(z[j].max())
        y0i += dy
        y1i += dy
    zs_max = np.array(zs_max)
    zs_min = np.array(zs_min)
    print('ys = %s' % ys)
    print('zs_max = %s' % zs_max)
    print('zs_min = %s' % zs_min)
    plt.plot(ys, zs_max, 'r-o', label='max')
    plt.plot(ys, zs_min, 'b-o', label='min')
    plt.plot(ys, zs_max - zs_min, 'g-o', label='delta')
    #plt.xlim([y0, y1])
    plt.xlabel('Axis %s' % axis1)
    plt.ylabel('Axis %s' % axis2)
    plt.grid(True)
    plt.legend()
    plt.show()
def remove_unused(bdf_filename, remove_nids=True, remove_cids=True, remove_pids=True, remove_mids=True): """ removes unused: - nodes - properties - materials - coords """ if isinstance(bdf_filename, BDF): model = bdf_filename else: model = read_bdf(bdf_filename, xref=False) #nids = model.nodes.keys() #cids = #nids = set(list(model.nodes.keys())) #cids = set(list(model.coords.keys())) #pids = set(list(model.properties.keys())) nids_used = set([]) cids_used = set([]) pids_used = set([]) pids_mass_used = set([]) mids_used = set([]) mids_thermal_used = set([]) #card_types = list(model.card_count.keys()) #card_map = model.get_card_ids_by_card_types( #card_types=card_types, #reset_type_to_slot_map=False, #stop_on_missing_card=True) #for nid, node in iteritems(model.nodes): #cids_used.update([node.Cp(), node.Cd()]) skip_cards = [ 'ENDDATA', 'PARAM', 'EIGR', 'EIGRL', 'EIGB', 'EIGP', 'EIGC', 'SPOINT', 'EPOINT', 'DESVAR', 'SET1', 'FREQ', 'FREQ1', 'FREQ2', 'TSTEP', 'TSTEPNL', 'NLPCI', #'LOAD', 'LSEQ', 'DLOAD', 'LOADCYN', 'NLPARM', 'ROTORG', 'ROTORD', 'DAREA', 'DEQATN', 'DMIG', 'DMI', 'DMIJ', 'DMIK', 'DMIJI', 'POINT', 'EPOINT', 'DELAY', 'DPHASE', # properties 'PELAS', 'PDAMP', 'PBUSH', 'PELAST', 'PDAMPT', 'PBUSHT', 'PGAP', 'PBUSH1D', 'PFAST', 'PVISC', 'PMASS', 'FLFACT', 'FLUTTER', 'DLINK', 'DDVAL', 'DIVERG', 'GUST', 'AELINK', 'AELIST', 'TRIM', 'PAERO1', 'AEFACT', 'AESTAT', 'BCTPARA', 'BCRPARA', 'BSURF', 'BSURFS', 'BCTADD', 'BCTSET', # not checked------------------------------------------ 'PHBDY', 'CHBDYG', 'CHBDYP', 'CHBDYE', 'RADBC', 'CONV', 'QVOL', 'PCONV', 'PCONVM', #'PBCOMP', 'PDAMP5', 'AECOMP', 'CAERO2', 'CAERO3', 'CAERO4', 'PAERO3', 'PAERO4', #'CFAST', 'DCONADD', 'GMCORD', 'MONPNT1', 'MONPNT2', 'MONPNT3', ] set_types = [ 'SET1', 'SET3', 'ASET', 'ASET1', 'BSET', 'BSET1', 'CSET', 'CSET1', 'QSET', 'SSET1', 'USET', 'USET1', 'SESET', ] load_types = [ 'GRAV', 'RANDPS', 'FORCE', 'FORCE1', 'FORCE2', 'MOMENT', 'MOMENT1', 'MOMENT2', 'PLOAD', 'PLOAD1', 'PLOAD2', 'PLOAD4', 'SPCD', 'GMLOAD', 'RFORCE', 'RFORCE1', 'TEMP', 'QBDY1', 'QBDY2', 'QBDY3', 'QHBDY', 'ACCEL', 'PLOADX1', 'SLOAD', 'ACCEL1', 'LOADCYN', 'LOAD', 'LSEQ', 'DLOAD', ] # could remove some if we look at the rid_trace #for cid, coord in iteritems(model.coords): #if coord.type in ['CORD1R', 'CORD1C', 'CORD1S']: #nids_used.update(node_ids) #elif coord.type in ['CORD1R', 'CORD1C', 'CORD1S']: #cids_used.update(coord.Rid()) #else: #raise NotImplementedError(coord) for card_type, ids in iteritems(model._type_to_id_map): #for card_type, ids in iteritems(card_map): if card_type in ['CORD1R', 'CORD1C', 'CORD1S']: #print(ids) for cid in ids: coord = model.coords[cid] nids_used.update(coord.node_ids) elif card_type in ['CORD2R', 'CORD2C', 'CORD2S']: #print(ids) for cid in ids: coord = model.coords[cid] cids_used.add(coord.Rid()) elif card_type in ['MAT1', 'MAT2', 'MAT3', 'MAT4', 'MAT5', 'MAT8', 'MAT9', 'MAT10', 'MAT11']: # todo: MATS1, MATT1, etc. 
pass elif card_type in ['MATS1', 'MATT1', 'MATT2', 'MATT4', 'MATT5', 'MATHE', 'MATHP', 'CREEP']: mids_used.update(ids) elif card_type in ['CTETRA', 'CPENTA', 'CPYRAM', 'CHEXA']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type in ['CONM1', 'CONM2']: for eid in ids: elem = model.masses[eid] nids_used.add(elem.Nid()) cids_used.add(elem.Cid()) #print(elem.object_attributes()) #print(elem.object_methods()) #aaa elif card_type in ['CMASS1', 'CMASS3']: for eid in ids: elem = model.masses[eid] pids_mass_used.add(elem.Pid()) nids_used.update(elem.node_ids) elif card_type in ['CMASS2', 'CMASS4']: for eid in ids: elem = model.masses[eid] nids_used.update(elem.node_ids) elif card_type in ['CELAS1', 'CDAMP1', 'CVISC', 'CDAMP5']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type in ['CELAS2', 'CDAMP2']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) elif card_type in ['CELAS3', 'CDAMP3']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type in ['CELAS4', 'CDAMP4']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) elif card_type in ['CTRIA3', 'CQUAD4', 'CTRIA6', 'CTRIAR', 'CQUAD8', 'CQUADR', 'CTRIAX', 'CQUADX', 'CQUAD']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) if isinstance(elem.theta_mcid, int): cids_used.add(elem.theta_mcid) elif card_type in ['CTRIAX6']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) mids_used.add(elem.Mid()) elif card_type in ['CSHEAR', 'CTUBE']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type in ['CPLSTN3', 'CPLSTN4', 'CPLSTN6', 'CPLSTN8', 'CPLSTS3', 'CPLSTS4', 'CPLSTS6', 'CPLSTS8', 'CQUADX4', 'CQUADX8', 'CTRIAX6', 'CTRAX3', 'CTRAX6']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type == 'PLPLANE': for pid in ids: prop = model.properties[pid] cids_used.add(prop.cid) mids_used.add(prop.Mid()) elif card_type == 'PPLANE': for pid in ids: prop = model.properties[pid] mids_used.add(prop.Mid()) elif card_type in ['CROD', 'CRAC2D', 'CRAC3D']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type in ['CONROD']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Mid()) elif card_type in ['PLOTEL']: for eid in ids: elem = model.plotels[eid] nids_used.update(elem.node_ids) elif card_type in ['PSOLID', 'PLSOLID']: for pid in ids: prop = model.properties[pid] mids_used.add(prop.Mid()) elif card_type in ['PDAMP5']: for pid in ids: prop = model.properties[pid] mids_thermal_used.add(prop.Mid()) elif card_type in ['PBAR', 'PBARL', 'PROD', 'PTUBE', 'PBEAM', 'PBEAML', 'PSHEAR', 'PRAC2D', 'PRAC3D', 'PBEND']: for pid in ids: prop = model.properties[pid] mids_used.add(prop.Mid()) elif card_type in ['PSHELL']: for pid in ids: prop = model.properties[pid] mids = [mid for mid in prop.material_ids if mid is not None] mids_used.update(mids) elif card_type in ['PCOMP', 'PCOMPG']: for pid in ids: prop = model.properties[pid] mids = prop.material_ids mids_used.update(mids) elif card_type in ['PBCOMP']: for pid in ids: prop = model.properties[pid] mids = prop.Mids() mids_used.add(prop.Mid()) mids_used.update(mids) elif card_type in ['PCOMPS']: 
for pid in ids: prop = model.properties[pid] mids = prop.Mids() mids_used.update(mids) cids_used.update(prop.cordm) elif card_type in ['RBAR', 'RBAR1', 'RBE1', 'RBE2', 'RBE3', 'RROD', 'RSPLINE']: for eid in ids: elem = model.rigid_elements[eid] #print(elem.object_attributes()) #print(elem.object_methods()) nids_used.update(elem.independent_nodes) nids_used.update(elem.dependent_nodes) elif card_type in ['TLOAD1', 'TLOAD2', 'RLOAD1', 'RLOAD2', 'ACSRCE']: pass elif card_type in load_types: for loads in itervalues(model.loads): for load in loads: if load.type in ['FORCE', 'MOMENT']: nids_used.add(load.node_id) cids_used.add(load.Cid()) elif load.type in ['FORCE1', 'FORCE2', 'MOMENT1', 'MOMENT2']: nids_used.update(load.node_ids) elif load.type == 'GRAV': cids_used.add(load.Cid()) elif load.type == 'RANDPS': pass elif load.type == 'PLOAD': nids_used.update(load.node_ids) elif load.type == 'PLOAD1': #eid = integer(card, 2, 'eid') pass elif load.type == 'PLOAD2': #eids_used.update(load.element_ids) pass elif load.type == 'PLOAD4': # eids, g1, g34 cids_used.add(load.Cid()) elif load.type == 'SPCD': nids_used.update(load.node_ids) elif load.type == 'GMLOAD': cids_used.add(load.Cid()) elif load.type in ['RFORCE', 'RFORCE1']: nids_used.add(load.node_id) cids_used.add(load.Cid()) elif load.type == 'TEMP': nids_used.update(list(load.temperatures.keys())) elif load.type == 'ACCEL': # nids? cids_used.add(load.Cid()) elif load.type == 'ACCEL1': # nids? cids_used.add(load.Cid()) elif load.type in ['QBDY1', 'QBDY2', 'QBDY3', 'QHBDY']: pass #'QBDY1', 'QBDY2', 'QBDY3', 'QHBDY', 'PLOADX1 elif load.type in ['PLOADX1']: nids_used.update(load.node_ids) elif load.type in ['SLOAD']: nids_used.update(load.node_ids) elif load.type in ['LOAD', 'LSEQ', 'LOADCYN']: pass elif load.type in ['QVOL']: # eids pass else: raise NotImplementedError(load) elif card_type == 'TEMPD': pass #for temp_id in ids: #tempd = self.tempds[temp_id] elif card_type in ['MPCADD', 'MPC']: for mpcs in itervalues(model.mpcs): for mpc in mpcs: if mpc.type in ['MPCADD']: pass elif mpc.type in ['MPC']: nids_used.update(mpc.node_ids) else: raise NotImplementedError(mpc) elif card_type in ['SPCADD', 'SPC1', 'SPC', 'GMSPC', 'SPCAX']: for spcs in itervalues(model.spcs): for spc in spcs: if spc.type in ['SPCADD', 'GMSPC', 'SPCAX']: pass elif spc.type in ['SPC1', 'SPC']: nids_used.update(spc.node_ids) else: raise NotImplementedError(spc) elif card_type in ['TABLED1', 'TABLED2', 'TABLED3', 'TABLED4', 'TABLEM1', 'TABLEM2', 'TABLEM3', 'TABLEM4', 'TABDMP1', 'TABRND1', 'TABLES1',]: pass elif card_type == 'SUPORT': for suport in model.suport: nids_used.update(suport.node_ids) elif card_type == 'SUPORT1': for suport1 in itervalues(model.suport1): nids_used.update(suport1.node_ids) elif card_type == 'GRID': for nid, node in iteritems(model.nodes): cids_used.update([node.Cp(), node.Cd()]) elif card_type in ['CBAR', 'CBEAM', 'CBEND']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) if elem.g0 is not None: assert isinstance(elem.g0, int), elem.g0 nids_used.add(elem.g0) elif card_type == 'CFAST': for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) elif card_type == 'CGAP': for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) if elem.g0 is not None: assert isinstance(elem.G0(), int), elem.G0() nids_used.add(elem.G0()) elif card_type in ['CBUSH1D', 'CBUSH2D']: for eid in ids: elem = model.elements[eid] 
nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) cids_used.add(elem.Cid()) elif card_type in ['PBUSH']: pass #for pid in ids: #prop = model.properties[pid] #raise RuntimeError(prop) elif card_type == 'PBUSHT': # tables pass elif card_type in ['CBUSH']: for eid in ids: elem = model.elements[eid] nids_used.update(elem.node_ids) pids_used.add(elem.Pid()) if elem.g0 is not None: assert isinstance(elem.g0, int), elem.g0 nids_used.add(elem.g0) # TODO: cid elif card_type == 'AESURF': #CID1 | ALID1 | CID2 | ALID2 for aesurf in itervalues(model.aesurf): cids_used.add(aesurf.Cid1()) cid2 = aesurf.Cid2() if cid2 is not None: cids_used.add(cid2) elif card_type in ['SPLINE1', 'SPLINE2', 'SPLINE3', 'SPLINE4', 'SPLINE5']: pass #for spline_id in ids: #spline = model.splines[spline_id] elif card_type in ['CAERO1']: for eid in ids: caero = model.caeros[eid] # PID, LSPAN, LCHORD cids_used.add(caero.Cp()) elif card_type in skip_cards: pass elif card_type in set_types: pass elif card_type in ['DCONSTR']: pass elif card_type == 'DRESP1': for dresp_id in ids: dresp = model.dresps[dresp_id] if dresp.property_type in ['PSHELL', 'PCOMP', 'PCOMPG', 'PBAR', 'PBARL', 'PBEAM', 'PROD', 'PDAMP', 'PVISC', 'PTUBE', 'PSHEAR', 'PELAS', 'PSOLID', 'PBEAML']: pids_used.update(dresp.atti_values()) elif dresp.property_type == 'ELEM': if dresp.response_type in ['STRESS', 'FRSTRE', 'CFAILURE', 'TFORC', 'FRFORC']: #eids_used.update(dresp.atti_values()) pass else: msg = str(dresp) + 'region=%r property_type=%r response_type=%r, atta=%r attb=%s atti=%s' % ( dresp.region, dresp.property_type, dresp.response_type, dresp.atta, dresp.attb, dresp.atti) raise NotImplementedError(msg) #elif dresp.property_type == 'STRESS': elif dresp.property_type is None: if dresp.response_type in ['WEIGHT', 'EIGN', 'VOLUME', 'LAMA', 'CEIG', 'FREQ', 'STABDER']: pass elif dresp.response_type in ['DISP', 'FRDISP', 'TDISP', 'RMSDISP', 'PSDDISP', 'TVELO', 'FRVELO', 'RMSVELO', 'TACCL', 'FRACCL', 'RMSACCL', 'SPCFORCE', 'TSPCF', 'FRSPCF', 'FORCE', 'TFORC', 'FRFORC']: nids_used.update(dresp.atti) elif dresp.response_type in ['FLUTTER', 'TRIM', 'DIVERG']: # flutter_id / trim_id pass else: msg = str(dresp) + 'region=%r property_type=%r response_type=%r atta=%r attb=%s atti=%s' % ( dresp.region, dresp.property_type, dresp.response_type, dresp.atta, dresp.attb, dresp.atti) raise NotImplementedError(msg) else: msg = str(dresp) + 'region=%r property_type=%r response_type=%r atta=%r attb=%s atti=%s' % ( dresp.region, dresp.property_type, dresp.response_type, dresp.atta, dresp.attb, dresp.atti) raise NotImplementedError(msg) elif card_type == 'DRESP2': pass #for dresp_id in ids: #dresp = model.dresps[dresp_id] #if dresp.property_type in ['PSHELL', 'PCOMP', 'PBAR', 'PBARL', 'PBEAM', 'PROD']: #pids_used.update(dresp.atti_values()) #elif dresp.property_type is None: #if dresp.response_type in ['WEIGHT', 'EIGN', 'VOLUME']: #pass #elif dresp.response_type in ['DISP']: #nids_used.update(dresp.atti) #else: #msg = str(dresp) + 'response_type=%r' % dresp.response_type #raise NotImplementedError(msg) #else: #raise NotImplementedError(dresp) #msg = str(dresp) + 'response_type=%r' % dresp.response_type #raise NotImplementedError(msg) elif card_type == 'DRESP3': pass elif card_type in ['DVPREL1', 'DVPREL2']: for dvprel_id in ids: dvprel = model.dvprels[dvprel_id] if dvprel.Type in ['PSHELL', 'PCOMP', 'PBAR', 'PBARL', 'PBEAM', 'PROD', 'PELAS', 'PBUSH', 'PDAMP', 'PTUBE', 'PSHEAR', 'PDAMP', 'PMASS', 'PBEAML', 'PCOMPG', 'PVISC', 'PBUSHT', 'PELAST', 'PBUSH1D', 'PGAP']: 
pids_used.add(dvprel.Pid()) elif dvprel.Type in ['DISP']: raise NotImplementedError(str(dvprel) + 'dvprel.Type=DISP') else: raise NotImplementedError(dvprel) elif card_type in ['DVCREL1', 'DVCREL2']: for dvcrel_id in ids: dvcrel = model.dvcrels[dvcrel_id] if dvcrel.Type in ['CMASS2', 'CMASS4', 'CONM1', 'CONM2', 'CELAS2', 'CELAS4', 'CDAMP2', 'CQUAD4', 'CGAP', 'CBAR']: pass #pids_used.add(dvcrel.Eid()) else: raise NotImplementedError(str(dvcrel) + 'Type=%r' % dvcrel.Type) elif card_type in ['DVMREL1', 'DVMREL2']: for dvmrel_id in ids: dvmrel = model.dvmrels[dvmrel_id] if dvmrel.Type in ['MAT1', 'MAT2', 'MAT8', 'MAT9', 'MAT11']: mids_used.add(dvmrel.Mid()) else: raise NotImplementedError(str(dvmrel) + 'Type=%r' % dvmrel.Type) elif card_type == 'DVGRID': for dvgrid_id in ids: dvgrids = model.dvgrids[dvgrid_id] for dvgrid in dvgrids: nids_used.add(dvgrid.nid) cids_used.add(dvgrid.cid) elif card_type == 'TF': for tf_id in ids: tfs = model.transfer_functions[tf_id] for tf in tfs: nids_used.update(tf.nids) else: raise NotImplementedError(card_type) #for pid, prop in iteritems(model.properties): #prop = model.properties[pid] #if prop.type in no_materials: #continue #elif prop.type == 'PSHELL': #mids_used.extend([mid for mid in prop.material_ids if mid is not None]) #elif prop.type == 'PCONEAX': #mids_used.extend([mid for mid in model.Mids() if mid is not None]) #elif prop.type in prop_mid: #mids_used.append(prop.Mid()) #elif prop.type in ['PCOMP', 'PCOMPG', 'PCOMPS']: #mids_used.extend(prop.Mids()) #elif prop.type == 'PBCOMP': #mids_used.append(prop.Mid()) #mids_used.extend(prop.Mids()) #else: #raise NotImplementedError(prop) nids = set(model.nodes.keys()) pids = set(model.properties.keys()) pids_mass = set(model.properties_mass.keys()) cids = set(model.coords.keys()) mids = set(model.materials.keys()) nids_to_remove = list(nids - nids_used) pids_to_remove = list(pids - pids_used) pids_mass_to_remove = list(pids_mass - pids_mass_used) mids_to_remove = list(mids - mids_used) cids_to_remove = list(cids - cids_used) if remove_nids: for nid in nids_to_remove: del model.nodes[nid] model.log.debug('removed nodes %s' % nids_to_remove) if remove_cids: for cid in cids_to_remove: del model.coords[cid] model.log.debug('removing coords %s' % cids_to_remove) if remove_pids: for pid in pids_mass_to_remove: del model.properties_mass[pid] model.log.debug('removing properties_mass %s' % pids_mass_to_remove) for pid in pids_to_remove: del model.properties[pid] model.log.debug('removing properties %s' % pids_to_remove) if remove_mids: for mid in mids_to_remove: del model.materials[mid] model.log.debug('removing materials %s' % mids_to_remove)
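# Example driver for remove_unused().  This is a hedged sketch: the file names
# are placeholders, and it only shows the intended workflow of stripping
# unreferenced nodes/coords/properties/materials in place and saving the result.
def _example_remove_unused():  # pragma: no cover
    model = read_bdf('fem_with_unused_cards.bdf', xref=False)  # hypothetical input deck
    remove_unused(model, remove_nids=True, remove_cids=True,
                  remove_pids=True, remove_mids=True)
    model.write_bdf('fem_cleaned.bdf')  # hypothetical output deck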
def run_fem1(fem1, bdf_model, mesh_form, xref, punch, sum_load, size, is_double, cid,
             encoding=None):
    """
    Reads/writes the BDF

    Parameters
    ----------
    fem1 : BDF()
        The BDF object
    bdf_model : str
        The path to the bdf file
    mesh_form : str {combined, separate}
        'combined' : interspersed=True
        'separate' : interspersed=False
    xref : bool
        The xref mode
    punch : bool
        punch flag
    sum_load : bool
        static load sum flag
    size : int, {8, 16}
        size flag
    is_double : bool
        double flag
    cid : int / None
        cid flag
    encoding : str; default=None
        the file encoding
    """
    assert os.path.exists(bdf_model), print_bad_path(bdf_model)
    try:
        if '.pch' in bdf_model:
            fem1.read_bdf(bdf_model, xref=False, punch=True, encoding=encoding)
        else:
            fem1.read_bdf(bdf_model, xref=False, punch=punch, encoding=encoding)
            #fem1.geom_check(geom_check=True, xref=False)
            fem1.write_skin_solid_faces('skin_file.bdf', size=16, is_double=False)
            if xref:
                #fem1.uncross_reference()
                fem1.cross_reference()
                fem1._xref = True
                spike_fem = read_bdf(fem1.bdf_filename, encoding=encoding)

                remake = False
                if remake:
                    log = fem1.log
                    fem1.save('model.obj')
                    fem1.save('model.obj', unxref=False)
                    fem1.write_bdf('spike_out.bdf')
                    fem1.get_bdf_stats()

                    fem1 = BDF()
                    fem1.load('model.obj')
                    fem1.write_bdf('spike_in.bdf')
                    fem1.log = log
                    fem1.get_bdf_stats()

                    fem1.cross_reference()
                    #fem1.get_bdf_stats()
                    fem1._xref = True

                #fem1.geom_check(geom_check=True, xref=True)
                #fem1.uncross_reference()
                #fem1.cross_reference()
    except:
        print("failed reading %r" % bdf_model)
        raise

    #fem1.sumForces()
    if fem1._auto_reject:
        out_model = bdf_model + '.rej'
    else:
        out_model = bdf_model + '_out'

    if cid is not None and xref:
        fem1.resolve_grids(cid=cid)

    if mesh_form == 'combined':
        fem1.write_bdf(out_model, interspersed=True, size=size, is_double=is_double)
    elif mesh_form == 'separate':
        fem1.write_bdf(out_model, interspersed=False, size=size, is_double=is_double)
    else:
        msg = "mesh_form=%r; allowedForms=['combined', 'separate']" % mesh_form
        raise NotImplementedError(msg)
    #fem1.writeAsCTRIA3(out_model)

    fem1._get_maps()
    return out_model, fem1
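# A minimal way to drive run_fem1() outside the full regression harness.  This
# is an illustrative sketch only: the input path is a placeholder and the flag
# values are arbitrary; it assumes BDF and run_fem1 are available in this module.
def _example_run_fem1():  # pragma: no cover
    fem1 = BDF()
    out_model, fem1 = run_fem1(
        fem1, 'some_model.bdf',  # hypothetical input deck
        mesh_form='combined', xref=True, punch=False, sum_load=False,
        size=8, is_double=False, cid=None)
    print('wrote %s' % out_model)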
def get_oml_eids(bdf_filename, eid_start, theta_tol=30., is_symmetric=True, consider_flippped_normals=True): """ extracts the OML faces (outer mold line) bdf_filename : str or BDF() the bdf filename eid_start : int the element to start from theta_tol : float; default=30. the angular tolerance in degrees is_symmetric : bool; default=True is the y=0 plane considered to be part of the OML consider_flippped_normals : bool; default=True if you extracted the free faces from tets, you can get flipped normals this considers a 180 degree error to be 0.0, which will cause other problems """ #ninety = np.radians(90.) #2810 # start for bwb_saero.bdf #2811 # close #2819 # close #2818 # close #eids_oml = np.array([eid_start]) eids_oml = set([eid_start]) #--------------------------------- theta_tol = np.radians(theta_tol) model = read_bdf(bdf_filename, xref=True) maps = model._get_maps( eids=None, map_names=None, consider_0d=False, consider_0d_rigid=False, consider_1d=False, consider_2d=True, consider_3d=False) (edge_to_eid_map, eid_to_edge_map, nid_to_edge_map) = maps #free_edges = get_free_edges(model, maps=maps) #--------------------------------- normals = {} etypes_skipped = set([]) for eid, elem in iteritems(model.elements): if elem.type in ['CTRIA3', 'CQUAD4']: normals[eid] = elem.Normal() else: if elem.type in etypes_skipped: continue model.log.debug('elem.type=%r is not supported' % elem.type) etypes_skipped.add(elem.type) #eid_starts = eids_oml.tolist() eids_next = set([eid_start]) while eids_next: eid_starts = deepcopy(eids_next) eids_oml_start = deepcopy(eids_oml) print(len(eid_starts)) while eid_starts: eid_start = eid_starts.pop() normal_start = normals[eid_start] # get the next set of edges edges = eid_to_edge_map[eid_start] #flattened = [] #for row in matrix: #for n in row: #flattened.append(n) # flattened = [n for row in matrix for n in row] #eids_to_consider = [edge_to_eid_map[edge] for edge in edges] list_eids_to_consider = [] for edge in edges: eids_with_edge = edge_to_eid_map[edge] list_eids_to_consider += eids_with_edge #list_eids_to_consider = set([eid for eid in edge_to_eid_map[edge] for edge in edges]) #print('list_eids_to_consider =', list_eids_to_consider) eids_to_consider = set(list_eids_to_consider) # don't do the same element twice; creates an infinite loop if you do #eids_to_check = np.setdiff1d(eids_to_consider, eids_oml) eids_to_check = eids_to_consider.difference(eids_oml) # don't check elements we're checking right now #eids_to_check = np.setdiff1d(eids_to_consider, eid_starts) eids_to_check = eids_to_consider.difference(eid_starts) #print('eids_to_check =', eids_to_check) for eid in eids_to_check: normal = normals[eid] # a o b = a * b * cos(theta) # cos(theta) = (a o b)/ (a b); where |a| = 1; |b| = 1 cos_theta = np.dot(normal, normal_start) theta = np.arccos(cos_theta) if theta < theta_tol: eids_next.add(eid) eids_oml.add(eid) elif consider_flippped_normals: # handles flipped normals cos_theta = np.dot(normal, -normal_start) theta = np.arccos(cos_theta) if theta < theta_tol: eids_next.add(eid) eids_oml.add(eid) #print('eids_next =', eids_next) eids_next = eids_next.difference(eids_oml_start) #eids_next = eids_next.difference(eid_starts) #print('eids_next =', eids_next) #print('-------------------------------') print('done...') with open('eids_oml.txt', 'w') as eids_file: eids_file.write('eids_oml = %s\n' % list(eids_oml)) return eids_oml
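# The flood fill above accepts a neighboring element when the angle between its
# normal and the current element's normal is below theta_tol, optionally
# treating a flipped (180 degree) normal as a zero-degree error.  This
# standalone sketch shows just that acceptance test; the helper name is made up
# and both normals are assumed to be unit vectors.
def _is_oml_neighbor(normal, normal_start, theta_tol_rad, consider_flipped_normals=True):
    import numpy as np
    # a . b = |a| |b| cos(theta); clip guards against round-off outside [-1, 1]
    cos_theta = np.clip(np.dot(normal, normal_start), -1.0, 1.0)
    if np.arccos(cos_theta) < theta_tol_rad:
        return True
    if consider_flipped_normals:
        # a 180 degree error is treated as a 0 degree error
        cos_theta_flipped = np.clip(np.dot(normal, -normal_start), -1.0, 1.0)
        return np.arccos(cos_theta_flipped) < theta_tol_rad
    return False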
def write_skin_solid_faces( model, skin_filename, write_solids=False, write_shells=True, size=8, is_double=False, encoding=None, punch=False ): """ Writes the skinned elements Parameters ---------- model : BDF() or str BDF : the BDF object str : bdf_filename and the read_bdf method is called skin_filename : str the file to write write_solids : bool; default=False write solid elements that have skinned faces write_shells : bool; default=False write shell elements size : int; default=8 the field width is_double : bool; default=False double precision flag encoding : str; default=None -> system default the string encoding Unused Parameters ----------------- punch : bool; default=False is this a punch file; should be used by the read_bdf if model is a string """ if isinstance(model, string_types): model = read_bdf(model) if len(model.element_ids) == 0 or len(model.material_ids) == 0 or len(model.property_ids) == 0: msg = "returning due to no elements/materials/properties\n" msg += " nelements=%s nmaterials=%s nproperties=%s" % ( len(model.element_ids), len(model.material_ids), len(model.property_ids), ) model.log.warning(msg) return eid_set, face_map = get_solid_skin_faces(model) if len(eid_set) == 0: model.log.info("returning due to no elements in set") return eid_set_to_write = set([]) nid_set_to_write = set([]) mid_set_to_write = set([]) if write_solids: for face, eids in iteritems(eid_set): eid_set_to_write.update(eids) for eid in eids: elem = model.elements[eid] pid = elem.Pid() prop = model.properties[pid] # PSOLID mid = prop.Mid() # print(prop) nid_set_to_write.update(elem.node_ids) mid_set_to_write.add(mid) # print('added_mid (a) =', mid) elif write_shells: for face, eids in iteritems(eid_set): eid_set_to_write.update(eids) nid_set_to_write.update(face) for eid in eids: elem = model.elements[eid] pid = elem.Pid() prop = model.properties[pid] # PSOLID # print(prop) try: # print(prop.mid) mid = prop.Mid() mid_set_to_write.add(mid) # print('added eid=%s pid=%s mid=%s (b)' % (eid, pid, mid)) except AttributeError: continue else: raise RuntimeError("write_solids=False write_shells=False") eids_to_write = list(eid_set_to_write) nids_to_write = list(nid_set_to_write) mids_to_write = list(mid_set_to_write) # element_ids_to_delete = set(model.element_ids) - eids_to_write eid_shell = max(model.elements) + 1 pid_shell = max(model.properties) + 1 mid_shell = max(model.materials) + 1 _write_skin_solid_faces( model, skin_filename, face_map, nids_to_write, eids_to_write, mids_to_write, eid_set, eid_shell, pid_shell, mid_shell, write_solids=write_solids, write_shells=write_shells, size=size, is_double=is_double, encoding=encoding, )
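# Typical use of write_skin_solid_faces(): read a solid-element model and write
# only its exterior (skin) faces as shells.  The file names are placeholders;
# this is an illustrative sketch, not a test.
def _example_write_skin():  # pragma: no cover
    model = read_bdf('solid_model.bdf')  # hypothetical CTETRA/CHEXA deck
    write_skin_solid_faces(model, 'solid_model_skin.bdf',
                           write_solids=False, write_shells=True,
                           size=8, is_double=False)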
def force_to_pressure(bdf_filename, bdf_filename_out=None):
    """
    Converts FORCE cards to PLOAD4s for a shell model.

    Parameters
    ----------
    bdf_filename : str / BDF()
        the input BDF filename/model
    bdf_filename_out : str; default=None
        the output BDF filename; nothing is written if None

    Returns
    -------
    model : BDF()
        the model with the FORCE cards replaced by PLOAD4s
    """
    import numpy as np
    if isinstance(bdf_filename, BDF):
        model = bdf_filename
    else:
        model = read_bdf(bdf_filename, validate=True, xref=False, punch=False,
                         encoding=None, log=None, debug=True, mode='msc')

    if 0:
        card_types = ['CQUAD4', 'CTRIA3']
        card_ids_map = model.get_card_ids_by_card_types(card_types=card_types,
                                                        reset_type_to_slot_map=False,
                                                        stop_on_missing_card=False)
        for eid in card_ids_map['CQUAD4']:
            elem = model.elements[eid]
            #raise NotImplementedError(elem)
        for eid in card_ids_map['CTRIA3']:
            elem = model.elements[eid]
            #raise NotImplementedError(elem)

    # map each node to the elements that reference it
    nid_elem_count = defaultdict(int)
    nid_elem_map = defaultdict(list)
    for eid, elem in iteritems(model.elements):
        for nid in elem.nodes:
            nid_elem_count[nid] += 1
            nid_elem_map[nid].append(eid)

    # accumulate the applied force vector carried by each element
    forces = defaultdict(float)
    for load_id, loads in iteritems(model.loads):
        for load in loads:
            if load.type == 'FORCE':
                if load.Cid() != 0:
                    # TODO: transform load.xyz from the local coord into the global frame
                    raise NotImplementedError('FORCE cid=%r is not supported' % load.Cid())
                forcei = load.mag * load.xyz

                # split the nodal force evenly between the attached elements
                nid = load.node
                forcei /= nid_elem_count[nid]
                for eid in nid_elem_map[nid]:
                    forces[eid] += forcei
            else:
                raise NotImplementedError(load)

    model.cross_reference()
    model.loads = {}
    sid = 1  # load set id for the new PLOAD4 cards
    with open('pressures.out', 'w') as pressure_file:
        for eid, forcei in iteritems(forces):
            elem = model.elements[eid]
            area = elem.Area()

            # treat the accumulated force as acting normal to the face
            pressure = np.linalg.norm(forcei) / area
            pressures = [pressure, pressure, pressure, pressure]
            pload4 = PLOAD4(sid, [eid], pressures,
                            g1=None, g34=None, cid=0,
                            NVector=None, sorl='SURF', ldir='NORM', comment='')
            pressure_file.write(pload4.write_card(size=8, is_double=False))
            model.add_load(pload4)

    if bdf_filename_out:
        model.write_bdf(bdf_filename_out)
    return model
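# The conversion above reduces to two steps per FORCE card: split the nodal
# force evenly between the elements attached to that node, then divide each
# element's accumulated force magnitude by its area.  A tiny made-up example:
#   F = 12.0 applied at a node shared by 3 CQUAD4s -> each element carries 4.0
#   an element with area 2.0 then gets an equivalent PLOAD4 pressure of
#   p = 4.0 / 2.0 = 2.0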