def save_load_deck(model, punch=True, run_remove_unused=True, run_convert=True):
    """writes, re-reads, saves an obj, loads an obj, and returns the deck"""
    model.validate()
    model.pop_parse_errors()
    model.pop_xref_errors()

    # exercise the writer in all three field formats, rewinding after each
    # pass so the buffer can be re-read below
    bdf_file = StringIO()
    for write_kwargs in ({'size': 8}, {'size': 16}, {'size': 16, 'is_double': True}):
        model.write_bdf(bdf_file, close=False, **write_kwargs)
        bdf_file.seek(0)

    if run_remove_unused:
        remove_unused(model)
    if run_convert:
        # ft/lbm/s -> m/kg/s round-trips the unit-conversion machinery
        convert(model, ['m', 'kg', 's'], ['ft', 'lbm', 's'])

    # re-read the written deck into a fresh model and pickle it
    reread_model = BDF(log=model.log)
    reread_model.read_bdf(bdf_file, punch=punch)
    reread_model.pop_parse_errors()
    reread_model.get_bdf_stats()
    reread_model.save(obj_filename='model.obj', unxref=True)

    # load the pickle back, cross-reference, and hand the result to the caller
    loaded_model = BDF(debug=False, log=None, mode='msc')
    loaded_model.load(obj_filename='model.obj')
    os.remove('model.obj')
    loaded_model.cross_reference()
    loaded_model.pop_xref_errors()
    return loaded_model
def remove_unassociated_nodes(bdf_filename, bdf_filename_out, renumber=False):
    """dummy function"""
    # renumbering is not implemented by this shim
    assert renumber is False, renumber
    flags = {
        'remove_nids': True,
        'remove_cids': False,
        'remove_pids': False,
        'remove_mids': False,
    }
    remove_unused(bdf_filename, **flags)
def test_remove_bar(self):
    """removes unused data from the bar model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'beam_modes')
    bdf_filename = os.path.join(model_path, 'beam_modes.dat')
    # no output deck is written in this test, so there is no
    # bdf_filename_out (the original assigned one and never used it)
    model = read_bdf(bdf_filename, log=log, validate=False)
    remove_unused(model)
def _test_remove_bwb(self):
    """removes unused data from the bwb model"""
    model_path = os.path.join(pkg_path, '..', 'models', 'bwb')
    bdf_filename = os.path.join(model_path, 'bwb_saero.bdf')
    bdf_filename_out = os.path.join(model_path, 'bwb_modes.bdf')

    model = read_bdf(bdf_filename, log=log, validate=False)
    remove_unused(model)

    # write the stripped deck, then clean up the temporary file
    model.write_bdf(bdf_filename_out)
    os.remove(bdf_filename_out)
def test_remove_sine(self):
    """removes unused data from the sine model"""
    sine_dir = os.path.join(pkg_path, '..', 'models', 'freq_sine')
    bdf_filename_in = os.path.join(sine_dir, 'good_sine.dat')
    bdf_filename_out = os.path.join(sine_dir, 'sine_modes.bdf')

    model = read_bdf(bdf_filename_in, log=log, validate=False)
    remove_unused(model)

    # write the stripped deck, then clean up the temporary file
    model.write_bdf(bdf_filename_out)
    os.remove(bdf_filename_out)
def _test_remove_isat(self):
    """removes unused data from the isat model"""
    isat_dir = os.path.join(pkg_path, '..', 'models', 'isat')
    bdf_filename_in = os.path.join(isat_dir, 'ISat_Dploy_Sm.dat')
    bdf_filename_out = os.path.join(isat_dir, 'isat.bdf')

    model = read_bdf(bdf_filename_in, log=log, validate=False)
    remove_unused(model)

    # write the stripped deck, then clean up the temporary file
    model.write_bdf(bdf_filename_out)
    os.remove(bdf_filename_out)
def save_load_deck(model, punch=True, run_remove_unused=True, run_convert=True,
                   run_renumber=True, run_mirror=True):
    """writes, re-reads, saves an obj, loads an obj, and returns the deck"""
    model.validate()
    model.pop_parse_errors()
    model.pop_xref_errors()

    # round-trip the deck through small-field, large-field, and
    # double-precision writers, rewinding after each pass
    bdf_file = StringIO()
    for size, is_double in ((8, False), (16, False), (16, True)):
        model.write_bdf(bdf_file, size=size, is_double=is_double, close=False)
        bdf_file.seek(0)

    if run_remove_unused:
        remove_unused(model)
    if run_convert:
        convert(model, ['m', 'kg', 's'], ['ft', 'lbm', 's'])

    # re-read the deck, dump it to disk, and pickle it
    reread_model = BDF(log=model.log)
    reread_model.read_bdf(bdf_file, punch=punch)
    reread_model.pop_parse_errors()
    reread_model.get_bdf_stats()
    reread_model.write_bdf('model2.bdf')
    reread_model.save(obj_filename='model.obj', unxref=True)

    # reload the pickle into a fresh model and cross-reference it
    loaded_model = BDF(debug=False, log=model.log, mode='msc')
    loaded_model.load(obj_filename='model.obj')
    os.remove('model.obj')
    cross_reference(loaded_model)

    if run_renumber:
        renumber('model2.bdf', model.log)
        if run_mirror:
            # nested under renumber so an existing model is never modified
            # (which would break other tests); mirroring model2.bdf
            # shouldn't have any effect
            bdf_mirror('model2.bdf', plane='xz', log=model.log)
    return loaded_model
def save_load_deck(model, xref='standard', punch=True, run_remove_unused=True,
                   run_convert=True, run_renumber=True, run_mirror=True,
                   run_save_load=True, run_quality=True, write_saves=True,
                   run_save_load_hdf5=True, run_mass_properties=True, run_loads=True,
                   run_test_bdf=True, run_op2_writer=True, run_op2_reader=True,
                   op2_log_level: str = 'warning'):
    """writes, re-reads, saves an obj, loads an obj, and returns the deck

    The ``run_*``/``write_saves`` flags toggle the individual consistency
    checks; the re-loaded model is always returned.
    """
    # a stale scratch file from a previous run would confuse write_bdfs below
    if os.path.exists('junk.bdf'):
        os.remove('junk.bdf')
    # fail fast on any parse/xref problem
    model.set_error_storage(nparse_errors=0, stop_on_parsing_error=True,
                            nxref_errors=0, stop_on_xref_error=True)
    model.validate()
    model.pop_parse_errors()
    model.pop_xref_errors()

    # exercise the writer in small-field, large-field, and double-precision
    # formats, rewinding the buffer after each pass
    bdf_file = StringIO()
    model.write_bdf(bdf_file, size=8, close=False)
    bdf_file.seek(0)
    model.write_bdf(bdf_file, size=16, close=False)
    bdf_file.seek(0)
    model.write_bdf(bdf_file, size=16, is_double=True, close=False)
    bdf_file.seek(0)

    if write_saves and model.save_file_structure:
        bdf_filenames = {
            0: 'junk.bdf',
        }
        model.write_bdfs(bdf_filenames)
        os.remove('junk.bdf')

    if run_convert:
        units_to = ['m', 'kg', 's']
        units = ['ft', 'lbm', 's']
        convert(model, units_to, units)

    # re-read the written deck into a fresh model
    model2 = BDF(log=model.log)
    #print(bdf_file.getvalue())
    model2.read_bdf(bdf_file, punch=punch, xref=False)
    _cross_reference(model2, xref)

    model2.pop_parse_errors()
    model2.get_bdf_stats()
    model2.write_bdf('model2.bdf')

    if run_test_bdf:
        # run the full test_bdf harness on the round-tripped deck
        folder = ''
        log_error = get_logger(log=None, level='error', encoding='utf-8')
        test_bdf(folder, 'model2.bdf', stop_on_failure=True, punch=punch,
                 quiet=True, log=log_error)
        os.remove('model2.test_bdf.bdf')

    nelements = len(model2.elements) + len(model2.masses)
    nnodes = len(model2.nodes) + len(model2.spoints) + len(model2.epoints)
    _run_mass_properties(model2, nnodes, nelements,
                         run_mass_properties=run_mass_properties)
    _run_loads(model2, nelements, run_loads=run_loads)

    if run_save_load:
        # pickle round-trip into a third model
        model2.save(obj_filename='model.obj', unxref=True)
        model3 = BDF(debug=False, log=model.log, mode='msc')
        model3.load(obj_filename='model.obj')
        os.remove('model.obj')
    else:
        model2.uncross_reference()
        model3 = model2

    _run_hdf5(model2, model.log, run_save_load_hdf5=run_save_load_hdf5)

    cross_reference(model3, xref)
    if run_renumber:
        renumber('model2.bdf', model.log)
        if run_mirror:
            # nested under renumber so an existing model is never modified
            # (which would break other tests); mirroring model2.bdf
            # shouldn't have any effect
            model_mirrored = bdf_mirror('model2.bdf', plane='xz',
                                        log=model.log)[0]
            model_mirrored.write_bdf('mirrored2.bdf')
            read_bdf('mirrored2.bdf', log=model.log)
            os.remove('mirrored2.bdf')
    # NOTE(review): placed at function level so the scratch deck is removed
    # even when run_renumber is False -- confirm against sibling versions
    os.remove('model2.bdf')

    if model.elements and run_quality:
        element_quality(model)

    if run_op2_writer:
        op2_geom_model = attach_op2_results_to_bdf(model, op2_model=None)
        from pyNastran.op2.op2_geom import read_op2_geom
        # build a dummy static displacement result for grids 1..10 so the
        # OP2 writer has something to write
        table_name = 'OUGV1'
        node_gridtype = np.zeros((10, 2), dtype='int32')
        node_gridtype[:, 0] = np.arange(1, 11)
        data = np.zeros((1, 10, 6), dtype='float32')
        isubcase = 1
        disp = RealDisplacementArray.add_static_case(
            table_name, node_gridtype, data, isubcase, is_sort1=True)
        op2_geom_model.displacements[isubcase] = disp

        op2_filename = 'spike.op2'
        # temporarily swap in a quieter logger for the op2 writer/reader
        bkp_log = op2_geom_model.log
        op2_geom_model.log = get_logger(log=None, level=op2_log_level,
                                        encoding='utf-8')
        op2_geom_model.write_op2(op2_filename, post=-1, endian=b'<',
                                 skips=None, nastran_format='nx')
        if run_op2_reader:
            unused_op2_geom = read_op2_geom(op2_filename,
                                            log=op2_geom_model.log,
                                            xref=False)
        else:
            # report which test skipped the reader (caller's frame name)
            frame = inspect.currentframe()
            call_frame = inspect.getouterframes(frame, 2)
            op2_geom_model.log.warning('skipping op2 reader for %s' %
                                       call_frame[1][3])
        op2_geom_model.log = bkp_log
        os.remove(op2_filename)

    if run_remove_unused:
        remove_unused(model)
    return model3
def cmd_line_filter():  # pragma: no cover
    """command line interface to ``bdf filter``

    Filters elements by centroid location (x/y/z sign tests), removes the
    matching elements and any data orphaned by the removal, then writes the
    filtered deck.
    """
    from docopt import docopt
    import pyNastran
    msg = (
        'Usage:\n'
        '  bdf filter IN_BDF_FILENAME [-o OUT_CAERO_BDF_FILENAME]\n'
        '  bdf filter IN_BDF_FILENAME [-o OUT_CAERO_BDF_FILENAME] [--x YSIGN_X] [--y YSIGN_Y] [--z YSIGN_Z]\n'
        '  bdf filter -h | --help\n'
        '  bdf filter -v | --version\n'
        '\n'
        'Positional Arguments:\n'
        '  IN_BDF_FILENAME    path to input BDF/DAT/NAS file\n'
        '\n'
        'Options:\n'
        '  -o OUT, --output OUT_BDF_FILENAME  path to output BDF file (default=filter.bdf)\n'
        "  --x YSIGN_X                        a string (e.g., '< 0.')\n"
        "  --y YSIGN_Y                        a string (e.g., '< 0.')\n"
        "  --z YSIGN_Z                        a string (e.g., '< 0.')\n"
        '\n'
        'Info:\n'
        '  -h, --help      show this help message and exit\n'
        "  -v, --version   show program's version number and exit\n"
        '\n'
        'Examples\n'
        '1. remove unused cards:\n'
        # BUG FIX: the two example lines were missing trailing newlines and
        # rendered as one run-together line in the help text
        '   >>> bdf filter fem.bdf\n'
        '2. remove GRID points and associated cards with y value < 0:\n'
        "   >>> bdf filter fem.bdf --y '< 0.'\n"
    )
    if len(sys.argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    data = docopt(msg, version=ver)
    print(data)

    bdf_filename = data['IN_BDF_FILENAME']
    bdf_filename_out = data['--output']
    if bdf_filename_out is None:
        bdf_filename_out = 'filter.bdf'

    import numpy as np
    # map the sign token from the command line to its numpy comparator
    func_map = {
        '<': np.less,
        '>': np.greater,
        '<=': np.less_equal,
        '>=': np.greater_equal,
    }
    xsign = None
    ysign = None
    zsign = None
    if data['--x']:
        xsign, xval = data['--x'].split(' ')  # e.g., '< 0.' -> ('<', '0.')
        xval = float(xval)
        assert xsign in ['<', '>', '<=', '>='], xsign
    if data['--y']:
        ysign, yval = data['--y'].split(' ')
        yval = float(yval)
        assert ysign in ['<', '>', '<=', '>='], ysign
    if data['--z']:
        zsign, zval = data['--z'].split(' ')
        zval = float(zval)
        assert zsign in ['<', '>', '<=', '>='], zsign

    from pyNastran.bdf.bdf import read_bdf
    model = read_bdf(bdf_filename)

    # collect element ids and centroids in a parallel pair of arrays
    eids = []
    xyz_cid0 = []
    for eid, elem in sorted(model.elements.items()):
        xyz = elem.Centroid()
        xyz_cid0.append(xyz)
        eids.append(eid)
    xyz_cid0 = np.array(xyz_cid0)
    eids = np.array(eids)

    update_nodes = False
    iunion = None
    if xsign:
        xvals = xyz_cid0[:, 0]
        xfunc = func_map[xsign]
        ix = xfunc(xvals, xval)
        iunion = _union(xval, ix, iunion)
        update_nodes = True
    if ysign:
        yvals = xyz_cid0[:, 1]
        yfunc = func_map[ysign]
        iy = yfunc(yvals, yval)
        iunion = _union(yval, iy, iunion)
        update_nodes = True
    if zsign:
        zvals = xyz_cid0[:, 2]
        zfunc = func_map[zsign]
        # BUG FIX: was ``xfunc(zvals, zval)`` -- the z filter used the
        # x-axis comparator (NameError when only --z was given)
        iz = zfunc(zvals, zval)
        iunion = _union(zval, iz, iunion)
        update_nodes = True

    if update_nodes:
        # drop every element flagged by the union of the axis filters,
        # keeping the type->id bookkeeping consistent
        eids_to_remove = eids[iunion]
        for eid in eids_to_remove:
            etype = model.elements[eid].type
            model._type_to_id_map[etype].remove(eid)
            del model.elements[eid]

    # strip everything orphaned by the element removal (unxref'd model)
    remove_unused(model, remove_nids=True, remove_cids=True,
                  remove_pids=True, remove_mids=True)
    model.write_bdf(bdf_filename_out)
def tecplot_to_nastran(tecplot_filename, bdf_filename, log=None, debug=True):
    """Converts a Tecplot file to Nastran.

    Parameters
    ----------
    tecplot_filename : str / Tecplot
        path to a tecplot file, or an already-loaded tecplot model
    bdf_filename : str
        path of the Nastran punch deck to write
    log : Logger; default=None
        passed to ``read_tecplot``
    debug : bool; default=True
        passed to ``read_tecplot`` / ``BDF``
    """
    if isinstance(tecplot_filename, str):
        model = read_tecplot(tecplot_filename, log=log, debug=debug)
    else:
        model = tecplot_filename

    removed_nodes = False
    shell_pid = 1  # PSHELL id shared by all tris/quads
    solid_pid = 2  # PSOLID id shared by all solids
    mid = 1        # MAT1 id shared by both properties
    istart = 1
    with open(bdf_filename, 'w') as bdf_file:
        bdf_file.write('$pyNastran : punch=True\n')
        # nodes are written 1-based in file order
        for inode, node in enumerate(model.xyz):
            card = [
                'GRID', inode + 1, None,
            ] + list(node)
            bdf_file.write(print_card_8(card))

        itri = 0
        if len(model.tri_elements):
            # tris only
            for itri, tri in enumerate(model.tri_elements):
                card = ['CTRIA3', itri + 1, shell_pid] + list(tri)
                bdf_file.write(print_card_8(card))
            #istart += bdf_model

        if len(model.quad_elements):
            if len(model.tri_elements) != 0:
                # if there are tris, then we assume the quads are good
                # NOTE(review): quad ids restart at 1 here and can collide
                # with the tri ids above -- confirm intended
                for iquad, quad in enumerate(model.quad_elements):
                    card = ['CQUAD4', iquad + 1, shell_pid] + list(quad)
                    bdf_file.write(print_card_8(card))
            else:
                # need to split out the CQUAD4 elements; a "quad" whose last
                # two nodes repeat is really a tri
                istart = itri + 1
                for iquad, quad in enumerate(model.quad_elements):
                    if quad[2] == quad[3]:
                        # if it's a tri
                        card = ['CTRIA3', istart + iquad, shell_pid] + list(quad[:3])
                    else:
                        card = ['CQUAD4', istart + iquad, shell_pid] + list(quad)
                    bdf_file.write(print_card_8(card))
            istart += iquad

        if len(model.tri_elements) + len(model.quad_elements):
            card = ['PSHELL', shell_pid, mid, 0.1]
            bdf_file.write(print_card_8(card))

        if len(model.tet_elements) + len(model.hexa_elements):
            card = ['PSOLID', solid_pid, mid]
            bdf_file.write(print_card_8(card))

        if len(model.tet_elements):
            for itet, tet in enumerate(model.tet_elements):
                card = ['CTETRA', istart + itet, solid_pid] + list(tet)
                bdf_file.write(print_card_8(card))

        if len(model.hexa_elements):
            # need to split out the CTETRA and CPENTA elements; degenerate
            # hexas (repeated nodes) collapse to tet/pyramid/penta
            for ihex, hexa in enumerate(model.hexa_elements):
                uhexa = unique(hexa)
                nnodes_unique = len(uhexa)
                nids = hexa[:nnodes_unique]
                # NOTE(review): despite the name this is the *max* nodal y,
                # not a centroid; elements entirely below y=0 are dropped --
                # presumably a symmetry trim, confirm
                centroid_y = model.xyz[nids, 1].max()
                if centroid_y < 0:
                    removed_nodes = True
                    continue
                if nnodes_unique == 4:
                    card = ['CTETRA', istart + ihex, solid_pid] + list(nids)
                    assert len(card) == 7, len(card)
                elif nnodes_unique == 5:
                    card = ['CPYRAM', istart + ihex, solid_pid] + list(nids)
                    assert len(card) == 8, len(card)
                elif nnodes_unique == 6:
                    card = ['CPENTA', istart + ihex, solid_pid] + list(nids)
                    assert len(card) == 9, len(card)
                elif nnodes_unique == 8:
                    card = ['CHEXA', istart + ihex, solid_pid] + list(hexa)
                # NOTE(review): assumes nnodes_unique in {4, 5, 6, 8};
                # any other count would re-write the previous card
                bdf_file.write(print_card_8(card))

        # single isotropic material shared by the shell and solid properties
        E = 3.0e7
        G = None
        nu = 0.3
        card = ['MAT1', mid, E, G, nu]
        bdf_file.write(print_card_8(card))

    if removed_nodes:
        # dropping elements may orphan grids; re-read the deck and strip them
        bdf_model = BDF(debug=debug)
        bdf_model.read_bdf(bdf_filename)
        remove_unused(bdf_model)
def test_surf_01(self):
    """tests two_blade_wake_sym_extended.surf"""
    MODEL_PATH = os.path.join(PKG_PATH, '..', 'models')
    bdf_filename = os.path.join(MODEL_PATH, 'iSat', 'ISat_Launch_Sm_Rgd.dat')
    surf_filename = os.path.join(MODEL_PATH, 'iSat', 'ISat_Launch_Sm_Rgd.surf')
    bdf_model = read_bdf(bdf_filename)

    # element flags for every shell property:
    # name, initial_normal_spacing, bl_thickness, grid_bc
    pid_to_element_flags = {}
    for pid, prop in bdf_model.properties.items():
        if prop.type in ['PSHELL', 'PCOMP']:
            pid_to_element_flags[pid] = ['na;me', 0.01, 0.1, 1]

    # the raw model still contains non-shell cards, so the export must fail
    with self.assertRaises(RuntimeError):
        nastran_to_surf(bdf_model, pid_to_element_flags, surf_filename,
                        renumber_pids=None, line_map=None,
                        scale=1.0, tol=1e-10, xref=True)

    # strip the model down to the shell cards the surf writer supports
    delete_elements(
        bdf_model,
        element_types_to_save=['CTRIA3', 'CQUAD4'])
    delete_properties(
        bdf_model,
        property_types_to_save=['PSHELL', 'PCOMP', 'PCOMPG', 'PLPLANE'])
    #print(bdf_model.properties)
    bdf_model.uncross_reference()
    remove_unused(bdf_model, remove_nids=True, remove_cids=True,
                  remove_pids=True, remove_mids=True)
    #delete_forces(bdf_model)
    bdf_model.case_control_deck = None

    # renumber in-memory only; no renumbered deck is written to disk
    #bdf_filename_re = os.path.join(MODEL_PATH, 'iSat', 'ISat_Launch_Sm_Rgd_re.dat')
    bdf_filename_re = None
    bdf_model.cross_reference()
    bdf_model_re = bdf_renumber(
        bdf_model, bdf_filename_re,
        #size=8, is_double=False,
        #starting_id_dict=None,
        round_ids=False, cards_to_skip=None,
        log=bdf_model.log, debug=False)[0]
    remap_cards(bdf_model_re)

    # rebuild the flags against the renumbered property ids and export;
    # this time the write must succeed
    pid_to_element_flags = {}
    for pid, prop in bdf_model_re.properties.items():
        if prop.type in ['PSHELL', 'PCOMP']:
            # name, initial_normal_spacing, bl_thickness, grid_bc
            pid_to_element_flags[pid] = ['na;me', 0.01, 0.1, 1]
    nastran_to_surf(bdf_model_re, pid_to_element_flags, surf_filename,
                    renumber_pids=None, line_map=None,
                    scale=1.0, tol=1e-10, xref=False)
def save_load_deck(model, xref='standard', punch=True, run_remove_unused=True,
                   run_convert=True, run_renumber=True, run_mirror=True,
                   run_save_load=True, run_quality=True, write_saves=True,
                   run_save_load_hdf5=True, run_mass_properties=True):
    """writes, re-reads, saves an obj, loads an obj, and returns the deck

    The ``run_*``/``write_saves`` flags toggle the individual consistency
    checks; the re-loaded model is always returned.
    """
    model.validate()
    model.pop_parse_errors()
    model.pop_xref_errors()

    # exercise the writer in small-field, large-field, and double-precision
    # formats, rewinding the buffer after each pass
    bdf_file = StringIO()
    model.write_bdf(bdf_file, size=8, close=False)
    bdf_file.seek(0)
    model.write_bdf(bdf_file, size=16, close=False)
    bdf_file.seek(0)
    model.write_bdf(bdf_file, size=16, is_double=True, close=False)
    bdf_file.seek(0)

    if write_saves and model.save_file_structure:
        bdf_filenames = {
            0: 'junk.bdf',
        }
        model.write_bdfs(bdf_filenames)
        os.remove('junk.bdf')

    if run_remove_unused:
        remove_unused(model)
    if run_convert:
        units_to = ['m', 'kg', 's']
        units = ['ft', 'lbm', 's']
        convert(model, units_to, units)

    # re-read the written deck into a fresh model
    model2 = BDF(log=model.log)
    #print(bdf_file.getvalue())
    model2.read_bdf(bdf_file, punch=punch, xref=False)
    _cross_reference(model2, xref)

    model2.pop_parse_errors()
    model2.get_bdf_stats()
    model2.write_bdf('model2.bdf')

    nelements = len(model2.elements) + len(model2.masses)
    # NOTE(review): ``len(model2.nodes) == 0`` looks inverted -- it runs the
    # mass-property cross-checks only on models *without* grid points;
    # confirm against the sibling versions of this function
    if run_mass_properties and len(model2.nodes) == 0 and nelements > 0:
        # the three mass-property implementations must agree
        mass1, cg1, inertia1 = model2.mass_properties(reference_point=None,
                                                      sym_axis=None)
        mass2, cg2, inertia2 = model2.mass_properties_nsm(reference_point=None,
                                                          sym_axis=None)
        #if not quiet:
            #if model2.wtmass != 1.0:
                #print('weight = %s' % (mass1 / model2.wtmass))
            #print('mass = %s' % mass1)
            #print('cg = %s' % cg1)
            #print('Ixx=%s, Iyy=%s, Izz=%s \nIxy=%s, Ixz=%s, Iyz=%s' % tuple(inertia1))
        assert np.allclose(mass1, mass2), 'mass1=%s mass2=%s' % (mass1, mass2)
        assert np.allclose(cg1, cg2), 'mass=%s\ncg1=%s cg2=%s' % (mass1, cg1, cg2)
        assert np.allclose(
            inertia1, inertia2, atol=1e-5
        ), 'mass=%s cg=%s\ninertia1=%s\ninertia2=%s\ndinertia=%s' % (
            mass1, cg1, inertia1, inertia2, inertia1 - inertia2)

        mass3, cg3, inertia3 = mass_properties_breakdown(model2)[:3]
        assert np.allclose(mass1, mass3), 'mass1=%s mass3=%s' % (mass1, mass3)
        #assert np.allclose(cg1, cg3), 'mass=%s\ncg1=%s cg3=%s' % (mass1, cg1, cg3)

    if run_save_load:
        # pickle round-trip into a third model
        model2.save(obj_filename='model.obj', unxref=True)
        model3 = BDF(debug=False, log=model.log, mode='msc')
        model3.load(obj_filename='model.obj')
        os.remove('model.obj')
    else:
        model2.uncross_reference()
        model3 = model2

    if run_save_load_hdf5 and IS_H5PY:
        # hdf5 round-trip; verify no card type is lost
        # NOTE(review): 'test.h5' is left on disk -- confirm intended
        model2.export_hdf5_filename('test.h5')
        model4 = BDF(log=model2.log)
        model4.load_hdf5_filename('test.h5')
        model4.validate()
        bdf_stream = StringIO()
        model4.write_bdf(bdf_stream, encoding=None, size=8, is_double=False,
                         interspersed=False, enddata=None, write_header=True,
                         close=True)
        for key, value in model2.card_count.items():
            if key == 'ENDDATA':
                continue
            if key not in model4.card_count:
                msg = 'key=%r was not loaded to hdf5\nexpected=%s\nactual=%s' % (
                    key, model2.card_count, model4.card_count)
                #raise RuntimeError(msg)
                model.log.error(msg)

    cross_reference(model3, xref)
    if run_renumber:
        renumber('model2.bdf', model.log)
        if run_mirror:
            # nested under renumber so an existing model is never modified
            # (which would break other tests); mirroring model2.bdf
            # shouldn't have any effect
            bdf_mirror('model2.bdf', plane='xz', log=model.log)
    os.remove('model2.bdf')

    if model.elements and run_quality:
        element_quality(model)
    return model3
def save_load_deck(model, xref='standard', punch=True, run_remove_unused=True,
                   run_convert=True, run_renumber=True, run_mirror=True,
                   run_save_load=True, run_quality=True, write_saves=True,
                   run_save_load_hdf5=True):
    """writes, re-reads, saves an obj, loads an obj, and returns the deck

    The ``run_*``/``write_saves`` flags toggle the individual consistency
    checks; the re-loaded model is always returned.
    """
    model.validate()
    model.pop_parse_errors()
    model.pop_xref_errors()

    # exercise the writer in small-field, large-field, and double-precision
    # formats, rewinding the buffer after each pass
    bdf_file = StringIO()
    model.write_bdf(bdf_file, size=8, close=False)
    bdf_file.seek(0)
    model.write_bdf(bdf_file, size=16, close=False)
    bdf_file.seek(0)
    model.write_bdf(bdf_file, size=16, is_double=True, close=False)
    bdf_file.seek(0)

    if write_saves and model.save_file_structure:
        bdf_filenames = {
            0: 'junk.bdf',
        }
        model.write_bdfs(bdf_filenames)
        os.remove('junk.bdf')

    if run_remove_unused:
        remove_unused(model)
    if run_convert:
        units_to = ['m', 'kg', 's']
        units = ['ft', 'lbm', 's']
        convert(model, units_to, units)

    # re-read the written deck into a fresh model
    model2 = BDF(log=model.log)
    #print(bdf_file.getvalue())
    model2.read_bdf(bdf_file, punch=punch, xref=False)
    _cross_reference(model2, xref)

    model2.pop_parse_errors()
    model2.get_bdf_stats()
    model2.write_bdf('model2.bdf')

    if run_save_load:
        # pickle round-trip into a third model
        model2.save(obj_filename='model.obj', unxref=True)
        model3 = BDF(debug=False, log=model.log, mode='msc')
        model3.load(obj_filename='model.obj')
        os.remove('model.obj')
    else:
        model2.uncross_reference()
        model3 = model2

    if run_save_load_hdf5 and IS_H5PY:
        # hdf5 round-trip; verify no card type is lost
        # NOTE(review): a sibling version calls ``export_hdf5_filename``
        # (no ``_to_``) -- confirm which method name this BDF version has;
        # 'test.h5' is also left on disk
        model2.export_to_hdf5_filename('test.h5')
        model4 = BDF(log=model2.log)
        model4.load_hdf5_filename('test.h5')
        model4.validate()
        bdf_stream = StringIO()
        model4.write_bdf(bdf_stream, encoding=None, size=8, is_double=False,
                         interspersed=False, enddata=None, write_header=True,
                         close=True)
        for key, value in model2.card_count.items():
            if key == 'ENDDATA':
                continue
            if key not in model4.card_count:
                msg = 'key=%r was not loaded to hdf5\nexpected=%s\nactual=%s' % (
                    key, model2.card_count, model4.card_count)
                #raise RuntimeError(msg)
                model.log.error(msg)

    cross_reference(model3, xref)
    if run_renumber:
        renumber('model2.bdf', model.log)
        if run_mirror:
            # nested under renumber so an existing model is never modified
            # (which would break other tests); mirroring model2.bdf
            # shouldn't have any effect
            bdf_mirror('model2.bdf', plane='xz', log=model.log)
    os.remove('model2.bdf')

    if model.elements and run_quality:
        element_quality(model)
    return model3
def tecplot_to_nastran(tecplot_filename, bdf_filename, debug=True):
    """Converts a Tecplot file to Nastran.

    Parameters
    ----------
    tecplot_filename : str / Tecplot
        path to a tecplot file, or an already-loaded tecplot model
    bdf_filename : str
        path of the Nastran punch deck to write
    debug : bool; default=True
        passed to ``read_tecplot`` / ``BDF``
    """
    if isinstance(tecplot_filename, str):
        model = read_tecplot(tecplot_filename, debug=debug)
    else:
        model = tecplot_filename

    removed_nodes = False
    shell_pid = 1  # PSHELL id shared by all tris/quads
    solid_pid = 2  # PSOLID id shared by all solids
    mid = 1        # MAT1 id shared by both properties
    istart = 1
    # BUG FIX: ``itri`` must exist before the tri block; the quad-splitting
    # branch below reads it even when there are no tri elements (NameError)
    itri = 0
    # BUG FIX: open in text mode ('w', was 'wb'); print_card_8 returns str
    # and writing str to a binary file raises TypeError
    with open(bdf_filename, 'w') as bdf_file:
        bdf_file.write('$pyNastran : punch=True\n')
        # nodes are written 1-based in file order
        for inode, node in enumerate(model.xyz):
            card = ['GRID', inode + 1, None, ] + list(node)
            bdf_file.write(print_card_8(card))

        if len(model.tri_elements):
            # tris only
            for itri, tri in enumerate(model.tri_elements):
                card = ['CTRIA3', itri + 1, shell_pid] + list(tri)
                bdf_file.write(print_card_8(card))
            #istart += bdf_model

        if len(model.quad_elements):
            if len(model.tri_elements) != 0:
                # if there are tris, then we assume the quads are good
                for iquad, quad in enumerate(model.quad_elements):
                    card = ['CQUAD4', iquad + 1, shell_pid] + list(quad)
                    bdf_file.write(print_card_8(card))
            else:
                # need to split out the CQUAD4 elements; a "quad" whose last
                # two nodes repeat is really a tri
                istart = itri + 1
                for iquad, quad in enumerate(model.quad_elements):
                    if quad[2] == quad[3]:
                        # if it's a tri
                        card = ['CTRIA3', istart + iquad, shell_pid] + list(quad[:3])
                    else:
                        card = ['CQUAD4', istart + iquad, shell_pid] + list(quad)
                    bdf_file.write(print_card_8(card))
            istart += iquad

        if len(model.tri_elements) + len(model.quad_elements):
            card = ['PSHELL', shell_pid, mid, 0.1]
            bdf_file.write(print_card_8(card))

        if len(model.tet_elements) + len(model.hexa_elements):
            card = ['PSOLID', solid_pid, mid]
            bdf_file.write(print_card_8(card))

        if len(model.tet_elements):
            for itet, tet in enumerate(model.tet_elements):
                card = ['CTETRA', istart + itet, solid_pid] + list(tet)
                bdf_file.write(print_card_8(card))

        if len(model.hexa_elements):
            # need to split out the CTETRA and CPENTA elements; degenerate
            # hexas (repeated nodes) collapse to tet/pyramid/penta
            for ihex, hexa in enumerate(model.hexa_elements):
                uhexa = unique(hexa)
                nnodes_unique = len(uhexa)
                nids = hexa[:nnodes_unique]
                # max nodal y; elements entirely below y=0 are dropped
                # (symmetry trim -- matches the sibling implementation)
                centroid_y = model.xyz[nids, 1].max()
                if centroid_y < 0:
                    removed_nodes = True
                    continue
                if nnodes_unique == 4:
                    card = ['CTETRA', istart + ihex, solid_pid] + list(nids)
                    assert len(card) == 7, len(card)
                elif nnodes_unique == 5:
                    card = ['CPYRAM', istart + ihex, solid_pid] + list(nids)
                    assert len(card) == 8, len(card)
                elif nnodes_unique == 6:
                    card = ['CPENTA', istart + ihex, solid_pid] + list(nids)
                    assert len(card) == 9, len(card)
                elif nnodes_unique == 8:
                    card = ['CHEXA', istart + ihex, solid_pid] + list(hexa)
                # assumes nnodes_unique in {4, 5, 6, 8}
                bdf_file.write(print_card_8(card))

        # single isotropic material shared by the shell and solid properties
        E = 3.0e7
        G = None
        nu = 0.3
        card = ['MAT1', mid, E, G, nu]
        bdf_file.write(print_card_8(card))

    if removed_nodes:
        # dropping elements may orphan grids; re-read the deck and strip them
        bdf_model = BDF(debug=debug)
        bdf_model.read_bdf(bdf_filename)
        remove_unused(bdf_model)