def run_fem1(fem1, bdf_model, mesh_form, xref, punch, sum_load, size, is_double, cid):
    """
    Reads a BDF model and writes it back out.

    Parameters
    ----------
    fem1 : BDF()
        the model to read into
    bdf_model : str
        the path to the input BDF/punch file
    mesh_form : str; {'combined', 'separate'}
        'combined'  : interspersed (node/element) write
        'separate'  : grouped-by-card-type write
    xref : bool
        cross-reference the model on read
    punch : bool
        read the deck as a punch file
    sum_load : bool
        unused here; kept for interface compatibility with the caller
    size : int; {8, 16}
        field width for the output deck
    is_double : bool
        write in double precision
    cid : int or None
        if xref'd, resolve the grids into this coordinate frame

    Returns
    -------
    out_model : str
        the path of the BDF that was written
    """
    check_path(bdf_model, 'bdf_model')
    try:
        if '.pch' in bdf_model:
            # punch decks have no executive/case control, so never xref them
            fem1.read_bdf(bdf_model, xref=False, punch=True)
        else:
            fem1.read_bdf(bdf_model, xref=xref, punch=punch)
    except Exception:
        # was a bare ``except:``; narrowed so SystemExit/KeyboardInterrupt
        # aren't trapped -- the exception is always re-raised regardless
        print("failed reading %r" % bdf_model)
        raise
    #fem1.sumForces()

    # NOTE(review): 'v_out' looks like a typo for '_out'; left unchanged
    # because downstream tooling may depend on this exact suffix
    out_model = bdf_model + 'v_out'
    if cid is not None and xref:
        fem1.resolveGrids(cid=cid)

    if mesh_form == 'combined':
        fem1.write_bdf(out_model, interspersed=True, size=size, is_double=is_double)
    elif mesh_form == 'separate':
        fem1.write_bdf(out_model, interspersed=False, size=size, is_double=is_double)
    else:
        msg = "mesh_form=%r; allowedForms=['combined','separate']" % mesh_form
        raise NotImplementedError(msg)
    #fem1.writeAsCTRIA3(out_model)
    return out_model
def test_openfoam_geometry_01(self):
    """tests the ascii three plugs model"""
    log = get_logger(level='warning', encoding='utf-8')
    system_dir = os.path.join(MODEL_PATH, 'SnakeRiverCanyon', 'system')
    geometry_filename = os.path.join(system_dir, 'blockMeshDict')
    bdf_filename = os.path.join(system_dir, 'blockMeshDict.bdf')
    face_filename = os.path.join(system_dir, 'faces')  # unused; kept for reference
    check_path(geometry_filename, 'geometry_filename')

    # load the same blockMeshDict once as a shell mesh and once as a hex mesh
    gui = OpenFoamGUI()
    gui.log = log
    for geometry_format in ('openfoam_shell', 'openfoam_hex'):
        gui.on_load_geometry(geometry_filename, geometry_format=geometry_format,
                             raise_error=True)
    os.remove('points.bdf')
    #gui.load_openfoam_geometry_faces(geometry_filename)

    # round-trip the block mesh, both with and without symmetry
    model = read_block_mesh(geometry_filename, log=log)
    block_mesh_name_out = 'blockMeshDict.out'
    for make_symmetry in (False, True):
        model.write_block_mesh(block_mesh_name_out=block_mesh_name_out,
                               make_symmetry=make_symmetry)
    model.write_bdf(bdf_filename, model.nodes, model.hexas)
    mirror_block_mesh(geometry_filename, block_mesh_name_out)
    os.remove(block_mesh_name_out)
def load_obj_from_hdf5(hdf5_filename, custom_types_dict=None, log=None, debug=False):
    """
    Loads an hdf5 file into a dictionary-based object.

    Parameters
    ----------
    hdf5_filename : str
        the h5 filename to load
    custom_types_dict : dict[key] : function()
        the custom mapper
    log : logger; default=None
        an existing logger (one is created when None)
    debug : bool; default=False
        developer debug flag

    Returns
    -------
    model : dict
        the loaded contents of the file
    """
    check_path(hdf5_filename, 'hdf5_filename')
    log = get_logger2(log=log, debug=debug, encoding='utf-8')
    log.info('hdf5_filename = %r' % hdf5_filename)

    model = {}
    with h5py.File(hdf5_filename, 'r') as h5file:
        load_obj_from_hdf5_file(model, h5file,
                                custom_types_dict=custom_types_dict,
                                log=log, debug=debug)
    return model
def read_panair(self, infilename):
    """reads a panair input file"""
    check_path(infilename, 'panair_input_Filename')
    self.infilename = infilename

    with open(self.infilename, 'r') as panair_file:
        self.lines = panair_file.readlines()

    # strip comments, then carve the deck into its named sections
    stripped_lines = remove_comments(self.lines, self.log)
    sections, section_names = split_into_sections(stripped_lines)
    unused_groups = self.group_sections(sections, section_names)
def create_rst_from_python_files():
    """copies the quick-start python docs into the html docs tree"""
    quick_start_pydocs_dir = os.path.join(
        pkg_path, '..', 'docs', 'quick_start', 'py_docs')
    # NOTE(review): 'quck_start' looks like a typo for 'quick_start';
    # kept as-is because the docs build may rely on this exact directory name
    pydocs_dir = os.path.join(pkg_path, '..', 'docs', 'html_docs', 'quck_start')
    if not os.path.exists(pydocs_dir):
        os.makedirs(pydocs_dir)
    check_path(quick_start_pydocs_dir, 'quick_start_pydocs_dir')

    for fname in os.listdir(quick_start_pydocs_dir):
        src = os.path.join(quick_start_pydocs_dir, fname)
        dst = os.path.join(pydocs_dir, fname)
        print(src)
        print(dst)
        shutil.copyfile(src, dst)
def ugrid3d_to_nastran(ugrid_filename, bdf_filename,
                       include_shells=True, include_solids=True,
                       convert_pyram_to_penta=False, encoding=None,
                       size=16, is_double=False, log=None):
    """
    Converts a UGRID to a BDF.

    Parameters
    ----------
    ugrid_filename : str
        the input UGRID filename
    bdf_filename : str
        the output BDF filename
    include_shells : bool; default=True
        should the shells be written
    include_solids : bool; default=True
        should the solids be written
    convert_pyram_to_penta : bool; default=False
        False : NX Nastran
        True : MSC Nastran
    encoding : str; default=None
        the unicode encoding for the output deck
    size : int; {8, 16}; default=16
        the bdf write precision
    is_double : bool; default=False
        the field precision to write
    log : logger; default=None
        a logger object

    Returns
    -------
    ugrid_model : UGRID()
        the ugrid model
    """
    ugrid_model = UGRID(log=log, debug=False)
    check_path(ugrid_filename, 'ugrid_filename')
    ugrid_model.read_ugrid(ugrid_filename)
    ugrid_model.write_bdf(
        bdf_filename,
        include_shells=include_shells,
        include_solids=include_solids,
        convert_pyram_to_penta=convert_pyram_to_penta,
        encoding=encoding, size=size, is_double=is_double)
    return ugrid_model
def _add_user_points_from_csv(self, csv_points_filename: str, name: str, color,
                              point_size: int = 4):
    """
    Helper method for adding csv nodes to the gui

    Parameters
    ----------
    csv_points_filename : str
        CSV filename that defines one xyz point per line
    name : str
        name of the geometry actor
    color : List[float, float, float]
        RGB values; [0. to 1.]
    point_size : int; default=4
        the nominal point size

    Returns
    -------
    is_failed : bool
        True when the file is missing/unparsable; False on success
    """
    try:
        check_path(csv_points_filename, 'csv_points_filename')
        # read input file
        try:
            user_points = np.loadtxt(csv_points_filename,
                                     comments='#', delimiter=',')
        except ValueError:
            # np.loadtxt can't handle leading spaces?
            user_points = loadtxt_nice(csv_points_filename,
                                       comments='#', delimiter=',')
            #raise
    except ValueError as error:
        #self.log_error(traceback.print_stack(f))
        self.gui.log_error('\n' + ''.join(traceback.format_stack()))
        #traceback.print_exc(file=self.log_error)
        self.gui.log_error(str(error))
        return True  # is_failed

    self._add_user_points(user_points, name, color, csv_points_filename,
                          point_size=point_size)
    return False  # success
def load_hdf5_filename(self, hdf5_filename: str, combine: bool=True) -> None:
    """Loads an h5 file into an OP2 object

    Parameters
    ----------
    hdf5_filename : str
        the path to the an hdf5 file
    combine : bool; default=True
        runs the combine routine
    """
    check_path(hdf5_filename, 'hdf5_filename')
    self.op2_filename = hdf5_filename
    self.log.info(f'hdf5_op2_filename = {hdf5_filename!r}')

    with h5py.File(hdf5_filename, 'r') as hdf5_file:
        load_op2_from_hdf5_file(self, hdf5_file, self.log, debug=False)
    self.combine_results(combine=combine)
def read_op2(self, op2_filename: Optional[str]=None, combine: bool=True,
             build_dataframe: Optional[bool]=False,
             skip_undefined_matrices: bool=False,
             encoding: Optional[str]=None) -> None:
    """
    Starts the OP2 file reading

    Parameters
    ----------
    op2_filename : str (default=None -> popup)
        the op2_filename
    combine : bool; default=True
        True : objects are isubcase based
        False : objects are (isubcase, subtitle) based;
                will be used for superelements regardless of the option
    build_dataframe : bool (default=None -> True if in iPython, False otherwise)
        builds a pandas DataFrame for op2 objects
        None: True if in iPython, False otherwise
    skip_undefined_matrices : bool; default=False
         True : prevents matrix reading crashes
    encoding : str
        the unicode encoding (default=None; system default)
    """
    # only validate a filename when one was actually given (None -> popup)
    if op2_filename:
        check_path(op2_filename, name='op2_filename')
    mode = self.mode  # read before the heavy lifting starts
    if build_dataframe is None:
        # auto-detect: only build DataFrames when running interactively
        build_dataframe = False
        if ipython_info():
            build_dataframe = True

    if encoding is None:
        encoding = sys.getdefaultencoding()
    self.encoding = encoding
    self.skip_undefined_matrices = skip_undefined_matrices
    assert self.ask in [True, False], self.ask
    self.is_vectorized = True
    self.log.debug(f'combine={combine}')
    # NOTE(review): this block appears truncated in this chunk -- ``msg`` is
    # built here but the code that logs it / starts read_mode=1 is not visible
    msg = f'{"-"*8} reading op2 with read_mode=1 (array sizing) {"-"*8}'
def read_patran_format(patran_fmt_filename):
    """
    Reads a PATRAN results-template (``*.res_tmpl``) file such as::

        /* mscnastran_op2_nod.res_tmpl */
        KEYLOC = 0
        TYPE = scalar
        COLUMN = 1
        PRI = Normals
        SEC = Normal (X)
        ...
        TYPE = END

    Returns
    -------
    headers : defaultdict(list)
        maps each key (e.g. 'TYPE', 'COLUMN') to the list of values seen
    """
    check_path(patran_fmt_filename, 'patran_fmt_filename')
    headers = defaultdict(list)
    with open(patran_fmt_filename, 'r') as patran_file:
        for line in patran_file:
            if '=' not in line:
                continue
            # intentionally index [0]/[1]: text after a second '=' is dropped
            fields = line.strip().split('=')
            headers[fields[0].strip()].append(fields[1].strip())
    return headers
def run_docopt(argv=None):
    """
    The main function for the command line ``test_pynastran_gui`` script.
    """
    msg = "Usage:\n"
    # INPUT format may be explicitly or implicitly defined with or
    # without an output file
    msg += " test_pynastrangui [-f FORMAT] INPUT_FILENAME OUTPUT_FILENAME [--log LOG] [--test]\n"
    msg += " test_pynastrangui [-f FORMAT] INPUT_FILENAME [--log LOG] [--test]\n"
    msg += " test_pynastrangui -f FORMAT [-r] [-d] INPUT_DIRECTORY [--log LOG] [--test]\n"
    msg += " test_pynastrangui -f FORMAT [-r] [-d] [--log LOG] [--test]\n"
    msg += ' test_pynastrangui -h | --help\n'
    msg += ' test_pynastrangui -v | --version\n'
    msg += '\n'
    msg += 'Positional Arguments:\n'
    msg += ' INPUT_FILENAME path to input file\n'
    msg += ' OUTPUT_FILENAME path to output file\n'
    msg += ' INPUT_DIRECTORY path to input directory\n'
    msg += '\n'
    msg += "Options:\n"
    msg += ' -f FORMAT, --format format type (avus, cart3d, lawgs, nastran, panair,\n'
    msg += ' su2, stl, surf, tetgen, usm3d, ugrid)\n'
    msg += ' -d, --dir directory to run tests on\n'
    msg += " -r, --regenerate Resets the tests\n"
    msg += ' --log LOG debug, info, warning, error; default=debug\n'
    msg += '\n'
    msg += "Debug:\n"
    msg += " --test temporary dev mode (default=False)\n"
    msg += 'Info:\n'
    #msg += " -q, --quiet prints debug messages (default=True)\n"
    msg += ' -h, --help show this help message and exit\n'
    msg += " -v, --version show program's version number and exit\n"

    # no arguments at all -> show the usage text and stop
    if len(sys.argv) == 1:
        sys.exit(msg)

    ver = str(pyNastran.__version__)
    data = docopt(msg, argv=argv, help=True, version=ver, options_first=False)

    # directory mode: a directory was given, no input file was given,
    # or --dir was passed
    # NOTE(review): the trailing ``or data['--dir']`` in the if is redundant
    # with the last term of ``isdir``
    isdir = data['INPUT_DIRECTORY'] or not data['INPUT_FILENAME'] or data[
        '--dir']
    if isdir or data['--dir']:
        formati = data['--format'].lower()
        #print("sys.version_info[:2]) =", sys.version_info[:2])
        # per-format, per-python-version cache of previously failed cases
        failed_cases_filename = 'failed_cases_%s_%s%s.in' % (
            formati, sys.version_info[0], sys.version_info[1])
        print(failed_cases_filename)

        if data['--regenerate'] or not os.path.exists(failed_cases_filename):
            # fresh run: walk the directory for files of the right extension
            dirname = data['INPUT_DIRECTORY']
            if not os.path.exists(dirname):
                msg = 'dirname=%r does not exist\n%s' % (
                    dirname, print_bad_path(dirname))
                raise RuntimeError(msg)
            if not os.path.isdir(dirname):
                msg = 'dirname=%r is not a directory' % dirname
                raise RuntimeError(msg)

            extensions = FORMAT_TO_EXTENSION[formati]
            input_filenames = [
                get_files_of_type(dirname, extension=extension,
                                  max_size=100., limit_file='no_dig.txt')
                for extension in extensions
            ]
            # flatten the per-extension lists into one list
            input_filenames = list(
                itertools.chain.from_iterable(input_filenames))
        else:
            # rerun only the cases that failed last time
            input_filenames = get_failed_files(failed_cases_filename)
        output_filenames = [None] * len(input_filenames)
    else:
        # single-file mode
        failed_cases_filename = None
        input_filename = data['INPUT_FILENAME']
        output_filename = data['OUTPUT_FILENAME']
        check_path(input_filename, 'input_filename')
        if not os.path.isfile(input_filename):
            msg = 'input_filename=%r is not a file' % input_filename
            raise RuntimeError(msg)
        input_filenames = [input_filename]
        output_filenames = [output_filename]

        if data['--format']:
            formati = data['--format'].lower()
        else:
            # guess the format from the filename/extension
            formati = determine_format(input_filename)
        print('formati', formati)
        #assert formati == 'nastran', 'format=%r' % formati

    if data['--log']:
        log_method = data['--log'].lower()
        assert log_method in ['debug', 'info', 'warning', 'error'], 'log_method=%r' % log_method
    else:
        log_method = 'debug'
    return formati, input_filenames, output_filenames, failed_cases_filename, log_method, data[
        '--test']
def read_patran(patran_filename, fdtype='float64', idtype='int32'):
    """
    reads a patran .nod formatted file::

        KEY 2214 0
        0.000000E+00 0 3
        10.9126140E+00-.1825228E+00-.3658157E+00
        20.3790452E+00-.1844318E+00-.9068129E+00
        30.1772419E+00-.2389538E+00-.9547180E+00
        40.1056876E+00-.2347771E+00-.9662866E+00
        50.6002256E-01-.1678307E+00-.9839869E+00
    """
    # the template file next to the .nod gives the column headers
    base = os.path.splitext(patran_filename)[0]
    headers = read_patran_format(base + '.res_tmpl')

    check_path(patran_filename, 'patran_filename')
    with open(patran_filename, 'r') as patran_file:
        lines = patran_file.readlines()

    title = lines[0].strip()
    subtitle = (lines[2].strip() + ';' + lines[3].strip()).rstrip(';')

    # line 1: nnodes, max node id, the value format, ?, nvalues
    header_fields = lines[1].strip().split()
    nnodes = int(header_fields[0])
    max_node = int(header_fields[1])  # read for parity; not otherwise used
    fmt = header_fields[2]
    nvalues = int(header_fields[4])

    is_float = 'e' in fmt or 'E' in fmt or '.' in fmt
    if is_float:
        assert '-' not in fmt, 'fmt=%r' % fmt
        dtype = fdtype
    else:
        dtype = idtype
    # each fixed-width field is one character wider than the format string
    width = len(fmt) + 1
    #print('fmt=%r; width=%s' % (fmt, width))

    node_ids = []
    rows = []
    for line in lines[4:]:
        # first 8 columns are the node id; the rest are fixed-width values
        node_ids.append(line[:8])
        rows.append([line[8 + width * ivalue: 8 + width * (ivalue + 1)]
                     for ivalue in range(nvalues)])

    nids_array = np.array(node_ids, dtype=idtype)
    data_array = np.array(rows, dtype=dtype)
    return {
        'title': title,
        'subtitle': subtitle,
        'nids': nids_array,
        'data': data_array,
        'headers': headers,
    }
def run_docopt():
    """helper for ``get_inputs_docopt``"""
    msg = "Usage:\n"
    # INPUT format may be explicitly or implicitly defined with or
    # without an output file
    test = ''
    qt = ''
    if not pyNastran.is_pynastrangui_exe:
        # dev-only flags are hidden from the frozen executable
        test = ' [--test]'
        qt = ' [--qt QT] [--plugin]'

    msg += " pyNastranGUI INPUT [-f FORMAT] [-o OUTPUT]\n"
    msg += ' [-g GSCRIPT] [-p PSCRIPT]\n'
    msg += ' [-u POINTS_FNAME...] [--user_geom GEOM_FNAME...]\n'
    msg += ' [-q] [--groups] [--noupdate] [--log LOG]%s%s\n' % (test, qt)

    # You don't need to throw a -o flag
    msg += " pyNastranGUI INPUT OUTPUT [-f FORMAT] [-o OUTPUT]\n"
    msg += ' [-g GSCRIPT] [-p PSCRIPT]\n'
    msg += ' [-u POINTS_FNAME...] [--user_geom GEOM_FNAME...]\n'
    msg += ' [-q] [--groups] [--noupdate] [--log LOG]%s%s\n' % (test, qt)

    # no input/output files
    # can you ever have an OUTPUT, but no INPUT?
    msg += " pyNastranGUI [-f FORMAT] [-i INPUT] [-o OUTPUT...]\n"
    msg += ' [-g GSCRIPT] [-p PSCRIPT]\n'
    msg += ' [-u POINTS_FNAME...] [--user_geom GEOM_FNAME...]\n'
    msg += ' [-q] [--groups] [--noupdate] [--log LOG]%s%s\n' % (test, qt)

    msg += ' pyNastranGUI -h | --help\n'
    msg += ' pyNastranGUI -v | --version\n'
    msg += "\n"
    msg += "Primary Options:\n"
    msg += " -f FORMAT, --format FORMAT format type (avus, bedge, cart3d, lawgs, nastran,\n"  # plot3d,
    msg += ' openfoam_hex, openfoam_shell, openfoam_faces,\n'
    msg += " panair, stl, surf, tetgen, usm3d, ugrid, ugrid3d)\n"
    msg += " -i INPUT, --input INPUT path to input file\n"
    msg += " -o OUTPUT, --output OUTPUT path to output file\n"
    #msg += " -r XYZ, --rotation XYZ [x, y, z, -x, -y, -z] default is ???\n"
    msg += '\n'

    msg += "Secondary Options:\n"
    msg += " --groups enables groups\n"
    msg += " -g GSCRIPT, --geomscript path to geometry script file (runs before load geometry)\n"
    msg += " -p PSCRIPT, --postscript path to post script file (runs after load geometry)\n"
    msg += " --user_geom GEOM_FNAME add user specified geometry (repeatable)\n"
    msg += " -u POINTS_FNAME, --user_points add user specified points (repeatable)\n"
    msg += '\n'

    msg += "Debug:\n"
    if not pyNastran.is_pynastrangui_exe:
        msg += " --test temporary dev mode (default=False)\n"
        msg += " --qt QT sets the qt version (default=QT_API)\n"
        msg += " --plugin disables the format check\n"
    msg += " --noupdate disables the update check\n"
    msg += " --log LOG disables HTML logging; prints to the screen\n"
    msg += '\n'

    msg += "Info:\n"
    msg += " -q, --quiet prints debug messages (default=True)\n"
    msg += " -h, --help show this help message and exits\n"
    msg += " -v, --version show program's version number and exit\n"

    ver = str(pyNastran.__version__)
    data = docopt(msg, version=ver)
    #print(data)

    input_format = data['--format']

    # inputs may come from the positional INPUT and/or -i/--input
    input_filenames = []
    if data['INPUT']:
        input_filenames += [data['INPUT']]
    if data['--input']:
        input_filenames += [data['--input']]
    for input_filename in input_filenames:
        check_path(input_filename, 'input file')

    output_filenames = []
    if data['OUTPUT']:
        output_filenames += [data['OUTPUT']]
    if data['--output']:
        output_filenames += data['--output']
    for output_filename in output_filenames:
        check_path(output_filename, 'output_filename')
    debug = not (data['--quiet'])

    # infer the format from the first input file when not given explicitly
    if input_filenames and input_format is None:
        input_format = determine_format(input_filenames[0])

    # NOTE(review): ``'--plugin' in data`` tests key *presence*, which docopt
    # guarantees whenever the option is defined in the usage string -- so in
    # dev mode this is always True and the format check below is always
    # skipped; ``if data['--plugin']:`` is presumably what was intended
    plugin = False
    if '--plugin' in data:
        plugin = True
    if not plugin:
        # None is for custom geometry
        allowed_formats = [
            'nastran', 'stl', 'cart3d', 'tecplot', 'ugrid', 'ugrid3d', 'panair',
            #'plot3d',
            'surf', 'lawgs', 'degen_geom', 'shabp', 'avus', 'fast', 'abaqus',
            'usm3d', 'bedge', 'su2', 'tetgen',
            'openfoam_hex', 'openfoam_shell', 'openfoam_faces', 'obj', 'avl',
            None,
        ]
        assert input_format in allowed_formats, 'format=%r is not supported' % input_format

    geom_script = data['--geomscript']
    if geom_script:
        check_path(geom_script, name='geom_script')

    post_script = data['--postscript']
    if post_script:
        check_path(post_script, 'post_script')

    user_points = data['--user_points']
    user_geom = data['--user_geom']

    if data['--qt'] is not None:
        qt = data['--qt'].lower()
        assert qt in ['pyside', 'pyqt4', 'pyqt5', 'pyside2'], 'qt=%r' % qt
        os.environ.setdefault('QT_API', qt)

    # dump the parsed arguments for debugging
    for key, value in sorted(data.items()):
        print(key, value)
    #print("shots", shots)

    is_groups = data['--groups']
    no_update = data['--noupdate']
    test = ''
    if '--test' in data:
        test = data['--test']
    #assert data['--console'] == False, data['--console']
    return (input_format, input_filenames, output_filenames,
            geom_script, post_script, debug,
            user_points, user_geom, is_groups, no_update, data['--log'], test)
def _update_argparse_argdict(argdict):
    """converts to the pyNastranGUI argument format

    Parameters
    ----------
    argdict : dict[str, Any]
        the parsed argparse arguments as a dict

    Returns
    -------
    argdict : dict[str, Any]
        the same dict, normalized in place

    Fixes relative to the original:
     - the format was validated before ``input_format`` was ever assigned
       (NameError); it is now resolved first
     - ``geom_script``/``post_script`` were referenced without being assigned
       (NameError); they are now read from ``argdict``
     - ``'plugin' in argdict`` tested key *presence* (always True), which
       unconditionally disabled the format check; the value is tested instead
    """
    argdict['debug'] = not argdict['quiet']
    del argdict['quiet']
    swap_key(argdict, 'groups', 'is_groups')
    swap_key(argdict, 'points_fname', 'user_points')

    # inputs may come from the positional INPUT and/or -i/--input
    input_filenames = []
    if isinstance(argdict['input'], str):
        input_filenames += [argdict['input']]
    if isinstance(argdict['INPUT'], str):
        input_filenames += [argdict['INPUT']]
    del argdict['INPUT']
    argdict['input'] = input_filenames

    output_filenames = []
    if isinstance(argdict['output'], str):
        output_filenames += [argdict['output']]
    if isinstance(argdict['OUTPUT'], str):
        output_filenames += [argdict['OUTPUT']]
    del argdict['OUTPUT']
    argdict['output'] = output_filenames

    for output_filename in output_filenames:
        check_path(output_filename, name='output_filename')
    for input_filename in input_filenames:
        check_path(input_filename, name='input file')

    # bugfix: test the flag's value, not the key's presence
    plugin = bool(argdict.get('plugin', False))

    # bugfix: resolve the format *before* validating it
    input_format = argdict['format']
    if input_filenames and input_format is None:
        input_format = determine_format(input_filenames[0])
        argdict['format'] = input_format

    if not plugin:
        # None is for custom geometry
        allowed_formats = [
            'nastran', 'stl', 'cart3d', 'tecplot', 'ugrid', 'ugrid3d', 'panair',
            #'plot3d',
            'surf', 'lawgs', 'degen_geom', 'shabp', 'avus', 'fast', 'abaqus',
            'usm3d', 'bedge', 'su2', 'tetgen',
            'openfoam_hex', 'openfoam_shell', 'openfoam_faces', 'obj', 'avl',
            None,
        ]
        assert input_format in allowed_formats, 'format=%r is not supported' % input_format

    # bugfix: the scripts are read from argdict before being checked
    geom_script = argdict['geomscript']
    if geom_script:
        check_path(geom_script, name='geomscript')
    post_script = argdict['postscript']
    if post_script:
        check_path(post_script, name='postscript')

    if argdict['qt'] is not None:
        qt = argdict['qt'].lower()
        assert qt in ['pyside', 'pyqt4', 'pyqt5', 'pyside2'], 'qt=%r' % qt
        os.environ.setdefault('QT_API', qt)

    #print(argdict)
    return argdict
def _update_argparse_argdict(argdict):
    """converts to the pyNastranGUI argument format"""
    argdict['debug'] = not argdict['quiet']
    del argdict['quiet']

    # the groups flag is spelled differently depending on the default
    if not GROUPS_DEFAULT:
        swap_key(argdict, 'nogroups', 'is_groups')
    else:
        argdict['is_groups'] = argdict['groups']
    swap_key(argdict, 'points_fname', 'user_points')

    # merge the positional and optional input/output arguments
    input_filenames = _add_inputs_outputs(argdict['INPUT'], argdict['input'],
                                          word='input')
    del argdict['INPUT']
    argdict['input'] = input_filenames

    output_filenames = _add_inputs_outputs(argdict['OUTPUT'], argdict['output'],
                                           word='output')
    del argdict['OUTPUT']
    argdict['output'] = output_filenames

    for output_filename in output_filenames:
        check_path(output_filename, name='output_filename')
    for input_filename in input_filenames:
        check_path(input_filename, name='input file')

    # NOTE(review): ``'plugin' in argdict`` tests key presence, which is
    # always True when argparse defines the option -- so ``plugin`` is
    # effectively always True and the format check below never runs.
    # That also masks the fact that the check loop reads ``input_formats``
    # after it has been ``del``'d (a latent NameError) -- both should be
    # fixed together.
    plugin = False
    if 'plugin' in argdict:
        plugin = True

    formats = argdict['format']
    if input_filenames and formats is None:
        # no explicit format: infer one per input file
        input_formats = []
        for input_filenamei in input_filenames:
            if isinstance(input_filenamei, str):
                formati = determine_format(input_filenamei)
            else:  # pragma: no cover
                raise TypeError('input_filenamei=%s type=%s' % (
                    input_filenamei, type(input_filenamei)))
            input_formats.append(formati)
        #input_formats = [determine_format(input_filenamei) for input_filenamei in input_filenames]
        argdict['format'] = input_formats
        del input_formats
    elif formats:
        # flatten the possibly-nested explicit format list
        input_formats = []
        for formati in formats:
            if isinstance(formati, str):
                input_formats.append(formati)
            else:
                input_formats.extend(formati)
        argdict['format'] = input_formats
        del input_formats
    del formats

    if not plugin:
        # None is for custom geometry
        allowed_formats = [
            'nastran', 'stl', 'cart3d', 'tecplot', 'ugrid', 'ugrid3d', 'panair',
            #'plot3d',
            'surf', 'lawgs', 'degen_geom', 'shabp', 'avus', 'fast', 'abaqus',
            'usm3d', 'bedge', 'su2', 'tetgen',
            'openfoam_hex', 'openfoam_shell', 'openfoam_faces', 'obj', 'avl',
            None,
        ]
        for input_format in input_formats:
            assert input_format in allowed_formats, 'format=%r is not supported' % input_format

    if argdict['geomscript']:
        geom_script = argdict['geomscript']
        check_path(geom_script, name='geomscript')
    if argdict['postscript']:
        post_script = argdict['postscript']
        check_path(post_script, name='postscript')

    if argdict['qt'] is not None:
        qt = argdict['qt'].lower()
        assert qt in ['pyside', 'pyqt4', 'pyqt5', 'pyside2'], 'qt=%r' % qt
        os.environ.setdefault('QT_API', qt)

    #if argdict['input'] is None:
        #argdict['input'] = []

    #inputs = {
        #'format' : input_format,
        #'input' : input_filename,
        #'output' : output_filename,
        #'debug' : debug,
        #'geomscript' : geom_script,
        #'postscript' : post_script,
        #'user_points' : user_points,
        #'user_geom' : user_geom,
        #'is_groups' : is_groups,
        #'log' : log,
        #'test' : test,
    #}

    # broadcast a single format across multiple input files
    formats = argdict['format']
    ninput_files = len(input_filenames)
    if formats:
        if isinstance(formats, str):
            formats = [formats]
        nformats = len(formats)
        if nformats == 1 and ninput_files > 1:
            formats = formats * ninput_files
            argdict['format'] = formats
        # NOTE(review): ``nformats`` is not updated after the broadcast above,
        # so the 1-format/many-files path still raises here -- presumably this
        # was meant to be an ``elif``; confirm against the callers
        if nformats != ninput_files:
            msg = ('nformats=%s formats=%s\n'
                   'ninput_files=%s input_filenames=%s' % (
                       nformats, formats, ninput_files, input_filenames))
            raise RuntimeError(msg)
    return argdict
def read_cogsg(self, cogsg_filename, stop_after_header=False):
    """
    Reads the *.cogsg file

    Parameters
    ----------
    cogsg_filename : str
        the path to the big-endian binary *.cogsg file
    stop_after_header : bool; default=False
        return just the parsed header dict instead of reading the volume

    Returns
    -------
    nodes : (N, 3) float ndarray
        the nodes
    tet_elements : ???
        ???
    """
    check_path(cogsg_filename, 'cogsg file')
    with open(cogsg_filename, 'rb') as cogsg_file:
        # nelements * 4 * 4 + 32 ???
        dummy = cogsg_file.read(4)  # 1022848
        dummy_int, = unpack('>i', dummy)  # '>' = big-endian throughout
        #assert dummy_int == 1022848, 'dummy_int = %s' % dummy_int

        # file header: 6 ints + 1 float/double depending on precision
        if self.precision == 'single':
            sformat = '>6if'
            nbytes = 6 * 4 + 4
        elif self.precision == 'double':
            sformat = '>6id'
            nbytes = 6 * 4 + 8
        else:
            raise RuntimeError('invalid precision format')
        data = cogsg_file.read(nbytes)

        (inew, ne, npoints, nb, npv, nev, tc) = unpack(sformat, data)
        self.header = {
            'dummy': dummy_int,
            'inew': inew,  # dummy int
            'nElements': ne,  # nc; number of tets
            'nPoints': npoints,  # npo; number of grid points including nbn
            'nBoundPts': nb,  # nbn; number of boundary points including nbc
            'nViscPts': npv,  # npv; number of viscous points (=0 for Euler)
            'nViscElem': nev,  # ncv; number of viscous cells (=0 for Euler)
            'tc': tc,  # dummy double
            # nbc
        }
        if stop_after_header:
            return self.header
        self.log.info(self.header)

        # nbn nodes
        #
        #del ne, np

        # the live path: delegate the volume read and return immediately
        if 1:
            nodes, tets = self._read_cogsg_volume(cogsg_file)
            return nodes, tets
        #else:
        # NOTE(review): everything below is unreachable dead code (guarded
        # off by the ``if 1: return`` above) and contains obvious bugs,
        # e.g. ``tets.reshape((tets, 4))`` / ``nodes_vol.reshape((tets, 3))``
        # use the array itself as a dimension -- do not resurrect as-is
        #----------------------------------------------------------------------
        # elements
        # face elements
        nnodes_per_face = 3
        nfaces = ne

        if nfaces > 0:
            data_length = nnodes_per_face * nfaces
            str_format = '>' + 'i' * data_length
            data = cogsg_file.read(4 * data_length)
            faces = unpack(str_format, data)
            faces = np.array(faces)
            faces = faces.reshape((nfaces, 3))
        else:
            faces = None

        #----------------------------------------------------------------------
        # nodes
        nbound_pts = nb
        nnodes = nbound_pts
        #data_length = nnodes
        if self.precision == 'double':
            data_length = 8 * nnodes
        elif self.precision == 'single':
            data_length = 4 * nnodes
        else:
            raise RuntimeError('precision = %r' % self.precision)

        skip_nodes = False
        if skip_nodes:
            t = cogsg_file.tell()
            cogsg_file._goto(t + data_length * 3)
            nodes = None
        else:
            if self.precision == 'double':
                str_format = '>%sd' % nnodes
                unused_node_array_format = 'float64'
            elif self.precision == 'single':
                str_format = '>%sd' % nnodes
                unused_node_array_format = 'float32'
            else:
                raise RuntimeError('precision = %r' % self.precision)

            data = cogsg_file.read(3 * data_length)
            assert self.precision == 'single', self.precision
            nodes = np.frombuffer(data, '>4f').reshape(3, nnodes).T.copy()
            #nodes = np.frombuffer(data, '>4f').reshape(nnodes, 3).copy()

        cogsg_file.read(nnodes * 3 * 8)  # 3 -> xyz, 8 -> double precision ???

        #----------------------------------------------------------------------
        # elements
        # boundary layer elements
        nnodes_per_tet = 4
        ntets = nev

        if ntets:
            data_length = nnodes_per_tet * ntets
            str_format = '>' + 'i' * data_length
            data = cogsg_file.read(4 * data_length)
            tets = unpack(str_format, data)
            tets = np.array(tets)
            tets = tets.reshape((tets, 4))

        #----------------------------------------------------------------------
        # volume points
        nnodes = npv

        str_format = '>%si' % nnodes
        data = cogsg_file.read(4 * nnodes)

        nodes_vol = unpack(str_format, data)
        nodes_vol = np.array(nodes_vol)
        nodes_vol = nodes_vol.reshape((tets, 3))
def load_openfoam_geometry(self, openfoam_filename, mesh_3d, name='main',
                           plot=True, **kwargs):
    """
    Loads an OpenFOAM mesh into the GUI.

    Parameters
    ----------
    openfoam_filename : str
        the path to the blockMeshDict (or, for 'faces', a file whose
        directory also holds 'points'/'faces'/'boundary')
    mesh_3d : str; {'hex', 'shell', 'faces'}
        which representation of the mesh to build
    name : str; default='main'
        the model name passed to the results loader
    plot : bool; default=True
        finish the results IO (True) or just stash the cases (False)
    """
    model_name = name
    #key = self.caseKeys[self.iCase]
    #case = self.resultCases[key]

    #skip_reading = self.remove_old_openfoam_geometry(openfoam_filename)
    skip_reading = self.gui._remove_old_geometry(openfoam_filename)
    if skip_reading:
        return
    log = self.gui.log
    reset_labels = True
    #self.log.info('self.modelType=%s' % self.modelType)
    log.info('mesh_3d = %s' % mesh_3d)
    if mesh_3d in ['hex', 'shell']:
        model = BlockMesh(log=log, debug=False)  # log=self.log, debug=False
    elif mesh_3d == 'faces':
        model = BlockMesh(log=log, debug=False)  # log=self.log, debug=False
        boundary = Boundary(log=log, debug=False)

    self.gui.modelType = 'openfoam'
    #self.modelType = model.modelType
    log.info('openfoam_filename = %s' % openfoam_filename)

    is_face_mesh = False
    if mesh_3d == 'hex':
        is_3d_blockmesh = True
        is_surface_blockmesh = False
        (nodes, hexas, quads, names, patches) = model.read_openfoam(openfoam_filename)
    elif mesh_3d == 'shell':
        is_3d_blockmesh = False
        is_surface_blockmesh = True
        (nodes, hexas, quads, names, patches) = model.read_openfoam(openfoam_filename)
    elif mesh_3d == 'faces':
        is_3d_blockmesh = False
        is_surface_blockmesh = False
        is_face_mesh = True
        #(nodes, hexas, quads, names, patches) = model.read_openfoam(openfoam_filename)
    else:
        raise RuntimeError(mesh_3d)

    tris = []

    # element count + (for 'faces') the actual geometry read
    if mesh_3d == 'hex':
        self.gui.nelements = len(hexas)
    elif mesh_3d == 'shell':
        self.gui.nelements = len(quads)
    elif mesh_3d == 'faces':
        # the face mesh lives in sibling files next to the input file
        dirname = os.path.dirname(openfoam_filename)
        point_filename = os.path.join(dirname, 'points')
        face_filename = os.path.join(dirname, 'faces')
        boundary_filename = os.path.join(dirname, 'boundary')
        check_path(face_filename, 'face_filename')
        check_path(point_filename, 'point_filename')
        check_path(boundary_filename, 'boundary_filename')

        hexas = None
        patches = None
        nodes, quads, names = boundary.read_openfoam(
            point_filename, face_filename, boundary_filename)
        self.gui.nelements = len(quads) + len(tris)
    else:
        raise RuntimeError(mesh_3d)

    self.gui.nnodes = len(nodes)
    log = self.gui.log
    log.debug("nnodes = %s" % self.gui.nnodes)
    log.debug("nelements = %s" % self.gui.nelements)

    grid = self.gui.grid
    grid.Allocate(self.gui.nelements, 1000)

    self.gui.nid_map = {}

    assert nodes is not None
    nnodes = nodes.shape[0]

    # shift the model so its min corner sits at the origin
    xmax, ymax, zmax = nodes.max(axis=0)
    xmin, ymin, zmin = nodes.min(axis=0)
    nodes -= np.array([xmin, ymin, zmin])
    log.info('xmax=%s xmin=%s' % (xmax, xmin))
    log.info('ymax=%s ymin=%s' % (ymax, ymin))
    log.info('zmax=%s zmin=%s' % (zmax, zmin))
    dim_max = max(xmax - xmin, ymax - ymin, zmax - zmin)

    #dim_max = (mmax - mmin).max()
    assert dim_max > 0

    # breaks the model without subracting off the delta
    #self.update_axes_length(dim_max)
    self.gui.create_global_axes(dim_max)

    #print('is_face_mesh=%s is_3d_blockmesh=%s is_surface_blockmesh=%s' % (
        #is_face_mesh, is_3d_blockmesh, is_surface_blockmesh))

    # a debug BDF mirror of the mesh is written alongside the VTK build
    with open('points.bdf', 'w') as bdf_file:
        bdf_file.write('CEND\n')
        bdf_file.write('BEGIN BULK\n')

        unames = unique(names)
        for pid in unames:
            bdf_file.write('PSHELL,%i,1,0.1\n' % pid)
        bdf_file.write('MAT1,1,1.0e7,,0.3\n')

        if is_face_mesh:
            points = vtk.vtkPoints()
            points.SetNumberOfPoints(self.gui.nnodes)

            unodes = unique(quads)
            unodes.sort()
            # should stop plotting duplicate nodes
            for inode, node in enumerate(nodes):
                if inode in unodes:
                    bdf_file.write('GRID,%i,,%s,%s,%s\n' % (
                        inode + 1, node[0], node[1], node[2], ))
                points.InsertPoint(inode, node)
        else:
            points = numpy_to_vtk_points(nodes)

        #elements -= 1
        normals = None
        if is_3d_blockmesh:
            nelements = hexas.shape[0]
            cell_type_hexa8 = vtkHexahedron().GetCellType()
            create_vtk_cells_of_constant_element_type(
                grid, hexas, cell_type_hexa8)
        elif is_surface_blockmesh:
            nelements = quads.shape[0]
            cell_type_quad4 = vtkQuad().GetCellType()
            create_vtk_cells_of_constant_element_type(
                grid, quads, cell_type_quad4)
        elif is_face_mesh:
            elems = quads
            nelements = quads.shape[0]
            nnames = len(names)
            normals = zeros((nelements, 3), dtype='float32')
            if nnames != nelements:
                msg = 'nnames=%s nelements=%s names.max=%s names.min=%s' % (
                    nnames, nelements, names.max(), names.min())
                raise RuntimeError(msg)
            for eid, element in enumerate(elems):
                # faces are padded to 4 nodes with -1; a pad marks a triangle
                ineg = where(element == -1)[0]

                nnodes = 4
                # NOTE(review): ``if ineg:`` on a numpy array raises
                # ValueError when ineg has more than one entry and relies on
                # deprecated truthiness when it has one -- ``if len(ineg):``
                # is presumably what was intended; confirm before changing
                if ineg:
                    nnodes = ineg.max()

                #pid = 1
                pid = names[eid]
                if nnodes == 3:  # triangle!
                    bdf_file.write('CTRIA3,%i,%i,%i,%i,%i\n' % (
                        eid + 1, pid,
                        element[0] + 1, element[1] + 1, element[2] + 1))
                    elem = vtkTriangle()
                    a = nodes[element[1], :] - nodes[element[0], :]
                    b = nodes[element[2], :] - nodes[element[0], :]
                    n = cross(a, b)
                    normals[eid, :] = n / norm(n)

                    elem.GetPointIds().SetId(0, element[0])
                    elem.GetPointIds().SetId(1, element[1])
                    elem.GetPointIds().SetId(2, element[2])
                    grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds())
                elif nnodes == 4:
                    bdf_file.write('CQUAD4,%i,%i,%i,%i,%i,%i\n' % (
                        eid + 1, pid,
                        element[0] + 1, element[1] + 1,
                        element[2] + 1, element[3] + 1))
                    # quad normal from the diagonals
                    a = nodes[element[2], :] - nodes[element[0], :]
                    b = nodes[element[3], :] - nodes[element[1], :]
                    n = cross(a, b)
                    normals[eid, :] = n / norm(n)

                    elem = vtkQuad()
                    elem.GetPointIds().SetId(0, element[0])
                    elem.GetPointIds().SetId(1, element[1])
                    elem.GetPointIds().SetId(2, element[2])
                    elem.GetPointIds().SetId(3, element[3])
                    grid.InsertNextCell(elem.GetCellType(), elem.GetPointIds())
                else:
                    raise RuntimeError('nnodes=%s' % nnodes)
        else:
            msg = 'is_surface_blockmesh=%s is_face_mesh=%s; pick one' % (
                is_surface_blockmesh, is_face_mesh)
            raise RuntimeError(msg)
        bdf_file.write('ENDDATA\n')

    self.gui.nelements = nelements
    grid.SetPoints(points)
    grid.Modified()

    self.gui.scalar_bar_actor.VisibilityOn()
    self.gui.scalar_bar_actor.Modified()

    self.gui.isubcase_name_map = {0: ['OpenFoam BlockMeshDict', '']}
    cases = OrderedDict()
    ID = 1

    #print("nElements = ",nElements)
    if mesh_3d == 'hex':
        form, cases, node_ids, element_ids = self._fill_openfoam_case(
            cases, ID, nodes, nelements, patches, names, normals,
            is_surface_blockmesh)
    elif mesh_3d == 'shell':
        form, cases, node_ids, element_ids = self._fill_openfoam_case(
            cases, ID, nodes, nelements, patches, names, normals,
            is_surface_blockmesh)
    elif mesh_3d == 'faces':
        if len(names) == nelements:
            is_surface_blockmesh = True
        form, cases, node_ids, element_ids = self._fill_openfoam_case(
            cases, ID, nodes, nelements, patches, names, normals,
            is_surface_blockmesh)
    else:
        raise RuntimeError(mesh_3d)

    self.gui.node_ids = node_ids
    self.gui.element_ids = element_ids

    if plot:
        self.gui._finish_results_io2(model_name, form, cases,
                                     reset_labels=reset_labels)
    else:
        self.gui._set_results(form, cases)
def read_fgrid(self, fgrid_filename, unused_dimension_flag=3):
    """
    Extracts the nodes, tris, bcs, and tets from an FGRID file.

    Parameters
    ----------
    fgrid_filename : str
        the FGRID filename to read
    unused_dimension_flag : int; default=3
        unused; kept for interface compatibility

    Fills ``self.nodes`` (nnodes, 3), and — when present in the file —
    ``self.tris`` (ntris, 3), ``self.bcs`` (ntris,), ``self.tets`` (ntets, 4).
    """
    check_path(fgrid_filename, 'fgrid_filename')
    with open(fgrid_filename, 'r') as fgrid:
        # header: nnodes ntris ntets
        nnodes, ntris, ntets = fgrid.readline().split()
        nnodes = int(nnodes)
        ntris = int(ntris)
        ntets = int(ntets)

        self.log.info('nnodes=%s ntris=%s ntets=%s' % (nnodes, ntris, ntets))
        assert nnodes > 0, nnodes

        # The coordinates are stored transposed on disk:
        # all x values, then all y values, then all z values
        # (xxx..., yyy..., zzz...) instead of interleaved (x, y, z) triples.
        xyz = []
        nfloats = 0
        while nfloats < nnodes * 3:
            sline = fgrid.readline().split()
            nfloats += len(sline)
            xyz.extend(sline)
        nodes = np.array(xyz, dtype='float32')
        assert nfloats == nnodes * 3, 'nfloats=%s nnodes*3=%s' % (
            nfloats, nnodes * 3)
        assert nodes.max() > 0, nodes.max()

        # reshape (3, nnodes) -> transpose -> contiguous (nnodes, 3) array
        self.nodes = nodes.reshape(
            (3, nnodes)).T.ravel().reshape(nnodes, 3)

        #---------------------------------------------------------------------
        # triangles: ntris*3 node ids
        tris = []
        nints = 0
        while nints < ntris * 3:
            sline = fgrid.readline().split()
            nints += len(sline)
            tris.extend(sline)
        if tris:
            self.tris = np.array(tris, dtype='int32').reshape(ntris, 3)

        #---------------------------------------------------------------------
        # boundary conditions: one id per triangle
        nints = 0
        bcs = []
        while nints < ntris:
            sline = fgrid.readline().split()
            nints += len(sline)
            bcs.extend(sline)
        if bcs:
            self.bcs = np.array(bcs, dtype='int32')

        #---------------------------------------------------------------------
        # tetrahedra: ntets*4 node ids
        nints = 0
        tets = []
        while nints < ntets * 4:
            sline = fgrid.readline().split()
            nints += len(sline)
            tets.extend(sline)
        # bug fix: this previously tested ``if bcs:``, so a grid with tets
        # but no tris (and thus no bcs) never assigned self.tets
        if tets:
            self.tets = np.array(tets, dtype='int32').reshape((ntets, 4))
def equivalence_ugrid3d_and_bdf_to_bdf(ugrid_filename, bdf_filename,
                                       pshell_pids_to_remove,
                                       tol=0.01, renumber=True):
    """
    Merges a UGRID3D (*.ugrid) with a BDF and exports a BDF that is
    equivalenced and renumbered.

    Parameters
    ----------
    ugrid_filename : str
        the AFLR3/UGrid3d filename
    bdf_filename : str
        the BDF filename
    pshell_pids_to_remove : List[int, ...]
        property ids to skip: no PSHELL card is written for them and the
        shell elements referencing them are dropped
    tol : float; default=0.01
        the equivalence tolerance
    renumber : bool; default=True
        calls ``bdf_renumber`` to renumber the output BDF model

    Returns
    -------
    out_bdf_filename : str
        the output BDF filename
    """
    print('equivalence_ugrid3d_and_bdf_to_bdf - bdf_filename=%s' % bdf_filename)
    print('equivalence_ugrid3d_and_bdf_to_bdf - ugrid_filename=%s' % ugrid_filename)
    check_path(ugrid_filename, 'ugrid_filename')
    base = os.path.splitext(bdf_filename)[0]
    #bdf_merged_filename = base + '_merged.bdf'
    bdf_equivalence_filename = base + '_equivalence.bdf'
    bdf_renumber_filename = base + '_renumber.bdf'

    update_merge = True
    if update_merge:
        ugrid_model = UGRID(log=None, debug=False)
        ugrid_model.read_ugrid(ugrid_filename)

        bdf_model = read_bdf(bdf_filename, xref=False)
        #bdf_model.write_bdf(bdf_merged_filename, interspersed=False, enddata=False)

        # bug fix: ``tol = 0.01`` used to be reassigned here, silently
        # discarding the caller's tolerance; the parameter is now honored
        nid0 = max(bdf_model.nodes) + 1   # new node ids start at max+1
        nid_offset = nid0 - 1   # node_ids are 1-based, so we must offset them
        eid = max(bdf_model.elements) + 1

        # add the UGrid nodes as GRID cards (in the basic coordinate system)
        cp = None
        for nid, node in enumerate(ugrid_model.nodes):
            #assert len(node) == 3, node
            card = ['GRID', nid + nid0, cp] + list(node)
            bdf_model.add_card(card, 'GRID', is_list=True)
            #f.write(print_card_double(card))

        # dummy properties/material so the merged model cross-references
        pid_solid = 100
        mid = 1
        pids = unique(ugrid_model.pids)
        for pidi in pids:
            if pidi not in pshell_pids_to_remove:
                card = ['PSHELL', pidi, mid, 0.1]
                bdf_model.add_card(card, 'PSHELL', is_list=True)

        card = ['PSOLID', pid_solid, mid]
        bdf_model.add_card(card, 'PSOLID', is_list=True)

        card = ['MAT1', mid, 3.0e7, None, 0.3]
        bdf_model.add_card(card, 'MAT1', is_list=True)

        # shells keep their per-element pid; filtered pids are dropped
        shells = [
            ('CQUAD4', ugrid_model.quads),
            ('CTRIA3', ugrid_model.tris),
        ]
        for card_type, card_nodes in shells:
            if card_nodes.shape[0]:
                for pid, nodes in zip(ugrid_model.pids, card_nodes + nid_offset):
                    if pid not in pshell_pids_to_remove:
                        card = [card_type, eid, pid, ] + list(nodes)
                        bdf_model.add_card(card, card_type, is_list=True)
                        eid += 1

        # all solids share the single dummy PSOLID
        solids = [
            ('CTETRA', ugrid_model.tets),
            ('CPYRAM', ugrid_model.penta5s),
            ('CPENTA', ugrid_model.penta6s),
            ('CHEXA', ugrid_model.hexas),
        ]
        for card_type, card_nodes in solids:
            if card_nodes.shape[0]:
                for nodes in card_nodes + nid_offset:
                    card = [card_type, eid, pid_solid, ] + list(nodes)
                    bdf_model.add_card(card, card_type, is_list=True)
                    eid += 1

        # tol = min_edge_length / 2.0  # TODO: remove this...
        bdf_model.write_bdf('model_join.bdf', interspersed=False)
        bdf_model.cross_reference()
        bdf_equivalence_nodes(bdf_model, bdf_equivalence_filename, tol,
                              renumber_nodes=False, neq_max=10, xref=False)

    if renumber:
        starting_ids_dict = {
            'cid': 1,
            'nid': 1,
            'eid': 1,
            'pid': 1,
            'mid': 1,
        }
        bdf_renumber(bdf_equivalence_filename, bdf_renumber_filename, size=16,
                     is_double=False, starting_id_dict=starting_ids_dict)
        #os.remove(bdf_equivalence_filename)
        out_bdf_filename = bdf_renumber_filename
    else:
        out_bdf_filename = bdf_equivalence_filename

    #os.remove(bdf_merged_filename)
    #os.remove(bdf_renumber_filename)
    return out_bdf_filename
{nx, msc, autodesk, optistruct, nasa95} encoding : str the unicode encoding (default=None; system default) Returns ------- model : OP2() an OP2 object .. todo:: creates the OP2 object without all the read methods .. note :: this method will change in order to return an object that does not have so many methods """ if op2_filename: check_path(op2_filename, name='op2_filename') if load_geometry: # TODO: kwargs support for methods kw = {'op2_filename': op2_filename, 'combine': combine, 'subcases': subcases, 'exclude_results': exclude_results, 'include_results': include_results, 'validate': True, 'xref': True, 'build_dataframe': build_dataframe, 'skip_undefined_matrices': skip_undefined_matrices, 'mode': mode, 'log': log, 'debug': debug, 'encoding': encoding}
def equivalence_ugrid3d_and_bdf_to_bdf(ugrid_filename: str,
                                       bdf_filename: str,
                                       pshell_pids_to_remove: List[int],
                                       tol: float = 0.01,
                                       renumber: bool = True,
                                       log: Optional[SimpleLogger] = None):
    """
    Merges a UGRID3D (*.ugrid) with a BDF and writes out a single BDF
    whose coincident nodes have been equivalenced (and optionally
    renumbered).

    Parameters
    ----------
    ugrid_filename : str
        the AFLR3/UGrid3d filename
    bdf_filename : str
        the BDF filename
    pshell_pids_to_remove : List[int, ...]
        shell property ids excluded from the merge
    tol : float; default=0.01
        the node-equivalence tolerance
    renumber : bool; default=True
        renumber the merged model with ``bdf_renumber``
    log : SimpleLogger; default=None
        the logger to use (one is created when None)

    Returns
    -------
    out_bdf_filename : str
        the output BDF filename
    """
    log = get_logger2(log, debug=True)
    log.info(
        f'equivalence_ugrid3d_and_bdf_to_bdf - bdf_filename={bdf_filename}')
    log.info(
        f'equivalence_ugrid3d_and_bdf_to_bdf - ugrid_filename={ugrid_filename}'
    )
    check_path(ugrid_filename, 'ugrid_filename')

    # derive the intermediate/output filenames from the input BDF name
    root = os.path.splitext(bdf_filename)[0]
    #bdf_merged_filename = root + '_merged.bdf'
    bdf_equivalence_filename = root + '_equivalence.bdf'
    bdf_renumber_filename = root + '_renumber.bdf'

    update_merge = True
    if update_merge:
        # merge the UGrid model into the BDF, then collapse duplicate nodes
        merged_model = _update_merge(ugrid_filename, bdf_filename,
                                     pshell_pids_to_remove, tol=tol, log=log)
        bdf_equivalence_nodes(merged_model, bdf_equivalence_filename, tol,
                              renumber_nodes=False, neq_max=10,
                              xref=False, log=log)

    if not renumber:
        out_bdf_filename = bdf_equivalence_filename
    else:
        first_ids = {
            'cid': 1,
            'nid': 1,
            'eid': 1,
            'pid': 1,
            'mid': 1,
        }
        bdf_renumber(bdf_equivalence_filename, bdf_renumber_filename,
                     size=16, is_double=False,
                     starting_id_dict=first_ids, log=log)
        #os.remove(bdf_equivalence_filename)
        out_bdf_filename = bdf_renumber_filename

    #os.remove(bdf_merged_filename)
    #os.remove(bdf_renumber_filename)

    # clean up the scratch file written by _update_merge
    os.remove('model_join.bdf')
    return out_bdf_filename
def read_openfoam(self, point_filename, face_filename, boundary_filename):
    """
    Reads an OpenFOAM points/faces/boundary file set.

    Parameters
    ----------
    point_filename : str
        the OpenFOAM 'points' filename
    face_filename : str
        the OpenFOAM 'faces' filename
    boundary_filename : str
        the OpenFOAM 'boundary' filename

    Returns
    -------
    nodes : ndarray
        the node locations read from the point file
    quads : ndarray
        the boundary faces (node indices)
    names : (nfaces,) int ndarray
        per-face integer patch id (1-based; indexes the local
        ``snames`` patch-name list)
    """
    check_path(face_filename, 'face_filename')
    check_path(point_filename, 'point_filename')
    check_path(boundary_filename, 'boundary_filename')
    #self.log.info('face_filename = %r' % face_filename)
    #self.log.info('point_filename = %r' % point_filename)
    #self.log.info('boundary_filename = %r' % boundary_filename)

    assert 'faces' in face_filename, face_filename
    assert 'points' in point_filename, point_filename
    assert 'boundary' in boundary_filename, boundary_filename

    #print('starting Boundary')
    point_file = PointFile(log=self.log, debug=self.debug)
    #from PyFoam.RunDictionary.ParsedBlockMeshDict import ParsedBlockMeshDict
    #self.log.info(dir(f))

    face_file = FaceFile(log=self.log, debug=self.debug)

    boundary_file = BoundaryFile(log=self.log, debug=False)
    boundaries = boundary_file.read_boundary_file(boundary_filename)

    #if 0:
        #foam = FoamFile(boundary_filename, log=p.log)
        #print('getting lines')
        #blines = foam.read_foam_file()
        #print('converting')
        #bd = convert_to_dict(foam, blines, debug=True)
        #del blines

    self.log.info('getting npoints')
    #pself.log.info(write_dict(d))

    #-------------------------------------------
    # count number of faces by looking at the boundary info
    # so we can allocate faces2
    nfaces2 = 0
    ifaces_to_read = []
    #f_boundary_faces = open('boundary_faces.py', 'wb')

    # each boundary entry is indexed as:
    # type patch;  # 0
    # nFaces nFaces;  # 1
    # startFace 777700;  # 2
    for name, boundary in boundaries.items():
        self.log.info('boundary[%s] = %s' % (name, boundary))
        # NOTE(review): nfacesi is used as-is here but int()-cast in the
        # second loop below — presumably read_boundary_file already
        # returns an int; confirm
        nfacesi = boundary[1]
        startface = int(boundary[2])
        nfaces2 += nfacesi
        # faces owned by this patch are contiguous, starting at startFace
        new_faces = list(np.arange(nfacesi, dtype='int32') + startface)
        #f_boundary_faces.write('boundary_faces[%s, %s] = %s\n' % (
            #name, len(new_faces), new_faces))
        ifaces_to_read += new_faces

    self.log.info('nfaces2 = %s' % nfaces2)
    ifaces_to_read = np.ravel(ifaces_to_read)
    if len(ifaces_to_read) != nfaces2:
        raise RuntimeError('len(ifaces_to_read)=%s nfaces2=%s' % (
            ifaces_to_read.shape, nfaces2))
    self.log.info(ifaces_to_read)

    # only read the faces referenced by the boundary patches
    faces = face_file.read_face_file(face_filename,
                                     ifaces_to_read=ifaces_to_read)
    #faces = f.read_face_file(face_filename, ifaces_to_read=None)
    del ifaces_to_read

    # dead branch kept for reference: subset-and-renumber the points
    if 0:  # pragma: no cover
        # doesn't work for some reason...
        # we want to only plot a subset of faces to reduce the data set
        # that works, but we also need to decrease the number of nodes
        # (they take wayyy too long)

        # so we take our faces, get the unique nodes
        # sort them so they're consistent with the order in the file
        # using the same block of code that works in the face reader,
        #but it still fails for some reason...

        # after this step, we renumber the faces with the adjusted node ids
        ipoints_to_read = np.unique(faces.ravel())
        self.log.info('nnodes = %s' % len(ipoints_to_read))
        ipoints_to_read.sort()
        self.log.info('ipoints_to_read = %s' % ipoints_to_read)
    else:
        # read every point (ipoints_to_read=None means "all")
        ipoints_to_read = None
    nodes = point_file.read_point_file(point_filename,
                                       ipoints_to_read=ipoints_to_read)

    # unreachable while ipoints_to_read is None (see the dead branch
    # above); renumbers faces into the compacted point numbering
    if ipoints_to_read is not None:
        nid_to_ipoint = {}
        for inid, nid in enumerate(ipoints_to_read):
            nid_to_ipoint[nid] = inid

        self.log.info('%s %s' % (faces, faces.max()))
        for iface, unused_face in enumerate(faces):
            #print('face = %s' % face)
            faces[iface, 0] = nid_to_ipoint[faces[iface, 0]]
            faces[iface, 1] = nid_to_ipoint[faces[iface, 1]]
            faces[iface, 2] = nid_to_ipoint[faces[iface, 2]]
            #print('faces[%i] = %s' % (i, faces[i, :]))
        self.log.info('%s %s' % (faces, faces.max()))
        self.log.info('done...')
        del ipoints_to_read
        del nid_to_ipoint

    #-------------------------------------------
    # keep only the required faces
    iface = 0
    #faces2 = zeros((nfaces2, 4), dtype='int32')
    names = np.zeros(nfaces2, dtype='int32')

    # patch ids start at 1; snames[0] stays None so names[] indexes it
    iname = 1
    snames = [None] * (len(boundaries) + 1)
    self.log.info('')
    for name, boundary in boundaries.items():
        self.log.info('iname=%s name=%s boundary=%s' % (iname, name, boundary))
        # type patch;
        # nFaces nFaces;
        # startFace 777700;
        try:
            unused_type = boundary[0]
            nfacesi = int(boundary[1])
            startface = int(boundary[2])
        except:
            print(boundary.keys())
            raise
        #faces2[iface:iface+nfacesi] = faces[startface:startface + nfacesi]
        # tag this patch's contiguous run of faces with its integer id
        names[iface:iface+nfacesi] = iname
        snames[iname] = name
        iface += nfacesi
        iname += 1
    #del faces
    quads = faces

    #if 0:
        #f_boundary_faces.write('\n\n---Faces----\n')
        #for iface, face in enumerate(faces):
            #pid = names[iface]
            #name = snames[pid]
            #f_boundary_faces.write('%i (%i %i %i %i) pid=%s name=%s\n' % (
                #iface, face[0], face[1], face[2], face[3], pid, name))

        #f_boundary_faces.write('\n\n---First Faces----\n')
        #pid_save = set()
        #for iface, face in enumerate(faces):
            #pid = names[iface]
            #if pid not in pid_save:
                #name = snames[pid]
                #f_boundary_faces.write('%i (%i %i %i %i) pid=%s name=%s\n' % (
                    #iface, face[0], face[1], face[2], face[3], pid, name))
                #pid_save.add(pid)

    # only save the unique nodes
    # ...
    #unodes = unique(quads.ravel())
    #unodes.sort()
    #nodes = nodes[unodes, :]

    # renumber the nodes on the faces
    # ...
    self.log.debug('names=%s; max=%s min=%s' % (
        names, names.max(), names.min()))

    print('done with Boundary')
    #self.nodes = nodes
    return nodes, quads, names
def run_fem1(fem1, bdf_model, out_model, mesh_form, xref, punch, sum_load,
             size, is_double, cid,
             run_extract_bodies=False, encoding=None, crash_cards=None,
             safe_xref=True, pickle_obj=False, stop=False):
    """
    Reads/writes the BDF

    Parameters
    ----------
    fem1 : BDF()
        The BDF object
    bdf_model : str
        The root path of the bdf filename
    out_model : str
        The path to the output bdf
    mesh_form : str {combined, separate}
        'combined' : interspersed=True
        'separate' : interspersed=False
        None skips the write step
    xref : bool
        The xref mode
    punch : bool
        punch flag
    sum_load : bool
        static load sum flag (not referenced in this function)
    size : int, {8, 16}
        size flag
    is_double : bool
        double flag
    cid : int / None
        cid flag (currently unused; the resolve_grids call is commented out)
    safe_xref : bool; default=False
        NOTE(review): not referenced in this function — confirm
    run_extract_bodies : bool; default=False
        isolate the fem bodies; typically 1 body; code is still buggy
    encoding : str; default=None
        the file encoding
    crash_cards : List[str]; default=None -> []
        card names that raise DisabledCardError if found in the model
    pickle_obj : bool; default=False
        passed through to ``remake_model`` — presumably pickles/unpickles
        the model as a round-trip test; confirm there
    stop : bool; default=False
        when True, skip the skin-solid-faces round trip

    Returns
    -------
    fem1 : BDF()
        the read (and possibly remade) model
    """
    if crash_cards is None:
        crash_cards = []
    check_path(bdf_model, 'bdf_model')
    try:
        # punch files never support xref on the initial read
        if '.pch' in bdf_model:
            fem1.read_bdf(bdf_model, xref=False, punch=True, encoding=encoding)
        else:
            # xref is deferred; done later via remake_model/read_bdf below
            fem1.read_bdf(bdf_model, xref=False, punch=punch, encoding=encoding)
        for card in crash_cards:
            if card in fem1.card_count:
                raise DisabledCardError('card=%r has been disabled' % card)
        #fem1.geom_check(geom_check=True, xref=False)

        # round-trip the skin of the solid elements through a scratch file
        if not stop and not xref:
            skin_filename = 'skin_file.bdf'
            fem1.write_skin_solid_faces(skin_filename, size=16, is_double=False)
            if os.path.exists(skin_filename):
                read_bdf(skin_filename, log=fem1.log)
                os.remove(skin_filename)
        if xref:
            if run_extract_bodies:
                extract_bodies(fem1)

            # 1. testing that these methods work without xref
            #fem1._get_rigid()

            #fem1.get_dependent_nid_to_components()
            #fem1._get_maps(eids=None, map_names=None,
                           #consider_0d=True, consider_0d_rigid=True,
                           #consider_1d=True, consider_2d=True, consider_3d=True)
            #fem1.get_dependent_nid_to_components()

            # 1. testing that these methods work with xref
            fem1._get_rigid()
            #common_node_ids = list(fem1.nodes.keys())
            #fem1.get_rigid_elements_with_node_ids(common_node_ids)
            #for spc_id in set(list(fem1.spcadds.keys()) + list(fem1.spcs.keys())):
                #fem1.get_reduced_spcs(spc_id)
            #for mpc_id in set(list(fem1.mpcadds.keys()) + list(fem1.mpcs.keys())):
                #fem1.get_reduced_mpcs(mpc_id)

            #fem1.get_dependent_nid_to_components()
            #fem1._get_maps(eids=None, map_names=None,
                           #consider_0d=True, consider_0d_rigid=True,
                           #consider_1d=True, consider_2d=True, consider_3d=True)
            #fem1.get_dependent_nid_to_components()
            #fem1.get_pid_to_node_ids_and_elements_array(pids=None, etypes=None, idtype='int32',
                                                        #msg=' which is required by test_bdf')
            #fem1.get_property_id_to_element_ids_map(msg=' which is required by test_bdf')
            #fem1.get_material_id_to_property_ids_map(msg=' which is required by test_bdf')
            #fem1.get_element_ids_list_with_pids(pids=None)
            #fem1.get_element_ids_dict_with_pids(pids=None, stop_if_no_eids=False,
                                                #msg=' which is required by test_bdf')
            #fem1.get_node_id_to_element_ids_map()
            #fem1.get_node_id_to_elements_map()

            # re-read the file we just loaded to validate it
            read_bdf(fem1.bdf_filename, encoding=encoding,
                     debug=fem1.debug, log=fem1.log)

            fem1 = remake_model(bdf_model, fem1, pickle_obj)
            #fem1.geom_check(geom_check=True, xref=True)
    except:
        # re-raise with context; the caller handles the failure
        print("failed reading %r" % bdf_model)
        raise

    #out_model = bdf_model + '_out'
    #if cid is not None and xref:
        #fem1.resolve_grids(cid=cid)

    if mesh_form is None:
        pass
    elif mesh_form == 'combined':
        fem1.write_bdf(out_model, interspersed=True, size=size, is_double=is_double)
    elif mesh_form == 'separate':
        fem1.write_bdf(out_model, interspersed=False, size=size, is_double=is_double)
    else:
        msg = "mesh_form=%r; allowed_mesh_forms=['combined','separate']" % mesh_form
        raise NotImplementedError(msg)
    #fem1.write_as_ctria3(out_model)

    fem1._get_maps()
    #remove_unused_materials(fem1)
    #remove_unused(fem1)
    #units_to = ['m', 'kg', 's']
    #units_from = ['m', 'kg', 's']
    #convert(fem1, units_to, units=units_from)

    if xref:
        check_for_cd_frame(fem1)

        #try:
            #fem1.get_area_breakdown()
            #fem1.get_volume_breakdown()
        #except:
            #if len(fem1.masses) > 0:
                #fem1.log.warning('no elements with area/volume found, but elements with mass were')
            #else:
                #fem1.log.warning('no elements found')

        #if len(fem1.elements) + len(fem1.masses) > 0:
            #try:
                #fem1.get_mass_breakdown()
            #except RuntimeError:
                #fem1.log.warning('no elements with mass found')
    return fem1