def main():
    """Extract the min/max centroidal principal stress for each element group.

    Reads an OP2, parses Patran-style element groups, and writes one CSV row
    per (group, subcase) with the extreme principal stresses, then echoes the
    CSV with ``cat``.
    """
    print('main...')
    bdf = None
    op2 = read_op2(op2_filename=None, combine=True, log=None, debug=True,
                   debug_file=None, build_dataframe=False,
                   skip_undefined_matrices=True, mode='msc')

    subcases = [1]

    groups = {
        1 : 'Elm 403082 565514 403084 552195 552196 553965 552204',
    }
    # BUG FIX: eid_groups was a dict ({}), but .append() was called on it
    eid_groups = []
    for unused_key, group in sorted(groups.items()):
        eid_group = parse_patran_syntax_dict(group, pound_dict=None)['Elm']
        eid_groups.append(eid_group)
    del groups

    # BUG FIX: use a context manager so the CSV is flushed/closed, and write
    # the group index instead of the stale loop variable ``group`` (which was
    # always the last parsed group string)
    with open('centroid.csv', 'w') as centroid_file:
        centroid_file.write('group, subcase, eid_max, maxp, eid_min, minp\n')
        for group_id, eids in enumerate(eid_groups):
            # TODO: speed this up by using the same indices
            for subcase in subcases:
                eid_max, maxp, eid_min, minp = get_centroid_max_min_principal_stress(bdf, op2, subcase, eids)
                centroid_file.write('%s, %s, %s, %s, %s, %s\n' % (
                    group_id, subcase, eid_max, maxp, eid_min, minp))

    cat('centroid.csv')
 def test_opt_1(self):
     """reads the SOL 200 model_200.op2"""
     op2_path = os.path.join(model_path, 'sol200', 'model_200.op2')
     op2 = read_op2(op2_path, debug=False)
Beispiel #3
0
def _setup_bar_grid_point_forces(log):
    """Load the bar grid-point-forces model and collect the arrays the
    grid-point-force tests share.

    Parameters
    ----------
    log : logger
        the logger passed to read_op2

    Returns
    -------
    out : tuple
        (model, coord_out, nid_cp_cd, icd_transform, all_nids, xyz_cid0,
         all_eids, element_centroids_cid0, gpforce, x1, x2,
         bending_moment2, stations)
    """
    op2_filename = os.path.join(MODEL_PATH, 'grid_point_forces', 'bar_grid_point_forces.op2')
    model = read_op2(op2_filename, load_geometry=True, combine=True,
                     exclude_results=None, log=log)
    log = model.log
    gpforce = model.grid_point_forces[1]  # type: RealGridPointForcesArray
    force = model.cbar_force[1]
    #['station', 'bending_moment1', 'bending_moment2', 'shear1', 'shear2', 'axial', 'torque']
    headers = force.get_headers()
    itime = 0
    ibending_moment2 = headers.index('bending_moment2')
    bending_moment2 = force.data[itime, :, ibending_moment2]

    coord_out = model.coords[0]
    nid_cp_cd, xyz_cid0, unused_xyz_cp, icd_transform, unused_icp_transform = model.get_xyz_in_coord_array(
        cid=0, fdtype='float64', idtype='int32')
    all_nids = nid_cp_cd[:, 0]

    all_eids, element_centroids_cid0 = get_element_centroids(model, idtype='int32', fdtype='float64')
    # 51 evenly spaced cut stations along x in [0, 10]
    stations = np.linspace(0., 10., num=51)

    nids_bar = []
    nids_beam = []
    for unused_eid, elem in sorted(model.elements.items()):
        if elem.type == 'CBAR':
            nids_bar.append(elem.nodes)
        elif elem.type == 'CBEAM':  # BUG FIX: was 'BEAM', which never matches
            nids_beam.append(elem.nodes)
    nids_bar = np.array(nids_bar, dtype='int32')
    nids_beam = np.array(nids_beam, dtype='int32')
    inid_bar = np.searchsorted(all_nids, nids_bar)
    # x-coordinates of each CBAR's end nodes
    x1 = xyz_cid0[inid_bar[:, 0], 0]
    x2 = xyz_cid0[inid_bar[:, 1], 0]
    out = (
        model, coord_out, nid_cp_cd, icd_transform,
        all_nids, xyz_cid0,
        all_eids, element_centroids_cid0,
        gpforce, x1, x2, bending_moment2,
        stations)
    return out
def get_eigenvalues(op2_filename, debug=False):
    """get the buckling eigenvalues for each panel

    Parameters
    ----------
    op2_filename : str
        path to an OP2 with eigenvector results
    debug : bool; default=False
        passed through to read_op2

    Returns
    -------
    eigrs : list[float]
        the real eigenvalues of the first subcase
    """
    model2 = read_op2(op2_filename, debug=debug)
    # BUG FIX: dict.keys() is not subscriptable in Python 3 (cases[0] raised
    # TypeError); take the first subcase key explicitly
    isubcase = next(iter(model2.eigenvectors))
    eigenvector = model2.eigenvectors[isubcase]
    try:
        eigrs = eigenvector.eigrs
    except AttributeError:
        # build a helpful message listing the attributes that do exist
        msg = '%s.object_attributes() = %s' % (
            eigenvector.class_name,
            str(eigenvector.object_attributes(keys_to_skip='class_name')))
        raise RuntimeError(msg)
    return eigrs
Beispiel #5
0
 def test_opt_1(self):
     """tests SOL 200"""
     log = get_logger(level='warning')
     bdf_name = os.path.join(MODEL_PATH, 'sol200', 'model_200.bdf')
     op2_name = os.path.join(MODEL_PATH, 'sol200', 'model_200.op2')
     unused_model = read_bdf(bdf_name, xref=True, debug=False)
     op2 = read_op2(op2_name, log=log, debug=True, debug_file='temp.debug')
     unused_subcase_ids = op2.subcase_key.keys()
     os.remove('temp.debug')
Beispiel #6
0
    def test_write_2(self):
        """round-trips solid_bending.op2 through write_op2/read_op2_geom"""
        log = get_logger(log=None, level='warning', encoding='utf-8')
        folder = os.path.join(MODEL_PATH, 'solid_bending')
        fname_in = os.path.join(folder, 'solid_bending.op2')
        debug_in = os.path.join(folder, 'solid_bending.debug.out')
        fname_out = os.path.join(folder, 'solid_bending_out.op2')
        debug_out = os.path.join(folder, 'solid_bending_out.debug.out')

        model_a = read_op2(fname_in, debug_file=debug_in, log=log)
        model_a.write_op2(fname_out)
        model_b = read_op2_geom(fname_out, debug_file=debug_out, log=log)
        assert model_a == model_b
    def test_strain(self):
        """compares material-coordinate strains against reference text files"""
        log = get_logger(level='warning')
        for folder, prefix, freqs in CASES:
            bdf = BDF(debug=False, log=log)
            basepath = os.path.join(pkg_path, 'op2', 'test', 'examples', folder)
            bdf.read_bdf(os.path.join(basepath, prefix + '.bdf'))
            op2 = read_op2(
                os.path.join(basepath, prefix + '.op2'),
                debug=False, log=log,
                exclude_results=['element_forces', 'stress'])
            op2_new = data_in_material_coord(bdf, op2)

            for freq in freqs:
                for vecname in strain_vectors:
                    vector = getattr(op2_new, vecname).get(1)
                    if vector is None:
                        continue
                    loc = 'center' if 'center' in prefix else 'corner'
                    name = os.path.join(
                        basepath, '%s_%s_freq_%1.1f.txt' % (vecname, loc, freq))
                    if not os.path.isfile(name):
                        raise AssertionError(
                            'Not found reference result {0}'.format(name))
                    reference = np.loadtxt(name)
                    # reference columns alternate magnitude/phase
                    ref_mag = reference[:, 1::2]
                    ref_phase = reference[:, 2::2]
                    if freq == 1.0:
                        data = vector.data[0]
                    elif freq == 9.5:
                        data = vector.data[17]
                    eids = get_eids_from_op2_vector(vector)
                    check = eids != 0
                    assert np.allclose(np.abs(data[check]), ref_mag, rtol=RTOL)
                    # phase is meaningless where the magnitude is ~0
                    ref_phase[np.isclose(ref_mag, 0)] = 0
                    assert np.allclose(calc_phasedeg(data[check]),
                                       ref_phase, rtol=RTOL)
Beispiel #8
0
    def test_force(self):
        """compares material-coordinate forces against reference text files"""
        log = get_logger(level='warning')
        for folder, prefix, freqs in CASES:
            bdf = BDF(debug=False, log=log)
            basepath = os.path.join(pkg_path, 'op2', 'test', 'examples', folder)
            bdf.read_bdf(os.path.join(basepath, prefix + '.bdf'))
            op2 = read_op2(
                os.path.join(basepath, prefix + '.op2'),
                debug=False, log=log,
                exclude_results=['stress', 'strain'])
            op2_new = data_in_material_coord(bdf, op2)

            for freq in freqs:
                for vecname in force_vectors:
                    vector = getattr(op2_new, vecname).get(1)
                    if vector is None:
                        continue
                    loc = 'center' if 'center' in prefix else 'corner'
                    name = os.path.join(
                        basepath, f'{vecname}_{loc}_freq_{freq:1.1f}.txt')
                    if not os.path.isfile(name):
                        raise AssertionError(f'Not found reference result {name}')
                    reference = np.loadtxt(name)
                    # reference values alternate magnitude/phase
                    ref_mag = reference[0::2]
                    ref_phase = reference[1::2]
                    if freq == 1.0:
                        data = vector.data[0]
                    elif freq == 9.5:
                        data = vector.data[17]
                    assert np.allclose(np.abs(data[:, :]), ref_mag, rtol=RTOL)
                    assert np.allclose(calc_phasedeg(data), ref_phase, rtol=RTOL)
Beispiel #9
0
def main():
    """Extract the min/max centroidal principal stress for each element group.

    Reads an OP2, parses Patran-style element groups, and writes one CSV row
    per (group, subcase) with the extreme principal stresses, then echoes the
    CSV with ``cat``.
    """
    print('main...')
    bdf = None
    #op2_filename = 'model.op2'
    op2 = read_op2(op2_filename=None,
                   combine=True,
                   log=None,
                   debug=True,
                   debug_file=None,
                   build_dataframe=False,
                   skip_undefined_matrices=True,
                   mode='msc')

    subcases = [1]

    groups = {
        1: 'Elm 403082 565514 403084 552195 552196 553965 552204',
    }
    eid_groups = []
    for unused_key, group in sorted(groups.items()):
        eid_group = parse_patran_syntax_dict(group, pound_dict=None)['Elm']
        eid_groups.append(eid_group)
    del groups

    # BUG FIX: use a context manager so the CSV is flushed/closed, and write
    # the group index instead of the stale loop variable ``group`` (which was
    # always the last parsed group string)
    with open('centroid.csv', 'w') as centroid_file:
        centroid_file.write('group, subcase, eid_max, maxp, eid_min, minp\n')
        for group_id, eids in enumerate(eid_groups):
            # TODO: speed this up by using the same indices
            for subcase in subcases:
                eid_max, maxp, eid_min, minp = get_centroid_max_min_principal_stress(
                    bdf, op2, subcase, eids)
                centroid_file.write('%s, %s, %s, %s, %s, %s\n' %
                                    (group_id, subcase, eid_max, maxp, eid_min, minp))

    cat('centroid.csv')
Beispiel #10
0
def fully_stressed_design(bdf_filename,
                          keywords=None,
                          niterations_max=2,
                          alpha=0.9):
    """
    Optimizes shell thickness for minimum weight (ONLY shells)

    Parameters
    ----------
    bdf_filename : str; BDF()
        the BDF filename or model
    keywords : dict; default=None
        keywords passed to run_nastran
    niterations_max : int; default=2
        maximum number of resizing iterations; overridden by FSDMAX on
        the DOPTPRM card when one exists
    alpha : float; default=0.9
        relaxation factor; overridden by FSDALP on the DOPTPRM card
        when one exists

    Returns
    -------
    desvars : dict[id]=values
        the "optimization history of the design variables
    """
    force = True

    iteration = 0
    # NOTE(review): the original hard-coded ``niterations_max = 10`` here,
    # silently clobbering the caller's argument; that override was removed

    if isinstance(bdf_filename, str):
        model = read_bdf(bdf_filename)
    elif isinstance(bdf_filename, BDF):
        model = bdf_filename
        bdf_filename = model.bdf_filename
    else:
        raise TypeError(bdf_filename)

    doptparm = model.doptprm
    if doptparm is not None:
        if 'FSDALP' in doptparm.params:
            alpha = doptparm.params['FSDALP']
        else:
            alpha = doptparm.defaults['FSDALP']
        if not isinstance(alpha, float):
            # BUG FIX: the message said "integer"; FSDALP must be a float
            msg = 'FSDALP on DOPTPARM must be a float; FSDALP=%r' % (alpha)
            raise TypeError(msg)
        # BUG FIX: the range check tested niterations_max instead of alpha
        if not (0. < alpha <= 1.):
            msg = 'FSDALP on DOPTPARM must be between (0. < n <= 1.0); FSDALP=%s' % (
                alpha)
            raise ValueError(msg)

        if 'FSDMAX' in doptparm.params:
            niterations_max = doptparm.params['FSDMAX']
        else:
            niterations_max = doptparm.defaults['FSDMAX']

        if not isinstance(niterations_max, int):
            msg = 'FSDMAX on DOPTPARM must be an integer; FSDMAX=%r' % (
                niterations_max)
            raise TypeError(msg)
        if niterations_max <= 0:
            msg = 'FSDMAX on DOPTPARM must be > 0; FSDMAX=%s' % (
                niterations_max)
            raise ValueError(msg)
    # no DOPTPRM: keep the caller-supplied niterations_max/alpha
    # (the original reset them to 2/0.9, discarding the arguments)

    dresps_to_consider, desvars_to_consider, dvprels_to_consider = get_inputs(
        model)

    pid_to_eid = model.get_property_id_to_element_ids_map()
    bdf_filename2 = 'fem_baseline.bdf'
    op2_filename2 = 'fem_baseline.op2'
    try:
        shutil.copyfile(bdf_filename, bdf_filename2)
    except TypeError:
        msg = 'cannot copy %r to %r' % (bdf_filename, bdf_filename2)
        raise TypeError(msg)

    # BUG FIX: regions2 was never initialized before being written/returned
    regions2 = {}
    while iteration < niterations_max:
        if not os.path.exists(op2_filename2) or force:
            run_nastran(bdf_filename2, keywords=keywords)
        results = read_op2(op2_filename2,
                           combine=True,
                           log=None,
                           debug=False,
                           debug_file=None,
                           build_dataframe=False,
                           skip_undefined_matrices=True,
                           mode='msc')

        isubcase = 1
        itime = 0

        stress_per_region = {}
        nopt = 0
        # BUG FIX: iteritems() is Python 2 only; use .items()
        # NOTE(review): ``regions`` is not defined anywhere in this function;
        # it presumably maps pid -> (tmin, tmax, ovm_min, ovm_max) and must be
        # supplied by the surrounding module -- TODO confirm
        for pid, region in regions.items():
            print('pid=%s region=%s' % (pid, region))
            (tmin, tmax, ovm_min, ovm_max) = region
            prop = model.properties[pid]
            told = prop.t
            eids_requested = pid_to_eid[pid]
            print('eids_requested[pid=%s] = %s' % (pid, eids_requested))

            stress = []
            eid_node = []
            for res in [results.cquad4_stress, results.ctria3_stress]:
                resi = res[isubcase]
                eid_nodei = resi.element_node
                eid = eid_nodei[:, 0]
                stress_data = resi.data
                eid_node.append(eid_nodei)

                # rows of this result belonging to the requested property
                j = np.in1d(eid, eids_requested)  # A in B
                # BUG FIX: np.where returns a 1-tuple, so len(i) == 0 was
                # never true; take the index array itself
                i = np.where(j)[0]
                if len(i) == 0:
                    continue
                # index 7 is assumed to be the von Mises stress -- TODO confirm
                stress_datai = stress_data[itime, i, 7]
                stress.append(stress_datai)

            stress = np.hstack(stress)

            # PROD area
            # PSHELL/PCOMP thickness
            # PSHEAR thickness
            eid_node = np.vstack(eid_node)
            stress_max = stress.max()
            stress_min = stress.min()
            print('stress_min=%s' % stress_min)
            print('stress_max=%s' % stress_max)
            istress_max = np.where(stress == stress_max)[0]
            istress_min = np.where(stress == stress_min)[0]
            eid_max = eid_node[istress_max, 0]
            eid_min = eid_node[istress_min, 0]
            peak_stress = max(abs(stress_max), abs(stress_min))
            # resize toward the allowable, clipped to the gauge limits
            tnew = told * stress_max / ovm_max
            tnew = min(tmax, tnew)
            tnew = max(tmin, tnew)
            #tnew = (oi/omax)**alpha * ti_old
            tratio = tnew / told
            if np.allclose(tratio, 1.):
                continue
            nopt += 1
            stress_per_region = [stress_min, stress_max, eid_min, eid_max]
            print('pid=%s' % pid)
            print(
                '  stress_per_region (ovm_min, ovm_max, eid_min, eid_max) => %s'
                % stress_per_region)
            print('  told=%s tnew=%s tratio=%s\n' % (told, tnew, tratio))

            prop.t *= tratio
            prop.z1 *= tratio
            prop.z2 *= tratio

            regions2[pid] = [
                tnew, peak_stress, stress_max, eid_max, stress_min, eid_min
            ]  # t_new, ovm_new
        if nopt == 0:
            # converged: no property changed this pass
            break

        iteration += 1
        bdf_filename2 = 'fem_%i.bdf' % iteration
        op2_filename2 = 'fem_%i.op2' % iteration
        model.write_bdf(bdf_filename2)

    print('regions2 = %s' % regions2)
    return regions2
    def test_stress(self):
        """compares material-coordinate stresses against reference text files"""
        log = get_logger(level='warning')
        is_failed = False
        for folder, prefix, freqs in CASES:
            bdf = BDF(debug=False, log=log)
            basepath = os.path.join(pkg_path, 'op2', 'test', 'examples', folder)
            bdf.read_bdf(os.path.join(basepath, prefix + '.bdf'))
            op2 = read_op2(
                os.path.join(basepath, prefix + '.op2'),
                debug=False, log=log,
                exclude_results=['element_forces', 'strain'])
            try:
                op2_new = data_in_material_coord(bdf, op2)
            except ValueError:
                op2.log.error('failed rotating %r' % prefix)
                is_failed = True
                raise

            for freq in freqs:
                for vecname in stress_vectors:
                    vector = getattr(op2_new, vecname).get(1)
                    if vector is None:
                        continue
                    loc = 'center' if 'center' in prefix else 'corner'
                    name = os.path.join(
                        basepath, '%s_%s_freq_%1.1f.txt' % (vecname, loc, freq))
                    if not os.path.isfile(name):
                        raise AssertionError(
                            'Not found reference result {0}'.format(name))
                    reference = np.loadtxt(name)
                    # reference columns alternate magnitude/phase
                    ref_mag = reference[:, 1::2]
                    ref_phase = reference[:, 2::2]
                    if freq == 1.0:
                        data = vector.data[0]
                    elif freq == 9.5:
                        data = vector.data[17]
                    eids = get_eids_from_op2_vector(vector)
                    check = eids != 0
                    if 'cquad8' in vecname:
                        # CQUAD8: spot-check every 10th row
                        assert np.allclose(np.abs(data[check][0::10, :]),
                                           ref_mag[0::10], rtol=RTOL)
                        assert np.allclose(calc_phasedeg(data[check][1::10, :]),
                                           ref_phase[1::10], rtol=RTOL)
                    else:
                        assert np.allclose(np.abs(data[check]), ref_mag, rtol=RTOL)
                        assert np.allclose(calc_phasedeg(data[check]),
                                           ref_phase, rtol=RTOL)
        if is_failed:
            raise ValueError('see previous message')
Beispiel #12
0
    def test_gpforce_02(self):
        """tests extract_freebody_loads/extract_interface_loads and
        shear_moment_diagram on the bar grid-point-forces model"""
        IS_MATPLOTLIB = False
        op2_filename = os.path.join(MODEL_PATH, 'grid_point_forces', 'bar_grid_point_forces.op2')
        #from pyNastran.bdf.bdf import read_bdf
        #bdf_model = read_bdf()
        log = SimpleLogger(level='warning')
        model = read_op2(op2_filename, load_geometry=True, combine=True,
                         exclude_results=None, log=log)
        log = model.log
        gpforce = model.grid_point_forces[1]  # type: RealGridPointForcesArray
        force = model.cbar_force[1]
        #['station', 'bending_moment1', 'bending_moment2', 'shear1', 'shear2', 'axial', 'torque']
        headers = force.get_headers()
        #istation = headers.index('station')
        # first time index
        itime = 0
        #ibending_moment1 = headers.index('bending_moment1')
        ibending_moment2 = headers.index('bending_moment2')
        #station = force.data[itime, :, istation]
        #bending_moment1 = force.data[itime, :, ibending_moment1]
        bending_moment2 = force.data[itime, :, ibending_moment2]

        coord_out = model.coords[0]
        nid_cp_cd, xyz_cid0, xyz_cp, icd_transform, icp_transform = model.get_xyz_in_coord_array(
            cid=0, fdtype='float64', idtype='int32')
        all_nids = nid_cp_cd[:, 0]

        all_eids, element_centroids_cid0 = get_element_centroids(model, idtype='int32', fdtype='float64')
        #stations = element_centroids_cid0[:-1, 0]
        # 51 evenly spaced cut stations along x in [0, 10]
        stations = np.linspace(0., 10., num=51)
        #model.log.level = 'warning'
        #print(stations)

        # gather the end nodes of each 1D element
        nids_bar = []
        nids_beam = []
        for eid, elem in sorted(model.elements.items()):
            if elem.type == 'CBAR':
                nids_bar.append(elem.nodes)
            # NOTE(review): 'BEAM' never matches -- CBEAM elements report
            # type 'CBEAM', so nids_beam stays empty; confirm intent
            elif elem.type == 'BEAM':
                nids_beam.append(elem.nodes)
        nids_bar = np.array(nids_bar, dtype='int32')
        nids_beam = np.array(nids_beam, dtype='int32')
        inid_bar = np.searchsorted(all_nids, nids_bar)
        # x-coordinates of each CBAR's end nodes
        x1 = xyz_cid0[inid_bar[:, 0], 0]
        x2 = xyz_cid0[inid_bar[:, 1], 0]

        # extract_freebody_loads is expected to fail on this model
        with self.assertRaises(AssertionError):
            log.error('problem with extract_freebody_loads...')
            fb_force, fb_moment = gpforce.extract_freebody_loads(
                all_eids,
                coord_out, model.coords,
                nid_cp_cd,
                icd_transform,
                itime=0, debug=True,
                log=log)

        if IS_MATPLOTLIB:  # pragma: no cover
            fig = plt.figure()
            ax = fig.gca()
            L = 10.0
            x = xyz_cid0[:, 0].copy()
            x.sort()
            M = x ** 2 / 2
            # F = wx
            # M = wx^2/2
            ax.plot(x1, bending_moment2[::2], 'o-', label='BM2', linewidth=3)
            ax.plot(x2, bending_moment2[1::2], 'o--', )
            ax.plot(L-x, M, 'o-', label='exact', linewidth=1)
            ax.grid(True)

        # interface loads over the full model should sum to zero
        #nids = [1]
        #eids = [1]
        force_out_sum, moment_out_sum = gpforce.extract_interface_loads(
            all_nids, all_eids,
            coord_out, model.coords,
            nid_cp_cd, icd_transform,
            xyz_cid0,
            #summation_point: Optional[NDArray3float]=None,
            consider_rxf=True, itime=0,
            debug=True, log=log)
        assert np.allclose(force_out_sum, [0., 0., 0.]), force_out_sum
        assert np.allclose(moment_out_sum, [0., 0., 0.]), moment_out_sum

        # this one is empty...
        nids = [1]
        eids = [2]
        force_out_sum, moment_out_sum = gpforce.extract_interface_loads(
            nids, eids,
            coord_out, model.coords,
            nid_cp_cd, icd_transform,
            xyz_cid0,
            #summation_point: Optional[NDArray3float]=None,
            consider_rxf=True, itime=0,
            debug=True, log=log)
        #assert force_out.size == 0, force_out
        #assert moment_out.size == 0, moment_out
        # an empty interface produces non-finite (NaN) sums
        assert not np.any(np.isfinite(force_out_sum)), force_out_sum
        assert not np.any(np.isfinite(moment_out_sum)), moment_out_sum
        #coord0 = model.coords[0]
        #gpforce.extract_interface_loads(nids: np.ndarray,
                                        #eids: np.ndarray,
                                        #coord_out=coord0,
                                        #model.coords,
                                        #nid_cd,
                                        #icd_transform,
                                        #xyz_cid0,
                                        #summation_point=None,
                                        #consider_rxf=True,
                                        #itime=0,
                                        #debug=True,
                                        #log=model.log,
                                        #idtype='int32')
        # ----------------------------------------
        nodes_list = list(model.nodes.keys())
        nids = np.array(nodes_list, dtype='int32')
        nids.sort()
        #eids = np.ndarray(list(model.elements.keys()), dtype='int32')
        #eids.sort()
        # bar is [0,10] in x
        force_sum, moment_sum = gpforce.shear_moment_diagram(
                                        xyz_cid0,
                                        all_eids,
                                        nids,
                                        icd_transform,
                                        element_centroids_cid0,
                                        model.coords,
                                        nid_cp_cd,
                                        stations,
                                        coord_out,
                                        idir=0, itime=0,
                                        debug=True, log=model.log)
        # expected shear (z) at each station beyond the first 3
        force_sum_expected = np.array([
            [0.,  0., -9.5],
            [0.,  0., -9.5],
            [0.,  0., -9.5],
            [0.,  0., -9.5],
            [0.,  0., -9.5],
            [0.,  0., -8.5],
            [0.,  0., -8.5],
            [0.,  0., -8.5],
            [0.,  0., -8.5],
            [0.,  0., -8.5],
            [0.,  0., -7.5],
            [0.,  0., -7.5],
            [0.,  0., -7.5],
            [0.,  0., -7.5],
            [0.,  0., -7.5],
            [0.,  0., -6.5],
            [0.,  0., -6.5],
            [0.,  0., -6.5],
            [0.,  0., -6.5],
            [0.,  0., -6.5],
            [0.,  0., -5.5],
            [0.,  0., -5.5],
            [0.,  0., -5.5],
            [0.,  0., -5.5],
            [0.,  0., -5.5],
            [0.,  0., -4.5],
            [0.,  0., -4.5],
            [0.,  0., -4.5],
            [0.,  0., -4.5],
            [0.,  0., -4.5],
            [0.,  0., -3.5],
            [0.,  0., -3.5],
            [0.,  0., -3.5],
            [0.,  0., -3.5],
            [0.,  0., -3.5],
            [0.,  0., -2.5],
            [0.,  0., -2.5],
            [0.,  0., -2.5],
            [0.,  0., -2.5],
            [0.,  0., -2.5],
            [0.,  0., -1.5],
            [0.,  0., -1.5],
            [0.,  0., -1.5],
            [0.,  0., -1.5],
            [0.,  0., -1.5],
            [0.,  0., -0.5],
            [0.,  0., -0.5],
            [0.,  0., -0.5]])
        # expected bending moment (y) at each station beyond the first 3
        moment_sum_expected = np.array([
            [0.0,  4.42166672e+01,  0.0],
            [0.0,  4.23166695e+01,  0.0],
            [0.0,  4.04166679e+01,  0.0],
            [0.0,  3.85166664e+01,  0.0],
            [0.0,  3.66166687e+01,  0.0],
            [0.0,  3.53166695e+01,  0.0],
            [0.0,  3.36166687e+01,  0.0],
            [0.0,  3.19166679e+01,  0.0],
            [0.0,  3.02166672e+01,  0.0],
            [0.0,  2.85166683e+01,  0.0],
            [0.0,  2.74166679e+01,  0.0],
            [0.0,  2.59166679e+01,  0.0],
            [0.0,  2.44166679e+01,  0.0],
            [0.0,  2.29166679e+01,  0.0],
            [0.0,  2.14166679e+01,  0.0],
            [0.0,  2.05166683e+01,  0.0],
            [0.0,  1.92166672e+01,  0.0],
            [0.0,  1.79166679e+01,  0.0],
            [0.0,  1.66166687e+01,  0.0],
            [0.0,  1.53166676e+01,  0.0],
            [0.0,  1.46166677e+01,  0.0],
            [0.0,  1.35166683e+01,  0.0],
            [0.0,  1.24166679e+01,  0.0],
            [0.0,  1.13166676e+01,  0.0],
            [0.0,  1.02166681e+01,  0.0],
            [0.0,  9.71666813e+00,  0.0],
            [0.0,  8.81666756e+00,  0.0],
            [0.0,  7.91666794e+00,  0.0],
            [0.0,  7.01666784e+00,  0.0],
            [0.0,  6.11666775e+00,  0.0],
            [0.0,  5.81666803e+00,  0.0],
            [0.0,  5.11666775e+00,  0.0],
            [0.0,  4.41666794e+00,  0.0],
            [0.0,  3.71666789e+00,  0.0],
            [0.0,  3.01666784e+00,  0.0],
            [0.0,  2.91666794e+00,  0.0],
            [0.0,  2.41666794e+00,  0.0],
            [0.0,  1.91666794e+00,  0.0],
            [0.0,  1.41666794e+00,  0.0],
            [0.0,  9.16667938e-01,  0.0],
            [0.0,  1.01666796e+00,  0.0],
            [0.0,  7.16667950e-01,  0.0],
            [0.0,  4.16667938e-01,  0.0],
            [0.0,  1.16667941e-01,  0.0],
            [0.0, -1.83332056e-01,  0.0],
            [0.0,  1.16670445e-01,  0.0],
            [0.0,  1.66695174e-02,  0.0],
            [0.0, -8.33295286e-02,  0.0]])
        # NOTE(review): asserts report force_out_sum/moment_out_sum (from the
        # earlier interface-load calls), not the arrays being compared -- the
        # failure message would show the wrong values; confirm intent
        assert np.allclose(force_sum[3:, :], force_sum_expected), force_out_sum
        assert np.allclose(moment_sum[3:, :], moment_sum_expected), moment_out_sum
        if IS_MATPLOTLIB:  # pragma: no cover
            M2 = moment_sum[:, 1]
            ax.plot(stations, M2, '*-', label='SMT')
            ax.legend()
            fig.show()
            x = 1
Beispiel #13
0
def load_regions_and_create_eigenvalue_csv(bdf_model,
                                           op2_filenames,
                                           regions_filename,
                                           sym_regions_filename=None,
                                           eig_min=-1.0,
                                           eig_max=1.0,
                                           eig_default=3.0):
    """
    loads a BDF and a series of OP2 filenames and creates an eigenvalue buckling plot

    Parameters
    ----------
    bdf_model : BDF
        the preloaded geometry model; used for logging and the element ids
    op2_filenames : list[str]
        the patch OP2 files to pull buckling eigenvalues from; the patch id
        is parsed from the filename ('<prefix>_<patch_id>.op2' style)
    regions_filename : str
        path to regions.txt file
    sym_regions_filename : str; default=None -> No symmetry
        path to sym_regions.txt file
    eig_min : float
        the required minimum eigenvalue
    eig_max : float
        the required maximum eigenvalue
    eig_default : float
        the default eigenvalue for cases that do not calculate eigenvectors
        because there were no eigenvalues in the range

    Returns
    -------
    min_eigenvalue_by_patch_id : dict
        key : patch_id : int
            the integer patch id
        value : eigenvalue or reserve factor
           the reserve factor eigenvalue for buckling
    eigenvalues : (n, ) float ndarray
        the minimum eigenvalues

    Creates
    -------
    eigenvalues_output.csv : file
        csv of log10(eigenvalue), eigenvalue, is_buckled
    """
    bdf_model.log.info('load_regions_and_create_eigenvalue_csv')
    assert isinstance(bdf_model, BDF), type(bdf_model)

    min_eigenvalue_by_patch_id = {}
    is_sym_regions = False
    if sym_regions_filename is not None:
        is_sym_regions = True
        region_to_symregion_map, symregion_to_region_map = load_sym_regions_map(
            sym_regions_filename)
    msg = ''

    assert len(op2_filenames) > 0, 'op2_filenames=%s' % op2_filenames
    bdf_model.log.info('eig_min=%s eig_max=%s' % (eig_min, eig_max))
    for op2_filename in op2_filenames:
        bdf_model.log.info('op2_filename = %r' % op2_filename)
        if not os.path.exists(op2_filename):
            bdf_model.log.warning(op2_filename)
            continue
        # the patch id is encoded in the filename as '<prefix>_<patch_id>.<ext>'
        op2_filename_base = os.path.basename(op2_filename)
        patch_id_str = op2_filename_base.split('_')[1].split('.')[0]
        patch_id = int(patch_id_str)

        sym_patch_id = None
        if is_sym_regions:
            # the map is one-directional, so check both orientations
            if patch_id in symregion_to_region_map:
                sym_patch_id = symregion_to_region_map[patch_id]
            elif patch_id in region_to_symregion_map:
                sym_patch_id = region_to_symregion_map[patch_id]
            else:
                raise RuntimeError("can this happen???")

        try:
            model2 = read_op2(op2_filename,
                              combine=True,
                              log=None,
                              debug=False,
                              mode='msc')
        except FatalError:
            # the op2 ended in a FATAL; track it and keep going
            bdf_model.log.error('fatal on %r' % op2_filename)
            msg += '%s\n' % op2_filename
            continue

        # list() because dict views are not indexable in Python 3
        cases = list(model2.eigenvectors.keys())
        if len(cases) == 0:
            # no eigenvalues in the requested range -> assume not buckled
            min_eigenvalue_by_patch_id[patch_id] = eig_default
            if is_sym_regions:
                # only tag the mirrored patch when symmetry is active;
                # otherwise sym_patch_id is None and would pollute the dict
                min_eigenvalue_by_patch_id[sym_patch_id] = eig_default
            continue

        isubcase = cases[0]
        eigenvector = model2.eigenvectors[isubcase]
        eigrs = np.array(eigenvector.eigrs)
        #cycles = (eigrs * 2 * np.pi) ** 2.

        #----------------------------------
        # calculate what's basically a reserve factor (RF); margin = reserve_factor - 1
        # take the minimum of the "tension"/"compression" RFs, which are
        # compared to different allowables

        # lambda > 0 (compression buckling)
        i = np.where(eigrs >= 0.0)[0]
        if len(i) == 0:
            pos_eigenvalue = eig_default  # TODO: no buckling eigenvalue...what?
            pos_reserve_factor = eig_default
        else:
            pos_eigenvalue = eigrs[i].min()
            pos_reserve_factor = pos_eigenvalue / eig_max

        # lambda < 0 (tension buckling); deliberately disabled, so a large
        # RF is used and the positive branch always governs
        if 0:  # pragma: no cover
            j = np.where(eigrs < 0.0)[0]
            if len(j) == 0:
                neg_eigenvalue = eig_default  # TODO: no buckling eigenvalue...what?
                neg_reserve_factor = eig_default
            else:
                neg_eigenvalue = np.abs(eigrs[j]).min()
                neg_reserve_factor = neg_eigenvalue / abs(eig_min)
        else:
            neg_reserve_factor = 10.
            neg_eigenvalue = 10.

        bdf_model.log.info(
            'Patch=%s  compression (lambda > 0); lambda=%.3f RF=%.3f' %
            (patch_id, pos_eigenvalue, pos_reserve_factor))
        bdf_model.log.info(
            'Patch=%s  tension    (lambda < 0); lambda=%.3f RF=%.3f' %
            (patch_id, neg_eigenvalue, neg_reserve_factor))
        reserve_factor = min(neg_reserve_factor, pos_reserve_factor,
                             eig_default)
        assert reserve_factor > 0.
        min_eigenvalue_by_patch_id[patch_id] = reserve_factor
        if is_sym_regions:
            min_eigenvalue_by_patch_id[sym_patch_id] = reserve_factor
    bdf_model.log.info(msg)

    bdf_model.log.info('finished parsing eigenvalues...')
    # list() because np.unique on a py3 dict view creates a 0-d object array
    all_eids = np.unique(list(bdf_model.elements.keys()))
    neids = len(all_eids)

    # map each patch's reserve factor onto the elements that make it up
    eigenvalues = np.zeros(neids, dtype='float32')
    with open(regions_filename, 'r') as regions_file:
        lines = regions_file.readlines()

        for iline, line in enumerate(lines[1:]):  # skip the header line
            sline = line.strip().split(',')
            values = [int(val) for val in sline]
            pid = values[0]
            regions_patch_id = values[1]
            eids = values[2:]
            i = np.searchsorted(all_eids, eids)
            assert np.array_equal(
                all_eids[i],
                eids), 'iline=%s pid=%s patch_id=%s' % (iline, pid,
                                                        regions_patch_id)
            if regions_patch_id not in min_eigenvalue_by_patch_id:
                bdf_model.log.info('missing pid=%s' % pid)
                continue
            eigenvalues[i] = min_eigenvalue_by_patch_id[regions_patch_id]

    eigenvalue_filename = 'eigenvalues_output.csv'
    with open(eigenvalue_filename, 'w') as eigenvalue_file:
        eigenvalue_file.write('# log(Eigenvalue), eigenvalue, is_buckled\n')
        for eig in eigenvalues:
            # clip to avoid log10(0) blowing up on unassigned elements
            eig = max(eig, 0.000001)
            if eig < 1.0:
                is_buckled = 1.0
            else:
                is_buckled = 0.0
            log10_eig = np.log10(eig)
            eigenvalue_file.write('%f, %f, %i\n' %
                                  (log10_eig, eig, is_buckled))
    return min_eigenvalue_by_patch_id, eigenvalues
Beispiel #14
0
def parse_op2(contact_bdf, main_op2, subcase_id, contact_surfaces, eid_groups,
              nodes_groups, stiffnesses, errors):
    """
    Reads the spring forces of a contact analysis and rewrites the contact
    CELAS2 cards: a spring with force <= 0 is treated as "no contact" and
    gets a near-zero stiffness.

    assumes static analysis

    Parameters
    ----------
    contact_bdf : str
        the BDF include file that receives the updated CELAS2 cards
    main_op2 : str
        the OP2 file with the spring forces
    subcase_id : int
        the subcase to read spring forces from
    contact_surfaces : list[tuple]
        per-surface data; only the stiffness and coordinate id are read here
    eid_groups / nodes_groups : list
        element/node groupings per contact surface (zipped but unused)
    stiffnesses : array
        per-spring stiffness; updated in place
    errors : int array
        per-spring contact flag (1=contact); updated in place

    Returns
    -------
    nerrors : int
        the number of springs currently flagged as in contact
    """
    from pyNastran.op2.op2 import read_op2

    op2 = read_op2(op2_filename=main_op2)
    spring_forces = op2.spring_forces[subcase_id]

    with open(contact_bdf, 'w') as bdf_file:
        errorsi = 0
        for contact_surface, unused_eid_group, unused_nodes_group in zip(
                contact_surfaces, eid_groups, nodes_groups):
            (_left_bdf, _right_bdf, _dof, stiffness, cid, _glue, _initial_gap,
             _max_deflection_error) = contact_surface
            # both spring components use the contact coordinate system
            c1 = c2 = cid

            i = 0
            # .items() instead of the Python 2 iteritems() helper
            for eid, force in spring_forces.forces.items():
                g1 = spring_forces.g1[eid]
                g2 = spring_forces.g2[eid]

                _k = stiffnesses[i]
                if force <= 0:
                    # assume a really small stiffness (no contact)
                    new_stiffness = 0.01
                    stiffnesses[i] = new_stiffness
                    new_flag = 0
                else:  # force > 0
                    # the contact is correct; keep the requested stiffness
                    new_stiffness = stiffness
                    new_flag = 1

                celas = ['CELAS2', eid, new_stiffness, g1, c1, g2, c2]
                bdf_file.write(print_card(celas))

                # count flips in contact state since the previous iteration
                if new_flag != errors[i]:
                    errorsi += 1
                    errors[i] = new_flag
                i += 1

    ierrors = where(errors == 1)[0]
    nerrors = len(ierrors)
    return nerrors
Beispiel #15
0
# Demo script: read the bar grid-point-forces OP2 (with geometry) and pull
# the CBAR bending moments for later comparison/plotting.
import os
import numpy as np
import matplotlib.pyplot as plt
import pyNastran
from pyNastran.op2.op2 import read_op2
from pyNastran.bdf.mesh_utils.cut_model_by_plane import get_element_centroids
# package root, used to locate the bundled models directory
PKG_PATH = pyNastran.__path__[0]
op2_filename = os.path.join(PKG_PATH, '..', 'models', 'grid_point_forces',
                            'bar_grid_point_forces.op2')
#from pyNastran.bdf.bdf import read_bdf
#bdf_model = read_bdf()
# load_geometry=True pulls the BDF data out of the OP2 as well
model = read_op2(op2_filename,
                 load_geometry=True,
                 combine=True,
                 exclude_results=None,
                 log=None,
                 debug=True)
gpforce = model.grid_point_forces[1]
force = model.cbar_force[1]
#['station', 'bending_moment1', 'bending_moment2', 'shear1', 'shear2', 'axial', 'torque']
headers = force.get_headers()
istation = headers.index('station')
itime = 0  # first (only) time/load step
#ibending_moment1 = headers.index('bending_moment1')
ibending_moment2 = headers.index('bending_moment2')
#station = force.data[itime, :, istation]
#bending_moment1 = force.data[itime, :, ibending_moment1]
# slice out the M2 bending moment at every output station
bending_moment2 = force.data[itime, :, ibending_moment2]

# basic coordinate system (cid=0)
coord_out = model.coords[0]
nid_cp_cd, xyz_cid0, xyz_cp, icd_transform, icp_transform = model.get_xyz_in_coord_array(
Beispiel #16
0
            if is_linux: # linux
                kb = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
            else: # windows
                kb = get_memory_usage() / 1024
            mb = kb / 1024.
            print("Memory usage start: %s (KB); %.2f (MB)" % (kb, mb))
        else:
            raise RuntimeError('wmi (for Windows) or resource (for Linux/Mac) cannot be found')
        m_init.append(mb)

    #m0 = psutil.Process(os.getpid()).memory_info().rss/1024**3

    #objgraph.show_growth(limit=3)
    if 1:
        model = read_op2(op2_filename,
                       build_dataframe=False, debug=False,
                       #exclude_results=exclude_results,
                       skip_undefined_matrices=True)
    else:
        model = OP2()
        model._cleanup_words()

    if 0:
        model.remove_empty_results()
        del model.case_control_deck
        del model.matrices
        for name in model.object_methods():
            try:
                delattr(model, name)
            except AttributeError:
                pass
        objgraph.show_growth()
def fully_stressed_design(bdf_filename, keywords=None,
                          niterations_max=2, alpha=0.9):
    """
    Optimizes shell thickness for minimum weight (ONLY shells)

    Parameters
    ----------
    bdf_filename : str; BDF()
        the BDF filename or model
    keywords : dict; default=None
        keywords passed to run_nastran
    niterations_max : int; default=2
        the maximum number of resize iterations
        (overridden by FSDMAX on the DOPTPRM card when present)
    alpha : float; default=0.9
        the relaxation factor; must satisfy 0 < alpha <= 1
        (overridden by FSDALP on the DOPTPRM card when present)

    Returns
    -------
    desvars : dict[id]=values
        the "optimization history of the design variables
    """
    force = True  # always rerun nastran, even if the op2 already exists

    iteration = 0

    if isinstance(bdf_filename, str):
        model = read_bdf(bdf_filename)
    elif isinstance(bdf_filename, BDF):
        model = bdf_filename
        bdf_filename = model.bdf_filename
    else:
        raise TypeError(bdf_filename)

    # a DOPTPRM card overrides the function arguments when present
    doptparm = model.doptprm
    if doptparm is not None:
        if 'FSDALP' in doptparm.params:
            alpha = doptparm.params['FSDALP']
        else:
            alpha = doptparm.defaults['FSDALP']
        if not isinstance(alpha, float):
            msg = 'FSDALP on DOPTPARM must be a float; FSDALP=%r' % (alpha)
            raise TypeError(msg)
        # bug fix: validate alpha (FSDALP), not niterations_max
        if not (0. < alpha <= 1.):
            msg = 'FSDALP on DOPTPARM must be between (0. < n <= 1.0); FSDALP=%s' % (alpha)
            raise ValueError(msg)

        if 'FSDMAX' in doptparm.params:
            niterations_max = doptparm.params['FSDMAX']
        else:
            niterations_max = doptparm.defaults['FSDMAX']

        if not isinstance(niterations_max, int):
            msg = 'FSDMAX on DOPTPARM must be an integer; FSDMAX=%r' % (niterations_max)
            raise TypeError(msg)
        if niterations_max <= 0:
            msg = 'FSDMAX on DOPTPARM must be > 0; FSDMAX=%s' % (niterations_max)
            raise ValueError(msg)
    # no DOPTPRM: honor the niterations_max/alpha passed by the caller
    # (previously they were unconditionally clobbered to 10 and then 2/0.9)

    dresps_to_consider, desvars_to_consider, dvprels_to_consider = get_inputs(model)

    pid_to_eid = model.get_property_id_to_element_ids_map()
    bdf_filename2 = 'fem_baseline.bdf'
    op2_filename2 = 'fem_baseline.op2'
    try:
        shutil.copyfile(bdf_filename, bdf_filename2)
    except TypeError:
        msg = 'cannot copy %r to %r' % (bdf_filename, bdf_filename2)
        raise TypeError(msg)

    regions2 = {}  # bug fix: was referenced at the end without being defined
    while iteration < niterations_max:
        if not os.path.exists(op2_filename2) or force:
            run_nastran(bdf_filename2, keywords=keywords)
        results = read_op2(op2_filename2, combine=True, log=None,
                           debug=False,
                           debug_file=None,
                           build_dataframe=False,
                           skip_undefined_matrices=True,
                           mode='msc')

        isubcase = 1
        itime = 0

        stress_per_region = {}
        nopt = 0
        # NOTE(review): `regions` is not defined in this function or its
        # arguments; presumably a module-level dict of
        # pid -> (tmin, tmax, ovm_min, ovm_max) -- confirm before use
        for pid, region in regions.items():
            print('pid=%s region=%s' % (pid, region))
            (tmin, tmax, ovm_min, ovm_max) = region
            prop = model.properties[pid]
            told = prop.t
            eids_requested = pid_to_eid[pid]
            print('eids_requested[pid=%s] = %s' % (pid, eids_requested))

            # gather the stress component (index 7) for the requested
            # elements from both the quad and tria results
            stress = []
            eid_node = []
            for res in [results.cquad4_stress, results.ctria3_stress]:
                resi = res[isubcase]
                eid_nodei = resi.element_node
                eid = eid_nodei[:, 0]
                stress_data = resi.data
                eid_node.append(eid_nodei)

                # membership test: which result elements are in this region
                j = np.in1d(eid, eids_requested)  # A in B
                i = np.where(j)
                if len(i) == 0:
                    continue
                stress_datai = stress_data[itime, i, 7]
                stress.append(stress_datai)

            stress = np.hstack(stress)

            # PROD area
            # PSHELL/PCOMP thickness
            # PSHEAR thickness
            eid_node = np.vstack(eid_node)
            stress_max = stress.max()
            stress_min = stress.min()
            print('stress_min=%s' % stress_min)
            print('stress_max=%s' % stress_max)
            istress_max = np.where(stress == stress_max)[0]
            istress_min = np.where(stress == stress_min)[0]
            eid_max = eid_node[istress_max, 0]
            eid_min = eid_node[istress_min, 0]
            peak_stress = max(abs(stress_max), abs(stress_min))
            # fully-stressed resize: scale thickness by the stress ratio,
            # then clip into the [tmin, tmax] bounds
            tnew = told * stress_max / ovm_max
            tnew = min(tmax, tnew)
            tnew = max(tmin, tnew)
            #tnew = (oi/omax)**alpha * ti_old
            tratio = tnew / told
            if np.allclose(tratio, 1.):
                continue
            nopt += 1
            stress_per_region = [stress_min, stress_max,
                                 eid_min, eid_max]
            print('pid=%s' % pid)
            print('  stress_per_region (ovm_min, ovm_max, eid_min, eid_max) => %s' % stress_per_region)
            print('  told=%s tnew=%s tratio=%s\n' % (told, tnew, tratio))

            # scale the thickness and the stress-recovery fiber distances
            prop.t *= tratio
            prop.z1 *= tratio
            prop.z2 *= tratio

            regions2[pid] = [tnew, peak_stress, stress_max, eid_max, stress_min, eid_min]  # t_new, ovm_new
        if nopt == 0:
            # nothing was resized this pass -> converged
            break

        iteration += 1
        bdf_filename2 = 'fem_%i.bdf' % iteration
        op2_filename2 = 'fem_%i.op2' % iteration
        model.write_bdf(bdf_filename2)

    print('regions2 = %s' % regions2)
    return regions2
                new_vector.data[:, :, 0][:, check] = tmp
                tmp.real = eyy_theta_real
                tmp.imag = eyy_theta_imag
                new_vector.data[:, :, 1][:, check] = tmp
                tmp.real = exy_theta_real * 2.
                tmp.imag = exy_theta_imag * 2.
                new_vector.data[:, :, 2][:, check] = tmp
            else:
                exx_theta, eyy_theta, exy_theta = transf_Mohr(
                    exx, eyy, exy, vecthetarad)
                thetadeg_new = thetadeg_to_principal(exx_theta, eyy_theta,
                                                     exy_theta)
                new_vector.data[:, :, 1][:, check] = exx_theta
                new_vector.data[:, :, 2][:, check] = eyy_theta
                new_vector.data[:, :, 3][:, check] = exy_theta * 2.
                new_vector.data[:, :, 4][:, check] = thetadeg_new

            #TODO implement transformation for corner nodes
            #     for now we just zero the wrong values
            if 'quad8' in vecname:
                for i in [2, 3, 4, 5, 6, 7, 8, 9]:
                    new_vector.data[:, i, :] = 0
    return op2_new


if __name__ == '__main__':  # pragma: no cover
    from pyNastran.op2.op2 import read_op2

    # demo: read the solid-bending model and run the solid transform on it
    op2_filename = r'C:\NASA\m4\formats\git\pyNastran\models\solid_bending\solid_bending.op2'
    solid_model = read_op2(op2_filename)
    transform_solids(solid_model)
Beispiel #19
0
def load_regions_and_create_eigenvalue_csv(bdf_model, op2_filenames,
                                           regions_filename, sym_regions_filename=None,
                                           eig_min=-1.0, eig_max=1.0, eig_default=3.0):
    """
    loads a BDF and a series of OP2 filenames and creates an eigenvalue buckling plot

    Parameters
    ----------
    bdf_model : BDF
        the preloaded geometry model; used for logging and the element ids
    op2_filenames : list[str]
        the patch OP2 files to pull buckling eigenvalues from; the patch id
        is parsed from the filename ('<prefix>_<patch_id>.op2' style)
    regions_filename : str
        path to regions.txt file
    sym_regions_filename : str; default=None -> No symmetry
        path to sym_regions.txt file
    eig_min : float
        the required minimum eigenvalue
    eig_max : float
        the required maximum eigenvalue
    eig_default : float
        the default eigenvalue for cases that do not calculate eigenvectors
        because there were no eigenvalues in the range

    Returns
    -------
    min_eigenvalue_by_patch_id : dict
        key : patch_id : int
            the integer patch id
        value : eigenvalue or reserve factor
           the reserve factor eigenvalue for buckling
    eigenvalues : (n, ) float ndarray
        the minimum eigenvalues

    Creates
    -------
    eigenvalues_output.csv : file
        csv of log10(eigenvalue), eigenvalue, is_buckled
    """
    bdf_model.log.info('load_regions_and_create_eigenvalue_csv')
    assert isinstance(bdf_model, BDF), type(bdf_model)

    min_eigenvalue_by_patch_id = {}
    is_sym_regions = False
    if sym_regions_filename is not None:
        is_sym_regions = True
        region_to_symregion_map, symregion_to_region_map = load_sym_regions_map(
            sym_regions_filename)
    msg = ''

    assert len(op2_filenames) > 0, 'op2_filenames=%s' % op2_filenames
    bdf_model.log.info('eig_min=%s eig_max=%s' % (eig_min, eig_max))
    for op2_filename in op2_filenames:
        bdf_model.log.info('op2_filename = %r' % op2_filename)
        if not os.path.exists(op2_filename):
            bdf_model.log.warning(op2_filename)
            continue
        # basename first so underscores in directory names don't break the
        # '<prefix>_<patch_id>.<ext>' parse
        op2_filename_base = os.path.basename(op2_filename)
        patch_id_str = op2_filename_base.split('_')[1].split('.')[0]
        patch_id = int(patch_id_str)

        sym_patch_id = None
        if is_sym_regions:
            # the map is one-directional, so check both orientations
            if patch_id in symregion_to_region_map:
                sym_patch_id = symregion_to_region_map[patch_id]
            elif patch_id in region_to_symregion_map:
                sym_patch_id = region_to_symregion_map[patch_id]
            else:
                raise RuntimeError("can this happen???")

        try:
            model2 = read_op2(op2_filename, combine=True, log=None,
                              debug=False, mode='msc')
        except FatalError:
            # the op2 ended in a FATAL; track it and keep going
            bdf_model.log.error('fatal on %r' % op2_filename)
            msg += '%s\n' % op2_filename
            continue

        # list() because dict views are not indexable in Python 3
        cases = list(model2.eigenvectors.keys())
        if len(cases) == 0:
            # no eigenvalues in the requested range -> assume not buckled
            min_eigenvalue_by_patch_id[patch_id] = eig_default
            if is_sym_regions:
                # only tag the mirrored patch when symmetry is active;
                # otherwise sym_patch_id is None and would pollute the dict
                min_eigenvalue_by_patch_id[sym_patch_id] = eig_default
            continue

        isubcase = cases[0]
        eigenvector = model2.eigenvectors[isubcase]
        eigrs = np.array(eigenvector.eigrs)
        #cycles = (eigrs * 2 * np.pi) ** 2.

        #----------------------------------
        # calculate what's basically a reserve factor (RF); margin = reserve_factor - 1
        # take the minimum of the "tension"/"compression" RFs, which are
        # compared to different allowables

        # lambda > 0 (compression buckling)
        i = np.where(eigrs >= 0.0)[0]
        if len(i) == 0:
            pos_eigenvalue = eig_default  # TODO: no buckling eigenvalue...wat?
            pos_reserve_factor = eig_default
        else:
            pos_eigenvalue = eigrs[i].min()
            pos_reserve_factor = pos_eigenvalue / eig_max

        # lambda < 0 (tension buckling); deliberately disabled, so a large
        # RF is used and the positive branch always governs
        if 0:  # pragma: no cover
            j = np.where(eigrs < 0.0)[0]
            if len(j) == 0:
                neg_eigenvalue = eig_default  # TODO: no buckling eigenvalue...wat?
                neg_reserve_factor = eig_default
            else:
                neg_eigenvalue = np.abs(eigrs[j]).min()
                neg_reserve_factor = neg_eigenvalue / abs(eig_min)
        else:
            neg_reserve_factor = 10.
            neg_eigenvalue = 10.

        bdf_model.log.info('Patch=%s  compression (lambda > 0); lambda=%.3f RF=%.3f' % (patch_id, pos_eigenvalue, pos_reserve_factor))
        #bdf_model.log.info('Patch=%s  tension    (lambda < 0); lambda=%.3f RF=%.3f' % (patch_id, neg_eigenvalue, neg_reserve_factor))
        reserve_factor = min(neg_reserve_factor, pos_reserve_factor, eig_default)
        assert reserve_factor > 0.
        min_eigenvalue_by_patch_id[patch_id] = reserve_factor
        if is_sym_regions:
            min_eigenvalue_by_patch_id[sym_patch_id] = reserve_factor
    # log (not print) so the output goes through the model's logger,
    # consistent with the rest of the function
    bdf_model.log.info(msg)

    bdf_model.log.info('finished parsing eigenvalues...')
    # list() because np.unique on a py3 dict view creates a 0-d object array
    all_eids = np.unique(list(bdf_model.elements.keys()))
    neids = len(all_eids)

    # map each patch's reserve factor onto the elements that make it up
    eigenvalues = np.zeros(neids, dtype='float32')
    with open(regions_filename, 'r') as regions_file:
        lines = regions_file.readlines()

        for iline, line in enumerate(lines[1:]):  # skip the header line
            sline = line.strip().split(',')
            values = [int(val) for val in sline]
            pid = values[0]
            regions_patch_id = values[1]
            eids = values[2:]
            i = np.searchsorted(all_eids, eids)
            assert np.array_equal(all_eids[i], eids), 'iline=%s pid=%s patch_id=%s' % (
                iline, pid, regions_patch_id)
            if regions_patch_id not in min_eigenvalue_by_patch_id:
                bdf_model.log.info('missing pid=%s' % pid)
                continue
            eigenvalues[i] = min_eigenvalue_by_patch_id[regions_patch_id]

    eigenvalue_filename = 'eigenvalues_output.csv'
    with open(eigenvalue_filename, 'w') as eigenvalue_file:
        eigenvalue_file.write('# log(Eigenvalue), eigenvalue, is_buckled\n')
        for eig in eigenvalues:
            # clip to avoid log10(0) blowing up on unassigned elements
            eig = max(eig, 0.000001)
            if eig < 1.0:
                is_buckled = 1.0
            else:
                is_buckled = 0.0
            log10_eig = np.log10(eig)
            eigenvalue_file.write('%f, %f, %i\n' % (log10_eig, eig, is_buckled))
    return min_eigenvalue_by_patch_id, eigenvalues