Example #1
def bdf_mirror(bdf_filename: Union[str, BDF],
               plane: str = 'xz',
               log=None,
               debug: bool = True):
    """
    Mirrors the model about the symmetry plane

    Parameters
    ----------
    bdf_filename : str / BDF()
        str : the bdf filename
        BDF : the BDF model object
    plane : str; {'xy', 'yz', 'xz'}; default='xz'
        the plane to mirror about
        xz : +y/-y
        yz : +x/-x
        xy : +z/-z

    Returns
    -------
    model : BDF()
        BDF : the BDF model object
    nid_offset : int
        the offset node id
    eid_offset : int
        the offset element id

    """
    model = get_bdf_model(bdf_filename, xref=True, log=log, debug=debug)
    mirror_model = model
    nid_offset, plane = _mirror_nodes(model, plane=plane)
    eid_offset = _mirror_elements(model, mirror_model, nid_offset)
    _mirror_loads(model, nid_offset, eid_offset)
    _mirror_aero(model, nid_offset, plane=plane)
    return model, nid_offset, eid_offset
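
A minimal usage sketch for the function above; the import path is the one pyNastran is assumed to use and the filenames are hypothetical:

from pyNastran.bdf.mesh_utils.mirror_mesh import bdf_mirror  # assumed import path

# mirror a half-span model about the xz (y=0) plane and write the full-span deck
model, nid_offset, eid_offset = bdf_mirror('half_wing.bdf', plane='xz')  # hypothetical filename
model.write_bdf('full_wing.bdf')
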
Example #2
def _setup_faces(bdf_filename):
    """helper method"""
    model = get_bdf_model(bdf_filename, xref=False, log=None, debug=False)
    out = model.get_xyz_in_coord_array(cid=0, fdtype='float64', idtype='int32')
    nid_cp_cd, xyz_cid0, unused_xyz_cp, unused_icd_transform, unused_icp_transform = out
    nids = nid_cp_cd[:, 0]
    #eid_to_edge_map, nid_to_edge_map, edge_to_eid_map = create_maps(model)
    #model = BDF()
    face_eids = []
    faces = []
    shells = set([
        'CTRIA3', 'CTRIAX', 'CTRIA6', 'CTRIAX6',
        'CQUAD4', 'CQUAD', 'CQUAD8', 'CQUADR', 'CQUADX', 'CQUADX8',
        'CSHEAR'])
    for eid, elem in model.elements.items():
        if elem.type in shells:
            #if elem.type == 'CQUAD4':
                # split to 2 faces...not done
            #elif elem.type == 'CTRIA3':
            face_eids.append(eid)
            faces.append(elem.node_ids)

    #out = model._get_maps(eids=None, map_names=None,
                          #consider_0d=False, consider_0d_rigid=False,
                          #consider_1d=False, consider_2d=True, consider_3d=False)
    #edge_to_eid_map = out['edge_to_eid_map']
    #faces = iterkeys(edge_to_eid_map)
    return nids, xyz_cid0, faces, face_eids
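
A small sketch of consuming the parallel lists this helper returns; the filename is hypothetical and the node lookup assumes `nids` comes back sorted by node id:

import numpy as np

nids, xyz_cid0, faces, face_eids = _setup_faces('model.bdf')  # hypothetical filename
for eid, face in zip(face_eids, faces):
    # map the face's node ids to rows of xyz_cid0 (assumes nids is sorted)
    inodes = np.searchsorted(nids, face)
    centroid = xyz_cid0[inodes, :].mean(axis=0)
    print(eid, centroid)
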
Example #3
def _setup_faces(bdf_filename: Union[str, BDF]) -> Tuple[Any, Any, Any, Any]:
    """helper method"""
    model = get_bdf_model(bdf_filename, xref=False, log=None, debug=False)
    out = model.get_xyz_in_coord_array(cid=0, fdtype='float64', idtype='int32')
    nid_cp_cd, xyz_cid0, unused_xyz_cp, unused_icd_transform, unused_icp_transform = out
    nids = nid_cp_cd[:, 0]
    #eid_to_edge_map, nid_to_edge_map, edge_to_eid_map = create_maps(model)
    #model = BDF()
    face_eids = []
    faces = []
    shells = {
        'CTRIA3', 'CTRIAX', 'CTRIA6', 'CTRIAX6', 'CQUAD4', 'CQUAD', 'CQUAD8',
        'CQUADR', 'CQUADX', 'CQUADX8', 'CSHEAR'
    }
    for eid, elem in model.elements.items():
        if elem.type in shells:
            if elem.type == 'CQUAD4':
                # split to 2 faces
                n1, n2, n3, n4 = elem.node_ids
                face_eids.append(eid)
                face_eids.append(-eid)
                faces.append((n1, n2, n3))
                faces.append((n1, n3, n4))
            elif elem.type == 'CTRIA3':
                face_eids.append(eid)
                faces.append(elem.node_ids)
            else:
                model.log.debug('skipping %s' % elem.type)

    #out = model._get_maps(eids=None, map_names=None,
                          #consider_0d=False, consider_0d_rigid=False,
                          #consider_1d=False, consider_2d=True, consider_3d=False)
    #edge_to_eid_map = out['edge_to_eid_map']
    return nids, xyz_cid0, faces, face_eids
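
The CQUAD4 branch above splits each quad along its n1-n3 diagonal; a tiny standalone illustration with made-up node ids:

# a quad with corners (n1, n2, n3, n4) becomes two triangles sharing the n1-n3 diagonal
n1, n2, n3, n4 = 10, 20, 30, 40
tri_a = (n1, n2, n3)   # (10, 20, 30)
tri_b = (n1, n3, n4)   # (10, 30, 40)

The negative ids stored in face_eids (the -eid entries) tag the second half of a split quad, so both triangles can be traced back to the same element.
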
Example #4
def bdf_mirror_plane(bdf_filename: Union[str, BDF], plane, mirror_model=None,
                     log=None, debug: bool=True, use_nid_offset: bool=True):
    """mirrors a model about an arbitrary plane"""
    model = get_bdf_model(bdf_filename, xref=True, log=log, debug=debug)
    if mirror_model is None:
        mirror_model = BDF(debug=debug, log=log, mode='msc')

    nid_offset, plane = _mirror_nodes_plane(model, mirror_model, plane,
                                            use_nid_offset=use_nid_offset)
    eid_offset = _mirror_elements(model, mirror_model, nid_offset, use_eid_offset=True)
    #_mirror_loads(model, nid_offset, eid_offset)
    return model, mirror_model, nid_offset, eid_offset
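
A hedged usage sketch; here `plane` is assumed to be a (3, 3) array of three points lying on the mirror plane (confirm the exact convention against the pyNastran documentation), and the filename is hypothetical:

import numpy as np

# three points on the y=0 plane (assumed format: one point per row)
plane = np.array([
    [0., 0., 0.],
    [1., 0., 0.],
    [0., 0., 1.],
])
model, mirror_model, nid_offset, eid_offset = bdf_mirror_plane(
    'half_wing.bdf', plane, mirror_model=None, use_nid_offset=True)  # hypothetical filename
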
Example #5
def _setup_edges(bdf_filename):
    """helper method"""
    model = get_bdf_model(bdf_filename, xref=False, log=None, debug=False)
    out = model.get_xyz_in_coord_array(cid=0, fdtype='float64', idtype='int32')
    nid_cp_cd, xyz_cid0, unused_xyz_cp, unused_icd_transform, unused_icp_transform = out
    nids = nid_cp_cd[:, 0]
    #eid_to_edge_map, nid_to_edge_map, edge_to_eid_map = create_maps(model)
    #model = BDF()
    out = model._get_maps(eids=None, map_names=None,
                          consider_0d=False, consider_0d_rigid=False,
                          consider_1d=False, consider_2d=True, consider_3d=False)
    edge_to_eid_map = out['edge_to_eid_map']
    edges = iterkeys(edge_to_eid_map)
    return nids, xyz_cid0, edges
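
A short sketch of using what this helper returns; each edge is expected to be a pair of node ids, the filename is hypothetical, and the lookup assumes `nids` is sorted:

import numpy as np

nids, xyz_cid0, edges = _setup_edges('model.bdf')  # hypothetical filename
for n1, n2 in edges:
    # look up both end points and measure the edge length
    i1, i2 = np.searchsorted(nids, [n1, n2])
    length = np.linalg.norm(xyz_cid0[i2] - xyz_cid0[i1])
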
Example #6
def _eq_nodes_setup(
    bdf_filename,
    renumber_nodes=False,
    xref=True,
    node_set: Optional[List[NDArrayNint]] = None,
    log: Optional[SimpleLogger] = None,
    debug: bool = True,
    idtype: str = 'int32',
    fdtype: str = 'float64'
) -> Tuple[NDArrayN3float, BDF, NDArrayNint, NDArrayNint]:
    """helper function for ``bdf_equivalence_nodes``"""
    if node_set is not None:
        if renumber_nodes:
            raise NotImplementedError(
                'node_set is not None & renumber_nodes=True')

        #print(type(node_set))
        #print('*node_set', node_set)
        assert len(node_set) > 0, node_set
        assert isinstance(node_set, list), type(node_set)
    all_node_set = get_all_node_set(node_set)

    model = get_bdf_model(bdf_filename, xref=xref, log=log, debug=debug)

    # quads / tris
    #nids_quads = []
    #eids_quads = []
    #nids_tris = []
    #eids_tris = []

    # map the node ids to the slot in the nids array
    renumber_nodes = False
    if node_set is not None:
        nids, all_nids = _eq_nodes_setup_node_set(
            model,
            node_set,
            all_node_set,
            renumber_nodes=renumber_nodes,
            idtype=idtype)
    else:
        nids, all_nids = _eq_nodes_setup_node(model,
                                              renumber_nodes=renumber_nodes,
                                              idtype=idtype)

    nodes_xyz = _get_xyz_cid0(model, nids, fdtype=fdtype)
    inew = _check_for_referenced_nodes(model, node_set, nids, all_nids,
                                       nodes_xyz)

    #assert np.array_equal(nids[inew], nids_new), 'some nodes are not defined'
    return nodes_xyz, model, nids, inew
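
This helper feeds the public `bdf_equivalence_nodes`; a minimal sketch of that call (the module path and keyword names are from memory of the pyNastran API and should be verified; filenames and tolerance are hypothetical):

from pyNastran.bdf.mesh_utils.bdf_equivalence import bdf_equivalence_nodes  # assumed path

# merge any pair of nodes closer than tol and write the equivalenced deck;
# assumed to return the updated BDF model
model = bdf_equivalence_nodes('model.bdf', 'model_equiv.bdf', tol=0.001,
                              node_set=None, log=None, debug=False)
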
Example #7
def shift(bdf_filename, dxyz, bdf_filename_out=None):
    """shifts the model by some amount"""
    if isinstance(dxyz, list):
        dxyz = np.array(dxyz)
    assert isinstance(dxyz, np.ndarray), dxyz
    print("dxyz = %s" % dxyz)

    model = get_bdf_model(bdf_filename, xref=True, log=None, debug=True)
    for unused_nid, node in model.nodes.items():
        xyz = node.get_position() + dxyz
        node.set_position(model, xyz, cid=0, xref=True)

    for unused_caero_id, caero in model.caeros.items():
        caero.shift(dxyz)

    if bdf_filename_out:
        model.write_bdf(bdf_filename_out)
    return model
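
A quick usage sketch of shift (filenames are hypothetical):

import numpy as np

# translate every GRID and CAERO panel by +10.0 in x and write the result
model = shift('model.bdf', np.array([10., 0., 0.]),
              bdf_filename_out='model_shifted.bdf')
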
Example #8
def _eq_nodes_setup(bdf_filename,
                    unused_tol,
                    renumber_nodes=False,
                    xref=True,
                    node_set=None,
                    log=None,
                    debug=True):
    """helper function for ``bdf_equivalence_nodes``"""
    if node_set is not None:
        if renumber_nodes:
            raise NotImplementedError(
                'node_set is not None & renumber_nodes=True')

        #print(type(node_set))
        #print('*node_set', node_set)
        assert len(node_set) > 0, node_set
        if isinstance(node_set, set):
            node_set = asarray(list(node_set), dtype='int32')
        else:
            node_set = asarray(node_set, dtype='int32')

    model = get_bdf_model(bdf_filename, xref=xref, log=log, debug=debug)

    # quads / tris
    #nids_quads = []
    #eids_quads = []
    #nids_tris = []
    #eids_tris = []

    # map the node ids to the slot in the nids array
    renumber_nodes = False
    if node_set is not None:
        nids, all_nids, unused_nid_map = _eq_nodes_setup_node_set(
            model, node_set, renumber_nodes=renumber_nodes)
    else:
        nids, all_nids, unused_nid_map = _eq_nodes_setup_node(
            model, renumber_nodes=renumber_nodes)

    nodes_xyz = _get_xyz_cid0(model, nids)
    inew = _check_for_referenced_nodes(model, node_set, nids, all_nids,
                                       nodes_xyz)

    #assert np.array_equal(nids[inew], nids_new), 'some nodes are not defined'
    return nodes_xyz, model, nids, inew
Example #9
def _eq_nodes_setup(bdf_filename,
                    unused_tol,
                    renumber_nodes=False,
                    xref=True,
                    node_set=None,
                    debug=True):
    """helper function for `bdf_equivalence_nodes`"""
    if node_set is not None:
        if renumber_nodes:
            raise NotImplementedError(
                'node_set is not None & renumber_nodes=True')

        #print(type(node_set))
        #print('*node_set', node_set)
        assert len(node_set) > 0, node_set
        if isinstance(node_set, set):
            node_set = asarray(list(node_set), dtype='int32')
        else:
            node_set = asarray(node_set, dtype='int32')

    model = get_bdf_model(bdf_filename, xref=xref, log=None, debug=debug)

    coord_ids = model.coord_ids
    # node.xyz is already in the global frame only when cid=0 is the sole coordinate system;
    # otherwise get_position() must transform it
    needs_get_position = coord_ids != [0]

    # quads / tris
    #nids_quads = []
    #eids_quads = []
    #nids_tris = []
    #eids_tris = []

    # map the node ids to the slot in the nids array
    renumber_nodes = False

    inode = 0
    nid_map = {}
    if node_set is not None:
        if PY2:
            all_nids = array(model.nodes.keys(), dtype='int32')
        else:
            all_nids = array(list(model.nodes.keys()), dtype='int32')

        # B - A
        # these are all the nodes that are requested from node_set that are missing
        #   thus len(diff_nodes) == 0
        diff_nodes = setdiff1d(node_set, all_nids)
        if len(diff_nodes) != 0:
            msg = ('The following nodes cannot be found, but are included'
                   ' in the reduced set; nids=%s' % diff_nodes)
            raise RuntimeError(msg)

        # A & B
        # the nodes to analyze are the union of all the nodes and the desired set
        # which is basically the same as:
        #   nids = unique(node_set)
        nids = intersect1d(all_nids, node_set,
                           assume_unique=True)  # the new values

        if renumber_nodes:
            raise NotImplementedError(
                'node_set is not None & renumber_nodes=True')
        else:
            for nid in all_nids:
                nid_map[inode] = nid
                inode += 1
        #nids = array([node.nid for nid, node in sorted(iteritems(model.nodes))
        #if nid in node_set], dtype='int32')

    else:
        if renumber_nodes:
            for nid, node in sorted(iteritems(model.nodes)):
                node.nid = inode + 1
                nid_map[inode] = nid
                inode += 1
            nnodes = len(model.nodes)
            nids = arange(1, inode + 1, dtype='int32')
            assert nids[-1] == nnodes
        else:
            for nid, node in sorted(iteritems(model.nodes)):
                nid_map[inode] = nid
                inode += 1
            nids = array(
                [node.nid for nid, node in sorted(iteritems(model.nodes))],
                dtype='int32')
        all_nids = nids

    if needs_get_position:
        nodes_xyz = array([model.nodes[nid].get_position() for nid in nids],
                          dtype='float32')
    else:
        nodes_xyz = array([model.nodes[nid].xyz for nid in nids],
                          dtype='float32')

    if node_set is not None:
        assert nodes_xyz.shape[0] == len(nids)

    if 0:
        # I forget entirely what this block of code is for, but my general
        # recollection was that it checked that all the nodes that were
        # referenced were included in the nids list.  I'd rather break that
        # check in order to support nodes_set.
        #
        # It's also possible that it's here, so you only consider nodes that
        # are associated...

        # there is some set of points that are used on the elements that
        # will be considered.
        #
        # Presumably this is enough to capture all the node ids and NOT
        # spoints, but I doubt it...
        spoint_epoint_nid_set = set([])
        for unused_eid, element in sorted(iteritems(model.elements)):
            spoint_epoint_nid_set.update(element.node_ids)
        for unused_eid, element in sorted(iteritems(model.masses)):
            spoint_epoint_nid_set.update(element.node_ids)

        nids_new = spoint_epoint_nid_set - set(model.spoints) - set(
            model.epoints)

        if None in nids_new:
            nids_new.remove(None)

        # autosorts the data
        nids_new = unique(list(nids_new))
        assert isinstance(nids_new[0], integer_types), type(nids_new[0])

        missing_nids = list(set(nids_new) - set(all_nids))
        if missing_nids:
            missing_nids.sort()
            msg = 'There are missing nodes...\n'  # TODO: in what???
            msg += 'missing nids=%s' % str(missing_nids)
            raise RuntimeError(msg)

        # get the node_id mapping for the kdtree
        inew = searchsorted(nids, nids_new, side='left')
        # print('nids_new =', nids_new)
    else:
        inew = slice(None)
    #assert np.array_equal(nids[inew], nids_new), 'some nodes are not defined'
    return nodes_xyz, model, nids, inew
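
The disabled `if 0:` block above describes a referenced-node check; a cleaned-up sketch of that idea (the helper name is hypothetical), written against the same model attributes the block already uses (elements, masses, spoints, epoints):

def _check_referenced_nids(model, all_nids):
    """sketch of the disabled check above: every node id referenced by an
    element or mass element (ignoring SPOINTs/EPOINTs) should be in all_nids"""
    referenced = set()
    for element in model.elements.values():
        referenced.update(element.node_ids)
    for element in model.masses.values():
        referenced.update(element.node_ids)
    referenced -= set(model.spoints)
    referenced -= set(model.epoints)
    referenced.discard(None)

    missing_nids = sorted(referenced - set(all_nids))
    if missing_nids:
        raise RuntimeError('There are missing nodes; missing nids=%s' % missing_nids)
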
Example #10
def find_coplanar_triangles(bdf_filename: Union[BDF, str],
                            eids: Optional[List[int]] = None) -> Tuple[BDF, Set[int]]:
    """
    Finds coplanar triangles

    Parameters
    ----------
    bdf_filename : BDF/str
        BDF: a model
        str: the path to the bdf input file
    eids : list
        the element ids to consider

    Returns
    -------
    model : BDF()
        the BDF model object
    coplanar_eids : Set[int]
        the elements that are coplanar

    """
    model = get_bdf_model(bdf_filename, xref=False, log=None, debug=False)
    log = model.log

    if eids is None:
        eids = model.elements.keys()

    i = 0
    eids_removed = []
    eids_kept = []
    neids = len(eids)
    nids = np.zeros((neids, 3), dtype='int32')
    for eid in eids:
        elem = model.elements[eid]
        try:
            nids[i, :] = elem.nodes
        except ValueError:
            # not a 3-noded element; drop it from consideration
            eids_removed.append(eid)
            assert len(elem.nodes) != 3, str(elem)
            continue
        eids_kept.append(eid)
        i += 1

    if i != neids:
        log.warning(
            f'removed {neids-i} non-triangles; eids_removed={eids_removed}')
        nids = nids[:i, :]

    #nids = np.array([
    #[10, 20, 30],
    #[20, 30, 10],
    #[10, 30, 20],
    #], dtype='int32')

    # [1, 2, 3]
    # [2, 3, 1]
    # [1, 3, 2]

    #imin = nids.argmin(axis=1)
    #imax = nids.argmax(axis=1)
    imin = nids.min(axis=1)
    imax = nids.max(axis=1)

    #print('imin = %s' % (imin))  # [0, 2, 0]
    #print('imax = %s' % (imax))  # [2, 1, 1]

    imid = []
    for row, imini, imaxi in zip(nids, imin, imax):
        #a = [imini, imaxi]
        #print(row, imini, imaxi)
        a = list(row)
        #a.remove(row[imini])
        #a.remove(row[imaxi])
        #print(a)
        a.remove(imini)
        #print(a)
        a.remove(imaxi)
        #print(a)
        #print('')
        imid.append(a[0])

    #print('imid = %s' % (imid))  # [1, 0, 2]

    nids2 = np.vstack([imin, imid, imax]).T
    aset = set()
    eids_to_remove = set()
    for eid, row in zip(eids_kept, nids2):
        new_row = tuple(list(row))
        if new_row in aset:
            log.debug(f'eid={eid} exists already...')
            eids_to_remove.add(eid)
        else:
            aset.add(new_row)
    return model, eids_to_remove
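
A usage sketch based on what the function actually returns (the model plus the set of duplicated/coplanar element ids); the filename is hypothetical:

# find and delete the coplanar/duplicate triangles
model, coplanar_eids = find_coplanar_triangles('surface.bdf')  # hypothetical filename
for eid in coplanar_eids:
    del model.elements[eid]
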
Example #11
def get_oml_eids(
        bdf_filename: Union[str, BDF, PurePath, StringIO],
        eid_start: int,
        theta_tol: float = 30.,
        is_symmetric: bool = True,
        consider_flippped_normals: bool = True) -> Tuple[BDF, Set[int]]:
    """
    Extracts the OML faces (outer mold line) of a shell model.  In other words,
    find all the shell elements touching the current element without crossing
    an MPC or rigid element.

    Parameters
    ----------
    bdf_filename : str or BDF()
        the bdf filename
    eid_start : int
        the element to start from
    theta_tol : float; default=30.
        the angular tolerance in degrees
    is_symmetric : bool; default=True
        is the y=0 plane considered to be part of the OML
    consider_flippped_normals : bool; default=True
        if you extracted the free faces from tets, you can get flipped normals;
        this treats a 180 degree error as 0.0, which will cause other problems

    """
    #ninety = np.radians(90.)

    #2810 # start for bwb_saero.bdf
    #2811 # close
    #2819 # close
    #2818 # close

    #eids_oml = np.array([eid_start])
    eids_oml = set([eid_start])
    #---------------------------------
    theta_tol = np.radians(theta_tol)

    model = get_bdf_model(bdf_filename, xref=True, log=None, debug=True)
    maps = model._get_maps(eids=None,
                           map_names=None,
                           consider_0d=False,
                           consider_0d_rigid=False,
                           consider_1d=False,
                           consider_2d=True,
                           consider_3d=False)
    edge_to_eid_map = maps['edge_to_eid_map']
    eid_to_edge_map = maps['eid_to_edge_map']
    unused_nid_to_edge_map = maps['nid_to_edge_map']

    #free_edges = get_free_edges(model, maps=maps)
    #---------------------------------
    normals = {}
    etypes_skipped = set()
    shells = {'CTRIA3', 'CQUAD4', 'CTRIA6', 'CQUAD8', 'CQUAD'}
    for eid, elem in model.elements.items():
        if elem.type in shells:
            normals[eid] = elem.Normal()
        else:
            if elem.type in etypes_skipped:
                continue
            model.log.debug(f'elem.type={elem.type!r} is not supported')
            etypes_skipped.add(elem.type)

    #eid_starts = eids_oml.tolist()
    eids_next = set([eid_start])
    while eids_next:
        eid_starts = deepcopy(eids_next)
        eids_oml_start = deepcopy(eids_oml)
        model.log.debug(f'nactive_elements = {len(eid_starts)}')
        while eid_starts:
            eid_start = eid_starts.pop()
            normal_start = normals[eid_start]

            # get the next set of edges
            edges = eid_to_edge_map[eid_start]

            #flattened = []
            #for row in matrix:
            #for n in row:
            #flattened.append(n)
            # flattened = [n for row in matrix for n in row]
            #eids_to_consider = [edge_to_eid_map[edge] for edge in edges]
            list_eids_to_consider = []
            for edge in edges:
                eids_with_edge = edge_to_eid_map[edge]
                list_eids_to_consider += eids_with_edge
            #list_eids_to_consider = set([eid for eid in edge_to_eid_map[edge] for edge in edges])
            #print('list_eids_to_consider =', list_eids_to_consider)
            eids_to_consider = set(list_eids_to_consider)

            # don't do the same element twice; creates an infinite loop if you do
            #eids_to_check = np.setdiff1d(eids_to_consider, eids_oml)
            eids_to_check = eids_to_consider.difference(eids_oml)

            # don't check elements we're checking right now
            #eids_to_check = np.setdiff1d(eids_to_consider, eid_starts)
            eids_to_check = eids_to_check.difference(eid_starts)

            #print('eids_to_check =', eids_to_check)
            for eid in eids_to_check:
                normal = normals[eid]
                # a o b = a * b * cos(theta)
                # cos(theta) = (a o b)/ (a b); where |a| = 1; |b| = 1
                cos_theta = np.clip(normal @ normal_start, -1.0, 1.0)
                theta = np.arccos(cos_theta)
                if theta < theta_tol:
                    eids_next.add(eid)
                    eids_oml.add(eid)
                elif consider_flippped_normals:
                    # handles flipped normals
                    cos_theta = np.clip(normal @ -normal_start, -1.0, 1.0)
                    theta = np.arccos(cos_theta)
                    if theta < theta_tol:
                        eids_next.add(eid)
                        eids_oml.add(eid)
            #print('eids_next =', eids_next)
        eids_next = eids_next.difference(eids_oml_start)
        #eids_next = eids_next.difference(eid_starts)
        #print('eids_next =', eids_next)
        #print('-------------------------------')
    model.log.debug('done with get_oml_eids')

    #with open('eids_oml.txt', 'w') as eids_file:
    #eids_file.write('eids_oml = %s\n' % list(eids_oml))
    return model, eids_oml
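
A usage sketch; the seed element id and filename below echo the values mentioned in the comments above and are only examples:

# grow the OML outward from element 2810 with a 30 degree normal tolerance
model, eids_oml = get_oml_eids('bwb_saero.bdf', eid_start=2810, theta_tol=30.)
with open('eids_oml.txt', 'w') as eids_file:
    eids_file.write('eids_oml = %s\n' % list(eids_oml))
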
Example #12
def make_half_model(bdf_filename, plane: str='xz',
                    zero_tol: float=1e-12, log=None, debug: bool=True):
    """
    Makes a half/symmetric model from a full model

    Parameters
    ----------
    bdf_filename : str / BDF()
        str : the bdf filename
        BDF : the BDF model object
    plane : str; {'xy', 'yz', 'xz'}; default='xz'
        the plane to mirror about
        xz : +y/-y
        yz : +x/-x
        xy : +z/-z
    zero_tol : float; default=1e-12
        the symmetry plane tolerance

    Returns
    -------
    model : BDF()
        BDF : the BDF model object

    ## TODO: doesn't handle elements straddling the centerline

    """
    model = get_bdf_model(bdf_filename, xref=True, log=log, debug=debug)
    iy, plane = _plane_to_iy(plane)
    nids_to_remove = []
    eids_to_remove = []
    caero_ids_to_remove = []
    zero = -zero_tol
    for eid, elem in model.elements.items():
        xyz = elem.Centroid()

        if xyz[iy] < zero:
            eids_to_remove.append(eid)

    for nid, node in model.nodes.items():
        xyz = node.get_position()
        if xyz[iy] < zero:
            nids_to_remove.append(nid)

    for nid in nids_to_remove:
        del model.nodes[nid]

    for eid in eids_to_remove:
        del model.elements[eid]

    for caero_id, caero in model.caeros.items():
        if caero.type == 'CAERO1':
            p1, p2, p3, p4 = caero.get_points()
            #print(caero)
            if p1[iy] <= zero and p4[iy] <= zero:
                #print('p1=%s p4=%s' % (p1, p4))
                caero_ids_to_remove.append(caero_id)
            elif p1[iy] < zero:
                p1[iy] = 0.
                caero.set_points([p1, p2, p3, p4])
            elif p4[iy] < zero:
                p4[iy] = 0.
                caero.set_points([p1, p2, p3, p4])
        elif caero.type == 'CAERO2':
            # TODO: a CAERO2 can't be half symmetric...can it?
            # TODO: it can be skewed though...
            p1, p2 = caero.get_points()
            if p1[iy] <= zero and p2[iy] <= zero:
                #print('p1=%s p4=%s' % (p1, p4))
                caero_ids_to_remove.append(caero_id)
        else:  # pragma: no cover
            raise NotImplementedError(caero)

    for caero_id in caero_ids_to_remove:
        del model.caeros[caero_id]

    #print('nids_to_remove =', nids_to_remove)
    for unused_spline_id, spline in model.splines.items():
        caero = spline.caero
        #setg = spline.setg
        #print('caero = ', caero)
        nids = spline.setg_ref.ids  # list
        #spline.uncross_reference()

        #i = 0
        nids = list(set(nids) - set(nids_to_remove))
        nids.sort()
        spline.setg_ref.ids_ref = None
        spline.setg_ref.ids = nids

    plane_to_labels_keep_map = {
        'yz' : ['URDD4', 'URDD2', 'URDD3', 'SIDES', 'YAW'], # yz
        'xz' : ['URDD1', 'URDD5', 'URDD3', 'PITCH', 'ANGLEA'], # xz plane
        'xy' : ['URDD1', 'URDD2', 'URDD6', 'ROLL'], # xy plane
    }

    all_labels = {
        'URDD4', 'URDD2', 'URDD3', 'SIDES', 'YAW',
        'URDD1', 'URDD5', 'URDD3', 'PITCH', 'ANGLEA',
        'URDD1', 'URDD2', 'URDD6', 'ROLL',
    }
    labels_to_keep = plane_to_labels_keep_map[plane]
    labels_to_remove = [label for label in all_labels if label not in labels_to_keep]

    #print('labels_to_remove =', labels_to_remove)
    for aestat_id in list(model.aestats.keys()):
        aestat = model.aestats[aestat_id]
        if aestat.label in labels_to_remove:
            del model.aestats[aestat_id]

    for unused_trim_id, trim in model.trims.items():
        labels = trim.labels
        ilabels_to_remove = [labels.index(label) for label in labels_to_remove
                             if label in labels]
        #print("ilabels_to_remove =", ilabels_to_remove)
        trim.uxs = [uxi for ilabel, uxi in enumerate(trim.uxs)
                    if ilabel not in ilabels_to_remove]
        trim.labels = [label for ilabel, label in enumerate(trim.labels)
                       if ilabel not in ilabels_to_remove]
    return model
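
A usage sketch (filenames are hypothetical):

# keep only the +y half of a full-span model and write it out
model = make_half_model('full_span.bdf', plane='xz', zero_tol=1e-12)
model.write_bdf('half_span.bdf')
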
Example #13
def bdf_merge(bdf_filenames: List[str],
              bdf_filename_out: Optional[str] = None,
              renumber: bool = True,
              encoding: Optional[str] = None,
              size: int = 8,
              is_double: bool = False,
              cards_to_skip: Optional[List[str]] = None,
              skip_case_control_deck: bool = False,
              log: Optional[SimpleLogger] = None) -> Tuple[BDF, List[MAPPER]]:
    """
    Merges multiple BDF into one file

    Parameters
    ----------
    bdf_filenames : List[str]
        list of bdf filenames
    bdf_filename_out : str / None
        the output bdf filename (default=None; None -> no writing)
    renumber : bool
        should the bdf be renumbered (default=True)
    encoding : str
        the unicode encoding (default=None; system default)
    size : int; {8, 16}; default=8
        the bdf write precision
    is_double : bool; default=False
        the field precision to write
    cards_to_skip : List[str]; (default=None -> don't skip any cards)
        There are edge cases (e.g. FLUTTER analysis) where things can break due to
        uncross-referenced cards.  You need to disable entire classes of cards in
        that case (e.g. all aero cards).
    skip_case_control_deck : bool, optional, default : False
        If true, don't consider the case control deck while merging.

    Returns
    -------
    model : BDF
        Merged model.
    mappers_all : List[mapper]
        list of mapper dictionaries that map the original ids to the merged ids

        mapper : Dict[bdf_attribute, old_id_to_new_id_dict]
            bdf_attribute : str
                a BDF attribute (e.g., 'nodes', 'elements')
            old_id_to_new_id_dict : Dict[id_old, id_new]
                a sub-dictionary that maps the node/element/etc. ids
            mapper = {
                'elements' : eid_map,
                'nodes' : nid_map,
                'coords' : cid_map,
                ...
            }

    Supports
      nodes:      GRID
      coords:     CORDx
      elements:   CQUAD4, CTRIA3, CTETRA, CPENTA, CHEXA, CELASx, CBAR, CBEAM
                  CONM1, CONM2, CMASS
      properties: PSHELL, PCOMP, PSOLID, PMASS
      materials:  MAT1, MAT8

    .. todo:: doesn't support SPOINTs/EPOINTs
    .. warning:: still very preliminary

    """
    if not isinstance(bdf_filenames, (list, tuple)):
        raise TypeError('bdf_filenames is not a list/tuple...%s' %
                        str(bdf_filenames))

    if not len(bdf_filenames) > 1:
        raise RuntimeError("You can't merge one BDF...bdf_filenames=%s" %
                           str(bdf_filenames))
    for bdf_filename in bdf_filenames:
        if not isinstance(bdf_filename, (str, BDF, StringIO, PurePath)):
            raise TypeError('bdf_filenames is not a string/BDF...%s' %
                            bdf_filename)

        #bdf_filenames = [bdf_filenames]

    #starting_id_dict_default = {
    #'cid' : max(model.coords.keys()),
    #'nid' : max(model.nodes.keys()),
    #'eid' : max([
    #max(model.elements.keys()),
    #max(model.masses.keys()),
    #]),
    #'pid' : max([
    #max(model.properties.keys()),
    #max(model.properties_mass.keys()),
    #]),
    #'mid' : max(model.material_ids),
    #}
    bdf_filename0 = bdf_filenames[0]
    model = get_bdf_model(bdf_filename0,
                          xref=True,
                          cards_to_skip=cards_to_skip,
                          validate=False,
                          log=log,
                          debug=False)
    #if isinstance(bdf_filename0, BDF):
    #model = bdf_filename0
    #else:
    #model = BDF(debug=False, log=log)
    #model.disable_cards(cards_to_skip)
    #model.read_bdf(bdf_filename0, encoding=encoding, validate=False)

    if skip_case_control_deck:
        model.case_control_deck = CaseControlDeck([], log=None)
    model.log.info('primary=%s' % bdf_filename0)

    _mapper_0 = _get_mapper_0(model)  # mapper for first model

    data_members = [
        'coords',
        'nodes',
        'elements',
        'masses',
        'properties',
        'properties_mass',
        'materials',
        'sets',
        'rigid_elements',
        'mpcs',
        'caeros',
        'splines',
    ]
    mappers = []
    for bdf_filename in bdf_filenames[1:]:
        starting_id_dict = get_renumber_starting_ids_from_model(model)
        #for param, val in sorted(starting_id_dict.items()):
        #print('  %-3s %s' % (param, val))

        model.log.debug('secondary=%s' % bdf_filename)
        model2_renumber = get_bdf_model(bdf_filename,
                                        xref=True,
                                        cards_to_skip=cards_to_skip,
                                        validate=True,
                                        log=log,
                                        debug=False)

        #if isinstance(bdf_filename, BDF):
        #model2_renumber = bdf_filename
        #else:
        #model2_renumber = BDF(debug=False, log=log)
        #model2_renumber.disable_cards(cards_to_skip)
        #model2_renumber.read_bdf(bdf_filename)

        _apply_scalar_cards(model, model2_renumber)

        bdf_dump = StringIO()  # 'bdf_merge_temp.bdf'
        _, mapperi = bdf_renumber(model2_renumber,
                                  bdf_dump,
                                  starting_id_dict=starting_id_dict,
                                  size=size,
                                  is_double=is_double,
                                  cards_to_skip=cards_to_skip)
        bdf_dump.seek(0)

        mappers.append(mapperi)
        model2 = BDF(debug=False, log=log)
        model2.disable_cards(cards_to_skip)
        model2.read_bdf(bdf_dump)

        #model.log.info('model2.node_ids = %s' % np.array(model2.node_ids))
        for data_member in data_members:
            data1 = getattr(model, data_member)
            data2 = getattr(model2, data_member)
            if isinstance(data1, dict):
                #model.log.info('  working on %s' % (data_member))
                for key, value in data2.items():
                    if data_member == 'coords' and key == 0:  # cid=0 already exists in the primary model
                        continue

                    if isinstance(value, list):
                        assert key not in data1, key
                        data1[key] = value
                    else:
                        assert key not in data1, f'{data_member} key={key}\n{data1}'
                        data1[key] = value
                        #print('   %s' % key)
            else:  # pragma: no cover
                raise NotImplementedError(type(data1))
    #if bdf_filenames_out:
    #model.write_bdf(bdf_filenames_out, size=size)

    mapper_renumber = None
    if renumber:
        model.log.debug('final renumber...')

        starting_id_dict = {
            'cid': 1,
            'nid': 1,
            'eid': 1,
            'pid': 1,
            'mid': 1,
        }
        _, mapper_renumber = bdf_renumber(model,
                                          bdf_filename_out,
                                          starting_id_dict=starting_id_dict,
                                          size=size,
                                          is_double=is_double,
                                          cards_to_skip=cards_to_skip)
        bdf_filename_temp = StringIO()
        model.write_bdf(bdf_filename_temp,
                        size=size,
                        is_double=False,
                        interspersed=False,
                        enddata=None,
                        close=False)
        bdf_filename_temp.seek(0)
        model = read_bdf(bdf_filename_temp,
                         validate=False,
                         xref=model._xref,
                         punch=False,
                         log=model.log,
                         debug=True,
                         mode=model._nastran_format)

    elif bdf_filename_out:
        model.write_bdf(out_filename=bdf_filename_out,
                        encoding=None,
                        size=size,
                        is_double=is_double,
                        interspersed=True,
                        enddata=None)

    mappers_final = _assemble_mapper(mappers,
                                     _mapper_0,
                                     data_members,
                                     mapper_renumber=mapper_renumber)
    return model, mappers_final
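
A usage sketch (filenames are hypothetical); per the docstring above, each entry of the returned mapper list holds old-id-to-new-id dictionaries keyed by BDF attribute:

# merge two decks, renumber everything starting at 1, and write the result
model, mappers = bdf_merge(['wing.bdf', 'fuselage.bdf'],
                           bdf_filename_out='merged.bdf', renumber=True)
nid_map = mappers[0]['nodes']  # old node id -> new node id for one of the input decks
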