def parse_grid_mesh(mesh):
    """Parse the grid mesh"""
    # get the grid
    grid = mesh.getElementsByTagName("Grid")
    if not grid:
        raise AFEPYError("No Mesh.Grid element found")
    if len(grid) > 1:
        raise AFEPYError("Only 1 Mesh.Grid element supported")
    grid = grid[0]

    shape = grid.getAttribute("shape")
    if not shape:
        raise AFEPYError("no shape attribute found for Mesh.Grid")
    shape = tolist(shape, dtype=Int)
    num_dim = len(shape)

    lengths = grid.getAttribute("lengths")
    if not lengths:
        raise AFEPYError("no lengths attribute found for Mesh.Grid")
    lengths = tolist(lengths, dtype=Float)
    if len(lengths) != num_dim:
        raise AFEPYError("len(Mesh.Grid.lengths) must equal num_dim")

    nodes, conn = gen_mesh_grid(shape, lengths)
    node_num_map = dict([(i+1, i) for i in range(nodes.shape[0])])
    elem_num_map = dict([(i+1, i) for i in range(conn.shape[0])])

    return num_dim, nodes, conn, node_num_map, elem_num_map
def parse_conn(mesh, num_dim, node_num_map):
    """Parse the (single) Connectivity element

    Notes
    -----
    <Connectivity offsets="int">
      int int ... int
      ...
    </Connectivity>

    """
    els = mesh.getElementsByTagName("Connectivity")
    if not els:
        return

    N = 0
    _conn = []
    elem_num_map = {}
    num_elem = 0
    blx = {}
    for el in els:
        offsets = Int(el.getAttribute("offsets"))
        conn = child_data_to_array(el, np.int32)
        elem_nums = conn[:, 0]
        conn = conn[:, 1:]
        for (i, nodes) in enumerate(conn):
            elem_num_map[elem_nums[i]] = num_elem
            for (j, n) in enumerate(nodes):
                try:
                    nodes[j] = node_num_map[n]
                except KeyError:
                    raise AFEPYError("{0}: node not defined".format(n))
            conn[i] = nodes
            num_elem += 1
        if conn.shape[1] != offsets:
            raise AFEPYError("incorrect offsets")
        _conn.append(conn)
        N = max(N, offsets)

        block = el.getAttribute("block")
        if block:
            blx[block] = {"name": block,
                          "exo_id": BLOCK_ID_START + 500 + len(blx),
                          "elem_type": get_elem_type(num_dim, offsets),
                          "elements": elem_nums}

    elem_num = 0
    connect = -np.ones((num_elem, N), dtype=np.int32)
    for item in _conn:
        for (i, nodes) in enumerate(item):
            connect[elem_num, :len(nodes)] = nodes
            elem_num += 1

    return elem_num_map, connect, blx
def Material(model=None, parameters=None, name=None, density=None):
    if name is None:
        for i in range(1, 100):
            name = "Material-{0}".format(len(materials) + i)
            if name not in materials:
                break
        else:
            raise AFEPYError("Maximum number of materials exceeded")

    # look up the material definition before dereferencing it so that an
    # unknown model raises the intended error rather than an AttributeError
    mat_def = matdb.get(model.lower())
    if mat_def is None:
        raise AFEPYError("'{0}' is not a material".format(model))
    material = mat_def.mat_class

    if isinstance(parameters, dict):
        parameters = material.parse_parameters(parameters)

    mat = material(parameters)
    materials[name] = mat
    mat.density = density or 1.
    return mat
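# A hypothetical usage sketch for the Material factory above. The keyword
# names come from the signature of Material(); the model name and parameter
# keys are illustrative assumptions, not models guaranteed to exist in matdb.
#
#     steel = Material(model="elastic", parameters={"E": 200.e9, "Nu": 0.3},
#                      name="Steel", density=7850.)
#
# If ``name`` is omitted a unique "Material-<n>" name is generated, and a
# missing ``density`` defaults to 1.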
def ElementBlock(self, name=None, elements=None, exo_id=None, mapped=1):
    """Assign elements to an element block"""
    if name is None:
        i = 1
        while True:
            name = "Block-{0}".format(i)
            if name not in self.blocks:
                break
            i += 1
            if i > MAX_NUM_SETS:
                raise AFEPYError("maximum number of blocks exceeded")

    if name in self.blocks:
        raise AFEPYError("{0}: element block already exists".format(name))

    if exo_id is None:
        exo_id = BLOCK_ID_START
        while True:
            if exo_id not in self.blocks.exo_ids:
                break
            exo_id += 1
            if exo_id > MAX_NUM_SETS + BLOCK_ID_START:
                raise AFEPYError("maximum number of blocks exceeded")

    if isstr(elements):
        if elements.lower() == "all":
            elements = np.array(range(self.num_cell), dtype=np.int)
        elif elements.lower() == "unassigned":
            assigned = []
            for elem_blk in self.element_blocks:
                assigned.extend(elem_blk.elem_ids)
            elements = list(set(self.elem_ids) - set(assigned))
            elements = np.array(elements, dtype=np.int)
        else:
            raise AFEPYError("{0}: unrecognized option".format(elements))
    else:
        if mapped:
            elements = [self.elem_num_map[e] for e in elements]
        elements = np.array(elements, dtype=np.int)

    self.blocks[name] = ElementBlock(name, elements, exo_id)
    return self.blocks[name]
def _init(V, sideset, traction):
    if isinstance(sideset, (tuple, list)):
        sideset = V.Sideset(None, sideset).name
    if sideset not in V.sidesets:
        raise AFEPYError("{0}: invalid side set ID".format(sideset))
    data = []
    for (elem_num, face) in V.sidesets[sideset].surfaces:
        trac = array(traction)[:V.num_dof_per_node]
        data.append([elem_num, face, trac])
    return data
def get_node_ids(self, nodes, nodeset, region):
    # exactly one of nodeset, nodes, region must be given
    if all([x is None for x in (nodeset, nodes, region)]):
        raise AFEPYError("1 of [nodeset, nodes, region] keywords required")
    if len([x for x in (nodeset, nodes, region) if x is not None]) > 1:
        raise AFEPYError("only 1 of [nodeset, nodes, region] keywords allowed")

    if nodeset:
        try:
            node_ids = self.nodesets[nodeset].node_ids
        except KeyError:
            raise AFEPYError("{0}: invalid node set ID".format(nodeset))
    elif nodes:
        node_ids = [self.dof_map(node) for node in nodes]
    else:
        node_ids = self.nodes_in_region(region)

    return node_ids
def Mesh(*args, **kwargs):
    filename = kwargs.pop("file", None)
    mesh_type = kwargs.pop("type", None)
    string = kwargs.pop("string", None)
    if all([x is None for x in (filename, mesh_type, string)]):
        raise AFEPYError(
            "Mesh requires 1 of 'file, type, string' keywords")

    if filename is not None:
        ext = splitext(filename)[1]
        if "xml" in filename.split("."):
            mesh = MeshBase.from_xml(filename=filename)
        elif ext in (".g", ".gen"):
            mesh = MeshBase.from_genesis(filename)
        else:
            raise AFEPYError("unknown mesh file extension '{0}'".format(ext))

    elif string is not None:
        mesh = MeshBase.from_xml(string=string)

    else:
        if mesh_type is None:
            raise AFEPYError("grid mesh requires 'type' keyword")
        mt = mesh_type.lower()
        if mt == "grid":
            dim = getopt("dimension", kwargs, pop=1)
            if dim is None:
                raise AFEPYError("grid mesh requires 'dimension' keyword")
            dim = Int(dim)
            if dim not in (2, 3):
                raise AFEPYError("grid mesh requires dim to be 2 or 3")
            nx = Int(getopt("nx", kwargs, 2, pop=1))
            ny = Int(getopt("ny", kwargs, 2, pop=1))
            lx = Float(getopt("lx", kwargs, 1., pop=1))
            ly = Float(getopt("ly", kwargs, 1., pop=1))
            if dim == 2:
                mesh = MeshBase.mesh_grid_2D(nx, ny, lx, ly)
            else:
                nz = Int(getopt("nz", kwargs, 2, pop=1))
                lz = Float(getopt("lz", kwargs, 1., pop=1))
                mesh = MeshBase.mesh_grid_3D(nx, ny, nz, lx, ly, lz, **kwargs)

        elif mt[:5] == "unstr":
            offset = int(getopt("offset", kwargs, 1))
            coords = Array(getopt("vertices", kwargs))
            dim = Int(getopt("dimension", kwargs, coords.shape[1]))
            connect = Array(getopt("connect", kwargs)) - offset
            nmap = getopt("node_num_map", kwargs)
            if nmap is None:
                nmap = dict([(i + 1, i) for i in range(coords.shape[0])])
            emap = getopt("elem_num_map", kwargs)
            if emap is None:
                emap = dict([(i + 1, i) for i in range(connect.shape[0])])
            mesh = MeshBase(mt, dim, coords, connect, nmap, emap)

        else:
            raise AFEPYError("unknown mesh type '{0}'".format(mesh_type))

    return mesh
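# Hypothetical invocation sketches for the Mesh() factory above, one per input
# mode it accepts. The file name and grid sizes are illustrative assumptions;
# only the keyword names come from the function itself.
#
#     mesh = Mesh(file="plate.xml")                 # read from an XML file
#     mesh = Mesh(string=xml_text)                  # parse an XML string
#     mesh = Mesh(type="grid", dimension=2,         # generate a 2D grid mesh
#                 nx=4, ny=4, lx=1., ly=1.)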
def parse_xml(filename=None, string=None):
    if string is None and filename is None:
        raise AFEPYError("parse_xml must receive a filename or string argument")
    if string is not None and filename is not None:
        raise AFEPYError("parse_xml must receive only 1 of "
                         "filename or string argument")
    if string:
        doc = xdom.parseString(string)
    elif filename.endswith(".gz"):
        f = gzip.open(filename, "rb")
        doc = xdom.parseString(f.read())
        f.close()
    else:
        doc = xdom.parse(filename)

    root = doc.getElementsByTagName("Mesh")[0]
    (num_dim, nodes, conn, nsdict, ssdict,
     blkdict, node_num_map, elem_num_map) = parse_xml_mesh(root)

    # format sidesets
    sidesets = []
    for (name, v) in ssdict.items():
        s = Namespace("Sideset", name=name, exo_id=v["exo_id"],
                      data=v["surface"])
        sidesets.append(s)

    nodesets = []
    for (name, v) in nsdict.items():
        s = Namespace("Nodeset", name=name, exo_id=v["exo_id"],
                      data=v["nodes"])
        nodesets.append(s)

    blocks = []
    for (name, block) in blkdict.items():
        b = Namespace("ElementBlock", name=name, exo_id=block["exo_id"],
                      elem_type=block["elem_type"],
                      elements=block["elements"])
        blocks.append(b)

    return Namespace("Mesh", coords=nodes, connect=conn, num_dim=num_dim,
                     node_num_map=node_num_map, sidesets=sidesets,
                     nodesets=nodesets, elem_num_map=elem_num_map,
                     blocks=blocks)
def parse_xml_mesh(mesh):
    """Parse the (single) Mesh element

    Notes
    -----
    <Mesh>
      <Vertices> ... </Vertices>
      <Connectivity> ... </Connectivity>
      <Sideset> ... </Sideset>
      <Nodeset> ... </Nodeset>
    </Mesh>

    """
    mesh_types = ("DEFAULT", "GRID")
    mesh_type = get_attribute(mesh, "type", "DEFAULT").upper()
    if mesh_type not in mesh_types:
        raise AFEPYError("{0}: unrecognized mesh type".format(mesh_type))

    if mesh_type == "DEFAULT":
        node_num_map, nodes, num_dim = parse_nodes(mesh)
        elem_num_map, conn, eblx = parse_conn(mesh, num_dim, node_num_map)
        nodes = nodes[:, :num_dim]
    elif mesh_type == "GRID":
        num_dim, nodes, conn, node_num_map, elem_num_map = parse_grid_mesh(mesh)
        # grid meshes define no Connectivity blocks
        eblx = {}

    # find node sets, side sets
    ns = parse_ns(mesh, nodes, conn, node_num_map) or {}
    ss = parse_ss(mesh, elem_num_map) or {}
    blx = parse_blocks(mesh, num_dim, conn, elem_num_map)
    for k in eblx:
        if k in blx:
            raise AFEPYError("Duplicate block entry {0}".format(k))
    blx.update(eblx)

    return (num_dim, nodes[:, :num_dim], conn, ns, ss, blx,
            node_num_map, elem_num_map)
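# For reference, a minimal XML document of the kind parse_xml_mesh and the
# element parsers below expect, assembled from the formats documented in
# their docstrings. The concrete node/element numbers are illustrative only.
#
#     <Mesh>
#       <Vertices dimension="2">
#         1 0.0 0.0
#         2 1.0 0.0
#         3 1.0 1.0
#         4 0.0 1.0
#       </Vertices>
#       <Connectivity offsets="4">
#         1 1 2 3 4
#       </Connectivity>
#       <Nodeset name="left"> 1 4 </Nodeset>
#       <Sideset name="bottom"> 1 1 </Sideset>
#     </Mesh>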
def find_orphans(self):
    orphans = []
    for elem_id in range(self.num_cell):
        orphaned = True
        for elem_blk in self.element_blocks:
            if elem_id in elem_blk.elem_ids:
                orphaned = False
                break
        if orphaned:
            orphans.append(elem_id)

    if orphans:
        o = ", ".join("{0}".format(x) for x in orphans)
        raise AFEPYError("Orphaned elements detected. All elements "
                         "must be assigned to an element block. "
                         "Orphaned elements:\n {0}".format(o))
def parse_nodes(mesh):
    """Parse the (single) Vertices element

    Notes
    -----
    <Vertices dimension="int">
      int float [float [float]]
      ...
    </Vertices>

    """
    els = mesh.getElementsByTagName("Vertices")
    if not els:
        return
    if len(els) > 1:
        raise AFEPYError("Only one Vertices block supported")

    dimension = els[0].getAttribute("dimension")
    if not dimension:
        raise AFEPYError("Missing Vertices.dimension attribute")
    dimension = Int(dimension)

    nodes = child_data_to_array(els[0], np.float64)

    # nodes given as
    #   node_num x y [z]
    # reorder them in ascending node number
    node_nums = []
    for node_num in np.array(nodes[:, 0], dtype=np.int):
        if node_num in node_nums:
            raise AFEPYError("{0}: duplicate node number".format(node_num))
        node_nums.append(node_num)
    node_nums = np.array(node_nums)

    node_num_map = {}
    for (i, n) in enumerate(node_nums):
        node_num_map[n] = i

    nodes = nodes[:, 1:dimension+1]
    return node_num_map, nodes, dimension
def get_axis_pos(self, region):
    R = region.upper()
    try:
        # explicit "AXIS == pos" specification
        R, pos = R.split("==", 1)
        pos = float(pos)
        axis = {"X": 0, "Y": 1, "Z": 2}[R.strip().upper()]
    except (ValueError, KeyError):
        # fall back to named domain regions
        try:
            axis, b = DOMAINS[R]
            pos = self.extremum[(axis, b)]
        except KeyError:
            raise AFEPYError("{0}: invalid nodeset region".format(region))
    return axis, pos
def parse_ss(mesh, elem_num_map):
    """Parse the Sideset elements

    Notes
    -----
    <Sideset exo_id="int">
      int int
      ...
    </Sideset>

    """
    els = mesh.getElementsByTagName("Sideset")
    if not els:
        return

    ssdict = {}
    for el in els:
        attributes = dict(el.attributes.items())
        exo_id = attributes.get("exo_id")
        if exo_id is not None:
            exo_id = Int(exo_id)
        name = attributes.get("name")
        if all([x is None for x in (exo_id, name)]):
            raise AFEPYError("Sideset requires at least one "
                             "of name or exo_id attribute")

        if name is None:
            i = 1
            while True:
                name = "Sideset-{0}".format(i)
                if name not in ssdict:
                    break
                i += 1
                if i > MAX_NUM_SETS:
                    raise AFEPYError("maximum number of sidesets exceeded")

        if name in ssdict:
            raise AFEPYError("{0}: duplicate sideset".format(name))

        if exo_id is None:
            exo_id = SSET_ID_START
            while True:
                if exo_id not in [v["exo_id"] for (k, v) in ssdict.items()]:
                    break
                exo_id += 1
                if exo_id > MAX_NUM_SETS + SSET_ID_START:
                    raise AFEPYError("maximum number of sidesets exceeded")

        sideset = child_data_to_array(el, np.int, flatten=True)
        if sideset.size % 2 != 0:
            raise AFEPYError("Sidesets must be specified as "
                             "'elem_id face_id' pairs")
        sideset = sideset.reshape(sideset.size // 2, 2)
        for (i, elem_face) in enumerate(sideset):
            if elem_face[0] not in elem_num_map:
                raise AFEPYError("'{0}': element not in mesh".format(elem_face[0]))

        ssdict[name] = {"name": name, "exo_id": exo_id, "surface": sideset}

    return ssdict
def __init__(self, V=None, mag=None, dofs=None, nodes=None,
             nodeset=None, region=None, components=None, copy_from=None):
    if copy_from is not None:
        self.data = [x for x in copy_from.data]
    else:
        if V is None:
            raise AFEPYError("missing V")
        node_ids = V.get_node_ids(nodes, nodeset, region)
        components = self.format_components(V, mag, dofs, components)
        self.data = self.init(components, node_ids)
def Nodeset(self, name=None, nodes=None, region=None, exo_id=None,
            tol=None, mapped=1):
    """Assign nodesets to the mesh"""
    if name is None:
        i = 1
        while True:
            name = "Nodeset-{0}".format(i)
            if name not in self.nodesets:
                break
            i += 1
            if i > MAX_NUM_SETS:
                raise AFEPYError("maximum number of nodesets exceeded")

    if name in self.nodesets:
        raise AFEPYError("{0}: nodeset already exists".format(name))

    if exo_id is None:
        exo_id = NSET_ID_START
        while True:
            if exo_id not in self.nodesets.exo_ids:
                break
            exo_id += 1
            if exo_id >= MAX_NUM_SETS + NSET_ID_START:
                raise AFEPYError("maximum number of nodesets exceeded")

    if exo_id in self.nodesets.exo_ids:
        raise AFEPYError("{0}: nodeset exo_id already exists".format(name))

    ns_nodes = []
    if nodes is not None:
        try:
            if mapped:
                ns_nodes = [self.node_num_map[node] for node in nodes]
            else:
                ns_nodes = [node for node in nodes]
        except KeyError:
            raise AFEPYError("invalid node number for node "
                             "set {0}".format(name))

    if region is not None:
        nodes = self.nodes_in_region(region, tol=tol)
        if not nodes:
            raise AFEPYError("no nodes found in region {0}".format(region))
        ns_nodes.extend(nodes)

    self.nodesets[name] = Nodeset(name, exo_id, ns_nodes)
    return self.nodesets[name]
def initialize(self, elem_id, elem_nodes, material, **kwargs):
    self.elem_id = elem_id
    self.elem_nodes = elem_nodes
    self.density = kwargs.get("density", 1.)
    self.material = material

    # register required variables
    self.variables = []
    self.variables.append(Variable("STRESS", SYMTEN, None, None))
    self.variables.append(Variable("STRAIN", SYMTEN, None, None))
    self.variables.append(Variable("DSTRAIN", SYMTEN, None, None))
    elem_var_names = [v.name for v in self.variables]
    for v in material.variables:
        if v.name in elem_var_names:
            ev = self.variables[elem_var_names.index(v.name)]
            if v.type == ev.type:
                continue
            raise AFEPYError("duplicate variable {0}".format(v.name))
        self.variables.append(v)
def parse_gmsh(filename):
    lines = [line for line in open(filename).readlines() if line.split()]
    version = lines[0].split()[1]
    try:
        num_dim = int(lines[1].split()[1])
    except IndexError:
        raise AFEPYError("failed to parse number of dimensions "
                         "from {0}".format(filename))

    assert lines[2].strip() == "Vertices"
    num_node = int(lines[3])
    nodes = []
    for (i, line) in enumerate(lines[4:], 4):
        if re.search("(?i)^[A-Z]", line.strip()):
            break
        nodes.append([float(x) for x in line.split()][:num_dim])
    nodes = np.array(nodes)
    assert nodes.shape[0] == num_node

    elem_type = lines[i].strip().lower()
    nc = {"tet": 4, "hex": 8, "tri": 3, "qua": 4}[elem_type[:3]]
    num_elem = int(lines[i+1])
    elems = []
    for line in lines[i+2:]:
        if re.search("(?i)^[A-Z]", line.strip()):
            break
        elems.append([int(x) for x in line.split()][:nc])
    elems = np.array(elems)
    assert elems.shape[0] == num_elem, "{0}, {1}".format(elems.shape[0], num_elem)

    node_num_map = dict([(i+1, i) for i in range(nodes.shape[0])])
    elem_num_map = dict([(i+1, i) for i in range(elems.shape[0])])

    # gmsh files parsed here carry no side set or node set definitions
    return Namespace("Mesh", coords=nodes, connect=elems, num_dim=num_dim,
                     node_num_map=node_num_map, sidesets=[], nodesets=[],
                     elem_num_map=elem_num_map, blocks={})
def apply_bcs(self, data, disp_bcs, A, b, du=None, M=None, fac=1.):
    logger.debug("applying displacement boundary conditions...", end=" ")
    if du is None:
        du = np.zeros(self.num_dof)

    for (node_id, dof, mag) in bcsum(disp_bcs):
        # Modify rows and columns of K
        if dof > self.num_dof_per_node:
            raise AFEPYError("incorrect dof")
        row = int(self.num_dof_per_node * node_id + dof)

        # current value of displacement
        u_cur = data[row] + du[row]
        ufac = fac * mag - u_cur

        # Modify the RHS
        b -= [A[i, row] * ufac for i in range(self.num_dof)]

        # modify stiffness
        A[row, :] = 0.
        A[:, row] = 0.
        A[row, row] = 1.
        if M is not None:
            M[row, :] = 0.
            M[:, row] = 0.
            M[row, row] = 1.
        b[row] = ufac

    logger.debug("done")
    return
def FiniteElement(elem_type, material, **options):
    """Get the element factory for elem_type

    Parameters
    ----------
    elem_type : str
        Element type
    material : object
        Material model assigned to the element

    Returns
    -------
    Element : callable
        Factory that instantiates the element class for elem_type with
        the given material and options

    """
    try:
        elem_class = ELEMENTS[elem_type.upper()]
    except KeyError:
        raise AFEPYError("{0}: unknown element type".format(elem_type))
    options.update({"density": material.density})

    def Element(elem_id, elem_nodes):
        return elem_class(elem_id, elem_nodes, material, **options)

    return Element
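# A hypothetical usage sketch for FiniteElement(): the returned ``Element``
# callable is what gets mapped to block names in the element/properties
# dictionary used elsewhere in this module. The element type key "QUAD4" and
# the material parameters are assumptions; valid keys are whatever ELEMENTS
# and matdb actually define.
#
#     mat = Material(model="elastic", parameters={"E": 10.e6, "Nu": 0.3})
#     Element = FiniteElement("QUAD4", mat)
#     elem = Element(0, [0, 1, 2, 3])   # instantiate element 0 with its nodes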
def register_variable(self, name, type, keys=None, ivals=None):
    if name in [v.name for v in self.variables]:
        raise AFEPYError("duplicate element variable {0}".format(name))
    self.variables.append(Variable(name, type, keys, ivals))
def StaticNonlinearSolve(V, data, node_bcs, tractions=None, F=None,
                         period=1., increments=5, maxiters=10,
                         tolerance=1.E-4, relax=1.):
    """Assemble and solve the system of equations

    Notes
    -----
    Assembles and solves

        Ku = F

    where

        K is the global finite element stiffness
        F is the global finite element load
        u is the unknown nodal displacement vector

    """
    # Allocate storage
    u = data.Zeros()
    if F is None:
        F = data.Zeros()
    dtime = period / float(increments)

    logger.write_intro("Static, nonlinear", increments, tolerance, maxiters,
                       relax, 0., period, V.num_dim, V.num_elem, V.num_node)
    logger.info(HEAD)

    istep = 1
    for iframe in range(increments):
        kframe = iframe + 1
        load_fac = float(kframe) / float(increments)
        err1 = 1.

        # Newton-Raphson loop
        for nit in range(maxiters):
            # Update element states
            V.update_kinematics(data, u, dtime)
            A = V.stiffness(data, u, data.time, dtime, kframe)
            R = V.residual(data, u)
            q = V.force(tractions) + F

            # Compute global force
            rhs = load_fac * q - R

            # Enforce displacement boundary conditions
            V.apply_bcs(data, node_bcs, A, rhs, u, fac=load_fac)

            # --- Solve for the nodal displacement
            du = linsolve(A, rhs)

            # --- update displacement increment
            u += relax * du

            # --- Check convergence
            dusq = dot(u, u)
            err1 = dot(du, du)
            if dusq != 0.:
                err1 = sqrt(err1 / dusq)
            err2 = sqrt(dot(rhs, rhs)) / float(V.num_dof)

            message = ITER_FMT.format(istep, nit+1, load_fac, data.time,
                                      dtime, err1, err2, tolerance, kframe)
            logger.info(message)

            if err1 < tolerance or err2 < tolerance:
                break

        else:
            raise AFEPYError("Failed to converge on step {0}, "
                             "frame {1}".format(istep, kframe))

        data.time += dtime
        data += u

    logger.write("Simulation completed successfully\n")
    logger.write("=" * 78)
    return
def parse_ns(mesh, coords, conn, node_num_map):
    """Parse the Nodeset elements

    Notes
    -----
    <Nodeset exo_id="int">
      int int ... int
      ...
    </Nodeset>

    """
    els = mesh.getElementsByTagName("Nodeset")
    if not els:
        return

    nsdict = {}
    for el in els:
        attributes = dict(el.attributes.items())
        exo_id = attributes.get("exo_id")
        if exo_id is not None:
            exo_id = Int(exo_id)
        name = attributes.get("name")
        if all([x is None for x in (exo_id, name)]):
            raise AFEPYError("Nodeset requires at least one "
                             "of name or exo_id attribute")

        if name is None:
            i = 1
            while True:
                name = "Nodeset-{0}".format(i)
                if name not in nsdict:
                    break
                i += 1
                if i > MAX_NUM_SETS:
                    raise AFEPYError("maximum number of nodesets exceeded")

        if name in nsdict:
            raise AFEPYError("{0}: duplicate nodeset".format(name))

        if exo_id is None:
            exo_id = NSET_ID_START
            while True:
                if exo_id not in [v["exo_id"] for (k, v) in nsdict.items()]:
                    break
                exo_id += 1
                if exo_id > MAX_NUM_SETS + NSET_ID_START:
                    raise AFEPYError("maximum number of nodesets exceeded")

        axis = find_region(el)
        if axis:
            box = bounding_box(coords)
            nodes, els = members_at_position(coords, conn, axis[0], box[axis])
        else:
            # nodesets are 1 based in the input file, convert to zero based
            # through the node_num_map
            nodes = child_data_to_array(el, np.int, flatten=1)
            nodes = np.array(nodes)
            for n in nodes:
                if n not in node_num_map:
                    raise AFEPYError("Nodeset '{0}' references "
                                     "nonexistent nodes".format(name))

        nsdict[name] = {"name": name, "exo_id": exo_id, "nodes": nodes}

    return nsdict
def parse_blocks(mesh, num_dim, connect, elem_num_map):
    """Parse the ElementBlock elements

    Notes
    -----
    <ElementBlock exo_id="int">
      contents
    </ElementBlock>

    contents can be
      o generate start end [interval]
      o all
      o int int ... int
        ...

    """
    els = mesh.getElementsByTagName("ElementBlock")
    if not els:
        return {}

    blocks = {}
    assigned = []
    for el in els:
        attributes = dict(el.attributes.items())
        exo_id = attributes.get("exo_id")
        if exo_id is not None:
            exo_id = Int(exo_id)
        name = attributes.get("name")
        if all([x is None for x in (exo_id, name)]):
            raise AFEPYError("ElementBlock requires at least one "
                             "of name or exo_id attribute")

        if name is None:
            i = 1
            while True:
                name = "Block-{0}".format(i)
                if name not in blocks:
                    break
                i += 1
                if i > MAX_NUM_SETS:
                    raise AFEPYError("maximum number of element blocks exceeded")

        if name in blocks:
            raise AFEPYError("{0}: duplicate block".format(name))

        if exo_id is None:
            exo_id = BLOCK_ID_START
            while True:
                if exo_id not in [v["exo_id"] for (k, v) in blocks.items()]:
                    break
                exo_id += 1
                if exo_id > MAX_NUM_SETS + BLOCK_ID_START:
                    raise AFEPYError("maximum number of element blocks exceeded")

        blocks[name] = {"name": name, "exo_id": exo_id}

        block = child_data_to_list(el)

        # check for special arguments
        elems = []
        for line in block:
            if line[0].lower() == "all":
                elems = elem_num_map.keys()
                break
            if line[0].lower() == "unassigned":
                elems.extend([x for x in elem_num_map.keys()
                              if x not in assigned])
                break
            if line[0].lower() == "generate":
                line[1:] = [Int(x) for x in line[1:]]
                try:
                    xi, xf, inc = line[1:]
                except ValueError:
                    xi, xf = line[1:]
                    inc = 1
                elems.extend(range(xi, xf+1, inc))
            else:
                elems.extend([Int(x) for x in line])

        num_node_per_elem = None
        for elem in elems:
            if elem not in elem_num_map:
                raise AFEPYError("block '{0}' references "
                                 "non-existent elements".format(name))
            assigned.append(elem)
            nodes = [i for i in connect[elem_num_map[elem]] if i >= 0]
            if num_node_per_elem is None:
                num_node_per_elem = len(nodes)
            elif num_node_per_elem != len(nodes):
                raise AFEPYError("All elements in block '{0}' must have "
                                 "same number of nodes".format(name))

        blocks[name]["elements"] = np.array(elems, dtype=np.int)

        elem_type = el.getAttribute("elem_type")
        if not elem_type.strip():
            elem_type = get_elem_type(num_dim, num_node_per_elem)
        blocks[name]["elem_type"] = elem_type

    return blocks
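# For illustration, ElementBlock contents matching the forms listed in the
# docstring above (the element numbers and block names are placeholders):
#
#     <ElementBlock name="blk-1"> generate 1 10 </ElementBlock>
#     <ElementBlock name="blk-2"> all </ElementBlock>
#     <ElementBlock name="blk-3"> 11 12 13 </ElementBlock>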
def Sideset(self, name, surfaces=None, exo_id=None, region=None, mapped=1):
    """Assign side sets to the mesh

    Parameters
    ----------
    surfaces : ndarray
        Side set specification
        sset[i, j] - jth face of element i
    exo_id : int
        Side set identifying integer

    """
    if name is None:
        i = 1
        while True:
            name = "Sideset-{0}".format(i)
            if name not in self.sidesets:
                break
            i += 1
            if i > MAX_NUM_SETS:
                raise AFEPYError("maximum number of sidesets exceeded")

    if name in self.sidesets:
        raise AFEPYError("{0}: sideset already exists".format(name))

    if exo_id is None:
        exo_id = SSET_ID_START
        while True:
            if exo_id not in self.sidesets.exo_ids:
                break
            exo_id += 1
            if exo_id >= MAX_NUM_SETS + SSET_ID_START:
                raise AFEPYError("maximum number of sidesets exceeded")

    if exo_id in self.sidesets.exo_ids:
        raise AFEPYError("{0}: sideset exo_id already exists".format(name))

    sset_surfs = []
    if surfaces is not None:
        for (i, item) in enumerate(surfaces):
            try:
                elem, face = item
            except ValueError:
                raise AFEPYError("bad sideset definition")
            try:
                if mapped:
                    elem = self.elem_num_map[elem]
                sset_surfs.extend([elem, face - 1])
            except KeyError:
                raise AFEPYError("{0}: element not in mesh".format(elem))

    if region is not None:
        # look for acceptable domain names
        if self.type != "grid":
            exit("cannot yet set sideset by region")
        try:
            axis = DOMAINS[region.upper()]
        except KeyError:
            raise AFEPYError(
                "{0}: invalid sideset region".format(region))
        nodes, els = self.members_at_position(axis[0], self.extremum[axis])

        # Side sets are defined on an element face. Loop through elements,
        # store the element number, face number, and the traction
        face = geom.face(self.num_coord, self.connect.shape[1], region)
        for elem_num in els:
            sset_surfs.extend([elem_num, face])

    sset_surfs = np.array(sset_surfs).reshape(-1, 2)
    self.sidesets[name] = Sideset(name, exo_id, sset_surfs)
    return self.sidesets[name]
def find_materials():
    """Find material models"""
    logger = Logger("console")
    errors = []
    mat_libs = {}
    rx = re.compile(r"(?:^|[\\b_\\.-])[Mm]at")
    a = ["MaterialModel", "AbaqusMaterial"]

    # gather and verify all files
    search_dirs = [d for d in MAT_LIB_DIRS]
    if not SUPPRESS_USER_ENV:
        for user_mat in cfgparse("materials"):
            user_mat = os.path.realpath(user_mat)
            if user_mat not in search_dirs:
                search_dirs.append(user_mat)

    # go through each item in search_dirs and generate a list of material
    # interface files. if item is a directory gather all files that match rx;
    # if it's a file, add it to the list of material files
    for item in search_dirs:
        if os.path.isfile(item):
            d, files = os.path.split(os.path.realpath(item))
            files = [files]
        elif os.path.isdir(item):
            d = item
            files = [f for f in os.listdir(item) if rx.search(f)]
        else:
            logger.warn("{0}: no such directory or file, skipping".format(item),
                        report_who=1)
            continue
        files = [f for f in files if f.endswith(".py")]

        if not files:
            logger.warn("{0}: no mat files found".format(d), report_who=1)

        # go through files and determine if it's an interface file. if it is,
        # load it and add it to mat_libs
        for f in files:
            module = f[:-3]
            try:
                libs = xpyclbr.readmodule(module, [d], ancestors=a)
            except AttributeError as e:
                errors.append(e.args[0])
                logger.error(e.args[0])
                continue
            for lib in libs:
                if lib in mat_libs:
                    logger.error("{0}: duplicate material".format(lib))
                    errors.append(lib)
                    continue
                module = load_file(libs[lib].file)
                mat_class = getattr(module, libs[lib].class_name)
                if not mat_class.name:
                    raise AFEPYError("{0}: material name attribute "
                                     "not defined".format(lib))
                libs[lib].mat_class = mat_class
                mat_libs.update({mat_class.name.lower(): libs[lib]})

    if errors:
        raise AFEPYError(", ".join(errors))

    return mat_libs
def __init__(self, mesh, dict):
    if not mesh.blocks:
        # look for special block 'All' in dict
        if len(dict) == 1 and dict.keys()[0].lower() == "all":
            key = dict.keys()[0]
            mesh.ElementBlock(name=key, elements="all")

    self.elements = np.empty(mesh.num_cell, dtype=np.object)
    self.num_elem = mesh.num_cell

    self.blocks = []
    for eb in mesh.blocks.values():
        self.blocks.append(ElementBlock())
        self.blocks[-1].__dict__.update(eb.__dict__)
    self.connect = mesh.connect

    mesh.run_diagnostics()
    self.mesh = mesh

    # assign properties to elements and determine if all blocks have been
    # assigned properties
    unassigned = []
    self.elems_per_block = []
    for eb in self.blocks:
        try:
            elem = dict[eb.name]
        except KeyError:
            unassigned.append(eb.name)
            continue
        for elem_id in eb.elem_ids:
            elem_nodes = [n for n in self.mesh.connect[elem_id] if n >= 0]
            self.elements[elem_id] = elem(elem_id, elem_nodes)
        eb.num_int_point = self.elements[elem_id].integration.num_point
        eb.elem_type = self.elements[elem_id].type
        eb.num_node_per_elem = self.elements[elem_id].num_node
        eb.num_dof_per_node = self.elements[elem_id].num_dof_per_node
        eb.ndi = self.elements[elem_id].ndi
        eb.nshr = self.elements[elem_id].nshr
        self.elems_per_block.append(eb.elem_ids)

    if unassigned:
        u = ", ".join(unassigned)
        raise AFEPYError("Element block without properties detected. "
                         "All element blocks must be assigned properties. "
                         "Unassigned element blocks:\n {0}".format(u))

    # determine total number of dofs, look for conflicting nodes
    node_dofs = {}
    for e in self.elements:
        node_ids = [n for n in self.mesh.connect[e.elem_id] if n >= 0]
        n = self.elements[e.elem_id].num_dof_per_node
        for node_id in node_ids:
            try:
                nn = node_dofs[node_id]
                if nn == n:
                    continue
                raise AFEPYError("conflicting dofs in node "
                                 "{0}".format(node_id))
            except KeyError:
                node_dofs[node_id] = n

    self.num_dof = sum(node_dofs.values())
    self.num_dof_per_node = self.elements[0].num_dof_per_node
    self.mesh.num_dof_per_node = self.num_dof_per_node
    self.elem_blk_ids = self.mesh.elem_blk_ids
    self.num_dim = self.mesh.num_dim
    self.num_node = self.mesh.num_node
    self.X = self.mesh.coords
    self.dofs = np.arange(self.num_dof).reshape(self.mesh.num_node, -1)