def getDepthData(self):
    """Decode the zlib/base64-compressed depth map in ``self.meta``.

    Returns ``((width, height), labels, planes)`` and caches it on
    ``self.depthdata``.  Each plane is ``((nx, ny, nz), distance)``.
    """
    import base64
    import zlib

    encoded = self.meta['model']['depth_map']
    # BUGFIX: restore stripped base64 padding.  The original added
    # ``len(encoded) % 4`` '=' chars, which is wrong when len % 4 is 2 or
    # 3; ``(-len) % 4`` is the number of missing pad characters.
    encoded += '=' * (-len(encoded) % 4)
    # url-safe alphabet ('-'/'_'), then zlib -- replaces the Python-2-only
    # str.decode('base64')/str.decode('zip') codecs.
    data = zlib.decompress(base64.urlsafe_b64decode(encoded))

    # Read header
    hsize = data[0]  # header size in bytes
    fmt = Struct('< x 3H B')  # little endian, pad byte, 3x ushort, uchar
    n_planes, width, height, offset = fmt.unpack(data[:hsize])

    # Read per-pixel plane labels
    n = width * height
    fmt = Struct('%dB' % n)
    lbls = fmt.unpack(data[offset:offset + fmt.size])
    offset += fmt.size

    # Read planes: normal vector (3 floats) + distance (1 float) each
    fmt = Struct('< 4f')
    planes = []
    for _ in range(n_planes):
        unpacked = fmt.unpack(data[offset:offset + fmt.size])
        planes.append((unpacked[:3], unpacked[3]))
        offset += fmt.size

    self.depthdata = (width, height), lbls, planes
    return self.depthdata
def _readPBEAM(self, data, n):
    """
    PBEAM(5402,54,262) - the marker for Record 14
    .. todo:: add object
    """
    s1 = Struct(b(self._endian + '4if'))  # pid, mid, nsegs, ccf, x (20 bytes)
    s2 = Struct(b(self._endian + '16f'))  # one intermediate station (64 bytes)
    s3 = Struct(b(self._endian + '11f'))  # trailing block (44 bytes)
    ntotal = 1072  # 44+12*84+20
    # NOTE(review): 20 + 12*64 + 44 = 832, not 1072 -- ``ntotal`` looks
    # inconsistent with the structs actually unpacked; confirm the record
    # layout before enabling the PBEAM object below.
    nproperties = (len(data) - n) // ntotal
    for i in range(nproperties):
        eData = data[n:n+20]
        n += 20
        dataIn = list(s1.unpack(eData))
        self.binary_debug.write(' PBEAM=%s\n' % str(dataIn))
        (pid, mid, nsegs, ccf, x) = dataIn
        # 12 intermediate station blocks of 16 floats each
        for i in range(12):
            # NOTE(review): inner loop variable shadows the outer ``i``
            eData = data[n:n+64]
            n += 64
            pack = s2.unpack(eData)
            (so, xxb, a, i1, i2, i12, j, nsm, c1, c2,
             d1, d2, e1, e2, f1, f2) = pack
            dataIn.append(pack)
            self.binary_debug.write(' %s\n' % str(pack))
        eData = data[n:n+44]
        # NOTE(review): ``n`` is not advanced past these 44 bytes, and
        # ``dataIn`` is overwritten instead of extended -- TODO confirm
        # intended behavior before constructing PBEAM.
        dataIn = list(s3.unpack(eData))
        #(k1,k2,s1,s2,nsia,nsib,cwa,cwb,m1a,m2a,m1b,m2b,n1a,n2a,n1b,n2b) = pack
        # prop = PBEAM(None, dataIn)
        # self.addOp2Property(prop)
    #sys.exit('ept-PBEAM')
    self.card_count['PBEAM'] = nproperties
    return n
def zmf2dict(fn):
    """Read a Zemax lens library (a ``.zmf`` file) and build a dict with
    the description of each component, keyed by the component reference.
    (Docstring translated from the original Spanish.)
    """
    rd = {}
    head = Struct("<I")
    lens = Struct("<100sIIIIIIIdd")
    shapes = "?EBPM"
    # BUGFIX: .zmf is a binary format -- open in binary mode (the original
    # used "r", which fails to yield bytes on Python 3 and corrupts the
    # stream on Windows) and close the handle deterministically.
    with open(fn, "rb") as f:
        version, = head.unpack(f.read(head.size))
        assert version in (1001, )
        while True:
            li = f.read(lens.size)
            if len(li) != lens.size:
                if len(li) > 0:
                    print(f, "additional data", repr(li))
                break
            li = list(lens.unpack(li))
            li[0] = li[0].decode("latin1").strip("\0")
            li[3] = shapes[li[3]]
            # obfuscated description of length li[7] follows each record
            description = f.read(li[7])
            assert len(description) == li[7]
            description = zmf_obfuscate(description, li[8], li[9])
            description = description.decode("latin1")
            assert description.startswith("VERS {:06d}\n".format(li[1]))
            rd[li[0]] = description
    return rd
def _read_mathp(self, data, n):
    """MATHP(4506,45,374) - Record 11

    Reads hyperelastic MATHP materials: a fixed 140-byte record plus an
    optional 32-byte continuation when ``continue_flag`` is set.
    """
    nmaterials = 0
    s1 = Struct(b(self._endian + 'i7f3i23fi'))  # 35 fields, 140 bytes
    s2 = Struct(b(self._endian + '8i'))         # continuation, 32 bytes
    ndata = len(data)
    # BUGFIX: the original looped ``while n2 < n`` with ``n2 = n``, which
    # is immediately false -- no MATHP material was ever read.
    while n < ndata:
        edata = data[n:n+140]
        n += 140
        out1 = s1.unpack(edata)
        (mid, a10, a01, d1, rho, alpha, tref, ge, sf, na, nd, kp,
         a20, a11, a02, d2,
         a30, a21, a12, a03, d3,
         a40, a31, a22, a13, a04, d4,
         a50, a41, a32, a23, a14, a05, d5,
         continue_flag) = out1
        data_in = [out1]
        if continue_flag:
            edata = data[n:n+32]  # 8*4
            n += 32
            out2 = s2.unpack(edata)
            (tab1, tab2, tab3, tab4, x1, x2, x3, tab5) = out2
            data_in.append(out2)
        mat = MATHP.add_op2_data(data_in)
        self.add_op2_material(mat)
        nmaterials += 1
    self.card_count['MATHP'] = nmaterials
    return n
def read_image_optional_header(cls, file):
    """Read a PE32 IMAGE_OPTIONAL_HEADER from *file*: the 96-byte fixed
    part followed by 16 IMAGE_DATA_DIRECTORY (VirtualAddress, Size) pairs.
    Returns a ``cls._IMAGE_OPTIONAL_HEADER``.
    """
    format = Struct('<HBBIIIIIIIIIHHHHHHIIIIHHIIIIII')  # fixed part, 96 bytes
    directory_format = Struct('<II')  # one IMAGE_DATA_DIRECTORY entry
    partial_header = format.unpack(file.read(format.size))
    # (removed a dead ``directory = []`` pre-initialization that was
    # immediately overwritten by the comprehension below)
    directory = [cls._IMAGE_DATA_DIRECTORY(*directory_format.unpack(file.read(directory_format.size)))
                 for i in range(16)]
    return cls._IMAGE_OPTIONAL_HEADER(*(partial_header + (directory,)))
def _readMATHP(self, data, n):
    """MATHP(4506,45,374) - Record 11"""
    nmaterials = 0
    s1 = Struct(b(self._endian + 'i7f3i23fi'))  # 35 fields, 140 bytes
    s2 = Struct(b(self._endian + '8i'))         # continuation, 32 bytes
    ndata = len(data)
    # BUGFIX: the original looped ``while n2 < n`` with ``n2 = n``, which
    # is immediately false -- no MATHP material was ever read.
    while n < ndata:
        eData = data[n:n+140]
        n += 140
        out1 = s1.unpack(eData)
        (mid, a10, a01, d1, rho, alpha, tref, ge, sf, na, nd, kp, a20, a11,
         a02, d2, a30, a21, a12, a03, d3, a40, a31, a22, a13, a04, d4, a50,
         a41, a32, a23, a14, a05, d5, continueFlag) = out1
        dataIn = [out1]
        if continueFlag:
            eData = data[n:n+32]  # 8*4
            n += 32
            out2 = s2.unpack(eData)
            (tab1, tab2, tab3, tab4, x1, x2, x3, tab5) = out2
            # BUGFIX: the original appended to ``data`` (the input byte
            # buffer) instead of ``dataIn``.
            dataIn.append(out2)
        self.addOp2Material(MATHP(None, dataIn))
        nmaterials += 1
    self.card_count['MATHP'] = nmaterials
    return n
def _read_pcomp(self, data, n):
    """
    PCOMP(2706,27,287) - the marker for Record 22
    """
    ncards = 0
    header_struct = Struct(b(self._endian + "2i3fi2f"))  # 32 bytes
    layer_struct = Struct(b(self._endian + "i2fi"))      # 16 bytes/layer
    ndata = len(data)
    while n < (ndata - 32):
        header = header_struct.unpack(data[n : n + 32])
        (pid, nlayers, z0, nsm, sb, ft, Tref, ge) = header
        if self.binary_debug:
            self.log.debug("PCOMP pid=%s nlayers=%s z0=%s nsm=%s sb=%s ft=%s Tref=%s ge=%s" % tuple(header))
        assert isinstance(nlayers, int), header
        n += 32
        mids = []
        T = []
        thetas = []
        souts = []
        # None, 'SYM', 'MEM', 'BEND', 'SMEAR', 'SMCORE', 'NO'
        is_symmetrical = "NO"
        if nlayers < 0:
            # a negative layer count encodes symmetry
            is_symmetrical = "SYM"
            nlayers = abs(nlayers)
        assert nlayers > 0, header
        assert 0 < nlayers < 100, "pid=%s nlayers=%s z0=%s nms=%s sb=%s ft=%s Tref=%s ge=%s" % (
            pid, nlayers, z0, nsm, sb, ft, Tref, ge,
        )
        for unused_ilayer in range(nlayers):
            (mid, t, theta, sout) = layer_struct.unpack(data[n : n + 16])
            mids.append(mid)
            T.append(t)
            thetas.append(theta)
            souts.append(sout)
            if self.is_debug_file:
                self.binary_debug.write(" mid=%s t=%s theta=%s sout=%s" % (mid, t, theta, sout))
            n += 16
        data_in = [pid, z0, nsm, sb, ft, Tref, ge, is_symmetrical, mids, T, thetas, souts]
        self._add_op2_property(PCOMP.add_op2_data(data_in))
        ncards += 1
    self.card_count["PCOMP"] = ncards
    return n
def test_unpack_get_stats(self): """ tc -s class show dev lo classid 1:1 """ data1 = "\x24\x00\x00\x00\x2a\x00\x01\x03\x22\x08\xdc\x4d\x00\x00\x00\x00" data2 = "\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" nlmsghdr = Struct("IHHII") tcmsg = Struct("BxxxiIII") nlmsghdr.unpack(data1) tcmsg.unpack(data2)
def _readPCOMP(self, data, n):
    """
    PCOMP(2706,27,287) - the marker for Record 22
    """
    # NOTE(review): ``n2 = n`` followed by ``while n2 < n`` is immediately
    # false, so this loop body never executes and no PCOMP cards are read;
    # neither ``n`` nor ``n2`` is advanced inside the loop either.  Kept
    # verbatim -- see ``_read_pcomp`` for a working implementation.
    nproperties = 0
    n2 = n
    s1 = Struct(b"2i3fi2f")  # header: 32 bytes
    s2 = Struct(b"i2fi")     # one layer: 16 bytes
    while n2 < n:  # len(data) >= 32: # 8*4 - dynamic
        # print("len(data) = %s" % len(data))
        # print(self.print_block(data[0:200]))
        isSymmetrical = "NO"
        eData = data[n : n + 32]
        out = s1.unpack(eData)
        self.binary_debug.write(' PCOMP=%s\n' % str(out))
        (pid, nLayers, z0, nsm, sb, ft, Tref, ge) = out
        eData = data[n : n + 16 * (nLayers)]
        Mid = []
        T = []
        Theta = []
        Sout = []
        if nLayers < 0:
            # a negative layer count encodes a symmetrical layup
            isSymmetrical = "YES"
            nLayers = abs(nLayers)
        # print("nLayers = ",nLayers)
        assert 0 < nLayers < 100, "pid=%s nLayers=%s z0=%s nms=%s sb=%s ft=%s Tref=%s ge=%s" % (
            pid, nLayers, z0, nsm, sb, ft, Tref, ge,
        )
        idata = 0
        for ilayer in range(nLayers):
            (mid, t, theta, sout) = s2.unpack(eData[idata : idata + 16])
            Mid.append(mid)
            T.append(t)
            Theta.append(theta)
            Sout.append(sout)
            idata += 16
        dataIn = [pid, z0, nsm, sb, ft, Tref, ge, isSymmetrical, Mid, T, Theta, Sout]
        # print("PCOMP = %s" % (dataIn))
        prop = PCOMP(None, dataIn)
        self.addOp2Property(prop)
        nproperties += 1
    self.card_count["PCOMP"] = nproperties
    return n
def unpackls(data):
    """Parse a packed listing buffer into a list of RemoteFile entries.

    Each record is a 2-byte big-endian name length, then the name followed
    by an unsigned long, an unsigned char and an unsigned long long
    (network byte order, 13 bytes after the name).
    """
    entries = []
    remaining = buffer(data)
    length_fmt = Struct('!H')
    while remaining:
        name_len = length_fmt.unpack(remaining[:2])[0]
        record_len = name_len + 13
        remaining = remaining[2:]
        record_fmt = Struct('!%dsLBQ' % name_len)
        entry = RemoteFile.unpack(*record_fmt.unpack(remaining[:record_len]))
        entries.append(entry)
        remaining = remaining[record_len:]
    return entries
def _read_pcomp(self, data, n):
    """
    PCOMP(2706,27,287) - the marker for Record 22
    """
    ncards = 0
    header_struct = Struct(b(self._endian + '2i3fi2f'))  # 32 bytes
    layer_struct = Struct(b(self._endian + 'i2fi'))      # 16 bytes/layer
    ndata = len(data)
    while n < (ndata - 32):
        out = header_struct.unpack(data[n:n+32])
        (pid, nlayers, z0, nsm, sb, ft, Tref, ge) = out
        if self.binary_debug:
            self.log.debug('PCOMP pid=%s nlayers=%s z0=%s nsm=%s sb=%s ft=%s Tref=%s ge=%s' % tuple(out))
        assert isinstance(nlayers, int), out
        n += 32
        Mid, T, Theta, Sout = [], [], [], []
        # None, 'SYM', 'MEM', 'BEND', 'SMEAR', 'SMCORE', 'NO'
        is_symmetrical = 'NO'
        if nlayers < 0:
            # negative layer count encodes symmetry
            is_symmetrical = 'SYM'
            nlayers = abs(nlayers)
        assert nlayers > 0, out
        assert 0 < nlayers < 100, 'pid=%s nlayers=%s z0=%s nms=%s sb=%s ft=%s Tref=%s ge=%s' % (
            pid, nlayers, z0, nsm, sb, ft, Tref, ge)
        for unused_ilayer in range(nlayers):
            (mid, t, theta, sout) = layer_struct.unpack(data[n:n+16])
            Mid.append(mid)
            T.append(t)
            Theta.append(theta)
            Sout.append(sout)
            if self.is_debug_file:
                self.binary_debug.write(' mid=%s t=%s theta=%s sout=%s' % (mid, t, theta, sout))
            n += 16
        data_in = [
            pid, z0, nsm, sb, ft, Tref, ge, is_symmetrical,
            Mid, T, Theta, Sout]
        self._add_op2_property(PCOMP.add_op2_data(data_in))
        ncards += 1
    self.card_count['PCOMP'] = ncards
    return n
def _read_tf(self, data, n): nfields = (len(data) - n) // 4 # subtract of the header (sid, nid, component, b0, b1, b2) # divide by 5 (nid1, component1, a0, a1, a2) #nrows = (nfields - 6) // 5 #print('n=%s nrows=%s' % (n, nrows)) #print(self.show_data(data)) #nid1, component1, a0, a1, a2 ndata = len(data) struct1 = Struct(b'3i3f') struct2 = Struct(b'2i3f') while n < ndata: n2 = n + 24 # 20=4*6 sid, nid, component, b0, b1, b2 = struct1.unpack(data[n:n2]) if self.is_debug_file: self.binary_debug.write('TF header -> %s\n' % ([sid, nid, component, b0, b1, b2])) nids = [] components = [] a = [] irow = 0 while 1: n3 = n2 + 20 # 20=4*5 nid1, component1, a0, a1, a2 = struct2.unpack(data[n2:n3]) if self.is_debug_file: self.binary_debug.write(' i=%s -> %s\n' % ( irow, [nid1, component1, a0, a1, a2])) if nid1 == -1 and component1 == -1: break assert nid1 > -1 assert component1 > -1 nids.append(nid1) components.append(component1) a.append([a0, a1, a2]) n2 = n3 irow += 1 tf = TF(sid, nid, component, b0, b1, b2, nids, components, a) #if self.is_debug_file: #self.binary_debug.write('%s\n' % str(tf)) self._add_tf_object(tf) self._increase_card_count('TF') n = n3 return n
def read_points_binary(self, npoints):
    """Read ``npoints`` xyz triples (big-endian float32) from self.infile
    in 1000-point chunks, skip the 8-byte block separator, and return an
    ``(npoints, 3)`` float32 array."""
    remaining = npoints * 12  # 12 = 3 floats * 4 bytes per point
    i = 0
    points = zeros(npoints * 3, dtype='float32')
    chunk_struct = Struct(b'>3000f')  # 3000 floats; 1000 points
    while remaining > 12000:  # 12k = 4 bytes/float * 3 floats/point * 1000 points
        chunk = self.infile.read(4 * 3000)
        points[i:i + 3000] = chunk_struct.unpack(chunk)
        i += 3000
        remaining -= 4 * 3000
    assert remaining >= 0, 'size=%s' % remaining
    if remaining > 0:
        tail = self.infile.read(remaining)
        tail_fmt = b'>%if' % (remaining // 4)
        points[i:] = unpack(tail_fmt, tail)
    points = points.reshape((npoints, 3))
    self.infile.read(8)  # end of second block, start of third block
    return points
def read_elements_binary(self, nelements):
    """Read ``nelements`` connectivity triples (big-endian int32) from
    self.infile, skip the 8-byte block separator, and return an
    ``(nelements, 3)`` int32 array."""
    self.nElementsRead = nelements
    self.nElementsSkip = 0
    size = nelements * 12  # 12 = 3 ints * 4 bytes per element
    # BUGFIX: the buffer was sized with ``self.nElements``; size it with
    # the ``nelements`` argument actually being read.
    elements = zeros(nelements * 3, dtype='int32')
    n = 0
    s = Struct(b'>3000i')  # 1000 elements at a time
    while size > 12000:
        data = self.infile.read(4 * 3000)
        nodes = s.unpack(data)
        elements[n : n + 3000] = nodes
        size -= 4 * 3000
        n += 3000
    assert size >= 0, 'size=%s' % size
    if size > 0:
        data = self.infile.read(size)
        Format = b'>%ii' % (size // 4)
        nodes = unpack(Format, data)
        elements[n:] = nodes
    elements2 = elements.reshape((nelements, 3))
    self.infile.read(8)  # end of element block, start of regions block
    return elements2
def test_unpack_tcp_filter(self):
    """
    tc filter add dev lo parent 1: protocol ip prio 1 u32 \
    match ip sport 8000 0xffff flowid 1:5
    """
    # raw netlink capture of the tc command above (octal/hex escapes)
    data = "\\\0\0\0,\0\5\6!\201\333M\0\0\0\0\0\0\0\0\1\0\0\0\0\0\0\0\0\0\1\0\10\0\1\0\10\0\1\0u32\0000\0\2\0\10\0\1\0\5\0\1\0$\0\5\0\1\0\1\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\0\0\37@\0\0\24\0\0\0\0\0\0\0"
    #data = "\\\0\0\0,\0\5\6\352\210\333M\0\0\0\0\0\0\0\0\1\0\0\0\0\0\0\0\0\0\1\0\10\0\1\0\10\0\1\0u32\0000\0\2\0\10\0\1\0\5\0\1\0$\0\5\0\1\0\1\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\377\377\0\0\37@\24\0\0\0\0\0\0\0"
    msg = Message.unpack(data)
    st = msg.service_template
    attr = st.attributes
    # presumably struct tc_u32_sel header + one tc_u32_key
    # (cf. linux/pkt_cls.h) -- TODO confirm field layout
    tc_u32_sel = Struct("BBBHHhhI")  # 16 bytes with native alignment
    tc_u32_key = Struct("IIii")
    nested = list(unpack_attrs(attr[1].payload))
    sel = tc_u32_sel.unpack(nested[1].payload[:16])
    key = tc_u32_key.unpack(nested[1].payload[16:])
class ULInt24(StaticField):
    """
    A custom made construct for handling 3-byte types as used in ancient
    file formats. A better implementation would be writing a more flexable
    version of FormatField, rather then specifically implementing it for
    this case
    """
    __slots__ = ["packer"]

    def __init__(self, name):
        # little-endian: low byte followed by the high 16-bit word
        self.packer = Packer("<BH")
        StaticField.__init__(self, name, self.packer.size)

    def __getstate__(self):
        attrs = StaticField.__getstate__(self)
        attrs["packer"] = attrs["packer"].format
        return attrs

    def __setstate__(self, attrs):
        # NOTE(review): mirrors FormatField.__setstate__; confirm
        # StaticField.__setstate__ really expects the attrs dict directly
        attrs["packer"] = Packer(attrs["packer"])
        return StaticField.__setstate__(attrs)

    def _parse(self, stream, context):
        try:
            vals = self.packer.unpack(_read_stream(stream, self.length))
            return vals[0] + (vals[1] << 8)
        except Exception:
            ex = sys.exc_info()[1]
            raise FieldError(ex)

    def _build(self, obj, stream, context):
        try:
            vals = (obj % 256, obj >> 8)
            # BUGFIX: pack() takes the values as separate positional
            # arguments; passing the tuple itself raised struct.error,
            # so _build always failed with FieldError.
            _write_stream(stream, self.length, self.packer.pack(*vals))
        except Exception:
            ex = sys.exc_info()[1]
            raise FieldError(ex)
def _read_grid(self, data, n):  # 21.8 sec, 18.9
    """(4501,45,1) - the marker for Record 17"""
    grid_struct = Struct(b(self._endian + 'ii3f3i'))
    ntotal = 32
    nentries = (len(data) - n) // ntotal
    self._increase_card_count('GRID', nentries)
    for unused_i in range(nentries):
        out = grid_struct.unpack(data[n:n + 32])
        (nid, cp, x1, x2, x3, cd, ps, seid) = out
        if self.is_debug_file:
            self.binary_debug.write(' GRID=%s\n' % str(out))
        if cd >= 0 and nid < 10000000:
            if ps == 0:
                # a zero SPC field means "no constraint"
                ps = ''
            node = GRID(nid, cp, np.array([x1, x2, x3]), cd, ps, seid)
            self.nodes[nid] = node
        else:
            # out-of-range node: log it instead of storing
            self.log.debug("*nid=%s cp=%s x1=%-5.2f x2=%-5.2f x3=%-5.2f cd=%-2s ps=%s seid=%s" % (
                nid, cp, x1, x2, x3, cd, ps, seid))
        n += ntotal
    return n
class StreamSerializer(object):
    """Helper to pass python objects over streams."""

    length_format = '!i'  # 4-byte big-endian length prefix

    def __init__(self):
        self.length_struct = Struct(self.length_format)
        self.length = calcsize(self.length_format)

    @staticmethod
    def encode(obj):
        # pickle round-trips arbitrary picklable python objects
        return pickle.dumps(obj)

    @staticmethod
    def decode(message):
        return pickle.loads(message)

    def encode_with_length(self, obj):
        """Encode object and prepend length to message."""
        payload = self.encode(obj)
        prefix = self.length_struct.pack(len(payload))
        return prefix + payload

    def decode_from_stream(self, fd, timeout=5):
        """Read object from given stream and return it."""
        readable, _, _ = select([fd], [], [], timeout)
        if not readable:
            raise RuntimeError("Can't read object from {0!r}.".format(fd))
        header = os.read(fd, self.length)
        message_length = self.length_struct.unpack(header)[0]
        assert message_length > 0, 'wrong message length provided'
        return self.decode(os.read(fd, message_length))
def _read_pbush_nx(self, data, n):
    """PBUSH(1402,14,37)"""
    #if self.table_name == ['EPTS', 'EPT']:
    ntotal = 72  # 18 fields * 4 bytes
    pbush_struct = Struct(b(self._endian + 'i17f'))
    nentries = (len(data) - n) // ntotal
    assert nentries > 0, 'table=%r len=%s' % (self.table_name, len(data) - n)
    for unused_i in range(nentries):
        out = pbush_struct.unpack(data[n:n+72])
        (pid, k1, k2, k3, k4, k5, k6, b1, b2, b3, b4, b5, b6,
         g1, sa, st, ea, et) = out
        # the record carries a single G value; replicate g1 into g2..g6
        g2 = g3 = g4 = g5 = g6 = g1
        data_in = (pid, k1, k2, k3, k4, k5, k6, b1, b2, b3, b4, b5, b6,
                   g1, g2, g3, g4, g5, g6, sa, st, ea, et)
        self._add_op2_property(PBUSH.add_op2_data(data_in))
        n += ntotal
    self.card_count['PBUSH'] = nentries
    return n
def _readCHEXA(self, data, n):
    """
    CHEXA(7308,73,253) - the marker for Record 45
    """
    chexa_struct = Struct(b(self._endian + '22i'))
    ntotal = 88  # 22*4
    nelements = (len(data) - n) // ntotal
    for unused_i in range(nelements):
        out = chexa_struct.unpack(data[n:n+88])
        self.binary_debug.write(' CHEXA=%s\n' % str(out))
        dataIn = list(out[:10])     # eid, pid, g1..g8
        big_nodes = list(out[10:])  # g9..g20 (all zero for 8-node hexas)
        if sum(big_nodes) > 0:
            elem = CHEXA20(None, dataIn + big_nodes)
        else:
            elem = CHEXA8(None, dataIn)
        self.addOp2Element(elem)
        n += ntotal
    self.card_count['CHEXA'] = nelements
    return n
def _read_complex_eigenvalue_4(self, data):
    """parses the Complex Eigenvalues Table 4 Data"""
    if self.read_mode == 1:
        return len(data)
    ntotal = 4 * 6  # one 24-byte row per mode
    nmodes = len(data) // ntotal
    n = 0
    #assert self.isubcase != 0, self.isubcase
    clama = ComplexEigenvalues(11)
    self.eigenvalues[self.Title] = clama
    #self.eigenvalues[self.isubcase] = lama
    row_struct = Struct(b'ii4f')
    for irow in range(nmodes):
        out = row_struct.unpack(data[n:n+ntotal])
        if self.debug4():
            self.binary_debug.write(' eigenvalue%s - %s\n' % (irow, str(out)))
        (imode, order, eigr, eigc, freq, damping) = out  # CLAMA
        clama.addF06Line(out)
        n += ntotal
    assert n == len(data), 'clama length error'
    return n
def _readCGAP(self, data, n):
    """
    CGAP(1908,19,104) - the marker for Record 39
    """
    s1 = Struct(b(self._endian + '4i3fii'))  # 36 bytes
    nelements = (len(data) - n) // 36
    for i in range(nelements):
        eData = data[n:n + 36]  # 9*4
        out = s1.unpack(eData)
        (eid, pid, ga, gb, x1, x2, x3, f, cid) = out  # f=0,1
        g0 = None
        f2, = self.struct_i.unpack(eData[28:32])
        assert f == f2, 'f=%s f2=%s' % (f, f2)
        if f == 2:
            # BUGFIX: unpack returns a 1-tuple -- extract the integer.
            # The original stored the tuple itself in g0.
            g0, = self.struct_i.unpack(eData[16:20])
            x1 = None
            x2 = None
            x3 = None
        dataIn = [eid, pid, ga, gb, g0, x1, x2, x3, cid]
        elem = CGAP(None, dataIn)
        self.addOp2Element(elem)
        n += 36
    self.card_count['CGAP'] = nelements
    return n
def read_elements_binary(self, nelements):
    """Read ``nelements`` connectivity triples (big-endian int32) from
    self.infile, skip the 8-byte block separator, and return an
    ``(nelements, 3)`` int32 array."""
    self.nElementsRead = nelements
    self.nElementsSkip = 0
    size = nelements * 12  # 12 = 3 ints * 4 bytes per element
    # BUGFIX: the buffer was sized with ``self.nElements``; size it with
    # the ``nelements`` argument actually being read.
    elements = zeros(nelements * 3, 'int32')
    n = 0
    s = Struct(b'>3000i')  # 1000 elements at a time
    while size > 12000:
        data = self.infile.read(4 * 3000)
        nodes = s.unpack(data)
        elements[n : n + 3000] = nodes
        size -= 4 * 3000
        n += 3000
    assert size >= 0, 'size=%s' % size
    if size > 0:
        data = self.infile.read(size)
        Format = b'>%ii' % (size // 4)
        nodes = unpack(Format, data)
        elements[n:] = nodes
    elements2 = elements.reshape((nelements, 3))
    self.infile.read(8)  # end of element block, start of regions block
    return elements2
def _read_radm(self, data, n):
    """
    RADM(8802,88,413) - record 25
    .. todo:: add object
    """
    struct_i = self.struct_i
    nmaterials = 0
    ndata = len(data)
    while n < ndata:  # 1*4
        packs = []
        edata = data[n:n+4]
        number, = struct_i.unpack(edata)
        n += 4
        # each pack is one int (mid) plus ``number`` floats
        iformat = 'i %if' % (number)
        struct_i_nf = Struct(b(self._endian + iformat))
        #mid, absorb, emiss1, emiss2, ...
        ndata_per_pack = 1 + number
        nstr_per_pack = ndata_per_pack * 4
        nfields = (ndata - n) // 4
        npacks = nfields // ndata_per_pack
        for ipack in range(npacks):
            edata = data[n:n+nstr_per_pack]
            pack = list(struct_i_nf.unpack(edata))
            packs.append(pack)
            n += nstr_per_pack
            # NOTE(review): structure reconstructed from a collapsed
            # source -- one RADM material per pack is assumed; confirm.
            mat = RADM.add_op2_data(pack)
            self.add_thermal_BC(mat, mat.radmid)
            nmaterials += 1
    self.card_count['RADM'] = nmaterials
    return n
def _read_real_table(self, data, result_name, flag, is_cid=False):
    """Reads a real table: 8 words per entry (device-coded id, grid type,
    and six float components) and feeds each row into ``self.obj``."""
    if self.debug4():
        self.binary_debug.write(' _read_real_table\n')
    assert flag in ['node', 'elem'], flag
    n = 0
    ntotal = 32  # 8 * 4
    dt = self.nonlinear_factor
    assert self.obj is not None
    obj = self.obj
    nnodes = len(data) // ntotal
    assert nnodes > 0
    #assert len(data) % ntotal == 0
    row_struct = Struct('2i6f')  # 8 fields
    for unused_inode in range(nnodes):
        out = row_struct.unpack(data[n:n+ntotal])
        (eid_device, grid_type, tx, ty, tz, rx, ry, rz) = out
        # strip the device code from the encoded id
        eid = (eid_device - self.device_code) // 10
        if self.debug4():
            self.binary_debug.write(' %s=%i; %s\n' % (flag, eid, str(out)))
        obj.add(dt, eid, grid_type, tx, ty, tz, rx, ry, rz)
        n += ntotal
    return n
def _readRADM(self, data, n): """ RADM(8802,88,413) - record 25 .. todo:: add object """ #print "reading RADM" return s = Struct(b'i', ) while len(data) >= 4: # 1*4 eData = data[:4] data = data[4:] number, = s.unpack(eData) iFormat = 'if%if' % (number + 1) eDataLen = len(strings) * 4 eData = data[:eDataLen] data = data[eDataLen:] iFormat = bytes(iFormat) pack = list(unpack(iFormat, eData)) packs = [] while data: eData = data[:eDataLen] data = data[eDataLen:] pack = list(unpack(iFormat, eData)) packs.append(pack) #mat = RADM(None, packs) #self.addOp2Material(mat) return n
def _readCTETP(self, data, n):
    """
    CTETP(12201,122,9013) - the marker for Record 86
    .. todo:: create object
    """
    ntotal = 108  # 27*4
    nelements = (len(data) - n) // ntotal
    s = Struct(b(self._endian + '27i'))
    for i in range(nelements):
        eData = data[n:n+108]
        out = s.unpack(eData)
        self.binary_debug.write(' CTETP=%s\n' % str(out))
        (eid, pid, n1, n2, n3, n4, e1, e2, e3, e4, e5, e6, e7, e8,
         e9, e10, e11, e12, f1, f2, f3, f4, b1, ee1, ee2, ee3, ee4) = out
        # edge, face and bubble data (currently unused; see the todo)
        e = [e1, e2, e3, e4, e5, e6, e7, e8, e9, e10, e11, e12]
        f = [f1, f2, f3, f4]
        ee = [ee1, ee2, ee3, ee4]
        # BUGFIX: the original built [eid, pid, n1, n2, n2, n3, n4] --
        # 7 items with n2 duplicated for a 4-node tet.
        dataIn = [eid, pid, n1, n2, n3, n4]
        elem = CTETRA4(None, dataIn)
        self.addOp2Element(elem)
        # BUGFIX: advance past the record -- the original re-read the
        # same 108 bytes on every iteration.
        n += ntotal
    self.card_count['CTETP'] = nelements
    # BUGFIX: return the updated offset like every sibling reader
    return n
class FormatField(StaticField):
    """
    A field that uses ``struct`` to pack and unpack data.

    See ``struct`` documentation for instructions on crafting format
    strings.

    :param str name: name of the field
    :param str endianness: format endianness string; one of "<", ">", or "="
    :param str format: a single format character
    """
    __slots__ = ["packer"]

    def __init__(self, name, endianity, format):
        if endianity not in (">", "<", "="):
            raise ValueError("endianity must be be '=', '<', or '>'",
                             endianity)
        if len(format) != 1:
            raise ValueError("must specify one and only one format char")
        self.packer = Packer(endianity + format)
        StaticField.__init__(self, name, self.packer.size)

    def __getstate__(self):
        attrs = StaticField.__getstate__(self)
        attrs["packer"] = attrs["packer"].format
        return attrs

    def __setstate__(self, attrs):
        attrs["packer"] = Packer(attrs["packer"])
        return StaticField.__setstate__(attrs)

    def _parse(self, stream, context):
        try:
            return self.packer.unpack(_read_stream(stream, self.length))[0]
        except Exception as ex:
            # BUGFIX: 'except Exception, ex' is Python-2-only syntax;
            # the 'as' form works on Python 2.6+ and 3.x alike
            # (cf. ULInt24._parse, which already avoids the old form).
            raise FieldError(ex)
def _validate(self):
    """Validate the client's P_REQUEST_SERVICE packet, verify its key and
    signature, and reply with an encrypted session-key acknowledgement.

    Raises on a bad packet id / protocol / key / signature; returns early
    (without consuming data) when the packet is not yet complete.  On
    success, advances the buffer parser to ``self._servePublicKey``.
    """
    # look for first packet on the wire
    packetid = self.buff[0]
    if packetid != P_REQUEST_SERVICE:
        self._sendError(P_ERROR_BAD_PACKET)
        raise BadPacketIdentifierException('Unexpected packet ID')
    bufferlength = len(self.buff)
    # check packet is intact
    signedlength = Z_COOKIE_LENGTH + 35
    packetlength = signedlength + 256
    if bufferlength < packetlength:
        # incomplete packet: wait for more bytes
        return
    # unpack fields
    packet = buffer(self.buff, 0, packetlength)
    packetformat = Struct('!BBB%ds32s256s' % Z_COOKIE_LENGTH)
    (
        packetid,
        protocol,
        flags,
        cookie,
        fingerprint,
        signature
    ) = packetformat.unpack(packet)
    if protocol != Z_PROTOCOL_NUMBER:
        self._sendError(P_ERROR_BAD_PROTOCOL)
        raise ProtocolMismatchException('Client using bad protocol')
    if fingerprint not in self.keymanager.authorizedkeys:
        self._sendError(P_ERROR_UNAUTHORIZED_KEY)
        raise KeyNotFoundException('Client using unauthorized key')
    self.hostkey = self.keymanager.authorizedkeys[fingerprint]
    if not self.keymanager.verify(packet[:signedlength], signature,
                                  self.hostkey):
        self._sendError(P_ERROR_BAD_SIGNATURE)
        raise InvalidSignatureException('Client failed to sign message')
    # clear packet from buffer
    del self.buff[:packetlength]
    # start creating response
    packetheader = self.byteformat.pack(P_ACKNOWLEDGE_SERVICE)
    # set cipher scheme
    self.cipherflag = flags & Z_CIPHERSUITE_MASK
    self.cipherscheme = Z_CIPHER[self.cipherflag]
    ivlength = self.cipherscheme.block_size
    # generate secret key
    self.prng = Random.new()
    self.biscuit = self.prng.read(Z_COOKIE_LENGTH)
    self.desecret = self.prng.read(Z_SECRET_KEY_LENGTH)
    self.deiv = self.prng.read(ivlength)
    # add signable fields
    enc = self.keymanager.encrypt(self.desecret, self.hostkey)
    signable = '%s%s%s%s%s' % (
        packetheader,
        enc,
        self.deiv,
        self.biscuit,
        self.keymanager.fingerprint
    )
    # sign this message (the client's cookie binds the reply to its request)
    signature = self.keymanager.sign(signable + cookie)
    # send it
    self.sock.send(signable + signature)
    # direct function pointer to next step
    self.parseBuffer = self._servePublicKey
def _read_buckling_eigenvalue_4(self, data):
    """parses table 4 of an eigenvalue summary table"""
    # BLAMA - Buckling eigenvalue summary table
    # CLAMA - Complex eigenvalue summary table
    # LAMA - Normal modes eigenvalue summary table
    if self.read_mode == 1:
        return len(data)
    msg = '_read_buckling_eigenvalue_4'
    #return self._not_implemented_or_skip(data, msg)
    # TODO: implement buckling eigenvalues
    ntotal = 4 * 7  # one 28-byte row per mode
    nmodes = len(data) // ntotal
    n = 0
    #assert self.isubcase != 0, self.isubcase
    blama = BucklingEigenvalues(11)
    self.eigenvalues[self.Title] = blama
    #self.eigenvalues[self.isubcase] = lama
    row_struct = Struct(b'ii5f')
    for irow in range(nmodes):
        out = row_struct.unpack(data[n:n+ntotal])
        if self.debug4():
            self.binary_debug.write(' eigenvalue%s - %s\n' % (irow, str(out)))
        (iMode, order, eigen, omega, freq, mass, stiff) = out  # BLAMA??
        #(modeNum, extractOrder, eigenvalue, radian, cycle, genM, genK) = line # LAMA
        #(rootNum, extractOrder, eigr, eigi, cycle, damping) = data # CLAMA
        blama.addF06Line(out)
        n += ntotal
    return n
def _read_monpnt1_nx(self, data: bytes, n: int) -> int:
    """
    MSC 2018.2
    Word Name Type Description
    1 NAME(2) CHAR4
    3 LABEL(14) CHAR4
    17 AXES I
    18 COMP(2) CHAR4
    20 CP I
    21 X RS
    22 Y RS
    23 Z RS
    24 CD I (not in NX)
    """
    # NX omits the trailing CD word (MSC uses 4 * 24)
    ntotal = 4 * 23
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    monpnt_struct = Struct(self._endian + b'8s 56s i 8s i 3f')  # nx layout
    for unused_i in range(ncards):
        out = monpnt_struct.unpack(data[n:n + ntotal])
        name_bytes, label_bytes, axes, comp_bytes, cp, x, y, z = out
        # strip CHAR4 blank padding
        name = name_bytes.rstrip().decode('ascii')
        label = label_bytes.rstrip().decode('ascii')
        aecomp_name = comp_bytes.rstrip().decode('ascii')
        monpnt1 = self.add_monpnt1(name, label, axes, aecomp_name,
                                   [x, y, z], cp=cp)
        str(monpnt1)
        n += ntotal
    self.to_nx()
    return n
class CyKitClient:
    """Async iterator yielding one tuple of per-channel floats for each
    sample read from a CyKit stream."""

    def __init__(self, reader, writer, channels=14, sample_rate=128):
        self.sample_rate = sample_rate
        self._reader, self._writer = reader, writer
        # one big-endian float32 per channel
        self._struct = Struct('>' + 'f' * channels)

    def stop(self):
        """Close the writer, if one was supplied."""
        if self._writer is not None:
            self._writer.close()

    def __aiter__(self):
        return self

    async def __anext__(self):
        if self._reader.at_eof():
            raise ConnectionError("No more data from peer")
        raw = await self._reader.readexactly(self._struct.size)
        if not raw:
            raise ConnectionError("No more data from peer")
        return self._struct.unpack(raw)

    async def _initialize(self, good_packet_threshold=64):
        """Consume samples until ``good_packet_threshold`` arrive with a
        plausible inter-arrival gap; returns self."""
        last_time = time()
        good_packets = 0
        while good_packets < good_packet_threshold:
            await self._reader.readexactly(self._struct.size)
            cur_time = time()
            delta = cur_time - last_time
            # a gap of at least half a sample period counts as "good"
            if delta > (1.0 / self.sample_rate) / 2:
                good_packets += 1
                logger.debug("Good packet: %.4f ms", delta * 1000.0)
            else:
                logger.debug("Bad packet: %.4f ms", delta * 1000.0)
            last_time = cur_time
        return self
def _read_matt2(self, data: bytes, n: int) -> int:
    """
    1 MID I Material identification number
    2 TID(15) I TABLEMi entry identification numbers
    17 UNDEF none Not used
    """
    ntotal = 68 * self.factor  # 17*4
    matt2_struct = Struct(mapfmt(self._endian + b'17i', self.size))
    nmaterials = (len(data) - n) // ntotal
    for unused_i in range(nmaterials):
        out = matt2_struct.unpack(data[n:n + ntotal])
        (mid, g11_table, g12_table, g13_table, g22_table, g23_table,
         g33_table, rho_table, a1_table, a2_table, a3_table, unused_zeroa,
         ge_table, st_table, sc_table, ss_table, unused_zerob) = out
        # the two UNDEF words are expected to be zero
        assert unused_zeroa == 0, f'unused_zeroa={unused_zeroa} out={out}'
        assert unused_zerob == 0, f'unused_zerob={unused_zerob} out={out}'
        if self.is_debug_file:
            self.binary_debug.write(' MATT2=%s\n' % str(out))
        mat = MATT2(mid, g11_table, g12_table, g13_table, g22_table,
                    g23_table, g33_table, rho_table, a1_table, a2_table,
                    a3_table, ge_table, st_table, sc_table, ss_table,
                    comment='')
        self._add_material_dependence_object(mat, allow_overwrites=False)
        n += ntotal
    self.card_count['MATT2'] = nmaterials
    return n
def _read_mats1(self, data: bytes, n: int) -> int:
    """
    MATS1(503,5,90) - record 12
    """
    ntotal = 44 * self.factor  # 11*4
    mats1_struct = Struct(mapfmt(self._endian + b'3ifiiff3i', self.size))
    nmaterials = (len(data) - n) // ntotal
    for unused_i in range(nmaterials):
        out = mats1_struct.unpack(data[n:n + ntotal])
        (mid, tid, Type, h, yf, hr, limit1, limit2, a, bmat, c) = out
        # the three trailing words are expected to be unused/zero
        assert a == 0, a
        assert bmat == 0, bmat
        assert c == 0, c
        data_in = [mid, tid, Type, h, yf, hr, limit1, limit2]
        if self.is_debug_file:
            self.binary_debug.write(' MATS1=%s\n' % str(out))
        mat = MATS1.add_op2_data(data_in)
        self._add_material_dependence_object(mat, allow_overwrites=False)
        n += ntotal
    self.card_count['MATS1'] = nmaterials
    return n
def _read_aestat(self, data: bytes, n: int) -> int:
    """
    MSC 2018.2
    Word Name Type Description
    1 ID I
    2 LABEL(2) CHAR4
    """
    ntotal = 12  # 4 * 3: one int id + two CHAR4 words (original comment said 4 * 8)
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(self._endian + b'i 8s')
    for unused_i in range(ncards):
        edata = data[n:n + ntotal]
        out = structi.unpack(edata)
        aestat_id, label = out
        # strip CHAR4 blank padding from the label
        label = label.rstrip().decode('ascii')
        self.add_aestat(aestat_id, label)
        n += ntotal
    return n
def runCQUAD4(self, data, n, Element):
    """
    common method for CQUAD4, CQUADR
    """
    nelements = (len(data) - n) // 56
    s = Struct(b'6iffii4f')  # 14 fields, 56 bytes
    # BUGFIX: the debug line multiplied by 44; each record is 56 bytes.
    self.binary_debug.write('ndata=%s\n' % (nelements * 56))
    for i in range(nelements):
        eData = data[n:n + 56]  # 14*4
        out = s.unpack(eData)
        (eid, pid, n1, n2, n3, n4, theta, zoffs, blank, tflag,
         t1, t2, t3, t4) = out
        self.binary_debug.write(' %s=%s\n' % (Element.type, str(out)))
        # the blank word is dropped from the card data
        dataInit = [eid, pid, n1, n2, n3, n4, theta, zoffs,
                    tflag, t1, t2, t3, t4]
        elem = Element(None, dataInit)
        self.addOp2Element(elem)
        n += 56
    self.card_count[Element.type] = nelements
    return n
def ReceivePacket(self):
    """Receive one framed packet from self.request and return
    (Fcode, Id, body_size, Data)."""
    from struct import Struct
    MAXBUFFSIZE = 1024
    header = self.request.recv(self.Head.size)
    if not header:
        raise socket.error('request disconnected')
    Fcode, Id, DataLength = self.Head.unpack(header)
    Body = Struct('!%ds' % (DataLength, ))
    # read the body in bounded chunks until DataLength bytes arrive
    Packet = ''
    remaining = DataLength
    while remaining > 0:
        if remaining < MAXBUFFSIZE:
            BUFFSIZE = remaining + 0
        else:
            BUFFSIZE = MAXBUFFSIZE + 0
        piece = self.request.recv(BUFFSIZE)
        if not piece:
            raise socket.error('request disconnected')
        remaining -= len(piece)
        Packet += piece
    Data = Body.unpack(Packet)[0]
    return Fcode, Id, Body.size, Data
def _read_mat11_old(self, data, n):
    """
    MAT11(2903,29,371)
    """
    ntotal = 80  # 20*4
    mat_struct = Struct(self._endian + b'i 15f 4s 4s 4s 4s')
    nmaterials = (len(data) - n) // ntotal
    assert nmaterials > 0, nmaterials
    for unused_i in range(nmaterials):
        out = mat_struct.unpack(data[n:n+80])
        (mid, e1, e2, e3, nu12, nu13, nu23, g12, g13, g23,
         rho, a1, a2, a3, tref, ge,
         blank1, blank2, blank3, blank4) = out
        if self.is_debug_file:
            self.binary_debug.write(' MAT11-old=%s\n' % str(out))
        mat = MAT11.add_op2_data(out)
        assert mid > 0, mat
        self.add_op2_material(mat)
        n += 80
    self.card_count['MAT11'] = nmaterials
    return n
class NodePackFormat(object):
    """Packs/unpacks a Node (parent, source, target) into 6 bytes:
    little-endian int16 plus two uint16; None source/target map to 0 on
    the wire and back to None when parsed."""

    __slots__ = ('structfmt', )

    def __init__(self):
        self.structfmt = Struct('<hHH', )

    def format(self, node):
        """Serialize ``node`` to bytes."""
        fields = (
            node.parent,
            node.source or 0,
            node.target or 0,
        )
        return self.structfmt.pack(*fields)

    def parse(self, byteseq):
        """Deserialize 6 bytes back into a Node."""
        parent, source, target = self.structfmt.unpack(byteseq)
        return Node(
            parent=parent,
            source=source or None,
            target=target or None,
        )
def _read_mat2(self, data, n):
    """
    MAT2(203,2,78) - record 3; anisotropic shell material.

    Each entry is 17 words: mid, 15 floats (G matrix, rho, alphas, tref,
    ge, stress limits) and mcsid.  Materials with an id outside
    (0, 1e8] are parked in ``big_materials`` instead of being added.
    """
    ntotal = 68  # 17*4
    struc = Struct(b(self._endian + 'i15fi'))
    nmaterials = (len(data) - n) // ntotal
    for unused_i in range(nmaterials):
        out = struc.unpack(data[n:n + ntotal])
        mat = MAT2.add_op2_data(out)
        mid = out[0]
        if 0 < mid <= 1e8:
            # normal-range id -> regular material
            self.add_op2_material(mat)
        else:
            # out-of-range id (auto-generated) -> keep separately
            self.big_materials[mid] = mat
        n += ntotal
    self.card_count['MAT2'] = nmaterials
    return n
def _read_temp(self, data, n):
    """
    TEMP(5701,57,27) - the marker for Record 32

    Each record is 3 words: sid, grid id, temperature.  Grid ids
    >= 10,000,000 are logged and skipped.

    .. warning:: buggy
    """
    ntotal = 12  # 3*4
    nentries = (len(data) - n) // ntotal
    # bug fix: Struct('iif') ignored the table's endianness; every
    # sibling reader prefixes self._endian, and without it big-endian
    # decks unpack garbage
    struct_2if = Struct(self._endian + b'iif')
    for i in range(nentries):
        edata = data[n:n + ntotal]
        out = struct_2if.unpack(edata)
        if self.is_debug_file:
            self.binary_debug.write('  TEMP=%s\n' % str(out))
        (sid, g, T) = out
        if g < 10000000:
            load = TEMP.add_op2_data(out)
            self._add_thermal_load_object(load)
        else:
            self.log.debug('TEMP = %s' % (out))
        n += ntotal
    self.card_count['TEMP'] = nentries
    return n
def _read_darea(self, data, n):
    """
    DAREA(27,17,182) - the marker for Record 2

    1 SID I  Load set identification number
    2 P   I  Grid, scalar, or extra point identification number
    3 C   I  Component number
    4 A   RS Scale factor
    """
    ntotal = 16  # 4 words
    nentries = (len(data) - n) // ntotal
    self.increase_card_count('DAREA', nentries)
    struct_3if = Struct(self._endian + b'3if')
    for unused_i in range(nentries):
        out = struct_3if.unpack(data[n:n + ntotal])  # (sid, p, c, a)
        self._add_darea_object(DAREA.add_op2_data(data=out))
        n += ntotal
    return n
def _read_cord2r(self, data, n):
    """
    (2101,21,8) - the marker for Record 5 (CORD2R)

    Each record is 13 words: cid, two sanity-check flag words (always
    1 and 2), rid, then the 9 floats defining points A, B and C.
    """
    ntotal = 52  # 13*4
    nentries = (len(data) - n) // ntotal
    struct_4i9f = Struct(self._endian + b'4i9f')
    for unused_i in range(nentries):
        out = struct_4i9f.unpack(data[n:n + ntotal])
        cid, one, two, rid = out[:4]
        assert one == 1, one
        assert two == 2, two
        # drop the two flag words before handing off to the card
        data_in = [cid, rid] + list(out[4:])
        if self.is_debug_file:
            self.binary_debug.write('  CORD2R=%s\n' % data_in)
        coord = CORD2R.add_op2_data(data_in)
        self._add_coord_object(coord, allow_overwrites=True)
        n += ntotal
    self.increase_card_count('CORD2R', nentries)
    return n
def _read_temp(self, data: bytes, n: int) -> int:
    """
    TEMP(5701,57,27) - the marker for Record 32

    Three words per record (sid, grid, temperature); grid ids
    >= 10,000,000 are logged and skipped.

    .. warning:: buggy
    """
    ntotal = 12 * self.factor  # 3 words
    struct_2if = Struct(mapfmt(self._endian + b'iif', self.size))
    nentries = (len(data) - n) // ntotal
    for unused_i in range(nentries):
        sid, g, temperature = struct_2if.unpack(data[n:n + ntotal])
        out = (sid, g, temperature)
        if self.is_debug_file:
            self.binary_debug.write('  TEMP=%s\n' % str(out))
        if g < 10000000:
            # normal grid id -> build the thermal load
            self._add_thermal_load_object(TEMP.add_op2_data(out))
        else:
            # huge "grid" ids are skipped (see .. warning::)
            self.log.debug('TEMP = %s' % (out))
        n += ntotal
    self.card_count['TEMP'] = nentries
    return n
def _read_spline4_nx(self, data: bytes, n: int) -> int:
    """
    Reads SPLINE4 in the NX layout (11 words; no trailing FTYPE/RCORE).

    Word Name Type Description
    1  EID       I     Spline element identification
    2  CAERO     I     Component identification
    3  AELIST    I     AELIST id for boxes
    4  SETG      I     SETi id for grids
    5  DZ        RS    Smoothing parameter
    6  METHOD(2) CHAR4 Method: IPS|TPS|FPS
    8  USAGE(2)  CHAR4 Usage flag: FORCE|DISP|BOTH
    10 NELEM     I     Number of elements for FPS on x-axis
    11 MELEM     I     Number of elements for FPS on y-axis
    (MSC adds words 12 FTYPE and 13 RCORE, which NX does not write)
    """
    ntotal = 4 * 11
    ndatai = len(data) - n
    ncards = ndatai // ntotal
    assert ndatai % ntotal == 0
    structi = Struct(self._endian + b'4i f 8s 8s 2i')
    for unused_i in range(ncards):
        out = structi.unpack(data[n:n + ntotal])
        (eid, caero, aelist, setg, dz,
         method_bytes, usage_bytes, nelements, melements) = out
        # the CHAR4 fields are blank padded on the right
        method = method_bytes.rstrip().decode('ascii')
        usage = usage_bytes.rstrip().decode('ascii')
        self.add_spline4(eid, caero, aelist, setg, dz, method, usage,
                         nelements, melements)
        n += ntotal
    self.to_nx()
    return n
def _read_real_eigenvalue_4(self, data):
    """Parses the real-eigenvalue Table 4: one 28-byte row per mode,
    collected into a RealEigenvalues object keyed by the current Title.

    On the first read pass (read_mode == 1) the table is only skipped.
    """
    if self.read_mode == 1:
        return len(data)
    ntotal = 28  # 7 words
    nmodes = len(data) // ntotal
    lama = RealEigenvalues(self.Title)
    self.eigenvalues[self.Title] = lama
    struct_2i5f = Struct('ii5f')
    n = 0
    for imode in range(nmodes):
        out = struct_2i5f.unpack(data[n:n + ntotal])
        if self.debug4():
            self.binary_debug.write('  eigenvalue%s - %s\n' % (imode, str(out)))
        # out = (mode_num, extract_order, eigenvalue, radian, cycle,
        #        gen_mass, gen_stiffness)
        lama.addF06Line(out)
        n += ntotal
    return n
def _read_pbeaml(self, data, n): """ PBEAML(9202,92,53) Word Name Type Description 1 PID I Property identification number 2 MID I Material identification number 3 GROUP(2) CHAR4 Cross-section group name 5 TYPE(2) CHAR4 Cross section type 7 VALUE RS Cross section values for XXB, SO, NSM, and dimensions Word 7 repeats until (-1) occurs """ #strs = numpy.core.defchararray.reshapesplit(data, sep=",") ints = np.frombuffer(data[n:], self._uendian + 'i').copy() floats = np.frombuffer(data[n:], self._uendian + 'f').copy() iminus1 = np.where(ints == -1)[0] istart = [0] + list(iminus1[:-1] + 1) iend = iminus1 struct1 = Struct(self._endian + b'2i8s8s') for i, (istarti, iendi) in enumerate(zip(istart, iend)): idata = data[n + istarti * 4:n + (istarti + 6) * 4] pid, mid, group, beam_type = struct1.unpack(idata) group = group.decode('latin1').strip() beam_type = beam_type.decode('latin1').strip() fvalues = floats[istarti + 6:iendi] if self.is_debug_file: self.binary_debug.write(' %s\n' % str(fvalues)) self.log.debug('pid=%i mid=%i group=%r beam_type=%r' % (pid, mid, group, beam_type)) self.log.debug(fvalues) #self.log.debug('pid=%i mid=%i group=%s beam_type=%s' % (pid, mid, group, beam_type)) data_in = [pid, mid, group, beam_type, fvalues] prop = PBEAML.add_op2_data(data_in) self._add_op2_property(prop) nproperties = len(istart) self.card_count['PBEAML'] = nproperties return len(data)
def _readCPENTA(self, data, n):
    """
    CPENTA(4108,41,280) - the marker for Record 62

    Builds a CPENTA15 when any of the 9 midside nodes (g7..g15) is
    nonzero, otherwise a 6-node CPENTA6.
    """
    ntotal = 68  # 17*4
    struct_17i = Struct(b'17i')
    nelements = (len(data) - n) // ntotal
    for unused_i in xrange(nelements):
        out = struct_17i.unpack(data[n:n + ntotal])
        # out = (eid, pid, g1..g6, g7..g15)
        dataIn = list(out[:8])
        bigNodes = list(out[8:])
        if sum(bigNodes) > 0:
            elem = CPENTA15(None, dataIn + bigNodes)
        else:
            elem = CPENTA6(None, dataIn)
        self.addOp2Element(elem)
        n += ntotal
    self.card_count['CPENTA'] = nelements
    return n
def read_points_binary(self, npoints):
    """Read ``npoints`` xyz triplets (big-endian float32) from
    ``self.infile``.

    Data is consumed in 1000-point (12000-byte) chunks, then one final
    partial chunk; the 8-byte block separator that follows the point
    data is read and discarded.  Returns an (npoints, 3) float64 array.
    """
    chunk_floats = 3000  # 1000 points per chunk
    chunk_bytes = 4 * chunk_floats
    struct_chunk = Struct(b'>3000f')

    points = zeros(npoints * 3, 'float64')
    remaining = npoints * 12  # bytes of point data left to read
    pos = 0
    while remaining > 12000:
        raw = self.infile.read(chunk_bytes)
        points[pos:pos + chunk_floats] = struct_chunk.unpack(raw)
        pos += chunk_floats
        remaining -= chunk_bytes
    assert remaining >= 0, 'size=%s' % remaining
    if remaining > 0:
        tail_fmt = b'>%if' % (remaining // 4)
        points[pos:] = unpack(tail_fmt, self.infile.read(remaining))

    points = points.reshape((npoints, 3))
    self.infile.read(8)  # end of second block, start of third block
    return points
def _readCTETRA(self, data, n):
    """
    CTETRA(5508,55,217) - the marker for Record 87

    Builds a CTETRA10 when any of the 6 midside nodes (n5..n10) is
    nonzero, otherwise a 4-node CTETRA4.
    """
    ntotal = 48  # 12*4
    struct_12i = Struct(b'12i')
    nelements = (len(data) - n) // ntotal
    for unused_i in xrange(nelements):
        out = struct_12i.unpack(data[n:n + ntotal])
        # out = (eid, pid, n1..n4, n5..n10)
        dataIn = list(out[:6])
        bigNodes = list(out[6:])
        if sum(bigNodes) > 0:
            elem = CTETRA10(None, dataIn + bigNodes)
        else:
            elem = CTETRA4(None, dataIn)
        self.addOp2Element(elem)
        n += ntotal
    self.card_count['CTETRA'] = nelements
    return n
def _read_real_eigenvalue_4(self, data, ndata):
    """parses the Real Eigenvalues Table 4 Data

    One 28-byte row per mode; rows accumulate into a RealEigenvalues
    object keyed by the current title.  Pass 1 only skips the table.
    """
    if self.read_mode == 1:
        return ndata
    ntotal = 28  # 7 words
    nmodes = ndata // ntotal
    lama = RealEigenvalues(self.title, nmodes=nmodes)
    self.eigenvalues[self.title] = lama
    struct_2i5f = Struct(self._endian + b'ii5f')
    n = 0
    for imode in range(nmodes):
        # (imode, extract_order, eigenvalue, radian, cycle,
        #  gen_mass, gen_stiffness)
        out = struct_2i5f.unpack(data[n:n + ntotal])
        if self.is_debug_file:
            self.binary_debug.write('  eigenvalue%s - %s\n' % (imode, str(out)))
        lama.add_f06_line(out, imode)
        n += ntotal
    return n
def _read_tload2_nx(self, data, n):
    """
    TLOAD2(7207,72,139) - Record 37 / NX

    13 words: SID, DAREA, DELAYI, TYPE (ints) followed by T1, T2, F,
    P, C, B, DELAYR, US0, VS0 (floats).  When DELAYI is 0 the constant
    DELAYR value supplies the delay.  US0/VS0 are undocumented in NX.
    """
    ntotal = 52  # 13 words
    struct_tload2 = Struct(self._endian + b'4i 7f 2f')
    nentries = (len(data) - n) // ntotal
    for unused_i in range(nentries):
        out = struct_tload2.unpack(data[n:n + ntotal])
        if self.is_debug_file:
            self.binary_debug.write('  TLOAD2=%s\n' % str(out))
        (sid, darea, delayi, load_type,
         t1, t2, freq, p, c, growth, delayr, us0, vs0) = out
        # DELAYI == 0 means "use the constant DELAYR value"
        delay = delayr if delayi == 0 else delayi
        dload = TLOAD2(sid, darea, delay=delay, Type=load_type, T1=t1,
                       T2=t2, frequency=freq, phase=p, c=c, b=growth,
                       us0=us0, vs0=vs0)
        self._add_dload_entry(dload)
        n += ntotal
    self.increase_card_count('TLOAD2', nentries)
    return n
def _read_tic(self, data, n):
    """
    TIC(6607,66,137) - transient initial conditions.

    1 SID I  Load set identification number
    2 G   I  Grid, scalar, or extra point identification number
    3 C   I  Component number for point GD
    4 U0  RS Initial displacement
    5 V0  RS Initial velocity
    """
    ntotal = 20  # 5*4
    struct_3i2f = Struct(self._endian + b'3i 2f')
    nentries = (len(data) - n) // ntotal
    for unused_i in range(nentries):
        sid, nid, comp, u0, v0 = struct_3i2f.unpack(data[n:n + ntotal])
        if self.is_debug_file:
            self.binary_debug.write('  TIC=%s\n' % str((sid, nid, comp, u0, v0)))
        self.add_tic(sid, [nid], [comp], u0=u0, v0=v0)
        n += ntotal
    self.card_count['TIC'] = nentries
    return n
def yield_stimulus(spec: struct.Struct, stacklimit=2) -> 'Iterator[Tuple[Any, ...]]':
    """Yield the stimulus tuples recorded for the calling function.

    The stimulus file sits next to the calling function's source file
    and is named ``<function name>.stim``; it is read as consecutive
    fixed-size binary records described by *spec*.

    Args:
        spec (struct.Struct): layout of one stimulus record.
        stacklimit: stack depth used to locate the calling frame
            (2 = the immediate caller of this generator).

    Yields:
        tuple: one record's values, as unpacked by *spec*.

    Note:
        bug fix: the previous ``-> Tuple[Any]`` annotation was wrong --
        this is a generator, not a single 1-tuple.  A string annotation
        is used so no extra typing import is required.
    """
    # frame 0 of the last `stacklimit` frames is the caller
    test = traceback.extract_stack(limit=stacklimit)[0]
    stim_file = f"{pathlib.Path(test.filename).parent / test.name!s}.stim"
    with open(stim_file, 'rb') as f:
        record = f.read(spec.size)
        while record:
            yield spec.unpack(record)
            record = f.read(spec.size)
def _read_randt1(self, data, n):
    """
    RANDT1(2207,22,196) - autocorrelation time-lag setup.

    Word Name Type Description
    1 SID  I  Set identification number
    2 N    I  Number of time lag intervals
    3 TO   RS Starting time lag
    4 TMAX RS Maximum time lag
    """
    ntotal = 16  # 4*4
    struct_2i2f = Struct(self._endian + b'2i 2f')
    nentries = (len(data) - n) // ntotal
    for unused_i in range(nentries):
        sid, nlags, to, tmax = struct_2i2f.unpack(data[n:n + ntotal])
        if self.is_debug_file:
            self.binary_debug.write('  RANDT1=%s\n' % str((sid, nlags, to, tmax)))
        self.add_randt1(sid, nlags, to, tmax)
        n += ntotal
    self.card_count['RANDT1'] = nentries
    return n
def _readPSHELL(self, data, n):
    """
    PSHELL(2302,23,283) - the marker for Record 51

    11 words per property.  Entries whose pid or any mid exceeds 1e8
    are parked in ``bigProperties`` instead of being added directly.
    """
    ntotal = 44  # 11*4
    struct_pshell = Struct(b'iififi4fi')
    nproperties = (len(data) - n) // ntotal
    for unused_i in range(nproperties):
        out = struct_pshell.unpack(data[n:n + ntotal])
        (pid, mid1, t, mid2, bk, mid3, ts, nsm, z1, z2, mid4) = out
        self.binary_debug.write('  PSHELL=%s\n' % str(out))
        prop = PSHELL(None, out)
        if max(pid, mid1, mid2, mid3, mid4) > 1e8:
            # out-of-range ids -> keep the property separately
            self.bigProperties[pid] = prop
        else:
            self.addOp2Property(prop)
        n += ntotal
    self.card_count['PSHELL'] = nproperties
    return n
def _read_tload1(self, data, n):
    """
    TLOAD1(7107,71,138) - Record 37

    1 SID    I  Load set identification number
    2 DAREA  I  DAREA Bulk Data entry identification number
    3 DELAYI I  DELAY Bulk Data entry identification number
    4 TYPE   I  Nature of the dynamic excitation
    5 TID    I  Identification number of TABLEDi entry that gives F(t)
    6 DELAYR RS If DELAYI = 0, constant value for delay
    7 U0     RS Initial displacement factor for enforced motion (MSC; NX undocumented)
    8 V0     RS Initial velocity factor for enforced motion (MSC; NX undocumented)
    """
    ntotal = 32  # 8 words
    struct_5i3f = Struct(self._endian + b'5i 3f')
    nentries = (len(data) - n) // ntotal
    for unused_i in range(nentries):
        out = struct_5i3f.unpack(data[n:n + ntotal])
        sid, darea, delayi, load_type, tid, delayr, us0, vs0 = out
        if self.is_debug_file:
            self.binary_debug.write('TLOAD1=%s\n' % str(out))
        # DELAYI == 0 means "use the constant DELAYR value"
        delay = delayr if delayi == 0 else delayi
        dload = TLOAD1(sid, darea, tid, delay=delay, Type=load_type,
                       us0=us0, vs0=vs0)
        self._add_dload_entry(dload)
        n += ntotal
    self.increase_card_count('TLOAD1', nentries)
    return n
def main(args):
    """CLI entry point: parse args, sniff the LUKS header version and
    dispatch to the matching extractor.

    Args:
        args: argv-style list (without the program name).

    Raises:
        ValueError: on a truncated file, wrong magic bytes, or an
            unsupported LUKS version.
    """
    # prepare parser and parse args
    parser = ArgumentParser(description="luks2hashcat extraction tool")
    parser.add_argument("path", type=str, help="path to LUKS container")
    args = parser.parse_args(args)

    # prepare struct
    header_struct = Struct(">6sH")

    with open(args.path, "rb") as file:
        # read pre header
        header = file.read(header_struct.size)
        # explicit raise instead of assert: asserts are stripped under
        # `python -O`, so validation must not rely on them
        if len(header) != header_struct.size:
            raise ValueError("File contains less data than needed")

        # convert bytes into temporary pre header
        header = TmpHeaderPre(*header_struct.unpack(header))

        # check magic bytes
        magic_bytes = {
            HeaderVersion1.MAGIC,
        }
        if header.magic not in magic_bytes:
            raise ValueError("Improper magic bytes")

        # back to start of the file
        file.seek(0, SEEK_SET)

        # look up the extractor; the narrow try only covers the mapping
        # lookup so a KeyError raised inside extract() is not mislabeled
        try:
            mapping = {
                HeaderVersion1.VERSION: extract_version1,
            }
            extract = mapping[header.version]
        except KeyError as e:
            raise ValueError("Unsupported version") from e
        extract(file)
def _read_plplane(self, data, n):
    """
    PLPLANE(4606,46,375) - hyperelastic plane element property.

    NX 10 layout: PID, MID, CID, STR (CHAR4), T, CSOPT, 5 unused words.
    MSC 2016 layout: PID, MID, CID, STR (CHAR4), 7 unused words.

    .. warning:: CSOPT ad T are not supported
    """
    ntotal = 44  # 11 words
    struct_plplane = Struct(b(self._endian + '3i 4s f 6i'))
    nentries = (len(data) - n) // ntotal
    for unused_i in range(nentries):
        out = struct_plplane.unpack(data[n:n + ntotal])
        pid, mid, cid, location, t, csopt = out[:6]
        location = location.decode('latin1')
        # t and csopt are read but intentionally dropped (see warning)
        self.add_plplane(pid, mid, cid=cid,
                         stress_strain_output_location=location)
        n += ntotal
    self.card_count['PLPLANE'] = nentries
    return n
def unpack(self, data):
    """Unpack a GameControlData packet into this object's fields.

    Returns (True, None) on success, or (False, message) when *data*
    is shorter than the fixed header; raises on a bad header constant
    or protocol version mismatch.
    """
    # check 'data' length
    if len(data) < self.size:
        return (False, "Not well formed GameControlData!")

    msg = Struct.unpack(self, data[:self.size])

    # header / version sanity checks
    if msg[0] != self.GAMECONTROLLER_STRUCT_HEADER:
        raise Exception("Invalid message type!")
    if msg[1] != self.GAMECONTROLLER_STRUCT_VERSION:
        raise Exception("Wrong version!")

    # assign the scalar fields in wire order
    values = iter(msg[2:])
    for field in ('packetNumber', 'playersPerTeam', 'competitionPhase',
                  'competitionType', 'gamePhase', 'gameState', 'setPlay',
                  'firstHalf', 'kickingTeam', 'dropInTeam', 'dropInTime',
                  'secsRemaining', 'secondaryTime'):
        setattr(self, field, next(values))

    # each team's fixed-size block follows the header back-to-back
    for i, team in enumerate(self.team):
        offset = self.size + i * team.size
        team.unpack(data[offset:offset + team.size])

    return (True, None)