def vl_enc(data, md5_extra):
    # Encode *data* with a single-byte XOR "magic key" derived from the
    # average byte value of the payload, framed with an MD5 digest of
    # data + md5_extra.
    datalen = len(data)
    md5 = hashlib.md5()
    md5.update(data + md5_extra)
    # hexToStr: project helper (defined elsewhere) — converts the hex
    # digest into its raw character form.
    hasheddata = hexToStr(md5.hexdigest())
    j = 0
    i = 0
    # Sum every byte value; the TypeError fallback handles str elements
    # (Python 2) vs int elements (Python 3 bytes indexing).
    while (i < datalen):
        try:
            j += data[i]
        except TypeError:
            j += ord(data[i])
        i += 1
    # The XOR key is the rounded average byte value of the payload.
    magickey = chr(int(round(float(j) / float(datalen))))
    # Pre-sized scratch list; every slot is overwritten below.
    encddata = list(range(len(data)))
    if isinstance(magickey, int):
        pass
    else:
        magickey = ord(magickey)
    for i in range(datalen):
        # Python doesn't do bitwise operations with characters, so we need to convert them to integers first.
        # It also doesn't like it if you put integers in the ord() to be translated to integers, that's what the IF, ELSE is for.
        if isinstance(data[i], int):
            encddata[i] = data[i] ^ magickey
        else:
            encddata[i] = ord(data[i]) ^ magickey
    # Frame: 0x02, key byte, 4-byte marker, digest, XOR-ed payload.
    try:
        result = "\x02" + chr(magickey) + "\x04\x00\x00\x00" + str(hasheddata) + bytearray(encddata).decode("utf-8")
    except UnicodeDecodeError:
        # NOTE(review): str + bytearray raises TypeError on Python 3, so
        # this fallback looks Python-2-only — confirm target interpreter.
        result = "\x02" + chr(magickey) + "\x04\x00\x00\x00" + str(hasheddata) + bytearray(encddata)
    return (result)
def AppendToFile(self, filename, n_line_breaks=0):
    '''Appends user-friendly description of this time result object to a
    specified text file.

    If line breaks are desired, they will be appended to the file before
    the data.  A trailing newline is always written after the data so the
    file stays readable when more data is appended later.

    Args:
        filename: path of the file to append to (opened via the project
            helper OpenAppendByteDataFile).
        n_line_breaks: number of line separators to write before the
            data; negative values are treated as 0.
    '''
    # Guard condition: negative counts mean "no line breaks".
    if n_line_breaks < 0:
        n_line_breaks = 0
    # Open binary file in append mode
    filehandler = OpenAppendByteDataFile(filename)
    # os.linesep encoded once; previously this was rebuilt char-by-char
    # through ord()/bytearray, which is equivalent but wasteful.
    newline_bytes = os.linesep.encode("ascii")
    # If line breaks are desired, append them before the data
    if n_line_breaks > 0:
        filehandler.write(newline_bytes * n_line_breaks)
    # Append byte format of string representation of self to file
    # (str.encode already returns bytes; the old bytes(...) wrapper was
    # redundant).
    filehandler.write(str(self).encode("utf-8"))
    # Insert a newline to prepare for readable format considering future
    # data appending
    filehandler.write(newline_bytes)
    # Close file
    filehandler.close()
def writeData(self,f):
    # Serialize this skin (bind shape, joint references and joint bind
    # poses) into the little-endian binary stream *f*, recording our own
    # file offset first.  Bounding box/sphere fields are deliberately
    # omitted (see the commented-out writes below).
    self.offset=f.tell();
    # Debug trace of the bind-shape float count.
    print("BindShape "+str(len(self.bindShape)));
    for b in self.bindShape:
        f.write(struct.pack("<f",b));
    f.write(struct.pack("<I",len(self.joints)));#joints
    for j in self.joints:
        # Each joint reference is a '#'-prefixed ASCII string preceded by
        # its length (the +1 accounts for the '#').
        f.write(struct.pack("<I",len(j.reference)+1));#mesh
        f.write(bytearray('#',"ascii"));
        f.write(bytearray(j.reference,"ascii"));
    # 16 floats (a 4x4 matrix) per joint.
    f.write(struct.pack("<I",len(self.joints)*16));#joints
    for b in self.jointBindPoses:
        f.write(struct.pack("<f",b));
    #Omit bounding box
    #f.write(struct.pack("<f",0));#Omit bounding box
    #f.write(struct.pack("<f",0));#Omit bounding box
    #f.write(struct.pack("<f",0));#Omit bounding box
    #f.write(struct.pack("<f",0));#Omit bounding box
    #f.write(struct.pack("<f",0));#Omit bounding box
    #f.write(struct.pack("<f",0));#Omit bounding box
    #Omit bounding sphere
    #f.write(struct.pack("<f",0));#Omit bounding sphere
    #f.write(struct.pack("<f",0));#Omit bounding sphere
    #f.write(struct.pack("<f",0));#Omit bounding sphere
    #f.write(struct.pack("<f",0));#Omit bounding sphere
    return ;
def testProcedureWithBinaryAndFloatParameters(self):
    # Round-trip VARBYTE and FLOAT values through stored-procedure
    # INOUT/OUT parameters; exercised only against the ODBC driver.
    if self.dsn == "ODBC":
        with udaExec.connect(self.dsn, username=self.username,
                             password=self.password) as conn:
            self.assertIsNotNone(conn)
            # (Re)create the procedure that echoes p1->p2, p3->p4 and
            # returns the current timestamp in p5.
            for r in conn.execute(
                    """REPLACE PROCEDURE testProcedure1 (INOUT p1 VARBYTE(128), OUT p2 VARBYTE(128), INOUT p3 FLOAT, OUT p4 FLOAT, OUT p5 TIMESTAMP) BEGIN SET p2 = p1; SET p4 = p3; SET p5 = CURRENT_TIMESTAMP; END;"""):
                logger.info(r)
            result = conn.callproc(
                "testProcedure1",
                (teradata.InOutParam(bytearray([0xFF]), "p1"),
                 teradata.OutParam("p2"),
                 teradata.InOutParam(float("inf"), "p3"),
                 teradata.OutParam("p4", dataType="FLOAT"),
                 teradata.OutParam("p5", dataType="TIMESTAMP")))
            # INOUT values must come back unchanged; OUT values must
            # mirror their paired inputs.
            self.assertEqual(result.p1, bytearray([0xFF]))
            self.assertEqual(result.p2, result.p1)
            self.assertEqual(result.p3, float('inf'))
            self.assertEqual(result.p4, result.p3)
def test_readinto(self):
    # Exercise readinto() on the IO class under test: sequential fills,
    # partial fill at EOF, zero-length target, wrong argument type,
    # array targets, reads on a closed file, and reads past the end.
    buf = self.buftype("1234567890")
    memio = self.ioclass(buf)
    b = bytearray(b"hello")
    self.assertEqual(memio.readinto(b), 5)
    self.assertEqual(b, b"12345")
    self.assertEqual(memio.readinto(b), 5)
    self.assertEqual(b, b"67890")
    self.assertEqual(memio.readinto(b), 0)
    self.assertEqual(b, b"67890")
    # Target longer than the remaining data: only 10 bytes overwritten.
    b = bytearray(b"hello world")
    memio.seek(0)
    self.assertEqual(memio.readinto(b), 10)
    self.assertEqual(b, b"1234567890d")
    b = bytearray(b"")
    memio.seek(0)
    self.assertEqual(memio.readinto(b), 0)
    self.assertEqual(b, b"")
    self.assertRaises(TypeError, memio.readinto, '')
    import array
    # NOTE(review): the b'b' typecode and tostring() are Python 2
    # idioms; Python 3 uses 'b' and tobytes().
    a = array.array(b'b', b"hello world")
    memio = self.ioclass(buf)
    memio.readinto(a)
    self.assertEqual(a.tostring(), b"1234567890d")
    memio.close()
    # readinto() on a closed stream must raise ValueError.
    self.assertRaises(ValueError, memio.readinto, b)
    # Seeking past the end yields an empty read.
    memio = self.ioclass(b"123")
    b = bytearray()
    memio.seek(42)
    memio.readinto(b)
    self.assertEqual(b, b"")
def _receive(self, cls):
    """Read one framed response from the socket and construct *cls* from it.

    Frame layout (as consumed below): HEADER_SIZE big-endian length
    bytes, one protocol-version byte, one header-length byte, then a
    sequence integer, a status integer and the payload.
    """
    message = bytearray(self.socket.recv(max(BUFFER_SIZE, HEADER_SIZE + 2)))
    if len(message) < HEADER_SIZE + 2:
        raise ProtocolException("Unable to read header")
    # Accumulate the big-endian message length.  (xrange: Python 2.)
    message_length = 0
    for i in xrange(0, HEADER_SIZE):
        message_length = message_length << 8 | message[i]
    if message[HEADER_SIZE] != PROTOCOL_VERSION:
        raise ProtocolException("Wrong protocol 0x%X expected 0x%X" % (message[HEADER_SIZE], PROTOCOL_VERSION))
    header_length = message[HEADER_SIZE + 1]
    # Keep reading until the whole header is buffered.
    while len(message) < HEADER_SIZE + 2 + header_length:
        message.extend(self.socket.recv(BUFFER_SIZE))
    (sequence, offset) = read_integer(message, HEADER_SIZE + 2)
    if sequence != self.sequence:
        raise ProtocolException("Invalid sequence %d expected %d" % (sequence, self.sequence))
    (status, offset) = read_integer(message, offset)
    if status != 0:
        raise ProtocolException("Bad status: 0x%X" % status)
    # Drain the remaining payload; an empty recv() means the peer closed
    # the connection, so bail out early rather than loop forever.
    bytes_to_read = message_length - len(message) + HEADER_SIZE
    while bytes_to_read > 0:
        chunk = bytearray(self.socket.recv(bytes_to_read))
        bytes_to_read -= len(chunk)
        message.extend(chunk)
        if len(chunk) == 0:
            break
    return cls(**{ 'message': message, 'offset': offset })
def writeData(self,f):
    # Serialize this scene node: type id, transform floats, a
    # '#'-prefixed parent reference, children (recursively), then the
    # camera/light/model payloads with placeholder bytes when absent.
    self.offset=f.tell();
    f.write(struct.pack("<I",self.tipoNodo));
    for t in self.transforms:
        f.write(struct.pack("<f",t));
    if not self.parent_id is None:
        # Length includes the leading '#'.
        f.write(struct.pack("<I",len(self.parent_id)+1));#mesh
        f.write(bytearray('#',"ascii"));
        if len(self.parent_id)>0:
            f.write(bytearray(self.parent_id,"ascii"));
    else:
        # No parent: zero-length string marker.
        f.write(struct.pack("<I",0));
    f.write(struct.pack("<I",len(self.childrens))),#children (string length 0)
    if len(self.childrens)>0:
        for c in self.childrens:
            c.writeData(f);
    if not self.camera is None:
        self.camera.writeData(f);
    else:
        f.write(struct.pack("B",0));
    if not self.light is None:
        self.light.writeData(f);
    else:
        f.write(struct.pack("B",0));#light: string length 0
    if not self.model is None:
        self.model.writeData(f);
    else:
        # Absent model is encoded as the 1-byte string "#".
        f.write(struct.pack("<I",1));#mesh
        f.write(bytearray('#',"ascii"));
    return ;
def decint(raw, forward=True):
    '''
    Read a variable width integer from the bytestring or bytearray raw
    and return the integer together with the number of bytes consumed.
    With forward=True bytes are taken from the start of raw, otherwise
    from the end.  Inverse of encint above; see its docs for details.
    '''
    buf = bytearray(raw)
    if not forward:
        buf.reverse()
    # Collect 7-bit groups until the terminator byte (high bit set).
    groups = bytearray()
    for b in buf:
        groups.append(b & 0x7F)
        if b & 0x80:
            break
    if not forward:
        groups.reverse()
    # Fold the 7-bit groups, most significant first, into the result.
    value = 0
    for g in groups:
        value = (value << 7) | g
    return value, len(groups)
def LzssUnc(buff, outlen):
    """Decompress LZSS data from *buff* into a bytearray of *outlen* bytes.

    Uses the classic 0x1000-byte ring window with the write position
    starting at 0xFEE.  Each flag byte controls the next 8 items, LSB
    first: a 1-bit means a literal byte, a 0-bit means a (position,
    length) back-reference into the window.
    """
    out = bytearray(outlen)
    window = bytearray(0x1000)
    wpos = 0xfee
    src = 0
    dst = 0
    total = len(buff)
    while src < total:
        flags = buff[src]
        src += 1
        for bit in range(8):
            if (flags >> bit) & 1:
                # Literal byte: copy to output and into the window.
                if dst >= outlen:
                    break
                byte = buff[src]
                src += 1
                out[dst] = byte
                window[wpos] = byte
                dst += 1
                wpos = (wpos + 1) & 0xfff
            else:
                # Back-reference: 12-bit position, 4-bit length (+3).
                if src + 1 >= total:
                    break
                length = (buff[src + 1] & 0x0F) + 3
                ref = buff[src] | ((buff[src + 1] & 0xF0) << 4)
                src += 2
                for k in range(length):
                    byte = window[(ref + k) & 0xfff]
                    if dst >= outlen:
                        return out
                    out[dst] = byte
                    window[wpos] = byte
                    dst += 1
                    wpos = (wpos + 1) & 0xfff
    return out
def test_getitem(self):
    # Indexing and slicing of bytearray.  NOTE(review): bytearray('test')
    # without an encoding is Python 2 only; `raises` comes from the test
    # framework's applevel helpers.
    b = bytearray('test')
    assert b[0] == ord('t')
    assert b[2] == ord('s')
    # Out-of-range index must raise IndexError.
    raises(IndexError, b.__getitem__, 4)
    # Slices (explicit and via a slice object) return new bytearrays.
    assert b[1:5] == bytearray('est')
    assert b[slice(1,5)] == bytearray('est')
def write_font_record(data, obfuscate=True, compress=True):
    '''
    Write the ttf/otf font represented by data into a font record. See
    read_font_record() for details on the format of the record.

    :param data: the raw font bytes
    :param obfuscate: XOR the first 1040 bytes with a random 20-byte key
        (only applied when the (possibly compressed) payload is at least
        1040 bytes long)
    :param compress: zlib-compress the payload at maximum level
    :return: the complete font record (header + key + payload) as bytes
    '''
    flags = 0
    key_len = 20
    usize = len(data)
    xor_key = b''

    if compress:
        flags |= 0b1
        data = zlib.compress(data, 9)

    if obfuscate and len(data) >= 1040:
        flags |= 0b10
        xor_key = os.urandom(key_len)
        key = bytearray(xor_key)
        data = bytearray(data)
        # Obfuscate only the first 1040 bytes, cycling through the key.
        # (Fixed: this previously used Python-2-only xrange().)
        for i in range(1040):
            data[i] ^= key[i % key_len]
        data = bytes(data)

    key_start = struct.calcsize(b'>5L') + 4
    data_start = key_start + len(xor_key)

    header = b'FONT' + struct.pack(b'>5L', usize, flags, data_start,
                                   len(xor_key), key_start)

    return header + xor_key + data
def serialize_tail(self):
    # Serialize the statistics TLVs that follow the common per-peer BMP
    # header: a 4-byte count, then one (type, len, value) record per stat.
    msg = super(BMPStatisticsReport, self).serialize_tail()

    stats_count = len(self.stats)
    msg += bytearray(struct.pack('!I', stats_count))

    for v in self.stats:
        t = v['type']
        # Counter-style stats are 32-bit ('I'); the Adj-RIB-In / Loc-RIB
        # gauges are 64-bit ('Q').  Any other type is silently skipped.
        if t == BMP_STAT_TYPE_REJECTED or \
           t == BMP_STAT_TYPE_DUPLICATE_PREFIX or \
           t == BMP_STAT_TYPE_DUPLICATE_WITHDRAW or \
           t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_CLUSTER_LIST_LOOP or \
           t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_PATH_LOOP or \
           t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_ORIGINATOR_ID or \
           t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_CONFED_LOOP:
            valuepackstr = 'I'
        elif t == BMP_STAT_TYPE_ADJ_RIB_IN or \
             t == BMP_STAT_TYPE_LOC_RIB:
            valuepackstr = 'Q'
        else:
            continue

        v['len'] = struct.calcsize(valuepackstr)
        msg += bytearray(struct.pack(self._TLV_PACK_STR + valuepackstr,
                                     t, v['len'], v['value']))

    return msg
def test_float(self):
    # bytearray -> float conversion: plain bytes, negative exponent
    # form, explicit encoding, NaN, and a parse failure.
    # NOTE(review): bytearray('-1.7e-1') without an encoding is
    # Python 2 only.
    assert float(bytearray(b'10.4')) == 10.4
    assert float(bytearray('-1.7e-1')) == -1.7e-1
    assert float(bytearray(u'.9e10', 'utf-8')) == .9e10
    import math
    assert math.isnan(float(bytearray('nan')))
    raises(ValueError, float, bytearray('not_a_number'))
def name_to_ipv6(addr):
    """Convert an address string into its 16-byte IPv6-mapped form.

    Supported inputs: Tor ".onion" names (base32, mapped behind
    pchOnionCat), dotted IPv4 (mapped behind pchIPv4), colon-separated
    IPv6 (with at most one "::" gap), and "0x"-prefixed little-endian
    IPv4 hex.

    Raises ValueError for unparseable input or invalid onion names.
    """
    if len(addr)>6 and addr.endswith('.onion'):
        vchAddr = b32decode(addr[0:-6], True)
        if len(vchAddr) != 16-len(pchOnionCat):
            # BUG FIX: this previously referenced the undefined name `s`,
            # which turned the intended ValueError into a NameError.
            raise ValueError('Invalid onion %s' % addr)
        return pchOnionCat + vchAddr
    elif '.' in addr: # IPv4
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr: # IPv6
        sub = [[], []] # prefix, suffix
        x = 0          # 0 while filling the prefix, 1 after the "::" gap
        addr = addr.split(':')
        for i,comp in enumerate(addr):
            if comp == '':
                if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
                    continue
                x += 1 # :: skips to suffix
                assert(x < 2)
            else: # two bytes per component
                val = int(comp, 16)
                sub[x].append(val >> 8)
                sub[x].append(val & 0xff)
        nullbytes = 16 - len(sub[0]) - len(sub[1])
        assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
        return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'): # IPv4-in-little-endian
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
    else:
        raise ValueError('Could not parse address %s' % addr)
def signInterest(self, interest, keyName=None, wireFormat=None):
    """Sign *interest* with HMAC-SHA256, appending nonce, timestamp,
    SignatureInfo and SignatureValue components to its name.
    """
    # Adds the nonce and timestamp here, because there is no
    # 'makeCommandInterest' call for this yet
    nonceValue = bytearray(8)
    for i in range(8):
        nonceValue[i] = self.random.randint(0,0xff)
    # Millisecond timestamp, big-endian: fill from the last byte
    # backwards, shifting 8 bits per step.
    timestampValue = bytearray(8)
    ts = int(timestamp()*1000)
    for i in range(8):
        byte = ts & 0xff
        timestampValue[-(i+1)] = byte
        ts = ts >> 8
    if wireFormat is None:
        wireFormat = WireFormat.getDefaultWireFormat()
    s = Sha256HmacSignature()
    s.getKeyLocator().setType(KeyLocatorType.KEYNAME)
    s.getKeyLocator().setKeyName(keyName)
    interestName = interest.getName()
    interestName.append(nonceValue).append(timestampValue)
    interestName.append(wireFormat.encodeSignatureInfo(s))
    # Empty placeholder component; it is replaced by the real signature
    # value below (getPrefix(-1) drops it before appending).
    interestName.append(Name.Component())
    encoding = interest.wireEncode(wireFormat)
    signer = hmac.new(self.key, encoding.toSignedBuffer(), sha256)
    s.setSignature(Blob(signer.digest()))
    interest.setName(interestName.getPrefix(-1).append(
        wireFormat.encodeSignatureValue(s)))
def __init__(self, segments, gamma=True, tint=True):
    """Create a new LED strip with a given number of segments.

    Args:
        segments: number of LED segments (> 0).
        gamma: apply the gamma lookup table when rendering.
        tint: apply whitepoint compensation when rendering.

    Raises:
        ValueError: if segments is not positive.
        IOError/OSError: if the SPI device cannot be opened.
    """
    super(LPD8806, self).__init__()
    if segments <= 0:
        raise ValueError("ERROR [LPD8806]: Invalid number of segments.")
    self.segments = segments

    # Open the Raspi's SPI device (must be sudo).
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and programming errors before re-raising; only I/O failures are
    # expected from open().  (IOError == OSError on Python 3.)
    try:
        self.dev = open(dev_name, "wb")
    except (IOError, OSError):
        print("FATAL ERROR [LPD8806]: Cannot open SPI device. Try running as superuser.")
        raise  # Hard failure; not much we can do here

    # Create an 8-to-7 bit gamma lookup table
    self.gamma = bytearray(256)
    for i in range(256):
        self.gamma[i] = 0x80 | int(pow(float(i) / 255.0, 2.5) * 127.0 + 0.5)

    # Whitepoint compensation (experimental)
    # 1.0, 0.84, 0.71
    self.tint = (1.00, 0.92, 0.82)

    # Initialize local color array
    # The data format is a bit odd and requires an extra 0x00 every meter
    latch_bytes = int((self.segments + 31) / 32)
    self.buffer = bytearray(self.segments * 3 + 1 + latch_bytes)

    # Reset the strip, displaying twice just in case
    self.clear(True)
    self.show()

    self.do_gamma = gamma
    self.do_tint = tint
def __init__(self, filename, mode='r', buffer=16<<10):
    # Buffered file wrapper over raw os.open() with a gevent keep-alive
    # greenlet; understands the usual r/w/a/+ mode strings.
    # NOTE(review): os.O_LARGEFILE is Linux-specific.
    modes = os.O_LARGEFILE | os.O_CREAT
    self._offset = 0
    self._buffer_size = buffer
    if buffer:
        self._buffer_lock = RLock()
    self._read = False
    self._write = False
    self._read_buf = None
    self._write_buf = None
    self._eof = False  # Optimization to limit calls
    self._append = False  # Append Mode writes ignore offset
    self._stay_alive = gevent.spawn(_keep_awake);
    # 'r' or any '+' mode enables reading; O_RDONLY only when not '+'.
    if mode.startswith('r') or '+' in mode:
        self._read = True
        self._read_buf = bytearray()
        if '+' not in mode:
            modes |= os.O_RDONLY
    # 'w', 'a' or '+' enables writing; 'w' also truncates.
    if mode.startswith('w') or mode.startswith('a') or '+' in mode:
        if mode.startswith('w'):
            modes |= os.O_TRUNC
        self._write = True
        self._write_buf = bytearray()
        self._flush = False
        if '+' not in mode:
            modes |= os.O_WRONLY
    if '+' in mode:
        modes |= os.O_RDWR
    if mode.startswith('a'):
        modes |= os.O_APPEND
        self._append = True
    self._fd = os.open(filename, modes)
def test_speech128(self):
    # compress more data
    data = HAMLET_SCENE * 128
    x = zlib.compress(data)
    # bytearray input must compress byte-identically to bytes input,
    # and both bytes and bytearray must decompress back to the original.
    self.assertEqual(zlib.compress(bytearray(data)), x)
    for ob in x, bytearray(x):
        self.assertEqual(zlib.decompress(ob), data)
def get_data(im):
    # Convert a PIL image to clipboard-style raw bytes.  BMP data is
    # rewritten into a DIB block: the 14-byte file header is stripped and
    # the info header patched in place.  (Python 2 code: StringIO plus
    # str/bytearray mixing.)
    s = StringIO()
    if im.format != 'DIB':
        im.save(s, im.format)
    else:
        s.write(im.buf)
    s.seek(0)
    if im.format == 'BMP':
        bmp_f = s
        # Offsets 10 and 14 of a BMP file: pixel-data offset and DIB
        # header size.
        bmp_f.seek(10)
        offset = i32(bmp_f.read(4))
        dib_size = i32(bmp_f.read(4))
        dib = o32(dib_size)+bytearray(bmp_f.read(36))
        dib[:4] = o32(40)  # force a 40-byte BITMAPINFOHEADER size
        # Height field doubled — presumably for the icon-style XOR+AND
        # mask layout; TODO(review) confirm against the consumer.
        dib[8:12] = o32(i32(str(dib[8:12]))*2)
        dib[16:20] = o32(0)  # compression field cleared (BI_RGB)
        dib = dib[:40]
        bmp_f.seek(offset)
        data = bytearray(bmp_f.read())
        data = dib+data
    else:
        data = bytearray(s.read())
    return data
def __init__(self, length, data_source=None):
    """Initiate section object.

    Args:
        length: Amount of data the section can store in bytes.
        data_source: Should provide a read function to read in the
            initial data of the section. Default is None.

    Raises:
        IOError: If data_source was not able to provide the whole
            initial data.
    """
    if data_source is None:
        # No source: start with a zero-filled section.
        self._data = bytearray(length)
        self._last_free = 0
        return
    payload = data_source.read(length)
    self._data = bytearray(payload)
    if len(payload) < length:
        raise IOError("expected %i bytes, but got only %i"
                      % (length, len(payload)))
    self._last_free = self._find_last_free_chunk()
def set_color(self, channel=0, index=0, red=0, green=0, blue=0, name=None, hex=None):
    """Set the color to the device as RGB

    Args:
        channel: device channel (0 addresses the default LED)
        index: LED index on the channel (0 addresses the default LED)
        red: Red color intensity 0 is off, 255 is full red intensity
        green: Green color intensity 0 is off, 255 is full green intensity
        blue: Blue color intensity 0 is off, 255 is full blue intensity
        name: Use CSS colour name as defined here: http://www.w3.org/TR/css3-color/
        hex: Specify color using hexadecimal color value e.g. '#FF3366'
    """
    red, green, blue = self._determine_rgb(red=red, green=green, blue=blue,
                                           name=name, hex=hex)
    r = int(round(red, 3))
    g = int(round(green, 3))
    b = int(round(blue, 3))
    # Inverted devices drive LEDs low-active.
    if self.inverse:
        r, g, b = 255 - r, 255 - g, 255 - b
    # Request 0x0001 addresses the default LED; 0x0005 takes an explicit
    # (channel, index) pair — presumably HID feature report IDs, confirm
    # against the device protocol.
    if index == 0 and channel == 0:
        control_string = bytes(bytearray([0, r, g, b]))
        self._usb_ctrl_transfer(0x20, 0x9, 0x0001, 0, control_string)
    else:
        control_string = bytes(bytearray([0, channel, index, r, g, b]))
        self._usb_ctrl_transfer(0x20, 0x9, 0x0005, 0, control_string)
def CreateRequestV4():
    """Build a version-4 KMS request.

    Layout: body length (written twice, little-endian), the base request,
    its hash, then zero padding to an 8-byte boundary.  Returns the
    request as a (Python 2) byte string.
    """
    # Update the call ID
    config['call_id'] += 1

    # Create KMS Client Request Base
    requestBase = CreateRequestBase()

    # Create Hash
    hashed = str(kmsRequestV4.main(bytearray(requestBase)))

    # Generate Request
    bodyLength = len(requestBase) + len(hashed)
    # Zero-pad the body out to a multiple of 8 bytes.
    if bodyLength % 8 == 0:
        paddingLength = 0
    else:
        paddingLength = 8 - bodyLength % 8

    v4Data = {
        "BodyLength"  : bodyLength,
        "BodyLength2" : bodyLength,
        "Hash"        : hashed,
        "Padding"     : str(bytearray(functions.arrayFill([], paddingLength, 0x00)))
    }

    # BUG FIX: logging.debug was previously called with bare extra
    # arguments but no format placeholders, which raises (and swallows)
    # a formatting TypeError inside logging instead of emitting the data.
    logging.debug("Request V4 Data: %s", v4Data)

    request = str()
    request += struct.pack('<I', v4Data["BodyLength"])
    request += struct.pack('<I', v4Data["BodyLength2"])
    request += requestBase
    request += v4Data["Hash"]
    request += v4Data["Padding"]

    logging.debug("Request V4: %s %d", binascii.b2a_hex(request), len(request))
    return request
def main():
    # RC4-encrypt <msg-file> with <key-file>, writing the result to
    # <out-file>.  Usage: prog <key> <msg> <out>.  The RC4 state indices
    # i and j are module globals shared with PRGA().
    global i
    global j
    if (4 != len(sys.argv)):
        usage()
        sys.exit(-1)
    key_file = open(sys.argv[1], "rb")
    msg_file = open(sys.argv[2], "rb")
    out_file = open(sys.argv[3], "wb")
    # Read the whole key (seek to the end first to learn its size).
    key_file.seek(0, 2)
    key_len = key_file.tell()
    key_file.seek(0, 0)
    key_bytearray = bytearray(key_file.read(key_len))
    key_file.close()
    # Read the whole message the same way.
    msg_file.seek(0, 2)
    msg_len = msg_file.tell()
    msg_file.seek(0, 0)
    msg_bytearray = bytearray(msg_file.read(msg_len))
    msg_file.close()
    # Key-scheduling, then XOR each plaintext byte with the keystream.
    S = KSA(key_bytearray)
    out_list = []
    i = 0
    j = 0
    for n in msg_bytearray:
        K = PRGA(S)
        out_list.append(n ^ K)
    dumphex("enc", out_list)
    out_file.write(bytearray(out_list))
    out_file.close()
def makeGroupValue(cls, apci, data="\x00", size=0):
    """ Create an APDU from apci and data

    @param apci: L{APCI}
    @type apci: int

    @param data: group value payload; with size == 0 it must be a single
        byte whose top two bits are clear (it is packed into the APCI
        byte itself)
    @param size: size of the data
    @type size: int

    @raise APDUValueError: on an unsupported APCI or mismatched
        data/size
    """
    data = bytearray(data)
    if apci not in (APCI.GROUPVALUE_READ, APCI.GROUPVALUE_RES, APCI.GROUPVALUE_WRITE):
        raise APDUValueError("unsoported APCI")
    # size > 0: data must be exactly that long.  size == 0: the 6-bit
    # payload rides in the low bits of the APCI byte, so it must be one
    # byte with the top 2 bits clear.
    if size and len(data) != size or not size and (len(data) != 1 or data[0] & 0x3f != data[0]):
        raise APDUValueError("incompatible data/size values")
    aPDU = bytearray(2 + size)
    if size:
        aPDU[0] = (apci >> 8) & 0xff
        aPDU[1] = apci & 0xff
        aPDU[2:] = data
    else:
        # Merge the 6-bit value into the low byte of the APCI.
        aPDU[0] = (apci >> 8) & 0xff
        aPDU[1] = apci & 0xff | data[0] & 0x3f
    return aPDU
def _bmp_to_raw(self, bmpfile):
    """Converts a 16bpp, RGB 5:6:5 bitmap to a raw format bytearray."""
    # Little-endian BMP header fields: pixel-data offset, width, height
    # and bits-per-pixel.
    data_offset = self._le_unpack(bytearray(bmpfile[0x0a:0x0d]))
    width = self._le_unpack(bytearray(bmpfile[0x12:0x15]))
    height = self._le_unpack(bytearray(bmpfile[0x16:0x19]))
    bpp = self._le_unpack(bytearray(bmpfile[0x1c:0x1d]))
    if bpp != 16:
        raise IOError("Image is not 16bpp")
    if (width != 36 or height != 36) and (width != 320 or height != 240):
        raise IOError("Image dimensions must be 36x36 or 320x240 (not %dx%d)" % (width, height))
    raw_size = width * height * 2
    rawfile = bytearray(b'\x00' * (raw_size + 8))
    # 8-byte raw header; width/height bytes are emitted in swapped order
    # relative to the BMP header.  (Meaning of the trailing 1, 27 is not
    # evident here — TODO(review) confirm against the device format.)
    rawfile[0:8] = [16, 16, bmpfile[0x13], bmpfile[0x12], bmpfile[0x17], bmpfile[0x16], 1, 27]
    raw_index = 8
    # BMP rows are stored bottom-up; copy them top-down, swapping the
    # two bytes of every 16-bit pixel.
    for y in range(0, height):
        current_index = (width * (height - (y + 1)) * 2) + data_offset
        for k in range(0, width):
            rawfile[raw_index] = bmpfile[current_index + 1]
            rawfile[raw_index + 1] = bmpfile[current_index]
            raw_index += 2
            current_index += 2
    return rawfile
def __extractBytes(self, input, start, end, mode):
    """
    Private method to extract a range of bytes from the input.

    @param input input data (bytes)
    @param start start index (integer)
    @param end end index (integer)
    @param mode mode of operation (0, 1, 2)
    @return extracted bytes (bytearray)
    """
    # Blocks are capped at 16 bytes.
    if end - start > 16:
        end = start + 16
    # CBC always yields a full 16-byte (zero-padded) block even for a
    # short tail; other modes yield exactly end - start bytes.  Do not
    # "simplify" this — the padding difference is load-bearing.
    if mode == self.ModeOfOperation["CBC"]:
        ar = bytearray(16)
    else:
        ar = bytearray()

    i = start
    j = 0
    # Grow the non-CBC buffer with zeros up to the requested size.
    while len(ar) < end - start:
        ar.append(0)
    while i < end:
        ar[j] = input[i]
        j += 1
        i += 1
    return ar
def test_remove_with_policy_key_digest(self):
    """
    Invoke remove() with policy_key_digest
    """
    # A bytearray primary key plus POLICY_KEY_DIGEST forces digest-based
    # record addressing.  NOTE(review): the 0L literals make this test
    # Python 2 only.
    key = ( 'test', 'demo', None, bytearray("asd;as[d'as;djk;uyfl", "utf-8"))
    meta = { 'gen' : 0 }
    policy = {
        'timeout': 1000,
        'retry': aerospike.POLICY_RETRY_ONCE,
        'key': aerospike.POLICY_KEY_DIGEST
    }
    retobj = TestRemove.client.put(key, policy)
    assert retobj == 0L
    retobj = TestRemove.client.remove(key, meta, policy)
    assert retobj == 0L
    # After removal the record must be gone: key echoes back, but meta
    # and bins are both None.
    (key, meta, bins) = TestRemove.client.get(key)
    assert key == ('test', 'demo', None, bytearray(b"asd;as[d\'as;djk;uyfl"))
    assert meta == None
    assert bins == None
def test_select_with_unicode_value(self):
    # put() a record holding str, int, bytearray and dict bins, then
    # select() a single bin and verify only that bin comes back.
    key = ('test', 'demo', 'aa')
    rec = {
        'a': ["nanslkdl", 1, bytearray("asd;as[d'as;d", "utf-8")],
        'b': {"key": "asd';q;'1';"},
        'c': 1234,
        'd': '!@#@#$QSDAsd;as'
    }
    assert 0 == TestSelect.client.put(key, rec)
    bins_to_select = ['a']
    key, meta, bins = TestSelect.client.select(key, bins_to_select)
    assert bins == { 'a': ["nanslkdl", 1, bytearray("asd;as[d'as;d", "utf-8")] }
    assert meta != None
    assert key != None
    # Clean up the record written above.
    key = ('test', 'demo', 'aa')
    TestSelect.client.remove(key)
def dump(self):
    """Render the board as a list of bytearray rows: 'R' for robots,
    '#' for every other object, spaces elsewhere."""
    blank_row = b' ' * self.width
    grid = [bytearray(blank_row) for _ in range(self.height)]
    for piece in self.all:
        cell = b'R' if isinstance(piece, Robot) else b'#'
        grid[piece.y][piece.x] = cell[0]
    return grid
# Sequence types (continued from variables defined above this chunk)
f = (1, 2, 3)            #tuple
g = range(4)             #range

#Mapping Type
h = {"val": 1,"add": 22} #dict

#Set Types
i = {"ab", "cd", "ef"}   #set
# BUG FIX: this frozenset previously reassigned (shadowed) the range in
# `g`, so `g` was printed twice below and the range value never shown.
j = frozenset({0, 1, 2}) #frozenset

#Boolean Type
k = True                 #boolean

#Binary Types
l = b"Hello"             #bytes
m = bytearray(4)         #bytearray
n = memoryview(bytes(3)) #memoryview

#Print values
print(a)
print(a1)
print(b)
print(c)
print(d)
print(e)
print(f)
print(g)
print(h)
print(i)
print(j)
print(k)
def __init__(self, key: bytearray):
    # Store the key normalized to lowercase, kept as a bytearray.
    lowered = key.decode().lower()
    self.key = bytearray(lowered.encode())
class RFM9x:
    """SX127x / RFM95-98 LoRa radio driver (MicroPython port).

    NOTE(review): this class reads the module-level names `spi`, `Pin`,
    `time`, `utime`, `resetNum` and the _RH_RF95_* register constants,
    which are defined elsewhere in this file.
    """

    # Global buffer to hold data sent and received with the chip.  This must be
    # at least as large as the FIFO on the chip (256 bytes)!  Keep this on the
    # class level to ensure only one copy ever exists (with the trade-off that
    # this is NOT re-entrant or thread safe code by design).
    _BUFFER = bytearray(10)

    class _RegisterBits:
        # Class to simplify access to the many configuration bits avaialable
        # on the chip's registers. This is a subclass here instead of using
        # a higher level module to increase the efficiency of memory usage
        # (all of the instances of this bit class will share the same buffer
        # used by the parent RFM69 class instance vs. each having their own
        # buffer and taking too much memory).

        # Quirk of pylint that it requires public methods for a class.  This
        # is a decorator class in Python and by design it has no public methods.
        # Instead it uses dunder accessors like get and set below.  For some
        # reason pylint can't figure this out so disable the check.
        # pylint: disable=too-few-public-methods

        # Again pylint fails to see the true intent of this code and warns
        # against private access by calling the write and read functions below.
        # This is by design as this is an internally used class.  Disable the
        # check from pylint.
        # pylint: disable=protected-access

        def __init__(self, address, *, offset=0, bits=1):
            assert 0 <= offset <= 7
            assert 1 <= bits <= 8
            assert (offset + bits) <= 8
            self._address = address
            # Build a mask of `bits` ones shifted up by `offset`.
            self._mask = 0
            for _ in range(bits):
                self._mask <<= 1
                self._mask |= 1
            self._mask <<= offset
            self._offset = offset

        def __get__(self, obj, objtype):
            reg_value = obj._read_u8(self._address)
            return (reg_value & self._mask) >> self._offset

        def __set__(self, obj, val):
            # Read-modify-write of only this descriptor's bit field.
            reg_value = obj._read_u8(self._address)
            reg_value &= ~self._mask
            reg_value |= (val & 0xFF) << self._offset
            obj._write_u8(self._address, reg_value)

    # Bit-field descriptors over the chip registers.
    operation_mode = _RegisterBits(_RH_RF95_REG_01_OP_MODE, bits=3)
    low_frequency_mode = _RegisterBits(_RH_RF95_REG_01_OP_MODE, offset=3, bits=1)
    modulation_type = _RegisterBits(_RH_RF95_REG_01_OP_MODE, offset=5, bits=2)
    # Long range/LoRa mode can only be set in sleep mode!
    long_range_mode = _RegisterBits(_RH_RF95_REG_01_OP_MODE, offset=7, bits=1)
    output_power = _RegisterBits(_RH_RF95_REG_09_PA_CONFIG, bits=4)
    max_power = _RegisterBits(_RH_RF95_REG_09_PA_CONFIG, offset=4, bits=3)
    pa_select = _RegisterBits(_RH_RF95_REG_09_PA_CONFIG, offset=7, bits=1)
    pa_dac = _RegisterBits(_RH_RF95_REG_4D_PA_DAC, bits=3)
    dio0_mapping = _RegisterBits(_RH_RF95_REG_40_DIO_MAPPING1, offset=6, bits=2)
    tx_done = _RegisterBits(_RH_RF95_REG_12_IRQ_FLAGS, offset=3, bits=1)
    rx_done = _RegisterBits(_RH_RF95_REG_12_IRQ_FLAGS, offset=6, bits=1)

    def __init__(self, spi, cs, resetNum, frequency, *, preamble_length=8, high_power=True, baudrate=5000000):
        self.high_power = high_power
        self.cs=cs
        #self.reset=reset
        # NOTE(review): self.reset is bound to a Pin here, shadowing the
        # reset() method defined below — confirm which is intended.
        self.reset=Pin(resetNum,Pin.PULL_UP)
        self.reset()
        try:
            # Set sleep mode, wait 10s and confirm in sleep mode (basic device check).
            # Also set long range mode (LoRa mode) as it can only be done in sleep.
            self.sleep()
            self.long_range_mode = True
            #self._write_u8(_RH_RF95_REG_01_OP_MODE, 0b10001000)
            time.sleep(0.01)
            #val = self._read_u8(_RH_RF95_REG_01_OP_MODE)
            #print('op mode: {0}'.format(bin(val)))
            if self.operation_mode != SLEEP_MODE or not self.long_range_mode:
                raise RuntimeError('Failed to configure radio for LoRa mode, check wiring!')
        except OSError:
            raise RuntimeError('Failed to communicate with radio, check wiring!')
        # clear default setting for access to LF registers if frequency > 525MHz
        if frequency > 525:
            self.low_frequency_mode = 0
        # Setup entire 256 byte FIFO
        self._write_u8(_RH_RF95_REG_0E_FIFO_TX_BASE_ADDR, 0x00)
        self._write_u8(_RH_RF95_REG_0F_FIFO_RX_BASE_ADDR, 0x00)
        # Set mode idle
        self.idle()
        # Set modem config to RadioHead compatible Bw125Cr45Sf128 mode.
        # Note no sync word is set for LoRa mode either!
        self._write_u8(_RH_RF95_REG_1D_MODEM_CONFIG1, 0x72)  # Fei msb?
        self._write_u8(_RH_RF95_REG_1E_MODEM_CONFIG2, 0x74)  # Fei lsb?
        self._write_u8(_RH_RF95_REG_26_MODEM_CONFIG3, 0x00)  # Preamble lsb?
        # Set preamble length (default 8 bytes to match radiohead).
        self.preamble_length = preamble_length
        # Set frequency
        self.frequency_mhz = frequency
        # Set TX power to low defaut, 13 dB.
        self.tx_power = 13

    def _read_into(self, address, buf, length=None):
        self.cs.value(1)  # reset to default
        self.cs.value(0)  # pull low for spi access
        # Read a number of bytes from the specified address into the provided
        # buffer.  If length is not specified (the default) the entire buffer
        # will be filled.
        if length is None:
            length = len(buf)
        # Top bit clear = read command.
        self._BUFFER[0] = address & 0x7F
        #print("before:\n",self._BUFFER)
        spi.write(bytearray([self._BUFFER[0]]))
        #print("middle:\n",self._BUFFER)
        # readinto() through a slice copy, then copy the slice back.
        newbuf=buf[0:length]
        spi.readinto(newbuf)
        buf[0:len(newbuf)]=newbuf
        #print("after:\n",self._BUFFER)
        self.cs.value(1)  # reset to default

    def _read_u8(self, address):
        # Read a single byte from the provided address and return it.
        self._read_into(address, self._BUFFER, length=1)
        return self._BUFFER[0]

    def _write_from(self, address, buf, length=None):
        # Write `length` bytes (default: all of buf) to the given address.
        self.cs.value(1)  # reset to default
        self.cs.value(0)  # pull low for spi access
        if length is None:
            length = len(buf)
        self._BUFFER[0] = (address | 0x80) & 0xFF  # Set top bit (write command)
        spi.write(bytearray([self._BUFFER[0]]))
        spi.write(buf[0:length])
        self.cs.value(1)  # reset to default

    def _write_u8(self, address, val):
        # Write a single byte to the provided register address.
        self.cs.value(1)  # reset to default
        self.cs.value(0)  # pull low for spi access
        self._BUFFER[0] = (address | 0x80) & 0xFF
        self._BUFFER[1] = val & 0xFF
        spi.write(self._BUFFER[0:2])
        self.cs.value(1)  # reset to default

    def reset(self):
        """Perform a reset of the chip."""
        # See section 7.2.2 of the datasheet for reset description.
        # NOTE(review): `resetNum` is read from module scope here, and
        # self.reset is rebound to a Pin — confirm this is intentional.
        self.reset=Pin(resetNum,Pin.OUT)
        self.reset.value(0)
        time.sleep(0.0001)  # 100 us
        self.reset=Pin(resetNum,Pin.PULL_UP)
        time.sleep(0.005)  # 5 ms

    def idle(self):
        """Enter idle standby mode."""
        self.operation_mode = STANDBY_MODE

    def sleep(self):
        """Enter sleep mode."""
        self.operation_mode = SLEEP_MODE

    def listen(self):
        """Listen for packets to be received by the chip.  Use
        :py:func:`receive` to listen, wait and retrieve packets as
        they're available.
        """
        self.operation_mode = RX_MODE
        self.dio0_mapping = 0b00  # Interrupt on rx done.

    def transmit(self):
        """Transmit a packet which is queued in the FIFO.  This is a low
        level function for entering transmit mode and more.  For
        generating and transmitting a packet of data use :py:func:`send`
        instead.
        """
        self.operation_mode = TX_MODE
        self.dio0_mapping = 0b01  # Interrupt on tx done.

    @property
    def preamble_length(self):
        """The length of the preamble for sent and received packets, an
        unsigned 16-bit value.  Received packets must match this length
        or they are ignored!  Set to 8 to match the RadioHead RFM95
        library.
        """
        msb = self._read_u8(_RH_RF95_REG_20_PREAMBLE_MSB)
        lsb = self._read_u8(_RH_RF95_REG_21_PREAMBLE_LSB)
        return ((msb << 8) | lsb) & 0xFFFF

    @preamble_length.setter
    def preamble_length(self, val):
        assert 0 <= val <= 65535
        self._write_u8(_RH_RF95_REG_20_PREAMBLE_MSB, (val >> 8) & 0xFF)
        self._write_u8(_RH_RF95_REG_21_PREAMBLE_LSB, val & 0xFF)

    @property
    def frequency_mhz(self):
        """The frequency of the radio in Megahertz. Only the allowed values
        for your radio must be specified (i.e. 433 vs. 915 mhz)!
        """
        # 24-bit FRF value split over three registers.
        msb = self._read_u8(_RH_RF95_REG_06_FRF_MSB)
        mid = self._read_u8(_RH_RF95_REG_07_FRF_MID)
        lsb = self._read_u8(_RH_RF95_REG_08_FRF_LSB)
        frf = ((msb << 16) | (mid << 8) | lsb) & 0xFFFFFF
        frequency = (frf * _RH_RF95_FSTEP) / 1000000.0
        return frequency

    @frequency_mhz.setter
    def frequency_mhz(self, val):
        assert 240 <= val <= 960
        # Calculate FRF register 24-bit value.
        frf = int((val * 1000000.0) / _RH_RF95_FSTEP) & 0xFFFFFF
        # Extract byte values and update registers.
        msb = frf >> 16
        mid = (frf >> 8) & 0xFF
        lsb = frf & 0xFF
        self._write_u8(_RH_RF95_REG_06_FRF_MSB, msb)
        self._write_u8(_RH_RF95_REG_07_FRF_MID, mid)
        self._write_u8(_RH_RF95_REG_08_FRF_LSB, lsb)

    @property
    def tx_power(self):
        """The transmit power in dBm. Can be set to a value from 5 to 23 for
        high power devices (RFM95/96/97/98, high_power=True) or -1 to 14 for low
        power devices. Only integer power levels are actually set (i.e. 12.5
        will result in a value of 12 dBm).
        The actual maximum setting for high_power=True is 20dBm but for values > 20
        the PA_BOOST will be enabled resulting in an additional gain of 3dBm.
        The actual setting is reduced by 3dBm.
        The reported value will reflect the reduced setting.
        """
        if self.high_power:
            return self.output_power + 5
        return self.output_power - 1

    @tx_power.setter
    def tx_power(self, val):
        val = int(val)
        if self.high_power:
            assert 5 <= val <= 23
            # Enable power amp DAC if power is above 20 dB.
            # Lower setting by 3db when PA_BOOST enabled - see Data Sheet Section 6.4
            if val > 20:
                self.pa_dac = _RH_RF95_PA_DAC_ENABLE
                val -= 3
            else:
                self.pa_dac = _RH_RF95_PA_DAC_DISABLE
            self.pa_select = True
            self.output_power = (val - 5) & 0x0F
        else:
            assert -1 <= val <= 14
            self.pa_select = False
            self.max_power = 0b111  # Allow max power output.
            self.output_power = (val + 1) & 0x0F

    @property
    def rssi(self):
        """The received strength indicator (in dBm) of the last received
        message."""
        # Read RSSI register and convert to value using formula in datasheet.
        # Remember in LoRa mode the payload register changes function to RSSI!
        return self._read_u8(_RH_RF95_REG_1A_PKT_RSSI_VALUE) - 137

    def send(self, data, timeout=2.):
        """Send a string of data using the transmitter.  You can only send 252
        bytes at a time (limited by chip's FIFO size and appended headers).
        Note this appends a 4 byte header to be compatible with the RadioHead
        library.  The timeout is just to prevent a hang (arbitrarily set to 2
        Seconds).
        """
        # Disable pylint warning to not use length as a check for zero.
        # This is a puzzling warning as the below code is clearly the most
        # efficient and proper way to ensure a precondition that the provided
        # buffer be within an expected range of bounds. Disable this check.
        # pylint: disable=len-as-condition
        assert 0 < len(data) <= 252
        # pylint: enable=len-as-condition
        self.idle()  # Stop receiving to clear FIFO and keep it clear.
        # Fill the FIFO with a packet to send.
        self._write_u8(_RH_RF95_REG_0D_FIFO_ADDR_PTR, 0x00)  # FIFO starts at 0.
        # Write header bytes.
        self._write_u8(_RH_RF95_REG_00_FIFO, _RH_BROADCAST_ADDRESS)  # txHeaderTo
        self._write_u8(_RH_RF95_REG_00_FIFO, _RH_BROADCAST_ADDRESS)  # txHeaderFrom
        self._write_u8(_RH_RF95_REG_00_FIFO, 0x00)  # txHeaderId
        self._write_u8(_RH_RF95_REG_00_FIFO, 0x00)  # txHeaderFlags
        # Write payload.
        self._write_from(_RH_RF95_REG_00_FIFO, data)
        # Write payload and header length.
        self._write_u8(_RH_RF95_REG_22_PAYLOAD_LENGTH, len(data) + 4)
        # Turn on transmit mode to send out the packet.
        self.transmit()
        # Wait for tx done interrupt with explicit polling (not ideal but
        # best that can be done right now without interrupts).
        start = utime.ticks_ms()
        timed_out = False
        while not timed_out and not self.tx_done:
            if (utime.ticks_ms() - start)/1000. >= timeout:
                timed_out = True
        # Go back to idle mode after transmit.
        self.idle()
        # Clear interrupts.
        self._write_u8(_RH_RF95_REG_12_IRQ_FLAGS, 0xFF)
        if timed_out:
            raise RuntimeError('Timeout during packet send')

    def receive(self, timeout=0.5, keep_listening=True):
        """Wait to receive a packet from the receiver. Will wait for up to
        timeout amount of seconds for a packet to be received and decoded. If
        a packet is found the payload bytes are returned, otherwise None is
        returned (which indicates the timeout elapsed with no reception).  Note
        this assumes a 4-byte header is prepended to the data for compatibilty
        with the RadioHead library (the header is not validated nor returned).
        If keep_listening is True (the default) the chip will immediately enter
        listening mode after reception of a packet, otherwise it will fall back
        to idle mode and ignore any future reception.
        """
        # Make sure we are listening for packets.
        self.listen()
        # Wait for the rx done interrupt.  This is not ideal and will
        # surely miss or overflow the FIFO when packets aren't read fast
        # enough, however it's the best that can be done from Python without
        # interrupt supports.
        start = utime.ticks_ms()
        timed_out = False
        while not timed_out and not self.rx_done:
            if (utime.ticks_ms() - start)/1000. >= timeout:
                timed_out = True
        # Payload ready is set, a packet is in the FIFO.
        packet = None
        if not timed_out:
            # Grab the length of the received packet and check it has at least 5
            # bytes to indicate the 4 byte header and at least 1 byte of user data.
            length = self._read_u8(_RH_RF95_REG_13_RX_NB_BYTES)
            if length < 5:
                packet = None
            else:
                # Have a good packet, grab it from the FIFO.
                # Reset the fifo read ptr to the beginning of the packet.
                current_addr = self._read_u8(_RH_RF95_REG_10_FIFO_RX_CURRENT_ADDR)
                self._write_u8(_RH_RF95_REG_0D_FIFO_ADDR_PTR, current_addr)
                packet = bytearray(length)
                # Read the packet.
                self._read_into(_RH_RF95_REG_00_FIFO, packet)
                # strip off the header
                packet = packet[4:]
        # Listen again if necessary and return the result packet.
        if keep_listening:
            self.listen()
        else:
            # Enter idle mode to stop receiving other packets.
            self.idle()
        # Clear interrupt.
        self._write_u8(_RH_RF95_REG_12_IRQ_FLAGS, 0xFF)
        return packet
def log_anonymize(filename):
    """Anonymize the log.

    Rewrites every MAC address found in the wlan_exp log entries of
    *filename* with a deterministic anonymous replacement, zeroes all
    payloads, and writes the result to ``<name>_anon.<ext>`` next to the
    original file.  Relies on module-level state: ``all_addrs`` (running
    set of known addresses), ``addr_to_replace`` and ``print_time``.
    """
    global all_addrs

    # Get the log_data from the file
    log_bytes = bytearray(hdf_util.hdf5_to_log_data(filename=filename))

    # Get the raw_log_index from the file
    raw_log_index = hdf_util.hdf5_to_log_index(filename=filename)

    # Get the user attributes from the file
    log_attr_dict = hdf_util.hdf5_to_attr_dict(filename=filename)

    # Generate the index of log entry locations sorted by log entry type
    #   Merge the Rx / Tx subtypes that can be processed together
    log_index = log_util.filter_log_index(raw_log_index,
                                          merge={'RX_OFDM': ['RX_OFDM', 'RX_OFDM_LTG'],
                                                 'TX_HIGH': ['TX_HIGH', 'TX_HIGH_LTG'],
                                                 'TX_LOW' : ['TX_LOW', 'TX_LOW_LTG']})

    # Re-initialize the address-byteindex map per file using the running
    # list of known MAC addresses
    addr_idx_map = dict()
    for addr in all_addrs:
        addr_idx_map[addr] = list()

    log_util.print_log_index_summary(log_index, "Log Index Summary (merged):")

    #---------------------------------------------------------------------
    # Step 1: Build a dictionary of all MAC addresses in the log, then
    #         map each addresses to a unique anonymous address
    #         Uses tuple(bytearray slice) since bytearray isn't hashable as-is
    #
    print("Anonmyizing file step 1 ...")
    start_time = time.time()

    #----------------------------------
    # Rx DSSS entries
    #
    try:
        print("    Anonmyizing {0} RX_DSSS entries".format(len(log_index['RX_DSSS'])))
        # Payload offset = total size of all fixed fields preceding mac_payload
        pyld_start = struct.calcsize(''.join(entry_types.entry_rx_dsss.get_field_struct_formats()[:-1]))
        for idx in log_index['RX_DSSS']:
            # 6-byte addresses at offsets 4, 10, 16 in the mac_payload
            for o in (4, 10, 16):
                addr_to_replace(tuple(log_bytes[idx+pyld_start+o:idx+pyld_start+o+6]), idx+pyld_start+o, addr_idx_map)
    except KeyError:
        # No RX_DSSS entries present in this log
        pass

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #----------------------------------
    # Rx OFDM entries
    #
    try:
        print("    Anonmyizing {0} RX_OFDM entries".format(len(log_index['RX_OFDM'])))
        pyld_start = struct.calcsize(''.join(entry_types.entry_rx_ofdm.get_field_struct_formats()[:-1]))
        for idx in log_index['RX_OFDM']:
            # 6-byte addresses at offsets 4, 10, 16 in the mac_payload
            for o in (4, 10, 16):
                addr_to_replace(tuple(log_bytes[idx+pyld_start+o:idx+pyld_start+o+6]), idx+pyld_start+o, addr_idx_map)
    except KeyError:
        pass

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #----------------------------------
    # Tx entries
    #
    try:
        print("    Anonmyizing {0} TX_HIGH entries".format(len(log_index['TX_HIGH'])))
        pyld_start = struct.calcsize(''.join(entry_types.entry_tx_high.get_field_struct_formats()[:-1]))
        for idx in log_index['TX_HIGH']:
            # 6-byte addresses at offsets 4, 10, 16 in the mac_payload
            for o in (4, 10, 16):
                addr_to_replace(tuple(log_bytes[idx+pyld_start+o:idx+pyld_start+o+6]), idx+pyld_start+o, addr_idx_map)
    except KeyError:
        pass

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #----------------------------------
    # Tx Low entries
    #
    try:
        print("    Anonmyizing {0} TX_LOW entries".format(len(log_index['TX_LOW'])))
        pyld_start = struct.calcsize(''.join(entry_types.entry_tx_low.get_field_struct_formats()[:-1]))
        for idx in log_index['TX_LOW']:
            # 6-byte addresses at offsets 4, 10, 16 in the mac_payload
            # (an earlier comment said "40, 46, 52"; the code has always
            #  used 4/10/16 relative to pyld_start, like the other types)
            for o in (4, 10, 16):
                addr_to_replace(tuple(log_bytes[idx+pyld_start+o:idx+pyld_start+o+6]), idx+pyld_start+o, addr_idx_map)
    except KeyError:
        pass

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #---------------------------------------------------------------------
    # Step 2: Enumerate actual MAC addresses and their anonymous replacements
    #
    print("Anonmyizing file step 2 ...")
    print("    Enumerate MAC addresses and their anonymous replacements")

    addr_map = dict()
    for ii,addr in enumerate(all_addrs):
        # Address should not have a first octet that is odd, as this indicates
        # the address is multicast.  Hence, use 0xFE as the first octet.
        #
        # Due to FCS errors, the number of addresses in a log file is
        # potentially large.  Therefore, the anonymizer supports 2^24 unique
        # addresses.
        #
        anon_addr = (0xFE, 0xFF, 0xFF, (ii//(256**2)), ((ii//256)%256), (ii%256))
        addr_map[addr] = anon_addr

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #---------------------------------------------------------------------
    # Step 3: Replace all MAC addresses in the log
    #
    print("Anonmyizing file step 3 ...")
    print("    Replace all MAC addresses in the log")

    for old_addr in addr_idx_map.keys():
        new_addr = bytearray(addr_map[old_addr])
        for byte_idx in addr_idx_map[old_addr]:
            log_bytes[byte_idx:byte_idx+6] = new_addr

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #---------------------------------------------------------------------
    # Step 4: Other annonymization steps
    #
    print("Anonmyizing file step 4 ...")
    print("    Remove all payloads")

    # Overwrite all payloads with zeros
    try:
        for key in log_index.keys():
            log_util.overwrite_payloads(log_bytes, log_index[key])
    except:
        # Best-effort: some entry types may not carry payloads
        pass

    if print_time:
        print("    Time = {0:.3f}s".format(time.time() - start_time))

    #---------------------------------------------------------------------
    # Write output files
    #

    # Write the modified log to a new HDF5 file
    (fn_fldr, fn_file) = os.path.split(filename)

    # Find the last '.' in the file name and classify everything after that as the <ext>
    ext_i = fn_file.rfind('.')
    if (ext_i != -1):
        # Remember the original file extension
        fn_ext = fn_file[ext_i:]
        fn_base = fn_file[0:ext_i]
    else:
        fn_ext = ''
        fn_base = fn_file

    newfilename = os.path.join(fn_fldr, fn_base + "_anon" + fn_ext)

    print("Writing new file {0} ...".format(newfilename))

    # Copy any user attributes to the new anonymized file
    hdf_util.log_data_to_hdf5(log_bytes, newfilename, attr_dict=log_attr_dict)

    return
def send(s, msg): s.send(bytearray(msg, 'utf-8'))
def put_audio():
    """
    Getting Constants from Constants.py
    Variables must be extracted from the Variables_dict who is managed by Disk_IO.

    Synthesizes a two-channel test signal (hammer pulse on ch1, pulse plus
    a delayed toe echo, HP-filtered, on ch2) and plays it AveragesRequired
    times through ALSA.
    NOTE(review): relies on module-level names not visible in this chunk
    (aa, c, np, plt, pack, butter, lfilter, hammer_DT, DEBUG,
    recall_pickled_dict) -- confirm against the file's imports.
    """
    """ Variables and constants """
    f_hp = 35
    Variables_dict = recall_pickled_dict()                  # Load Variables_dict
    SampRate = Variables_dict["v.SamplingFreq"]             # Sampling Frequency, Hz
    Length = Variables_dict["v.Length"]                     # Pile length, m
    Speed = Variables_dict["v.Speed"]                       # Wave speed
    samples_acq = Variables_dict["v.samples_acq"]           # N. of Samples acquired: c.n_blocks multiple
    AveragesRequired = int(Variables_dict["v.AveragesRequired"])  # Required Averages number
    #
    FORMAT = aa.PCM_FORMAT_S32_LE                           # Int 4 Bytes signed
    CHANNELS = int(2)
    byte_width = 4
    bytes_size = CHANNELS * byte_width * c.n_frames         # Bytes in each period
    """
    Create the data_out alsaaudio object instance
    In PCM_NORMAL mode, the call will block if the kernel buffer is full,
    and until enough sound has been played to allow the sound data to be
    buffered.  The call always returns the size of the data provided.
    """
    #data_out = aa.PCM(aa.PCM_PLAYBACK, aa.PCM_NONBLOCK)    # aa.PCM_NORMAL aa.PCM_NONBLOCK
    data_out = aa.PCM(aa.PCM_PLAYBACK, aa.PCM_NORMAL)
    data_out.setchannels(CHANNELS)
    data_out.setrate(SampRate)
    data_out.setformat(FORMAT)
    data_out.setperiodsize(c.n_frames)
    #
    """ x1x2: vector of interleaved channels, Ch1, Ch2, output signal """
    #
    A = 0.5 * (2**31 - 1)                                   # Amplitude referr. to 32 bit int f.s.
    # NOTE(review): dt hard-codes 96000 Hz rather than using SampRate --
    # confirm this is intentional.
    dt = 1.0 / 96000                                        # Sampling period
    T = 1                                                   # Total signal time length
    N = int(T / dt)                                         # Number of samples
    L_echo_0 = Length                                       # Toe echo
    k_A_echo_0 = -1 * 0.1                                   # Echo_0 reflexion coefficient: 1 => free; -1 => vincolated
    T_echo_0 = 2 * Length / Speed                           # Return time of echo_0
    indx_echo_0 = round(T_echo_0 / dt)                      # Index of the 1.st echo_0 sample
    T_effective = hammer_DT                                 # Pulse equivalent length
    T_pulse = T_effective * 3/2                             # Length at the sine base, valid only for the half sine
    N_pulse = round(T_pulse / dt)                           # Number of pulse samples
    t = np.arange(N_pulse) * dt                             # Time length of the pulse sample
    pulse = np.sin(np.pi * t / T_pulse)                     # Pulse as a positive half sine
    pulse_echo_0 = k_A_echo_0 * pulse                       # Pulse echo_0
    x1f = np.zeros(N)                                       # Array of 0's, ch1
    x2f = np.zeros(N)                                       # Array of 0's, ch2
    x1f[0:N_pulse] = pulse                                  # Put pulse at index 0 for ch1
    x2f[0:N_pulse] = pulse                                  # Put pulse at index 0 for ch2
    x2f[indx_echo_0:indx_echo_0+N_pulse] = pulse_echo_0     # Put echo_0 delayed pulse on ch2
    #
    # Insertion of a HP 1.st order filter
    fnyquist = 0.5*SampRate
    w_d = f_hp / fnyquist                                   # Digital pulsation, 0..1, where 1 => fnyquist
    b, a = butter(1, w_d, btype='high')                     # Digital HP Butterworth filter first order
    y2f_hp_f = lfilter(b, a, x2f)                           # Filtering the signal
    #
    pk = np.max(x1f)                                        # Normalize amplitude
    x1f *= A / pk                                           # Float type, original, ch1
    y2f_hp_f *= A / pk                                      # Float type, HP filtered, ch2
    x1 = x1f.astype(np.int32)                               # Convert to int32, ch1
    x2 = y2f_hp_f.astype(np.int32)                          # Convert to int32,
    x1x2 = np.zeros(CHANNELS*c.N_of_samples, dtype=np.int32)  # Array for interleaved Ch1, Ch2 data
    #
    x1x2[0::2] = x1                                         # Fill x1 at even indexes: ch1
    x1x2[1::2] = x2                                         # Fill x1 at odd indexes: ch2
    #
    out_big_buffer = bytearray(bytes_size * c.n_blocks)
    out_big_buffer[:] = pack('%ii' % int(2*c.N_of_samples), *x1x2)  # Pack from numpy.int32 to bytes
    out_short_buffer = bytearray(bytes_size)                # Output array written one frame at time
    #
    if DEBUG:
        tp = np.arange(0, N, 1) * dt
        sel = int(1.2 * indx_echo_0)
        fig = plt.figure(1)
        fig.set_size_inches(w = 15, h = 9)
        fig.subplots_adjust(hspace = 0.35)
        plt.subplot(311)
        plt.plot(tp[0:sel], x1[0:sel])
        plt.title('Hammer')
        plt.xlabel('Time [s]')
        plt.ylabel('Force')
        plt.grid(True)
        #
        plt.subplot(312)
        plt.plot(tp[0:sel], x2f[0:sel])
        plt.title('Acceleration')
        plt.xlabel('Time [s]')
        plt.ylabel('Acceleration')
        plt.grid(True)
        #
        plt.subplot(313)
        plt.plot(tp[0:sel], y2f_hp_f[0:sel])
        plt.title('Acceleration, HP filtered')
        plt.xlabel('Time [s]')
        plt.ylabel('Acceleration')
        plt.grid(True)
        #
        plt.show()
    #
    # time.sleep(3)
    # Play the synthesized signal once per required average, one ALSA
    # period (bytes_size) at a time.
    for n in range(AveragesRequired):
        print("Started pulse N°", n + 1)
        beg = 0
        end = bytes_size
        for i in range(c.n_blocks):
            out_short_buffer[:] = out_big_buffer[beg:end]
            size = data_out.write(out_short_buffer)
            #print(size)
            beg = end
            end += bytes_size
        #
        # time.sleep(4.5)
    #
    # data_out.close()
    print("put_audio.py terminated")
def __init__(self, fd, complete, bufsize=4096, map=None): asyncore.file_dispatcher.__init__(self, fd, map=map) filecontrol.set_close_on_exec(self._fileno) self._complete = complete self._bufsize = bufsize self._data = bytearray()
def diagnose(self, test, test_data=None): if test == "line": size = self.host_command_frame_max_size - 3 data = bytearray([x & 0xFF for x in range(size)]) return self.command(0x00, b"\x00" + data, timeout=1.0) == data return super(Chipset, self).diagnose(test, test_data)
elif stype == SECTION_WORDS: words = slen / 4 value = struct.unpack(("<%s" % 'L' * words), data[offset:offset + slen])[0] print("VALUE: %s" % value) elif stype == SECTION_DWORDS: dwords = slen / 8 value = struct.unpack(("<%s" % 'Q' * dwords), data[offset:offset + slen])[0] print("VALUE: %s" % value) elif stype == SECTION_DOUBLES: doubles = slen / 8 value = struct.unpack(("<%s" % 'd' * doubles), data[offset:offset + slen])[0] print("VALUE: %s" % value) elif stype == SECTION_COORD: value = struct.unpack("<dd", data[offset:offset + slen]), print("COORDINATES: %s" % value) elif stype == SECTION_PNG: signature = [137, 80, 78, 71, 13, 10, 26, 10] value = struct.unpack('<' + ("%s" % 'B' * slen), data[offset:offset + slen]) myImage = open("myImage.png", "wb") myImage.write(bytearray(signature + list(value))) print(70 * "-") offset += slen
import json import random import math from single_byte_xor import SingleByteXorAttacker ALPHABET = bytearray('abcdefghijklmnopqrstuvwxyz'.encode()) with open('./bigrams_percentages.json') as bigrams: TWO_LETTER_FREQUENCES = json.load(bigrams) with open('./trigrams_percentages.json') as trigrams: THREE_LETTER_FREQUENCES = json.load(trigrams) class Substitution: def __init__(self, alphabet: bytearray): self.alphabet = alphabet def encrypt(self, text_b: bytearray, keys_b: list): text_b = text_b[:] for i in range(len(text_b)): key_b = keys_b[i % len(keys_b)] try: alphabet_index = self.alphabet.index(text_b[i]) text_b[i] = key_b[alphabet_index] except ValueError: continue return text_b def decrypt(self, encrypted_text_b: bytearray, keys_b: list): encrypted_text_b = encrypted_text_b[:] for i in range(len(encrypted_text_b)):
################## # main program ################## if __name__ == '__main__': start_time = time.time() # get command line arguments args = get_parser('IID').parse_args() datafile = args.datafile bits_per_symbol = int(args.bits_per_symbol) verbose = bool(args.verbose) with open(datafile, 'rb') as file: # Read in raw bytes and convert to list of output symbols bytes_in = bytearray(file.read()) dataset = to_dataset(bytes_in, bits_per_symbol) k = len(set(dataset)) if verbose: # print file and dataset details print ("Read in file %s, %d bytes long." % (datafile, len(bytes_in))) print ("Dataset: %d %d-bit symbols, %d symbols in alphabet." % (len(dataset), bits_per_symbol, k)) print ("Output symbol values: min = %d, max = %d\n" % (min(dataset), max(dataset))) ####################################### # STEP 1: Determine if Dataset is IID # ####################################### # determine if dataset is IID using shuffle and Chi-square tests passed_permutation_tests = permutation_test(dataset, verbose) if passed_permutation_tests:
from PIL import Image import io image_data = [] with open("digimon.jpg", "rb") as image: f = image.read() print(len(f)) b = bytearray() b.append(3) print(len(b)) image_data = b image = Image.open(io.BytesIO(image_data)) image.show() """ import PIL from PIL import Image from PIL import ImageDraw from PIL import ImageFont import urllib.request with urllib.request.urlopen('http://pastebin.ca/raw/2311595') as in_file: hex_data = in_file.read() print(hex_data) img = Image.frombuffer('RGB', (320,240), hex_data) #i have tried fromstring draw = ImageDraw.Draw(img) font = ImageFont.truetype("arial.ttf",14) draw.text((0, 220),"This is a test11",(255,255,0),font=font) draw = ImageDraw.Draw(img) img.save("a_test.jpg")
@micropython.viper def set1(dest:ptr32, val:int): dest[1] = val @micropython.viper def memset(dest:ptr32, val:int, n:int): for i in range(n): dest[i] = val @micropython.viper def memset2(dest_in, val:int): dest = ptr32(dest_in) n = int(len(dest_in)) >> 2 for i in range(n): dest[i] = val b = bytearray(8) print(b) set(b, 0x42424242) print(b) set1(b, 0x43434343) print(b) memset(b, 0x44444444, len(b) // 4) print(b) memset2(b, 0x45454545) print(b)
class extended_status:
    """Wrapper for the z/OS Subsystem Interface (SSI) function code 80
    "extended status" call: builds the SSOB/STAT control blocks, issues the
    call via IEFSSREQ, and walks the returned job/sysout element chains.
    """
    verbose = False
    # IEFSSREQ routine address: PSA -> CVT (0x10) -> JESCT (0x128) -> 0x14.
    ssreq_fn = MEM4(MEM4(MEM4(0, 0x10), 0x128), 0x14)
    ssob_and_stat_len = 0x23C  # version 10
    ssob_and_stat = bytearray_set_address_size(bytearray(ssob_and_stat_len), 31)
    arglist = bytearray_set_address_size(bytearray(4), 31)
    struct.pack_into('=I', arglist, 0, bytearray_buffer_address(ssob_and_stat))
    struct.pack_into(
        '=4sHHI4xI', ssob_and_stat, 0,
        codecs.encode("{:4s}".format("SSOB"), encoding=cp1047_oe),  # E2E2D6C2
        0x1C, 80, 0,
        bytearray_buffer_address(ssob_and_stat) + 0x20)  # 0x1C is the SSOB length; 80 is extended status
    struct.pack_into(
        'H4sBB5x', ssob_and_stat, 0x20,
        0x21C,  # version 10 length
        codecs.encode("{:4s}".format("STAT"), encoding=cp1047_oe),  # E2E3C1E3
        10, 0)  # version 10, modifier 0
    struct.pack_into(
        'B', ssob_and_stat, 0xDA,  # STATOPT1
        0x04)  # Returned areas may be obtained in 64-bit storage
    save_area = bytearray_set_address_size(bytearray(18 * 4), 31)
    ssreq = bytearray(5 * 8)

    def call(self, request_type, criteria={}, requested_fields=None):
        """Issue the extended status request.

        request_type     -- one of 'job_terse', 'job_verbose', 'close',
                            'sysout_terse', 'sysout_verbose', 'data_set_list'
        criteria         -- mapping of criteria_fields names to values;
                            falsy values are skipped (note: default dict is
                            shared but never mutated here)
        requested_fields -- optional set of field names to keep when parsing

        Raises Exception on a non-zero SSI or subsystem return code.
        """
        self.requested_fields = requested_fields
        ssob_and_stat = self.ssob_and_stat
        arglist = self.arglist
        save_area = self.save_area
        ssreq = self.ssreq
        request_types = ('job_terse', 'job_verbose', 'close', 'sysout_terse',
                         'sysout_verbose', 'data_set_list')
        struct.pack_into('B', ssob_and_stat, 0x2C,
                         request_types.index(request_type) + 1)
        for name, value in criteria.items():
            if not value:
                continue
            struct_fmt, position_list, bit_position, bit = criteria_fields[name]
            if isinstance(value, str):
                # Pad to the field width and convert to EBCDIC.
                value = codecs.encode(
                    ("{:%ds}" % int(struct_fmt[:-1])).format(value),
                    encoding=cp1047_oe)
            if isinstance(position_list, tuple):
                for position in position_list:
                    struct.pack_into(struct_fmt, ssob_and_stat, position, value)
            else:
                position = position_list
                struct.pack_into(struct_fmt, ssob_and_stat, position, value)
            # Set the "criterion present" flag bit for this field.
            ssob_and_stat[bit_position] |= bit
        # FIX: was bare `ssreq_fn`, a NameError at runtime inside a method;
        # the routine address is a class attribute.
        struct.pack_into('QQQQQ', ssreq, 0, self.ssreq_fn, 0,
                         bytearray_buffer_address(arglist),
                         bytearray_buffer_address(save_area),
                         SYSTEM_CALL__CALL31)
        zos_system_call(ssreq)
        self.ssi_return_code = struct.unpack_from('=4xI', self.ssreq, 0)[0]
        if self.ssi_return_code != 0:
            # FIX: integer division (was `/`, a float index on Python 3) and
            # the misspelled `errior_index` NameError on the error path.
            error_index = self.ssi_return_code // 4 - 1
            error_messages = (
                "The subsystem does not support this function.",
                "The subsystem exists, but is not active.",
                "The subsystem is not defined to MVS.",
                "Invalid SSOB, SSIB or function code",
                "The SSOB or SSIB have invalid lengths or formats",
                "The SSI has not been initialized.")
            raise Exception(error_messages[error_index])
        self.subsystem_return_code = struct.unpack_from(
            '=I', ssob_and_stat, 0x0C)[0]  # SSOBRETN
        self.version, self.reason, self.reason2 = struct.unpack_from(
            '=BxBB', self.ssob_and_stat, 0x28)  # STATVER, STATREAS, STATREA2
        # FIX: these codes describe SSOBRETN (the subsystem return code);
        # the old code tested self.ssi_return_code here, which was dead code
        # since any non-zero SSI return code already raised above.
        if self.subsystem_return_code == 4:
            raise Exception("Invalid search arguments")
        elif self.subsystem_return_code == 8:
            raise Exception("Logic error, reason=0x%X" % self.reason)
        elif self.subsystem_return_code == 12:
            raise Exception("Unsupported call type")
        #print(dump_region(bytearray_buffer_address(ssob_and_stat), len(self.ssob_and_stat)))
        return None

    def job_elements(self):
        """Yield one dict per returned job element (terse, plus verbose data
        when present), following the 64-bit chain first if available."""
        have_64bit_results = struct.unpack_from('B', self.ssob_and_stat, 0x108)[0] & 0x40
        for self.jq64 in (True, False) if have_64bit_results else (False, ):
            self.jqe_address = struct.unpack_from(
                'Q' if self.jq64 else 'I', self.ssob_and_stat,
                0x118 if self.jq64 else 0x0F4)[0]
            if self.verbose:
                print("jq64=%r, jqe=%X" % (self.jq64, self.jqe_address))
            while self.jqe_address:
                self.subsystem_name = codecs.decode((ctypes.c_char*4)\
                    .from_address(self.jqe_address + 0x10).value, cp1047_oe)
                job = {"subsystem": self.subsystem_name}
                job = self.parse_element_information(self.jqe_address,
                                                    'job_terse', job)
                self.job_verbose_address = (ctypes.c_ulong if self.jq64 else ctypes.c_uint)\
                    .from_address(self.jqe_address + (0x30 if self.jq64 else 0x14)).value
                if self.job_verbose_address:
                    if self.verbose:
                        print("job_verbose=%X" % (self.job_verbose_address, ))
                    job = self.parse_element_information(
                        self.job_verbose_address, 'job_verbose', job)
                yield job
                # Follow the "next element" pointer in the chain.
                self.jqe_address = (ctypes.c_ulong if self.jq64 else ctypes.c_uint)\
                    .from_address(self.jqe_address + (0x20 if self.jq64 else 0x08)).value
                if self.verbose:
                    print("jqe=%X" % (self.jqe_address, ))

    def job_dependency_elements(self):
        """Yield parsed dependency elements for the current job element, if
        the element is long enough to carry a dependency chain pointer."""
        if ctypes.c_short.from_address(self.jqe_address + 0x4).value >= 0x48:
            self.dependency_address = ctypes.c_ulong.from_address(
                self.jqe_address + 0x40).value
            while self.dependency_address:
                yield self.parse_element_information(self.dependency_address,
                                                    'job_dependency')
                self.dependency_address = ctypes.c_ulong.from_address(
                    self.dependency_address + 0x8).value

    def sysout_terse_elements(self):
        """Yield parsed terse sysout elements chained off the current job
        element."""
        self.sysout_terse_address = (ctypes.c_ulong if self.jq64 else ctypes.c_uint)\
            .from_address(self.jqe_address + (0x28 if self.jq64 else 0x0C)).value
        while self.sysout_terse_address:
            yield self.parse_element_information(self.sysout_terse_address,
                                                'sysout_terse')
            self.sysout_terse_address = (ctypes.c_ulong if self.jq64 else ctypes.c_uint)\
                .from_address(self.sysout_terse_address + (0x18 if self.jq64 else 0x08)).value

    def sysout_verbose_elements(self):
        """Yield parsed verbose sysout elements chained off the current terse
        sysout element."""
        self.sysout_verbose_address = (ctypes.c_ulong if self.jq64 else ctypes.c_uint)\
            .from_address(self.sysout_terse_address + (0x28 if self.jq64 else 0x10)).value
        while self.sysout_verbose_address:
            yield self.parse_element_information(self.sysout_verbose_address,
                                                'sysout_verbose')
            self.sysout_verbose_address = (ctypes.c_ulong if self.jq64 else ctypes.c_uint)\
                .from_address(self.sysout_verbose_address + (0x30 if self.jq64 else 0x14)).value

    def parse_element_information(self, header_address, header_type, result=None):
        """Walk the variable-length sections of one returned element and
        merge every parsed section into *result* (a dict)."""
        if not result:
            result = {}
        offset = ctypes.c_ushort.from_address(header_address + 0x4).value
        total_length = ctypes.c_ushort.from_address(header_address + offset + 0x0).value
        address = header_address + offset
        offset = 4
        while offset < total_length:
            section_length = ctypes.c_ushort.from_address(address + offset + 0x0).value
            result = self.parse_information(address + offset,
                                            total_length - offset, result)
            offset += section_length
        return result

    def parse_information(self, address, remaining_length, result=None):
        """Decode one section at *address* according to structure_info and
        merge the extracted (and converted) fields into *result*."""
        if not result:
            result = {}
        type_and_modifier = ctypes.c_ushort.from_address(address + 0x2).value
        data_format = structure_info.get(type_and_modifier, ())
        if data_format:
            address += 4  # skip the 4-byte section prefix
            for name, ctype, offset, convert, size in data_format:
                if name and (not self.requested_fields
                             or name in self.requested_fields):
                    if self.verbose:
                        print("%X %s %s" % (offset, name,
                                            dump_region(address + offset, size,
                                                        show_header=False,
                                                        show_address=False)),
                              flush=True)
                    if ctype:
                        value = convert(
                            ctype.from_address(address + offset).value)
                    else:
                        # Raw field: copy the bytes out of the returned area.
                        value = bytearray(size)
                        ctypes.memmove(bytearray_buffer_address(value),
                                       address + offset, size)
                    if value:
                        result[name] = value
        return result
print(type(x)) x = ("apple", "banana", "cherry") # tuple print(type(x)) x = range(6) # range print(type(x)) x = {"name" : "John", "age" : 36} # dict print(type(x)) x = {"apple", "banana", "cherry"} # set print(type(x)) x = frozenset({"apple", "banana", "cherry"}) # frozenset print(type(x)) x = True # bool print(type(x)) x = b"Hello" # bytes print(type(x)) x = bytearray(5) # bytearray print(type(x)) x = memoryview(bytes(5)) # memoryview print(type(x)) print("++++++++++++++++++++++ Python Casting +++++++++++++++++++++++++") x = str("Hello World") # str print(type(x)) x = int(20) # int print(type(x)) x = float(20.5) # float print(type(x)) x = complex(1j) # complex print(type(x)) x = list(("apple", "banana", "cherry")) # list
def __init__(self, name): self.name = name self.address = 0 self.alignment = 4 self.data = bytearray()
def __init__(self, service=None): self._pixel_packet_buf = bytearray(_PixelPacket.MAX_LENGTH) super().__init__(service=service)
def test_can_reset_stream_handles_bytearray(self): contents = bytearray(b'notastream') self.prepared_request.body = contents self.prepared_request.reset_stream() # assert the request body doesn't change after reset_stream is called self.assertEqual(self.prepared_request.body, contents)
def strwrite(fp, str): fp.write(bytearray(str, 'ascii'))
def add_objects(self): # Add each object to robot workspace at x,y location and orientation (random or pre-loaded) self.object_handles = [] sim_obj_handles = [] for object_idx in range(len(self.obj_mesh_ind)): curr_mesh_file = os.path.join(self.obj_mesh_dir, self.mesh_list[self.obj_mesh_ind[object_idx]]) curr_mesh_file = os.path.abspath(curr_mesh_file) curr_shape_name = 'shape_%02d' % object_idx drop_x = (self.workspace_limits[0][1] - self.workspace_limits[0][0] - 0.2) * np.random.random_sample() + self.workspace_limits[0][0] + 0.1 drop_y = (self.workspace_limits[1][1] - self.workspace_limits[1][0] - 0.2) * np.random.random_sample() + self.workspace_limits[1][0] + 0.1 object_position = [drop_x, drop_y, 0.15] object_orientation = [2*np.pi*np.random.random_sample(), 2*np.pi*np.random.random_sample(), 2*np.pi*np.random.random_sample()] object_color = [self.obj_mesh_color[object_idx][0], self.obj_mesh_color[object_idx][1], self.obj_mesh_color[object_idx][2]] ret_resp,ret_ints,ret_floats,ret_strings,ret_buffer = vrep.simxCallScriptFunction(self.sim_client, 'remoteApiCommandServer',vrep.sim_scripttype_childscript,'importShape',[0,0,255,0], object_position + object_orientation + object_color, [curr_mesh_file, curr_shape_name], bytearray(), vrep.simx_opmode_blocking) if ret_resp == 8: print('Failed to add new objects to simulation. Please restart.') exit() curr_shape_handle = ret_ints[0] self.object_handles.append(curr_shape_handle) time.sleep(2) self.prev_obj_positions = [] self.obj_positions = []
# Load image in RGB format and get dimensions: print("Loading...") img = Image.open(FILENAME).convert("RGB") pixels = img.load() width, height = img.size ratio = width / height print("Image is {}x{}".format(width, height)) if height > NUMPIXELS: height = NUMPIXELS width = int(height * ratio) img.resize((width, height)) print("Resized to {}x{}".format(width, height)) # Calculate gamma correction table, makes mid-range colors look 'right': gamma = bytearray(256) for i in range(256): gamma[i] = int(pow(float(i) / 255.0, 2.7) * 255.0 + 0.5) print("Displaying...") try: while True: for x in range(width): # For each column of image... for y in range(height): # For each pixel in column... value = pixels[x, y] # Read pixel in image mote.set_pixel( 1, y, # Set pixel in strip gamma[value[0]], # Gamma-corrected red gamma[value[1]], # Gamma-corrected green
def login_req2buf(name,password):
    """Build the WeChat manual-auth login request packet.

    Returns a tuple (senddata, login_aes_key): the assembled packet bytes
    and the randomly generated session AES key.
    """
    # Randomly generate the 16-character AES key for this login packet
    login_aes_key = bytes(''.join(random.sample(string.ascii_letters + string.digits, 16)), encoding = "utf8")

    # Build protobuf message 1: account credentials
    accountRequest = mm_pb2.ManualAuthAccountRequest(
        aes = mm_pb2.ManualAuthAccountRequest.AesKey(
            len = 16,
            key = login_aes_key
        ),
        ecdh = mm_pb2.ManualAuthAccountRequest.Ecdh(
            nid = 713,
            ecdhKey = mm_pb2.ManualAuthAccountRequest.Ecdh.EcdhKey(
                len = len(Util.EcdhPubKey),
                key = Util.EcdhPubKey
            )
        ),
        userName = name,
        password1 = Util.GetMd5(password),
        password2 = Util.GetMd5(password)
    )

    # Build protobuf message 2: device information
    deviceRequest = mm_pb2.ManualAuthDeviceRequest(
        login = mm_pb2.LoginInfo(
            aesKey = login_aes_key,
            uin = 0,
            guid = define.__GUID__ + '\0',          # guid is NUL-terminated
            clientVer = define.__CLIENT_VERSION__,
            androidVer = define.__ANDROID_VER__,
            unknown = 1,
        ),
        tag2 = mm_pb2.ManualAuthDeviceRequest._Tag2(),
        imei = define.__IMEI__,
        softInfoXml = define.__SOFTINFO__.format(define.__IMEI__,define.__ANDROID_ID__, define.__MANUFACTURER__+" "+define.__MODELNAME__, define.__MOBILE_WIFI_MAC_ADDRESS__, define.__CLIENT_SEQID_SIGN__, define.__AP_BSSID__, define.__MANUFACTURER__,"taurus", define.__MODELNAME__, define.__IMEI__),
        unknown5 = 0,
        clientSeqID = define.__CLIENT_SEQID__,
        clientSeqID_sign = define.__CLIENT_SEQID_SIGN__,
        loginDeviceName = define.__MANUFACTURER__+" "+define.__MODELNAME__,
        deviceInfoXml = define.__DEVICEINFO__.format(define.__MANUFACTURER__, define.__MODELNAME__),
        language = define.__LANGUAGE__,
        timeZone = "8.00",
        unknown13 = 0,
        unknown14 = 0,
        deviceBrand = define.__MANUFACTURER__,
        deviceModel = define.__MODELNAME__+"armeabi-v7a",
        osType = define.__ANDROID_VER__,
        realCountry = "cn",
        unknown22 = 2,                              # Unknown
    )

    logger.debug("accountData protobuf数据:" + str(accountRequest.SerializeToString()))
    logger.debug("deviceData protobuf数据:" + str(deviceRequest.SerializeToString()))

    # Encrypt: account data with RSA, device data with the session AES key
    # (compress_and_aes returns a tuple; the ciphertext is element 0)
    reqAccount = Util.compress_and_rsa(accountRequest.SerializeToString())
    reqDevice = Util.compress_and_aes(deviceRequest.SerializeToString(),login_aes_key)

    logger.debug("加密后数据长度:reqAccount={},reqDevice={}".format(len(reqAccount),len(reqDevice[0])))
    logger.debug("加密后reqAccount数据:" + str(reqAccount))
    logger.debug("加密后reqDevice数据:" + str(reqDevice[0]))

    # Assemble the packet body
    subheader = b''
    subheader += struct.pack(">I",len(accountRequest.SerializeToString()))  # length of the accountData protobuf
    subheader += struct.pack(">I",len(deviceRequest.SerializeToString()))   # length of the deviceData protobuf
    subheader += struct.pack(">I",len(reqAccount))                          # length of the RSA-encrypted accountData
    body = subheader + reqAccount + reqDevice[0]    # body = subheader + encrypted account data + encrypted device info

    # Assemble the packet header
    header = bytearray(0)
    header += bytes([0])                            # low 2 bits: 02 = body not compressed; high 6 bits: header length, filled in last
    # header += bytes([((0x7<<4) + 0xf)])           # 0x7: RSA encryption algorithm; 0xf: cookie length
    header += struct.pack(">I",define.__CLIENT_VERSION__)   # client version, network byte order
    header += bytes([0]*4)                          # uin
    header += bytes([0]*15)                         # cookie (15 bytes)
    header += encoder._VarintBytes(701)             # cgi type
    header += encoder._VarintBytes(len(body))       # body length before compression
    header += encoder._VarintBytes(len(body))       # body length after compression (login body is not compressed)
    header += struct.pack(">B",define.__LOGIN_RSA_VER__)    # RSA key version
    header += b'\x01\x02'                           # Unknown Param
    header[0] = (len(header)<<2) + 2                # header length in the high 6 bits, 0b10 in the low 2

    # Assemble the full packet
    logger.debug('包体数据:' + str(body))
    logger.debug('包头数据:' + str(header))
    senddata = header + body

    return (senddata,login_aes_key)
def get_image_filename(self) -> str: """ :return: Return image file name """ return bytearray(self.source.FileName.string[:-2]).decode("utf-16le")
def __init__(self, ser): self.buf = bytearray() self.s = ser # Serial object self.i = 0 # Index for buffer to indicate how much data left
#! python3 import struct import os b = bytearray(open('BRF_testfile.txt', 'rb').read()) for i in b: print(i, end=' ') for lino, record in enumerate(struct.iter_unpack('<15s', b), start=1): print('{0}: {1}'.format(lino, record[0]), encoding=None)
def alloc(length): return haxe_io_Bytes(length, bytearray(length))
values = [] for prediction in predictions: value = {} value['Tag'] = prediction['Tag'] value['Probability'] = prediction['Probability'] values.append(value) result['values'] = values return result while True: for camera in list_of_cameras: # Use urllib to get the image and convert into a cv2 usable format imgResp=request.urlopen('{}/shot.jpg'.format(cameras_to_ip[camera])) imgNp=np.array(bytearray(imgResp.read()),dtype=np.uint8) img=cv2.imdecode(imgNp,-1) raw = Image.fromarray(img, 'RGB') image_file_name = 'images/image{}.png'.format(count) raw.save(image_file_name) count += 1 count = count % 9 print('loop') response = azure.analyze_image(open(image_file_name, 'rb').read()) put_dict = construct_update_dict(response, camera, image_file_name) store.put(put_dict) # put the image on screen
#hardware platform:FireBeetle-ESP32 from machine import Pin, I2C import time i2c = I2C(scl=Pin(22), sda=Pin(21), freq=10000) #create I2C object,init Pin and frequency b = bytearray("dfrobot") #create a array i2c.writeto_mem(0x50, 0, b, addrsize=16) #Write b to the slave specified by 0x50 starting #from the memory address specified by 0, The addrsize is 16 time.sleep(0.1) print( i2c.readfrom_mem(0x50, 0, 7, addrsize=16) ) #Read 7 bytes from the slave specified by 0x50 starting from the memory address specified by 0. #The addrsize specifies the address size in bits. Returns a bytes object with the data read.
def process_image(self, image):
    """Run object detection on a raw image and update sensor state.

    The raw bytes are decoded with PIL, optionally rescaled, sent to the
    Deepstack detector, then the detected objects are filtered against the
    configured target names, confidence thresholds and region of interest.
    An event is fired for every matched target.

    :param image: raw image bytes from the camera
    """
    self._image = Image.open(io.BytesIO(bytearray(image)))
    self._image_width, self._image_height = self._image.size

    # resize image if different then default
    if self._scale != DEAULT_SCALE:
        # BUGFIX: the original computed BOTH components of newsize from the
        # width; the second component must use the height.
        newsize = (
            self._image_width * self._scale,
            self._image_height * self._scale,
        )
        self._image.thumbnail(newsize, Image.ANTIALIAS)
        self._image_width, self._image_height = self._image.size
        with io.BytesIO() as output:
            self._image.save(output, format="JPEG")
            image = output.getvalue()
        _LOGGER.debug((
            f"Image scaled with : {self._scale} W={self._image_width} H={self._image_height}"
        ))

    # Reset per-run state before detection.
    self._state = None
    self._objects = []  # The parsed raw data
    self._targets_found = []
    self._summary = {}
    saved_image_path = None

    try:
        predictions = self._dsobject.detect(image)
    except ds.DeepstackException as exc:
        _LOGGER.error("Deepstack error : %s", exc)
        return

    self._objects = get_objects(predictions, self._image_width, self._image_height)
    self._targets_found = []

    for obj in self._objects:
        # Only consider objects whose name or type is a configured target.
        if not ((obj["name"] in self._targets_names)
                or (obj["object_type"] in self._targets_names)):
            continue
        ## Then check if the type has a configured confidence, if yes assign
        ## Then if a confidence for a named object, this takes precedence over type confidence
        confidence = None
        for target in self._targets:
            if obj["object_type"] == target[CONF_TARGET]:
                confidence = target[CONF_CONFIDENCE]
        for target in self._targets:
            if obj["name"] == target[CONF_TARGET]:
                confidence = target[CONF_CONFIDENCE]
        # BUGFIX: the original compared against a possibly-None confidence,
        # which raises TypeError on Python 3. Skip objects with no
        # configured threshold instead.
        if confidence is None:
            continue
        if obj["confidence"] > confidence:
            if not object_in_roi(self._roi_dict, obj["centroid"]):
                continue
            self._targets_found.append(obj)

    self._state = len(self._targets_found)
    if self._state > 0:
        self._last_detection = dt_util.now().strftime(DATETIME_FORMAT)

    targets_found = [
        obj["name"] for obj in self._targets_found
    ]  # Just the list of target names, e.g. [car, car, person]
    self._summary = dict(Counter(targets_found))  # e.g. {'car':2, 'person':1}

    if self._save_file_folder:
        if self._state > 0 or self._always_save_latest_file:
            saved_image_path = self.save_image(
                self._targets_found,
                self._save_file_folder,
            )

    # Fire events
    for target in self._targets_found:
        target_event_data = target.copy()
        target_event_data[ATTR_ENTITY_ID] = self.entity_id
        if saved_image_path:
            target_event_data[SAVED_FILE] = saved_image_path
        self.hass.bus.fire(EVENT_OBJECT_DETECTED, target_event_data)
def test_select_with_key_and_multiple_bins_to_select_policy_key_digest(self):
    """Select a subset of bins with POLICY_KEY_DIGEST and verify the
    returned key, metadata and bin values."""
    record_key = ('test', 'demo', None,
                  bytearray("asd;as[d'as;djk;uyfl", "utf-8"))
    record = {
        'a': ["nanslkdl", 1, bytearray("asd;as[d'as;d", "utf-8")],
        'b': {"key": "asd';q;'1';"},
        'c': 1234,
        'd': '!@#@#$QSDAsd;as',
    }
    TestSelect.client.put(record_key, record)

    policy = {'timeout': 1000, 'key': aerospike.POLICY_KEY_DIGEST}
    key, meta, bins = TestSelect.client.select(record_key, ['c', 'd'], policy)

    assert bins == {'c': 1234, 'd': '!@#@#$QSDAsd;as'}
    assert key == ('test', 'demo', None, bytearray(b"asd;as[d\'as;djk;uyfl"))
    assert meta != None

    # Clean up the record written at the start of the test.
    TestSelect.client.remove(('test', 'demo', None,
                              bytearray("asd;as[d'as;djk;uyfl", "utf-8")))
def test_with_bytearray(self): try: h = hmac.HMAC(bytearray(b"key"), bytearray(b"hash this!"), digestmod="md5") except Exception: self.fail("Constructor call with bytearray arguments raised exception.") self.assertEqual(h.hexdigest(), '34325b639da4cfd95735b381e28cb864')