def test_c_buffer_raw(self, memoryview=memoryview):
    """c_buffer.raw must accept buffer-like assignment; .value must not.

    NOTE(review): uses memoryview over str literals -- Python 2 only.
    """
    buf = c_buffer(32)
    # Assigning a buffer object to .raw copies its bytes into the buffer.
    buf.raw = memoryview("Hello, World")
    self.assertEqual(buf.value, "Hello, World")
    # .value rejects non-string objects; .raw rejects data larger than the buffer.
    self.assertRaises(TypeError, setattr, buf, "value", memoryview("abc"))
    self.assertRaises(ValueError, setattr, buf, "raw", memoryview("x" * 100))
def test_buffer_or_unicode(self):
    # Test that send/sendall/sendto accept a buffer or a unicode as arg
    # NOTE(review): Python 2 only (buffer type, u'' literals, _socket API).
    import _socket, os
    s = _socket.socket(_socket.AF_INET, _socket.SOCK_STREAM, 0)
    # XXX temporarily we use python.org to test, will have more robust tests
    # in the absence of a network connection later when more parts of the
    # socket API are implemented. Currently skip the test if there is no
    # connection.
    try:
        s.connect(("www.python.org", 80))
    except _socket.gaierror as ex:
        skip("GAIError - probably no connection: %s" % str(ex.args))
    exc = raises(TypeError, s.send, None)
    assert str(exc.value) == "must be string or buffer, not None"
    # Empty buffer/memoryview/unicode payloads are all legal no-ops.
    assert s.send(buffer('')) == 0
    assert s.sendall(buffer('')) is None
    assert s.send(memoryview('')) == 0
    assert s.sendall(memoryview('')) is None
    assert s.send(u'') == 0
    assert s.sendall(u'') is None
    # Non-ASCII unicode cannot be implicitly encoded for sending.
    raises(UnicodeEncodeError, s.send, u'\xe9')
    s.close()
    s = _socket.socket(_socket.AF_INET, _socket.SOCK_DGRAM, 0)
    s.sendto(buffer(''), ('localhost', 9))  # Send to discard port.
    s.close()
def test_stringlike_conversions(self):
    # methods that should return bytearray (and not str)
    # NOTE(review): bytearray('abc') without an encoding is Python 2 only.
    def check(result, expected):
        # Both the value and the concrete type must match.
        assert result == expected
        assert type(result) is bytearray
    check(bytearray('abc').replace('b', bytearray('d')), 'adc')
    check(bytearray('abc').replace('b', 'd'), 'adc')
    check(bytearray('abc').upper(), 'ABC')
    check(bytearray('ABC').lower(), 'abc')
    check(bytearray('abc').title(), 'Abc')
    check(bytearray('AbC').swapcase(), 'aBc')
    check(bytearray('abC').capitalize(), 'Abc')
    check(bytearray('abc').ljust(5), 'abc  ')
    check(bytearray('abc').rjust(5), '  abc')
    check(bytearray('abc').center(5), ' abc ')
    check(bytearray('1').zfill(5), '00001')
    check(bytearray('1\t2').expandtabs(5), '1    2')
    check(bytearray(',').join(['a', bytearray('b')]), 'a,b')
    check(bytearray('abca').lstrip('a'), 'bca')
    check(bytearray('cabc').rstrip('c'), 'cab')
    # strip arguments may themselves be buffer objects.
    check(bytearray('abc').lstrip(memoryview('a')), 'bc')
    check(bytearray('abc').rstrip(memoryview('c')), 'ab')
    check(bytearray('aba').strip('a'), 'b')
def deserialize(self, msg_list, content=True, copy=True):
    """Unserialize a msg_list to a nested message dict.

    This is roughly the inverse of serialize. The serialize/deserialize
    methods work with full message lists, whereas pack/unpack work with
    the individual message parts in the message list.

    Parameters
    ----------
    msg_list : list of bytes or Message objects
        The list of message parts of the form [HMAC,p_header,p_parent,
        p_metadata,p_content,buffer1,buffer2,...].
    content : bool (True)
        Whether to unpack the content dict (True), or leave it packed
        (False).
    copy : bool (True)
        Whether msg_list contains bytes (True) or the non-copying Message
        objects in each place (False).

    Returns
    -------
    msg : dict
        The nested message dict with top-level keys [header, parent_header,
        content, buffers].  The buffers are returned as memoryviews.
    """
    minlen = 5
    message = {}
    if not copy:
        # pyzmq didn't copy the first parts of the message, so we'll do it
        for i in range(minlen):
            msg_list[i] = msg_list[i].bytes
    if self.auth is not None:
        # Verify the HMAC signature and guard against replay attacks by
        # remembering every signature we have already accepted.
        signature = msg_list[0]
        if not signature:
            raise ValueError("Unsigned Message")
        if signature in self.digest_history:
            raise ValueError("Duplicate Signature: %r" % signature)
        self._add_digest(signature)
        check = self.sign(msg_list[1:5])
        # compare_digest is constant-time to avoid timing side channels.
        if not compare_digest(signature, check):
            raise ValueError("Invalid Signature: %r" % signature)
    if not len(msg_list) >= minlen:
        raise TypeError("malformed message, must have at least %i elements"%minlen)
    header = self.unpack(msg_list[1])
    message['header'] = extract_dates(header)
    message['msg_id'] = header['msg_id']
    message['msg_type'] = header['msg_type']
    message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
    message['metadata'] = self.unpack(msg_list[3])
    if content:
        message['content'] = self.unpack(msg_list[4])
    else:
        # Caller asked for the content to stay packed.
        message['content'] = msg_list[4]
    # Remaining frames are opaque binary buffers, exposed as memoryviews.
    buffers = [memoryview(b) for b in msg_list[5:]]
    if buffers and buffers[0].shape is None:
        # force copy to workaround pyzmq #646
        buffers = [memoryview(b.bytes) for b in msg_list[5:]]
    message['buffers'] = buffers
    # adapt to the current version
    return adapt(message)
def _compress_frame(self):
    '''
    frame contains all the blocks, plus frame header and checksum
    '''
    self.dst_file.write(self._frame_header())

    def read_src(buf):
        # readinto fills the pre-allocated buffer; returns bytes read (0 at EOF).
        return self.src_file.readinto(buf)

    # Pre-size reusable source/destination buffers once, outside the loop.
    self.src_buffer = bytearray(b'\0') * BLOCK_SIZE
    self.dst_buffer = bytearray(
        b'\0') * worst_case_block_length(BLOCK_SIZE)
    # Frame checksum is xxHash32 over the *uncompressed* content.
    xxh = xxhash.xxh32(seed=0)
    nbytes = read_src(self.src_buffer)
    while nbytes != 0:
        # Compress only the filled prefix of the source buffer (zero-copy slice).
        block_len = lz4_compress_block(
            self.dst_buffer, memoryview(self.src_buffer)[0:nbytes])
        self.dst_file.write(memoryview(self.dst_buffer)[0:block_len])
        # only pinned buffer, not appropriate here
        xxh.update(bytes(self.src_buffer[0:nbytes]))
        nbytes = read_src(self.src_buffer)
    self.dst_file.write((0).to_bytes(4, 'little'))  # EndMark
    self.dst_file.write(xxh.intdigest().to_bytes(4, 'little'))  # CheckSum
def test_mutable_read(conn):
    # Read data and modify it, to make sure that this doesn't
    # affect the buffer
    conn._rbuf = dugong._Buffer(129)
    conn.send_request('GET', '/send_512_bytes')
    conn.read_response()
    # Assert that buffer is full, but does not start at beginning
    assert conn._rbuf.b > 0
    # Need to avoid conn.read(), because it converts to bytes
    buf = dugong.eval_coroutine(conn.co_read(150))
    pos = len(buf)
    assert buf == DUMMY_DATA[:pos]
    # Mutate the returned data in place; the connection's internal buffer
    # must not be affected by this.
    memoryview(buf)[:10] = b'\0' * 10
    # Assert that buffer is empty
    assert conn._rbuf.b == 0
    assert conn._rbuf.e == 0
    buf = dugong.eval_coroutine(conn.co_read(150))
    assert buf == DUMMY_DATA[pos:pos+len(buf)]
    memoryview(buf)[:10] = b'\0' * 10
    pos += len(buf)
    # Drain the rest of the 512-byte body and verify it is untouched.
    assert conn.readall() == DUMMY_DATA[pos:512]
    assert not conn.response_pending()
def test_hexewkb(self): "Testing (HEX)EWKB output." # For testing HEX(EWKB). ogc_hex = b'01010000000000000000000000000000000000F03F' ogc_hex_3d = b'01010000800000000000000000000000000000F03F0000000000000040' # `SELECT ST_AsHEXEWKB(ST_GeomFromText('POINT(0 1)', 4326));` hexewkb_2d = b'0101000020E61000000000000000000000000000000000F03F' # `SELECT ST_AsHEXEWKB(ST_GeomFromEWKT('SRID=4326;POINT(0 1 2)'));` hexewkb_3d = b'01010000A0E61000000000000000000000000000000000F03F0000000000000040' pnt_2d = Point(0, 1, srid=4326) pnt_3d = Point(0, 1, 2, srid=4326) # OGC-compliant HEX will not have SRID value. self.assertEqual(ogc_hex, pnt_2d.hex) self.assertEqual(ogc_hex_3d, pnt_3d.hex) # HEXEWKB should be appropriate for its dimension -- have to use an # a WKBWriter w/dimension set accordingly, else GEOS will insert # garbage into 3D coordinate if there is none. self.assertEqual(hexewkb_2d, pnt_2d.hexewkb) self.assertEqual(hexewkb_3d, pnt_3d.hexewkb) self.assertEqual(True, GEOSGeometry(hexewkb_3d).hasz) # Same for EWKB. self.assertEqual(memoryview(a2b_hex(hexewkb_2d)), pnt_2d.ewkb) self.assertEqual(memoryview(a2b_hex(hexewkb_3d)), pnt_3d.ewkb) # Redundant sanity check. self.assertEqual(4326, GEOSGeometry(hexewkb_2d).srid)
def get_switch_state(self):
    """Query the device for its on/off state and configured timers.

    Returns a tuple ``(on, timer_list)`` where ``on`` is the raw state
    byte from the response and ``timer_list`` is a list of BTapsTimer.
    """
    # Request device state packet: 0xCCAA03120113
    payload = bytes(array('B', [0xCC, 0xAA, 0x03, 0x12, 0x01, 0x13]))
    self.socket.send(payload)
    response = self.__recv_data()
    # Byte 7 of the first response frame is the on/off flag.
    on = memoryview(response).tolist()[7]
    response_list = []
    while True:
        # Timer entries arrive as fixed 23-byte frames; a single 0x00
        # byte terminates the list.
        response = self.__recv_data(23)
        if memoryview(response).tolist() == [0x00]:
            break
        else:
            response_list.append(response)
    timer_list = []
    for timer_response in response_list:
        timer_bytes = memoryview(timer_response).tolist()
        # Bytes: [0]=id, [1]=repeat-days bitmask, [2:4]=start h/m,
        # [4:6]=end h/m, [6]=enabled flag, [7:]=name (assumed -- TODO confirm).
        timer = BTapsTimer(timer_bytes[0], timer_response[7:])
        timer.set_repeat_days_byte(timer_bytes[1])
        timer.set_start_time(bad_hex_to_dec(timer_bytes[2]), bad_hex_to_dec(timer_bytes[3]))
        timer.set_end_time(bad_hex_to_dec(timer_bytes[4]), bad_hex_to_dec(timer_bytes[5]))
        timer.on = timer_bytes[6]
        timer_list.append(timer)
    return on, timer_list
def check_type_errors(self, f):
    """Every non-contiguous-byte input below must make *f* raise TypeError."""
    rejected_inputs = [
        "",                                      # text string, not bytes-like
        [],                                      # arbitrary sequence
        memoryview(b"1234").cast('B', (2, 2)),   # multidimensional view
        memoryview(b"1234").cast('I'),           # itemsize != 1
    ]
    for bad in rejected_inputs:
        self.assertRaises(TypeError, f, bad)
def test_memoryview():
    """compress/decompress must treat a memoryview exactly like bytes."""
    if sys.version_info < (2, 7):
        return  # skip: memoryview support requires Python 2.7+
    payload = os.urandom(128 * 1024)  # Read 128kb
    via_bytes = lz4.block.compress(payload)
    via_view = lz4.block.compress(memoryview(payload))
    assert via_view == via_bytes
    assert lz4.block.decompress(memoryview(via_bytes)) == payload
def test_pack_unpack_buffer(self):
    """pack_into/unpack_from must honour positive and negative offsets on
    writable buffers, and reject read-only or non-buffer arguments with
    the exact documented error messages (PyPy app-level test; uses the
    test framework's global ``raises`` helper)."""
    import array
    b = array.array('b', b'\x00' * 19)
    sz = self.struct.calcsize("ii")
    # Offsets 2 and -17 address the same position in a 19-byte buffer.
    for offset in [2, -17]:
        self.struct.pack_into("ii", b, offset, 17, 42)
        assert bytes(memoryview(b)) == (b'\x00' * 2 +
                                        self.struct.pack("ii", 17, 42) +
                                        b'\x00' * (19-sz-2))
    b2 = array.array('b', b'\x00' * 19)
    self.struct.pack_into("ii", memoryview(b2), 0, 17, 42)
    assert bytes(b2) == self.struct.pack("ii", 17, 42) + (b'\x00' * 11)
    # Read-only targets are rejected by type, tiny ones by size.
    exc = raises(TypeError, self.struct.pack_into, "ii", b'test', 0, 17, 42)
    assert str(exc.value) == "must be read-write buffer, not bytes"
    exc = raises(self.struct.error, self.struct.pack_into, "ii", b[0:1], 0, 17, 42)
    assert str(exc.value) == "pack_into requires a buffer of at least 8 bytes"
    assert self.struct.unpack_from("ii", b, 2) == (17, 42)
    assert self.struct.unpack_from("ii", b, -17) == (17, 42)
    assert self.struct.unpack_from("ii", memoryview(b)[2:]) == (17, 42)
    assert self.struct.unpack_from("ii", memoryview(b), 2) == (17, 42)
    exc = raises(TypeError, self.struct.unpack_from, "ii", 123)
    assert str(exc.value) == "'int' does not support the buffer interface"
    exc = raises(TypeError, self.struct.unpack_from, "ii", None)
    assert str(exc.value) == "'NoneType' does not support the buffer interface"
    exc = raises(self.struct.error, self.struct.unpack_from, "ii", b'')
    assert str(exc.value) == "unpack_from requires a buffer of at least 8 bytes"
    exc = raises(self.struct.error, self.struct.unpack_from, "ii", memoryview(b''))
    assert str(exc.value) == "unpack_from requires a buffer of at least 8 bytes"
def write(self, data):
    """Write *data* to the pipe, buffering whatever cannot be sent now.

    Tries an immediate os.write() when the buffer is empty; on a partial
    write, the remainder is buffered and the writer callback is armed.
    """
    assert isinstance(data, (bytes, bytearray, memoryview)), repr(data)
    if isinstance(data, bytearray):
        # Snapshot-as-view so later slicing is zero-copy.
        data = memoryview(data)
    if not data:
        return
    if self._conn_lost or self._closing:
        # Warn once the number of dropped writes crosses the threshold.
        if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
            logger.warning('pipe closed by peer or '
                           'os.write(pipe, data) raised exception.')
        self._conn_lost += 1
        return
    if not self._buffer:
        # Attempt to send it right away first.
        try:
            n = os.write(self._fileno, data)
        except (BlockingIOError, InterruptedError):
            n = 0  # nothing written; fall through to buffering
        except Exception as exc:
            self._conn_lost += 1
            self._fatal_error(exc, 'Fatal write error on pipe transport')
            return
        if n == len(data):
            return  # fully written, nothing to buffer
        elif n > 0:
            data = memoryview(data)[n:]  # keep only the unwritten tail
        # Buffer transitions empty -> non-empty: start watching writability.
        self._loop._add_writer(self._fileno, self._write_ready)
    self._buffer += data
    self._maybe_pause_protocol()
def _test_pbkdf2_hmac(self, pbkdf2): for digest_name, results in self.pbkdf2_results.items(): for i, vector in enumerate(self.pbkdf2_test_vectors): password, salt, rounds, dklen = vector expected, overwrite_dklen = results[i] if overwrite_dklen: dklen = overwrite_dklen out = pbkdf2(digest_name, password, salt, rounds, dklen) self.assertEqual(out, expected, (digest_name, password, salt, rounds, dklen)) out = pbkdf2(digest_name, memoryview(password), memoryview(salt), rounds, dklen) out = pbkdf2(digest_name, bytearray(password), bytearray(salt), rounds, dklen) self.assertEqual(out, expected) if dklen is None: out = pbkdf2(digest_name, password, salt, rounds) self.assertEqual(out, expected, (digest_name, password, salt, rounds)) self.assertRaises(TypeError, pbkdf2, b'sha1', b'pass', b'salt', 1) self.assertRaises(TypeError, pbkdf2, 'sha1', 'pass', 'salt', 1) self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', 0) self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', -1) self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, 0) self.assertRaises(ValueError, pbkdf2, 'sha1', b'pass', b'salt', 1, -1) with self.assertRaisesRegex(ValueError, 'unsupported hash type'): pbkdf2('unknown', b'pass', b'salt', 1)
def read_all(self):
    """Drain the stream and return the whole payload as a memoryview.

    With a known Content-Length, data is assembled into a single
    pre-sized bytearray (already-buffered chunks first, then direct
    reads).  Without one, chunks are accumulated and joined.  Both
    branches return a memoryview over the assembled bytes.
    """
    if self.content_length:
        buff = bytearray(int(self.content_length))
        buff_view = memoryview(buff)
        p = 0
        # Flush anything that was buffered before this call.
        for data in self.read_buffers:
            buff_view[p:p+len(data)] = data
            p += len(data)
        self.read_buffers = []
        self.read_buffer_len = 0
        # Read the remainder straight into place.
        while p < self.content_length:
            data = self.read()
            if not data:
                break  # stream ended early; return what we have
            buff_view[p:p + len(data)] = data[0:len(data)]
            p += len(data)
        return buff_view[:p]
    else:
        out = list()
        while True:
            data = self.read()
            if not data:
                break
            if isinstance(data, memoryview):
                data = data.tobytes()
            out.append(data)
        # BUG FIX: chunks are bytes, so the separator must be bytes too --
        # "".join() raises TypeError on Python 3 (b"" == "" on Python 2,
        # so this stays backward-compatible).
        out_buf = b"".join(out)
        return memoryview(out_buf)
def diffInts(self, pairs=None, limit=None, delta=None):
    """Diff two snapshots of the target's integer table and report changes.

    Reads the table, waits for the user to press enter, reads it again,
    then prints every slot whose decoded value changed.  Matches can be
    filtered by explicit (old, new) *pairs* (None acts as a wildcard on
    either side) and/or by an exact value *delta*; filtered matches are
    summarised at the end.
    """
    r1 = self.readMemory(self.integers, 4*self.integers_c)
    print('Press enter...')
    input()
    r2 = self.readMemory(self.integers, 4*self.integers_c)
    # Zero-copy reinterpretation of both snapshots as uint32 arrays.
    m1 = memoryview(r1).cast('I')
    m2 = memoryview(r2).cast('I')
    print('Compare...')
    # BUG FIX: for tables with fewer than 100 integers, integers_c // 100
    # is 0 and `i % (step*5)` raised ZeroDivisionError.
    step = max(1, self.integers_c // 100)
    res = []
    for i in range(self.integers_c):
        if i % (step*5) == 0:
            print('%d%%' % (i/step))  # coarse progress indicator
        if limit and i > limit:
            break
        if m1[i] != m2[i]:
            v1 = self.decode(m1[i])
            v2 = self.decode(m2[i])
            s = '%08x: %d -> %d' % (i*4, v1, v2)
            if pairs:
                if (v1, v2) in pairs or (v1, None) in pairs or (None, v2) in pairs:
                    res.append(s)
            if delta:
                if v2-v1 == delta:
                    res.append(s)
            print(s)
    print('Done')
    if pairs:
        print("\n".join(res))
def _test_send(self, send_func):
    """Verify *send_func* delivers a payload large enough to block.

    Forks: the child drains the socket (after a sleep so the parent
    really blocks), the parent writes until everything is sent.
    """
    rd, wr = socket.socketpair()
    self.addCleanup(wr.close)
    # rd closed explicitly by parent
    # we must send enough data for the send() to block
    data = b"xyz" * (support.SOCK_MAX_SIZE // 3)
    pid = os.fork()
    if pid == 0:
        # --- child: receiver ---
        wr.close()
        # let the parent block on send()
        self._sleep()
        received_data = bytearray(len(data))
        n = 0
        while n < len(data):
            # recv_into a sliced view appends in place without copies.
            n += rd.recv_into(memoryview(received_data)[n:])
        self.assertEqual(received_data, data)
        os._exit(0)
    else:
        # --- parent: sender ---
        rd.close()
        written = 0
        while written < len(data):
            sent = send_func(wr, memoryview(data)[written:])
            # sendall() returns None
            written += len(data) if sent is None else sent
        # Child must have exited cleanly (status 0).
        self.assertEqual(0, os.waitpid(pid, 0)[1])
def _recv_payload(self, length): """ This receive function handles the situation where the underlying socket has not received the full set of data. It spins on calling `recv` until the full quantity of data is available before returning. Note that this function makes us vulnerable to a DoS attack, where a server can send part of a frame and then swallow the rest. We should add support for socket timeouts here at some stage. """ # TODO: Fix DoS risk. if not length: return memoryview(b'') buffer = bytearray(length) buffer_view = memoryview(buffer) index = 0 data_length = -1 # _sock.recv(length) might not read out all data if the given length # is very large. So it should be to retrieve from socket repeatedly. while length and data_length: data = self._sock.recv(length) data_length = len(data) end = index + data_length buffer_view[index:end] = data[:] length -= data_length index = end return buffer_view[:end]
def __init__(self, command, subprocess, success_codes=None):
    """Wire a spawned subprocess's stdout/stderr into this tracker.

    NOTE(review): bytearray(' ' * n) without an encoding is Python 2
    only; `success_codes` is not used in this constructor -- confirm it
    is consumed elsewhere.
    """
    self.progress = 0.0
    self.command = command
    # timestamp of the last time when command was dumped to db
    # (required to limit the rate of db dumps when stdout and stderr is updated)
    self.update_ts = int(time.time())
    # log extra extra should be defined before _feed and _feed_error is used
    self.log_extra = self.command.log_extra
    # Stream both pipes into the feed callbacks until the process closes them.
    subprocess.stdout.read_until_close(self._feed, streaming_callback=self._feed)
    subprocess.stderr.read_until_close(self._feed_error, streaming_callback=self._feed_error)
    subprocess.stdout.set_close_callback(self._ensure_exit_cb)
    subprocess.set_exit_callback(self._exit_cb)
    self.subprocess = subprocess
    self._exit = False
    self._force_complete_cb_timeout = None
    self.exit_code = None
    self.command_code = None
    # Fixed-size output windows, pre-filled with spaces and written in place.
    self.output = memoryview(bytearray(' ' * self.OUTPUT_WINDOW_SIZE))
    self.output_size = 0
    self.error_output = memoryview(bytearray(' ' * self.OUTPUT_WINDOW_SIZE))
    self.error_output_size = 0
    self.output_closed = False
    self.error_output_closed = False
def test_pack_unpack_buffer(self):
    """Python 2 variant: pack_into/unpack_from with array, buffer and
    memoryview arguments, checking the exact error messages (PyPy
    app-level test; uses the framework's global ``raises`` helper)."""
    import array
    b = array.array('c', '\x00' * 19)
    sz = self.struct.calcsize("ii")
    # Offsets 2 and -17 address the same position in a 19-byte buffer.
    for offset in [2, -17]:
        self.struct.pack_into("ii", b, offset, 17, 42)
        assert str(buffer(b)) == ('\x00' * 2 +
                                  self.struct.pack("ii", 17, 42) +
                                  '\x00' * (19-sz-2))
    # Read-only targets are rejected by type, tiny ones by size.
    exc = raises(TypeError, self.struct.pack_into, "ii", buffer(b), 0, 17, 42)
    assert str(exc.value) == "must be read-write buffer, not buffer"
    exc = raises(TypeError, self.struct.pack_into, "ii", 'test', 0, 17, 42)
    assert str(exc.value) == "must be read-write buffer, not str"
    exc = raises(self.struct.error, self.struct.pack_into, "ii", b[0:1], 0, 17, 42)
    assert str(exc.value) == "pack_into requires a buffer of at least 8 bytes"
    assert self.struct.unpack_from("ii", b, 2) == (17, 42)
    assert self.struct.unpack_from("ii", b, -17) == (17, 42)
    assert self.struct.unpack_from("ii", buffer(b, 2)) == (17, 42)
    assert self.struct.unpack_from("ii", buffer(b), 2) == (17, 42)
    assert self.struct.unpack_from("ii", memoryview(buffer(b)), 2) == (17, 42)
    exc = raises(TypeError, self.struct.unpack_from, "ii", 123)
    assert 'must be string or buffer, not int' in str(exc.value)
    exc = raises(self.struct.error, self.struct.unpack_from, "ii", None)
    assert str(exc.value) == "unpack_from requires a buffer argument"
    exc = raises(self.struct.error, self.struct.unpack_from, "ii", '')
    assert str(exc.value) == "unpack_from requires a buffer of at least 8 bytes"
    exc = raises(self.struct.error, self.struct.unpack_from, "ii", memoryview(''))
    assert str(exc.value) == "unpack_from requires a buffer of at least 8 bytes"
def data_received(self, chunk):
    """Consume an incoming chunk, splitting off the length-prefixed
    metadata header before handing payload bytes to consume_chunk.

    Wire format (inferred from this code -- TODO confirm): 4-byte
    big-endian metadata length, then the metadata blob, then payload.
    """
    self.read_counter += len(chunk)
    if self.metadata is not None:
        # Metadata already parsed: everything from here on is payload.
        self.consume_chunk(chunk)
    else:
        if self.metadata_size == 0:
            # First chunk: the leading 4 bytes carry the metadata size.
            self.metadata_size = struct.unpack('!I', chunk[:4])[0]
            if self.metadata_size > 32 * 1024:
                self.return_error(400, "%d is a too big metadata size" % self.metadata_size)
                return
            self.debug("metadata size is %d", self.metadata_size)
            # On Python 3 slice via memoryview to avoid copying the chunk.
            if PY3:
                chunk = memoryview(chunk)[4:]
            else:
                chunk = chunk[4:]
        if self.size + len(chunk) > self.metadata_size:
            # This chunk crosses the metadata/payload boundary.
            self.debug("approached a breakpoint")
            rem = self.metadata_size - self.size
            if PY3:
                self.cache.append(memoryview(chunk)[:rem])
            else:
                self.cache.append(chunk[:rem])
            try:
                self.read_metadata()
            except Exception as e:
                self.return_error(400, e)
                return
            self.size = rem
            # Whatever follows the metadata starts the payload cache.
            if PY3:
                self.cache = [memoryview(chunk)[rem:]]
            else:
                self.cache = [chunk[rem:]]
        else:
            self.consume_chunk(chunk)
def test_writelines(self):
    """writelines() accepts only sequences of str/unicode and rejects
    array and memoryview items, in text and binary modes alike
    (Python 2 only: uses the `file` builtin and u'' literals)."""
    import array
    fn = self.temptestfile
    with file(fn, 'w') as f:
        f.writelines(['abc'])
        f.writelines([u'def'])
        # Buffer-like items must be rejected even though they hold chars.
        exc = raises(TypeError, f.writelines, [array.array('c', 'ghi')])
        assert str(exc.value) == "writelines() argument must be a sequence of strings"
        exc = raises(TypeError, f.writelines, [memoryview('jkl')])
        assert str(exc.value) == "writelines() argument must be a sequence of strings"
    assert open(fn, 'r').readlines() == ['abcdef']
    with file(fn, 'wb') as f:
        f.writelines(['abc'])
        f.writelines([u'def'])
        exc = raises(TypeError, f.writelines, [array.array('c', 'ghi')])
        assert str(exc.value) == "writelines() argument must be a sequence of strings"
        exc = raises(TypeError, f.writelines, [memoryview('jkl')])
        assert str(exc.value) == "writelines() argument must be a sequence of strings"
    assert open(fn, 'rb').readlines() == ['abcdef']
    with file(fn, 'wb') as f:
        # A bad item anywhere in the sequence aborts the whole write.
        exc = raises(TypeError, f.writelines, ['abc', memoryview('def')])
        assert str(exc.value) == "writelines() argument must be a sequence of strings"
    assert open(fn, 'rb').readlines() == []
def get_resource(self, path, zipped=False):
    """Return a zero-copy memoryview of the cached file, or None if absent.

    When *zipped* is true the gzip cache is preferred; a plain-cache hit
    is still returned when no gzipped entry exists for *path*.
    """
    if zipped and path in self.gzip_cache:
        entry = self.gzip_cache[path]
        return memoryview(entry)
    if path in self.cache:
        entry = self.cache[path]
        return memoryview(entry)
    return None
def _read(self, n, initial, _errnos):
    """Read from socket

    This is the default implementation.  Subclasses may implement `read()`
    to simply call this method, or provide their own `read()`
    implementation.

    Note:
        According to SSL_read(3), it can at most return 16kB of data.
        Thus, we use an internal read buffer like to get the exact number
        of bytes wanted.

    Note:
        ssl.sock.read may cause ENOENT if the operation couldn't
        be performed (?).

    :param int n: exact number of bytes to read
    :return: data read
    :rtype: memoryview
    """
    # Walk a view over the shared read buffer; each recv_into fills the
    # next unfilled region without copying.
    mview = memoryview(self._rbuf)
    to_read = n
    while to_read:
        try:
            bytes_read = self.sock.recv_into(mview, to_read)
            mview = mview[bytes_read:]
            to_read -= bytes_read
        except socket.error as exc:
            # Retryable errnos are only tolerated after the first pass.
            if not initial and exc.errno in _errnos:
                continue
            raise
        # recv_into returning 0 means the peer closed the connection.
        if not bytes_read:
            raise IOError('socket closed')
    return memoryview(self._rbuf)[:n]
async def send_bytes(self, buf, offset=0, size=None):
    '''
    Send a buffer of bytes as a single message
    '''
    m = memoryview(buf)
    if m.itemsize > 1:
        # Flatten multi-byte views (e.g. array('i')) to raw bytes first.
        m = memoryview(bytes(m))
    n = len(m)
    # Validate the (offset, size) window against the buffer length.
    if offset < 0:
        raise ValueError("offset is negative")
    if n < offset:
        raise ValueError("buffer length < offset")
    if size is None:
        size = n - offset
    elif size < 0:
        raise ValueError("size is negative")
    elif offset + size > n:
        raise ValueError("buffer length < offset + size")
    # Wire format: 4-byte signed big-endian length prefix, then payload.
    header = struct.pack('!i', size)
    if size >= 16384:
        # Large payload: write header and body separately to avoid the copy.
        await self._writer.write(header)
        await self._writer.write(m[offset:offset + size])
    else:
        # Small payload: one combined write is cheaper than two syscalls.
        msg = header + bytes(m[offset:offset + size])
        await self._writer.write(msg)
def _check_shuffle(self, func, ptr): """ Check a shuffle()-like function for 1D arrays. """ # Our implementation follows Python 3's. a = np.arange(20) if sys.version_info >= (3,): r = self._follow_cpython(ptr) for i in range(3): got = a.copy() expected = a.copy() func(got) r.shuffle(expected) self.assertTrue(np.all(got == expected), (got, expected)) else: # Sanity check for i in range(3): b = a.copy() func(b) self.assertFalse(np.all(a == b)) self.assertEqual(sorted(a), sorted(b)) a = b # Test with an arbitrary buffer-providing object b = a.copy() func(memoryview(b)) self.assertFalse(np.all(a == b)) self.assertEqual(sorted(a), sorted(b)) # Read-only object with self.assertTypingError(): func(memoryview(b"xyz"))
def test_stringlike_conversions(self):
    # methods that should return bytearray (and not str)
    # NOTE(review): bytearray("abc") without an encoding is Python 2 only.
    def check(result, expected):
        # Both the value and the concrete type must match.
        assert result == expected
        assert type(result) is bytearray
    check(bytearray("abc").replace("b", bytearray("d")), "adc")
    check(bytearray("abc").replace("b", "d"), "adc")
    # Replacing in an empty bytearray must stay empty.
    check(bytearray("").replace("a", "ab"), "")
    check(bytearray("abc").upper(), "ABC")
    check(bytearray("ABC").lower(), "abc")
    check(bytearray("abc").title(), "Abc")
    check(bytearray("AbC").swapcase(), "aBc")
    check(bytearray("abC").capitalize(), "Abc")
    check(bytearray("abc").ljust(5), "abc  ")
    check(bytearray("abc").rjust(5), "  abc")
    check(bytearray("abc").center(5), " abc ")
    check(bytearray("1").zfill(5), "00001")
    check(bytearray("1\t2").expandtabs(5), "1    2")
    check(bytearray(",").join(["a", bytearray("b")]), "a,b")
    check(bytearray("abca").lstrip("a"), "bca")
    check(bytearray("cabc").rstrip("c"), "cab")
    # strip arguments may themselves be buffer objects.
    check(bytearray("abc").lstrip(memoryview("a")), "bc")
    check(bytearray("abc").rstrip(memoryview("c")), "ab")
    check(bytearray("aba").strip("a"), "b")
def recv(self):
    """
    async recv
    recv completed packet is put to recv packet
    """
    # readbuf is a tri-state list: [view, bytes-remaining, phase] where
    # phase is 'header' or 'body'; [] means idle (start a new header).
    if self.readbuf == []:
        # read header
        self.readbuf = [
            memoryview(bytearray(self.headerLen)),
            self.headerLen,
            'header'
        ]
    # Fill only the still-empty tail of the current buffer.
    nbytes = self.sock.recv_into(
        self.readbuf[0][-self.readbuf[1]:], self.readbuf[1])
    if nbytes == 0:
        return 'disconnected'
    self.readbuf[1] -= nbytes
    if self.readbuf[1] == 0:
        # complete recv
        if self.readbuf[2] == 'header':
            # Header carries the body length; switch to the body phase.
            bodylen = self.headerStruct.unpack(
                self.readbuf[0].tobytes())[0]
            self.readbuf = [
                memoryview(bytearray(bodylen)),
                bodylen,
                'body'
            ]
        elif self.readbuf[2] == 'body':
            # Hand the completed packet to the callback and reset.
            self.recvcallback(self.readbuf[0].tobytes())
            self.readbuf = []
            return 'complete'
        else:
            Log.error('invalid recv state %s', self.readbuf[2])
            return 'unknown'
    return 'cont'
def testSourceByteTypes_loads_memoryview(self):
    """loadsCall() and loads() must accept memoryview input, not just bytes."""
    call_ser, _ = self.serializer.serializeCall("object", "method", [1, 2, 3], {"kwarg": 42}, False)
    ser, _ = self.serializer.serializeData([4, 5, 6], False)
    # Round-trip the serialized call through a memoryview wrapper.
    _, _, vargs, _ = self.serializer.loadsCall(memoryview(call_ser))
    self.assertEqual([1, 2, 3], vargs)
    # Same for plain serialized data.
    d = self.serializer.loads(memoryview(ser))
    self.assertEqual([4, 5, 6], d)
def write(self, data):
    """Write *data* to the pipe, buffering whatever cannot be sent now.

    Tries an immediate os.write() when the buffer is empty; on a partial
    write, the remainder is buffered and the writer callback is armed.
    """
    assert isinstance(data, (bytes, bytearray, memoryview)), repr(data)
    if isinstance(data, bytearray):
        # Snapshot-as-view so later slicing is zero-copy.
        data = memoryview(data)
    if not data:
        return
    if self._conn_lost or self._closing:
        # Connection gone: count the dropped write and bail out.
        self._conn_lost += 1
        return
    if not self._buffer:
        # Attempt to send it right away first.
        try:
            n = os.write(self._fileno, data)
        except (BlockingIOError, InterruptedError):
            n = 0  # nothing written; fall through to buffering
        except Exception as exc:
            self._conn_lost += 1
            self._fatal_error(exc, 'Fatal write error on pipe transport')
            return
        if n == len(data):
            return  # fully written, nothing to buffer
        elif n > 0:
            data = memoryview(data)[n:]  # keep only the unwritten tail
        # Buffer transitions empty -> non-empty: start watching writability.
        self._loop.add_writer(self._fileno, self._write_ready)
    self._buffer += data
    self._maybe_pause_protocol()
def _parse_string(self, key, value):
    """Convert a typed string entry into a per-language dict of decoded
    strings, or return None when *value* is not a string-typed dict.

    Each language maps either to a single raw string or a list of raw
    strings; every raw string is run through StringConverter over a
    zero-copy memoryview.
    """
    if type(value) == dict:
        if "type" in value:
            if value["type"] == "string":
                logging.info(value)
                return_dict = dict()
                languages = value["ln"]
                enable_conditions = value["enable_conditions"]
                for ln in languages.keys():
                    if type(languages[ln]) == list:
                        # List form: convert every entry for this language.
                        return_dict[ln] = [
                            StringConverter(self._exd_manager, get_language_id(ln), enable_conditions).convert(
                                memoryview(sub_str)
                            )
                            for sub_str in languages[ln]
                        ]
                    else:
                        try:
                            return_dict[ln] = StringConverter(
                                self._exd_manager, get_language_id(ln), enable_conditions
                            ).convert(memoryview(languages[ln]))
                        except TypeError:
                            # Best-effort: skip entries memoryview can't wrap.
                            print("TypeError", ln)
                return return_dict
    return None
if args.encoder == 'turbo': from turbojpeg import TurboJPEG jpeg = TurboJPEG() jpeg_encode_func = lambda img, jpeg_quality=jpeg_quality: utils.turbo_encode_image( img, jpeg, jpeg_quality) jpeg_decode_func = lambda buf: utils.turbo_decode_image_buffer(buf, jpeg) else: jpeg_encode_func = lambda img, jpeg_quality=jpeg_quality: utils.cv2_encode_image( img, jpeg_quality) jpeg_decode_func = lambda buf: utils.cv2_decode_image_buffer(buf) # A temporary buffer in which the received data will be copied # this prevents creating a new buffer all the time tmp_buf = bytearray(7) tmp_view = memoryview( tmp_buf) # this allows to get a reference to a slice of tmp_buf # Creates a temporary buffer which can hold the largest image we can transmit img_buf = bytearray(9999999) img_view = memoryview(img_buf) with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.bind((host, port)) s.listen(1) conn, addr = s.accept() with conn: print('Connected by', addr) while True: utils.recv_data_into(conn, tmp_view[:5], 5) cmd = tmp_buf[:5].decode('ascii') if (cmd == 'image'):
from .. import util from ._securetransport.bindings import (Security, SecurityConst, CoreFoundation) from ._securetransport.low_level import (_assert_no_error, _cert_array_from_pem, _temporary_keychain, _load_client_cert_chain) try: # Platform-specific: Python 2 from socket import _fileobject except ImportError: # Platform-specific: Python 3 _fileobject = None from ..packages.backports.makefile import backport_makefile try: memoryview(b'') except NameError: raise ImportError("SecureTransport only works on Pythons with memoryview") __all__ = ['inject_into_urllib3', 'extract_from_urllib3'] # SNI always works HAS_SNI = True orig_util_HAS_SNI = util.HAS_SNI orig_util_SSLContext = util.ssl_.SSLContext # This dictionary is used by the read callback to obtain a handle to the # calling wrapped socket. This is a pretty silly approach, but for now it'll # do. I feel like I should be able to smuggle a handle to the wrapped socket # directly in the SSLConnectionRef, but for now this approach will work I
def test_buffer(self): import bz2 data = bz2.compress(memoryview(self.TEXT)) result = bz2.decompress(memoryview(data)) assert result == self.TEXT
def test_buffer(self): from bz2 import BZ2Decompressor bz2d = BZ2Decompressor() decompressed_data = bz2d.decompress(memoryview(self.DATA)) assert decompressed_data == self.TEXT
def test_buffer(self): from bz2 import BZ2Compressor bz2c = BZ2Compressor() data = bz2c.compress(memoryview(self.TEXT)) data += bz2c.flush() assert self.decompress(data) == self.TEXT
def _read_line_view(cls): line = cls._read_line_raw() if not isinstance(line, memoryview): line = memoryview(line) return line
def sg_audio_main():
    """Open an encrypted audio session with the device, record incoming
    PCM frames to pickup.pcm for DURATION seconds, then close the session."""
    # Create a buffer for socket IO.
    buf = memoryview(bytearray(SG_PKT_BUFSZ))
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s, \
            socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as udp_sock:
        try:
            s.connect((DEVICE_IP, SG_NET_SERVER_PORT))
            s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        except OSError:
            print(' Can not connect to the device {}.'.format(DEVICE_IP))
            return -1
        # Create AES cryptor by device security key.
        dev_cryptor = AES.new(sg_gen_security_key(DEVICE_PASSWORD), AES.MODE_ECB)
        #######################################################################
        # This dict is used to find the AES decryptor by a session ID.
        session_key_dct = {}
        udp_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        udp_sock.bind((LOCAL_IP, LOCAL_PORT))
        # Create a new session ID and session key
        session_id = random.randint(1, 0xFFFFFFFF)
        session_key = os.urandom(16)
        # Register the session ID, connect it to the AES cryptor.
        session_key_dct[session_id] = AES.new(session_key, AES.MODE_ECB)
        # Start audio transmission.
        sg_net_request(s, buf, dev_cryptor, dict(
            cmd='open_audio',
            session_id=session_id,
            session_key=base64.b64encode(session_key).decode(),
            server_addr='{}:{}'.format(LOCAL_IP, LOCAL_PORT),
            output_channel=1,
            audio_encoder=3,  # SG_AUDIO_PCMS24LE
            audio_channels=1,  # mono
            audio_frame_size=320,  # samples per frame
            audio_sample_rate=16000,
            audio_sample_bits=24,
            audio_bitrate=0,  # no use for PCM
            retrans_timeout=1000,  # threshold of retransmission timeout, in milliseconds
        ))
        # Start receiving audio frames
        packets = {}
        with open("pickup.pcm", "wb") as f:
            current_sn = None
            start = time.time()
            n = 0
            while start + DURATION >= time.time():
                try:
                    data, addr = sg_net_udp_recv(udp_sock, buf)
                    # The session ID is not encrypted.
                    sid, = struct.unpack_from('<I', data, SG_PKT_HEADSZ)
                    # Get the AES cryptor of the session
                    session_cryptor = session_key_dct[sid]
                    # Parse the packet
                    pkt = sg_net_parse_packet(session_cryptor, data)
                    n += 1
                    if n % 100 == 0 and current_sn is not None:
                        # Test audio ACK
                        ack = sg_net_audio_ack(buf[SG_PKT_HEADSZ:], session_id,
                                               [current_sn, current_sn - 1, current_sn - 2])
                        udp_sock.sendto(sg_net_build_packet(
                            buf, session_cryptor, ack,
                            packet_type=SG_PKT_TYPE_AUDIO_ACK), addr)
                        print("****************** ACK", current_sn)
                    # Packets lost ?
                    if current_sn is None:
                        # The first frame.
                        current_sn = pkt.sn
                    else:
                        # Signed 32-bit wrap-around distance to the last sn.
                        delta = (pkt.sn - current_sn) & 0xFFFFFFFF
                        if delta & 0x80000000:
                            delta -= 0x100000000
                        if delta == 1:
                            # Sn is continuous.
                            current_sn += 1
                        elif delta < 0:
                            print("****************** delta", delta, pkt.sn)
                            continue
                        elif delta > 0:
                            print("****************** delta", delta, pkt.sn)
                            continue
                    print(
                        pkt.session_id, pkt.sn, pkt.timestamp / 1000,
                        pkt.output_channel, pkt.audio_encoder,
                        pkt.audio_channels, pkt.audio_sample_rate,
                        pkt.audio_frame_size, pkt.audio_sample_bits,
                        len(pkt.payload)
                    )
                    # Save PCM data to file.
                    if pkt.session_id == session_id:
                        f.write(pkt.payload)
                except (ValueError, IndexError) as e:
                    # Malformed/undecryptable packet: log and keep receiving.
                    print(e)
        # Close the session
        del session_key_dct[session_id]
        sg_net_request(s, buf, dev_cryptor, dict(cmd='close_audio', session_id=session_id))
def _get_stream_readbuff_data(stream): readbuf = stream.read_buffer data = memoryview( readbuf)[stream.read_buffer_first:stream.read_buffer_last] stream.consume(len(data)) return data
def parse(data):
    """Return a Packet class appropriate to what's in the buffer."""
    with memoryview(data) as buf:
        # The first two bytes identify the packet type.
        kind = Opcode(buf[0:2])
        return PACKETS[kind].parse(buf)
# dict d = {"name": "John", "age": 36, "Sex": "Male"} print(d) print(type(d)) # set s = {"apple", "banana", "cherry"} print(s) print(type(s)) # frozenset fs = frozenset({"apple", "banana", "cherry"}) print(fs) print(type(fs)) # bool bnT = True bnF = False print(bnT) print(bnF) print(type(bnT)) # bytes bt = b"Hello" print(bt) print(type(bt)) # byteArray btA = bytearray(5) print(btA) print(type(btA)) # memoryView mv = memoryview(bytes(5)) print(mv) print(type(mv))
def drawgrey(self):
    # self.camera.setHpr(180,0,0)
    # Overwrite the texture's RAM image in place with the current frame's
    # pixels; the memoryview gives writable, zero-copy access to the buffer.
    frame_bytes = self.img1.tobytes()
    memoryview(self.tex0.modify_ram_image())[:] = frame_bytes
def test_memoryview():
    # Exercise `method` both through a memoryview wrapper and with the raw
    # bytearray; only the second result is kept and printed.
    backing = bytearray(range(8))
    view = memoryview(backing)
    output = method(view, workaround=True)
    output = method(backing, workaround=True)
    print(output)
def __init__(self, geom_input, srs=None):
    """Initialize Geometry on either WKT or an OGR pointer as input."""
    str_instance = isinstance(geom_input, str)

    # If HEX, unpack input to a binary buffer.
    if str_instance and hex_regex.match(geom_input):
        geom_input = memoryview(bytes.fromhex(geom_input))
        str_instance = False

    # Constructing the geometry,
    if str_instance:
        wkt_m = wkt_regex.match(geom_input)
        json_m = json_regex.match(geom_input)
        if wkt_m:
            if wkt_m.group('srid'):
                # If there's EWKT, set the SRS w/value of the SRID.
                srs = int(wkt_m.group('srid'))
            if wkt_m.group('type').upper() == 'LINEARRING':
                # OGR_G_CreateFromWkt doesn't work with LINEARRING WKT.
                # See https://trac.osgeo.org/gdal/ticket/1992.
                g = capi.create_geom(OGRGeomType(wkt_m.group('type')).num)
                capi.import_wkt(
                    g, byref(c_char_p(wkt_m.group('wkt').encode())))
            else:
                g = capi.from_wkt(
                    byref(c_char_p(wkt_m.group('wkt').encode())), None,
                    byref(c_void_p()))
        elif json_m:
            g = self._from_json(geom_input.encode())
        else:
            # Seeing if the input is a valid short-hand string
            # (e.g., 'Point', 'POLYGON').
            OGRGeomType(geom_input)
            g = capi.create_geom(OGRGeomType(geom_input).num)
    elif isinstance(geom_input, memoryview):
        # WKB was passed in
        g = self._from_wkb(geom_input)
    elif isinstance(geom_input, OGRGeomType):
        # OGRGeomType was passed in, an empty geometry will be created.
        g = capi.create_geom(geom_input.num)
    elif isinstance(geom_input, self.ptr_type):
        # OGR pointer (c_void_p) was the input.
        g = geom_input
    else:
        raise GDALException(
            'Invalid input type for OGR Geometry construction: %s' %
            type(geom_input))

    # Now checking the Geometry pointer before finishing initialization
    # by setting the pointer for the object.
    if not g:
        raise GDALException('Cannot create OGR Geometry from input: %s' %
                            geom_input)
    self.ptr = g

    # Assigning the SpatialReference object to the geometry, if valid.
    if srs:
        self.srs = srs

    # Setting the class depending upon the OGR Geometry Type
    self.__class__ = GEO_CLASSES[self.geom_type.num]
# memoryview lets external data be consumed through an intermediate buffer
# rather than being copied into new memory.  Only bytes / bytearray
# (buffer-protocol) objects are supported.  Compared with copying the data,
# which consumes a lot of resources, this is fast.
a = 'A'.encode() * 1000
b = bytearray(a)
# b = bytes(a)
buff_b = memoryview(b)
buff_b[999] = 66  # writes through the view into the underlying bytearray
print(b)

v = memoryview(b'abcdefg')
print(v.readonly)  # whether the memory is read-only (bool)
# Bug fix: the original printed b.nbytes, but bytearray has no `nbytes`
# attribute (AttributeError); `nbytes` belongs to the memoryview.
print(v.nbytes)    # total byte count, == len(memoryview.tobytes())
print(v.format)    # if format is a native struct-module format specifier,
                   # elements are returned as the correct type
print(v.obj)       # the object the memoryview was created from
# The original object obj must be a type supporting the buffer protocol.
def decode_rle(bitmap, file, compression, y_range, width):
    """Helper to decode RLE images"""
    # pylint: disable=too-many-locals,too-many-nested-blocks,too-many-branches

    # RLE algorithm, either 8-bit (1) or 4-bit (2)
    #
    # Ref: http://www.fileformat.info/format/bmp/egff.htm

    is_4bit = compression == 2

    # This will store the 2-byte run commands, which are either an
    # amount to repeat and a value to repeat, or a 0x00 and command
    # marker.
    run_buf = bytearray(2)

    # We need to be prepared to load up to 256 pixels of literal image
    # data. (0xFF is max literal length, but odd literal runs are padded
    # up to an even byte count, so we need space for 256 in the case of
    # 8-bit.) 4-bit images can get away with half that.
    literal_buf = bytearray(128 if is_4bit else 256)

    # We iterate with numbers rather than a range because the "delta"
    # command can cause us to jump forward arbitrarily in the output
    # image.
    #
    # In theory RLE images are only stored in bottom-up scan line order,
    # but we support either.
    (range1, range2, range3) = y_range
    y = range1
    x = 0

    while y * range3 < range2 * range3:
        offset = y * width + x

        # We keep track of how much space is left in our row so that we
        # can avoid writing extra data outside of the Bitmap. While the
        # reference above seems to say that the "end run" command is
        # optional and that image data should wrap from one scan line to
        # the next, in practice (looking at the output of ImageMagick
        # and GIMP, and what Preview renders) the bitmap part of the
        # image can contain data that goes beyond the image’s stated
        # width that should just be ignored. For example, the 8bit RLE
        # file is 15px wide but has data for 16px.
        width_remaining = width - x

        file.readinto(run_buf)

        if run_buf[0] == 0:
            # A repeat length of "0" is a special command. The next byte
            # tells us what needs to happen.
            if run_buf[1] == 0:
                # end of the current scan line
                y = y + range3
                x = 0
            elif run_buf[1] == 1:
                # end of image
                break
            elif run_buf[1] == 2:
                # delta command jumps us ahead in the bitmap output by
                # the x, y amounts stored in the next 2 bytes.
                file.readinto(run_buf)

                x = x + run_buf[0]
                y = y + run_buf[1] * range3
            else:
                # command values of 3 or more indicate that many pixels
                # of literal (uncompressed) image data. For 8-bit mode,
                # this is raw bytes, but 4-bit mode counts in nibbles.
                literal_length_px = run_buf[1]

                # Inverting the value here to get round-up integer division
                if is_4bit:
                    read_length_bytes = -(-literal_length_px // 2)
                else:
                    read_length_bytes = literal_length_px

                # If the run has an odd length then there’s a 1-byte padding
                # we need to consume but not write into the output
                if read_length_bytes % 2 == 1:
                    read_length_bytes += 1

                # We use memoryview to artificially limit the length of
                # literal_buf so that readinto only reads the amount
                # that we want.
                literal_buf_mem = memoryview(literal_buf)
                file.readinto(literal_buf_mem[0:read_length_bytes])

                if is_4bit:
                    for i in range(0, min(literal_length_px, width_remaining)):
                        # Expanding the two nibbles of the 4-bit data
                        # into two bytes for our output bitmap.
                        if i % 2 == 0:
                            bitmap[offset + i] = literal_buf[i // 2] >> 4
                        else:
                            bitmap[offset + i] = literal_buf[i // 2] & 0x0F
                else:
                    # 8-bit values are just a raw copy (limited by
                    # what’s left in the row so we don’t overflow out of
                    # the buffer)
                    for i in range(0, min(literal_length_px, width_remaining)):
                        bitmap[offset + i] = literal_buf[i]
                x = x + literal_length_px
        else:
            # first byte was not 0, which means it tells us how much to
            # repeat the next byte into the output
            run_length_px = run_buf[0]

            if is_4bit:
                # In 4 bit mode, we repeat the *two* values that are
                # packed into the next byte. The repeat amount is based
                # on pixels, not bytes, though, so if we were to repeat
                # 0xab 3 times, the output pixel values would be: 0x0a
                # 0x0b 0x0a (notice how it ends at 0x0a) rather than
                # 0x0a 0x0b 0x0a 0x0b 0x0a 0x0b
                run_values = [run_buf[1] >> 4, run_buf[1] & 0x0F]
                for i in range(0, min(run_length_px, width_remaining)):
                    bitmap[offset + i] = run_values[i % 2]
            else:
                run_value = run_buf[1]
                for i in range(0, min(run_length_px, width_remaining)):
                    bitmap[offset + i] = run_value

            x = x + run_length_px
b'\x66\xfe\x00\x00\x00\x00\x00\xc6\xee\xfe\xfe\xd6\xc6\xc6\xc6\xc6'\ b'\x00\x00\x00\x00\x00\xc6\xe6\xf6\xfe\xde\xce\xc6\xc6\xc6\x00\x00'\ b'\x00\x00\x00\x38\x6c\xc6\xc6\xc6\xc6\xc6\x6c\x38\x00\x00\x00\x00'\ b'\x00\xfc\x66\x66\x66\x7c\x60\x60\x60\xf0\x00\x00\x00\x00\x00\x7c'\ b'\xc6\xc6\xc6\xc6\xd6\xde\x7c\x0c\x0e\x00\x00\x00\x00\xfc\x66\x66'\ b'\x66\x7c\x6c\x66\x66\xe6\x00\x00\x00\x00\x00\x7c\xc6\xc6\x60\x38'\ b'\x0c\xc6\xc6\x7c\x00\x00\x00\x00\x00\x7e\x7e\x5a\x18\x18\x18\x18'\ b'\x18\x3c\x00\x00\x00\x00\x00\xc6\xc6\xc6\xc6\xc6\xc6\xc6\xc6\x7c'\ b'\x00\x00\x00\x00\x00\xc6\xc6\xc6\xc6\xc6\xc6\x6c\x38\x10\x00\x00'\ b'\x00\x00\x00\xc6\xc6\xc6\xc6\xd6\xd6\xfe\x7c\x6c\x00\x00\x00\x00'\ b'\x00\xc6\xc6\x6c\x38\x38\x38\x6c\xc6\xc6\x00\x00\x00\x00\x00\x66'\ b'\x66\x66\x66\x3c\x18\x18\x18\x3c\x00\x00\x00\x00\x00\xfe\xc6\x8c'\ b'\x18\x30\x60\xc2\xc6\xfe\x00\x00\x00\x00\x00\x3c\x30\x30\x30\x30'\ b'\x30\x30\x30\x3c\x00\x00\x00\x00\x00\x80\xc0\xe0\x70\x38\x1c\x0e'\ b'\x06\x02\x00\x00\x00\x00\x00\x3c\x0c\x0c\x0c\x0c\x0c\x0c\x0c\x3c'\ b'\x00\x00\x00\x10\x38\x6c\xc6\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00\x30'\ b'\x30\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\ b'\x00\x00\x78\x0c\x7c\xcc\xcc\x76\x00\x00\x00\x00\x00\xe0\x60\x60'\ b'\x78\x6c\x66\x66\x66\x7c\x00\x00\x00\x00\x00\x00\x00\x00\x7c\xc6'\ b'\xc0\xc0\xc6\x7c\x00\x00\x00\x00\x00\x1c\x0c\x0c\x3c\x6c\xcc\xcc'\ b'\xcc\x76\x00\x00\x00\x00\x00\x00\x00\x00\x7c\xc6\xfe\xc0\xc6\x7c'\ b'\x00\x00\x00\x00\x00\x38\x6c\x64\x60\xf0\x60\x60\x60\xf0\x00\x00'\ b'\x00\x00\x00\x00\x00\x00\x76\xcc\xcc\xcc\x7c\x0c\xcc\x78\x00\x00'\ b'\x00\xe0\x60\x60\x6c\x76\x66\x66\x66\xe6\x00\x00\x00\x00\x00\x18'\ b'\x18\x00\x38\x18\x18\x18\x18\x3c\x00\x00\x00\x00\x00\x06\x06\x00'\ b'\x0e\x06\x06\x06\x06\x66\x66\x3c\x00\x00\x00\xe0\x60\x60\x66\x6c'\ b'\x78\x6c\x66\xe6\x00\x00\x00\x00\x00\x38\x18\x18\x18\x18\x18\x18'\ FONT = memoryview(_FONT)
def read_gyro(self):
    """Returns gyroscope vector in degrees/sec."""
    # Burst-read the three 16-bit axis registers into the pre-allocated
    # scratch buffer (0x80 presumably sets the register auto-increment
    # bit — TODO confirm against the datasheet), then scale raw counts
    # to degrees per second.
    raw = memoryview(self.scratch_int)
    self.i2c.readfrom_mem_into(self.address_gyro, OUT_G | 0x80, raw)
    scale = self.scale_gyro
    return (raw[0]/scale, raw[1]/scale, raw[2]/scale)
def read_accel(self):
    """Returns acceleration vector in gravity units (9.81m/s^2)."""
    # NOTE(review): the accelerometer is read through address_gyro; on
    # IMUs where accel and gyro share one I2C address this is
    # intentional — confirm against the device datasheet.
    raw = memoryview(self.scratch_int)
    self.i2c.readfrom_mem_into(self.address_gyro, OUT_XL | 0x80, raw)
    scale = self.scale_accel
    return (raw[0]/scale, raw[1]/scale, raw[2]/scale)
# Ignore SSL certificate errors (For HTTPS) ctx = ssl.create_default_context() ctx.check_hostname = False ctx.verify_mode = ssl.CERT_NONE fh = open("where.data") count = 0 for line in fh: if count > 200: print('Retrieved 200 locations, restart to retrieve more') break address = line.strip() print('') cur.execute("SELECT geodata FROM Locations WHERE address= ?", (memoryview(address.encode()), )) try: data = cur.fetchone()[0] print("Found in database ", address) continue except: pass parms = dict() parms["address"] = address if api_key is not False: parms['key'] = api_key url = serviceurl + urllib.parse.urlencode(parms) print('Retrieving', url) # uh = urllib.request.urlopen(url, context=ctx)
def test_compress_memoryview():
    """A large, highly compressible memoryview should shrink when compressed."""
    view = memoryview(b"0" * 1000000)
    compression, payload = maybe_compress(view)
    # maybe_compress may decline to compress (compression falsy); only
    # assert a size reduction when it actually compressed.
    if compression:
        assert len(payload) < len(view)
print('('+str(len(html))+')', end=' ') soup = BeautifulSoup(html, "html.parser") except KeyboardInterrupt: print('') print('Program interrupted by user...') break except: print("Unable to retrieve or parse page") cur.execute('UPDATE Pages SET error=-1 WHERE url=?', (url, ) ) conn.commit() continue cur.execute('INSERT OR IGNORE INTO Pages (url, html, new_rank) VALUES ( ?, NULL, 1.0 )', ( url, ) ) # inserts new entry into pages table, can specify some inputs of DB inputs on leave others as vars to be loaded cur.execute('UPDATE Pages SET html=? WHERE url=?', (memoryview(html), url ) ) # update require set= because need to know what value that currently exists going to overwrite with your new input # ? = placeholder for tuple variable one sets conn.commit() # Retrieve all of the anchor tags tags = soup('a') count = 0 for tag in tags: href = tag.get('href', None) if ( href is None ) : continue # Resolve relative references like href="/contact" up = urlparse(href) # this below codes looks at all the returned links and filters them based on if desired or not if ( len(up.scheme) < 1 ) : href = urljoin(url, href)
def frombytes(cls, octets):
    # The first byte encodes the array typecode; the remainder is raw
    # element data.  Casting the memoryview reinterprets those bytes
    # without copying, and the view is passed straight to the
    # constructor — no * unpacking needed.
    typecode = chr(octets[0])
    payload = memoryview(octets[1:])
    return cls(payload.cast(typecode))
def unpack(self, data):
    """Unpack every field in declaration order; return the leftover bytes."""
    view = memoryview(data)
    # Each field consumes its leading portion of the view and hands back
    # the remainder for the next field.
    for field in self.fields.values():
        view = field.unpack(view)
    return bytes(view)  # remaining data
def __repr__(self):
    # Hex-formatted start address plus a dump of the section payload
    # produced by the configured `self.repr` formatter (reprlib-style,
    # presumably — it exposes a .repr() method).
    payload = self.repr.repr(memoryview(self.data).tobytes())
    return "Section(address = 0X{0:08X}, length = {1:d}, data = {2})".format(
        self.start_address, self.length, payload)
def func():
    # Wrap the concatenation result in a zero-copy view.
    combined = a + b
    return memoryview(combined)
def extract_data(self, graphics_engine):
    """Pull the texture back from the GPU and expose it as a float view."""
    graphics_engine.extract_texture_data(self._buffer, self._gsg)
    ram_image = self._buffer.get_ram_image()
    # Cast the raw byte buffer to 32-bit floats without copying.
    return memoryview(ram_image).cast('f')
""" Demonstrates how USM allocated memory can be accessed from the host in a Python program. """ import dpctl.memory as dpmem # USM-shared and USM-host pointers are host-accessible, # meaning they are accessible from Python, therefore # they implement Pyton buffer protocol # allocate 1K of USM-shared buffer ms = dpmem.MemoryUSMShared(1024) # create memoryview into USM-shared buffer msv = memoryview(ms) # populate buffer from host one byte at a type for i in range(len(ms)): ir = i % 256 msv[i] = ir ** 2 % 256 mh = dpmem.MemoryUSMHost(64) mhv = memoryview(mh) # copy content of block of USM-shared buffer to # USM-host buffer mhv[:] = msv[78 : 78 + len(mh)] print("Byte-values of the USM-host buffer") print(list(mhv))
def _get_memory(string, offset):
    # Zero-copy suffix view of the buffer starting at `offset`.
    view = memoryview(string)
    return view[offset:]
def test_Data_buffer(data): import numpy as np assert isinstance(np.array(data), np.ndarray) assert isinstance(bytes(data), bytes) assert isinstance(memoryview(data), memoryview)
def func():
    # View the elementwise sum without copying the underlying buffer.
    summed = math_ops.add_v2(a, b)
    return memoryview(summed)