def encode(digest, code, length=None):
    """
    Encode a hash digest along with the specified function code

    :param bytes digest: hash digest
    :param (int or str) code: hash function code
    :param int length: hash digest length
    :return: encoded multihash
    :rtype: bytes
    :raises TypeError: when the digest is not a bytes object
    :raises ValueError: when the digest length is not correct
    """
    hash_code = coerce_code(code)

    if not isinstance(digest, bytes):
        raise TypeError('digest must be a bytes object, not {}'.format(type(digest)))

    if length is None:
        length = len(digest)
    elif length != len(digest):
        raise ValueError('digest length should be equal to specified length')

    return varint.encode(hash_code) + varint.encode(length) + digest
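# Hedged usage sketch (not part of the original snippets): the multihash layout
# produced above is varint(function code) + varint(digest length) + digest.
# 0x12 is the registered multihash code for sha2-256.
import hashlib
import varint

digest = hashlib.sha256(b"hello").digest()
multihash = varint.encode(0x12) + varint.encode(len(digest)) + digest
assert multihash[0] == 0x12 and multihash[1] == 32 and multihash[2:] == digest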
def __serialize(self, value, itemType):
    if itemType == FieldTypes.VARINT:
        return varint.encode(value)
    if itemType == FieldTypes.APPLY_AREA_ENUM:
        return varint.encode(value)
    if itemType & FieldTypes.TYPED_ARRAY:
        return self.__serializeArray(value, itemType ^ FieldTypes.TYPED_ARRAY)
    if itemType >= FieldTypes.CUSTOM_TYPE_OFFSET:
        return self.__serializeCustomType(value)
    raise SerializationException('Unsupported field type %d' % (itemType,))
def get_coded_output(self):
    coded_output = b''
    coded_output += MAGIC_PACKET
    coded_output += varint.encode(self.byte_size)
    coded_output += varint.encode(self.packet_type)
    coded_output += varint.encode(self.message_type)
    coded_output += varint.encode(self.crypto_type)
    coded_output += varint.encode(self.connection_id)
    coded_output += self.byted_message
    return coded_output
def encode(self):
    # Encode the RecordHeader Key and Value
    klen_var = ord(varint.encode((len(self._key) << 1) ^
                                 (len(self._key) >> 31)))
    vlen_var = ord(varint.encode((len(self._value) << 1) ^
                                 (len(self._value) >> 31)))

    # Return the encoded RecordHeader
    return struct.pack(
        "> {} {}s {} {}s".format(val2fmt(klen_var), len(self._key),
                                 val2fmt(vlen_var), len(self._value)),
        klen_var, self._key, vlen_var, self._value)
def string_to_bytes(string):
    bs = []
    for proto, codec, value in string_iter(string):
        bs.append(varint.encode(proto.code))
        if value is not None:
            try:
                buf = codec.to_bytes(proto, value)
            except Exception as exc:
                six.raise_from(
                    exceptions.StringParseError(str(exc), string, proto.name, exc), exc)
            if codec.SIZE == LENGTH_PREFIXED_VAR_SIZE:
                bs.append(varint.encode(len(buf)))
            bs.append(buf)
    return b''.join(bs)
def pubkey_to_msg(self, pubkey: str):
    key_bytes = encoding.to_bytes(pubkey)
    return (
        encoding.to_bytes(TYPE_PREFIX["PubKey"])
        + encode(len(key_bytes))
        + key_bytes
    )
def write(self, msg):
    length = len(msg)
    try:
        self.sock.send(varint.encode(length))
        self.sock.send(msg)
    except Exception as e:
        print(e, file=sys.stderr)
def build_iscc_id(ledger_id, iscc_code, counter):
    """Create ISCC-ID from a full ISCC for a given ledger with a given counter"""
    digest = iscc_decode(iscc_code)
    cid = digest[10:13]
    did = digest[20:22]
    iid = digest[29:31]
    return iscc.encode(ledger_id + cid + did + iid + varint.encode(counter))
def string_to_bytes(string):
    if not string:
        return b''
    bs = []
    for proto, codec, value in string_iter(string):
        bs.append(varint.encode(proto.code))
        if value is not None:
            try:
                buf = codec.to_bytes(proto, value)
            except Exception as exc:
                six.raise_from(
                    exceptions.StringParseError(str(exc), string, proto.name, exc), exc)
            if codec.SIZE == LENGTH_PREFIXED_VAR_SIZE:
                bs.append(varint.encode(len(buf)))
            bs.append(buf)
    return b''.join(bs)
def test_varint_decoder():
    buffer = b""
    numbers = [12, 5168, 4984531, 151]
    for n in numbers:
        buffer += varint.encode(n)
    ans = varint_decoder(buffer)
    assert ans == numbers
def parse_data_var(pos, msg):
    size = varint.decode_bytes(msg[pos:])
    size_bytes_len = len(varint.encode(size))
    pos = pos + size_bytes_len
    data = msg[pos:pos + size]
    pos = pos + size
    return pos, data
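# Hedged round-trip sketch (assumes parse_data_var above is importable): a
# length-prefixed field is varint(len(payload)) + payload, and parsing returns
# the payload plus the position just past it.
import varint

payload = b"example"
msg = varint.encode(len(payload)) + payload
pos, data = parse_data_var(0, msg)
assert data == payload and pos == len(msg)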
def create_connection(ip_address):
    global prometheus_ip
    message = pm_proto_pb2.pmproto()
    message.type = pm_proto_pb2.pmproto.PING
    print("Connecting to ", ip_address)
    print(message)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((str(ip_address), PM_PROTO_PORT))
    msg = message.SerializeToString()
    print("sending", len(msg), "bytes")
    packed_len = varint.encode(len(msg))
    print("sent", s.send(packed_len + msg), "bytes")
    #s.sendto("hello", (ip_address, PM_PROTO_PORT))
    #try:
    if True:
        s.settimeout(1.0)
        msg_len = decode_socket(s)
        msg_buf = socket_read_n(s, msg_len)
        message.ParseFromString(msg_buf)
        print("Got", len(msg_buf), "bytes")
        print("Msg len: ", msg_len)
        print(message)
        print(msg_len)
        if message.type != pm_proto_pb2.pmproto.PING:
            prometheus_ip = ip_address
        else:
            print("No response!")
    s.settimeout(None)
def test_protocol_with_code():
    proto = protocols.protocol_with_code(protocols.P_IP4)
    assert proto.name == 'ip4'
    assert proto.code == protocols.P_IP4
    assert proto.size == 32
    assert proto.vcode == varint.encode(protocols.P_IP4)
    assert hash(proto) == protocols.P_IP4

    with pytest.raises(exceptions.ProtocolNotFoundError):
        proto = protocols.protocol_with_code(1234)
def generate_StdTxMsg(self):
    """Generate StdTx"""
    std = StdTx()
    std.msgs.extend([self.stdMsg])
    std.signatures.extend([self.stdSignature])
    std.memo = self.memo
    std.source = 1
    std.data = self.data

    proto_bytes = std.SerializeToString()
    type_bytes = encoding.to_bytes(TYPE_PREFIX["StdTx"])
    return encode(len(proto_bytes) + len(type_bytes)) + type_bytes + proto_bytes
def remove_prefix(bytes_):
    """
    Removes the prefix from prefixed data

    :param bytes bytes_: multicodec prefixed data bytes
    :return: data bytes with the prefix removed
    :rtype: bytes
    """
    prefix_int = extract_prefix(bytes_)
    prefix = varint.encode(prefix_int)
    return bytes_[len(prefix):]
def encrypt(data, encoding=""): """编码 :param data 待编码的对象 :param encoding 字符串编码 """ if isinstance(data, bytes): return data length_field = lambda v: varint.encode(len(v)) + v rv = b"" for k, v in data.items(): if isinstance(v, int): wire_type = WireType.VARINT value = varint.encode(v) elif isinstance(v, str): wire_type = WireType.LENGTHDELIMITED value = length_field(v.encode(encoding)) elif isinstance(v, bytes): wire_type = WireType.LENGTHDELIMITED value = length_field(v) elif isinstance(v, list): wire_type = WireType.LENGTHDELIMITED rv += b"".join([ _set_field(k, wire_type) + length_field(encrypt(o)) for o in v ]) elif isinstance(v, dict): wire_type = WireType.LENGTHDELIMITED value = length_field(encrypt(v, encoding)) elif isinstance(v, float): wire_type = WireType.FLOAT value = struct.pack("f", v) elif isinstance(v, Decimal): wire_type = WireType.DOUBLE value = struct.pack("d", v) else: raise InvalidPBError("invalid pb") if not isinstance(v, list): rv += _set_field(k, wire_type) + value return rv
def _gcm_send_login(self, android_id, security_token):
    lr = mcs_pb2.LoginRequest()
    lr.auth_service = 2
    lr.auth_token = str(security_token)
    lr.id = "android-11"
    lr.domain = "mcs.android.com"
    lr.device_id = "android-%0.2X" % android_id
    lr.resource = str(android_id)
    lr.user = str(android_id)
    lr.account_id = android_id

    data = lr.SerializeToString()
    self.sock.sendall(struct.pack('BB', 41, 2) + varint.encode(len(data)) + data)
async def _createPushConnection(
        cred: GoogleCredentials
) -> Tuple[bool, Optional[asyncio.streams.StreamReader], Optional[asyncio.streams.StreamWriter]]:
    host = "alt3-mtalk.google.com"
    port = 5228
    try:
        streams: Tuple[
            asyncio.streams.StreamReader,
            asyncio.streams.StreamWriter] = await asyncio.wait_for(
                asyncio.open_connection(host, port, ssl=ssl.create_default_context()),
                timeout=3)
        reader, writer = streams
    except asyncio.TimeoutError:
        return False, None, None

    loginReq = Application._getLoginData(cred)
    writer.write(bytes([41]))
    writer.write(bytes([2]))
    writer.write(varint.encode(len(loginReq)))
    writer.write(loginReq)

    _ = await Application._readStream(reader, timeout=1, length=1)
    if not _:
        writer.close()
        return False, None, None

    prefix = await Application._readStream(reader, timeout=3, length=1)
    if not prefix:
        return False, None, None
    if prefix != b'\x03':
        writer.close()
        await writer.wait_closed()
        return False, None, None

    length = await Application._readMessageLength(reader)
    message = await Application._readStream(reader, timeout=3, length=length)
    if not message:
        return False, None, None

    resp = LoginResponse()
    resp.ParseFromString(message)
    return True, reader, writer
def get_prefix(multicodec):
    """
    Returns the prefix for a given multicodec

    :param str multicodec: multicodec codec name
    :return: the prefix for the given multicodec
    :rtype: bytes
    :raises ValueError: if an invalid multicodec name is provided
    """
    try:
        prefix = varint.encode(NAME_TABLE[multicodec])
    except KeyError:
        raise ValueError('{} multicodec is not supported.'.format(multicodec))
    return prefix
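# Hedged round-trip sketch (assumes the get_prefix/remove_prefix helpers above
# share the same NAME_TABLE): a multicodec prefix is just the varint of the
# codec's code; 0x55 is the registered code for 'raw'.
import varint

data = b"payload"
prefixed = varint.encode(0x55) + data          # equivalent to get_prefix('raw') + data
assert remove_prefix(prefixed) == data         # strips the varint prefix again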
def __serializeCustomType(self, obj):
    hasValue = 0
    offset = 1
    result = [b'\x00']
    for fieldName, fieldInfo in obj.fields.items():
        if fieldInfo.deprecated:
            offset <<= 1
            continue
        value = getattr(obj, fieldName)
        if value != fieldInfo.default:
            hasValue |= offset
            result.append(self.__serialize(value, fieldInfo.type))
        offset <<= 1
    result[0] = varint.encode(hasValue)
    return b''.join(result)
def _extract_pubkey(self, extra):
    if extra:
        if extra[0] == self.TX_EXTRA_TAG_PUBKEY:
            extra = extra[1:]
            self.offset += 1
            extra = self._pop_pubkey(extra)
        elif extra[0] == self.TX_EXTRA_TAG_ADDITIONAL_PUBKEYS:
            extra = extra[1:]
            self.offset += 1
            keycount = varint.decode_bytes(bytearray(extra))
            valen = len(varint.encode(keycount))
            extra = extra[valen:]
            self.offset += valen
            for i in range(keycount):
                extra = self._pop_pubkey(extra)
    return extra
def transact_message(msg):
    global prometheus_ip
    if prometheus_ip is not None:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((str(prometheus_ip), PM_PROTO_PORT))
        packed_len = varint.encode(len(msg))
        print(len(msg))
        s.send(packed_len + msg)
        try:
            s.settimeout(1.0)
            msg_len = decode_socket(s)
            msg_buf = socket_read_n(s, msg_len)
            return msg_buf
        except Exception:
            return None
    else:
        print("Connect first!")
def data_received(self, data):
    self.pending_data += data
    while len(self.pending_data) > 0:
        length = varint.decode_bytes(self.pending_data)
        length_bytes = len(varint.encode(length))
        if len(self.pending_data) < length + length_bytes:
            break
        data = self.pending_data[length_bytes:length_bytes + length]
        self.pending_data = self.pending_data[length_bytes + length:]
        msg = ProtocolMessage_pb2.ProtocolMessage()
        decrypted = self.decrypt(data)
        msg.ParseFromString(decrypted)
        self.message_received(msg)
def encode(self, data):
    data = data.encode('utf-8')
    self._encoder_buffer.write(varint.encode(len(data)))

    # Don't initialize the encoder before the first call to encode(), since
    # it writes the gzip header immediately and we need to insert the
    # message length prior to that happening.
    if self._encoder is None:
        self._encoder = GzipFile(fileobj=self._encoder_buffer,
                                 mode='wb',
                                 compresslevel=self._compression_level)

    self._encoder.write(data)
    self._encoder.flush()

    output = self._encoder_buffer.getvalue()
    reset_buffer(self._encoder_buffer)
    return output
def hashfileobject(f, sample_threshhold=SAMPLE_THRESHOLD, sample_size=SAMPLE_SIZE, hexdigest=False):
    # Get the file size from the file object
    f.seek(0, os.SEEK_END)
    size = f.tell()
    f.seek(0, os.SEEK_SET)

    if size < sample_threshhold or sample_size < 1:
        data = f.read()
    else:
        data = f.read(sample_size)
        f.seek(size // 2)
        data += f.read(sample_size)
        f.seek(-sample_size, os.SEEK_END)
        data += f.read(sample_size)

    hash_tmp = mmh3.hash_bytes(data)
    hash_ = hash_tmp[7::-1] + hash_tmp[16:7:-1]
    enc_size = varint.encode(size)
    digest = enc_size + hash_[len(enc_size):]
    return binascii.hexlify(digest).decode() if hexdigest else digest
def _scan_pubkeys(svk, psk, stealth_address, amount, encamount):
    # Note: this helper is presumably nested inside a method; `self`, `idx` and
    # `addr` are expected to come from the enclosing scope.
    for keyidx, tx_key in enumerate(self.pubkeys):
        # precompute
        svk_2 = ed25519.scalar_add(svk, svk)
        svk_4 = ed25519.scalar_add(svk_2, svk_2)
        svk_8 = ed25519.scalar_add(svk_4, svk_4)
        #
        hsdata = b"".join([
            ed25519.scalarmult(svk_8, tx_key),
            varint.encode(idx),
        ])
        Hs_ur = keccak_256(hsdata).digest()
        Hs = ed25519.scalar_reduce(Hs_ur)
        k = ed25519.edwards_add(
            ed25519.scalarmult_B(Hs),
            psk,
        )
        if k != stealth_address:
            continue
        if not encamount:
            # Tx ver 1
            return Payment(
                amount=amount,
                timestamp=self.timestamp,
                transaction=self,
                local_address=addr,
            )
        amount_hs = keccak_256(b"amount" + Hs).digest()
        xormask = amount_hs[:len(encamount)]
        dec_amount = bytearray(
            a ^ b for a, b in zip(*map(bytearray, (encamount, xormask))))
        int_amount = struct.unpack("<Q", dec_amount)[0]
        amount = from_atomic(int_amount)
        return Payment(
            amount=amount,
            timestamp=self.timestamp,
            transaction=self,
            local_address=addr,
        )
def _scan_pubkeys(svk, psk, stealth_address, amount, encamount):
    # Note: this helper is presumably nested inside a method; `self`, `idx` and
    # `addr` are expected to come from the enclosing scope.
    for keyidx, tx_key in enumerate(self.pubkeys):
        hsdata = b"".join([
            ed25519.encodepoint(
                ed25519.scalarmult(ed25519.decodepoint(tx_key),
                                   ed25519.decodeint(svk) * 8)),
            varint.encode(idx),
        ])
        Hs_ur = sha3.keccak_256(hsdata).digest()
        # sc_reduce32:
        Hsint_ur = ed25519.decodeint(Hs_ur)
        Hsint = Hsint_ur % ed25519.l
        Hs = ed25519.encodeint(Hsint)
        k = ed25519.encodepoint(
            ed25519.edwards_add(
                ed25519.scalarmult_B(Hsint),
                ed25519.decodepoint(psk),
            ))
        if k != stealth_address:
            continue
        if not encamount:
            # Tx ver 1
            return Payment(amount=amount,
                           timestamp=self.timestamp,
                           transaction=self,
                           local_address=addr)
        amount_hs = sha3.keccak_256(b"amount" + Hs).digest()
        xormask = amount_hs[:len(encamount)]
        dec_amount = bytearray(
            a ^ b for a, b in zip(*map(bytearray, (encamount, xormask))))
        int_amount = struct.unpack("<Q", dec_amount)[0]
        amount = from_atomic(int_amount)
        return Payment(amount=amount,
                       timestamp=self.timestamp,
                       transaction=self,
                       local_address=addr)
def hashfile(filename, sample_threshhold=SAMPLE_THRESHOLD, sample_size=SAMPLE_SIZE, hexdigest=False):
    size = os.path.getsize(filename)
    with open(filename, 'rb') as f:
        if size < sample_threshhold or sample_size < 1:
            data = f.read()
        else:
            data = f.read(sample_size)
            f.seek(size // 2)
            data += f.read(sample_size)
            f.seek(-sample_size, os.SEEK_END)
            data += f.read(sample_size)

    hash_tmp = mmh3.hash_bytes(data)
    hash_ = hash_tmp[7::-1] + hash_tmp[16:7:-1]
    enc_size = varint.encode(size)
    digest = enc_size + hash_[len(enc_size):]
    return binascii.hexlify(digest) if hexdigest else digest
def hashfileobject(f, sample_threshhold=SAMPLE_THRESHOLD, sample_size=SAMPLE_SIZE, hexdigest=False):
    # Get the file size from the file object
    f.seek(0, os.SEEK_END)
    size = f.tell()
    f.seek(0, os.SEEK_SET)

    if size < sample_threshhold or sample_size < 1:
        data = f.read()
    else:
        data = f.read(sample_size)
        f.seek(size // 2)
        data += f.read(sample_size)
        f.seek(-sample_size, os.SEEK_END)
        data += f.read(sample_size)

    hash_tmp = mmh3.hash_bytes(data)
    hash_ = hash_tmp[7::-1] + hash_tmp[16:7:-1]
    enc_size = varint.encode(size)
    digest = enc_size + hash_[len(enc_size):]
    f.seek(0, os.SEEK_SET)
    return binascii.hexlify(digest).decode() if hexdigest else digest
def split(self, maxsplit=-1):
    """Returns the list of individual path components this MultiAddr is made up of."""
    final_split_offset = -1
    results = []
    for idx, (offset, proto, codec, part_value) in enumerate(bytes_iter(self._bytes)):
        # Split at most `maxsplit` times
        if idx == maxsplit:
            final_split_offset = offset
            break

        # Re-assemble binary MultiAddr representation
        part_size = varint.encode(len(part_value)) if codec.SIZE < 0 else b""
        part = b"".join((proto.vcode, part_size, part_value))

        # Add MultiAddr with the given value
        results.append(self.__class__(part))

    # Add final item with remainder of MultiAddr if there is anything left
    if final_split_offset >= 0:
        results.append(self.__class__(self._bytes[final_split_offset:]))
    return results
def encode(self):
    # Encode the Attributes (unused)
    b = struct.pack("> b", 0)

    # Encode the TimeStampDelta
    ts_delta_var = ord(varint.encode((self._ts_delta << 1) ^
                                     (self._ts_delta >> 31)))
    b += struct.pack("> {}".format(varint2fmt(ts_delta_var)), ts_delta_var)

    # Encode the OffsetDelta
    offs_delta_var = ord(varint.encode((self._offs_delta << 1) ^
                                       (self._offs_delta >> 31)))
    b += struct.pack("> {}".format(varint2fmt(offs_delta_var)),
                     offs_delta_var)

    # Encode the Record Key and Value
    klen_var = ord(varint.encode((len(self._key) << 1) ^
                                 (len(self._key) >> 31)))
    vlen_var = ord(varint.encode((len(self._value) << 1) ^
                                 (len(self._value) >> 31)))
    b += struct.pack(
        "> {} {}s {} {}s".format(varint2fmt(klen_var), len(self._key),
                                 varint2fmt(vlen_var), len(self._value)),
        klen_var, self._key, vlen_var, self._value)

    # Encode the number of Headers
    nhdrs_var = ord(varint.encode((len(self._headers) << 1) ^
                                  (len(self._headers) >> 31)))
    b += struct.pack("> {}".format(varint2fmt(nhdrs_var)), nhdrs_var)

    # Encode the Headers
    for header in self._headers:
        b += header.encode()

    # Encode the Length
    len_var = ord(varint.encode((len(b) << 1) ^ (len(b) >> 31)))

    # Return the encoded Record
    return struct.pack("> {}".format(varint2fmt(len_var)), len_var) + b
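# Hedged sketch of the zigzag step used by the Record/RecordHeader encoders above:
# (n << 1) ^ (n >> 31) folds signed 32-bit values into small unsigned ints so that
# varint.encode() stays short for values near zero. zigzag32 is illustrative only,
# not part of the original code.
import varint

def zigzag32(n):
    # Mask to 32 bits so negative Python ints behave like int32 values.
    return ((n << 1) ^ (n >> 31)) & 0xFFFFFFFF

assert varint.encode(zigzag32(0)) == b"\x00"
assert varint.encode(zigzag32(-1)) == b"\x01"
assert varint.encode(zigzag32(1)) == b"\x02"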
def vcode(self):
    return varint.encode(self.code)
def test_get_codec_invalid_prefix(_, prefix):
    prefix_bytes = varint.encode(prefix)
    with pytest.raises(ValueError) as excinfo:
        get_codec(prefix_bytes)
    assert 'not present in the lookup table' in str(excinfo.value)
def code_to_varint(num):
    """Convert an integer to its varint encoding, returned as hex-encoded bytes."""
    return binascii.hexlify(varint.encode(num))
def _msg(msg):
    data = msg.SerializeToString()
    return varint.encode(len(data)) + data
def write_varint(self, value):
    self.write(varint.encode(value))
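# Hedged counterpart sketch (not part of the original snippets): a read_varint
# matching write_varint above, for any stream-like object with a read(n) method.
import io
import varint

def read_varint(stream):
    """Decode one unsigned LEB128/varint value from `stream`."""
    result, shift = 0, 0
    while True:
        byte = stream.read(1)[0]
        result |= (byte & 0x7F) << shift
        if not byte & 0x80:
            return result
        shift += 7

assert read_varint(io.BytesIO(varint.encode(300))) == 300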