def validate_predicate(self, predicate):
    """Serialize the predicate and verify its IdentifiedDataSerializable layout.

    Checks the serializer type id and the factory/class ids embedded in
    the serialized binary form.
    """
    data = self.service.to_data(predicate)
    # -2 is the serializer type id for IdentifiedDataSerializable.
    self.assertEqual(-2, data.get_type())
    raw = data.to_bytes()
    # Layout: 4 (partition hash) + 4 (serializer type) + 1 (is_identified)
    # + 4 (factory id) + 4 (class id) + payload (if any)
    self.assertTrue(len(raw) >= 17)
    self.assertEqual(predicate.get_factory_id(), BE_INT.unpack_from(raw, 9)[0])
    self.assertEqual(predicate.get_class_id(), BE_INT.unpack_from(raw, 13)[0])
def get_type(self):
    """Returns serialization type of binary form.

    Returns:
        int: Serialization type of binary form.
    """
    # A non-empty buffer carries the type id at TYPE_OFFSET.
    if len(self.buffer) > 0:
        return BE_INT.unpack_from(self.buffer, TYPE_OFFSET)[0]
    return CONSTANT_TYPE_NULL
def has_partition_hash(self):
    """
    Determines whether this Data has partition hash or not.

    :return: (bool), ``true`` if Data has partition hash, ``false`` otherwise.
    """
    buf = self._buffer
    # A hash can only be present when the buffer is large enough to
    # contain the heap-data header.
    if buf is None or len(buf) < HEAP_DATA_OVERHEAD:
        return False
    # Zero in the partition-hash slot means "not set".
    return BE_INT.unpack_from(buf, PARTITION_HASH_OFFSET)[0] != 0
def get_type(self):
    """
    Returns serialization type of binary form.

    :return: Serialization type of binary form.
    """
    # An empty Data has no header, so report the NULL type.
    if self.total_size() == 0:
        return CONSTANT_TYPE_NULL
    type_id = BE_INT.unpack_from(self._buffer, TYPE_OFFSET)[0]
    return type_id
def get_partition_hash(self):
    """
    Returns partition hash calculated for serialized object.
    Partition hash is used to determine partition of a Data and is calculated using

    * PartitioningStrategy during serialization.
    * If partition hash is not set then hash_code() is used.

    :return: partition hash
    """
    # Fall back to the generic hash when no explicit hash was stored.
    if not self.has_partition_hash():
        return self.hash_code()
    return BE_INT.unpack_from(self._buffer, PARTITION_HASH_OFFSET)[0]
def get_partition_hash(self):
    """Returns partition hash calculated for serialized object.

    Partition hash is used to determine partition of a Data and is
    calculated using:

    - PartitioningStrategy during serialization.
    - If partition hash is not set then hash_code() is used.

    Returns:
        int: Partition hash.
    """
    stored = BE_INT.unpack_from(self.buffer, PARTITION_HASH_OFFSET)[0]
    # Zero means "not set"; only then fall back to the generic hash.
    return stored if stored != 0 else self.hash_code()
def test_metrics_blob(self):
    """Verify the client sends a structurally valid metrics blob.

    Starts a client with statistics enabled, waits for a statistics
    collection, then walks the metrics blob according to its binary
    contract (version, dictionary section, metric count, metrics
    section) and decompresses both zlib sections to confirm the
    payload is well-formed.
    """
    skip_if_client_version_older_than(self, "4.2.1")
    map_name = random_string()
    client = HazelcastClient(
        cluster_name=self.cluster.id,
        cluster_connect_timeout=30.0,
        statistics_enabled=True,
        statistics_period=self.STATS_PERIOD,
        near_caches={
            map_name: {},
        },
    )
    # Fix: the original called client.shutdown() only on the success
    # path, so any failed assertion leaked the client's connections
    # and background threads. try/finally guarantees cleanup.
    try:
        client_uuid = client._connection_manager.client_uuid
        client.get_map(map_name).blocking()
        time.sleep(2 * self.STATS_PERIOD)
        response = self.wait_for_statistics_collection(client_uuid, get_metric_blob=True)
        result = bytearray(response.result)
        # We will try to decompress the blob according to its contract
        # to verify we have sent something that makes sense.
        pos = 2  # Skip the version
        dict_buf_size = BE_INT.unpack_from(result, pos)[0]
        pos += INT_SIZE_IN_BYTES
        dict_buf = result[pos:pos + dict_buf_size]
        self.assertTrue(len(dict_buf) > 0)
        pos += dict_buf_size
        pos += INT_SIZE_IN_BYTES  # Skip metric count
        metrics_buf = result[pos:]
        self.assertTrue(len(metrics_buf) > 0)
        # If we are able to decompress both sections, we count the
        # blob as valid. zlib.decompress raises on corrupt data.
        zlib.decompress(dict_buf)
        zlib.decompress(metrics_buf)
    finally:
        client.shutdown()
def read(self, inp):
    """Deserialize a CustomByteArraySerializable from a raw byte array.

    The first 4 bytes hold a big-endian int, the next 4 a big-endian
    float.
    """
    raw = inp.read_byte_array()
    int_field = BE_INT.unpack_from(raw, 0)[0]
    float_field = BE_FLOAT.unpack_from(raw, 4)[0]
    return CustomByteArraySerializable(int_field, float_field)