def test_values_with_predicate(self):
    self.map.put("key-1", "value-1")
    self.map.put("key-2", "value-2")
    self.map.put("key-3", "value-3")
    with self.client.new_transaction() as tx:
        tx_map = tx.get_map(self.map.name)
        self.assertCountEqual(tx_map.values(predicate=sql("this == value-1")), ["value-1"])

def test_execute_on_entries_with_predicate(self):
    m = self._fill_map()
    expected_entry_set = [(key, "processed") if key < "key-5" else (key, m[key]) for key in m]
    expected_values = [(key, "processed") for key in m if key < "key-5"]
    values = self.map.execute_on_entries(EntryProcessor("processed"), sql("__key < 'key-5'"))
    six.assertCountEqual(self, expected_entry_set, self.map.entry_set())
    six.assertCountEqual(self, expected_values, values)

def test_predicate_portable_key(self):
    _map = self._fill_map()
    map_keys = list(_map.keys())

    predicate = sql("param_int >= 900")
    key_set = self.map.key_set(predicate)

    self.assertEqual(len(key_set), 100)
    for k in key_set:
        self.assertGreaterEqual(k.param_int, 900)
        self.assertIn(k, map_keys)

def test_predicate_portable_key(self):
    _map = self.fill_map()
    map_keys = list(_map.keys())

    predicate = sql("field >= 900")
    entries = self.map.entry_set(predicate)

    self.assertEqual(len(entries), 100)
    for k, v in entries:
        self.assertGreaterEqual(v.field, 900)
        self.assertIn(k, map_keys)

def test_add_entry_listener_with_predicate(self):
    collector = event_collector()
    self.map.add_entry_listener(
        predicate=sql("this == value1"), include_value=True, added_func=collector
    )
    self.map.put('key2', 'value2')
    self.map.put('key1', 'value1')

    def assert_event():
        self.assertEqual(len(collector.events), 1)
        event = collector.events[0]
        self.assertEntryEvent(event, key='key1', event_type=EntryEventType.ADDED, value='value1')

    self.assertTrueEventually(assert_event, 5)

def test_add_entry_listener_with_predicate(self):
    collector = event_collector()
    self.replicated_map.add_entry_listener(
        predicate=sql("this == value1"), added_func=collector
    )
    self.replicated_map.put("key2", "value2")
    self.replicated_map.put("key1", "value1")

    def assert_event():
        self.assertEqual(len(collector.events), 1)
        event = collector.events[0]
        self.assertEntryEvent(
            event, key="key1", event_type=EntryEventType.ADDED, value="value1"
        )

    self.assertTrueEventually(assert_event, 5)

def test_cluster_restart(self):
    client = self.create_client(
        {
            "cluster_name": self.cluster.id,
            "compact_serializers": [SomeFieldsSerializer([FieldDefinition(name="int32")])],
        }
    )
    m = client.get_map(random_string()).blocking()
    m.put(1, SomeFields(int32=42))

    self.rc.terminateMember(self.cluster.id, self.member.uuid)
    CompactOnClusterRestartTest.member = self.cluster.start_member()

    m.put(1, SomeFields(int32=42))
    obj = m.get(1)
    self.assertEqual(42, obj.int32)

    # Perform a query to make sure that the schema is available on the cluster
    self.assertEqual(1, len(m.values(sql("int32 == 42"))))

def test_sql(self):
    predicate = sql("this == 'value-1'")
    self.assertEqual(str(predicate), "SqlPredicate(sql='this == 'value-1'')")

def test_values_with_predicate(self):
    self.fill_map()
    self.assertEqual(self.map.values(sql("this == 'value-1'")), ["value-1"])

def test_key_set_with_predicate(self):
    self.fill_map()
    self.assertEqual(self.map.key_set(sql("this == 'value-1'")), ["key-1"])

def test_entry_set_with_predicate(self):
    self.fill_map()
    self.assertEqual(self.map.entry_set(sql("this == 'value-1'")), [("key-1", "value-1")])

"CustomStreamSerializable": _custom_serializable, "CustomByteArraySerializable": _custom_byte_array_serializable, "AnIdentifiedDataSerializable": _identified, "APortable": _portable, "ArrayList": [None, _non_null_list], "LinkedList": [None, _non_null_list], "TruePredicate": predicate.true(), "FalsePredicate": predicate.false(), "SqlPredicate": predicate.sql(_sql_string), "EqualPredicate": predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]), "NotEqualPredicate": predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]), "GreaterLessPredicate": predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]), "BetweenPredicate": predicate.between(_sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"]), "LikePredicate": predicate.like(_sql_string, _sql_string), "ILikePredicate": predicate.ilike(_sql_string, _sql_string), "InPredicate": predicate.in_(_sql_string, REFERENCE_OBJECTS["Integer"],
        self.age, self.active)


def generate_users(users):
    users.put("Rod", User("Rod", 19, True))
    users.put("Jane", User("Jane", 20, True))
    users.put("Freddy", User("Freddy", 23, True))


# Start the Hazelcast Client and connect to an already running Hazelcast Cluster on 127.0.0.1
hz = hazelcast.HazelcastClient(portable_factories={User.FACTORY_ID: {User.CLASS_ID: User}})
# Get a Distributed Map called "users"
users_map = hz.get_map("users").blocking()
# Add some users to the Distributed Map
generate_users(users_map)
# Create a Predicate from a String (a SQL like Where clause)
sql_query = sql("active AND age BETWEEN 18 AND 21")
# Creating the same Predicate as above but with a builder
criteria_query = and_(equal("active", True), between("age", 18, 21))
# Get result collections using the two different Predicates
result1 = users_map.values(sql_query)
result2 = users_map.values(criteria_query)
# Print out the results
print(result1)
print(result2)
# Shutdown this Hazelcast Client
hz.shutdown()

REFERENCE_OBJECTS["LocalTime"], REFERENCE_OBJECTS["OffsetDateTime"], ] REFERENCE_OBJECTS.update( { "AnInnerPortable": _inner_portable, "CustomStreamSerializable": _custom_serializable, "CustomByteArraySerializable": _custom_byte_array_serializable, "AnIdentifiedDataSerializable": _identified, "APortable": _portable, "ArrayList": [None, _non_null_list], "LinkedList": [None, _non_null_list], "TruePredicate": predicate.true(), "FalsePredicate": predicate.false(), "SqlPredicate": predicate.sql(_sql_string), "EqualPredicate": predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]), "NotEqualPredicate": predicate.not_equal(_sql_string, REFERENCE_OBJECTS["Integer"]), "GreaterLessPredicate": predicate.greater(_sql_string, REFERENCE_OBJECTS["Integer"]), "BetweenPredicate": predicate.between( _sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"] ), "LikePredicate": predicate.like(_sql_string, _sql_string), "ILikePredicate": predicate.ilike(_sql_string, _sql_string), "InPredicate": predicate.in_( _sql_string, REFERENCE_OBJECTS["Integer"], REFERENCE_OBJECTS["Integer"] ), "RegexPredicate": predicate.regex(_sql_string, _sql_string), "AndPredicate": predicate.and_( predicate.sql(_sql_string), predicate.equal(_sql_string, REFERENCE_OBJECTS["Integer"]),
import sys
import pprint

from analytics import factory
from hazelcast import HazelcastClient
from hazelcast.predicate import sql

if __name__ == '__main__':
    component = sys.argv[1]
    instant = int(sys.argv[2])

    client = HazelcastClient(portable_factories={1: factory})

    analytics = client.get_map('analytics')

    select = 'component = {component} AND instant < {instant}'.format(
        component=component, instant=instant
    )
    results = analytics.values(sql(select)).result()

    pprinter = pprint.PrettyPrinter()
    for result in results:
        pprinter.pprint(result.to_dic())

    client.shutdown()

def test_nested_attribute_query_sql_predicate(self):
    predicate = sql("limb.name == 'leg'")
    values = self.map.values(predicate)
    self.assertEqual(1, len(values))
    self.assertEqual("body2", values[0].name)

import hazelcast

from hazelcast.core import HazelcastJsonValue
from hazelcast.predicate import and_, greater, sql

client = hazelcast.HazelcastClient()

employees_map = client.get_map("employees").blocking()

alice = "{\"name\": \"Alice\", \"age\": 35}"
andy = "{\"name\": \"Andy\", \"age\": 22}"
bob = {"name": "Bob", "age": 37}

# HazelcastJsonValue can be constructed from JSON strings
employees_map.put(0, HazelcastJsonValue(alice))
employees_map.put(1, HazelcastJsonValue(andy))
# or from JSON serializable objects
employees_map.put(2, HazelcastJsonValue(bob))

# Employees whose name starts with 'A' and age is greater than 30
predicate = and_(sql("name like A%"), greater("age", 30))

values = employees_map.values(predicate)

for value in values:
    print(value.to_string())  # As JSON string
    print(value.loads())  # As Python object

client.shutdown()

def test_sql(self):
    self.fill_map()
    predicate = sql("this == 'value-1'")
    self.assertCountEqual(self.map.key_set(predicate), ["key-1"])

        Employee) and self.name == other.name and self.age == other.age


client = hazelcast.HazelcastClient(
    portable_factories={Employee.FACTORY_ID: {Employee.CLASS_ID: Employee}}
)

my_map = client.get_map("employee-map")

my_map.put(0, Employee("Jack", 28))
my_map.put(1, Employee("Jane", 29))
my_map.put(2, Employee("Joe", 30))

print("Map Size:", my_map.size().result())

predicate = sql("age <= 29")


def values_callback(f):
    result_set = f.result()
    print("Query Result Size:", len(result_set))
    for value in result_set:
        print("Value:", value)


my_map.values(predicate).add_done_callback(values_callback)

time.sleep(3)
client.shutdown()