def test_batch_process_multiple_success(mock_requests, mock_update_fetched_url, mock_upload_page):
    """A batch of URLs is fetched, recorded, and uploaded with no errors."""
    # Given
    url_count = 10
    page_urls = [random_string() for _ in range(url_count)]

    def make_response():
        response = Mock()
        response.status_code = 200
        response.content = "content"
        response.text = random_string()
        return response

    mock_requests.get.side_effect = [make_response() for _ in range(url_count)]

    # When
    processor = BatchProcessor()
    processor.process([{"PageUrl": page_url} for page_url in page_urls])

    # Then
    mock_requests.get.assert_has_calls(
        [call(page_url) for page_url in page_urls], any_order=True)
    mock_update_fetched_url.assert_has_calls(
        [call(page_url, 200) for page_url in page_urls], any_order=True)
    mock_upload_page.assert_has_calls(
        [call(page_url, "content") for page_url in page_urls], any_order=True)
    assert processor.errors == []
def test_handle_cron_event(
    mock_get_all_user_ids,
    mock_get_all_records,
    mock_create_user_metadata_url,
    mock_create_user_review_pages_urls,
    mock_create_review_status_url,
):
    """Cron events fan out: metadata records to the review-pages creator,
    review records to the review-status creator, one metadata URL per user."""
    # Given
    event = {"source": "aws.events"}
    first_user, second_user, third_user = (random_string() for _ in range(3))
    mock_get_all_user_ids.return_value = [first_user, second_user, third_user]
    metadata_record_1 = {"UserId": first_user, "SortKey": "Metadata"}
    review_record_1 = {"UserId": first_user, "SortKey": "Review"}
    review_record_2 = {"UserId": second_user, "SortKey": "Review"}
    metadata_record_2 = {"UserId": third_user, "SortKey": "Metadata"}
    mock_get_all_records.side_effect = [
        [metadata_record_1, review_record_1],
        [review_record_2],
        [metadata_record_2],
    ]

    # When
    handle(event)

    # Then
    mock_create_user_metadata_url.assert_has_calls(
        [call(first_user), call(second_user), call(third_user)])
    mock_create_user_review_pages_urls.assert_has_calls(
        [call(metadata_record_1), call(metadata_record_2)])
    mock_create_review_status_url.assert_has_calls(
        [call(review_record_1), call(review_record_2)])
def test_bytes_received(self):
    """A map read should grow the reactor's received-byte counter."""
    reactor = self.client._reactor
    baseline = reactor.bytes_received
    self.assertGreater(baseline, 0)
    test_map = self.client.get_map(random_string()).blocking()
    test_map.get(random_string())
    self.assertGreater(reactor.bytes_received, baseline)
def test_bytes_sent(self):
    """A map write should grow the reactor's sent-byte counter."""
    reactor = self.client._reactor
    baseline = reactor.bytes_sent
    self.assertGreater(baseline, 0)
    test_map = self.client.get_map(random_string()).blocking()
    test_map.set(random_string(), random_string())
    self.assertGreater(reactor.bytes_sent, baseline)
def test_get_user_id_from_review_id_exists():
    """When the table has an item for the review id, its UserId is returned."""
    # Given
    user_id, review_id = random_string(), random_string()
    mock_yelp_table = Mock()
    mock_yelp_table.query.return_value = {"Items": [{"UserId": user_id}]}
    # Patch the module-level table handle, restoring it afterwards so the
    # stub does not leak into tests that run later in the same process.
    original_table = yelp_table.YELP_TABLE
    yelp_table.YELP_TABLE = mock_yelp_table
    try:
        # When
        result = get_user_id_from_review_id(review_id)
    finally:
        yelp_table.YELP_TABLE = original_table

    # Then
    assert result == user_id
    # Verify the lookup actually hit the table (previously unchecked).
    mock_yelp_table.query.assert_called_once()
def test_create_review_status_url(mock_upsert_new_url, mock_get_user_id_from_review_id):
    """Builds the review-status URL from the record's ids and upserts it
    for the user resolved from the review id."""
    # Given
    biz_id, review_id = random_string(), random_string()
    review_record = {"BizId": biz_id, "ReviewId": review_id}
    user_id = random_string()
    mock_get_user_id_from_review_id.return_value = user_id

    # When
    _create_review_status_url(review_record)

    # Then
    # Verify the user lookup was driven by the review id (previously unchecked).
    mock_get_user_id_from_review_id.assert_called_once_with(review_id)
    mock_upsert_new_url.assert_called_once_with(
        user_id, f"https://www.yelp.com/biz/{biz_id}?hrid={review_id}")
def test_off_reconnect_mode(self):
    """With reconnect mode OFF, losing the only member shuts the client down."""
    self.cluster = self.rc.createCluster(None, None)
    member = self.rc.startMember(self.cluster.id)

    config = ClientConfig()
    config.cluster_name = self.cluster.id
    config.network.addresses.append("localhost:5701")
    config.connection_strategy.reconnect_mode = RECONNECT_MODE.OFF
    config.connection_strategy.connection_retry.cluster_connect_timeout = six.MAXSIZE

    shutdown_events = []

    def on_state_change(event):
        # Record only the terminal SHUTDOWN transition.
        if event == LifecycleState.SHUTDOWN:
            shutdown_events.append(event)

    config.add_lifecycle_listener(on_state_change)
    self.client = HazelcastClient(config)
    m = self.client.get_map(random_string()).blocking()
    m.put(1, 1)  # no exception at this point
    self.rc.shutdownMember(self.cluster.id, member.uuid)
    self.assertTrueEventually(lambda: self.assertEqual(1, len(shutdown_events)))
    with self.assertRaises(HazelcastClientNotActiveError):
        m.put(1, 1)
def test_async_start_with_no_cluster(self):
    """With async start, proxy creation fails fast while still offline."""
    config = ClientConfig()
    config.connection_strategy.async_start = True
    self.client = HazelcastClient(config)
    map_name = random_string()
    with self.assertRaises(ClientOfflineError):
        self.client.get_map(map_name)
def test_async_start_with_partition_specific_proxies(self):
    """Partition-specific proxies also fail fast while the client is offline."""
    config = ClientConfig()
    config.connection_strategy.async_start = True
    self.client = HazelcastClient(config)
    list_name = random_string()
    with self.assertRaises(ClientOfflineError):
        self.client.get_list(list_name)
def test_map_listener(self):
    """An entry listener fires for a compact value whose schema must be
    fetched from the cluster."""
    config = {
        "cluster_name": self.cluster.id,
        "compact_serializers": [SomeFieldsSerializer([FieldDefinition(name="int32")])],
    }
    listener_client = self.create_client(config)
    map_name = random_string()
    listened_map = listener_client.get_map(map_name).blocking()
    events_seen = AtomicInteger()
    listened_map.add_entry_listener(
        include_value=True, added_func=lambda _: events_seen.add(1))
    # Put the entry from a second client so no local schema registry
    # exists in the listening client, forcing a fetch from the cluster.
    writer_client = self.create_client(config)
    writer_client.get_map(map_name).blocking().put(1, SomeFields(int32=42))
    self.assertTrueEventually(lambda: self.assertEqual(1, events_seen.get()))
def test_write_then_read_as_nullable(self, _, field_kind, nullable_field_kind):
    """A value written as a plain field reads back via the nullable reader."""
    map_name = random_string()
    field_name = field_kind.name.lower()
    expected = REFERENCE_OBJECTS[field_kind]
    self._put_entry(
        map_name=map_name,
        value_to_put=expected,
        field_name=field_name,
    )
    reader_name = f"read_{nullable_field_kind.name.lower()}"
    serializer = SomeFieldsSerializer([
        FieldDefinition(name=field_name, reader_method_name=reader_name),
    ])
    client = self.create_client({
        "cluster_name": self.cluster.id,
        "compact_serializers": [serializer],
    })
    read_back = client.get_map(map_name).blocking().get("key")
    self.assertTrue(is_equal(expected, getattr(read_back, field_name)))
def test_read_with_type_mismatch(self, _, field_kind):
    """Reading a field written with a different kind raises a serialization error."""
    map_name = random_string()
    mismatched_kind = FIELD_KINDS[(field_kind.value + 1) % len(FIELD_KINDS)]
    field_name = field_kind.name.lower()
    self._put_entry(
        map_name=map_name,
        value_to_put=REFERENCE_OBJECTS[mismatched_kind],
        field_name=field_name,
        writer_method_name=f"write_{mismatched_kind.name.lower()}",
    )
    serializers = [
        SomeFieldsSerializer([FieldDefinition(name=field_name)]),
        NestedSerializer(),
    ]
    client = self.create_client({
        "cluster_name": self.cluster.id,
        "compact_serializers": serializers,
    })
    reader_map = client.get_map(map_name).blocking()
    with self.assertRaisesRegex(HazelcastSerializationError, "Mismatched field types"):
        reader_map.get("key")
def test_read_when_field_does_not_exist(self, _, field_kind):
    """Reading a non-existent field name without a default raises an error."""
    map_name = random_string()
    field_name = field_kind.name.lower()
    self._put_entry(
        map_name=map_name,
        value_to_put=REFERENCE_OBJECTS[field_kind],
        field_name=field_name,
    )
    # Evolved schema reads a name that was never written.
    evolved_definition = FieldDefinition(
        name=field_name,
        name_to_read="not-a-field",
        reader_method_name=f"read_{field_name}",
    )
    client = self.create_client({
        "cluster_name": self.cluster.id,
        "compact_serializers": [
            SomeFieldsSerializer([evolved_definition]),
            NestedSerializer(),
        ],
    })
    evolved_map = client.get_map(map_name).blocking()
    with self.assertRaisesRegex(HazelcastSerializationError, "No field with the name"):
        evolved_map.get("key")
def get_semaphore(self, semaphore_type, initialize_with=None):
    """Create a uniquely named CP semaphore of the given type.

    Initializes it with the given permit count when provided, remembers
    it on the test instance, and returns the blocking proxy.
    """
    name = semaphore_type + random_string()
    semaphore = self.client.cp_subsystem.get_semaphore(name).blocking()
    if initialize_with is not None:
        semaphore.init(initialize_with)
    self.semaphore = semaphore
    return semaphore
def test_add_listener(self):
    """Published messages are delivered to a registered listener in order."""
    topic = self.get_topic(random_string())
    received = []

    class Collector(ReliableMessageListener):
        def on_message(self, message):
            received.append(message.message)

        def retrieve_initial_sequence(self):
            return -1

        def store_sequence(self, sequence):
            pass

        def is_loss_tolerant(self):
            return False

        def is_terminal(self, error):
            return False

    self.assertIsNotNone(topic.add_listener(Collector()))
    topic.publish("a")
    topic.publish("b")
    self.assertTrueEventually(lambda: self.assertEqual(["a", "b"], received))
def test_add_listener_when_on_message_and_is_terminal_raises_error(self):
    """If on_message raises and is_terminal re-raises, the runner is cancelled."""
    topic = self.get_topic(random_string())
    received = []

    class FailingListener(ReliableMessageListener):
        def on_message(self, message):
            payload = message.message
            if payload < 5:
                received.append(payload)
            else:
                raise ValueError("expected")

        def retrieve_initial_sequence(self):
            return -1

        def store_sequence(self, sequence):
            pass

        def is_loss_tolerant(self):
            return False

        def is_terminal(self, error):
            raise error

    self.assertIsNotNone(topic.add_listener(FailingListener()))
    topic.publish_all(range(10))
    self.assertTrueEventually(lambda: self.assertEqual(list(range(5)), received))
    # Should be cancelled since on_message raised error
    self.assertTrueEventually(
        lambda: self.assertEqual(0, len(topic._wrapped._runners)))
def test_replicated_map_smart_listener_local_only(self):
    """A smart-client entry listener receives exactly one event per put."""
    rmap = self.client.get_replicated_map(random_string()).blocking()
    rmap.add_entry_listener(added_func=self.collector)
    rmap.put('key', 'value')
    # Wait a fixed interval so duplicate deliveries (if any) would have
    # arrived before the exact count is checked.
    sleep(5)
    self.assertEqual(1, len(self.collector.events))
def test_handle_config_table_event(mock_upsert_new_url):
    """An INSERT stream record on the config table upserts the user's
    Yelp user-details URL."""
    # Given
    user_id = random_string()
    arn = ("arn:aws:dynamodb:us-west-1:316936913708:"
           "table/ConfigTable/stream/2020-12-26T06:39:42.594")
    record = {
        "eventName": "INSERT",
        "dynamodb": {"NewImage": {"UserId": {"S": user_id}}},
        "eventSourceARN": arn,
    }

    # When
    handle({"Records": [record]})

    # Then
    mock_upsert_new_url.assert_called_once_with(
        user_id, f"https://www.yelp.com/user_details?userid={user_id}")
def test_create():
    """Creating a user action makes the user findable by name."""
    db = DB()
    name = random_string(10)
    UserAction.create(db, name)
    found = UserLoader.find_by_name(db, name)
    assert found.name == name
def test_index():
    """The index page renders the global user's blog heading."""
    with global_user(random_string(5)) as blog_owner:
        response = web_client().get('/')
        assert response.status == '200 OK'
        page = pq(response.data)
        assert page('h1').text() == "%s's blog" % (blog_owner, )
def test_download_page(mock_key_utils):
    """download_page fetches the S3 object for the url's key and decodes it."""
    # Given
    bucket_name = "test-bucket-name"
    page_bucket.PAGE_BUCKET_NAME = bucket_name
    url, key = "test-url", "test-key"
    html = random_string(100)
    mock_streaming_body = Mock()
    mock_streaming_body.read.return_value = bytes(html, encoding="utf8")
    mock_obj = Mock()
    mock_obj.get.return_value = {"Body": mock_streaming_body}
    mock_s3 = Mock()
    mock_s3.Object.return_value = mock_obj
    page_bucket.S3 = mock_s3
    mock_key_utils.to_key.return_value = key

    # When
    result = download_page(url)

    # Then
    mock_s3.Object.assert_called_once_with(bucket_name, key)
    mock_obj.get.assert_called_once_with()
    assert result == html
def setUpClass(cls):
    """Start a two-member cluster, connect one client, and create a map."""
    cls.rc = cls.create_rc()
    cls.cluster = cls.create_cluster(cls.rc, cls.configure_cluster())
    for _ in range(2):
        cls.cluster.start_member()
    cls.client = HazelcastClient(cluster_name=cls.cluster.id)
    cls.map = cls.client.get_map(random_string()).blocking()
def test_add_listener_with_store_sequence(self):
    """store_sequence receives each consumed message's sequence number."""
    topic = self.get_topic(random_string())
    seen_sequences = []

    class SequenceRecorder(ReliableMessageListener):
        def on_message(self, message):
            pass

        def retrieve_initial_sequence(self):
            return -1

        def store_sequence(self, sequence):
            seen_sequences.append(sequence)

        def is_loss_tolerant(self):
            return False

        def is_terminal(self, error):
            return False

    self.assertIsNotNone(topic.add_listener(SequenceRecorder()))
    topic.publish_all(["item-%s" % i for i in range(20)])
    self.assertTrueEventually(
        lambda: self.assertEqual(list(range(20)), seen_sequences))
def test_update_fetched_url(mock_table, mock_get_user_id_from_url, url, expected_sort_key):
    """update_fetched_url resolves the owning user and records the status
    code plus the (frozen) fetch timestamp on the right item."""
    # Given
    status_code = 42
    user_id = random_string()
    mock_get_user_id_from_url.return_value = user_id

    # When
    update_fetched_url(url, status_code)

    # Then
    mock_get_user_id_from_url.assert_called_once_with(url)
    expected_key = {"UserId": user_id, "SortKey": expected_sort_key}
    expected_values = {
        ":status_code": status_code,
        ":last_fetched": int(datetime(2020, 8, 23).timestamp()),
    }
    mock_table.update_item.assert_called_once_with(
        Key=expected_key,
        UpdateExpression="set StatusCode=:status_code, LastFetched=:last_fetched",
        ExpressionAttributeValues=expected_values,
    )
def test_client_receives_when_server_publish_messages(self):
    """Messages published server-side reach a client listener, each
    carrying the publishing member."""
    skip_if_client_version_older_than(self, "4.2.1")
    topic_name = random_string()
    topic = self.get_topic(topic_name)
    received = [0]

    def listener(message):
        self.assertIsNotNone(message.member)
        received[0] += 1

    topic.add_listener(listener)
    message_count = 10
    # Publish from the server side via the remote controller.
    script = """
    var topic = instance_0.getReliableTopic("%s");
    for (var i = 0; i < %d; i++) {
        topic.publish(i);
    }
    """ % (topic_name, message_count)
    self.rc.executeOnController(self.cluster.id, script, Lang.JAVASCRIPT)
    self.assertTrueEventually(
        lambda: self.assertEqual(message_count, received[0]))
def test_off_reconnect_mode(self):
    """With ReconnectMode.OFF, shutting down the only member terminates
    the client instead of reconnecting."""
    self.cluster = self.rc.createCluster(None, None)
    member = self.rc.startMember(self.cluster.id)

    shutdown_events = []

    def on_state_change(event):
        # Record only the terminal SHUTDOWN transition.
        if event == LifecycleState.SHUTDOWN:
            shutdown_events.append(event)

    self.client = HazelcastClient(
        cluster_members=["localhost:5701"],
        cluster_name=self.cluster.id,
        reconnect_mode=ReconnectMode.OFF,
        lifecycle_listeners=[on_state_change],
    )
    m = self.client.get_map(random_string()).blocking()
    m.put(1, 1)  # no exception at this point
    self.rc.shutdownMember(self.cluster.id, member.uuid)
    self.assertTrueEventually(lambda: self.assertEqual(1, len(shutdown_events)))
    with self.assertRaises(HazelcastClientNotActiveError):
        m.put(1, 1)
def test_async_start(self):
    """async_start returns immediately; CONNECTED arrives asynchronously
    and proxies become usable afterwards."""
    self.cluster = self.rc.createCluster(None, None)
    self.rc.startMember(self.cluster.id)

    connected_events = []

    def on_state_change(event):
        if event == LifecycleState.CONNECTED:
            connected_events.append(event)

    self.client = HazelcastClient(
        cluster_name=self.cluster.id,
        cluster_members=["localhost:5701"],
        async_start=True,
        lifecycle_listeners=[on_state_change],
    )
    self.assertTrueEventually(lambda: self.assertEqual(1, len(connected_events)))
    self.client.get_map(random_string())
def _verify_adding_a_field(
    self,
    *existing_fields: typing.Tuple[str, typing.Any],
    new_field_name: str,
    new_field_value: typing.Any,
    new_field_default_value: typing.Any,
):
    """Verify schema-evolution semantics when a v2 schema adds one field.

    - v1 readers can read v2 data (the extra field is ignored).
    - v2 readers without a default fail on v1 data (field missing).
    - v2 readers with a default read v1 data, substituting the default
      only for the genuinely absent field.
    """
    map_name = random_string()

    v1_field_definitions = [FieldDefinition(name=name) for name, _ in existing_fields]
    v1_serializer = SomeFieldsSerializer(v1_field_definitions)
    v1_client = self._create_client(v1_serializer)
    v1_map = v1_client.get_map(map_name).blocking()
    v1_fields = dict(existing_fields)
    v1_map.put("key1", SomeFields(**v1_fields))

    v2_field_definitions = v1_field_definitions + [FieldDefinition(name=new_field_name)]
    v2_serializer = SomeFieldsSerializer(v2_field_definitions)
    v2_client = self._create_client(v2_serializer)
    v2_map = v2_client.get_map(map_name).blocking()
    v2_fields = copy.deepcopy(v1_fields)
    v2_fields[new_field_name] = new_field_value
    v2_map.put("key2", SomeFields(**v2_fields))

    careful_v2_field_definitions = v1_field_definitions + [
        FieldDefinition(
            name=new_field_name,
            reader_method_name=f"read_{new_field_name}_or_default",
            default_value_to_read=new_field_default_value,
        )
    ]
    careful_v2_serializer = SomeFieldsSerializer(careful_v2_field_definitions)
    careful_client_v2 = self._create_client(careful_v2_serializer)
    careful_v2_map = careful_client_v2.get_map(map_name).blocking()

    # Old client can read data written by the new client
    v1_obj = v1_map.get("key2")
    for name in v1_fields:
        self.assertEqual(v2_fields[name], getattr(v1_obj, name))

    # New client cannot read data written by the old client, since
    # there is no such field on the old data.
    with self.assertRaisesRegex(HazelcastSerializationError, "No field with the name"):
        v2_map.get("key1")

    # However, if it has default value, everything should work
    careful_v2_obj = careful_v2_map.get("key1")
    for name in v2_fields:
        # BUG FIX: the previous `v1_fields.get(name) or new_field_default_value`
        # substituted the default whenever an existing field's value was
        # falsy (0, "", False). Fall back to the default only when the
        # field is genuinely absent from the v1 data.
        expected = v1_fields[name] if name in v1_fields else new_field_default_value
        self.assertEqual(expected, getattr(careful_v2_obj, name))
def configure_client(cls, config):
    """Point the client at the cluster and attach a randomly named near-cache."""
    config.cluster_name = cls.cluster.id
    config.add_near_cache_config(NearCacheConfig(random_string()))
    return super(MapTest, cls).configure_client(config)
def _restart_cluster(self):
    """Terminate the current member, start a replacement, and wait until
    the client has re-sent its state."""
    self.rc.terminateMember(self.cluster.id, self.member.uuid)
    ConnectionManagerOnClusterRestartTest.member = self.cluster.start_member()
    # Perform an invocation to wait until the client state is sent
    probe_map = self.client.get_map(random_string()).blocking()
    probe_map.set(1, 1)
    self.assertEqual(1, probe_map.get(1))
def test_near_cache_config(self):
    """Every validated near-cache property rejects an out-of-range value."""
    config = NearCacheConfig(random_string())
    invalid_assignments = [
        ("in_memory_format", 100),
        ("eviction_policy", 100),
        ("time_to_live_seconds", -1),
        ("max_idle_seconds", -1),
        ("eviction_max_size", 0),
    ]
    for prop, bad_value in invalid_assignments:
        with self.assertRaises(ValueError):
            setattr(config, prop, bad_value)
def setUp(self):
    """Create a fresh, uniquely named queue proxy for each test."""
    queue_name = "ClientQueueTest_" + random_string()
    self.queue = self.client.get_queue(queue_name).blocking()
def setUp(self):
    """Create a fresh atomic long plus a sentinel reference object."""
    counter_name = random_string()
    self.atomic_long = self.client.get_atomic_long(counter_name).blocking()
    self.reference = object()
def setUp(self):
    """Create a fresh, uniquely named count-down latch for each test."""
    latch_name = random_string()
    self.latch = self.client.get_count_down_latch(latch_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named semaphore for each test."""
    semaphore_name = random_string()
    self.semaphore = self.client.get_semaphore(semaphore_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named id generator for each test."""
    generator_name = random_string()
    self.id_gen = self.client.get_id_generator(generator_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named topic proxy for each test."""
    topic_name = random_string()
    self.topic = self.client.get_topic(topic_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named list proxy for each test."""
    list_name = random_string()
    self.list = self.client.get_list(list_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named set proxy for each test."""
    set_name = random_string()
    self.set = self.client.get_set(set_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named lock proxy for each test."""
    lock_name = random_string()
    self.lock = self.client.get_lock(lock_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named atomic reference for each test."""
    reference_name = random_string()
    self.atomic_reference = self.client.get_atomic_reference(reference_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named replicated map for each test."""
    map_name = random_string()
    self.replicated_map = self.client.get_replicated_map(map_name).blocking()
def configure_client(cls, config):
    """Attach a randomly named near-cache to the client config."""
    config.add_near_cache_config(NearCacheConfig(random_string()))
    return super(MapTest, cls).configure_client(config)
def setUp(self):
    """Create a fresh, uniquely named executor proxy for each test."""
    executor_name = random_string()
    self.executor = self.client.get_executor(executor_name).blocking()
def setUp(self):
    """Configure logging, then create a fresh set proxy for each test."""
    configure_logging()
    set_name = random_string()
    self.set = self.client.get_set(set_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named map proxy for each test."""
    map_name = random_string()
    self.map = self.client.get_map(map_name).blocking()
def setUp(self):
    """Create a fresh, uniquely named ringbuffer proxy for each test."""
    ringbuffer_name = "ringbuffer-" + random_string()
    self.ringbuffer = self.client.get_ringbuffer(ringbuffer_name).blocking()