async def _test_binary_blob_read_function(self, func_name):
    """
    Verify that the function executed successfully when the worker received
    inputs for the function over shared memory.

    Writes random bytes into a fresh shared memory map, sends the worker an
    RpcSharedMemory reference to it, and checks that the function reports
    back (via the HTTP response body) the same size and md5 digest.
    """
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)

        # Write binary content into shared memory.
        # Size is above the threshold so shared memory transfer is used.
        mem_map_name = self.get_new_mem_map_name()
        content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10
        content = self.get_random_bytes(content_size)
        content_md5 = hashlib.md5(content).hexdigest()
        # The map must also hold the content header in front of the payload
        mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + content_size
        mem_map = self.file_accessor.create_mem_map(mem_map_name,
                                                    mem_map_size)
        shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name,
                                         mem_map)
        num_bytes_written = shared_mem_map.put_bytes(content)

        # Create a message to send to the worker containing info about the
        # shared memory region to read input from
        value = protos.RpcSharedMemory(name=mem_map_name,
                                       offset=0,
                                       count=num_bytes_written,
                                       type=protos.RpcDataType.bytes)

        # Invoke the function; it should read the input blob from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the input
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET'))),
                protos.ParameterBinding(name='file',
                                        rpc_shared_memory=value)
            ])

        # Dispose the shared memory map since the function is done using it
        shared_mem_map.dispose()

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        # The function echoes the size and md5 of what it read in a JSON
        # HTTP response body.
        response_bytes = response_msg.response.return_value.http.body.bytes
        json_response = json.loads(response_bytes)
        func_received_content_size = json_response['content_size']
        func_received_content_md5 = json_response['content_md5']

        # Check the function response to ensure that it read the complete
        # input that we provided and the md5 matches
        self.assertEqual(content_size, func_received_content_size)
        self.assertEqual(content_md5, func_received_content_md5)
async def call_and_check(due: bool):
    """Invoke 'return_pastdue' with the given flag and verify the output."""
    timer_payload = protos.TypedData(json=json.dumps({'IsPastDue': due}))
    _, resp = await host.invoke_function(
        'return_pastdue',
        [protos.ParameterBinding(name='timer', data=timer_payload)])
    self.assertEqual(resp.response.result.status,
                     protos.StatusResult.Success)
    # The function should report the flag back through its output binding.
    expected_output = [
        protos.ParameterBinding(
            name='pastdue',
            data=protos.TypedData(string=str(due))),
    ]
    self.assertEqual(list(resp.response.output_data), expected_output)
async def test_call_async_function_check_logs(self):
    """Invoke an async function and verify its two user log entries."""
    async with testutils.start_mockhost() as host:
        await host.load_function('async_logging')

        invoke_id, resp = await host.invoke_function(
            'async_logging',
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(
                        http=protos.RpcHttp(method='GET'))),
            ])
        self.assertEqual(resp.response.result.status,
                         protos.StatusResult.Success)

        # Only logs emitted by the user's function belong to this category.
        user_logs = [entry for entry in resp.logs
                     if entry.category == 'my function']
        self.assertEqual(len(user_logs), 2)

        info_log, error_log = user_logs
        self.assertEqual(info_log.invocation_id, invoke_id)
        self.assertEqual(info_log.message, 'hello info')
        self.assertEqual(info_log.level, protos.RpcLog.Information)

        self.assertEqual(error_log.invocation_id, invoke_id)
        self.assertTrue(error_log.message.startswith('and another error'))
        self.assertEqual(error_log.level, protos.RpcLog.Error)

        self.assertEqual(resp.response.return_value.string, 'OK-async')
async def test_mock_eventhub_cardinality_many(self):
    """
    Invoke an EventHub-triggered function declared with cardinality=many
    and verify it loads and runs successfully.

    NOTE(review): another method with this exact name appears later in the
    file; if both live in the same TestCase class, this earlier definition
    is shadowed and never runs — confirm whether one should be renamed or
    removed. Also note this variant sends a single JSON object (not a list)
    and empty metadata — presumably an older form; verify against the
    later variant.
    """
    async with testutils.start_mockhost(
            script_root=self.mock_funcs_dir) as host:
        func_id, r = await host.load_function('eventhub_cardinality_many')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        _, r = await host.invoke_function(
            'eventhub_cardinality_many',
            [
                protos.ParameterBinding(
                    name='events',
                    data=protos.TypedData(
                        json=json.dumps({
                            'id': 'cardinality_many'
                        })
                    ),
                ),
            ],
            metadata={}
        )
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(r.response.return_value.string, 'OK_MANY')
async def test_mock_eventhub_cardinality_many(self):
    """Invoke an EventHub function (cardinality=many) with a batch of events."""
    async with testutils.start_mockhost(
            script_root=self.mock_funcs_dir) as host:
        func_id, load_resp = await host.load_function(
            'eventhub_cardinality_many')
        self.assertEqual(load_resp.response.function_id, func_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Success)

        # A list payload matches the 'many' cardinality of the trigger.
        events_payload = protos.TypedData(
            json=json.dumps([{'id': 'cardinality_many'}]))
        # Per-event system properties are passed as a parallel JSON array.
        system_props = protos.TypedData(json=json.dumps([
            {
                'iothub-device-id': 'mock-iothub-device-id',
                'iothub-auth-data': 'mock-iothub-auth-data',
                'EnqueuedTimeUtc': '2020-02-18T21:28:42.5888539Z'
            }
        ]))

        _, invoke_resp = await host.invoke_function(
            'eventhub_cardinality_many',
            [protos.ParameterBinding(name='events', data=events_payload)],
            metadata={'SystemPropertiesArray': system_props})

        self.assertEqual(invoke_resp.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(invoke_resp.response.return_value.string,
                         'OK_MANY')
async def call_and_check():
    """Invoke the IoT EventHub trigger and verify the device id is echoed."""
    event_payload = protos.TypedData(json=json.dumps({'id': 'foo'}))
    iot_metadata = {
        'iothub-device-id': protos.TypedData(
            string='mock-iothub-device-id'),
        'iothub-auth-data': protos.TypedData(
            string='mock-iothub-auth-data'),
    }
    _, resp = await host.invoke_function(
        'eventhub_trigger_iot',
        [protos.ParameterBinding(name='event', data=event_payload)],
        metadata=iot_metadata)
    self.assertEqual(resp.response.result.status,
                     protos.StatusResult.Success)
    self.assertIn('mock-iothub-device-id',
                  resp.response.return_value.string)
async def test_call_sync_function_check_logs(self):
    """Invoke a sync function and verify its single user log entry."""
    async with testutils.start_mockhost() as host:
        await host.load_function('sync_logging')

        invoke_id, resp = await host.invoke_function(
            'sync_logging',
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(
                        http=protos.RpcHttp(method='GET'))),
            ])
        self.assertEqual(resp.response.result.status,
                         protos.StatusResult.Success)

        # Only logs emitted by the user's function belong to this category.
        user_logs = [entry for entry in resp.logs
                     if entry.category == 'my function']
        self.assertEqual(len(user_logs), 1)

        log_entry = user_logs[0]
        self.assertEqual(log_entry.invocation_id, invoke_id)
        self.assertTrue(log_entry.message.startswith(
            'a gracefully handled error'))

        self.assertEqual(resp.response.return_value.string, 'OK-sync')
async def call_and_check():
    """Invoke the IoT trigger with SystemProperties metadata; verify echo."""
    system_props = protos.TypedData(json=json.dumps({
        'iothub-device-id': 'mock-iothub-device-id',
        'iothub-auth-data': 'mock-iothub-auth-data',
        'EnqueuedTimeUtc': '2020-02-18T21:28:42.5888539Z'
    }))
    event_payload = protos.TypedData(json=json.dumps({'id': 'foo'}))
    _, resp = await host.invoke_function(
        'eventhub_trigger_iot',
        [protos.ParameterBinding(name='event', data=event_payload)],
        metadata={'SystemProperties': system_props})
    self.assertEqual(resp.response.result.status,
                     protos.StatusResult.Success)
    # The function should surface both the device id and auth data.
    body = resp.response.return_value.string
    for expected in ('device-id', 'mock-iothub-device-id',
                     'auth-data', 'mock-iothub-auth-data'):
        self.assertIn(expected, body)
async def test_mock_generic_as_bytes_no_anno(self):
    """A generic binding without annotations should round-trip raw bytes."""
    async with testutils.start_mockhost(
            script_root=self.generic_funcs_dir) as host:
        func_id, load_resp = await host.load_function(
            'foobar_as_bytes_no_anno')
        self.assertEqual(load_resp.response.function_id, func_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Success)

        payload = b'\x00\x01'
        _, invoke_resp = await host.invoke_function(
            'foobar_as_bytes_no_anno',
            [
                protos.ParameterBinding(
                    name='input',
                    data=protos.TypedData(bytes=payload)),
            ])
        self.assertEqual(invoke_resp.response.result.status,
                         protos.StatusResult.Success)
        # The function should echo the exact bytes back as its return value.
        self.assertEqual(invoke_resp.response.return_value,
                         protos.TypedData(bytes=payload))
async def _invoke_function(self, host: testutils._MockWebHost,
                           function_name: str):
    """Invoke *function_name* with a plain GET request and assert success."""
    request_binding = protos.ParameterBinding(
        name='req',
        data=protos.TypedData(http=protos.RpcHttp(method='GET')))
    _, resp = await host.invoke_function(function_name, [request_binding])
    self.assertEqual(resp.response.result.status,
                     protos.StatusResult.Success)
async def call_and_check():
    """Invoke the user-event-loop timer function and assert it succeeds."""
    timer_payload = protos.TypedData(json=json.dumps({'IsPastDue': False}))
    _, resp = await host.invoke_function(
        'user_event_loop_timer',
        [protos.ParameterBinding(name='timer', data=timer_payload)])
    self.assertEqual(resp.response.result.status,
                     protos.StatusResult.Success)
async def test_close_shared_memory_maps(self):
    """
    Close the shared memory maps created by the worker to transfer
    output blob to the host after the host is done processing the response.
    """
    func_name = 'put_blob_as_bytes_return_http_response'
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)
        # Size is above the threshold so the output goes via shared memory
        content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10
        http_params = {'content_size': str(content_size)}

        # Invoke the function; it should read the input blob from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the input
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET',
                        query=http_params))),
            ])

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        # Verify if the worker produced an output blob which was written
        # in shared memory
        output_data = response_msg.response.output_data
        output_binding = output_data[0]

        # Get the information about the shared memory region in which the
        # worker wrote the function's output blob
        shmem = output_binding.rpc_shared_memory
        mem_map_name = shmem.name

        # Request the worker to close the memory maps
        mem_map_names = [mem_map_name]
        response_msg = \
            await host.close_shared_memory_resources(mem_map_names)

        # Verify that the worker responds with a successful status after
        # closing the requested memory map; close_map_results maps each
        # requested map name to a success flag.
        mem_map_statuses = response_msg.response.close_map_results
        self.assertEqual(len(mem_map_names), len(mem_map_statuses.keys()))
        for mem_map_name in mem_map_names:
            self.assertTrue(mem_map_name in mem_map_statuses)
            status = mem_map_statuses[mem_map_name]
            self.assertTrue(status)
async def _check_if_function_is_ok(self, host):
    """Verify that 'show_context' both loads and invokes successfully."""
    # Loading must succeed and report back the same function id.
    function_id, load_resp = await host.load_function('show_context')
    self.assertEqual(load_resp.response.function_id, function_id)
    self.assertEqual(load_resp.response.result.status,
                     protos.StatusResult.Success)

    # Invocation via a plain GET request must also succeed.
    request_binding = protos.ParameterBinding(
        name='req',
        data=protos.TypedData(http=protos.RpcHttp(method='GET')))
    invocation_id, invoke_resp = await host.invoke_function(
        'show_context', [request_binding])
    self.assertIsNotNone(invocation_id)
    self.assertEqual(invoke_resp.response.result.status,
                     protos.StatusResult.Success)
async def test_mock_generic_should_not_support_implicit_output(self):
    """
    A generic binding with no '$return' binding in function.json must not
    be able to produce output implicitly — invocation should fail.
    """
    async with testutils.start_mockhost(
            script_root=self.generic_funcs_dir) as host:
        func_id, r = await host.load_function('foobar_implicit_output')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        # Fix: invoke the function that was just loaded
        # ('foobar_implicit_output'), not 'foobar_as_bytes_no_anno'.
        # Invoking a function that was never loaded would also fail,
        # letting the assertion below pass for the wrong reason.
        _, r = await host.invoke_function('foobar_implicit_output', [
            protos.ParameterBinding(
                name='input',
                data=protos.TypedData(bytes=b'\x00\x01'))
        ])
        # It should fail here, since generic binding requires
        # $return statement in function.json to pass output
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
async def test_mock_generic_should_support_without_datatype(self):
    # NOTE(review): the method name says "should support", yet the final
    # assertion expects Failure. The inline comment below suggests failure
    # is deliberate for generic bindings missing a datatype — confirm the
    # intended contract and consider renaming the test to match.
    async with testutils.start_mockhost(
            script_root=self.generic_funcs_dir) as host:
        func_id, r = await host.load_function('foobar_with_no_datatype')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        _, r = await host.invoke_function('foobar_with_no_datatype', [
            protos.ParameterBinding(
                name='input',
                data=protos.TypedData(bytes=b'\x00\x01'))
        ])

        # It should fail here, since the generic binding requires datatype
        # to be defined in function.json
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
async def test_mock_orchestration_trigger(self):
    """An orchestration trigger should return its input as JSON (plus suffix)."""
    async with testutils.start_mockhost(
            script_root=self.durable_functions_dir) as host:
        func_id, load_resp = await host.load_function(
            'orchestration_trigger')
        self.assertEqual(load_resp.response.function_id, func_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Success)

        context_binding = protos.ParameterBinding(
            name='context',
            data=protos.TypedData(string='Durable functions coming soon'))
        _, invoke_resp = await host.invoke_function(
            'orchestration_trigger', [context_binding])

        self.assertEqual(invoke_resp.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(
            invoke_resp.response.return_value,
            protos.TypedData(json='Durable functions coming soon :)'))
async def test_mock_activity_trigger(self):
    """An activity trigger's string input should come back JSON-encoded."""
    async with testutils.start_mockhost(
            script_root=self.durable_functions_dir) as host:
        func_id, load_resp = await host.load_function('activity_trigger')
        self.assertEqual(load_resp.response.function_id, func_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Success)

        # According to Durable Python, the Activity Trigger's input
        # must be json serializable
        input_binding = protos.ParameterBinding(
            name='input',
            data=protos.TypedData(string='test single_word'))
        _, invoke_resp = await host.invoke_function(
            'activity_trigger', [input_binding])

        self.assertEqual(invoke_resp.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(invoke_resp.response.return_value,
                         protos.TypedData(json='"test single_word"'))
async def test_shared_memory_not_used_with_small_output(self):
    """
    Even though shared memory is enabled, small inputs will not be
    transferred over shared memory (in this case from the worker to the
    host.)
    """
    func_name = 'put_blob_as_bytes_return_http_response'
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)
        # Deliberately below the shared-memory threshold so the output
        # must travel over plain RPC instead.
        content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER - 10
        http_params = {'content_size': str(content_size)}

        # Invoke the function; it should read the input blob from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the input
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET',
                        query=http_params))),
            ])

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        # Verify if the worker produced an output blob which was sent over
        # RPC instead of shared memory: the oneof must be 'data', not
        # 'rpc_shared_memory'.
        output_data = response_msg.response.output_data
        self.assertEqual(1, len(output_data))
        output_binding = output_data[0]
        binding_type = output_binding.WhichOneof('rpc_data')
        self.assertEqual('data', binding_type)
async def test_binary_blob_write_function(self):
    """
    Write a blob with binary output that was transferred between the worker
    and host over shared memory.

    The worker writes the function's output into a shared memory map; this
    test opens that map, reads the content back, and verifies it against
    the size/md5 the function reported in its HTTP response.
    """
    func_name = 'put_blob_as_bytes_return_http_response'
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)
        # Size is above the threshold so the output is sent over shared
        # memory rather than RPC.
        content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10
        http_params = {'content_size': str(content_size)}

        # Invoke the function; it should read the input blob from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the input
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET',
                        query=http_params))),
            ])

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        # The function responds back in the HTTP body with the md5 digest of
        # the output it created along with its size
        response_bytes = response_msg.response.return_value.http.body.bytes
        json_response = json.loads(response_bytes)
        func_created_content_size = json_response['content_size']
        func_created_content_md5 = json_response['content_md5']

        # Verify if the worker produced an output blob which was written
        # in shared memory
        output_data = response_msg.response.output_data
        self.assertEqual(1, len(output_data))
        output_binding = output_data[0]
        binding_type = output_binding.WhichOneof('rpc_data')
        self.assertEqual('rpc_shared_memory', binding_type)

        # Get the information about the shared memory region in which the
        # worker wrote the function's output blob
        shmem = output_binding.rpc_shared_memory
        mem_map_name = shmem.name
        offset = shmem.offset
        count = shmem.count
        data_type = shmem.type

        # Verify if the shared memory region's information is valid:
        # worker-generated map names are UUIDs, content starts at offset 0,
        # and the byte count/type must match what the function reported.
        self.assertTrue(self.is_valid_uuid(mem_map_name))
        self.assertEqual(0, offset)
        self.assertEqual(func_created_content_size, count)
        self.assertEqual(protos.RpcDataType.bytes, data_type)

        # Read data from the shared memory region
        mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + count
        mem_map = self.file_accessor.open_mem_map(mem_map_name,
                                                  mem_map_size)
        shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name,
                                         mem_map)
        read_content = shared_mem_map.get_bytes()

        # Dispose the shared memory map since we have read the function's
        # output now
        shared_mem_map.dispose()

        # Verify if we were able to read the correct output that the
        # function has produced
        read_content_md5 = hashlib.md5(read_content).hexdigest()
        self.assertEqual(func_created_content_md5, read_content_md5)
        self.assertEqual(len(read_content), func_created_content_size)
async def test_multiple_input_output_blobs(self):
    """
    Read two blobs and write two blobs, all over shared memory.

    Two input maps are created and handed to the worker via
    RpcSharedMemory bindings; the function echoes their sizes/md5s in its
    HTTP response and produces two outputs, each verified via
    _verify_function_output.
    """
    # Local import: only needed for the non-blocking sleep below.
    import asyncio

    func_name = 'put_get_multiple_blobs_as_bytes_return_http_response'
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)

        # Input 1
        # Write binary content into shared memory
        mem_map_name_1 = self.get_new_mem_map_name()
        input_content_size_1 = \
            consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10
        input_content_1 = self.get_random_bytes(input_content_size_1)
        input_content_md5_1 = hashlib.md5(input_content_1).hexdigest()
        input_mem_map_size_1 = \
            consts.CONTENT_HEADER_TOTAL_BYTES + input_content_size_1
        input_mem_map_1 = \
            self.file_accessor.create_mem_map(mem_map_name_1,
                                              input_mem_map_size_1)
        input_shared_mem_map_1 = \
            SharedMemoryMap(self.file_accessor, mem_map_name_1,
                            input_mem_map_1)
        input_num_bytes_written_1 = \
            input_shared_mem_map_1.put_bytes(input_content_1)

        # Create a message to send to the worker containing info about the
        # shared memory region to read input from
        input_value_1 = protos.RpcSharedMemory(
            name=mem_map_name_1,
            offset=0,
            count=input_num_bytes_written_1,
            type=protos.RpcDataType.bytes)

        # Input 2
        # Write binary content into shared memory
        mem_map_name_2 = self.get_new_mem_map_name()
        input_content_size_2 = \
            consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 20
        input_content_2 = self.get_random_bytes(input_content_size_2)
        input_content_md5_2 = hashlib.md5(input_content_2).hexdigest()
        input_mem_map_size_2 = \
            consts.CONTENT_HEADER_TOTAL_BYTES + input_content_size_2
        input_mem_map_2 = \
            self.file_accessor.create_mem_map(mem_map_name_2,
                                              input_mem_map_size_2)
        input_shared_mem_map_2 = \
            SharedMemoryMap(self.file_accessor, mem_map_name_2,
                            input_mem_map_2)
        input_num_bytes_written_2 = \
            input_shared_mem_map_2.put_bytes(input_content_2)

        # Outputs: the function is told (via HTTP query params) how large
        # each of its two output blobs should be.
        output_content_size_1 = \
            consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 11
        output_content_size_2 = \
            consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 22
        http_params = {
            'output_content_size_1': str(output_content_size_1),
            'output_content_size_2': str(output_content_size_2)
        }

        # Create a message to send to the worker containing info about the
        # shared memory region to read input from
        input_value_2 = protos.RpcSharedMemory(
            name=mem_map_name_2,
            offset=0,
            count=input_num_bytes_written_2,
            type=protos.RpcDataType.bytes)

        # Invoke the function; it should read the input blobs from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the inputs
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET',
                        query=http_params))),
                protos.ParameterBinding(name='inputfile1',
                                        rpc_shared_memory=input_value_1),
                protos.ParameterBinding(name='inputfile2',
                                        rpc_shared_memory=input_value_2)
            ])

        # Fix: time.sleep(1) inside a coroutine blocks the event loop the
        # mock host runs on; await asyncio.sleep(1) waits the same amount
        # of time without stalling the loop.
        await asyncio.sleep(1)

        # Dispose the shared memory maps since the function is done using
        # them
        input_shared_mem_map_1.dispose()
        input_shared_mem_map_2.dispose()

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        response_bytes = response_msg.response.return_value.http.body.bytes
        json_response = json.loads(response_bytes)
        func_received_content_size_1 = json_response[
            'input_content_size_1']
        func_received_content_md5_1 = json_response['input_content_md5_1']
        func_received_content_size_2 = json_response[
            'input_content_size_2']
        func_received_content_md5_2 = json_response['input_content_md5_2']
        func_created_content_size_1 = json_response[
            'output_content_size_1']
        func_created_content_size_2 = json_response[
            'output_content_size_2']
        func_created_content_md5_1 = json_response['output_content_md5_1']
        func_created_content_md5_2 = json_response['output_content_md5_2']

        # Check the function response to ensure that it read the complete
        # input that we provided and the md5 matches
        self.assertEqual(input_content_size_1,
                         func_received_content_size_1)
        self.assertEqual(input_content_md5_1,
                         func_received_content_md5_1)
        self.assertEqual(input_content_size_2,
                         func_received_content_size_2)
        self.assertEqual(input_content_md5_2,
                         func_received_content_md5_2)

        # Verify if the worker produced two output blobs which were written
        # in shared memory
        output_data = response_msg.response.output_data
        self.assertEqual(2, len(output_data))

        # Output 1
        output_binding_1 = output_data[0]
        binding_type = output_binding_1.WhichOneof('rpc_data')
        self.assertEqual('rpc_shared_memory', binding_type)
        shmem_1 = output_binding_1.rpc_shared_memory
        self._verify_function_output(shmem_1, func_created_content_size_1,
                                     func_created_content_md5_1)

        # Output 2
        output_binding_2 = output_data[1]
        binding_type = output_binding_2.WhichOneof('rpc_data')
        self.assertEqual('rpc_shared_memory', binding_type)
        shmem_2 = output_binding_2.rpc_shared_memory
        self._verify_function_output(shmem_2, func_created_content_size_2,
                                     func_created_content_md5_2)