async def test_mock_eventhub_trigger_iot(self):
    """EventHub trigger must receive IoT Hub metadata with the event.

    Loads 'eventhub_trigger_iot', invokes it with a JSON event plus
    'iothub-*' metadata entries, and checks the mock device id is
    echoed back in the string return value.
    """
    async with testutils.start_mockhost(
            script_root=self.mock_funcs_dir) as host:
        func_id, r = await host.load_function('eventhub_trigger_iot')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        async def call_and_check():
            # Invoke with one JSON event and the IoT Hub metadata the
            # binding is expected to surface to the function.
            _, r = await host.invoke_function(
                'eventhub_trigger_iot',
                [
                    protos.ParameterBinding(
                        name='event',
                        data=protos.TypedData(
                            json=json.dumps({'id': 'foo'})),
                    ),
                ],
                metadata={
                    'iothub-device-id': protos.TypedData(
                        string='mock-iothub-device-id'),
                    'iothub-auth-data': protos.TypedData(
                        string='mock-iothub-auth-data')
                })
            self.assertEqual(r.response.result.status,
                             protos.StatusResult.Success)
            # The function is expected to include the device id in its
            # response body.
            self.assertIn('mock-iothub-device-id',
                          r.response.return_value.string)

        await call_and_check()
async def test_call_async_function_check_logs(self):
    """Invoke an async function and verify its user (customer) log records.

    Expects exactly two logs in the 'my function' category: an info-level
    'hello info' and an error-level message, both tagged with the
    invocation id, and an 'OK-async' return value.
    """
    async with testutils.start_mockhost() as host:
        await host.load_function('async_logging')
        invoke_id, r = await host.invoke_function(
            'async_logging', [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(
                        http=protos.RpcHttp(
                            method='GET')))
            ])
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        # Only logs emitted under the 'my function' category count as
        # customer logs here.
        user_logs = [line for line in r.logs
                     if line.category == 'my function']
        self.assertEqual(len(user_logs), 2)

        first_msg = user_logs[0]
        self.assertEqual(first_msg.invocation_id, invoke_id)
        self.assertEqual(first_msg.message, 'hello info')
        self.assertEqual(first_msg.level, protos.RpcLog.Information)

        second_msg = user_logs[1]
        self.assertEqual(second_msg.invocation_id, invoke_id)
        self.assertTrue(second_msg.message.startswith('and another error'))
        self.assertEqual(second_msg.level, protos.RpcLog.Error)

        self.assertEqual(r.response.return_value.string, 'OK-async')
async def test_mock_timer__return_pastdue(self):
    """Timer trigger must surface IsPastDue through the 'pastdue' output.

    Invokes 'return_pastdue' with both True and False and checks the
    output binding echoes the flag as the string 'True'/'False'.
    """
    async with testutils.start_mockhost(
            script_root=self.timer_funcs_dir) as host:
        func_id, r = await host.load_function('return_pastdue')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        async def call_and_check(due: bool):
            _, r = await host.invoke_function(
                'return_pastdue', [
                    protos.ParameterBinding(
                        name='timer',
                        data=protos.TypedData(
                            json=json.dumps({
                                'IsPastDue': due
                            })))
                ])
            self.assertEqual(r.response.result.status,
                             protos.StatusResult.Success)
            # str(due) yields 'True' / 'False', matching the function's
            # stringified output.
            self.assertEqual(
                list(r.response.output_data), [
                    protos.ParameterBinding(
                        name='pastdue',
                        data=protos.TypedData(string=str(due)))
                ])

        await call_and_check(True)
        await call_and_check(False)
async def test_dispatcher_sync_threadpool_in_placeholder_below_min(self):
    """Test if the sync threadpool will use the default setting when the
    app setting is below minimum
    """
    ctrl = testutils.start_mockhost(script_root=self.dispatcher_funcs_dir)
    # Patch via 'with' rather than the decorator so assertion failures
    # are not swallowed in async test cases.
    with patch('azure_functions_worker.dispatcher.logger') as mock_logger:
        async with ctrl as host:
            await self._check_if_function_is_ok(host)

            # Reload environment variable on specialization with a
            # below-minimum thread count of '0'
            await host.reload_environment(
                environment={PYTHON_THREADPOOL_THREAD_COUNT: '0'})

            # Ensure the dispatcher sync threadpool falls back to 1
            self.assertEqual(ctrl._worker._sync_tp_max_workers, 1)
            self.assertIsNotNone(ctrl._worker._sync_call_tp)

            # Check that the dispatcher still functions
            await self._check_if_function_is_ok(host)

            # Check that the out-of-range value was warned about
            mock_logger.warning.assert_any_call(
                f'{PYTHON_THREADPOOL_THREAD_COUNT} must be set to a value '
                'between 1 and 32')
async def test_mock_generic_as_bytes_no_anno(self):
    """A generic binding without a type annotation round-trips raw bytes."""
    payload = b'\x00\x01'
    async with testutils.start_mockhost(
            script_root=self.generic_funcs_dir) as host:
        function_id, load_resp = await host.load_function(
            'foobar_as_bytes_no_anno')
        self.assertEqual(load_resp.response.function_id, function_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Success)

        # The function should echo the bytes input back unchanged.
        _, invoke_resp = await host.invoke_function(
            'foobar_as_bytes_no_anno',
            [protos.ParameterBinding(
                name='input',
                data=protos.TypedData(bytes=payload))])
        self.assertEqual(invoke_resp.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(invoke_resp.response.return_value,
                         protos.TypedData(bytes=payload))
async def test_call_sync_function_check_logs(self):
    """Invoke a sync function and verify its single user log record.

    Expects exactly one log in the 'my function' category describing a
    gracefully handled error, and an 'OK-sync' return value.
    """
    async with testutils.start_mockhost() as host:
        await host.load_function('sync_logging')
        invoke_id, r = await host.invoke_function(
            'sync_logging', [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(
                        http=protos.RpcHttp(
                            method='GET')))
            ])
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        # Only logs emitted under the 'my function' category count as
        # customer logs here.
        user_logs = [line for line in r.logs
                     if line.category == 'my function']
        self.assertEqual(len(user_logs), 1)

        log = user_logs[0]
        self.assertEqual(log.invocation_id, invoke_id)
        self.assertTrue(log.message.startswith(
            'a gracefully handled error'))

        self.assertEqual(r.response.return_value.string, 'OK-sync')
async def test_dispatcher_sync_threadpool_invalid_worker_count(self): """Test when sync threadpool maximum worker is set to an invalid value, the host should fallback to default value 1 """ # The @patch decorator does not work as expected and will suppress # any assertion failures in the async test cases. # Thus we're moving the patch() method to use the with syntax with patch('azure_functions_worker.dispatcher.logger') as mock_logger: # Configure thread pool max worker to an invalid value os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: 'invalid'}) ctrl = testutils.start_mockhost( script_root=self.dispatcher_funcs_dir) async with ctrl as host: await self._check_if_function_is_ok(host) # Ensure the dispatcher sync threadpool should fallback to 1 self.assertEqual(ctrl._worker._sync_tp_max_workers, 1) self.assertIsNotNone(ctrl._worker._sync_call_tp) # Check if the dispatcher still function await self._check_if_function_is_ok(host) mock_logger.warning.assert_any_call( f'{PYTHON_THREADPOOL_THREAD_COUNT} must be an integer')
async def test_mock_eventhub_cardinality_many(self):
    """Batch (cardinality=many) EventHub trigger with system properties.

    Sends a JSON *list* of events plus a 'SystemPropertiesArray' metadata
    entry carrying per-event IoT Hub properties, and expects 'OK_MANY'.
    """
    async with testutils.start_mockhost(
            script_root=self.mock_funcs_dir) as host:
        func_id, r = await host.load_function('eventhub_cardinality_many')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        _, r = await host.invoke_function(
            'eventhub_cardinality_many', [
                protos.ParameterBinding(
                    name='events',
                    data=protos.TypedData(
                        json=json.dumps([{
                            'id': 'cardinality_many'
                        }])
                    ),
                ),
            ],
            metadata={
                # One system-properties entry per event in the batch
                'SystemPropertiesArray': protos.TypedData(json=json.dumps([
                    {
                        'iothub-device-id': 'mock-iothub-device-id',
                        'iothub-auth-data': 'mock-iothub-auth-data',
                        'EnqueuedTimeUtc': '2020-02-18T21:28:42.5888539Z'
                    }
                ]))
            }
        )
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(r.response.return_value.string, 'OK_MANY')
# NOTE(review): this method has the same name as another
# test_mock_eventhub_cardinality_many in this source. If both live in the
# same test class, the later definition silently shadows the earlier one and
# only one variant runs — confirm and rename one of them (e.g.
# ..._single_event_no_metadata).
async def test_mock_eventhub_cardinality_many(self):
    """Batch EventHub trigger invoked with a single JSON object (not a
    list) and no metadata; still expects the 'OK_MANY' response.
    """
    async with testutils.start_mockhost(
            script_root=self.mock_funcs_dir) as host:
        func_id, r = await host.load_function('eventhub_cardinality_many')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)

        _, r = await host.invoke_function(
            'eventhub_cardinality_many', [
                protos.ParameterBinding(
                    name='events',
                    data=protos.TypedData(
                        json=json.dumps({
                            'id': 'cardinality_many'
                        })
                    ),
                ),
            ],
            metadata={}
        )
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Success)
        self.assertEqual(r.response.return_value.string, 'OK_MANY')
def setUp(self):
    """Prepare a mock host for the dispatcher functions and pin the
    interpreter version seen by the dispatcher to Python 3.9.
    """
    self._ctrl = testutils.start_mockhost(
        script_root=DISPATCHER_FUNCTIONS_DIR)
    # Snapshot the environment so it can be restored after the test
    # (presumably in tearDown — not visible in this chunk).
    self._pre_env = dict(os.environ)
    # Force the dispatcher to believe it is running on CPython 3.9.0
    self.mock_version_info = patch(
        'azure_functions_worker.dispatcher.sys.version_info',
        SysVersionInfo(3, 9, 0, 'final', 0))
    self.mock_version_info.start()
async def _test_binary_blob_read_function(self, func_name):
    """
    Verify that the function executed successfully when the worker
    received inputs for the function over shared memory.

    Writes random binary content into a fresh shared memory map, passes
    the map's coordinates to the function via an RpcSharedMemory binding,
    and checks the function reports back the same size and md5.
    """
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)

        # Write binary content into shared memory; size is just above the
        # minimum threshold for shared-memory transfer
        mem_map_name = self.get_new_mem_map_name()
        content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10
        content = self.get_random_bytes(content_size)
        content_md5 = hashlib.md5(content).hexdigest()
        mem_map_size = consts.CONTENT_HEADER_TOTAL_BYTES + content_size
        mem_map = self.file_accessor.create_mem_map(
            mem_map_name, mem_map_size)
        shared_mem_map = SharedMemoryMap(self.file_accessor, mem_map_name,
                                         mem_map)
        num_bytes_written = shared_mem_map.put_bytes(content)

        # Create a message to send to the worker containing info about the
        # shared memory region to read input from
        value = protos.RpcSharedMemory(name=mem_map_name,
                                       offset=0,
                                       count=num_bytes_written,
                                       type=protos.RpcDataType.bytes)

        # Invoke the function; it should read the input blob from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the input
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET'))),
                protos.ParameterBinding(name='file',
                                        rpc_shared_memory=value)
            ])

        # Dispose the shared memory map since the function is done using it
        shared_mem_map.dispose()

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        response_bytes = response_msg.response.return_value.http.body.bytes
        json_response = json.loads(response_bytes)
        func_received_content_size = json_response['content_size']
        func_received_content_md5 = json_response['content_md5']

        # Check the function response to ensure that it read the complete
        # input that we provided and the md5 matches
        self.assertEqual(content_size, func_received_content_size)
        self.assertEqual(content_md5, func_received_content_md5)
async def test_mock_eventhub_cardinality_one_bad_annotation(self):
    """Loading must be rejected when the event parameter is annotated
    with an incompatible type (int)."""
    async with testutils.start_mockhost(
            script_root=self.mock_funcs_dir) as host:
        # The load is expected to fail since the event should not be int
        function_id, load_resp = await host.load_function(
            'eventhub_cardinality_one_bad_anno')
        self.assertEqual(load_resp.response.function_id, function_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Failure)
async def test_import_module_troubleshooting_url(self):
    """Loading a function whose module import fails must report Failure
    with ModuleNotFoundError in the exception message.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('missing_module')

        # Consistency fix: the sibling broken-function tests all verify
        # that the load response references the requested function id;
        # previously func_id was assigned but never used here.
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(r.response.result.exception.message,
                         r'.*ModuleNotFoundError')
async def test_load_broken__syntax_error(self):
    """A function file containing a syntax error must fail to load and
    surface 'SyntaxError' in the exception message."""
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        function_id, load_resp = await host.load_function('syntax_error')
        self.assertEqual(load_resp.response.function_id, function_id)
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Failure)
        self.assertIn('SyntaxError',
                      load_resp.response.result.exception.message)
# NOTE(review): another test in this source shares this exact name; if both
# are defined in the same class, the later definition shadows this one —
# confirm and rename if so.
async def test_dispatcher_sync_threadpool_default_worker(self):
    '''Test if the sync threadpool has maximum worker count set to 1
    by default
    '''
    ctrl = testutils.start_mockhost(script_root=self.dispatcher_funcs_dir)
    async with ctrl as host:
        await self._check_if_function_is_ok(host)

        # Ensure the dispatcher sync threadpool count is set to 1
        self.assertEqual(ctrl._worker._sync_tp_max_workers, 1)
def setUp(self):
    """Prepare a mock host for the dispatcher functions, record the
    thread-count bounds expected on Python 3.7, and pin the interpreter
    version seen by the dispatcher to 3.7.
    """
    self._ctrl = testutils.start_mockhost(
        script_root=DISPATCHER_FUNCTIONS_DIR)
    # Expected fallback when no app setting is configured
    self._default_workers: Optional[
        int] = PYTHON_THREADPOOL_THREAD_COUNT_DEFAULT
    # A value well above the 3.7 maximum, used to test clamping
    self._over_max_workers: int = 10000
    self._allowed_max_workers: int = PYTHON_THREADPOOL_THREAD_COUNT_MAX_37
    # Snapshot the environment so it can be restored after the test
    # (presumably in tearDown — not visible in this chunk).
    self._pre_env = dict(os.environ)
    # Force the dispatcher to believe it is running on CPython 3.7.0
    self.mock_version_info = patch(
        'azure_functions_worker.dispatcher.sys.version_info',
        SysVersionInfo(3, 7, 0, 'final', 0))
    self.mock_version_info.start()
async def test_dispatcher_sync_threadpool_set_worker(self): '''Test if the sync threadpool maximum worker can be set ''' # Configure thread pool max worker os.environ.update({PYTHON_THREADPOOL_THREAD_COUNT: '5'}) ctrl = testutils.start_mockhost(script_root=self.dispatcher_funcs_dir) async with ctrl as host: await self._check_if_function_is_ok(host) # Ensure the dispatcher sync threadpool count is set to 1 self.assertEqual(ctrl._worker._sync_tp_max_workers, 5)
async def test_root_logger_should_be_customer_log(self):
    """When customer use the root logger to send logs, the 'root' namespace
    should be treated as customer log, only sending to our customers.
    """
    with patch('azure_functions_worker.dispatcher.is_system_log_category'
               ) as islc_mock:
        async with testutils.start_mockhost(script_root=self.dir) as host:
            await host.load_function('debug_logging')
            await self._invoke_function(host, 'debug_logging')

            # The category classifier must have been consulted for 'root'
            self.assertIn(call('root'), islc_mock.call_args_list)
            # ... and 'root' must not classify as a system log category
            self.assertFalse(is_system_log_category('root'))
async def test_load_broken__return_param_in(self):
    """Loading must fail when the '$return' binding is declared with
    direction 'out' misused as an input parameter.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('return_param_in')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r'.*cannot load the return_param_in function'
            r'.*"\$return" .* set to "out"')
async def test_load_broken__invalid_in_anno_non_type(self):
    """Loading must fail when a binding parameter is annotated with a
    value (123) that is not a type.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('invalid_in_anno_non_type')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r'.*cannot load the invalid_in_anno_non_type function: '
            r'binding req has invalid non-type annotation 123')
# NOTE(review): another test in this source shares this exact name; if both
# are defined in the same class, only the later definition runs — confirm
# and rename if so.
async def test_dispatcher_sync_threadpool_default_worker(self):
    """Test if the sync threadpool has maximum worker count set to 1
    by default
    """
    ctrl = testutils.start_mockhost(script_root=self.dispatcher_funcs_dir)
    async with ctrl as host:
        await self._check_if_function_is_ok(host)

        # Default sync threadpool: one worker, pool present
        self.assertEqual(ctrl._worker._sync_tp_max_workers, 1)
        self.assertIsNotNone(ctrl._worker._sync_call_tp)

        # Check if the dispatcher still functions
        await self._check_if_function_is_ok(host)
async def test_load_broken__invalid_context_param(self):
    """Loading must fail when the reserved 'context' parameter is
    declared incorrectly.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('invalid_context_param')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r'.*cannot load the invalid_context_param function'
            r'.*the "context" parameter.*')
async def test_load_broken__bad_out_annotation(self):
    """Loading must fail when an output binding carries an invalid Out
    annotation.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('bad_out_annotation')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r'.*cannot load the bad_out_annotation function'
            r'.*binding foo has invalid Out annotation.*')
async def test_load_broken__wrong_param_dir(self):
    """Loading must fail when a binding's declared direction ('out')
    conflicts with how the parameter is used.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('wrong_param_dir')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r'.*cannot load the wrong_param_dir function'
            r'.*binding foo is declared to have the "out".*')
async def test_close_shared_memory_maps(self):
    """
    Close the shared memory maps created by the worker to transfer
    output blob to the host after the host is done processing the response.
    """
    func_name = 'put_blob_as_bytes_return_http_response'
    async with testutils.start_mockhost(script_root=self.blob_funcs_dir) \
            as host:
        await host.load_function(func_name)

        # Request an output just above the shared-memory transfer threshold
        # so the worker writes it via shared memory
        content_size = consts.MIN_BYTES_FOR_SHARED_MEM_TRANSFER + 10
        http_params = {'content_size': str(content_size)}

        # Invoke the function; it should read the input blob from shared
        # memory and respond back in the HTTP body with the number of bytes
        # it read in the input
        _, response_msg = await host.invoke_function(
            func_name,
            [
                protos.ParameterBinding(
                    name='req',
                    data=protos.TypedData(http=protos.RpcHttp(
                        method='GET',
                        query=http_params))),
            ])

        # Verify if the function executed successfully
        self.assertEqual(protos.StatusResult.Success,
                         response_msg.response.result.status)

        # Verify if the worker produced an output blob which was written
        # in shared memory
        output_data = response_msg.response.output_data
        output_binding = output_data[0]

        # Get the information about the shared memory region in which the
        # worker wrote the function's output blob
        shmem = output_binding.rpc_shared_memory
        mem_map_name = shmem.name

        # Request the worker to close the memory maps
        mem_map_names = [mem_map_name]
        response_msg = \
            await host.close_shared_memory_resources(mem_map_names)

        # Verify that the worker responds with a successful status after
        # closing the requested memory map
        mem_map_statuses = response_msg.response.close_map_results
        self.assertEqual(len(mem_map_names), len(mem_map_statuses.keys()))
        for mem_map_name in mem_map_names:
            self.assertTrue(mem_map_name in mem_map_statuses)
            status = mem_map_statuses[mem_map_name]
            self.assertTrue(status)
async def test_handles_unsupported_messages_gracefully(self):
    """The worker must survive protocol messages it does not support."""
    async with testutils.start_mockhost() as host:
        # Deliberately send a message type the worker will never support.
        # Workers should survive such messages and continue operating —
        # if anything, the host can always terminate the worker.
        unsupported = protos.StreamingMessage(
            worker_heartbeat=protos.WorkerHeartbeat())
        await host.send(unsupported)

        # The worker should still service subsequent requests normally.
        _, load_resp = await host.load_function('return_out')
        self.assertEqual(load_resp.response.result.status,
                         protos.StatusResult.Success)
async def test_sdk_logger_should_be_system_log(self):
    """When sdk uses the 'azure.functions' logger to send logs, the
    namespace should be treated as system log, sending to our customers
    and our kusto table.
    """
    with patch('azure_functions_worker.dispatcher.is_system_log_category'
               ) as islc_mock:
        async with testutils.start_mockhost(script_root=self.dir) as host:
            await host.load_function('sdk_logging')
            await self._invoke_function(host, 'sdk_logging')

            # The category classifier must have been consulted for
            # 'azure.functions'
            self.assertIn(call('azure.functions'), islc_mock.call_args_list)
            # ... and 'azure.functions' must classify as a system category
            self.assertTrue(is_system_log_category('azure.functions'))
async def test_load_broken__import_error(self):
    """An ImportError during load must fail the function, and the
    reported message must not leak frozen importlib bootstrap frames.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('import_error')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)

        self.assertIn('ImportError', r.response.result.exception.message)
        # Internal importlib machinery frames should be filtered out of
        # the user-facing traceback
        self.assertNotIn('<frozen importlib._bootstrap>',
                         r.response.result.exception.message)
        self.assertNotIn('<frozen importlib._bootstrap_external>',
                         r.response.result.exception.message)
async def test_load_broken__missing_py_param(self):
    """Loading must fail when a parameter declared in function.json
    ('req') is missing from the Python function signature.
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('missing_py_param')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r".*cannot load the missing_py_param function"
            r".*parameters are declared in function.json"
            r".*'req'.*")
async def test_load_broken__invalid_return_anno(self):
    """Loading must fail when the Python return annotation ('int') does
    not match the declared binding type ('http').
    """
    async with testutils.start_mockhost(
            script_root=self.broken_funcs_dir) as host:
        func_id, r = await host.load_function('invalid_return_anno')
        self.assertEqual(r.response.function_id, func_id)
        self.assertEqual(r.response.result.status,
                         protos.StatusResult.Failure)
        self.assertRegex(
            r.response.result.exception.message,
            r'.*cannot load the invalid_return_anno function'
            r'.*Python return annotation "int" does not match '
            r'binding type "http"')