def wrapper(self, *args, __meth__=test_case, __check_log__=check_log_case, **kwargs):
    """Run the wrapped test case; when a log-check hook is supplied (and we
    are not in webhost debug mode), also feed the host's captured output,
    stripped of timestamps, to that hook.
    """
    run_log_checks = (
        __check_log__ is not None
        and callable(__check_log__)
        and not is_envvar_true(PYAZURE_WEBHOST_DEBUG)
    )
    if not run_log_checks:
        # Plain unit-test path: no log verification requested.
        return self._run_test(__meth__, *args, **kwargs)

    # Run the actual test first so the host output is fully populated.
    result = self._run_test(__meth__, *args, **kwargs)

    # Strip the leading "[date time]" stamp the host prepends to each line.
    ts_re = r"^\[\d+(\/|-)\d+(\/|-)\d+T*\d+\:\d+\:\d+.*(" \
            r"A|P)*M*\]"
    raw_lines = getattr(self, 'host_out', '').splitlines()
    cleaned = [re.sub(ts_re, '', line).strip() for line in raw_lines]

    # Hand the sanitized output to the check_log_ companion test.
    self._run_test(__check_log__, host_out=cleaned)
    return result
def setUpClass(cls):
    """Provision the function app and start the test webhost.

    In debug mode the host writes straight to the console (host_stdout is
    None); otherwise its output is captured in a temporary file. If the
    host fails to start, the provisioned app is torn down before the
    exception propagates.
    """
    script_dir = pathlib.Path(cls.get_script_dir())

    # Capture host output unless the developer asked to see it live.
    cls.host_stdout = (
        None if is_envvar_true(PYAZURE_WEBHOST_DEBUG)
        else tempfile.NamedTemporaryFile('w+t')
    )

    app_root = TESTS_ROOT / script_dir
    _setup_func_app(app_root)
    try:
        cls.webhost = start_webhost(script_dir=script_dir,
                                    stdout=cls.host_stdout)
    except Exception:
        # Host failed to start -- don't leave a half-provisioned app behind.
        _teardown_func_app(app_root)
        raise
def test_is_disabled(self):
    """
    Verify that when the AppSetting is disabled, SharedMemoryManager
    is disabled.
    """
    # Remember whether shared memory data transfer was enabled so the
    # environment can be put back afterwards.
    was_shmem_env_true = is_envvar_true(
        FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED)
    os.environ.update(
        {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '0'})
    try:
        manager = SharedMemoryManager()
        self.assertFalse(manager.is_enabled())
    finally:
        # Restore the env variable to its original value. Previously this
        # cleanup ran after the assertion, so a failing assertion leaked
        # the '0' setting into every subsequent test; the try/finally
        # guarantees it runs.
        # NOTE(review): if the variable was originally unset it is still
        # left at '0' here -- behavior preserved from the original; confirm
        # no later test depends on it being unset.
        if was_shmem_env_true:
            os.environ.update(
                {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'})
def setUp(self):
    """Enable shared-memory data transfer for the test and create a file
    accessor, remembering any pre-existing settings so they can be
    restored later.
    """
    # Record the prior state of the feature flag, then force it on.
    self.was_shmem_env_true = is_envvar_true(
        FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED)
    os.environ.update(
        {FUNCTIONS_WORKER_SHARED_MEMORY_DATA_TRANSFER_ENABLED: '1'})

    system_name = platform.system()
    if system_name == 'Darwin':
        # If an existing AppSetting is specified, save it so it can be
        # restored later
        self.was_shmem_dirs = get_app_setting(
            UNIX_SHARED_MEMORY_DIRECTORIES)
        self._setUpDarwin()
    elif system_name == 'Linux':
        self._setUpLinux()

    self.file_accessor = FileAccessorFactory.create_file_accessor()
def start_webhost(*, script_dir=None, stdout=None):
    """Launch the Functions webhost on a free port and return a proxy to it.

    When no stdout is given, host output goes to the console in debug mode
    and is discarded otherwise.
    """
    script_root = TESTS_ROOT / script_dir if script_dir else FUNCS_PATH

    if stdout is None:
        stdout = (sys.stdout if is_envvar_true(PYAZURE_WEBHOST_DEBUG)
                  else subprocess.DEVNULL)

    port = _find_open_port()
    proc = popen_webhost(stdout=stdout, stderr=subprocess.STDOUT,
                         script_root=script_root, port=port)

    time.sleep(10)  # Giving host some time to start fully.

    return _WebHostProxy(proc, f'http://{LOCALHOST}:{port}')
def test_is_envvar_not_true_on_unset(self):
    """An unset feature flag must be reported as not-true."""
    self._unset_feature_flag()
    flag_state = common.is_envvar_true(TEST_FEATURE_FLAG)
    self.assertFalse(flag_state)
def test_is_envvar_true(self):
    """A feature flag explicitly set to 'true' must be reported as true."""
    os.environ[TEST_FEATURE_FLAG] = 'true'
    flag_state = common.is_envvar_true(TEST_FEATURE_FLAG)
    self.assertTrue(flag_state)
def popen_webhost(*, stdout, stderr, script_root=FUNCS_PATH, port=None):
    """Locate an Azure Functions Host binary and launch it as a subprocess.

    Resolution order: Core Tools (``CORE_TOOLS_EXE_PATH``), then a host DLL
    (``PYAZURE_WEBHOST_DLL`` env var, then the worker config file, then
    ``DEFAULT_WEBHOST_DLL_PATH``). Raises ``RuntimeError`` when no binary is
    found or the worker path does not exist. Returns the ``subprocess.Popen``
    handle of the started host.

    NOTE(review): this function mutates ``os.environ`` (sets
    ``AzureWebJobsFeatureFlags``) as a side effect.
    """
    # Optional test configuration file with webhost/azure settings.
    testconfig = None
    if WORKER_CONFIG.exists():
        testconfig = configparser.ConfigParser()
        testconfig.read(WORKER_CONFIG)

    hostexe_args = []
    os.environ['AzureWebJobsFeatureFlags'] = 'EnableWorkerIndexing'

    # If we want to use core-tools
    coretools_exe = os.environ.get('CORE_TOOLS_EXE_PATH')
    if coretools_exe:
        coretools_exe = coretools_exe.strip()
        if pathlib.Path(coretools_exe).exists():
            hostexe_args = [str(coretools_exe), 'host', 'start']
            if port is not None:
                hostexe_args.extend(['--port', str(port)])

    # If we need to use Functions host directly
    if not hostexe_args:
        dll = os.environ.get('PYAZURE_WEBHOST_DLL')
        if not dll and testconfig and testconfig.has_section('webhost'):
            dll = testconfig['webhost'].get('dll')
        if dll:
            # Paths from environment might contain trailing
            # or leading whitespace.
            dll = dll.strip()
        if not dll:
            # Fall back to the default DLL and make sure its Secrets
            # folder exists with a host.json generated from the template.
            # NOTE(review): assumes DEFAULT_WEBHOST_DLL_PATH is a pathlib
            # Path (it is dereferenced via .parent) -- confirm at its
            # definition site.
            dll = DEFAULT_WEBHOST_DLL_PATH
            os.makedirs(dll.parent / 'Secrets', exist_ok=True)
            with open(dll.parent / 'Secrets' / 'host.json', 'w') as f:
                secrets = SECRETS_TEMPLATE
                f.write(secrets)
        if dll and pathlib.Path(dll).exists():
            hostexe_args = ['dotnet', str(dll)]

    # Neither Core Tools nor a host DLL could be located: explain the
    # available setup options to the developer.
    if not hostexe_args:
        raise RuntimeError('\n'.join([
            'Unable to locate Azure Functions Host binary.',
            'Please do one of the following:',
            ' * run the following command from the root folder of',
            ' the project:',
            '',
            f' $ {sys.executable} setup.py webhost',
            '',
            ' * or download or build the Azure Functions Host and'
            ' then write the full path to WebHost.dll'
            ' into the `PYAZURE_WEBHOST_DLL` environment variable.',
            ' Alternatively, you can create the',
            f' {WORKER_CONFIG.name} file in the root folder',
            ' of the project with the following structure:',
            '',
            ' [webhost]',
            ' dll = /path/Microsoft.Azure.WebJobs.Script.WebHost.dll',
            ' * or download Azure Functions Core Tools binaries and',
            ' then write the full path to func.exe into the ',
            ' `CORE_TOOLS_EXE_PATH` envrionment variable.',
            '',
            'Setting "export PYAZURE_WEBHOST_DEBUG=true" to get the full',
            'stdout and stderr from function host.'
        ]))

    # Resolve the Python worker directory (env override or default).
    worker_path = os.environ.get(PYAZURE_WORKER_DIR)
    worker_path = WORKER_PATH if not worker_path else pathlib.Path(worker_path)
    if not worker_path.exists():
        raise RuntimeError(f'Worker path {worker_path} does not exist')

    # Casting to strings is necessary because Popen doesn't like
    # path objects there on Windows.
    extra_env = {
        'AzureWebJobsScriptRoot': str(script_root),
        'languageWorkers:python:workerDirectory': str(worker_path),
        'host:logger:consoleLoggingMode': 'always',
        'AZURE_FUNCTIONS_ENVIRONMENT': 'development',
        'AzureWebJobsSecretStorageType': 'files',
        'FUNCTIONS_WORKER_RUNTIME': 'python'
    }

    # In E2E Integration mode, we should use the core tools worker
    # from the latest artifact instead of the azure_functions_worker module
    if is_envvar_true(PYAZURE_INTEGRATION_TEST):
        extra_env.pop('languageWorkers:python:workerDirectory')

    # Propagate any service connection strings from the test config file.
    if testconfig and 'azure' in testconfig:
        st = testconfig['azure'].get('storage_key')
        if st:
            extra_env['AzureWebJobsStorage'] = st

        cosmos = testconfig['azure'].get('cosmosdb_key')
        if cosmos:
            extra_env['AzureWebJobsCosmosDBConnectionString'] = cosmos

        eventhub = testconfig['azure'].get('eventhub_key')
        if eventhub:
            extra_env['AzureWebJobsEventHubConnectionString'] = eventhub

        servicebus = testconfig['azure'].get('servicebus_key')
        if servicebus:
            extra_env['AzureWebJobsServiceBusConnectionString'] = servicebus

        eventgrid_topic_uri = testconfig['azure'].get('eventgrid_topic_uri')
        if eventgrid_topic_uri:
            extra_env['AzureWebJobsEventGridTopicUri'] = eventgrid_topic_uri

        eventgrid_topic_key = testconfig['azure'].get('eventgrid_topic_key')
        if eventgrid_topic_key:
            extra_env['AzureWebJobsEventGridConnectionKey'] = \
                eventgrid_topic_key

    if port is not None:
        extra_env['ASPNETCORE_URLS'] = f'http://*:{port}'

    return subprocess.Popen(
        hostexe_args,
        cwd=script_root,
        env={
            **os.environ,
            **extra_env,
        },
        stdout=stdout,
        stderr=stderr)
class TestGRPCandProtobufDependencyIsolationOnDedicated(
        testutils.WebHostTestCase):
    """Test the dependency manager E2E scenario via Http Trigger.

    The following E2E tests ensures the dependency manager is behaving as
    expected. They are tested against the dependency_isolation_functions/
    folder which contain a dummy .python_packages_grpc_protobuf folder.
    This testcase checks if the customers library version of grpc and
    protobuf are being loaded in the functionapp
    """

    function_name = 'dependency_isolation_functions'
    package_name = '.python_packages_grpc_protobuf'
    project_root = testutils.E2E_TESTS_ROOT / function_name
    customer_deps = project_root / package_name / 'lib' / 'site-packages'

    @classmethod
    def setUpClass(cls):
        """Patch the environment to emulate an Azure-hosted Python worker
        with the customer's packages on PYTHONPATH, then start the host.
        """
        os_environ = os.environ.copy()
        # Turn on feature flag
        os_environ['PYTHON_ISOLATE_WORKER_DEPENDENCIES'] = '1'
        # Emulate Python worker in Azure environment.
        # For how the PYTHONPATH is set in Azure, check prodV3/worker.py.
        os_environ['PYTHONPATH'] = str(cls.customer_deps)

        cls._patch_environ = patch.dict('os.environ', os_environ)
        cls._patch_environ.start()
        super().setUpClass()

    @classmethod
    def tearDownClass(cls):
        # Fixed: the first parameter of a classmethod is the class itself;
        # it was previously (misleadingly) named `self`.
        super().tearDownClass()
        cls._patch_environ.stop()

    @classmethod
    def get_script_dir(cls):
        return cls.project_root

    @testutils.retryable_test(3, 5)
    def test_dependency_function_should_return_ok(self):
        """The common scenario of general import should return OK in any
        circumstances
        """
        r: Response = self.webhost.request('GET', 'report_dependencies')
        self.assertTrue(r.ok)

    @testutils.retryable_test(3, 5)
    def test_feature_flag_is_turned_on(self):
        """Since passing the feature flag PYTHON_ISOLATE_WORKER_DEPENDENCIES
        to the host, the customer's function should also be able to receive
        it
        """
        r: Response = self.webhost.request('GET', 'report_dependencies')
        environments = r.json()['environments']
        flag_value = environments['PYTHON_ISOLATE_WORKER_DEPENDENCIES']
        self.assertEqual(flag_value, '1')

    @testutils.retryable_test(3, 5)
    def test_working_directory_resolution(self):
        """Check from the dependency manager and see if the current
        working directory is resolved correctly
        """
        r: Response = self.webhost.request('GET', 'report_dependencies')
        environments = r.json()['environments']

        # Fixed: local was named `dir`, shadowing the builtin.
        current_dir = os.path.dirname(__file__)
        self.assertEqual(
            environments['AzureWebJobsScriptRoot'].lower(),
            os.path.join(current_dir, 'dependency_isolation_functions').lower()
        )

    @skipIf(
        is_envvar_true(PYAZURE_INTEGRATION_TEST),
        'Integration test expects dependencies derived from core '
        'tools folder'
    )
    @testutils.retryable_test(3, 5)
    def test_paths_resolution(self):
        """Dependency manager requires paths to be resolved correctly
        before switching to customer's modules. This test is to ensure
        when the app is in ready state, check if the paths are in good
        state.
        """
        r: Response = self.webhost.request('GET', 'report_dependencies')
        dm = r.json()['dependency_manager']
        self.assertEqual(
            dm['cx_working_dir'].lower(), str(self.project_root).lower()
        )
        self.assertEqual(
            dm['cx_deps_path'].lower(), str(self.customer_deps).lower()
        )

        # Should derive the package location from the built-in
        # azure.functions
        azf_spec = importlib.util.find_spec('azure.functions')
        self.assertEqual(
            dm['worker_deps_path'].lower(),
            os.path.abspath(
                os.path.join(os.path.dirname(azf_spec.origin), '..', '..')
            ).lower()
        )

    @testutils.retryable_test(3, 5)
    def test_loading_libraries_from_customers_package(self):
        """Since the Python now loaded the customer's dependencies, the
        libraries version should match the ones in
        .python_packages_grpc_protobuf/ folder
        """
        r: Response = self.webhost.request('GET', 'report_dependencies')
        libraries = r.json()['libraries']

        self.assertEqual(
            libraries['proto.expected.version'], libraries['proto.version']
        )

        self.assertEqual(
            libraries['grpc.expected.version'], libraries['grpc.version']
        )