From 65d1bc6e47118f837e8537132f8ce55d18217564 Mon Sep 17 00:00:00 2001 From: peterstone2017 <12449837+YunchuWang@users.noreply.github.com> Date: Tue, 18 Oct 2022 19:03:49 -0500 Subject: [PATCH] Lazy Logging rather than in-line strings (#1067) --- azure_functions_worker/bindings/datumdef.py | 13 ++--- .../file_accessor_unix.py | 48 +++++++++++-------- .../file_accessor_windows.py | 4 +- .../shared_memory_manager.py | 15 +++--- .../shared_memory_map.py | 6 +-- azure_functions_worker/dispatcher.py | 35 +++++++------- azure_functions_worker/extension.py | 20 ++++---- azure_functions_worker/testutils.py | 4 +- azure_functions_worker/utils/dependency.py | 36 +++++++------- tests/unittests/test_dispatcher.py | 22 ++++----- tests/unittests/test_extension.py | 24 +++++----- 11 files changed, 119 insertions(+), 108 deletions(-) diff --git a/azure_functions_worker/bindings/datumdef.py b/azure_functions_worker/bindings/datumdef.py index 3600c376..b420fbf3 100644 --- a/azure_functions_worker/bindings/datumdef.py +++ b/azure_functions_worker/bindings/datumdef.py @@ -133,8 +133,8 @@ def from_rpc_shared_memory( if ret_val is not None: logger.info( - f'Read {count} bytes from memory map {mem_map_name} ' - f'for data type {data_type}') + 'Read %s bytes from memory map %s for data type %s', count, + mem_map_name, data_type) return ret_val return None @@ -162,8 +162,8 @@ def to_rpc_shared_memory( ) if shared_mem_meta is None: - logger.warning('Cannot write to shared memory for type: ' - f'{datum.type}') + logger.warning('Cannot write to shared memory for type: %s', + datum.type) return None shmem = protos.RpcSharedMemory( @@ -173,8 +173,9 @@ def to_rpc_shared_memory( type=data_type) logger.info( - f'Wrote {shared_mem_meta.count_bytes} bytes to memory map ' - f'{shared_mem_meta.mem_map_name} for data type {data_type}') + 'Wrote %s bytes to memory map %s for data type %s', + shared_mem_meta.count_bytes, shared_mem_meta.mem_map_name, + data_type) return shmem diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py index 5d031559..aaf12210 100644 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py +++ b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_unix.py @@ -43,7 +43,7 @@ def open_mem_map( f'Cannot open memory map. Invalid size {mem_map_size}') fd = self._open_mem_map_file(mem_map_name) if fd is None: - logger.warning(f'Cannot open file: {mem_map_name}') + logger.warning('Cannot open file: %s', mem_map_name) return None mem_map = mmap.mmap(fd.fileno(), mem_map_size, access=access) return mem_map @@ -58,7 +58,7 @@ def create_mem_map(self, mem_map_name: str, mem_map_size: int) \ f'Cannot create memory map. Invalid size {mem_map_size}') file = self._create_mem_map_file(mem_map_name, mem_map_size) if file is None: - logger.warning(f'Cannot create file: {mem_map_name}') + logger.warning('Cannot create file: %s', mem_map_name) return None mem_map = mmap.mmap(file.fileno(), mem_map_size, mmap.MAP_SHARED, mmap.PROT_WRITE) @@ -80,7 +80,7 @@ def delete_mem_map(self, mem_map_name: str, mem_map: mmap.mmap) -> bool: # deletion was unsuccessful. # These logs can help identify if we may be leaking memory and not # cleaning up the created memory maps. 
- logger.error(f'Cannot delete memory map {mem_map_name} - {e}', + logger.error('Cannot delete memory map %s - %s', mem_map_name, e, exc_info=True) return False mem_map.close() @@ -96,12 +96,15 @@ def _get_allowed_mem_map_dirs(self) -> List[str]: allowed_mem_map_dirs_str = get_app_setting(setting) if allowed_mem_map_dirs_str is None: allowed_mem_map_dirs = consts.UNIX_TEMP_DIRS - logger.info('Using allowed directories for shared memory: ' - f'{allowed_mem_map_dirs} from App Setting: {setting}') + logger.info( + 'Using allowed directories for shared memory: %s from App ' + 'Setting: %s', + allowed_mem_map_dirs, setting) else: allowed_mem_map_dirs = allowed_mem_map_dirs_str.split(',') - logger.info('Using default allowed directories for shared memory: ' - f'{allowed_mem_map_dirs}') + logger.info( + 'Using default allowed directories for shared memory: %s', + allowed_mem_map_dirs) return allowed_mem_map_dirs def _get_valid_mem_map_dirs(self) -> List[str]: @@ -121,18 +124,20 @@ def _get_valid_mem_map_dirs(self) -> List[str]: if os.path.exists(dir_path): # A valid directory already exists valid_dirs.append(dir_path) - logger.debug(f'Found directory {dir_path} to store memory maps') + logger.debug('Found directory %s to store memory maps', + dir_path) else: try: os.makedirs(dir_path) valid_dirs.append(dir_path) except Exception as e: # We keep trying to check/create others - logger.warning(f'Cannot create directory {dir_path} to ' - f'store memory maps - {e}', exc_info=True) + logger.warning('Cannot create directory %s to ' + 'store memory maps - %s', dir_path, e, + exc_info=True) if len(valid_dirs) == 0: - logger.error('No valid directory for memory maps in ' - f'{allowed_dirs}') + logger.error('No valid directory for memory maps in %s', + allowed_dirs) return valid_dirs def _open_mem_map_file(self, mem_map_name: str) -> Optional[BufferedRandom]: @@ -149,11 +154,13 @@ def _open_mem_map_file(self, mem_map_name: str) -> Optional[BufferedRandom]: fd = open(file_path, 'r+b') return fd except Exception as e: - logger.error(f'Cannot open file {file_path} - {e}', + logger.error('Cannot open file %s - %s', file_path, e, exc_info=True) # The memory map was not found in any of the known directories - logger.error(f'Cannot open memory map {mem_map_name} in any of the ' - f'following directories: {self.valid_dirs}') + logger.error( + 'Cannot open memory map %s in any of the following directories: ' + '%s', + mem_map_name, self.valid_dirs) return None def _create_mem_map_file(self, mem_map_name: str, mem_map_size: int) \ @@ -179,12 +186,13 @@ def _create_mem_map_file(self, mem_map_name: str, mem_map_size: int) \ except Exception as e: # If the memory map could not be created in this directory, we # keep trying in other applicable directories. - logger.warning(f'Cannot create memory map in {file_path} - {e}.' - ' Trying other directories.', exc_info=True) + logger.warning('Cannot create memory map in %s - %s.' + ' Trying other directories.', file_path, e, + exc_info=True) # Could not create the memory map in any of the applicable directory # paths so we fail. 
logger.error( - f'Cannot create memory map {mem_map_name} with size ' - f'{mem_map_size} in any of the following directories: ' - f'{self.valid_dirs}') + 'Cannot create memory map %s with size %s in any of the ' + 'following directories: %s', + mem_map_name, mem_map_size, self.valid_dirs) return None diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py index 50ea2050..dfd3ed3a 100644 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py +++ b/azure_functions_worker/bindings/shared_memory_data_transfer/file_accessor_windows.py @@ -34,8 +34,8 @@ def open_mem_map( return mem_map except Exception as e: logger.warning( - f'Cannot open memory map {mem_map_name} with size ' - f'{mem_map_size} - {e}') + 'Cannot open memory map %s with size %s - %s', mem_map_name, + mem_map_size, e) return None def create_mem_map(self, mem_map_name: str, mem_map_size: int) \ diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py index b5fc1ed0..a74027b7 100644 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py +++ b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_manager.py @@ -92,14 +92,14 @@ def put_bytes(self, content: bytes) -> Optional[SharedMemoryMetadata]: try: num_bytes_written = shared_mem_map.put_bytes(content) except Exception as e: - logger.warning(f'Cannot write {content_length} bytes into shared ' - f'memory {mem_map_name} - {e}') + logger.warning('Cannot write %s bytes into shared memory %s - %s', + content_length, mem_map_name, e) shared_mem_map.dispose() return None if num_bytes_written != content_length: logger.error( - f'Cannot write data into shared memory {mem_map_name} ' - f'({num_bytes_written} != {content_length})') + 'Cannot write data into shared memory %s (%s != %s)', + mem_map_name, num_bytes_written, content_length) shared_mem_map.dispose() return None self.allocated_mem_maps[mem_map_name] = shared_mem_map @@ -128,8 +128,8 @@ def get_bytes(self, mem_map_name: str, offset: int, count: int) \ """ if offset != 0: logger.error( - f'Cannot read bytes. Non-zero offset ({offset}) ' - f'not supported.') + 'Cannot read bytes. 
Non-zero offset (%s) not supported.', + offset) return None shared_mem_map = self._open(mem_map_name, count) if shared_mem_map is None: @@ -168,7 +168,8 @@ def free_mem_map(self, mem_map_name: str, """ if mem_map_name not in self.allocated_mem_maps: logger.error( - f'Cannot find memory map in list of allocations {mem_map_name}') + 'Cannot find memory map in list of allocations %s', + mem_map_name) return False shared_mem_map = self.allocated_mem_maps[mem_map_name] success = shared_mem_map.dispose(to_delete_backing_resources) diff --git a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py index f1dedcb3..751617db 100644 --- a/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py +++ b/azure_functions_worker/bindings/shared_memory_data_transfer/shared_memory_map.py @@ -53,9 +53,9 @@ def put_bytes(self, content: bytes) -> Optional[int]: content_length_bytes) if num_content_length_bytes_written != num_content_length_bytes: logger.error( - f'Cannot write content size to memory map {self.mem_map_name} ' - f'({num_content_length_bytes_written} != ' - f'{num_content_length_bytes})') + 'Cannot write content size to memory map %s (%s != %s)', + self.mem_map_name, num_content_length_bytes_written, + num_content_length_bytes) return 0 # Write the content num_content_bytes_written = self.mem_map.write(content) diff --git a/azure_functions_worker/dispatcher.py b/azure_functions_worker/dispatcher.py index 26dff96d..f0cc832f 100644 --- a/azure_functions_worker/dispatcher.py +++ b/azure_functions_worker/dispatcher.py @@ -242,8 +242,8 @@ async def _dispatch_grpc_request(self, request): # Don't crash on unknown messages. Some of them can be ignored; # and if something goes really wrong the host can always just # kill the worker's process. - logger.error(f'unknown StreamingMessage content type ' - f'{content_type}') + logger.error('unknown StreamingMessage content type %s', + content_type) return resp = await request_handler(request) @@ -302,8 +302,8 @@ async def _handle__functions_metadata_request(self, request): if not os.path.exists(function_path): # Fallback to legacy model - logger.info(f"{SCRIPT_FILE_NAME} does not exist. " - "Switching to host indexing.") + logger.info("%s does not exist. 
" + "Switching to host indexing.", SCRIPT_FILE_NAME) return protos.StreamingMessage( request_id=request.request_id, function_metadata_response=protos.FunctionMetadataResponse( @@ -327,8 +327,8 @@ async def _handle__functions_metadata_request(self, request): indexed_function_logs.append(function_log) logger.info( - f'Successfully processed FunctionMetadataRequest for ' - f'functions: {" ".join(indexed_function_logs)}') + 'Successfully processed FunctionMetadataRequest for ' + 'functions: %s', " ".join(indexed_function_logs)) fx_metadata_results = loader.process_indexed_function( self._functions, @@ -378,9 +378,10 @@ async def _handle__function_load_request(self, request): ) logger.info('Successfully processed FunctionLoadRequest, ' - f'request ID: {self.request_id}, ' - f'function ID: {function_id},' - f'function Name: {function_name}') + 'request ID: %s, ' + 'function ID: %s,' + 'function Name: %s', self.request_id, function_id, + function_name) return protos.StreamingMessage( request_id=self.request_id, @@ -602,7 +603,7 @@ async def _handle__close_shared_memory_resources_request(self, request): to_delete_resources) results[map_name] = success except Exception as e: - logger.error(f'Cannot free memory map {map_name} - {e}', + logger.error('Cannot free memory map %s - %s', map_name, e, exc_info=True) finally: response = protos.CloseSharedMemoryResourcesResponse( @@ -653,16 +654,16 @@ def tp_max_workers_validator(value: str) -> bool: try: int_value = int(value) except ValueError: - logger.warning(f'{PYTHON_THREADPOOL_THREAD_COUNT} must be an ' - 'integer') + logger.warning('%s must be an integer', + PYTHON_THREADPOOL_THREAD_COUNT) return False if int_value < PYTHON_THREADPOOL_THREAD_COUNT_MIN: - logger.warning(f'{PYTHON_THREADPOOL_THREAD_COUNT} must be set ' - f'to a value between ' - f'{PYTHON_THREADPOOL_THREAD_COUNT_MIN} and ' - 'sys.maxint. Reverting to default value for ' - 'max_workers') + logger.warning( + '%s must be set to a value between %s and sys.maxint. ' + 'Reverting to default value for max_workers', + PYTHON_THREADPOOL_THREAD_COUNT, + PYTHON_THREADPOOL_THREAD_COUNT_MIN) return False return True diff --git a/azure_functions_worker/extension.py b/azure_functions_worker/extension.py index a2a7c14c..ceb619e5 100644 --- a/azure_functions_worker/extension.py +++ b/azure_functions_worker/extension.py @@ -236,24 +236,24 @@ def _try_get_sdk_with_extension_enabled(cls) -> Optional[ModuleType]: @classmethod def _info_extension_is_enabled(cls, sdk): logger.info( - 'Python Worker Extension is enabled in azure.functions ' - f'({get_sdk_version(sdk)}).' + 'Python Worker Extension is enabled in azure.functions (%s).', + get_sdk_version(sdk) ) @classmethod def _info_discover_extension_list(cls, function_name, sdk): logger.info( - f'Python Worker Extension Manager is loading {function_name}, ' - 'current registered extensions: ' - f'{sdk.ExtensionMeta.get_registered_extensions_json()}' + 'Python Worker Extension Manager is loading %s, current ' + 'registered extensions: %s', + function_name, sdk.ExtensionMeta.get_registered_extensions_json() ) @classmethod def _warn_sdk_not_support_extension(cls, sdk): logger.warning( - f'The azure.functions ({get_sdk_version(sdk)}) does not ' - 'support Python worker extensions. If you believe extensions ' - 'are correctly installed, please set the ' - f'{PYTHON_ISOLATE_WORKER_DEPENDENCIES} and ' - f'{PYTHON_ENABLE_WORKER_EXTENSIONS} to "true"' + 'The azure.functions (%s) does not support Python worker ' + 'extensions. 
If you believe extensions are correctly installed, ' + 'please set the %s and %s to "true"', + get_sdk_version(sdk), PYTHON_ISOLATE_WORKER_DEPENDENCIES, + PYTHON_ENABLE_WORKER_EXTENSIONS ) diff --git a/azure_functions_worker/testutils.py b/azure_functions_worker/testutils.py index 1510aa22..a07d928d 100644 --- a/azure_functions_worker/testutils.py +++ b/azure_functions_worker/testutils.py @@ -258,8 +258,8 @@ def _run_test(self, test, *args, **kwargs): self.host_stdout.seek(last_pos) self.host_out = self.host_stdout.read() self.host_stdout_logger.error( - f'Captured WebHost stdout from {self.host_stdout.name} ' - f':\n{self.host_out}') + 'Captured WebHost stdout from %s :\n%s', + self.host_stdout.name, self.host_out) finally: if test_exception is not None: raise test_exception diff --git a/azure_functions_worker/utils/dependency.py b/azure_functions_worker/utils/dependency.py index 8fb01f0e..08ac7da5 100644 --- a/azure_functions_worker/utils/dependency.py +++ b/azure_functions_worker/utils/dependency.py @@ -95,15 +95,16 @@ def use_worker_dependencies(cls): # The following log line will not show up in core tools but should # work in kusto since core tools only collects gRPC logs. This function # is executed even before the gRPC logging channel is ready. - logger.info(f'Applying use_worker_dependencies:' - f' worker_dependencies: {cls.worker_deps_path},' - f' customer_dependencies: {cls.cx_deps_path},' - f' working_directory: {cls.cx_working_dir}') + logger.info('Applying use_worker_dependencies:' + ' worker_dependencies: %s,' + ' customer_dependencies: %s,' + ' working_directory: %s', cls.worker_deps_path, + cls.cx_deps_path, cls.cx_working_dir) cls._remove_from_sys_path(cls.cx_deps_path) cls._remove_from_sys_path(cls.cx_working_dir) cls._add_to_sys_path(cls.worker_deps_path, True) - logger.info(f'Start using worker dependencies {cls.worker_deps_path}') + logger.info('Start using worker dependencies %s', cls.worker_deps_path) @classmethod @enable_feature_by( @@ -145,10 +146,10 @@ def prioritize_customer_dependencies(cls, cx_working_dir=None): if not cx_deps_path: cx_deps_path = cls.cx_deps_path - logger.info('Applying prioritize_customer_dependencies:' - f' worker_dependencies: {cls.worker_deps_path},' - f' customer_dependencies: {cx_deps_path},' - f' working_directory: {working_directory}') + logger.info( + 'Applying prioritize_customer_dependencies: worker_dependencies: ' + '%s, customer_dependencies: %s, working_directory: %s', + cls.worker_deps_path, cx_deps_path, working_directory) cls._remove_from_sys_path(cls.worker_deps_path) cls._add_to_sys_path(cls.cx_deps_path, True) @@ -211,11 +212,11 @@ def reload_azure_google_namespace_from_worker_deps(cls): packages_to_reload = ['azure', 'google'] for p in packages_to_reload: try: - logger.info(f'Reloading {p} module') + logger.info('Reloading %s module', p) importlib.reload(sys.modules[p]) except Exception as ex: - logger.info('Unable to reload {}: \n{}'.format(p, ex)) - logger.info(f'Reloaded {p} module') + logger.info('Unable to reload %s: \n%s', p, ex) + logger.info('Reloaded %s module', p) # Reload azure.functions to give user package precedence logger.info('Reloading azure.functions module at %s', @@ -225,8 +226,9 @@ def reload_azure_google_namespace_from_worker_deps(cls): logger.info('Reloaded azure.functions module now at %s', inspect.getfile(sys.modules['azure.functions'])) except Exception as ex: - logger.info('Unable to reload azure.functions. ' - 'Using default. 
Exception:\n{}'.format(ex)) + logger.info( + 'Unable to reload azure.functions. Using default. ' + 'Exception:\n%s', ex) @classmethod def _add_to_sys_path(cls, path: str, add_to_first: bool): @@ -392,6 +394,6 @@ def _remove_module_cache(path: str): sys.modules.pop(module_name) except Exception as e: logger.warning( - f'Attempt to remove module cache for {module_name} but' - f' failed with {e}. Using the original module cache.' - ) + 'Attempt to remove module cache for %s but failed with ' + '%s. Using the original module cache.', + module_name, e) diff --git a/tests/unittests/test_dispatcher.py b/tests/unittests/test_dispatcher.py index e52d6e10..ec40846e 100644 --- a/tests/unittests/test_dispatcher.py +++ b/tests/unittests/test_dispatcher.py @@ -150,7 +150,7 @@ async def test_dispatcher_sync_threadpool_invalid_worker_count(self): await self._assert_workers_threadpool(self._ctrl, host, self._default_workers) mock_logger.warning.assert_any_call( - f'{PYTHON_THREADPOOL_THREAD_COUNT} must be an integer') + '%s must be an integer', PYTHON_THREADPOOL_THREAD_COUNT) async def test_dispatcher_sync_threadpool_below_min_setting(self): """Test if the sync threadpool will pick up default value when the @@ -164,11 +164,10 @@ async def test_dispatcher_sync_threadpool_below_min_setting(self): await self._assert_workers_threadpool(self._ctrl, host, self._default_workers) mock_logger.warning.assert_any_call( - f'{PYTHON_THREADPOOL_THREAD_COUNT} must be set ' - f'to a value between ' - f'{PYTHON_THREADPOOL_THREAD_COUNT_MIN} and ' - 'sys.maxint. Reverting to default value for ' - 'max_workers') + '%s must be set to a value between %s and sys.maxint. ' + 'Reverting to default value for max_workers', + PYTHON_THREADPOOL_THREAD_COUNT, + PYTHON_THREADPOOL_THREAD_COUNT_MIN) async def test_dispatcher_sync_threadpool_exceed_max_setting(self): """Test if the sync threadpool will pick up default max value when the @@ -216,7 +215,7 @@ async def test_dispatcher_sync_threadpool_in_placeholder_invalid(self): # Check warning message mock_logger.warning.assert_any_call( - f'{PYTHON_THREADPOOL_THREAD_COUNT} must be an integer') + '%s must be an integer', PYTHON_THREADPOOL_THREAD_COUNT) async def test_dispatcher_sync_threadpool_in_placeholder_above_max(self): """Test if the sync threadpool will use the default max setting when @@ -252,11 +251,10 @@ async def test_dispatcher_sync_threadpool_in_placeholder_below_min(self): self._default_workers) mock_logger.warning.assert_any_call( - f'{PYTHON_THREADPOOL_THREAD_COUNT} must be set ' - f'to a value between ' - f'{PYTHON_THREADPOOL_THREAD_COUNT_MIN} and ' - 'sys.maxint. Reverting to default value for ' - 'max_workers') + '%s must be set to a value between %s and sys.maxint. ' + 'Reverting to default value for max_workers', + PYTHON_THREADPOOL_THREAD_COUNT, + PYTHON_THREADPOOL_THREAD_COUNT_MIN) async def test_sync_invocation_request_log(self): with patch('azure_functions_worker.dispatcher.logger') as mock_logger: diff --git a/tests/unittests/test_extension.py b/tests/unittests/test_extension.py index 9865e59b..4727f4f8 100644 --- a/tests/unittests/test_extension.py +++ b/tests/unittests/test_extension.py @@ -697,7 +697,7 @@ def test_info_extension_is_enabled(self, info_mock: Mock): self._instance._info_extension_is_enabled(sdk) info_mock.assert_called_once_with( 'Python Worker Extension is enabled in azure.functions ' - f'({sdk.__version__}).' 
+ '(%s).', sdk.__version__ ) @patch('azure_functions_worker.extension.logger.info') @@ -717,9 +717,9 @@ def test_info_discover_extension_list_func_ext(self, info_mock: Mock): # Check logs self._instance._info_discover_extension_list(self._mock_func_name, sdk) info_mock.assert_called_once_with( - 'Python Worker Extension Manager is loading HttpTrigger, ' - 'current registered extensions: ' - r'{"FuncExtension": {"HttpTrigger": ["NewFuncExtension"]}}' + 'Python Worker Extension Manager is loading %s, ' + 'current registered extensions: %s', 'HttpTrigger', + '{"FuncExtension": {"HttpTrigger": ["NewFuncExtension"]}}' ) @patch('azure_functions_worker.extension.logger.info') @@ -733,9 +733,9 @@ def test_info_discover_extension_list_app_ext(self, info_mock: Mock): # Check logs self._instance._info_discover_extension_list(self._mock_func_name, sdk) info_mock.assert_called_once_with( - 'Python Worker Extension Manager is loading HttpTrigger, ' - 'current registered extensions: ' - r'{"AppExtension": ["NewAppExtension"]}' + 'Python Worker Extension Manager is loading %s, current ' + 'registered extensions: %s', + 'HttpTrigger', '{"AppExtension": ["NewAppExtension"]}' ) @patch('azure_functions_worker.extension.logger.warning') @@ -747,11 +747,11 @@ def test_warn_sdk_not_support_extension(self, warning_mock: Mock): # Check logs self._instance._warn_sdk_not_support_extension(sdk) warning_mock.assert_called_once_with( - 'The azure.functions (dummy) does not ' - 'support Python worker extensions. If you believe extensions ' - 'are correctly installed, please set the ' - 'PYTHON_ISOLATE_WORKER_DEPENDENCIES and ' - 'PYTHON_ENABLE_WORKER_EXTENSIONS to "true"' + 'The azure.functions (%s) does not support Python worker ' + 'extensions. If you believe extensions are correctly installed, ' + 'please set the %s and %s to "true"', + 'dummy', 'PYTHON_ISOLATE_WORKER_DEPENDENCIES', + 'PYTHON_ENABLE_WORKER_EXTENSIONS' ) def _generate_new_func_extension_class(self, base: type, trigger: str):
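
A minimal standalone sketch of the lazy-logging pattern this patch applies throughout the worker. The logger name and values below are illustrative only and are not taken from the codebase:

    import logging

    logger = logging.getLogger("lazy_logging_demo")  # hypothetical logger name
    logging.basicConfig(level=logging.INFO)

    count, mem_map_name = 1024, "demo-map"  # illustrative values

    # Eager: the f-string is rendered at the call site even though DEBUG
    # records are filtered out here, so the formatting work is wasted.
    logger.debug(f"Read {count} bytes from memory map {mem_map_name}")

    # Lazy: the format string and its args are passed separately; logging
    # interpolates them only if a handler actually emits the record, so no
    # formatting happens for this disabled DEBUG call.
    logger.debug("Read %s bytes from memory map %s", count, mem_map_name)

    # For an enabled level the rendered message is identical either way.
    logger.info("Read %s bytes from memory map %s", count, mem_map_name)

Passing the arguments separately also keeps them on the LogRecord, which is why the updated unit tests in this patch assert on the format string and its arguments (e.g. '%s must be an integer', PYTHON_THREADPOOL_THREAD_COUNT) rather than on a pre-rendered message.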