diff --git a/src/ipc_message.h b/src/ipc_message.h
index 866070f6..ac28238c 100644
--- a/src/ipc_message.h
+++ b/src/ipc_message.h
@@ -55,7 +55,7 @@ typedef enum PYTHONSTUB_commandtype_enum {
   PYTHONSTUB_AutoCompleteResponse,
   PYTHONSTUB_LogRequest,
   PYTHONSTUB_BLSDecoupledInferPayloadCleanup,
-  PYTHONSTUB_BLSDecoupledResponseFactoryCleanup,
+  PYTHONSTUB_DecoupledResponseFactoryCleanup,
   PYTHONSTUB_MetricFamilyRequestNew,
   PYTHONSTUB_MetricFamilyRequestDelete,
   PYTHONSTUB_MetricRequestNew,
diff --git a/src/pb_stub.cc b/src/pb_stub.cc
index 53a6c540..d1f8f6fd 100644
--- a/src/pb_stub.cc
+++ b/src/pb_stub.cc
@@ -997,7 +997,7 @@ Stub::ServiceStubToParentRequests()
         (utils_msg_payload->command_type ==
          PYTHONSTUB_BLSDecoupledInferPayloadCleanup) ||
         (utils_msg_payload->command_type ==
-         PYTHONSTUB_BLSDecoupledResponseFactoryCleanup)) {
+         PYTHONSTUB_DecoupledResponseFactoryCleanup)) {
       SendCleanupId(utils_msg_payload, utils_msg_payload->command_type);
     } else if (
         utils_msg_payload->command_type == PYTHONSTUB_IsRequestCancelled) {
diff --git a/src/python_be.cc b/src/python_be.cc
index 8dfa72b1..3c9dd19d 100644
--- a/src/python_be.cc
+++ b/src/python_be.cc
@@ -830,8 +830,8 @@ ModelInstanceState::StubToParentMQMonitor()
        break;
      }
      case PYTHONSTUB_BLSDecoupledInferPayloadCleanup:
-     case PYTHONSTUB_BLSDecoupledResponseFactoryCleanup: {
-       ProcessBLSCleanupRequest(message);
+     case PYTHONSTUB_DecoupledResponseFactoryCleanup: {
+       ProcessCleanupRequest(message);
        break;
      }
      case PYTHONSTUB_IsRequestCancelled: {
@@ -921,7 +921,7 @@ ModelInstanceState::ProcessLogRequest(
 }
 
 void
-ModelInstanceState::ProcessBLSCleanupRequest(
+ModelInstanceState::ProcessCleanupRequest(
     const std::unique_ptr<IPCMessage>& message)
 {
   AllocatedSharedMemory<char> cleanup_request_message =
@@ -932,8 +932,7 @@ ModelInstanceState::ProcessBLSCleanupRequest(
   if (message->Command() == PYTHONSTUB_BLSDecoupledInferPayloadCleanup) {
     // Remove the InferPayload object from the map.
     infer_payload_.erase(id);
-  } else if (
-      message->Command() == PYTHONSTUB_BLSDecoupledResponseFactoryCleanup) {
+  } else if (message->Command() == PYTHONSTUB_DecoupledResponseFactoryCleanup) {
     // Delete response factory
     std::unique_ptr<
         TRITONBACKEND_ResponseFactory, backend::ResponseFactoryDeleter>
diff --git a/src/python_be.h b/src/python_be.h
index e644e159..f5620d07 100644
--- a/src/python_be.h
+++ b/src/python_be.h
@@ -400,9 +400,8 @@ class ModelInstanceState : public BackendModelInstance {
       std::unique_ptr<InferResponse>* infer_response,
       bi::managed_external_buffer::handle_t* response_handle);
 
-  // Process the bls decoupled cleanup request for InferPayload and
-  // ResponseFactory
-  void ProcessBLSCleanupRequest(const std::unique_ptr<IPCMessage>& message);
+  // Process the decoupled cleanup request for InferPayload and ResponseFactory
+  void ProcessCleanupRequest(const std::unique_ptr<IPCMessage>& message);
 
   // Process request cancellation query
   void ProcessIsRequestCancelled(const std::unique_ptr<IPCMessage>& message);
diff --git a/src/response_sender.cc b/src/response_sender.cc
index fe06e554..94e3f0c8 100644
--- a/src/response_sender.cc
+++ b/src/response_sender.cc
@@ -50,7 +50,7 @@ ResponseSender::~ResponseSender()
   std::unique_ptr<Stub>& stub = Stub::GetOrCreateInstance();
   stub->EnqueueCleanupId(
       reinterpret_cast<void*>(response_factory_address_),
-      PYTHONSTUB_BLSDecoupledResponseFactoryCleanup);
+      PYTHONSTUB_DecoupledResponseFactoryCleanup);
 }
 
 void
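
Aside (not part of the patch): the response-factory branch of the renamed ProcessCleanupRequest destroys the factory by handing the raw address received from the stub to a unique_ptr with a custom deleter, so the factory is released as soon as the handler returns. Below is a minimal, self-contained sketch of that ownership pattern; ResponseFactory, ResponseFactoryDeleter, and ProcessCleanupRequestSketch are stand-ins invented for illustration, whereas the real code uses TRITONBACKEND_ResponseFactory, backend::ResponseFactoryDeleter, and the TRITONBACKEND_ResponseFactoryDelete API.

#include <cstdio>
#include <memory>

// Stand-in for TRITONBACKEND_ResponseFactory (the real type is opaque and
// owned by the Triton core).
struct ResponseFactory {};

// Plays the role of backend::ResponseFactoryDeleter: a custom deleter lets a
// unique_ptr release the factory through the backend API instead of delete.
struct ResponseFactoryDeleter {
  void operator()(ResponseFactory* factory) const
  {
    // The real deleter would call TRITONBACKEND_ResponseFactoryDelete(factory).
    std::puts("response factory released");
    delete factory;
  }
};

// Sketch of the PYTHONSTUB_DecoupledResponseFactoryCleanup branch: 'id' is the
// raw factory address sent by the stub; taking ownership here means the
// factory is destroyed when response_factory goes out of scope.
void
ProcessCleanupRequestSketch(void* id)
{
  std::unique_ptr<ResponseFactory, ResponseFactoryDeleter> response_factory(
      reinterpret_cast<ResponseFactory*>(id));
}

int
main()
{
  auto* factory = new ResponseFactory();
  ProcessCleanupRequestSketch(reinterpret_cast<void*>(factory));
  return 0;
}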