diff --git a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.cc b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.cc
index f95009eb8643d..fe6b959b962de 100644
--- a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.cc
+++ b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.cc
@@ -1599,6 +1599,10 @@ TensorrtExecutionProvider::TensorrtExecutionProvider(const TensorrtExecutionProv
       LOGS_DEFAULT(ERROR) << "In the case of dumping context model and for security purpose, The trt_engine_cache_path has '..', it's not allowed to point outside the directory.";
     }
 
+    // Engine cache relative path to context model directory.
+    // It's used when dumping the "ep_cache_context" node attribute.
+    engine_cache_relative_path_to_context_model_dir = cache_path_;
+
     // Make cache_path_ to be the relative path of ep_context_file_path_
     cache_path_ = GetPathOrParentPathOfCtxModel(ep_context_file_path_).append(cache_path_).string();
   }
@@ -3018,7 +3022,8 @@ Status TensorrtExecutionProvider::CreateNodeComputeInfoFromGraph(const GraphView
   if (dump_ep_context_model_) {
     // "ep_cache_context" node attribute should be a relative path to context model directory
     if (ep_cache_context_attr_.empty()) {
-      ep_cache_context_attr_ = std::filesystem::relative(engine_cache_path, ep_context_file_path_).string();
+      auto cache_file_name = std::filesystem::path(engine_cache_path).filename();
+      ep_cache_context_attr_ = std::filesystem::path(engine_cache_relative_path_to_context_model_dir).append(cache_file_name.string()).string();
     }
 
     std::unique_ptr<ONNX_NAMESPACE::ModelProto> model_proto{CreateCtxModel(graph_body_viewer,
@@ -3090,7 +3095,8 @@ Status TensorrtExecutionProvider::CreateNodeComputeInfoFromGraph(const GraphView
   if (dump_ep_context_model_ && has_dynamic_shape) {
     // "ep_cache_context" node attribute should be a relative path to context model directory
     if (ep_cache_context_attr_.empty()) {
-      ep_cache_context_attr_ = std::filesystem::relative(engine_cache_path, ep_context_file_path_).string();
+      auto cache_file_name = std::filesystem::path(engine_cache_path).filename();
+      ep_cache_context_attr_ = std::filesystem::path(engine_cache_relative_path_to_context_model_dir).append(cache_file_name.string()).string();
     }
 
     model_proto_.reset(CreateCtxModel(graph_body_viewer,
                                       ep_cache_context_attr_,
diff --git a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.h b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.h
index 70b71aa221eef..ad2d2c55c67e1 100644
--- a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.h
+++ b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider.h
@@ -305,6 +305,7 @@ class TensorrtExecutionProvider : public IExecutionProvider {
   int ep_context_embed_mode_ = 0;
   std::string ctx_model_path_;
   std::string ep_cache_context_attr_;
+  std::string engine_cache_relative_path_to_context_model_dir;
   std::unique_ptr<ONNX_NAMESPACE::ModelProto> model_proto_ = ONNX_NAMESPACE::ModelProto::Create();
 
   std::unordered_set<std::string> control_flow_op_set_ = {"If", "Loop", "Scan"};
diff --git a/onnxruntime/test/providers/tensorrt/tensorrt_basic_test.cc b/onnxruntime/test/providers/tensorrt/tensorrt_basic_test.cc
index 73e0cf59d198c..ff95d6e2c235c 100644
--- a/onnxruntime/test/providers/tensorrt/tensorrt_basic_test.cc
+++ b/onnxruntime/test/providers/tensorrt/tensorrt_basic_test.cc
@@ -527,6 +527,7 @@ TEST(TensorrtExecutionProviderTest, EPContextNode) {
   */
   InferenceSession session_object6{so, GetEnvironment()};
   OrtTensorRTProviderOptionsV2 params6;
+  params6.trt_ep_context_embed_mode = 1;
   model_name = params5.trt_ep_context_file_path;
   execution_provider = TensorrtExecutionProviderWithOptions(&params6);
   EXPECT_TRUE(session_object6.RegisterExecutionProvider(std::move(execution_provider)).IsOK());