From 5f3a2e70f549e358dd5a93c7cf97f364f592c0b9 Mon Sep 17 00:00:00 2001
From: Jacob Lee
Date: Wed, 4 Dec 2024 09:59:07 -0800
Subject: [PATCH] docs: Update Google GenAI doc formatting (#7318)

---
 docs/core_docs/docs/integrations/chat/google_generativeai.ipynb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/core_docs/docs/integrations/chat/google_generativeai.ipynb b/docs/core_docs/docs/integrations/chat/google_generativeai.ipynb
index d802e86669d6..3e2332d74d4b 100644
--- a/docs/core_docs/docs/integrations/chat/google_generativeai.ipynb
+++ b/docs/core_docs/docs/integrations/chat/google_generativeai.ipynb
@@ -801,7 +801,7 @@
    "id": "a464c1a9",
    "metadata": {},
    "source": [
-    "### Context Caching\n",
+    "## Context Caching\n",
     "\n",
     "Context caching allows you to pass some content to the model once, cache the input tokens, and then refer to the cached tokens for subsequent requests to reduce cost. You can create a `CachedContent` object using `GoogleAICacheManager` class and then pass the `CachedContent` object to your `ChatGoogleGenerativeAIModel` with `enableCachedContent()` method."
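
For context, the hunk's surrounding doc text describes the context-caching workflow. A minimal TypeScript sketch of that flow might look like the following; it assumes `GoogleAICacheManager` is imported from `@google/generative-ai/server`, that its `create()` call accepts the option names shown (`model`, `displayName`, `systemInstruction`, `contents`, `ttlSeconds`), and that the chat model exposes the `enableCachedContent()` method the doc text mentions. Model name, TTL, and prompt text are placeholder values, not taken from the notebook.

```typescript
// Sketch of the context-caching flow described in the patched section.
// Assumption: GoogleAICacheManager lives in "@google/generative-ai/server"
// and create() accepts the option names used below; verify against the
// notebook and the Google GenAI SDK docs before relying on this.
import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
import { GoogleAICacheManager } from "@google/generative-ai/server";
import { HumanMessage } from "@langchain/core/messages";

const cacheManager = new GoogleAICacheManager(process.env.GOOGLE_API_KEY ?? "");

// Cache the large, reusable part of the prompt once so that later requests
// reference the cached input tokens instead of resending them.
const cachedContent = await cacheManager.create({
  model: "models/gemini-1.5-flash-001", // placeholder model name
  displayName: "shared-context", // hypothetical label
  systemInstruction: "You answer questions about the attached report.",
  contents: [
    {
      role: "user",
      parts: [{ text: "<large document text reused across requests>" }],
    },
  ],
  ttlSeconds: 300, // keep the cache alive for 5 minutes
});

// Attach the CachedContent to the chat model via the method named in the doc.
const model = new ChatGoogleGenerativeAI({ model: "gemini-1.5-flash-001" });
model.enableCachedContent(cachedContent);

// Subsequent invocations only pay for the new tokens in this message.
const res = await model.invoke([
  new HumanMessage("Summarize the key findings of the report."),
]);
console.log(res.content);
```

The cost saving comes from the second step: once the `CachedContent` is attached, each `invoke` call bills only the new message tokens, while the cached document tokens are reused for as long as the cache's TTL allows.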