diff --git a/rag/rag-springai-ollama-llm/docker/docker-compose.yml b/rag/rag-springai-ollama-llm/docker/docker-compose.yml
index 687c199..1ed7954 100644
--- a/rag/rag-springai-ollama-llm/docker/docker-compose.yml
+++ b/rag/rag-springai-ollama-llm/docker/docker-compose.yml
@@ -6,4 +6,14 @@ services:
   redis-stack:
     image: redis/redis-stack-server
     ports:
-      - '6379:6379'
\ No newline at end of file
+      - '6379:6379'
+  lgtm-stack:
+    image: grafana/otel-lgtm:0.7.1
+    extra_hosts: ['host.docker.internal:host-gateway']
+    container_name: lgtm-stack
+    environment:
+      - OTEL_METRIC_EXPORT_INTERVAL=500
+    ports:
+      - "3000:3000"
+      - "4317:4317"
+      - "4318:4318"
\ No newline at end of file
diff --git a/rag/rag-springai-ollama-llm/pom.xml b/rag/rag-springai-ollama-llm/pom.xml
index d11d6a3..ca4861e 100644
--- a/rag/rag-springai-ollama-llm/pom.xml
+++ b/rag/rag-springai-ollama-llm/pom.xml
@@ -64,6 +64,18 @@
             <artifactId>micrometer-registry-prometheus</artifactId>
             <scope>runtime</scope>
         </dependency>
+        <dependency>
+            <groupId>io.micrometer</groupId>
+            <artifactId>micrometer-tracing-bridge-otel</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.opentelemetry</groupId>
+            <artifactId>opentelemetry-exporter-otlp</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>io.micrometer</groupId>
+            <artifactId>micrometer-registry-otlp</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.springframework.boot</groupId>
             <artifactId>spring-boot-starter-test</artifactId>
@@ -87,8 +99,15 @@
         <dependency>
             <groupId>com.redis.testcontainers</groupId>
             <artifactId>testcontainers-redis</artifactId>
+            <scope>test</scope>
             <version>1.6.4</version>
         </dependency>
+        <dependency>
+            <groupId>org.testcontainers</groupId>
+            <artifactId>grafana</artifactId>
+            <scope>test</scope>
+            <version>1.20.1</version>
+        </dependency>
         <dependency>
             <groupId>io.rest-assured</groupId>
             <artifactId>rest-assured</artifactId>
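
The three new Micrometer/OpenTelemetry dependencies wire the application to an OTLP collector: micrometer-tracing-bridge-otel translates Micrometer spans into OpenTelemetry spans, opentelemetry-exporter-otlp ships them to the configured OTLP endpoint, and micrometer-registry-otlp does the same for metrics. The org.testcontainers:grafana module supplies the LgtmStackContainer used in the new test configuration further down. A minimal sketch, not part of this change and with illustrative class and span names, of how a manually created span travels through that bridge:

// Sketch only: a hypothetical component that opens a span through the Micrometer Tracing API.
// With micrometer-tracing-bridge-otel on the classpath the span is recorded as an
// OpenTelemetry span and exported over OTLP.
import io.micrometer.tracing.Span;
import io.micrometer.tracing.Tracer;
import org.springframework.stereotype.Component;

@Component
class TracingSmokeCheck {

    private final Tracer tracer;

    TracingSmokeCheck(Tracer tracer) {
        this.tracer = tracer;
    }

    void traceSomething() {
        Span span = this.tracer.nextSpan().name("rag-smoke-span");
        try (Tracer.SpanInScope ignored = this.tracer.withSpan(span.start())) {
            // work executed inside this scope is attributed to the span
            span.tag("component", "rag-springai-ollama-llm");
        } finally {
            span.end();
        }
    }
}
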
diff --git a/rag/rag-springai-ollama-llm/src/main/resources/application.properties b/rag/rag-springai-ollama-llm/src/main/resources/application.properties
index 0a3789a..9870b16 100644
--- a/rag/rag-springai-ollama-llm/src/main/resources/application.properties
+++ b/rag/rag-springai-ollama-llm/src/main/resources/application.properties
@@ -3,12 +3,12 @@ spring.application.name=rag-springai-ollama-llm
spring.threads.virtual.enabled=true
spring.mvc.problemdetails.enabled=true
-spring.ai.ollama.chat.options.model=llama3
+spring.ai.ollama.chat.options.model=mistral
spring.ai.ollama.chat.options.temperature=0.3
spring.ai.ollama.chat.options.top-k=2
spring.ai.ollama.chat.options.top-p=0.2
-spring.ai.ollama.embedding.options.model=llama3
+spring.ai.ollama.embedding.options.model=nomic-embed-text
spring.ai.vectorstore.redis.index=vector_store
spring.ai.vectorstore.redis.prefix=ai
@@ -17,3 +17,12 @@ spring.ai.vectorstore.redis.initializeSchema=true
spring.ai.ollama.baseUrl=http://localhost:11434
spring.testcontainers.beans.startup=parallel
+
+##Observability
+spring.ai.chat.observations.include-completion=true
+spring.ai.chat.observations.include-prompt=true
+
+management.endpoints.web.exposure.include=*
+management.metrics.tags.service.name=${spring.application.name}
+management.tracing.sampling.probability=1.0
+management.otlp.tracing.endpoint=http://localhost:4318/v1/traces
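
With these properties the Spring AI chat observations include the full prompt and completion, every trace is sampled, and spans are pushed to the OTLP HTTP endpoint that the lgtm-stack container exposes on port 4318. A minimal sketch, assuming a hypothetical RagService and observation name, of how application code can join the same observation pipeline (each observation yields a timer metric and, with the tracing bridge on the classpath, an exported span):

// Sketch only: the service and method names are assumptions, not taken from this PR.
import io.micrometer.observation.Observation;
import io.micrometer.observation.ObservationRegistry;
import org.springframework.stereotype.Service;

@Service
class RagService {

    private final ObservationRegistry observationRegistry;

    RagService(ObservationRegistry observationRegistry) {
        this.observationRegistry = observationRegistry;
    }

    String answer(String question) {
        return Observation.createNotStarted("rag.answer", this.observationRegistry)
                .lowCardinalityKeyValue("service.name", "rag-springai-ollama-llm")
                .observe(() -> {
                    // call the vector store and chat model here; Spring AI adds its own
                    // chat observations around the Ollama call when the properties above
                    // are enabled
                    return "answer for: " + question;
                });
    }
}
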
diff --git a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/TestLlmRagWithSpringAiApplication.java b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/TestLlmRagWithSpringAiApplication.java
index fb13ad9..6a2fb5f 100644
--- a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/TestLlmRagWithSpringAiApplication.java
+++ b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/TestLlmRagWithSpringAiApplication.java
@@ -1,40 +1,13 @@
 package com.learning.ai.llmragwithspringai;
 
-import static com.redis.testcontainers.RedisStackContainer.DEFAULT_IMAGE_NAME;
-import static com.redis.testcontainers.RedisStackContainer.DEFAULT_TAG;
-
-import com.redis.testcontainers.RedisStackContainer;
+import com.learning.ai.llmragwithspringai.config.TestcontainersConfiguration;
 import org.springframework.boot.SpringApplication;
-import org.springframework.boot.test.context.TestConfiguration;
-import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
-import org.springframework.context.annotation.Bean;
-import org.springframework.test.context.DynamicPropertyRegistry;
-import org.testcontainers.ollama.OllamaContainer;
-import org.testcontainers.utility.DockerImageName;
 
-@TestConfiguration(proxyBeanMethods = false)
 public class TestLlmRagWithSpringAiApplication {
 
-    @Bean
-    @ServiceConnection
-    OllamaContainer ollama(DynamicPropertyRegistry properties) {
-        // The model name to use (e.g., "orca-mini", "mistral", "llama2", "codellama", "phi", or
-        // "tinyllama")
-        return new OllamaContainer(
-                DockerImageName.parse("langchain4j/ollama-llama3:latest").asCompatibleSubstituteFor("ollama/ollama"));
-    }
-
-    @Bean
-    RedisStackContainer redisContainer(DynamicPropertyRegistry properties) {
-        RedisStackContainer redis = new RedisStackContainer(DEFAULT_IMAGE_NAME.withTag(DEFAULT_TAG));
-        properties.add("spring.ai.vectorstore.redis.uri", () -> "redis://%s:%d"
-                .formatted(redis.getHost(), redis.getMappedPort(6379)));
-        return redis;
-    }
-
     public static void main(String[] args) {
         SpringApplication.from(LlmRagWithSpringAiApplication::main)
-                .with(TestLlmRagWithSpringAiApplication.class)
+                .with(TestcontainersConfiguration.class)
                 .run(args);
     }
 }
diff --git a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/AbstractIntegrationTest.java b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/AbstractIntegrationTest.java
index 81ed342..00e1102 100644
--- a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/AbstractIntegrationTest.java
+++ b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/AbstractIntegrationTest.java
@@ -3,7 +3,6 @@
import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.RANDOM_PORT;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.learning.ai.llmragwithspringai.TestLlmRagWithSpringAiApplication;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
@@ -11,7 +10,7 @@
 @SpringBootTest(
         webEnvironment = RANDOM_PORT,
-        classes = {TestLlmRagWithSpringAiApplication.class})
+        classes = {TestcontainersConfiguration.class})
 @AutoConfigureMockMvc
 public abstract class AbstractIntegrationTest {
diff --git a/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java
new file mode 100644
index 0000000..4c7deda
--- /dev/null
+++ b/rag/rag-springai-ollama-llm/src/test/java/com/learning/ai/llmragwithspringai/config/TestcontainersConfiguration.java
@@ -0,0 +1,46 @@
+package com.learning.ai.llmragwithspringai.config;
+
+import com.redis.testcontainers.RedisStackContainer;
+import java.io.IOException;
+import java.time.Duration;
+import org.springframework.boot.test.context.TestConfiguration;
+import org.springframework.boot.testcontainers.service.connection.ServiceConnection;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Scope;
+import org.springframework.test.context.DynamicPropertyRegistry;
+import org.testcontainers.grafana.LgtmStackContainer;
+import org.testcontainers.ollama.OllamaContainer;
+import org.testcontainers.utility.DockerImageName;
+
+@TestConfiguration(proxyBeanMethods = false)
+public class TestcontainersConfiguration {
+
+    @Bean
+    @ServiceConnection
+    OllamaContainer ollama() throws IOException, InterruptedException {
+        // The model name to use (e.g., "orca-mini", "mistral", "llama2", "codellama", "phi", or
+        // "tinyllama")
+        OllamaContainer ollamaContainer = new OllamaContainer(
+                DockerImageName.parse("langchain4j/ollama-mistral:latest").asCompatibleSubstituteFor("ollama/ollama"));
+        ollamaContainer.start();
+        ollamaContainer.execInContainer("ollama", "pull", "nomic-embed-text");
+        return ollamaContainer;
+    }
+
+    @Bean
+    RedisStackContainer redisContainer(DynamicPropertyRegistry properties) {
+        RedisStackContainer redis = new RedisStackContainer(
+                RedisStackContainer.DEFAULT_IMAGE_NAME.withTag(RedisStackContainer.DEFAULT_TAG));
+        properties.add("spring.ai.vectorstore.redis.uri", () -> "redis://%s:%d"
+                .formatted(redis.getHost(), redis.getMappedPort(6379)));
+        return redis;
+    }
+
+    @Bean
+    @Scope("singleton")
+    @ServiceConnection("otel/opentelemetry-collector-contrib")
+    LgtmStackContainer lgtmStackContainer() {
+        return new LgtmStackContainer(DockerImageName.parse("grafana/otel-lgtm").withTag("0.7.1"))
+                .withStartupTimeout(Duration.ofMinutes(2));
+    }
+}
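
TestcontainersConfiguration is now shared by TestLlmRagWithSpringAiApplication (Testcontainers at development time) and AbstractIntegrationTest (integration tests). A usage sketch, in which the request path, query parameter, and response field are assumptions rather than part of this change:

// Sketch only: an integration test that boots the application against the Ollama,
// Redis Stack, and LGTM containers declared above; the endpoint details are hypothetical.
package com.learning.ai.llmragwithspringai;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

import com.learning.ai.llmragwithspringai.config.AbstractIntegrationTest;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.web.servlet.MockMvc;

class RagObservabilitySmokeIT extends AbstractIntegrationTest {

    @Autowired
    private MockMvc mvc;

    @Test
    void answersQuestionAndEmitsObservations() throws Exception {
        // the HTTP call exercises the RAG pipeline; with the LGTM container connected via
        // @ServiceConnection, the resulting chat spans and metrics are exported over OTLP
        mvc.perform(get("/api/ai/rag").param("question", "Who is the highest run scorer?"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.answer", Matchers.notNullValue()));
    }
}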