diff --git a/langsmith/tracer.go b/langsmith/tracer.go index 8ca357e8c..221aff3b0 100644 --- a/langsmith/tracer.go +++ b/langsmith/tracer.go @@ -63,26 +63,11 @@ func (t *LangChainTracer) HandleText(_ context.Context, _ string) { func (t *LangChainTracer) HandleLLMGenerateContentStart(ctx context.Context, ms []llms.MessageContent) { childTree := t.activeTree.CreateChild() - inputs := []struct { - Role string `json:"role"` - Content []llms.ContentPart `json:"content"` - }{} - - for _, prompt := range ms { - inputs = append(inputs, struct { - Role string `json:"role"` - Content []llms.ContentPart `json:"content"` - }{ - Role: string(prompt.Role), - Content: prompt.Parts, - }) - } - childTree. SetName("LLMGenerateContent"). SetRunType("llm"). SetInputs(KVMap{ - "messages": inputs, + "messages": inputsFromMessages(ms), }) t.activeTree.AppendChild(childTree) @@ -97,9 +82,18 @@ func (t *LangChainTracer) HandleLLMGenerateContentStart(ctx context.Context, ms func (t *LangChainTracer) HandleLLMGenerateContentEnd(ctx context.Context, res *llms.ContentResponse) { childTree := t.activeTree.GetChild("LLMGenerateContent") - childTree.SetName("LLMGenerateContent").SetRunType("llm").SetOutputs(KVMap{ - "res_content": res, - }) + childTree. + SetName("LLMGenerateContent"). + SetRunType("llm"). + SetOutputs(KVMap{ + "choices": res.Choices, + }) + + if tracingOutput := res.GetTracingOutput(); tracingOutput != nil { + childTree. + SetName(tracingOutput.Name). 
+ SetOutputs(tracingOutput.Output) + } // Close the run if err := childTree.patchRun(ctx); err != nil { diff --git a/langsmith/types.go b/langsmith/types.go index c5ea63119..586995fbd 100644 --- a/langsmith/types.go +++ b/langsmith/types.go @@ -1,6 +1,10 @@ package langsmith -import "time" +import ( + "time" + + "github.com/tmc/langchaingo/llms" +) type KVMap map[string]any @@ -24,3 +28,19 @@ func timeToMillisecondsPtr(t time.Time) *int64 { func ptr[T any](v T) *T { return &v } + +type ( + inputs []input + input struct { + Role string `json:"role"` + Content []llms.ContentPart `json:"content"` + } +) + +func inputsFromMessages(ms []llms.MessageContent) inputs { + inputs := make(inputs, len(ms)) + for i, msg := range ms { + inputs[i] = input{Role: string(msg.Role), Content: msg.Parts} + } + return inputs +} diff --git a/llms/generatecontent.go b/llms/generatecontent.go index 8702143b0..79d7f48a9 100644 --- a/llms/generatecontent.go +++ b/llms/generatecontent.go @@ -122,6 +122,21 @@ func (ToolCallResponse) isPart() {} // It can potentially return multiple content choices. type ContentResponse struct { Choices []*ContentChoice + // tracingOutput is arbitrary information the model can send to the tracer. 
+ tracingOutput *TracingOutput +} + +type TracingOutput struct { + Name string + Output map[string]any +} + +func (cr *ContentResponse) SetTracingOutput(v *TracingOutput) { + cr.tracingOutput = v +} + +func (cr *ContentResponse) GetTracingOutput() *TracingOutput { + return cr.tracingOutput } // ContentChoice is one of the response choices returned by GenerateContent diff --git a/llms/openai/openaillm.go b/llms/openai/openaillm.go index 2d91e9ae0..cac2b4983 100644 --- a/llms/openai/openaillm.go +++ b/llms/openai/openaillm.go @@ -2,6 +2,7 @@ package openai import ( "context" + "encoding/json" "fmt" "github.com/tmc/langchaingo/callbacks" @@ -195,11 +196,29 @@ func (o *LLM) GenerateContent(ctx context.Context, messages []llms.MessageConten response := &llms.ContentResponse{Choices: choices} if callbacksHandler := o.getCallbackHandler(ctx); callbacksHandler != nil { + tracingOutput, err := o.getTracingOutput(result) + if err != nil { + return nil, fmt.Errorf("failed to get tracing output: %w", err) + } + response.SetTracingOutput(tracingOutput) callbacksHandler.HandleLLMGenerateContentEnd(ctx, response) } return response, nil } +func (o *LLM) getTracingOutput(resp *openaiclient.ChatCompletionResponse) (*llms.TracingOutput, error) { + jsonBytes, err := json.Marshal(resp) + if err != nil { + return nil, err + } + outputs := map[string]any{} + if err := json.Unmarshal(jsonBytes, &outputs); err != nil { + return nil, err + } + + return &llms.TracingOutput{Name: "ChatOpenAI", Output: outputs}, nil +} + // CreateEmbedding creates embeddings for the given input texts. func (o *LLM) CreateEmbedding(ctx context.Context, inputTexts []string) ([][]float32, error) { embeddings, err := o.client.CreateEmbedding(ctx, &openaiclient.EmbeddingRequest{