llm.go
package main

import (
	"context"
	"os"
	"strings"
	"text/template"

	"github.com/sashabaranov/go-openai"
)
// There are many different ways to provide the context to the LLM.
// You can pass each context as a separate user message, pass the whole list as
// one user message, or embed it in the system prompt. The system prompt itself
// also has a big impact on how well the LLM handles the context, especially for
// LLMs with < 7B parameters. Prompt engineering is up to you; it's out of scope
// for the vector database. A sketch of the per-message alternative follows below.
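//
// For illustration only (not part of the original file), the per-message
// alternative could look roughly like this; basePrompt stands for a hypothetical
// system prompt that does not embed the contexts:
//
//	messages := []openai.ChatCompletionMessage{
//		{Role: openai.ChatMessageRoleSystem, Content: basePrompt},
//	}
//	for _, c := range contexts {
//		messages = append(messages, openai.ChatCompletionMessage{
//			Role:    openai.ChatMessageRoleUser,
//			Content: "Context: " + c,
//		})
//	}
//	messages = append(messages, openai.ChatCompletionMessage{
//		Role:    openai.ChatMessageRoleUser,
//		Content: "Question: " + question,
//	})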
var systemPromptTpl = template.Must(template.New("system_prompt").Parse(`
You are a helpful assistant with access to knowledge of the Quran, Hadiths and names of Allah. You are tasked with answering questions related to Islam, life and the world.

Answer the question in a very concise manner. Use an unbiased and compassionate tone. Do not repeat text. Don't make anything up. If you are not sure about something, just say that you don't know.
{{- /* Stop here if no context is provided. The rest below is for handling contexts. */ -}}
{{- if . -}}
Answer the question solely based on the provided context. If the search results within the context are not relevant to the question, say I don't know.

Anything between the following 'context' XML blocks is retrieved from the knowledge base, not part of the conversation with the user. The bullet points are ordered by relevance, so the first one is the most relevant.

<context>
    {{- if . -}}
    {{- range $context := .}}
    - {{.}}{{end}}
    {{- end}}
</context>
{{- end -}}

Don't mention the knowledge base, context or search results in your answer.
`))
// askLLM renders the system prompt from the given contexts, sends it together
// with the user's question to the OpenAI chat completion API, and returns the
// model's reply.
func askLLM(ctx context.Context, contexts []string, question string) string {
	openAIClient := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	// Render the system prompt, embedding the retrieved contexts (if any).
	sb := &strings.Builder{}
	err := systemPromptTpl.Execute(sb, contexts)
	if err != nil {
		panic(err)
	}
	messages := []openai.ChatCompletionMessage{
		{
			Role:    openai.ChatMessageRoleSystem,
			Content: sb.String(),
		}, {
			Role:    openai.ChatMessageRoleUser,
			Content: "Question: " + question,
		},
	}

	res, err := openAIClient.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
		Model:    openai.GPT4oMini,
		Messages: messages,
	})
	if err != nil {
		panic(err)
	}

	reply := res.Choices[0].Message.Content
	reply = strings.TrimSpace(reply)
	return reply
}
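
// Example usage (a sketch, not part of the original file): how askLLM might be
// wired up after a vector database query. retrieveContexts and the question are
// hypothetical; in the actual application the contexts would come from the
// similarity search over the embedded documents.
//
//	func main() {
//		ctx := context.Background()
//		question := "What does the Quran say about patience?"
//
//		// Hypothetical helper returning the top-k results from the vector DB.
//		contexts := retrieveContexts(ctx, question)
//
//		reply := askLLM(ctx, contexts, question)
//		fmt.Println(reply)
//	}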