Skip to content

Commit

Permalink
Add completion functions
Browse files Browse the repository at this point in the history
  • Loading branch information
NovacloudBot committed Jul 26, 2024
1 parent f41b030 commit c9732b0
Show file tree
Hide file tree
Showing 5 changed files with 305 additions and 0 deletions.
33 changes: 33 additions & 0 deletions src/completions/completion-with-functions.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import { describe, expect, it } from 'vitest'

import OpenAI from 'openai'
import { z } from 'zod'
import { createChatCompletionFunction } from '../function'
import { completionWithFunctions } from './completion-with-functions'

describe(
  'Completion with Functions',
  () => {
    it('should run completion with functions', async () => {
      // Single tool the model is instructed to call; it simply echoes its
      // string argument back with a greeting.
      const testFunction = createChatCompletionFunction({
        name: 'test',
        description: 'test function',
        parameters: z.object({ blah: z.string() }),
        handler: async ({ blah }) => `hello ${blah}`
      })

      const openai = new OpenAI()
      const result = await completionWithFunctions({
        client: openai,
        instructions: `Call test function foo and return it's value in fooResponse`,
        prompt: `blah is '123'`,
        functions: [testFunction]
      })

      // The model is expected to surface the tool output verbatim.
      expect(result.content).toEqual('hello 123')
    })
  },
  { concurrent: true }
)
133 changes: 133 additions & 0 deletions src/completions/completion-with-functions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
import type { ChatCompletionFunction } from '../function'
import OpenAI from 'openai'
import type { z } from 'zod'

import {
ChatCompletionMessageParam,
ChatCompletionTool
} from 'openai/resources'
import zodToJsonSchema from 'zod-to-json-schema'
import {
ChatCompletionCreateParamsBase,
ChatCompletionMessage,
ChatCompletionMessageToolCall
} from 'openai/resources/chat/completions'

/**
 * Base options for a chat completion call. Accepts any OpenAI
 * chat-completion request parameter except `functions`/`tools`, which are
 * derived from the `functions` option below.
 */
type CompletionOpts = Partial<
  Omit<ChatCompletionCreateParamsBase, 'functions' | 'tools'>
> & {
  // Pre-configured OpenAI client used to issue the request.
  client: OpenAI
  // System message content; seeds the conversation when `messages` is absent.
  instructions: string
  // Optional user message appended after the instructions.
  prompt?: string
  // Existing conversation history; when provided, `instructions` is NOT
  // re-added (see completionWithFunctions).
  messages?: ChatCompletionMessageParam[]
}

export type CompletionOptsWithFunctionOpts = CompletionOpts & {
  // Tools the model may call; each is executed locally via its `handler`.
  functions?: ChatCompletionFunction[]
  // Set to `false` to execute tool calls sequentially in the order the model
  // requested them; any other value (or omission) executes them in parallel.
  // Widened from the original `false`-only literal so callers may also pass
  // `true` explicitly to request the default parallel behavior.
  parallelFunctionExecution?: boolean
}

/**
 * Converts an internal ChatCompletionFunction definition into the OpenAI
 * tool descriptor shape, serializing its zod parameter schema (when one is
 * present) to JSON Schema.
 */
export const functionToOpenAIChatCompletionTool = <T extends z.ZodRawShape>(
  fn: ChatCompletionFunction<T>
): ChatCompletionTool => ({
  type: 'function',
  function: {
    name: fn.name,
    description: fn.description,
    parameters: fn.parameters ? zodToJsonSchema(fn.parameters) : undefined
  }
})

export const completionWithFunctions = async (
opts: CompletionOptsWithFunctionOpts
): Promise<ChatCompletionMessage> => {
const {
client,
instructions,
prompt,
functions,
parallelFunctionExecution: parallelToolCalls,
messages,
model,
...rest
} = opts

// initialize messages
const _messages: ChatCompletionMessageParam[] = messages ?? [
{ role: 'system', content: instructions }
]
if (prompt) {
_messages.push({ role: 'user', content: prompt })
}

const response = await client.chat.completions.create({

Check failure on line 67 in src/completions/completion-with-functions.ts

View workflow job for this annotation

GitHub Actions / build (20.x)

src/completions/completion-with-functions.spec.ts > Completion with Functions > should run completion with functions

Error: 400 you must provide a model parameter ❯ Function.generate node_modules/openai/src/error.ts:70:14 ❯ OpenAI.makeStatusError node_modules/openai/src/core.ts:397:21 ❯ OpenAI.makeRequest node_modules/openai/src/core.ts:460:24 ❯ Module.completionWithFunctions src/completions/completion-with-functions.ts:67:19 ❯ src/completions/completion-with-functions.spec.ts:13:16 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 400, headers: { 'alt-svc': 'h3=":443"; ma=86400', 'cf-cache-status': 'DYNAMIC', 'cf-ray': '8a96b6193ee83afa-IAD', connection: 'keep-alive', 'content-length': '167', 'content-type': 'application/json; charset=utf-8', date: 'Fri, 26 Jul 2024 19:14:12 GMT', server: 'cloudflare', 'set-cookie': '__cf_bm=TCNcAXXDGaA5CZhk35zyNeMyuipsyHkL73pQwwinseo-1722021252-1.0.1.1-r07bpr.V6jfZkP_nYuae7kZjjUzhghGT_Or9TgMQCfDNjo5KUGJq4M7ZeQi2Xh0LATEQD2tnoMdzDUqu3kspCg; path=/; expires=Fri, 26-Jul-24 19:44:12 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None, _cfuvid=WOJuj80DK.zG9p_OfOwK7uF_2JSfEBQvcIgans_Rhr0-1722021252228-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None', 'strict-transport-security': 'max-age=15552000; includeSubDomains; preload', vary: 'Origin', 'x-content-type-options': 'nosniff', 'x-request-id': 'req_f51305561ae98e73a09f853ff5cba6b2' }, request_id: 'req_f51305561ae98e73a09f853ff5cba6b2', error: { message: 'you must provide a model parameter', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }

Check failure on line 67 in src/completions/completion-with-functions.ts

View workflow job for this annotation

GitHub Actions / build (20.x)

src/completions/completion-with-json.spec.ts > Completion with JSON response > should run completion with JSON response

Error: 400 you must provide a model parameter ❯ Function.generate node_modules/openai/src/error.ts:70:14 ❯ OpenAI.makeStatusError node_modules/openai/src/core.ts:397:21 ❯ OpenAI.makeRequest node_modules/openai/src/core.ts:460:24 ❯ Module.completionWithFunctions src/completions/completion-with-functions.ts:67:19 ❯ Module.completionWithJsonResponse src/completions/completion-with-json.ts:40:14 ❯ src/completions/completion-with-json.spec.ts:13:16 ⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯⎯ Serialized Error: { status: 400, headers: { 'alt-svc': 'h3=":443"; ma=86400', 'cf-cache-status': 'DYNAMIC', 'cf-ray': '8a96b6162a1220b2-IAD', connection: 'keep-alive', 'content-length': '167', 'content-type': 'application/json; charset=utf-8', date: 'Fri, 26 Jul 2024 19:14:11 GMT', server: 'cloudflare', 'set-cookie': '__cf_bm=2VyJ56blcY3gBYZ8PyNwAkgRHvyseMGK_XXyzXMvtz4-1722021251-1.0.1.1-MP8P4dQPysig.jqN2cB.F7KcKUh6wYvL8SV1Q7DD0r61MYEqXCTFeE3.XpH9b_pTZp_cxeGpr0WP_qc9wX.W0A; path=/; expires=Fri, 26-Jul-24 19:44:11 GMT; domain=.api.openai.com; HttpOnly; Secure; SameSite=None, _cfuvid=XdYUoM1f3tMU3wBwCxMCIIwU7zKUBeX59NuBqKnu3Bc-1722021251740-0.0.1.1-604800000; path=/; domain=.api.openai.com; HttpOnly; Secure; SameSite=None', 'strict-transport-security': 'max-age=15552000; includeSubDomains; preload', vary: 'Origin', 'x-content-type-options': 'nosniff', 'x-request-id': 'req_5b1745001cf82b593734e3a8c52d3d58' }, request_id: 'req_5b1745001cf82b593734e3a8c52d3d58', error: { message: 'you must provide a model parameter', type: 'invalid_request_error', param: null, code: null }, code: null, param: null }
model: model,
messages: _messages,
tools: functions?.map(functionToOpenAIChatCompletionTool),
...rest,
stream: false
})

let message = response?.choices?.[0]?.message

const handleToolCall = async (toolCall: ChatCompletionMessageToolCall) => {
try {
const fn = functions?.find((f) => f.name === toolCall.function.name)
if (!fn) {
throw new Error(
`Function ${toolCall.function.name} not found in functions: [${functions?.map((f) => f.name).join(', ')}]`
)
}
const output = await fn.handler(JSON.parse(toolCall.function.arguments))
return {
tool_call_id: toolCall.id,
output
}
} catch (e) {
return {
tool_call_id: toolCall.id,
output: `Failed with error: ${e}`
}
}
}

if (message?.tool_calls) {
let toolCallResults: {
tool_call_id: string
output: string
}[] = []
if (parallelToolCalls === false) {
for (const toolCall of message?.tool_calls) {
const res = await handleToolCall(toolCall)
toolCallResults.push(res)
}
} else {
toolCallResults = await Promise.all(
message?.tool_calls.map(handleToolCall)
)
}
_messages.push(message)
for (const res of toolCallResults) {
_messages.push({
tool_call_id: res.tool_call_id,
role: 'tool',
content: res.output
})
}
return completionWithFunctions({
...opts,
messages: _messages,
prompt: undefined
})
}

if (message) {
return message
} else {
throw new Error('Invalid response (empty message)')
}
}
35 changes: 35 additions & 0 deletions src/completions/completion-with-json.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
import { describe, expect, it } from 'vitest'

import OpenAI from 'openai'
import { z } from 'zod'
import { completionWithJsonResponse } from './completion-with-json'
import { createChatCompletionFunction } from '../function'

describe(
  'Completion with JSON response',
  () => {
    it('should run completion with JSON response', async () => {
      // Tool the model is told to invoke; its output should surface inside
      // the structured JSON reply.
      const testFunction = createChatCompletionFunction({
        name: 'test',
        description: 'test function',
        parameters: z.object({ blah: z.string() }),
        handler: async ({ blah }) => `hello ${blah}`
      })

      const openai = new OpenAI()
      const result = await completionWithJsonResponse({
        client: openai,
        instructions: `Call test function foo and return it's value in fooResponse`,
        prompt: `blah is '123'`,
        responseObject: z.object({
          fooResponse: z.string()
        }),
        functions: [testFunction]
      })

      expect(result.fooResponse).toEqual('hello 123')
    })
  },
  { concurrent: true }
)
97 changes: 97 additions & 0 deletions src/completions/completion-with-json.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
import type { z } from 'zod'
import type { ChatCompletionFunction } from '../function'

import { ChatCompletionTool } from 'openai/resources'
import zodToJsonSchema from 'zod-to-json-schema'
import {
CompletionOptsWithFunctionOpts,
completionWithFunctions
} from './completion-with-functions'

// Options for completionWithJsonResponse: the function-calling completion
// options plus the zod object schema the model's JSON reply must satisfy.
export type CompletionOptsWithJsonResponse<T extends z.ZodRawShape> =
  CompletionOptsWithFunctionOpts & {
    responseObject: z.ZodObject<T>
  }

export const functionToOpenAIChatCompletionTool = <T extends z.ZodRawShape>(
fn: ChatCompletionFunction<T>
): ChatCompletionTool => {
const params = fn.parameters ? zodToJsonSchema(fn.parameters) : undefined
return {
type: 'function',
function: {
name: fn.name,
description: fn.description,
parameters: params
}
}
}

/**
 * Runs a function-enabled completion and parses the model's reply as JSON,
 * validated against the provided zod object schema.
 *
 * @throws when the model returns no content, or the content fails JSON
 *   parsing / schema validation.
 */
export const completionWithJsonResponse = async <T extends z.ZodRawShape>(
  opts: CompletionOptsWithJsonResponse<T>
): Promise<z.infer<z.ZodObject<T>>> => {
  const { responseObject, prompt, ...rest } = opts

  // Describe the expected output shape to the model as JSON Schema, embedded
  // directly in the prompt.
  const responseObjectSchema = JSON.stringify(zodToJsonSchema(responseObject))
  const _prompt = `Output JSON must be single object (only one JSON object) conforming to the following JsonSchema7:\n${responseObjectSchema}\n\n${prompt ? `${prompt}\n\n` : ''}\n`

  const res = await completionWithFunctions({
    ...rest,
    response_format: { type: 'json_object' },
    prompt: _prompt
  })

  if (!res.content) {
    throw new Error('Invalid response (null)')
  }

  try {
    // Strip a markdown code fence if the model wrapped its JSON in one.
    const content = res.content.replace(/^```json\n/, '').replace(/```$/, '')
    let parsedContent = JSON.parse(content)
    // Some replies echo the schema itself; in that case unwrap `properties`.
    if (parsedContent.$schema && parsedContent.properties) {
      parsedContent = parsedContent.properties
    }
    return responseObject.parse(parsedContent)
  } catch (err) {
    throw new Error(`Failed to parse response: ${err}, json: '${res.content}'`)
  }
}

/**
 * Like completionWithJsonResponse, but on parse/validation failure feeds the
 * error back to the model as an extra user message and retries, up to
 * `retryCount` additional attempts.
 *
 * Fixes the original retry condition, which was inverted (`retryCount <= 0`
 * triggered the retry): with the default of 2 it threw "Max retries reached"
 * on the very first error without retrying, and with 0 it could recurse
 * without bound while errors persisted.
 *
 * @param props - JSON-response completion options.
 * @param retryCount - number of retries remaining (default 2).
 * @throws when all retries are exhausted, wrapping the last error.
 */
export const completionWithJsonResponseWithRetry = async <
  T extends z.ZodRawShape
>(
  props: CompletionOptsWithJsonResponse<T>,
  retryCount = 2
): Promise<z.infer<z.ZodObject<T>>> => {
  try {
    return await completionWithJsonResponse(props)
  } catch (err) {
    // Out of retries: surface the last error.
    if (retryCount <= 0) {
      throw new Error(`Max retries reached. Last error: ${err}`)
    }
    // Tell the model what went wrong and try again with one fewer retry.
    return await completionWithJsonResponseWithRetry(
      {
        ...props,
        response_format: { type: 'json_object' },
        messages: [
          ...(props.messages ?? []),
          {
            role: 'user',
            content: [
              {
                type: 'text',
                text: `Your latest reply contains following error:\n\`${err}\``
              }
            ]
          }
        ]
      },
      retryCount - 1
    )
  }
}
7 changes: 7 additions & 0 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
// Completion helpers: plain function-calling and JSON-structured output.
export { completionWithFunctions } from './completions/completion-with-functions'
export {
  completionWithJsonResponse,
  completionWithJsonResponseWithRetry
} from './completions/completion-with-json'

// Assistant/thread abstractions, prompt chains, and client factories.
export { Assistant, AssistantOpts } from './assistant'
export { promptWithPick } from './chains/prompt-with-pick'
export { promptWithRetry } from './chains/prompt-with-retry'
export { createChatCompletionFunction } from './function'
export { createOpenAIClient, getDefaultOpenAIClient } from './openai-client'

export { Thread, ThreadPromptWithFunctionOpts } from './thread'

0 comments on commit c9732b0

Please sign in to comment.