Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Wait for new Node release] adds max output tokens to config #467

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ export const chatConfigFormSchema = z.object({
temperature: z.number(),
topP: z.number(),
topK: z.number(),
max_tokens: z.number(),
});

export type ChatConfigFormSchemaType = z.infer<typeof chatConfigFormSchema>;
Expand Down Expand Up @@ -72,6 +73,46 @@ function ChatConfigForm({ form }: ChatConfigFormProps) {
</FormItem>
)}
/>
<FormField
control={form.control}
name="max_tokens"
render={({ field }) => (
<FormItem className="flex gap-2.5">
<FormControl>
<HoverCard openDelay={200}>
<HoverCardTrigger asChild>
<div className="grid w-full gap-4">
<div className="flex items-center justify-between">
<Label htmlFor="max_tokens">Output Tokens</Label>
<span className="text-muted-foreground hover:border-border w-12 rounded-md border border-transparent px-2 py-0.5 text-right text-sm">
{field.value}
</span>
</div>
<Slider
aria-label="Output Tokens"
className="[&_[role=slider]]:h-4 [&_[role=slider]]:w-4"
id="max_tokens"
max={8000}
onValueChange={(vals) => {
field.onChange(vals[0]);
}}
step={100}
value={[field.value]}
/>
</div>
</HoverCardTrigger>
<HoverCardContent
align="start"
className="w-[260px] bg-gray-600 px-2 py-3 text-xs"
side="left"
>
Output Tokens determines the maximum number of tokens (words or parts of words) that the AI can generate in a single response. Higher values allow for longer responses.
</HoverCardContent>
</HoverCard>
</FormControl>
</FormItem>
)}
/>
<FormField
control={form.control}
name="temperature"
Expand Down Expand Up @@ -245,6 +286,7 @@ export function UpdateChatConfigActionBar() {
temperature: chatConfig?.temperature,
topP: chatConfig?.top_p,
topK: chatConfig?.top_k,
max_tokens: chatConfig?.max_tokens,
},
});

Expand All @@ -267,6 +309,7 @@ export function UpdateChatConfigActionBar() {
temperature: chatConfig.temperature,
topP: chatConfig.top_p,
topK: chatConfig.top_k,
max_tokens: chatConfig.max_tokens,
});
}
}, [chatConfig, form]);
Expand All @@ -283,6 +326,7 @@ export function UpdateChatConfigActionBar() {
temperature: data.temperature,
top_p: data.topP,
top_k: data.topK,
max_tokens: data.max_tokens,
},
});
};
Expand All @@ -297,6 +341,7 @@ export function UpdateChatConfigActionBar() {
temperature: chatConfig?.temperature,
topP: chatConfig?.top_p,
topK: chatConfig?.top_k,
max_tokens: chatConfig?.max_tokens,
});
}
}}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,7 @@ function ConversationEmptyFooter() {
temperature: DEFAULT_CHAT_CONFIG.temperature,
topP: DEFAULT_CHAT_CONFIG.top_p,
topK: DEFAULT_CHAT_CONFIG.top_k,
max_tokens: DEFAULT_CHAT_CONFIG.max_tokens,
},
});

Expand Down Expand Up @@ -314,6 +315,7 @@ function ConversationEmptyFooter() {
temperature: chatConfigForm.getValues('temperature'),
top_p: chatConfigForm.getValues('topP'),
top_k: chatConfigForm.getValues('topK'),
max_tokens: chatConfigForm.getValues('max_tokens'),
},
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -247,6 +247,7 @@ function BamlEditor() {
temperature: DEFAULT_CHAT_CONFIG.temperature,
top_p: DEFAULT_CHAT_CONFIG.top_p,
top_k: DEFAULT_CHAT_CONFIG.top_k,
max_tokens: DEFAULT_CHAT_CONFIG.max_tokens,
},
});
};
Expand Down
1 change: 1 addition & 0 deletions apps/shinkai-desktop/src/components/sheet/table-chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,7 @@ export default function ChatTable() {
temperature: DEFAULT_CHAT_CONFIG.temperature,
top_p: DEFAULT_CHAT_CONFIG.top_p,
top_k: DEFAULT_CHAT_CONFIG.top_k,
max_tokens: DEFAULT_CHAT_CONFIG.max_tokens,
},
});
createJobForm.reset();
Expand Down
1 change: 1 addition & 0 deletions libs/shinkai-message-ts/src/api/jobs/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,7 @@ export type GetFileNamesResponse = string[];
export type JobConfig = {
custom_prompt: string;
temperature?: number;
max_tokens?: number;
seed?: number;
top_k?: number;
top_p?: number;
Expand Down
1 change: 1 addition & 0 deletions libs/shinkai-node-state/src/v2/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,5 @@ export const DEFAULT_CHAT_CONFIG = {
top_k: 40,
top_p: 0.9,
stream: true,
max_tokens: 2000,
} as const;
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ export const useGetChatConfig = (
top_k: data.top_k ?? DEFAULT_CHAT_CONFIG.top_k,
top_p: data.top_p ?? DEFAULT_CHAT_CONFIG.top_p,
stream: data.stream ?? DEFAULT_CHAT_CONFIG.stream,
max_tokens: data.max_tokens ?? DEFAULT_CHAT_CONFIG.max_tokens,
};
},
...options,
Expand Down