diff --git a/.gitignore b/.gitignore
index a4b46e383..9a1ebd6bd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,3 +49,4 @@
 apps/shinkai-desktop/src-tauri/node_storage
 apps/shinkai-desktop/src-tauri/bin/ollama-windows-resources/*
 apps/shinkai-desktop/src-tauri/bin/*
+launch.json
diff --git a/apps/shinkai-desktop/src/pages/chat/chat-conversation.tsx b/apps/shinkai-desktop/src/pages/chat/chat-conversation.tsx
index 1075f5e46..c2e8e5a20 100644
--- a/apps/shinkai-desktop/src/pages/chat/chat-conversation.tsx
+++ b/apps/shinkai-desktop/src/pages/chat/chat-conversation.tsx
@@ -203,7 +203,8 @@ const ChatConversation = () => {
   const currentInbox = useGetCurrentInbox();
   const hasProviderEnableStreaming =
     currentInbox?.agent?.model.split(':')?.[0] === Models.Ollama ||
-    currentInbox?.agent?.model.split(':')?.[0] === Models.Gemini;
+    currentInbox?.agent?.model.split(':')?.[0] === Models.Gemini ||
+    currentInbox?.agent?.model.split(':')?.[0] === Models.Exo;
 
   const chatForm = useForm({
     resolver: zodResolver(chatMessageFormSchema),
diff --git a/apps/shinkai-desktop/src/pages/create-agent.tsx b/apps/shinkai-desktop/src/pages/create-agent.tsx
index dac36a692..a2e1d2b0d 100644
--- a/apps/shinkai-desktop/src/pages/create-agent.tsx
+++ b/apps/shinkai-desktop/src/pages/create-agent.tsx
@@ -56,6 +56,10 @@ const modelOptions: { value: Models; label: string }[] = [
     value: Models.Gemini,
     label: 'Gemini',
   },
+  {
+    value: Models.Exo,
+    label: 'Exo',
+  },
 ];
 
 export const getModelObject = (
@@ -71,6 +75,8 @@
       return { Ollama: { model_type: modelType } };
     case Models.Gemini:
       return { Gemini: { model_type: modelType } };
+    case Models.Exo:
+      return { Exo: { model_type: modelType } };
     default:
       return { [model]: { model_type: modelType } };
   }
diff --git a/libs/shinkai-message-ts/src/models/SchemaTypes.ts b/libs/shinkai-message-ts/src/models/SchemaTypes.ts
index 572654fa9..3f19b65cd 100644
--- a/libs/shinkai-message-ts/src/models/SchemaTypes.ts
+++ b/libs/shinkai-message-ts/src/models/SchemaTypes.ts
@@ -250,6 +250,7 @@ export type AgentAPIModel = {
   GenericAPI?: GenericAPI;
   Ollama?: Ollama;
   Gemini?: Gemini;
+  Exo?: Exo;
 } & {
   [model: string]: ModelType;
 };
@@ -273,6 +274,10 @@
   model_type: string;
 }
 
+export interface Exo {
+  model_type: string;
+}
+
 export interface APIAddAgentRequest {
   agent: SerializedLLMProvider;
 }
diff --git a/libs/shinkai-message-ts/src/wasm/SerializedLLMProviderWrapper.ts b/libs/shinkai-message-ts/src/wasm/SerializedLLMProviderWrapper.ts
index 72f66b7e0..45943afe1 100644
--- a/libs/shinkai-message-ts/src/wasm/SerializedLLMProviderWrapper.ts
+++ b/libs/shinkai-message-ts/src/wasm/SerializedLLMProviderWrapper.ts
@@ -40,6 +40,8 @@ export class SerializedLLMProviderWrapper {
      modelStr = 'ollama:' + agent.model.Ollama.model_type;
    } else if (agent?.model?.Gemini) {
      modelStr = 'gemini:' + agent.model.Gemini.model_type;
+   } else if (agent?.model?.Exo) {
+     modelStr = 'exo:' + agent.model.Exo.model_type;
    } else if (Object.keys(agent?.model).length > 0) {
      const customModelProvider = Object.keys(agent.model)[0];
      modelStr = `${customModelProvider}:${agent.model[customModelProvider].model_type}`;
diff --git a/libs/shinkai-node-state/src/lib/utils/models.ts b/libs/shinkai-node-state/src/lib/utils/models.ts
index 2097773fb..7d1ebab99 100644
--- a/libs/shinkai-node-state/src/lib/utils/models.ts
+++ b/libs/shinkai-node-state/src/lib/utils/models.ts
@@ -3,6 +3,7 @@ export enum Models {
   TogetherComputer = 'togethercomputer',
   Ollama = 'ollama',
   Gemini = 'gemini',
+  Exo = 'exo',
 }
 
 export const modelsConfig = {
@@ -74,4 +75,13 @@
       },
     ],
   },
+  [Models.Exo]: {
+    apiUrl: 'http://localhost:8000',
+    modelTypes: [
+      {
+        name: 'Llama3.1-8b',
+        value: 'llama3.1-8b',
+      },
+    ],
+  },
 };