Skip to content

Commit

Permalink
llama_cpp_python: use the same snippet
Browse files (browse the repository at this point in the history)
  • Loading branch information
mishig25 committed Nov 20, 2024
1 parent d6c5b5b commit fa745eb
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 6 deletions.
1 change: 1 addition & 0 deletions packages/tasks/src/model-libraries-snippets.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ describe("model-libraries-snippets", () => {
it("llama_cpp_python conversational", async () => {
const model: ModelData = {
id: "bartowski/Llama-3.2-3B-Instruct-GGUF",
pipeline_tag: "text-generation",
tags: ["conversational"],
inference: "",
};
Expand Down
11 changes: 5 additions & 6 deletions packages/tasks/src/model-libraries-snippets.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
import type { ModelData } from "./model-data.js";
import type { WidgetExampleTextInput, WidgetExampleSentenceSimilarityInput } from "./widget-example.js";
import { LIBRARY_TASK_MAPPING } from "./library-to-tasks.js";
import { getModelInputSnippet } from "./snippets/inputs.js";
import { ChatCompletionInputMessage } from "./tasks/index.js";
import { stringifyMessages } from "./snippets/common.js";

const TAG_CUSTOM_CODE = "custom_code";

Expand Down Expand Up @@ -430,13 +433,9 @@ llm = Llama.from_pretrained(
];

if (model.tags.includes("conversational")) {
const messages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
snippets.push(`llm.create_chat_completion(
messages = [
{
"role": "user",
"content": "What is the capital of France?"
}
]
messages = ${stringifyMessages(messages, { attributeKeyQuotes: true, indent: "\t" })}
)`);
} else {
snippets.push(`output = llm(
Expand Down

0 comments on commit fa745eb

Please sign in to comment.