fix model Id from setenv instead of dropdown selector #14

Merged
merged 1 commit on May 10, 2024
4 changes: 2 additions & 2 deletions .gitignore
@@ -6,7 +6,6 @@
 # Icon must end with two \r
 Icon
 
-
 # Thumbnails
 ._*
 
@@ -75,4 +74,5 @@ build/
 !**/src/test/**/build/
 .idea
 bin/
-dist/
+dist/
+application-local.yaml
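
The newly ignored application-local.yaml is presumably where the genai.model_id property used below is kept out of version control. A minimal sketch of what it might contain, assuming only the property key from the @Value annotation in PromptController and the OCID prefix from the check this PR comments out (the real model OCID is not part of the diff):

genai:
  model_id: ocid1.generativeaimodel.oc1...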
13 changes: 9 additions & 4 deletions PromptController.java
@@ -8,6 +8,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Value;
 import org.springframework.http.HttpStatus;
 import org.springframework.messaging.handler.annotation.MessageMapping;
 import org.springframework.messaging.simp.annotation.SendToUser;
@@ -20,6 +21,9 @@
 public class PromptController {
     Logger logger = LoggerFactory.getLogger(PromptController.class);
 
+    @Value("${genai.model_id}")
+    private String hardcodedModelId;
+
     @Autowired
     private final InteractionRepository interactionRepository;
 
@@ -35,17 +39,18 @@ public PromptController(InteractionRepository interactionRepository, OCIGenAISer
     @SendToUser("/queue/answer")
     public Answer handlePrompt(Prompt prompt) {
         String promptEscaped = HtmlUtils.htmlEscape(prompt.content());
-        logger.info("Prompt " + promptEscaped + " received, on model " + prompt.modelId());
+        logger.info("Prompt " + promptEscaped + " received, on model " + prompt.modelId() + " but using hardcoded one" +
+                " " + hardcodedModelId);
         Interaction interaction = new Interaction();
         interaction.setConversationId(prompt.conversationId());
         interaction.setDatetimeRequest(new Date());
-        interaction.setModelId(prompt.modelId());
+        interaction.setModelId(hardcodedModelId);
         interaction.setRequest(promptEscaped);
         Interaction saved = interactionRepository.save(interaction);
         try {
             if (prompt.content() == null || prompt.content().length() < 1) { throw new InvalidPromptRequest(); }
-            if (prompt.modelId() == null || !prompt.modelId().startsWith("ocid1.generativeaimodel.")) { throw new InvalidPromptRequest(); }
-            String responseFromGenAI = genAI.request(promptEscaped, prompt.modelId());
+            // if (prompt.modelId() == null || !prompt.modelId().startsWith("ocid1.generativeaimodel.")) { throw new InvalidPromptRequest(); }
+            String responseFromGenAI = genAI.request(promptEscaped, hardcodedModelId);
             saved.setDatetimeResponse(new Date());
             saved.setResponse(responseFromGenAI);
             interactionRepository.save(saved);
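
With the model ID resolved from configuration, it can be supplied per environment instead of from the UI, which matches the "setenv" in the PR title. One hedged example, relying on the standard Spring Boot behavior that ${genai.model_id} falls back to the GENAI_MODEL_ID environment variable (the actual OCID is not in the diff):

export GENAI_MODEL_ID=ocid1.generativeaimodel.oc1...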
13 changes: 6 additions & 7 deletions scripts/lib/oci.mjs
@@ -369,13 +369,12 @@ export async function getLatestGenAIModels(
     vendor: vendor,
   });
 
-  const filteredByCapatility = activeCohereModels.filter(
-    ({ capabilities }) => {
-      if (capabilities.length !== 1) return false;
-      if (capabilities[0] !== capability) return false;
-      return true;
-    }
-  );
+  const filteredByCapatility = activeCohereModels.filter((model) => {
+    const { capabilities } = model;
+    if (capabilities.length !== 1) return false;
+    if (capabilities[0] !== capability) return false;
+    return true;
+  });
 
   const latestVersion = max(filteredByCapatility, (item) =>
     parseFloat(item.version)
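
Both filter forms are behaviorally equivalent: each keeps only the models whose single capability equals the requested capability; the rewrite just moves the destructuring from the parameter list into the function body.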
52 changes: 28 additions & 24 deletions web/src/Chat.jsx
@@ -27,28 +27,28 @@ function Chat() {
   const [updateModels, setUpdateModels] = useState(true);
   const { subscribe, unsubscribe, send, isConnected } = useStomp();
 
-  useEffect(() => {
-    const fecthModels = async () => {
-      try {
-        const response = await fetch("/api/genai/models");
-        const data = await response.json();
-        setModels(
-          data.filter(
-            ({ capabilities }) =>
-              capabilities.length === 1 &&
-              capabilities.includes("TEXT_GENERATION")
-          )
-        );
-      } catch (error) {
-        setErrorMessage("Error fetching Generative AI Models from Backend");
-      }
-    };
+  // useEffect(() => {
+  //   const fecthModels = async () => {
+  //     try {
+  //       const response = await fetch("/api/genai/models");
+  //       const data = await response.json();
+  //       setModels(
+  //         data.filter(
+  //           ({ capabilities }) =>
+  //             capabilities.length === 1 &&
+  //             capabilities.includes("TEXT_GENERATION")
+  //         )
+  //       );
+  //     } catch (error) {
+  //       setErrorMessage("Error fetching Generative AI Models from Backend");
+  //     }
+  //   };
 
-    if (updateModels) {
-      setUpdateModels(false);
-      fecthModels();
-    }
-  }, [updateModels]);
+  //   if (updateModels) {
+  //     setUpdateModels(false);
+  //     fecthModels();
+  //   }
+  // }, [updateModels]);
 
   useEffect(() => {
     let timeoutId;
@@ -90,7 +90,11 @@

   useEffect(() => {
     if (isConnected && promptValue.length) {
-      send("/genai/prompt", { conversationId, content: promptValue, modelId });
+      send("/genai/prompt", {
+        conversationId,
+        content: promptValue,
+        modelId: "notapply",
+      });
       setWaiting(true);
       setPromptValue("");
     }
@@ -99,7 +103,7 @@

   return (
     <Box>
-      <FormControl fullWidth>
+      {/* <FormControl fullWidth>
         <InputLabel id="model-label">Model</InputLabel>
         <Select
           labelId="model-label"
@@ -116,7 +120,7 @@
           ))}
         </Select>
       </FormControl>
-      <Divider style={{ margin: "1rem" }} />
+      <Divider style={{ margin: "1rem" }} /> */}
       <Conversation>{conversation}</Conversation>
       {waiting && <CircularProgress style={{ padding: "1rem" }} />}
       <PromptInput
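
Net effect across the three files: the model dropdown and the model-fetch effect in Chat.jsx are commented out, the frontend sends the placeholder modelId "notapply", and the backend ignores that value in favor of the configured genai.model_id, with the OCID validation on the incoming modelId disabled accordingly.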