-
Notifications
You must be signed in to change notification settings - Fork 138
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Allow llmQuestion to be optional when llmMessages is used. (Issue #3… #3072
Changes from 1 commit
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -359,7 +359,7 @@ public class RestMLRAGSearchProcessorIT extends MLCommonsRestTestCase { | |
+ " \"ext\": {\n" | ||
+ " \"generative_qa_parameters\": {\n" | ||
+ " \"llm_model\": \"%s\",\n" | ||
+ " \"llm_question\": \"%s\",\n" | ||
// + " \"llm_question\": \"%s\",\n" | ||
+ " \"system_prompt\": \"%s\",\n" | ||
+ " \"user_instructions\": \"%s\",\n" | ||
+ " \"context_size\": %d,\n" | ||
|
@@ -378,7 +378,7 @@ public class RestMLRAGSearchProcessorIT extends MLCommonsRestTestCase { | |
+ " \"ext\": {\n" | ||
+ " \"generative_qa_parameters\": {\n" | ||
+ " \"llm_model\": \"%s\",\n" | ||
+ " \"llm_question\": \"%s\",\n" | ||
// + " \"llm_question\": \"%s\",\n" | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. same here? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Will remove it. |
||
// + " \"system_prompt\": \"%s\",\n" | ||
+ " \"user_instructions\": \"%s\",\n" | ||
+ " \"context_size\": %d,\n" | ||
|
@@ -723,8 +723,12 @@ public void testBM25WithBedrock() throws Exception { | |
public void testBM25WithBedrockConverse() throws Exception { | ||
// Skip test if key is null | ||
if (AWS_ACCESS_KEY_ID == null) { | ||
System.out.println("Skipping testBM25WithBedrockConverse because AWS_ACCESS_KEY_ID is null"); | ||
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. minor: can we use a logger here instead of System.out.println? |
||
return; | ||
} | ||
|
||
System.out.println("Running testBM25WithBedrockConverse"); | ||
|
||
Response response = createConnector(BEDROCK_CONVERSE_CONNECTOR_BLUEPRINT); | ||
Map responseMap = parseResponseToMap(response); | ||
String connectorId = (String) responseMap.get("connector_id"); | ||
|
@@ -775,8 +779,11 @@ public void testBM25WithBedrockConverse() throws Exception { | |
public void testBM25WithBedrockConverseUsingLlmMessages() throws Exception { | ||
// Skip test if key is null | ||
if (AWS_ACCESS_KEY_ID == null) { | ||
System.out.println("Skipping testBM25WithBedrockConverseUsingLlmMessages because AWS_ACCESS_KEY_ID is null"); | ||
return; | ||
} | ||
System.out.println("Running testBM25WithBedrockConverseUsingLlmMessages"); | ||
|
||
Response response = createConnector(BEDROCK_CONVERSE_CONNECTOR_BLUEPRINT2); | ||
Map responseMap = parseResponseToMap(response); | ||
String connectorId = (String) responseMap.get("connector_id"); | ||
|
@@ -835,8 +842,11 @@ public void testBM25WithBedrockConverseUsingLlmMessages() throws Exception { | |
public void testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat() throws Exception { | ||
// Skip test if key is null | ||
if (AWS_ACCESS_KEY_ID == null) { | ||
System.out.println("Skipping testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat because AWS_ACCESS_KEY_ID is null"); | ||
return; | ||
} | ||
|
||
System.out.println("Running testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat"); | ||
Response response = createConnector(BEDROCK_DOCUMENT_CONVERSE_CONNECTOR_BLUEPRINT2); | ||
Map responseMap = parseResponseToMap(response); | ||
String connectorId = (String) responseMap.get("connector_id"); | ||
|
@@ -894,8 +904,11 @@ public void testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat() throws | |
public void testBM25WithOpenAIWithConversation() throws Exception { | ||
// Skip test if key is null | ||
if (OPENAI_KEY == null) { | ||
System.out.println("Skipping testBM25WithOpenAIWithConversation because OPENAI_KEY is null"); | ||
return; | ||
} | ||
System.out.println("Running testBM25WithOpenAIWithConversation"); | ||
|
||
Response response = createConnector(OPENAI_CONNECTOR_BLUEPRINT); | ||
Map responseMap = parseResponseToMap(response); | ||
String connectorId = (String) responseMap.get("connector_id"); | ||
|
@@ -951,8 +964,11 @@ public void testBM25WithOpenAIWithConversation() throws Exception { | |
public void testBM25WithOpenAIWithConversationAndImage() throws Exception { | ||
// Skip test if key is null | ||
if (OPENAI_KEY == null) { | ||
System.out.println("Skipping testBM25WithOpenAIWithConversationAndImage because OPENAI_KEY is null"); | ||
return; | ||
} | ||
System.out.println("Running testBM25WithOpenAIWithConversationAndImage"); | ||
|
||
Response response = createConnector(OPENAI_4o_CONNECTOR_BLUEPRINT); | ||
Map responseMap = parseResponseToMap(response); | ||
String connectorId = (String) responseMap.get("connector_id"); | ||
|
@@ -1245,7 +1261,6 @@ private Response performSearch(String indexName, String pipeline, int size, Sear | |
requestParameters.source, | ||
requestParameters.match, | ||
requestParameters.llmModel, | ||
requestParameters.llmQuestion, | ||
requestParameters.systemPrompt, | ||
requestParameters.userInstructions, | ||
requestParameters.contextSize, | ||
|
@@ -1268,8 +1283,6 @@ private Response performSearch(String indexName, String pipeline, int size, Sear | |
requestParameters.source, | ||
requestParameters.match, | ||
requestParameters.llmModel, | ||
requestParameters.llmQuestion, | ||
// requestParameters.systemPrompt, | ||
requestParameters.userInstructions, | ||
requestParameters.contextSize, | ||
requestParameters.interactionSize, | ||
|
@@ -1309,7 +1322,6 @@ private Response performSearch(String indexName, String pipeline, int size, Sear | |
requestParameters.source, | ||
requestParameters.match, | ||
requestParameters.llmModel, | ||
requestParameters.llmQuestion, | ||
requestParameters.systemPrompt, | ||
requestParameters.userInstructions, | ||
requestParameters.contextSize, | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -167,9 +167,11 @@ public GenerativeQAParameters( | |
this.conversationId = conversationId; | ||
this.llmModel = llmModel; | ||
|
||
// TODO: keep this requirement until we can extract the question from the query or from the request processor parameters | ||
// for question rewriting. | ||
Preconditions.checkArgument(!Strings.isNullOrEmpty(llmQuestion), LLM_QUESTION + " must be provided."); | ||
Preconditions | ||
.checkArgument( | ||
!(Strings.isNullOrEmpty(llmQuestion) && (llmMessages == null || llmMessages.isEmpty())), | ||
"At least one of " + LLM_QUESTION + " or " + LLM_MESSAGES_FIELD + " must be provided." | ||
); | ||
this.llmQuestion = llmQuestion; | ||
this.systemPrompt = systemPrompt; | ||
this.userInstructions = userInstructions; | ||
|
@@ -185,7 +187,7 @@ public GenerativeQAParameters( | |
public GenerativeQAParameters(StreamInput input) throws IOException { | ||
this.conversationId = input.readOptionalString(); | ||
this.llmModel = input.readOptionalString(); | ||
this.llmQuestion = input.readString(); | ||
this.llmQuestion = input.readOptionalString(); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Hi @austintlee this does not quite make sense to me, why you only make There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @b4sjoo I do make them both optional in the first constructor:
But internally, `llmMessages` is never null (it is kept as an empty list when not provided). So, when we write out to StreamOutput, we don't need to do a null check:
Which is why I always expect it to be present (at least as an empty list) when I read it back: Is this an incorrect assumption? Does the StreamInput constructor need to consider a null `llmMessages`? You can also take a look at the stream roundtrip test cases I have in the unit tests. There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more.
Is this an incorrect assumption? Does the StreamInput constructor need to consider You can also take a look at stream roundtrip test cases I have in There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. It's just because I saw a null check above and then you make here mandatory makes me confused. I think your answer makes sense to me, that There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. BTW, changing a There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Going from required to optional should be OK, but not the other way around. How do we test it? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. @pyek-bot is currently testing it, he should have a result by tomorrow. Basically we create a lower version cluster (e.g. 2.16) with dedicated master node, then we upgrade the data node to the current version to test. After this we perform the test again, but we upgrade master this time. Does this make sense? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I have tested this scenario. It seems to work fine when all nodes are eventually upgraded to 2.17.
However, when both nodes are upgraded, it works as expected with both llmQuestion and llmMessages. |
||
this.systemPrompt = input.readOptionalString(); | ||
this.userInstructions = input.readOptionalString(); | ||
this.contextSize = input.readInt(); | ||
|
@@ -246,9 +248,7 @@ public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params | |
public void writeTo(StreamOutput out) throws IOException { | ||
out.writeOptionalString(conversationId); | ||
out.writeOptionalString(llmModel); | ||
|
||
Preconditions.checkNotNull(llmQuestion, "llm_question must not be null."); | ||
out.writeString(llmQuestion); | ||
out.writeOptionalString(llmQuestion); | ||
out.writeOptionalString(systemPrompt); | ||
out.writeOptionalString(userInstructions); | ||
out.writeInt(contextSize); | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
why not removing this line?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Will do.