Skip to content

Commit

Permalink
Refactor the semantic_text field so that it can register all the sub…
Browse files Browse the repository at this point in the history
…-fields in the mapping
  • Loading branch information
jimczi committed Mar 19, 2024
1 parent 86ddc9d commit b2b8635
Show file tree
Hide file tree
Showing 12 changed files with 815 additions and 540 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1176,7 +1176,7 @@ public static final class Conflicts {
private final String mapperName;
private final List<String> conflicts = new ArrayList<>();

Conflicts(String mapperName) {
public Conflicts(String mapperName) {
this.mapperName = mapperName;
}

Expand All @@ -1188,7 +1188,11 @@ void addConflict(String parameter, String existing, String toMerge) {
conflicts.add("Cannot update parameter [" + parameter + "] from [" + existing + "] to [" + toMerge + "]");
}

void check() {
public boolean hasConflicts() {
return conflicts.isEmpty() == false;
}

public void check() {
if (conflicts.isEmpty()) {
return;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,9 +171,12 @@ public void parse(DocumentParserContext context) throws IOException {
}

String feature = null;
boolean origIsWithLeafObject = context.path().isWithinLeafObject();
try {
// make sure that we don't expand dots in field names while parsing
context.path().setWithinLeafObject(true);
if (context.path().isWithinLeafObject() == false) {
context.path().setWithinLeafObject(true);
}
for (Token token = context.parser().nextToken(); token != Token.END_OBJECT; token = context.parser().nextToken()) {
if (token == Token.FIELD_NAME) {
feature = context.parser().currentName();
Expand Down Expand Up @@ -207,7 +210,7 @@ public void parse(DocumentParserContext context) throws IOException {
context.addToFieldNames(fieldType().name());
}
} finally {
context.path().setWithinLeafObject(false);
context.path().setWithinLeafObject(origIsWithLeafObject);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@
import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory;
import org.elasticsearch.xpack.inference.external.http.sender.RequestExecutorServiceSettings;
import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
import org.elasticsearch.xpack.inference.mapper.InferenceResultFieldMapper;
import org.elasticsearch.xpack.inference.mapper.InferenceMetadataFieldMapper;
import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper;
import org.elasticsearch.xpack.inference.registry.ModelRegistry;
import org.elasticsearch.xpack.inference.rest.RestDeleteInferenceModelAction;
Expand Down Expand Up @@ -285,7 +285,7 @@ public Map<String, Mapper.TypeParser> getMappers() {

@Override
public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
return Map.of(InferenceResultFieldMapper.NAME, InferenceResultFieldMapper.PARSER);
return Map.of(InferenceMetadataFieldMapper.NAME, InferenceMetadataFieldMapper.PARSER);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@
import org.elasticsearch.inference.Model;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.xpack.inference.mapper.InferenceResultFieldMapper;
import org.elasticsearch.xpack.inference.mapper.InferenceMetadataFieldMapper;
import org.elasticsearch.xpack.inference.registry.ModelRegistry;

import java.util.ArrayList;
Expand All @@ -49,7 +49,7 @@

/**
* An {@link ActionFilter} that performs inference on {@link BulkShardRequest} asynchronously and stores the results in
* the individual {@link BulkItemRequest}. The results are then consumed by the {@link InferenceResultFieldMapper}
* the individual {@link BulkItemRequest}. The results are then consumed by the {@link InferenceMetadataFieldMapper}
* in the subsequent {@link TransportShardBulkAction} downstream.
*/
public class ShardBulkInferenceActionFilter implements ActionFilter {
Expand Down Expand Up @@ -261,10 +261,10 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons
Map<String, Object> newDocMap = indexRequest.sourceAsMap();
Map<String, Object> inferenceMap = new LinkedHashMap<>();
// ignore the existing inference map if any
newDocMap.put(InferenceResultFieldMapper.NAME, inferenceMap);
newDocMap.put(InferenceMetadataFieldMapper.NAME, inferenceMap);
for (FieldInferenceResponse fieldResponse : response.responses()) {
try {
InferenceResultFieldMapper.applyFieldInference(
InferenceMetadataFieldMapper.applyFieldInference(
inferenceMap,
fieldResponse.field(),
fieldResponse.model(),
Expand Down
Loading

0 comments on commit b2b8635

Please sign in to comment.