commit 50222ed
Parent: 4f29bd8
Author: jimczi
Date:   Nov 30, 2024

    spotless
Showing 9 changed files with 441 additions and 436 deletions.
==== changed file ====
@@ -27,7 +27,6 @@
 import org.elasticsearch.index.mapper.InferenceMetadataFieldsMapper;
 import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
-import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.indices.SystemIndexDescriptor;
 import org.elasticsearch.inference.InferenceServiceExtension;
 import org.elasticsearch.inference.InferenceServiceRegistry;
==== changed file ====
@@ -396,20 +396,28 @@ private void applyInferenceResponses(BulkItemRequest item, FieldInferenceRespons
         List<ChunkedInferenceServiceResults> results = responses.stream().map(r -> r.chunkedResults).collect(Collectors.toList());
         if (addMetadataField) {
             List<String> inputs = responses.stream()
-                .filter(r -> r.field().equals(fieldName))
-                .map(r -> r.input)
-                .collect(Collectors.toList());
+                .filter(r -> r.field().equals(fieldName))
+                .map(r -> r.input)
+                .collect(Collectors.toList());
             assert inputs.size() == 1;
             var result = new SemanticTextField(
                 fieldName,
                 model.getInferenceEntityId(),
                 new SemanticTextField.ModelSettings(model),
-                SemanticTextField.toSemanticTextFieldChunks(indexCreatedVersion, inputs.get(0), results, indexRequest.getContentType()),
+                SemanticTextField.toSemanticTextFieldChunks(
+                    indexCreatedVersion,
+                    inputs.get(0),
+                    results,
+                    indexRequest.getContentType()
+                ),
                 indexRequest.getContentType()
             );
             inferenceFieldsMap.put(fieldName, result);
         } else {
-            List<String> inputs = responses.stream().filter(r -> r.isOriginalFieldInput).map(r -> r.input).collect(Collectors.toList());
+            List<String> inputs = responses.stream()
+                .filter(r -> r.isOriginalFieldInput)
+                .map(r -> r.input)
+                .collect(Collectors.toList());
             assert inputs.size() == 1;
             var result = new LegacySemanticTextField(
                 fieldName,
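
Aside: the hunk above is formatting-only, but the pipeline it re-wraps is worth seeing on its own. A minimal, self-contained sketch of the pattern (narrow the responses to one field, collect the raw inputs, assert there is exactly one) — the record type below is a hypothetical stand-in for the real Elasticsearch class, not the actual implementation:

    import java.util.List;
    import java.util.stream.Collectors;

    // Hypothetical stand-in for the Elasticsearch response type.
    record FieldInferenceResponse(String field, String input) {}

    class SingleInputDemo {
        static String singleInputFor(List<FieldInferenceResponse> responses, String fieldName) {
            // Keep only the responses for this field, then project out the raw inputs.
            List<String> inputs = responses.stream()
                .filter(r -> r.field().equals(fieldName))
                .map(FieldInferenceResponse::input)
                .collect(Collectors.toList());
            // The surrounding code guarantees exactly one input per field.
            assert inputs.size() == 1;
            return inputs.get(0);
        }

        public static void main(String[] args) {
            var responses = List.of(
                new FieldInferenceResponse("body", "some text"),
                new FieldInferenceResponse("title", "a title")
            );
            System.out.println(singleInputFor(responses, "title")); // prints: a title
        }
    }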
==== changed file ====
@@ -31,11 +31,11 @@
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
 import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
 import org.elasticsearch.search.vectors.VectorData;
+import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryWrapper;
 import org.elasticsearch.xpack.inference.mapper.OffsetSourceFieldMapper;
 import org.elasticsearch.xpack.inference.mapper.OffsetSourceMetaFieldMapper;
 import org.elasticsearch.xpack.inference.mapper.SemanticTextFieldMapper;
 import org.elasticsearch.xpack.inference.mapper.SemanticTextUtils;
-import org.elasticsearch.xpack.core.ml.search.SparseVectorQueryWrapper;

 import java.io.IOException;
 import java.util.ArrayList;
==== changed file ====
@@ -13,8 +13,6 @@
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.core.Tuple;
-import org.elasticsearch.index.IndexVersion;
-import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper;
 import org.elasticsearch.inference.ChunkedInferenceServiceResults;
 import org.elasticsearch.inference.Model;
@@ -277,7 +275,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
     private static final ConstructingObjectParser<Chunk, Void> CHUNKS_PARSER = new ConstructingObjectParser<>(
         CHUNKS_FIELD,
         true,
-        args -> new Chunk((String) args[0], args[1] != null ? (String) args[1] : null, args[2] != null ? (int) args[2] : -1, args[3] != null ? (int) args[3] : -1, (BytesReference) args[4])
+        args -> new Chunk(
+            (String) args[0],
+            args[1] != null ? (String) args[1] : null,
+            args[2] != null ? (int) args[2] : -1,
+            args[3] != null ? (int) args[3] : -1,
+            (BytesReference) args[4]
+        )
     );

     private static final ConstructingObjectParser<ModelSettings, Void> MODEL_SETTINGS_PARSER = new ConstructingObjectParser<>(
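
Aside: the null checks in the lambda above exist because ConstructingObjectParser hands the constructor an Object[] in which optional arguments absent from the parsed document arrive as null, so the code defaults the offsets to -1. A sketch of that defaulting pattern, assuming the org.elasticsearch.xcontent API; the parser name and field names here are illustrative, not the ones this commit uses:

    import org.elasticsearch.xcontent.ConstructingObjectParser;
    import org.elasticsearch.xcontent.ParseField;

    // Hypothetical stand-in for the real Chunk class.
    record ChunkSketch(String text, int startOffset, int endOffset) {}

    class ChunkParserSketch {
        static final ConstructingObjectParser<ChunkSketch, Void> PARSER = new ConstructingObjectParser<>(
            "chunk_sketch",
            true, // lenient: ignore unknown fields
            args -> new ChunkSketch(
                (String) args[0],
                args[1] != null ? (int) args[1] : -1, // optional arg: null when absent
                args[2] != null ? (int) args[2] : -1
            )
        );

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), new ParseField("text"));
            PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), new ParseField("start_offset"));
            PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), new ParseField("end_offset"));
        }
    }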
@@ -337,7 +341,15 @@ public static List<Chunk> toSemanticTextFieldChunks(
             .hasNext();) {
             var chunkAsByteReference = it.next();
             int startOffset = input.indexOf(chunkAsByteReference.matchedText());
-            chunks.add(new Chunk(sourceFieldName, null, startOffset, startOffset + chunkAsByteReference.matchedText().length(), chunkAsByteReference.bytesReference()));
+            chunks.add(
+                new Chunk(
+                    sourceFieldName,
+                    null,
+                    startOffset,
+                    startOffset + chunkAsByteReference.matchedText().length(),
+                    chunkAsByteReference.bytesReference()
+                )
+            );
         }
     }
     return chunks;
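
Aside: the startOffset above is recovered by searching for the chunk's matched text inside the original input, and the end offset is derived from the match length; note that String.indexOf returns the first occurrence, so two identical chunks would resolve to the same offset. A self-contained illustration of the offset arithmetic:

    class OffsetDemo {
        public static void main(String[] args) {
            String input = "the quick brown fox jumps over the lazy dog";
            String matchedText = "brown fox";

            // Recover the chunk's position by searching the original input.
            int startOffset = input.indexOf(matchedText);       // 10
            int endOffset = startOffset + matchedText.length(); // 19

            // The [start, end) range maps back onto the matched text.
            assert input.substring(startOffset, endOffset).equals(matchedText);
            System.out.println(startOffset + ".." + endOffset); // prints: 10..19
        }
    }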
==== changed file ====
@@ -153,20 +153,12 @@ public static class Builder extends FieldMapper.Builder {
     private Function<MapperBuilderContext, ObjectMapper> inferenceFieldBuilder;

     public static Builder from(SemanticTextFieldMapper mapper) {
-        Builder builder = new Builder(
-            mapper.leafName(),
-            mapper.fieldType().getChunksField().bitsetProducer(),
-            mapper.indexSettings
-        );
+        Builder builder = new Builder(mapper.leafName(), mapper.fieldType().getChunksField().bitsetProducer(), mapper.indexSettings);
         builder.init(mapper);
         return builder;
     }

-    public Builder(
-        String name,
-        Function<Query, BitSetProducer> bitSetProducer,
-        IndexSettings indexSettings
-    ) {
+    public Builder(String name, Function<Query, BitSetProducer> bitSetProducer, IndexSettings indexSettings) {
         super(name);
         this.indexSettings = indexSettings;
         this.inferenceFieldBuilder = c -> createInferenceField(
@@ -325,11 +317,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
         final SemanticTextFieldMapper mapper;
         if (fieldType().getModelSettings() == null) {
             context.path().remove();
-            Builder builder = (Builder) new Builder(
-                leafName(),
-                fieldType().getChunksField().bitsetProducer(),
-                indexSettings
-            ).init(this);
+            Builder builder = (Builder) new Builder(leafName(), fieldType().getChunksField().bitsetProducer(), indexSettings).init(this);
             try {
                 mapper = builder.setModelSettings(field.inference().modelSettings())
                     .setInferenceId(field.inference().inferenceId())
@@ -380,11 +368,11 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio
                 builder.field("end", chunk.endOffset());
                 builder.endObject();
                 try (
-                    XContentParser subParser = XContentHelper.createParserNotCompressed(
-                        XContentParserConfiguration.EMPTY,
-                        BytesReference.bytes(builder),
-                        context.parser().contentType()
-                    )
+                    XContentParser subParser = XContentHelper.createParserNotCompressed(
+                        XContentParserConfiguration.EMPTY,
+                        BytesReference.bytes(builder),
+                        context.parser().contentType()
+                    )
                 ) {
                     DocumentParserContext subContext = nestedContext.switchParser(subParser);
                     subParser.nextToken();
@@ -644,15 +632,15 @@ public BlockLoader blockLoader(MappedFieldType.BlockLoaderContext blContext) {
     }

     private static ObjectMapper createInferenceField(
-        MapperBuilderContext context,
-        IndexVersion indexVersionCreated,
-        @Nullable SemanticTextField.ModelSettings modelSettings,
-        Function<Query, BitSetProducer> bitSetProducer,
-        IndexSettings indexSettings
+        MapperBuilderContext context,
+        IndexVersion indexVersionCreated,
+        @Nullable SemanticTextField.ModelSettings modelSettings,
+        Function<Query, BitSetProducer> bitSetProducer,
+        IndexSettings indexSettings
     ) {
         return new ObjectMapper.Builder(INFERENCE_FIELD, Optional.of(ObjectMapper.Subobjects.ENABLED)).dynamic(ObjectMapper.Dynamic.FALSE)
-            .add(createChunksField(indexVersionCreated, modelSettings, bitSetProducer, indexSettings))
-            .build(context);
+            .add(createChunksField(indexVersionCreated, modelSettings, bitSetProducer, indexSettings))
+            .build(context);
     }

     private static NestedObjectMapper.Builder createChunksField(
(4 of 9 changed files not shown)