Skip to content

Commit

Permalink
[Type Removal] Remove TypeFieldMapper usage, remove support of `_type` in searches and from LeafFieldsLookup (#3016)
Browse files Browse the repository at this point in the history

Removes TypeFieldMapper and _type support from searches

Signed-off-by: Suraj Singh <[email protected]>
  • Loading branch information
dreamer-89 authored Apr 21, 2022
1 parent a34d11f commit dbdee30
Show file tree
Hide file tree
Showing 20 changed files with 21 additions and 320 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -862,7 +862,7 @@ public void testUnsupportedQueries() {
PercolatorFieldMapper.verifyQuery(rangeQuery1);
PercolatorFieldMapper.verifyQuery(rangeQuery2);

HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None);
HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("parent", new MatchAllQueryBuilder(), ScoreMode.None);
expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));
expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new DisMaxQueryBuilder().add(hasChildQuery)));
PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1)));
Expand All @@ -881,7 +881,7 @@ public void testUnsupportedQueries() {
expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery));
expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery)));

HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false);
HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("parent", new MatchAllQueryBuilder(), false);
expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasParentQuery));
expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasParentQuery)));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -561,19 +561,6 @@ setup:

- match: {hits.total: 4}

---
"Test exists query on _type field":
- do:
search:
rest_total_hits_as_int: true
index: test
body:
query:
exists:
field: _type

- match: {hits.total: 4}

---
"Test exists query on _routing field":
- do:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.test.OpenSearchIntegTestCase;
import org.opensearch.test.hamcrest.OpenSearchAssertions;

Expand All @@ -58,7 +57,7 @@
import static org.opensearch.client.Requests.getRequest;
import static org.opensearch.client.Requests.indexRequest;
import static org.opensearch.client.Requests.refreshRequest;
import static org.opensearch.index.query.QueryBuilders.termQuery;
import static org.opensearch.index.query.QueryBuilders.matchAllQuery;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;

import static org.hamcrest.Matchers.equalTo;
Expand Down Expand Up @@ -181,11 +180,7 @@ public void testIndexActions() throws Exception {
// check count
for (int i = 0; i < 5; i++) {
// test successful
SearchResponse countResponse = client().prepareSearch("test")
.setSize(0)
.setQuery(termQuery("_type", MapperService.SINGLE_MAPPING_NAME))
.execute()
.actionGet();
SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet();
assertNoFailures(countResponse);
assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1386,7 +1386,7 @@ public void testWithRescore() {
SearchResponse response = client().prepareSearch("idx")
.addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)))
.addAggregation(
terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_type")))
terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_index")))
)
.get();
Terms terms = response.getAggregations().get("terms");
Expand All @@ -1403,7 +1403,7 @@ public void testWithRescore() {
.addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f)))
.addAggregation(
terms("terms").field(TERMS_AGGS_FIELD)
.subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_type")))
.subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_index")))
)
.get();
Terms terms = response.getAggregations().get("terms");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -471,42 +471,6 @@ public void testIdBasedScriptFields() throws Exception {
assertThat(fields, equalTo(singleton("id")));
assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
}

response = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort("num1", SortOrder.ASC)
.setSize(numDocs)
.addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
.get();

assertNoFailures(response);

assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs));
for (int i = 0; i < numDocs; i++) {
assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
assertThat(fields, equalTo(singleton("type")));
assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME));
}

response = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort("num1", SortOrder.ASC)
.setSize(numDocs)
.addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap()))
.addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap()))
.get();

assertNoFailures(response);

assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs));
for (int i = 0; i < numDocs; i++) {
assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i)));
Set<String> fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet());
assertThat(fields, equalTo(newHashSet("type", "id")));
assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME));
assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i)));
}
}

public void testScriptFieldUsingSource() throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,6 @@ public DocumentMapper(MapperService mapperService, Mapping mapping) {
final Collection<String> deleteTombstoneMetadataFields = Arrays.asList(
VersionFieldMapper.NAME,
IdFieldMapper.NAME,
TypeFieldMapper.NAME,
SeqNoFieldMapper.NAME,
SeqNoFieldMapper.PRIMARY_TERM_NAME,
SeqNoFieldMapper.TOMBSTONE_NAME
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -576,11 +576,6 @@ public DocumentMapperForType documentMapperWithAutoCreate() {
* Given the full name of a field, returns its {@link MappedFieldType}.
*/
public MappedFieldType fieldType(String fullName) {
if (fullName.equals(TypeFieldMapper.NAME)) {
String type = mapper == null ? null : mapper.type();
return new TypeFieldMapper.TypeFieldType(type);
}

return this.mapper == null ? null : this.mapper.fieldTypes().get(fullName);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.opensearch.common.geo.ShapeRelation;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.regex.Regex;
import org.opensearch.common.time.DateMathParser;
import org.opensearch.index.fielddata.IndexFieldData;
Expand All @@ -55,17 +54,9 @@
import java.util.Objects;
import java.util.function.Supplier;

// Todo: Remove TypeFieldMapper once we have NestedFieldMapper implementation
public class TypeFieldMapper extends MetadataFieldMapper {

private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeFieldType.class);

public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using the _type field "
+ "in queries and aggregations is deprecated, prefer to use a field instead.";

public static void emitTypesDeprecationWarning() {
deprecationLogger.deprecate("query_with_types", TYPES_DEPRECATION_MESSAGE);
}

public static final String NAME = "_type";

public static final String CONTENT_TYPE = "_type";
Expand Down Expand Up @@ -101,7 +92,6 @@ public String typeName() {

@Override
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
emitTypesDeprecationWarning();
return new ConstantIndexFieldData.Builder(type, name(), CoreValuesSourceType.BYTES);
}

Expand All @@ -112,13 +102,11 @@ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup lookup,

@Override
public Query existsQuery(QueryShardContext context) {
emitTypesDeprecationWarning();
return new MatchAllDocsQuery();
}

@Override
protected boolean matches(String pattern, boolean caseInsensitive, QueryShardContext context) {
emitTypesDeprecationWarning();
if (type == null) {
return false;
}
Expand All @@ -136,7 +124,6 @@ public Query rangeQuery(
DateMathParser parser,
QueryShardContext context
) {
emitTypesDeprecationWarning();
BytesRef lower = (BytesRef) lowerTerm;
BytesRef upper = (BytesRef) upperTerm;
if (includeLower) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,6 @@
import org.opensearch.index.mapper.SeqNoFieldMapper;
import org.opensearch.index.mapper.SourceFieldMapper;
import org.opensearch.index.mapper.TextFieldMapper;
import org.opensearch.index.mapper.TypeFieldMapper;
import org.opensearch.index.mapper.VersionFieldMapper;
import org.opensearch.index.seqno.RetentionLeaseBackgroundSyncAction;
import org.opensearch.index.seqno.RetentionLeaseSyncAction;
Expand Down Expand Up @@ -185,7 +184,6 @@ private static Map<String, MetadataFieldMapper.TypeParser> initBuiltInMetadataMa
builtInMetadataMappers.put(IndexFieldMapper.NAME, IndexFieldMapper.PARSER);
builtInMetadataMappers.put(DataStreamFieldMapper.NAME, DataStreamFieldMapper.PARSER);
builtInMetadataMappers.put(SourceFieldMapper.NAME, SourceFieldMapper.PARSER);
builtInMetadataMappers.put(TypeFieldMapper.NAME, TypeFieldMapper.PARSER);
builtInMetadataMappers.put(VersionFieldMapper.NAME, VersionFieldMapper.PARSER);
builtInMetadataMappers.put(SeqNoFieldMapper.NAME, SeqNoFieldMapper.PARSER);
// _field_names must be added last so that it has a chance to see all the other mappers
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,8 @@
import org.apache.lucene.index.LeafReader;
import org.opensearch.OpenSearchParseException;
import org.opensearch.index.fieldvisitor.SingleFieldsVisitor;
import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.TypeFieldMapper;

import java.io.IOException;
import java.util.ArrayList;
Expand Down Expand Up @@ -147,22 +145,12 @@ private FieldLookup loadFieldData(String name) {
cachedFieldData.put(name, data);
}
if (data.fields() == null) {
List<Object> values;
if (TypeFieldMapper.NAME.equals(data.fieldType().name())) {
TypeFieldMapper.emitTypesDeprecationWarning();
values = new ArrayList<>(1);
final DocumentMapper mapper = mapperService.documentMapper();
if (mapper != null) {
values.add(mapper.type());
}
} else {
values = new ArrayList<Object>(2);
SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values);
try {
reader.document(docId, visitor);
} catch (IOException e) {
throw new OpenSearchParseException("failed to load field [{}]", e, name);
}
List<Object> values = new ArrayList<>(2);
SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values);
try {
reader.document(docId, visitor);
} catch (IOException e) {
throw new OpenSearchParseException("failed to load field [{}]", e, name);
}
data.fields(singletonMap(data.fieldType().name(), values));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -381,7 +381,7 @@ public void testSegmentsWithMergeFlag() throws Exception {
}

public void testSegmentsWithIndexSort() throws Exception {
Sort indexSort = new Sort(new SortedSetSortField("_type", false));
Sort indexSort = new Sort(new SortedSetSortField("field", false));
try (
Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null, null, null, indexSort, null)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@
import org.opensearch.index.mapper.IndexFieldMapper;
import org.opensearch.index.mapper.SeqNoFieldMapper;
import org.opensearch.index.mapper.SourceFieldMapper;
import org.opensearch.index.mapper.TypeFieldMapper;
import org.opensearch.index.mapper.VersionFieldMapper;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.RandomObjects;
Expand Down Expand Up @@ -372,9 +371,8 @@ public static Tuple<Map<String, DocumentField>, Map<String, DocumentField>> rand
Map<String, DocumentField> fields = new HashMap<>(numFields);
Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
// As we are using this to construct a GetResult object that already contains
// index, type, id, version, seqNo, and source fields, we need to exclude them from random fields
Predicate<String> excludeMetaFieldFilter = field -> field.equals(TypeFieldMapper.NAME)
|| field.equals(IndexFieldMapper.NAME)
// index, id, version, seqNo, and source fields, we need to exclude them from random fields
Predicate<String> excludeMetaFieldFilter = field -> field.equals(IndexFieldMapper.NAME)
|| field.equals(IdFieldMapper.NAME)
|| field.equals(VersionFieldMapper.NAME)
|| field.equals(SourceFieldMapper.NAME)
Expand Down

This file was deleted.

Loading

0 comments on commit dbdee30

Please sign in to comment.