Skip to content

Commit

Permalink
Generate more analyzer/tokenizer/normalizer/tokenfilter
Browse files — browse the repository at this point in the history
Signed-off-by: Thomas Farr <[email protected]>
  • Loading branch information
Xtansia committed Sep 26, 2024
1 parent c46bb68 commit 4bdcb4d
Show file tree
Hide file tree
Showing 24 changed files with 518 additions and 403 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,15 @@
* GitHub history for details.
*/

//----------------------------------------------------
// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
//----------------------------------------------------

package org.opensearch.client.opensearch._types.analysis;

import jakarta.json.stream.JsonGenerator;
import java.util.function.Function;
import javax.annotation.Generated;
import javax.annotation.Nullable;
import org.opensearch.client.json.JsonpDeserializable;
import org.opensearch.client.json.JsonpDeserializer;
Expand All @@ -45,7 +50,9 @@
// typedef: _types.analysis.NGramTokenFilter

@JsonpDeserializable
@Generated("org.opensearch.client.codegen.CodeGenerator")
public class NGramTokenFilter extends TokenFilterBase implements TokenFilterDefinitionVariant {

@Nullable
private final Integer maxGram;

Expand All @@ -59,19 +66,17 @@ public class NGramTokenFilter extends TokenFilterBase implements TokenFilterDefi

/**
 * Creates the immutable filter from the state collected in {@code b}.
 * Only reachable through {@link Builder#build()}.
 */
private NGramTokenFilter(Builder b) {
    super(b);
    // All three fields are optional; null means "not set, use server default".
    this.maxGram = b.maxGram;
    this.minGram = b.minGram;
    this.preserveOriginal = b.preserveOriginal;
}

public static NGramTokenFilter of(Function<Builder, ObjectBuilder<NGramTokenFilter>> fn) {
public static NGramTokenFilter of(Function<NGramTokenFilter.Builder, ObjectBuilder<NGramTokenFilter>> fn) {
return fn.apply(new Builder()).build();
}

/**
* TokenFilterDefinition variant kind.
* {@link TokenFilterDefinition} variant kind.
*/
@Override
public TokenFilterDefinition.Kind _tokenFilterDefinitionKind() {
Expand Down Expand Up @@ -103,40 +108,34 @@ public final Boolean preserveOriginal() {
}

/**
 * Serializes this filter's fields into {@code generator}: the {@code type}
 * discriminator first, then the inherited base fields, then each optional
 * field only when it was explicitly set.
 */
protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {
    generator.write("type", "ngram");
    super.serializeInternal(generator, mapper);
    // Two-arg write(name, value) is equivalent to writeKey(name) + write(value).
    if (this.maxGram != null) {
        generator.write("max_gram", this.maxGram);
    }
    if (this.minGram != null) {
        generator.write("min_gram", this.minGram);
    }
    if (this.preserveOriginal != null) {
        generator.write("preserve_original", this.preserveOriginal);
    }
}

// ---------------------------------------------------------------------------------------------

/**
* Builder for {@link NGramTokenFilter}.
*/

public static class Builder extends TokenFilterBase.AbstractBuilder<Builder> implements ObjectBuilder<NGramTokenFilter> {
@Nullable
private Integer maxGram;

@Nullable
private Integer minGram;

@Nullable
private Boolean preserveOriginal;

Expand Down Expand Up @@ -172,8 +171,7 @@ protected Builder self() {
/**
* Builds a {@link NGramTokenFilter}.
*
* @throws NullPointerException
* if some of the required fields are null.
* @throws NullPointerException if some of the required fields are null.
*/
public NGramTokenFilter build() {
_checkSingleUse();
Expand All @@ -193,12 +191,11 @@ public NGramTokenFilter build() {
);

/**
 * Registers the field deserializers for {@link NGramTokenFilter} on {@code op}.
 * Base-class fields are registered first; the {@code type} discriminator is
 * consumed by the variant dispatcher and therefore ignored here.
 */
protected static void setupNGramTokenFilterDeserializer(ObjectDeserializer<NGramTokenFilter.Builder> op) {
    setupTokenFilterBaseDeserializer(op);
    op.add(Builder::maxGram, JsonpDeserializer.integerDeserializer(), "max_gram");
    op.add(Builder::minGram, JsonpDeserializer.integerDeserializer(), "min_gram");
    op.add(Builder::preserveOriginal, JsonpDeserializer.booleanDeserializer(), "preserve_original");
    op.ignore("type");
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,16 @@
* GitHub history for details.
*/

//----------------------------------------------------
// THIS CODE IS GENERATED. MANUAL EDITS WILL BE LOST.
//----------------------------------------------------

package org.opensearch.client.opensearch._types.analysis;

import jakarta.json.stream.JsonGenerator;
import java.util.List;
import java.util.function.Function;
import javax.annotation.Generated;
import javax.annotation.Nullable;
import org.opensearch.client.json.JsonpDeserializable;
import org.opensearch.client.json.JsonpDeserializer;
Expand All @@ -47,7 +52,9 @@
// typedef: _types.analysis.NGramTokenizer

@JsonpDeserializable
@Generated("org.opensearch.client.codegen.CodeGenerator")
public class NGramTokenizer extends TokenizerBase implements TokenizerDefinitionVariant {

@Nullable
private final String customTokenChars;

Expand All @@ -61,20 +68,18 @@ public class NGramTokenizer extends TokenizerBase implements TokenizerDefinition

/**
 * Creates the immutable tokenizer from the state collected in {@code b}.
 * Only reachable through {@link Builder#build()}.
 */
private NGramTokenizer(Builder b) {
    super(b);
    // customTokenChars is optional; the remaining three are required and
    // validated (tokenChars is also snapshotted as an unmodifiable list).
    this.customTokenChars = b.customTokenChars;
    this.maxGram = ApiTypeHelper.requireNonNull(b.maxGram, this, "maxGram");
    this.minGram = ApiTypeHelper.requireNonNull(b.minGram, this, "minGram");
    this.tokenChars = ApiTypeHelper.unmodifiableRequired(b.tokenChars, this, "tokenChars");
}

public static NGramTokenizer of(Function<Builder, ObjectBuilder<NGramTokenizer>> fn) {
public static NGramTokenizer of(Function<NGramTokenizer.Builder, ObjectBuilder<NGramTokenizer>> fn) {
return fn.apply(new Builder()).build();
}

/**
* TokenizerDefinition variant kind.
* {@link TokenizerDefinition} variant kind.
*/
@Override
public TokenizerDefinition.Kind _tokenizerDefinitionKind() {
Expand Down Expand Up @@ -111,46 +116,37 @@ public final List<TokenChar> tokenChars() {
}

protected void serializeInternal(JsonGenerator generator, JsonpMapper mapper) {

generator.write("type", "ngram");
super.serializeInternal(generator, mapper);
if (this.customTokenChars != null) {
generator.writeKey("custom_token_chars");
generator.write(this.customTokenChars);

}

generator.writeKey("max_gram");
generator.write(this.maxGram);

generator.writeKey("min_gram");
generator.write(this.minGram);

if (ApiTypeHelper.isDefined(this.tokenChars)) {
generator.writeKey("token_chars");
generator.writeStartArray();
for (TokenChar item0 : this.tokenChars) {
item0.serialize(generator, mapper);
}
generator.writeEnd();

generator.writeKey("token_chars");
generator.writeStartArray();
for (TokenChar item0 : this.tokenChars) {
item0.serialize(generator, mapper);
}

generator.writeEnd();
}

// ---------------------------------------------------------------------------------------------

/**
* Builder for {@link NGramTokenizer}.
*/

public static class Builder extends TokenizerBase.AbstractBuilder<Builder> implements ObjectBuilder<NGramTokenizer> {
@Nullable
private String customTokenChars;

private Integer maxGram;

private Integer minGram;

private List<TokenChar> tokenChars;

/**
Expand Down Expand Up @@ -179,8 +175,10 @@ public final Builder minGram(int value) {

/**
* Required - API name: {@code token_chars}
*
* <p>
* Adds all elements of <code>list</code> to <code>tokenChars</code>.
* </p>
*/
public final Builder tokenChars(List<TokenChar> list) {
this.tokenChars = _listAddAll(this.tokenChars, list);
Expand All @@ -189,8 +187,10 @@ public final Builder tokenChars(List<TokenChar> list) {

/**
* Required - API name: {@code token_chars}
*
* <p>
* Adds one or more values to <code>tokenChars</code>.
* </p>
*/
public final Builder tokenChars(TokenChar value, TokenChar... values) {
this.tokenChars = _listAdd(this.tokenChars, value, values);
Expand All @@ -205,8 +205,7 @@ protected Builder self() {
/**
* Builds a {@link NGramTokenizer}.
*
* @throws NullPointerException
* if some of the required fields are null.
* @throws NullPointerException if some of the required fields are null.
*/
public NGramTokenizer build() {
_checkSingleUse();
Expand All @@ -226,13 +225,12 @@ public NGramTokenizer build() {
);

/**
 * Registers the field deserializers for {@link NGramTokenizer} on {@code op}.
 * Base-class fields are registered first; the {@code type} discriminator is
 * consumed by the variant dispatcher and therefore ignored here.
 */
protected static void setupNGramTokenizerDeserializer(ObjectDeserializer<NGramTokenizer.Builder> op) {
    setupTokenizerBaseDeserializer(op);
    op.add(Builder::customTokenChars, JsonpDeserializer.stringDeserializer(), "custom_token_chars");
    op.add(Builder::maxGram, JsonpDeserializer.integerDeserializer(), "max_gram");
    op.add(Builder::minGram, JsonpDeserializer.integerDeserializer(), "min_gram");
    op.add(Builder::tokenChars, JsonpDeserializer.arrayDeserializer(TokenChar._DESERIALIZER), "token_chars");
    op.ignore("type");
}

}
Loading

0 comments on commit 4bdcb4d

Please sign in to comment.