diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 1dc8dc955f7c6..e82101896818e 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -51,4 +51,5 @@ BWC_VERSION: - "2.2.1" - "2.2.2" - "2.3.0" + - "2.3.1" - "2.4.0" diff --git a/CHANGELOG.md b/CHANGELOG.md index faf1e92a139db..4688046251828 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Add support for s390x architecture ([#4001](https://github.com/opensearch-project/OpenSearch/pull/4001)) - Github workflow for changelog verification ([#4085](https://github.com/opensearch-project/OpenSearch/pull/4085)) - Point in time rest layer changes for create and delete PIT API ([#4064](https://github.com/opensearch-project/OpenSearch/pull/4064)) +- Point in time rest layer changes for list PIT and PIT segments API ([#4388](https://github.com/opensearch-project/OpenSearch/pull/4388)) - Added @dreamer-89 as an Opensearch maintainer ([#4342](https://github.com/opensearch-project/OpenSearch/pull/4342)) - Added release notes for 1.3.5 ([#4343](https://github.com/opensearch-project/OpenSearch/pull/4343)) - Added release notes for 2.2.1 ([#4344](https://github.com/opensearch-project/OpenSearch/pull/4344)) @@ -19,37 +20,53 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Update previous release bwc version to 2.4.0 ([#4455](https://github.com/opensearch-project/OpenSearch/pull/4455)) - 2.3.0 release notes ([#4457](https://github.com/opensearch-project/OpenSearch/pull/4457)) - Added missing javadocs for `:distribution:tools` modules ([#4483](https://github.com/opensearch-project/OpenSearch/pull/4483)) - -### Dependencies - -- Bumps `reactive-streams` from 1.0.3 to 1.0.4 - +- Add BWC version 2.3.1 ([#4513](https://github.com/opensearch-project/OpenSearch/pull/4513)) +- [Segment Replication] Add snapshot and restore tests for segment replication feature ([#3993](https://github.com/opensearch-project/OpenSearch/pull/3993)) +- Added missing javadocs for `:example-plugins` modules ([#4540](https://github.com/opensearch-project/OpenSearch/pull/4540)) +- Add missing Javadoc tag descriptions in unit tests ([#4629](https://github.com/opensearch-project/OpenSearch/pull/4629)) + ### Dependencies +- Bumps `log4j-core` from 2.18.0 to 2.19.0 +- Bumps `reactor-netty-http` from 1.0.18 to 1.0.23 +- Bumps `jettison` from 1.5.0 to 1.5.1 -- Bumps `org.gradle.test-retry` from 1.4.0 to 1.4.1 -- Bumps `reactor-netty-core` from 1.0.19 to 1.0.22 ### Dependencies - -- Bumps `com.diffplug.spotless` from 6.9.1 to 6.10.0 -- Bumps `xmlbeans` from 5.1.0 to 5.1.1 -- Bumps azure-core-http-netty from 1.12.0 to 1.12.4([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) -- Bumps azure-core from 1.27.0 to 1.31.0([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) -- Bumps azure-storage-common from 12.16.0 to 12.18.0([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps `xmlbeans` from 5.1.0 to 5.1.1 ([#4354](https://github.com/opensearch-project/OpenSearch/pull/4354)) +- Bumps `azure-core-http-netty` from 1.12.0 to 1.12.4 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps `azure-core` from 1.27.0 to 1.31.0 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps `azure-storage-common` from 12.16.0 to 12.18.0 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160)) +- Bumps `org.gradle.test-retry` from 1.4.0 to 1.4.1 
([#4411](https://github.com/opensearch-project/OpenSearch/pull/4411)) +- Bumps `reactor-netty-core` from 1.0.19 to 1.0.22 ([#4447](https://github.com/opensearch-project/OpenSearch/pull/4447)) +- Bumps `reactive-streams` from 1.0.3 to 1.0.4 ([#4488](https://github.com/opensearch-project/OpenSearch/pull/4488)) +- Bumps `com.diffplug.spotless` from 6.10.0 to 6.11.0 ([#4547](https://github.com/opensearch-project/OpenSearch/pull/4547)) +- Bumps `reactor-core` from 3.4.18 to 3.4.23 ([#4548](https://github.com/opensearch-project/OpenSearch/pull/4548)) +- Bumps `jempbox` from 1.8.16 to 1.8.17 ([#4550](https://github.com/opensearch-project/OpenSearch/pull/4550)) +- Bumps `hadoop-hdfs` from 3.3.3 to 3.3.4 ([#4644](https://github.com/opensearch-project/OpenSearch/pull/4644)) ### Changed - Dependency updates (httpcore, mockito, slf4j, httpasyncclient, commons-codec) ([#4308](https://github.com/opensearch-project/OpenSearch/pull/4308)) - Use RemoteSegmentStoreDirectory instead of RemoteDirectory ([#4240](https://github.com/opensearch-project/OpenSearch/pull/4240)) - Plugin ZIP publication groupId value is configurable ([#4156](https://github.com/opensearch-project/OpenSearch/pull/4156)) +- Weighted round-robin scheduling policy for shard coordination traffic ([#4241](https://github.com/opensearch-project/OpenSearch/pull/4241)) - Add index specific setting for remote repository ([#4253](https://github.com/opensearch-project/OpenSearch/pull/4253)) - [Segment Replication] Update replicas to commit SegmentInfos instead of relying on SIS files from primary shards. ([#4402](https://github.com/opensearch-project/OpenSearch/pull/4402)) - [CCR] Add getHistoryOperationsFromTranslog method to fetch the history snapshot from translogs ([#3948](https://github.com/opensearch-project/OpenSearch/pull/3948)) +- [Remote Store] Change behaviour in replica recovery for remote translog enabled indices ([#4318](https://github.com/opensearch-project/OpenSearch/pull/4318)) +- PUT api for weighted shard routing ([#4272](https://github.com/opensearch-project/OpenSearch/pull/4272)) +- Unmute test RelocationIT.testRelocationWhileIndexingRandom ([#4580](https://github.com/opensearch-project/OpenSearch/pull/4580)) +- Add DecommissionService and helper to execute awareness attribute decommissioning ([#4084](https://github.com/opensearch-project/OpenSearch/pull/4084)) +- Further simplification of the ZIP publication implementation ([#4360](https://github.com/opensearch-project/OpenSearch/pull/4360)) +- Relax visibility of the HTTP_CHANNEL_KEY and HTTP_SERVER_CHANNEL_KEY to make it possible for the plugins to access associated Netty4HttpChannel / Netty4HttpServerChannel instance ([#4638](https://github.com/opensearch-project/OpenSearch/pull/4638)) +- Load the deprecated master role in a dedicated method instead of in setAdditionalRoles() ([#4582](https://github.com/opensearch-project/OpenSearch/pull/4582)) - Improve Gradle pre-commit checks to pre-empt Jenkins build ([#4174](https://github.com/opensearch-project/OpenSearch/pull/4660)) ### Deprecated ### Removed +- Remove deprecated code to add node name into log pattern of log4j property file ([#4568](https://github.com/opensearch-project/OpenSearch/pull/4568)) +- Unused object and import within TransportClusterAllocationExplainAction ([#4639](https://github.com/opensearch-project/OpenSearch/pull/4639)) ### Fixed @@ -70,11 +87,15 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - [Segment Replication] Update flaky 
testOnNewCheckpointFromNewPrimaryCancelOngoingReplication unit test ([#4414](https://github.com/opensearch-project/OpenSearch/pull/4414)) - Fixed the `_cat/shards/10_basic.yml` test cases fix. - [Segment Replication] Fix timeout issue by calculating time needed to process getSegmentFiles ([#4426](https://github.com/opensearch-project/OpenSearch/pull/4426)) -- [Bug]: gradle check failing with java heap OutOfMemoryError (([#4328](https://github.com/opensearch-project/OpenSearch/ +- [Bug]: gradle check failing with java heap OutOfMemoryError ([#4328](https://github.com/opensearch-project/OpenSearch/)) - `opensearch.bat` fails to execute when install path includes spaces ([#4362](https://github.com/opensearch-project/OpenSearch/pull/4362)) - Getting security exception due to access denied 'java.lang.RuntimePermission' 'accessDeclaredMembers' when trying to get snapshot with S3 IRSA ([#4469](https://github.com/opensearch-project/OpenSearch/pull/4469)) - Fixed flaky test `ResourceAwareTasksTests.testTaskIdPersistsInThreadContext` ([#4484](https://github.com/opensearch-project/OpenSearch/pull/4484)) - Fixed the ignore_malformed setting to also ignore objects ([#4494](https://github.com/opensearch-project/OpenSearch/pull/4494)) +- [Segment Replication] Ignore lock file when testing cleanupAndPreserveLatestCommitPoint ([#4544](https://github.com/opensearch-project/OpenSearch/pull/4544)) +- Updated jackson to 2.13.4 and snakeyaml to 1.32 ([#4556](https://github.com/opensearch-project/OpenSearch/pull/4556)) +- [Bug]: Fixed invalid location of JDK dependency for arm64 architecture ([#4613](https://github.com/opensearch-project/OpenSearch/pull/4613)) +- [Bug]: Alias filter lost after rollover ([#4499](https://github.com/opensearch-project/OpenSearch/pull/4499)) ### Security @@ -87,6 +108,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Github workflow for changelog verification ([#4085](https://github.com/opensearch-project/OpenSearch/pull/4085)) - Label configuration for dependabot PRs ([#4348](https://github.com/opensearch-project/OpenSearch/pull/4348)) - Added RestLayer Changes for PIT stats ([#4217](https://github.com/opensearch-project/OpenSearch/pull/4217)) +- Added GeoBounds aggregation on GeoShape field type ([#4266](https://github.com/opensearch-project/OpenSearch/pull/4266)) + - Addition of Doc values on the GeoShape Field + - Addition of GeoShape ValueSource level code interfaces for accessing the DocValues. + - Addition of Missing Value feature in the GeoShape Aggregations.
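A minimal sketch of how the new GeoBounds-on-GeoShape missing-value support from [#4266] is exercised, mirroring the `MissingValueIT` changes further down in this diff; the index name, field name, and missing-value WKT below are placeholders:

```java
// Compute geo bounds over a geo_shape field, substituting a WKT point for
// documents that have no value in the field (mirrors MissingValueIT below).
SearchResponse response = client().prepareSearch("idx")
    .addAggregation(
        AggregationBuilders.geoBounds("bounds")
            .field("myshape")            // a geo_shape mapped field
            .wrapLongitude(false)
            .missing("POINT (2.0 1.0)")  // WKT fallback for documents missing the field
    )
    .get();
GeoBounds bounds = response.getAggregations().get("bounds");
// bounds.topLeft() / bounds.bottomRight() then account for the missing-value point as well
```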
### Changed @@ -100,6 +125,5 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) - Commit workflow for dependabot changelog helper ([#4331](https://github.com/opensearch-project/OpenSearch/pull/4331)) ### Security - -[unreleased]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...HEAD +[Unreleased]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...HEAD [2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...2.x diff --git a/build.gradle b/build.gradle index bcae5bc3884a7..11ba3bf9fe105 100644 --- a/build.gradle +++ b/build.gradle @@ -55,7 +55,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.10.0" apply false + id "com.diffplug.spotless" version "6.11.0" apply false id "org.gradle.test-retry" version "1.4.1" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' diff --git a/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java b/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java index 4b289de3f0619..1073ba01dafab 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java @@ -128,7 +128,7 @@ public void setArchitecture(final String architecture) { "unknown architecture [" + jdkArchitecture + "] for jdk [" + name + "], must be one of " + ALLOWED_ARCHITECTURES ); } - this.architecture.set(architecture); + this.architecture.set(jdkArchitecture); } public String getBaseVersion() { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java b/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java index 70c3737ba3674..6dc7d660922b2 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java @@ -9,30 +9,31 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.logging.Logger; -import org.gradle.api.logging.Logging; import org.gradle.api.publish.PublishingExtension; import org.gradle.api.publish.maven.MavenPublication; -import org.gradle.api.publish.maven.plugins.MavenPublishPlugin; import java.nio.file.Path; import org.gradle.api.Task; +import org.gradle.api.publish.maven.plugins.MavenPublishPlugin; public class Publish implements Plugin<Project> { - private static final Logger LOGGER = Logging.getLogger(Publish.class); - - public final static String EXTENSION_NAME = "zipmavensettings"; + // public final static String PLUGIN_ZIP_PUBLISH_POM_TASK = "generatePomFileForPluginZipPublication"; public final static String PUBLICATION_NAME = "pluginZip"; public final static String STAGING_REPO = "zipStaging"; - public final static String PLUGIN_ZIP_PUBLISH_POM_TASK = "generatePomFileForPluginZipPublication"; - public final static String LOCALMAVEN = "publishToMavenLocal"; public final static String LOCAL_STAGING_REPO_PATH = "/build/local-staging-repo"; - public String zipDistributionLocation = "/build/distributions/"; + // TODO: Does the path ^^ need to use platform dependent file separators ?
+ + private boolean isZipPublicationPresent(Project project) { + PublishingExtension pe = project.getExtensions().findByType(PublishingExtension.class); + if (pe == null) { + return false; + } + return pe.getPublications().findByName(PUBLICATION_NAME) != null; + } - public static void configMaven(Project project) { + private void addLocalMavenRepo(Project project) { final Path buildDirectory = project.getRootDir().toPath(); - project.getPluginManager().apply(MavenPublishPlugin.class); project.getExtensions().configure(PublishingExtension.class, publishing -> { publishing.repositories(repositories -> { repositories.maven(maven -> { @@ -40,52 +41,40 @@ public static void configMaven(Project project) { maven.setUrl(buildDirectory.toString() + LOCAL_STAGING_REPO_PATH); }); }); + }); + } + + private void addZipArtifact(Project project) { + project.getExtensions().configure(PublishingExtension.class, publishing -> { publishing.publications(publications -> { MavenPublication mavenZip = (MavenPublication) publications.findByName(PUBLICATION_NAME); - - if (mavenZip == null) { - mavenZip = publications.create(PUBLICATION_NAME, MavenPublication.class); + if (mavenZip != null) { + mavenZip.artifact(project.getTasks().named("bundlePlugin")); } - - String groupId = mavenZip.getGroupId(); - if (groupId == null) { - // The groupId is not customized thus we get the value from "project.group". - // See https://docs.gradle.org/current/userguide/publishing_maven.html#sec:identity_values_in_the_generated_pom - groupId = getProperty("group", project); - } - - String artifactId = project.getName(); - String pluginVersion = getProperty("version", project); - mavenZip.artifact(project.getTasks().named("bundlePlugin")); - mavenZip.setGroupId(groupId); - mavenZip.setArtifactId(artifactId); - mavenZip.setVersion(pluginVersion); }); }); } - static String getProperty(String name, Project project) { - if (project.hasProperty(name)) { - Object property = project.property(name); - if (property != null) { - return property.toString(); - } - } - return null; - } - @Override public void apply(Project project) { + project.getPluginManager().apply("nebula.maven-base-publish"); + project.getPluginManager().apply(MavenPublishPlugin.class); project.afterEvaluate(evaluatedProject -> { - configMaven(project); - Task validatePluginZipPom = project.getTasks().findByName("validatePluginZipPom"); - if (validatePluginZipPom != null) { - project.getTasks().getByName("validatePluginZipPom").dependsOn("generatePomFileForNebulaPublication"); - } - Task publishPluginZipPublicationToZipStagingRepository = project.getTasks() - .findByName("publishPluginZipPublicationToZipStagingRepository"); - if (publishPluginZipPublicationToZipStagingRepository != null) { - publishPluginZipPublicationToZipStagingRepository.dependsOn("generatePomFileForNebulaPublication"); + if (isZipPublicationPresent(project)) { + addLocalMavenRepo(project); + addZipArtifact(project); + Task validatePluginZipPom = project.getTasks().findByName("validatePluginZipPom"); + if (validatePluginZipPom != null) { + validatePluginZipPom.dependsOn("generatePomFileForNebulaPublication"); + } + Task publishPluginZipPublicationToZipStagingRepository = project.getTasks() + .findByName("publishPluginZipPublicationToZipStagingRepository"); + if (publishPluginZipPublicationToZipStagingRepository != null) { + publishPluginZipPublicationToZipStagingRepository.dependsOn("generatePomFileForNebulaPublication"); + } + } else { + project.getLogger() + .warn(String.format("Plugin 
'opensearch.pluginzip' is applied but no '%s' publication is defined.", PUBLICATION_NAME)); } }); } diff --git a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java index 06632e2dfa476..2ca0e507acb44 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java @@ -8,6 +8,8 @@ package org.opensearch.gradle.pluginzip; +import org.gradle.api.Project; +import org.gradle.testfixtures.ProjectBuilder; import org.gradle.testkit.runner.BuildResult; import org.gradle.testkit.runner.GradleRunner; import org.gradle.testkit.runner.UnexpectedBuildFailure; @@ -54,20 +56,152 @@ public void tearDown() { projectDir.delete(); } + /** + * This test is used to verify that adding the 'opensearch.pluginzip' to the project + * adds some other transitive plugins and tasks under the hood. This is basically + * a behavioral test of the {@link Publish#apply(Project)} method. + * + * This is equivalent of having a build.gradle script with just the following section: + *
+     *     plugins {
+     *       id 'opensearch.pluginzip'
+     *     }
+     * 
+ */ + @Test + public void applyZipPublicationPluginNoConfig() { + // All we do here is creating an empty project and applying the Publish plugin. + Project project = ProjectBuilder.builder().build(); + project.getPluginManager().apply(Publish.class); + + // WARNING: ===================================================================== + // All the following tests will work only before the gradle project is evaluated. + // There are some methods that will cause the project to be evaluated, such as: + // project.getTasksByName() + // After the project is evaluated there are more tasks found in the project, like + // the [assemble, build, ...] and other standard tasks. + // This can potentially break in future gradle versions (?) + // =============================================================================== + + assertEquals( + "The Publish plugin is applied which adds total of five tasks from Nebula and MavenPublishing plugins.", + 5, + project.getTasks().size() + ); + + // Tasks applied from "nebula.maven-base-publish" + assertTrue( + project.getTasks() + .findByName("generateMetadataFileForNebulaPublication") instanceof org.gradle.api.publish.tasks.GenerateModuleMetadata + ); + assertTrue( + project.getTasks() + .findByName("generatePomFileForNebulaPublication") instanceof org.gradle.api.publish.maven.tasks.GenerateMavenPom + ); + assertTrue( + project.getTasks() + .findByName("publishNebulaPublicationToMavenLocal") instanceof org.gradle.api.publish.maven.tasks.PublishToMavenLocal + ); + + // Tasks applied from MavenPublishPlugin + assertTrue(project.getTasks().findByName("publishToMavenLocal") instanceof org.gradle.api.DefaultTask); + assertTrue(project.getTasks().findByName("publish") instanceof org.gradle.api.DefaultTask); + + // And we miss the pluginzip publication task (because no publishing was defined for it) + assertNull(project.getTasks().findByName(ZIP_PUBLISH_TASK)); + + // We have the following publishing plugins + assertEquals(4, project.getPlugins().size()); + // ... of the following types: + assertNotNull( + "Project is expected to have OpenSearch pluginzip Publish plugin", + project.getPlugins().findPlugin(org.opensearch.gradle.pluginzip.Publish.class) + ); + assertNotNull( + "Project is expected to have MavenPublishPlugin (applied from OpenSearch pluginzip plugin)", + project.getPlugins().findPlugin(org.gradle.api.publish.maven.plugins.MavenPublishPlugin.class) + ); + assertNotNull( + "Project is expected to have Publishing plugin (applied from MavenPublishPublish plugin)", + project.getPlugins().findPlugin(org.gradle.api.publish.plugins.PublishingPlugin.class) + ); + assertNotNull( + "Project is expected to have nebula MavenBasePublishPlugin plugin (applied from OpenSearch pluginzip plugin)", + project.getPlugins().findPlugin(nebula.plugin.publishing.maven.MavenBasePublishPlugin.class) + ); + } + + /** + * Verify that if the zip publication is configured then relevant tasks are chained correctly. + * This test that the dependsOn() is applied correctly. + */ + @Test + public void applyZipPublicationPluginWithConfig() throws IOException, URISyntaxException, InterruptedException { + + /* ------------------------------- + // The ideal approach would be to create a project (via ProjectBuilder) with publishzip plugin, + // have it evaluated (API call) and then check if there are tasks that the plugin uses to hookup into + // and how these tasks are chained. The problem is that there is a known gradle issue (#20301) that does + // not allow for it ATM. 
If, however, it is fixed in the future the following is the code that can + // be used... + + Project project = ProjectBuilder.builder().build(); + project.getPluginManager().apply(Publish.class); + // add publications via API + + // evaluate the project + ((DefaultProject)project).evaluate(); + + // - Check that "validatePluginZipPom" and/or "publishPluginZipPublicationToZipStagingRepository" + // tasks have dependencies on "generatePomFileForNebulaPublication". + // - Check that there is the staging repository added. + + // However, due to known issue(1): https://github.com/gradle/gradle/issues/20301 + // it is impossible to reach to individual tasks and work with them. + // (1): https://docs.gradle.org/7.4/release-notes.html#known-issues + + // I.e.: The following code throws exception, basically any access to individual tasks fails. + project.getTasks().getByName("validatePluginZipPom"); + ------------------------------- */ + + // Instead, we run the gradle project via GradleRunner (this way we get fully evaluated project) + // and using the minimal possible configuration (missingPOMEntity) we test that as soon as the zip publication + // configuration is specified then all the necessary tasks are hooked up and executed correctly. + // However, this does not test execution order of the tasks. + GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle", ZIP_PUBLISH_TASK/*, "-m"*/); + BuildResult result = runner.build(); + + assertEquals(SUCCESS, result.task(":" + "bundlePlugin").getOutcome()); + assertEquals(SUCCESS, result.task(":" + "generatePomFileForNebulaPublication").getOutcome()); + assertEquals(SUCCESS, result.task(":" + "generatePomFileForPluginZipPublication").getOutcome()); + assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome()); + } + + /** + * If the plugin is used but relevant publication is not defined then a message is printed. + */ + @Test + public void missingPublications() throws IOException, URISyntaxException { + GradleRunner runner = prepareGradleRunnerFromTemplate("missingPublications.gradle", "build", "-m"); + BuildResult result = runner.build(); + + assertTrue(result.getOutput().contains("Plugin 'opensearch.pluginzip' is applied but no 'pluginZip' publication is defined.")); + } + @Test public void missingGroupValue() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("missingGroupValue.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("missingGroupValue.gradle", "build", ZIP_PUBLISH_TASK); Exception e = assertThrows(UnexpectedBuildFailure.class, runner::build); assertTrue(e.getMessage().contains("Invalid publication 'pluginZip': groupId cannot be empty.")); } /** - * This would be the most common use case where user declares Maven publication entity with basic info - * and the resulting POM file will use groupId and version values from the Gradle project object. + * This would be the most common use case where user declares Maven publication entity with minimal info + * and the resulting POM file will use artifactId, groupId and version values based on the Gradle project object. 
*/ @Test - public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("groupAndVersionValue.gradle"); + public void useDefaultValues() throws IOException, URISyntaxException, XmlPullParserException { + GradleRunner runner = prepareGradleRunnerFromTemplate("useDefaultValues.gradle", "build", ZIP_PUBLISH_TASK); BuildResult result = runner.build(); /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ @@ -108,7 +242,7 @@ public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPu ).exists() ); - // Parse the maven file and validate the groupID + // Parse the maven file and validate default values MavenXpp3Reader reader = new MavenXpp3Reader(); Model model = reader.read( new FileReader( @@ -130,6 +264,10 @@ public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPu ); assertEquals(model.getVersion(), "2.0.0.0"); assertEquals(model.getGroupId(), "org.custom.group"); + assertEquals(model.getArtifactId(), PROJECT_NAME); + assertNull(model.getName()); + assertNull(model.getDescription()); + assertEquals(model.getUrl(), "https://github.com/doe/sample-plugin"); } @@ -139,7 +277,7 @@ public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPu */ @Test public void missingPOMEntity() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle", "build", ZIP_PUBLISH_TASK); BuildResult result = runner.build(); /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ @@ -186,7 +324,7 @@ public void missingPOMEntity() throws IOException, URISyntaxException, XmlPullPa */ @Test public void customizedGroupValue() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("customizedGroupValue.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("customizedGroupValue.gradle", "build", ZIP_PUBLISH_TASK); BuildResult result = runner.build(); /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ @@ -223,21 +361,94 @@ public void customizedGroupValue() throws IOException, URISyntaxException, XmlPu */ @Test public void customizedInvalidGroupValue() throws IOException, URISyntaxException { - GradleRunner runner = prepareGradleRunnerFromTemplate("customizedInvalidGroupValue.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("customizedInvalidGroupValue.gradle", "build", ZIP_PUBLISH_TASK); Exception e = assertThrows(UnexpectedBuildFailure.class, runner::build); assertTrue( e.getMessage().contains("Invalid publication 'pluginZip': groupId ( ) is not a valid Maven identifier ([A-Za-z0-9_\\-.]+).") ); } - private GradleRunner prepareGradleRunnerFromTemplate(String templateName) throws IOException, URISyntaxException { + /** + * This test verifies that use of the pluginZip does not clash with other maven publication plugins. + * It covers the case when user calls the "publishToMavenLocal" task. + */ + @Test + public void publishToMavenLocal() throws IOException, URISyntaxException, XmlPullParserException { + // By default, the "publishToMavenLocal" publishes artifacts to a local m2 repo, typically + // found in `~/.m2/repository`. But this is not practical for this unit test at all. 
We need to point + // the 'maven-publish' plugin to a custom m2 repo located in temporary directory associated with this + // test case instead. + // + // According to Gradle documentation this should be possible by proper configuration of the publishing + // task (https://docs.gradle.org/current/userguide/publishing_maven.html#publishing_maven:install). + // But for some reason this never worked as expected and artifacts created during this test case + // were always pushed into the default local m2 repository (ie: `~/.m2/repository`). + // The only workaround that seems to work is to pass "-Dmaven.repo.local" property via runner argument. + // (Kudos to: https://stackoverflow.com/questions/72265294/gradle-publishtomavenlocal-specify-custom-directory) + // + // The temporary directory that is used as the local m2 repository is created via in task "prepareLocalMVNRepo". + GradleRunner runner = prepareGradleRunnerFromTemplate( + "publishToMavenLocal.gradle", + String.join(File.separator, "-Dmaven.repo.local=" + projectDir.getRoot(), "build", "local-staging-repo"), + "build", + "prepareLocalMVNRepo", + "publishToMavenLocal" + ); + BuildResult result = runner.build(); + + assertEquals(SUCCESS, result.task(":" + "build").getOutcome()); + assertEquals(SUCCESS, result.task(":" + "publishToMavenLocal").getOutcome()); + + // Parse the maven file and validate it + MavenXpp3Reader reader = new MavenXpp3Reader(); + Model model = reader.read( + new FileReader( + new File( + projectDir.getRoot(), + String.join( + File.separator, + "build", + "local-staging-repo", + "org", + "custom", + "group", + PROJECT_NAME, + "2.0.0.0", + PROJECT_NAME + "-2.0.0.0.pom" + ) + ) + ) + ); + + // The "publishToMavenLocal" task will run ALL maven publications, hence we can expect the ZIP publication + // present as well: https://docs.gradle.org/current/userguide/publishing_maven.html#publishing_maven:tasks + assertEquals(model.getArtifactId(), PROJECT_NAME); + assertEquals(model.getGroupId(), "org.custom.group"); + assertEquals(model.getVersion(), "2.0.0.0"); + assertEquals(model.getPackaging(), "zip"); + + // We have two publications in the build.gradle file, both are "MavenPublication" based. + // Both the mavenJava and pluginZip publications publish to the same location (coordinates) and + // artifacts (the POM file) overwrite each other. However, we can verify that the Zip plugin is + // the last one and "wins" over the mavenJava. + assertEquals(model.getDescription(), "pluginZip publication"); + } + + /** + * A helper method for use cases + * + * @param templateName The name of the file (from "pluginzip" folder) to use as a build.gradle for the test + * @param gradleArguments Optional CLI parameters to pass into Gradle runner + */ + private GradleRunner prepareGradleRunnerFromTemplate(String templateName, String... 
gradleArguments) throws IOException, + URISyntaxException { useTemplateFile(projectDir.newFile("build.gradle"), templateName); prepareGradleFilesAndSources(); GradleRunner runner = GradleRunner.create() .forwardOutput() .withPluginClasspath() - .withArguments("build", ZIP_PUBLISH_TASK) + .withArguments(gradleArguments) .withProjectDir(projectDir.getRoot()); return runner; @@ -246,7 +457,7 @@ private GradleRunner prepareGradleRunnerFromTemplate(String templateName) throws private void prepareGradleFilesAndSources() throws IOException { // A dummy "source" file that is processed with bundlePlugin and put into a ZIP artifact file File bundleFile = new File(projectDir.getRoot(), PROJECT_NAME + "-source.txt"); - Path zipFile = Files.createFile(bundleFile.toPath()); + Files.createFile(bundleFile.toPath()); // Setting a project name via settings.gradle file writeString(projectDir.newFile("settings.gradle"), "rootProject.name = '" + PROJECT_NAME + "'"); } diff --git a/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle b/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle index 1bde3edda2d91..94f03132faa80 100644 --- a/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle b/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle index b6deeeb12ca6a..6f2abbdacd6d4 100644 --- a/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle b/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle index 602c178ea1a5b..8fcd1d6600b5a 100644 --- a/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle b/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle index 2cc67c2e98954..394bc53622769 100644 --- a/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle +++ b/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/missingPublications.gradle b/buildSrc/src/test/resources/pluginzip/missingPublications.gradle new file mode 100644 index 0000000000000..ba6b33ad86463 --- /dev/null +++ b/buildSrc/src/test/resources/pluginzip/missingPublications.gradle @@ -0,0 +1,21 @@ +plugins { + id 'java-gradle-plugin' + id 'opensearch.pluginzip' +} + +group="org.custom.group" +version='2.0.0.0' + +// A bundlePlugin task mockup +tasks.register('bundlePlugin', Zip.class) { + archiveFileName = "sample-plugin-${version}.zip" + destinationDirectory = layout.buildDirectory.dir('distributions') + from layout.projectDirectory.file('sample-plugin-source.txt') +} + +//publishing { +// publications { +// pluginZip(MavenPublication) { +// } +// } +//} diff --git 
a/buildSrc/src/test/resources/pluginzip/publishToMavenLocal.gradle b/buildSrc/src/test/resources/pluginzip/publishToMavenLocal.gradle new file mode 100644 index 0000000000000..8d248dbe08a42 --- /dev/null +++ b/buildSrc/src/test/resources/pluginzip/publishToMavenLocal.gradle @@ -0,0 +1,47 @@ +plugins { + // The java-gradle-plugin adds a new publication called 'pluginMaven' that causes some warnings because it + // clashes a bit with other publications defined in this file. If you are running at the --info level then you can + // expect some warning like the following: + // "Multiple publications with coordinates 'org.custom.group:sample-plugin:2.0.0.0' are published to repository 'mavenLocal'." + id 'java-gradle-plugin' + id 'opensearch.pluginzip' +} + +group="org.custom.group" +version='2.0.0.0' + +// A bundlePlugin task mockup +tasks.register('bundlePlugin', Zip.class) { + archiveFileName = "sample-plugin-${version}.zip" + destinationDirectory = layout.buildDirectory.dir('distributions') + from layout.projectDirectory.file('sample-plugin-source.txt') +} + +// A task to prepare directory for a temporary maven local repository +tasks.register('prepareLocalMVNRepo') { + dependsOn ':bundlePlugin' + doFirst { + File localMVNRepo = new File (layout.buildDirectory.get().getAsFile().getPath(), 'local-staging-repo') + System.out.println('Creating temporary folder for mavenLocal repo: '+ localMVNRepo.toString()) + System.out.println("Success: " + localMVNRepo.mkdir()) + } +} + +publishing { + publications { + // Plugin zip publication + pluginZip(MavenPublication) { + pom { + url = 'http://www.example.com/library' + description = 'pluginZip publication' + } + } + // Standard maven publication + mavenJava(MavenPublication) { + pom { + url = 'http://www.example.com/library' + description = 'mavenJava publication' + } + } + } +} diff --git a/buildSrc/src/test/resources/pluginzip/groupAndVersionValue.gradle b/buildSrc/src/test/resources/pluginzip/useDefaultValues.gradle similarity index 90% rename from buildSrc/src/test/resources/pluginzip/groupAndVersionValue.gradle rename to buildSrc/src/test/resources/pluginzip/useDefaultValues.gradle index bdab385f6082c..52f1c042fd47c 100644 --- a/buildSrc/src/test/resources/pluginzip/groupAndVersionValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/useDefaultValues.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } @@ -18,8 +17,8 @@ publishing { publications { pluginZip(MavenPublication) { pom { - name = "sample-plugin" - description = "pluginDescription" +// name = "plugin name" +// description = "plugin description" licenses { license { name = "The Apache License, Version 2.0" diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle index f0f9e74ba96a2..d67c33cb98193 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle @@ -16,7 +16,7 @@ repositories { mavenCentral() } dependencies { - implementation 'org.apache.logging.log4j:log4j-core:2.18.0' + implementation 'org.apache.logging.log4j:log4j-core:2.19.0' } ["0.0.1", "0.0.2"].forEach { v -> diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 072dcc4578977..aa6a14ca6e47d 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -9,9 +9,9 @@ bundled_jdk = 17.0.4+8 # optional dependencies spatial4j = 0.7 jts = 1.15.0 -jackson = 
2.13.3 -jackson_databind = 2.13.3 -snakeyaml = 1.31 +jackson = 2.13.4 +jackson_databind = 2.13.4 +snakeyaml = 1.32 icu4j = 70.1 supercsv = 2.4.0 log4j = 2.17.1 diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index eedc27d1d2ea7..91c339cc92c1b 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -498,6 +498,10 @@ static Request deleteAllPits() { return new Request(HttpDelete.METHOD_NAME, "/_search/point_in_time/_all"); } + static Request getAllPits() { + return new Request(HttpGet.METHOD_NAME, "/_search/point_in_time/_all"); + } + static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, "/_msearch"); diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index 0c73c65f6175f..0a5880b778942 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -63,6 +63,7 @@ import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.DeletePitRequest; import org.opensearch.action.search.DeletePitResponse; +import org.opensearch.action.search.GetAllPitNodesResponse; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.action.search.SearchRequest; @@ -1368,6 +1369,40 @@ public final Cancellable deleteAllPitsAsync(RequestOptions options, ActionListen ); } + /** + * Get all point in time searches using list all PITs API + * + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + */ + public final GetAllPitNodesResponse getAllPits(RequestOptions options) throws IOException { + return performRequestAndParseEntity( + new MainRequest(), + (request) -> RequestConverters.getAllPits(), + options, + GetAllPitNodesResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously get all point in time searches using list all PITs API + * + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return the response + */ + public final Cancellable getAllPitsAsync(RequestOptions options, ActionListener listener) { + return performRequestAsyncAndParseEntity( + new MainRequest(), + (request) -> RequestConverters.getAllPits(), + options, + GetAllPitNodesResponse::fromXContent, + listener, + emptySet() + ); + } + /** * Clears one or more scroll ids using the Clear Scroll API. 
* diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java index 395ec6e46a7b3..cbb4db10cd519 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java @@ -18,12 +18,14 @@ import org.opensearch.action.search.DeletePitInfo; import org.opensearch.action.search.DeletePitRequest; import org.opensearch.action.search.DeletePitResponse; +import org.opensearch.action.search.GetAllPitNodesResponse; import org.opensearch.common.unit.TimeValue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; /** * Tests point in time API with rest high level client @@ -52,21 +54,24 @@ public void indexDocuments() throws IOException { public void testCreateAndDeletePit() throws IOException { CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index"); - CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); - assertTrue(pitResponse.getId() != null); - assertEquals(1, pitResponse.getTotalShards()); - assertEquals(1, pitResponse.getSuccessfulShards()); - assertEquals(0, pitResponse.getFailedShards()); - assertEquals(0, pitResponse.getSkippedShards()); + CreatePitResponse createPitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); + assertTrue(createPitResponse.getId() != null); + assertEquals(1, createPitResponse.getTotalShards()); + assertEquals(1, createPitResponse.getSuccessfulShards()); + assertEquals(0, createPitResponse.getFailedShards()); + assertEquals(0, createPitResponse.getSkippedShards()); + GetAllPitNodesResponse getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT); + List pits = getAllPitResponse.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList()); + assertTrue(pits.contains(createPitResponse.getId())); List pitIds = new ArrayList<>(); - pitIds.add(pitResponse.getId()); + pitIds.add(createPitResponse.getId()); DeletePitRequest deletePitRequest = new DeletePitRequest(pitIds); DeletePitResponse deletePitResponse = execute(deletePitRequest, highLevelClient()::deletePit, highLevelClient()::deletePitAsync); assertTrue(deletePitResponse.getDeletePitResults().get(0).isSuccessful()); - assertTrue(deletePitResponse.getDeletePitResults().get(0).getPitId().equals(pitResponse.getId())); + assertTrue(deletePitResponse.getDeletePitResults().get(0).getPitId().equals(createPitResponse.getId())); } - public void testDeleteAllPits() throws IOException { + public void testDeleteAllAndListAllPits() throws IOException { CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index"); CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); CreatePitResponse pitResponse1 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); @@ -80,6 +85,11 @@ public void testDeleteAllPits() throws IOException { pitResponse1 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); assertTrue(pitResponse.getId() != null); assertTrue(pitResponse1.getId() != null); + GetAllPitNodesResponse getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT); + + List pits = 
getAllPitResponse.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList()); + assertTrue(pits.contains(pitResponse.getId())); + assertTrue(pits.contains(pitResponse1.getId())); ActionListener deletePitListener = new ActionListener<>() { @Override public void onResponse(DeletePitResponse response) { @@ -95,8 +105,27 @@ public void onFailure(Exception e) { } } }; + final CreatePitResponse pitResponse3 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync); + + ActionListener getPitsListener = new ActionListener() { + @Override + public void onResponse(GetAllPitNodesResponse response) { + List pits = response.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList()); + assertTrue(pits.contains(pitResponse3.getId())); + } + + @Override + public void onFailure(Exception e) { + if (!(e instanceof OpenSearchStatusException)) { + throw new AssertionError("List all PITs failed", e); + } + } + }; + highLevelClient().getAllPitsAsync(RequestOptions.DEFAULT, getPitsListener); highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener); // validate no pits case + getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT); + assertTrue(getAllPitResponse.getPitInfos().size() == 0); highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener); } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java index cdd63743f2644..c0eb344a64dba 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java @@ -135,6 +135,7 @@ public class RestHighLevelClientTests extends OpenSearchTestCase { "ping", "info", "delete_all_pits", + "get_all_pits", // security "security.get_ssl_certificates", "security.authenticate", @@ -887,7 +888,8 @@ public void testApiNamingConventions() throws Exception { "nodes.usage", "nodes.reload_secure_settings", "search_shards", - "remote_store.restore", }; + "remote_store.restore", + "cluster.put_weighted_routing", }; List booleanReturnMethods = Arrays.asList("security.enable_user", "security.disable_user", "security.change_password"); Set deprecatedMethods = new HashSet<>(); deprecatedMethods.add("indices.force_merge"); diff --git a/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 b/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 deleted file mode 100644 index 6e0e2cf9bf2d4..0000000000000 --- a/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a27014716e4421684416e5fa83d896ddb87002da \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.13.4.jar.sha1 b/client/sniffer/licenses/jackson-core-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..c21a7ba4d0043 --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.13.4.jar.sha1 @@ -0,0 +1 @@ +0cf934c681294b97ef6d80082faeefbe1edadf56 \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.3.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git 
a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.4.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..2e9425b8ff6db --- /dev/null +++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.13.4.jar.sha1 @@ -0,0 +1 @@ +858c6cc78e1f08a885b1613e1d817c829df70a6e \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.3.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.3.jar.sha1 deleted file mode 100644 index fd75028bd141f..0000000000000 --- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56deb9ea2c93a7a556b3afbedd616d342963464e \ No newline at end of file diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.4.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..fcc6491d1f78d --- /dev/null +++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.13.4.jar.sha1 @@ -0,0 +1 @@ +98b0edfa8e4084078f10b7b356c300ded4a71491 \ No newline at end of file diff --git a/gradle/missing-javadoc.gradle b/gradle/missing-javadoc.gradle index 248a714f4f3e3..a1fde7637796c 100644 --- a/gradle/missing-javadoc.gradle +++ b/gradle/missing-javadoc.gradle @@ -95,14 +95,6 @@ configure([ project(":client:client-benchmark-noop-api-plugin"), project(":client:rest-high-level"), project(":client:test"), - project(":doc-tools"), - project(":example-plugins:custom-settings"), - project(":example-plugins:custom-significance-heuristic"), - project(":example-plugins:custom-suggester"), - project(":example-plugins:painless-allowlist"), - project(":example-plugins:rescore"), - project(":example-plugins:rest-handler"), - project(":example-plugins:script-expert-scoring"), project(":libs:opensearch-cli"), project(":libs:opensearch-core"), project(":libs:opensearch-dissect"), @@ -155,9 +147,7 @@ configure([ project(":plugins:store-smb"), project(":plugins:transport-nio"), project(":qa:die-with-dignity"), - project(":qa:os"), project(":qa:wildfly"), - project(":rest-api-spec"), project(":test:external-modules:test-delayed-aggs"), project(":test:fixtures:azure-fixture"), project(":test:fixtures:gcs-fixture"), diff --git a/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java b/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java index dfadf9269a097..8aca043017e32 100644 --- a/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java +++ b/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java @@ -88,6 +88,15 @@ public G get(int i) { return shapes.get(i); } + /** + * Returns a {@link List} of All {@link Geometry} present in this collection. 
+ * + * @return a {@link List} of All {@link Geometry} + */ + public List getAll() { + return shapes; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 deleted file mode 100644 index 6e0e2cf9bf2d4..0000000000000 --- a/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a27014716e4421684416e5fa83d896ddb87002da \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-core-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..c21a7ba4d0043 --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.13.4.jar.sha1 @@ -0,0 +1 @@ +0cf934c681294b97ef6d80082faeefbe1edadf56 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 deleted file mode 100644 index a1dd86f11312d..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bf43eed9de0031521107dfea41d1e5d6bf1b9639 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..f8d776d40fdb5 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.4.jar.sha1 @@ -0,0 +1 @@ +ccaf21e6a02a20cae6591a12d20bf310544cf3ee \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 deleted file mode 100644 index 864f2da02463f..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b4e03e361e2388e3a8a0b68e3b9988d3a07ee3f3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..6d4962b0b6fa9 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.13.4.jar.sha1 @@ -0,0 +1 @@ +4161a7c3914a12e7b7940ea53eb3c53e17aea91b \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 deleted file mode 100644 index ba45b6520a1d7..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9363ded5441b1fee62d5be0604035690ca759a2a \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.4.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..312bd9ae91e4e --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.4.jar.sha1 @@ -0,0 +1 @@ +3142ec201e878372d1561e64bd1a947d9e88a03d \ No newline at end of file diff --git a/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 b/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 deleted file mode 100644 index 1ac9b78b88687..0000000000000 --- a/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cf26b7b05fef01e7bec00cb88ab4feeeba743e12 \ No newline at end of file diff --git a/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 b/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 new file mode 100644 index 
0000000000000..3216ba485951a --- /dev/null +++ b/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 @@ -0,0 +1 @@ +e80612549feb5c9191c498de628c1aa80693cf0b \ No newline at end of file diff --git a/modules/geo/build.gradle b/modules/geo/build.gradle index 7f687a414e566..6b00709f08bf9 100644 --- a/modules/geo/build.gradle +++ b/modules/geo/build.gradle @@ -40,6 +40,7 @@ restResources { includeCore '_common', 'indices', 'index', 'search', 'bulk' } } + artifacts { restTests(project.file('src/yamlRestTest/resources/rest-api-spec/test')) } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java index 7dc6f2c1b89b7..31ff2ef4689bd 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java @@ -21,6 +21,9 @@ * for the test cluster on which integration tests are running. */ public abstract class GeoModulePluginIntegTestCase extends OpenSearchIntegTestCase { + + protected static final double GEOHASH_TOLERANCE = 1E-5D; + /** * Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this * geo plugin is not getting packaged in a zip, we need to load it before the tests run. diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java index 2ac73728b2dab..9bd082a6e1ffe 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java @@ -8,52 +8,149 @@ package org.opensearch.geo.search; +import org.hamcrest.MatcherAssert; +import org.junit.Before; import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.geo.GeoPoint; import org.opensearch.geo.GeoModulePluginIntegTestCase; +import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper; import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.geo.tests.common.AggregationBuilders; +import org.opensearch.geo.tests.common.RandomGeoGenerator; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.test.OpenSearchIntegTestCase; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; +/** + * Tests to validate if user specified a missingValue in the input while doing the aggregation + */ @OpenSearchIntegTestCase.SuiteScopeTestCase public class MissingValueIT extends GeoModulePluginIntegTestCase { + private static final String INDEX_NAME = "idx"; + private static final String GEO_SHAPE_FIELD_NAME = "myshape"; + private static final String GEO_SHAPE_FIELD_TYPE = "type=geo_shape"; + private static final String AGGREGATION_NAME = "bounds"; + private static final String NON_EXISTENT_FIELD = "non_existing_field"; + private static final WellKnownText WKT = WellKnownText.INSTANCE; + private static Geometry indexedGeometry; + private static GeoPoint indexedGeoPoint; + private GeoPoint bottomRight; + private GeoPoint topLeft; + @Override protected void 
setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get()); + assertAcked( + prepareCreate(INDEX_NAME).setMapping( + "date", + "type=date", + "location", + "type=geo_point", + "str", + "type=keyword", + GEO_SHAPE_FIELD_NAME, + GEO_SHAPE_FIELD_TYPE + ).get() + ); + indexedGeometry = RandomGeoGeometryGenerator.randomGeometry(random()); + indexedGeoPoint = RandomGeoGenerator.randomPoint(random()); + assert indexedGeometry != null; indexRandom( true, - client().prepareIndex("idx").setId("1").setSource(), - client().prepareIndex("idx") + client().prepareIndex(INDEX_NAME).setId("1").setSource(), + client().prepareIndex(INDEX_NAME) .setId("2") - .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2") + .setSource( + "str", + "foo", + "long", + 3L, + "double", + 5.5, + "date", + "2015-05-07", + "location", + indexedGeoPoint.toString(), + GEO_SHAPE_FIELD_NAME, + WKT.toWKT(indexedGeometry) + ) ); } + @Before + public void runBeforeEachTest() { + bottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + topLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + } + public void testUnmappedGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(AggregationBuilders.geoBounds("bounds").field("non_existing_field").missing("2,1")) + final GeoPoint missingGeoPoint = RandomGeoGenerator.randomPoint(random()); + GeoBoundsHelper.updateBoundsBottomRight(missingGeoPoint, bottomRight); + GeoBoundsHelper.updateBoundsTopLeft(missingGeoPoint, topLeft); + SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME) + .field(NON_EXISTENT_FIELD) + .wrapLongitude(false) + .missing(missingGeoPoint.toString()) + ) .get(); assertSearchResponse(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); } public void testGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(AggregationBuilders.geoBounds("bounds").field("location").missing("2,1")) + GeoBoundsHelper.updateBoundsForGeoPoint(indexedGeoPoint, topLeft, bottomRight); + final GeoPoint missingGeoPoint = RandomGeoGenerator.randomPoint(random()); + GeoBoundsHelper.updateBoundsForGeoPoint(missingGeoPoint, topLeft, bottomRight); + SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME).field("location").wrapLongitude(false).missing(missingGeoPoint.toString()) + ) .get(); assertSearchResponse(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + public void testGeoBoundsWithMissingShape() { + // create GeoBounds for the indexed Field + GeoBoundsHelper.updateBoundsForGeometry(indexedGeometry, topLeft, bottomRight); + final 
Geometry missingGeometry = RandomGeoGeometryGenerator.randomGeometry(random()); + assert missingGeometry != null; + GeoBoundsHelper.updateBoundsForGeometry(missingGeometry, topLeft, bottomRight); + final SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME) + .wrapLongitude(false) + .field(GEO_SHAPE_FIELD_NAME) + .missing(WKT.toWKT(missingGeometry)) + ) + .get(); + assertSearchResponse(response); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + public void testUnmappedGeoBoundsOnGeoShape() { + // We cannot use a Geometry other than a Point for the GeoBounds aggregation, as the default value for the + // CoreValuesSourceType is GeoPoint; hence we need to use a Point here. + final Geometry missingGeometry = RandomGeoGeometryGenerator.randomPoint(random()); + final SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation(AggregationBuilders.geoBounds(AGGREGATION_NAME).field(NON_EXISTENT_FIELD).missing(WKT.toWKT(missingGeometry))) + .get(); + GeoBoundsHelper.updateBoundsForGeometry(missingGeometry, topLeft, bottomRight); + assertSearchResponse(response); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + private void validateResult(final GeoBounds bounds) { + MatcherAssert.assertThat(bounds.bottomRight().lat(), closeTo(bottomRight.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.bottomRight().lon(), closeTo(bottomRight.lon(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.topLeft().lat(), closeTo(topLeft.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.topLeft().lon(), closeTo(topLeft.lon(), GEOHASH_TOLERANCE)); } } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java new file mode 100644 index 0000000000000..257cc98db69fc --- /dev/null +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java @@ -0,0 +1,187 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.common; + +import org.junit.Assert; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; + +import java.util.Locale; + +/** + * A helper class for finding the geo bounds for a shape or a point. + */ +public final class GeoBoundsHelper { + + /** + * Updates the GeoBounds for the input GeoPoint in topLeft and bottomRight GeoPoints.
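+ * <p> + * A minimal usage sketch (illustrative only, not part of the change itself): the test suites seed both corners + * with infinities and then fold points in one at a time, exactly as below. + * <pre>{@code + * GeoPoint topLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + * GeoPoint bottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + * GeoBoundsHelper.updateBoundsForGeoPoint(new GeoPoint(10.0, 20.0), topLeft, bottomRight); + * // both corners now collapse to (10.0, 20.0), the only point folded in so far + * }</pre>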
+ * + * @param geoPoint {@link GeoPoint} + * @param topLeft {@link GeoPoint} + * @param bottomRight {@link GeoPoint} + */ + public static void updateBoundsForGeoPoint(final GeoPoint geoPoint, final GeoPoint topLeft, final GeoPoint bottomRight) { + updateBoundsBottomRight(geoPoint, bottomRight); + updateBoundsTopLeft(geoPoint, topLeft); + } + + /** + * Find the bottom right for a point and put it in the currentBounds param. + * + * @param geoPoint {@link GeoPoint} + * @param currentBound {@link GeoPoint} + */ + public static void updateBoundsBottomRight(final GeoPoint geoPoint, final GeoPoint currentBound) { + if (geoPoint.lat() < currentBound.lat()) { + currentBound.resetLat(geoPoint.lat()); + } + if (geoPoint.lon() > currentBound.lon()) { + currentBound.resetLon(geoPoint.lon()); + } + } + + /** + * Find the top left for a point and put it in the currentBounds param. + * + * @param geoPoint {@link GeoPoint} + * @param currentBound {@link GeoPoint} + */ + public static void updateBoundsTopLeft(final GeoPoint geoPoint, final GeoPoint currentBound) { + if (geoPoint.lat() > currentBound.lat()) { + currentBound.resetLat(geoPoint.lat()); + } + if (geoPoint.lon() < currentBound.lon()) { + currentBound.resetLon(geoPoint.lon()); + } + } + + /** + * Find the bounds for an input shape. + * + * @param geometry {@link Geometry} + * @param geoShapeTopLeft {@link GeoPoint} + * @param geoShapeBottomRight {@link GeoPoint} + */ + public static void updateBoundsForGeometry( + final Geometry geometry, + final GeoPoint geoShapeTopLeft, + final GeoPoint geoShapeBottomRight + ) { + final ShapeType shapeType = geometry.type(); + switch (shapeType) { + case POINT: + updateBoundsTopLeft((Point) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Point) geometry, geoShapeBottomRight); + return; + case MULTIPOINT: + ((MultiPoint) geometry).getAll().forEach(p -> updateBoundsTopLeft(p, geoShapeTopLeft)); + ((MultiPoint) geometry).getAll().forEach(p -> updateBoundsBottomRight(p, geoShapeBottomRight)); + return; + case POLYGON: + updateBoundsTopLeft((Polygon) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Polygon) geometry, geoShapeBottomRight); + return; + case LINESTRING: + updateBoundsTopLeft((Line) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Line) geometry, geoShapeBottomRight); + return; + case MULTIPOLYGON: + ((MultiPolygon) geometry).getAll().forEach(p -> updateBoundsTopLeft(p, geoShapeTopLeft)); + ((MultiPolygon) geometry).getAll().forEach(p -> updateBoundsBottomRight(p, geoShapeBottomRight)); + return; + case GEOMETRYCOLLECTION: + ((GeometryCollection) geometry).getAll() + .forEach(geo -> updateBoundsForGeometry(geo, geoShapeTopLeft, geoShapeBottomRight)); + return; + case MULTILINESTRING: + ((MultiLine) geometry).getAll().forEach(line -> updateBoundsTopLeft(line, geoShapeTopLeft)); + ((MultiLine) geometry).getAll().forEach(line -> updateBoundsBottomRight(line, geoShapeBottomRight)); + return; + case ENVELOPE: + updateBoundsTopLeft((Rectangle) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Rectangle) geometry, geoShapeBottomRight); + return; + default: + Assert.fail(String.format(Locale.ROOT, "The shape type %s is not supported", shapeType)); + } + } + + private static void updateBoundsTopLeft(final Point p, final GeoPoint currentBound) { + final GeoPoint geoPoint = new GeoPoint(p.getLat(), p.getLon()); + updateBoundsTopLeft(geoPoint, currentBound); + } + + private static void updateBoundsTopLeft(final Polygon polygon, final GeoPoint currentBound) { + for (int i = 0; i < 
polygon.getPolygon().length(); i++) { + double lat = polygon.getPolygon().getLats()[i]; + double lon = polygon.getPolygon().getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsTopLeft(geoPoint, currentBound); + } + } + + private static void updateBoundsTopLeft(final Line line, final GeoPoint currentBound) { + for (int i = 0; i < line.length(); i++) { + double lat = line.getLats()[i]; + double lon = line.getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsTopLeft(geoPoint, currentBound); + } + } + + private static void updateBoundsTopLeft(final Rectangle rectangle, final GeoPoint currentBound) { + if (rectangle.getMaxLat() > currentBound.lat()) { + currentBound.resetLat(rectangle.getMaxLat()); + } + if (rectangle.getMinLon() < currentBound.lon()) { + currentBound.resetLon(rectangle.getMinLon()); + } + } + + private static void updateBoundsBottomRight(final Point p, final GeoPoint currentBound) { + final GeoPoint geoPoint = new GeoPoint(p.getLat(), p.getLon()); + updateBoundsBottomRight(geoPoint, currentBound); + } + + private static void updateBoundsBottomRight(final Polygon polygon, final GeoPoint currentBound) { + for (int i = 0; i < polygon.getPolygon().length(); i++) { + double lat = polygon.getPolygon().getLats()[i]; + double lon = polygon.getPolygon().getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsBottomRight(geoPoint, currentBound); + } + } + + private static void updateBoundsBottomRight(final Line line, final GeoPoint currentBound) { + for (int i = 0; i < line.length(); i++) { + double lat = line.getLats()[i]; + double lon = line.getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsBottomRight(geoPoint, currentBound); + } + } + + private static void updateBoundsBottomRight(final Rectangle rectangle, final GeoPoint currentBound) { + if (rectangle.getMinLat() < currentBound.lat()) { + currentBound.resetLat(rectangle.getMinLat()); + } + if (rectangle.getMaxLon() > currentBound.lon()) { + currentBound.resetLon(rectangle.getMaxLon()); + } + } +} diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java index 92987d407f51d..b6f33ec2e0cae 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java @@ -22,14 +22,20 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.geo.GeoModulePluginIntegTestCase; +import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper; import org.opensearch.geo.tests.common.RandomGeoGenerator; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Geometry; import org.opensearch.geometry.utils.Geohash; +import org.opensearch.geometry.utils.StandardValidator; +import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.search.SearchHit; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import java.util.ArrayList; import java.util.List; +import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; 
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -46,6 +52,7 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value"; protected static final String MULTI_VALUED_FIELD_NAME = "geo_values"; + protected static final String GEO_SHAPE_FIELD_NAME = "shape"; protected static final String NUMBER_FIELD_NAME = "l_values"; protected static final String UNMAPPED_IDX_NAME = "idx_unmapped"; protected static final String IDX_NAME = "idx"; @@ -57,11 +64,13 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul protected static int numDocs; protected static int numUniqueGeoPoints; protected static GeoPoint[] singleValues, multiValues; + protected static Geometry[] geoShapesValues; protected static GeoPoint singleTopLeft, singleBottomRight, multiTopLeft, multiBottomRight, singleCentroid, multiCentroid, - unmappedCentroid; + unmappedCentroid, geoShapeTopLeft, geoShapeBottomRight; protected static ObjectIntMap expectedDocCountsForGeoHash = null; protected static ObjectObjectMap expectedCentroidsForGeoHash = null; - protected static final double GEOHASH_TOLERANCE = 1E-5D; + + protected static final WellKnownText WKT = new WellKnownText(true, new StandardValidator(true)); @Override public void setupSuiteScopeCluster() throws Exception { @@ -75,7 +84,9 @@ public void setupSuiteScopeCluster() throws Exception { NUMBER_FIELD_NAME, "type=long", "tag", - "type=keyword" + "type=keyword", + GEO_SHAPE_FIELD_NAME, + "type=geo_shape" ) ); @@ -83,6 +94,8 @@ public void setupSuiteScopeCluster() throws Exception { singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); multiTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); multiBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + geoShapeTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + geoShapeBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); singleCentroid = new GeoPoint(0, 0); multiCentroid = new GeoPoint(0, 0); unmappedCentroid = new GeoPoint(0, 0); @@ -95,17 +108,21 @@ public void setupSuiteScopeCluster() throws Exception { singleValues = new GeoPoint[numUniqueGeoPoints]; for (int i = 0; i < singleValues.length; i++) { singleValues[i] = RandomGeoGenerator.randomPoint(random()); - updateBoundsTopLeft(singleValues[i], singleTopLeft); - updateBoundsBottomRight(singleValues[i], singleBottomRight); + GeoBoundsHelper.updateBoundsForGeoPoint(singleValues[i], singleTopLeft, singleBottomRight); } multiValues = new GeoPoint[numUniqueGeoPoints]; for (int i = 0; i < multiValues.length; i++) { multiValues[i] = RandomGeoGenerator.randomPoint(random()); - updateBoundsTopLeft(multiValues[i], multiTopLeft); - updateBoundsBottomRight(multiValues[i], multiBottomRight); + GeoBoundsHelper.updateBoundsForGeoPoint(multiValues[i], multiTopLeft, multiBottomRight); } + geoShapesValues = new Geometry[numDocs]; + IntStream.range(0, numDocs).forEach(iterator -> { + geoShapesValues[iterator] = RandomGeoGeometryGenerator.randomGeometry(random()); + GeoBoundsHelper.updateBoundsForGeometry(geoShapesValues[iterator], geoShapeTopLeft, geoShapeBottomRight); + }); + List builders = new ArrayList<>(); GeoPoint singleVal; @@ -132,6 +149,7 @@ public void setupSuiteScopeCluster() throws Exception { .endArray() .field(NUMBER_FIELD_NAME, i) .field("tag", "tag" + i) + .field(GEO_SHAPE_FIELD_NAME, 
WKT.toWKT(geoShapesValues[i])) .endObject() ) ); @@ -147,7 +165,9 @@ public void setupSuiteScopeCluster() throws Exception { ); } - assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); + assertAcked( + prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point", GEO_SHAPE_FIELD_NAME, "type=geo_shape") + ); assertAcked( prepareCreate(DATELINE_IDX_NAME).setMapping( @@ -274,22 +294,4 @@ private GeoPoint updateHashCentroid(String hash, final GeoPoint location) { final double newLat = centroid.lat() + (location.lat() - centroid.lat()) / docCount; return centroid.reset(newLat, newLon); } - - private void updateBoundsBottomRight(GeoPoint geoPoint, GeoPoint currentBound) { - if (geoPoint.lat() < currentBound.lat()) { - currentBound.resetLat(geoPoint.lat()); - } - if (geoPoint.lon() > currentBound.lon()) { - currentBound.resetLon(geoPoint.lon()); - } - } - - private void updateBoundsTopLeft(GeoPoint geoPoint, GeoPoint currentBound) { - if (geoPoint.lat() > currentBound.lat()) { - currentBound.resetLat(geoPoint.lat()); - } - if (geoPoint.lon() < currentBound.lon()) { - currentBound.resetLon(geoPoint.lon()); - } - } } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java index 8cc82da12d69a..ed3196319faca 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.geo.search.aggregations.metrics; +import org.hamcrest.MatcherAssert; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.util.BigArray; @@ -43,18 +44,18 @@ import java.util.List; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.closeTo; +import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.search.aggregations.AggregationBuilders.global; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds; @OpenSearchIntegTestCase.SuiteScopeTestCase public class GeoBoundsITTestCase extends AbstractGeoAggregatorModulePluginTestCase { @@ -275,4 +276,36 @@ public void testSingleValuedFieldWithZeroLon() throws Exception { assertThat(bottomRight.lat(), closeTo(1.0, GEOHASH_TOLERANCE)); assertThat(bottomRight.lon(), closeTo(0.0, GEOHASH_TOLERANCE)); } + + public void testGeoShapeValuedField() { + final SearchResponse response = client().prepareSearch(IDX_NAME) + .addAggregation(geoBounds(aggName).field(GEO_SHAPE_FIELD_NAME).wrapLongitude(false)) + .get(); + assertSearchResponse(response); + 
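+ // The expected corners below were accumulated by GeoBoundsHelper over every indexed shape in + // setupSuiteScopeCluster, so the reduced bounds must reproduce them within GEOHASH_TOLERANCE.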
final GeoBounds geoBounds = response.getAggregations().get(aggName); + MatcherAssert.assertThat(geoBounds, notNullValue()); + MatcherAssert.assertThat(geoBounds.getName(), equalTo(aggName)); + final GeoPoint topLeft = geoBounds.topLeft(); + final GeoPoint bottomRight = geoBounds.bottomRight(); + MatcherAssert.assertThat(topLeft.lat(), closeTo(geoShapeTopLeft.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(topLeft.lon(), closeTo(geoShapeTopLeft.lon(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bottomRight.lat(), closeTo(geoShapeBottomRight.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bottomRight.lon(), closeTo(geoShapeBottomRight.lon(), GEOHASH_TOLERANCE)); + } + + public void testEmptyAggregationOnGeoShapes() { + final SearchResponse searchResponse = client().prepareSearch(EMPTY_IDX_NAME) + .setQuery(matchAllQuery()) + .addAggregation(geoBounds(aggName).field(GEO_SHAPE_FIELD_NAME).wrapLongitude(false)) + .get(); + + MatcherAssert.assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + final GeoBounds geoBounds = searchResponse.getAggregations().get(aggName); + MatcherAssert.assertThat(geoBounds, notNullValue()); + MatcherAssert.assertThat(geoBounds.getName(), equalTo(aggName)); + final GeoPoint topLeft = geoBounds.topLeft(); + final GeoPoint bottomRight = geoBounds.bottomRight(); + MatcherAssert.assertThat(topLeft, equalTo(null)); + MatcherAssert.assertThat(bottomRight, equalTo(null)); + } } diff --git a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java index 25dcf8db2c407..77abba7f54677 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java +++ b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java @@ -40,6 +40,7 @@ import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoTileGrid; import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.opensearch.geo.search.aggregations.metrics.GeoBoundsGeoShapeAggregator; import org.opensearch.geo.search.aggregations.metrics.InternalGeoBounds; import org.opensearch.index.mapper.GeoShapeFieldMapper; import org.opensearch.index.mapper.Mapper; @@ -47,10 +48,13 @@ import org.opensearch.plugins.Plugin; import org.opensearch.plugins.SearchPlugin; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.opensearch.search.aggregations.support.CoreValuesSourceType; +import org.opensearch.search.aggregations.support.ValuesSourceRegistry; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Consumer; public class GeoModulePlugin extends Plugin implements MapperPlugin, SearchPlugin { @@ -102,4 +106,23 @@ public List getCompositeAggregations() { ) ); } + + /** + * Registering the GeoBounds Aggregation on the GeoShape Field. This function allows plugins to register new + * aggregations using aggregation names that are already defined in Core, as long as the new aggregations target + * different ValuesSourceTypes. 
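+ * <p> + * With this registration in place a {@code geo_bounds} aggregation can target a {@code geo_shape} field + * directly; a usage sketch mirroring the integration tests (field and aggregation names are illustrative): + * <pre>{@code + * new GeoBoundsAggregationBuilder("bounds").field("myshape").wrapLongitude(false); + * }</pre>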
+ * + * @return A list of the new registrar functions + */ + @Override + public List<Consumer<ValuesSourceRegistry.Builder>> getAggregationExtentions() { + final Consumer<ValuesSourceRegistry.Builder> geoShapeConsumer = builder -> builder.register( + GeoBoundsAggregationBuilder.REGISTRY_KEY, + CoreValuesSourceType.GEO_SHAPE, + GeoBoundsGeoShapeAggregator::new, + true + ); + return Collections.singletonList(geoShapeConsumer); + } + } diff --git a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java new file mode 100644 index 0000000000000..246ece4342cff --- /dev/null +++ b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java @@ -0,0 +1,190 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.algorithm; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.common.util.CollectionUtils; + +import java.awt.geom.Point2D; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Random; +import java.util.stream.IntStream; + +/** + * Helper class to generate a polygon. This is kept in the src folder so that the GeoSpatial plugin can take advantage + * of this helper to create polygons, rather than hardcoding the values. + */ +public class PolygonGenerator { + + private static final Logger LOG = LogManager.getLogger(PolygonGenerator.class); + +/** + * A helper function to create polygons for testing. The returned list contains two double arrays: the first + * element holds all the X points and the second holds all the Y points. + * + * @param xPool a {@link java.util.List} of {@link Double} + * @param yPool a {@link java.util.List} of {@link Double} + * @param random a {@link Random} object used to generate the polygon + * @return a {@link List} of double arrays. + */ + public static List<double[]> generatePolygon(final List<Double> xPool, final List<Double> yPool, final Random random) { + if (CollectionUtils.isEmpty(xPool) || CollectionUtils.isEmpty(yPool)) { + LOG.debug("One of the X or Y lists is empty or null. X : {} Y : {}", xPool, yPool); + return Collections.emptyList(); + } + final List<Point2D.Double> generatedPolygonPointsList = ValtrAlgorithm.generateRandomConvexPolygon(xPool, yPool, random); + final double[] x = new double[generatedPolygonPointsList.size()]; + final double[] y = new double[generatedPolygonPointsList.size()]; + IntStream.range(0, generatedPolygonPointsList.size()).forEach(iterator -> { + x[iterator] = generatedPolygonPointsList.get(iterator).getX(); + y[iterator] = generatedPolygonPointsList.get(iterator).getY(); + }); + final List<double[]> pointsList = new ArrayList<>(); + pointsList.add(x); + pointsList.add(y); + return pointsList; + } + + /* + * MIT License + * + * Copyright (c) 2017 Sander Verdonschot + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software.
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + /** + * Provides a helper function to create a Polygon with a list of points. This source code is used to create the + * polygons in the test cases. + * Reference Link + * Visual Link + */ + private static class ValtrAlgorithm { + /** + * Generates a convex polygon using the points provided as a {@link List} of {@link Double} for both X and Y axis. + * + * @param xPool a {@link List} of {@link Double} + * @param yPool a {@link List} of {@link Double} + * @return a {@link List} of {@link Point2D.Double} + */ + private static List generateRandomConvexPolygon( + final List xPool, + final List yPool, + final Random random + ) { + final int n = xPool.size(); + // Sort them + Collections.sort(xPool); + Collections.sort(yPool); + + // Isolate the extreme points + final Double minX = xPool.get(0); + final Double maxX = xPool.get(n - 1); + final Double minY = yPool.get(0); + final Double maxY = yPool.get(n - 1); + + // Divide the interior points into two chains & Extract the vector components + java.util.List xVec = new ArrayList<>(n); + java.util.List yVec = new ArrayList<>(n); + + double lastTop = minX, lastBot = minX; + + for (int i = 1; i < n - 1; i++) { + double x = xPool.get(i); + + if (random.nextBoolean()) { + xVec.add(x - lastTop); + lastTop = x; + } else { + xVec.add(lastBot - x); + lastBot = x; + } + } + + xVec.add(maxX - lastTop); + xVec.add(lastBot - maxX); + + double lastLeft = minY, lastRight = minY; + + for (int i = 1; i < n - 1; i++) { + double y = yPool.get(i); + + if (random.nextBoolean()) { + yVec.add(y - lastLeft); + lastLeft = y; + } else { + yVec.add(lastRight - y); + lastRight = y; + } + } + + yVec.add(maxY - lastLeft); + yVec.add(lastRight - maxY); + + // Randomly pair up the X- and Y-components + Collections.shuffle(yVec, random); + + // Combine the paired up components into vectors + List vec = new ArrayList<>(n); + + for (int i = 0; i < n; i++) { + vec.add(new Point2D.Double(xVec.get(i), yVec.get(i))); + } + + // Sort the vectors by angle + Collections.sort(vec, Comparator.comparingDouble(v -> Math.atan2(v.getY(), v.getX()))); + + // Lay them end-to-end + double x = 0, y = 0; + double minPolygonX = 0; + double minPolygonY = 0; + List points = new ArrayList<>(n); + + for (int i = 0; i < n; i++) { + points.add(new Point2D.Double(x, y)); + + x += vec.get(i).getX(); + y += vec.get(i).getY(); + + minPolygonX = Math.min(minPolygonX, x); + minPolygonY = Math.min(minPolygonY, y); + } + + // Move the polygon to the original min and max coordinates + double xShift = minX - minPolygonX; + double yShift = minY - minPolygonY; + + for (int i = 0; i < n; i++) { + Point2D.Double p = points.get(i); + points.set(i, new Point2D.Double(p.x + xShift, p.y + yShift)); + } + + return points; + } + } + +} diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java new file mode 100644 index 0000000000000..918b9a6701490 --- /dev/null +++ 
b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.metrics; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.LeafReaderContext; +import org.opensearch.common.geo.GeoShapeDocValue; +import org.opensearch.common.util.BigArrays; +import org.opensearch.index.fielddata.GeoShapeValue; +import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.LeafBucketCollector; +import org.opensearch.search.aggregations.LeafBucketCollectorBase; +import org.opensearch.search.aggregations.support.ValuesSource; +import org.opensearch.search.aggregations.support.ValuesSourceConfig; +import org.opensearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Map; + +/** + * Aggregate all docs into a geographic bounds for field geo_shape. + * + * @opensearch.internal + */ +public final class GeoBoundsGeoShapeAggregator extends AbstractGeoBoundsAggregator { + private static final Logger LOGGER = LogManager.getLogger(GeoBoundsGeoShapeAggregator.class); + + public GeoBoundsGeoShapeAggregator( + String name, + SearchContext searchContext, + Aggregator aggregator, + ValuesSourceConfig valuesSourceConfig, + boolean wrapLongitude, + Map metaData + ) throws IOException { + super(name, searchContext, aggregator, valuesSourceConfig, wrapLongitude, metaData); + } + + @Override + protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector leafBucketCollector) { + if (valuesSource == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + final BigArrays bigArrays = context.bigArrays(); + final GeoShapeValue values = valuesSource.getGeoShapeValues(ctx); + return new LeafBucketCollectorBase(leafBucketCollector, values) { + @Override + public void collect(int doc, long bucket) throws IOException { + setBucketSize(bucket, bigArrays); + if (values.advanceExact(doc)) { + final GeoShapeDocValue value = values.nextValue(); + final GeoShapeDocValue.BoundingRectangle boundingBox = value.getBoundingRectangle(); + if (boundingBox != null) { + double top = tops.get(bucket); + if (boundingBox.getMaxLatitude() > top) { + top = boundingBox.getMaxLatitude(); + } + + double bottom = bottoms.get(bucket); + if (boundingBox.getMinLatitude() < bottom) { + bottom = boundingBox.getMinLatitude(); + } + + double posLeft = posLefts.get(bucket); + if (boundingBox.getMinLongitude() >= 0 && boundingBox.getMinLongitude() < posLeft) { + posLeft = boundingBox.getMinLongitude(); + } + if (boundingBox.getMaxLongitude() >= 0 && boundingBox.getMaxLongitude() < posLeft) { + posLeft = boundingBox.getMaxLongitude(); + } + + double posRight = posRights.get(bucket); + if (boundingBox.getMaxLongitude() >= 0 && boundingBox.getMaxLongitude() > posRight) { + posRight = boundingBox.getMaxLongitude(); + } + if (boundingBox.getMinLongitude() >= 0 && boundingBox.getMinLongitude() > posRight) { + posRight = boundingBox.getMinLongitude(); + } + + double negLeft = negLefts.get(bucket); + if (boundingBox.getMinLongitude() < 0 && boundingBox.getMinLongitude() < negLeft) { + negLeft = boundingBox.getMinLongitude(); + } + if (boundingBox.getMaxLongitude() < 0 && 
boundingBox.getMaxLongitude() < negLeft) { + negLeft = boundingBox.getMaxLongitude(); + } + + double negRight = negRights.get(bucket); + if (boundingBox.getMaxLongitude() < 0 && boundingBox.getMaxLongitude() > negRight) { + negRight = boundingBox.getMaxLongitude(); + } + if (boundingBox.getMinLongitude() < 0 && boundingBox.getMinLongitude() > negRight) { + negRight = boundingBox.getMinLongitude(); + } + + tops.set(bucket, top); + bottoms.set(bucket, bottom); + posLefts.set(bucket, posLeft); + posRights.set(bucket, posRight); + negLefts.set(bucket, negLeft); + negRights.set(bucket, negRight); + } else { + LOGGER.error("The bounding box was null for the Doc id {}", doc); + } + } + } + }; + } +} diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java new file mode 100644 index 0000000000000..d449d72f0b148 --- /dev/null +++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java @@ -0,0 +1,237 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.metrics; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LatLonShape; +import org.apache.lucene.document.ShapeDocValuesField; +import org.apache.lucene.geo.LatLonGeometry; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.hamcrest.MatcherAssert; +import org.junit.Assert; +import org.opensearch.common.geo.GeoBoundingBox; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.common.geo.GeoShapeUtils; +import org.opensearch.geo.GeoModulePlugin; +import org.opensearch.geo.tests.common.AggregationInspectionHelper; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Circle; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.ShapeType; +import org.opensearch.index.mapper.GeoShapeFieldMapper; +import org.opensearch.index.mapper.GeoShapeIndexer; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.plugins.SearchPlugin; +import org.opensearch.search.aggregations.AggregatorTestCase; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; +import java.util.Random; + +import static org.hamcrest.Matchers.closeTo; + +public class GeoBoundsGeoShapeAggregatorTests extends AggregatorTestCase { + private static final Logger LOG = LogManager.getLogger(GeoBoundsGeoShapeAggregatorTests.class); + private static final double GEOHASH_TOLERANCE = 1E-5D; + private static final String AGGREGATION_NAME = "my_agg"; + private static final String FIELD_NAME = "field"; + + /** + * Overriding the Search Plugins list with {@link GeoModulePlugin} so that the testcase will know that this plugin is + * to 
be loaded during the tests. + * + * @return List of {@link SearchPlugin} + */ + @Override + protected List getSearchPlugins() { + return Collections.singletonList(new GeoModulePlugin()); + } + + /** + * Testing Empty aggregator results. + * + * @throws Exception if an error occurs accessing the index + */ + public void testEmpty() throws Exception { + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) { + final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder(AGGREGATION_NAME).field(FIELD_NAME) + .wrapLongitude(false); + + final MappedFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(FIELD_NAME); + try (IndexReader reader = w.getReader()) { + IndexSearcher searcher = new IndexSearcher(reader); + InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + assertTrue(Double.isInfinite(bounds.top)); + assertTrue(Double.isInfinite(bounds.bottom)); + assertTrue(Double.isInfinite(bounds.posLeft)); + assertTrue(Double.isInfinite(bounds.posRight)); + assertTrue(Double.isInfinite(bounds.negLeft)); + assertTrue(Double.isInfinite(bounds.negRight)); + assertFalse(AggregationInspectionHelper.hasValue(bounds)); + } + } + } + + /** + * Testing GeoBoundAggregator for random shapes which are indexed. + * + * @throws Exception if an error occurs accessing the index + */ + public void testRandom() throws Exception { + final int numDocs = randomIntBetween(50, 100); + final List Y = new ArrayList<>(); + final List X = new ArrayList<>(); + final Random random = random(); + try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random, dir)) { + for (int i = 0; i < numDocs; i++) { + final Document document = new Document(); + final Geometry geometry = randomLuceneGeometry(random); + LOG.debug("Random Geometry created for Indexing : {}", geometry); + document.add(createShapeDocValue(geometry)); + w.addDocument(document); + getAllXAndYPoints(geometry, X, Y); + } + final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder(AGGREGATION_NAME).field(FIELD_NAME) + .wrapLongitude(false); + final MappedFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(FIELD_NAME); + try (IndexReader reader = w.getReader()) { + final IndexSearcher searcher = new IndexSearcher(reader); + final InternalGeoBounds actualBounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType); + final GeoBoundingBox expectedGeoBounds = getExpectedGeoBounds(X, Y); + MatcherAssert.assertThat( + actualBounds.bottomRight().getLat(), + closeTo(expectedGeoBounds.bottomRight().getLat(), GEOHASH_TOLERANCE) + ); + MatcherAssert.assertThat( + actualBounds.bottomRight().getLon(), + closeTo(expectedGeoBounds.bottomRight().getLon(), GEOHASH_TOLERANCE) + ); + MatcherAssert.assertThat(actualBounds.topLeft().getLat(), closeTo(expectedGeoBounds.topLeft().getLat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(actualBounds.topLeft().getLon(), closeTo(expectedGeoBounds.topLeft().getLon(), GEOHASH_TOLERANCE)); + assertTrue(AggregationInspectionHelper.hasValue(actualBounds)); + } + } + } + + private GeoBoundingBox getExpectedGeoBounds(final List X, final List Y) { + double top = Double.NEGATIVE_INFINITY; + double bottom = Double.POSITIVE_INFINITY; + double posLeft = Double.POSITIVE_INFINITY; + double posRight = Double.NEGATIVE_INFINITY; + double negLeft = Double.POSITIVE_INFINITY; + double negRight = Double.NEGATIVE_INFINITY; + // Finding the bounding box for the 
shapes. + for (final Double lon : X) { + if (lon >= 0 && lon < posLeft) { + posLeft = lon; + } + if (lon >= 0 && lon > posRight) { + posRight = lon; + } + if (lon < 0 && lon < negLeft) { + negLeft = lon; + } + if (lon < 0 && lon > negRight) { + negRight = lon; + } + } + for (final Double lat : Y) { + if (lat > top) { + top = lat; + } + if (lat < bottom) { + bottom = lat; + } + } + if (Double.isInfinite(posLeft)) { + return new GeoBoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, negRight)); + } else if (Double.isInfinite(negLeft)) { + return new GeoBoundingBox(new GeoPoint(top, posLeft), new GeoPoint(bottom, posRight)); + } else { + return new GeoBoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, posRight)); + } + } + + private void getAllXAndYPoints(final Geometry geometry, final List<Double> X, final List<Double> Y) { + if (geometry instanceof Point) { + final Point point = (Point) geometry; + X.add(point.getX()); + Y.add(point.getY()); + return; + } else if (geometry instanceof Polygon) { + final Polygon polygon = (Polygon) geometry; + for (int i = 0; i < polygon.getPolygon().getX().length; i++) { + X.add(polygon.getPolygon().getX(i)); + Y.add(polygon.getPolygon().getY(i)); + } + return; + } else if (geometry instanceof Line) { + final Line line = (Line) geometry; + for (int i = 0; i < line.getX().length; i++) { + X.add(line.getX(i)); + Y.add(line.getY(i)); + } + return; + } + Assert.fail( + String.format(Locale.ROOT, "Error: cannot convert %s to a valid indexable format [POINT, POLYGON, LINE]", geometry.getClass()) + ); + } + + private ShapeDocValuesField createShapeDocValue(final Geometry geometry) { + if (geometry instanceof Point) { + final Point point = (Point) geometry; + return LatLonShape.createDocValueField(FIELD_NAME, point.getLat(), point.getLon()); + } else if (geometry instanceof Polygon) { + return LatLonShape.createDocValueField(FIELD_NAME, GeoShapeUtils.toLucenePolygon((Polygon) geometry)); + } else if (geometry instanceof Line) { + return LatLonShape.createDocValueField(FIELD_NAME, GeoShapeUtils.toLuceneLine((Line) geometry)); + } + Assert.fail( + String.format(Locale.ROOT, "Error: cannot convert %s to a valid indexable format [POINT, POLYGON, LINE]", geometry.getClass()) + ); + return null; + } + + /** + * Random function to generate a {@link Geometry} backed by one of the Lucene {@link LatLonGeometry} shapes. When + * a geo_shape field is indexed, all the different {@link ShapeType} geometries that OpenSearch supports are + * broken down into only 3 shapes. Hence, we generate only those 3 shapes: {@link org.apache.lucene.geo.Point}, + * {@link org.apache.lucene.geo.Line}, {@link org.apache.lucene.geo.Polygon}. {@link Circle} is not supported.
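+ * (A circle is not among the indexable shapes, so the generator never produces one.)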
+ * Check {@link GeoShapeIndexer#prepareForIndexing(org.opensearch.geometry.Geometry)} + * + * @param r {@link Random} + * @return {@link Geometry} + */ + private static Geometry randomLuceneGeometry(final Random r) { + int shapeNumber = OpenSearchTestCase.randomIntBetween(0, 2); + if (shapeNumber == 0) { + // Point + return RandomGeoGeometryGenerator.randomPoint(r); + } else if (shapeNumber == 1) { + // LineString + return RandomGeoGeometryGenerator.randomLine(r); + } else { + // Polygon + return RandomGeoGeometryGenerator.randomPolygon(r); + } + } + +} diff --git a/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java b/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java new file mode 100644 index 0000000000000..caf15507e08c5 --- /dev/null +++ b/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java @@ -0,0 +1,240 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.tests.common; + +import org.junit.Assert; +import org.opensearch.geo.algorithm.PolygonGenerator; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.LinearRing; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; +import org.opensearch.index.mapper.GeoShapeIndexer; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; +import java.util.Random; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +/** + * Random geo generation utilities for randomized geo_shape type testing. + */ +public class RandomGeoGeometryGenerator { + // Just picking 10 as the max number of edges for a polygon; we don't want to make it too large, which can impact + // debugging. + private static final int MAX_VERTEXES = 10; + private static final int MAX_MULTIPLE_GEOMETRIES = 10; + + private static final Predicate<ShapeType> NOT_SUPPORTED_SHAPES = shapeType -> shapeType != ShapeType.CIRCLE + && shapeType != ShapeType.LINEARRING; + + /** + * A list of only the supported geometries, as defined here: {@link GeoShapeIndexer#prepareForIndexing(Geometry)} + */ + private static final List<ShapeType> SUPPORTED_SHAPE_TYPES = Arrays.stream(ShapeType.values()) + .filter(NOT_SUPPORTED_SHAPES) + .collect(Collectors.toList()); + + /** + * Returns a random Geometry. It makes sure that only a geometry which is supported by OpenSearch + * at indexing time is returned.
Check {@link GeoShapeIndexer#prepareForIndexing(Geometry)} + * + * @return {@link Geometry} + */ + public static Geometry randomGeometry(final Random r) { + final ShapeType randomShapeType = SUPPORTED_SHAPE_TYPES.get( + OpenSearchTestCase.randomIntBetween(0, SUPPORTED_SHAPE_TYPES.size() - 1) + ); + switch (randomShapeType) { + case POINT: + return randomPoint(r); + case MULTIPOINT: + return randomMultiPoint(r); + case POLYGON: + return randomPolygon(r); + case LINESTRING: + return randomLine(r); + case MULTIPOLYGON: + return randomMultiPolygon(r); + case GEOMETRYCOLLECTION: + return randomGeometryCollection(r); + case MULTILINESTRING: + return randomMultiLine(r); + case ENVELOPE: + return randomRectangle(r); + default: + Assert.fail(String.format(Locale.ROOT, "Cannot create a geometry of type %s ", randomShapeType)); + } + return null; + } + + /** + * Generate a random point on the Earth Surface. + * + * @param r {@link Random} + * @return {@link Point} + */ + public static Point randomPoint(final Random r) { + double[] pt = getLonAndLatitude(r); + return new Point(pt[0], pt[1]); + } + + /** + * Generate a random polygon on earth surface. + * + * @param r {@link Random} + * @return {@link Polygon} + */ + public static Polygon randomPolygon(final Random r) { + final int vertexCount = OpenSearchTestCase.randomIntBetween(3, MAX_VERTEXES); + return randomPolygonWithFixedVertexCount(r, vertexCount); + } + + /** + * Generate a random line on the earth Surface. + * + * @param r {@link Random} + * @return {@link Line} + */ + public static Line randomLine(final Random r) { + final double[] pt1 = getLonAndLatitude(r); + final double[] pt2 = getLonAndLatitude(r); + final double[] x = { pt1[0], pt2[0] }; + final double[] y = { pt1[1], pt2[1] }; + return new Line(x, y); + } + + /** + * Returns an object of {@link MultiPoint} denoting a list of points on earth surface. + * @param r {@link Random} + * @return {@link MultiPoint} + */ + public static MultiPoint randomMultiPoint(final Random r) { + int multiplePoints = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES); + final List pointsList = new ArrayList<>(); + IntStream.range(0, multiplePoints).forEach(i -> pointsList.add(randomPoint(r))); + return new MultiPoint(pointsList); + } + + /** + * Returns an object of {@link MultiPolygon} denoting various polygons on earth surface. + * + * @param r {@link Random} + * @return {@link MultiPolygon} + */ + public static MultiPolygon randomMultiPolygon(final Random r) { + int multiplePolygons = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES); + final List polygonList = new ArrayList<>(); + IntStream.range(0, multiplePolygons).forEach(i -> polygonList.add(randomPolygon(r))); + return new MultiPolygon(polygonList); + } + + /** + * Returns an object of {@link GeometryCollection} having various shapes on earth surface. + * + * @param r {@link Random} + * @return {@link GeometryCollection} + */ + public static GeometryCollection randomGeometryCollection(final Random r) { + final List geometries = new ArrayList<>(); + geometries.addAll(randomMultiPoint(r).getAll()); + geometries.addAll(randomMultiPolygon(r).getAll()); + geometries.addAll(randomMultiLine(r).getAll()); + geometries.add(randomPoint(r)); + geometries.add(randomLine(r)); + geometries.add(randomPolygon(r)); + geometries.add(randomRectangle(r)); + return new GeometryCollection<>(geometries); + } + + /** + * Returns a {@link MultiLine} object containing multiple lines on earth surface. 
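+ * Each member line comes from {@link #randomLine(Random)} and therefore has exactly two vertices.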
+ * + * @param r {@link Random} + * @return {@link MultiLine} + */ + public static MultiLine randomMultiLine(Random r) { + int multiLines = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES); + final List linesList = new ArrayList<>(); + IntStream.range(0, multiLines).forEach(i -> linesList.add(randomLine(r))); + return new MultiLine(linesList); + } + + /** + * Returns a random {@link Rectangle} created on earth surface. + * + * @param r {@link Random} + * @return {@link Rectangle} + */ + public static Rectangle randomRectangle(final Random r) { + final Polygon polygon = randomPolygonWithFixedVertexCount(r, 4); + double minX = Double.POSITIVE_INFINITY, maxX = Double.NEGATIVE_INFINITY, maxY = Double.NEGATIVE_INFINITY, minY = + Double.POSITIVE_INFINITY; + for (int i = 0; i < polygon.getPolygon().length(); i++) { + double x = polygon.getPolygon().getX()[i]; + double y = polygon.getPolygon().getY()[i]; + + minX = Math.min(minX, x); + minY = Math.min(minY, y); + maxX = Math.max(maxX, x); + maxY = Math.max(maxY, y); + } + return new Rectangle(minX, maxX, maxY, minY); + } + + /** + * Returns a double array where pt[0] : longitude and pt[1] : latitude + * + * @param r {@link Random} + * @return double[] + */ + private static double[] getLonAndLatitude(final Random r) { + double[] pt = new double[2]; + RandomGeoGenerator.randomPoint(r, pt); + return pt; + } + + private static Polygon randomPolygonWithFixedVertexCount(final Random r, final int vertexCount) { + final List xPool = new ArrayList<>(vertexCount); + final List yPool = new ArrayList<>(vertexCount); + IntStream.range(0, vertexCount).forEach(iterator -> { + double[] pt = getLonAndLatitude(r); + xPool.add(pt[0]); + yPool.add(pt[1]); + }); + final List pointsList = PolygonGenerator.generatePolygon(xPool, yPool, r); + // Checking the list + assert vertexCount == pointsList.get(0).length; + assert vertexCount == pointsList.get(1).length; + // Create the linearRing, as we need to close the polygon hence increasing vertexes count by 1 + final double[] x = new double[vertexCount + 1]; + final double[] y = new double[vertexCount + 1]; + IntStream.range(0, vertexCount).forEach(iterator -> { + x[iterator] = pointsList.get(0)[iterator]; + y[iterator] = pointsList.get(1)[iterator]; + }); + // making sure to close the polygon + x[vertexCount] = x[0]; + y[vertexCount] = y[0]; + final LinearRing linearRing = new LinearRing(x, y); + return new Polygon(linearRing); + } + +} diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.13.4.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..2e9425b8ff6db --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-annotations-2.13.4.jar.sha1 @@ -0,0 +1 @@ +858c6cc78e1f08a885b1613e1d817c829df70a6e \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 deleted file mode 100644 index fd75028bd141f..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56deb9ea2c93a7a556b3afbedd616d342963464e \ 
No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.4.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..fcc6491d1f78d --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-databind-2.13.4.jar.sha1 @@ -0,0 +1 @@ +98b0edfa8e4084078f10b7b356c300ded4a71491 \ No newline at end of file diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java index 1e0a4d89f2fd5..e3fde75e5b551 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java @@ -332,8 +332,10 @@ public ChannelHandler configureServerChannelHandler() { return new HttpChannelHandler(this, handlingSettings); } - static final AttributeKey HTTP_CHANNEL_KEY = AttributeKey.newInstance("opensearch-http-channel"); - static final AttributeKey HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance("opensearch-http-server-channel"); + protected static final AttributeKey HTTP_CHANNEL_KEY = AttributeKey.newInstance("opensearch-http-channel"); + protected static final AttributeKey HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance( + "opensearch-http-server-channel" + ); protected static class HttpChannelHandler extends ChannelInitializer { diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 5755ff55bfff9..8ca9491f834a6 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -59,7 +59,7 @@ dependencies { api "com.sun.jersey:jersey-client:${versions.jersey}" api "com.sun.jersey:jersey-core:${versions.jersey}" api "com.sun.jersey:jersey-json:${versions.jersey}" - api 'org.codehaus.jettison:jettison:1.5.0' + api 'org.codehaus.jettison:jettison:1.5.1' api 'com.sun.xml.bind:jaxb-impl:2.2.3-1' // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 deleted file mode 100644 index ec93f83474541..0000000000000 --- a/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -933c7df7a4b78c9a9322f431014ea699b1fc0cc0 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 new file mode 100644 index 0000000000000..29227ed427953 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 @@ -0,0 +1 @@ +d8918f348f234f5046bd39ea1ed9fc91deac402f \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1 new file mode 100644 index 0000000000000..2e9425b8ff6db --- /dev/null +++ 
diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle
index 5755ff55bfff9..8ca9491f834a6 100644
--- a/plugins/discovery-azure-classic/build.gradle
+++ b/plugins/discovery-azure-classic/build.gradle
@@ -59,7 +59,7 @@ dependencies {
   api "com.sun.jersey:jersey-client:${versions.jersey}"
   api "com.sun.jersey:jersey-core:${versions.jersey}"
   api "com.sun.jersey:jersey-json:${versions.jersey}"
-  api 'org.codehaus.jettison:jettison:1.5.0'
+  api 'org.codehaus.jettison:jettison:1.5.1'
   api 'com.sun.xml.bind:jaxb-impl:2.2.3-1'
 
   // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here,
diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1
deleted file mode 100644
index ec93f83474541..0000000000000
--- a/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-933c7df7a4b78c9a9322f431014ea699b1fc0cc0
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1
new file mode 100644
index 0000000000000..29227ed427953
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1
@@ -0,0 +1 @@
+d8918f348f234f5046bd39ea1ed9fc91deac402f
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1
deleted file mode 100644
index 7e68b8b99757d..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7198b3aac15285a49e218e08441c5f70af00fc51
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1
new file mode 100644
index 0000000000000..2e9425b8ff6db
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.4.jar.sha1
@@ -0,0 +1 @@
+858c6cc78e1f08a885b1613e1d817c829df70a6e
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1
deleted file mode 100644
index fd75028bd141f..0000000000000
--- a/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-56deb9ea2c93a7a556b3afbedd616d342963464e
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.4.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.4.jar.sha1
new file mode 100644
index 0000000000000..fcc6491d1f78d
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/jackson-databind-2.13.4.jar.sha1
@@ -0,0 +1 @@
+98b0edfa8e4084078f10b7b356c300ded4a71491
\ No newline at end of file
diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java
index 8413a750e2741..cb2e28210faf1 100644
--- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java
+++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java
@@ -94,8 +94,13 @@ public class ExampleCustomSettingsConfig {
     private final List<Integer> list;
     private final String filtered;
 
+    /**
+     * Instantiate this object based on the specified environment.
+     *
+     * @param environment The environment including paths to custom setting configuration files
+     */
     public ExampleCustomSettingsConfig(final Environment environment) {
-        // Elasticsearch config directory
+        // OpenSearch config directory
         final Path configDir = environment.configDir();
 
         // Resolve the plugin's custom settings file
@@ -121,22 +126,47 @@ public ExampleCustomSettingsConfig(final Environment environment) {
         assert secured != null;
     }
 
+    /**
+     * Gets the value of the custom.simple String setting.
+     *
+     * @return the custom.simple value
+     */
     public String getSimple() {
         return simple;
     }
 
+    /**
+     * Gets the value of the custom.bool boolean setting.
+     *
+     * @return the custom.bool value
+     */
     public Boolean getBool() {
         return bool;
     }
 
+    /**
+     * Gets the value of the custom.validated String setting.
+     *
+     * @return the custom.validated value
+     */
     public String getValidated() {
         return validated;
     }
 
+    /**
+     * Gets the value of the custom.filtered String setting.
+     *
+     * @return the custom.filtered value
+     */
     public String getFiltered() {
         return filtered;
     }
 
+    /**
+     * Gets the value of the custom.list list of integers setting.
+     *
+     * @return the custom.list value
+     */
     public List<Integer> getList() {
         return list;
     }
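The getters above are plain accessors for values parsed from the plugin's settings file at construction time. A hedged sketch of exercising them; the paths are placeholders, and the plugin's settings file must already exist under the config directory for the constructor to succeed:

```java
import java.nio.file.Path;

import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.example.customsettings.ExampleCustomSettingsConfig;

public class CustomSettingsDemo {
    public static void main(String[] args) {
        // path.home is required by Environment; both paths here are illustrative.
        final Settings settings = Settings.builder().put("path.home", "/tmp/opensearch").build();
        final Environment environment = new Environment(settings, Path.of("/tmp/opensearch/config"));
        final ExampleCustomSettingsConfig config = new ExampleCustomSettingsConfig(environment);
        System.out.println("custom.simple = " + config.getSimple());
        System.out.println("custom.bool   = " + config.getBool());
        System.out.println("custom.list   = " + config.getList());
    }
}
```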
diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java
index aa22938c72a01..0b619102c667f 100644
--- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java
+++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java
@@ -42,10 +42,19 @@
 
 import static java.util.stream.Collectors.toList;
 
+/**
+ * An example plugin that includes custom settings.
+ */
 public class ExampleCustomSettingsPlugin extends Plugin {
 
     private final ExampleCustomSettingsConfig config;
 
+    /**
+     * Instantiate this plugin with the specified settings and config path.
+     *
+     * @param settings The settings for this plugin.
+     * @param configPath The path to this plugin's configuration files.
+     */
     public ExampleCustomSettingsPlugin(final Settings settings, final Path configPath) {
         this.config = new ExampleCustomSettingsConfig(new Environment(settings, configPath));
 
@@ -53,9 +62,6 @@ public ExampleCustomSettingsPlugin(final Settings settings, final Path configPat
         assert "secret".equals(config.getFiltered());
     }
 
-    /**
-     * @return the plugin's custom settings
-     */
     @Override
     public List<Setting<?>> getSettings() {
         return Arrays.asList(
diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java
new file mode 100644
index 0000000000000..5af8654201da2
--- /dev/null
+++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Example classes demonstrating the use of custom settings in a plugin.
+ */
+package org.opensearch.example.customsettings;
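`getSettings()` is what registers the custom keys with the node so they pass settings validation. A sketch of the declaration style it returns; the keys and properties below are illustrative, not the example plugin's exact definitions:

```java
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;

public class CustomSettingDeclarations {
    // A plain node-scoped string setting.
    static final Setting<String> SIMPLE = Setting.simpleString("custom.simple", Property.NodeScope);

    // A boolean setting with a default value.
    static final Setting<Boolean> BOOL = Setting.boolSetting("custom.bool", false, Property.NodeScope);

    // Filtered settings are hidden from the settings APIs, which matches
    // the "secret" assertion on config.getFiltered() in the constructor above.
    static final Setting<String> FILTERED = Setting.simpleString("custom.filtered", Property.NodeScope, Property.Filtered);
}
```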
diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java
index 49098ae36e30f..c646592af63cb 100644
--- a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java
+++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java
@@ -44,6 +44,12 @@
  * Plugin declaring a custom {@link SignificanceHeuristic}.
  */
 public class CustomSignificanceHeuristicPlugin extends Plugin implements SearchPlugin {
+
+    /**
+     * Instantiate this plugin.
+     */
+    public CustomSignificanceHeuristicPlugin() {}
+
     @Override
     public List<SignificanceHeuristicSpec<?>> getSignificanceHeuristics() {
         return singletonList(new SignificanceHeuristicSpec<>(SimpleHeuristic.NAME, SimpleHeuristic::new, SimpleHeuristic.PARSER));
diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java
index 8365a56bcfe4e..9458bf5b75feb 100644
--- a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java
+++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java
@@ -44,13 +44,25 @@
  * A simple {@linkplain SignificanceHeuristic} used as an example of declaring a custom heuristic.
  */
 public class SimpleHeuristic extends SignificanceHeuristic {
+    /**
+     * The name of this NamedWriteable heuristic.
+     */
     public static final String NAME = "simple";
+
+    /**
+     * The parser with which to deserialize this object from XContent.
+     */
     public static final ObjectParser<SimpleHeuristic, Void> PARSER = new ObjectParser<>(NAME, SimpleHeuristic::new);
 
+    /**
+     * Instantiates this object.
+     */
     public SimpleHeuristic() {}
 
     /**
      * Read from a stream.
+     *
+     * @param in Input to read the value from
      */
     public SimpleHeuristic(StreamInput in) throws IOException {
         // Nothing to read
diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java
new file mode 100644
index 0000000000000..20809857273c4
--- /dev/null
+++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Example classes demonstrating the use of a custom significance heuristic.
+ */
+package org.opensearch.example.customsigheuristic;
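`SimpleHeuristic` carries no state, so its stream constructor reads nothing and a serialization round trip is trivial; the name-to-reader mapping itself is handled by the server's NamedWriteable registry via the `SignificanceHeuristicSpec` registered above. A small sketch under those assumptions, including the usual `getWriteableName()` override returning `NAME`, which is not shown in this hunk:

```java
import org.opensearch.common.io.stream.BytesStreamOutput;
import org.opensearch.common.io.stream.StreamInput;

// A minimal round-trip sketch. SimpleHeuristic has no fields, so writeTo()
// moves no bytes and the StreamInput constructor reads none ("Nothing to read").
public class SimpleHeuristicRoundTrip {
    public static void main(String[] args) throws Exception {
        final SimpleHeuristic original = new SimpleHeuristic();
        try (BytesStreamOutput out = new BytesStreamOutput()) {
            original.writeTo(out); // SignificanceHeuristic is Writeable
            try (StreamInput in = out.bytes().streamInput()) {
                final SimpleHeuristic copy = new SimpleHeuristic(in);
                // Both instances answer to the registered name "simple".
                assert SimpleHeuristic.NAME.equals(copy.getWriteableName());
            }
        }
    }
}
```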
diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java
index da154609e5f2f..05f26a8e401e1 100644
--- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java
+++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java
@@ -41,8 +41,16 @@
 import java.io.IOException;
 import java.util.Locale;
 
+/**
+ * A custom suggester supporting suggestion-based search.
+ */
 public class CustomSuggester extends Suggester<CustomSuggestionContext> {
 
+    /**
+     * Instantiate this object.
+     */
+    public CustomSuggester() {}
+
     // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123
     @Override
     public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute(
diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java
index 5706b654ffbde..b71a90e700d21 100644
--- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java
+++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java
@@ -38,7 +38,16 @@
 import java.util.Collections;
 import java.util.List;
 
+/**
+ * Plugin demonstrating custom suggestion-based search.
+ */
 public class CustomSuggesterPlugin extends Plugin implements SearchPlugin {
+
+    /**
+     * Instantiate this class.
+     */
+    public CustomSuggesterPlugin() {}
+
     @Override
     public List<SuggesterSpec<?, ?>> getSuggesters() {
         return Collections.singletonList(
diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java
index 50ee700c3a253..f35fde03d261f 100644
--- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java
+++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java
@@ -46,19 +46,43 @@
 
 import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg;
 
+/**
+ * The suggestion responses corresponding with the suggestions in the request.
+ */
 public class CustomSuggestion extends Suggest.Suggestion<CustomSuggestion.Entry> {
 
+    /**
+     * An integer representing the type of the suggestion, formerly used for internal serialization over the network.
+     *
+     * This class is now serialized as a NamedWriteable and this value only remains for backwards compatibility.
+     */
     public static final int TYPE = 999;
 
+    /**
+     * A meaningless value used to test that plugin suggesters can add fields to their Suggestion types.
+     */
     public static final ParseField DUMMY = new ParseField("dummy");
 
     private String dummy;
 
+    /**
+     * Instantiate this object with the specified name, size, and value for the configured field.
+     *
+     * @param name The name of the suggestion as defined in the request.
+     * @param size The suggested term size specified in the request, only used for merging shard responses.
+     * @param dummy The value for the added custom dummy field.
+     */
    public CustomSuggestion(String name, int size, String dummy) {
         super(name, size);
         this.dummy = dummy;
     }
 
+    /**
+     * Instantiate this object from a stream.
+     *
+     * @param in Input to read the value from
+     * @throws IOException on failure to read the value.
+     */
     public CustomSuggestion(StreamInput in) throws IOException {
         super(in);
         dummy = in.readString();
@@ -85,6 +109,8 @@ public int getWriteableType() {
      *
      * This can't be serialized to xcontent because Suggestions appear in xcontent as an array of entries, so there is no place
      * to add a custom field. But we can still use a custom field internally and use it to define a Suggestion's behavior
+     *
+     * @return the dummy value.
      */
     public String getDummy() {
         return dummy;
@@ -95,12 +121,23 @@ protected Entry newEntry(StreamInput in) throws IOException {
         return new Entry(in);
     }
 
+    /**
+     * Instantiate a CustomSuggestion from XContent.
+     *
+     * @param parser The XContent parser to use
+     * @param name The name of the suggestion
+     * @return A new CustomSuggestion instance for the specified name.
+     * @throws IOException on deserialization error.
+     */
     public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException {
         CustomSuggestion suggestion = new CustomSuggestion(name, -1, null);
         parseEntries(parser, suggestion, Entry::fromXContent);
         return suggestion;
     }
 
+    /**
+     * Represents a part from the suggest text with suggested options.
+     */
     public static class Entry extends Suggest.Suggestion.Entry<CustomSuggestion.Entry.Option> {
 
         private static final ObjectParser<Entry, Void> PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new);
 
@@ -117,13 +154,30 @@ public static class Entry extends Suggest.Suggestion.Entry
 otherEntry) {
@@ -150,6 +206,8 @@ protected void merge(Suggest.Suggestion.Entry