diff --git a/.asf.yaml b/.asf.yaml index 602feeb246a..57999445552 100644 --- a/.asf.yaml +++ b/.asf.yaml @@ -28,6 +28,7 @@ github: branch_9_4: {} branch_9_5: {} branch_9_6: {} + branch_9_7: {} branch_9x: {} protected_tags: diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 960021a380c..60338d389cf 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -37,7 +37,7 @@ Please review the following and check all that apply: - [ ] I have reviewed the guidelines for [How to Contribute](https://github.com/apache/solr/blob/main/CONTRIBUTING.md) and my code conforms to the standards described there to the best of my ability. - [ ] I have created a Jira issue and added the issue ID to my pull request title. -- [ ] I have given Solr maintainers [access](https://help.github.com/en/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork) to contribute to my PR branch. (optional but recommended) +- [ ] I have given Solr maintainers [access](https://help.github.com/en/articles/allowing-changes-to-a-pull-request-branch-created-from-a-fork) to contribute to my PR branch. (optional but recommended, not available for branches on forks living under an organisation) - [ ] I have developed this patch against the `main` branch. - [ ] I have run `./gradlew check`. - [ ] I have added tests for my changes. diff --git a/.github/renovate.json b/.github/renovate.json index d59c413c1be..5cf53a4c5fd 100644 --- a/.github/renovate.json +++ b/.github/renovate.json @@ -3,8 +3,8 @@ "description": "Runs Renovate with solrbot, see dev-docs/dependency-upgrades.adoc for more", "enabled": true, "dependencyDashboard": false, - "enabledManagers": ["gradle"], - "includePaths": ["versions.*", "build.gradle"], + "enabledManagers": ["gradle", "github-actions"], + "includePaths": ["versions.*", "build.gradle", ".github/workflows/*"], "postUpgradeTasks": { "commands": ["./gradlew updateLicenses"], "fileFilters": ["solr/licenses/*.sha1"], @@ -14,7 +14,11 @@ { "description": "Fix for non-semantic versions for older artifacts", "matchDatasources": ["maven"], - "matchPackageNames": ["commons-collections:commons-collections", "commons-io:commons-io", "commons-lang:commons-lang"], + "matchPackageNames": [ + "commons-collections:commons-collections", + "commons-io:commons-io", + "commons-lang:commons-lang" + ], "versioning": "regex:^(?\\d{1,4})\\.(?\\d+)(\\.(?\\d+))?$" }, { @@ -55,29 +59,33 @@ }, { "description": "Changelog for commons-io", - "matchSourceUrls": ["https://gitbox.apache.org/repos/asf?p=commons-io.git"], - "customChangelogUrl": "https://commons.apache.org/proper/commons-io/changes-report.html" + "matchSourceUrls": [ + "https://gitbox.apache.org/repos/asf?p=commons-io.git" + ], + "changelogUrl": "https://commons.apache.org/proper/commons-io/changes-report.html" }, { "description": "Changelog for zookeeper", "matchSourceUrls": ["https://gitbox.apache.org/repos/asf/zookeeper.git"], - "customChangelogUrl": "https://zookeeper.apache.org/releases.html" + "changelogUrl": "https://zookeeper.apache.org/releases.html" }, { "description": "Changelog for commons-compress", - "matchSourceUrls": ["https://gitbox.apache.org/repos/asf?p=commons-compress.git"], - "customChangelogUrl": "https://commons.apache.org/proper/commons-compress/changes-report.html" + "matchSourceUrls": [ + "https://gitbox.apache.org/repos/asf?p=commons-compress.git" + ], + "changelogUrl": "https://commons.apache.org/proper/commons-compress/changes-report.html" }, { "description": "Changelog 
for commons-configuration", - "matchSourceUrls": ["https://gitbox.apache.org/repos/asf?p=commons-configuration.git"], - "customChangelogUrl": "https://commons.apache.org/proper/commons-configuration/changes-report.html" + "matchSourceUrls": [ + "https://gitbox.apache.org/repos/asf?p=commons-configuration.git" + ], + "changelogUrl": "https://commons.apache.org/proper/commons-configuration/changes-report.html" } ], - "schedule": [ - "* * * * 0" - ], - "prConcurrentLimit": 50, + "schedule": ["* * * * *"], + "prConcurrentLimit": 100, "prHourlyLimit": 10, - "stabilityDays": 5 + "minimumReleaseAge": "5 days" } diff --git a/.github/workflows/bin-solr-test.yml b/.github/workflows/bin-solr-test.yml index a832bdcc541..a0a33ccc51a 100644 --- a/.github/workflows/bin-solr-test.yml +++ b/.github/workflows/bin-solr-test.yml @@ -4,10 +4,13 @@ on: pull_request: branches: - 'main' + - 'branch_*' paths: - '.github/workflows/bin-solr-test.yml' - 'solr/bin/**' - 'solr/packaging/**' + - 'solr/core/src/java/org/apache/solr/cli/**' + - 'solr/prometheus-exporter/**' jobs: test: @@ -16,20 +19,22 @@ jobs: runs-on: ubuntu-latest env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} steps: # Setup - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 java-package: jdk + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 - name: Grant execute permission for gradlew run: chmod +x gradlew - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: | ~/.gradle/caches @@ -41,8 +46,7 @@ jobs: run: ./gradlew integrationTests - name: Archive logs if: ${{ failure() }} - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: logs path: solr/packaging/build/test-output - diff --git a/.github/workflows/docker-test.yml b/.github/workflows/docker-test.yml index dc8cc9df728..0b00a6ab308 100644 --- a/.github/workflows/docker-test.yml +++ b/.github/workflows/docker-test.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - 'main' + - 'branch_*' paths: - '.github/workflows/docker-test.yml' - 'solr/bin/**' @@ -20,22 +21,24 @@ jobs: env: SOLR_DOCKER_IMAGE_REPO: github-pr/solr SOLR_DOCKER_IMAGE_TAG: ${{github.event.number}} - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} steps: # Setup - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 java-package: jdk + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 - name: Install ACL run: sudo apt-get install acl - name: Grant execute permission for gradlew run: chmod +x gradlew - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: | ~/.gradle/caches diff --git a/.github/workflows/gradle-precommit.yml b/.github/workflows/gradle-precommit.yml index cd2ce38f108..dcc55ead323 100644 --- a/.github/workflows/gradle-precommit.yml +++ b/.github/workflows/gradle-precommit.yml @@ -3,7 +3,8 @@ name: Gradle Precommit on: pull_request: branches: - - '**' + - 'main' + - 'branch_*' jobs: test: @@ -12,23 +13,26 @@ jobs: runs-on: ubuntu-latest env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} steps: # Setup - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set 
up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 java-package: jdk + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 + - name: Grant execute permission for gradlew run: chmod +x gradlew - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: | ~/.gradle/caches @@ -40,4 +44,4 @@ jobs: - name: Run gradle check (without tests) run: ./gradlew check -x test -Ptask.times=true - - uses: gradle/wrapper-validation-action@v1 + - uses: gradle/wrapper-validation-action@v3 diff --git a/.github/workflows/solrj-test.yml b/.github/workflows/solrj-test.yml index 0941fcd41c7..1a0f6bfebde 100644 --- a/.github/workflows/solrj-test.yml +++ b/.github/workflows/solrj-test.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - 'main' + - 'branch_*' paths: - '.github/workflows/solrj-test.yml' - 'solr/solrj/**' @@ -15,20 +16,22 @@ jobs: runs-on: ubuntu-latest env: - GRADLE_ENTERPRISE_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} + DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }} steps: # Setup - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up JDK 11 - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: distribution: 'temurin' java-version: 11 java-package: jdk + - name: Setup Gradle + uses: gradle/actions/setup-gradle@v4 - name: Grant execute permission for gradlew run: chmod +x gradlew - - uses: actions/cache@v2 + - uses: actions/cache@v4 with: path: | ~/.gradle/caches diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index a6c2aa8a704..f8e30632059 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -29,10 +29,9 @@ jobs: stale-pr-label: "stale" # label to use when marking as stale stale-pr-message: > - This PR had no visible activity in the past 60 days, labeling it as stale. - Any new activity will remove the stale label. To attract more reviewers, please tag - someone or notify the dev@solr.apache.org mailing list. + This PR has had no activity for 60 days and is now labeled as stale. + Any new activity or converting it to draft will remove the stale label. + To attract more reviewers, please tag people who might be familiar with the code area and/or notify the dev@solr.apache.org mailing list. Thank you for your contribution! - # TODO: Increase budget after initial testing - operations-per-run: 30 # operations budget + operations-per-run: 100 # operations budget diff --git a/.github/workflows/tests-via-crave.yml b/.github/workflows/tests-via-crave.yml index 1a51a5eb393..91c5c3b4a5e 100644 --- a/.github/workflows/tests-via-crave.yml +++ b/.github/workflows/tests-via-crave.yml @@ -4,6 +4,7 @@ on: pull_request: branches: - 'main' + - 'branch_*' jobs: test: diff --git a/README.md b/README.md index 12fdc47b94b..920a4fa3005 100644 --- a/README.md +++ b/README.md @@ -18,8 +18,11 @@ # Welcome to the Apache Solr project! ----------------------------------- -Solr is the popular, blazing fast open source search platform for all your -enterprise, e-commerce, and analytics needs, built on [Apache Lucene](https://lucene.apache.org/). +Solr is the blazing-fast, open source, multi-modal search platform built on [Apache Lucene](https://lucene.apache.org/). +It powers full-text, vector, and geospatial search at many of the world's largest organizations. 
+ +[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/badge/icon?subject=Solr%20Artifacts)](https://ci-builds.apache.org/job/Solr/job/Solr-Artifacts-main/) +[![Build Status](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/badge/icon?subject=Solr%20Check)](https://ci-builds.apache.org/job/Solr/job/Solr-Check-main/) For a complete description of the Solr project, team composition, source code repositories, and other details, please see the Solr web site at diff --git a/build.gradle b/build.gradle index caf0884e954..3b3c0e0999e 100644 --- a/build.gradle +++ b/build.gradle @@ -22,7 +22,7 @@ plugins { id 'base' id 'com.palantir.consistent-versions' version '2.16.0' id 'org.owasp.dependencycheck' version '9.0.8' - id 'ca.cutterslade.analyze' version '1.9.1' + id 'ca.cutterslade.analyze' version '1.10.0' id 'de.thetaphi.forbiddenapis' version '3.7' apply false id 'de.undercouch.download' version '5.5.0' apply false id 'net.ltgt.errorprone' version '3.1.0' apply false @@ -132,6 +132,7 @@ apply from: file('gradle/testing/fail-on-no-tests.gradle') apply from: file('gradle/testing/fail-on-unsupported-jdk.gradle') apply from: file('gradle/testing/alternative-jdk-support.gradle') apply from: file('gradle/java/jar-manifest.gradle') +apply from: file('gradle/testing/retry-test.gradle') // Publishing and releasing apply from: file('gradle/maven/defaults-maven.gradle') diff --git a/dev-docs/FAQ.adoc b/dev-docs/FAQ.adoc index 230fd0f1e41..b25d81bd6a9 100644 --- a/dev-docs/FAQ.adoc +++ b/dev-docs/FAQ.adoc @@ -26,7 +26,7 @@ You can review instructions for running Solr in Docker in the xref:running-in-do === Whats the fastest build lifecycle for frontend work on Solr Admin? Run `gradle dev`, and then `cd ./packaging/build/dev/`. Fire up your cluster -via `bin/solr start -e cloud -noprompt` and then as you make changes to assets in `/solr/webapp/web`, +via `bin/solr start -e cloud --no-prompt` and then as you make changes to assets in `/solr/webapp/web`, run `gradle dev` to redeploy the web assets. Do a hard refresh in your browser to pick up your changes. @@ -97,4 +97,3 @@ If you don't yet have an account, you have to ask for one in the 'users' or 'dev * http://fucit.org/solr-jenkins-reports/failure-report.html * https://ge.apache.org/scans/tests?search.relativeStartTime=P90D&search.rootProjectNames=solr* * https://lists.apache.org[Solr mailing list archives especially builds] - diff --git a/dev-docs/ref-guide/asciidoc-syntax.adoc b/dev-docs/ref-guide/asciidoc-syntax.adoc index 866ef854757..5f32b895409 100644 --- a/dev-docs/ref-guide/asciidoc-syntax.adoc +++ b/dev-docs/ref-guide/asciidoc-syntax.adoc @@ -503,90 +503,88 @@ This means that if we want an entire section of content to be given a specific r TIP: For more on Roles in Asciidoctor, see https://docs.asciidoctor.org/asciidoc/latest/attributes/roles/[Role Attribute] in the Asciidoctor User Guide. -=== Creating Tabbed Sections -Hopefully a little bit of background on roles is helpful to understanding the rest of what we'll do to create a tabbed section in a page. - -See the Bootstrap docs on https://getbootstrap.com/docs/4.1/components/navs/#tabs[nav tabs] for details on how to use tabs and pills with Bootstrap. -As a quick overview, tabs in Bootstrap are defined like this: - -[source,html] ----- - - -
-<!-- Bootstrap nav-tabs markup: an unordered list of tab links <--1-->, a <div class="tab-content">
-     wrapper <--2-->, and one tab-pane <div> per section <--3--> ("Section 1" with "Some content.",
-     "Section 2" with "Some other content.") -->
----- -<1> This section creates an unordered list with a line item for each tab. -The `data-toggle` and `class` parameters are what tell Bootstrap how to render the content. -<2> Note the class defined here: `
<div class="tab-content">`.
-This defines that what follows is the content that will make up the panes of our tabs.
-We will need to define these in our document.
-<3> In our document, we need to delineate the separate sections of content that will make up each pane.
-
-We have created some custom JavaScript that will do part of the above for us if we assign the proper roles to the blocks of content that we want to appear in the tab panes.
-To do this, we can use Asciidoctor's block delimiters to define the tabbed content, and the content between the tab.
-
-. Define an "open block" (an unformatted content block), and give it the role `.dynamic-tabs`.
-An open block is defined by two hyphens on a line before the content that goes in the block, and two hyphens on a line after the content to end the block.
-We give a block a role by adding a period before the role name, like this:
+== Tabbed Sections
+Tabbed sections are supported via https://github.com/asciidoctor/asciidoctor-tabs[`@asciidoctor/tabs`].
+
+There are different ways to display tabbed sections via the asciidoctor tabs extension, but most of the time only the format of an example is needed.
+This format wraps each tab content into an example block.
+
+=== Referencing
+
+By providing an ID to the `[tabs]` block like below, you can control the ID used for referencing the tabbed section.
+An ID is also generated for each tab by appending the normalized tab label to the end of the `[tabs]` ID.
+This allows direct referencing of a specific tab, even if it is not currently selected.
+
+=== Tab Synchronization
+
+The IDs are also used for synchronizing the tab selection across the entire page.
+This synchronization requires the tab labels to be named the same, so that the generated ID is correctly recognized.
+To enable this feature, add `:tabs-sync-option:` below the page title.
+
+[source,asciidoc]
+----
+= Page Title
+:tabs-sync-option:
+...
+----
+
+It is also possible to group tabbed sections to sync only a set of tabs, or disable the syncing entirely.
+See the https://github.com/asciidoctor/asciidoctor-tabs?tab=readme-ov-file#syntax[@asciidoctor/tabs syntax] for more information.
+
+=== Tabs Example
+
+. Define a tabs block and give it an optional ID.
+
-[source,text]
+[source,asciidoc]
 ----
-[.dynamic-tabs]
---
-The stuff we'll put in the tabs will go here.
---
+[tabs#tab-section-id]
 ----
-. Next we need to define the content for the tabs between the open block delimiters.
-.. We enclose each tab pane in another type of block, and "example" block.
-This allows us to include any kind of content in the block and be sure all of the various types of elements (heading, text, examples, etc.) are included in the pane.
-.. We give the example block another role, `tab-pane`, and we must make sure that each pane has a unique ID.
-We assign IDs with a hash mark (\#) followed by the ID value (`#sect1`).
-.. We also need to define a label for each tab.
-We do this by adding another role, `tab-label` to the content we want to appear as the name of the tab.
-.. In the end one pane will look like this:
+. Next, we add an example block to wrap the tab's content with an outline. This is used for improved readability.
+
-[source,text]
+[source,asciidoc]
 ----
-[example.tab-pane#sect1] <--1-->
-==== <--2-->
-[.tab-label]*Section 1* <--3-->
-My content...
-==== +[tabs#tab-section-id] +====== + +====== ---- -<1> When we define the example block with `[example]`, it's followed by `.tab-pane#sect1` as the class (each class separated by a period `.`) and the ID defined in the tab definition earlier. -Those will become the classes (`class="tab-pane active"`) and ID (`id="sect1"`) in the resulting HTML. -<2> Example blocks are delimited by 4 equal signs (`====`) before and after the enclosed content. -<3> The words "Section 1" will appear in the HTML page as the label for this tab. -.. Create `[example.tab-pane#id]` sections for each tab, until you finally end up with something that looks like this: +. Inside the example block, we can add our tabs. Each tab label is suffixed with `::` and the tab's content is wrapped inside a listing block. + -[source,text] +[source,asciidoc] ---- -[.dynamic-tabs] --- -[example.tab-pane#sect1] +Tab 1:: ++ ==== -[.tab-label]*Section 1* -My content... +The first tab's content. ==== -[example.tab-pane#sect2] +Tab 2:: ++ ==== -[.tab-label]*Section 2* -My content... +The second tab's content. ==== --- ---- + +The final result will look something like this: + +[source,asciidoc] +---- +[tabs#tab-section-id] +====== +Tab 1:: ++ +==== +The first tab's content. +==== + +Tab 2:: ++ +==== +The second tab's content. +==== +====== +---- + +The tab section can be referenced via `#tab-section-id`, and each tab can be referenced via `#tab-section-id-tab-1` and `#tab-section-id-tab-2` accordingly. diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf index 5c3518128d9..acfbd01dbf6 100644 --- a/dev-tools/doap/solr.rdf +++ b/dev-tools/doap/solr.rdf @@ -68,6 +68,13 @@ + + + solr-9.7.0 + 2024-09-09 + 9.7.0 + + solr-9.6.1 diff --git a/dev-tools/scripts/cloud.sh b/dev-tools/scripts/cloud.sh index 6f4f4bb548b..3ab979d5ac3 100755 --- a/dev-tools/scripts/cloud.sh +++ b/dev-tools/scripts/cloud.sh @@ -352,7 +352,7 @@ stop() { SOLR=${CLUSTER_WD}/$(find . -maxdepth 1 -name 'solr*' -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(solr.*)/\1/' | head -n1) popd - "${SOLR}/bin/solr" stop -all + "${SOLR}/bin/solr" stop --all } ######################## diff --git a/dev-tools/scripts/releaseWizard.py b/dev-tools/scripts/releaseWizard.py index a13d7ec2cfe..4520a3c832a 100755 --- a/dev-tools/scripts/releaseWizard.py +++ b/dev-tools/scripts/releaseWizard.py @@ -1123,7 +1123,7 @@ def file_to_string(filename): return f.read().strip() def download_keys(): - download('KEYS', "https://archive.apache.org/dist/solr/KEYS", state.config_path) + download('KEYS', "https://downloads.apache.org/solr/KEYS", state.config_path) def keys_downloaded(): return os.path.exists(os.path.join(state.config_path, "KEYS")) diff --git a/dev-tools/scripts/releaseWizard.yaml b/dev-tools/scripts/releaseWizard.yaml index b604c0cef09..5de7067d331 100644 --- a/dev-tools/scripts/releaseWizard.yaml +++ b/dev-tools/scripts/releaseWizard.yaml @@ -87,7 +87,7 @@ templates: The Solr PMC is pleased to announce the release of Apache Solr {{ release_version }}. - Solr is the popular, blazing fast, open source NoSQL search platform from the Apache Solr project. Its major features include powerful full-text search, hit highlighting, faceted search, dynamic clustering, database integration, rich document handling, and geospatial search. Solr is highly scalable, providing fault tolerant distributed search and indexing, and powers the search and navigation features of many of the world's largest internet sites. 
+ Solr is the blazing-fast, open source, multi-modal search platform built on Apache Lucene. It powers full-text, vector, analytics, and geospatial search at many of the world's largest organizations. Other major features include Kubernetes and docker integration, streaming, highlighting, faceting, and spellchecking. Solr {{ release_version }} is available for immediate download at: @@ -689,7 +689,7 @@ groups: {% if release_type == 'major' -%} . Change name of version `main ({{ release_version_major }}.0)` into `{{ release_version_major }}.0` {%- endif %} - . Create a new (unreleased) version `{{ get_next_version }}` + . Create a new (unreleased) version `{{ release_version_major }}.{{ release_version_minor + 1 }}` types: - major - minor diff --git a/gradle/ge.gradle b/gradle/develocity.gradle similarity index 92% rename from gradle/ge.gradle rename to gradle/develocity.gradle index a37660edde6..09aabbf3a0e 100644 --- a/gradle/ge.gradle +++ b/gradle/develocity.gradle @@ -20,14 +20,14 @@ def isCIBuild = System.getenv().keySet().any { it ==~ /(?i)((JENKINS|HUDSON)(_\w // https://docs.gradle.com/enterprise/gradle-plugin/ -gradleEnterprise { +develocity { server = "https://ge.apache.org" + projectId = "solr" buildScan { - capture { taskInputFiles = true } uploadInBackground = !isCIBuild - publishAlways() - publishIfAuthenticated() + + publishing.onlyIf { it.authenticated } obfuscation { ipAddresses { addresses -> addresses.collect { address -> "0.0.0.0"} } } @@ -58,7 +58,7 @@ buildCache { enabled = !isCIBuild } - remote(gradleEnterprise.buildCache) { + remote(develocity.buildCache) { enabled = false } } \ No newline at end of file diff --git a/gradle/documentation/render-javadoc.gradle b/gradle/documentation/render-javadoc.gradle index bd720b61615..bd90ad35426 100644 --- a/gradle/documentation/render-javadoc.gradle +++ b/gradle/documentation/render-javadoc.gradle @@ -370,7 +370,7 @@ class RenderJavadocTask extends DefaultTask { opts << [ '-linkoffline', url, dir ] } - opts << [ '--release', 11 ] + opts << [ '--release', project.minJavaVersion.toString() ] opts << '-Xdoclint:all,-missing' // Increase Javadoc's heap. 
diff --git a/gradle/testing/randomization.gradle b/gradle/testing/randomization.gradle index eb244a6ea21..9c809fc69e9 100644 --- a/gradle/testing/randomization.gradle +++ b/gradle/testing/randomization.gradle @@ -126,7 +126,6 @@ configure(allprojects.findAll {project -> project.path.startsWith(":solr") }) { plugins.withType(JavaPlugin) { ext { testOptions += [ - [propName: 'solr.directoryFactory', value: "org.apache.solr.core.MockDirectoryFactory", description: "Solr directory factory."], [propName: 'tests.src.home', value: null, description: "See SOLR-14023."], [propName: 'solr.tests.use.numeric.points', value: null, description: "Point implementation to use (true=numerics, false=trie)."], ] diff --git a/gradle/testing/randomization/policies/solr-tests.policy b/gradle/testing/randomization/policies/solr-tests.policy index 9dba0b9885d..dae3f218ec3 100644 --- a/gradle/testing/randomization/policies/solr-tests.policy +++ b/gradle/testing/randomization/policies/solr-tests.policy @@ -146,6 +146,7 @@ grant { // Needed by JWT integration tests & S3 tests permission java.lang.RuntimePermission "setFactory"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.pkcs12.default"; permission javax.xml.bind.JAXBPermission "setDatatypeConverter"; diff --git a/gradle/testing/retry-test.gradle b/gradle/testing/retry-test.gradle new file mode 100644 index 00000000000..4759b3fe2cf --- /dev/null +++ b/gradle/testing/retry-test.gradle @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +allprojects { + plugins.withType(JavaPlugin) { + tasks.withType(Test) { + develocity.testRetry { + if (isCIBuild) { + // For more info on params: https://github.com/gradle/test-retry-gradle-plugin + maxRetries = 3 + maxFailures = 10 + failOnPassedAfterRetry = true + } + } + } + } +} \ No newline at end of file diff --git a/gradle/validation/forbidden-apis/defaults.all.txt b/gradle/validation/forbidden-apis/defaults.all.txt index a7647ddbcb7..2faad099236 100644 --- a/gradle/validation/forbidden-apis/defaults.all.txt +++ b/gradle/validation/forbidden-apis/defaults.all.txt @@ -83,3 +83,9 @@ java.util.logging.** @defaultMessage Use List.sort(Comparator) instead of Collections.sort(List, Comparator) please. java.util.Collections#sort(java.util.List, java.util.Comparator) + +@defaultMessage Use URI.toURL() to construct an instance of URL. +java.net.URL#(**) + +@defaultMessage Use Locale.Builder instead. +java.util.Locale#(**) \ No newline at end of file diff --git a/gradle/validation/jar-checks.gradle b/gradle/validation/jar-checks.gradle index c3372b45c06..d416a9561fd 100644 --- a/gradle/validation/jar-checks.gradle +++ b/gradle/validation/jar-checks.gradle @@ -20,6 +20,10 @@ // 2) notice file // 3) checksum validation/ generation. 
+// WARNING: The tasks in this file share internal state between tasks without using files. +// Because of this all tasks here must always execute together, so they cannot define task outputs. +// TODO: Rewrite the internal state to use state files containing the ext.jarInfos and its referencedFiles + import org.apache.commons.codec.digest.DigestUtils // This should be false only for debugging. @@ -210,13 +214,6 @@ subprojects { description = "Validate license and notice files of dependencies" dependsOn collectJarInfos - def outputFileName = 'validateJarLicenses' - inputs.dir(file(project.rootDir.path + '/solr/licenses')) - .withPropertyName('licenses') - .withPathSensitivity(PathSensitivity.RELATIVE) - outputs.file(layout.buildDirectory.file(outputFileName)) - .withPropertyName('validateJarLicensesResult') - doLast { def errors = [] jarInfos.each { dep -> @@ -262,8 +259,7 @@ subprojects { } } } - def f = new File(project.buildDir.path + "/" + outputFileName) - f.text = errors + if (errors) { def msg = "Certain license/ notice files are missing:\n - " + errors.join("\n - ") if (failOnError) { diff --git a/settings.gradle b/settings.gradle index 69fc206de75..c4812ea0478 100644 --- a/settings.gradle +++ b/settings.gradle @@ -23,11 +23,11 @@ pluginManagement { } plugins { - id 'com.gradle.enterprise' version '3.15.1' - id 'com.gradle.common-custom-user-data-gradle-plugin' version '1.12' + id 'com.gradle.develocity' version '3.17.6' + id 'com.gradle.common-custom-user-data-gradle-plugin' version '2.0.2' } -apply from: file('gradle/ge.gradle') +apply from: file('gradle/develocity.gradle') rootProject.name = "solr-root" diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt index be2cba6139f..a8b54deb3ab 100644 --- a/solr/CHANGES.txt +++ b/solr/CHANGES.txt @@ -69,6 +69,8 @@ Deprecation Removals * SOLR-17313: Remove deprecated class SolrLogPostTool. PostLogsTool and the bin/solr post command has replaced this. (Eric Pugh) +* SOLR-17400: Remove deprecated script snapshotcli.sh. bin/solr snapshot-* commands have replaced this. (Eric Pugh) + Dependency Upgrades --------------------- (No changes) @@ -100,10 +102,73 @@ Other Changes * SOLR-17279: Introduce SecurityJson.java file to Test Framework to consolidate setting up authentication in tests. (Rudy Seitz via Eric Pugh) +================== 9.8.0 ================== +New Features +--------------------- +(No changes) + +Improvements +--------------------- +* SOLR-17397: SkipExistingDocumentsProcessor now functions correctly with child documents. (Tim Owens via Eric Pugh) + +* SOLR-17180: Deprecate snapshotscli.sh in favour of bin/solr snapshot sub commands. Now able to manage Snapshots from the CLI. HDFS module specific snapshot script now ships as part of that module in the modules/hdfs/bin directory. (Eric Pugh) + +* SOLR-17419: An alternate ShardHandlerFactory is now available, ParallelHttpShardHandlerFactory, + which may help reduce distributed-search latency in collections with many shards, especially + when PKI is used between nodes. (Jason Gerlowski) + +* SOLR-17382: Deprecate -a and -addlopts in favour of --jvm-opts for passing options into the JVM in bin/solr. (Eric Pugh, Christos Malliaridis) + +* SOLR-17431: Deprecate -p parameter where it doesn't refer to a port in bin/solr. (Eric Pugh, Christos Malliaridis) + +Optimizations +--------------------- +* SOLR-14985: Solrj CloudSolrClient with Solr URLs had serious performance regressions (since the + beginning?) 
in which its collection state cache was not being used, resulting in many extra + requests to Solr for cluster information. (Aparna Suresh, shalin, David Smiley) + +* SOLR-17102: The VersionBucket indexing lock mechanism was replaced with something just as fast yet + that which consumes almost no memory, saving 1MB of memory per SolrCore. (David Smiley) + +* SOLR-17381: Make CLUSTERSTATUS request configurable to improve performance by allowing retrieval of specific information, + reducing unnecessary data fetching. (Aparna Suresh, David Smiley) + +* SOLR-17396: Reduce thread contention in ZkStateReader.getCollectionProperties(). (Aparna Suresh, David Smiley, Paul McArthur) + +* SOLR-17408: COLSTATUS command was fetching details remotely from replicas when this information + wasn't asked for. (Mathieu Marie) + +* SOLR-3913: Optimize PostTool to call just optimize when both commit and optimize requested. (Eric Pugh) + +Bug Fixes +--------------------- +* SOLR-12429: Uploading a configset with a symbolic link produces a IOException. Now a error message to user generated instead. (Eric Pugh) + +* SOLR-17421: Fixed a rare case where overseer was stuck after a failure when changing overseer to honor the + node role for preferred overseer. (Pierre Salagnac) + +* SOLR-17416: Fixed ExportHandler bug that silently suppressed errors and returned partial results in some situations (hossman) + +* SOLR-16254: Clarify when a bin/solr create needs to be run on the same server as Solr. (Eric Pugh) + +Dependency Upgrades +--------------------- +(No changes) + +Other Changes +--------------------- +* SOLR-17359: Move Zk Arg parsing into Java Code from bin/solr scripts. (Eric Pugh, Rahul Goswami) + +* SOLR-17399: Replace the use of the deprecated java.util.Locale constructor with Locale Builder API. (Sanjay Dutt) + +* SOLR-17142: Fix Gradle build sometimes gives spurious "unreferenced license file" warnings. (Uwe Schindler) + +* SOLR-11318: Introduce unit testing for AssertTool. (Eric Pugh, Jason Gerlowski) + ================== 9.7.0 ================== New Features --------------------- -* SOLR-13350: Multithreaded search execution (Ishan Chattopadhyaya, Mark Miller, Christine Poerschke, David Smiley, noble) +* SOLR-13350, SOLR-17298: Opt-in multithreaded search execution (Ishan Chattopadhyaya, Mark Miller, Christine Poerschke, David Smiley, noble, Gus Heck) * SOLR-17192: Put an UpdateRequestProcessor-enforced soft-limit on the number of fields allowed in a core. The `NumFieldLimitingUpdateRequestProcessorFactory` limit may be adjusted by raising the factory's `maxFields` setting, toggled in and out of "warning-only" mode using the `warnOnly` setting, or disabled entirely @@ -113,8 +178,29 @@ New Features * SOLR-17277: Circuit breakers may now be configured in a "soft" or "warnOnly" mode in order to more easily test out new thresholds. Soft breakers will log out a message on each relevant request when tripped, but will not otherwise impact or short circuit the requests. (Jason Gerlowski) +* SOLR-17335: New "vectorSimilarity" QParser for matching documents mased on a minimum vector similarity threshold. (hossman) + +* SOLR-10654: Introduce output of Prometheus metrics directly from Solr. (Matthew Biscocho via David Smiley) + +* SOLR-17195: Configsets now include a `minPrefixQueryTermLength` setting, which instructs Solr to reject prefix queries whose prefixes are "too short". This can + be used as one line of defense against "runaway wildcard queries" consuming too many resources. 
The setting is disabled ('-1') in the default configset but can be + overridden with a property ('solr.query.minPrefixLength'). Users may also override their collection-wide setting for individual queries by providing a + `minPrefixQueryTermLength` local-param. (Jason Gerlowski, David Smiley) + +* SOLR-10255: Add support for docValues to solr.BinaryField. (Alexey Serba via Mikhail Khludnev, David Smiley) + +* SOLR-17276: Prometheus Exporter: now scrapes metrics at a fixed rate instead of delay. (Rafał Harabień) + Improvements --------------------- +* SOLR-10808, SOLR-12963: The Solr schema version has been increased to 1.7. + Starting in schema version 1.7, most fields/fieldTypes that support docValues will have them enabled by default. + These field types include primitive (Numeric, Date, Bool, String, Enum, UUID), sorting (SortableTextField, SortableBinaryField, CollationField, ICUCollationField) and LatLonPointSpacialField. + This behavior can be reverted by setting the 'docValues' parameter for a field or a field type to false, the default for schema versions 1.6 and below. + Also in schema version 1.7, all fields/fieldTypes will be unable to be uninverted by default. + This behavior can be reverted by setting the 'uninvertible' parameter for a field or a field type to true, the default for schema versions 1.6 and below. + (Houston Putman, hossman) + * SOLR-17137: Enable Prometheus exporter to communicate with SSL protected Solr. (Eivind Bergstøl via Eric Pugh) * SOLR-16921: use -solrUrl to derive the zk host connection for bin/solr zk subcommands (Eric Pugh) @@ -129,6 +215,30 @@ Improvements * SOLR-17109: Give security manager explicit read access to sharedLib (Tomás Fernández Löbbe via Eric Pugh) +* SOLR-17331: OrderedNodePlacementPlugin will give an even more optimal replica placements during ReplicaMigration commands (Houston Putman, Yohann Callea) + +* SOLR-15591: Make using debugger in Solr easier by avoiding NPE in ExternalPaths.determineSourceHome. (@charlygrappa via Eric Pugh) + +* SOLR-16824: Adopt Linux standard pattern of -- for long option commands, and make all commands "kebab" formatting. I.e -zkHost is now -zk-host. The old parameters + such as -zkHost continue to be supported in the 9.x line of Solr. -u is now used to specify user credentials everywhere, this only impacts the bin/solr assert + commands "same user" check which has -u as the short form of --same-user. (Eric Pugh, janhoy, Jason Gerlowski, Christos Malliaridis) + +* SOLR-17346: Synchronise stopwords from snowball with those in Lucene (Alastair Porter via Houston Putman) + +* SOLR-16198: Introduce tabbed sections again in the Ref Guide. (Christos Malliaridis via Eric Pugh) + +* SOLR-17160: Core Admin "async" request status tracking is no longer capped at 100; it's 10k. + Statuses are now removed 5 minutes after the read of a completed/failed status. Helps collection + async backup/restore and other operations scale to 100+ shards. (Pierre Salagnac, David Smiley) + +* SOLR-10808 : The Solr schema version has been increased to 1.7. Since schema version 1.7, all fields/fieldTypes that + support docValues will have them enabled by default. This behavior can be reverted by setting + 'docValues' parameter for a field or a field type to false, the default for schema versions 1.6 and below. (Houston Putman) + +* SOLR-17418: Streamline ConfigSet modification logic. (Houston Putman, Liu Huajin) + +* SOLR-17434: Avoid exposing Solr's IP in the Location header. 
(David Smiley) + Optimizations --------------------- * SOLR-17257: Both Minimize Cores and the Affinity replica placement strategies would over-gather @@ -136,6 +246,18 @@ Optimizations * SOLR-17099: Do not return spurious tags when fetching metrics from a Solr node to another. (Pierre Salagnac) +* SOLR-17269, SOLR-17386: Prevent the "Coordinator node" feature from registering synthetic cores in ZooKeeper (ellaeln, Patson Luk, David Smiley, Christine Poerschke) + +* SOLR-17330: When not set, loadOnStartup defaults to true, which is the default choice for a core. (Pierre Salagnac via Eric Pugh) + +* SOLR-16677: Update Solr to use new Lucene 9.5 storedFields() API. This removes the use of ThreadLocal for + stored field state, reducing heap usage especially for high-core-count, high-field-count, high-thread-count + cases (Vinayak Hegde, Christine Poerschke, Kevin Risden, David Smiley, Michael Gibney) + +* SOLR-17349: SolrDocumentFetcher should always skip lazy field loading overhead if documentCache==null (Michael Gibney) + +* SOLR-17340: Add cache on top of system metrics BeanInfos to make calls to /admin/info/system faster (Pierre Salagnac) + Bug Fixes --------------------- * SOLR-12813: subqueries should respect basic auth. (Rudy Seitz via Eric Pugh) @@ -161,9 +283,28 @@ Bug Fixes to the `` element in `solrconfig.xml`, which has long been silently ignored, will now be respected (Michael Gibney, David Smiley) +* SOLR-17255: Fix bugs in SolrParams.toLocalParamsString() (hossman) + +* SOLR-17333: Rate-limiting feature: fix live-update of config (Michael Gibney) + +* SOLR-17367: Restore the use of -params option to PostTool. (Bostoi via Eric Pugh) + +* SOLR-17369: Fix "flags" usage in FunctionQParser that caused some issues in vectorSimilarity() with BYTE vector constants (hossman) + +* SOLR-17337: Display all custom distributed stages in debug output. (Torsten Bøgh Köster, Christine Poerschke) + +* SOLR-17394: Detect and handle non-200 HTTP status codes for requests made by IndexFetcher (Jason Gerlowski) + +* SOLR-17391: Fixed performance regression of misconfigured threadpools from SOLR-16879 (Solr 9.4). + Shard splits and concurrent/large collection backup/restore performance was serial. UpdateLog + replay was a little suboptimal in thread usage too. (Pierre Salagnac, Hakan Özler, David Smiley) + +* SOLR-17417: Remove unnecessary code in PKIAuthPlugin and HttpSolrCall (Houston Putman, janhoy, Liu Huajin) + Dependency Upgrades --------------------- (No changes) +* SOLR-17325: Upgrade Lucene to 9.11.1 (hossman) Other Changes --------------------- @@ -176,6 +317,24 @@ Other Changes * SOLR-16503: Use Jetty HTTP2 for SyncStrategy and PeerSyncWithLeader for "recovery" operations (Sanjay Dutt, David Smiley) +* SOLR-16796: Include cyclonedx SBOMs with maven artifacts (Arnout Engelen, Houston Putman, Kevin Risden) + +* SOLR-17321: Remove Deprecated URL and replace it with URI in Preparation for Java 21 (Sanjay Dutt, David Smiley, Uwe Schindler) + +* PR#2524: QueryResult refactoring so that it's only returned from SolrIndexSearcher instead of + being provided to it. Deprecated APIs in 9x; should be removed later. (David Smiley) + +* SOLR-17347: EnvUtils: Removed "env" methods, as they were problematic. Code should almost never + refer to env vars. (David Smiley) + +* SOLR-15831: Refactor bin/solr and bin/solr.cmd commands integration with AuthTool, DeleteTool, and PackageTool to delegate arg parsing to Java code. Removed limitation of PackageTool only being executed on an active Solr node. 
(Eric Pugh) + +* SOLR-17322: Once again allow rank queries to use custom TopDocsCollectors that operate on types that extend + ScoreDocs (covariant generic types) broken in Solr 9.0. (Stephen Woods via Christine Poerschke) + +* SOLR-16996: Update Solr Exporter for Prometheus cli to use commons-cli instead of argparse4j. (Christos Malliaridis via Eric Pugh) + + ================== 9.6.1 ================== Bug Fixes --------------------- diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterFileStoreApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterFileStoreApis.java index 07746b5e92d..11ee4e1e26d 100644 --- a/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterFileStoreApis.java +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/ClusterFileStoreApis.java @@ -64,5 +64,10 @@ UploadToFileStoreResponse uploadFile( SolrJerseyResponse deleteFile( @Parameter(description = "Path to a file or directory within the filestore") @PathParam("path") - String path); + String path, + @Parameter( + description = + "Indicates whether the deletion should only be done on the receiving node. For internal use only") + @QueryParam("localDelete") + Boolean localDelete); } diff --git a/solr/api/src/java/org/apache/solr/client/api/endpoint/NodeFileStoreApis.java b/solr/api/src/java/org/apache/solr/client/api/endpoint/NodeFileStoreApis.java new file mode 100644 index 00000000000..15f4a73cfb1 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/endpoint/NodeFileStoreApis.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.endpoint; + +import static org.apache.solr.client.api.util.Constants.OMIT_FROM_CODEGEN_PROPERTY; + +import io.swagger.v3.oas.annotations.Operation; +import io.swagger.v3.oas.annotations.Parameter; +import io.swagger.v3.oas.annotations.extensions.Extension; +import io.swagger.v3.oas.annotations.extensions.ExtensionProperty; +import jakarta.ws.rs.GET; +import jakarta.ws.rs.Path; +import jakarta.ws.rs.PathParam; +import jakarta.ws.rs.QueryParam; +import org.apache.solr.client.api.model.SolrJerseyResponse; + +/** + * V2 APIs for fetching filestore files, syncing them across nodes, or fetching related metadata. + */ +@Path("/node") +public interface NodeFileStoreApis { + @GET + @Operation( + summary = "Retrieve file contents or metadata from the filestore.", + tags = {"file-store"}, + // The response of this v2 API is highly variable based on the parameters specified. It can + // return raw (potentially binary) file data, a JSON-ified representation of that file data, + // metadata regarding one or multiple file store entries, etc. 
This variability can be + // handled on the Jersey server side, but would be prohibitively difficult to accommodate in + // our code-generation templates. Ideally, cosmetic improvements (e.g. splitting it up into + // multiple endpoints) will make this unnecessary in the future. But for now, the extension + // property below ensures that this endpoint is ignored entirely when doing code generation. + extensions = { + @Extension( + properties = {@ExtensionProperty(name = OMIT_FROM_CODEGEN_PROPERTY, value = "true")}) + }) + @Path("/files{path:.+}") + SolrJerseyResponse getFile( + @Parameter(description = "Path to a file or directory within the filestore") + @PathParam("path") + String path, + @Parameter( + description = + "If true, triggers syncing for this file across all nodes in the filestore") + @QueryParam("sync") + Boolean sync, + @Parameter(description = "An optional Solr node name to fetch the file from") + @QueryParam("getFrom") + String getFrom, + @Parameter(description = "Indicates that (only) file metadata should be fetched") + @QueryParam("meta") + Boolean meta); +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/FileStoreDirectoryListingResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/FileStoreDirectoryListingResponse.java new file mode 100644 index 00000000000..bcbc5f1f728 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/FileStoreDirectoryListingResponse.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; + +/** + * One of several possible responses from {@link + * org.apache.solr.client.api.endpoint.NodeFileStoreApis#getFile(String, Boolean, String, Boolean)} + */ +public class FileStoreDirectoryListingResponse extends SolrJerseyResponse { + @JsonProperty public Map files; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/FileStoreEntryMetadata.java b/solr/api/src/java/org/apache/solr/client/api/model/FileStoreEntryMetadata.java new file mode 100644 index 00000000000..5a9bc213452 --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/FileStoreEntryMetadata.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonAnyGetter; +import com.fasterxml.jackson.annotation.JsonAnySetter; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +/** Represents the metadata for a single filestore file or directory */ +public class FileStoreEntryMetadata { + @JsonProperty public String name; + @JsonProperty public Boolean dir; + @JsonProperty public Long size; + @JsonProperty public Date timestamp; + + private Map additionalMetadata = new HashMap<>(); + + @JsonAnyGetter + public Map unknownProperties() { + return additionalMetadata; + } + + @JsonAnySetter + public void setUnknownProperty(String field, Object value) { + additionalMetadata.put(field, value); + } +} diff --git a/solr/api/src/java/org/apache/solr/client/api/model/FileStoreJsonFileResponse.java b/solr/api/src/java/org/apache/solr/client/api/model/FileStoreJsonFileResponse.java new file mode 100644 index 00000000000..321faec01ec --- /dev/null +++ b/solr/api/src/java/org/apache/solr/client/api/model/FileStoreJsonFileResponse.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.api.model; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * One of several possible responses from {@link + * org.apache.solr.client.api.endpoint.NodeFileStoreApis#getFile(String, Boolean, String, Boolean)} + * + *

Typically used when 'wt=json' is specified while retrieving an individual file from the + * filestore + */ +public class FileStoreJsonFileResponse extends SolrJerseyResponse { + @JsonProperty public String response; +} diff --git a/solr/api/src/java/org/apache/solr/client/api/util/Constants.java b/solr/api/src/java/org/apache/solr/client/api/util/Constants.java index 560ce77af97..b4ef56c2050 100644 --- a/solr/api/src/java/org/apache/solr/client/api/util/Constants.java +++ b/solr/api/src/java/org/apache/solr/client/api/util/Constants.java @@ -27,6 +27,7 @@ private Constants() { public static final String INDEX_PATH_PREFIX = "/{" + INDEX_TYPE_PATH_PARAMETER + ":cores|collections}/{" + INDEX_NAME_PATH_PARAMETER + "}"; + public static final String OMIT_FROM_CODEGEN_PROPERTY = "omitFromCodegen"; public static final String GENERIC_ENTITY_PROPERTY = "genericEntity"; public static final String BINARY_CONTENT_TYPE_V2 = "application/vnd.apache.solr.javabin"; diff --git a/solr/benchmark/src/resources/configs/cloud-minimal/conf/schema.xml b/solr/benchmark/src/resources/configs/cloud-minimal/conf/schema.xml index cbdf1bac1f2..e517aea5930 100644 --- a/solr/benchmark/src/resources/configs/cloud-minimal/conf/schema.xml +++ b/solr/benchmark/src/resources/configs/cloud-minimal/conf/schema.xml @@ -15,9 +15,9 @@ See the License for the specific language governing permissions and limitations under the License. --> - + - + - + diff --git a/solr/bin/solr b/solr/bin/solr index a2b9445604c..036775a766b 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -17,7 +17,7 @@ # CONTROLLING STARTUP: # -# Use solr -help to see available command-line options. In addition +# Use solr --help to see available command-line options. In addition # to passing command-line options, this script looks for an include # file named solr.in.sh to set environment variables. Specifically, # the following locations are searched in this order: @@ -367,30 +367,30 @@ function print_usage() { if [[ "$CMD" == "start" || "$CMD" == "restart" ]]; then echo "" - echo "Usage: solr $CMD [-f] [-c] [-h hostname] [-p port] [-d directory] [-z zkHost] [-m memory] [-e example] [-s solr.solr.home] [-t solr.data.home] [-a \"additional-options\"] [-V]" + echo "Usage: solr $CMD [-f] [-c] [--host host] [-p port] [-d directory] [-z zkHost] [-m memory] [-e example] [-s solr.solr.home] [-t solr.data.home] [--jvm-opts \"jvm-opts\"] [-V]" echo "" echo " -f Start Solr in foreground; default starts Solr in the background" echo " and sends stdout / stderr to solr-PORT-console.log" echo "" - echo " -c or -cloud Start Solr in SolrCloud mode; if -z not supplied and ZK_HOST not defined in" + echo " -c or --cloud Start Solr in SolrCloud mode; if -z not supplied and ZK_HOST not defined in" echo " solr.in.sh, an embedded ZooKeeper instance is started on Solr port+1000," echo " such as 9983 if Solr is bound to 8983" echo "" - echo " -host Specify the hostname for this Solr instance" + echo " --host Specify the hostname for this Solr instance" echo "" - echo " -p Specify the port to start the Solr HTTP listener on; default is 8983" + echo " -p/--port Specify the port to start the Solr HTTP listener on; default is 8983" echo " The specified port (SOLR_PORT) will also be used to determine the stop port" echo " STOP_PORT=(\$SOLR_PORT-1000) and JMX RMI listen port RMI_PORT=(\$SOLR_PORT+10000). " echo " For instance, if you set -p 8985, then the STOP_PORT=7985 and RMI_PORT=18985" echo "" echo " -d

Specify the Solr server directory; defaults to server" echo "" - echo " -z/-zkHost Zookeeper connection string; only used when running in SolrCloud mode using -c" + echo " -z/--zk-host Zookeeper connection string; only used when running in SolrCloud mode using -c" echo " If neither ZK_HOST is defined in solr.in.sh nor the -z parameter is specified," echo " an embedded ZooKeeper instance will be launched." echo " Set the ZK_CREATE_CHROOT environment variable to true if your ZK host has a chroot path, and you want to create it automatically." echo "" - echo " -m Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g" + echo " -m/--memory Sets the min (-Xms) and max (-Xmx) heap size for the JVM, such as: -m 4g" echo " results in: -Xms4g -Xmx4g; by default, this script sets the heap size to 512m" echo "" echo " -s Sets the solr.solr.home system property; Solr will create core directories under" @@ -401,7 +401,7 @@ function print_usage() { echo " on which example is run. The default value is server/solr. If passed relative dir," echo " validation with current dir will be done, before trying default server/" echo "" - echo " -t Sets the solr.data.home system property, where Solr will store index data in /data subdirectories." + echo " -t/--data-home Sets the solr.data.home system property, where Solr will store index data in /data subdirectories." echo " If not set, Solr uses solr.solr.home for config and data." echo "" echo " -e Name of the example to run; available examples:" @@ -410,9 +410,9 @@ function print_usage() { echo " schemaless: Schema-less example (schema is inferred from data during indexing)" echo " films: Example of starting with _default configset and adding explicit fields dynamically" echo "" - echo " -a Additional parameters to pass to the JVM when starting Solr, such as to setup" + echo " --jvm-opts Additional parameters to pass to the JVM when starting Solr, such as to setup" echo " Java debug options. For example, to enable a Java debugger to attach to the Solr JVM" - echo " you could pass: -a \"-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=18983\"" + echo " you could pass: --jvm-opts \"-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=18983\"" echo " In most cases, you should wrap the additional parameters in double quotes." echo "" echo " -j Additional parameters to pass to Jetty when starting Solr." @@ -420,14 +420,14 @@ function print_usage() { echo " you could pass: -j \"--include-jetty-dir=/etc/jetty/custom/server/\"" echo " In most cases, you should wrap the additional parameters in double quotes." echo "" - echo " -noprompt Don't prompt for input; accept all defaults when running examples that accept user input" + echo " --no-prompt Don't prompt for input; accept all defaults when running examples that accept user input" echo "" - echo " -force If attempting to start Solr as the root user, the script will exit with a warning that running Solr as \"root\" can cause problems." - echo " It is possible to override this warning with the '-force' parameter." + echo " --force If attempting to start Solr as the root user, the script will exit with a warning that running Solr as \"root\" can cause problems." + echo " It is possible to override this warning with the '--force' parameter." echo "" echo " -v and -q Verbose (-v) or quiet (-q) logging. 
Sets default log level of Solr to DEBUG or WARN instead of INFO" echo "" - echo " -V/-verbose Verbose messages from this script" + echo " -V/--verbose Verbose messages from this script" echo "" elif [ "$CMD" == "stop" ]; then echo "" @@ -437,171 +437,15 @@ function print_usage() { echo "" echo " -p Specify the port the Solr HTTP listener is bound to" echo "" - echo " -all Find and stop all running Solr servers on this host" + echo " --all Find and stop all running Solr servers on this host" echo "" - echo " -V/-verbose Verbose messages from this script" + echo " -V/--verbose Verbose messages from this script" echo "" echo " NOTE: To see if any Solr servers are running, do: solr status" echo "" - elif [ "$CMD" == "zk" ]; then - print_short_zk_usage "" - echo " Can be run on remote (non-Solr) hosts, as long as valid ZK_HOST information is provided" - echo " Be sure to check the Solr logs in case of errors." - echo "" - echo " -z zkHost Optional Zookeeper connection string for all commands. If specified it" - echo " overrides the 'ZK_HOST=...'' defined in solr.in.sh." - echo "" - echo " -s solrUrl Optional Solr URL to look up the correct zkHost connection string via." - echo "" - echo " -V/-verbose Enable more verbose output for this script." - echo "" - echo " upconfig uploads a configset from the local machine to Zookeeper." - echo "" - echo " downconfig downloads a configset from Zookeeper to the local machine." - echo "" - echo " -n Name of the configset in Zookeeper that will be the destination of" - echo " 'upconfig' and the source for 'downconfig'." - echo "" - echo " -d The local directory the configuration will be uploaded from for" - echo " 'upconfig' or downloaded to for 'downconfig'. If 'confdir' is a child of" - echo " ...solr/server/solr/configsets' then the configs will be copied from/to" - echo " that directory. Otherwise it is interpreted as a simple local path." - echo "" - echo " cp copies files or folders to/from Zookeeper or Zookeeper -> Zookeeper" - echo "" - echo " -r Recursively copy to . Command will fail if has children and " - echo " -r is not specified. Optional" - echo "" - echo " , : [file:][/]path/to/local/file or zk:/path/to/zk/node" - echo " NOTE: and may both be Zookeeper resources prefixed by 'zk:'" - echo " When is a zk resource, may be '.'" - echo " If ends with '/', then will be a local folder or parent znode and the last" - echo " element of the path will be appended unless also ends in a slash. " - echo " may be zk:, which may be useful when using the cp -r form to backup/restore " - echo " the entire zk state." - echo " You must enclose local paths that end in a wildcard in quotes or just" - echo " end the local path in a slash. That is," - echo " 'bin/solr zk cp -r /some/dir/ zk:/ -z localhost:2181' is equivalent to" - echo " 'bin/solr zk cp -r \"/some/dir/*\" zk:/ -z localhost:2181'" - echo " but 'bin/solr zk cp -r /some/dir/* zk:/ -z localhost:2181' will throw an error" - echo "" - echo " here's an example of backup/restore for a ZK configuration:" - echo " to copy to local: 'bin/solr zk cp -r zk:/ /some/dir -z localhost:2181'" - echo " to restore to ZK: 'bin/solr zk cp -r /some/dir/ zk:/ -z localhost:2181'" - echo "" - echo " The 'file:' prefix is stripped, thus 'file:/wherever' specifies an absolute local path and" - echo " 'file:somewhere' specifies a relative local path. All paths on Zookeeper are absolute." 
- echo "" - echo " Zookeeper nodes CAN have data, so moving a single file to a parent znode" - echo " will overlay the data on the parent Znode so specifying the trailing slash" - echo " can be important." - echo "" - echo " Wildcards are supported when copying from local, trailing only and must be quoted." - echo "" - echo " rm deletes files or folders on Zookeeper" - echo "" - echo " -r Recursively delete if is a directory. Command will fail if " - echo " has children and -r is not specified. Optional" - echo " : [zk:]/path/to/zk/node. may not be the root ('/')" - echo "" - echo " mv moves (renames) znodes on Zookeeper" - echo "" - echo " , : Zookeeper nodes, the 'zk:' prefix is optional." - echo " If ends with '/', then will be a parent znode" - echo " and the last element of the path will be appended." - echo " Zookeeper nodes CAN have data, so moving a single file to a parent znode" - echo " will overlay the data on the parent Znode so specifying the trailing slash" - echo " is important." - echo "" - echo " ls lists the znodes on Zookeeper" - echo "" - echo " -r Recursively descends the path listing all znodes. Optional" - echo " : The Zookeeper path to use as the root." - echo "" - echo " Only the node names are listed, not data" - echo "" - echo " mkroot makes a znode in Zookeeper with no data. Can be used to make a path of arbitrary" - echo " depth but primarily intended to create a 'chroot'." - echo "" - echo " : The Zookeeper path to create. Leading slash is assumed if not present." - echo " Intermediate nodes are created as needed if not present." - echo "" - elif [ "$CMD" == "auth" ]; then - echo "" - echo "Usage: solr auth enable [-type basicAuth] -credentials user:pass [-blockUnknown ] [-updateIncludeFileOnly ] [-V]" - echo " solr auth enable [-type basicAuth] -prompt [-blockUnknown ] [-updateIncludeFileOnly ] [-V]" - echo " solr auth enable -type kerberos -config \"\" [-updateIncludeFileOnly ] [-V]" - echo " solr auth disable [-updateIncludeFileOnly ] [-V]" - echo "" - echo " Updates or enables/disables authentication. Must be run on the machine hosting Solr." - echo "" - echo " -type or -t The authentication mechanism (basicAuth or kerberos) to enable. Defaults to 'basicAuth'." - echo "" - echo " -credentials The username and password of the initial user. Applicable for basicAuth only." - echo " Note: only one of -prompt or -credentials must be provided" - echo "" - echo " -config \"\" Configuration parameters (Solr startup parameters). Required and applicable only for Kerberos" - echo "" - echo " -solrIncludeFile Specify the full path to the include file in the environment." - echo " If not specified this script looks for an include file named solr.in.sh to set environment variables. " - echo " Specifically,the following locations are searched in this order:" - echo " diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java index fdb61ee3eb1..45ce93c30c4 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/SolrClientCache.java @@ -41,13 +41,13 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** The SolrClientCache caches SolrClients so they can be reused by different TupleStreams. */ +/** The SolrClientCache caches SolrClients, so they can be reused by different TupleStreams. 
*/ public class SolrClientCache implements Closeable { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); // Set the floor for timeouts to 60 seconds. - // Timeouts cans be increased by setting the system properties defined below. + // Timeouts can be increased by setting the system properties defined below. private static final int MIN_TIMEOUT = 60000; private static final int minConnTimeout = Math.max( @@ -148,7 +148,7 @@ private static CloudHttp2SolrClient newCloudHttp2SolrClient( /** * Create (and cache) a SolrClient based around the provided URL * - * @param baseUrl a Solr URL. May be either a "base" URL (i.e. ending in "/solr"), or point to a + * @param baseUrl a Solr URL. May either be a "base" URL (i.e. ending in "/solr"), or point to a * particular collection or core. * @return a SolrClient configured to use the provided URL. The cache retains a reference to the * returned client, and will close it when callers invoke {@link SolrClientCache#close()} diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/Tuple.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/Tuple.java index fdf992a9ed4..6e2f62ce91c 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/Tuple.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/Tuple.java @@ -49,8 +49,13 @@ public class Tuple implements Cloneable, MapWriter { */ public boolean EXCEPTION; + /** Tuple fields. */ private final Map fields = CollectionUtil.newHashMap(2); + + /** External serializable field names. */ private List fieldNames; + + /** Mapping of external field names to internal tuple field names. */ private Map fieldLabels; public Tuple() { @@ -242,7 +247,7 @@ public void setFieldLabels(Map fieldLabels) { /** * A list of field names to serialize. This list (together with the mapping in {@link - * #getFieldLabels()} determines what tuple values are serialized and their external (serialized) + * #getFieldLabels()}) determines what tuple values are serialized and their external (serialized) * names. * * @return list of external field names or null @@ -279,8 +284,9 @@ public Tuple clone() { } /** - * The other tuples fields and fieldLabels will be putAll'd directly to this's fields and - * fieldLabels while other's fieldNames will be added such that duplicates aren't present. + * The other tuples fields and fieldLabels will be merged via putAll directly into this Tuple's + * fields and fieldLabels while other's fieldNames will be added such that duplicates aren't + * present. * * @param other Tuple to be merged into this. */ diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java index e47bfd90da8..5a67c581962 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/comp/FieldComparator.java @@ -97,14 +97,14 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { } /* - * What're we doing here messing around with lambdas for the comparator logic? - * We want the compare(...) function to run as fast as possible because it will be called many many + * What are we doing here messing around with lambdas for the comparator logic? + * We want the compare(...) 
function to run as fast as possible because it will be called many * times over the lifetime of this object. For that reason we want to limit the number of comparisons * taking place in the compare(...) function. Because this class supports both ascending and * descending comparisons and the logic for each is slightly different, we want to do the * if(ascending){ compare like this } else { compare like this } * check only once - we can do that in the constructor of this class, create a lambda, and then execute - * that lambda in the compare function. A little bit of branch prediction savings right here. + * that lambda in the compare function. A bit of branch prediction savings right here. */ @SuppressWarnings({"unchecked"}) private void assignComparator() { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java index abcb9752372..015dafeb879 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/AscEvaluator.java @@ -75,7 +75,7 @@ public Object doWork(Object value) throws IOException { throw new IOException( String.format( Locale.ROOT, - "Invalid expression %s - value %s is of type %s but we are expeting type %s", + "Invalid expression %s - value %s is of type %s but we are expecting type %s", toExpression(constructingFactory), item.toString(), item.getClass().getSimpleName(), diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java index 0535aca5478..37ee06dc474 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/BicubicSplineEvaluator.java @@ -36,7 +36,7 @@ public Object doWork(Object... objects) throws IOException { if (objects.length != 3) { throw new IOException( - "The bicubicSpline function requires three paremeters," + objects.length + " found."); + "The bicubicSpline function requires three parameters," + objects.length + " found."); } Object first = objects[0]; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/IsNullEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/IsNullEvaluator.java index 0d079f538e3..897c7ad8936 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/IsNullEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/IsNullEvaluator.java @@ -45,7 +45,7 @@ public Object doWork(Object... values) throws IOException { } if (values[0] instanceof String) { - // Check to see if the this tuple had a null value for that string. + // Check to see if this tuple had a null value for that string. 
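The FieldComparator comment above explains why the ascending-or-descending decision is made once, in the constructor, so that compare() itself never branches on sort direction. A minimal standalone sketch of that pattern, using hypothetical names rather than Solr's actual FieldComparator and Tuple classes:

import java.util.Comparator;
import java.util.Map;

// Sketch only: the sort direction is resolved exactly once, at construction time,
// so compare() carries no if(ascending) branch on the hot path.
class DirectionalFieldComparator implements Comparator<Map<String, Double>> {
  private final Comparator<Map<String, Double>> delegate;

  DirectionalFieldComparator(String field, boolean ascending) {
    Comparator<Map<String, Double>> byField = Comparator.comparing(t -> t.get(field));
    this.delegate = ascending ? byField : byField.reversed();
  }

  @Override
  public int compare(Map<String, Double> left, Map<String, Double> right) {
    return delegate.compare(left, right); // direction already baked into the delegate
  }
}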
String nullField = getStreamContext().getTupleContext().get("null"); if (nullField != null && nullField.equals(values[0])) { return true; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java index 6280436270a..024ec64ac4a 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/KnnRegressionEvaluator.java @@ -58,7 +58,7 @@ public Object doWork(Object... values) throws IOException { if (values.length < 3) { throw new IOException( - "knnRegress expects atleast three parameters: an observation matrix, an outcomes vector and k."); + "knnRegress expects at least three parameters: an observation matrix, an outcomes vector and k."); } Matrix observations = null; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java index 9378661977d..44058b1d04b 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/Matrix.java @@ -61,8 +61,8 @@ public List getRowLabels() { return rowLabels; } - public void setRowLabels(List rowLables) { - this.rowLabels = rowLables; + public void setRowLabels(List rowLabels) { + this.rowLabels = rowLabels; } public double[][] getData() { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java index ab927a7fd84..57d69e6619a 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/NotNullEvaluator.java @@ -45,7 +45,7 @@ public Object doWork(Object... values) throws IOException { } if (values[0] instanceof String) { - // Check to see if the this tuple had a null value for that string. + // Check to see if this tuple had a null value for that string. String nullField = getStreamContext().getTupleContext().get("null"); if (nullField != null && nullField.equals(values[0])) { return false; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java index 8c04efa7702..83491cf38dc 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/PredictEvaluator.java @@ -178,7 +178,7 @@ public Object doWork(Object... 
objects) throws IOException { y = ((Number) third).doubleValue(); return bivariateFunction.value(x, y); } else { - throw new IOException("BivariateFunction requires two numberic parameters."); + throw new IOException("BivariateFunction requires two numeric parameters."); } } else if (objects.length == 2) { if (second instanceof Matrix) { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/ProbabilityEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/ProbabilityEvaluator.java index 2ed4ae30140..31ec03be479 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/ProbabilityEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/ProbabilityEvaluator.java @@ -60,7 +60,7 @@ public Object doWork(Object... values) throws IOException { throw new IOException( String.format( Locale.ROOT, - "Invalid expression %s - found type %s for the first value, expecting a IntegerDistributionm for probability at a specific value.", + "Invalid expression %s - found type %s for the first value, expecting a IntegerDistribution for probability at a specific value.", toExpression(constructingFactory), first.getClass().getSimpleName())); } diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java index baa8bd592ca..a30d633b8df 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/eval/RecursiveEvaluator.java @@ -126,7 +126,7 @@ protected Object normalizeOutputType(Object value) { return ((List) value) .stream().map(innerValue -> normalizeOutputType(innerValue)).collect(Collectors.toList()); } else if (value instanceof Tuple && value.getClass().getEnclosingClass() == null) { - // If its a tuple and not a inner class that has extended tuple, which is done in a number of + // If it's a tuple and not an inner class that has extended tuple, which occurs in a number of // cases so that mathematical models can be contained within a tuple. Tuple tuple = (Tuple) value; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/sql/DriverImpl.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/sql/DriverImpl.java index 3509b54abd1..cb4668838e2 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/sql/DriverImpl.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/sql/DriverImpl.java @@ -32,7 +32,7 @@ import org.apache.solr.common.util.SuppressForbidden; /** - * Get a Connection with with a url and properties. + * Get a Connection with an url and properties. * *

jdbc:solr://zkhost:port?collection=collection&aggregationMode=map_reduce */ diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/BiJoinStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/BiJoinStream.java index 5c8ecc3d8d9..ac8ec398233 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/BiJoinStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/BiJoinStream.java @@ -28,8 +28,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; /** - * Joins leftStream with rightStream based on a Equalitor. Both streams must be sorted by the fields - * being joined on. Resulting stream is sorted by the equalitor. + * Joins leftStream with rightStream based on an {@link StreamEqualitor}. Both streams must be + * sorted by the fields being joined on. Resulting stream is sorted by the equalitor. * * @since 6.0.0 */ diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java index c9973c5496e..8cbbea7d16b 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CartesianProductStream.java @@ -277,8 +277,8 @@ private Deque generateTupleList(Tuple original) throws IOException { private boolean iterate( List evaluators, int[] indexes, Map evaluatedValues) { - // this assumes evaluators and indexes are the same length, which is ok cause we created it so - // we know it is + // this assumes evaluators and indexes are the same length, which is ok because we created it, + // so we know it is // go right to left and increment, returning true if we're not at the end for (int offset = indexes.length - 1; offset >= 0; --offset) { Object evaluatedValue = evaluatedValues.get(evaluators.get(offset).getName()); diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java index 0f87df7bcf3..84d34f99839 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CloudSolrStream.java @@ -107,14 +107,14 @@ public CloudSolrStream(StreamExpression expression, StreamFactory factory) throw expression)); } - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != 1 + namedParams.size()) { throw new IOException( String.format(Locale.ROOT, "invalid expression %s - unknown operands found", expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( @@ -246,7 +246,8 @@ void init(String collectionName, String zkHost, SolrParams params) throws IOExce this.collection = collectionName; this.params = new ModifiableSolrParams(params); - // If the comparator is null then it was not explicitly set so we will create one using the sort + // If the comparator is null then it was not explicitly set, so we will create one using the + // sort // 
parameter of the query. While doing this we will also take into account any aliases such that // if we are sorting on fieldA but fieldA is aliased to alias.fieldA then the comparator will be // against alias.fieldA. diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java index 79f8abfab23..6ed5d3a06c7 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java @@ -170,7 +170,7 @@ public Tuple read() throws IOException { } } else { // if the read document contains field 'batchIndexed' then it's a summary - // document and we can update our count based on it's value. If not then + // document, and we can update our count based on its value. If not then // just increment by 1 if (tuple.getFields().containsKey(UpdateStream.BATCH_INDEXED_FIELD_NAME) && isInteger(tuple.getString(UpdateStream.BATCH_INDEXED_FIELD_NAME))) { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ComplementStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ComplementStream.java index 1d589315a15..8698099d71b 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ComplementStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ComplementStream.java @@ -207,7 +207,7 @@ public Tuple read() throws IOException { } // if a == b then ignore a cause it exists in b - // else we know that b < a so we can ignore b + // else we know that b < a, so we can ignore b if (eq.test(a, b)) { streamB.pushBack(b); } else { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java index 414d2466a38..efc602e43a6 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java @@ -398,7 +398,7 @@ private void stream() { } } catch (Throwable t) { log.error("Fatal Error in DaemonStream: {}", id, t); - // For anything other then IOException break out of the loop and shutdown the thread. + // For anything other than IOException break out of the loop and shutdown the thread. 
break OUTER; } finally { try { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java index 8598cdb1604..0be344a0921 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/DeepRandomStream.java @@ -102,14 +102,14 @@ public DeepRandomStream(StreamExpression expression, StreamFactory factory) thro expression)); } - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != 1 + namedParams.size()) { throw new IOException( String.format(Locale.ROOT, "invalid expression %s - unknown operands found", expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java index 9e39198c287..68343b5f0e1 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FacetStream.java @@ -159,7 +159,7 @@ public FacetStream(StreamExpression expression, StreamFactory factory) throws IO expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( @@ -512,7 +512,7 @@ private void init( this.serializeBucketSizeLimit = serializeBucketSizeLimit; this.overfetch = overfetch; - // In a facet world it only makes sense to have the same field name in all of the sorters + // In a facet world it only makes sense to have the same field name in all the sorters // Because FieldComparator allows for left and right field names we will need to validate // that they are the same for (FieldComparator sort : bucketSorts) { @@ -800,12 +800,12 @@ private FieldComparator[] adjustSorts(Bucket[] _buckets, FieldComparator[] _sort } else if (_sorts.length == 1) { FieldComparator[] adjustedSorts = new FieldComparator[_buckets.length]; if (_sorts[0].getLeftFieldName().contains("(")) { - // Its a metric sort so apply the same sort criteria at each level. + // It's a metric sort so apply the same sort criteria at each level. for (int i = 0; i < adjustedSorts.length; i++) { adjustedSorts[i] = _sorts[0]; } } else { - // Its an index sort so apply an index sort at each level. + // It's an index sort so apply an index sort at each level. 
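The FacetStream comments above distinguish a metric sort (the sort field looks like a function call, e.g. sum(price), and is reused unchanged at every facet level) from an index sort (rebuilt against each bucket's own field). A loose sketch of that expansion, with a made-up SortSpec type standing in for Solr's FieldComparator and Bucket classes:

import java.util.ArrayList;
import java.util.List;

// Sketch only: expand a single sort specification across several facet levels.
record SortSpec(String field, String order) {}

final class SortExpansion {
  static List<SortSpec> adjust(List<String> buckets, SortSpec single) {
    List<SortSpec> adjusted = new ArrayList<>();
    boolean metricSort = single.field().contains("("); // e.g. "sum(price)"
    for (String bucket : buckets) {
      // A metric sort applies as-is at every level; an index sort follows each bucket's field.
      adjusted.add(metricSort ? single : new SortSpec(bucket, single.order()));
    }
    return adjusted;
  }
}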
for (int i = 0; i < adjustedSorts.length; i++) { adjustedSorts[i] = new FieldComparator(_buckets[i].toString(), _sorts[0].getOrder()); } diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java index 9c7478a4623..1f7d7d65e0f 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/FeaturesSelectionStream.java @@ -97,7 +97,7 @@ public FeaturesSelectionStream(StreamExpression expression, StreamFactory factor List namedParams = factory.getNamedOperands(expression); StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost"); - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != 1 + namedParams.size()) { throw new IOException( @@ -113,7 +113,7 @@ public FeaturesSelectionStream(StreamExpression expression, StreamFactory factor expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/HashJoinStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/HashJoinStream.java index 53cc4678010..e0d6dfefbd8 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/HashJoinStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/HashJoinStream.java @@ -55,7 +55,7 @@ public class HashJoinStream extends TupleStream implements Expressible { protected Tuple workingFullTuple = null; protected String workingFullHash = null; - protected int workngHashSetIdx = 0; + protected int workingHashSetIdx = 0; public HashJoinStream(TupleStream fullStream, TupleStream hashStream, List hashOn) throws IOException { @@ -283,23 +283,23 @@ public Tuple read() throws IOException { workingFullTuple = fullTuple; workingFullHash = fullHash; - workngHashSetIdx = 0; + workingHashSetIdx = 0; } // At this point we know we have at least one doc to match on due to the check at the end, // before returning, we know we have at least one to match with left List matches = hashedTuples.get(workingFullHash); Tuple returnTuple = workingFullTuple.clone(); - returnTuple.merge(matches.get(workngHashSetIdx)); + returnTuple.merge(matches.get(workingHashSetIdx)); // Increment this so the next time we hit the next matching tuple - workngHashSetIdx++; + workingHashSetIdx++; - if (workngHashSetIdx >= matches.size()) { + if (workingHashSetIdx >= matches.size()) { // well, now we've reached all the matches, clear it all out workingFullTuple = null; workingFullHash = null; - workngHashSetIdx = 0; + workingHashSetIdx = 0; } return returnTuple; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/InnerJoinStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/InnerJoinStream.java index 4bcf0aeef1d..4ecbbab581b 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/InnerJoinStream.java +++ 
b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/InnerJoinStream.java @@ -26,8 +26,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; /** - * Joins leftStream with rightStream based on a Equalitor. Both streams must be sorted by the fields - * being joined on. Resulting stream is sorted by the equalitor. + * Joins leftStream with rightStream based on an Equalitor. Both streams must be sorted by the + * fields being joined on. Resulting stream is sorted by the equalitor. * * @since 6.0.0 */ diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/IntersectStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/IntersectStream.java index 04f32dcf72b..e0fed5a49f9 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/IntersectStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/IntersectStream.java @@ -202,7 +202,7 @@ public Tuple read() throws IOException { return a; } - // We're not at the end and they're not equal. We now need to decide which we can + // We're not at the end, and they're not equal. We now need to decide which we can // throw away. This is accomplished by checking which is less than the other. The // one that is less (determined by the sort) can be tossed. The other should // be pushed back and the loop continued. We don't have to worry about an == 0 diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java index bd9ed7e44c0..920aa819d75 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JDBCStream.java @@ -180,7 +180,7 @@ public JDBCStream(StreamExpression expression, StreamFactory factory) throws IOE StreamExpressionNamedParameter fetchSizeExpression = factory.getNamedOperand(expression, "fetchSize"); - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != namedParams.size()) { throw new IOException( @@ -283,7 +283,7 @@ protected Driver getDriver() throws IOException { // than java was started with, it will likely not work. Instead, create a class that inherits // this class and override this getDriver() method. // Unfortunately it is impossible to use a custom ClassLoader with DriverManager, so we would - // need to remove our use of this class in order to support JDBC drivers loaded in via solr's + // need to remove our use of this class in order to support JDBC drivers loaded in via Solr's // additional library methods. This comment is relevant for JDBC drivers loaded in via custom // plugins and even Solr Modules. 
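The JDBCStream determineValueSelector hunk just below corrects the comment about using getColumnLabel rather than getColumnName, so that fields renamed with AS are picked up properly. A small plain-JDBC illustration of that difference; it assumes an in-memory HSQLDB driver on the classpath (the same database the JDBCStream tests use) and a made-up PEOPLE table:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

final class ColumnLabelDemo {
  public static void main(String[] args) throws SQLException {
    try (Connection con = DriverManager.getConnection("jdbc:hsqldb:mem:demo");
        Statement stmt = con.createStatement()) {
      stmt.execute("CREATE TABLE PEOPLE (ID INT)");
      try (ResultSet rs = stmt.executeQuery("SELECT ID AS PERSON_ID FROM PEOPLE")) {
        ResultSetMetaData md = rs.getMetaData();
        // getColumnName(1) reports the underlying column ("ID");
        // getColumnLabel(1) is expected to report the alias ("PERSON_ID"),
        // which is what a consumer of the aliased result should key on.
        System.out.println(md.getColumnName(1) + " / " + md.getColumnLabel(1));
      }
    }
  }
}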
try { @@ -389,7 +389,7 @@ private ResultSetValueSelector[] constructValueSelectors(ResultSetMetaData metad protected ResultSetValueSelector determineValueSelector(int columnIdx, ResultSetMetaData metadata) throws SQLException { final int columnNumber = columnIdx + 1; // cause it starts at 1 - // Use getColumnLabel instead of getColumnName to make sure fields renamed with AS as picked up + // Use getColumnLabel instead of getColumnName to make sure fields renamed with AS are picked up // properly final String columnName = metadata.getColumnLabel(columnNumber); final int jdbcType = metadata.getColumnType(columnNumber); @@ -425,7 +425,7 @@ public String getColumnName() { } // We're checking the Java class names because there are lots of SQL types across // lots of database drivers that can be mapped to standard Java types. Basically, - // this makes it easier and we don't have to worry about esoteric type names in the + // this makes it easier, and we don't have to worry about esoteric type names in the // JDBC family of types else if (Short.class.getName().equals(className)) { valueSelector = diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java index 781fcc3d10c..35a7b021d64 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/JSONTupleStream.java @@ -78,7 +78,7 @@ public Map next() throws IOException { atDocs = true; if (!found) return null; } - // advance past ARRAY_START (in the case that we just advanced to docs, or OBJECT_END left over + // advance past ARRAY_START, in the case that we just advanced to docs, or OBJECT_END left over // from the last call. int event = parser.nextEvent(); if (event == JSONParser.ARRAY_END) return null; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java index 41cd08d18bf..7f514a71b04 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/KnnStream.java @@ -86,7 +86,7 @@ public KnnStream(StreamExpression expression, StreamFactory factory) throws IOEx expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (namedParams.size() < 2) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LeftOuterJoinStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LeftOuterJoinStream.java index 8d61828c2f1..c6efab74840 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LeftOuterJoinStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/LeftOuterJoinStream.java @@ -26,8 +26,8 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; /** - * Joins leftStream with rightStream based on a Equalitor. Both streams must be sorted by the fields - * being joined on. Resulting stream is sorted by the equalitor. + * Joins leftStream with rightStream based on an Equalitor. Both streams must be sorted by the + * fields being joined on. Resulting stream is sorted by the equalitor. 
* * @since 6.0.0 */ diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java index 7d0f16a9a6c..b2e64bef03b 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ListStream.java @@ -112,7 +112,8 @@ public Tuple read() throws IOException { if (currentStream == null) { if (streamIndex < streams.length) { currentStream = streams[streamIndex]; - // Set the stream to null in the array of streams once its been set to the current stream. + // Set the stream to null in the array of streams once it's been set to the current + // stream. // This will remove the reference to the stream // and should allow it to be garbage collected once it's no longer the current stream. streams[streamIndex] = null; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/MergeStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/MergeStream.java index cb915119543..4b368eadf71 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/MergeStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/MergeStream.java @@ -224,7 +224,6 @@ public Tuple read() throws IOException { // If all EOF then min will be null, else min is the current minimum if (null == minimum) { - // return EOF, doesn't matter which cause we're done return streams[0].read(); } diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java index 096f4a55e9c..ce3e37fdbab 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ModelStream.java @@ -76,7 +76,7 @@ public ModelStream(StreamExpression expression, StreamFactory factory) throws IO expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java index bf83ed40340..f680c5fe0e2 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NoOpStream.java @@ -29,7 +29,7 @@ import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; /** - * A simple no-operation stream. Immediately returns eof. Mostly intended for use as a place holder + * A simple no-operation stream. Immediately returns eof. Mostly intended for use as a placeholder * in {@link org.apache.solr.client.solrj.io.stream.expr.InjectionDefense}. 
* * @since 8.0.0 diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java index ba3fc0b9126..142b7b960cd 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/NullStream.java @@ -31,7 +31,7 @@ /** * The NullStream Iterates over a TupleStream and eats the tuples. It returns the tuple count in the - * EOF tuple. Because the NullStreaam eats all the Tuples it see's it can be used as a simple tool + * EOF tuple. Because the NullStream eats all the Tuples it sees, it can be used as a simple tool * for performance analysis of underlying streams. * * @since 6.4.0 diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/OuterHashJoinStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/OuterHashJoinStream.java index 417bf464271..cf064014c1d 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/OuterHashJoinStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/OuterHashJoinStream.java @@ -113,23 +113,23 @@ public Tuple read() throws IOException { workingFullTuple = fullTuple; workingFullHash = fullHash; - workngHashSetIdx = 0; + workingHashSetIdx = 0; } // At this point we know we have at least one doc to match on due to the check at the end, // before returning, we know we have at least one to match with left List matches = hashedTuples.get(workingFullHash); Tuple returnTuple = workingFullTuple.clone(); - returnTuple.merge(matches.get(workngHashSetIdx)); + returnTuple.merge(matches.get(workingHashSetIdx)); // Increment this so the next time we hit the next matching tuple - workngHashSetIdx++; + workingHashSetIdx++; - if (workngHashSetIdx >= matches.size()) { + if (workingHashSetIdx >= matches.size()) { // well, now we've reached all the matches, clear it all out workingFullTuple = null; workingFullHash = null; - workngHashSetIdx = 0; + workingHashSetIdx = 0; } return returnTuple; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java index a1ae5326bce..1199f2c1059 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/PlotStream.java @@ -68,7 +68,7 @@ public PlotStream(StreamExpression expression, StreamFactory factory) throws IOE // fieldLabels.put(name, name); StreamExpressionParameter param = np.getParameter(); - // we're going to split these up here so we only make the choice once + // we're going to split these up here, so we only make the choice once // order of these in read() doesn't matter if (param instanceof StreamExpressionValue) { stringParams.put(name, ((StreamExpressionValue) param).getValue()); @@ -141,7 +141,7 @@ public Explanation toExplanation(StreamFactory factory) throws IOException { public void setStreamContext(StreamContext context) { this.streamContext = context; - // also set in evalators and streams + // also set in evaluators and streams for (StreamEvaluator evaluator : evaluatorParams.values()) { evaluator.setStreamContext(context); } diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java 
b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java index f1e6c33c2fb..a00e5fb8bd5 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SearchStream.java @@ -75,7 +75,7 @@ public SearchStream(StreamExpression expression, StreamFactory factory) throws I expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java index f315b4af1eb..8e62902c98c 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ShuffleStream.java @@ -54,14 +54,14 @@ public ShuffleStream(StreamExpression expression, StreamFactory factory) throws expression)); } - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != 1 + namedParams.size()) { throw new IOException( String.format(Locale.ROOT, "invalid expression %s - unknown operands found", expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java index 3b79a3c1fa4..e362d34c73b 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SignificantTermsStream.java @@ -91,7 +91,7 @@ public SignificantTermsStream(StreamExpression expression, StreamFactory factory List namedParams = factory.getNamedOperands(expression); StreamExpressionNamedParameter zkHostExpression = factory.getNamedOperand(expression, "zkHost"); - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != 1 + namedParams.size()) { throw new IOException( @@ -107,7 +107,7 @@ public SignificantTermsStream(StreamExpression expression, StreamFactory factory expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SqlStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SqlStream.java index b39ab1dbdc1..24560cb61ae 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SqlStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/SqlStream.java @@ -76,7 +76,7 @@ public SqlStream(StreamExpression expression, StreamFactory 
factory) throws IOEx collectionName = factory.getDefaultCollection(); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java index 1461a3af70d..6f79ef04876 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TextLogitStream.java @@ -127,7 +127,7 @@ public TextLogitStream(StreamExpression expression, StreamFactory factory) throw factory.getExpressionOperandsRepresentingTypes( expression, Expressible.class, TupleStream.class); - // Validate there are no unknown parameters - zkHost and alias are namedParameter so we don't + // Validate there are no unknown parameters - zkHost and alias are namedParameter, so we don't // need to count it twice if (expression.getParameters().size() != 1 + namedParams.size() + streamExpressions.size()) { throw new IOException( @@ -143,7 +143,7 @@ public TextLogitStream(StreamExpression expression, StreamFactory factory) throw expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java index 383389d2551..ccfeb68e3c0 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TimeSeriesStream.java @@ -172,7 +172,7 @@ public TimeSeriesStream(StreamExpression expression, StreamFactory factory) thro expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java index 8e3844c152a..dbc295e79e3 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TopicStream.java @@ -170,7 +170,7 @@ public TopicStream(StreamExpression expression, StreamFactory factory) throws IO expression)); } - // Named parameters - passed directly to solr as solrparams + // Named parameters - passed directly to solr as SolrParams if (0 == namedParams.size()) { throw new IOException( String.format( diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java index bf982011f6a..9b07be811e3 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/TupStream.java @@ -67,7 +67,7 @@ public TupStream(StreamExpression expression, StreamFactory factory) throws IOEx fieldLabels.put(name, name); StreamExpressionParameter param 
= np.getParameter(); - // we're going to split these up here so we only make the choice once + // we're going to split these up here, so we only make the choice once // order of these in read() doesn't matter if (param instanceof StreamExpressionValue) { stringParams.put(name, ((StreamExpressionValue) param).getValue()); diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java index 8c3ba20fc8b..7d10407ee68 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java @@ -343,7 +343,7 @@ private void addMultivaluedField(SolrInputDocument doc, String fieldName, List documentBatch) throws IOException { diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java index d79534849ab..a994c55e646 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java @@ -213,7 +213,7 @@ public void open() throws IOException { outTuples.add(tuple); } - // Generate the x axis if the tuples contain y and not x + // Generate the x-axis if the tuples contain y and not x if (outTuples.get(0).getFields().containsKey("y") && !outTuples.get(0).getFields().containsKey("x")) { int x = 0; diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParser.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParser.java index 7d9a1fe09a7..2d0621ed25f 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParser.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParser.java @@ -171,7 +171,7 @@ private static boolean isExpressionClause(String clause) { return false; } - // Must end with ) + // Must end with ')' character return clause.endsWith(")"); } diff --git a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java index dc3e0d67eed..dfc2a391d82 100644 --- a/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java +++ b/solr/solrj-streaming/src/java/org/apache/solr/client/solrj/io/stream/expr/StreamFactory.java @@ -136,7 +136,7 @@ public List getValueOperands(StreamExpression expression) { } /** - * Given an expression, will return the value parameter at the given index, or null if doesn't + * Given an expression, will return the value parameter at the given index, or null if it doesn't * exist */ public String getValueOperand(StreamExpression expression, int parameterIndex) { diff --git a/solr/solrj-streaming/src/test-files/solrj/solr/configsets/ml/conf/schema.xml b/solr/solrj-streaming/src/test-files/solrj/solr/configsets/ml/conf/schema.xml index 0c9ee9c9688..35099afce8e 100644 --- a/solr/solrj-streaming/src/test-files/solrj/solr/configsets/ml/conf/schema.xml +++ b/solr/solrj-streaming/src/test-files/solrj/solr/configsets/ml/conf/schema.xml @@ -25,28 +25,28 @@ --> - + - - - - + + + + - - - - + + + + - + - - + + diff --git 
a/solr/solrj-streaming/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml b/solr/solrj-streaming/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml index 42b1e70017d..5a202baa2b8 100644 --- a/solr/solrj-streaming/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml +++ b/solr/solrj-streaming/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml @@ -25,7 +25,7 @@ --> - + - - - - + + + + - - - - + + + + @@ -110,13 +110,13 @@ - + - - + + - + diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java index a64cba4caf8..822f7d38350 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/sql/JdbcTest.java @@ -383,7 +383,7 @@ public void testConnectionParams() throws Exception { @Test public void testJDBCUrlParameters() throws Exception { - // Test JDBC paramters in URL + // Test JDBC parameters in URL try (Connection con = DriverManager.getConnection( "jdbc:solr://" @@ -436,7 +436,7 @@ public void testJDBCUrlParameters() throws Exception { @Test public void testJDBCPropertiesParameters() throws Exception { - // Test JDBC paramters in properties + // Test JDBC parameters in properties Properties providedProperties = new Properties(); providedProperties.put("collection", COLLECTIONORALIAS); providedProperties.put("username", ""); diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java index cc860697c42..799330fbd0e 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/CloudAuthStreamTest.java @@ -50,7 +50,7 @@ /** * tests various streaming expressions (via the SolrJ {@link SolrStream} API) against a SolrCloud - * cluster using both Authenticationand Role based Authorization + * cluster using both Authentication and Role based Authorization */ public class CloudAuthStreamTest extends SolrCloudTestCase { @@ -67,8 +67,8 @@ public class CloudAuthStreamTest extends SolrCloudTestCase { private static String solrUrl = null; /** - * Helper that returns the original {@link SolrRequest} with it's original type so it can - * be chained. This menthod knows that for the purpose of this test, every user name is it's own + * Helper that returns the original {@link SolrRequest} with its original type so it can + * be chained. 
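The CloudAuthStreamTest javadoc just above describes a helper that attaches basic-auth credentials and hands the request back with its original compile-time type so further calls can be chained. A generic method along those lines could be sketched as follows; the class and method names here are invented, while SolrRequest#setBasicAuthCredentials is the SolrJ call the javadoc itself references:

import org.apache.solr.client.solrj.SolrRequest;

final class AuthRequestHelper {
  // Sketch only: preserve the concrete request type T so callers can keep
  // chaining request-specific setters after the credentials are applied.
  static <T extends SolrRequest<?>> T withBasicAuth(T req, String user, String pass) {
    req.setBasicAuthCredentials(user, pass);
    return req;
  }
}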
This method knows that for the purpose of this test, every username is its own * password * * @see SolrRequest#setBasicAuthCredentials @@ -253,7 +253,7 @@ public void testEchoStream() throws Exception { assertEquals("hello world", tuples.get(0).get("echo")); } - public void testEchoStreamNoCredentials() throws Exception { + public void testEchoStreamNoCredentials() { final SolrStream solrStream = new SolrStream( solrUrl + "/" + COLLECTION_X, @@ -270,7 +270,7 @@ public void testEchoStreamNoCredentials() throws Exception { }); } - public void testEchoStreamInvalidCredentials() throws Exception { + public void testEchoStreamInvalidCredentials() { final SolrStream solrStream = new SolrStream( solrUrl + "/" + COLLECTION_X, @@ -489,7 +489,7 @@ public void testExecutorUpdateStreamInsufficientCredentials() throws Exception { params("qt", "/stream", "_trace", "executor_via_" + trace, "expr", expr)); solrStream.setCredentials(user, user); - // NOTE: Becaue of the backgroun threads, no failures will to be returned to client... + // NOTE: Because of the background threads, no failures will to be returned to client... final List tuples = getTuples(solrStream); assertEquals(0, tuples.size()); @@ -511,7 +511,7 @@ public void testDaemonUpdateStream() throws Exception { { // NOTE: in spite of what is implied by 'terminate=true', this daemon will NEVER terminate on - // it's own as long as the updates are successful (apparently that requires usage of a topic() + // its own as long as the updates are successful (apparently that requires usage of a topic() // stream to set a "sleepMillis"?!) final String expr = "daemon(id=daemonId,runInterval=1000,terminate=true,update(" @@ -936,7 +936,7 @@ protected static long countDocsInCollection(final String collection, final Strin /** Slurps a stream into a List */ protected static List getTuples(final TupleStream tupleStream) throws IOException { - List tuples = new ArrayList(); + List tuples = new ArrayList<>(); try { log.trace("TupleStream: {}", tupleStream); tupleStream.open(); @@ -952,8 +952,8 @@ protected static List getTuples(final TupleStream tupleStream) throws IOE return tuples; } - /** Sigh. DaemonStream requires polling the same core where the stream was exectured. */ - protected static String getRandomCoreUrl(final String collection) throws Exception { + /** Sigh. DaemonStream requires polling the same core where the stream was executed. 
*/ + protected static String getRandomCoreUrl(final String collection) { final List replicaUrls = cluster .getZkStateReader() diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java index 635376be1df..3a2e028130d 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/JDBCStreamTest.java @@ -25,7 +25,6 @@ import java.sql.Statement; import java.util.ArrayList; import java.util.List; -import java.util.Locale; import java.util.Map; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.solr.SolrTestCaseJ4.SuppressPointFields; @@ -94,7 +93,7 @@ public static void setupCluster() throws Exception { public static void setupDatabase() throws Exception { // Initialize Database - // Ok, so.....hsqldb is doing something totally weird so I thought I'd take a moment to explain + // Ok, so.....hsqldb is doing something totally weird, so I thought I'd take a moment to explain // it. // According to http://www.hsqldb.org/doc/1.8/guide/guide.html#N101EF, section "Components of // SQL Expressions", clause "name", "When an SQL statement is issued, any lowercase characters @@ -279,7 +278,7 @@ public void testJDBCSolrMerge() throws Exception { // Load Solr new UpdateRequest() - .add(id, "0", "code_s", "GB", "name_s", "Great Britian") + .add(id, "0", "code_s", "GB", "name_s", "Great Britain") .add(id, "1", "code_s", "CA", "name_s", "Canada") .commit(cluster.getSolrClient(), COLLECTIONORALIAS); @@ -323,7 +322,7 @@ public void testJDBCSolrMerge() throws Exception { "name_s", "Algeria", "Canada", - "Great Britian", + "Great Britain", "Netherlands", "Norway", "Nepal", @@ -773,7 +772,7 @@ protected List getTuples(TupleStream tupleStream) throws IOException { return tuples; } - protected boolean assertOrderOf(List tuples, String fieldName, int... values) + protected void assertOrderOf(List tuples, String fieldName, int... values) throws Exception { int i = 0; for (int val : values) { @@ -784,11 +783,9 @@ protected boolean assertOrderOf(List tuples, String fieldName, int... val } ++i; } - return true; } - protected boolean assertOrderOf(List tuples, String fieldName, double... values) - throws Exception { + protected void assertOrderOf(List tuples, String fieldName, double... values) { int i = 0; for (double val : values) { Tuple t = tuples.get(i); @@ -796,10 +793,9 @@ protected boolean assertOrderOf(List tuples, String fieldName, double... assertEquals("Found value:" + tip + " expecting:" + val, val, tip, 0.00001); ++i; } - return true; } - protected boolean assertOrderOf(List tuples, String fieldName, String... values) + protected void assertOrderOf(List tuples, String fieldName, String... values) throws Exception { int i = 0; for (String val : values) { @@ -807,7 +803,7 @@ protected boolean assertOrderOf(List tuples, String fieldName, String... if (null == val) { if (null != t.get(fieldName)) { - throw new Exception("Found value:" + (String) t.get(fieldName) + " expecting:null"); + throw new Exception("Found value:" + t.get(fieldName) + " expecting:null"); } } else { String tip = (String) t.get(fieldName); @@ -817,29 +813,6 @@ protected boolean assertOrderOf(List tuples, String fieldName, String... } ++i; } - return true; - } - - protected boolean assertFields(List tuples, String... 
fields) throws Exception { - for (Tuple tuple : tuples) { - for (String field : fields) { - if (!tuple.getFields().containsKey(field)) { - throw new Exception(String.format(Locale.ROOT, "Expected field '%s' not found", field)); - } - } - } - return true; - } - - protected boolean assertNotFields(List tuples, String... fields) throws Exception { - for (Tuple tuple : tuples) { - for (String field : fields) { - if (tuple.getFields().containsKey(field)) { - throw new Exception(String.format(Locale.ROOT, "Unexpected field '%s' found", field)); - } - } - } - return true; } public boolean assertLong(Tuple tuple, String fieldName, long l) throws Exception { @@ -862,9 +835,7 @@ public boolean assertDouble(Tuple tuple, String fieldName, double d) throws Exce public boolean assertString(Tuple tuple, String fieldName, String expected) throws Exception { String actual = (String) tuple.get(fieldName); - if ((null == expected && null != actual) - || (null != expected && null == actual) - || (null != expected && !expected.equals(actual))) { + if ((null == expected && null != actual) || (null != expected && !expected.equals(actual))) { throw new Exception("Longs not equal:" + expected + " : " + actual); } diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java index c0255addf12..1d35aa2e353 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java @@ -1812,8 +1812,8 @@ public void testZplot() throws Exception { // there are times when tuples are discarded because // they contain values with NaN values. This will occur // only on the very end of the tails of the normal distribution or other - // real distributions and doesn't effect the visual quality of the curve very much. - // But it does effect the reliability of tests. + // real distributions and doesn't affect the visual quality of the curve very much. + // But it does affect the reliability of tests. // For this reason the loop below is in place to run the test N times looking // for the correct number of tuples before asserting the mean. 
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java index d7e90a09c42..347894bae42 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ParallelFacetStreamOverAliasTest.java @@ -100,7 +100,7 @@ public static void setupCluster() throws Exception { solrClientCache = new SolrClientCache(); } - /** setup the testbed with necessary collections, documents, and alias */ + /** set up the testbed with necessary collections, documents, and alias */ public static void setupCollectionsAndAlias() throws Exception { final NormalDistribution[] dists = new NormalDistribution[CARDINALITY]; @@ -299,7 +299,7 @@ public void testParallelStats() throws Exception { assertNull(statsStream.parallelizedStream); } - // execute the provided expression with tiered=true and compare to results of tiered=false + // execute the provided expression with tiered=true and compare to result of tiered=false private void compareTieredStreamWithNonTiered(String facetExprTmpl, int dims) throws IOException { String facetExpr = String.format(Locale.US, facetExprTmpl, ALIAS_NAME, "true"); diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java index c2639919cb0..ac4a4102e55 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/SelectWithEvaluatorsTest.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Locale; -import java.util.Map; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.solr.client.solrj.io.SolrClientCache; import org.apache.solr.client.solrj.io.Tuple; @@ -47,8 +46,6 @@ public class SelectWithEvaluatorsTest extends SolrCloudTestCase { private static final int TIMEOUT = DEFAULT_TIMEOUT; private static final String id = "id"; - private static boolean useAlias; - @BeforeClass public static void setupCluster() throws Exception { configureCluster(4) @@ -71,7 +68,7 @@ public static void setupCluster() throws Exception { .configure(); String collection; - useAlias = random().nextBoolean(); + boolean useAlias = random().nextBoolean(); if (useAlias) { collection = COLLECTIONORALIAS + "_collection"; } else { @@ -145,39 +142,7 @@ protected List getTuples(TupleStream tupleStream) throws IOException { return tuples; } - protected boolean assertOrder(List tuples, int... ids) throws Exception { - return assertOrderOf(tuples, "id", ids); - } - - protected boolean assertOrderOf(List tuples, String fieldName, int... ids) - throws Exception { - int i = 0; - for (int val : ids) { - Tuple t = tuples.get(i); - String tip = t.getString(fieldName); - if (!tip.equals(Integer.toString(val))) { - throw new Exception("Found value:" + tip + " expecting:" + val); - } - ++i; - } - return true; - } - - protected boolean assertMapOrder(List tuples, int... 
ids) throws Exception { - int i = 0; - for (int val : ids) { - Tuple t = tuples.get(i); - List> tip = t.getMaps("group"); - int id = (int) tip.get(0).get("id"); - if (id != val) { - throw new Exception("Found value:" + id + " expecting:" + val); - } - ++i; - } - return true; - } - - protected boolean assertFields(List tuples, String... fields) throws Exception { + protected void assertFields(List tuples, String... fields) throws Exception { for (Tuple tuple : tuples) { for (String field : fields) { if (!tuple.getFields().containsKey(field)) { @@ -185,10 +150,9 @@ protected boolean assertFields(List tuples, String... fields) throws Exce } } } - return true; } - protected boolean assertNotFields(List tuples, String... fields) throws Exception { + protected void assertNotFields(List tuples, String... fields) throws Exception { for (Tuple tuple : tuples) { for (String field : fields) { if (tuple.getFields().containsKey(field)) { @@ -196,21 +160,6 @@ protected boolean assertNotFields(List tuples, String... fields) throws E } } } - return true; - } - - protected boolean assertGroupOrder(Tuple tuple, int... ids) throws Exception { - List group = (List) tuple.get("tuples"); - int i = 0; - for (int val : ids) { - Map t = (Map) group.get(i); - Long tip = (Long) t.get("id"); - if (tip.intValue() != val) { - throw new Exception("Found value:" + tip.intValue() + " expecting:" + val); - } - ++i; - } - return true; } public boolean assertLong(Tuple tuple, String fieldName, long l) throws Exception { @@ -235,12 +184,9 @@ public boolean assertDouble(Tuple tuple, String fieldName, double expectedValue) public boolean assertString(Tuple tuple, String fieldName, String expected) throws Exception { String actual = (String) tuple.get(fieldName); - if ((null == expected && null != actual) - || (null != expected && null == actual) - || (null != expected && !expected.equals(actual))) { - throw new Exception("Longs not equal:" + expected + " : " + actual); + if ((null != expected || null == actual) && (null == expected || expected.equals(actual))) { + return true; } - - return true; + throw new Exception("Longs not equal:" + expected + " : " + actual); } } diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java index 453528d8338..2b19742ae6f 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java @@ -5152,12 +5152,11 @@ protected List getTuples(TupleStream tupleStream) throws IOException { return tuples; } - protected boolean assertOrder(List tuples, int... ids) throws Exception { - return assertOrderOf(tuples, "id", ids); + protected void assertOrder(List tuples, int... ids) throws Exception { + assertOrderOf(tuples, "id", ids); } - protected boolean assertOrderOf(List tuples, String fieldName, int... ids) - throws Exception { + protected void assertOrderOf(List tuples, String fieldName, int... ids) throws Exception { int i = 0; for (int val : ids) { Tuple t = tuples.get(i); @@ -5167,10 +5166,9 @@ protected boolean assertOrderOf(List tuples, String fieldName, int... ids } ++i; } - return true; } - protected boolean assertFields(List tuples, String... fields) throws Exception { + protected void assertFields(List tuples, String... 
fields) throws Exception { for (Tuple tuple : tuples) { for (String field : fields) { if (!tuple.getFields().containsKey(field)) { @@ -5178,10 +5176,9 @@ protected boolean assertFields(List tuples, String... fields) throws Exce } } } - return true; } - protected boolean assertNotFields(List tuples, String... fields) throws Exception { + protected void assertNotFields(List tuples, String... fields) throws Exception { for (Tuple tuple : tuples) { for (String field : fields) { if (tuple.getFields().containsKey(field)) { @@ -5189,7 +5186,6 @@ protected boolean assertNotFields(List tuples, String... fields) throws E } } } - return true; } protected boolean assertGroupOrder(Tuple tuple, int... ids) throws Exception { diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExecutorHelperTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExecutorHelperTest.java index ae80cea1060..287aa851734 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExecutorHelperTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExecutorHelperTest.java @@ -36,7 +36,7 @@ public void submitAllTest() throws IOException { List results = new ArrayList<>(); results.addAll(StreamExecutorHelper.submitAllAndAwaitAggregatingExceptions(tasks, "test")); Collections.sort(results); - List expected = List.of(0l, 1l, 2l, 3l, 4l); + List expected = List.of(0L, 1L, 2L, 3L, 4L); assertEquals(expected, results); } diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java index 050b5726ff2..1541312d02e 100644 --- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java +++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java @@ -73,7 +73,6 @@ public class StreamExpressionTest extends SolrCloudTestCase { private static final String COLLECTIONORALIAS = "collection1"; private static final String FILESTREAM_COLLECTION = "filestream_collection"; - private static final int TIMEOUT = DEFAULT_TIMEOUT; private static final String id = "id"; private static boolean useAlias; @@ -256,7 +255,7 @@ public void testCloudSolrStream() throws Exception { assertOrder(tuples, 0, 2, 1, 3, 4); assertLong(tuples.get(0), "a_i", 0); - // Execersise the /stream hander + // Exercise the /stream handler // Add the shards http parameter for the myCollection StringBuilder buf = new StringBuilder(); @@ -1040,7 +1039,7 @@ public void testStatsStream() throws Exception { assertEquals(6.0D, perf, 0.0); assertEquals(10, count, 0.0); - // Execersise the /stream hander + // Exercise the /stream handler // Add the shards http parameter for the myCollection StringBuilder buf = new StringBuilder(); @@ -1112,8 +1111,7 @@ public void testFacet2DStream() throws Exception { .add(id, "9", "diseases_s", "diabetes", "symptoms_s", "thirsty", "cases_i", "20") .add(id, "10", "diseases_s", "diabetes", "symptoms_s", "thirsty", "cases_i", "20") .commit(cluster.getSolrClient(), COLLECTIONORALIAS); - StreamExpression expression; - TupleStream stream; + List tuples; ModifiableSolrParams paramsLoc = new ModifiableSolrParams(); @@ -3667,15 +3665,6 @@ public void testSearchBacktick() throws Exception { assertEquals("l b c d color`s e", tuple2.get("test_t")); } - private Map getIdToLabel(TupleStream stream, String outField) throws 
IOException { - Map idToLabel = new HashMap<>(); - List tuples = getTuples(stream); - for (Tuple tuple : tuples) { - idToLabel.put(tuple.getString("id"), tuple.getDouble(outField)); - } - return idToLabel; - } - @Test public void testBasicTextLogitStream() throws Exception { Assume.assumeTrue(!useAlias); @@ -4047,7 +4036,7 @@ public void testSignificantTermsStream() throws Exception { assertEquals(5600, tuples.get(1).getLong("background").longValue()); assertEquals(5000, tuples.get(1).getLong("foreground").longValue()); - // Execersise the /stream hander + // Exercise the /stream handler // Add the shards http parameter for the myCollection StringBuilder buf = new StringBuilder(); @@ -4108,7 +4097,7 @@ public void tooLargeForGetRequest() throws IOException, SolrServerException { StreamContext streamContext = new StreamContext(); streamContext.setSolrClientCache(cache); // use filter() to allow being parsed as 'terms in set' query instead of a (weighted/scored) - // BooleanQuery so we don't trip too many boolean clauses + // BooleanQuery, so we don't trip too many boolean clauses String longQuery = "\"filter(id:(" + IntStream.range(0, 4000).mapToObj(i -> "a").collect(Collectors.joining(" ", "", "")) @@ -4374,7 +4363,7 @@ private static Path findUserFilesDataDir() { /** * Creates a tree of files underneath a provided data-directory. * - *

The filetree created looks like: + *

The file tree created looks like: * *

    * dataDir
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpressionTest.java
similarity index 98%
rename from solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
rename to solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpressionTest.java
index 2c941f142d7..37429f7a939 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpessionTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionToExpressionTest.java
@@ -30,11 +30,11 @@
 import org.junit.Test;
 
 /** */
-public class StreamExpressionToExpessionTest extends SolrTestCase {
+public class StreamExpressionToExpressionTest extends SolrTestCase {
 
-  private StreamFactory factory;
+  private final StreamFactory factory;
 
-  public StreamExpressionToExpessionTest() {
+  public StreamExpressionToExpressionTest() {
     super();
 
     factory =
@@ -77,7 +77,6 @@ public void testCloudSolrStream() throws Exception {
                 "search(collection1, q=*:*, fl=\"id,a_s,a_i,a_f\", sort=\"a_f asc, a_i asc\", fq=\"a_s:one\", fq=\"a_s:two\")"),
             factory)) {
       expressionString = stream.toExpression(factory).toString();
-      System.out.println("ExpressionString: " + expressionString.toString());
       assertTrue(expressionString.contains("search(collection1,"));
       assertTrue(expressionString.contains("q=\"*:*\""));
       assertTrue(expressionString.contains("fl=\"id,a_s,a_i,a_f\""));
@@ -550,7 +549,7 @@ public void testCloudSolrStreamWithEscapedQuote() throws Exception {
 
     // The purpose of this test is to ensure that a parameter with a contained " character is
     // properly escaped when it is turned back into an expression. This is important when an
-    // expression is passedto a worker (parallel stream) or even for other reasons when an
+    // expression is passed to a worker (parallel stream) or even for other reasons when an
     // expression is string-ified.
 
     // Basic test
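As an aside on the escaping concern described in the comment above, here is a minimal illustrative sketch (not part of the patch): a streaming expression whose q parameter contains embedded, backslash-escaped double quotes is parsed and then string-ified again. The expression text and collection name are made up, and it assumes StreamExpression's toString() re-serializes the parsed expression, which is how the tests above compare expression strings.

import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionParser;

public class EscapedQuoteSketch {
  public static void main(String[] args) {
    // The q parameter itself contains double quotes; inside the expression they are
    // backslash-escaped so the outer quoting of the parameter stays intact.
    String expr = "search(collection1, q=\"body:\\\"hello world\\\"\", fl=\"id\", sort=\"id asc\")";
    StreamExpression parsed = StreamExpressionParser.parse(expr);
    // When the expression is turned back into a string (for example to ship it to a
    // parallel worker), the embedded quotes must still be escaped or it cannot be re-parsed.
    System.out.println(parsed);
  }
}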
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
index d8770711876..5eadc7ed727 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/StreamingTest.java
@@ -89,13 +89,12 @@ public class StreamingTest extends SolrCloudTestCase {
 
   private static String zkHost;
 
-  private static int numShards;
   private static int numWorkers;
   private static boolean useAlias;
 
   @BeforeClass
   public static void configureCluster() throws Exception {
-    numShards = random().nextInt(2) + 1; // 1 - 3
+    int numShards = random().nextInt(2) + 1; // 1 - 3
     numWorkers = numShards > 2 ? random().nextInt(numShards - 1) + 1 : numShards;
     configureCluster(numShards)
         .addConfig(
@@ -1349,7 +1348,7 @@ private void checkReturnValsForEmpty(String[] fields) throws IOException {
     }
   }
 
-  // Goes away after after LUCENE-7548
+  // Goes away after LUCENE-7548
   static final String[] ascOrder =
       new String[] {
         "aaa1", "aaa2", "aaa3", "eee1",
@@ -1362,7 +1361,7 @@ private void checkReturnValsForEmpty(String[] fields) throws IOException {
         "aaa8", "eee8", "iii8", "ooo8"
       };
 
-  // Goes away after after LUCENE-7548
+  // Goes away after LUCENE-7548
   static final String[] descOrder =
       new String[] {
         "aaa8", "eee8", "iii8", "ooo8",
@@ -1375,7 +1374,7 @@ private void checkReturnValsForEmpty(String[] fields) throws IOException {
         "iii3", "ooo1", "ooo2", "ooo3"
       };
 
-  // Goes away after after LUCENE-7548
+  // Goes away after LUCENE-7548
   static final String[] ascOrderBool =
       new String[] {
         "aaa1", "aaa2", "aaa3", "eee1",
@@ -1388,7 +1387,7 @@ private void checkReturnValsForEmpty(String[] fields) throws IOException {
         "iii8", "ooo4", "ooo6", "ooo8"
       };
 
-  // Goes away after after LUCENE-7548
+  // Goes away after LUCENE-7548
   static final String[] descOrderBool =
       new String[] {
         "aaa4", "aaa6", "aaa8", "eee4",
@@ -1419,7 +1418,7 @@ public void testMissingFields() throws Exception {
         .add(id, "aaa3")
         .add(id, "ooo3")
 
-        // Docs with values in for all of the types we want to sort on.
+        // Docs with values for all the types we want to sort on.
 
         .add(docPairs(4, "iii"))
         .add(docPairs(4, "eee"))
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java
index b6e9fe36354..0b5053b7a20 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/CoalesceEvaluatorTest.java
@@ -1,7 +1,7 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
- * this work for multitional information regarding copyright ownership.
+ * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java
index 60f42b7d155..614cab5ad73 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ConversionEvaluatorsTest.java
@@ -47,7 +47,7 @@ public ConversionEvaluatorsTest() {
   }
 
   @Test
-  public void testInvalidExpression() throws Exception {
+  public void testInvalidExpression() {
 
     StreamEvaluator evaluator;
 
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
index 935175f144a..73b521cf413 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/DivideEvaluatorTest.java
@@ -1,7 +1,7 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
- * this work for multitional information regarding copyright ownership.
+ * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldValueEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldValueEvaluatorTest.java
index 413049268d7..e2b3f2e11d2 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldValueEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/FieldValueEvaluatorTest.java
@@ -36,7 +36,6 @@ public FieldValueEvaluatorTest() {
     values = new HashMap<>();
   }
 
-  @SuppressWarnings("serial")
   @Test
   public void listTypes() throws Exception {
     values.clear();
@@ -87,7 +86,6 @@ public void arrayTypes() throws Exception {
         "second", ((Collection) new FieldValueEvaluator("e").evaluate(tuple)).toArray()[1]);
   }
 
-  @SuppressWarnings("serial")
   @Test
   public void iterableTypes() throws Exception {
     values.clear();
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java
index d61be7821a8..7a9319b5634 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/ModuloEvaluatorTest.java
@@ -1,7 +1,7 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
- * this work for multitional information regarding copyright ownership.
+ * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
index b363ab4cc0c..a2f368d6d81 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/MultiplyEvaluatorTest.java
@@ -1,7 +1,7 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
- * this work for multitional information regarding copyright ownership.
+ * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/PowerEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/PowerEvaluatorTest.java
index 817b04c86f9..287bc939e50 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/PowerEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/PowerEvaluatorTest.java
@@ -1,7 +1,7 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
- * this work for multitional information regarding copyright ownership.
+ * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
index 0ca2eeaf3cb..91063467ee1 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/SubtractEvaluatorTest.java
@@ -1,7 +1,7 @@
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
  * contributor license agreements.  See the NOTICE file distributed with
- * this work for subitional information regarding copyright ownership.
+ * this work for additional information regarding copyright ownership.
  * The ASF licenses this file to You under the Apache License, Version 2.0
  * (the "License"); you may not use this file except in compliance with
  * the License.  You may obtain a copy of the License at
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java
index ade96428b16..2701f55cd66 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/eval/TemporalEvaluatorsTest.java
@@ -81,7 +81,7 @@ public TemporalEvaluatorsTest() {
   }
 
   @Test
-  public void testInvalidExpression() throws Exception {
+  public void testInvalidExpression() {
 
     StreamEvaluator evaluator;
 
@@ -175,7 +175,7 @@ public void testAllFunctions() throws Exception {
     testFunction("quarter(a)", "1995-12-31T23:59:59Z", 4L);
     testFunction("week(a)", "1995-12-31T23:59:59Z", 52L);
     testFunction("second(a)", "1995-12-31T23:59:58Z", 58L);
-    testFunction("epoch(a)", "1995-12-31T23:59:59Z", 820454399000l);
+    testFunction("epoch(a)", "1995-12-31T23:59:59Z", 820454399000L);
 
     testFunction("year(a)", "2017-03-17T10:30:45Z", 2017L);
     testFunction("year('a')", "2017-03-17T10:30:45Z", 2017L);
@@ -189,10 +189,10 @@ public void testAllFunctions() throws Exception {
     testFunction("quarter(a)", "2017-03-17T10:30:45Z", 1L);
     testFunction("week(a)", "2017-03-17T10:30:45Z", 11L);
     testFunction("second(a)", "2017-03-17T10:30:45Z", 45L);
-    testFunction("epoch(a)", "2017-03-17T10:30:45Z", 1489746645000l);
+    testFunction("epoch(a)", "2017-03-17T10:30:45Z", 1489746645000L);
 
-    testFunction("epoch(a)", new Date(1489746645500l).toInstant().toString(), 1489746645500l);
-    testFunction("epoch(a)", new Date(820454399990l).toInstant().toString(), 820454399990l);
+    testFunction("epoch(a)", new Date(1489746645500L).toInstant().toString(), 1489746645500L);
+    testFunction("epoch(a)", new Date(820454399990L).toInstant().toString(), 820454399990L);
   }
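The epoch millisecond values asserted above can be double-checked with java.time; a small standalone sketch (illustrative only, not part of the patch):

import java.time.Instant;

public class EpochSanityCheck {
  public static void main(String[] args) {
    // Matches the expectations used in testAllFunctions():
    System.out.println(Instant.parse("1995-12-31T23:59:59Z").toEpochMilli()); // 820454399000
    System.out.println(Instant.parse("2017-03-17T10:30:45Z").toEpochMilli()); // 1489746645000
  }
}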
 
   @Test
@@ -238,7 +238,7 @@ public void testFunctionsLocalDateTime() throws Exception {
   @Test
   public void testFunctionsOnLong() throws Exception {
 
-    Long longDate = 1512518340000l;
+    Long longDate = 1512518340000L;
 
     testFunction("year(a)", longDate, 2017);
     testFunction("month(a)", longDate, 12);
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParserTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParserTest.java
index 915a64f030e..36f6bc348bd 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParserTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/expr/StreamExpressionParserTest.java
@@ -27,7 +27,7 @@ public StreamExpressionParserTest() {
   }
 
   @Test
-  public void testParsing() throws Exception {
+  public void testParsing() {
     StreamExpression actual, expected;
 
     actual = StreamExpressionParser.parse("aliases(a_i=alias.a_i)");
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java
index 029176c75fb..3a1e0326c6f 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/ConcatOperationTest.java
@@ -40,7 +40,7 @@ public ConcatOperationTest() {
   }
 
   @Test
-  public void concatSingleField() throws Exception {
+  public void concatSingleField() {
     Tuple tuple;
     StreamOperation operation;
 
@@ -59,7 +59,7 @@ public void concatSingleField() throws Exception {
   }
 
   @Test
-  public void concatMultipleFields() throws Exception {
+  public void concatMultipleFields() {
     Tuple tuple;
     StreamOperation operation;
 
@@ -79,7 +79,7 @@ public void concatMultipleFields() throws Exception {
     assertNotNull(tuple.get("fieldABConcat"));
     assertEquals("bar-baz", tuple.get("fieldABConcat"));
 
-    // do the same in oposite order
+    // do the same in reverse order
     operation = new ConcatOperation(new String[] {"fieldB", "fieldA"}, "fieldABConcat", "-");
     tuple = new Tuple(values);
     operation.operate(tuple);
@@ -95,7 +95,7 @@ public void concatMultipleFields() throws Exception {
   }
 
   @Test
-  public void concatMultipleFieldsWithIgnoredFields() throws Exception {
+  public void concatMultipleFieldsWithIgnoredFields() {
     Tuple tuple;
     StreamOperation operation;
 
@@ -123,7 +123,7 @@ public void concatMultipleFieldsWithIgnoredFields() throws Exception {
     assertNotNull(tuple.get("fieldABConcat"));
     assertEquals("bar-baz", tuple.get("fieldABConcat"));
 
-    // do the same in oposite order
+    // do the same in opposite order
     operation = new ConcatOperation(new String[] {"fieldB", "fieldA"}, "fieldABConcat", "-");
     tuple = new Tuple(values);
     operation.operate(tuple);
@@ -139,7 +139,7 @@ public void concatMultipleFieldsWithIgnoredFields() throws Exception {
   }
 
   @Test
-  public void concatWithNullValues() throws Exception {
+  public void concatWithNullValues() {
     Tuple tuple;
     StreamOperation operation;
 
@@ -207,7 +207,7 @@ public void concatMultipleFieldsExpression() throws Exception {
     assertNotNull(tuple.get("fieldABConcat"));
     assertEquals("bar-baz", tuple.get("fieldABConcat"));
 
-    // do the same in oposite order
+    // do the same in opposite order
     operation =
         new ConcatOperation(
             StreamExpressionParser.parse(
@@ -259,7 +259,7 @@ public void concatMultipleFieldsWithIgnoredFieldsExpression() throws Exception {
     assertNotNull(tuple.get("fieldABConcat"));
     assertEquals("bar-baz", tuple.get("fieldABConcat"));
 
-    // do the same in oposite order
+    // do the same in opposite order
     operation =
         new ConcatOperation(
             StreamExpressionParser.parse(
diff --git a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java
index fc854cf8baa..4ec6b65ddf4 100644
--- a/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java
+++ b/solr/solrj-streaming/src/test/org/apache/solr/client/solrj/io/stream/ops/OperationsTest.java
@@ -228,7 +228,7 @@ public void replaceFieldNullWithInt() throws Exception {
   }
 
   @Test
-  public void replaceFieldNullWithNonExistantField() throws Exception {
+  public void replaceFieldNullWithNonExistentField() throws Exception {
     Tuple tuple;
     StreamOperation operation;
 
diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java
new file mode 100644
index 00000000000..93ea4d9cfe3
--- /dev/null
+++ b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/CollectionPropertiesZkStateReader.java
@@ -0,0 +1,411 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.common.cloud;
+
+import static java.util.Collections.emptyMap;
+
+import java.io.Closeable;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ZkStateReader.CollectionWatch;
+import org.apache.solr.common.util.ExecutorUtil;
+import org.apache.solr.common.util.SolrNamedThreadFactory;
+import org.apache.solr.common.util.Utils;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.WatchedEvent;
+import org.apache.zookeeper.Watcher;
+import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/** Fetches and manages collection properties from a ZooKeeper ensemble */
+public class CollectionPropertiesZkStateReader implements Closeable {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private volatile boolean closed = false;
+
+  private final SolrZkClient zkClient;
+
+  /** Collection properties being actively watched */
+  private final ConcurrentHashMap<String, VersionedCollectionProps> watchedCollectionProps =
+      new ConcurrentHashMap<>();
+
+  /**
+   * Manages ZooKeeper watchers for each collection. These watchers monitor changes to the
+   * properties of the collection in ZooKeeper. When a change is detected in ZooKeeper, the watcher
+   * triggers an update, which then notifies the relevant "collectionPropsObserver".
+   */
+  private final ConcurrentHashMap<String, PropsWatcher> collectionPropsWatchers =
+      new ConcurrentHashMap<>();
+
+  /**
+   * Manages a list of observers (listeners) for each collection. These observers need to be
+   * notified when the properties of the collection change. When a collection's properties change,
+   * all registered observers for that collection are notified by a "collectionPropWatcher".
+   */
+  private ConcurrentHashMap<String, CollectionWatch<CollectionPropsWatcher>>
+      collectionPropsObservers = new ConcurrentHashMap<>();
+
+  /** Used to submit notifications to Collection Properties watchers in order */
+  private final ExecutorService collectionPropsNotifications =
+      ExecutorUtil.newMDCAwareSingleThreadExecutor(
+          new SolrNamedThreadFactory("collectionPropsNotifications"));
+
+  private volatile ScheduledThreadPoolExecutor cacheCleanerExecutor = null;
+
+  private final ConcurrentHashMap<String, Object> collectionLocks = new ConcurrentHashMap<>();
+
+  public CollectionPropertiesZkStateReader(ZkStateReader zkStateReader) {
+    this.zkClient = zkStateReader.getZkClient();
+  }
+
+  /**
+   * Get and cache collection properties for a given collection. If the collection is watched, or
+   * still cached simply return it from the cache, otherwise fetch it directly from zookeeper and
+   * retain the value for at least cacheForMillis milliseconds. Cached properties are watched in
+   * zookeeper and updated automatically. This version of {@code getCollectionProperties} should be
+   * used when properties need to be consulted frequently in the absence of an active {@link
+   * CollectionPropsWatcher}.
+   *
+   * @param collection The collection for which properties are desired
+   * @param cacheForMillis The minimum number of milliseconds to maintain a cache for the specified
+   *     collection's properties. Setting a {@code CollectionPropsWatcher} will override this value
+   *     and retain the cache for the life of the watcher. A lack of changes in zookeeper may allow
+   *     the caching to remain for a greater duration up to the cycle time of {@code CacheCleaner}.
+   *     Passing zero for this value will explicitly remove the cached copy if and only if it is due
+   *     to expire and no watch exists. Any positive value will extend the expiration time if
+   *     required.
+   * @return a map representing the key/value properties for the collection.
+   */
+  public Map<String, String> getCollectionProperties(final String collection, long cacheForMillis) {
+    Watcher watcher = null; // synchronized on the specific collection
+    if (cacheForMillis > 0) {
+      watcher =
+          collectionPropsWatchers.compute(
+              collection,
+              (c, w) -> w == null ? new PropsWatcher(c, cacheForMillis) : w.renew(cacheForMillis));
+    }
+    VersionedCollectionProps vprops = watchedCollectionProps.get(collection);
+    boolean haveUnexpiredProps = vprops != null && vprops.cacheUntilNs > System.nanoTime();
+    long untilNs =
+        System.nanoTime() + TimeUnit.NANOSECONDS.convert(cacheForMillis, TimeUnit.MILLISECONDS);
+    if (haveUnexpiredProps) {
+      vprops.cacheUntilNs = Math.max(vprops.cacheUntilNs, untilNs);
+      return vprops.props;
+    }
+    // Synchronize only when properties are expired or not present
+    synchronized (getCollectionLock(collection)) {
+      // Re-check inside the synchronized block to avoid race conditions
+      vprops = watchedCollectionProps.get(collection);
+      haveUnexpiredProps = vprops != null && vprops.cacheUntilNs > System.nanoTime();
+      if (haveUnexpiredProps) {
+        vprops.cacheUntilNs = Math.max(vprops.cacheUntilNs, untilNs);
+        return vprops.props;
+      }
+      try {
+        VersionedCollectionProps vcp = fetchCollectionProperties(collection, watcher);
+        Map<String, String> properties = vcp.props;
+        if (cacheForMillis > 0) {
+          vcp.cacheUntilNs = untilNs;
+          watchedCollectionProps.put(collection, vcp);
+        } else {
+          // we're synchronized on watchedCollectionProps and we can only get here if we have
+          // found an expired vprops above, so it is safe to remove the cached value and let the
+          // GC free up some mem a bit sooner.
+          if (!collectionPropsObservers.containsKey(collection)) {
+            watchedCollectionProps.remove(collection);
+          }
+        }
+        return properties;
+      } catch (Exception e) {
+        throw new SolrException(
+            SolrException.ErrorCode.SERVER_ERROR,
+            "Error reading collection properties",
+            SolrZkClient.checkInterrupted(e));
+      }
+    }
+  }
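A minimal usage sketch of the caching behaviour documented above (illustrative only, not part of the patch; the helper method, collection name, key and 10-second TTL are assumptions, while getCollectionProperties is the API added here):

import java.util.Map;
import org.apache.solr.common.cloud.CollectionPropertiesZkStateReader;

class CollectionPropsLookup {
  // The first call fetches from ZooKeeper and caches the result for at least ~10s;
  // repeated calls within that window are served from the cache, and the ZK watch
  // installed by the reader keeps the cached copy up to date in the meantime.
  static String readProp(CollectionPropertiesZkStateReader reader, String collection, String key) {
    Map<String, String> props = reader.getCollectionProperties(collection, 10_000);
    return props.getOrDefault(key, "");
  }
}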
+
+  @Override
+  public void close() {
+    this.closed = true;
+    ExecutorUtil.shutdownAndAwaitTermination(cacheCleanerExecutor);
+    ExecutorUtil.shutdownAndAwaitTermination(collectionPropsNotifications);
+  }
+
+  private static class VersionedCollectionProps {
+    int zkVersion;
+    Map<String, String> props;
+    long cacheUntilNs = 0;
+
+    VersionedCollectionProps(int zkVersion, Map<String, String> props) {
+      this.zkVersion = zkVersion;
+      this.props = props;
+    }
+  }
+
+  /** Watches collection properties */
+  class PropsWatcher implements Watcher {
+    private final String coll;
+    private long watchUntilNs;
+
+    PropsWatcher(String coll) {
+      this.coll = coll;
+      watchUntilNs = 0;
+    }
+
+    PropsWatcher(String coll, long forMillis) {
+      this.coll = coll;
+      watchUntilNs =
+          System.nanoTime() + TimeUnit.NANOSECONDS.convert(forMillis, TimeUnit.MILLISECONDS);
+    }
+
+    public PropsWatcher renew(long forMillis) {
+      watchUntilNs =
+          System.nanoTime() + TimeUnit.NANOSECONDS.convert(forMillis, TimeUnit.MILLISECONDS);
+      return this;
+    }
+
+    @Override
+    public void process(WatchedEvent event) {
+      // session events are not change events, and do not remove the watcher
+      if (Event.EventType.None.equals(event.getType())) {
+        return;
+      }
+
+      boolean expired = System.nanoTime() > watchUntilNs;
+      if (!collectionPropsObservers.containsKey(coll) && expired) {
+        // No one can be notified of the change, we can ignore it and "unset" the watch
+        log.debug("Ignoring property change for collection {}", coll);
+        return;
+      }
+
+      log.info(
+          "A collection property change: [{}] for collection [{}] has occurred - updating...",
+          event,
+          coll);
+
+      refreshAndWatch(true);
+    }
+
+    /**
+     * Refresh collection properties from ZK and leave a watch for future changes. Updates the
+     * properties in watchedCollectionProps with the results of the refresh. Optionally notifies
+     * watchers
+     */
+    void refreshAndWatch(boolean notifyWatchers) {
+      try {
+        synchronized (getCollectionLock(coll)) {
+          // making decisions based on the result of a get...
+          VersionedCollectionProps vcp = fetchCollectionProperties(coll, this);
+          Map<String, String> properties = vcp.props;
+          VersionedCollectionProps existingVcp = watchedCollectionProps.get(coll);
+          if (existingVcp == null
+              || // never called before, record what we found
+              vcp.zkVersion > existingVcp.zkVersion
+              || // newer info we should update
+              vcp.zkVersion == -1) { // node was deleted start over
+            watchedCollectionProps.put(coll, vcp);
+            if (notifyWatchers) {
+              notifyPropsWatchers(coll, properties);
+            }
+            if (vcp.zkVersion == -1 && existingVcp != null) { // Collection DELETE detected
+
+              // We should not be caching a collection that has been deleted.
+              watchedCollectionProps.remove(coll);
+
+              // core ref counting not relevant here, don't need canRemove(), we just sent
+              // a notification of an empty set of properties, no reason to watch what doesn't
+              // exist.
+              collectionPropsObservers.remove(coll);
+
+              // This is the one time we know it's safe to throw this out. We just failed to set the
+              // watch due to an NoNodeException, so it isn't held by ZK and can't re-set itself due
+              // to an update.
+              collectionPropsWatchers.remove(coll);
+            }
+          }
+        }
+      } catch (KeeperException.SessionExpiredException
+          | KeeperException.ConnectionLossException e) {
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e);
+      } catch (KeeperException e) {
+        log.error("Lost collection property watcher for {} due to ZK error", coll, e);
+        throw new ZooKeeperException(
+            SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
+      } catch (InterruptedException e) {
+        Thread.currentThread().interrupt();
+        log.error(
+            "Lost collection property watcher for {} due to the thread being interrupted", coll, e);
+      }
+    }
+  }
+
+  private Object getCollectionLock(String collection) {
+    return collectionLocks.computeIfAbsent(collection, k -> new Object());
+  }
+
+  public void registerCollectionPropsWatcher(
+      final String collection, CollectionPropsWatcher propsWatcher) {
+    AtomicBoolean watchSet = new AtomicBoolean(false);
+    collectionPropsObservers.compute(
+        collection,
+        (k, v) -> {
+          if (v == null) {
+            v = new CollectionWatch<>();
+            watchSet.set(true);
+          }
+          v.stateWatchers.add(propsWatcher);
+          return v;
+        });
+
+    if (watchSet.get()) {
+      collectionPropsWatchers.computeIfAbsent(collection, PropsWatcher::new).refreshAndWatch(false);
+    }
+  }
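For comparison with the TTL-based lookup above, a sketch of the observer path (illustrative only; the reader and collection name are placeholders). The lambda shape follows CollectionPropsWatcher#onStateChanged as used by PropsNotification below, where returning true unregisters the watcher:

import org.apache.solr.common.cloud.CollectionPropertiesZkStateReader;

class PropsWatchExample {
  static void watch(CollectionPropertiesZkStateReader reader, String collection) {
    reader.registerCollectionPropsWatcher(
        collection,
        props -> {
          System.out.println("props for " + collection + " changed: " + props);
          return false; // false = keep watching; returning true would remove this watcher
        });
  }
}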
+
+  protected void refreshCollectionProperties() {
+    collectionPropsObservers.forEach(
+        (k, v) -> {
+          collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
+        });
+  }
+
+  public static String getCollectionPropsPath(final String collection) {
+    return ZkStateReader.COLLECTIONS_ZKNODE
+        + '/'
+        + collection
+        + '/'
+        + ZkStateReader.COLLECTION_PROPS_ZKNODE;
+  }
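For reference, and assuming the usual values of ZkStateReader.COLLECTIONS_ZKNODE ("/collections") and ZkStateReader.COLLECTION_PROPS_ZKNODE ("collectionprops.json"), the path built above resolves as in this small sketch (illustrative only; the collection name is made up):

import org.apache.solr.common.cloud.CollectionPropertiesZkStateReader;

class PropsPathExample {
  public static void main(String[] args) {
    // Expected output: /collections/gettingstarted/collectionprops.json
    // That znode holds a flat JSON object of string properties, parsed with Utils.fromJSON below.
    System.out.println(
        CollectionPropertiesZkStateReader.getCollectionPropsPath("gettingstarted"));
  }
}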
+
+  private VersionedCollectionProps fetchCollectionProperties(String collection, Watcher watcher)
+      throws KeeperException, InterruptedException {
+    final String znodePath = getCollectionPropsPath(collection);
+    // lazy init cache cleaner once we know someone is using collection properties.
+    if (cacheCleanerExecutor == null) {
+      synchronized (this) {
+        if (cacheCleanerExecutor == null) {
+          cacheCleanerExecutor = new ScheduledThreadPoolExecutor(1);
+          cacheCleanerExecutor.scheduleAtFixedRate(new CacheCleaner(), 0, 1, TimeUnit.MINUTES);
+        }
+      }
+    }
+    while (true) {
+      try {
+        Stat stat = new Stat();
+        byte[] data = zkClient.getData(znodePath, watcher, stat, true);
+        @SuppressWarnings("unchecked")
+        Map<String, String> props = (Map<String, String>) Utils.fromJSON(data);
+        return new VersionedCollectionProps(stat.getVersion(), props);
+      } catch (ClassCastException e) {
+        throw new SolrException(
+            SolrException.ErrorCode.SERVER_ERROR,
+            "Unable to parse collection properties for collection " + collection,
+            e);
+      } catch (KeeperException.NoNodeException e) {
+        if (watcher != null) {
+          // Leave an exists watch in place in case a collectionprops.json is created later.
+          Stat exists = zkClient.exists(znodePath, watcher, true);
+          if (exists != null) {
+            // Rare race condition, we tried to fetch the data and couldn't find it, then we found
+            // it exists. Loop and try again.
+            continue;
+          }
+        }
+        return new VersionedCollectionProps(-1, emptyMap());
+      }
+    }
+  }
+
+  private void notifyPropsWatchers(String collection, Map<String, String> properties) {
+    try {
+      collectionPropsNotifications.submit(new PropsNotification(collection, properties));
+    } catch (RejectedExecutionException e) {
+      if (!closed) {
+        log.error("Couldn't run collection properties notifications for {}", collection, e);
+      }
+    }
+  }
+
+  private class PropsNotification implements Runnable {
+
+    private final String collection;
+    private final Map<String, String> collectionProperties;
+    private final List<CollectionPropsWatcher> watchers = new ArrayList<>();
+
+    private PropsNotification(String collection, Map<String, String> collectionProperties) {
+      this.collection = collection;
+      this.collectionProperties = collectionProperties;
+      // guarantee delivery of notification regardless of what happens to collectionPropsObservers
+      // while we wait our turn in the executor by capturing the list on creation.
+      collectionPropsObservers.compute(
+          collection,
+          (k, v) -> {
+            if (v == null) return null;
+            watchers.addAll(v.stateWatchers);
+            return v;
+          });
+    }
+
+    @Override
+    public void run() {
+      for (CollectionPropsWatcher watcher : watchers) {
+        if (watcher.onStateChanged(collectionProperties)) {
+          removeCollectionPropsWatcher(collection, watcher);
+        }
+      }
+    }
+  }
+
+  private class CacheCleaner implements Runnable {
+    @Override
+    public void run() {
+      watchedCollectionProps
+          .entrySet()
+          .removeIf(
+              entry ->
+                  entry.getValue().cacheUntilNs < System.nanoTime()
+                      && !collectionPropsObservers.containsKey(entry.getKey()));
+    }
+  }
+
+  public void removeCollectionPropsWatcher(String collection, CollectionPropsWatcher watcher) {
+    collectionPropsObservers.compute(
+        collection,
+        (k, v) -> {
+          if (v == null) return null;
+          v.stateWatchers.remove(watcher);
+          if (v.canBeRemoved()) {
+            // don't want this to happen in middle of other blocks that might add it back.
+            synchronized (getCollectionLock(collection)) {
+              watchedCollectionProps.remove(collection);
+              return null;
+            }
+          }
+          return v;
+        });
+  }
+}
diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
index 7acb40eb3b5..a3fe06f4a11 100644
--- a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
+++ b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkMaintenanceUtils.java
@@ -137,7 +137,7 @@ public static void zkTransfer(
       throw new SolrServerException("One or both of source or destination must specify ZK nodes.");
     }
 
-    // Make sure -recurse is specified if the source has children.
+    // Make sure --recurse is specified if the source has children.
     if (recurse == false) {
       if (srcIsZk) {
         if (zkClient.getChildren(src, null, true).size() != 0) {
diff --git a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index b9882ddcc11..c2f76d62de3 100644
--- a/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj-zookeeper/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -37,7 +37,6 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
 import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
@@ -58,7 +57,6 @@
 import org.apache.solr.common.util.CommonTestInjection;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.ObjectReleaseTracker;
-import org.apache.solr.common.util.SolrNamedThreadFactory;
 import org.apache.solr.common.util.Utils;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.KeeperException.NoNodeException;
@@ -156,6 +154,7 @@ public class ZkStateReader implements SolrCloseable {
   public static final String CONTAINER_PLUGINS = "plugin";
 
   public static final String PLACEMENT_PLUGIN = "placement-plugin";
+  private final CollectionPropertiesZkStateReader collectionPropertiesZkStateReader;
 
   /** A view of the current state of all collections. */
   protected volatile ClusterState clusterState;
@@ -173,14 +172,6 @@ public class ZkStateReader implements SolrCloseable {
   private final ConcurrentHashMap lazyCollectionStates =
       new ConcurrentHashMap<>();
 
-  /** Collection properties being actively watched */
-  private final ConcurrentHashMap watchedCollectionProps =
-      new ConcurrentHashMap<>();
-
-  /** Watchers of Collection properties */
-  private final ConcurrentHashMap collectionPropsWatchers =
-      new ConcurrentHashMap<>();
-
   private volatile SortedSet liveNodes = emptySortedSet();
 
   private volatile Map clusterProperties = Collections.emptyMap();
@@ -191,11 +182,6 @@ public class ZkStateReader implements SolrCloseable {
    */
   private DocCollectionWatches collectionWatches = new DocCollectionWatches();
 
-  // named this observers so there's less confusion between CollectionPropsWatcher map and the
-  // PropsWatcher map.
-  private ConcurrentHashMap>
-      collectionPropsObservers = new ConcurrentHashMap<>();
-
   private Set cloudCollectionsListeners = ConcurrentHashMap.newKeySet();
 
   private final ExecutorService notifications = ExecutorUtil.newMDCAwareCachedThreadPool("watches");
@@ -204,17 +190,9 @@ public class ZkStateReader implements SolrCloseable {
 
   private Set clusterPropertiesListeners = ConcurrentHashMap.newKeySet();
 
-  /** Used to submit notifications to Collection Properties watchers in order */
-  private final ExecutorService collectionPropsNotifications =
-      ExecutorUtil.newMDCAwareSingleThreadExecutor(
-          new SolrNamedThreadFactory("collectionPropsNotifications"));
-
   private static final long LAZY_CACHE_TIME =
       TimeUnit.NANOSECONDS.convert(STATE_UPDATE_DELAY, TimeUnit.MILLISECONDS);
 
-  // only kept to identify if the cleaner has already been started.
-  private Future collectionPropsCacheCleaner;
-
   /**
    * Gets the ZkStateReader inside a ZK based SolrClient.
    *
@@ -229,7 +207,7 @@ public static ZkStateReader from(CloudSolrClient solrClient) {
     }
   }
 
-  private static class CollectionWatch<T> {
+  protected static class CollectionWatch<T> {
 
     int coreRefCount = 0;
     Set<T> stateWatchers = ConcurrentHashMap.newKeySet();
@@ -417,6 +395,7 @@ public ZkStateReader(SolrZkClient zkClient, Runnable securityNodeListener) {
     this.zkClient = zkClient;
     this.closeClient = false;
     this.securityNodeWatcher = new SecurityNodeWatcher(this, securityNodeListener);
+    collectionPropertiesZkStateReader = new CollectionPropertiesZkStateReader(this);
     assert ObjectReleaseTracker.track(this);
   }
 
@@ -454,7 +433,7 @@ public ZkStateReader(
     this.zkClient = builder.build();
     this.closeClient = true;
     this.securityNodeWatcher = null;
-
+    collectionPropertiesZkStateReader = new CollectionPropertiesZkStateReader(this);
     assert ObjectReleaseTracker.track(this);
   }
 
@@ -592,11 +571,7 @@ public synchronized void createClusterStateWatchersAndUpdate()
       if (securityNodeWatcher != null) {
         securityNodeWatcher.register();
       }
-
-      collectionPropsObservers.forEach(
-          (k, v) -> {
-            collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
-          });
+      collectionPropertiesZkStateReader.refreshCollectionProperties();
     } catch (KeeperException.NoNodeException nne) {
       throw new SolrException(
           ErrorCode.SERVICE_UNAVAILABLE,
@@ -896,6 +871,10 @@ public Object getUpdateLock() {
     return this;
   }
 
+  public SolrZkClient getZKClient() {
+    return zkClient;
+  }
+
   @Override
   public void close() {
     this.closed = true;
@@ -909,7 +888,7 @@ public void close() {
             });
 
     ExecutorUtil.shutdownAndAwaitTermination(notifications);
-    ExecutorUtil.shutdownAndAwaitTermination(collectionPropsNotifications);
+    collectionPropertiesZkStateReader.close();
     if (closeClient) {
       zkClient.close();
     }
@@ -1190,131 +1169,18 @@ private void loadClusterProperties() {
     }
   }
 
-  /**
-   * Get collection properties for a given collection. If the collection is watched, simply return
-   * it from the cache, otherwise fetch it directly from zookeeper. This is a convenience for {@code
-   * getCollectionProperties(collection,0)}
-   *
-   * @param collection the collection for which properties are desired
-   * @return a map representing the key/value properties for the collection.
-   */
+  /** Get properties for a specific collection */
   public Map<String, String> getCollectionProperties(final String collection) {
-    return getCollectionProperties(collection, 0);
+    return collectionPropertiesZkStateReader.getCollectionProperties(collection, 0);
   }
 
-  /**
-   * Get and cache collection properties for a given collection. If the collection is watched, or
-   * still cached simply return it from the cache, otherwise fetch it directly from zookeeper and
-   * retain the value for at least cacheForMillis milliseconds. Cached properties are watched in
-   * zookeeper and updated automatically. This version of {@code getCollectionProperties} should be
-   * used when properties need to be consulted frequently in the absence of an active {@link
-   * CollectionPropsWatcher}.
-   *
-   * @param collection The collection for which properties are desired
-   * @param cacheForMillis The minimum number of milliseconds to maintain a cache for the specified
-   *     collection's properties. Setting a {@code CollectionPropsWatcher} will override this value
-   *     and retain the cache for the life of the watcher. A lack of changes in zookeeper may allow
-   *     the caching to remain for a greater duration up to the cycle time of {@code CacheCleaner}.
-   *     Passing zero for this value will explicitly remove the cached copy if and only if it is due
-   *     to expire and no watch exists. Any positive value will extend the expiration time if
-   *     required.
-   * @return a map representing the key/value properties for the collection.
-   */
+  /** Get and cache collection properties for a given collection */
   public Map<String, String> getCollectionProperties(final String collection, long cacheForMillis) {
-    synchronized (watchedCollectionProps) { // making decisions based on the result of a get...
-      Watcher watcher = null;
-      if (cacheForMillis > 0) {
-        watcher =
-            collectionPropsWatchers.compute(
-                collection,
-                (c, w) ->
-                    w == null ? new PropsWatcher(c, cacheForMillis) : w.renew(cacheForMillis));
-      }
-      VersionedCollectionProps vprops = watchedCollectionProps.get(collection);
-      boolean haveUnexpiredProps = vprops != null && vprops.cacheUntilNs > System.nanoTime();
-      long untilNs =
-          System.nanoTime() + TimeUnit.NANOSECONDS.convert(cacheForMillis, TimeUnit.MILLISECONDS);
-      Map properties;
-      if (haveUnexpiredProps) {
-        properties = vprops.props;
-        vprops.cacheUntilNs = Math.max(vprops.cacheUntilNs, untilNs);
-      } else {
-        try {
-          VersionedCollectionProps vcp = fetchCollectionProperties(collection, watcher);
-          properties = vcp.props;
-          if (cacheForMillis > 0) {
-            vcp.cacheUntilNs = untilNs;
-            watchedCollectionProps.put(collection, vcp);
-          } else {
-            // we're synchronized on watchedCollectionProps and we can only get here if we have
-            // found an expired vprops above, so it is safe to remove the cached value and let the
-            // GC free up some mem a bit sooner.
-            if (!collectionPropsObservers.containsKey(collection)) {
-              watchedCollectionProps.remove(collection);
-            }
-          }
-        } catch (Exception e) {
-          throw new SolrException(
-              ErrorCode.SERVER_ERROR,
-              "Error reading collection properties",
-              SolrZkClient.checkInterrupted(e));
-        }
-      }
-      return properties;
-    }
-  }
-
-  private static class VersionedCollectionProps {
-    int zkVersion;
-    Map props;
-    long cacheUntilNs = 0;
-
-    VersionedCollectionProps(int zkVersion, Map props) {
-      this.zkVersion = zkVersion;
-      this.props = props;
-    }
+    return collectionPropertiesZkStateReader.getCollectionProperties(collection, cacheForMillis);
   }
 
   static String getCollectionPropsPath(final String collection) {
-    return COLLECTIONS_ZKNODE + '/' + collection + '/' + COLLECTION_PROPS_ZKNODE;
-  }
-
-  private VersionedCollectionProps fetchCollectionProperties(String collection, Watcher watcher)
-      throws KeeperException, InterruptedException {
-    final String znodePath = getCollectionPropsPath(collection);
-    // lazy init cache cleaner once we know someone is using collection properties.
-    if (collectionPropsCacheCleaner == null) {
-      synchronized (this) { // There can be only one! :)
-        if (collectionPropsCacheCleaner == null) {
-          collectionPropsCacheCleaner = notifications.submit(new CacheCleaner());
-        }
-      }
-    }
-    while (true) {
-      try {
-        Stat stat = new Stat();
-        byte[] data = zkClient.getData(znodePath, watcher, stat, true);
-        @SuppressWarnings("unchecked")
-        Map props = (Map) Utils.fromJSON(data);
-        return new VersionedCollectionProps(stat.getVersion(), props);
-      } catch (ClassCastException e) {
-        throw new SolrException(
-            ErrorCode.SERVER_ERROR,
-            "Unable to parse collection properties for collection " + collection,
-            e);
-      } catch (KeeperException.NoNodeException e) {
-        if (watcher != null) {
-          // Leave an exists watch in place in case a collectionprops.json is created later.
-          Stat exists = zkClient.exists(znodePath, watcher, true);
-          if (exists != null) {
-            // Rare race condition, we tried to fetch the data and couldn't find it, then we found
-            // it exists. Loop and try again.
-            continue;
-          }
-        }
-        return new VersionedCollectionProps(-1, emptyMap());
-      }
-    }
+    return CollectionPropertiesZkStateReader.getCollectionPropsPath(collection);
   }
 
   /**
@@ -1453,101 +1319,6 @@ private void refreshAndWatchChildren() throws KeeperException, InterruptedExcept
     }
   }
 
-  /** Watches collection properties */
-  class PropsWatcher implements Watcher {
-    private final String coll;
-    private long watchUntilNs;
-
-    PropsWatcher(String coll) {
-      this.coll = coll;
-      watchUntilNs = 0;
-    }
-
-    PropsWatcher(String coll, long forMillis) {
-      this.coll = coll;
-      watchUntilNs =
-          System.nanoTime() + TimeUnit.NANOSECONDS.convert(forMillis, TimeUnit.MILLISECONDS);
-    }
-
-    public PropsWatcher renew(long forMillis) {
-      watchUntilNs =
-          System.nanoTime() + TimeUnit.NANOSECONDS.convert(forMillis, TimeUnit.MILLISECONDS);
-      return this;
-    }
-
-    @Override
-    public void process(WatchedEvent event) {
-      // session events are not change events, and do not remove the watcher
-      if (EventType.None.equals(event.getType())) {
-        return;
-      }
-
-      boolean expired = System.nanoTime() > watchUntilNs;
-      if (!collectionPropsObservers.containsKey(coll) && expired) {
-        // No one can be notified of the change, we can ignore it and "unset" the watch
-        log.debug("Ignoring property change for collection {}", coll);
-        return;
-      }
-
-      log.info(
-          "A collection property change: [{}] for collection [{}] has occurred - updating...",
-          event,
-          coll);
-
-      refreshAndWatch(true);
-    }
-
-    /**
-     * Refresh collection properties from ZK and leave a watch for future changes. Updates the
-     * properties in watchedCollectionProps with the results of the refresh. Optionally notifies
-     * watchers
-     */
-    void refreshAndWatch(boolean notifyWatchers) {
-      try {
-        synchronized (watchedCollectionProps) { // making decisions based on the result of a get...
-          VersionedCollectionProps vcp = fetchCollectionProperties(coll, this);
-          Map properties = vcp.props;
-          VersionedCollectionProps existingVcp = watchedCollectionProps.get(coll);
-          if (existingVcp == null
-              || // never called before, record what we found
-              vcp.zkVersion > existingVcp.zkVersion
-              || // newer info we should update
-              vcp.zkVersion == -1) { // node was deleted start over
-            watchedCollectionProps.put(coll, vcp);
-            if (notifyWatchers) {
-              notifyPropsWatchers(coll, properties);
-            }
-            if (vcp.zkVersion == -1 && existingVcp != null) { // Collection DELETE detected
-
-              // We should not be caching a collection that has been deleted.
-              watchedCollectionProps.remove(coll);
-
-              // core ref counting not relevant here, don't need canRemove(), we just sent
-              // a notification of an empty set of properties, no reason to watch what doesn't
-              // exist.
-              collectionPropsObservers.remove(coll);
-
-              // This is the one time we know it's safe to throw this out. We just failed to set the
-              // watch due to an NoNodeException, so it isn't held by ZK and can't re-set itself due
-              // to an update.
-              collectionPropsWatchers.remove(coll);
-            }
-          }
-        }
-      } catch (KeeperException.SessionExpiredException
-          | KeeperException.ConnectionLossException e) {
-        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: ", e);
-      } catch (KeeperException e) {
-        log.error("Lost collection property watcher for {} due to ZK error", coll, e);
-        throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
-      } catch (InterruptedException e) {
-        Thread.currentThread().interrupt();
-        log.error(
-            "Lost collection property watcher for {} due to the thread being interrupted", coll, e);
-      }
-    }
-  }
-
   /** Watches /collections children . */
   class CollectionsChildWatcher implements Watcher {
 
@@ -2069,38 +1840,12 @@ Map getCollectionWatches() {
 
   public void registerCollectionPropsWatcher(
       final String collection, CollectionPropsWatcher propsWatcher) {
-    AtomicBoolean watchSet = new AtomicBoolean(false);
-    collectionPropsObservers.compute(
-        collection,
-        (k, v) -> {
-          if (v == null) {
-            v = new CollectionWatch<>();
-            watchSet.set(true);
-          }
-          v.stateWatchers.add(propsWatcher);
-          return v;
-        });
 
-    if (watchSet.get()) {
-      collectionPropsWatchers.computeIfAbsent(collection, PropsWatcher::new).refreshAndWatch(false);
-    }
+    collectionPropertiesZkStateReader.registerCollectionPropsWatcher(collection, propsWatcher);
   }
 
   public void removeCollectionPropsWatcher(String collection, CollectionPropsWatcher watcher) {
-    collectionPropsObservers.compute(
-        collection,
-        (k, v) -> {
-          if (v == null) return null;
-          v.stateWatchers.remove(watcher);
-          if (v.canBeRemoved()) {
-            // don't want this to happen in middle of other blocks that might add it back.
-            synchronized (watchedCollectionProps) {
-              watchedCollectionProps.remove(collection);
-            }
-            return null;
-          }
-          return v;
-        });
+    collectionPropertiesZkStateReader.removeCollectionPropsWatcher(collection, watcher);
   }
 
   public static class ConfigData {
@@ -2352,66 +2097,6 @@ private boolean setIfNewer(SolrZkClient.NodeData n) {
     }
   }
 
-  private void notifyPropsWatchers(String collection, Map properties) {
-    try {
-      collectionPropsNotifications.submit(new PropsNotification(collection, properties));
-    } catch (RejectedExecutionException e) {
-      if (!closed) {
-        log.error("Couldn't run collection properties notifications for {}", collection, e);
-      }
-    }
-  }
-
-  private class PropsNotification implements Runnable {
-
-    private final String collection;
-    private final Map collectionProperties;
-    private final List watchers = new ArrayList<>();
-
-    private PropsNotification(String collection, Map collectionProperties) {
-      this.collection = collection;
-      this.collectionProperties = collectionProperties;
-      // guarantee delivery of notification regardless of what happens to collectionPropsObservers
-      // while we wait our turn in the executor by capturing the list on creation.
-      collectionPropsObservers.compute(
-          collection,
-          (k, v) -> {
-            if (v == null) return null;
-            watchers.addAll(v.stateWatchers);
-            return v;
-          });
-    }
-
-    @Override
-    public void run() {
-      for (CollectionPropsWatcher watcher : watchers) {
-        if (watcher.onStateChanged(collectionProperties)) {
-          removeCollectionPropsWatcher(collection, watcher);
-        }
-      }
-    }
-  }
-
-  private class CacheCleaner implements Runnable {
-    @Override
-    public void run() {
-      while (!Thread.interrupted()) {
-        try {
-          Thread.sleep(60000);
-        } catch (InterruptedException e) {
-          // Executor shutdown will send us an interrupt
-          break;
-        }
-        watchedCollectionProps
-            .entrySet()
-            .removeIf(
-                entry ->
-                    entry.getValue().cacheUntilNs < System.nanoTime()
-                        && !collectionPropsObservers.containsKey(entry.getKey()));
-      }
-    }
-  }
-
   /**
    * Helper class that acts as both a {@link DocCollectionWatcher} and a {@link LiveNodesListener}
    * while wrapping and delegating to a {@link CollectionStateWatcher}
diff --git a/solr/solrj-zookeeper/src/test/org/apache/solr/common/cloud/TestZkConfigSetService.java b/solr/solrj-zookeeper/src/test/org/apache/solr/common/cloud/TestZkConfigSetService.java
index 5375f62cdeb..8a2ff9f84cf 100644
--- a/solr/solrj-zookeeper/src/test/org/apache/solr/common/cloud/TestZkConfigSetService.java
+++ b/solr/solrj-zookeeper/src/test/org/apache/solr/common/cloud/TestZkConfigSetService.java
@@ -94,13 +94,14 @@ public void testUploadConfig() throws IOException {
       Files.createDirectory(tempConfig.resolve(".ignoreddir"));
       Files.createFile(tempConfig.resolve(".ignoreddir").resolve("ignored"));
 
-      configSetService.uploadConfig("testconfig", tempConfig);
+      configSetService.uploadConfig("testconfig", tempConfig, true);
 
       // uploading a directory creates a new config
       List configs = configSetService.listConfigs();
       assertEquals(1, configs.size());
       assertEquals("testconfig", configs.get(0));
       assertTrue(configSetService.checkConfigExists("testconfig"));
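+      // uploaded as trusted above, so the config set should be reported as trusted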
+      assertTrue(configSetService.isConfigSetTrusted("testconfig"));
 
       // check downloading
       Path downloadPath = createTempDir("download");
@@ -118,16 +119,17 @@ public void testUploadConfig() throws IOException {
       // uploading to the same config overwrites
       byte[] overwritten = "new test data".getBytes(StandardCharsets.UTF_8);
       Files.write(tempConfig.resolve("file1"), overwritten);
-      configSetService.uploadConfig("testconfig", tempConfig);
+      configSetService.uploadConfig("testconfig", tempConfig, false);
 
       assertEquals(1, configSetService.listConfigs().size());
       Path download2 = createTempDir("download2");
       configSetService.downloadConfig("testconfig", download2);
       byte[] checkdata2 = Files.readAllBytes(download2.resolve("file1"));
       assertArrayEquals(overwritten, checkdata2);
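+      // the overwrite above was not trusted, so the config set should no longer be trusted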
+      assertFalse(configSetService.isConfigSetTrusted("testconfig"));
 
       // uploading same files to a new name creates a new config
-      configSetService.uploadConfig("config2", tempConfig);
+      configSetService.uploadConfig("config2", tempConfig, true);
       assertEquals(2, configSetService.listConfigs().size());
 
       // Test copying a config works in both flavors
@@ -136,6 +138,7 @@ public void testUploadConfig() throws IOException {
       configs = configSetService.listConfigs();
       assertTrue("config2copy should exist", configs.contains("config2copy"));
       assertTrue("config2copy2 should exist", configs.contains("config2copy2"));
+      assertTrue(configSetService.isConfigSetTrusted("config2"));
     }
   }
 
@@ -210,7 +213,7 @@ protected Collection createCredentials() {
     try (SolrZkClient client =
         buildZkClient(zkServer.getZkAddress("/acl"), aclProvider, writeable)) {
       ConfigSetService configSetService = new ZkConfigSetService(client);
-      configSetService.uploadConfig("acltest", configPath);
+      configSetService.uploadConfig("acltest", configPath, false);
       assertEquals(1, configSetService.listConfigs().size());
     }
 
@@ -221,7 +224,8 @@ protected Collection createCredentials() {
       assertEquals(1, configSetService.listConfigs().size());
       IOException ioException =
           assertThrows(
-              IOException.class, () -> configSetService.uploadConfig("acltest2", configPath));
+              IOException.class,
+              () -> configSetService.uploadConfig("acltest2", configPath, false));
       assertEquals(KeeperException.NoAuthException.class, ioException.getCause().getClass());
     }
 
diff --git a/solr/solrj/build.gradle b/solr/solrj/build.gradle
index ac83fdae43f..dc1656a4624 100644
--- a/solr/solrj/build.gradle
+++ b/solr/solrj/build.gradle
@@ -93,6 +93,7 @@ dependencies {
   })
   testImplementation 'org.apache.commons:commons-lang3'
   testImplementation 'io.dropwizard.metrics:metrics-core'
+  testImplementation 'com.fasterxml.jackson.core:jackson-core'
 }
 
 /**
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/FastStreamingDocsCallback.java b/solr/solrj/src/java/org/apache/solr/client/solrj/FastStreamingDocsCallback.java
index 8398a13f562..cb5a7967747 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/FastStreamingDocsCallback.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/FastStreamingDocsCallback.java
@@ -40,7 +40,7 @@ default Object initDocList(Long numFound, Long start, Float maxScore) {
   Object startDoc(Object docListObj);
 
   /**
-   * FOund a new field
+   * Found a new field
    *
    * @param field Read the appropriate value
    * @param docObj The object returned by {{@link #startDoc(Object)}} method
@@ -57,7 +57,7 @@ default void endDoc(Object docObj) {}
   /**
    * A new child doc starts
    *
-   * @param parentDocObj an objec that will be shared across all the {{@link
+   * @param parentDocObj an object that will be shared across all the {{@link
    *     FastStreamingDocsCallback#field(DataEntry, Object)}}
    * @return any custom object that be shared with the fields in this child doc
    */
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/ResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/ResponseParser.java
index 1ef789157d7..1fcb7c7603d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/ResponseParser.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/ResponseParser.java
@@ -33,7 +33,7 @@ public abstract class ResponseParser {
   public abstract NamedList processResponse(Reader reader);
 
   /**
-   * A well behaved ResponseParser will return its content-type.
+   * A well-behaved ResponseParser will return its content-type.
    *
    * @return the content-type this parser expects to parse
    * @deprecated use {@link #getContentTypes()} instead
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
index 31ff4f2c71a..7bdd6b27ca6 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
@@ -544,7 +544,7 @@ public UpdateResponse commit(boolean waitFlush, boolean waitSearcher, boolean so
    *
    * 

Note: In most cases it is not required to do explicit optimize * - * @param collection the Solr collection to send the optimize to + * @param collection the Solr collection to send the optimize command to * @return an {@link org.apache.solr.client.solrj.response.UpdateResponse} containing the response * from the server * @throws IOException If there is a low-level I/O error. @@ -575,7 +575,7 @@ public UpdateResponse optimize() throws SolrServerException, IOException { * *

Note: In most cases it is not required to do explicit optimize * - * @param collection the Solr collection to send the optimize to + * @param collection the Solr collection to send the optimize command to * @param waitFlush block until index changes are flushed to disk * @param waitSearcher block until a new searcher is opened and registered as the main query * searcher, making the changes visible @@ -612,7 +612,7 @@ public UpdateResponse optimize(boolean waitFlush, boolean waitSearcher) * *

Note: In most cases it is not required to do explicit optimize * - * @param collection the Solr collection to send the optimize to + * @param collection the Solr collection to send the optimize command to * @param waitFlush block until index changes are flushed to disk * @param waitSearcher block until a new searcher is opened and registered as the main query * searcher, making the changes visible @@ -973,7 +973,7 @@ public QueryResponse query(SolrParams params, METHOD method) /** * Query solr, and stream the results. Unlike the standard query, this will send events for each - * Document rather then add them to the QueryResponse. + * Document rather than add them to the QueryResponse. * *

Although this function returns a 'QueryResponse' it should be used with care since it * excludes anything that was passed to callback. Also note that future version may pass even more @@ -1013,7 +1013,7 @@ private QueryResponse getQueryResponse( /** * Query solr, and stream the results. Unlike the standard query, this will send events for each - * Document rather then add them to the QueryResponse. + * Document rather than add them to the QueryResponse. * *

Although this function returns a 'QueryResponse' it should be used with care since it * excludes anything that was passed to callback. Also note that future version may pass even more diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java index 5719efcd180..d82f46a399a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrQuery.java @@ -615,7 +615,7 @@ public SolrQuery setSorts(List value) { } /** - * Gets an a list of current sort clauses. + * Gets a list of current sort clauses. * * @return an immutable list of current sort clauses * @since 4.2 @@ -683,7 +683,7 @@ public SolrQuery addOrUpdateSort(String field, ORDER order) { /** * Updates or adds a single sort field specification to the current sort information. If the sort * field already exist in the sort information map, its position is unchanged and the sort order - * is set; if it does not exist, it is appended at the end with the specified order.. + * is set; if it does not exist, it is appended at the end with the specified order. * * @return the modified SolrQuery object, for easy chaining * @since 4.2 @@ -1333,7 +1333,7 @@ public static SortClause asc(String item) { } /** - * Creates a decending SortClause for an item + * Creates a descending SortClause for an item * * @param item item to sort on */ @@ -1342,7 +1342,7 @@ public static SortClause desc(String item) { } /** - * Gets the item to sort, typically a function or a fieldname + * Gets the item to sort, typically a function or a field name * * @return item to sort */ @@ -1353,7 +1353,7 @@ public String getItem() { /** * Gets the order to sort * - * @return order to sort + * @return order to sort by */ public ORDER getOrder() { return order; @@ -1373,7 +1373,7 @@ public int hashCode() { } /** - * Gets a human readable description of the sort clause. + * Gets a human-readable description of the sort clause. * *

The returned string is not suitable for passing to Solr, but may be useful in debug output * and the like. diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java index 4bb077065ce..a615d478b98 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrRequest.java @@ -52,6 +52,21 @@ public enum METHOD { DELETE }; + public enum ApiVersion { + V1("/solr"), + V2("/api"); + + private final String apiPrefix; + + ApiVersion(String apiPrefix) { + this.apiPrefix = apiPrefix; + } + + public String getApiPrefix() { + return apiPrefix; + } + } + public enum SolrRequestType { QUERY, UPDATE, @@ -188,6 +203,15 @@ public boolean requiresCollection() { return false; } + /** + * Indicates which API version this request will make + * + *

Defaults implementation returns 'V1'. + */ + public ApiVersion getApiVersion() { + return ApiVersion.V1; + } + /** * @deprecated Please use {@link SolrRequest#getContentWriter(String)} instead. */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/DelegatingClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/DelegatingClusterStateProvider.java index b8e7322828f..4177f0a017a 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/DelegatingClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/DelegatingClusterStateProvider.java @@ -78,6 +78,15 @@ public String resolveSimpleAlias(String alias) throws IllegalArgumentException { } } + @Override + public Object getClusterProperty(String propertyName) { + if (delegate != null) { + return delegate.getClusterProperty(propertyName); + } else { + return null; + } + } + @Override public ClusterState getClusterState() { if (delegate != null) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ShardTerms.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ShardTerms.java index b693e7d624c..103e7cba74f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ShardTerms.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/ShardTerms.java @@ -125,7 +125,7 @@ private boolean skipIncreaseTermOf(String key, Set replicasNeedingRecove } /** - * Return a new {@link ShardTerms} in which highest terms are not zero + * Return a new {@link ShardTerms} in which the highest terms are not zero * * @return null if highest terms are already larger than zero */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java index 5dd5d1268f6..154ff7d1336 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/SocketProxy.java @@ -97,7 +97,7 @@ public void open(URI uri) throws Exception { @Override public String toString() { - return "SocketyProxy: port=" + listenPort + "; target=" + target; + return "SocketProxy: port=" + listenPort + "; target=" + target; } public void setReceiveBufferSize(int receiveBufferSize) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java index d5e2d188a75..75c50167d2b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpClusterStateProvider.java @@ -39,6 +39,7 @@ import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.PerReplicaStates; import org.apache.solr.common.params.ModifiableSolrParams; +import org.apache.solr.common.util.EnvUtils; import org.apache.solr.common.util.NamedList; import org.apache.solr.common.util.SimpleOrderedMap; import org.apache.solr.common.util.Utils; @@ -55,7 +56,8 @@ public abstract class BaseHttpClusterStateProvider implements ClusterStateProvid volatile Map> aliasProperties; long aliasesTimestamp = 0; - private int cacheTimeout = 5; // the liveNodes and aliases cache will be invalidated after 5 secs + // the liveNodes and aliases cache will be invalidated after 5 secs + private int cacheTimeout = EnvUtils.getPropertyAsInteger("solr.solrj.cache.timeout.sec", 5); public void init(List solrUrls) throws Exception 
{ for (String solrUrl : solrUrls) { @@ -84,6 +86,13 @@ public void init(List solrUrls) throws Exception { /** Create a SolrClient implementation that uses the specified Solr node URL */ protected abstract SolrClient getSolrClient(String baseUrl); + @Override + public DocCollection getCollection(String collection) { + // This change is to prevent BaseHttpCSP make a call to fetch the entire cluster state, as the + // default implementation calls getClusterState().getCollectionOrNull(name) + return getState(collection).get(); + } + @Override public ClusterState.CollectionRef getState(String collection) { for (String nodeName : liveNodes) { @@ -103,7 +112,7 @@ public ClusterState.CollectionRef getState(String collection) { log.warn("Attempt to fetch cluster state from {} failed.", baseUrl, e); } catch (NotACollectionException e) { // Cluster state for the given collection was not found, could be an alias. - // Lets fetch/update our aliases: + // Let's fetch/update our aliases: getAliases(true); return null; } @@ -122,15 +131,9 @@ public ClusterState.CollectionRef getState(String collection) { private ClusterState fetchClusterState( SolrClient client, String collection, Map clusterProperties) throws SolrServerException, IOException, NotACollectionException { - ModifiableSolrParams params = new ModifiableSolrParams(); - if (collection != null) { - params.set("collection", collection); - } - params.set("action", "CLUSTERSTATUS"); - params.set("prs", "true"); - QueryRequest request = new QueryRequest(params); - request.setPath("/admin/collections"); - SimpleOrderedMap cluster = (SimpleOrderedMap) client.request(request).get("cluster"); + SimpleOrderedMap cluster = + submitClusterStateRequest(client, collection, ClusterStateRequestType.FETCH_COLLECTION); + Map collectionsMap; if (collection != null) { collectionsMap = @@ -149,10 +152,16 @@ private ClusterState fetchClusterState( } else { znodeVersion = -1; } - Set liveNodes = new HashSet<>((List) (cluster.get("live_nodes"))); - this.liveNodes = liveNodes; - liveNodesTimestamp = System.nanoTime(); - ClusterState cs = new ClusterState(liveNodes, new HashMap<>()); + + ClusterState cs = new ClusterState(this.liveNodes, new HashMap<>()); + List liveNodesList = (List) cluster.get("live_nodes"); + if (liveNodesList != null) { + Set liveNodes = new HashSet<>(liveNodesList); + this.liveNodes = liveNodes; + liveNodesTimestamp = System.nanoTime(); + cs = new ClusterState(liveNodes, new HashMap<>()); + } + for (Map.Entry e : collectionsMap.entrySet()) { @SuppressWarnings("rawtypes") Map m = (Map) e.getValue(); @@ -173,6 +182,30 @@ private ClusterState fetchClusterState( return cs; } + private SimpleOrderedMap submitClusterStateRequest( + SolrClient client, String collection, ClusterStateRequestType requestType) + throws SolrServerException, IOException { + + ModifiableSolrParams params = new ModifiableSolrParams(); + params.set("action", "CLUSTERSTATUS"); + + if (requestType == ClusterStateRequestType.FETCH_COLLECTION && collection != null) { + params.set("collection", collection); + } else if (requestType == ClusterStateRequestType.FETCH_LIVE_NODES) { + params.set("liveNodes", "true"); + } else if (requestType == ClusterStateRequestType.FETCH_CLUSTER_PROP) { + params.set("clusterProperties", "true"); + } else if (requestType == ClusterStateRequestType.FETCH_NODE_ROLES) { + params.set("roles", "true"); + } + + params.set("includeAll", "false"); + params.set("prs", "true"); + QueryRequest request = new QueryRequest(params); + 
request.setPath("/admin/collections"); + return (SimpleOrderedMap) client.request(request).get("cluster"); + } + @SuppressWarnings({"rawtypes", "unchecked"}) private DocCollection fillPrs( int znodeVersion, Map.Entry e, Instant creationTime, Map m) { @@ -228,12 +261,10 @@ > getCacheTimeout()) { } @SuppressWarnings({"rawtypes", "unchecked"}) - private static Set fetchLiveNodes(SolrClient client) throws Exception { - ModifiableSolrParams params = new ModifiableSolrParams(); - params.set("action", "CLUSTERSTATUS"); - QueryRequest request = new QueryRequest(params); - request.setPath("/admin/collections"); - NamedList cluster = (SimpleOrderedMap) client.request(request).get("cluster"); + private Set fetchLiveNodes(SolrClient client) throws Exception { + + SimpleOrderedMap cluster = + submitClusterStateRequest(client, null, ClusterStateRequestType.FETCH_LIVE_NODES); return (Set) new HashSet((List) (cluster.get("live_nodes"))); } @@ -319,7 +350,7 @@ public ClusterState getClusterState() { } catch (SolrServerException | BaseHttpSolrClient.RemoteSolrException | IOException e) { log.warn("Attempt to fetch cluster state from {} failed.", baseUrl, e); } catch (NotACollectionException e) { - // not possible! (we passed in null for collection so it can't be an alias) + // not possible! (we passed in null for collection, so it can't be an alias) throw new RuntimeException( "null should never cause NotACollectionException in " + "fetchClusterState() Please report this as a bug!"); @@ -335,21 +366,18 @@ public ClusterState getClusterState() { + " solrUrl(s) or zkHost(s)."); } + @SuppressWarnings("unchecked") @Override public Map getClusterProperties() { + // Map clusterPropertiesMap = new HashMap<>(); for (String nodeName : liveNodes) { String baseUrl = Utils.getBaseUrlForNodeName(nodeName, urlScheme); try (SolrClient client = getSolrClient(baseUrl)) { - Map clusterProperties = new HashMap<>(); - fetchClusterState(client, null, clusterProperties); - return clusterProperties; + SimpleOrderedMap cluster = + submitClusterStateRequest(client, null, ClusterStateRequestType.FETCH_CLUSTER_PROP); + return (Map) cluster.get("properties"); } catch (SolrServerException | BaseHttpSolrClient.RemoteSolrException | IOException e) { log.warn("Attempt to fetch cluster state from {} failed.", baseUrl, e); - } catch (NotACollectionException e) { - // not possible! (we passed in null for collection so it can't be an alias) - throw new RuntimeException( - "null should never cause NotACollectionException in " - + "fetchClusterState() Please report this as a bug!"); } } throw new RuntimeException( @@ -385,10 +413,6 @@ public int getCacheTimeout() { return cacheTimeout; } - public void setCacheTimeout(int cacheTimeout) { - this.cacheTimeout = cacheTimeout; - } - // This exception is not meant to escape this class it should be caught and wrapped. 
private static class NotACollectionException extends Exception {} @@ -399,4 +423,11 @@ public String getQuorumHosts() { } return String.join(",", this.liveNodes); } + + private enum ClusterStateRequestType { + FETCH_LIVE_NODES, + FETCH_CLUSTER_PROP, + FETCH_NODE_ROLES, + FETCH_COLLECTION + } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java index 2781adb6405..c1da336d7d5 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/BaseHttpSolrClient.java @@ -43,8 +43,8 @@ public RemoteSolrException(String remoteHost, int code, String msg, Throwable th } /** - * This should be thrown when a server has an error in executing the request and it sends a proper - * payload back to the client + * This should be thrown when a server has an error in executing the request, and it sends a + * proper payload back to the client */ public static class RemoteExecutionException extends RemoteSolrException { private NamedList meta; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java index ffd481fe494..f45945ae045 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudHttp2SolrClient.java @@ -233,7 +233,7 @@ public Builder(List zkHosts, Optional zkChroot) { if (zkChroot.isPresent()) this.zkChroot = zkChroot.get(); } - /** Whether or not to use the default ZK ACLs when building a ZK Client. */ + /** Whether to use the default ZK ACLs when building a ZK Client. */ public Builder canUseZkACLs(boolean canUseZkACLs) { this.canUseZkACLs = canUseZkACLs; return this; @@ -346,7 +346,7 @@ public Builder withParallelCacheRefreshes(int parallelCacheRefreshesLocks) { } /** - * This is the time to wait to refetch the state after getting the same state version from ZK + * This is the time to wait to re-fetch the state after getting the same state version from ZK * * @deprecated Please use {@link #withRetryExpiryTime(long, TimeUnit)} */ @@ -357,7 +357,7 @@ public Builder setRetryExpiryTime(int secs) { } /** - * This is the time to wait to refetch the state after getting the same state version from ZK + * This is the time to wait to re-fetch the state after getting the same state version from ZK */ public Builder withRetryExpiryTime(long expiryTime, TimeUnit unit) { this.retryExpiryTimeNano = TimeUnit.NANOSECONDS.convert(expiryTime, unit); @@ -413,7 +413,7 @@ public Builder withHttpClient(Http2SolrClient httpClient) { /** * If provided, the CloudHttp2SolrClient will build it's internal Http2SolrClient using this * builder (instead of the empty default one). Providing this builder allows users to configure - * the internal clients (authentication, timeouts, etc). + * the internal clients (authentication, timeouts, etc.). * * @param internalClientBuilder the builder to use for creating the internal http client. 
* @return this diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudLegacySolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudLegacySolrClient.java index e1f3840cfbd..224757200ff 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudLegacySolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudLegacySolrClient.java @@ -232,7 +232,7 @@ public Builder(List zkHosts, Optional zkChroot) { if (zkChroot.isPresent()) this.zkChroot = zkChroot.get(); } - /** Whether or not to use the default ZK ACLs when building a ZK Client. */ + /** Whether to use the default ZK ACLs when building a ZK Client. */ public Builder canUseZkACLs(boolean canUseZkACLs) { this.canUseZkACLs = canUseZkACLs; return this; @@ -288,7 +288,7 @@ public Builder sendUpdatesToAnyReplica() { /** * This method has no effect. * - *

In older versions of Solr, this method was an incorrectly named equivilent to {@link + *

In older versions of Solr, this method was an incorrectly named equivalent to {@link * #sendUpdatesToAnyReplica}, which had no effect because that setting was ignored in the * created clients. When the underlying {@link CloudSolrClient} behavior was fixed, this method * was modified to be an explicit No-Op, since the implied behavior of sending updates to @@ -323,7 +323,7 @@ public Builder sendDirectUpdatesToShardLeadersOnly() { * Tells {@link Builder} that created clients can send updates to any shard replica (shard * leaders and non-leaders). * - *

Shard leaders are still preferred, but the created clients will fallback to using other + *

Shard leaders are still preferred, but the created clients will fall back to using other * replicas if a leader cannot be found. * * @see #sendDirectUpdatesToShardLeadersOnly diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java index 223fef6ab23..666f5662627 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/CloudSolrClient.java @@ -298,7 +298,7 @@ public boolean isParallelUpdates() { /** * Connect to the zookeeper ensemble. This is an optional method that may be used to force a - * connect before any other requests are sent. + * connection before any other requests are sent. */ public void connect() { getClusterStateProvider().connect(); @@ -362,8 +362,9 @@ private NamedList directUpdate(AbstractUpdateRequest request, String col // Check to see if the collection is an alias. Updates to multi-collection aliases are ok as // long as they are routed aliases - List aliasedCollections = getClusterStateProvider().resolveAlias(collection); - if (getClusterStateProvider().isRoutedAlias(collection) || aliasedCollections.size() == 1) { + List aliasedCollections = + new ArrayList<>(resolveAliases(Collections.singletonList(collection))); + if (aliasedCollections.size() == 1 || getClusterStateProvider().isRoutedAlias(collection)) { collection = aliasedCollections.get(0); // pick 1st (consistent with HttpSolrCall behavior) } else { throw new SolrException( @@ -397,7 +398,7 @@ private NamedList directUpdate(AbstractUpdateRequest request, String col if (routes == null) { if (directUpdatesToLeadersOnly && hasInfoToFindLeaders(updateRequest, routeField)) { // we have info (documents with ids and/or ids to delete) with - // which to find the leaders but we could not find (all of) them + // which to find the leaders, but we could not find (all of) them throw new SolrException( SolrException.ErrorCode.SERVICE_UNAVAILABLE, "directUpdatesToLeadersOnly==true but could not find leader(s)"); @@ -774,7 +775,7 @@ protected NamedList requestWithRetryOnStaleState( throws SolrServerException, IOException { connect(); // important to call this before you start working with the ZkStateReader - // build up a _stateVer_ param to pass to the server containing all of the + // build up a _stateVer_ param to pass to the server containing all the // external collection state versions involved in this request, which allows // the server to notify us that our cached state for one or more of the external // collections is stale and needs to be refreshed ... 
this code has no impact on internal @@ -851,7 +852,8 @@ protected NamedList requestWithRetryOnStaleState( // or request is v2 api and its method is not GET if (inputCollections.isEmpty() || isAdmin - || (request instanceof V2Request && request.getMethod() != SolrRequest.METHOD.GET)) { + || (request.getApiVersion() == SolrRequest.ApiVersion.V2 + && request.getMethod() != SolrRequest.METHOD.GET)) { if (exc instanceof SolrServerException) { throw (SolrServerException) exc; } else if (exc instanceof IOException) { @@ -892,7 +894,7 @@ protected NamedList requestWithRetryOnStaleState( } } if (retryCount < MAX_STALE_RETRIES) { // if it is a communication error , we must try again - // may be, we have a stale version of the collection state + // may be, we have a stale version of the collection state, // and we could not get any information from the server // it is probably not worth trying again and again because // the state would not have been updated @@ -931,7 +933,7 @@ protected NamedList requestWithRetryOnStaleState( // re-issue request using updated state stateWasStale = true; - // just re-read state for all of them, which is a little heavy handed but hopefully a rare + // just re-read state for all of them, which is a little heavy-handed but hopefully a rare // occurrence for (DocCollection ext : requestedCollections) { collectionStateCache.remove(ext.getName()); @@ -1025,7 +1027,7 @@ protected NamedList sendRequest(SolrRequest request, List inp final List requestEndpoints = new ArrayList<>(); // we populate this as follows... - if (request instanceof V2Request) { + if (request.getApiVersion() == SolrRequest.ApiVersion.V2) { if (!liveNodes.isEmpty()) { List liveNodesList = new ArrayList<>(liveNodes); Collections.shuffle(liveNodesList, rand); @@ -1115,7 +1117,7 @@ protected NamedList sendRequest(SolrRequest request, List inp replica -> { if (seenNodes.add(replica.getNodeName())) { if (inputCollections.size() == 1 && collectionNames.size() == 1) { - // If we have a single collection name (and not a alias to multiple collection), + // If we have a single collection name (and not an alias to multiple collection), // send the query directly to a replica of this collection. requestEndpoints.add( new LBSolrClient.Endpoint(replica.getBaseUrl(), replica.getCoreName())); @@ -1149,7 +1151,7 @@ private Set resolveAliases(List inputCollections) { } LinkedHashSet uniqueNames = new LinkedHashSet<>(); // consistent ordering for (String collectionName : inputCollections) { - if (getClusterStateProvider().getState(collectionName) == null) { + if (getDocCollection(collectionName, -1) == null) { // perhaps it's an alias uniqueNames.addAll(getClusterStateProvider().resolveAlias(collectionName)); } else { @@ -1173,7 +1175,7 @@ public boolean isUpdatesToLeaders() { /** * If true, this client has been configured such that "direct updates" will only be sent - * to the current leader of the corrisponding shard, and will not be retried with other replicas. + * to the current leader of the corresponding shard, and will not be retried with other replicas. * This method has no effect if {@link #isUpdatesToLeaders()} or {@link * IsUpdateRequest#isSendToLeaders} returns false. * @@ -1182,7 +1184,7 @@ public boolean isUpdatesToLeaders() { * the default router; non-direct updates are things like commits and "delete by query"). * *

NOTE: If a single {@link UpdateRequest} contains multiple "direct updates" for different - * shards, this client may break the request up and merge th resposes. + * shards, this client may break the request up and merge the responses. * * @return true if direct updates are sent to shard leaders only */ @@ -1208,26 +1210,22 @@ protected DocCollection getDocCollection(String collection, Integer expectedVers if (expectedVersion <= col.getZNodeVersion() && !cacheEntry.shouldRetry()) return col; } - ClusterState.CollectionRef ref = getCollectionRef(collection); - if (ref == null) { - // no such collection exists - return null; - } - if (!ref.isLazilyLoaded()) { - // it is readily available just return it - return ref.get(); - } Object[] locks = this.locks; int lockId = Math.abs(Hash.murmurhash3_x86_32(collection, 0, collection.length(), 0) % locks.length); final Object lock = locks[lockId]; synchronized (lock) { - /*we have waited for sometime just check once again*/ + /*we have waited for some time just check once again*/ cacheEntry = collectionStateCache.get(collection); col = cacheEntry == null ? null : cacheEntry.cached; if (col != null) { if (expectedVersion <= col.getZNodeVersion() && !cacheEntry.shouldRetry()) return col; } + ClusterState.CollectionRef ref = getCollectionRef(collection); + if (ref == null) { + // no such collection exists + return null; + } // We are going to fetch a new version // we MUST try to get a new version DocCollection fetchedCol = ref.get(); // this is a call to ZK diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java index 81bb885c38b..b6afac3114f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ClusterStateProvider.java @@ -91,7 +91,11 @@ default boolean isRoutedAlias(String alias) { .anyMatch(e -> e.getKey().startsWith(CollectionAdminParams.ROUTER_PREFIX)); } - /** Obtain the current cluster state. */ + /** + * Obtain the current cluster state. WARNING: This method is quite expensive as it involves + * fetching remote information. Use with caution and be aware of the potential performance + * implications. + */ ClusterState getClusterState(); default DocCollection getCollection(String name) throws IOException { @@ -108,15 +112,13 @@ default DocCollection getCollection(String name) throws IOException { /** Obtain a cluster property, or the default value if it doesn't exist. */ default T getClusterProperty(String key, T defaultValue) { @SuppressWarnings({"unchecked"}) - T value = (T) getClusterProperties().get(key); + T value = (T) getClusterProperty(key); if (value == null) return defaultValue; return value; } /** Obtain a cluster property, or null if it doesn't exist. 
 */
-  default Object getClusterProperty(String propertyName) {
-    return getClusterProperties().get(propertyName);
-  }
+  Object getClusterProperty(String propertyName);
   /** Get the collection-specific policy */
   String getPolicyNameByCollection(String coll);
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
index d39fd607a15..f485a75ef5f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateHttp2SolrClient.java
@@ -407,9 +407,9 @@ public NamedList<Object> request(final SolrRequest<?> request, String collection
       int lastQueueSize = -1;
       for (; ; ) {
         synchronized (runners) {
-          // see if queue is half full and we can add more runners
+          // see if queue is half full, and we can add more runners
           // special case: if only using a threadCount of 1 and the queue
-          // is filling up, allow 1 add'l runner to help process the queue
+          // is filling up, allow 1 additional runner to help process the queue
           if (runners.isEmpty()
               || (queue.remainingCapacity() < queue.size() && runners.size() < threadCount)) {
             // We need more runners, so start a new one.
@@ -486,7 +486,7 @@ public synchronized void blockUntilFinished() throws IOException {
     synchronized (runners) {
-      // NOTE: if the executor is shut down, runners may never become empty (a scheduled task may
+      // NOTE: if the executor is shut down, runners may never become empty. A scheduled task may
       // never be run, which means it would never remove itself from the runners list. This is why
       // we don't wait forever and periodically check if the scheduler is shutting down.
       int loopCount = 0;
@@ -627,7 +627,7 @@ public void handleError(Throwable ex) {
   }
   /**
-   * Intended to be used as an extension point for doing post processing after a request completes.
+   * Intended to be used as an extension point for doing post-processing after a request completes.
    *
    * @param respBody the body of the response, subclasses must not close this stream.
    */
@@ -819,7 +819,7 @@ public Builder alwaysStreamDeletes() {
     /**
      * Configures created clients to not stream delete requests.
      *
-     * <p>With this option set when the created ConcurrentUpdateHttp2SolrClient sents a delete
+     * <p>
With this option set when the created ConcurrentUpdateHttp2SolrClient sends a delete * request it will first will lock the queue and block until all queued updates have been sent, * and then send the delete request. */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java index 6800f435b07..3896f1e8e87 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClient.java @@ -533,9 +533,9 @@ public NamedList request(final SolrRequest request, String collection int lastQueueSize = -1; for (; ; ) { synchronized (runners) { - // see if queue is half full and we can add more runners + // see if queue is half full, and we can add more runners // special case: if only using a threadCount of 1 and the queue - // is filling up, allow 1 add'l runner to help process the queue + // is filling up, allow 1 additional runner to help process the queue if (runners.isEmpty() || (queue.remainingCapacity() < queue.size() && runners.size() < threadCount)) { // We need more runners, so start a new one. @@ -612,7 +612,7 @@ public synchronized void blockUntilFinished() throws IOException { synchronized (runners) { - // NOTE: if the executor is shut down, runners may never become empty (a scheduled task may + // NOTE: if the executor is shut down, runners may never become empty. A scheduled task may // never be run, which means it would never remove itself from the runners list. This is why // we don't wait forever and periodically check if the scheduler is shutting down. int loopCount = 0; @@ -756,7 +756,7 @@ public void handleError(Throwable ex) { } /** - * Intended to be used as an extension point for doing post processing after a request completes. + * Intended to be used as an extension point for doing post-processing after a request completes. */ public void onSuccess(HttpResponse resp) { // no-op by design, override to add functionality @@ -791,7 +791,7 @@ public synchronized void close() { if (internalHttpClient) IOUtils.closeQuietly(client); if (log.isDebugEnabled()) { log.debug( - "STATS pollInteruppts={} pollExists={} blockLoops={} emptyQueueLoops={}", + "STATS pollInterrupts={} pollExists={} blockLoops={} emptyQueueLoops={}", pollInterrupts.get(), pollExits.get(), blockLoops.get(), @@ -928,7 +928,7 @@ public Builder alwaysStreamDeletes() { /** * Configures created clients to not stream delete requests. * - *

With this option set when the created ConcurrentUpdateSolrClient sents a delete request it
+     * <p>
With this option set when the created ConcurrentUpdateSolrClient sends a delete request it * will first will lock the queue and block until all queued updates have been sent, and then * send the delete request. */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java index 0164c42e63c..f3de7a75727 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Http2SolrClient.java @@ -367,7 +367,7 @@ public OutStream initOutStream(String baseUrl, UpdateRequest updateRequest, Stri String contentType = requestWriter.getUpdateContentType(); final ModifiableSolrParams origParams = new ModifiableSolrParams(updateRequest.getParams()); ModifiableSolrParams requestParams = - initalizeSolrParams(updateRequest, responseParser(updateRequest)); + initializeSolrParams(updateRequest, responseParser(updateRequest)); String basePath = baseUrl; if (collection != null) basePath += "/" + collection; @@ -624,7 +624,7 @@ private static class MakeRequestReturnValue { private MakeRequestReturnValue makeRequest( SolrRequest solrRequest, String url, boolean isAsync) throws IOException, SolrServerException { - ModifiableSolrParams wparams = initalizeSolrParams(solrRequest, responseParser(solrRequest)); + ModifiableSolrParams wparams = initializeSolrParams(solrRequest, responseParser(solrRequest)); if (SolrRequest.METHOD.GET == solrRequest.getMethod()) { validateGetRequest(solrRequest); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientBuilderFactory.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientBuilderFactory.java index 55d58f307c2..ed8cb32ec60 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientBuilderFactory.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientBuilderFactory.java @@ -30,7 +30,7 @@ public interface HttpClientBuilderFactory extends Closeable { * This method configures the {@linkplain SolrHttpClientBuilder} by overriding the configuration * of passed SolrHttpClientBuilder or as a new instance. * - * @param builder The instance of the {@linkplain SolrHttpClientBuilder} which should by + * @param builder The instance of the {@linkplain SolrHttpClientBuilder} which should be * configured (optional). * @return the {@linkplain SolrHttpClientBuilder} */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java index b0d86fa2c6d..6aafaceef46 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java @@ -70,7 +70,7 @@ *

This class can touch internal HttpClient details and is subject to change. * * @lucene.experimental - * @deprecated Used to configure the Apache HTTP client. Please use The Http2 client + * @deprecated Used to configure the Apache HTTP client. Please use the Http2 client */ @Deprecated(since = "9.0") public class HttpClientUtil { @@ -111,7 +111,7 @@ public class HttpClientUtil { public static final String SYS_PROP_CHECK_PEER_NAME = "solr.ssl.checkPeerName"; // * NOTE* The following params configure the default request config and this - // is overridden by SolrJ clients. Use the setters on the SolrJ clients to + // is overridden by SolrJ clients. Use the setters on the SolrJ clients // to configure these settings if that is the intent. // Follow redirects @@ -552,7 +552,7 @@ public static void setHttpClientRequestContextBuilder( /** * Create a HttpClientContext object and {@link HttpClientContext#setUserToken(Object)} to an - * internal singleton. It allows to reuse underneath {@link HttpClient} in connection pools if + * internal singleton. It allows reusing underlying {@link HttpClient} in connection pools if * client authentication is enabled. */ public static HttpClientContext createNewHttpClientRequestContext() { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java index 32f988ddaa3..d3610f8d544 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpJdkSolrClient.java @@ -200,7 +200,7 @@ private PreparedRequest prepareRequest(SolrRequest solrRequest, String collec } String url = getRequestUrl(solrRequest, collection); ResponseParser parserToUse = responseParser(solrRequest); - ModifiableSolrParams queryParams = initalizeSolrParams(solrRequest, parserToUse); + ModifiableSolrParams queryParams = initializeSolrParams(solrRequest, parserToUse); var reqb = HttpRequest.newBuilder(); PreparedRequest pReq = null; try { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java index 41eb07207a9..86f8b932f37 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java @@ -75,7 +75,6 @@ import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.RequestWriter; -import org.apache.solr.client.solrj.request.V2Request; import org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; @@ -256,7 +255,7 @@ public NamedList request( } private boolean isV2ApiRequest(final SolrRequest request) { - return request instanceof V2Request || request.getPath().contains("/____v2"); + return request.getApiVersion() == SolrRequest.ApiVersion.V2; } private void setBasicAuthHeader(SolrRequest request, HttpRequestBase method) @@ -736,7 +735,7 @@ protected NamedList executeMethod( // When raising an error using HTTP sendError, mime types can be mismatched. 
This is specifically // true when SolrDispatchFilter uses the sendError mechanism since the expected MIME type of - // response is not HTML but HTTP sendError generates a HTML output, which can lead to mismatch + // response is not HTML but HTTP sendError generates an HTML output, which can lead to mismatch private boolean isUnmatchedErrorCode(String mimeType, int httpStatus) { if (mimeType == null) { return false; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClientBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClientBase.java index 6645e4f7cae..3debb681378 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClientBase.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClientBase.java @@ -40,7 +40,6 @@ import org.apache.solr.client.solrj.SolrRequest; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.client.solrj.request.RequestWriter; -import org.apache.solr.client.solrj.request.V2Request; import org.apache.solr.client.solrj.util.ClientUtils; import org.apache.solr.common.SolrException; import org.apache.solr.common.params.CommonParams; @@ -121,8 +120,15 @@ protected ResponseParser responseParser(SolrRequest solrRequest) { return solrRequest.getResponseParser() == null ? this.parser : solrRequest.getResponseParser(); } + // TODO: Remove this for 10.0, there is a typo in the method name + @Deprecated(since = "9.8", forRemoval = true) protected ModifiableSolrParams initalizeSolrParams( SolrRequest solrRequest, ResponseParser parserToUse) { + return initializeSolrParams(solrRequest, parserToUse); + } + + protected ModifiableSolrParams initializeSolrParams( + SolrRequest solrRequest, ResponseParser parserToUse) { // The parser 'wt=' and 'version=' params are used instead of the original // params ModifiableSolrParams wparams = new ModifiableSolrParams(solrRequest.getParams()); @@ -283,7 +289,7 @@ protected NamedList processErrorsAndResponse( try { is.close(); } catch (IOException e) { - // quitely + // quietly } } } @@ -381,7 +387,7 @@ public CompletableFuture> requestAsync(final SolrRequest re } public boolean isV2ApiRequest(final SolrRequest request) { - return request instanceof V2Request || request.getPath().contains("/____v2"); + return request.getApiVersion() == SolrRequest.ApiVersion.V2; } public String getBaseURL() { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java index 8b0971a53ac..221cbd4bb7e 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBHttpSolrClient.java @@ -38,8 +38,8 @@ * but this class may be used for updates because the server will forward them to the appropriate * leader. * - *

It offers automatic failover when a server goes down and it detects when the server comes back
- * up.
+ * <p>It offers automatic failover when a server goes down, and it detects when the server comes
+ * back up.
  *
  * <p>Load balancing is done using a simple round-robin on the list of endpoints. Endpoint URLs are
  * expected to point to the Solr "root" path (i.e. "/solr").
@@ -85,7 +85,7 @@
  * LBHttpSolrClient.Builder#setAliveCheckInterval(int)}. The default is set to one minute.
  *
  * <p>When to use this?<br>
- * This can be used as a software load balancer when you do not wish to setup an external load + * This can be used as a software load balancer when you do not wish to set up an external load * balancer. Alternatives to this code are to use a dedicated hardware load balancer or using Apache * httpd with mod_proxy_balancer as a load balancer. See Load balancing on Wikipedia diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java index 1effc3b8a0a..795f2a003cb 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/LBSolrClient.java @@ -57,6 +57,7 @@ import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.ExecutorUtil; import org.apache.solr.common.util.NamedList; +import org.apache.solr.common.util.ObjectReleaseTracker; import org.apache.solr.common.util.SolrNamedThreadFactory; import org.apache.solr.common.util.URLUtil; import org.slf4j.MDC; @@ -403,6 +404,7 @@ public LBSolrClient(List solrEndpoints) { } updateAliveList(); } + ObjectReleaseTracker.track(this); } protected void updateAliveList() { @@ -432,7 +434,7 @@ public static String normalize(String server) { * *

If no live servers are found a SolrServerException is thrown. * - * @param req contains both the request as well as the list of servers to query + * @param req contains both the request and the list of servers to query * @return the result of the request * @throws IOException If there is a low-level I/O error. */ @@ -590,7 +592,8 @@ private void checkAZombieServer(EndpointWrapper zombieServer) { QueryResponse resp = queryRequest.process(getClient(zombieEndpoint), effectiveCollection); if (resp.getStatus() == 0) { // server has come back up. - // make sure to remove from zombies before adding to alive to avoid a race condition + // make sure to remove from zombies before adding to the alive list to avoid a race + // condition // where another thread could mark it down, move it back to zombie, and then we delete // from zombie and lose it forever. EndpointWrapper wrapper = zombieServers.remove(zombieServer.getEndpoint().toString()); @@ -722,7 +725,8 @@ public NamedList request( final String effectiveCollection = endpoint.getCore() == null ? collection : endpoint.getCore(); NamedList rsp = getClient(endpoint).request(request, effectiveCollection); - // remove from zombie list *before* adding to alive to avoid a race that could lose a server + // remove from zombie list *before* adding to the alive list to avoid a race that could lose + // a server zombieServers.remove(endpoint.getUrl()); addToAlive(wrapper); return rsp; @@ -764,7 +768,7 @@ public NamedList request( } /** - * Pick a server from list to execute request. By default servers are picked in round-robin + * Pick a server from list to execute request. By default, servers are picked in round-robin * manner, custom classes can override this method for more advance logic * * @param aliveServerList list of currently alive servers @@ -791,5 +795,6 @@ public void close() { ExecutorUtil.shutdownAndAwaitTermination(aliveCheckExecutor); } } + ObjectReleaseTracker.release(this); } } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/PreemptiveBasicAuthClientBuilderFactory.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/PreemptiveBasicAuthClientBuilderFactory.java index cc26e3f1edd..0d1231baa74 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/PreemptiveBasicAuthClientBuilderFactory.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/PreemptiveBasicAuthClientBuilderFactory.java @@ -46,7 +46,7 @@ public class PreemptiveBasicAuthClientBuilderFactory implements HttpClientBuilderFactory { /** * A system property used to specify a properties file containing default parameters used for - * creating a HTTP client. This is specifically useful for configuring the HTTP basic auth + * creating an HTTP client. This is specifically useful for configuring the HTTP basic auth * credentials (i.e. username/password). The name of the property must match the relevant Solr * config property name. */ @@ -64,7 +64,7 @@ public class PreemptiveBasicAuthClientBuilderFactory implements HttpClientBuilde private static CredentialsResolver CREDENTIAL_RESOLVER = new CredentialsResolver(); /** - * This method enables configuring system wide defaults (apart from using a config file based + * This method enables configuring system-wide defaults (apart from using a config file based * approach). 
*/ public static void setDefaultSolrParams(SolrParams params) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrHttpRequestRetryHandler.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrHttpRequestRetryHandler.java index 4ec079a10d5..2ec94c5cbf4 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrHttpRequestRetryHandler.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/SolrHttpRequestRetryHandler.java @@ -51,7 +51,7 @@ public class SolrHttpRequestRetryHandler implements HttpRequestRetryHandler { /** the number of times a method will be retried */ private final int retryCount; - private final Set> nonRetriableClasses; + private final Set> nonRetryableClasses; /** * Create the request retry handler using the specified IOException classes @@ -64,14 +64,14 @@ protected SolrHttpRequestRetryHandler( final int retryCount, final Collection> clazzes) { super(); this.retryCount = retryCount; - this.nonRetriableClasses = new HashSet<>(); + this.nonRetryableClasses = new HashSet<>(); for (final Class clazz : clazzes) { - this.nonRetriableClasses.add(clazz); + this.nonRetryableClasses.add(clazz); } } /** - * Create the request retry handler using the following list of non-retriable IOException classes: + * Create the request retry handler using the following list of non-retryable IOException classes: *
* *
    @@ -96,7 +96,7 @@ public SolrHttpRequestRetryHandler(final int retryCount) { /** * Create the request retry handler with a retry count of 3, requestSentRetryEnabled false and - * using the following list of non-retriable IOException classes:
    + * using the following list of non-retryable IOException classes:
    * *
      *
    • InterruptedIOException @@ -118,9 +118,9 @@ public boolean retryRequest( return false; } - if (!isRetriable(exception)) { + if (!isRetryable(exception)) { if (log.isDebugEnabled()) { - log.debug("Do not retry, non retriable class {}", exception.getClass().getName()); + log.debug("Do not retry, non retryable class {}", exception.getClass().getName()); } return false; } @@ -142,7 +142,7 @@ public boolean retryRequest( return false; } - private boolean isRetriable(IOException exception) { + private boolean isRetryable(IOException exception) { // Workaround for "recv failed" issue on hard-aborted sockets on Windows // (and other operating systems, possibly). // https://issues.apache.org/jira/browse/SOLR-13778 @@ -152,8 +152,8 @@ private boolean isRetriable(IOException exception) { } // Fast check for exact class followed by slow-check with instanceof. - if (nonRetriableClasses.contains(exception.getClass()) - || nonRetriableClasses.stream() + if (nonRetryableClasses.contains(exception.getClass()) + || nonRetryableClasses.stream() .anyMatch(rejectException -> rejectException.isInstance(exception))) { return false; } @@ -167,7 +167,7 @@ public int getRetryCount() { protected boolean handleAsIdempotent(final HttpClientContext context) { String method = context.getRequest().getRequestLine().getMethod(); - // do not retry admin requests, even if they are GET as they are not idempotent + // do not retry admin requests, even if they are GET requests as they are not idempotent if (context.getRequest().getRequestLine().getUri().startsWith("/admin/")) { log.debug("Do not retry, this is an admin request"); return false; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java index ecf37240db2..62a0df519a1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/StreamingBinaryResponseParser.java @@ -34,7 +34,7 @@ import org.apache.solr.common.util.NamedList; /** - * A BinaryResponseParser that sends callback events rather then build a large response + * A BinaryResponseParser that sends callback events rather than build a large response * * @since solr 4.0 */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/XMLResponseParser.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/XMLResponseParser.java index b33f6771c16..83be4eee8fb 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/XMLResponseParser.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/XMLResponseParser.java @@ -67,7 +67,7 @@ public class XMLResponseParser extends ResponseParser { factory.setProperty("reuse-instance", Boolean.FALSE); } catch (IllegalArgumentException ex) { // Other implementations will likely throw this exception since "reuse-instance" - // isimplementation specific. + // is implementation specific. log.debug("Unable to set the 'reuse-instance' property for the input factory: {}", factory); } factory.setXMLReporter(xmllog); @@ -492,7 +492,7 @@ protected SolrDocument readDocument(XMLStreamReader parser) throws XMLStreamExce continue; // may be more child docs, or other fields } - // other then nested documents, all other possible nested elements require a name... + // other than nested documents, all other possible nested elements require a name... 
name = null; int cnt = parser.getAttributeCount(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java index c3de347777d..4c21ce6c776 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/ConfigSetAdminRequest.java @@ -113,7 +113,7 @@ public String getRequestType() { * Uploads files to create a new configset, or modify an existing config set. * *

      When creating a new configset, the file to be uploaded must be a ZIP file containing the - * entire configset being uploaded. When modifing an existing configset, the file to be uploaded + * entire configset being uploaded. When modifying an existing configset, the file to be uploaded * should either be a ZIP file containing the entire configset being uploaded, or an individual * file to upload if {@link #setFilePath} is being used. */ @@ -153,7 +153,7 @@ public final String getFilePath() { } /** - * A convinience method for specifying an existing File to use as the upload data. + * A convenience method for specifying an existing File to use as the upload data. * *

      This should either be a ZIP file containing the entire configset being uploaded, or an * individual file to upload into an existing configset if {@link #setFilePath} is being used. diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/DocumentAnalysisRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/DocumentAnalysisRequest.java index 912ba039756..069a263fef1 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/DocumentAnalysisRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/DocumentAnalysisRequest.java @@ -97,9 +97,9 @@ public ModifiableSolrParams getParams() { // ===== Helper Methods ===== /** - * Returns the xml be be set as the request body. + * Returns the xml being set as the request body. * - * @return The xml be be set as the request body. + * @return The xml being set as the request body. * @throws IOException When constructing the xml fails */ String getXML(Writer writer) throws IOException { @@ -141,7 +141,7 @@ public DocumentAnalysisRequest addDocuments(Collection docs) } /** - * Sets the query to be analyzed. By default the query is set to null, meaning no query analysis + * Sets the query to be analyzed. By default, the query is set to null, meaning no query analysis * will be performed. * * @param query The query to be analyzed. @@ -154,7 +154,7 @@ public DocumentAnalysisRequest setQuery(String query) { /** * Sets whether index time tokens that match query time tokens should be marked as a "match". By - * default this is set to {@code false}. Obviously, this flag is ignored if when the query is set + * default, this is set to {@code false}. Obviously, this flag is ignored if when the query is set * to {@code null}. * * @param showMatch Sets whether index time tokens that match query time tokens should be marked diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/FieldAnalysisRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/FieldAnalysisRequest.java index 8dda80d22f4..b513b917428 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/FieldAnalysisRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/FieldAnalysisRequest.java @@ -89,7 +89,7 @@ public String getRequestType() { // ===== Helper Methods ===== /** - * Convers the given list of string to a comma-separated string. + * Converts the given list of strings to a comma-separated string. * * @param list The list of string. * @return The comma-separated string. @@ -152,7 +152,7 @@ public String getQuery() { /** * Sets whether index time tokens that match query time tokens should be marked as a "match". By - * default this is set to {@code false}. Obviously, this flag is ignored if when the query is set + * default, this is set to {@code false}. Obviously, this flag is ignored if when the query is set * to {@code null}. * * @param showMatch Sets whether index time tokens that match query time tokens should be marked @@ -236,7 +236,7 @@ public FieldAnalysisRequest setFieldTypes(List fieldTypes) { /** * Returns a list of field types the analysis should be performed on. May return {@code null} - * indicating that no analysis will be peformed on field types. + * indicating that no analysis will be performed on field types. * * @return The field types the analysis should be performed on. 
*/ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/GenericV2SolrRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/GenericV2SolrRequest.java new file mode 100644 index 00000000000..f4267ab7815 --- /dev/null +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/GenericV2SolrRequest.java @@ -0,0 +1,58 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.solr.client.solrj.request; + +import org.apache.solr.common.params.SolrParams; + +/** A {@link GenericSolrRequest} implementation intended for v2 APIs */ +public class GenericV2SolrRequest extends GenericSolrRequest { + + /** + * @param m the HTTP method to use for this request + * @param path the HTTP path to use for this request. Path may include the v2 API root path (i.e. + * "/api"), but does not need to. If users are making a collection-aware request (i.e. {@link + * #setRequiresCollection(boolean)} is called with 'true'), only the section of the API path + * following the collection or core should be provided here. + */ + public GenericV2SolrRequest(METHOD m, String path) { + super(m, removeLeadingApiRoot(path)); + } + + /** + * @param m the HTTP method to use for this request + * @param path the HTTP path to use for this request. If users are making a collection-aware + * request (i.e. {@link #setRequiresCollection(boolean)} is called with 'true'), only the + * section of the API path following the collection or core should be provided here. + * @param params query parameter names and values for making this request. + */ + public GenericV2SolrRequest(METHOD m, String path, SolrParams params) { + super(m, path); + this.params = params; + } + + @Override + public ApiVersion getApiVersion() { + return ApiVersion.V2; + } + + private static String removeLeadingApiRoot(String path) { + if (path.startsWith("/api")) { + return path.replaceFirst("/api", ""); + } + return path; + } +} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java index 53907201c2b..7937beadd02 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/HealthCheckRequest.java @@ -57,7 +57,7 @@ public SolrParams getParams() { @Override protected HealthCheckResponse createResponse(SolrClient client) { // TODO: Accept requests w/ CloudSolrClient while ensuring that the request doesn't get routed - // to an unintended recepient. + // to an unintended recipient. 
assert client instanceof HttpSolrClient; return new HealthCheckResponse(); } diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java index 242b24591f3..bcef4488560 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java @@ -111,6 +111,11 @@ public String getCollection() { return collection; } + @Override + public ApiVersion getApiVersion() { + return ApiVersion.V2; + } + @Override protected V2Response createResponse(SolrClient client) { return new V2Response(); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java index ef1b66e5971..e13aa32f36b 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java @@ -179,7 +179,7 @@ public TermsFacetMap includeMissingBucket(boolean missingBucket) { /** * Indicates that Solr should include the total number of buckets for this facet. * - *

Note that this is different than the number of buckets returned. Defaults to false if not
+   * <p>Note that this is different from the number of buckets returned. Defaults to false if not
    * specified
    *
    * @param numBuckets true if the "numBuckets" field should be computed; false otherwise
@@ -192,7 +192,7 @@ public TermsFacetMap includeTotalNumBuckets(boolean numBuckets) {
   /**
    * Creates a bucket representing the union of all other buckets.
    *
-   * <p>For multi-valued fields this is different than a bucket for the entire domain, since
+   * <p>
      For multi-valued fields this is different from a bucket for the entire domain, since * documents can belong to multiple buckets. Defaults to false if not specified. * * @param shouldInclude true if the union bucket "allBuckets" should be computed; false otherwise diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java index 3c5cb6b006d..f458bdc01c9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/AnalysisResponseBase.java @@ -96,8 +96,7 @@ protected TokenInfo buildTokenInfoFromString(String value) { } /** - * Parses the given named list and builds a token infoform it. Expects a named list of the form: - *
      + * Parses the given named list and builds a token from it. Expects a named list of the form:
   *
   * <pre>
          *  <arr name="Tokenizer">
      @@ -142,7 +141,7 @@ public static class AnalysisPhase {
           }
       
           /**
      -     * The name of the class (analyzer, tokenzier, or filter) that produced the token stream for
      +     * The name of the class (analyzer, tokenizer, or filter) that produced the token stream for
            * this phase.
            *
            * @return The name of the class that produced the token stream for this phase.
      @@ -213,7 +212,7 @@ public String getText() {
       
           /**
            * Returns the raw text of the token. If the token is index in a special format (e.g. date or
      -     * paddded numbers) it will be returned as the raw text. Returns {@code null} if the token is
      +     * padded numbers) it will be returned as the raw text. Returns {@code null} if the token is
            * indexed as is.
            *
            * @return Returns the raw text of the token.
      @@ -223,8 +222,8 @@ public String getRawText() {
           }
       
           /**
-     * Returns the type of the token. Typically this will be {@code word} or {@code <ALPHANUM>}, but
-     * it really depends on the tokenizer and filters that are used.
+     * Returns the type of the token. Typically, this will be {@code word} or {@code <ALPHANUM>},
+     * but it really depends on the tokenizer and filters that are used.
            *
            * @return The type of the token.
            */
      diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/Cluster.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/Cluster.java
      index e91f216f918..fad992c1388 100644
      --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/Cluster.java
      +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/Cluster.java
      @@ -36,7 +36,7 @@ public Cluster(List labels, double score, List docIds) {
         }
       
         /**
      -   * @param labels the list of human readable labels associated to the cluster
      +   * @param labels the list of human-readable labels associated to the cluster
          * @param score the score produced by the clustering algorithm for the current cluster
          * @param docIds the list of document Ids belonging to the cluster
          */
      diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
      index 563303fb9de..71aeb2c8d6e 100644
      --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
      +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/CollectionAdminResponse.java
      @@ -37,7 +37,7 @@ public String getWarning() {
           return (String) getResponse().get("warning");
         }
       
      -  // this messages are typically from individual nodes, since
      +  // these messages are typically from individual nodes, since
         // all the failures at the router are propagated as exceptions
         @SuppressWarnings("unchecked")
   public NamedList<String> getErrorMessages() {
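
As a rough illustration of where these per-node failure messages surface in SolrJ, a collection-admin call can be checked along these lines (a minimal sketch, not part of the patch; the client, collection and configset names are invented):

    // 'client' is assumed to be an existing SolrClient pointed at a SolrCloud cluster.
    CollectionAdminResponse rsp =
        CollectionAdminRequest.createCollection("demo", "_default", 2, 1).process(client);
    if (!rsp.isSuccess()) {
      // keys are node names, values are the failure messages propagated back from those nodes
      System.err.println("create failed: " + rsp.getErrorMessages());
    }
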
      diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FacetField.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FacetField.java
      index ec00da4ee20..9e9025d7672 100644
      --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FacetField.java
      +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FacetField.java
      @@ -24,7 +24,7 @@
       
       /**
        * A utility class to hold the facet response. It could use the NamedList container, but for JSTL,
      - * it is nice to have something that implements List so it can be iterated
      + * it is nice to have something that implements List, so it can be iterated
        *
        * @since solr 1.3
        */
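
For context, the List-like shape described in that javadoc is what the usual facet-iteration pattern relies on; a minimal sketch (collection, field and query are examples only, not taken from the patch):

    // 'client' is an existing SolrClient.
    SolrQuery query = new SolrQuery("*:*");
    query.setFacet(true);
    query.addFacetField("cat");
    QueryResponse rsp = client.query("techproducts", query);
    for (FacetField field : rsp.getFacetFields()) {
      for (FacetField.Count bucket : field.getValues()) {
        System.out.println(field.getName() + ": " + bucket.getName() + " = " + bucket.getCount());
      }
    }
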
      diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldAnalysisResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldAnalysisResponse.java
      index 3c9125ef1e4..409ffe2a958 100644
      --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldAnalysisResponse.java
      +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldAnalysisResponse.java
      @@ -114,9 +114,9 @@ public Analysis getFieldNameAnalysis(String fieldName) {
         }
       
         /**
      -   * Returns all field name analysese with their associated field names.
+   * Returns all field name analyses with their associated field names.
          *
      -   * @return all field name analysese with their associated field names.
+   * @return all field name analyses with their associated field names.
          */
   public Iterable<Map.Entry<String, Analysis>> getAllFieldNameAnalysis() {
           return analysisByFieldName.entrySet();
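
To make the shape of these analysis objects concrete, a response is normally walked per field and per phase, roughly as below (a sketch under assumed names; the field, collection and sample text are invented):

    // 'client' is an existing SolrClient; "techproducts" and "text_en" are placeholders.
    FieldAnalysisRequest req = new FieldAnalysisRequest();
    req.addFieldName("text_en");
    req.setFieldValue("The Quick Brown Fox");
    FieldAnalysisResponse rsp = req.process(client, "techproducts");
    FieldAnalysisResponse.Analysis analysis = rsp.getFieldNameAnalysis("text_en");
    for (AnalysisResponseBase.AnalysisPhase phase : analysis.getIndexPhases()) {
      for (AnalysisResponseBase.TokenInfo token : phase.getTokens()) {
        System.out.println(phase.getClassName() + " -> " + token.getText() + " [" + token.getType() + "]");
      }
    }
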
      diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java
      index c0d8702dcab..cd1ade0b3e3 100644
      --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java
      +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/FieldStatsInfo.java
      @@ -168,7 +168,7 @@ public Long getCount() {
         }
       
         public Long getCountDistinct() {
      -    // :TODO: as client convinience, should we return cardinality if this is null?
      +    // :TODO: as client convenience, should we return cardinality if this is null?
           return countDistinct;
         }
       
      @@ -204,9 +204,9 @@ public Map getPercentiles() {
           return percentiles;
         }
       
      -  /** The cardinality of of the set of values if requested, otherwise null. */
      +  /** The cardinality of the set of values if requested, otherwise null. */
         public Long getCardinality() {
      -    // :TODO: as client convinience, should we return countDistinct if this is null?
      +    // :TODO: as client convenience, should we return countDistinct if this is null?
           return cardinality;
         }
       }
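
For reference, the cardinality discussed in those TODOs is only populated when it is explicitly requested through the stats component; a minimal sketch of requesting and reading it (names are examples, not from the patch):

    // 'client' is an existing SolrClient; "techproducts" and "price" are placeholders.
    SolrQuery query = new SolrQuery("*:*");
    query.setRows(0);
    query.set("stats", true);
    query.set("stats.field", "{!cardinality=true}price");
    QueryResponse rsp = client.query("techproducts", query);
    FieldStatsInfo priceStats = rsp.getFieldStatsInfo().get("price");
    System.out.println("approximate distinct values: " + priceStats.getCardinality());
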
      diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/Group.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/Group.java
      index 6751d04bba3..5b6691186c5 100644
      --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/Group.java
      +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/Group.java
      @@ -24,7 +24,8 @@
        * share and documents that belong to this group.
        *
        * 

      A group value can be a field value, function result or a query string depending on the {@link - * GroupCommand}. In case of a field value or a function result the value is always a indexed value. + * GroupCommand}. In case of a field value or a function result the value is always an indexed + * value. * * @since solr 3.4 */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/LukeResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/LukeResponse.java index bd81ed288b4..c38a2caf830 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/LukeResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/LukeResponse.java @@ -205,7 +205,7 @@ public NamedList getTopTerms() { public void setResponse(NamedList res) { super.setResponse(res); - // Parse indexinfo + // Parse index information indexInfo = (NamedList) res.get("index"); NamedList schema = (NamedList) res.get("schema"); diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java index 40070429c63..2a785632d24 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/QueryResponse.java @@ -637,7 +637,7 @@ public FacetField getFacetDate(String name) { } /** - * @return a list of FacetFields where the count is less then then #getResults() {@link + * @return a list of FacetFields where the count is less than the #getResults() {@link * SolrDocumentList#getNumFound()} *

      If you want all results exactly as returned by solr, use: {@link #getFacetFields()} */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/RequestStatusState.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/RequestStatusState.java index a0eb7b3a2e1..a6166fb97c2 100755 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/RequestStatusState.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/RequestStatusState.java @@ -37,7 +37,7 @@ public enum RequestStatusState { /** The request was submitted, but has not yet started. */ SUBMITTED("submitted"), - /** The request Id was not found. */ + /** The request was not found. */ NOT_FOUND("notfound"); private final String key; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java index 709c3ce8556..2269365ceaf 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/SpellCheckResponse.java @@ -56,7 +56,7 @@ public SpellCheckResponse(NamedList spellInfo) { @SuppressWarnings("unchecked") NamedList coll = (NamedList) spellInfo.get("collations"); if (coll != null) { - // The 'collationInternalRank' values are ignored so we only care 'collation's. + // The 'collationInternalRank' values are ignored, so we only care about collation's. List collationInfo = coll.getAll("collation"); collations = new ArrayList<>(collationInfo.size()); for (Object o : collationInfo) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/TermsResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/TermsResponse.java index 59aa42c97aa..9d2c1e544d9 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/TermsResponse.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/TermsResponse.java @@ -51,7 +51,7 @@ public TermsResponse(NamedList> termsInfo) { } /** - * Get's the term list for a given field + * Gets the term list for a given field * * @return the term list or null if no terms for the given field exist */ diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java index a5271524af9..525a4de04a3 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/BucketBasedJsonFacet.java @@ -89,7 +89,7 @@ public List getBuckets() { * *

      This value can only be computed on "terms" facets where the user has specifically requested * it with the {@code numBuckets} option. {@link #UNSET_FLAG} is returned if this is a "range" - * facet or {@code numBuckets} computation was not requested in the intiial request. + * facet or {@code numBuckets} computation was not requested in the initial request. */ public long getNumBuckets() { return numBuckets; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/AffinityReplicaListTransformerFactory.java b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/AffinityReplicaListTransformerFactory.java index 1229ce56933..09609619c23 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/AffinityReplicaListTransformerFactory.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/AffinityReplicaListTransformerFactory.java @@ -59,7 +59,7 @@ public AffinityReplicaListTransformerFactory(NamedList c) { * CommonParams#Q}. Empty String is translated to null, allowing users to explicitly disable * hash-based stable routing. * - * @param hashParam configured hash param (null indicates unconfigured). + * @param hashParam configured hash param (null indicates no configuration). * @return translated value to be used as default hash param in RLT. */ private static String translateHashParam(String hashParam) { diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparator.java index 0585fd3f7d3..527a8a30d5d 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparator.java @@ -31,7 +31,7 @@ /** * This comparator makes sure that the given replicas are sorted according to the given list of * preferences. E.g. If all nodes prefer local cores then a bad/heavily-loaded node will receive - * less requests from healthy nodes. This will help prevent a distributed deadlock or timeouts in + * fewer requests from healthy nodes. This will help prevent a distributed deadlock or timeouts in * all the healthy nodes due to one bad node. * *

      Optional final preferenceRule is *not* used for pairwise sorting, but instead defines how diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java index 748d13f4b5d..28e2072e77f 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/routing/RequestReplicaListTransformerGenerator.java @@ -181,7 +181,7 @@ public void transform(List choices) { } while (iter.hasNext()); boundaries[boundaryCount++] = idx; - // Finally inspect boundaries to apply base transformation, where necessary (separate phase + // Finally, inspect boundaries to apply base transformation, where necessary (separate phase // to avoid ConcurrentModificationException) int startIdx = 0; int endIdx; diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java index 16f27410400..6f0ff47ee47 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java @@ -20,7 +20,7 @@ import java.io.StringWriter; import java.io.Writer; import java.net.MalformedURLException; -import java.net.URL; +import java.net.URI; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -69,7 +69,7 @@ public static Collection toContentStreams( * @param solrRequest the {@link SolrRequest} to build the URL for * @param requestWriter a {@link RequestWriter} from the {@link SolrClient} that will be sending * the request - * @param serverRootUrl the root URL of the Solr server being targeted. May by overridden {@link + * @param serverRootUrl the root URL of the Solr server being targeted. May be overridden {@link * SolrRequest#getBasePath()}, if present. * @param collection the collection to send the request to. May be null if no collection is * needed. @@ -84,11 +84,11 @@ public static String buildRequestUrl( throws MalformedURLException { String basePath = solrRequest.getBasePath() == null ? 
serverRootUrl : solrRequest.getBasePath(); - if (solrRequest instanceof V2Request) { - if (System.getProperty("solr.v2RealPath") == null) { - basePath = changeV2RequestEndpoint(basePath); - } else { + if (solrRequest.getApiVersion() == SolrRequest.ApiVersion.V2) { + if (solrRequest instanceof V2Request && System.getProperty("solr.v2RealPath") != null) { basePath = serverRootUrl + "/____v2"; + } else { + basePath = addNormalV2ApiRoot(basePath); } } @@ -102,10 +102,18 @@ public static String buildRequestUrl( return basePath + path; } - private static String changeV2RequestEndpoint(String basePath) throws MalformedURLException { - URL oldURL = new URL(basePath); - String newPath = oldURL.getPath().replaceFirst("/solr", "/api"); - return new URL(oldURL.getProtocol(), oldURL.getHost(), oldURL.getPort(), newPath).toString(); + private static String addNormalV2ApiRoot(String basePath) throws MalformedURLException { + final var oldURI = URI.create(basePath); + final var revisedPath = buildReplacementV2Path(oldURI.getPath()); + return oldURI.resolve(revisedPath).toString(); + } + + private static String buildReplacementV2Path(String existingPath) { + if (existingPath.contains("/solr")) { + return existingPath.replaceFirst("/solr", "/api"); + } else { + return existingPath + "/api"; + } } // ------------------------------------------------------------------------ @@ -124,7 +132,7 @@ public static void writeXML(SolrInputDocument doc, Writer writer) throws IOExcep if (v instanceof SolrInputDocument) { writeVal(writer, name, v, null); } else if (v instanceof Map) { - // currently only supports a single value + // currently, only supports a single value for (Entry entry : ((Map) v).entrySet()) { update = entry.getKey().toString(); v = entry.getValue(); @@ -211,7 +219,7 @@ public static String toXML(SolrInputDocument doc) { * See: Lucene query * parser syntax for more information on Escaping Special Characters */ - // NOTE: its broken to link to any lucene-queryparser.jar docs, not in classpath!!!!! + // NOTE: It is a broken link to any lucene-queryparser.jar docs, not in classpath!!!!! public static String escapeQueryChars(String s) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < s.length(); i++) { @@ -246,14 +254,16 @@ public static String escapeQueryChars(String s) { } /** - * Returns the value encoded properly so it can be appended after a + * Returns the (literal) value encoded properly, so it can be appended after a name= + * local-param key. * - *

      name=
      - * - * local-param. + *

      NOTE: This method assumes $ is a literal character that must be quoted. (It + * does not assume strings starting $ should be treated as param references) */ public static String encodeLocalParamVal(String val) { int len = val.length(); + if (0 == len) return "''"; // quoted empty string + int i = 0; if (len > 0 && val.charAt(0) != '$') { for (; i < len; i++) { @@ -295,7 +305,7 @@ public static void addSlices( } /** - * Determines whether any SolrClient "default" collection should applied to the specified request + * Determines whether any SolrClient "default" collection should apply to the specified request * * @param providedCollection a collection/core explicitly provided to the SolrClient (typically * through {@link org.apache.solr.client.solrj.SolrClient#request(SolrRequest, String)} diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java index 0a9115068ef..b8476ace462 100644 --- a/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java +++ b/solr/solrj/src/java/org/apache/solr/client/solrj/util/SolrIdentifierValidator.java @@ -22,7 +22,7 @@ /** * Ensures that provided identifiers align with Solr's recommendations/requirements for choosing - * collection, core, etc identifiers. + * collection, core, etc. identifiers. * *

      Identifiers are allowed to contain underscores, periods, hyphens, and alphanumeric characters. */ diff --git a/solr/solrj/src/java/org/apache/solr/cluster/api/Resource.java b/solr/solrj/src/java/org/apache/solr/cluster/api/Resource.java index 0170de6e4e8..13548f6a822 100644 --- a/solr/solrj/src/java/org/apache/solr/cluster/api/Resource.java +++ b/solr/solrj/src/java/org/apache/solr/cluster/api/Resource.java @@ -23,7 +23,7 @@ /** A binary resource. The impl is agnostic of the content type */ public interface Resource { - /** This is a full path. e.g schema.xml , solrconfig.xml , lang/stopwords.txt etc */ + /** This is a full path. e.g schema.xml, solrconfig.xml, lang/stopwords.txt etc */ String name(); /** diff --git a/solr/solrj/src/java/org/apache/solr/cluster/api/SimpleMap.java b/solr/solrj/src/java/org/apache/solr/cluster/api/SimpleMap.java index 2b768a6f6bb..4398d12742f 100644 --- a/solr/solrj/src/java/org/apache/solr/cluster/api/SimpleMap.java +++ b/solr/solrj/src/java/org/apache/solr/cluster/api/SimpleMap.java @@ -28,7 +28,7 @@ /** * A simplified read-only key-value structure. It is designed to support large datasets without - * consuming lot of memory The objective is to provide implementations that are cheap and memory + * consuming a lot of memory The objective is to provide implementations that are cheap and memory * efficient to implement and consume. The keys are always {@link CharSequence} objects, The values * can be of any type */ diff --git a/solr/solrj/src/java/org/apache/solr/common/ConfigNode.java b/solr/solrj/src/java/org/apache/solr/common/ConfigNode.java index 52bf65d043b..be86c1e5c7b 100644 --- a/solr/solrj/src/java/org/apache/solr/common/ConfigNode.java +++ b/solr/solrj/src/java/org/apache/solr/common/ConfigNode.java @@ -52,8 +52,8 @@ default ConfigNode child(String name) { } /** - * Child by name or return an empty node if null if there are multiple values , it returns the - * first elem This never returns a null + * Child by name or return an empty node if null. If there are multiple values, it returns the + * first element. This never returns a null. */ default ConfigNode get(String name) { ConfigNode child = child(null, name); diff --git a/solr/solrj/src/java/org/apache/solr/common/EnumFieldValue.java b/solr/solrj/src/java/org/apache/solr/common/EnumFieldValue.java index 88f9f661f96..ec61abe5845 100644 --- a/solr/solrj/src/java/org/apache/solr/common/EnumFieldValue.java +++ b/solr/solrj/src/java/org/apache/solr/common/EnumFieldValue.java @@ -19,7 +19,7 @@ import java.io.Serializable; /** - * Represents a Enum field value, which includes integer value (indicating the sort order) and + * Represents an Enum field value, which includes integer value (indicating the sort order) and * string (displayed) value. Note: this class has a natural ordering that is inconsistent with * equals */ diff --git a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java index 188ee18a5cf..7241f792aaa 100644 --- a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java +++ b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java @@ -23,7 +23,7 @@ /** * This class contains helper methods for navigating deeply nested Objects. Keep in mind that it may - * be expensive depending on the underlying implementation. each level needs an extra lookup and the + * be expensive depending on the underlying implementation. 
Each level needs an extra lookup and the * lookup may be as expensive as O(log(n)) to O(n) depending on the underlying impl */ public interface NavigableObject { diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrCloseableLatch.java b/solr/solrj/src/java/org/apache/solr/common/SolrCloseableLatch.java index e3e15937a1b..630993546a2 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrCloseableLatch.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrCloseableLatch.java @@ -22,7 +22,7 @@ import java.util.concurrent.TimeUnit; /** - * This class mimicks the operation of {@link java.util.concurrent.CountDownLatch}, but it also + * This class mimics the operation of {@link java.util.concurrent.CountDownLatch}, but it also * periodically checks the state of the provided {@link SolrCloseable} and terminates the wait if * it's closed by throwing an {@link InterruptedException}. */ diff --git a/solr/solrj/src/java/org/apache/solr/common/SolrException.java b/solr/solrj/src/java/org/apache/solr/common/SolrException.java index 2bcab2cab3a..e19883539ff 100644 --- a/solr/solrj/src/java/org/apache/solr/common/SolrException.java +++ b/solr/solrj/src/java/org/apache/solr/common/SolrException.java @@ -30,7 +30,7 @@ public class SolrException extends RuntimeException { private final Map mdcContext; /** - * This list of valid HTTP Status error codes that Solr may return in the case of a "Server Side" + * This list of valid HTTP Status error codes that Solr may return when there is a "Server Side" * error. * * @since solr 1.2 @@ -95,7 +95,7 @@ protected SolrException(int code, String msg, Throwable th) { /** * The HTTP Status code associated with this Exception. For SolrExceptions thrown by Solr "Server - * Side", this should valid {@link ErrorCode}, however client side exceptions may contain an + * Side", this should be a valid {@link ErrorCode}, however client side exceptions may contain an * arbitrary error code based on the behavior of the Servlet Container hosting Solr, or any HTTP * Proxies that may exist between the client and the server. * diff --git a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java index a99605f12f3..20a9c07e574 100644 --- a/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java +++ b/solr/solrj/src/java/org/apache/solr/common/ToleratedUpdateError.java @@ -49,7 +49,7 @@ public static int getEffectiveMaxErrors(int maxErrors) { /** * Given a 'maxErrors' value such that-1 <= maxErrors <= {@link Integer#MAX_VALUE} * this method returns the original input unless it is {@link Integer#MAX_VALUE} in which - * case -1 is returned for user convinience. Input of maxErrors < -1 + * case -1 is returned for user convenience. Input of maxErrors < -1 * will trip an assertion and otherwise have undefined behavior. * * @see #getEffectiveMaxErrors diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java index 5f7a23f2ed0..b1d8782cb10 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterState.java @@ -438,7 +438,7 @@ public CollectionRef(DocCollection coll) { } /** - * Return the DocCollection, always refetching if lazy. Equivalent to get(false) + * Return the DocCollection, always re-fetching if lazy. 
Equivalent to get(false) * * @return The collection state modeled in zookeeper */ diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java b/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java index af0ba278c0f..db9ea7cef17 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/CollectionStatePredicate.java @@ -20,7 +20,7 @@ import java.util.Set; /** - * Interface to determine if a set of liveNodes and a collection's state matches some expecatations. + * Interface to determine if a set of liveNodes and a collection's state matches some expectations. * * @see "ZkStateReader#waitForState(String, long, TimeUnit, CollectionStatePredicate)" * @see "ZkStateReader#waitForState(String, long, TimeUnit, Predicate)" diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java index 9094ddd84e8..38ea73b0925 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/CompositeIdRouter.java @@ -40,7 +40,7 @@ * app/2!user/4!uniqueid * * - * Lets say you had a set of records you want to index together such as a contact in a database, + * Let's say you had a set of records you want to index together such as a contact in a database, * using a prefix of contact!contactid would allow all contact ids to be bucketed together. * *

      @@ -99,7 +99,7 @@ public class CompositeIdRouter extends HashBasedRouter {
       
         /**
          * Parse out the route key from {@code id} up to and including the {@link #SEPARATOR}, returning
      -   * it's length. If no route key is detected then 0 is returned.
      +   * its length. If no route key is detected then 0 is returned.
          */
         public int getRouteKeyWithSeparator(byte[] id, int idOffset, int idLength) {
           final byte SEPARATOR_BYTE = (byte) CompositeIdRouter.SEPARATOR;
@@ -323,7 +323,7 @@ public List<Range> partitionRange(int partitions, Range range) {
           return ranges;
         }
       
      -  /** Helper class to calculate parts, masks etc for an id. */
      +  /** Helper class to calculate parts, masks etc. for an id. */
         protected static class KeyParser {
           String key;
           int[] numBits;
      @@ -347,7 +347,7 @@ public KeyParser(final String key) {
                 if (-1 == secondSeparatorPos) {
                   partsList.add(key.substring(firstSeparatorPos + 1));
                 } else if (secondSeparatorPos == lastPos) {
      -            // Don't make any more parts if the key has exactly two separators and
      +            // Don't make any more parts if the key has exactly two separators, and
                   // they're the last two chars - back-compatibility with the behavior of
                   // String.split() - see SOLR-6257.
                   if (firstSeparatorPos < secondSeparatorPos - 1) {
      @@ -406,7 +406,7 @@ public Range getRange() {
             //  0xF0000000 0xFfffffff
       
             if ((masks[0] == 0 && !triLevel) || (masks[0] == 0 && masks[1] == 0 && triLevel)) {
      -        // no bits used from first part of key.. the code above will produce 0x000000000->0xffffffff
+        // no bits used from first part of key; the code above will produce 0x00000000->0xffffffff
               // which only works on unsigned space, but we're using signed space.
               lowerBound = Integer.MIN_VALUE;
               upperBound = Integer.MAX_VALUE;
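
For readers following the routing hunks above, here is a minimal standalone sketch of the contract documented for getRouteKeyWithSeparator. It is illustrative only, not Solr's implementation, and it assumes a single-level route key such as "app!doc1":

    // Illustrative sketch: length of the route key prefix up to and including the
    // '!' separator, or 0 when the id carries no route key (single-level keys assumed).
    static int routeKeyLengthWithSeparator(byte[] id, int idOffset, int idLength) {
      final byte SEPARATOR = (byte) '!';
      for (int i = 0; i < idLength; i++) {
        if (id[idOffset + i] == SEPARATOR) {
          return i + 1; // include the separator itself in the reported length
        }
      }
      return 0; // no route key detected
    }
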
      diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java
      index 490a0a21eda..3c8ff93b816 100644
      --- a/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java
      +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/DocRouter.java
      @@ -237,7 +237,7 @@ public abstract Slice getTargetSlice(
       
         /**
          * This method is consulted to determine what slices should be queried for a request when an
      -   * explicit shards parameter was not used. This method only accepts a single shard key (or null).
+   * explicit {@code shards} parameter was not used. This method only accepts a single shard key (or null).
          * If you have a comma separated list of shard keys, call getSearchSlices
          */
   public abstract Collection<Slice> getSearchSlicesSingle(
@@ -245,7 +245,7 @@ public abstract Collection<Slice> getSearchSlicesSingle(
       
         /**
          * This method is consulted to determine what search range (the part of the hash ring) should be
      -   * queried for a request when an explicit shards parameter was not used. This method only accepts
+   * queried for a request when an explicit {@code shards} parameter was not used. This method only accepts
          * a single shard key (or null).
          */
         public Range getSearchRangeSingle(String shardKey, SolrParams params, DocCollection collection) {
      @@ -263,7 +263,7 @@ public abstract boolean isTargetSlice(
       
         /**
          * This method is consulted to determine what slices should be queried for a request when an
      -   * explicit shards parameter was not used. This method accepts a multi-valued shardKeys parameter
+   * explicit {@code shards} parameter was not used. This method accepts a multi-valued shardKeys parameter
          * (normally comma separated from the shard.keys request parameter) and aggregates the slices
          * returned by getSearchSlicesSingle for each shardKey.
          */
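
A usage sketch tying the shard-key javadocs above to the client side. It assumes a collection named "myCollection" using the compositeId router and an already-built SolrClient, and it uses the shard.keys parameter mentioned in the javadoc; none of these names come from the patch itself:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.client.solrj.response.QueryResponse;

    // Restrict a distributed query to the slices owning two route keys; the router
    // aggregates the slices returned by getSearchSlicesSingle for each key.
    static QueryResponse queryTwoRouteKeys(SolrClient solrClient) throws Exception {
      SolrQuery query = new SolrQuery("*:*");
      query.set("shard.keys", "app1!,app2!"); // comma-separated shard keys
      return solrClient.query("myCollection", query);
    }
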
      diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java b/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java
      index b3019edc794..e3dfa9dc209 100644
      --- a/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java
      +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/Replica.java
      @@ -56,7 +56,7 @@ public enum State {
            * trying to move to {@link State#RECOVERING}.
            *
            * 

      NOTE: a replica's state may appear DOWN in ZK also when the node it's hosted on - * gracefully shuts down. This is a best effort though, and should not be relied on. + * gracefully shuts down. This is a "best effort" though, and should not be relied on. */ DOWN("D"), @@ -67,10 +67,10 @@ public enum State { RECOVERING("R"), /** - * Recovery attempts have not worked, something is not right. + * Recovery attempts has not worked, something is not right. * *

      NOTE: This state doesn't matter if the node is not part of {@code /live_nodes} in - * ZK; in that case the node is not part of the cluster and it's state should be discarded. + * ZK; in that case the node is not part of the cluster, and it's state should be discarded. */ RECOVERY_FAILED("F"); @@ -113,7 +113,7 @@ public enum Type { */ TLOG(true, true, true, CollectionAdminParams.TLOG_REPLICAS), /** - * Doesn’t index or writes to transaction log. Just replicates from {@link Type#NRT} or {@link + * Does not index or writes to transaction log. Just replicates from {@link Type#NRT} or {@link * Type#TLOG} replicas. {@link Type#PULL} replicas can’t become shard leaders (i.e., if there * are only pull replicas in the collection at some point, updates will fail same as if there is * no leaders, queries continue to work), so they don’t even participate in elections. diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ReplicaCount.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ReplicaCount.java index a76f09d163d..3235671b5e0 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ReplicaCount.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ReplicaCount.java @@ -151,7 +151,7 @@ public boolean contains(Replica.Type type) { return countByType.containsKey(type); } - /** Returns the replica types for which a number of replicas was explicitely defined. */ + /** Returns the replica types for which a number of replicas was explicitly defined. */ public Set keySet() { return countByType.keySet(); } diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkCoreNodeProps.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkCoreNodeProps.java index 5e789efb7eb..9c060511445 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkCoreNodeProps.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkCoreNodeProps.java @@ -48,7 +48,7 @@ public String getCoreName() { } private static String getBaseUrl(ZkNodeProps nodeProps) { - // if storing baseUrl in ZK is enabled and it's stored, just use what's stored, i.e. no + // if storing baseUrl in ZK is enabled, and it's stored, just use what's stored, i.e. no // self-healing here String baseUrl = nodeProps.getStr(ReplicaStateProps.BASE_URL); if (baseUrl == null) { diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java index b187e475fe8..1844ad27a8b 100644 --- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java +++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkNodeProps.java @@ -58,7 +58,7 @@ public ZkNodeProps plus(Map newVals) { } /** - * Constructor that populates the from array of Strings in form key1, value1, key2, value2, ..., + * Constructor that populates from an array of Strings in form key1, value1, key2, value2, ..., * keyN, valueN */ public ZkNodeProps(String... keyVals) { diff --git a/solr/solrj/src/java/org/apache/solr/common/params/AnalysisParams.java b/solr/solrj/src/java/org/apache/solr/common/params/AnalysisParams.java index 6b04e6d2298..d50db80de03 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/AnalysisParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/AnalysisParams.java @@ -43,6 +43,6 @@ public interface AnalysisParams { /** Holds a comma-separated list of field types that the analysis should be peformed for. 
*/ static final String FIELD_TYPE = PREFIX + ".fieldtype"; - /** Hodls a comma-separated list of field named that the analysis should be performed for. */ + /** Holds a comma-separated list of field named that the analysis should be performed for. */ static final String FIELD_VALUE = PREFIX + ".fieldvalue"; } diff --git a/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java index 984bffcb537..f88775b5f86 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/AppendedSolrParams.java @@ -18,7 +18,7 @@ /** * SolrParams wrapper which acts similar to DefaultSolrParams except that it "appends" the values of - * multi-value params from both sub instances, so that all of the values are returned. + * multi-value params from both sub instances, so that all the values are returned. */ public class AppendedSolrParams extends DefaultSolrParams { diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java index 9fca76238b3..198bd7ef79c 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java @@ -131,7 +131,8 @@ public interface CommonParams { String TRACK = "track"; /** - * boolean indicating whether score explanations should structured (true), or plain text (false) + * boolean indicating whether score explanations should be structured (true), or plain text + * (false) */ String EXPLAIN_STRUCT = "debug.explain.structured"; @@ -153,7 +154,7 @@ public interface CommonParams { */ String STREAM_CONTENTTYPE = "stream.contentType"; - /** Whether or not the search may be terminated early within a segment. */ + /** Whether the search may be terminated early within a segment. */ String SEGMENT_TERMINATE_EARLY = "segmentTerminateEarly"; boolean SEGMENT_TERMINATE_EARLY_DEFAULT = false; @@ -167,6 +168,9 @@ public interface CommonParams { /** Timeout value in milliseconds. If not set, or the value is < 0, there is no timeout. */ String TIME_ALLOWED = "timeAllowed"; + /** Whether the search may use the multi-threaded logic */ + String MULTI_THREADED = "multiThreaded"; + /** * Maximum query CPU usage value in milliseconds. If not set, or the value is < 0, there is no * timeout. @@ -276,8 +280,8 @@ public static EchoParamStyle get(String v) { /** * Used as a local param on filter queries in conjunction with cache=false. Filters are checked in - * order, from smallest cost to largest. If cost>=100 and the query implements PostFilter, then - * that interface will be used to do post query filtering. + * order, from the smallest cost to largest. If cost>=100 and the query implements PostFilter, + * then that interface will be used to do post query filtering. 
*/ String COST = "cost"; diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java index 4d9427f31bb..15d4397bd39 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CoreAdminParams.java @@ -159,6 +159,9 @@ public abstract class CoreAdminParams { */ public static final String REPLICA_TYPE = "replicaType"; + /** Whether the request that generated the admin command is trusted */ + public static final String TRUSTED = "trusted"; + public enum CoreAdminAction { STATUS(true), UNLOAD, @@ -179,7 +182,7 @@ public enum CoreAdminAction { REJOINLEADERELECTION, // internal API used by force shard leader election FORCEPREPAREFORLEADERSHIP, - // Internal APIs to backup and restore a core + // Internal APIs to back up and restore a core BACKUPCORE, RESTORECORE, INSTALLCOREDATA, diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CursorMarkParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CursorMarkParams.java index e74bd3e856f..d641d3337e5 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/CursorMarkParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/CursorMarkParams.java @@ -37,7 +37,7 @@ public interface CursorMarkParams { /** * Special value for {@link #CURSOR_MARK_PARAM} indicating that cursor functionality should be - * used, and a new cursor value should be computed afte the last result, but that currently the + * used, and a new cursor value should be computed after the last result, but that currently the * "first page" of results is being requested */ public static final String CURSOR_MARK_START = "*"; diff --git a/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java index 9f95dd5f34e..1dc2e2daed9 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/DefaultSolrParams.java @@ -47,7 +47,7 @@ public String[] getParams(String param) { public Iterator getParameterNamesIterator() { // We need to compute the set of all param names in advance // So we don't wind up with an iterator that returns the same - // String more then once (SOLR-6780) + // String more than once (SOLR-6780) LinkedHashSet allKeys = new LinkedHashSet<>(); for (SolrParams p : new SolrParams[] {params, defaults}) { Iterator localKeys = p.getParameterNamesIterator(); diff --git a/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java b/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java index d6bc7273bcb..53fce3670bd 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/FacetParams.java @@ -55,7 +55,7 @@ public interface FacetParams { public static final String FACET_METHOD_uif = "uif"; /** - * Any lucene formated queries the user would like to use for Facet Constraint Counts + * Any lucene formatted queries the user would like to use for Facet Constraint Counts * (multi-value) */ public static final String FACET_QUERY = FACET + ".query"; @@ -222,7 +222,7 @@ public interface FacetParams { public static final String FACET_DATE_INCLUDE = FACET_DATE + ".include"; /** - * Any numerical field whose terms the user wants to enumerate over Facet Contraint Counts for + * Any numerical field whose terms the user wants to 
enumerate over Facet Constraint Counts for * selected ranges. */ public static final String FACET_RANGE = FACET + ".range"; @@ -309,7 +309,7 @@ public interface FacetParams { *

      ["-150 10" TO "-100 30"]
      * * (the first is bottom-left and second is bottom-right, both of which are parsed as points are - * parsed). OR, any WKT can be provided and it's bounding box will be taken. + * parsed). OR, any WKT can be provided, and it's bounding box will be taken. */ public static final String FACET_HEATMAP_GEOM = FACET_HEATMAP + ".geom"; @@ -330,7 +330,7 @@ public interface FacetParams { * Used to determine the heatmap grid level to compute (optional). It has the same interpretation * of maxDistErr or distErr with RPT. It's an absolute distance (in units of what's specified on * the field type) that a grid square must maximally fit into (width & height). It can be used - * to to more explicitly specify the maximum grid square size without knowledge of what particular + * to more explicitly specify the maximum grid square size without knowledge of what particular * grid levels translate to. This can in turn be used with knowledge of the size of 'bbox' to get * a target minimum number of grid cells. Mutually exclusive with distErrPct & gridLevel. */ @@ -423,11 +423,11 @@ public static FacetRangeInclude get(String label) { } /** - * Convinience method for parsing the param value according to the correct semantics and + * Convenience method for parsing the param value according to the correct semantics and * applying the default of "LOWER" */ public static EnumSet parseParam(final String[] param) { - // short circut for default behavior + // short circuit for default behavior if (null == param || 0 == param.length) return EnumSet.of(LOWER); // build up set containing whatever is specified @@ -436,7 +436,7 @@ public static EnumSet parseParam(final String[] param) { include.add(FacetRangeInclude.get(o)); } - // if set contains all, then we're back to short circuting + // if set contains all, then we're back to short-circuiting if (include.contains(FacetRangeInclude.ALL)) return EnumSet.allOf(FacetRangeInclude.class); // use whatever we've got. diff --git a/solr/solrj/src/java/org/apache/solr/common/params/GroupParams.java b/solr/solrj/src/java/org/apache/solr/common/params/GroupParams.java index 9dad710181b..43d5380a7b4 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/GroupParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/GroupParams.java @@ -47,8 +47,8 @@ public interface GroupParams { // Note: Since you can supply multiple fields to group on, but only have a facets for the whole // result. It only makes sense to me to support these parameters for the first group. /** - * Whether the docSet (for example for faceting) should be based on plain documents (a.k.a - * UNGROUPED) or on the groups (a.k.a GROUPED). The docSet will only the most relevant documents + * Whether the docSet (for example for faceting) should be based on plain documents (a.k.a. + * UNGROUPED) or on the groups (a.k.a. GROUPED). The docSet will only the most relevant documents * per group. 
It is if you query for everything with group.limit=1 */ public static final String GROUP_TRUNCATE = GROUP + ".truncate"; diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java index 9c3c0d2a4a9..25535f06942 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/ModifiableSolrParams.java @@ -51,7 +51,7 @@ public ModifiableSolrParams(SolrParams params) { } /** - * If the input params are of type MofifiableSolrParams, returns the input, otherwise, constructs + * If the input params are of type ModifiableSolrParams, returns the input, otherwise, constructs * a new ModifiableSolrParams, copying values from the given params. If params is null, returns an * empty ModifiableSolrParams instance. */ @@ -129,8 +129,8 @@ public ModifiableSolrParams add(String name, String... val) { } /** - * Add all of the params provided in the parameter to this params. Any current value(s) - * for the same key will be overridden. + * Add all the params provided in the parameter to this params. Any current value(s) for + * the same key will be overridden. */ public void add(SolrParams params) { for (Map.Entry pair : params) { diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java index 875b17c87fe..273db7651e8 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java @@ -45,7 +45,7 @@ public interface ShardParams { /** * The requested URL for this shard * - * @deprecated This was an internally used param never ment for clients to specify; it is no + * @deprecated This was an internally used param, never meant for clients to specify; it is no * longer used by Solr. */ @Deprecated String SHARD_URL = "shard.url"; diff --git a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java index 2d2fbe9d837..0ac7e8de317 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/SolrParams.java @@ -22,12 +22,7 @@ import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -240,9 +235,7 @@ public Integer getInt(String param) { } } - /** - * Returns int value of the the param or default value for int - zero (0) if not set. - */ + /** Returns int value of the param or default value for int - zero (0) if not set. */ public int getPrimitiveInt(String param) { return getInt(param, 0); } @@ -437,66 +430,6 @@ public static SolrParams wrapAppended(SolrParams params, SolrParams defaults) { return AppendedSolrParams.wrapAppended(params, defaults); } - /** Create a Map<String,String> from a NamedList given no keys are repeated */ - @Deprecated // Doesn't belong here (no SolrParams). Just remove. 
- public static Map toMap(NamedList params) { - HashMap map = new HashMap<>(); - for (int i = 0; i < params.size(); i++) { - map.put(params.getName(i), params.getVal(i).toString()); - } - return map; - } - - /** Create a Map<String,String[]> from a NamedList */ - @Deprecated // Doesn't belong here (no SolrParams). Just remove. - public static Map toMultiMap(NamedList params) { - HashMap map = new HashMap<>(); - for (int i = 0; i < params.size(); i++) { - String name = params.getName(i); - Object val = params.getVal(i); - if (val instanceof String[]) { - MultiMapSolrParams.addParam(name, (String[]) val, map); - } else if (val instanceof List) { - List l = (List) val; - String[] s = new String[l.size()]; - for (int j = 0; j < l.size(); j++) { - s[j] = l.get(j) == null ? null : String.valueOf(l.get(j)); - } - MultiMapSolrParams.addParam(name, s, map); - } else { - MultiMapSolrParams.addParam(name, val.toString(), map); - } - } - return map; - } - - /** - * Create SolrParams from NamedList. - * - * @deprecated Use {@link NamedList#toSolrParams()}. - */ - @Deprecated // move to NamedList to allow easier flow - public static SolrParams toSolrParams(NamedList params) { - return params.toSolrParams(); - } - - @Deprecated - public SolrParams toFilteredSolrParams(List names) { - // TODO do this better somehow via a view that filters? See SolrCore.preDecorateResponse. - // ... and/or add some optional predicates to iterator()? - NamedList nl = new NamedList<>(); - for (Iterator it = getParameterNamesIterator(); it.hasNext(); ) { - final String name = it.next(); - if (names.contains(name)) { - final String[] values = getParams(name); - for (String value : values) { - nl.add(name, value); - } - } - } - return nl.toSolrParams(); - } - /** * Convert this to a NamedList of unique keys with either String or String[] values depending on * how many values there are for the parameter. @@ -510,40 +443,15 @@ public NamedList toNamedList() { if (values.length == 1) { result.add(name, values[0]); } else { - // currently no reason not to use the same array + // currently, no reason not to use the same array result.add(name, values); } } return result; } - // Deprecated because there isn't a universal way to deal with multi-values (always - // String[] or only for > 1 or always 1st value). And what to do with nulls or empty string. - // And SolrParams now implements MapWriter.toMap(Map) (a default method). So what do we do? - @Deprecated - public Map getAll(Map sink, Collection params) { - if (sink == null) sink = new LinkedHashMap<>(); - for (String param : params) { - String[] v = getParams(param); - if (v != null && v.length > 0) { - if (v.length == 1) { - sink.put(param, v[0]); - } else { - sink.put(param, v); - } - } - } - return sink; - } - - /** Copy all params to the given map or if the given map is null create a new one */ - @Deprecated - public Map getAll(Map sink, String... params) { - return getAll(sink, params == null ? Collections.emptyList() : Arrays.asList(params)); - } - /** - * Returns this SolrParams as a properly URL encoded string, starting with {@code "?"}, if not + * Returns this SolrParams as a proper URL encoded string, starting with {@code "?"}, if not * empty. */ public String toQueryString() { @@ -569,9 +477,9 @@ public String toQueryString() { } /** - * Generates a local-params string of the form - * - *
      {! name=value name2=value2}
      + * Generates a local-params string of the form {! name=value name2=value2}, + * Protecting (without any quoting or escaping) any values that start with $ (param + * references). */ public String toLocalParamsString() { final StringBuilder sb = new StringBuilder(128); @@ -583,7 +491,12 @@ public String toLocalParamsString() { sb.append(' '); // do so even the first time; why not. sb.append(name); // no escaping for name; it must follow "Java Identifier" rules. sb.append('='); - sb.append(ClientUtils.encodeLocalParamVal(val)); + if (val.startsWith("$")) { + // maintain literal param ref... + sb.append(val); + } else { + sb.append(ClientUtils.encodeLocalParamVal(val)); + } } } sb.append('}'); diff --git a/solr/solrj/src/java/org/apache/solr/common/params/SpellingParams.java b/solr/solrj/src/java/org/apache/solr/common/params/SpellingParams.java index e6551ea30fb..db03b003098 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/SpellingParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/SpellingParams.java @@ -51,9 +51,9 @@ public interface SpellingParams { /** * The maximum number of hits the request can return in order to both generate spelling * suggestions and set the "correctlySpelled" element to "false". This can be specified either as - * a whole number number of documents, or it can be expressed as a fractional percentage of - * documents returned by a chosen filter query. By default, the chosen filter is the most - * restrictive fq clause. This can be overridden with {@link + * a whole number of documents, or it can be expressed as a fractional percentage of documents + * returned by a chosen filter query. By default, the chosen filter is the most restrictive fq + * clause. This can be overridden with {@link * SpellingParams#SPELLCHECK_MAX_RESULTS_FOR_SUGGEST_FQ} . * *

      If left unspecified, the default behavior will prevail. That is, "correctlySpelled" will be diff --git a/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java b/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java index ae2b19950e8..d9dc41d1f4a 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/TermsParams.java @@ -106,7 +106,7 @@ public int getValue() { /** * Optional. If true, return the raw characters of the indexed term, regardless of if it is - * readable. For instance, the index form of numeric numbers is not human readable. The default is + * readable. For instance, the index form of numeric numbers is not human-readable. The default is * false. */ String TERMS_RAW = TERMS_PREFIX + "raw"; diff --git a/solr/solrj/src/java/org/apache/solr/common/params/UpdateParams.java b/solr/solrj/src/java/org/apache/solr/common/params/UpdateParams.java index 48dab1a8cae..f14252fb970 100644 --- a/solr/solrj/src/java/org/apache/solr/common/params/UpdateParams.java +++ b/solr/solrj/src/java/org/apache/solr/common/params/UpdateParams.java @@ -73,7 +73,7 @@ public interface UpdateParams { /** * If set to true, then Solr must fail to process any Atomic Update which can not be done - * "In-Place" with out re-indexing the entire document. + * "In-Place" without re-indexing the entire document. */ public static final String REQUIRE_PARTIAL_DOC_UPDATES_INPLACE = "update.partial.requireInPlace"; } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java b/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java index 8379b5051c1..c4baf6326e8 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ByteArrayUtf8CharSequence.java @@ -73,7 +73,7 @@ public byte byteAt(int idx) { } /** - * this is for internal use to get a cached string value. returns null if There is no cached + * this is for internal use, to get a cached string value. returns null if there is no cached * String value */ public String getStringOrNull() { diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java index 824ab4dbb15..b7a13dca974 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ByteUtils.java @@ -26,7 +26,7 @@ public class ByteUtils { public static final int MAX_UTF8_BYTES_PER_CHAR = 3; /** - * Converts utf8 to utf16 and returns the number of 16 bit Java chars written. Full characters are + * Converts utf8 to utf16 and returns the number of 16-bit Java chars written. Full characters are * read, even if this reads past the length passed (and can result in an ArrayOutOfBoundsException * if invalid UTF8 is passed). Explicit checks for valid UTF8 are not performed. The char[] out * should probably have enough room to hold the worst case of each byte becoming a Java char. 
@@ -147,7 +147,7 @@ public static int writeUTF16toUTF8( final int code = (int) s.charAt(i); if (upto > scratch.length - 4) { - // a code point may take upto 4 bytes and we don't have enough space, so reset + // a code point may take up to 4 bytes, and we don't have enough space, so reset totalBytes += upto; if (fos == null) throw new IOException("buffer over flow"); fos.write(scratch, 0, upto); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java b/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java deleted file mode 100644 index b1b2bd45644..00000000000 --- a/solr/solrj/src/java/org/apache/solr/common/util/BytesOutputStream.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.solr.common.util; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.OutputStream; -import java.io.UnsupportedEncodingException; -import java.util.Arrays; - -public class BytesOutputStream extends OutputStream { - private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; - - protected byte[] buf; - - protected int sz; - - public BytesOutputStream() { - this(64); - } - - public BytesOutputStream(int size) { - if (size < 0) { - throw new IllegalArgumentException("Size must be > 0: " + size); - } - buf = new byte[size]; - } - - public byte[] toBytes() { - return Arrays.copyOf(buf, sz); - } - - public Bytes bytes() { - return new Bytes(buf, 0, sz); - } - - public InputStream inputStream() { - return new ByteArrayInputStream(buf); - } - - private void ensureCapacity(int minCapacity) { - if (minCapacity - buf.length > 0) expandBuf(minCapacity); - } - - /** * Write a byte to the stream. 
*/ - @Override - public void write(int b) { - - try { - buf[sz] = (byte) b; - sz += 1; - } catch (IndexOutOfBoundsException e) { - ensureCapacity(sz + 1); - buf[sz] = (byte) b; - sz += 1; - } - } - - @Override - public void write(byte[] b, int off, int len) { - try { - System.arraycopy(b, off, buf, sz, len); - sz += len; - } catch (IndexOutOfBoundsException e) { - ensureCapacity(sz + len); - System.arraycopy(b, off, buf, sz, len); - sz += len; - } - } - - public void writeBytes(byte[] b) { - write(b, 0, b.length); - } - - public void reset() { - sz = 0; - } - - public int size() { - return sz; - } - - public String toString(String charset) { - try { - return new String(buf, 0, sz, charset); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException(e); - } - } - - private void expandBuf(int minCapacity) { - int oldCapacity = buf.length; - int newCapacity = oldCapacity << 1; - if (newCapacity - minCapacity < 0) newCapacity = minCapacity; - if (newCapacity - MAX_ARRAY_SIZE > 0) { - if (minCapacity < 0) - // overflow - throw new OutOfMemoryError(); - newCapacity = (minCapacity > MAX_ARRAY_SIZE) ? Integer.MAX_VALUE : MAX_ARRAY_SIZE; - } - buf = Arrays.copyOf(buf, newCapacity); - } - - @Override - public void close() { - // noop - } - - public static class Bytes { - - public final byte[] bytes; - public final int offset; - public final int length; - - public Bytes(byte[] bytes, int offset, int length) { - this.bytes = bytes; - this.offset = offset; - this.length = length; - } - } -} diff --git a/solr/solrj/src/java/org/apache/solr/common/util/CommonTestInjection.java b/solr/solrj/src/java/org/apache/solr/common/util/CommonTestInjection.java index f6f75a8ef32..3acf4d75163 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/CommonTestInjection.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/CommonTestInjection.java @@ -54,9 +54,9 @@ public static Map injectAdditionalProps() { } /** - * Set test delay (sleep) in unit of millisec + * Set test delay (sleep) in unit of milliseconds * - * @param delay delay in millisec, null to remove such delay + * @param delay delay in milliseconds, null to remove such delay */ public static void setDelay(Integer delay) { CommonTestInjection.delay = delay; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ContentStream.java b/solr/solrj/src/java/org/apache/solr/common/util/ContentStream.java index 0c3cbbbfcdb..7c1de75568e 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ContentStream.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ContentStream.java @@ -55,7 +55,7 @@ public interface ContentStream { * work. The runtime behavior for additional calls is undefined. * *

      Note: you must call getStream() or getReader() before the - * attributes (name, contentType, etc) are guaranteed to be set. Streams may be lazy loaded only + * attributes (name, contentType, etc.) are guaranteed to be set. Streams may be lazy loaded only * when this method is called. */ InputStream getStream() throws IOException; @@ -77,7 +77,7 @@ public interface ContentStream { * work. The runtime behavior for additional calls is undefined. * *

      Note: you must call getStream() or getReader() before the - * attributes (name, contentType, etc) are guaranteed to be set. Streams may be lazy loaded only + * attributes (name, contentType, etc.) are guaranteed to be set. Streams may be lazy loaded only * when this method is called. */ Reader getReader() throws IOException; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java b/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java index 48999ef79f0..321b08a0f24 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ContentStreamBase.java @@ -245,7 +245,7 @@ public InputStream getStream() throws IOException { return new ByteArrayInputStream(str.getBytes(DEFAULT_CHARSET)); } - /** If an charset is defined (by the contentType) use that, otherwise use a StringReader */ + /** If a charset is defined (by the contentType) use that, otherwise use a StringReader */ @Override public Reader getReader() throws IOException { String charset = getCharsetFromContentType(contentType); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/DOMUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/DOMUtil.java index 9b2e7583cdf..840c64b5d87 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/DOMUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/DOMUtil.java @@ -308,7 +308,7 @@ private static void getText(Node nd, StringBuilder buf) { match the definition of how textContent should behave according to the DOM Level-3 Core documentation - which specifies that the Attr's children should have their - textContent concated (Attr's can have a single child which + textContent contacted (Attr's can have a single child which is either Text node or an EntityReference). In practice, DOM implementations do not seem to use child nodes of Attributes, storing the "text" directly as the nodeValue. @@ -346,7 +346,7 @@ public static void substituteSystemProperties(Node node) { /** * Replaces ${property[:default value]} references in all attributes and text nodes of supplied - * node. If the property is not defined neither in the given Properties instance nor in + * node. If the property is not defined, either in the given Properties instance nor in * System.getProperty and no default value is provided, a runtime exception is thrown. * * @param node DOM node to walk for substitutions @@ -444,7 +444,7 @@ private static void parsePropertyString( fragments.add("$"); prev = pos + 1; } else if (value.charAt(pos + 1) != '{') { - // peek ahead to see if the next char is a property or not + // peek ahead to see if the next char is a property or // not a property: insert the char as a literal /* fragments.addElement(value.substring(pos + 1, pos + 2)); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/DataEntry.java b/solr/solrj/src/java/org/apache/solr/common/util/DataEntry.java index 46ec680d96f..a3fd6ba9883 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/DataEntry.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/DataEntry.java @@ -22,9 +22,9 @@ /** * This represents a data entry in the payload/stream. There are multiple ways to consume the data - * entry a) listen to it, if it's a container object, and get callbacks for each sub-entry b) read + * entry: 1) listen to it, if it's a container object, and get callbacks for each sub-entry 2) read * as an object using the {{@link #val()}} method. 
Please note that it creates objects and expect - * more memory usage c) read the corresponding primitive value Do not keep a reference of this + * more memory usage 3) read the corresponding primitive value Do not keep a reference of this * Object beyond the scope where it is called. Read the relevant data out. */ public interface DataEntry { @@ -80,7 +80,7 @@ default String strValue() { Object ctx(); /** - * If it is a non-primitive type type and size is known in advance + * If it is a non-primitive type and size is known in advance * *

      if it's a map/list, it's the no:of items in this container * @@ -132,8 +132,8 @@ enum Type { interface EntryListener { /** - * Callback for each entry in this container. once the method call returns, the entry object is - * not valid anymore It is usually reused. If the object value is a {{@link Utf8CharSequence}} + * Callback for each entry in this container. Once the method call returns, the entry object is + * not valid anymore. It is usually reused. If the object value is a {{@link Utf8CharSequence}} * do a {{@link Object#clone()}} because the object may be reused * * @param e The entry in the container diff --git a/solr/solrj/src/java/org/apache/solr/common/util/EnvUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/EnvUtils.java index 29c9586ca96..6adac222ab7 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/EnvUtils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/EnvUtils.java @@ -34,13 +34,14 @@ import org.apache.solr.common.SolrException; /** - * This class is a unified provider of environment variables and system properties. It exposes a - * mutable copy of the environment variables. It also converts 'SOLR_FOO' variables to system - * properties 'solr.foo' and provide various convenience accessors for them. + * Provides convenient access to System Properties for Solr. It also converts 'SOLR_FOO' env vars to + * system properties 'solr.foo', which is done on first access of this class. All Solr code should + * use this in lieu of JDK equivalents. */ public class EnvUtils { - private static final SortedMap ENV = new TreeMap<>(System.getenv()); + /** Maps ENV keys to sys prop keys for special/custom mappings */ private static final Map CUSTOM_MAPPINGS = new HashMap<>(); + private static final Map camelCaseToDotsMap = new ConcurrentHashMap<>(); static { @@ -52,7 +53,7 @@ public class EnvUtils { for (String key : props.stringPropertyNames()) { CUSTOM_MAPPINGS.put(key, props.getProperty(key)); } - init(false); + init(false, System.getenv()); } } catch (IOException e) { throw new SolrException( @@ -60,74 +61,6 @@ public class EnvUtils { } } - /** - * Get Solr's mutable copy of all environment variables. - * - * @return sorted map of environment variables - */ - public static SortedMap getEnvs() { - return ENV; - } - - /** Get a single environment variable as string */ - public static String getEnv(String key) { - return ENV.get(key); - } - - /** Get a single environment variable as string, or default */ - public static String getEnv(String key, String defaultValue) { - return ENV.getOrDefault(key, defaultValue); - } - - /** Get an environment variable as long */ - public static long getEnvAsLong(String key) { - return Long.parseLong(ENV.get(key)); - } - - /** Get an environment variable as long, or default value */ - public static long getEnvAsLong(String key, long defaultValue) { - String value = ENV.get(key); - if (value == null) { - return defaultValue; - } - return Long.parseLong(value); - } - - /** Get an env var as boolean */ - public static boolean getEnvAsBool(String key) { - return StrUtils.parseBool(ENV.get(key)); - } - - /** Get an env var as boolean, or default value */ - public static boolean getEnvAsBool(String key, boolean defaultValue) { - String value = ENV.get(key); - if (value == null) { - return defaultValue; - } - return StrUtils.parseBool(value); - } - - /** Get comma separated strings from env as List */ - public static List getEnvAsList(String key) { - return getEnv(key) != null ? 
stringValueToList(getEnv(key)) : null; - } - - /** Get comma separated strings from env as List */ - public static List getEnvAsList(String key, List defaultValue) { - return ENV.get(key) != null ? getEnvAsList(key) : defaultValue; - } - - /** Set an environment variable */ - public static void setEnv(String key, String value) { - ENV.put(key, value); - } - - /** Set all environment variables */ - public static synchronized void setEnvs(Map env) { - ENV.clear(); - ENV.putAll(env); - } - /** Get all Solr system properties as a sorted map */ public static SortedMap getProperties() { return System.getProperties().entrySet().stream() @@ -178,6 +111,20 @@ private static String camelCaseToDotSeparated(String key) { } /** Get property as integer */ + public static Integer getPropertyAsInteger(String key) { + return getPropertyAsInteger(key, null); + } + + /** Get property as integer, or default value */ + public static Integer getPropertyAsInteger(String key, Integer defaultValue) { + String value = getProperty(key); + if (value == null) { + return defaultValue; + } + return Integer.parseInt(value); + } + + /** Get property as long */ public static Long getPropertyAsLong(String key) { return getPropertyAsLong(key, null); } @@ -231,17 +178,15 @@ public static void setProperty(String key, String value) { /** * Re-reads environment variables and updates the internal map. Mainly for internal and test use. - * - * @param overwrite if true, overwrite existing system properties with environment variables */ - static synchronized void init(boolean overwrite) { + static synchronized void init(boolean overwrite, Map env) { // Convert eligible environment variables to system properties - for (String key : ENV.keySet()) { + for (String key : env.keySet()) { if (key.startsWith("SOLR_") || CUSTOM_MAPPINGS.containsKey(key)) { String sysPropKey = envNameToSyspropName(key); // Existing system properties take precedence if (!sysPropKey.isBlank() && (overwrite || getProperty(sysPropKey, null) == null)) { - setProperty(sysPropKey, ENV.get(key)); + setProperty(sysPropKey, env.get(key)); } } } diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java index 0fb3f8f4153..29873bf09b9 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java @@ -65,8 +65,8 @@ public static synchronized void addThreadLocalProvider(InheritableThreadLocalPro } /** - * Any class which wants to carry forward the threadlocal values to the threads run by threadpools - * must implement this interface and the implementation should be registered here + * Any class which wants to carry forward the thread local values to the threads run by thread + * pools must implement this interface and the implementation should be registered here */ public interface InheritableThreadLocalProvider { /** @@ -76,13 +76,13 @@ public interface InheritableThreadLocalProvider { void store(AtomicReference ctx); /** - * This is invoked in the Threadpool thread. set the appropriate values in the threadlocal of + * This is invoked in the thread pool thread. set the appropriate values in the thread local of * this thread. 
*/ void set(AtomicReference ctx); /** - * This method is invoked in the threadpool thread after the execution clean all the variables + * This method is invoked in the thread pool thread after the execution clean all the variables * set in the set method */ void clean(AtomicReference ctx); @@ -110,7 +110,7 @@ public static boolean isTerminated(ExecutorService pool) { * Shutdown the {@link ExecutorService} and wait for 60 seconds for the threads to complete. More * detail on the waiting can be found in {@link #awaitTermination(ExecutorService)}. * - * @param pool The ExecutorService to shutdown and wait on + * @param pool The ExecutorService to shut down and wait on */ public static void shutdownAndAwaitTermination(ExecutorService pool) { if (pool == null) return; @@ -122,13 +122,13 @@ public static void shutdownAndAwaitTermination(ExecutorService pool) { * Shutdown the {@link ExecutorService} and wait forever for the threads to complete. More detail * on the waiting can be found in {@link #awaitTerminationForever(ExecutorService)}. * - *

      This should likely not be used in {@code close()} methods, as we want to timebound when + *

      This should likely not be used in {@code close()} methods, as we want to time bound when * shutting down. However, sometimes {@link ExecutorService}s are used to submit a list of tasks * and awaiting termination is akin to waiting on the list of {@link Future}s to complete. In that * case, this method should be used as there is no inherent time bound to waiting on those tasks * to complete. * - * @param pool The ExecutorService to shutdown and wait on + * @param pool The ExecutorService to shut down and wait on */ public static void shutdownAndAwaitTerminationForever(ExecutorService pool) { if (pool == null) return; @@ -201,6 +201,18 @@ public static ExecutorService newMDCAwareFixedThreadPool( nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(), threadFactory); } + public static ExecutorService newMDCAwareFixedThreadPool( + int nThreads, int queueCapacity, ThreadFactory threadFactory, Runnable beforeExecute) { + return new MDCAwareThreadPoolExecutor( + nThreads, + nThreads, + 0L, + TimeUnit.MILLISECONDS, + new LinkedBlockingQueue<>(queueCapacity), + threadFactory, + beforeExecute); + } + /** * See {@link java.util.concurrent.Executors#newSingleThreadExecutor(ThreadFactory)}. Note the * thread is always active, even if no tasks are submitted to the executor. @@ -225,29 +237,48 @@ public static ExecutorService newMDCAwareCachedThreadPool(String name) { return newMDCAwareCachedThreadPool(new SolrNamedThreadFactory(name)); } - /** See {@link java.util.concurrent.Executors#newCachedThreadPool(ThreadFactory)} */ + /** + * Create a new pool of threads, with no limit for the number of threads. The pool has no task + * queue. Each submitted task is executed immediately, either by reusing an existing thread if one + * is available, or by starting a new thread. Unused threads will be closed after 60 seconds. + */ public static ExecutorService newMDCAwareCachedThreadPool(ThreadFactory threadFactory) { return new MDCAwareThreadPoolExecutor( 0, Integer.MAX_VALUE, 60L, TimeUnit.SECONDS, new SynchronousQueue<>(), threadFactory); } + /** + * Create a new pool of threads. Threads are created for new work if there is room to do so up to + * {@code maxThreads}. Beyond that, the queue is used up to {@code queueCapacity}. Beyond that, + * work is rejected with an exception. Unused threads will be closed after 60 seconds. + */ public static ExecutorService newMDCAwareCachedThreadPool( int maxThreads, int queueCapacity, ThreadFactory threadFactory) { - return new MDCAwareThreadPoolExecutor( - 0, - maxThreads, - 60L, - TimeUnit.SECONDS, - new LinkedBlockingQueue<>(queueCapacity), - threadFactory); + // Create an executor with same value of core size and max total size. With an unbounded queue, + // the ThreadPoolExecutor ignores the configured max value and only considers core pool size. + // Since we allow core threads to die when idle for too long, this ends in having a pool with + // lazily-initialized and cached threads. 
+ MDCAwareThreadPoolExecutor executor = + new MDCAwareThreadPoolExecutor( + maxThreads, + maxThreads, + 60L, + TimeUnit.SECONDS, + new LinkedBlockingQueue<>(queueCapacity), + threadFactory); + // Allow core threads to die + executor.allowCoreThreadTimeOut(true); + return executor; } @SuppressForbidden(reason = "class customizes ThreadPoolExecutor so it can be used instead") public static class MDCAwareThreadPoolExecutor extends ThreadPoolExecutor { private static final int MAX_THREAD_NAME_LEN = 512; + public static final Runnable NOOP = () -> {}; private final boolean enableSubmitterStackTrace; + private final Runnable beforeExecuteTask; public MDCAwareThreadPoolExecutor( int corePoolSize, @@ -259,6 +290,7 @@ public MDCAwareThreadPoolExecutor( RejectedExecutionHandler handler) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler); this.enableSubmitterStackTrace = true; + this.beforeExecuteTask = NOOP; } public MDCAwareThreadPoolExecutor( @@ -269,6 +301,7 @@ public MDCAwareThreadPoolExecutor( BlockingQueue workQueue) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue); this.enableSubmitterStackTrace = true; + this.beforeExecuteTask = NOOP; } public MDCAwareThreadPoolExecutor( @@ -278,7 +311,8 @@ public MDCAwareThreadPoolExecutor( TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory) { - this(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, true); + this( + corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, true, NOOP); } public MDCAwareThreadPoolExecutor( @@ -288,9 +322,30 @@ public MDCAwareThreadPoolExecutor( TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, - boolean enableSubmitterStackTrace) { + Runnable beforeExecuteTask) { + this( + corePoolSize, + maximumPoolSize, + keepAliveTime, + unit, + workQueue, + threadFactory, + true, + beforeExecuteTask); + } + + public MDCAwareThreadPoolExecutor( + int corePoolSize, + int maximumPoolSize, + long keepAliveTime, + TimeUnit unit, + BlockingQueue workQueue, + ThreadFactory threadFactory, + boolean enableSubmitterStackTrace, + Runnable beforeExecuteTask) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory); this.enableSubmitterStackTrace = enableSubmitterStackTrace; + this.beforeExecuteTask = beforeExecuteTask; } public MDCAwareThreadPoolExecutor( @@ -302,6 +357,37 @@ public MDCAwareThreadPoolExecutor( RejectedExecutionHandler handler) { super(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, handler); this.enableSubmitterStackTrace = true; + this.beforeExecuteTask = NOOP; + } + + public MDCAwareThreadPoolExecutor( + int corePoolSize, + int maximumPoolSize, + int keepAliveTime, + TimeUnit timeUnit, + BlockingQueue blockingQueue, + SolrNamedThreadFactory httpShardExecutor, + boolean enableSubmitterStackTrace) { + super( + corePoolSize, maximumPoolSize, keepAliveTime, timeUnit, blockingQueue, httpShardExecutor); + this.enableSubmitterStackTrace = enableSubmitterStackTrace; + this.beforeExecuteTask = NOOP; + } + + public MDCAwareThreadPoolExecutor( + int i, + int maxValue, + long l, + TimeUnit timeUnit, + BlockingQueue es, + SolrNamedThreadFactory testExecutor, + boolean b) { + this(i, maxValue, l, timeUnit, es, testExecutor, b, NOOP); + } + + @Override + protected void beforeExecute(Thread t, Runnable r) { + this.beforeExecuteTask.run(); } @Override diff --git a/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java 
b/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java index 79da8d35aa3..b54fc539165 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/FastInputStream.java @@ -27,7 +27,7 @@ public class FastInputStream extends DataInputInputStream { protected final byte[] buf; protected int pos; protected int end; - protected long readFromStream; // number of bytes read from the underlying inputstream + protected long readFromStream; // number of bytes read from the underlying input stream public FastInputStream(InputStream in) { // use default BUFSIZE of BufferedOutputStream so if we wrap that diff --git a/solr/solrj/src/java/org/apache/solr/common/util/GlobPatternUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/GlobPatternUtil.java index 32badc75c1c..2c370718701 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/GlobPatternUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/GlobPatternUtil.java @@ -48,7 +48,7 @@ public static boolean matches(String pattern, String input) { int wcsIdx = 0; final Deque backtrack = new ArrayDeque<>(wcs.length); - // loop around a backtrack stack, to handle complex * matching + // loop around a back track stack, to handle complex * matching do { if (!backtrack.isEmpty()) { final int[] array = backtrack.pop(); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Hash.java b/solr/solrj/src/java/org/apache/solr/common/util/Hash.java index 05e9f291f0d..e327893b4d9 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Hash.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Hash.java @@ -20,10 +20,10 @@ * Fast, well distributed, cross-platform hash functions. * *

      Development background: I was surprised to discovered that there isn't a good cross-platform - * hash function defined for strings. MD5, SHA, FVN, etc, all define hash functions over bytes, + * hash function defined for strings. MD5, SHA, FVN, etc., all define hash functions over bytes, * meaning that it's under-specified for strings. * - *

      So I set out to create a standard 32 bit string hash that would be well defined for + *

      So I set out to create a standard 32 bit string hash that would be well-defined for * implementation in all languages, have very high performance, and have very good hash properties * such as distribution. After evaluating all the options, I settled on using Bob Jenkins' lookup3 * as a base. It's a well studied and very fast hash function, and the hashword variant can work @@ -63,7 +63,7 @@ public class Hash { * @param offset offset of the start of the key * @param length length of the key * @param initval initial value to fold into the hash - * @return the 32 bit hash code + * @return the 32-bit hash code */ @SuppressWarnings("fallthrough") public static int lookup3(int[] k, int offset, int length, int initval) { @@ -77,7 +77,7 @@ public static int lookup3(int[] k, int offset, int length, int initval) { c += k[i + 2]; // mix(a,b,c)... Java needs "out" parameters!!! - // Note: recent JVMs (Sun JDK6) turn pairs of shifts (needed to do a rotate) + // Note: recent JVMs (Sun JDK6) turn pairs of shifts (needed to do a rotate operation) // into real x86 rotate instructions. { a -= c; @@ -251,7 +251,7 @@ public static long lookup3ycs64(CharSequence s, int start, int end, long initval if (i >= end) break; // mix(a,b,c)... Java needs "out" parameters!!! - // Note: recent JVMs (Sun JDK6) turn pairs of shifts (needed to do a rotate) + // Note: recent JVMs (Sun JDK6) turn pairs of shifts (needed to do a rotate operation) // into real x86 rotate instructions. { a -= c; diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java index 83b6570e0d7..9a16ea5abbc 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JavaBinCodec.java @@ -1250,7 +1250,7 @@ public interface ObjectResolver { * Examine and attempt to serialize the given object, using a {@link JavaBinCodec} to write it * to a stream. * - * @param o the object that the caller wants serialized. + * @param o the object that the caller wants to be serialized. * @param codec used to actually serialize {@code o}. * @return the object {@code o} itself if it could not be serialized, or {@code null} if the * whole object was successfully serialized. diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java index 36ad860fb1b..86d3b94c236 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonRecordReader.java @@ -65,7 +65,7 @@ public static JsonRecordReader getInst(String split, List fieldMappings) private JsonRecordReader() {} /** - * a '|' separated list of path expressions which define sub sections of the JSON stream that are + * a '|' separated list of path expressions which define subsections of the JSON stream that are * to be emitted as separate records. It is possible to have multiple levels of split one for * parent and one for child each child record (or a list of records) will be emitted as a part of * the parent record with null as the key @@ -164,7 +164,7 @@ private static class Node { private boolean useFqn = false; public Node(String name, Node p) { - // Create a basic Node, suitable for the mid portions of any path. + // Create a basic Node, suitable for the mid-portions of any path. // Node.pathName and Node.name are set to same value. 
this.name = name; parent = p; @@ -210,7 +210,7 @@ private void buildOptimize() { /** * Build a Node tree structure representing all paths of interest to us. This must be done - * before parsing of the JSON stream starts. Each node holds one portion of an path. Taking each + * before parsing of the JSON stream starts. Each node holds one portion of a path. Taking each * path segment in turn this method walks the Node tree and finds where the new segment should * be inserted. It creates a Node representing a field's name, PATH and some flags and inserts * the Node into the Node tree. @@ -306,7 +306,7 @@ private void parse(JSONParser parser, Handler handler, Map value * them against the new tag. If matched then "jump" to that node, otherwise ignore the tag. * *

      Note, the list of // expressions found while walking back up the tree is cached in the - * HashMap descendants. Then if the new tag is to be skipped, any inner child tags are compared + * HashMap descendants. Then, if the new tag is to be skipped, any inner child tags are compared * against the cache and jumped to if matched. */ private void handleObjectStart( @@ -319,17 +319,17 @@ private void handleObjectStart( throws IOException { final boolean isRecordStarted = recordStarted || isRecord; - Set valuesAddedinThisFrame = null; + Set valuesAddedInThisFrame = null; if (isRecord || !recordStarted) { // This Node is a match for an PATH from a forEach attribute, - // prepare for the clean up that will occur when the record + // prepare for the cleanup that will occur when the record // is emitted after its END_ELEMENT is matched - valuesAddedinThisFrame = new HashSet<>(); - stack.push(valuesAddedinThisFrame); + valuesAddedInThisFrame = new HashSet<>(); + stack.push(valuesAddedInThisFrame); } else if (recordStarted) { // This node is a child of some parent which matched against forEach // attribute. Continue to add values to an existing record. - valuesAddedinThisFrame = stack.peek(); + valuesAddedInThisFrame = stack.peek(); } class Wrapper extends MethodFrameWrapper { @@ -410,7 +410,7 @@ void walkObject() throws IOException { Object val = parseSingleFieldValue(event, parser, runnable); if (val != null) { putValue(values, nameInRecord, val); - valuesAddedinThisFrame.add(nameInRecord); + valuesAddedInThisFrame.add(nameInRecord); } } else { @@ -440,7 +440,7 @@ void walkObject() throws IOException { } } finally { if ((isRecord() || !isRecordStarted)) { - for (String fld : valuesAddedinThisFrame) { + for (String fld : valuesAddedInThisFrame) { values.remove(fld); } } @@ -509,7 +509,7 @@ public String toString() { } // end of class Node /** - * The path is split into segments using the '/' as a separator. However this method deals with + * The path is split into segments using the '/' as a separator. However, this method deals with * special cases where there is a slash '/' character inside the attribute value e.g. * x/@html='text/html'. We split by '/' but then reassemble things were the '/' appears within a * quoted sub-string. diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java index b5162019ab7..391a36884a3 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonSchemaValidator.java @@ -30,8 +30,8 @@ /** * A very basic and lightweight json schema parsing and data validation tool. This custom tool is - * created because a) we need to support non json inputs b) to avoiding double parsing (this accepts - * an already parsed json as a map) It validates most aspects of json schema but it is NOT A FULLY + * created because 1) we need to support non json inputs 2) to avoiding double parsing (this accepts + * an already parsed json as a map). It validates most aspects of json schema, but it is NOT A FULLY * COMPLIANT JSON schema parser or validator. 
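A quick illustration of the JsonSchemaValidator described in the javadoc above: it is constructed from an already-parsed schema (a Map), so no JSON text has to be parsed twice. This is a hedged sketch, not part of the patch; the Map-based constructor appears in this hunk, while the validateJson call and its list-of-errors return value are assumptions about the surrounding API.

    // Hypothetical usage sketch of the lightweight validator; validateJson(Object) is assumed.
    import java.util.List;
    import java.util.Map;
    import org.apache.solr.common.util.JsonSchemaValidator;

    public class SchemaValidationSketch {
      public static void main(String[] args) {
        // the schema is an already-parsed JSON object, so nothing is re-parsed at validation time
        Map<String, Object> schema =
            Map.of(
                "type", "object",
                "properties", Map.of("name", Map.of("type", "string")),
                "required", List.of("name"));
        JsonSchemaValidator validator = new JsonSchemaValidator(schema); // constructor shown in this hunk
        List<String> errors = validator.validateJson(Map.of("name", "core1")); // method name assumed
        System.out.println(errors == null || errors.isEmpty() ? "valid" : "errors: " + errors);
      }
    }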
This validator borrow some design's idea from * https://github.com/networknt/json-schema-validator */ @@ -60,7 +60,7 @@ public JsonSchemaValidator(Map jsonSchema) { } } - // This is a heterogeneous type, there's probably imrpovements to be had by using some concrete + // This is a heterogeneous type, there's probably improvements to be had by using some concrete // types instead static final Map, ?>, Validator>> VALIDATORS = new HashMap<>(); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java b/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java index ddd984cc689..9cb39b09320 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/JsonTextWriter.java @@ -75,7 +75,7 @@ default void writeStrRaw(String name, String val) throws IOException { @Override default void writeStr(String name, String val, boolean needsEscaping) throws IOException { - // it might be more efficient to use a stringbuilder or write substrings + // it might be more efficient to use a StringBuilder or write substrings // if writing chars to the stream is slow. if (needsEscaping) { diff --git a/solr/solrj/src/java/org/apache/solr/common/util/MurmurHash2.java b/solr/solrj/src/java/org/apache/solr/common/util/MurmurHash2.java index 9547ac414f6..a16db14abea 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/MurmurHash2.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/MurmurHash2.java @@ -77,12 +77,12 @@ public static int hash(byte[] data, int seed, int offset, int len) { } /** - * Generates 32 bit hash from byte array with default seed value. + * Generates 32-bit hash from byte array with default seed value. * * @param data byte array to hash * @param offset the start position in the array to hash * @param len length of the array elements to hash - * @return 32 bit hash of the given array + * @return 32-bit hash of the given array */ public static final int hash32(final byte[] data, int offset, int len) { return MurmurHash2.hash(data, 0x9747b28c, offset, len); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java index b6587746173..aef0edaa4cf 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java @@ -138,7 +138,7 @@ public NamedList(Map nameValueMap) { /** * Method to serialize Map.Entry<String, ?> to a List in which the even indexed elements - * (0,2,4. ..etc) are Strings and odd elements (1,3,5,) are of the type "T". + * (0,2,4, etc.) are Strings and odd elements (1,3,5,) are of the type "T". * *

      NOTE: This a temporary placeholder method until the guts of the class are actually replaced * by List<String, ?>. @@ -343,7 +343,7 @@ public Object findRecursive(String... args) { * assign it to value. * * On the next loop, we check whether the retrieved value is a NamedList. - * If it is, then we drop down to that NamedList, grab the value of the + * If it is, then we drop to that NamedList, grab the value of the * next key, and start the loop over. If it is not a NamedList, then we * assign the value to null and break out of the loop. * @@ -690,7 +690,7 @@ public List removeAll(String name) { /** * Used for getting a boolean argument from a NamedList object. If the name is not present, * returns null. If there is more than one value with that name, or if the value found is not a - * Boolean or a String, throws an exception. If there is only one value present and it is a + * Boolean or a String, throws an exception. If there is only one value present, and it is a * Boolean or a String, the value is removed and returned as a Boolean. If an exception is thrown, * the NamedList is not modified. See {@link #removeAll(String)} and {@link * #removeConfigArgs(String)} for additional ways of gathering configuration information from a @@ -712,7 +712,7 @@ public Boolean removeBooleanArg(final String name) { /** * Used for getting a boolean argument from a NamedList object. If the name is not present, * returns null. If there is more than one value with that name, or if the value found is not a - * Boolean or a String, throws an exception. If there is only one value present and it is a + * Boolean or a String, throws an exception. If there is only one value present, and it is a * Boolean or a String, the value is returned as a Boolean. The NamedList is not modified. See * {@link #remove(String)}, {@link #removeAll(String)} and {@link #removeConfigArgs(String)} for * additional ways of gathering configuration information from a NamedList. diff --git a/solr/solrj/src/java/org/apache/solr/common/util/PropertiesUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/PropertiesUtil.java index 3e00414408d..9ae768c1f0a 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/PropertiesUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/PropertiesUtil.java @@ -105,7 +105,7 @@ private static void parsePropertyString( fragments.add("$"); prev = pos + 1; } else if (value.charAt(pos + 1) != '{') { - // peek ahead to see if the next char is a property or not + // peek ahead to see if the next char is a property or // not a property: insert the char as a literal /* fragments.addElement(value.substring(pos + 1, pos + 2)); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java index e2992204856..28723375fe5 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java @@ -74,7 +74,7 @@ public static void splitSmart(String s, char separator, List lst) { } else if (inString != 0 && ch == inString) { inString = 0; } else if (ch == '\'' || ch == '"') { - // If char is directly preceeded by a number or letter + // If char is directly preceded by a number or letter // then don't treat it as the start of a string. 
// Examples: 50" TV, or can't if (!Character.isLetterOrDigit(prevChar)) { @@ -164,7 +164,7 @@ public static List splitSmart(String s, String separator, boolean decode * by backslash '\' * * @param fileNames the string containing file names - * @return a list of file names with the escaping backslashed removed + * @return a list of file names with the escaping backslashes removed */ public static List splitFileNames(String fileNames) { if (fileNames == null) return Collections.emptyList(); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java b/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java index 6365882c250..f5313288dc9 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/TimeSource.java @@ -172,7 +172,8 @@ public String toString() { * Obtain an instance of time source. * * @param type supported types: currentTime, nanoTime and accelerated - * time with a double factor in the form of simTime:FACTOR, eg. simTime:2.5 + * time with a double factor in the form of simTime:FACTOR, e.g. + * simTime:2.5 * * @return one of the supported types */ @@ -228,7 +229,7 @@ public static TimeSource get(String type) { public abstract long[] getTimeAndEpochNs(); /** - * Sleep according to this source's notion of time. Eg. accelerated time source such as {@link + * Sleep according to this source's notion of time. E.g. accelerated time source such as {@link * SimTimeSource} will sleep proportionally shorter, according to its multiplier. * * @param ms number of milliseconds to sleep @@ -238,8 +239,8 @@ public static TimeSource get(String type) { /** * This method allows using TimeSource with APIs that require providing just plain time intervals, - * eg. {@link Object#wait(long)}. Values returned by this method are adjusted according to the - * time source's notion of time - eg. accelerated time source provided by {@link SimTimeSource} + * e.g. {@link Object#wait(long)}. Values returned by this method are adjusted according to the + * time source's notion of time - e.g. accelerated time source provided by {@link SimTimeSource} * will return intervals that are proportionally shortened by the multiplier. * *

      NOTE: converting small values may significantly affect precision of the returned values due diff --git a/solr/solrj/src/java/org/apache/solr/common/util/URLUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/URLUtil.java index 78de07ba5b3..6ddd81052b7 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/URLUtil.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/URLUtil.java @@ -67,7 +67,8 @@ public static String extractBaseUrl(String coreUrl) { final var indexOfLastSlash = coreUrl.lastIndexOf("/"); if (indexOfLastSlash == -1) { log.warn( - "Solr core URL [{}] did not contain expected path segments when parsing, ignoring..."); + "Solr core URL [{}] did not contain expected path segments when parsing, ignoring...", + coreUrl); return coreUrl; } return coreUrl.substring(0, coreUrl.lastIndexOf("/")); diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java index 98899633bff..7102ab75598 100644 --- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java +++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java @@ -695,8 +695,8 @@ public static String parseMetricsReplicaName(String collectionName, String coreN } /** - * Applies one json over other. The 'input' is applied over the sink The values in input isapplied - * over the values in 'sink' . If a value is 'null' that value is removed from sink + * Applies one json over another. The 'input' is applied over the sink The values in input is + * applied over the values in 'sink' . If a value is 'null' that value is removed from sink * * @param sink the original json object to start with. Ensure that this Map is mutable * @param input the json with new values @@ -1175,29 +1175,10 @@ public byte[] getbuf() { } } + /** Reads an input stream into a byte array. Does not close the input. */ public static ByteBuffer toByteArray(InputStream is) throws IOException { - return toByteArray(is, Integer.MAX_VALUE); - } - - /** - * Reads an input stream into a byte array - * - * @param is the input stream - * @return the byte array - * @throws IOException If there is a low-level I/O error. - */ - public static ByteBuffer toByteArray(InputStream is, long maxSize) throws IOException { try (BAOS bos = new BAOS()) { - long sz = 0; - int next = is.read(); - while (next > -1) { - if (++sz > maxSize) { - throw new BufferOverflowException(); - } - bos.write(next); - next = is.read(); - } - bos.flush(); + is.transferTo(bos); return bos.getByteBuffer(); } } diff --git a/solr/solrj/src/java/org/noggit/JSONParser.java b/solr/solrj/src/java/org/noggit/JSONParser.java index 187dba3ee15..48ddc447f31 100644 --- a/solr/solrj/src/java/org/noggit/JSONParser.java +++ b/solr/solrj/src/java/org/noggit/JSONParser.java @@ -27,7 +27,7 @@ public class JSONParser { /** Event indicating a JSON string value, including member names of objects */ public static final int STRING = 1; - /** Event indicating a JSON number value which fits into a signed 64 bit integer */ + /** Event indicating a JSON number value which fits into a signed 64-bit integer */ public static final int LONG = 2; /** @@ -39,7 +39,7 @@ public class JSONParser { /** * Event indicating a JSON number value that was not produced by toString of any Java primitive - * numerics such as Double or Long. It is either an integer outside the range of a 64 bit signed + * numerics such as Double or Long. 
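The Utils.toByteArray rewrite above drops the byte-at-a-time read loop (and the maxSize overload) in favour of InputStream.transferTo. A minimal JDK-only sketch of the same pattern, using plain java.io/java.nio types rather than Solr's BAOS helper, for anyone wanting the equivalent behaviour:

    // Copy the stream in bulk with InputStream.transferTo instead of reading one byte at a time.
    // Like the Solr method, this does not close the input stream.
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.ByteBuffer;

    public final class StreamToBuffer {
      static ByteBuffer toByteBuffer(InputStream in) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        in.transferTo(bos); // available since Java 9; buffers internally
        return ByteBuffer.wrap(bos.toByteArray());
      }
    }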
It is either an integer outside the range of a 64-bit signed * integer, or a floating point value with a string representation of more than 23 chars. */ public static final int BIGNUMBER = 4; @@ -75,8 +75,8 @@ public class JSONParser { /** * ALLOW_EXTRA_COMMAS causes any number of extra commas in arrays and objects to be ignored Note - * that a trailing comma in [] would be [,] (hence calling the feature "trailing" commas is either - * limiting or misleading. Since trailing commas is fundamentally incompatible with any future + * that a trailing comma in [] would be [,], hence calling the feature "trailing" commas is either + * limiting or misleading. Since trailing commas are fundamentally incompatible with any future * "fill-in-missing-values-with-null", it was decided to extend this feature to handle any number * of extra commas. */ @@ -276,7 +276,7 @@ protected int getCharNWS() throws IOException { for (; ; ) { int ch = getChar(); // getCharNWS is normally called in the context of expecting certain JSON special characters - // such as ":}"]," all of these characters are below 64 (including comment chars '/' and '#', + // such as ":}"]," all of these characters are below 64, including comment chars '/' and '#', // so we can make this the fast path even w/o checking the range first. We'll only get some // false-positives while using bare strings (chars "IJMc") if (((WS_MASK >> ch) & 0x01) == 0) { @@ -321,7 +321,7 @@ protected int getCharNWS() throws IOException { protected int getCharNWS(int ch) throws IOException { for (; ; ) { // getCharNWS is normally called in the context of expecting certain JSON special characters - // such as ":}"]," all of these characters are below 64 (including comment chars '/' and '#', + // such as ":}"]," all of these characters are below 64, including comment chars '/' and '#', // so we can make this the fast path even w/o checking the range first. We'll only get some // false-positives while using bare strings (chars "IJMc") if (((WS_MASK >> ch) & 0x01) == 0) { @@ -480,7 +480,7 @@ private long readNumber(int firstChar, boolean isNeg) throws IOException { // the positive plane. long v = (long) '0' - firstChar; // can't overflow a long in 18 decimal digits (i.e. 17 additional after the first). - // we also need 22 additional to handle double so we'll handle in 2 separate loops. + // we also need 22 additional to handle double, so we'll handle in 2 separate loops. int i; for (i = 0; i < 17; i++) { int ch = getChar(); @@ -1197,7 +1197,7 @@ public void getString(CharArr output) throws IOException { /** * Reads a number from the input stream and parses it as a long, only if the value will in fact - * fit into a signed 64 bit integer. + * fit into a signed 64-bit integer. 
*/ public long getLong() throws IOException { goTo(LONG); diff --git a/solr/solrj/src/resources/java-template/api.mustache b/solr/solrj/src/resources/java-template/api.mustache index a4171667b0a..c3961cf50b1 100644 --- a/solr/solrj/src/resources/java-template/api.mustache +++ b/solr/solrj/src/resources/java-template/api.mustache @@ -73,7 +73,8 @@ import {{modelPackage}}.{{dataType}}; public class {{classname}} { {{#operation}} - public static class {{operationIdCamelCase}}Response extends JacksonParsingResponse<{{modelPackage}}.{{returnType}}> { + {{^vendorExtensions.x-omitFromCodegen}} + public static class {{operationIdCamelCase}}Response extends JacksonParsingResponse<{{modelPackage}}.{{returnType}}> { public {{operationIdCamelCase}}Response() { super({{modelPackage}}.{{returnType}}.class); } @@ -184,6 +185,11 @@ public class {{classname}} { return SolrRequestType.ADMIN.toString(); } + @Override + public ApiVersion getApiVersion() { + return ApiVersion.V2; + } + @Override public SolrParams getParams() { final ModifiableSolrParams params = new ModifiableSolrParams(); @@ -206,6 +212,7 @@ public class {{classname}} { return new InputStreamResponseParser("json"); } } + {{/vendorExtensions.x-omitFromCodegen}} {{/operation}} } {{/operations}} diff --git a/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-replication1.xml b/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-replication1.xml index db9ecd69039..e0d04c62517 100644 --- a/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-replication1.xml +++ b/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-replication1.xml @@ -25,9 +25,9 @@ --> - + - + diff --git a/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-sql.xml b/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-sql.xml index 4f4e616f9ae..f557a80e64d 100644 --- a/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-sql.xml +++ b/solr/solrj/src/test-files/solrj/solr/collection1/conf/schema-sql.xml @@ -25,7 +25,7 @@ --> - + - - - - + + - - - - + + + + - + - - - - + + + + - - - - + + + + diff --git a/solr/solrj/src/test-files/solrj/solr/collection1/conf/solrconfig-follower1.xml b/solr/solrj/src/test-files/solrj/solr/collection1/conf/solrconfig-follower1.xml index ab2773d6eb4..43ff0410a8c 100644 --- a/solr/solrj/src/test-files/solrj/solr/collection1/conf/solrconfig-follower1.xml +++ b/solr/solrj/src/test-files/solrj/solr/collection1/conf/solrconfig-follower1.xml @@ -23,7 +23,7 @@ ${useCompoundFile:false} ${solr.data.dir:} - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/configset-1/conf/schema-minimal.xml b/solr/solrj/src/test-files/solrj/solr/configsets/configset-1/conf/schema-minimal.xml index 287d4fe0149..51d407dfc1d 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/configset-1/conf/schema-minimal.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/configset-1/conf/schema-minimal.xml @@ -15,7 +15,7 @@ See the License for the specific language governing permissions and limitations under the License. 
--> - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/configset-2/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/configset-2/conf/schema.xml index 287d4fe0149..51d407dfc1d 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/configset-2/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/configset-2/conf/schema.xml @@ -15,7 +15,7 @@ See the License for the specific language governing permissions and limitations under the License. --> - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/ml/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/ml/conf/schema.xml index 0c9ee9c9688..be212b86919 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/ml/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/ml/conf/schema.xml @@ -25,22 +25,22 @@ --> - + - - - - + + + + - - - - + + + + - + - + @@ -42,7 +42,7 @@ - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/solrconfig.xml b/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/solrconfig.xml index 11ea52b8da2..fb89854b08c 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/solrconfig.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/shared/conf/solrconfig.xml @@ -26,7 +26,7 @@ ${useCompoundFile:false} ${tempDir}/data/${l10n:}-${version:} - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/spatial/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/spatial/conf/schema.xml index 03865792ad9..7519343d446 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/spatial/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/spatial/conf/schema.xml @@ -19,11 +19,11 @@ - + - + - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml index 42b1e70017d..5a202baa2b8 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/streaming/conf/schema.xml @@ -25,7 +25,7 @@ --> - + - - - - + + + + - - - - + + + + @@ -110,13 +110,13 @@ - + - - + + - + diff --git a/solr/solrj/src/test-files/solrj/solr/configsets/tracking-updates/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/configsets/tracking-updates/conf/schema.xml index 4124feab0c3..fc23706512e 100644 --- a/solr/solrj/src/test-files/solrj/solr/configsets/tracking-updates/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/configsets/tracking-updates/conf/schema.xml @@ -15,10 +15,10 @@ See the License for the specific language governing permissions and limitations under the License. --> - + - - + + diff --git a/solr/solrj/src/test-files/solrj/solr/crazy-path-to-schema.xml b/solr/solrj/src/test-files/solrj/solr/crazy-path-to-schema.xml index 9d0cfa23f90..77728a9588b 100644 --- a/solr/solrj/src/test-files/solrj/solr/crazy-path-to-schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/crazy-path-to-schema.xml @@ -26,7 +26,7 @@ See comments from Yonik on SOLR-9083. It costs us nothing to leave these in give that just finds leaf and nodes and there's no reason to break schemas for people who leave and tags in their schema for no good purpose. 
--> - + diff --git a/solr/solrj/src/test-files/solrj/solr/multicore/core0/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/multicore/core0/conf/schema.xml index 68c251b34c1..8dddc57a24e 100644 --- a/solr/solrj/src/test-files/solrj/solr/multicore/core0/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/multicore/core0/conf/schema.xml @@ -16,10 +16,10 @@ limitations under the License. --> - + - + diff --git a/solr/solrj/src/test-files/solrj/solr/multicore/core1/conf/schema.xml b/solr/solrj/src/test-files/solrj/solr/multicore/core1/conf/schema.xml index 9f4cde8ae35..079a2683edb 100644 --- a/solr/solrj/src/test-files/solrj/solr/multicore/core1/conf/schema.xml +++ b/solr/solrj/src/test-files/solrj/solr/multicore/core1/conf/schema.xml @@ -16,10 +16,10 @@ limitations under the License. --> - + - + diff --git a/solr/solrj/src/test-files/solrj/solr/multicore/zoo.cfg b/solr/solrj/src/test-files/solrj/solr/multicore/zoo.cfg index cbd6970e3fc..f234b7568a9 100644 --- a/solr/solrj/src/test-files/solrj/solr/multicore/zoo.cfg +++ b/solr/solrj/src/test-files/solrj/solr/multicore/zoo.cfg @@ -16,4 +16,4 @@ syncLimit=5 # NOTE: Solr sets this based on zkRun / zkHost params # Disable ZK AdminServer since we do not use it -admin.enableServer=false \ No newline at end of file +admin.enableServer=false diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/ApiMustacheTemplateTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/ApiMustacheTemplateTests.java index 48111acf7d2..1dd0c524053 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/ApiMustacheTemplateTests.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/ApiMustacheTemplateTests.java @@ -57,7 +57,7 @@ public void testParsedReturnTypes() { Assert.assertNotSame(data.getClass(), response.getClass()); Assert.assertFalse(data instanceof JacksonParsingResponse); - // Currently all response types extend SolrJerseyResponse. Adjust if this change in the future. + // Currently, all response types extend SolrJerseyResponse. Adjust if this change in the future. 
Assert.assertTrue(data instanceof SolrJerseyResponse); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/CollectionAdminRequestRequiredParamsTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/CollectionAdminRequestRequiredParamsTest.java index 243e16da9fa..942b0dbea48 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/CollectionAdminRequestRequiredParamsTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/CollectionAdminRequestRequiredParamsTest.java @@ -208,7 +208,7 @@ public void testListCollections() { public void testMigrate() { final CollectionAdminRequest.Migrate request = - CollectionAdminRequest.migrateData("collection", "targer", "splitKey"); + CollectionAdminRequest.migrateData("collection", "target", "splitKey"); assertContainsParams(request.getParams(), ACTION, COLLECTION, "target.collection", "split.key"); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java index 77771c79e24..93db65f586b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/LargeVolumeTestBase.java @@ -55,7 +55,7 @@ public void testMultiThreaded() throws Exception { threads[i].join(); } - // some of the commits could have failed because maxWarmingSearchers exceeded, + // some commits could have failed because maxWarmingSearchers exceeded, // so do a final commit to make sure everything is visible. // This should no longer be true as of SOLR-9712 (Solr 6.4) // client.commit(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleCborTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleCborTest.java index dd1bbb0f0de..f0d420a69ec 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleCborTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleCborTest.java @@ -56,7 +56,7 @@ public SolrClient createNewSolrClient() { .withDefaultCollection(DEFAULT_TEST_CORENAME) .allowMultiPartPost(random().nextBoolean()) .withRequestWriter(cborRequestWriter()) - .withResponseParser(cborResponseparser()) + .withResponseParser(cborResponseParser()) .build(); } @@ -199,7 +199,7 @@ public void testQueryWithParams() { @Override @Ignore - public void testChildDoctransformer() { + public void testChildDocTransformer() { /*Ignore*/ } @@ -293,7 +293,7 @@ public String getPath(SolrRequest req) { }; } - private static ResponseParser cborResponseparser() { + private static ResponseParser cborResponseParser() { return new ResponseParser() { @Override diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java index bb5f97ac7ab..66154e7c6ec 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTests.java @@ -2575,7 +2575,7 @@ public void testQueryWithParams() throws SolrServerException, IOException { } @Test - public void testChildDoctransformer() throws IOException, SolrServerException { + public void testChildDocTransformer() throws IOException, SolrServerException { SolrClient client = getSolrClient(); client.deleteByQuery("*:*"); client.commit(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java index 
01b0940f357..4f0fc0ed374 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/SolrExampleTestsBase.java @@ -164,7 +164,7 @@ public void testCommitWithinOnDelete() throws Exception { rsp = client.query(new SolrQuery("id:id3")); assertEquals(1, rsp.getResults().getNumFound()); - // now test commitWithin on a delete + // now test commitWithin on a delete operation UpdateRequest up = new UpdateRequest(); up.setCommitWithin(1000); up.deleteById("id3"); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java b/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java index 0941775980d..68ba3e42606 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/TestSolrJErrorHandling.java @@ -24,6 +24,7 @@ import java.lang.invoke.MethodHandles; import java.net.HttpURLConnection; import java.net.Socket; +import java.net.URI; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; @@ -280,7 +281,7 @@ public void testHttpURLConnection() throws Exception { String urlString = getCoreUrl() + "/update"; HttpURLConnection conn = null; - URL url = new URL(urlString); + URL url = URI.create(urlString).toURL(); conn = (HttpURLConnection) url.openConnection(); conn.setRequestMethod("POST"); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java b/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java index 5d8fafc7160..8d65a55ade1 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/beans/TestDocumentObjectBinder.java @@ -198,7 +198,7 @@ public void setInStock(Boolean b) { inStock = b; } - // required if you want to fill SolrDocuments with the same annotaion... + // required if you want to fill SolrDocuments with the same annotation. public boolean isInStock() { return inStock; } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java index 61968db2e4d..024b79b5c1d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/JettyWebappTest.java @@ -18,7 +18,7 @@ import java.io.File; import java.io.InputStream; -import java.net.URL; +import java.net.URI; import java.util.Locale; import java.util.Random; import org.apache.http.Header; @@ -89,7 +89,7 @@ public void tearDown() throws Exception { public void testAdminUI() throws Exception { // Not an extensive test, but it does connect to Solr and verify the Admin ui shows up. 
String adminPath = "http://127.0.0.1:" + port + "/solr/"; - try (InputStream is = new URL(adminPath).openStream()) { + try (InputStream is = URI.create(adminPath).toURL().openStream()) { assertNotNull(is.readAllBytes()); // real error will be an exception } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java index de79f75a6a2..bf0e3ad00bf 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/BasicHttpSolrClientTest.java @@ -873,7 +873,7 @@ public void testQueryString() throws Exception { setReqParamsOf(req4, "serverOnly", "requestOnly", "both", "neither"); expectThrows(BaseHttpSolrClient.RemoteSolrException.class, () -> client.request(req4)); // NOTE: single stream requests send all the params - // as part of the query string. So add "neither" to the request + // as part of the query string. So add "neither" to the request, // so it passes the verification step. req4.setQueryParams(Set.of("requestOnly", "both", "neither")); verifyServletState(client, req4); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java index 0b82d36a31d..adc259460c1 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudHttp2SolrClientTest.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.net.URL; +import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -70,6 +70,9 @@ import org.apache.solr.handler.admin.CollectionsHandler; import org.apache.solr.handler.admin.ConfigSetsHandler; import org.apache.solr.handler.admin.CoreAdminHandler; +import org.apache.solr.servlet.HttpSolrCall; +import org.apache.solr.util.LogLevel; +import org.apache.solr.util.LogListener; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -114,6 +117,7 @@ public static void setupCluster() throws Exception { @AfterClass public static void tearDownAfterClass() throws Exception { + if (httpBasedCloudSolrClient != null) { try { httpBasedCloudSolrClient.close(); @@ -246,6 +250,58 @@ public void testAliasHandling() throws Exception { 2, client.query(null, paramsWithMixedCollectionAndAlias).getResults().getNumFound()); } + @Test + @LogLevel("org.apache.solr.servlet.HttpSolrCall=DEBUG") + public void testHttpCspPerf() throws Exception { + + String collectionName = "HTTPCSPTEST"; + CollectionAdminRequest.createCollection(collectionName, "conf", 2, 1) + .process(cluster.getSolrClient()); + cluster.waitForActiveCollection(collectionName, 2, 2); + + try (LogListener adminLogs = LogListener.info(HttpSolrCall.class).substring("[admin]"); + CloudSolrClient solrClient = createHttpCSPBasedCloudSolrClient(); ) { + + assertEquals(1, adminLogs.getCount()); + assertTrue( + adminLogs + .pollMessage() + .contains( + "path=/admin/collections params={prs=true&liveNodes=true&action" + + "=CLUSTERSTATUS&includeAll=false")); + + SolrInputDocument doc = new SolrInputDocument("id", "1", "title_s", "my doc"); + solrClient.add(collectionName, doc); + + // getCount seems to return a cumulative count, but add() results in only 1 additional admin + // request to fetch CLUSTERSTATUS for 
the collection + assertEquals(2, adminLogs.getCount()); + assertTrue( + adminLogs + .pollMessage() + .contains( + "path=/admin/collections " + + "params={prs=true&action=CLUSTERSTATUS&includeAll=false")); + + solrClient.commit(collectionName); + // No additional admin requests sent + assertEquals(2, adminLogs.getCount()); + + for (int i = 0; i < 3; i++) { + assertEquals( + 1, solrClient.query(collectionName, params("q", "*:*")).getResults().getNumFound()); + // No additional admin requests sent + assertEquals(2, adminLogs.getCount()); + } + } + } + + private CloudSolrClient createHttpCSPBasedCloudSolrClient() { + final List solrUrls = new ArrayList<>(); + solrUrls.add(cluster.getJettySolrRunner(0).getBaseUrl().toString()); + return new CloudHttp2SolrClient.Builder(solrUrls).build(); + } + @Test public void testRouting() throws Exception { CollectionAdminRequest.createCollection("routing_collection", "conf", 2, 1) @@ -443,7 +499,7 @@ public void queryWithLocalShardsPreferenceRulesTest() throws Exception { // For this case every shard should have // all its cores on the same node. - // Hence the below configuration for our collection + // Hence, the below configuration for our collection CollectionAdminRequest.createCollection(collectionName, "conf", liveNodes, liveNodes) .processAndWait(cluster.getSolrClient(), TIMEOUT); cluster.waitForActiveCollection(collectionName, liveNodes, liveNodes * liveNodes); @@ -499,8 +555,8 @@ private void queryWithShardsPreferenceRules(CloudSolrClient cloudClient, String // Make sure the distributed queries were directed to a single node only Set ports = new HashSet(); for (String shardAddr : shardAddresses) { - URL url = new URL(shardAddr); - ports.add(url.getPort()); + URI uri = URI.create(shardAddr); + ports.add(uri.getPort()); } // This assertion would hold true as long as every shard has a core on each node @@ -1056,7 +1112,7 @@ public void preferReplicaTypesTest() throws Exception { int liveNodes = cluster.getJettySolrRunners().size(); // For these tests we need to have multiple replica types. - // Hence the below configuration for our collection + // Hence, the below configuration for our collection int pullReplicas = Math.max(1, liveNodes - 2); CollectionAdminRequest.createCollection(collectionName, "conf", liveNodes, 1, 1, pullReplicas) .processAndWait(cluster.getSolrClient(), TIMEOUT); @@ -1117,7 +1173,7 @@ private void queryWithPreferReplicaTypes( for (Replica replica : slice.getReplicas()) { String coreUrl = replica.getCoreUrl(); // It seems replica reports its core URL with a trailing slash while shard - // info returned from the query doesn't. Oh well. + // info returned from the query doesn't. if (coreUrl.endsWith("/")) { coreUrl = coreUrl.substring(0, coreUrl.length() - 1); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRoutingTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRoutingTest.java index 6b805a53d83..f2824b9e550 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRoutingTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientRoutingTest.java @@ -99,7 +99,7 @@ public void routeParamHandling() throws IOException, SolrServerException { // at the mercy of org.apache.solr.servlet.HttpSolrCall.randomlyGetSolrCore, and if this // randomly selected replica is not the one the doc id lives in, the request is forwarded // from there. 
- // The decision whether or not the request is forwarded is made at + // The decision whether the request is forwarded is made at // org.apache.solr.handler.component.HttpShardHandler.canShortCircuit() boolean forwardedWithoutRoute = diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java index f0246b53fc8..ad718210e91 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.net.URL; +import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -435,7 +435,7 @@ public void queryWithLocalShardsPreferenceRulesTest() throws Exception { int liveNodes = cluster.getJettySolrRunners().size(); // For this case every shard should have all its cores on the same node. - // Hence the below configuration for our collection + // Hence, the below configuration for our collection CollectionAdminRequest.createCollection(collectionName, "conf", liveNodes, liveNodes) .processAndWait(cluster.getSolrClient(), TIMEOUT); cluster.waitForActiveCollection(collectionName, liveNodes, liveNodes * liveNodes); @@ -491,8 +491,8 @@ private void queryWithShardsPreferenceRules(CloudSolrClient cloudClient, String // Make sure the distributed queries were directed to a single node only Set ports = new HashSet(); for (String shardAddr : shardAddresses) { - URL url = new URL(shardAddr); - ports.add(url.getPort()); + URI uri = URI.create(shardAddr); + ports.add(uri.getPort()); } // This assertion would hold true as long as every shard has a core on each node @@ -1052,7 +1052,7 @@ public void preferReplicaTypesTest() throws Exception { int liveNodes = cluster.getJettySolrRunners().size(); // For these tests we need to have multiple replica types. - // Hence the below configuration for our collection + // Hence, the below configuration for our collection int pullReplicas = Math.max(1, liveNodes - 2); CollectionAdminRequest.createCollection(collectionName, "conf", liveNodes, 1, 1, pullReplicas) .processAndWait(cluster.getSolrClient(), TIMEOUT); @@ -1112,7 +1112,7 @@ private void queryWithPreferReplicaTypes( for (Replica replica : slice.getReplicas()) { String coreUrl = replica.getCoreUrl(); // It seems replica reports its core URL with a trailing slash while shard - // info returned from the query doesn't. Oh well. + // info returned from the query doesn't. 
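Several hunks in these test classes (TestSolrJErrorHandling, JettyWebappTest, CloudHttp2SolrClientTest, CloudSolrClientTest) replace the deprecated new URL(String) constructor with URI.create(...).toURL(). A small standalone sketch of that pattern; the address is made up for illustration:

    // java.net.URL(String) is deprecated in recent JDKs, so build a URI first and convert it.
    import java.net.MalformedURLException;
    import java.net.URI;
    import java.net.URL;

    public final class UrlFromUri {
      public static void main(String[] args) throws MalformedURLException {
        URL url = URI.create("http://127.0.0.1:8983/solr/").toURL();
        int port = URI.create("http://127.0.0.1:8983/solr/").getPort(); // URI exposes the port directly
        System.out.println(url + " port=" + port);
      }
    }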
if (coreUrl.endsWith("/")) { coreUrl = coreUrl.substring(0, coreUrl.length() - 1); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java index d2c5686d6bb..111b306e21e 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/ConcurrentUpdateSolrClientBuilderTest.java @@ -71,7 +71,7 @@ public void testSocketTimeoutOnCommit() throws IOException, SolrServerException if (!(e.getCause() instanceof SocketTimeoutException)) { throw e; } - // else test passses + // else test passes } } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java index 835f4c04cbe..65c240a8e5f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientCompatibilityTest.java @@ -86,7 +86,8 @@ public void testConnectToNewNodesUsingHttp1() throws Exception { } public void testConnectToOldNodesUsingHttp2() throws Exception { - // if this test some how failure, this mean that Jetty client now be able to switch between + // if this test somehow fails, this means that the Jetty client may now be able to switch + // between // HTTP/1 and HTTP/2.2 protocol dynamically therefore rolling updates will be easier we should // then notify this to users JettyConfig jettyConfig = diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientProxyTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientProxyTest.java index 6b48b1777da..99ea3af66c1 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientProxyTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/Http2SolrClientProxyTest.java @@ -87,7 +87,7 @@ public void testProxyWithHttpSolrClientJdkImpl() throws Exception { try (HttpJdkSolrClient client = builder.build()) { testProxy(client); } - // This is a workaround for java.net.http.HttpClient not implementing closeable/autoclosable + // This is a workaround for java.net.http.HttpClient not implementing closeable/autocloseable // until Java 21. 
Thread[] threads = new Thread[Thread.currentThread().getThreadGroup().activeCount()]; Thread.currentThread().getThreadGroup().enumerate(threads); @@ -102,7 +102,7 @@ public void testProxyWithHttpSolrClientJdkImpl() throws Exception { System.gc(); } - /** Setup a simple http proxy and verify a request works */ + /** Set up a simple http proxy and verify a request works */ public void testProxy(HttpSolrClientBase client) throws Exception { String id = "1234"; SolrInputDocument doc = new SolrInputDocument(); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClusterStateSSLTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClusterStateSSLTest.java index db385e26b83..1242cde9945 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClusterStateSSLTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpClusterStateSSLTest.java @@ -23,7 +23,6 @@ import java.util.Map; import org.apache.solr.client.solrj.request.CollectionAdminRequest; import org.apache.solr.cloud.SolrCloudTestCase; -import org.apache.solr.common.cloud.ClusterState; import org.apache.solr.common.cloud.DocCollection; import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.util.Utils; @@ -88,7 +87,7 @@ public void testHttpClusterStateWithSSL() throws Exception { new CloudSolrClient.Builder(Collections.singletonList(url0.toExternalForm())).build()) { ClusterStateProvider csp = httpBasedCloudSolrClient.getClusterStateProvider(); assertTrue(csp instanceof Http2ClusterStateProvider); - verifyUrlSchemeInClusterState(csp.getClusterState(), collectionId, expectedReplicas); + verifyUrlSchemeInClusterState(csp.getCollection(collectionId), expectedReplicas); } // http2 @@ -97,20 +96,19 @@ public void testHttpClusterStateWithSSL() throws Exception { .build()) { ClusterStateProvider csp = http2BasedClient.getClusterStateProvider(); assertTrue(csp instanceof Http2ClusterStateProvider); - verifyUrlSchemeInClusterState(csp.getClusterState(), collectionId, expectedReplicas); + verifyUrlSchemeInClusterState(csp.getCollection(collectionId), expectedReplicas); } // Zk cluster state now ClusterStateProvider csp = cluster.getSolrClient().getClusterStateProvider(); assertTrue(csp instanceof ZkClientClusterStateProvider); - verifyUrlSchemeInClusterState(csp.getClusterState(), collectionId, expectedReplicas); + verifyUrlSchemeInClusterState(csp.getCollection(collectionId), expectedReplicas); } private void verifyUrlSchemeInClusterState( - final ClusterState cs, final String collectionId, final int expectedReplicas) { - DocCollection dc = cs.getCollection(collectionId); - assertNotNull(dc); - List replicas = dc.getReplicas(); + final DocCollection collection, final int expectedReplicas) { + assertNotNull(collection); + List replicas = collection.getReplicas(); assertNotNull(replicas); assertEquals(expectedReplicas, replicas.size()); for (Replica r : replicas) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientConPoolTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientConPoolTest.java index 2c654248a4d..078812f0600 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientConPoolTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientConPoolTest.java @@ -119,31 +119,30 @@ public void testLBClient() throws IOException, SolrServerException { ExecutorUtil.newMDCAwareFixedThreadPool( threadCount, new SolrNamedThreadFactory(getClass().getSimpleName() + 
"TestScheduler")); CloseableHttpClient httpClient = HttpClientUtil.createClient(new ModifiableSolrParams(), pool); - try { - final LBHttpSolrClient roundRobin = - new LBHttpSolrClient.Builder() - .withBaseEndpoint(fooUrl) - .withBaseEndpoint(barUrl) - .withDefaultCollection(DEFAULT_TEST_COLLECTION_NAME) - .withHttpClient(httpClient) - .build(); - - List concurrentClients = - Arrays.asList( - new ConcurrentUpdateSolrClient.Builder(fooUrl) - .withDefaultCollection(DEFAULT_TEST_COLLECTION_NAME) - .withHttpClient(httpClient) - .withThreadCount(threadCount) - .withQueueSize(10) - .withExecutorService(threads) - .build(), - new ConcurrentUpdateSolrClient.Builder(barUrl) - .withDefaultCollection(DEFAULT_TEST_COLLECTION_NAME) - .withHttpClient(httpClient) - .withThreadCount(threadCount) - .withQueueSize(10) - .withExecutorService(threads) - .build()); + try (var roundRobin = + new LBHttpSolrClient.Builder() + .withBaseEndpoint(fooUrl) + .withBaseEndpoint(barUrl) + .withDefaultCollection(DEFAULT_TEST_COLLECTION_NAME) + .withHttpClient(httpClient) + .build(); + final var fooClient = + new ConcurrentUpdateSolrClient.Builder(fooUrl) + .withDefaultCollection(DEFAULT_TEST_COLLECTION_NAME) + .withHttpClient(httpClient) + .withThreadCount(threadCount) + .withQueueSize(10) + .withExecutorService(threads) + .build(); + final var barClient = + new ConcurrentUpdateSolrClient.Builder(barUrl) + .withDefaultCollection(DEFAULT_TEST_COLLECTION_NAME) + .withHttpClient(httpClient) + .withThreadCount(threadCount) + .withQueueSize(10) + .withExecutorService(threads) + .build()) { + List concurrentClients = Arrays.asList(fooClient, barClient); for (int i = 0; i < 2; i++) { roundRobin.deleteByQuery("*:*"); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientTestBase.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientTestBase.java index ec14e871e67..d9f3a5544b4 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientTestBase.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/HttpSolrClientTestBase.java @@ -442,7 +442,7 @@ protected void testQueryString() throws Exception { } catch (BaseHttpSolrClient.RemoteSolrException ignored) { } // NOTE: single stream requests send all the params - // as part of the query string. So add "neither" to the request + // as part of the query string. So add "neither" to the request, // so it passes the verification step. req.setQueryParams(Set.of("requestOnly", "both", "neither")); verifyServletState(client, req); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java index 367184ed2d4..5f55b353c0d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/LBHttp2SolrClientTest.java @@ -82,7 +82,7 @@ public void testAsyncWithFailures() { LBHttp2SolrClient testClient = new LBHttp2SolrClient.Builder(client, ep1, ep2).build()) { for (int j = 0; j < 2; j++) { - // j: first time Endpoint One will retrun error code 500. + // first time Endpoint One will return error code 500. // second time Endpoint One will be healthy String basePathToSucceed; @@ -185,7 +185,7 @@ public void testAsync() { } // It is the user's responsibility to shuffle the endpoints when using - // async. LB Http Solr Client always will try the passed-in endpoints + // async. 
LB Http Solr Client will always try the passed-in endpoints // in order. In this case, endpoint 1 gets all the requests! assertEquals(limit, numEndpointOne); assertEquals(0, numEndpointTwo); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/SendUpdatesToLeadersOverrideTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/SendUpdatesToLeadersOverrideTest.java index ac70668c09e..7c716070b1d 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/SendUpdatesToLeadersOverrideTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/SendUpdatesToLeadersOverrideTest.java @@ -54,7 +54,8 @@ * after the distrib processor) to inspect which replicas receive various {@link * UpdateRequest}s from variously configured {@link CloudSolrClient}s. In some requests, * shards.preference=replica.type:PULL is specified to confirm that typical routing - * prefrences are respected (when the effective value of isSendToLeaders is false + * preferences are respected (when the effective value of isSendToLeaders is + * false * ) */ public class SendUpdatesToLeadersOverrideTest extends SolrCloudTestCase { @@ -134,7 +135,7 @@ private static List stopRecording(final String group) { .collect(Collectors.toUnmodifiableList()); } - /** Convinience class for making assertions about the updates that were processed */ + /** Convenience class for making assertions about the updates that were processed */ private static class RecordingResults { public final List preDistribCommands; public final List postDistribCommands; @@ -271,7 +272,7 @@ public void testHttp2ClientThatDoesNotDefaultToLeaders() throws Exception { } /** - * Given a SolrClient, sends various updates and asserts expecations regarding default behavior: + * Given a SolrClient, sends various updates and asserts expectations regarding default behavior: * that these requests will be initially sent to shard leaders, and "routed" requests will be sent * to the leader for that route's shard */ @@ -300,7 +301,7 @@ private void checkUpdatesDefaultToLeaders(final CloudSolrClient client) throws E "add pre and post should be exact same reqs", add.preDistribRequests.keySet(), add.postDistribRequests.keySet()); - // NOTE: we can't assert the pre/post commands are the same, because they add versioning + // NOTE: we can't assert the pre- / post-commands are the same, because they add versioning // whatever leader our add was routed to, a DBI for the same id should go to the same leader final RecordingResults del = @@ -366,7 +367,7 @@ private void checkUpdatesDefaultToLeaders(final CloudSolrClient client) throws E record.preDistribCores.keySet(), record.postDistribCores.keySet()); - // NOTE: we make no asertion about number of post-distrb requests, just commands + // NOTE: we make no assertion about number of post-distrib requests, just commands // (distrib proc may batch differently then what we send) assertEquals( "multi post-distrib cores don't match pre-distrib cores", @@ -377,8 +378,8 @@ private void checkUpdatesDefaultToLeaders(final CloudSolrClient client) throws E } /** - * Given a SolrClient, sends various updates using {@link #prefPull} and asserts expecations that - * these requests will be initially sent to PULL replcias + * Given a SolrClient, sends various updates using {@link #prefPull} and asserts expectations that + * these requests will be initially sent to PULL replicas */ private void checkUpdatesWithShardsPrefPull(final CloudSolrClient client) throws Exception { @@ -474,7 +475,7 @@ private void 
checkUpdatesWithShardsPrefPull(final CloudSolrClient client) throws record.postDistribCores.keySet()); // NOTE: Don't assume our docIds are spread across multi-shards... // - // We make no asertion about number of post-distrb requests + // We make no asertion about number of post-distrib requests // (distrib proc may batch differently then what we send) assertThat( "multi post-distrib cores", @@ -486,7 +487,7 @@ private void checkUpdatesWithShardsPrefPull(final CloudSolrClient client) throws /** * Given a SolrClient, sends various updates were {@link IsUpdateRequest#isSendToLeaders} returns - * false, and asserts expectations that requess using {@link #prefPull} are all sent to PULL + * false, and asserts expectations that requests using {@link #prefPull} are all sent to PULL * replicas, regardless of how the client is configured. */ private void checkUpdatesWithSendToLeadersFalse(final CloudSolrClient client) throws Exception { @@ -583,7 +584,7 @@ private void checkUpdatesWithSendToLeadersFalse(final CloudSolrClient client) th record.postDistribCores.keySet()); // NOTE: Don't assume our docIds are spread across multi-shards... // - // We make no asertion about number of post-distrb requests + // We make no assertion about number of post-distrib requests // (distrib proc may batch differently then what we send) assertThat( "multi post-distrib cores", diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java index bf45abad2f2..dd89fb97cbb 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java @@ -145,7 +145,7 @@ public void testSchemaRequestAccuracy() throws Exception { SchemaRepresentation schemaRepresentation = schemaResponse.getSchemaRepresentation(); assertNotNull(schemaRepresentation); assertEquals("test", schemaRepresentation.getName()); - assertEquals(1.6, schemaRepresentation.getVersion(), 0.001f); + assertEquals(1.7, schemaRepresentation.getVersion(), 0.001f); assertEquals("id", schemaRepresentation.getUniqueKey()); assertFalse(schemaRepresentation.getFields().isEmpty()); assertFalse(schemaRepresentation.getDynamicFields().isEmpty()); @@ -168,7 +168,7 @@ public void testSchemaVersionRequestAccuracy() throws Exception { SchemaResponse.SchemaVersionResponse schemaVersionResponse = schemaVersionRequest.process(getSolrClient()); assertValidSchemaResponse(schemaVersionResponse); - assertEquals(1.6, schemaVersionResponse.getSchemaVersion(), 0.001); + assertEquals(1.7, schemaVersionResponse.getSchemaVersion(), 0.001); } @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java index 063b538b743..d8630d016ff 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java @@ -95,8 +95,8 @@ public void testCustomUlogDir() throws Exception { req.setIsTransient(true); req.process(client); - // Show that the newly-created core has values for load on startup and transient different - // than defaults due to the above. + // Show that the newly-created core has values for load on startup and transient that differ + // from defaults due to the above. 
File logDir; try (SolrCore coreProveIt = cores.getCore("collection1"); SolrCore core = cores.getCore("newcore")) { diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java index 9fa69a52281..e8c72a52b44 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestUpdateRequestCodec.java @@ -111,7 +111,7 @@ public void simple() throws IOException { } @Test - public void testIteratable() throws IOException { + public void testIterable() throws IOException { final List values = new ArrayList<>(); values.add("iterItem1"); values.add("iterItem2"); @@ -122,7 +122,7 @@ public void testIteratable() throws IOException { SolrInputDocument doc = new SolrInputDocument(); doc.addField("id", 1); doc.addField("desc", "one"); - // imagine someone adding a custom Bean that implements Iterable + // imagine someone adds a custom Bean that implements Iterable // but is not a Collection doc.addField("iter", (Iterable) values::iterator); doc.addField("desc", "1"); @@ -284,7 +284,7 @@ private void compareDocs(String m, SolrInputDocument expectedDoc, SolrInputDocum Object expectedVal = expectedField.getValue(); Object actualVal = actualField.getValue(); if (expectedVal instanceof Set && actualVal instanceof Collection) { - // unmarshaled documents never contain Sets, they are just a + // unmarshalled documents never contain Sets, they are just a // List in an arbitrary order based on what the iterator of // the original Set returned, so we need a comparison that is // order agnostic. diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnalysisResponseBaseTest.java similarity index 97% rename from solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java rename to solr/solrj/src/test/org/apache/solr/client/solrj/response/AnalysisResponseBaseTest.java index 35e8f88893b..d56a8d58839 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnlysisResponseBaseTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/AnalysisResponseBaseTest.java @@ -27,7 +27,7 @@ * * @since solr 1.4 */ -public class AnlysisResponseBaseTest extends SolrTestCase { +public class AnalysisResponseBaseTest extends SolrTestCase { /** * Tests the {@link AnalysisResponseBase#buildTokenInfo(org.apache.solr.common.util.NamedList)} @@ -68,7 +68,7 @@ public void testBuildTokenInfo() throws Exception { } /** - * Tests the {@link AnalysisResponseBase#buildPhases(org.apache.solr.common.util.NamedList)} )} + * Tests the {@link AnalysisResponseBase#buildPhases(org.apache.solr.common.util.NamedList)} * method. */ @Test @@ -100,7 +100,7 @@ protected TokenInfo buildTokenInfo(NamedList ignored) { } /** - * Tests the {@link AnalysisResponseBase#buildPhases(org.apache.solr.common.util.NamedList)} )} + * Tests the {@link AnalysisResponseBase#buildPhases(org.apache.solr.common.util.NamedList)} * method for the special case of CharacterFilter. 
*/ @Test diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java index 319905717e1..2f9487a6a3b 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestSpellCheckResponse.java @@ -109,7 +109,7 @@ public void testSpellCheckResponse_Extended() throws Exception { assertTrue(sug.getNumFound() > 0); // assertTrue(sug.getOriginalFrequency() > 0); - // Hmmm... the API for SpellCheckResponse could be nicer: + // Hmm... the API for SpellCheckResponse could be nicer: response.getSuggestions().get(0).getAlternatives().get(0); } diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparatorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparatorTest.java index 1fa85e4981f..17d918a233a 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparatorTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/routing/NodePreferenceRulesComparatorTest.java @@ -123,7 +123,7 @@ public void replicaLeaderTest() { "NRT"), "collection1", "shard1")); - // Prefer non-leader only, therefore node1 has lowest priority + // Prefer non-leader only, therefore node1 has the lowest priority List rules = PreferenceRule.from(ShardParams.SHARDS_PREFERENCE_REPLICA_LEADER + ":false"); NodePreferenceRulesComparator comparator = new NodePreferenceRulesComparator(rules, null); diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/routing/ReplicaListTransformerTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/routing/ReplicaListTransformerTest.java index aa3e86bb8d7..11d1893ae8f 100644 --- a/solr/solrj/src/test/org/apache/solr/client/solrj/routing/ReplicaListTransformerTest.java +++ b/solr/solrj/src/test/org/apache/solr/client/solrj/routing/ReplicaListTransformerTest.java @@ -76,11 +76,11 @@ public void testTransform() throws Exception { final ReplicaListTransformer transformer; if (random().nextBoolean()) { - log.info("Using ToyMatching Transfomer"); + log.info("Using ToyMatching Transformer"); transformer = new ToyMatchingReplicaListTransformer(regex); } else { - log.info("Using conditional Transfomer"); + log.info("Using conditional Transformer"); transformer = new HttpShardHandlerFactory() { diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java index 070f8c88c51..13532e8ef5e 100644 --- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java +++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCollectionStateWatchers.java @@ -121,7 +121,7 @@ private void doTestCollectionWatchWithNodeShutdown(final boolean shutdownUnusedN CloudSolrClient client = cluster.getSolrClient(); - // note: one node in our cluster is unsed by collection + // note: one node in our cluster is unused by collection CollectionAdminRequest.createCollection("testcollection", "config", CLUSTER_SIZE, 1) .processAndWait(client, MAX_WAIT_TIMEOUT); @@ -249,7 +249,7 @@ public void testWaitForStateChecksCurrentState() throws Exception { @Test @Ignore - public void testCanWaitForNonexistantCollection() throws Exception { + public void testCanWaitForNonexistentCollection() throws Exception { Future future = waitInBackground( diff --git 
a/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java index 03b4959df93..4cc96202254 100644 --- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java +++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestDocCollectionWatcher.java @@ -180,7 +180,7 @@ public void testWaitForStateChecksCurrentState() throws Exception { } @Test - public void testCanWaitForNonexistantCollection() throws Exception { + public void testCanWaitForNonexistentCollection() throws Exception { Future future = waitInBackground("delayed", MAX_WAIT_TIMEOUT, TimeUnit.SECONDS, Objects::nonNull); diff --git a/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java b/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java index 6ae0e3edd4f..bd732879289 100644 --- a/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/params/SolrParamTest.java @@ -16,6 +16,8 @@ */ package org.apache.solr.common.params; +import static org.apache.solr.SolrTestCaseJ4.params; + import java.lang.invoke.MethodHandles; import java.util.ArrayList; import java.util.HashMap; @@ -24,6 +26,7 @@ import java.util.Map; import org.apache.solr.SolrTestCase; import org.apache.solr.common.SolrException; +import org.apache.solr.search.QueryParsing; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,6 +34,45 @@ public class SolrParamTest extends SolrTestCase { private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + public void testLocalParamRoundTripParsing() throws Exception { + final SolrParams in = + params( + "simple", "xxx", + "blank", "", + "space", "x y z", + "lead_space", " x", + "curly", "x}y", + "quote", "x'y", + "quoted", "'x y'", + "d_quote", "x\"y", + "d_quoted", "\"x y\"", + "dollar", "x$y", + "multi", "x", + "multi", "y y", + "v", "$ref"); + final String toStr = in.toLocalParamsString(); + final SolrParams out = QueryParsing.getLocalParams(toStr, params("ref", "ref value")); + + assertEquals("xxx", out.get("simple")); + assertEquals("", out.get("blank")); + assertEquals("x y z", out.get("space")); + assertEquals(" x", out.get("lead_space")); + assertEquals("x}y", out.get("curly")); + assertEquals("x'y", out.get("quote")); + assertEquals("'x y'", out.get("quoted")); + assertEquals("x\"y", out.get("d_quote")); + assertEquals("\"x y\"", out.get("d_quoted")); + assertEquals("x$y", out.get("dollar")); + + assertArrayEquals(new String[] {"x", "y y"}, out.getParams("multi")); + // first one should win... 
+ assertEquals("x", out.get("multi")); + + assertEquals("ref value", out.get("v")); + + assertIterSize(toStr, 12, out); + } + public void testParamIterators() { ModifiableSolrParams aaa = new ModifiableSolrParams(); @@ -230,7 +272,7 @@ public void testGetParams() { assertEquals(400, getReturnCode(() -> params.getBool("f.bad.bool"))); assertEquals(400, getReturnCode(() -> params.getFloat("f.bad.float"))); - // Ask for params that arent there + // Ask for params that aren't there assertNull(params.get("asagdsaga")); assertNull(params.getBool("asagdsaga")); assertNull(params.getInt("asagdsaga")); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java b/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java index fdb5f4176c9..019e00d675f 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java @@ -23,7 +23,6 @@ import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; -import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Paths; import java.util.zip.GZIPInputStream; @@ -101,8 +100,7 @@ public void testURLStream() throws IOException { is.transferTo(os); } - ContentStreamBase stream = - new ContentStreamBase.URLStream(new URL(file.toURI().toASCIIString())); + ContentStreamBase stream = new ContentStreamBase.URLStream(file.toURI().toURL()); try (InputStream s = stream.getStream(); FileInputStream fis = new FileInputStream(file); @@ -133,8 +131,7 @@ public void testURLStreamGZIP() throws IOException { is.transferTo(zos); } - ContentStreamBase stream = - new ContentStreamBase.URLStream(new URL(file.toURI().toASCIIString())); + ContentStreamBase stream = new ContentStreamBase.URLStream(file.toURI().toURL()); try (InputStream s = stream.getStream(); FileInputStream fis = new FileInputStream(file); GZIPInputStream zis = new GZIPInputStream(fis); @@ -150,7 +147,7 @@ public void testURLStreamGZIP() throws IOException { } } - public void testURLStreamCSVGZIPExtention() throws IOException { + public void testURLStreamCSVGZIPExtension() throws IOException { File file = new File(createTempDir().toFile(), "README.CSV.gz"); try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath()); @@ -160,8 +157,7 @@ public void testURLStreamCSVGZIPExtention() throws IOException { is.transferTo(zos); } - ContentStreamBase stream = - new ContentStreamBase.URLStream(new URL(file.toURI().toASCIIString())); + ContentStreamBase stream = new ContentStreamBase.URLStream(file.toURI().toURL()); try (InputStream s = stream.getStream(); FileInputStream fis = new FileInputStream(file); GZIPInputStream zis = new GZIPInputStream(fis); @@ -177,7 +173,7 @@ public void testURLStreamCSVGZIPExtention() throws IOException { } } - public void testURLStreamJSONGZIPExtention() throws IOException { + public void testURLStreamJSONGZIPExtension() throws IOException { File file = new File(createTempDir().toFile(), "README.json.gzip"); try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath()); @@ -187,8 +183,7 @@ public void testURLStreamJSONGZIPExtention() throws IOException { is.transferTo(zos); } - ContentStreamBase stream = - new ContentStreamBase.URLStream(new URL(file.toURI().toASCIIString())); + ContentStreamBase stream = new ContentStreamBase.URLStream(file.toURI().toURL()); try (InputStream s = stream.getStream(); FileInputStream fis = new FileInputStream(file); GZIPInputStream zis = new 
GZIPInputStream(fis); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/EnvUtilsTest.java b/solr/solrj/src/test/org/apache/solr/common/util/EnvUtilsTest.java index b6af3f56d40..d2cd63795f9 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/EnvUtilsTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/EnvUtilsTest.java @@ -24,43 +24,24 @@ import org.junit.Test; public class EnvUtilsTest extends SolrTestCase { + + private static final Map ENV = + Map.of( + "SOLR_HOME", "/home/solr", + "SOLR_PORT", "8983", + "SOLR_HOST", "localhost", + "SOLR_LOG_LEVEL", "INFO", + "SOLR_BOOLEAN", "true", + "SOLR_LONG", "1234567890", + "SOLR_COMMASEP", "one,two, three", + "SOLR_JSON_LIST", "[\"one\", \"two\", \"three\"]", + "SOLR_ALWAYS_ON_TRACE_ID", "true", + "SOLR_STR_WITH_NEWLINE", "foo\nbar,baz"); + @BeforeClass public static void beforeClass() throws Exception { // Make a map of some common Solr environment variables for testing, and initialize EnvUtils - EnvUtils.setEnvs( - Map.of( - "SOLR_HOME", "/home/solr", - "SOLR_PORT", "8983", - "SOLR_HOST", "localhost", - "SOLR_LOG_LEVEL", "INFO", - "SOLR_BOOLEAN", "true", - "SOLR_LONG", "1234567890", - "SOLR_COMMASEP", "one,two, three", - "SOLR_JSON_LIST", "[\"one\", \"two\", \"three\"]", - "SOLR_ALWAYS_ON_TRACE_ID", "true", - "SOLR_STR_WITH_NEWLINE", "foo\nbar,baz")); - EnvUtils.init(true); - } - - @Test - public void testGetEnv() { - assertEquals("INFO", EnvUtils.getEnv("SOLR_LOG_LEVEL")); - - assertNull(EnvUtils.getEnv("SOLR_NONEXIST")); - assertEquals("myString", EnvUtils.getEnv("SOLR_NONEXIST", "myString")); - - assertTrue(EnvUtils.getEnvAsBool("SOLR_BOOLEAN")); - assertFalse(EnvUtils.getEnvAsBool("SOLR_BOOLEAN_NONEXIST", false)); - - assertEquals("1234567890", EnvUtils.getEnv("SOLR_LONG")); - assertEquals(1234567890L, EnvUtils.getEnvAsLong("SOLR_LONG")); - assertEquals(987L, EnvUtils.getEnvAsLong("SOLR_LONG_NONEXIST", 987L)); - - assertEquals("one,two, three", EnvUtils.getEnv("SOLR_COMMASEP")); - assertEquals(List.of("one", "two", "three"), EnvUtils.getEnvAsList("SOLR_COMMASEP")); - assertEquals(List.of("one", "two", "three"), EnvUtils.getEnvAsList("SOLR_JSON_LIST")); - assertEquals(List.of("fallback"), EnvUtils.getEnvAsList("SOLR_MISSING", List.of("fallback"))); - assertEquals(List.of("foo\nbar", "baz"), EnvUtils.getEnvAsList("SOLR_STR_WITH_NEWLINE")); + EnvUtils.init(true, ENV); } @Test @@ -96,10 +77,10 @@ public void getPropWithCamelCase() { @Test public void testEnvsWithCustomKeyNameMappings() { // These have different names than the environment variables - assertEquals(EnvUtils.getEnv("SOLR_HOME"), EnvUtils.getProperty("solr.solr.home")); - assertEquals(EnvUtils.getEnv("SOLR_PORT"), EnvUtils.getProperty("jetty.port")); - assertEquals(EnvUtils.getEnv("SOLR_HOST"), EnvUtils.getProperty("host")); - assertEquals(EnvUtils.getEnv("SOLR_LOGS_DIR"), EnvUtils.getProperty("solr.log.dir")); + assertEquals(ENV.get("SOLR_HOME"), EnvUtils.getProperty("solr.solr.home")); + assertEquals(ENV.get("SOLR_PORT"), EnvUtils.getProperty("jetty.port")); + assertEquals(ENV.get("SOLR_HOST"), EnvUtils.getProperty("host")); + assertEquals(ENV.get("SOLR_LOGS_DIR"), EnvUtils.getProperty("solr.log.dir")); } @Test @@ -111,10 +92,10 @@ public void testNotMapped() { @Test public void testOverwrite() { EnvUtils.setProperty("solr.overwrite", "original"); - EnvUtils.setEnv("SOLR_OVERWRITE", "overwritten"); - EnvUtils.init(false); + var env2 = Map.of("SOLR_OVERWRITE", "overwritten"); + EnvUtils.init(false, env2); assertEquals("original", 
EnvUtils.getProperty("solr.overwrite")); - EnvUtils.init(true); + EnvUtils.init(true, env2); assertEquals("overwritten", EnvUtils.getProperty("solr.overwrite")); } } diff --git a/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java b/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java index a9df98a296b..f9d6026edd0 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/ExecutorUtilTest.java @@ -28,9 +28,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import org.apache.lucene.util.NamedThreadFactory; import org.apache.solr.SolrTestCase; +import org.apache.solr.logging.MDCLoggingContext; import org.apache.solr.util.TimeOut; import org.junit.Test; +import org.slf4j.MDC; public class ExecutorUtilTest extends SolrTestCase { @@ -38,9 +41,9 @@ public class ExecutorUtilTest extends SolrTestCase { /** * The maximum amount of time we're willing to let the test wait for any type of blocking action, - * no matter ow slow our CPU is. Any thing that exceeds this time is presumably a bug + * no matter ow slow our CPU is. Anything that exceeds this time is presumably a bug */ - private static final long MAX_SANE_WAIT_DURRATION_MS = 2_000; + private static final long MAX_SANE_WAIT_DURATION_MS = 2_000; /** Test that if there is a non interruptable thread that awaitTermination eventually returns. */ @Test @@ -71,7 +74,7 @@ public void testExecutorUtilAwaitsTerminationWhenTaskIgnoresInterupt() throws Ex // Worker should finish if we let it w.tellWorkerToFinish(); - assertTrue(f.get(MAX_SANE_WAIT_DURRATION_MS, TimeUnit.MILLISECONDS)); + assertTrue(f.get(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS)); } finally { w.tellWorkerToFinish(); ExecutorUtil.shutdownNowAndAwaitTermination(executorService); @@ -96,10 +99,10 @@ public void testExecutorUtilAwaitsTerminationWhenTaskRespectsInterupt() throws E ExecutorUtil.awaitTermination( executorService, MAX_AWAIT_TERMINATION_ARG_MS, TimeUnit.MILLISECONDS); - // Worker should finish on it's own after the interupt + // Worker should finish on its own after the interupt assertTrue( "Worker not interupted in a sane amount of time", w.awaitWorkerInteruptedAtLeastOnce()); - assertFalse(f.get(MAX_SANE_WAIT_DURRATION_MS, TimeUnit.MILLISECONDS)); + assertFalse(f.get(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS)); assertTrue("test sanity check: WTF? 
how did we get here?", w.getNumberOfInterupts() > 0); } finally { @@ -108,8 +111,59 @@ public void testExecutorUtilAwaitsTerminationWhenTaskRespectsInterupt() throws E } } + @Test + public void testCMDCAwareCachedThreadPool() throws Exception { + // 5 threads max, unbounded queue + ExecutorService executor = + ExecutorUtil.newMDCAwareCachedThreadPool( + 5, Integer.MAX_VALUE, new NamedThreadFactory("test")); + + AtomicInteger concurrentTasks = new AtomicInteger(); + AtomicInteger maxConcurrentTasks = new AtomicInteger(); + int taskCount = 5 + random().nextInt(100); + CountDownLatch latch = new CountDownLatch(5); + List> futures = new ArrayList<>(); + + for (int i = 0; i < taskCount; i++) { + String core = "id_" + random().nextLong(); + + Callable task = + () -> { + // ensure we never have too many concurrent tasks + int concurrent = concurrentTasks.incrementAndGet(); + assertTrue(concurrent <= 5); + maxConcurrentTasks.getAndAccumulate(concurrent, Math::max); + + // assert MDC context is copied from the parent thread that submitted the task + assertEquals(core, MDC.get("core")); + + // The first 4 tasks to be executed will wait on the latch, and the 5th will + // release all the threads. + latch.countDown(); + latch.await(1, TimeUnit.SECONDS); + concurrentTasks.decrementAndGet(); + return null; + }; + + MDCLoggingContext.setCoreName(core); + futures.add(executor.submit(task)); + } + + ExecutorUtil.shutdownAndAwaitTermination(executor); + + for (Future future : futures) { + // Throws an exception (and make the test fail) if an assertion failed + // in the subtask + future.get(); + } + + // assert the pool was actually multithreaded. Since we submitted many tasks, + // all the threads should have been started + assertEquals(5, maxConcurrentTasks.get()); + } + private static final class Worker implements Callable { - // how we communiate out to our caller + // how we communicate out to our caller private final CountDownLatch taskStartedLatch = new CountDownLatch(1); private final CountDownLatch gotFirstInteruptLatch = new CountDownLatch(1); private final AtomicInteger interruptCount = new AtomicInteger(0); @@ -124,12 +178,12 @@ public Worker(final boolean interruptable) { /** Returns false if worker doesn't start in a sane amount of time */ public boolean awaitWorkerStart() throws InterruptedException { - return taskStartedLatch.await(MAX_SANE_WAIT_DURRATION_MS, TimeUnit.MILLISECONDS); + return taskStartedLatch.await(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS); } - /** Returns false if worker didn't recieve interupt in a sane amount of time */ + /** Returns false if worker didn't receive interupt in a sane amount of time */ public boolean awaitWorkerInteruptedAtLeastOnce() throws InterruptedException { - return gotFirstInteruptLatch.await(MAX_SANE_WAIT_DURRATION_MS, TimeUnit.MILLISECONDS); + return gotFirstInteruptLatch.await(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS); } public int getNumberOfInterupts() { @@ -142,15 +196,15 @@ public void tellWorkerToFinish() { @Override public Boolean call() { - // aboslute last resort timeout to prevent infinite while loop + // absolute last resort timeout to prevent infinite while loop final TimeOut threadTimeout = - new TimeOut(MAX_SANE_WAIT_DURRATION_MS, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); + new TimeOut(MAX_SANE_WAIT_DURATION_MS, TimeUnit.MILLISECONDS, TimeSource.NANO_TIME); while (!threadTimeout.hasTimedOut()) { try { // this must be inside the try block, so we'll still catch the InterruptedException if our - // caller shutsdown & 
awaits termination before we get a chance to start await'ing... + // caller shuts down & awaits termination before we get a chance to start awaiting... taskStartedLatch.countDown(); if (allowedToFinishLatch.await( diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestFastInputStream.java b/solr/solrj/src/test/org/apache/solr/common/util/TestFastInputStream.java index d3937a28f9c..d70257decca 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestFastInputStream.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestFastInputStream.java @@ -71,7 +71,7 @@ else if ((b & 0xE0) != 0xE0) { } } - // code copied rfrom NamedlistCode#writechars + // code copied from NamedlistCode#writechars public static void writeChars(OutputStream os, String s, int start, int length) throws IOException { final int end = start + length; diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java index c10afef30fa..baf36a1cc89 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestJavaBinCodec.java @@ -197,36 +197,36 @@ public List readIterator(DataInputInputStream fis) throws IOException { } }; ) { @SuppressWarnings({"unchecked"}) - List unmarshaledObj = (List) javabin.unmarshal(is); + List unmarshalledObj = (List) javabin.unmarshal(is); List matchObj = generateAllDataTypes(); - compareObjects(unmarshaledObj, matchObj); + compareObjects(unmarshalledObj, matchObj); } catch (IOException e) { throw e; } } - private void compareObjects(List unmarshaledObj, List matchObj) { - assertEquals(unmarshaledObj.size(), matchObj.size()); - for (int i = 0; i < unmarshaledObj.size(); i++) { + private void compareObjects(List unmarshalledObj, List matchObj) { + assertEquals(unmarshalledObj.size(), matchObj.size()); + for (int i = 0; i < unmarshalledObj.size(); i++) { - if (unmarshaledObj.get(i) instanceof byte[] && matchObj.get(i) instanceof byte[]) { - byte[] b1 = (byte[]) unmarshaledObj.get(i); + if (unmarshalledObj.get(i) instanceof byte[] && matchObj.get(i) instanceof byte[]) { + byte[] b1 = (byte[]) unmarshalledObj.get(i); byte[] b2 = (byte[]) matchObj.get(i); assertArrayEquals(b1, b2); - } else if (unmarshaledObj.get(i) instanceof SolrDocument + } else if (unmarshalledObj.get(i) instanceof SolrDocument && matchObj.get(i) instanceof SolrDocument) { - assertTrue(compareSolrDocument(unmarshaledObj.get(i), matchObj.get(i))); - } else if (unmarshaledObj.get(i) instanceof SolrDocumentList + assertTrue(compareSolrDocument(unmarshalledObj.get(i), matchObj.get(i))); + } else if (unmarshalledObj.get(i) instanceof SolrDocumentList && matchObj.get(i) instanceof SolrDocumentList) { - assertTrue(compareSolrDocumentList(unmarshaledObj.get(i), matchObj.get(i))); - } else if (unmarshaledObj.get(i) instanceof SolrInputDocument + assertTrue(compareSolrDocumentList(unmarshalledObj.get(i), matchObj.get(i))); + } else if (unmarshalledObj.get(i) instanceof SolrInputDocument && matchObj.get(i) instanceof SolrInputDocument) { - assertTrue(compareSolrInputDocument(unmarshaledObj.get(i), matchObj.get(i))); - } else if (unmarshaledObj.get(i) instanceof SolrInputField + assertTrue(compareSolrInputDocument(unmarshalledObj.get(i), matchObj.get(i))); + } else if (unmarshalledObj.get(i) instanceof SolrInputField && matchObj.get(i) instanceof SolrInputField) { - assertTrue(assertSolrInputFieldEquals(unmarshaledObj.get(i), matchObj.get(i))); + 
assertTrue(assertSolrInputFieldEquals(unmarshalledObj.get(i), matchObj.get(i))); } else { - assertEquals(unmarshaledObj.get(i), matchObj.get(i)); + assertEquals(unmarshalledObj.get(i), matchObj.get(i)); } } } @@ -336,7 +336,7 @@ public void testReadMapEntryBinaryStreamSource() throws IOException { // but keeping this in code to make a point, that even the same exact bin file, there could be // sub-objects in the key or value of the maps, with types that do not implement equals and in // these cases equals would fail as these sub-objects would be equated on their - // memory-references which is highly probbale to be unique and hence the top-level map's equals + // memory-references which is highly probable to be unique and hence the top-level map's equals // will also fail assertNotEquals("2 different references even though from same source are // un-equal",entryFromBinFileA,entryFromBinFileA_clone); diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestXMLEscaping.java b/solr/solrj/src/test/org/apache/solr/common/util/TestXMLEscaping.java index ff4022d8192..b861ab825af 100644 --- a/solr/solrj/src/test/org/apache/solr/common/util/TestXMLEscaping.java +++ b/solr/solrj/src/test/org/apache/solr/common/util/TestXMLEscaping.java @@ -20,7 +20,7 @@ import java.io.StringWriter; import org.apache.solr.SolrTestCase; -/** Test (some of the) character escaping functions of the XML class */ +/** Test some character escaping functions of the XML class */ public class TestXMLEscaping extends SolrTestCase { private void doSimpleTest(String input, String expectedOutput) throws IOException { final StringWriter sw = new StringWriter(); diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java index 55ad19495e0..fc723f60aa5 100644 --- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java +++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java @@ -769,10 +769,6 @@ public static void initCore() throws Exception { log.info("####initCore"); ignoreException("ignore_exception"); - factoryProp = System.getProperty("solr.directoryFactory"); - if (factoryProp == null) { - System.setProperty("solr.directoryFactory", "solr.RAMDirectoryFactory"); - } // other methods like starting a jetty instance need these too System.setProperty("solr.test.sys.prop1", "propone"); diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java index 435a2a931ba..37c90d98db9 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractDistribZkTestBase.java @@ -407,20 +407,21 @@ protected void restartZk(int pauseMillis) throws Exception { // // copyConfigUp(TEST_PATH().resolve("configsets"), "cloud-minimal", "configset-name", zk_address); - protected static void copyConfigUp( + public static void copyConfigUp( Path configSetDir, String srcConfigSet, String dstConfigName, String zkAddr) throws Exception { + + File fullConfDir = new File(configSetDir.toFile(), srcConfigSet); String[] args = new String[] { - "-confname", dstConfigName, - "-confdir", srcConfigSet, - "-zkHost", zkAddr, - "-configsetsDir", configSetDir.toString(), + "--conf-name", dstConfigName, + "--conf-dir", fullConfDir.getAbsolutePath(), + "-z", zkAddr }; ConfigSetUploadTool tool = new ConfigSetUploadTool(); - int res = 
tool.runTool(SolrCLI.processCommandLineArgs(tool.getName(), tool.getOptions(), args)); + int res = tool.runTool(SolrCLI.processCommandLineArgs(tool, args)); assertEquals("Tool should have returned 0 for success, returned: " + res, res, 0); } } diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java index 5e0461f5240..26f33c7c60d 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java @@ -70,7 +70,6 @@ import org.apache.solr.common.cloud.Replica; import org.apache.solr.common.cloud.Slice; import org.apache.solr.common.cloud.SolrZkClient; -import org.apache.solr.common.cloud.ZkMaintenanceUtils; import org.apache.solr.common.cloud.ZkStateReader; import org.apache.solr.common.params.CollectionAdminParams; import org.apache.solr.common.util.ExecutorUtil; @@ -568,11 +567,7 @@ public void uploadConfigSet(Path configDir, String configName) throws IOExceptio .withTimeout(AbstractZkTestCase.TIMEOUT, TimeUnit.MILLISECONDS) .withConnTimeOut(AbstractZkTestCase.TIMEOUT, TimeUnit.MILLISECONDS) .build()) { - ZkMaintenanceUtils.uploadToZK( - zkClient, - configDir, - ZkMaintenanceUtils.CONFIGS_ZKNODE + "/" + configName, - ZkMaintenanceUtils.UPLOAD_FILENAME_EXCLUDE_PATTERN); + new ZkConfigSetService(zkClient).uploadConfig(configName, configDir, true); } } diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java index 37d200f0f02..d410c3abeb4 100644 --- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java +++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java @@ -101,7 +101,7 @@ protected static SolrZkClient zkClient() { * beforeClass method. */ public static boolean isPRS() { - return EnvUtils.getEnvAsBool(PRS_DEFAULT_PROP, false); + return EnvUtils.getPropertyAsBool(PRS_DEFAULT_PROP, false); } /** @@ -134,7 +134,7 @@ public static void configurePrsDefault() { if (target != null && target.isAnnotationPresent(NoPrs.class)) { usePrs = false; } else { - usePrs = EnvUtils.getEnvAsBool(PRS_DEFAULT_PROP, LuceneTestCase.random().nextBoolean()); + usePrs = EnvUtils.getPropertyAsBool(PRS_DEFAULT_PROP, LuceneTestCase.random().nextBoolean()); } System.setProperty(PRS_DEFAULT_PROP, usePrs ? 
"true" : "false"); } diff --git a/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java b/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java index 31bc572bf14..9c033827241 100644 --- a/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java +++ b/solr/test-framework/src/java/org/apache/solr/embedded/JettySolrRunner.java @@ -26,6 +26,8 @@ import java.lang.invoke.MethodHandles; import java.net.BindException; import java.net.MalformedURLException; +import java.net.URI; +import java.net.URISyntaxException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -782,7 +784,9 @@ public void setProxyPort(int proxyPort) { /** Returns a base URL like {@code http://localhost:8983/solr} */ public URL getBaseUrl() { try { - return new URL(protocol, host, jettyPort, "/solr"); + return new URI(protocol, null, host, jettyPort, "/solr", null, null).toURL(); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } catch (MalformedURLException e) { throw new RuntimeException(e); } @@ -790,9 +794,11 @@ public URL getBaseUrl() { public URL getBaseURLV2() { try { - return new URL(protocol, host, jettyPort, "/api"); + return new URI(protocol, null, host, jettyPort, "/api", null, null).toURL(); } catch (MalformedURLException e) { throw new RuntimeException(e); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } } @@ -802,9 +808,11 @@ public URL getBaseURLV2() { */ public URL getProxyBaseUrl() { try { - return new URL(protocol, host, getLocalPort(), "/solr"); + return new URI(protocol, null, host, getLocalPort(), "/solr", null, null).toURL(); } catch (MalformedURLException e) { throw new RuntimeException(e); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } } diff --git a/solr/test-framework/src/java/org/apache/solr/handler/BackupRestoreUtils.java b/solr/test-framework/src/java/org/apache/solr/handler/BackupRestoreUtils.java index 10456e4984e..bd610200895 100644 --- a/solr/test-framework/src/java/org/apache/solr/handler/BackupRestoreUtils.java +++ b/solr/test-framework/src/java/org/apache/solr/handler/BackupRestoreUtils.java @@ -20,11 +20,14 @@ import java.io.IOException; import java.io.InputStream; import java.lang.invoke.MethodHandles; +import java.net.URI; +import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Random; +import org.apache.http.client.utils.URIBuilder; import org.apache.lucene.tests.util.TestUtil; import org.apache.solr.SolrTestCase; import org.apache.solr.client.solrj.SolrClient; @@ -74,42 +77,46 @@ public static void verifyDocs(int nDocs, SolrClient leaderClient, String collect public static void runCoreAdminCommand( String baseUrl, String coreName, String action, Map params) - throws IOException { - StringBuilder builder = new StringBuilder(); - builder.append(baseUrl); - builder.append("/admin/cores?action="); - builder.append(action); - builder.append("&core="); - builder.append(coreName); - for (Map.Entry p : params.entrySet()) { - builder.append("&"); - builder.append(p.getKey()); - builder.append("="); - builder.append(p.getValue()); + throws IOException, URISyntaxException { + final URI uri = new URI(baseUrl); + final var oldPath = uri.getPath() != null ? uri.getPath().substring(1) : ""; + final var newPath = "admin/cores"; + final var finalPath = oldPath.isEmpty() ? 
newPath : oldPath + "/" + newPath; + + final URIBuilder builder = + new URIBuilder(uri) + .setPath(finalPath) + .addParameter("action", action) + .addParameter("core", coreName); + + // Add additional parameters using loop + for (Map.Entry entry : params.entrySet()) { + builder.addParameter(entry.getKey(), entry.getValue()); } - String leaderUrl = builder.toString(); - executeHttpRequest(leaderUrl); + + executeHttpRequest(builder.build()); } public static void runReplicationHandlerCommand( String baseUrl, String coreName, String action, String repoName, String backupName) - throws IOException { - String leaderUrl = - baseUrl - + "/" - + coreName - + ReplicationHandler.PATH - + "?command=" - + action - + "&repository=" - + repoName - + "&name=" - + backupName; - executeHttpRequest(leaderUrl); + throws IOException, URISyntaxException { + final URI uri = new URI(baseUrl); + final var oldPath = uri.getPath() != null ? uri.getPath().substring(1) : ""; + final var newPath = coreName + ReplicationHandler.PATH; + final var finalPath = oldPath.isEmpty() ? newPath : oldPath + "/" + newPath; + + final URI finalURI = + new URIBuilder(uri) + .setPath(finalPath) + .addParameter("command", action) + .addParameter("repository", repoName) + .addParameter("name", backupName) + .build(); + executeHttpRequest(finalURI); } - static void executeHttpRequest(String requestUrl) throws IOException { - URL url = new URL(requestUrl); + private static void executeHttpRequest(URI uri) throws IOException { + URL url = uri.toURL(); try (InputStream stream = url.openStream()) { assert stream != null; } diff --git a/solr/test-framework/src/java/org/apache/solr/handler/TestRestoreCoreUtil.java b/solr/test-framework/src/java/org/apache/solr/handler/TestRestoreCoreUtil.java index de275b8ec94..cd52cf2e68b 100644 --- a/solr/test-framework/src/java/org/apache/solr/handler/TestRestoreCoreUtil.java +++ b/solr/test-framework/src/java/org/apache/solr/handler/TestRestoreCoreUtil.java @@ -18,6 +18,7 @@ import java.io.IOException; import java.io.InputStream; +import java.net.URI; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.regex.Matcher; @@ -35,7 +36,7 @@ public static boolean fetchRestoreStatus(String baseUrl, String coreName) throws + ReplicationHandler.CMD_RESTORE_STATUS; final Pattern pException = Pattern.compile("(.*?)"); - URL url = new URL(leaderUrl); + URL url = URI.create(leaderUrl).toURL(); try (InputStream stream = url.openStream()) { String response = new String(stream.readAllBytes(), StandardCharsets.UTF_8); Matcher matcher = pException.matcher(response); diff --git a/solr/test-framework/src/java/org/apache/solr/schema/SortableBinaryField.java b/solr/test-framework/src/java/org/apache/solr/schema/SortableBinaryField.java index b8969616d01..28edf725d14 100644 --- a/solr/test-framework/src/java/org/apache/solr/schema/SortableBinaryField.java +++ b/solr/test-framework/src/java/org/apache/solr/schema/SortableBinaryField.java @@ -36,6 +36,11 @@ public class SortableBinaryField extends BinaryField { protected void checkSupportsDocValues() { // we support DocValues } + @Override + protected boolean enableDocValuesByDefault() { + return true; + } + @Override public List createFields(SchemaField field, Object value) { if (field.hasDocValues()) { diff --git a/solr/test-framework/src/java/org/apache/solr/util/ExternalPaths.java b/solr/test-framework/src/java/org/apache/solr/util/ExternalPaths.java index 122b01829a2..8d21f9e3e84 100644 --- 
a/solr/test-framework/src/java/org/apache/solr/util/ExternalPaths.java +++ b/solr/test-framework/src/java/org/apache/solr/util/ExternalPaths.java @@ -17,6 +17,7 @@ package org.apache.solr.util; import java.io.File; +import java.net.URL; /** * Some tests need to reach outside the classpath to get certain resources (e.g. the example @@ -28,8 +29,8 @@ public class ExternalPaths { /** * The main directory path for the solr source being built if it can be determined. If it can not - * be determined -- possily because the current context is a client code base using hte test - * frameowrk -- then this variable will be null. + * be determined -- possibly because the current context is a client code base using the test + * framework -- then this variable will be null. * *

      Note that all other static paths available in this class are derived from the source home, * and if it is null, those paths will just be relative to 'null' and may not be meaningful. @@ -66,23 +67,24 @@ public class ExternalPaths { */ static String determineSourceHome() { try { - File file; - try { - file = new File("solr/conf"); - if (!file.exists()) { - file = new File(ExternalPaths.class.getClassLoader().getResource("solr/conf").toURI()); + File file = new File("solr/conf"); + if (!file.exists()) { + URL resourceUrl = ExternalPaths.class.getClassLoader().getResource("solr/conf"); + if (resourceUrl != null) { + file = new File(resourceUrl.toURI()); + } else { + // If there is no "solr/conf" in the classpath, fall back to searching from the current + // directory. + file = new File(System.getProperty("tests.src.home", ".")); } - } catch (Exception e) { - // If there is no "solr/conf" in the classpath, fall back to searching from the current - // directory. - file = new File(System.getProperty("tests.src.home", ".")); } + File base = file.getAbsoluteFile(); while (!(new File(base, "solr/CHANGES.txt").exists()) && null != base) { base = base.getParentFile(); } return (null == base) ? null : new File(base, "solr/").getAbsolutePath(); - } catch (RuntimeException e) { + } catch (Exception e) { // all bets are off return null; } diff --git a/solr/test-framework/src/test-files/solr/collection1/conf/schema.xml b/solr/test-framework/src/test-files/solr/collection1/conf/schema.xml index ec039ac0bf0..e65df97ed61 100644 --- a/solr/test-framework/src/test-files/solr/collection1/conf/schema.xml +++ b/solr/test-framework/src/test-files/solr/collection1/conf/schema.xml @@ -25,7 +25,7 @@ --> - + - - - - + + + + - - - - + + + + - - - - - + + + + + @@ -104,7 +104,7 @@ - + @@ -112,9 +112,9 @@ - - - + + + - + @@ -545,13 +545,13 @@ - + - + @@ -662,7 +662,7 @@ - + @@ -691,7 +691,7 @@ - + @@ -701,7 +701,7 @@ - + @@ -741,7 +741,7 @@ - + @@ -791,13 +791,13 @@ - + - + @@ -854,28 +854,28 @@ useDocValuesAsStored="false"/> - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + - - - - + + + + diff --git a/solr/test-framework/src/test-files/solr/collection1/conf/solrconfig.xml b/solr/test-framework/src/test-files/solr/collection1/conf/solrconfig.xml index 82dca6384d8..ea95918291c 100644 --- a/solr/test-framework/src/test-files/solr/collection1/conf/solrconfig.xml +++ b/solr/test-framework/src/test-files/solr/collection1/conf/solrconfig.xml @@ -38,7 +38,7 @@ - + 1000000 2000000 3000000 diff --git a/solr/test-framework/src/test-files/solr/configsets/minimal/conf/schema.xml b/solr/test-framework/src/test-files/solr/configsets/minimal/conf/schema.xml index 287d4fe0149..51d407dfc1d 100644 --- a/solr/test-framework/src/test-files/solr/configsets/minimal/conf/schema.xml +++ b/solr/test-framework/src/test-files/solr/configsets/minimal/conf/schema.xml @@ -15,7 +15,7 @@ See the License for the specific language governing permissions and limitations under the License. --> - + diff --git a/solr/webapp/web/partials/security.html b/solr/webapp/web/partials/security.html index 98305554d9e..47d17a0d542 100644 --- a/solr/webapp/web/partials/security.html +++ b/solr/webapp/web/partials/security.html @@ -31,7 +31,7 @@

       
       
      -        bin/solr auth enable -type basicAuth -prompt true -z {{zkHost}}
      +        bin/solr auth enable --type basicAuth --prompt true -z {{zkHost}}
       
             
      diff --git a/versions.lock b/versions.lock index ca387fcb073..cf77a4bcddf 100644 --- a/versions.lock +++ b/versions.lock @@ -2,18 +2,18 @@ biz.aQute.bnd:biz.aQute.bnd.annotation:6.4.1 (1 constraints: 0d051636) com.adobe.xmp:xmpcore:6.1.10 (1 constraints: fd0d5947) com.beust:jcommander:1.82 (2 constraints: 2b123714) -com.carrotsearch:hppc:0.9.1 (2 constraints: ad0fc9a6) +com.carrotsearch:hppc:0.10.0 (2 constraints: d40fecb0) com.carrotsearch.randomizedtesting:randomizedtesting-runner:2.8.1 (2 constraints: cf1501e2) com.cybozu.labs:langdetect:1.1-20120112 (1 constraints: 5c066d5e) com.epam:parso:2.0.14 (1 constraints: 8e0c750e) -com.fasterxml.jackson:jackson-bom:2.17.1 (12 constraints: aefcc272) -com.fasterxml.jackson.core:jackson-annotations:2.17.1 (10 constraints: f9c26e07) -com.fasterxml.jackson.core:jackson-core:2.17.1 (13 constraints: a0062708) -com.fasterxml.jackson.core:jackson-databind:2.17.1 (18 constraints: ed67acfe) -com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.17.1 (2 constraints: 641c9ff1) -com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.17.1 (1 constraints: bb0eb066) -com.fasterxml.jackson.module:jackson-module-jakarta-xmlbind-annotations:2.17.1 (2 constraints: ab2412e1) -com.fasterxml.woodstox:woodstox-core:6.6.2 (2 constraints: a223ea84) +com.fasterxml.jackson:jackson-bom:2.17.2 (12 constraints: bafcbe83) +com.fasterxml.jackson.core:jackson-annotations:2.17.2 (10 constraints: 03c3c711) +com.fasterxml.jackson.core:jackson-core:2.17.2 (13 constraints: ae067719) +com.fasterxml.jackson.core:jackson-databind:2.17.2 (18 constraints: fa67eb18) +com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.17.2 (2 constraints: 651ca0f1) +com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.17.2 (1 constraints: bc0eb166) +com.fasterxml.jackson.module:jackson-module-jakarta-xmlbind-annotations:2.17.2 (2 constraints: ac2451e1) +com.fasterxml.woodstox:woodstox-core:6.7.0 (2 constraints: a123c684) com.github.ben-manes.caffeine:caffeine:3.1.8 (1 constraints: 0e050536) com.github.jai-imageio:jai-imageio-core:1.4.0 (1 constraints: 5c0ced01) com.github.junrar:junrar:7.5.3 (1 constraints: 660c1102) @@ -22,48 +22,48 @@ com.github.spotbugs:spotbugs-annotations:4.8.0 (1 constraints: 0e051736) com.github.stephenc.jcip:jcip-annotations:1.0-1 (3 constraints: c71d2c87) com.github.virtuald:curvesapi:1.07 (1 constraints: 9e0ac7c0) com.google.android:annotations:4.1.1.4 (2 constraints: b918820a) -com.google.api:api-common:2.16.0 (5 constraints: 8944c9b7) -com.google.api:gax:2.33.0 (5 constraints: 554a2894) -com.google.api:gax-grpc:2.33.0 (1 constraints: 1c1006a6) -com.google.api:gax-httpjson:2.33.0 (2 constraints: d8204481) -com.google.api-client:google-api-client:2.2.0 (3 constraints: a132abfa) -com.google.api.grpc:gapic-google-cloud-storage-v2:2.27.0-alpha (2 constraints: ec228039) -com.google.api.grpc:grpc-google-cloud-storage-v2:2.27.0-alpha (2 constraints: ec228039) -com.google.api.grpc:proto-google-cloud-storage-v2:2.27.0-alpha (2 constraints: ec228039) -com.google.api.grpc:proto-google-common-protos:2.29.0 (6 constraints: 1d4e1284) -com.google.api.grpc:proto-google-iam-v1:1.19.0 (2 constraints: f11ecbcd) -com.google.apis:google-api-services-storage:v1-rev20230907-2.0.0 (2 constraints: e625234b) -com.google.auth:google-auth-library-credentials:1.19.0 (6 constraints: 5458e29b) -com.google.auth:google-auth-library-oauth2-http:1.19.0 (5 constraints: 934470bb) -com.google.auto.value:auto-value-annotations:1.10.2 (5 constraints: 434c5d52) 
-com.google.cloud:google-cloud-bom:0.204.0 (1 constraints: 68059940) -com.google.cloud:google-cloud-core:2.23.0 (3 constraints: 412fa654) -com.google.cloud:google-cloud-core-grpc:2.23.0 (1 constraints: 1b1002a6) -com.google.cloud:google-cloud-core-http:2.23.0 (1 constraints: 1b1002a6) -com.google.cloud:google-cloud-storage:2.27.0 (2 constraints: d71c8a27) -com.google.code.gson:gson:2.10.1 (7 constraints: 005f69b0) -com.google.errorprone:error_prone_annotations:2.23.0 (11 constraints: be86b428) -com.google.guava:failureaccess:1.0.1 (2 constraints: f9199e37) -com.google.guava:guava:32.1.3-jre (26 constraints: 567bee8c) +com.google.api:api-common:2.33.0 (5 constraints: 8444f8b5) +com.google.api:gax:2.50.0 (5 constraints: 504a5892) +com.google.api:gax-grpc:2.50.0 (1 constraints: 1b1005a6) +com.google.api:gax-httpjson:2.50.0 (2 constraints: d6201381) +com.google.api-client:google-api-client:2.6.0 (3 constraints: ad32fffc) +com.google.api.grpc:gapic-google-cloud-storage-v2:2.40.1-alpha (2 constraints: e4226438) +com.google.api.grpc:grpc-google-cloud-storage-v2:2.40.1-alpha (2 constraints: e4226438) +com.google.api.grpc:proto-google-cloud-storage-v2:2.40.1-alpha (2 constraints: e4226438) +com.google.api.grpc:proto-google-common-protos:2.41.0 (6 constraints: 184eab81) +com.google.api.grpc:proto-google-iam-v1:1.36.0 (2 constraints: ef1e9acd) +com.google.apis:google-api-services-storage:v1-rev20240621-2.0.0 (2 constraints: da256149) +com.google.auth:google-auth-library-credentials:1.23.0 (7 constraints: cb686112) +com.google.auth:google-auth-library-oauth2-http:1.23.0 (6 constraints: 0f558a85) +com.google.auto.value:auto-value-annotations:1.10.4 (6 constraints: b85aa377) +com.google.cloud:google-cloud-bom:0.224.0 (1 constraints: 6a05a140) +com.google.cloud:google-cloud-core:2.40.0 (3 constraints: 3e2f1a54) +com.google.cloud:google-cloud-core-grpc:2.40.0 (1 constraints: 1a1001a6) +com.google.cloud:google-cloud-core-http:2.40.0 (1 constraints: 1a1001a6) +com.google.cloud:google-cloud-storage:2.40.1 (2 constraints: cf1cc626) +com.google.code.gson:gson:2.11.0 (6 constraints: 0c550bc0) +com.google.errorprone:error_prone_annotations:2.28.0 (15 constraints: a5c51259) +com.google.guava:failureaccess:1.0.2 (2 constraints: fb19bf37) +com.google.guava:guava:33.1.0-jre (26 constraints: 0280374a) com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava (2 constraints: 4b35b0a0) -com.google.http-client:google-http-client:1.43.3 (11 constraints: 3fbf96b4) -com.google.http-client:google-http-client-apache-v2:1.43.3 (2 constraints: b820e775) -com.google.http-client:google-http-client-appengine:1.43.3 (2 constraints: de20d381) -com.google.http-client:google-http-client-gson:1.43.3 (7 constraints: 6270684c) -com.google.http-client:google-http-client-jackson2:1.43.3 (1 constraints: 1f1007a6) -com.google.j2objc:j2objc-annotations:2.8 (3 constraints: 132a2f47) -com.google.oauth-client:google-oauth-client:1.34.1 (2 constraints: b520b575) -com.google.protobuf:protobuf-java:3.25.1 (11 constraints: d9952bb8) -com.google.protobuf:protobuf-java-util:3.23.2 (3 constraints: 332b702b) +com.google.http-client:google-http-client:1.44.2 (11 constraints: 45bf29b8) +com.google.http-client:google-http-client-apache-v2:1.44.2 (2 constraints: b9201d76) +com.google.http-client:google-http-client-appengine:1.44.2 (2 constraints: de20d781) +com.google.http-client:google-http-client-gson:1.44.2 (7 constraints: 68702d4e) +com.google.http-client:google-http-client-jackson2:1.44.2 (1 constraints: 1f1009a6) 
+com.google.j2objc:j2objc-annotations:3.0.0 (4 constraints: 453c9e88) +com.google.oauth-client:google-oauth-client:1.36.0 (2 constraints: b720ee75) +com.google.protobuf:protobuf-java:3.25.3 (11 constraints: ed9520ce) +com.google.protobuf:protobuf-java-util:3.25.3 (3 constraints: 3c2b232d) com.google.re2j:re2j:1.7 (2 constraints: 3914d56f) com.googlecode.json-simple:json-simple:1.1.1 (2 constraints: 321c78d2) com.googlecode.juniversalchardet:juniversalchardet:1.0.3 (1 constraints: 5b0ce401) com.googlecode.plist:dd-plist:1.24 (1 constraints: 300c84f5) com.healthmarketscience.jackcess:jackcess:4.0.2 (1 constraints: 5d0cf201) com.healthmarketscience.jackcess:jackcess-encrypt:4.0.1 (1 constraints: 5c0cf101) -com.ibm.icu:icu4j:70.1 (1 constraints: a90f1784) +com.ibm.icu:icu4j:74.2 (1 constraints: ae0f2484) com.j256.simplemagic:simplemagic:1.17 (1 constraints: dd04f830) -com.jayway.jsonpath:json-path:2.9.0 (2 constraints: 6d12c02c) +com.jayway.jsonpath:json-path:2.9.0 (2 constraints: 6f12c62c) com.lmax:disruptor:3.4.4 (1 constraints: 0d050a36) com.mchange:c3p0:0.9.5.5 (1 constraints: c80c571b) com.mchange:mchange-commons-java:0.2.19 (1 constraints: 84075b75) @@ -77,10 +77,10 @@ com.sun.activation:jakarta.activation:1.2.2 (1 constraints: ba0dac35) com.sun.istack:istack-commons-runtime:3.0.12 (1 constraints: eb0d9a43) com.tdunning:t-digest:3.3 (1 constraints: aa04232c) com.zaxxer:SparseBitSet:1.2 (1 constraints: 0d081e75) -commons-cli:commons-cli:1.8.0 (1 constraints: 0b050836) -commons-codec:commons-codec:1.17.0 (12 constraints: 44a7edf4) +commons-cli:commons-cli:1.9.0 (1 constraints: 0c050b36) +commons-codec:commons-codec:1.17.1 (11 constraints: 5a953ac4) commons-collections:commons-collections:3.2.2 (1 constraints: 09050236) -commons-io:commons-io:2.15.1 (10 constraints: 4375f24a) +commons-io:commons-io:2.15.1 (10 constraints: 47759e4f) de.l3s.boilerpipe:boilerpipe:1.1.0 (1 constraints: 590ce401) edu.ucar:cdm:4.5.5 (3 constraints: 9d1abd7d) edu.ucar:grib:4.5.5 (1 constraints: 650c0402) @@ -88,63 +88,65 @@ edu.ucar:httpservices:4.5.5 (2 constraints: 8f122834) edu.ucar:netcdf4:4.5.5 (1 constraints: 650c0402) edu.ucar:udunits:4.5.5 (1 constraints: 2b06034e) edu.usc.ir:sentiment-analysis-parser:0.1 (1 constraints: fa0b50e9) -io.dropwizard.metrics:metrics-annotation:4.2.25 (1 constraints: 351072b0) -io.dropwizard.metrics:metrics-core:4.2.25 (5 constraints: 4c447c05) -io.dropwizard.metrics:metrics-graphite:4.2.25 (1 constraints: 3f05463b) -io.dropwizard.metrics:metrics-jetty10:4.2.25 (1 constraints: 3f05463b) -io.dropwizard.metrics:metrics-jmx:4.2.25 (1 constraints: 3f05463b) -io.dropwizard.metrics:metrics-jvm:4.2.25 (1 constraints: 3f05463b) -io.grpc:grpc-alts:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-api:1.61.1 (9 constraints: ae5c0da1) -io.grpc:grpc-auth:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-context:1.61.1 (4 constraints: 992a4f1b) -io.grpc:grpc-core:1.61.1 (4 constraints: 8629301e) -io.grpc:grpc-googleapis:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-grpclb:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-netty:1.61.1 (1 constraints: 3b05413b) -io.grpc:grpc-netty-shaded:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-protobuf:1.61.1 (2 constraints: 5b1508d7) -io.grpc:grpc-protobuf-lite:1.61.1 (2 constraints: 271a034e) -io.grpc:grpc-rls:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-services:1.61.1 (1 constraints: 211012a6) -io.grpc:grpc-stub:1.61.1 (2 constraints: 5b1508d7) -io.grpc:grpc-util:1.61.1 (4 constraints: 0222e69b) -io.grpc:grpc-xds:1.61.1 (1 constraints: 211012a6) 
-io.netty:netty-buffer:4.1.108.Final (10 constraints: 649b586c) -io.netty:netty-codec:4.1.108.Final (5 constraints: 3c466293) -io.netty:netty-codec-http:4.1.108.Final (3 constraints: d624f841) -io.netty:netty-codec-http2:4.1.108.Final (1 constraints: 0f0b42d5) -io.netty:netty-codec-socks:4.1.108.Final (1 constraints: 400fc77a) -io.netty:netty-common:4.1.108.Final (12 constraints: 34b5f5a7) -io.netty:netty-handler:4.1.108.Final (3 constraints: ef2b6ba5) -io.netty:netty-handler-proxy:4.1.108.Final (1 constraints: 0f0b42d5) -io.netty:netty-resolver:4.1.108.Final (2 constraints: b01a305e) +io.dropwizard.metrics:metrics-annotation:4.2.26 (1 constraints: 361073b0) +io.dropwizard.metrics:metrics-core:4.2.26 (5 constraints: 51443c07) +io.dropwizard.metrics:metrics-graphite:4.2.26 (1 constraints: 4005473b) +io.dropwizard.metrics:metrics-jetty10:4.2.26 (1 constraints: 4005473b) +io.dropwizard.metrics:metrics-jmx:4.2.26 (1 constraints: 4005473b) +io.dropwizard.metrics:metrics-jvm:4.2.26 (1 constraints: 4005473b) +io.grpc:grpc-alts:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-api:1.65.1 (8 constraints: 6951e68b) +io.grpc:grpc-auth:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-context:1.65.1 (6 constraints: 8f445188) +io.grpc:grpc-core:1.65.1 (3 constraints: 4321151f) +io.grpc:grpc-googleapis:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-grpclb:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-inprocess:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-netty:1.65.1 (1 constraints: 3f054d3b) +io.grpc:grpc-netty-shaded:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-protobuf:1.65.1 (2 constraints: 5d15c9d7) +io.grpc:grpc-protobuf-lite:1.65.1 (2 constraints: 291aca4d) +io.grpc:grpc-rls:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-services:1.65.1 (1 constraints: 1f100ba6) +io.grpc:grpc-stub:1.65.1 (2 constraints: 5d15c9d7) +io.grpc:grpc-util:1.65.1 (2 constraints: ec1876f9) +io.grpc:grpc-xds:1.65.1 (1 constraints: 1f100ba6) +io.netty:netty-buffer:4.1.112.Final (10 constraints: 329b3f3f) +io.netty:netty-codec:4.1.112.Final (5 constraints: 2346668a) +io.netty:netty-codec-http:4.1.112.Final (3 constraints: c724f93e) +io.netty:netty-codec-http2:4.1.112.Final (1 constraints: 0f0b42d5) +io.netty:netty-codec-socks:4.1.112.Final (1 constraints: 3b0fa57a) +io.netty:netty-common:4.1.112.Final (12 constraints: f8b40f68) +io.netty:netty-handler:4.1.112.Final (3 constraints: e52b7aa2) +io.netty:netty-handler-proxy:4.1.112.Final (1 constraints: 0f0b42d5) +io.netty:netty-resolver:4.1.112.Final (2 constraints: a61a1f5d) io.netty:netty-tcnative-boringssl-static:2.0.61.Final (1 constraints: d10fc38e) io.netty:netty-tcnative-classes:2.0.61.Final (1 constraints: d113ea5d) -io.netty:netty-transport:4.1.108.Final (9 constraints: 858d97de) -io.netty:netty-transport-classes-epoll:4.1.108.Final (1 constraints: dd12b130) -io.netty:netty-transport-native-epoll:4.1.108.Final (1 constraints: 0310df9d) -io.netty:netty-transport-native-unix-common:4.1.108.Final (4 constraints: fe3dc14b) +io.netty:netty-transport:4.1.112.Final (9 constraints: 588dbbb9) +io.netty:netty-transport-classes-epoll:4.1.112.Final (1 constraints: d8128f30) +io.netty:netty-transport-native-epoll:4.1.112.Final (1 constraints: 0310df9d) +io.netty:netty-transport-native-unix-common:4.1.112.Final (4 constraints: ef3d2c48) io.opencensus:opencensus-api:0.31.1 (5 constraints: 924d4692) io.opencensus:opencensus-contrib-http-util:0.31.1 (3 constraints: 7232a9fc) io.opencensus:opencensus-proto:0.2.0 (1 constraints: e60fd595) -io.opentelemetry:opentelemetry-api:1.35.0 (10 
constraints: 50b75bad) -io.opentelemetry:opentelemetry-api-events:1.35.0-alpha (2 constraints: 2d2f4a7f) -io.opentelemetry:opentelemetry-bom:1.35.0 (1 constraints: 3b05403b) -io.opentelemetry:opentelemetry-context:1.35.0 (2 constraints: 271fceb4) -io.opentelemetry:opentelemetry-exporter-common:1.35.0 (3 constraints: 4d3d08ac) -io.opentelemetry:opentelemetry-exporter-otlp:1.35.0 (1 constraints: 960ff183) -io.opentelemetry:opentelemetry-exporter-otlp-common:1.35.0 (2 constraints: 5223391c) -io.opentelemetry:opentelemetry-exporter-sender-okhttp:1.35.0 (2 constraints: 5223391c) -io.opentelemetry:opentelemetry-extension-incubator:1.35.0-alpha (4 constraints: 4c55c26c) -io.opentelemetry:opentelemetry-sdk:1.35.0 (4 constraints: 6956c4c2) -io.opentelemetry:opentelemetry-sdk-common:1.35.0 (6 constraints: 816c2d65) -io.opentelemetry:opentelemetry-sdk-extension-autoconfigure:1.35.0 (1 constraints: 960ff183) -io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi:1.35.0 (3 constraints: 9d3c0225) -io.opentelemetry:opentelemetry-sdk-logs:1.35.0 (3 constraints: eb32b3b2) -io.opentelemetry:opentelemetry-sdk-metrics:1.35.0 (3 constraints: eb32b3b2) -io.opentelemetry:opentelemetry-sdk-trace:1.35.0 (3 constraints: eb32b3b2) -io.perfmark:perfmark-api:0.26.0 (3 constraints: 21212b16) +io.opentelemetry:opentelemetry-api:1.40.0 (9 constraints: 36a23843) +io.opentelemetry:opentelemetry-api-incubator:1.40.0-alpha (5 constraints: b670fcef) +io.opentelemetry:opentelemetry-bom:1.40.0 (1 constraints: 3705353b) +io.opentelemetry:opentelemetry-context:1.40.0 (2 constraints: 1f1f08b4) +io.opentelemetry:opentelemetry-exporter-common:1.40.0 (3 constraints: 413dfba8) +io.opentelemetry:opentelemetry-exporter-otlp:1.40.0 (1 constraints: 920fe683) +io.opentelemetry:opentelemetry-exporter-otlp-common:1.40.0 (2 constraints: 4a234b1b) +io.opentelemetry:opentelemetry-exporter-sender-okhttp:1.40.0 (2 constraints: 4a234b1b) +io.opentelemetry:opentelemetry-sdk:1.40.0 (4 constraints: 5956d8bc) +io.opentelemetry:opentelemetry-sdk-common:1.40.0 (6 constraints: 696c1b59) +io.opentelemetry:opentelemetry-sdk-extension-autoconfigure:1.40.0 (1 constraints: 920fe683) +io.opentelemetry:opentelemetry-sdk-extension-autoconfigure-spi:1.40.0 (4 constraints: 13518fcb) +io.opentelemetry:opentelemetry-sdk-logs:1.40.0 (3 constraints: df325ab0) +io.opentelemetry:opentelemetry-sdk-metrics:1.40.0 (3 constraints: df325ab0) +io.opentelemetry:opentelemetry-sdk-trace:1.40.0 (3 constraints: df325ab0) +io.perfmark:perfmark-api:0.27.0 (3 constraints: 22216516) +io.prometheus:prometheus-metrics-exposition-formats:1.1.0 (1 constraints: 0405f335) +io.prometheus:prometheus-metrics-model:1.1.0 (2 constraints: 411b133b) io.prometheus:simpleclient:0.16.0 (3 constraints: 9d257513) io.prometheus:simpleclient_common:0.16.0 (1 constraints: 1a1139c0) io.prometheus:simpleclient_httpserver:0.16.0 (1 constraints: 3905353b) @@ -163,36 +165,35 @@ net.arnx:jsonic:1.2.7 (1 constraints: d00b47eb) net.java.dev.jna:jna:5.12.1 (1 constraints: 900c8e0e) net.sf.ehcache:ehcache-core:2.6.2 (1 constraints: 2706f94d) net.sf.jopt-simple:jopt-simple:5.0.4 (1 constraints: be0ad6cc) -net.sourceforge.argparse4j:argparse4j:0.9.0 (1 constraints: 0b050636) net.thisptr:jackson-jq:0.0.13 (1 constraints: 3605223b) org.antlr:antlr4-runtime:4.11.1 (1 constraints: f70fbd96) -org.apache.calcite:calcite-core:1.35.0 (1 constraints: 3b05403b) -org.apache.calcite:calcite-linq4j:1.35.0 (2 constraints: cb12e64b) -org.apache.calcite.avatica:avatica-core:1.23.0 (3 constraints: fb20cdcb) 
-org.apache.calcite.avatica:avatica-metrics:1.23.0 (1 constraints: 991049c4) +org.apache.calcite:calcite-core:1.37.0 (1 constraints: 3d05463b) +org.apache.calcite:calcite-linq4j:1.37.0 (2 constraints: cf12444c) +org.apache.calcite.avatica:avatica-core:1.25.0 (3 constraints: 0121ddcc) +org.apache.calcite.avatica:avatica-metrics:1.25.0 (1 constraints: 9b104fc4) org.apache.commons:commons-collections4:4.4 (3 constraints: 2a172a57) org.apache.commons:commons-compress:1.26.1 (3 constraints: 011cf2d2) -org.apache.commons:commons-configuration2:2.10.1 (1 constraints: 3605303b) +org.apache.commons:commons-configuration2:2.11.0 (1 constraints: 3605323b) org.apache.commons:commons-csv:1.9.0 (1 constraints: 610cfc01) org.apache.commons:commons-exec:1.4.0 (2 constraints: 031132cf) -org.apache.commons:commons-lang3:3.14.0 (6 constraints: 404e54e4) +org.apache.commons:commons-lang3:3.15.0 (6 constraints: cc4e807b) org.apache.commons:commons-math3:3.6.1 (5 constraints: 57322799) -org.apache.commons:commons-text:1.11.0 (1 constraints: da11b0f8) -org.apache.curator:curator-client:5.5.0 (2 constraints: e81468a3) -org.apache.curator:curator-framework:5.5.0 (2 constraints: 05144b75) -org.apache.curator:curator-recipes:5.5.0 (1 constraints: 0c051336) +org.apache.commons:commons-text:1.12.0 (2 constraints: 651f97e5) +org.apache.curator:curator-client:5.7.0 (2 constraints: ec14cea3) +org.apache.curator:curator-framework:5.7.0 (2 constraints: 0914ad75) +org.apache.curator:curator-recipes:5.7.0 (1 constraints: 0e051936) org.apache.hadoop:hadoop-annotations:3.3.6 (1 constraints: 0e050936) org.apache.hadoop:hadoop-auth:3.3.6 (1 constraints: 0e050936) org.apache.hadoop:hadoop-client-api:3.3.6 (3 constraints: 25280861) org.apache.hadoop:hadoop-client-runtime:3.3.6 (2 constraints: 6f17dc43) org.apache.hadoop:hadoop-common:3.3.6 (1 constraints: 0e050936) -org.apache.hadoop.thirdparty:hadoop-shaded-guava:1.1.1 (1 constraints: 0505f435) +org.apache.hadoop.thirdparty:hadoop-shaded-guava:1.2.0 (1 constraints: 0505f635) org.apache.httpcomponents:httpclient:4.5.14 (9 constraints: 62806342) org.apache.httpcomponents:httpcore:4.4.16 (8 constraints: 256d4617) org.apache.httpcomponents:httpmime:4.5.14 (3 constraints: eb1bfedc) -org.apache.httpcomponents.client5:httpclient5:5.1.3 (1 constraints: 6c10bcb3) -org.apache.httpcomponents.core5:httpcore5:5.1.3 (3 constraints: ff35b9e8) -org.apache.httpcomponents.core5:httpcore5-h2:5.1.3 (1 constraints: 3d13093c) +org.apache.httpcomponents.client5:httpclient5:5.2.1 (1 constraints: 6b10bdb3) +org.apache.httpcomponents.core5:httpcore5:5.2.3 (3 constraints: 40351d23) +org.apache.httpcomponents.core5:httpcore5-h2:5.2 (1 constraints: dd12c315) org.apache.james:apache-mime4j-core:0.8.4 (2 constraints: 981a0d67) org.apache.james:apache-mime4j-dom:0.8.4 (1 constraints: 630cf801) org.apache.kerby:kerb-core:1.0.1 (5 constraints: 683677dd) @@ -209,32 +210,32 @@ org.apache.logging.log4j:log4j-jul:2.21.0 (1 constraints: 3705363b) org.apache.logging.log4j:log4j-layout-template-json:2.21.0 (1 constraints: 3705363b) org.apache.logging.log4j:log4j-slf4j2-impl:2.21.0 (1 constraints: 3705363b) org.apache.logging.log4j:log4j-web:2.21.0 (1 constraints: 3705363b) -org.apache.lucene:lucene-analysis-common:9.10.0 (10 constraints: 96a0f715) -org.apache.lucene:lucene-analysis-icu:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-analysis-kuromoji:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-analysis-morfologik:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-analysis-nori:9.10.0 (1 
constraints: 3c05593b) -org.apache.lucene:lucene-analysis-opennlp:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-analysis-phonetic:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-analysis-smartcn:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-analysis-stempel:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-backward-codecs:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-classification:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-codecs:9.10.0 (3 constraints: 2626bd9c) -org.apache.lucene:lucene-core:9.10.0 (26 constraints: 8d94ec8e) -org.apache.lucene:lucene-expressions:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-grouping:9.10.0 (2 constraints: 3e16d907) -org.apache.lucene:lucene-highlighter:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-join:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-memory:9.10.0 (1 constraints: c60f6a93) -org.apache.lucene:lucene-misc:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-queries:9.10.0 (6 constraints: c25289f5) -org.apache.lucene:lucene-queryparser:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-sandbox:9.10.0 (1 constraints: fa0f1797) -org.apache.lucene:lucene-spatial-extras:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-spatial3d:9.10.0 (1 constraints: e91095ca) -org.apache.lucene:lucene-suggest:9.10.0 (1 constraints: 3c05593b) -org.apache.lucene:lucene-test-framework:9.10.0 (1 constraints: 3c05593b) +org.apache.lucene:lucene-analysis-common:9.11.1 (10 constraints: aaa06d27) +org.apache.lucene:lucene-analysis-icu:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-kuromoji:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-morfologik:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-nori:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-opennlp:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-phonetic:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-smartcn:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-analysis-stempel:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-backward-codecs:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-classification:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-codecs:9.11.1 (3 constraints: 2c26e99d) +org.apache.lucene:lucene-core:9.11.1 (26 constraints: c194eb01) +org.apache.lucene:lucene-expressions:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-grouping:9.11.1 (2 constraints: 42164308) +org.apache.lucene:lucene-highlighter:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-join:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-memory:9.11.1 (1 constraints: c80f6e93) +org.apache.lucene:lucene-misc:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-queries:9.11.1 (6 constraints: ce52d7fa) +org.apache.lucene:lucene-queryparser:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-sandbox:9.11.1 (1 constraints: fc0f1b97) +org.apache.lucene:lucene-spatial-extras:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-spatial3d:9.11.1 (1 constraints: eb1099ca) +org.apache.lucene:lucene-suggest:9.11.1 (1 constraints: 3e055d3b) +org.apache.lucene:lucene-test-framework:9.11.1 (1 constraints: 3e055d3b) org.apache.opennlp:opennlp-tools:1.9.4 (2 constraints: fc1dce6d) org.apache.pdfbox:fontbox:2.0.26 (1 constraints: 180b72d8) org.apache.pdfbox:jbig2-imageio:3.0.4 (1 constraints: 5e0cef01) @@ -257,7 
+258,7 @@ org.apache.tika:tika-core:1.28.5 (2 constraints: d8118f11) org.apache.tika:tika-parsers:1.28.5 (1 constraints: 42054a3b) org.apache.tomcat:annotations-api:6.0.53 (1 constraints: 40054e3b) org.apache.xmlbeans:xmlbeans:5.0.3 (2 constraints: 72173075) -org.apache.zookeeper:zookeeper:3.9.2 (2 constraints: 9e13a45f) +org.apache.zookeeper:zookeeper:3.9.2 (2 constraints: a013aa5f) org.apache.zookeeper:zookeeper-jute:3.9.2 (2 constraints: 9d12b123) org.apiguardian:apiguardian-api:1.1.2 (2 constraints: 601bd5a8) org.bitbucket.b_c:jose4j:0.9.6 (1 constraints: 11050c36) @@ -271,34 +272,34 @@ org.carrot2:morfologik-fsa:2.1.9 (1 constraints: db0d9c36) org.carrot2:morfologik-polish:2.1.9 (1 constraints: d312541e) org.carrot2:morfologik-stemming:2.1.9 (2 constraints: d81fb300) org.ccil.cowan.tagsoup:tagsoup:1.2.1 (1 constraints: 5b0ce801) -org.checkerframework:checker-qual:3.37.0 (5 constraints: 5c4694ee) -org.codehaus.janino:commons-compiler:3.1.9 (2 constraints: 3119a8ec) -org.codehaus.janino:janino:3.1.9 (1 constraints: 650dbd2c) +org.checkerframework:checker-qual:3.44.0 (5 constraints: 6c46e5ef) +org.codehaus.janino:commons-compiler:3.1.11 (2 constraints: 83195319) +org.codehaus.janino:janino:3.1.11 (1 constraints: 8e0d433a) org.codehaus.woodstox:stax2-api:4.2.2 (2 constraints: 38155daf) org.codelibs:jhighlight:1.1.0 (1 constraints: 590ce401) org.conscrypt:conscrypt-openjdk-uber:2.5.2 (1 constraints: ed0fea95) -org.eclipse.jetty:jetty-alpn-client:10.0.20 (3 constraints: e52ef55d) -org.eclipse.jetty:jetty-alpn-java-client:10.0.20 (2 constraints: 171b956d) -org.eclipse.jetty:jetty-alpn-java-server:10.0.20 (1 constraints: 65058e40) -org.eclipse.jetty:jetty-alpn-server:10.0.20 (2 constraints: ed163d48) -org.eclipse.jetty:jetty-client:10.0.20 (2 constraints: 171b956d) -org.eclipse.jetty:jetty-deploy:10.0.20 (1 constraints: 65058e40) -org.eclipse.jetty:jetty-http:10.0.20 (6 constraints: 664fd287) -org.eclipse.jetty:jetty-io:10.0.20 (9 constraints: 997d80d4) -org.eclipse.jetty:jetty-jmx:10.0.20 (1 constraints: 65058e40) -org.eclipse.jetty:jetty-rewrite:10.0.20 (1 constraints: 65058e40) -org.eclipse.jetty:jetty-security:10.0.20 (2 constraints: b4136d78) -org.eclipse.jetty:jetty-server:10.0.20 (7 constraints: ce6da919) -org.eclipse.jetty:jetty-servlet:10.0.20 (2 constraints: 2e139d60) -org.eclipse.jetty:jetty-servlets:10.0.20 (1 constraints: 65058e40) -org.eclipse.jetty:jetty-util:10.0.20 (9 constraints: 9981fa7f) -org.eclipse.jetty:jetty-webapp:10.0.20 (2 constraints: 3c132561) -org.eclipse.jetty:jetty-xml:10.0.20 (3 constraints: 0521c1b8) -org.eclipse.jetty.http2:http2-client:10.0.20 (2 constraints: 171b956d) -org.eclipse.jetty.http2:http2-common:10.0.20 (3 constraints: d324a55f) -org.eclipse.jetty.http2:http2-hpack:10.0.20 (2 constraints: 1a15a6df) -org.eclipse.jetty.http2:http2-http-client-transport:10.0.20 (1 constraints: 65058e40) -org.eclipse.jetty.http2:http2-server:10.0.20 (1 constraints: 65058e40) +org.eclipse.jetty:jetty-alpn-client:10.0.22 (3 constraints: eb2e095f) +org.eclipse.jetty:jetty-alpn-java-client:10.0.22 (2 constraints: 1b1b156e) +org.eclipse.jetty:jetty-alpn-java-server:10.0.22 (1 constraints: 67059040) +org.eclipse.jetty:jetty-alpn-server:10.0.22 (2 constraints: f116a748) +org.eclipse.jetty:jetty-client:10.0.22 (2 constraints: 1b1b156e) +org.eclipse.jetty:jetty-deploy:10.0.22 (1 constraints: 67059040) +org.eclipse.jetty:jetty-http:10.0.22 (6 constraints: 694ff186) +org.eclipse.jetty:jetty-io:10.0.22 (9 constraints: a27dfdd5) +org.eclipse.jetty:jetty-jmx:10.0.22 (1 
constraints: 67059040) +org.eclipse.jetty:jetty-rewrite:10.0.22 (1 constraints: 67059040) +org.eclipse.jetty:jetty-security:10.0.22 (2 constraints: b813c578) +org.eclipse.jetty:jetty-server:10.0.22 (7 constraints: d16d9a19) +org.eclipse.jetty:jetty-servlet:10.0.22 (2 constraints: 3213f360) +org.eclipse.jetty:jetty-servlets:10.0.22 (1 constraints: 67059040) +org.eclipse.jetty:jetty-util:10.0.22 (9 constraints: a081b682) +org.eclipse.jetty:jetty-webapp:10.0.22 (2 constraints: 40137b61) +org.eclipse.jetty:jetty-xml:10.0.22 (3 constraints: 0b21bdb9) +org.eclipse.jetty.http2:http2-client:10.0.22 (2 constraints: 1b1b156e) +org.eclipse.jetty.http2:http2-common:10.0.22 (3 constraints: d924c560) +org.eclipse.jetty.http2:http2-hpack:10.0.22 (2 constraints: 1e1508e0) +org.eclipse.jetty.http2:http2-http-client-transport:10.0.22 (1 constraints: 67059040) +org.eclipse.jetty.http2:http2-server:10.0.22 (1 constraints: 67059040) org.eclipse.jetty.toolchain:jetty-servlet-api:4.0.6 (4 constraints: 883053bf) org.gagravarr:vorbis-java-core:0.8 (1 constraints: 010c57e9) org.gagravarr:vorbis-java-tika:0.8 (1 constraints: 010c57e9) @@ -321,7 +322,7 @@ org.hamcrest:hamcrest-core:2.2 (1 constraints: cc05fe3f) org.immutables:value-annotations:2.10.1 (1 constraints: 3605303b) org.itadaki:bzip2:0.9.1 (2 constraints: bd0c4b2c) org.javassist:javassist:3.29.2-GA (1 constraints: 30112ef1) -org.jctools:jctools-core:4.0.1 (1 constraints: 07050036) +org.jctools:jctools-core:4.0.5 (1 constraints: 0b050436) org.jdom:jdom2:2.0.6.1 (1 constraints: be0c371b) org.jetbrains:annotations:13.0 (1 constraints: df0e795c) org.jetbrains.kotlin:kotlin-stdlib:1.9.10 (4 constraints: 5c405537) @@ -342,39 +343,47 @@ org.ow2.asm:asm-analysis:7.2 (1 constraints: e409d9a5) org.ow2.asm:asm-commons:7.2 (1 constraints: 6b0f7267) org.ow2.asm:asm-tree:7.2 (2 constraints: 2f14468c) org.quicktheories:quicktheories:0.26 (1 constraints: dc04f530) -org.reactivestreams:reactive-streams:1.0.4 (3 constraints: 3f2b77fd) +org.reactivestreams:reactive-streams:1.0.4 (4 constraints: 073bf033) org.semver4j:semver4j:5.3.0 (1 constraints: 0a050d36) -org.slf4j:jcl-over-slf4j:2.0.12 (3 constraints: f917acb5) -org.slf4j:jul-to-slf4j:2.0.12 (3 constraints: 5328fa5e) -org.slf4j:slf4j-api:2.0.12 (59 constraints: d5110b25) +org.slf4j:jcl-over-slf4j:2.0.13 (3 constraints: fa17e8b5) +org.slf4j:jul-to-slf4j:2.0.13 (3 constraints: 54285f5f) +org.slf4j:slf4j-api:2.0.13 (59 constraints: df11305d) org.tallison:isoparser:1.9.41.7 (1 constraints: fb0c5528) org.tallison:jmatio:1.5 (1 constraints: ff0b57e9) org.tallison:metadata-extractor:2.17.1.0 (1 constraints: f00c3b28) org.tallison.xmp:xmpcore-shaded:6.1.10 (1 constraints: 300e8d49) -org.threeten:threetenbp:1.6.8 (4 constraints: 2433e267) +org.threeten:threetenbp:1.6.9 (4 constraints: 2833ea68) org.tukaani:xz:1.9 (1 constraints: 030c5be9) org.xerial.snappy:snappy-java:1.1.10.5 (4 constraints: b538b6ff) -software.amazon.awssdk:annotations:2.20.155 (20 constraints: 812ebbc3) -software.amazon.awssdk:apache-client:2.20.155 (4 constraints: bd2a0d69) -software.amazon.awssdk:arns:2.20.155 (2 constraints: 79184eec) -software.amazon.awssdk:auth:2.20.155 (5 constraints: 28394c1f) -software.amazon.awssdk:aws-core:2.20.155 (6 constraints: 064fa793) -software.amazon.awssdk:aws-query-protocol:2.20.155 (3 constraints: df2a444b) -software.amazon.awssdk:aws-xml-protocol:2.20.155 (2 constraints: 79184eec) -software.amazon.awssdk:bom:2.20.155 (1 constraints: a1054146) -software.amazon.awssdk:crt-core:2.20.155 (1 constraints: f10b8505) 
-software.amazon.awssdk:endpoints-spi:2.20.155 (4 constraints: 6235827a) -software.amazon.awssdk:http-client-spi:2.20.155 (11 constraints: bfa70106) -software.amazon.awssdk:json-utils:2.20.155 (5 constraints: 5a408840) -software.amazon.awssdk:metrics-spi:2.20.155 (7 constraints: 7163033c) -software.amazon.awssdk:profiles:2.20.155 (8 constraints: c7626aa3) -software.amazon.awssdk:protocol-core:2.20.155 (5 constraints: 66497b88) -software.amazon.awssdk:regions:2.20.155 (7 constraints: 4e51e633) -software.amazon.awssdk:s3:2.20.155 (4 constraints: 3d30f50b) -software.amazon.awssdk:sdk-core:2.20.155 (10 constraints: 40899100) -software.amazon.awssdk:sts:2.20.155 (2 constraints: 29122233) -software.amazon.awssdk:third-party-jackson-core:2.20.155 (2 constraints: eb1bdad7) -software.amazon.awssdk:utils:2.20.155 (19 constraints: 991b9bef) +software.amazon.awssdk:annotations:2.26.19 (28 constraints: 48a79471) +software.amazon.awssdk:apache-client:2.26.19 (4 constraints: 112adae3) +software.amazon.awssdk:arns:2.26.19 (2 constraints: 231878c1) +software.amazon.awssdk:auth:2.26.19 (5 constraints: 51386041) +software.amazon.awssdk:aws-core:2.26.19 (6 constraints: 044e3321) +software.amazon.awssdk:aws-query-protocol:2.26.19 (3 constraints: 5e2a17de) +software.amazon.awssdk:aws-xml-protocol:2.26.19 (2 constraints: 231878c1) +software.amazon.awssdk:bom:2.26.19 (1 constraints: 7605bc40) +software.amazon.awssdk:checksums:2.26.19 (4 constraints: 903650f6) +software.amazon.awssdk:checksums-spi:2.26.19 (5 constraints: 75453fbd) +software.amazon.awssdk:crt-core:2.26.19 (1 constraints: c60ba1f9) +software.amazon.awssdk:endpoints-spi:2.26.19 (5 constraints: 13415275) +software.amazon.awssdk:http-auth:2.26.19 (5 constraints: ad3ff879) +software.amazon.awssdk:http-auth-aws:2.26.19 (5 constraints: a43f775d) +software.amazon.awssdk:http-auth-spi:2.26.19 (8 constraints: d86cf7dd) +software.amazon.awssdk:http-client-spi:2.26.19 (15 constraints: 86da972d) +software.amazon.awssdk:identity-spi:2.26.19 (9 constraints: 0f7d43c5) +software.amazon.awssdk:json-utils:2.26.19 (5 constraints: 833fb03f) +software.amazon.awssdk:metrics-spi:2.26.19 (7 constraints: 44622f16) +software.amazon.awssdk:profiles:2.26.19 (8 constraints: 6f611a4a) +software.amazon.awssdk:protocol-core:2.26.19 (5 constraints: 8f48485b) +software.amazon.awssdk:regions:2.26.19 (7 constraints: 4c50ddb1) +software.amazon.awssdk:retries:2.26.19 (3 constraints: d5284028) +software.amazon.awssdk:retries-spi:2.26.19 (6 constraints: 304feb5b) +software.amazon.awssdk:s3:2.26.19 (4 constraints: e72f16c1) +software.amazon.awssdk:sdk-core:2.26.19 (10 constraints: 92871ded) +software.amazon.awssdk:sts:2.26.19 (2 constraints: d3115915) +software.amazon.awssdk:third-party-jackson-core:2.26.19 (2 constraints: 951b3aa8) +software.amazon.awssdk:utils:2.26.19 (25 constraints: 737511fa) software.amazon.eventstream:eventstream:1.0.1 (2 constraints: 2e1ae62b) ua.net.nlp:morfologik-ukrainian-search:4.9.1 (1 constraints: d5126e1e) xerces:xercesImpl:2.12.2 (1 constraints: 8e0c7d0e) @@ -387,12 +396,12 @@ com.amazonaws:aws-java-sdk-core:1.12.501 (2 constraints: b01a32b3) com.amazonaws:aws-java-sdk-kms:1.12.501 (1 constraints: 060dbd37) com.amazonaws:aws-java-sdk-s3:1.12.501 (1 constraints: 10136f43) com.amazonaws:jmespath-java:1.12.501 (2 constraints: b01a32b3) -com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.17.1 (2 constraints: ab195913) -com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.17.1 (3 constraints: fd2e96b4) 
-com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.17.1 (4 constraints: 6e48db35) -com.fasterxml.jackson.module:jackson-module-kotlin:2.17.1 (2 constraints: aa1d6b60) -com.fasterxml.jackson.module:jackson-module-parameter-names:2.17.1 (2 constraints: 0d247f82) -com.google.cloud:google-cloud-nio:0.127.3 (1 constraints: 9a0e5e6c) +com.fasterxml.jackson.dataformat:jackson-dataformat-xml:2.17.2 (2 constraints: ac195a13) +com.fasterxml.jackson.datatype:jackson-datatype-jdk8:2.17.2 (3 constraints: fe2ed2b4) +com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.17.2 (4 constraints: 6f485c36) +com.fasterxml.jackson.module:jackson-module-kotlin:2.17.2 (2 constraints: ab1d9860) +com.fasterxml.jackson.module:jackson-module-parameter-names:2.17.2 (2 constraints: 0e24bb82) +com.google.cloud:google-cloud-nio:0.127.20 (1 constraints: c90e267b) com.nimbusds:content-type:2.2 (1 constraints: d80b68eb) com.nimbusds:lang-tag:1.7 (1 constraints: dc0b6aeb) com.nimbusds:nimbus-jose-jwt:9.30.2 (1 constraints: 700c4b10) @@ -401,7 +410,7 @@ com.squareup.okhttp3:mockwebserver:4.11.0 (1 constraints: ec0e9471) io.github.microutils:kotlin-logging:3.0.5 (1 constraints: be0ea162) io.github.microutils:kotlin-logging-jvm:3.0.5 (1 constraints: 810f8b7c) io.micrometer:micrometer-core:1.9.12 (1 constraints: fe162919) -io.opentelemetry:opentelemetry-sdk-testing:1.35.0 (1 constraints: 960ff183) +io.opentelemetry:opentelemetry-sdk-testing:1.40.0 (1 constraints: 920fe683) jakarta.servlet:jakarta.servlet-api:4.0.4 (1 constraints: 961568b9) jakarta.websocket:jakarta.websocket-api:1.1.2 (1 constraints: 92155ab9) javax.inject:javax.inject:1 (1 constraints: 7a0df617) @@ -419,9 +428,9 @@ org.apache.kerby:kerb-identity:1.0.1 (1 constraints: 5f0cb602) org.apache.kerby:kerb-server:1.0.1 (1 constraints: d10b65f2) org.apache.kerby:kerb-simplekdc:1.0.1 (1 constraints: dc0d7e3e) org.apache.tomcat.embed:tomcat-embed-el:9.0.76 (1 constraints: d41558cf) -org.bouncycastle:bcpkix-jdk18on:1.77 (1 constraints: e3040431) -org.bouncycastle:bcprov-jdk18on:1.77 (2 constraints: c51a825c) -org.bouncycastle:bcutil-jdk18on:1.77 (1 constraints: 620d2d29) +org.bouncycastle:bcpkix-jdk18on:1.78.1 (1 constraints: 43055a3b) +org.bouncycastle:bcprov-jdk18on:1.78.1 (2 constraints: 851b50bd) +org.bouncycastle:bcutil-jdk18on:1.78.1 (1 constraints: c20d8144) org.freemarker:freemarker:2.3.32 (1 constraints: f00e9371) org.hdrhistogram:HdrHistogram:2.1.12 (1 constraints: 520d2029) org.hsqldb:hsqldb:2.7.2 (1 constraints: 0d050c36) @@ -449,5 +458,5 @@ org.springframework.boot:spring-boot-starter-json:2.7.13 (1 constraints: d914679 org.springframework.boot:spring-boot-starter-logging:2.7.13 (1 constraints: 6e138c46) org.springframework.boot:spring-boot-starter-web:2.7.13 (1 constraints: f30a39d6) org.yaml:snakeyaml:1.30 (1 constraints: 0713d91f) -software.amazon.awssdk:url-connection-client:2.20.155 (2 constraints: 731f6e16) +software.amazon.awssdk:url-connection-client:2.26.19 (2 constraints: 481f08f7) software.amazon.ion:ion-java:1.0.2 (1 constraints: 720db831) diff --git a/versions.props b/versions.props index e48fed17b2c..901287c4ef4 100644 --- a/versions.props +++ b/versions.props @@ -3,13 +3,13 @@ biz.aQute.bnd:biz.aQute.bnd.annotation=6.4.1 com.adobe.testing:s3mock-junit4=2.17.0 com.carrotsearch.randomizedtesting:*=2.8.1 -com.carrotsearch:hppc=0.9.1 +com.carrotsearch:hppc=0.10.0 com.cybozu.labs:langdetect=1.1-20120112 -com.fasterxml.jackson:jackson-bom=2.17.1 +com.fasterxml.jackson:jackson-bom=2.17.2 com.github.ben-manes.caffeine:caffeine=3.1.8 
com.github.spotbugs:*=4.8.0 com.github.stephenc.jcip:jcip-annotations=1.0-1 -com.google.cloud:google-cloud-bom=0.204.0 +com.google.cloud:google-cloud-bom=0.224.0 com.google.errorprone:*=2.23.0 com.google.guava:guava=32.1.3-jre com.google.re2j:re2j=1.7 @@ -17,46 +17,45 @@ com.j256.simplemagic:simplemagic=1.17 com.jayway.jsonpath:json-path=2.9.0 com.lmax:disruptor=3.4.4 com.tdunning:t-digest=3.3 -commons-cli:commons-cli=1.8.0 -commons-codec:commons-codec=1.17.0 +commons-cli:commons-cli=1.9.0 +commons-codec:commons-codec=1.17.1 commons-collections:commons-collections=3.2.2 commons-io:commons-io=2.15.1 -io.dropwizard.metrics:*=4.2.25 -io.grpc:grpc-*=1.61.1 -io.netty:*=4.1.108.Final -io.opentelemetry:opentelemetry-bom=1.35.0 +io.dropwizard.metrics:*=4.2.26 +io.grpc:grpc-*=1.65.1 +io.netty:*=4.1.112.Final +io.opentelemetry:opentelemetry-bom=1.40.0 io.prometheus:*=0.16.0 io.swagger.core.v3:*=2.2.22 jakarta.ws.rs:jakarta.ws.rs-api=3.1.0 junit:junit=4.13.2 -net.sourceforge.argparse4j:argparse4j=0.9.0 net.thisptr:jackson-jq=0.0.13 no.nav.security:mock-oauth2-server=0.5.10 -org.apache.calcite.avatica:avatica-core=1.23.0 -org.apache.calcite:*=1.35.0 +org.apache.calcite.avatica:avatica-core=1.25.0 +org.apache.calcite:*=1.37.0 org.apache.commons:commons-collections4=4.4 org.apache.commons:commons-compress=1.26.1 -org.apache.commons:commons-configuration2=2.10.1 +org.apache.commons:commons-configuration2=2.11.0 org.apache.commons:commons-exec=1.4.0 -org.apache.commons:commons-lang3=3.14.0 +org.apache.commons:commons-lang3=3.15.0 org.apache.commons:commons-math3=3.6.1 -org.apache.curator:*=5.5.0 -org.apache.hadoop.thirdparty:*=1.1.1 +org.apache.curator:*=5.7.0 +org.apache.hadoop.thirdparty:*=1.2.0 org.apache.hadoop:*=3.3.6 org.apache.httpcomponents:httpclient=4.5.14 org.apache.httpcomponents:httpcore=4.4.16 org.apache.httpcomponents:httpmime=4.5.14 org.apache.kerby:*=1.0.1 org.apache.logging.log4j:*=2.21.0 -org.apache.lucene:*=9.10.0 +org.apache.lucene:*=9.11.1 org.apache.tika:*=1.28.5 org.apache.tomcat:annotations-api=6.0.53 org.apache.zookeeper:*=3.9.2 org.bitbucket.b_c:jose4j=0.9.6 -org.bouncycastle:bcpkix-jdk18on=1.77 +org.bouncycastle:bcpkix-jdk18on=1.78.1 org.carrot2:carrot2-core=4.5.1 org.codehaus.woodstox:stax2-api=4.2.2 -org.eclipse.jetty*:*=10.0.20 +org.eclipse.jetty*:*=10.0.22 org.eclipse.jetty.toolchain:jetty-servlet-api=4.0.6 org.glassfish.hk2*:*=3.0.5 org.glassfish.hk2.external:jakarta.inject=2.6.1 @@ -65,12 +64,12 @@ org.glassfish.jersey.containers:jersey-container-jetty-http=2.39.1 org.hamcrest:*=2.2 org.hsqldb:hsqldb=2.7.2 org.immutables:value-annotations=2.10.1 -org.jctools:jctools-core=4.0.1 +org.jctools:jctools-core=4.0.5 org.mockito:mockito*=5.12.0 org.openjdk.jmh:*=1.37 org.osgi:osgi.annotation=8.1.0 org.quicktheories:quicktheories=0.26 org.semver4j:semver4j=5.3.0 -org.slf4j:*=2.0.12 +org.slf4j:*=2.0.13 org.xerial.snappy:snappy-java=1.1.10.5 -software.amazon.awssdk:*=2.20.155 +software.amazon.awssdk:*=2.26.19