diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ffa2e33444..cf3e37c36e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @cwperks @DarshitChanpura @nibix @peternied @RyanL1997 @stephen-crawford @reta @willyborankin +* @cwperks @DarshitChanpura @derek-ho @nibix @peternied @RyanL1997 @reta @willyborankin diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b8cbcf8d67..12c24861f2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -81,7 +81,7 @@ jobs: working-directory: downloaded-artifacts - name: Upload Coverage with retry - uses: Wandalen/wretry.action@v3.5.0 + uses: Wandalen/wretry.action@v3.7.2 with: attempt_limit: 5 attempt_delay: 2000 @@ -118,7 +118,7 @@ jobs: arguments: | integrationTest -Dbuild.snapshot=false - - uses: alehechka/upload-tartifact@v2 + - uses: actions/upload-artifact@v4 if: always() with: name: integration-${{ matrix.platform }}-JDK${{ matrix.jdk }}-reports diff --git a/MAINTAINERS.md b/MAINTAINERS.md index be4a41dc03..8381795f5e 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -17,18 +17,20 @@ This document contains a list of maintainers in this repo. 
See [opensearch-proje | Darshit Chanpura | [DarshitChanpura](https://github.com/DarshitChanpura) | Amazon | | Peter Nied | [peternied](https://github.com/peternied) | Amazon | | Craig Perkins | [cwperks](https://github.com/cwperks) | Amazon | +| Derek Ho | [derek-ho](https://github.com/derek-ho) | Amazon | | Ryan Liang | [RyanL1997](https://github.com/RyanL1997) | Amazon | -| Stephen Crawford | [scrawfor99](https://github.com/stephen-crawford) | Amazon | | Andriy Redko | [reta](https://github.com/reta) | Aiven | | Andrey Pleskach | [willyborankin](https://github.com/willyborankin) | Aiven | | Nils Bandener | [nibix](https://github.com/nibix) | Eliatra | ## Emeritus -| Maintainer | GitHub ID | Affiliation | -|------------|-------------------------------------------|-------------| -| Dave Lago | [davidlago](https://github.com/davidlago) | Contributor | -| Chang Liu | [cliu123](https://github.com/cliu123) | Amazon | +| Maintainer | GitHub ID | Affiliation | +|------------------|---------------------------------------------------------|-------------| +| Dave Lago | [davidlago](https://github.com/davidlago) | Contributor | +| Chang Liu | [cliu123](https://github.com/cliu123) | Amazon | +| Stephen Crawford | [stephen-crawford](https://github.com/stephen-crawford) | Contributor | + ## Practices diff --git a/README.md b/README.md index 39de20fc20..2e550ac1ca 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ [![CI](https://github.com/opensearch-project/security/workflows/CI/badge.svg?branch=main)](https://github.com/opensearch-project/security/actions) [![](https://img.shields.io/github/issues/opensearch-project/security/untriaged?labelColor=red)](https://github.com/opensearch-project/security/issues?q=is%3Aissue+is%3Aopen+label%3A"untriaged") [![](https://img.shields.io/github/issues/opensearch-project/security/security%20vulnerability?labelColor=red)](https://github.com/opensearch-project/security/issues?q=is%3Aissue+is%3Aopen+label%3A"security%20vulnerability") 
[![](https://img.shields.io/github/issues/opensearch-project/security)](https://github.com/opensearch-project/security/issues) [![](https://img.shields.io/github/issues-pr/opensearch-project/security)](https://github.com/opensearch-project/security/pulls) -[![](https://img.shields.io/codecov/c/gh/opensearch-project/security)](https://app.codecov.io/gh/opensearch-project/security) [![](https://img.shields.io/github/issues/opensearch-project/security/v2.4.0)](https://github.com/opensearch-project/security/issues?q=is%3Aissue+is%3Aopen+label%3A"v2.4.0") [![](https://img.shields.io/github/issues/opensearch-project/security/v3.0.0)](https://github.com/opensearch-project/security/issues?q=is%3Aissue+is%3Aopen+label%3A"v3.0.0") +[![](https://img.shields.io/codecov/c/gh/opensearch-project/security)](https://app.codecov.io/gh/opensearch-project/security) [![](https://img.shields.io/github/issues/opensearch-project/security/v2.18.0)](https://github.com/opensearch-project/security/issues?q=is%3Aissue+is%3Aopen+label%3A"v2.18.0") [![](https://img.shields.io/github/issues/opensearch-project/security/v3.0.0)](https://github.com/opensearch-project/security/issues?q=is%3Aissue+is%3Aopen+label%3A"v3.0.0") [![Slack](https://img.shields.io/badge/Slack-4A154B?&logo=slack&logoColor=white)](https://opensearch.slack.com/archives/C051Y637FKK) @@ -83,6 +83,10 @@ Run tests against local cluster: ```bash ./gradlew integTestRemote -Dtests.rest.cluster=localhost:9200 -Dtests.cluster=localhost:9200 -Dtests.clustername=docker-cluster -Dsecurity=true -Dhttps=true -Duser=admin -Dpassword=admin -Dcommon_utils.version="2.2.0.0" ``` +OR +```bash +./scripts/integtest.sh +``` Note: To run against a remote cluster replace cluster-name and `localhost:9200` with the IPAddress:Port of that cluster. 
Build artifacts (zip, deb, rpm): diff --git a/build.gradle b/build.gradle index 0c62051d6e..ba70d1ff50 100644 --- a/build.gradle +++ b/build.gradle @@ -495,9 +495,10 @@ configurations { // For integrationTest force "org.apache.httpcomponents:httpclient:4.5.14" force "org.apache.httpcomponents:httpcore:4.4.16" - force "com.google.errorprone:error_prone_annotations:2.32.0" - force "org.checkerframework:checker-qual:3.47.0" - force "ch.qos.logback:logback-classic:1.5.8" + force "com.google.errorprone:error_prone_annotations:2.35.1" + force "org.checkerframework:checker-qual:3.48.2" + force "ch.qos.logback:logback-classic:1.5.12" + force "commons-io:commons-io:2.17.0" } } @@ -530,8 +531,7 @@ task integrationTest(type: Test) { if (System.getenv('CI_ENVIRONMENT') != 'resource-test' && System.getenv('CI_ENVIRONMENT') != null) { exclude '**/ResourceFocusedTests.class' } - // Only run with retries while in CI systems - if (System.getenv('CI_ENVIRONMENT') == 'normal') { + if (System.getenv('DISABLE_RETRY') != 'true') { retry { failOnPassedAfterRetry = false maxRetries = 2 @@ -563,6 +563,11 @@ task integrationTest(type: Test) { } } +tasks.named("integrationTest") { + minHeapSize = "512m" + maxHeapSize = "2g" +} + tasks.integrationTest.finalizedBy(jacocoTestReport) // report is always generated after integration tests run //run the integrationTest task before the check task @@ -580,9 +585,13 @@ dependencies { implementation 'commons-cli:commons-cli:1.9.0' implementation "org.bouncycastle:bcprov-jdk18on:${versions.bouncycastle}" implementation 'org.ldaptive:ldaptive:1.2.3' - implementation 'com.nimbusds:nimbus-jose-jwt:9.41.1' + implementation 'com.nimbusds:nimbus-jose-jwt:9.46' implementation 'com.rfksystems:blake2b:2.0.0' implementation 'com.password4j:password4j:1.8.2' + + // Action privileges: check tables and compact collections + implementation 'com.selectivem.collections:special-collections-complete:1.4.0' + //JWT implementation "io.jsonwebtoken:jjwt-api:${jjwt_version}" 
implementation "io.jsonwebtoken:jjwt-impl:${jjwt_version}" @@ -592,7 +601,7 @@ dependencies { implementation 'org.apache.commons:commons-collections4:4.4' //Password generation - implementation 'org.passay:passay:1.6.5' + implementation 'org.passay:passay:1.6.6' implementation "org.apache.kafka:kafka-clients:${kafka_version}" @@ -602,16 +611,16 @@ dependencies { runtimeOnly 'com.eclipsesource.minimal-json:minimal-json:0.9.5' runtimeOnly 'commons-codec:commons-codec:1.17.1' runtimeOnly 'org.cryptacular:cryptacular:1.2.7' - compileOnly 'com.google.errorprone:error_prone_annotations:2.32.0' + compileOnly 'com.google.errorprone:error_prone_annotations:2.35.1' runtimeOnly 'com.sun.istack:istack-commons-runtime:4.2.0' runtimeOnly 'jakarta.xml.bind:jakarta.xml.bind-api:4.0.2' - runtimeOnly 'org.ow2.asm:asm:9.7' + runtimeOnly 'org.ow2.asm:asm:9.7.1' testImplementation 'org.apache.camel:camel-xmlsecurity:3.22.2' //OpenSAML implementation 'net.shibboleth.utilities:java-support:8.4.2' - runtimeOnly "io.dropwizard.metrics:metrics-core:4.2.27" + runtimeOnly "io.dropwizard.metrics:metrics-core:4.2.28" implementation "com.onelogin:java-saml:${one_login_java_saml}" implementation "com.onelogin:java-saml-core:${one_login_java_saml}" implementation "org.opensaml:opensaml-core:${open_saml_version}" @@ -646,7 +655,7 @@ dependencies { runtimeOnly 'org.apache.ws.xmlschema:xmlschema-core:2.3.1' runtimeOnly 'org.apache.santuario:xmlsec:2.3.4' runtimeOnly "com.github.luben:zstd-jni:${versions.zstd}" - runtimeOnly 'org.checkerframework:checker-qual:3.47.0' + runtimeOnly 'org.checkerframework:checker-qual:3.48.2' runtimeOnly "org.bouncycastle:bcpkix-jdk18on:${versions.bouncycastle}" runtimeOnly 'org.scala-lang.modules:scala-java8-compat_3:1.0.2' @@ -679,11 +688,14 @@ dependencies { testImplementation 'commons-validator:commons-validator:1.9.0' testImplementation 'org.springframework.kafka:spring-kafka-test:2.9.13' testImplementation "org.springframework:spring-beans:${spring_version}" - 
testImplementation 'org.junit.jupiter:junit-jupiter:5.11.1' - testImplementation 'org.junit.jupiter:junit-jupiter-api:5.11.1' + testImplementation 'org.junit.jupiter:junit-jupiter:5.11.3' + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.11.3' testImplementation('org.awaitility:awaitility:4.2.2') { exclude(group: 'org.hamcrest', module: 'hamcrest') } + testImplementation "org.bouncycastle:bcpkix-jdk18on:${versions.bouncycastle}" + testImplementation "org.bouncycastle:bcutil-jdk18on:${versions.bouncycastle}" + // Only osx-x86_64, osx-aarch_64, linux-x86_64, linux-aarch_64, windows-x86_64 are available if (osdetector.classifier in ["osx-x86_64", "osx-aarch_64", "linux-x86_64", "linux-aarch_64", "windows-x86_64"]) { testImplementation "io.netty:netty-tcnative-classes:2.0.61.Final" @@ -734,9 +746,10 @@ dependencies { integrationTestImplementation "org.apache.httpcomponents:fluent-hc:4.5.14" integrationTestImplementation "org.apache.httpcomponents:httpcore:4.4.16" integrationTestImplementation "org.apache.httpcomponents:httpasyncclient:4.1.5" + integrationTestImplementation "org.mockito:mockito-core:5.14.2" //spotless - implementation('com.google.googlejavaformat:google-java-format:1.23.0') { + implementation('com.google.googlejavaformat:google-java-format:1.24.0') { exclude group: 'com.google.guava' } } diff --git a/bwc-test/src/test/java/org/opensearch/security/bwc/SecurityBackwardsCompatibilityIT.java b/bwc-test/src/test/java/org/opensearch/security/bwc/SecurityBackwardsCompatibilityIT.java index 6767a43ddd..89000c0816 100644 --- a/bwc-test/src/test/java/org/opensearch/security/bwc/SecurityBackwardsCompatibilityIT.java +++ b/bwc-test/src/test/java/org/opensearch/security/bwc/SecurityBackwardsCompatibilityIT.java @@ -27,6 +27,7 @@ import org.apache.hc.client5.http.ssl.NoopHostnameVerifier; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.io.entity.StringEntity; import 
org.apache.hc.core5.http.message.BasicHeader; import org.apache.hc.core5.http.nio.ssl.TlsStrategy; import org.apache.hc.core5.reactor.ssl.TlsDetails; @@ -44,9 +45,11 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.io.IOUtils; +import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.security.bwc.helper.RestHelper; import org.opensearch.test.rest.OpenSearchRestTestCase; +import static org.apache.hc.core5.http.ContentType.APPLICATION_NDJSON; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; @@ -239,15 +242,21 @@ private void ingestData(String index) throws IOException { } }); bulkRequestBody.append(objectMapper.writeValueAsString(indexRequest) + "\n"); - bulkRequestBody.append(objectMapper.writeValueAsString(Song.randomSong().asJson()) + "\n"); + bulkRequestBody.append(Song.randomSong().asJson() + "\n"); } List responses = RestHelper.requestAgainstAllNodes( testUserRestClient, "POST", "_bulk?refresh=wait_for", - RestHelper.toHttpEntity(bulkRequestBody.toString()) + new StringEntity(bulkRequestBody.toString(), APPLICATION_NDJSON) ); responses.forEach(r -> assertThat(r.getStatusLine().getStatusCode(), is(200))); + for (Response response : responses) { + Map responseMap = responseAsMap(response); + List itemResults = (List) XContentMapValues.extractValue(responseMap, "items", "index", "result"); + assertTrue("More than 0 response items", itemResults.size() > 0); + assertTrue("All results are 'created': " + itemResults, itemResults.stream().allMatch(i -> i.equals("created"))); + } } } @@ -266,6 +275,25 @@ private void searchMatchAll(String index) throws IOException { RestHelper.toHttpEntity(matchAllQuery) ); responses.forEach(r -> assertThat(r.getStatusLine().getStatusCode(), is(200))); + + for (Response response : responses) { + Map responseMap = 
responseAsMap(response); + @SuppressWarnings("unchecked") + List> sourceDocs = (List>) XContentMapValues.extractValue(responseMap, "hits", "hits", "_source"); + + for (Map sourceDoc : sourceDocs) { + assertNull("response doc should not contain field forbidden by FLS: " + responseMap, sourceDoc.get(Song.FIELD_LYRICS)); + assertNotNull( + "response doc should contain field not forbidden by FLS: " + responseMap, + sourceDoc.get(Song.FIELD_ARTIST) + ); + assertEquals( + "response doc should always have genre rock: " + responseMap, + Song.GENRE_ROCK, + sourceDoc.get(Song.FIELD_GENRE) + ); + } + } } } diff --git a/bwc-test/src/test/java/org/opensearch/security/bwc/Song.java b/bwc-test/src/test/java/org/opensearch/security/bwc/Song.java index f60d5f0fcb..ff7b8d9f87 100644 --- a/bwc-test/src/test/java/org/opensearch/security/bwc/Song.java +++ b/bwc-test/src/test/java/org/opensearch/security/bwc/Song.java @@ -52,6 +52,8 @@ public class Song { public static final String GENRE_JAZZ = "jazz"; public static final String GENRE_BLUES = "blues"; + public static final String[] GENRES = new String[] { GENRE_BLUES, GENRE_JAZZ, GENRE_ROCK }; + public static final String QUERY_TITLE_NEXT_SONG = FIELD_TITLE + ":" + "\"" + TITLE_NEXT_SONG + "\""; public static final String QUERY_TITLE_POISON = FIELD_TITLE + ":" + TITLE_POISON; public static final String QUERY_TITLE_MAGNUM_OPUS = FIELD_TITLE + ":" + TITLE_MAGNUM_OPUS; @@ -112,7 +114,11 @@ public static Song randomSong() { UUID.randomUUID().toString(), UUID.randomUUID().toString(), Randomness.get().nextInt(5), - UUID.randomUUID().toString() + randomGenre() ); } + + static String randomGenre() { + return GENRES[Randomness.get().nextInt(GENRES.length)]; + } } diff --git a/bwc-test/src/test/resources/security/sample.pem b/bwc-test/src/test/resources/security/sample.pem index 7ba92534e4..b690a603da 100644 --- a/bwc-test/src/test/resources/security/sample.pem +++ b/bwc-test/src/test/resources/security/sample.pem @@ -1,28 +1,25 @@ -----BEGIN 
CERTIFICATE----- -MIIEyTCCA7GgAwIBAgIGAWLrc1O2MA0GCSqGSIb3DQEBCwUAMIGPMRMwEQYKCZIm -iZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQ -RXhhbXBsZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290 -IENBMSEwHwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0EwHhcNMTgwNDIy -MDM0MzQ3WhcNMjgwNDE5MDM0MzQ3WjBeMRIwEAYKCZImiZPyLGQBGRYCZGUxDTAL -BgNVBAcMBHRlc3QxDTALBgNVBAoMBG5vZGUxDTALBgNVBAsMBG5vZGUxGzAZBgNV -BAMMEm5vZGUtMC5leGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBAJa+f476vLB+AwK53biYByUwN+40D8jMIovGXm6wgT8+9Sbs899dDXgt -9CE1Beo65oP1+JUz4c7UHMrCY3ePiDt4cidHVzEQ2g0YoVrQWv0RedS/yx/DKhs8 -Pw1O715oftP53p/2ijD5DifFv1eKfkhFH+lwny/vMSNxellpl6NxJTiJVnQ9HYOL -gf2t971ITJHnAuuxUF48HcuNovW4rhtkXef8kaAN7cE3LU+A9T474ULNCKkEFPIl -ZAKN3iJNFdVsxrTU+CUBHzk73Do1cCkEvJZ0ZFjp0Z3y8wLY/gqWGfGVyA9l2CUq -eIZNf55PNPtGzOrvvONiui48vBKH1LsCAwEAAaOCAVkwggFVMIG8BgNVHSMEgbQw -gbGAFJI1DOAPHitF9k0583tfouYSl0BzoYGVpIGSMIGPMRMwEQYKCZImiZPyLGQB -GRYDY29tMRcwFQYKCZImiZPyLGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBs -ZSBDb20gSW5jLjEhMB8GA1UECwwYRXhhbXBsZSBDb20gSW5jLiBSb290IENBMSEw -HwYDVQQDDBhFeGFtcGxlIENvbSBJbmMuIFJvb3QgQ0GCAQEwHQYDVR0OBBYEFKyv -78ZmFjVKM9g7pMConYH7FVBHMAwGA1UdEwEB/wQCMAAwDgYDVR0PAQH/BAQDAgXg -MCAGA1UdJQEB/wQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjA1BgNVHREELjAsiAUq -AwQFBYISbm9kZS0wLmV4YW1wbGUuY29tgglsb2NhbGhvc3SHBH8AAAEwDQYJKoZI -hvcNAQELBQADggEBAIOKuyXsFfGv1hI/Lkpd/73QNqjqJdxQclX57GOMWNbOM5H0 -5/9AOIZ5JQsWULNKN77aHjLRr4owq2jGbpc/Z6kAd+eiatkcpnbtbGrhKpOtoEZy -8KuslwkeixpzLDNISSbkeLpXz4xJI1ETMN/VG8ZZP1bjzlHziHHDu0JNZ6TnNzKr -XzCGMCohFfem8vnKNnKUneMQMvXd3rzUaAgvtf7Hc2LTBlf4fZzZF1EkwdSXhaMA -1lkfHiqOBxtgeDLxCHESZ2fqgVqsWX+t3qHQfivcPW6txtDyrFPRdJOGhiMGzT/t -e/9kkAtQRgpTb3skYdIOOUOV0WGQ60kJlFhAzIs= +MIIEPDCCAySgAwIBAgIUaYSlET3nzsotWTrWueVPPh10yLYwDQYJKoZIhvcNAQEL +BQAwgY8xEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSEwHwYDVQQLDBhFeGFtcGxl +IENvbSBJbmMuIFJvb3QgQ0ExITAfBgNVBAMMGEV4YW1wbGUgQ29tIEluYy4gUm9v 
+dCBDQTAeFw0yNDAyMjAxNzAzMjVaFw0zNDAyMTcxNzAzMjVaMFcxCzAJBgNVBAYT +AmRlMQ0wCwYDVQQHDAR0ZXN0MQ0wCwYDVQQKDARub2RlMQ0wCwYDVQQLDARub2Rl +MRswGQYDVQQDDBJub2RlLTAuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCm93kXteDQHMAvbUPNPW5pyRHKDD42XGWSgq0k1D29C/Ud +yL21HLzTJa49ZU2ldIkSKs9JqbkHdyK0o8MO6L8dotLoYbxDWbJFW8bp1w6tDTU0 +HGkn47XVu3EwbfrTENg3jFu+Oem6a/501SzITzJWtS0cn2dIFOBimTVpT/4Zv5qr +XA6Cp4biOmoTYWhi/qQl8d0IaADiqoZ1MvZbZ6x76qTrRAbg+UWkpTEXoH1xTc8n +dibR7+HP6OTqCKvo1NhE8uP4pY+fWd6b6l+KLo3IKpfTbAIJXIO+M67FLtWKtttD +ao94B069skzKk6FPgW/OZh6PRCD0oxOavV+ld2SjAgMBAAGjgcYwgcMwRwYDVR0R +BEAwPogFKgMEBQWCEm5vZGUtMC5leGFtcGxlLmNvbYIJbG9jYWxob3N0hxAAAAAA +AAAAAAAAAAAAAAABhwR/AAABMAsGA1UdDwQEAwIF4DAdBgNVHSUEFjAUBggrBgEF +BQcDAQYIKwYBBQUHAwIwDAYDVR0TAQH/BAIwADAdBgNVHQ4EFgQU0/qDQaY10jIo +wCjLUpz/HfQXyt8wHwYDVR0jBBgwFoAUF4ffoFrrZhKn1dD4uhJFPLcrAJwwDQYJ +KoZIhvcNAQELBQADggEBAGbij5WyF0dKhQodQfTiFDb73ygU6IyeJkFSnxF67gDz +pQJZKFvXuVBa3cGP5e7Qp3TK50N+blXGH0xXeIV9lXeYUk4hVfBlp9LclZGX8tGi +7Xa2enMvIt5q/Yg3Hh755ZxnDYxCoGkNOXUmnMusKstE0YzvZ5Gv6fcRKFBUgZLh +hUBqIEAYly1EqH/y45APiRt3Nor1yF6zEI4TnL0yNrHw6LyQkUNCHIGMJLfnJQ9L +camMGIXOx60kXNMTigF9oXXwixWAnDM9y3QT8QXA7hej/4zkbO+vIeV/7lGUdkyg +PAi92EvyxmsliEMyMR0VINl8emyobvfwa7oMeWMR+hg= -----END CERTIFICATE----- diff --git a/bwc-test/src/test/resources/security/test-kirk.jks b/bwc-test/src/test/resources/security/test-kirk.jks index 174dbda656..6c8c5ef77e 100644 Binary files a/bwc-test/src/test/resources/security/test-kirk.jks and b/bwc-test/src/test/resources/security/test-kirk.jks differ diff --git a/checkstyle/checkstyle.xml b/checkstyle/checkstyle.xml index 04a36c49c1..a9c1a8f765 100644 --- a/checkstyle/checkstyle.xml +++ b/checkstyle/checkstyle.xml @@ -43,6 +43,13 @@ + + + + + + + diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 2b189974c2..fb602ee2af 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,7 +1,7 @@ 
distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionSha256Sum=5b9c5eb3f9fc2c94abaea57d90bd78747ca117ddbbf96c859d3741181a12bf2a -distributionUrl=https\://services.gradle.org/distributions/gradle-8.10-bin.zip +distributionSha256Sum=31c55713e40233a8303827ceb42ca48a47267a0ad4bab9177123121e71524c26 +distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME diff --git a/release-notes/opensearch-security.release-notes-2.18.0.0.md b/release-notes/opensearch-security.release-notes-2.18.0.0.md new file mode 100644 index 0000000000..b6faf32235 --- /dev/null +++ b/release-notes/opensearch-security.release-notes-2.18.0.0.md @@ -0,0 +1,48 @@ +## Version 2.18.0 Release Notes + +Compatible with OpenSearch and OpenSearch Dashboards version 2.18.0 + +### Enhancements +* Improve error message when a node with an incorrectly configured certificate attempts to connect ([#4819](https://github.com/opensearch-project/security/pull/4819)) +* Support datastreams as an AuditLog Sink ([#4756](https://github.com/opensearch-project/security/pull/4756)) +* Auto-convert V6 configuration instances into V7 configuration instances (for OpenSearch 2.x only) ([#4753](https://github.com/opensearch-project/security/pull/4753)) +* Add can trip circuit breaker override ([#4779](https://github.com/opensearch-project/security/pull/4779)) +* Adding index permissions for remote index in AD ([#4721](https://github.com/opensearch-project/security/pull/4721)) +* Fix env var password hashing for PBKDF2 ([#4778](https://github.com/opensearch-project/security/pull/4778)) +* Add ensureCustomSerialization to ensure that headers are serialized correctly with multiple transport hops ([#4741](https://github.com/opensearch-project/security/pull/4741)) + +### Bug Fixes +* Handle non-flat yaml settings for demo configuration detection 
([#4798](https://github.com/opensearch-project/security/pull/4798)) +* Fix bug where admin can read system index ([#4775](https://github.com/opensearch-project/security/pull/4775)) +* Ensure that dual mode enabled flag from cluster settings can get propagated to core ([#4830](https://github.com/opensearch-project/security/pull/4830)) +* Remove failed login attempt for saml authenticator ([#4770](https://github.com/opensearch-project/security/pull/4770)) +* Fix issue in HashingStoredFieldVisitor with stored fields ([#4827](https://github.com/opensearch-project/security/pull/4827)) +* Fix issue with Get mappings on a Closed index ([#4777](https://github.com/opensearch-project/security/pull/4777)) +* changing comments permission for alerting_ack_alerts role ([#4723](https://github.com/opensearch-project/security/pull/4723)) +* Fixed use of rolesMappingConfiguration in InternalUsersApiActionValidationTest ([#4754](https://github.com/opensearch-project/security/pull/4754)) +* Use evaluateSslExceptionHandler() when constructing OpenSearchSecureSettingsFactory ([#4726](https://github.com/opensearch-project/security/pull/4726)) + +### Maintenance +* Bump gradle to 8.10.2 ([#4829](https://github.com/opensearch-project/security/pull/4829)) +* Bump ch.qos.logback:logback-classic from 1.5.8 to 1.5.11 ([#4807](https://github.com/opensearch-project/security/pull/4807)) ([#4825](https://github.com/opensearch-project/security/pull/4825)) +* Bump org.passay:passay from 1.6.5 to 1.6.6 ([#4824](https://github.com/opensearch-project/security/pull/4824)) +* Bump org.junit.jupiter:junit-jupiter from 5.11.0 to 5.11.2 ([#4767](https://github.com/opensearch-project/security/pull/4767)) ([#4811](https://github.com/opensearch-project/security/pull/4811)) +* Bump io.dropwizard.metrics:metrics-core from 4.2.27 to 4.2.28 ([#4789](https://github.com/opensearch-project/security/pull/4789)) +* Bump com.nimbusds:nimbus-jose-jwt from 9.40 to 9.41.2 
([#4737](https://github.com/opensearch-project/security/pull/4737)) ([#4787](https://github.com/opensearch-project/security/pull/4787)) +* Bump org.ow2.asm:asm from 9.7 to 9.7.1 ([#4788](https://github.com/opensearch-project/security/pull/4788)) +* Bump com.google.googlejavaformat:google-java-format from 1.23.0 to 1.24.0 ([#4786](https://github.com/opensearch-project/security/pull/4786)) +* Bump org.xerial.snappy:snappy-java from 1.1.10.6 to 1.1.10.7 ([#4738](https://github.com/opensearch-project/security/pull/4738)) +* Bump org.gradle.test-retry from 1.5.10 to 1.6.0 ([#4736](https://github.com/opensearch-project/security/pull/4736)) +* Moves @cliu123 to emeritus status ([#4667](https://github.com/opensearch-project/security/pull/4667)) +* Add Derek Ho (github: derek-ho) as a maintainer ([#4796](https://github.com/opensearch-project/security/pull/4796)) +* Add deprecation warning for GET/POST/PUT cache ([#4776](https://github.com/opensearch-project/security/pull/4776)) +* Fix for: CVE-2024-47554 ([#4792](https://github.com/opensearch-project/security/pull/4792)) +* Move Stephen to emeritus ([#4804](https://github.com/opensearch-project/security/pull/4804)) +* Undeprecate securityadmin script ([#4768](https://github.com/opensearch-project/security/pull/4768)) +* Bump commons-io:commons-io from 2.16.1 to 2.17.0 ([#4750](https://github.com/opensearch-project/security/pull/4750)) +* Bump org.scala-lang:scala-library from 2.13.14 to 2.13.15 ([#4749](https://github.com/opensearch-project/security/pull/4749)) +* org.checkerframework:checker-qual and ch.qos.logback:logback-classic to new versions ([#4717](https://github.com/opensearch-project/security/pull/4717)) +* Add isActionPaginated to DelegatingRestHandler ([#4765](https://github.com/opensearch-project/security/pull/4765)) +* Refactor ASN1 call ([#4740](https://github.com/opensearch-project/security/pull/4740)) +* Fix 'integTest' not called with test workflows during release 
([#4815](https://github.com/opensearch-project/security/pull/4815)) +* Fixed bulk index requests in BWC tests and hardened assertions ([#4831](https://github.com/opensearch-project/security/pull/4831)) diff --git a/scripts/integtest.sh b/scripts/integtest.sh new file mode 100755 index 0000000000..4bdac1544b --- /dev/null +++ b/scripts/integtest.sh @@ -0,0 +1,107 @@ +#!/bin/bash + +set -e + +function usage() { + echo "" + echo "This script is used to run integration tests for plugin installed on a remote OpenSearch/Dashboards cluster." + echo "--------------------------------------------------------------------------" + echo "Usage: $0 [args]" + echo "" + echo "Required arguments:" + echo "None" + echo "" + echo "Optional arguments:" + echo -e "-b BIND_ADDRESS\t, defaults to localhost | 127.0.0.1, can be changed to any IP or domain name for the cluster location." + echo -e "-p BIND_PORT\t, defaults to 9200, can be changed to any port for the cluster location." + echo -e "-s SECURITY_ENABLED\t(true | false), defaults to true. Specify the OpenSearch/Dashboards have security enabled or not." + echo -e "-c CREDENTIAL\t(usename:password), no defaults, effective when SECURITY_ENABLED=true." + echo -e "-h\tPrint this message." + echo -e "-v OPENSEARCH_VERSION\t, no defaults" + echo -e "-n SNAPSHOT\t, defaults to false" + echo -e "-m CLUSTER_NAME\t, defaults to docker-cluster" + echo "--------------------------------------------------------------------------" +} + +while getopts ":h:b:p:s:c:v:n:t:m:u:" arg; do + case $arg in + h) + usage + exit 1 + ;; + b) + BIND_ADDRESS=$OPTARG + ;; + p) + BIND_PORT=$OPTARG + ;; + t) + TRANSPORT_PORT=$OPTARG + ;; + s) + SECURITY_ENABLED=$OPTARG + ;; + c) + CREDENTIAL=$OPTARG + ;; + m) + CLUSTER_NAME=$OPTARG + ;; + v) + OPENSEARCH_VERSION=$OPTARG + ;; + n) + # Do nothing as we're not consuming this param. + ;; + u) + COMMON_UTILS_VERSION=$OPTARG + ;; + :) + echo "-${OPTARG} requires an argument" + usage + exit 1 + ;; + ?) 
+ echo "Invalid option: -${OPTARG}" + exit 1 + ;; + esac +done + + +if [ -z "$BIND_ADDRESS" ] +then + BIND_ADDRESS="localhost" +fi + +if [ -z "$BIND_PORT" ] +then + BIND_PORT="9200" +fi + +if [ -z "$SECURITY_ENABLED" ] +then + SECURITY_ENABLED="true" +fi + +OPENSEARCH_REQUIRED_VERSION="2.12.0" +if [ -z "$CREDENTIAL" ] +then + # Starting in 2.12.0, security demo configuration script requires an initial admin password + COMPARE_VERSION=`echo $OPENSEARCH_REQUIRED_VERSION $OPENSEARCH_VERSION | tr ' ' '\n' | sort -V | uniq | head -n 1` + if [ "$COMPARE_VERSION" != "$OPENSEARCH_REQUIRED_VERSION" ]; then + CREDENTIAL="admin:admin" + else + CREDENTIAL="admin:myStrongPassword123!" + fi +fi + +if [ -z "$CLUSTER_NAME" ] +then + CLUSTER_NAME="docker-cluster" +fi + +USERNAME=`echo $CREDENTIAL | awk -F ':' '{print $1}'` +PASSWORD=`echo $CREDENTIAL | awk -F ':' '{print $2}'` + +./gradlew integTestRemote -Dtests.rest.cluster="$BIND_ADDRESS:$BIND_PORT" -Dtests.cluster="$BIND_ADDRESS:$BIND_PORT" -Dsecurity_enabled=$SECURITY_ENABLED -Dtests.clustername=$CLUSTER_NAME -Dhttps=true -Duser=$USERNAME -Dpassword=$PASSWORD diff --git a/src/integrationTest/java/org/opensearch/security/EncryptionInTransitMigrationTests.java b/src/integrationTest/java/org/opensearch/security/EncryptionInTransitMigrationTests.java new file mode 100644 index 0000000000..58eb7218e6 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/EncryptionInTransitMigrationTests.java @@ -0,0 +1,70 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ * + */ +package org.opensearch.security; + +import java.util.Map; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.test.framework.cluster.ClusterManager; +import org.opensearch.test.framework.cluster.LocalCluster; +import org.opensearch.test.framework.cluster.TestRestClient; + +import static java.util.concurrent.TimeUnit.SECONDS; +import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL; +import static org.awaitility.Awaitility.await; +import static org.junit.Assert.assertEquals; + +/** + * Test related to SSL-only mode of security plugin. In this mode, the security plugin is responsible only for TLS/SSL encryption. + * Therefore, the plugin does not perform authentication and authorization. Moreover, the REST resources (e.g. /_plugins/_security/whoami, + * /_plugins/_security/authinfo, etc.) provided by the plugin are not available. 
+ */ +@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) +public class EncryptionInTransitMigrationTests { + + @ClassRule + public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.DEFAULT) + .anonymousAuth(false) + .loadConfigurationIntoIndex(false) + .nodeSettings(Map.of(ConfigConstants.SECURITY_SSL_ONLY, true)) + .sslOnly(true) + .nodeSpecificSettings(0, Map.of(ConfigConstants.SECURITY_CONFIG_SSL_DUAL_MODE_ENABLED, true)) + .nodeSpecificSettings(1, Map.of(ConfigConstants.SECURITY_CONFIG_SSL_DUAL_MODE_ENABLED, true)) + .extectedNodeStartupCount(2) + .authc(AUTHC_HTTPBASIC_INTERNAL) + .build(); + + @Test + public void shouldOnlyConnectWithThirdNodeAfterDynamicDualModeChange() { + try (TestRestClient client = cluster.getRestClient()) { + TestRestClient.HttpResponse response = client.get("_cat/nodes"); + response.assertStatusCode(200); + + String[] lines = response.getBody().split("\n"); + assertEquals("Expected 2 nodes in the initial response", 2, lines.length); + + String settingsJson = "{\"persistent\": {\"plugins.security_config.ssl_dual_mode_enabled\": false}}"; + TestRestClient.HttpResponse settingsResponse = client.putJson("_cluster/settings", settingsJson); + settingsResponse.assertStatusCode(200); + + await().atMost(10, SECONDS).pollInterval(1, SECONDS).until(() -> { + TestRestClient.HttpResponse secondResponse = client.get("_cat/nodes"); + String[] secondLines = secondResponse.getBody().split("\n"); + return secondLines.length == 3; + }); + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java b/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java index 71a1e21444..7ba435b1c3 100644 --- a/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java +++ b/src/integrationTest/java/org/opensearch/security/FlsAndFieldMaskingTests.java @@ -62,6 +62,7 @@ import 
org.opensearch.test.framework.cluster.TestRestClient; import org.opensearch.test.framework.log.LogsRule; +import static org.apache.http.HttpStatus.SC_OK; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.containsString; @@ -230,6 +231,12 @@ public class FlsAndFieldMaskingTests { "cluster_composite_ops_ro" ).indexPermissions("read").fls(String.format("~%s", FIELD_TITLE)).on(FIRST_INDEX_NAME); + static final TestSecurityConfig.Role ROLE_NO_FIELD_TITLE_WILDCARD_INDEX_FLS = new TestSecurityConfig.Role("example_exclusive_fls") + .clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read", "indices:admin/mappings/get") + .fls(String.format("~%s", FIELD_TITLE)) + .on("*"); + static final TestSecurityConfig.Role ROLE_ONLY_FIELD_TITLE_MASKED = new TestSecurityConfig.Role("example_mask").clusterPermissions( "cluster_composite_ops_ro" ).indexPermissions("read").maskedFields(FIELD_TITLE.concat("::/(?<=.{1})./::").concat(MASK_VALUE)).on(FIRST_INDEX_NAME); @@ -248,6 +255,12 @@ public class FlsAndFieldMaskingTests { ROLE_NO_FIELD_TITLE_FLS ); + /** + * Example user with fls filter in which the user can see every field but the {@link Song#FIELD_TITLE} field. + */ + static final TestSecurityConfig.User USER_NO_FIELD_TITLE_WILDCARD_INDEX_FLS = new TestSecurityConfig.User("exclusive_wildcard_fls_user") + .roles(ROLE_NO_FIELD_TITLE_WILDCARD_INDEX_FLS); + /** * Example user in which {@link Song#FIELD_TITLE} field is masked. 
*/ @@ -305,6 +318,7 @@ public class FlsAndFieldMaskingTests { ALL_INDICES_STARS_LESS_THAN_ZERO_READER, TWINS_FIRST_ARTIST_READER, USER_ONLY_FIELD_TITLE_FLS, + USER_NO_FIELD_TITLE_WILDCARD_INDEX_FLS, USER_NO_FIELD_TITLE_FLS, USER_ONLY_FIELD_TITLE_MASKED, USER_BOTH_ONLY_AND_NO_FIELD_TITLE_FLS, @@ -1778,4 +1792,26 @@ public void testFlsOnAClosedAndReopenedIndex() throws IOException { } } + @SuppressWarnings("unchecked") + @Test + public void testGetMappingsOnAClosedIndexWithFlsRestrictions() throws IOException { + String indexName = "fls_excludes_mappings"; + List docIds = createIndexWithDocs(indexName, SONGS[0], SONGS[1]); + + try (TestRestClient client = cluster.getRestClient(ADMIN_USER)) { + TestRestClient.HttpResponse mappingsResponse = client.get(indexName + "/_mapping"); + mappingsResponse.assertStatusCode(SC_OK); + assertThat(mappingsResponse.getBody(), containsString("title")); + + TestRestClient.HttpResponse closeResponse = client.post(indexName + "/_close"); + closeResponse.assertStatusCode(SC_OK); + } + + try (TestRestClient client = cluster.getRestClient(USER_NO_FIELD_TITLE_WILDCARD_INDEX_FLS)) { + TestRestClient.HttpResponse mappingsResponse = client.get(indexName + "/_mapping"); + mappingsResponse.assertStatusCode(SC_OK); + assertThat(mappingsResponse.getBody(), not(containsString("title"))); + } + } + } diff --git a/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java b/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java index dc2c82c188..7f869ea221 100644 --- a/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java +++ b/src/integrationTest/java/org/opensearch/security/SecurityConfigurationTests.java @@ -266,7 +266,7 @@ public void testParallelTenantPutRequests() throws Exception { assertThat( response.getBody(), response.getStatusCode(), - anyOf(equalTo(HttpStatus.SC_CREATED), equalTo(HttpStatus.SC_CONFLICT)) + anyOf(equalTo(HttpStatus.SC_CREATED), 
equalTo(HttpStatus.SC_OK), equalTo(HttpStatus.SC_CONFLICT)) ); if (response.getStatusCode() == HttpStatus.SC_CREATED) numCreatedResponses.getAndIncrement(); }); @@ -276,10 +276,13 @@ public void testParallelTenantPutRequests() throws Exception { assertThat(getResponse.getBody(), containsString("create new tenant")); TestRestClient.HttpResponse updateResponse = client.putJson(TENANT_ENDPOINT, TENANT_BODY_TWO); - assertThat(updateResponse.getStatusCode(), equalTo(HttpStatus.SC_OK)); + updateResponse.assertStatusCode(HttpStatus.SC_OK); getResponse = client.get(TENANT_ENDPOINT); // make sure update works assertThat(getResponse.getBody(), containsString("update tenant")); + + TestRestClient.HttpResponse deleteResponse = client.delete(TENANT_ENDPOINT); + deleteResponse.assertStatusCode(HttpStatus.SC_OK); } } } diff --git a/src/integrationTest/java/org/opensearch/security/StoredFieldsTests.java b/src/integrationTest/java/org/opensearch/security/StoredFieldsTests.java new file mode 100644 index 0000000000..9bcc0c5526 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/StoredFieldsTests.java @@ -0,0 +1,110 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ * + */ +package org.opensearch.security; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; +import org.apache.http.HttpStatus; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.runner.RunWith; + +import org.opensearch.action.admin.indices.create.CreateIndexResponse; +import org.opensearch.client.Client; +import org.opensearch.test.framework.TestSecurityConfig; +import org.opensearch.test.framework.cluster.ClusterManager; +import org.opensearch.test.framework.cluster.LocalCluster; +import org.opensearch.test.framework.cluster.TestRestClient; + +import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.test.framework.TestSecurityConfig.AuthcDomain.AUTHC_HTTPBASIC_INTERNAL; + +@RunWith(com.carrotsearch.randomizedtesting.RandomizedRunner.class) +@ThreadLeakScope(ThreadLeakScope.Scope.NONE) +public class StoredFieldsTests { + static final TestSecurityConfig.User TEST_USER_MASKED_FIELDS = new TestSecurityConfig.User("test_user_masked_fields").roles( + new TestSecurityConfig.Role("role_masked_fields").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .maskedFields("restricted") + .on("test_index") + ); + + static final TestSecurityConfig.User TEST_USER_FLS = new TestSecurityConfig.User("test_user_fls").roles( + new TestSecurityConfig.Role("role_fls").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls("~restricted") + .on("test_index") + ); + + @ClassRule + public static final LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS) + .authc(AUTHC_HTTPBASIC_INTERNAL) + .users(TEST_USER_MASKED_FIELDS, TEST_USER_FLS) + .build(); + + @BeforeClass + public static void createTestData() { + try (Client client = cluster.getInternalNodeClient()) { + CreateIndexResponse r = client.admin() + .indices() + 
.prepareCreate("test_index") + .setMapping("raw", "type=keyword,store=true", "restricted", "type=keyword,store=true") + .get(); + + client.prepareIndex("test_index").setRefreshPolicy(IMMEDIATE).setSource("raw", "hello", "restricted", "boo!").get(); + } + } + + @Test + public void testStoredFieldsWithApplicableMaskedFieldRestrictions() { + try (TestRestClient client = cluster.getRestClient(TEST_USER_MASKED_FIELDS)) { + TestRestClient.HttpResponse normalSearchResponse = client.get("test_index/_search"); + Assert.assertFalse(normalSearchResponse.getBody().contains("boo!")); + + TestRestClient.HttpResponse fieldSearchResponse = client.postJson("test_index/_search", """ + { + "stored_fields": [ + "raw", + "restricted" + ] + } + """); + fieldSearchResponse.assertStatusCode(HttpStatus.SC_OK); + Assert.assertTrue(fieldSearchResponse.getBody().contains("raw")); + Assert.assertTrue(fieldSearchResponse.getBody().contains("hello")); + Assert.assertTrue(fieldSearchResponse.getBody().contains("restricted")); + Assert.assertFalse(fieldSearchResponse.getBody().contains("boo!")); + } + } + + @Test + public void testStoredFieldsWithApplicableFlsRestrictions() { + try (TestRestClient client = cluster.getRestClient(TEST_USER_FLS)) { + TestRestClient.HttpResponse normalSearchResponse = client.get("test_index/_search"); + Assert.assertFalse(normalSearchResponse.getBody().contains("boo!")); + + TestRestClient.HttpResponse fieldSearchResponse = client.postJson("test_index/_search", """ + { + "stored_fields": [ + "raw", + "restricted" + ] + } + """); + fieldSearchResponse.assertStatusCode(HttpStatus.SC_OK); + Assert.assertTrue(fieldSearchResponse.getBody().contains("raw")); + Assert.assertTrue(fieldSearchResponse.getBody().contains("hello")); + Assert.assertFalse(fieldSearchResponse.getBody().contains("restricted")); + Assert.assertFalse(fieldSearchResponse.getBody().contains("boo!")); + } + } + +} diff --git a/src/integrationTest/java/org/opensearch/security/SystemIndexTests.java
b/src/integrationTest/java/org/opensearch/security/SystemIndexTests.java index 599ffe9ad2..ae068255da 100644 --- a/src/integrationTest/java/org/opensearch/security/SystemIndexTests.java +++ b/src/integrationTest/java/org/opensearch/security/SystemIndexTests.java @@ -13,6 +13,7 @@ import java.util.Map; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; +import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,6 +55,13 @@ public class SystemIndexTests { ) .build(); + @Before + public void setup() { + try (TestRestClient client = cluster.getRestClient(cluster.getAdminCertificate())) { + client.delete(".system-index1"); + } + } + @Test public void adminShouldNotBeAbleToDeleteSecurityIndex() { try (TestRestClient client = cluster.getRestClient(USER_ADMIN)) { @@ -80,4 +88,29 @@ public void adminShouldNotBeAbleToDeleteSecurityIndex() { assertThat(response4.getStatusCode(), equalTo(RestStatus.FORBIDDEN.getStatus())); } } + + @Test + public void regularUserShouldGetNoResultsWhenSearchingSystemIndex() { + // Create system index and index a dummy document as the super admin user, data returned to super admin + try (TestRestClient client = cluster.getRestClient(cluster.getAdminCertificate())) { + HttpResponse response1 = client.put(".system-index1"); + + assertThat(response1.getStatusCode(), equalTo(RestStatus.OK.getStatus())); + String doc = "{\"field\":\"value\"}"; + HttpResponse adminPostResponse = client.postJson(".system-index1/_doc/1?refresh=true", doc); + assertThat(adminPostResponse.getStatusCode(), equalTo(RestStatus.CREATED.getStatus())); + HttpResponse response2 = client.get(".system-index1/_search"); + + assertThat(response2.getStatusCode(), equalTo(RestStatus.OK.getStatus())); + assertThat(response2.getBody(), response2.getBody().contains("\"hits\":{\"total\":{\"value\":1,\"relation\":\"eq\"}")); + } + + // Regular users should not be able to read it + try (TestRestClient client = 
cluster.getRestClient(USER_ADMIN)) { + // regular user cannot read system index + HttpResponse response1 = client.get(".system-index1/_search"); + + assertThat(response1.getBody(), response1.getBody().contains("\"hits\":{\"total\":{\"value\":0,\"relation\":\"eq\"}")); + } + } } diff --git a/src/integrationTest/java/org/opensearch/security/api/AbstractApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/AbstractApiIntegrationTest.java index 297eeb38f9..a69ca83378 100644 --- a/src/integrationTest/java/org/opensearch/security/api/AbstractApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/AbstractApiIntegrationTest.java @@ -13,19 +13,20 @@ import java.io.IOException; import java.nio.file.Path; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.StringJoiner; import com.carrotsearch.randomizedtesting.RandomizedTest; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; -import com.google.common.collect.ImmutableMap; import org.apache.commons.io.FileUtils; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.awaitility.Awaitility; import org.junit.AfterClass; -import org.junit.BeforeClass; +import org.junit.Before; import org.junit.runner.RunWith; import org.opensearch.common.CheckedConsumer; @@ -86,22 +87,22 @@ public abstract class AbstractApiIntegrationTest extends RandomizedTest { public static Path configurationFolder; - public static ImmutableMap.Builder clusterSettings = ImmutableMap.builder(); - protected static TestSecurityConfig testSecurityConfig = new TestSecurityConfig(); public static LocalCluster localCluster; - @BeforeClass - public static void startCluster() throws IOException { + private Class testClass; + + @Before + public void startCluster() throws IOException { + if (this.getClass().equals(testClass)) { + return; + } configurationFolder = 
ConfigurationFiles.createConfigurationDirectory(); extendConfiguration(); - clusterSettings.put(SECURITY_ALLOW_DEFAULT_INIT_SECURITYINDEX, true) - .put(PLUGINS_SECURITY_RESTAPI_ROLES_ENABLED, List.of("user_admin__all_access", REST_ADMIN_REST_API_ACCESS)) - .put(SECURITY_ALLOW_DEFAULT_INIT_USE_CLUSTER_STATE, randomBoolean()); final var clusterManager = randomFrom(List.of(ClusterManager.THREE_CLUSTER_MANAGERS, ClusterManager.SINGLENODE)); final var localClusterBuilder = new LocalCluster.Builder().clusterManager(clusterManager) - .nodeSettings(clusterSettings.buildKeepingLast()) + .nodeSettings(getClusterSettings()) .defaultConfigurationInitDirectory(configurationFolder.toString()) .loadConfigurationIntoIndex(false); localCluster = localClusterBuilder.build(); @@ -111,6 +112,15 @@ public static void startCluster() throws IOException { .alias("Load default configuration") .until(() -> client.securityHealth().getTextFromJsonBody("/status"), equalTo("UP")); } + testClass = this.getClass(); + } + + protected Map getClusterSettings() { + Map clusterSettings = new HashMap<>(); + clusterSettings.put(SECURITY_ALLOW_DEFAULT_INIT_SECURITYINDEX, true); + clusterSettings.put(PLUGINS_SECURITY_RESTAPI_ROLES_ENABLED, List.of("user_admin__all_access", REST_ADMIN_REST_API_ACCESS)); + clusterSettings.put(SECURITY_ALLOW_DEFAULT_INIT_USE_CLUSTER_STATE, randomBoolean()); + return clusterSettings; } private static void extendConfiguration() throws IOException { diff --git a/src/integrationTest/java/org/opensearch/security/api/AbstractConfigEntityApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/AbstractConfigEntityApiIntegrationTest.java index 12b278ec76..d25ae508c8 100644 --- a/src/integrationTest/java/org/opensearch/security/api/AbstractConfigEntityApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/AbstractConfigEntityApiIntegrationTest.java @@ -39,10 +39,16 @@ public abstract class AbstractConfigEntityApiIntegrationTest 
extends AbstractApiIntegrationTest { static { - clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); testSecurityConfig.withRestAdminUser(REST_ADMIN_USER, allRestAdminPermissions()); } + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); + clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); + return clusterSettings; + } + interface TestDescriptor { String entityJsonProperty(); diff --git a/src/integrationTest/java/org/opensearch/security/api/CertificatesRestApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/CertificatesRestApiIntegrationTest.java index 8a69406bff..43ba0ce807 100644 --- a/src/integrationTest/java/org/opensearch/security/api/CertificatesRestApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/CertificatesRestApiIntegrationTest.java @@ -14,6 +14,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.StringJoiner; import java.util.stream.Collectors; @@ -43,7 +44,6 @@ public class CertificatesRestApiIntegrationTest extends AbstractApiIntegrationTe final static String REGULAR_USER = "regular_user"; static { - clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); testSecurityConfig.roles( new TestSecurityConfig.Role("simple_user_role").clusterPermissions("cluster:admin/security/certificates/info") ) @@ -53,6 +53,13 @@ public class CertificatesRestApiIntegrationTest extends AbstractApiIntegrationTe .withRestAdminUser(REST_API_ADMIN_SSL_INFO, restAdminPermission(Endpoint.SSL, CERTS_INFO_ACTION)); } + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); + clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); + return clusterSettings; + } + @Override protected String apiPathPrefix() { return PLUGINS_PREFIX; @@ -96,9 +103,7 @@ public void timeoutTest() throws Exception { } private void 
verifyTimeoutRequest(final TestRestClient client) throws Exception { - TestRestClient.HttpResponse response = ok(() -> client.get(sslCertsPath() + "?timeout=0")); - final var body = response.bodyAsJsonNode(); - assertThat(body.get("nodes").size(), is(0)); + ok(() -> client.get(sslCertsPath() + "?timeout=0")); } private void verifySSLCertsInfo(final TestRestClient client) throws Exception { diff --git a/src/integrationTest/java/org/opensearch/security/api/ConfigRestApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/ConfigRestApiIntegrationTest.java index 9b50b160ee..16b089f99b 100644 --- a/src/integrationTest/java/org/opensearch/security/api/ConfigRestApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/ConfigRestApiIntegrationTest.java @@ -10,6 +10,7 @@ */ package org.opensearch.security.api; +import java.util.Map; import java.util.StringJoiner; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -30,11 +31,18 @@ public class ConfigRestApiIntegrationTest extends AbstractApiIntegrationTest { final static String REST_API_ADMIN_CONFIG_UPDATE = "rest-api-admin-config-update"; static { - clusterSettings.put(SECURITY_UNSUPPORTED_RESTAPI_ALLOW_SECURITYCONFIG_MODIFICATION, true).put(SECURITY_RESTAPI_ADMIN_ENABLED, true); testSecurityConfig.withRestAdminUser(REST_ADMIN_USER, allRestAdminPermissions()) .withRestAdminUser(REST_API_ADMIN_CONFIG_UPDATE, restAdminPermission(Endpoint.CONFIG, SECURITY_CONFIG_UPDATE)); } + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); + clusterSettings.put(SECURITY_UNSUPPORTED_RESTAPI_ALLOW_SECURITYCONFIG_MODIFICATION, true); + clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); + return clusterSettings; + } + private String securityConfigPath(final String... 
path) { final var fullPath = new StringJoiner("/").add(super.apiPath("securityconfig")); if (path != null) for (final var p : path) @@ -80,6 +88,7 @@ void verifyUpdate(final TestRestClient client) throws Exception { badRequest(() -> client.putJson(securityConfigPath("xxx"), EMPTY_BODY)); verifyNotAllowedMethods(client); + TestRestClient.HttpResponse resp = client.get(securityConfigPath()); final var configJson = ok(() -> client.get(securityConfigPath())).bodyAsJsonNode(); final var authFailureListeners = DefaultObjectMapper.objectMapper.createObjectNode(); authFailureListeners.set( diff --git a/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoWithSettingsTest.java b/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoWithSettingsTest.java index 96aed9ddd8..af8eeb2c8a 100644 --- a/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoWithSettingsTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/DashboardsInfoWithSettingsTest.java @@ -12,6 +12,7 @@ package org.opensearch.security.api; import java.util.List; +import java.util.Map; import org.junit.Test; @@ -32,8 +33,6 @@ public class DashboardsInfoWithSettingsTest extends AbstractApiIntegrationTest { "Password must be minimum 5 characters long and must contain at least one uppercase letter, one lowercase letter, one digit, and one special character."; static { - clusterSettings.put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_REGEX, CUSTOM_PASSWORD_REGEX) - .put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_ERROR_MESSAGE, CUSTOM_PASSWORD_MESSAGE); testSecurityConfig.user( new TestSecurityConfig.User("dashboards_user").roles( new Role("dashboards_role").indexPermissions("read").on("*").clusterPermissions("cluster_composite_ops") @@ -41,6 +40,14 @@ public class DashboardsInfoWithSettingsTest extends AbstractApiIntegrationTest { ); } + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); + 
clusterSettings.put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_REGEX, CUSTOM_PASSWORD_REGEX); + clusterSettings.put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_ERROR_MESSAGE, CUSTOM_PASSWORD_MESSAGE); + return clusterSettings; + } + private String apiPath() { return randomFrom(List.of(PLUGINS_PREFIX + "/dashboardsinfo", LEGACY_OPENDISTRO_PREFIX + "/kibanainfo")); } diff --git a/src/integrationTest/java/org/opensearch/security/api/InternalUsersRegExpPasswordRulesRestApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/InternalUsersRegExpPasswordRulesRestApiIntegrationTest.java index b4a6a8f066..684f30e60b 100644 --- a/src/integrationTest/java/org/opensearch/security/api/InternalUsersRegExpPasswordRulesRestApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/InternalUsersRegExpPasswordRulesRestApiIntegrationTest.java @@ -11,6 +11,7 @@ package org.opensearch.security.api; +import java.util.Map; import java.util.StringJoiner; import org.junit.Test; @@ -27,10 +28,19 @@ public class InternalUsersRegExpPasswordRulesRestApiIntegrationTest extends Abst final static String PASSWORD_VALIDATION_ERROR_MESSAGE = "xxxxxxxx"; - static { - clusterSettings.put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_ERROR_MESSAGE, PASSWORD_VALIDATION_ERROR_MESSAGE) - .put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_REGEX, "(?=.*[A-Z])(?=.*[^a-zA-Z\\\\d])(?=.*[0-9])(?=.*[a-z]).{8,}") - .put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_SCORE_BASED_VALIDATION_STRENGTH, PasswordValidator.ScoreStrength.FAIR.name()); + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); + clusterSettings.put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_ERROR_MESSAGE, PASSWORD_VALIDATION_ERROR_MESSAGE); + clusterSettings.put( + ConfigConstants.SECURITY_RESTAPI_PASSWORD_VALIDATION_REGEX, + "(?=.*[A-Z])(?=.*[^a-zA-Z\\\\d])(?=.*[0-9])(?=.*[a-z]).{8,}" + ); + 
clusterSettings.put( + ConfigConstants.SECURITY_RESTAPI_PASSWORD_SCORE_BASED_VALIDATION_STRENGTH, + PasswordValidator.ScoreStrength.FAIR.name() + ); + return clusterSettings; } String internalUsers(String... path) { diff --git a/src/integrationTest/java/org/opensearch/security/api/InternalUsersScoreBasedPasswordRulesRestApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/InternalUsersScoreBasedPasswordRulesRestApiIntegrationTest.java index 5b7026b3c3..b18a0c6fd6 100644 --- a/src/integrationTest/java/org/opensearch/security/api/InternalUsersScoreBasedPasswordRulesRestApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/InternalUsersScoreBasedPasswordRulesRestApiIntegrationTest.java @@ -11,6 +11,7 @@ package org.opensearch.security.api; +import java.util.Map; import java.util.StringJoiner; import org.junit.Test; @@ -24,8 +25,11 @@ public class InternalUsersScoreBasedPasswordRulesRestApiIntegrationTest extends AbstractApiIntegrationTest { - static { + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); clusterSettings.put(ConfigConstants.SECURITY_RESTAPI_PASSWORD_MIN_LENGTH, 9); + return clusterSettings; } String internalUsers(String... 
path) { diff --git a/src/integrationTest/java/org/opensearch/security/api/SslCertsRestApiIntegrationTest.java b/src/integrationTest/java/org/opensearch/security/api/SslCertsRestApiIntegrationTest.java index dbc57839b8..bbdd9ff793 100644 --- a/src/integrationTest/java/org/opensearch/security/api/SslCertsRestApiIntegrationTest.java +++ b/src/integrationTest/java/org/opensearch/security/api/SslCertsRestApiIntegrationTest.java @@ -10,6 +10,8 @@ */ package org.opensearch.security.api; +import java.util.Map; + import com.fasterxml.jackson.databind.JsonNode; import org.junit.Test; @@ -26,11 +28,17 @@ public class SslCertsRestApiIntegrationTest extends AbstractApiIntegrationTest { final static String REST_API_ADMIN_SSL_INFO = "rest-api-admin-ssl-info"; static { - clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); testSecurityConfig.withRestAdminUser(REST_ADMIN_USER, allRestAdminPermissions()) .withRestAdminUser(REST_API_ADMIN_SSL_INFO, restAdminPermission(Endpoint.SSL, CERTS_INFO_ACTION)); } + @Override + protected Map getClusterSettings() { + Map clusterSettings = super.getClusterSettings(); + clusterSettings.put(SECURITY_RESTAPI_ADMIN_ENABLED, true); + return clusterSettings; + } + protected String sslCertsPath() { return super.apiPath("ssl", "certs"); } diff --git a/src/integrationTest/java/org/opensearch/security/privileges/ActionPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/ActionPrivilegesTest.java new file mode 100644 index 0000000000..7807dae748 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/ActionPrivilegesTest.java @@ -0,0 +1,1033 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.fasterxml.jackson.core.JsonProcessingException; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Suite; + +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.user.User; +import org.opensearch.security.util.MockIndexMetadataBuilder; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.isAllowed; +import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.isForbidden; +import static org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.isPartiallyOk; +import static 
org.opensearch.security.privileges.PrivilegeEvaluatorResponseMatcher.missingPrivileges; +import static org.opensearch.security.util.MockIndexMetadataBuilder.dataStreams; +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests for ActionPrivileges. As the ActionPrivileges provides quite a few different code paths for checking + * privileges with different performance characteristics, this test suite defines different test cases for making sure + * all these code paths are tested. So, all functionality must be tested for "well-known" actions and non-well-known + * actions. For index privileges, there are a couple of more tests dimensions. See below. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + ActionPrivilegesTest.ClusterPrivileges.class, + ActionPrivilegesTest.IndexPrivileges.IndicesAndAliases.class, + ActionPrivilegesTest.IndexPrivileges.DataStreams.class, + ActionPrivilegesTest.Misc.class, + ActionPrivilegesTest.StatefulIndexPrivilegesHeapSize.class }) +public class ActionPrivilegesTest { + public static class ClusterPrivileges { + @Test + public void wellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/stats"), isAllowed()); + assertThat( + subject.hasClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/stats"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + assertThat( + subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/other"), + 
isForbidden(missingPrivileges("cluster:monitor/nodes/other")) + ); + } + + @Test + public void notWellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/stats/somethingnotwellknown"), isAllowed()); + assertThat( + subject.hasClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/stats/somethingnotwellknown"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats/somethingnotwellknown")) + ); + assertThat( + subject.hasClusterPrivilege(ctx("test_role"), "cluster:monitor/nodes/something/else"), + isForbidden(missingPrivileges("cluster:monitor/nodes/something/else")) + ); + } + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - '*'", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasClusterPrivilege(ctx("test_role"), "cluster:whatever"), isAllowed()); + assertThat( + subject.hasClusterPrivilege(ctx("other_role"), "cluster:whatever"), + isForbidden(missingPrivileges("cluster:whatever")) + ); + } + + @Test + public void explicit_wellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("non_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - '*'\n" + // + "explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats\n" + // + "semi_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*\n", // + CType.ROLES + ); + + ActionPrivileges subject = new 
ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasExplicitClusterPrivilege(ctx("explicit_role"), "cluster:monitor/nodes/stats"), isAllowed()); + assertThat(subject.hasExplicitClusterPrivilege(ctx("semi_explicit_role"), "cluster:monitor/nodes/stats"), isAllowed()); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("non_explicit_role"), "cluster:monitor/nodes/stats"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/stats"), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + } + + @Test + public void explicit_notWellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("non_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - '*'\n" + // + "explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/notwellknown\n" + // + "semi_explicit_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/*\n", // + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasExplicitClusterPrivilege(ctx("explicit_role"), "cluster:monitor/nodes/notwellknown"), isAllowed()); + assertThat(subject.hasExplicitClusterPrivilege(ctx("semi_explicit_role"), "cluster:monitor/nodes/notwellknown"), isAllowed()); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("non_explicit_role"), "cluster:monitor/nodes/notwellknown"), + isForbidden(missingPrivileges("cluster:monitor/nodes/notwellknown")) + ); + assertThat( + subject.hasExplicitClusterPrivilege(ctx("other_role"), "cluster:monitor/nodes/notwellknown"), + isForbidden(missingPrivileges("cluster:monitor/nodes/notwellknown")) + ); + } + + @Test + public void hasAny_wellKnown() throws Exception { + SecurityDynamicConfiguration roles = 
SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/stats*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/nodes/stats")), isAllowed()); + assertThat( + subject.hasAnyClusterPrivilege( + ctx("test_role"), + ImmutableSet.of("cluster:monitor/nodes/foo", "cluster:monitor/nodes/stats") + ), + isAllowed() + ); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("other_role"), ImmutableSet.of("cluster:monitor/nodes/stats")), + isForbidden(missingPrivileges("cluster:monitor/nodes/stats")) + ); + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/nodes/other")), + isForbidden(missingPrivileges("cluster:monitor/nodes/other")) + ); + } + + @Test + public void hasAny_notWellKnown() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:monitor/nodes/*", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/nodes/notwellknown")), + isAllowed() + ); + assertThat( + subject.hasAnyClusterPrivilege( + ctx("test_role"), + ImmutableSet.of("cluster:monitor/other", "cluster:monitor/nodes/notwellknown") + ), + isAllowed() + ); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("other_role"), ImmutableSet.of("cluster:monitor/nodes/notwellknown")), + isForbidden(missingPrivileges("cluster:monitor/nodes/notwellknown")) + ); + assertThat( + subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/other")), + isForbidden(missingPrivileges("cluster:monitor/other")) + ); + assertThat( + 
subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:monitor/other", "cluster:monitor/yetanother")), + isForbidden() + ); + } + + @Test + public void hasAny_wildcard() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - '*'", CType.ROLES); + + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, null, Settings.EMPTY); + + assertThat(subject.hasAnyClusterPrivilege(ctx("test_role"), ImmutableSet.of("cluster:whatever")), isAllowed()); + + assertThat( + subject.hasAnyClusterPrivilege(ctx("other_role"), ImmutableSet.of("cluster:whatever")), + isForbidden(missingPrivileges("cluster:whatever")) + ); + } + } + + /** + * Tests for index privileges. This class contains two parameterized test suites, first for indices and aliases, + * second for data streams. + *

+ * Both test suites use parameters to create a 3-dimensional test case space to make sure all code paths are covered. + *

+ * The dimensions are (see also the params() methods): + *

    + *
  1. 1. roles.yml; index patterns: Different usages of patterns, wildcards and constant names. + *
  2. 2. roles.yml; action patterns: Well known actions vs non-well known actions combined with use of patterns vs use of constant action names + *
  3. 3. Statefulness: Shall the data structures from ActionPrivileges.StatefulIndexPrivileges be used or not + *
+ * As so many different situations need to be tested, the test oracle method covers() is used to verify the results. + */ + public static class IndexPrivileges { + + @RunWith(Parameterized.class) + public static class IndicesAndAliases { + final ActionSpec actionSpec; + final IndexSpec indexSpec; + final SecurityDynamicConfiguration roles; + final String primaryAction; + final ImmutableSet requiredActions; + final ImmutableSet otherActions; + final ActionPrivileges subject; + + @Test + public void positive_full() throws Exception { + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx("test_role"), requiredActions, resolved("index_a11")); + assertThat(result, isAllowed()); + } + + @Test + public void positive_partial() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("index_a11", "index_a12")); + + if (covers(ctx, "index_a11", "index_a12")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, "index_a11")) { + assertThat(result, isPartiallyOk("index_a11")); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void positive_partial2() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege( + ctx, + requiredActions, + resolved("index_a11", "index_a12", "index_b1") + ); + + if (covers(ctx, "index_a11", "index_a12", "index_b1")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, "index_a11", "index_a12")) { + assertThat(result, isPartiallyOk("index_a11", "index_a12")); + } else if (covers(ctx, "index_a11")) { + assertThat(result, isPartiallyOk("index_a11")); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void positive_noLocal() throws Exception { + IndexResolverReplacer.Resolved resolved = new 
IndexResolverReplacer.Resolved( + ImmutableSet.of(), + ImmutableSet.of(), + ImmutableSet.of("remote:a"), + ImmutableSet.of("remote:a"), + IndicesOptions.LENIENT_EXPAND_OPEN + ); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx("test_role"), requiredActions, resolved); + assertThat(result, isAllowed()); + } + + @Test + public void negative_wrongRole() throws Exception { + PrivilegesEvaluationContext ctx = ctx("other_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("index_a11")); + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + + @Test + public void negative_wrongAction() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, otherActions, resolved("index_a11")); + + if (actionSpec.givenPrivs.contains("*")) { + assertThat(result, isAllowed()); + } else { + assertThat(result, isForbidden(missingPrivileges(otherActions))); + } + } + + @Test + public void positive_hasExplicit_full() { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasExplicitIndexPrivilege(ctx, requiredActions, resolved("index_a11")); + + if (actionSpec.givenPrivs.contains("*")) { + // The * is forbidden for explicit privileges + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } else if (!requiredActions.contains("indices:data/read/search")) { + // For test purposes, we have designated "indices:data/read/search" as an action requiring explicit privileges + // Other actions are not covered here + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } else { + assertThat(result, isAllowed()); + } + } + + private boolean covers(PrivilegesEvaluationContext ctx, String... 
indices) { + for (String index : indices) { + if (!indexSpec.covers(ctx.getUser(), index)) { + return false; + } + } + return true; + } + + @Parameterized.Parameters(name = "{0}; actions: {1}; {2}") + public static Collection params() { + List result = new ArrayList<>(); + + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec().givenIndexPrivs("*"), // + new IndexSpec().givenIndexPrivs("index_*"), // + new IndexSpec().givenIndexPrivs("index_a11"), // + new IndexSpec().givenIndexPrivs("index_a1*"), // + new IndexSpec().givenIndexPrivs("index_${attrs.dept_no}"), // + new IndexSpec().givenIndexPrivs("alias_a1*") // + )) { + for (ActionSpec actionSpec : Arrays.asList( + new ActionSpec("wildcard")// + .givenPrivs("*") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("constant, well known")// + .givenPrivs("indices:data/read/search") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known, two required privs")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search", "indices:data/read/get"), // + new ActionSpec("constant, non well known")// + .givenPrivs("indices:unknown/unwell") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known, two required privs")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell", "indices:unknown/notatall")// + + )) { + for (Statefulness statefulness : Statefulness.values()) { + result.add(new Object[] { indexSpec, actionSpec, statefulness }); + } + } + } + return result; + } + + public IndicesAndAliases(IndexSpec indexSpec, ActionSpec actionSpec, Statefulness statefulness) throws Exception { + this.indexSpec = indexSpec; + this.actionSpec = actionSpec; 
+ this.roles = indexSpec.toRolesConfig(actionSpec); + + this.primaryAction = actionSpec.primaryAction; + this.requiredActions = actionSpec.requiredPrivs; + + this.otherActions = actionSpec.wellKnownActions + ? ImmutableSet.of("indices:data/write/update") + : ImmutableSet.of("indices:foobar/unknown"); + this.indexSpec.indexMetadata = INDEX_METADATA; + + Settings settings = Settings.EMPTY; + if (statefulness == Statefulness.STATEFUL_LIMITED) { + settings = Settings.builder() + .put(ActionPrivileges.PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.getKey(), new ByteSizeValue(10, ByteSizeUnit.BYTES)) + .build(); + } + + this.subject = new ActionPrivileges( + roles, + FlattenedActionGroups.EMPTY, + () -> INDEX_METADATA, + settings, + WellKnownActions.CLUSTER_ACTIONS, + WellKnownActions.INDEX_ACTIONS, + WellKnownActions.INDEX_ACTIONS + ); + + if (statefulness == Statefulness.STATEFUL || statefulness == Statefulness.STATEFUL_LIMITED) { + this.subject.updateStatefulIndexPrivileges(INDEX_METADATA, 1); + } + } + + final static Map INDEX_METADATA = // + indices("index_a11", "index_a12", "index_a21", "index_a22", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a11", "index_a12", "index_a21", "index_a22")// + .alias("alias_a1") + .of("index_a11", "index_a12")// + .alias("alias_a2") + .of("index_a21", "index_a22")// + .alias("alias_b") + .of("index_b1", "index_b2")// + .build() + .getIndicesLookup(); + + static IndexResolverReplacer.Resolved resolved(String... 
indices) { + return new IndexResolverReplacer.Resolved( + ImmutableSet.of(), + ImmutableSet.copyOf(indices), + ImmutableSet.copyOf(indices), + ImmutableSet.of(), + IndicesOptions.LENIENT_EXPAND_OPEN + ); + } + } + + @RunWith(Parameterized.class) + public static class DataStreams { + final ActionSpec actionSpec; + final IndexSpec indexSpec; + final SecurityDynamicConfiguration roles; + final String primaryAction; + final ImmutableSet requiredActions; + final ImmutableSet otherActions; + final ActionPrivileges subject; + + @Test + public void positive_full() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("data_stream_a11")); + if (covers(ctx, "data_stream_a11")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, ".ds-data_stream_a11-000001")) { + assertThat( + result, + isPartiallyOk(".ds-data_stream_a11-000001", ".ds-data_stream_a11-000002", ".ds-data_stream_a11-000003") + ); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void positive_partial() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege( + ctx, + requiredActions, + resolved("data_stream_a11", "data_stream_a12") + ); + + if (covers(ctx, "data_stream_a11", "data_stream_a12")) { + assertThat(result, isAllowed()); + } else if (covers(ctx, "data_stream_a11")) { + assertThat( + result, + isPartiallyOk( + "data_stream_a11", + ".ds-data_stream_a11-000001", + ".ds-data_stream_a11-000002", + ".ds-data_stream_a11-000003" + ) + ); + } else if (covers(ctx, ".ds-data_stream_a11-000001")) { + assertThat( + result, + isPartiallyOk(".ds-data_stream_a11-000001", ".ds-data_stream_a11-000002", ".ds-data_stream_a11-000003") + ); + } else { + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + } + + @Test + public void 
negative_wrongRole() throws Exception { + PrivilegesEvaluationContext ctx = ctx("other_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, requiredActions, resolved("data_stream_a11")); + assertThat(result, isForbidden(missingPrivileges(requiredActions))); + } + + @Test + public void negative_wrongAction() throws Exception { + PrivilegesEvaluationContext ctx = ctx("test_role"); + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege(ctx, otherActions, resolved("data_stream_a11")); + assertThat(result, isForbidden(missingPrivileges(otherActions))); + } + + private boolean covers(PrivilegesEvaluationContext ctx, String... indices) { + for (String index : indices) { + if (!indexSpec.covers(ctx.getUser(), index)) { + return false; + } + } + return true; + } + + @Parameterized.Parameters(name = "{0}; actions: {1}; {2}") + public static Collection params() { + List result = new ArrayList<>(); + + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec().givenIndexPrivs("*"), // + new IndexSpec().givenIndexPrivs("data_stream_*"), // + new IndexSpec().givenIndexPrivs("data_stream_a11"), // + new IndexSpec().givenIndexPrivs("data_stream_a1*"), // + new IndexSpec().givenIndexPrivs("data_stream_${attrs.dept_no}"), // + new IndexSpec().givenIndexPrivs(".ds-data_stream_a11*") // + )) { + for (ActionSpec actionSpec : Arrays.asList( + new ActionSpec("constant, well known")// + .givenPrivs("indices:data/read/search") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search"), // + new ActionSpec("pattern, well known, two required privs")// + .givenPrivs("indices:data/read/*") + .requiredPrivs("indices:data/read/search", "indices:data/read/get"), // + new ActionSpec("constant, non well known")// + .givenPrivs("indices:unknown/unwell") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known")// + 
.givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell"), // + new ActionSpec("pattern, non well known, two required privs")// + .givenPrivs("indices:unknown/*") + .requiredPrivs("indices:unknown/unwell", "indices:unknown/notatall")// + + )) { + for (Statefulness statefulness : Statefulness.values()) { + result.add(new Object[] { indexSpec, actionSpec, statefulness }); + } + } + } + return result; + } + + public DataStreams(IndexSpec indexSpec, ActionSpec actionSpec, Statefulness statefulness) throws Exception { + this.indexSpec = indexSpec; + this.actionSpec = actionSpec; + this.roles = indexSpec.toRolesConfig(actionSpec); + + this.primaryAction = actionSpec.primaryAction; + this.requiredActions = actionSpec.requiredPrivs; + + this.otherActions = actionSpec.wellKnownActions + ? ImmutableSet.of("indices:data/write/update") + : ImmutableSet.of("indices:foobar/unknown"); + this.indexSpec.indexMetadata = INDEX_METADATA; + + Settings settings = Settings.EMPTY; + if (statefulness == Statefulness.STATEFUL_LIMITED) { + settings = Settings.builder() + .put(ActionPrivileges.PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.getKey(), new ByteSizeValue(10, ByteSizeUnit.BYTES)) + .build(); + } + + this.subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, () -> INDEX_METADATA, settings); + + if (statefulness == Statefulness.STATEFUL || statefulness == Statefulness.STATEFUL_LIMITED) { + this.subject.updateStatefulIndexPrivileges(INDEX_METADATA, 1); + } + } + + final static Map INDEX_METADATA = // + dataStreams("data_stream_a11", "data_stream_a12", "data_stream_a21", "data_stream_a22", "data_stream_b1", "data_stream_b2") + .build() + .getIndicesLookup(); + + static IndexResolverReplacer.Resolved resolved(String... 
indices) { + ImmutableSet.Builder allIndices = ImmutableSet.builder(); + + for (String index : indices) { + IndexAbstraction indexAbstraction = INDEX_METADATA.get(index); + + if (indexAbstraction instanceof IndexAbstraction.DataStream) { + allIndices.addAll( + indexAbstraction.getIndices().stream().map(i -> i.getIndex().getName()).collect(Collectors.toList()) + ); + } + + allIndices.add(index); + } + + return new IndexResolverReplacer.Resolved( + ImmutableSet.of(), + allIndices.build(), + ImmutableSet.copyOf(indices), + ImmutableSet.of(), + IndicesOptions.LENIENT_EXPAND_OPEN + ); + } + } + + static class IndexSpec { + ImmutableList givenIndexPrivs = ImmutableList.of(); + boolean wildcardPrivs; + Map indexMetadata; + + IndexSpec() {} + + IndexSpec givenIndexPrivs(String... indexPatterns) { + this.givenIndexPrivs = ImmutableList.copyOf(indexPatterns); + this.wildcardPrivs = this.givenIndexPrivs.contains("*"); + return this; + } + + boolean covers(User user, String index) { + if (this.wildcardPrivs) { + return true; + } + + for (String givenIndexPriv : this.givenIndexPrivs) { + if (givenIndexPriv.contains("${")) { + for (Map.Entry entry : user.getCustomAttributesMap().entrySet()) { + givenIndexPriv = givenIndexPriv.replace("${" + entry.getKey() + "}", entry.getValue()); + } + } + + if (givenIndexPriv.endsWith("*")) { + if (index.startsWith(givenIndexPriv.substring(0, givenIndexPriv.length() - 1))) { + return true; + } + + for (IndexAbstraction indexAbstraction : indexMetadata.values()) { + if ((indexAbstraction instanceof IndexAbstraction.Alias + || indexAbstraction instanceof IndexAbstraction.DataStream) + && indexAbstraction.getName().startsWith(givenIndexPriv.substring(0, givenIndexPriv.length() - 1))) { + if (indexAbstraction.getIndices().stream().anyMatch(i -> i.getIndex().getName().equals(index))) { + return true; + } + } + } + } else if (givenIndexPrivs.contains("*")) { + // For simplicity, we only allow a sub-set of patterns. 
We assume here that the WildcardMatcher + // class fulfills all other cases correctly as per its contract + throw new RuntimeException("The tests only support index patterns with * at the end"); + } else { + if (index.equals(givenIndexPriv)) { + return true; + } + + IndexAbstraction indexAbstraction = indexMetadata.get(index); + + if (indexAbstraction instanceof IndexAbstraction.Alias || indexAbstraction instanceof IndexAbstraction.DataStream) { + if (indexAbstraction.getIndices().stream().anyMatch(i -> i.getIndex().getName().equals(index))) { + return true; + } + } + } + } + + return false; + } + + SecurityDynamicConfiguration toRolesConfig(ActionSpec actionSpec) { + try { + return SecurityDynamicConfiguration.fromMap( + ImmutableMap.of( + "test_role", + ImmutableMap.of( + "index_permissions", + Arrays.asList( + ImmutableMap.of("index_patterns", this.givenIndexPrivs, "allowed_actions", actionSpec.givenPrivs) + ) + ) + ), + CType.ROLES + ); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + + @Override + public String toString() { + return this.givenIndexPrivs.stream().collect(Collectors.joining(",")); + } + } + + static class ActionSpec { + String name; + ImmutableList givenPrivs; + ImmutableSet requiredPrivs; + String primaryAction; + boolean wellKnownActions; + + ActionSpec(String name) { + super(); + this.name = name; + } + + ActionSpec givenPrivs(String... actions) { + this.givenPrivs = ImmutableList.copyOf(actions); + return this; + } + + ActionSpec requiredPrivs(String... 
requiredPrivs) { + this.requiredPrivs = ImmutableSet.copyOf(requiredPrivs); + this.primaryAction = requiredPrivs[0]; + this.wellKnownActions = this.requiredPrivs.stream().anyMatch(a -> WellKnownActions.INDEX_ACTIONS.contains(a)); + return this; + } + + @Override + public String toString() { + return name; + } + } + + enum Statefulness { + STATEFUL, + STATEFUL_LIMITED, + NON_STATEFUL + } + } + + public static class Misc { + @Test + public void relevantOnly_identity() throws Exception { + Map metadata = // + indices("index_a11", "index_a12", "index_b")// + .alias("alias_a") + .of("index_a11", "index_a12")// + .build() + .getIndicesLookup(); + + assertTrue( + "relevantOnly() returned identical object", + ActionPrivileges.StatefulIndexPrivileges.relevantOnly(metadata) == metadata + ); + } + + @Test + public void relevantOnly_closed() throws Exception { + Map metadata = indices("index_open_1", "index_open_2")// + .index("index_closed", IndexMetadata.State.CLOSE) + .build() + .getIndicesLookup(); + + assertNotNull("Original metadata contains index_open_1", metadata.get("index_open_1")); + assertNotNull("Original metadata contains index_closed", metadata.get("index_closed")); + + Map filteredMetadata = ActionPrivileges.StatefulIndexPrivileges.relevantOnly(metadata); + + assertNotNull("Filtered metadata contains index_open_1", filteredMetadata.get("index_open_1")); + assertNull("Filtered metadata does not contain index_closed", filteredMetadata.get("index_closed")); + } + + @Test + public void relevantOnly_dataStreamBackingIndices() throws Exception { + Map metadata = dataStreams("data_stream_1").build().getIndicesLookup(); + + assertNotNull("Original metadata contains backing index", metadata.get(".ds-data_stream_1-000001")); + assertNotNull("Original metadata contains data stream", metadata.get("data_stream_1")); + + Map filteredMetadata = ActionPrivileges.StatefulIndexPrivileges.relevantOnly(metadata); + + assertNull("Filtered metadata does not contain backing index", 
filteredMetadata.get(".ds-data_stream_1-000001")); + assertNotNull("Filtered metadata contains data stream", filteredMetadata.get("data_stream_1")); + } + + @Test + public void backingIndexToDataStream() { + Map metadata = indices("index").dataStream("data_stream").build().getIndicesLookup(); + + assertEquals("index", ActionPrivileges.StatefulIndexPrivileges.backingIndexToDataStream("index", metadata)); + assertEquals( + "data_stream", + ActionPrivileges.StatefulIndexPrivileges.backingIndexToDataStream(".ds-data_stream-000001", metadata) + ); + assertEquals("non_existing", ActionPrivileges.StatefulIndexPrivileges.backingIndexToDataStream("non_existing", metadata)); + } + + @Test + public void hasIndexPrivilege_errors() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml( + "role_with_errors:\n" + + " index_permissions:\n" + + " - index_patterns: ['/invalid_regex_with_attr${user.name}\\/']\n" + + " allowed_actions: ['indices:some_action*', 'indices:data/write/index']", + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges( + roles, + FlattenedActionGroups.EMPTY, + () -> Collections.emptyMap(), + Settings.EMPTY + ); + + PrivilegesEvaluatorResponse result = subject.hasIndexPrivilege( + ctx("role_with_errors"), + Set.of("indices:some_action", "indices:data/write/index"), + IndexResolverReplacer.Resolved.ofIndex("any_index") + ); + assertThat(result, isForbidden()); + assertTrue(result.hasEvaluationExceptions()); + assertTrue( + "Result mentions role_with_errors: " + result.getEvaluationExceptionInfo(), + result.getEvaluationExceptionInfo() + .startsWith("Exceptions encountered during privilege evaluation:\n" + "Error while evaluating role role_with_errors") + ); + } + + @Test + public void hasExplicitIndexPrivilege_errors() throws Exception { + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml( + "role_with_errors:\n" + + " index_permissions:\n" + + " - index_patterns: 
['/invalid_regex_with_attr${user.name}\\/']\n" + + " allowed_actions: ['system:admin/system_index*']", + CType.ROLES + ); + + ActionPrivileges subject = new ActionPrivileges( + roles, + FlattenedActionGroups.EMPTY, + () -> Collections.emptyMap(), + Settings.EMPTY + ); + + PrivilegesEvaluatorResponse result = subject.hasExplicitIndexPrivilege( + ctx("role_with_errors"), + Set.of("system:admin/system_index"), + IndexResolverReplacer.Resolved.ofIndex("any_index") + ); + assertThat(result, isForbidden()); + assertTrue(result.hasEvaluationExceptions()); + assertTrue( + "Result mentions role_with_errors: " + result.getEvaluationExceptionInfo(), + result.getEvaluationExceptionInfo() + .startsWith("Exceptions encountered during privilege evaluation:\n" + "Error while evaluating role role_with_errors") + ); + } + } + + /** + * Verifies that the heap size used by StatefulIndexPrivileges stays within expected bounds. + */ + @RunWith(Parameterized.class) + public static class StatefulIndexPrivilegesHeapSize { + + final Map indices; + final SecurityDynamicConfiguration roles; + final int expectedEstimatedNumberOfBytes; + + @Test + public void estimatedSize() throws Exception { + ActionPrivileges subject = new ActionPrivileges(roles, FlattenedActionGroups.EMPTY, () -> indices, Settings.EMPTY); + + subject.updateStatefulIndexPrivileges(indices, 1); + + int lowerBound = (int) (expectedEstimatedNumberOfBytes * 0.9); + int upperBound = (int) (expectedEstimatedNumberOfBytes * 1.1); + + int actualEstimatedNumberOfBytes = subject.getEstimatedStatefulIndexByteSize(); + + assertTrue( + "estimatedNumberOfBytes: " + lowerBound + " < " + actualEstimatedNumberOfBytes + " < " + upperBound, + lowerBound < actualEstimatedNumberOfBytes && actualEstimatedNumberOfBytes < upperBound + ); + } + + public StatefulIndexPrivilegesHeapSize(int numberOfIndices, int numberOfRoles, int expectedEstimatedNumberOfBytes) { + this.indices = createIndices(numberOfIndices); + this.roles = 
createRoles(numberOfRoles, numberOfIndices); + this.expectedEstimatedNumberOfBytes = expectedEstimatedNumberOfBytes; + } + + @Parameterized.Parameters(name = "{0} indices; {1} roles; estimated number of bytes: {2}") + public static Collection params() { + List result = new ArrayList<>(); + + // indices; roles; expected number of bytes + result.add(new Object[] { 100, 10, 10_000 }); + result.add(new Object[] { 100, 100, 13_000 }); + result.add(new Object[] { 100, 1000, 26_000 }); + + result.add(new Object[] { 1000, 10, 92_000 }); + result.add(new Object[] { 1000, 100, 94_000 }); + result.add(new Object[] { 1000, 1000, 112_000 }); + + result.add(new Object[] { 10_000, 10, 890_000 }); + result.add(new Object[] { 10_000, 100, 930_000 }); + + return result; + } + + static Map createIndices(int numberOfIndices) { + String[] names = new String[numberOfIndices]; + + for (int i = 0; i < numberOfIndices; i++) { + names[i] = "index_" + i; + } + + return MockIndexMetadataBuilder.indices(names).build().getIndicesLookup(); + } + + static SecurityDynamicConfiguration createRoles(int numberOfRoles, int numberOfIndices) { + try { + Random random = new Random(1); + Map rolesDocument = new HashMap<>(); + List allowedActions = Arrays.asList( + "indices:data/read*", + "indices:admin/mappings/fields/get*", + "indices:admin/resolve/index", + "indices:data/write*", + "indices:admin/mapping/put" + ); + + for (int i = 0; i < numberOfRoles; i++) { + List indexPatterns = new ArrayList<>(); + int numberOfIndexPatterns = Math.min( + (int) ((Math.abs(random.nextGaussian() + 0.3)) * 0.5 * numberOfIndices), + numberOfIndices + ); + + int numberOfIndexPatterns10th = numberOfIndexPatterns / 10; + + if (numberOfIndexPatterns10th > 0) { + for (int k = 0; k < numberOfIndexPatterns10th; k++) { + indexPatterns.add("index_" + random.nextInt(numberOfIndices / 10) + "*"); + } + } else { + for (int k = 0; k < numberOfIndexPatterns; k++) { + indexPatterns.add("index_" + random.nextInt(numberOfIndices)); + } + 
} + + Map roleDocument = ImmutableMap.of( + "index_permissions", + Arrays.asList(ImmutableMap.of("index_patterns", indexPatterns, "allowed_actions", allowedActions)) + ); + + rolesDocument.put("role_" + i, roleDocument); + } + + return SecurityDynamicConfiguration.fromMap(rolesDocument, CType.ROLES); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } + } + + static PrivilegesEvaluationContext ctx(String... roles) { + User user = new User("test_user"); + user.addAttributes(ImmutableMap.of("attrs.dept_no", "a11")); + return new PrivilegesEvaluationContext( + user, + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + null + ); + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivilegesTest.java new file mode 100644 index 0000000000..118d5358f6 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivilegesTest.java @@ -0,0 +1,117 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges; + +import java.util.concurrent.atomic.AtomicReference; + +import org.awaitility.Awaitility; +import org.junit.Test; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; +import org.opensearch.node.Node; +import org.opensearch.threadpool.ThreadPool; + +import org.mockito.Mockito; +import org.mockito.stubbing.Answer; + +public class ClusterStateMetadataDependentPrivilegesTest { + + @Test + public void simpleUpdate() { + ThreadPool threadPool = threadPool(); + try { + ConcreteTestSubject subject = new ConcreteTestSubject(); + ClusterState clusterState = clusterState(metadata(1)); + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenReturn(clusterState); + + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + Awaitility.await().until(() -> subject.getCurrentlyUsedMetadataVersion() == 1); + subject.shutdown(); + } finally { + threadPool.shutdown(); + } + } + + @Test + public void frequentUpdates() throws Exception { + ThreadPool threadPool = threadPool(); + try { + ConcreteTestSubject subject = new ConcreteTestSubject(); + AtomicReference clusterStateReference = new AtomicReference<>(clusterState(metadata(1))); + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenAnswer((Answer) invocationOnMock -> clusterStateReference.get()); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + + for (int i = 2; i <= 100; i++) { + clusterStateReference.set(clusterState(metadata(i))); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + Thread.sleep(10); + } + + Awaitility.await().until(() -> subject.getCurrentlyUsedMetadataVersion() == 100); + 
subject.shutdown(); + } finally { + threadPool.shutdown(); + } + } + + @Test + public void shutdown() { + ThreadPool threadPool = threadPool(); + try { + ConcreteTestSubject subject = new ConcreteTestSubject(); + ClusterState clusterState = clusterState(metadata(1)); + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenReturn(clusterState); + subject.updateClusterStateMetadataAsync(clusterService, threadPool); + subject.shutdown(); + } finally { + threadPool.shutdown(); + } + } + + static Metadata metadata(long version) { + return Metadata.builder().version(version).build(); + } + + static ClusterState clusterState(Metadata metadata) { + return ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); + } + + static ThreadPool threadPool() { + return new ThreadPool(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), "name").build()); + } + + static class ConcreteTestSubject extends ClusterStateMetadataDependentPrivileges { + + private long currentMetadataVersion; + + @Override + protected void updateClusterStateMetadata(Metadata metadata) { + // We need to be slow with updates to test the debounce-functionality + try { + Thread.sleep(100); + } catch (InterruptedException e) {} + + this.currentMetadataVersion = metadata.version(); + } + + @Override + protected long getCurrentlyUsedMetadataVersion() { + return this.currentMetadataVersion; + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/IndexPatternTest.java b/src/integrationTest/java/org/opensearch/security/privileges/IndexPatternTest.java new file mode 100644 index 0000000000..e098a605e5 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/IndexPatternTest.java @@ -0,0 +1,252 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source 
license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.time.ZonedDateTime; +import java.time.temporal.ChronoField; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import org.junit.Test; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.support.WildcardMatcher; +import org.opensearch.security.user.User; + +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; + +public class IndexPatternTest { + final static int CURRENT_YEAR = ZonedDateTime.now().get(ChronoField.YEAR); + final static int NEXT_YEAR = CURRENT_YEAR + 1; + + final static Metadata INDEX_METADATA = // + indices("index_a11", "index_a12", "index_a21", "index_a22", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a11", "index_a12", "index_a21", "index_a22")// + .alias("alias_b") + .of("index_b1", "index_b2")// + .dataStream("data_stream_a1")// + .dataStream("data_stream_b1")// + .index("index_year_" + CURRENT_YEAR)// + .index("index_year_" + NEXT_YEAR)// + .alias("alias_year_" + CURRENT_YEAR) + .of("index_current_year")// + .alias("alias_year_" + NEXT_YEAR) + .of("index_next_year")// + .build(); + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + @Test + public void constantIndex() throws Exception { + IndexPattern indexPattern = 
IndexPattern.from("index_a11"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + assertTrue(indexPattern.dynamicOnly().isEmpty()); + assertEquals("index_a11", indexPattern.toString()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_a12", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void constantAlias() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("alias_a", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("alias_a1", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void constantAlias_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_b1", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void constantDataStream_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("data_stream_a1"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches(".ds-data_stream_a1-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches(".ds-data_stream_a2-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_a1*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("index_a11", 
ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_a21", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternAlias() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("alias_a", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("alias_b", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternAlias_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("alias_a*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_b1", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void patternDataStream_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("data_stream_a*"); + assertTrue(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + + assertTrue(indexPattern.matches(".ds-data_stream_a1-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches(".ds-data_stream_b1-000001", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + /** + * Static invalid regular expressions are just ignored + */ + @Test + public void regex_invalid() throws Exception { + IndexPattern indexPattern = IndexPattern.from("/index_x\\/"); + assertFalse(indexPattern.hasStaticPattern()); + assertFalse(indexPattern.hasDynamicPattern()); + assertTrue(indexPattern.isEmpty()); + } + + @Test + public void dateMathIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from(""); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertEquals("", indexPattern.toString()); + + 
assertTrue(indexPattern.matches("index_year_" + CURRENT_YEAR, ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_year_" + NEXT_YEAR, ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test + public void dateMathAlias_onIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from(""); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + + assertTrue(indexPattern.matches("index_current_year", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_next_year", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test(expected = PrivilegesEvaluationException.class) + public void dateMathIndex_invalid() throws Exception { + IndexPattern indexPattern = IndexPattern.from(""); + indexPattern.matches("index_year_" + CURRENT_YEAR, ctx(), INDEX_METADATA.getIndicesLookup()); + } + + @Test + public void templatedIndex() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_${attrs.a11}"); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertEquals(indexPattern, indexPattern.dynamicOnly()); + + assertTrue(indexPattern.matches("index_a11", ctx(), INDEX_METADATA.getIndicesLookup())); + assertFalse(indexPattern.matches("index_a12", ctx(), INDEX_METADATA.getIndicesLookup())); + } + + @Test(expected = PrivilegesEvaluationException.class) + public void templatedIndex_invalid() throws Exception { + IndexPattern indexPattern = IndexPattern.from("/index_${attrs.a11}\\/"); + assertFalse(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + + indexPattern.matches("whatever", ctx(), INDEX_METADATA.getIndicesLookup()); + } + + @Test + public void mixed() throws Exception { + IndexPattern indexPattern = IndexPattern.from("index_${attrs.a11}", "index_a12"); + assertTrue(indexPattern.hasStaticPattern()); + 
assertTrue(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + + assertEquals(WildcardMatcher.from("index_a12"), indexPattern.getStaticPattern()); + assertEquals(IndexPattern.from("index_${attrs.a11}"), indexPattern.dynamicOnly()); + assertEquals("index_a12 index_${attrs.a11}", indexPattern.toString()); + } + + @Test + public void mixed2() throws Exception { + IndexPattern indexPattern = IndexPattern.from("", "index_a12"); + assertTrue(indexPattern.hasStaticPattern()); + assertTrue(indexPattern.hasDynamicPattern()); + assertFalse(indexPattern.isEmpty()); + + assertEquals(WildcardMatcher.from("index_a12"), indexPattern.getStaticPattern()); + assertEquals(IndexPattern.from(""), indexPattern.dynamicOnly()); + assertEquals("index_a12 ", indexPattern.toString()); + } + + @Test + public void equals() { + IndexPattern a1 = IndexPattern.from("data_stream_a*"); + IndexPattern a2 = IndexPattern.from("data_stream_a*"); + IndexPattern b = IndexPattern.from("", "data_stream_a*"); + + assertEquals(a1, a1); + assertEquals(a1, a2); + assertNotEquals(a1, b); + assertFalse(a1.equals(a1.toString())); + } + + private static PrivilegesEvaluationContext ctx() { + IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)); + IndexResolverReplacer indexResolverReplacer = new IndexResolverReplacer(indexNameExpressionResolver, () -> CLUSTER_STATE, null); + User user = new User("test_user"); + user.addAttributes(ImmutableMap.of("attrs.a11", "a11")); + user.addAttributes(ImmutableMap.of("attrs.year", "year")); + + return new PrivilegesEvaluationContext( + user, + ImmutableSet.of(), + "indices:action/test", + null, + null, + indexResolverReplacer, + indexNameExpressionResolver, + () -> CLUSTER_STATE + ); + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegeEvaluatorResponseMatcher.java 
b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegeEvaluatorResponseMatcher.java new file mode 100644 index 0000000000..dfaa065605 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegeEvaluatorResponseMatcher.java @@ -0,0 +1,182 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.util.Arrays; +import java.util.Set; + +import com.google.common.collect.ImmutableSet; +import org.hamcrest.Description; +import org.hamcrest.DiagnosingMatcher; + +/** + * Provides hamcrest matchers for PrivilegesEvaluatorResponse instances, which can be used with assertThat() calls. + */ +public abstract class PrivilegeEvaluatorResponseMatcher extends DiagnosingMatcher { + + /** + * Asserts that the status of the PrivilegesEvaluatorResponse is "allowed". 
+ */ + public static PrivilegeEvaluatorResponseMatcher isAllowed() { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("Request is fully allowed; isAllowed() returns true"); + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (!response.isAllowed()) { + mismatchDescription.appendText("isAllowed() is false"); + return false; + } + + if (response.isPartiallyOk()) { + mismatchDescription.appendText("isPartiallyOk() must be false if isAllowed() is true"); + return false; + } + + if (!response.getMissingPrivileges().isEmpty()) { + mismatchDescription.appendText("getMissingPrivileges() must be empty if isAllowed() is true"); + return false; + } + + return true; + } + }; + } + + /** + * Asserts that the status of the PrivilegesEvaluatorResponse is neither "allowed" or "partially allowed". You can + * add missingPrivileges sub-matchers to verify the actually missing privileges. + */ + public static PrivilegeEvaluatorResponseMatcher isForbidden(PrivilegeEvaluatorResponseMatcher... 
subMatchers) { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("Request is fully forbidden; isAllowed() returns false; isPartiallyOk() returns false"); + + for (PrivilegeEvaluatorResponseMatcher subMatcher : subMatchers) { + description.appendText("; "); + subMatcher.describeTo(description); + } + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (response.isAllowed()) { + mismatchDescription.appendText("isAllowed() is true"); + return false; + } + + if (response.isPartiallyOk()) { + mismatchDescription.appendText("isPartiallyOk() is true"); + return false; + } + + for (PrivilegeEvaluatorResponseMatcher subMatcher : subMatchers) { + if (!subMatcher.matches(response, mismatchDescription)) { + return false; + } + } + + return true; + } + }; + } + + /** + * Asserts that the status of the PrivilegesEvaluatorResponse is "partially ok". You can specify the available + * indices are parameter. + */ + public static PrivilegeEvaluatorResponseMatcher isPartiallyOk(String... 
availableIndices) { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText( + "Request is allowed for a subset of indices; isPartiallyOk() returns true; getAvailableIndices() returns " + ).appendValue(Arrays.asList(availableIndices)); + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (!response.isPartiallyOk()) { + mismatchDescription.appendText("isPartiallyOk() is false"); + return false; + } + + if (response.isAllowed()) { + mismatchDescription.appendText("isAllowed() must be false if isPartiallyOk() is true"); + return false; + } + + if (!response.getAvailableIndices().equals(ImmutableSet.copyOf(availableIndices))) { + mismatchDescription.appendText("getAvailableIndices() is ").appendValue(Arrays.asList(response.getAvailableIndices())); + return false; + } + + return true; + } + }; + } + + /** + * Asserts that the missingPrivileges property of a PrivilegesEvaluatorResponse instance equals to the given parameters. + * Should be used as a sub-matcher for isForbidden(). + */ + public static PrivilegeEvaluatorResponseMatcher missingPrivileges(String... missingPrivileges) { + return missingPrivileges(ImmutableSet.copyOf(missingPrivileges)); + } + + /** + * Asserts that the missingPrivileges property of a PrivilegesEvaluatorResponse instance equals to the given parameters. + * Should be used as a sub-matcher for isForbidden(). 
+ */ + public static PrivilegeEvaluatorResponseMatcher missingPrivileges(Set missingPrivileges) { + return new PrivilegeEvaluatorResponseMatcher() { + @Override + public void describeTo(Description description) { + description.appendText("Missing privileges are "); + description.appendValue(missingPrivileges); + } + + @Override + protected boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription) { + if (!response.getMissingPrivileges().equals(missingPrivileges)) { + mismatchDescription.appendText("getMissingPrivileges() returns ").appendValue(response.getMissingPrivileges()); + return false; + } + + return true; + } + }; + } + + @Override + protected boolean matches(Object o, Description mismatchDescription) { + if (!(o instanceof PrivilegesEvaluatorResponse)) { + mismatchDescription.appendText("The object is not an instance of PrivilegesEvaluatorResponse: ").appendValue(o); + } + + PrivilegesEvaluatorResponse response = (PrivilegesEvaluatorResponse) o; + + if (matches(response, mismatchDescription)) { + return true; + } else { + mismatchDescription.appendText("\n"); + mismatchDescription.appendText(response.toString()); + return false; + } + } + + protected abstract boolean matches(PrivilegesEvaluatorResponse response, Description mismatchDescription); + +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java index 561b4a0742..adc0b212f5 100644 --- a/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java +++ b/src/integrationTest/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java @@ -19,6 +19,7 @@ import org.opensearch.script.mustache.MustacheModulePlugin; import org.opensearch.script.mustache.RenderSearchTemplateAction; +import org.opensearch.test.framework.TestIndex; import org.opensearch.test.framework.TestSecurityConfig; import 
org.opensearch.test.framework.TestSecurityConfig.Role; import org.opensearch.test.framework.cluster.ClusterManager; @@ -65,11 +66,18 @@ public class PrivilegesEvaluatorTest { private String TEST_RENDER_SEARCH_TEMPLATE_QUERY = "{\"params\":{\"status\":[\"pending\",\"published\"]},\"source\":\"{\\\"query\\\": {\\\"terms\\\": {\\\"status\\\": [\\\"{{#status}}\\\",\\\"{{.}}\\\",\\\"{{/status}}\\\"]}}}\"}"; + final static TestIndex R = TestIndex.name("r").build(); + /** + * This is necessary so that the testNegativeLookaheadPattern test has an forbidden index to match against + */ + final static TestIndex T = TestIndex.name("t").build(); + @ClassRule public static LocalCluster cluster = new LocalCluster.Builder().clusterManager(ClusterManager.THREE_CLUSTER_MANAGERS) .authc(AUTHC_HTTPBASIC_INTERNAL) .users(NEGATIVE_LOOKAHEAD, NEGATED_REGEX, SEARCH_TEMPLATE, RENDER_SEARCH_TEMPLATE, TestSecurityConfig.User.USER_ADMIN) .plugin(MustacheModulePlugin.class) + .indices(R, T) .build(); @Test diff --git a/src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java b/src/integrationTest/java/org/opensearch/security/privileges/RestEndpointPermissionTests.java similarity index 69% rename from src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java rename to src/integrationTest/java/org/opensearch/security/privileges/RestEndpointPermissionTests.java index f21a3e98a2..1e61aa0206 100644 --- a/src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java +++ b/src/integrationTest/java/org/opensearch/security/privileges/RestEndpointPermissionTests.java @@ -24,7 +24,7 @@ * GitHub history for details. 
*/ -package org.opensearch.security.securityconf; +package org.opensearch.security.privileges; import java.io.IOException; import java.util.AbstractMap.SimpleEntry; @@ -46,21 +46,23 @@ import org.opensearch.security.DefaultObjectMapper; import org.opensearch.security.dlic.rest.api.Endpoint; import org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.PermissionBuilder; +import org.opensearch.security.securityconf.FlattenedActionGroups; import org.opensearch.security.securityconf.impl.CType; import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; -import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; -import org.opensearch.security.securityconf.impl.v7.RoleMappingsV7; import org.opensearch.security.securityconf.impl.v7.RoleV7; -import org.opensearch.security.securityconf.impl.v7.TenantV7; - -import org.mockito.Mockito; +import org.opensearch.security.user.User; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.CERTS_INFO_ACTION; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.ENDPOINTS_WITH_PERMISSIONS; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.RELOAD_CERTS_ACTION; import static org.opensearch.security.dlic.rest.api.RestApiAdminPrivilegesEvaluator.SECURITY_CONFIG_UPDATE; -public class SecurityRolesPermissionsTest { +/** + * Moved from https://github.com/opensearch-project/security/blob/54361468f5c4b3a57f3ecffaf1bbe8dccee562be/src/test/java/org/opensearch/security/securityconf/SecurityRolesPermissionsTest.java + * + * See https://github.com/opensearch-project/security/pull/2411 + */ +public class RestEndpointPermissionTests { static final Map NO_REST_ADMIN_PERMISSIONS_ROLES = ImmutableMap.builder() .put("all_access", role("*")) @@ -111,44 +113,36 @@ static String[] allRestApiPermissions() { }).toArray(String[]::new); } - final ConfigModel configModel; + final ActionPrivileges actionPrivileges; - public 
SecurityRolesPermissionsTest() throws IOException { - this.configModel = new ConfigModelV7( - createRolesConfig(), - createRoleMappingsConfig(), - createActionGroupsConfig(), - createTenantsConfig(), - Mockito.mock(DynamicConfigModel.class), - Settings.EMPTY - ); + public RestEndpointPermissionTests() throws IOException { + this.actionPrivileges = new ActionPrivileges(createRolesConfig(), FlattenedActionGroups.EMPTY, null, Settings.EMPTY); } @Test public void hasNoExplicitClusterPermissionPermissionForRestAdmin() { for (final String role : NO_REST_ADMIN_PERMISSIONS_ROLES.keySet()) { - final SecurityRoles securityRolesForRole = configModel.getSecurityRoles().filter(ImmutableSet.of(role)); for (final Map.Entry entry : ENDPOINTS_WITH_PERMISSIONS.entrySet()) { final Endpoint endpoint = entry.getKey(); final PermissionBuilder permissionBuilder = entry.getValue(); if (endpoint == Endpoint.SSL) { Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(CERTS_INFO_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(CERTS_INFO_ACTION)).isAllowed() ); Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(RELOAD_CERTS_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(RELOAD_CERTS_ACTION)).isAllowed() ); } else if (endpoint == Endpoint.CONFIG) { Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(SECURITY_CONFIG_UPDATE)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(SECURITY_CONFIG_UPDATE)).isAllowed() ); } else { Assert.assertFalse( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build()) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build()).isAllowed() ); } } @@ -158,28 +152,27 @@ 
public void hasNoExplicitClusterPermissionPermissionForRestAdmin() { @Test public void hasExplicitClusterPermissionPermissionForRestAdminWitFullAccess() { for (final String role : REST_ADMIN_PERMISSIONS_FULL_ACCESS_ROLES.keySet()) { - final SecurityRoles securityRolesForRole = configModel.getSecurityRoles().filter(ImmutableSet.of(role)); for (final Map.Entry entry : ENDPOINTS_WITH_PERMISSIONS.entrySet()) { final Endpoint endpoint = entry.getKey(); final PermissionBuilder permissionBuilder = entry.getValue(); if (endpoint == Endpoint.SSL) { Assert.assertTrue( endpoint.name() + "/" + CERTS_INFO_ACTION, - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(CERTS_INFO_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(CERTS_INFO_ACTION)).isAllowed() ); Assert.assertTrue( endpoint.name() + "/" + CERTS_INFO_ACTION, - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(RELOAD_CERTS_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(RELOAD_CERTS_ACTION)).isAllowed() ); } else if (endpoint == Endpoint.CONFIG) { Assert.assertTrue( endpoint.name() + "/" + SECURITY_CONFIG_UPDATE, - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(SECURITY_CONFIG_UPDATE)) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build(SECURITY_CONFIG_UPDATE)).isAllowed() ); } else { Assert.assertTrue( endpoint.name(), - securityRolesForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build()) + actionPrivileges.hasExplicitClusterPrivilege(ctx(role), permissionBuilder.build()).isAllowed() ); } } @@ -195,33 +188,31 @@ public void hasExplicitClusterPermissionPermissionForRestAdmin() { .collect(Collectors.toList()); for (final Endpoint endpoint : noSslEndpoints) { final String permission = ENDPOINTS_WITH_PERMISSIONS.get(endpoint).build(); - final SecurityRoles allowOnePermissionRole = 
configModel.getSecurityRoles() - .filter(ImmutableSet.of(restAdminApiRoleName(endpoint.name().toLowerCase(Locale.ROOT)))); - Assert.assertTrue(endpoint.name(), allowOnePermissionRole.hasExplicitClusterPermissionPermission(permission)); - assertHasNoPermissionsForRestApiAdminOnePermissionRole(endpoint, allowOnePermissionRole); + final PrivilegesEvaluationContext ctx = ctx(restAdminApiRoleName(endpoint.name().toLowerCase(Locale.ROOT))); + Assert.assertTrue(endpoint.name(), actionPrivileges.hasExplicitClusterPrivilege(ctx, permission).isAllowed()); + assertHasNoPermissionsForRestApiAdminOnePermissionRole(endpoint, ctx); } // verify SSL endpoint with 2 actions for (final String sslAction : ImmutableSet.of(CERTS_INFO_ACTION, RELOAD_CERTS_ACTION)) { - final SecurityRoles sslAllowRole = configModel.getSecurityRoles().filter(ImmutableSet.of(restAdminApiRoleName(sslAction))); + final PrivilegesEvaluationContext ctx = ctx(restAdminApiRoleName(sslAction)); final PermissionBuilder permissionBuilder = ENDPOINTS_WITH_PERMISSIONS.get(Endpoint.SSL); Assert.assertTrue( Endpoint.SSL + "/" + sslAction, - sslAllowRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(sslAction)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(sslAction)).isAllowed() ); - assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.SSL, sslAllowRole); + assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.SSL, ctx); } // verify CONFIG endpoint with 1 action - final SecurityRoles securityConfigAllowRole = configModel.getSecurityRoles() - .filter(ImmutableSet.of(restAdminApiRoleName(SECURITY_CONFIG_UPDATE))); + final PrivilegesEvaluationContext ctx = ctx(restAdminApiRoleName(SECURITY_CONFIG_UPDATE)); final PermissionBuilder permissionBuilder = ENDPOINTS_WITH_PERMISSIONS.get(Endpoint.CONFIG); Assert.assertTrue( Endpoint.SSL + "/" + SECURITY_CONFIG_UPDATE, - 
securityConfigAllowRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(SECURITY_CONFIG_UPDATE)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(SECURITY_CONFIG_UPDATE)).isAllowed() ); - assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.CONFIG, securityConfigAllowRole); + assertHasNoPermissionsForRestApiAdminOnePermissionRole(Endpoint.CONFIG, ctx); } - void assertHasNoPermissionsForRestApiAdminOnePermissionRole(final Endpoint allowEndpoint, final SecurityRoles allowOnlyRoleForRole) { + void assertHasNoPermissionsForRestApiAdminOnePermissionRole(final Endpoint allowEndpoint, final PrivilegesEvaluationContext ctx) { final Collection noPermissionEndpoints = ENDPOINTS_WITH_PERMISSIONS.keySet() .stream() .filter(e -> e != allowEndpoint) @@ -231,14 +222,17 @@ void assertHasNoPermissionsForRestApiAdminOnePermissionRole(final Endpoint allow if (endpoint == Endpoint.SSL) { Assert.assertFalse( endpoint.name(), - allowOnlyRoleForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(CERTS_INFO_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(CERTS_INFO_ACTION)).isAllowed() ); Assert.assertFalse( endpoint.name(), - allowOnlyRoleForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build(RELOAD_CERTS_ACTION)) + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build(RELOAD_CERTS_ACTION)).isAllowed() ); } else { - Assert.assertFalse(endpoint.name(), allowOnlyRoleForRole.hasExplicitClusterPermissionPermission(permissionBuilder.build())); + Assert.assertFalse( + endpoint.name(), + actionPrivileges.hasExplicitClusterPrivilege(ctx, permissionBuilder.build()).isAllowed() + ); } } } @@ -256,22 +250,8 @@ static SecurityDynamicConfiguration createRolesConfig() throws IOExcepti return SecurityDynamicConfiguration.fromNode(rolesNode, CType.ROLES, 2, 0, 0); } - static SecurityDynamicConfiguration createRoleMappingsConfig() throws IOException { - final 
ObjectNode metaNode = DefaultObjectMapper.objectMapper.createObjectNode(); - metaNode.set("_meta", meta("rolesmapping")); - return SecurityDynamicConfiguration.fromNode(metaNode, CType.ROLESMAPPING, 2, 0, 0); - } - - static SecurityDynamicConfiguration createActionGroupsConfig() throws IOException { - final ObjectNode metaNode = DefaultObjectMapper.objectMapper.createObjectNode(); - metaNode.set("_meta", meta("actiongroups")); - return SecurityDynamicConfiguration.fromNode(metaNode, CType.ACTIONGROUPS, 2, 0, 0); - } - - static SecurityDynamicConfiguration createTenantsConfig() throws IOException { - final ObjectNode metaNode = DefaultObjectMapper.objectMapper.createObjectNode(); - metaNode.set("_meta", meta("tenants")); - return SecurityDynamicConfiguration.fromNode(metaNode, CType.TENANTS, 2, 0, 0); + static PrivilegesEvaluationContext ctx(String... roles) { + return new PrivilegesEvaluationContext(new User("test_user"), ImmutableSet.copyOf(roles), null, null, null, null, null, null); } } diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeadersTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeadersTest.java new file mode 100644 index 0000000000..2c8e6de587 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeadersTest.java @@ -0,0 +1,395 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import org.junit.Test; + +import org.opensearch.Version; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsRequest; +import org.opensearch.action.search.SearchRequest; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.CheckedFunction; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.ParseField; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.MatchQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.search.internal.ShardSearchRequest; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.Base64Helper; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.user.User; +import org.opensearch.security.util.MockIndexMetadataBuilder; +import org.opensearch.test.framework.TestSecurityConfig; +import org.opensearch.transport.Transport; + +import org.mockito.Mockito; + +import static org.opensearch.security.Song.ARTIST_STRING; +import static org.opensearch.security.Song.ARTIST_TWINS; +import static org.opensearch.security.Song.FIELD_ARTIST; +import static org.opensearch.security.Song.FIELD_STARS; +import static 
org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public class DlsFlsLegacyHeadersTest { + static NamedXContentRegistry xContentRegistry = new NamedXContentRegistry( + ImmutableList.of( + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(TermQueryBuilder.NAME), + (CheckedFunction) (p) -> TermQueryBuilder.fromXContent(p) + ), + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(MatchQueryBuilder.NAME), + (CheckedFunction) (p) -> MatchQueryBuilder.fromXContent(p) + ), + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(RangeQueryBuilder.NAME), + (CheckedFunction) (p) -> RangeQueryBuilder.fromXContent(p) + ) + ) + ); + + /** + * Basic test that the DLS header matches the one produced in previous versions. + *

+ * Test configuration corresponds to DlsIntegrationTests.testShouldSearchI1_S2I2_S3() + */ + @Test + public void dls_simple() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("read_where_field_artist_matches_artist_string").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_STRING)) + .on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "my_index1").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "read_where_field_artist_matches_artist_string"), + dlsFlsProcessedConfig, + metadata, + false + ).getDlsHeader(); + + // Created with DlsIntegrationTests.testShouldSearchI1_S2I2_S3() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAA3QAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAF0AB17Im1hdGNoIjp7ImFydGlzdCI6IlN0cmluZyJ9fXh0AAlteV9pbmRleDFzcQB+AAZ3DAAAABA/QAAAAAAAAXEAfgAIeHQAEXNlY29uZC10ZXN0LWluZGV4c3EAfgAGdwwAAAAQP0AAAAAAAAFxAH4ACHh4"; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + /** + * Test that the DLS header matches the one produced in previous versions. In this case, two roles need to be considered. + *

+ * Test configuration corresponds to DlsIntegrationTests.testShouldSearchI1_S3I1_S6I2_S2() + */ + @Test + public void dls_twoRoles() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("read_where_field_artist_matches_artist_twins").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls(String.format("{\"match\":{\"%s\":\"%s\"}}", FIELD_ARTIST, ARTIST_TWINS)) + .on("*"), + new TestSecurityConfig.Role("read_where_field_stars_greater_than_five").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls(String.format("{\"range\":{\"%s\":{\"gt\":%d}}}", FIELD_STARS, 5)) + .on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "my_index1").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "read_where_field_artist_matches_artist_twins", "read_where_field_stars_greater_than_five"), + dlsFlsProcessedConfig, + metadata, + false + ).getDlsHeader(); + + // Created with DlsIntegrationTests.testShouldSearchI1_S3I1_S6I2_S2() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAA3QAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAJ0ABx7Im1hdGNoIjp7ImFydGlzdCI6IlR3aW5zIn19dAAceyJyYW5nZSI6eyJzdGFycyI6eyJndCI6NX19fXh0AAlteV9pbmRleDFzcQB+AAZ3DAAAABA/QAAAAAAAAnEAfgAIcQB+AAl4dAARc2Vjb25kLXRlc3QtaW5kZXhzcQB+AAZ3DAAAABA/QAAAAAAAAnEAfgAIcQB+AAl4eA=="; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + @Test + public void dls_none() throws Exception { + 
SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("role").clusterPermissions("cluster_composite_ops_ro").indexPermissions("read").on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "my_index1").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders(ctx(metadata, "role"), dlsFlsProcessedConfig, metadata, false).getDlsHeader(); + + assertNull(header); + } + + /** + * Basic test that the FLS header matches the one produced in previous versions. + *

+ * Test configuration corresponds to FlsAndFieldMaskingTests.flsEnabledFieldsAreHiddenForNormalUsers() + */ + @Test + public void fls_simple() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("fls_exclude_stars_reader").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls("~stars") + .on("*") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "fls_index").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders(ctx(metadata, "fls_exclude_stars_reader"), dlsFlsProcessedConfig, metadata, false) + .getFlsHeader(); + + // Created with FlsAndFieldMaskingTests.flsEnabledFieldsAreHiddenForNormalUsers() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAA3QAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAF0AAZ+c3RhcnN4dAAJZmxzX2luZGV4c3EAfgAGdwwAAAAQP0AAAAAAAAFxAH4ACHh0ABFzZWNvbmQtdGVzdC1pbmRleHNxAH4ABncMAAAAED9AAAAAAAABcQB+AAh4eA=="; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + /** + * Test that the FLS header matches the one produced in previous versions. In this case, inclusion and exclusion is mixed + * and contradicts itself. + *

+ * Test configuration corresponds to FlsAndFieldMaskingTests.testGetDocumentWithNoTitleFieldAndOnlyTitleFieldFLSRestrictions() + */ + @Test + public void fls_mixedContradiction() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("example_inclusive_fls").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls("title") + .on("first-test-index"), + new TestSecurityConfig.Role("example_exclusive_fls").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .fls(String.format("~title")) + .on("first-test-index") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index", "fls_index").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "example_inclusive_fls", "example_exclusive_fls"), + dlsFlsProcessedConfig, + metadata, + false + ).getFlsHeader(); + + // Created with FlsAndFieldMaskingTests.testGetDocumentWithNoTitleFieldAndOnlyTitleFieldFLSRestrictions() on an earlier OpenSearch + // version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAAXQAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAJ0AAV0aXRsZXQABn50aXRsZXh4"; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + /** + * Basic test that the field masking header matches the one produced in previous versions. + *

+ * Test configuration corresponds to FlsAndFieldMaskingTests.searchForDocuments() + */ + @Test + public void fieldMasking_simple() throws Exception { + SecurityDynamicConfiguration rolesConfig = TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("masked_title_artist_lyrics_reader").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .maskedFields("artist::/(?<=.{1})./::*", "lyrics::/(?<=.{1})./::*") + .on("first-test-index"), + new TestSecurityConfig.Role("masked_lyrics_reader").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .maskedFields("lyrics::/(?<=.{1})./::*") + .on("second-test-index") + ); + + Metadata metadata = MockIndexMetadataBuilder.indices("first-test-index", "second-test-index").build(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(rolesConfig, metadata); + String header = new DlsFlsLegacyHeaders( + ctx(metadata, "masked_title_artist_lyrics_reader", "masked_lyrics_reader"), + dlsFlsProcessedConfig, + metadata, + false + ).getFmHeader(); + + // Created with FlsAndFieldMaskingTests.flsEnabledFieldsAreHiddenForNormalUsers() on an earlier OpenSearch version + String expectedHeader = + "rO0ABXNyACVqYXZhLnV0aWwuQ29sbGVjdGlvbnMkVW5tb2RpZmlhYmxlTWFw8aWo/nT1B0ICAAFMAAFtdAAPTGphdmEvdXRpbC9NYXA7eHBzcgARamF2YS51dGlsLkhhc2hNYXAFB9rBwxZg0QMAAkYACmxvYWRGYWN0b3JJAAl0aHJlc2hvbGR4cD9AAAAAAAAMdwgAAAAQAAAAAnQAEGZpcnN0LXRlc3QtaW5kZXhzcgARamF2YS51dGlsLkhhc2hTZXS6RIWVlri3NAMAAHhwdwwAAAAQP0AAAAAAAAJ0ABdhcnRpc3Q6Oi8oPzw9LnsxfSkuLzo6KnQAF2x5cmljczo6Lyg/PD0uezF9KS4vOjoqeHQAEXNlY29uZC10ZXN0LWluZGV4c3EAfgAGdwwAAAAQP0AAAAAAAAF0ABdseXJpY3M6Oi8oPzw9LnsxfSkuLzo6Knh4"; + + assertEquals(Base64Helper.deserializeObject(expectedHeader), Base64Helper.deserializeObject(header)); + } + + @Test + public void performHeaderDecoration_oldNode() throws Exception { + Metadata metadata = exampleMetadata(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = 
dlsFlsProcessedConfig(exampleRolesConfig(), metadata); + + Transport.Connection connection = Mockito.mock(Transport.Connection.class); + Mockito.when(connection.getVersion()).thenReturn(Version.V_2_0_0); + + // ShardSearchRequest does not extend ActionRequest, thus the headers must be set + ShardSearchRequest request = Mockito.mock(ShardSearchRequest.class); + + Map headerSink = new HashMap<>(); + + DlsFlsLegacyHeaders subject = new DlsFlsLegacyHeaders(ctx(metadata, "test_role"), dlsFlsProcessedConfig, metadata, false); + + subject.performHeaderDecoration(connection, request, headerSink); + + assertEquals(subject.getDlsHeader(), headerSink.get(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER)); + assertEquals(subject.getFlsHeader(), headerSink.get(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER)); + assertEquals(subject.getFmHeader(), headerSink.get(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER)); + } + + @Test + public void performHeaderDecoration_actionRequest() throws Exception { + Metadata metadata = exampleMetadata(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(exampleRolesConfig(), metadata); + + Transport.Connection connection = Mockito.mock(Transport.Connection.class); + Mockito.when(connection.getVersion()).thenReturn(Version.V_2_0_0); + + // SearchRequest does extend ActionRequest, thus the headers must not be set + SearchRequest request = new SearchRequest(); + + Map headerSink = new HashMap<>(); + + DlsFlsLegacyHeaders subject = new DlsFlsLegacyHeaders(ctx(metadata, "test_role"), dlsFlsProcessedConfig, metadata, false); + + subject.performHeaderDecoration(connection, request, headerSink); + assertEquals(0, headerSink.size()); + } + + @Test + public void performHeaderDecoration_newNode() throws Exception { + Metadata metadata = exampleMetadata(); + DlsFlsProcessedConfig dlsFlsProcessedConfig = dlsFlsProcessedConfig(exampleRolesConfig(), metadata); + + Transport.Connection connection = 
Mockito.mock(Transport.Connection.class); + Mockito.when(connection.getVersion()).thenReturn(Version.V_3_0_0); + + // ShardSearchRequest does not extend ActionRequest, thus the headers must be set + ShardSearchRequest request = Mockito.mock(ShardSearchRequest.class); + + Map headerSink = new HashMap<>(); + + DlsFlsLegacyHeaders subject = new DlsFlsLegacyHeaders(ctx(metadata, "test_role"), dlsFlsProcessedConfig, metadata, false); + + subject.performHeaderDecoration(connection, request, headerSink); + assertEquals(0, headerSink.size()); + } + + @Test + public void prepare() throws Exception { + Metadata metadata = exampleMetadata(); + + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + + DlsFlsLegacyHeaders.prepare( + threadContext, + ctx(metadata, "test_role"), + dlsFlsProcessedConfig(exampleRolesConfig(), metadata), + metadata, + false + ); + DlsFlsLegacyHeaders instance = threadContext.getTransient(DlsFlsLegacyHeaders.TRANSIENT_HEADER); + + assertNotNull(instance); + } + + @Test + public void prepare_ccs() throws Exception { + Metadata metadata = exampleMetadata(); + + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putTransient(ConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTED_CLUSTER_REQUEST, true); + User user = new User("test_user"); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); + + PrivilegesEvaluationContext ctx = new PrivilegesEvaluationContext( + user, + ImmutableSet.of("test_role"), + null, + new ClusterSearchShardsRequest(), + null, + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + () -> clusterState + ); + + DlsFlsLegacyHeaders.prepare(threadContext, ctx, dlsFlsProcessedConfig(exampleRolesConfig(), metadata), metadata, false); + assertTrue(threadContext.getResponseHeaders().containsKey(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER)); + } + + static PrivilegesEvaluationContext ctx(Metadata metadata, 
String... roles) { + User user = new User("test_user"); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(metadata).build(); + + return new PrivilegesEvaluationContext( + user, + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + () -> clusterState + ); + } + + static DlsFlsProcessedConfig dlsFlsProcessedConfig(SecurityDynamicConfiguration rolesConfig, Metadata metadata) { + return new DlsFlsProcessedConfig( + rolesConfig, + metadata.getIndicesLookup(), + xContentRegistry, + Settings.EMPTY, + FieldMasking.Config.DEFAULT + ); + } + + static SecurityDynamicConfiguration exampleRolesConfig() { + return TestSecurityConfig.Role.toRolesConfiguration( + new TestSecurityConfig.Role("test_role").clusterPermissions("cluster_composite_ops_ro") + .indexPermissions("read") + .dls("{\"match\":{\"artist\":\"foo\"}}") + .fls("~stars") + .maskedFields("foo") + .on("*") + ); + } + + static Metadata exampleMetadata() { + return MockIndexMetadataBuilder.indices("first-test-index", "second-test-index").build(); + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DocumentPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DocumentPrivilegesTest.java new file mode 100644 index 0000000000..97a0ddb69e --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/DocumentPrivilegesTest.java @@ -0,0 +1,1397 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import org.hamcrest.BaseMatcher; +import org.hamcrest.Description; +import org.hamcrest.DiagnosingMatcher; +import org.hamcrest.Matcher; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Suite; + +import org.opensearch.action.IndicesRequest; +import org.opensearch.action.support.IndicesOptions; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.CheckedFunction; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.core.ParseField; +import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.BaseTermQueryBuilder; +import org.opensearch.index.query.MatchNoneQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryBuilders; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.resolver.IndexResolverReplacer; 
+import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.user.User; +import org.opensearch.test.framework.TestSecurityConfig; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.util.MockIndexMetadataBuilder.dataStreams; +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotEquals; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +/** + * Unit tests for the DocumentPrivileges class and the underlying AbstractRuleBasedPrivileges class. As these classes + * provide a number of different code paths for checking privileges, the inner test classes use parameterized tests + * to define test matrices to make sure all the code paths are covered. The dimensions of the matrices are: + *

    + *
  • Different user configurations: With user attrs, without user attrs, with single role, with mixed roles + *
  • Statefulness: As the AbstractRuleBasedPrivileges.StatefulRules class can either cover certain indices or not, + * this parameter simulates whether an index is covered or not. This is because the AbstractRuleBasedPrivileges.StatefulRules class + * is updated asynchronously and thus might just cover an index later. + *
  • DfmEmptyOverridesAll: The state of the "plugins.security.dfm_empty_overrides_all" setting. + *
+ * Note: The individual tests check these parameters and choose the correct assertions based on these parameters. + * This creates quite complex conditions, which might take a while to get an overview over - I am not too happy + * about this. The alternative would be a test oracle, which however will be much more complex. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ + DocumentPrivilegesTest.IndicesAndAliases_getRestriction.class, + DocumentPrivilegesTest.IndicesAndAliases_isUnrestricted.class, + DocumentPrivilegesTest.DataStreams_getRestriction.class, + DocumentPrivilegesTest.DlsQuery.class }) +public class DocumentPrivilegesTest { + + static NamedXContentRegistry xContentRegistry = new NamedXContentRegistry( + ImmutableList.of( + new NamedXContentRegistry.Entry( + QueryBuilder.class, + new ParseField(TermQueryBuilder.NAME), + (CheckedFunction) (p) -> TermQueryBuilder.fromXContent(p) + ) + ) + ); + + @RunWith(Parameterized.class) + public static class IndicesAndAliases_getRestriction { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + final static IndexAbstraction.Index index_a1 = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get("index_a1"); + final static IndexAbstraction.Index index_a2 = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get("index_a2"); + final static IndexAbstraction.Index index_b1 = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get("index_b1"); + + final Statefulness statefulness; + final UserSpec userSpec; + final User user; + final IndexSpec indexSpec; + final IndexAbstraction.Index index; + final PrivilegesEvaluationContext context; + final boolean dfmEmptyOverridesAll; + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig 
= roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r1")).on("*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r2")).on("*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + // If we have two DLS roles, we get the union of queries as restriction + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + // Only one role: Check that the restriction matches the role definition above. + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("dls_role_2")) { + // Only one role: Check that the restriction matches the role definition above. 
+ assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + // If dfmEmptyOverridesAll == false, roles with restrictions take precedence over roles without restrictions + // Thus, this check comes after the checks for the cases with present DLS roles + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + + IndexToRuleMap restrictionMap = subject.getRestrictions(context, Collections.singleton(index.getName())); + if (dlsRestriction.isUnrestricted()) { + assertTrue("restrictionMap should be unrestricted according to " + dlsRestriction, restrictionMap.isUnrestricted()); + } else { + assertEquals( + "restrictionMap should contain " + dlsRestriction, + dlsRestriction.getQueries(), + restrictionMap.getIndexMap().get(index.getName()).getQueries() + ); + } + } + + @Test + public void indexPattern() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = 
subject.getRestriction(context, index.getName()); + + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + // As the roles use index patterns, we have to check the requested index in order to know the effective restrictions + if (index == index_a1 || index == index_a2) { + // Only dls_role_1 and non_dls_role match index_a1 or index_a2. We need to check the effective roles. + if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == index_b1) { + // Only dls_role_2 and non_dls_role match index_b1. We need to check the effective roles. 
+ if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + // If dfmEmptyOverridesAll == false, roles with restrictions take precedence over roles without restrictions + // Thus, this check comes after the checks for the cases with present DLS roles + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPatternTemplate() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_${attr.attr_a}1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_${attr.attr_a}*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("index_${attr.attr_a}*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (index == index_b1) { + // This test case never grants privileges to index_b1 + assertThat(dlsRestriction, 
isFullyRestricted()); + } else if (userSpec.attributes.isEmpty()) { + // As all the roles in our roleConfig (see above) use user attributes, these won't work with + // users without attributes. Then, access should be also restricted + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + // As the roles use index patterns, we have to check the requested index in order to know the effective restrictions + if (index == index_a1) { + // dls_role_1, dls_role_2 and non_dls_role match index_a1. + if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isUnrestricted()); + } + } else if (index == index_a2) { + // only dls_role_2 and non_dls_role match index_a2 + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = 
subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPatternTemplate_invalid() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("/index_${attr.attr_a}1\\/"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + + if (userSpec.roles.contains("dls_role_1") && !(userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll)) { + // dls_role_1 will yield an invalid regex pattern. As we also have user attributes, this will + // lead to an exception being thrown at evaluation time + + try { + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + fail("getRestriction() should have thrown an exception. However, it returned: " + dlsRestriction); + } catch (PrivilegesEvaluationException e) { + assertEquals("Error while evaluating index pattern of role dls_role_1", e.getMessage()); + } + + if (!dfmEmptyOverridesAll) { + // For the isUnrestricted(), we will only get an error if dfmEmptyOverridesAll == false. + // This is because for dfmEmptyOverridesAll == true, we just look for roles which give us + // unrestricted access + + try { + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + fail("isUnrestricted() should have thrown an exception. 
However, it returned: " + isUnrestricted); + } catch (PrivilegesEvaluationException e) { + assertEquals("Error while evaluating index pattern of role dls_role_1", e.getMessage()); + } + } else { + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + assertFalse("isUnrestricted() should return false, as there is no role which gives privileges", isUnrestricted); + } + + } else { + // Here, we just assert that no exception is being thrown + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + } + + @Test + public void queryPatternTemplate() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls("{\"term\": {\"dept\": \"${attr.attr_a}1\"}}") + .on("index_a1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls("{\"term\": {\"dept\": \"${attr.attr_a}2\"}}") + .on("index_a*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("index_a*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (index == index_b1) { + // This test case never grants privileges to index_b1 + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.attributes.isEmpty()) { + // If a role uses undefined user attributes for DLS queries, the attribute templates + // remain unchanged in the resulting query. This is a property of the current attribute handling code. + // It would be probably better if an error would be raised in that case. 
+ if (index == index_a1) { + if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat( + dlsRestriction, + isRestricted(termQuery("dept", "${attr.attr_a}1"), termQuery("dept", "${attr.attr_a}2")) + ); + } + } + } else if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + // As the roles use index patterns, we have to check the requested index in order to know the effective restrictions + if (index == index_a1) { + // dls_role_1, dls_role_2 and non_dls_role match index_a1. + if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "a1"), termQuery("dept", "a2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "a1"))); + } else { + assertThat(dlsRestriction, isUnrestricted()); + } + } else if (index == index_a2) { + // only dls_role_2 and non_dls_role match index_a2 + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "a2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if 
(dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void alias() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_a") + ); + DocumentPrivileges subject = createSubject(roleConfig); + + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (userSpec.roles.isEmpty()) { + // Users without any roles do not have any privileges to access anything + assertThat(dlsRestriction, isFullyRestricted()); + } else if (index == index_a1) { + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == index_a2) { + if (userSpec.roles.contains("non_dls_role") && dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if 
(userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role") && !dfmEmptyOverridesAll) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == index_b1) { + // index_b1 is not a member of alias_a. Thus, the role definition does not give any privileges. + assertThat(dlsRestriction, isFullyRestricted()); + } else { + fail("Missing case for " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Parameterized.Parameters(name = "{0}; {1}; {2}; {3}") + public static Collection params() { + List result = new ArrayList<>(); + + for (UserSpec userSpec : Arrays.asList( + new UserSpec("non_dls_role", "non_dls_role"), // + new UserSpec("dls_role_1", "dls_role_1"), // + new UserSpec("dls_role_1 and dls_role_2", "dls_role_1", "dls_role_2"), // + new UserSpec("dls_role_1 and non_dls_role", "dls_role_1", "non_dls_role"), // + new UserSpec("non_dls_role, attributes", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1, attributes", "dls_role_1").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and dls_role_2, attributes", "dls_role_1", "dls_role_2").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and non_dls_role, attributes", "dls_role_1", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("no roles")// + )) { + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec("index_a1"), // + new IndexSpec("index_a2"), // + new IndexSpec("index_b1") + )) { + for (Statefulness statefulness : Statefulness.values()) { + for 
(DfmEmptyOverridesAll dfmEmptyOverridesAll : DfmEmptyOverridesAll.values()) { + result.add(new Object[] { userSpec, indexSpec, statefulness, dfmEmptyOverridesAll }); + } + } + } + } + return result; + } + + public IndicesAndAliases_getRestriction( + UserSpec userSpec, + IndexSpec indexSpec, + Statefulness statefulness, + DfmEmptyOverridesAll dfmEmptyOverridesAll + ) { + this.userSpec = userSpec; + this.indexSpec = indexSpec; + this.user = userSpec.buildUser(); + this.index = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get(indexSpec.index); + this.context = new PrivilegesEvaluationContext( + this.user, + ImmutableSet.copyOf(userSpec.roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + this.statefulness = statefulness; + this.dfmEmptyOverridesAll = dfmEmptyOverridesAll == DfmEmptyOverridesAll.DFM_EMPTY_OVERRIDES_ALL_TRUE; + } + + private DocumentPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new DocumentPrivileges( + roleConfig, + statefulness == Statefulness.STATEFUL ? 
INDEX_METADATA.getIndicesLookup() : Map.of(), + xContentRegistry, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", this.dfmEmptyOverridesAll).build() + ); + } + } + + @RunWith(Parameterized.class) + public static class IndicesAndAliases_isUnrestricted { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + final static IndexNameExpressionResolver INDEX_NAME_EXPRESSION_RESOLVER = new IndexNameExpressionResolver( + new ThreadContext(Settings.EMPTY) + ); + final static IndexResolverReplacer RESOLVER_REPLACER = new IndexResolverReplacer( + INDEX_NAME_EXPRESSION_RESOLVER, + () -> CLUSTER_STATE, + null + ); + + final Statefulness statefulness; + final UserSpec userSpec; + final User user; + final IndicesSpec indicesSpec; + final IndexResolverReplacer.Resolved resolvedIndices; + final PrivilegesEvaluationContext context; + final boolean dfmEmptyOverridesAll; + + @Test + public void wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r1")).on("*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r2")).on("*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. 
+ assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void indexPattern() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. 
+ assertTrue(result); + } else if (!dfmEmptyOverridesAll + && userSpec.roles.equals(ImmutableList.of("dls_role_1", "non_dls_role")) + && indicesSpec.indices.equals(ImmutableList.of("index_b1"))) { + // index_b1 is only covered by non_dls_role, so we are also unrestricted here + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void template() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("index_${attr.attr_a}1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_${attr.attr_a}*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("index_${attr.attr_a}*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (indicesSpec.indices.contains("index_b1")) { + // None of the roles above cover index_b1, so full restrictions should be assumed + assertFalse(result); + } else if (userSpec.attributes.isEmpty()) { + // All roles defined above use attributes. If there are no user attributes, we must get a restricted result. + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. 
+ assertTrue(result); + } else if (!dfmEmptyOverridesAll + && userSpec.roles.equals(ImmutableList.of("dls_role_1", "non_dls_role")) + && indicesSpec.indices.equals(ImmutableList.of("index_a2"))) { + // index_a2 is not covered by this configuration + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_static() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_a") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (resolvedIndices.getAllIndices().contains("index_b1")) { + // index_b1 is not covered by any of the above roles, so there should be always a restriction + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_static_wildcardNonDls() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (dfmEmptyOverridesAll 
&& userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll + && userSpec.roles.contains("non_dls_role") + && indicesSpec.indices.equals(ImmutableList.of("index_b1"))) { + // index_b1 is covered neither by dls_role_1 nor dls_role_2, so it is unrestricted when non_dls_role is present + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_a2"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_a*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (resolvedIndices.getAllIndices().contains("index_b1")) { + // index_b1 is not covered by any of the above roles, so there should be always a restriction + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + assertTrue(result); + } else { + assertFalse(result); + } + } + + @Test + public void alias_template() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("alias_${attr.attr_a}"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("index_${attr.attr_a}2"), + new 
TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("alias_${attr.attr_a}") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + boolean result = subject.isUnrestricted(context, resolvedIndices); + + if (userSpec.attributes.isEmpty()) { + // All roles defined above use attributes. If there are no user attributes, we must get a restricted result. + assertFalse(result); + } else if (resolvedIndices.getAllIndices().contains("index_b1")) { + // index_b1 is not covered by any of the above roles, so there should be always a restriction + assertFalse(result); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertTrue(result); + } else if (!dfmEmptyOverridesAll && userSpec.roles.equals(ImmutableList.of("non_dls_role"))) { + // For dfmEmptyOverridesAll == false, only non_dls_role must be there for an unrestricted result. 
+ assertTrue(result); + } else { + assertFalse(result); + } + } + + @Parameterized.Parameters(name = "{0}; {1}; {2}; {3}") + public static Collection params() { + List result = new ArrayList<>(); + + for (UserSpec userSpec : Arrays.asList( + new UserSpec("non_dls_role", "non_dls_role"), // + new UserSpec("dls_role_1", "dls_role_1"), // + new UserSpec("dls_role_1 and dls_role_2", "dls_role_1", "dls_role_2"), // + new UserSpec("dls_role_1 and non_dls_role", "dls_role_1", "non_dls_role"), // + new UserSpec("non_dls_role, attributes", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1, attributes", "dls_role_1").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and dls_role_2, attributes", "dls_role_1", "dls_role_2").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and non_dls_role, attributes", "dls_role_1", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("no roles")// + )) { + for (IndicesSpec indicesSpec : Arrays.asList( + new IndicesSpec("index_a1"), // + new IndicesSpec("index_a2"), // + new IndicesSpec("index_b1"), // + new IndicesSpec("alias_a"), // + new IndicesSpec("index_a1", "index_a2"), // + new IndicesSpec("index_a1", "index_b1"), // + new IndicesSpec("alias_a", "index_b1") + )) { + for (Statefulness statefulness : Statefulness.values()) { + for (DfmEmptyOverridesAll dfmEmptyOverridesAll : DfmEmptyOverridesAll.values()) { + result.add(new Object[] { userSpec, indicesSpec, statefulness, dfmEmptyOverridesAll }); + } + } + } + } + return result; + } + + public IndicesAndAliases_isUnrestricted( + UserSpec userSpec, + IndicesSpec indicesSpec, + Statefulness statefulness, + DfmEmptyOverridesAll dfmEmptyOverridesAll + ) { + this.userSpec = userSpec; + this.indicesSpec = indicesSpec; + this.user = userSpec.buildUser(); + this.resolvedIndices = RESOLVER_REPLACER.resolveRequest(new IndicesRequest.Replaceable() { + + @Override + public String[] indices() { + return indicesSpec.indices.toArray(new 
String[0]); + } + + @Override + public IndicesOptions indicesOptions() { + return IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED; + } + + @Override + public IndicesRequest indices(String... strings) { + return this; + } + }); + this.context = new PrivilegesEvaluationContext( + this.user, + ImmutableSet.copyOf(userSpec.roles), + null, + null, + null, + RESOLVER_REPLACER, + INDEX_NAME_EXPRESSION_RESOLVER, + () -> CLUSTER_STATE + ); + this.statefulness = statefulness; + this.dfmEmptyOverridesAll = dfmEmptyOverridesAll == DfmEmptyOverridesAll.DFM_EMPTY_OVERRIDES_ALL_TRUE; + } + + private DocumentPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new DocumentPrivileges( + roleConfig, + statefulness == Statefulness.STATEFUL ? INDEX_METADATA.getIndicesLookup() : Map.of(), + xContentRegistry, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", this.dfmEmptyOverridesAll).build() + ); + } + } + + @RunWith(Parameterized.class) + public static class DataStreams_getRestriction { + final static Metadata INDEX_METADATA = dataStreams("datastream_a1", "datastream_a2", "datastream_b1", "datastream_b2").build(); + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + final static IndexAbstraction.Index datastream_a1_backing = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup() + .get(".ds-datastream_a1-000001"); + final static IndexAbstraction.Index datastream_a2_backing = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup() + .get(".ds-datastream_a2-000001"); + final static IndexAbstraction.Index datastream_b1_backing = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup() + .get(".ds-datastream_b1-000001"); + + final Statefulness statefulness; + final UserSpec userSpec; + final User user; + final IndexSpec indexSpec; + final IndexAbstraction.Index index; + final PrivilegesEvaluationContext context; + final boolean dfmEmptyOverridesAll; + + @Test + public 
void wildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r1")).on("*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*").dls(QueryBuilders.termQuery("dept", "dept_r2")).on("*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPattern() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new 
TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("datastream_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("datastream_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("datastream_a*", "datastream_b*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + if (index == datastream_a1_backing || index == datastream_a2_backing) { + if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == datastream_b1_backing) { + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() 
should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPattern_nonDlsRoleOnWildcard() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("datastream_a*"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("datastream_b*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + // For dfmEmptyOverridesAll == true, the presence of non_dls_role alone is sufficient for an unrestricted result. + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + if (index == datastream_a1_backing || index == datastream_a2_backing) { + if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == datastream_b1_backing) { + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, 
index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Test + public void indexPatternTemplate() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("dls_role_1").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r1")) + .on("datastream_${attr.attr_a}1"), + new TestSecurityConfig.Role("dls_role_2").indexPermissions("*") + .dls(QueryBuilders.termQuery("dept", "dept_r2")) + .on("datastream_${attr.attr_a}*"), + new TestSecurityConfig.Role("non_dls_role").indexPermissions("*").on("datastream_${attr.attr_a}*") + ); + + DocumentPrivileges subject = createSubject(roleConfig); + DlsRestriction dlsRestriction = subject.getRestriction(context, index.getName()); + + if (index == datastream_b1_backing) { + // This test case never grants privileges to datastream_b1 + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.attributes.isEmpty()) { + // As all the roles in our roleConfig (see above) use user attributes, these won't work with + // users without attributes. 
Then, access should be also restricted + assertThat(dlsRestriction, isFullyRestricted()); + } else if (userSpec.roles.isEmpty()) { + assertThat(dlsRestriction, isFullyRestricted()); + } else if (dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else if (userSpec.roles.contains("dls_role_1") || userSpec.roles.contains("dls_role_2")) { + if (index == datastream_a1_backing) { + if (userSpec.roles.contains("dls_role_1") && userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"), termQuery("dept", "dept_r2"))); + } else if (userSpec.roles.contains("dls_role_1")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r1"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } else if (index == datastream_a2_backing) { + if (userSpec.roles.contains("dls_role_2")) { + assertThat(dlsRestriction, isRestricted(termQuery("dept", "dept_r2"))); + } else { + assertThat(dlsRestriction, isFullyRestricted()); + } + } + } else if (!dfmEmptyOverridesAll && userSpec.roles.contains("non_dls_role")) { + assertThat(dlsRestriction, isUnrestricted()); + } else { + fail("Unhandled case " + this); + } + + boolean isUnrestricted = subject.isUnrestricted(context, index.getName()); + if (dlsRestriction.isUnrestricted()) { + assertTrue("isUnrestricted() should return true according to " + dlsRestriction, isUnrestricted); + } else { + assertFalse("isUnrestricted() should return false according to " + dlsRestriction, isUnrestricted); + } + } + + @Parameterized.Parameters(name = "{0}; {1}; {2}; {3}") + public static Collection params() { + List result = new ArrayList<>(); + + for (UserSpec userSpec : Arrays.asList( + new UserSpec("non_dls_role", "non_dls_role"), // + new UserSpec("dls_role_1", "dls_role_1"), // + new UserSpec("dls_role_1 and dls_role_2", "dls_role_1", "dls_role_2"), // + new UserSpec("dls_role_1 and non_dls_role", "dls_role_1", 
"non_dls_role"), // + new UserSpec("non_dls_role, attributes", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1, attributes", "dls_role_1").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and dls_role_2, attributes", "dls_role_1", "dls_role_2").attribute("attr.attr_a", "a"), // + new UserSpec("dls_role_1 and non_dls_role, attributes", "dls_role", "non_dls_role").attribute("attr.attr_a", "a"), // + new UserSpec("no roles")// + )) { + for (IndexSpec indexSpec : Arrays.asList( + new IndexSpec(datastream_a1_backing.getName()), // + new IndexSpec(datastream_a2_backing.getName()), // + new IndexSpec(datastream_b1_backing.getName()) + )) { + for (Statefulness statefulness : Statefulness.values()) { + for (DfmEmptyOverridesAll dfmEmptyOverridesAll : DfmEmptyOverridesAll.values()) { + result.add(new Object[] { userSpec, indexSpec, statefulness, dfmEmptyOverridesAll }); + } + } + } + } + return result; + } + + private DocumentPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new DocumentPrivileges( + roleConfig, + statefulness == Statefulness.STATEFUL ? 
INDEX_METADATA.getIndicesLookup() : Map.of(), + xContentRegistry, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", this.dfmEmptyOverridesAll).build() + ); + } + + public DataStreams_getRestriction( + UserSpec userSpec, + IndexSpec indexSpec, + Statefulness statefulness, + DfmEmptyOverridesAll dfmEmptyOverridesAll + ) { + this.userSpec = userSpec; + this.indexSpec = indexSpec; + this.user = userSpec.buildUser(); + this.index = (IndexAbstraction.Index) INDEX_METADATA.getIndicesLookup().get(indexSpec.index); + this.context = new PrivilegesEvaluationContext( + this.user, + ImmutableSet.copyOf(userSpec.roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + this.statefulness = statefulness; + this.dfmEmptyOverridesAll = dfmEmptyOverridesAll == DfmEmptyOverridesAll.DFM_EMPTY_OVERRIDES_ALL_TRUE; + } + + } + + /** + * Unit tests for the inner class DocumentPrivileges.DlsQuery + */ + public static class DlsQuery { + @Test(expected = PrivilegesConfigurationValidationException.class) + public void invalidQuery() throws Exception { + DocumentPrivileges.DlsQuery.create("{\"invalid\": \"totally\"}", xContentRegistry); + } + + @Test(expected = PrivilegesEvaluationException.class) + public void invalidTemplatedQuery() throws Exception { + DocumentPrivileges.DlsQuery.create("{\"invalid\": \"totally ${attr.foo}\"}", xContentRegistry) + .evaluate(new PrivilegesEvaluationContext(new User("test_user"), ImmutableSet.of(), null, null, null, null, null, null)); + } + + @Test + public void equals() throws Exception { + DocumentPrivileges.DlsQuery query1a = DocumentPrivileges.DlsQuery.create( + Strings.toString(MediaTypeRegistry.JSON, QueryBuilders.termQuery("foo", "1")), + xContentRegistry + ); + DocumentPrivileges.DlsQuery query1b = DocumentPrivileges.DlsQuery.create( + Strings.toString(MediaTypeRegistry.JSON, QueryBuilders.termQuery("foo", "1")), + xContentRegistry + ); + DocumentPrivileges.DlsQuery query2 = DocumentPrivileges.DlsQuery.create( + 
Strings.toString(MediaTypeRegistry.JSON, QueryBuilders.termQuery("foo", "2")), + xContentRegistry + ); + + assertEquals(query1a, query1a); + assertEquals(query1a, query1b); + assertNotEquals(query2, query1a); + assertFalse(query1a.equals(query1a.queryString)); + } + } + + static SecurityDynamicConfiguration roleConfig(TestSecurityConfig.Role... roles) { + return TestSecurityConfig.Role.toRolesConfiguration(roles); + } + + public static class UserSpec { + final List roles; + final String description; + final Map attributes = new HashMap<>(); + + UserSpec(String description, String... roles) { + this.description = description; + this.roles = Arrays.asList(roles); + } + + UserSpec attribute(String name, String value) { + this.attributes.put(name, value); + return this; + } + + User buildUser() { + User user = new User("test_user_" + description); + user.addAttributes(this.attributes); + return user; + } + + @Override + public String toString() { + return this.description; + } + } + + public static class IndexSpec { + final String index; + + IndexSpec(String index) { + this.index = index; + } + + @Override + public String toString() { + return this.index; + } + } + + public static class IndicesSpec { + final ImmutableList indices; + + IndicesSpec(String... indices) { + this.indices = ImmutableList.copyOf(indices); + } + + @Override + public String toString() { + return this.indices.toString(); + } + } + + /** + * Determines whether the stateful/denormalized data structure shall be created or not. 
+ */ + static enum Statefulness { + STATEFUL, + NON_STATEFUL + } + + /** + * Reflects the value of the setting plugins.security.dfm_empty_overrides_all + */ + static enum DfmEmptyOverridesAll { + DFM_EMPTY_OVERRIDES_ALL_TRUE, + DFM_EMPTY_OVERRIDES_ALL_FALSE + } + + static DiagnosingMatcher isUnrestricted() { + return new DiagnosingMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A DlsRestriction object that has no restrictions"); + } + + @Override + protected boolean matches(Object item, Description mismatchDescription) { + if (!(item instanceof DlsRestriction)) { + mismatchDescription.appendValue(item).appendText(" is not a DlsRestriction object"); + return false; + } + + DlsRestriction dlsRestriction = (DlsRestriction) item; + + if (dlsRestriction.isUnrestricted()) { + return true; + } else { + mismatchDescription.appendText("The DlsRestriction object is not unrestricted:").appendValue(dlsRestriction); + return false; + } + } + + }; + + } + + @SafeVarargs + static DiagnosingMatcher isRestricted(Matcher... 
queries) { + return new DiagnosingMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A DlsRestriction object that has the restrictions: ") + .appendList("", "", ", ", Arrays.asList(queries)); + } + + @Override + protected boolean matches(Object item, Description mismatchDescription) { + if (!(item instanceof DlsRestriction)) { + mismatchDescription.appendValue(item).appendText(" is not a DlsRestriction object"); + return false; + } + + DlsRestriction dlsRestriction = (DlsRestriction) item; + + if (dlsRestriction.isUnrestricted()) { + mismatchDescription.appendText("The DlsRestriction object is not restricted:").appendValue(dlsRestriction); + return false; + + } + + Set> subMatchers = new HashSet<>(Arrays.asList(queries)); + Set unmatchedQueries = new HashSet<>(dlsRestriction.getQueries()); + + for (DocumentPrivileges.RenderedDlsQuery query : dlsRestriction.getQueries()) { + for (Matcher subMatcher : subMatchers) { + if (subMatcher.matches(query.getQueryBuilder())) { + unmatchedQueries.remove(query); + subMatchers.remove(subMatcher); + break; + } + } + } + + if (unmatchedQueries.isEmpty() && subMatchers.isEmpty()) { + return true; + } + + if (!unmatchedQueries.isEmpty()) { + mismatchDescription.appendText("The DlsRestriction contains unexpected queries:") + .appendValue(unmatchedQueries) + .appendText("\n"); + } + + if (!subMatchers.isEmpty()) { + mismatchDescription.appendText("The DlsRestriction does not contain expected queries: ") + .appendValue(subMatchers) + .appendText("\n"); + } + + return false; + } + + }; + } + + static DiagnosingMatcher isFullyRestricted() { + return new DiagnosingMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A DlsRestriction object that has full restrictions"); + } + + @Override + protected boolean matches(Object item, Description mismatchDescription) { + if (!(item instanceof DlsRestriction)) { + 
mismatchDescription.appendValue(item).appendText(" is not a DlsRestriction object"); + return false; + } + + DlsRestriction dlsRestriction = (DlsRestriction) item; + + if (dlsRestriction.getQueries().size() != 0) { + for (DocumentPrivileges.RenderedDlsQuery query : dlsRestriction.getQueries()) { + if (!query.getQueryBuilder().equals(new MatchNoneQueryBuilder())) { + mismatchDescription.appendText("The DlsRestriction object is not fully restricted:") + .appendValue(dlsRestriction); + return false; + } + } + + return true; + } else { + mismatchDescription.appendText("The DlsRestriction object is not fully restricted:").appendValue(dlsRestriction); + return false; + } + } + + }; + } + + static BaseMatcher termQuery(String field, Object value) { + return new BaseMatcher() { + + @Override + public void describeTo(Description description) { + description.appendText("A TermQueryBuilder object with ").appendValue(field).appendText("=").appendValue(value); + } + + @Override + public boolean matches(Object item) { + if (!(item instanceof BaseTermQueryBuilder)) { + return false; + } + + BaseTermQueryBuilder queryBuilder = (BaseTermQueryBuilder) item; + + if (queryBuilder.fieldName().equals(field) && queryBuilder.value().equals(value)) { + return true; + } else { + return false; + } + } + }; + } + +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldMaskingTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldMaskingTest.java new file mode 100644 index 0000000000..7f4c5bacf2 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldMaskingTest.java @@ -0,0 +1,283 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import com.google.common.collect.ImmutableSet; +import org.apache.lucene.util.BytesRef; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; +import org.opensearch.security.user.User; +import org.opensearch.test.framework.TestSecurityConfig; + +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests on the FieldMasking class - top-level functionality is tested in FieldMaskingTest.Basic. The inner classes FieldMasking.Field + * and FieldMasking.FieldMaskingRule are tested in the correspondingly named inner test suites. + */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ FieldMaskingTest.Basic.class, FieldMaskingTest.Field.class, FieldMaskingTest.FieldMaskingRule.class }) +public class FieldMaskingTest { + + /** + * Top-level unit tests on the FieldMasking class. Note: This does just test the full functionality, as most of it + * is provided by the AbstractRuleBasedPrivileges super-class which is already covered by DocumentPrivilegesTest. 
+ */ + public static class Basic { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + @Test + public void indexPattern_simple() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fm_role_1").indexPermissions("*").maskedFields("masked_field_a").on("index_a*") + ); + + FieldMasking subject = createSubject(roleConfig); + + FieldMasking.FieldMaskingRule rule = subject.getRestriction(ctx("fm_role_1"), "index_a1"); + + assertEquals(new FieldMasking.FieldMaskingExpression("masked_field_a"), rule.get("masked_field_a").getExpression()); + assertNull("other_field_should be unrestricted", rule.get("other_field")); + } + + @Test + public void indexPattern_joined() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fm_role_1").indexPermissions("*").maskedFields("masked_field_a").on("index_a*"), + new TestSecurityConfig.Role("fm_role_2").indexPermissions("*").maskedFields("masked_field_a1_*").on("index_a1") + ); + + FieldMasking subject = createSubject(roleConfig); + + FieldMasking.FieldMaskingRule rule = subject.getRestriction(ctx("fm_role_1", "fm_role_2"), "index_a1"); + + assertEquals(new FieldMasking.FieldMaskingExpression("masked_field_a"), rule.get("masked_field_a").getExpression()); + assertEquals(new FieldMasking.FieldMaskingExpression("masked_field_a1_*"), rule.get("masked_field_a1_x").getExpression()); + + assertNull("other_field_should be unrestricted", rule.get("other_field")); + } + + @Test + public void indexPattern_unrestricted() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fm_role_1").indexPermissions("*").maskedFields("masked_field_a").on("index_a*"), 
+ new TestSecurityConfig.Role("non_fm_role").indexPermissions("*").on("*") + ); + + FieldMasking subject = createSubject(roleConfig); + + FieldMasking.FieldMaskingRule rule = subject.getRestriction(ctx("fm_role_1", "non_fm_role"), "index_a1"); + assertNull("masked_field_a be unrestricted", rule.get("masked_field_a")); + } + + static SecurityDynamicConfiguration roleConfig(TestSecurityConfig.Role... roles) { + return TestSecurityConfig.Role.toRolesConfiguration(roles); + } + + static FieldMasking createSubject(SecurityDynamicConfiguration roleConfig) { + return new FieldMasking( + roleConfig, + INDEX_METADATA.getIndicesLookup(), + FieldMasking.Config.DEFAULT, + Settings.builder().put("plugins.security.dfm_empty_overrides_all", true).build() + ); + } + + static PrivilegesEvaluationContext ctx(String... roles) { + return new PrivilegesEvaluationContext( + new User("test_user"), + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + } + } + + /** + * Unit tests on the FieldMasking.FieldMaskingRule.Field class. 
+ */ + public static class Field { + @Test + public void simple() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*"); + assertEquals("field_*", expression.getSource()); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertNull(expression.getAlgoName()); + assertNull(expression.getRegexReplacements()); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727", field.apply("foobar")); + } + + @Test + public void simple_deviatingDefaultAlgorithm() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*"); + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field( + expression, + FieldMasking.Config.fromSettings( + Settings.builder().put("plugins.security.masked_fields.algorithm.default", "SHA-256").build() + ) + ); + assertEquals("c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2", field.apply("foobar")); + } + + @Test + public void explicitAlgorithm() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*::SHA-256"); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertEquals("SHA-256", expression.getAlgoName()); + assertEquals("field_*::SHA-256", expression.getSource()); + assertNull(expression.getRegexReplacements()); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("c3ab8ff13720e8ad9047dd39466b3c8974e592c2fa383d4a3960714caef0c4f2", field.apply("foobar")); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void explicitAlgorithm_invalid() throws Exception { + new 
FieldMasking.FieldMaskingExpression("field_*::SHADY-777"); + } + + @Test + public void regex_single() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression("field_*:://::+masked+"); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertNull(expression.getAlgoName()); + assertEquals(1, expression.getRegexReplacements().size()); + assertEquals("", expression.getRegexReplacements().get(0).getRegex().toString()); + assertEquals("+masked+", expression.getRegexReplacements().get(0).getReplacement()); + assertEquals("field_*:://::+masked+", expression.getSource()); + assertEquals( + Arrays.asList(new FieldMasking.FieldMaskingExpression.RegexReplacement("//", "+masked+")), + expression.getRegexReplacements() + ); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("foobar", field.apply("foobar")); + assertEquals("foo+masked+bar", field.apply("foobar")); + } + + @Test + public void regex_multi() throws Exception { + FieldMasking.FieldMaskingExpression expression = new FieldMasking.FieldMaskingExpression( + "field_*:://::+masked+::/\\d/::*" + ); + assertEquals(WildcardMatcher.from("field_*"), expression.getPattern()); + assertNull(expression.getAlgoName()); + assertEquals(2, expression.getRegexReplacements().size()); + assertEquals("", expression.getRegexReplacements().get(0).getRegex().toString()); + assertEquals("+masked+", expression.getRegexReplacements().get(0).getReplacement()); + assertEquals("\\d", expression.getRegexReplacements().get(1).getRegex().toString()); + assertEquals("*", expression.getRegexReplacements().get(1).getReplacement()); + assertEquals("field_*:://::+masked+::/\\d/::*", expression.getSource()); + + FieldMasking.FieldMaskingRule.Field field = new FieldMasking.FieldMaskingRule.Field(expression, FieldMasking.Config.DEFAULT); + assertEquals("foobar", field.apply("foobar")); + 
assertEquals("foo**bar", field.apply("foo42bar")); + assertEquals("foo+masked+bar**", field.apply("foobar42")); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void regex_oddParams() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::/a/::b::/c/"); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void regex_invalidRegex() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::/a\\/::b"); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void regex_missingSlashes() throws Exception { + new FieldMasking.FieldMaskingExpression("field_*::a::b"); + } + } + + /** + * Unit tests on the FieldMasking.FieldMaskingRule class. + */ + public static class FieldMaskingRule { + @Test + public void allowAll() { + assertTrue( + "FieldMasking.FieldMaskingRule.ALLOW_ALL identifies itself as such", + FieldMasking.FieldMaskingRule.ALLOW_ALL.isAllowAll() + ); + assertTrue( + "FieldMasking.FieldMaskingRule.ALLOW_ALL identifies itself as such", + FieldMasking.FieldMaskingRule.ALLOW_ALL.isUnrestricted() + ); + assertFalse("FieldMasking.FieldMaskingRule.ALLOW_ALL allows field", FieldMasking.FieldMaskingRule.ALLOW_ALL.isMasked("field")); + assertEquals("FM:[]", FieldMasking.FieldMaskingRule.ALLOW_ALL.toString()); + } + + @Test + public void allowAll_constructed() throws Exception { + FieldMasking.FieldMaskingRule rule = FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT); + assertTrue("FieldMasking.FieldMaskingRule without masked fields should return true for isAllowAll()", rule.isAllowAll()); + assertFalse("FieldMasking.FieldMaskingRule without masked fields allows field", rule.isMasked("field")); + assertEquals("FM:[]", rule.toString()); + } + + @Test + public void simple() throws Exception { + FieldMasking.FieldMaskingRule rule = FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "field_masked_*"); + 
assertFalse("FieldMasking.FieldMaskingRule should return false for isAllowAll()", rule.isAllowAll()); + assertTrue("Rule applies to field field_masked_1", rule.isMasked("field_masked_1")); + assertFalse("Rule does not apply to field field_other", rule.isMasked("field_other")); + assertEquals("96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727", rule.get("field_masked_1").apply("foobar")); + assertEquals( + new BytesRef("96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727".getBytes(StandardCharsets.UTF_8)), + rule.get("field_masked_1").apply(new BytesRef("foobar".getBytes(StandardCharsets.UTF_8))) + ); + assertEquals("FM:[field_masked_*]", rule.toString()); + } + + @Test + public void keyword() throws Exception { + FieldMasking.FieldMaskingRule rule = FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "field_masked"); + assertFalse("FieldMasking.FieldMaskingRule should return false for isAllowAll()", rule.isAllowAll()); + assertTrue("Rule applies to field field_masked_1", rule.isMasked("field_masked")); + assertTrue("Rule applies to field field_masked_1.keyword", rule.isMasked("field_masked.keyword")); + assertEquals( + "96c8d1da7eb153db858d4f0585120319e17ed1162db9e94bee19fb10b6d19727", + rule.get("field_masked.keyword").apply("foobar") + ); + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldPrivilegesTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldPrivilegesTest.java new file mode 100644 index 0000000000..54a32e9972 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FieldPrivilegesTest.java @@ -0,0 +1,296 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.Arrays; +import java.util.Collections; + +import com.google.common.collect.ImmutableSet; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Suite; + +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; +import org.opensearch.security.user.User; +import org.opensearch.test.framework.TestSecurityConfig; + +import static org.opensearch.security.util.MockIndexMetadataBuilder.indices; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +/** + * Unit tests on the FieldMasking class - top-level functionality is tested in FieldMaskingTest.Basic. The inner classes FieldMasking.Field + * and FieldMasking.FieldMaskingRule are tested in the correspondingly named inner test suites. 
+ */ +@RunWith(Suite.class) +@Suite.SuiteClasses({ FieldPrivilegesTest.Basic.class, FieldPrivilegesTest.FlsRule.class, FieldPrivilegesTest.FlsPattern.class }) +public class FieldPrivilegesTest { + public static class Basic { + final static Metadata INDEX_METADATA = // + indices("index_a1", "index_a2", "index_b1", "index_b2")// + .alias("alias_a") + .of("index_a1", "index_a2")// + .build(); + + final static ClusterState CLUSTER_STATE = ClusterState.builder(ClusterState.EMPTY_STATE).metadata(INDEX_METADATA).build(); + + @Test + public void indexPattern_simple_inclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("included_field_a").on("index_a*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1"), "index_a1"); + assertTrue("included_field_a should be allowed", rule.isAllowed("included_field_a")); + assertFalse("Fields other than included_field_a should be not allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_simple_exclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("~excluded_field_a").on("index_a*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1"), "index_a1"); + assertFalse("excluded_field_a should be not allowed", rule.isAllowed("excluded_field_a")); + assertTrue("Fields other than included_field_a should be allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_joined_inclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("included_field_a").on("index_a*"), + new 
TestSecurityConfig.Role("fls_role_2").indexPermissions("*").fls("included_field_a1_*").on("index_a1") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "fls_role_2"), "index_a1"); + assertTrue("included_field_a should be allowed", rule.isAllowed("included_field_a")); + assertTrue("included_field_a1_foo should be allowed", rule.isAllowed("included_field_a1_foo")); + assertFalse( + "Fields other than included_field_a and included_field_a1_foo should be not allowed", + rule.isAllowed("other_field") + ); + } + + @Test + public void indexPattern_joined_exclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("~excluded_field_a").on("index_a*"), + new TestSecurityConfig.Role("fls_role_2").indexPermissions("*").fls("~excluded_field_a1_*").on("index_a1") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "fls_role_2"), "index_a1"); + assertFalse("excluded_field_a should be not allowed", rule.isAllowed("excluded_field_a")); + assertFalse("excluded_field_a1_foo should be not allowed", rule.isAllowed("excluded_field_a1_foo")); + assertTrue("Fields other than included_field_a and included_field_a1_foo should be allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_unrestricted_inclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("included_field_a").on("index_a*"), + new TestSecurityConfig.Role("non_fls_role").indexPermissions("*").on("*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "non_fls_role"), "index_a1"); + assertTrue("included_field_a should be allowed", 
rule.isAllowed("included_field_a")); + assertTrue("other_field should be allowed", rule.isAllowed("other_field")); + } + + @Test + public void indexPattern_unrestricted_exclusive() throws Exception { + SecurityDynamicConfiguration roleConfig = roleConfig( + new TestSecurityConfig.Role("fls_role_1").indexPermissions("*").fls("~excluded_field_a").on("index_a*"), + new TestSecurityConfig.Role("non_fls_role").indexPermissions("*").on("*") + ); + + FieldPrivileges subject = createSubject(roleConfig); + + FieldPrivileges.FlsRule rule = subject.getRestriction(ctx("fls_role_1", "non_fls_role"), "index_a1"); + assertTrue("excluded_field_a should be allowed", rule.isAllowed("excluded_field_a")); + assertTrue("other_field should be allowed", rule.isAllowed("other_field")); + } + + static SecurityDynamicConfiguration roleConfig(TestSecurityConfig.Role... roles) { + return TestSecurityConfig.Role.toRolesConfiguration(roles); + } + + static FieldPrivileges createSubject(SecurityDynamicConfiguration roleConfig) { + return new FieldPrivileges( + roleConfig, + INDEX_METADATA.getIndicesLookup(), + Settings.builder().put("plugins.security.dfm_empty_overrides_all", true).build() + ); + } + + static PrivilegesEvaluationContext ctx(String... 
roles) { + return new PrivilegesEvaluationContext( + new User("test_user"), + ImmutableSet.copyOf(roles), + null, + null, + null, + null, + null, + () -> CLUSTER_STATE + ); + } + } + + public static class FlsRule { + @Test + public void simple_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("field_inclusive"); + assertFalse("FLS rule field_inclusive should be restricted", flsRule.isUnrestricted()); + assertTrue("field_inclusive is allowed", flsRule.isAllowed("field_inclusive")); + assertFalse("other_field is not allowed", flsRule.isAllowed("other_field")); + assertEquals("FLS:[field_inclusive]", flsRule.toString()); + assertEquals(Arrays.asList("field_inclusive"), flsRule.getSource()); + } + + @Test + public void simple_exclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("~field_exclusive"); + assertFalse("FLS rule field_exclusive should be restricted", flsRule.isUnrestricted()); + assertFalse("field_exclusive is not allowed", flsRule.isAllowed("field_exclusive")); + assertTrue("other_field is allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void multi_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("field_inclusive_1", "field_inclusive_2"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertTrue("field_inclusive_1 is allowed", flsRule.isAllowed("field_inclusive_1")); + assertTrue("field_inclusive_2 is allowed", flsRule.isAllowed("field_inclusive_2")); + assertFalse("other_field is not allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void multi_exclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("~field_exclusive_1", "~field_exclusive_2"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertFalse("field_exclusive_1 is not allowed", flsRule.isAllowed("field_exclusive_1")); + 
assertFalse("field_exclusive_1 is not allowed", flsRule.isAllowed("field_exclusive_2")); + assertTrue("other_field is allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void multi_mixed() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("field_inclusive_1", "~field_exclusive_1"); + // This is one of the weird parts. This just REPLICATES the old behavior for backwards compat. + // The behavior is undocumented - if there are exclusions and inclusions, only exclusions are regarded. + // It might make sense to re-think this behavior. + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertFalse("field_exclusive_1 is not allowed", flsRule.isAllowed("field_exclusive_1")); + assertTrue("other_field is allowed", flsRule.isAllowed("other_field")); + } + + @Test + public void nested_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("a.b.c"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertTrue("a.b.c is allowed", flsRule.isAllowed("a.b.c")); + assertFalse("a.b is not allowed for non-objects", flsRule.isAllowed("a.b")); + assertTrue("a.b is not allowed for objects", flsRule.isObjectAllowed("a.b")); + assertFalse("other_field is not allowed", flsRule.isAllowed("other_field")); + assertFalse("a.b.other_field is not allowed", flsRule.isAllowed("a.b.other_field")); + } + + @Test + public void nested_exclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("~a.b.c"); + assertFalse("FLS rule should be restricted", flsRule.isUnrestricted()); + assertFalse("a.b.c is not allowed", flsRule.isAllowed("a.b.c")); + assertTrue("a.b is allowed", flsRule.isAllowed("a.b")); + assertTrue("a.b is allowed for objects", flsRule.isObjectAllowed("a.b")); + } + + @Test + public void wildcard_inclusive() throws Exception { + FieldPrivileges.FlsRule flsRule = FieldPrivileges.FlsRule.of("*"); + assertTrue("FLS rule 
* is unrestricted", flsRule.isUnrestricted()); + assertTrue("anything is allowed", flsRule.isAllowed("anything")); + assertEquals("FLS:*", flsRule.toString()); + } + + } + + public static class FlsPattern { + @Test + public void simple_inclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("field_inclusive"); + assertFalse("field_inclusive should be not excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("field_inclusive"), flsPattern.getPattern()); + assertEquals("field_inclusive", flsPattern.getSource()); + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test + public void simple_exclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("~field_exclusive"); + assertTrue("field_exclusive should be excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("field_exclusive"), flsPattern.getPattern()); + assertEquals("~field_exclusive", flsPattern.getSource()); + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test + public void simple_exclusive2() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("!field_exclusive"); + assertTrue("field_exclusive should be excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("field_exclusive"), flsPattern.getPattern()); + assertEquals("!field_exclusive", flsPattern.getSource()); + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test + public void nested_inclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("a.b.c_inclusive"); + assertEquals(WildcardMatcher.from("a.b.c_inclusive"), flsPattern.getPattern()); + assertEquals( + Arrays.asList(new FieldPrivileges.FlsPattern("a"), new FieldPrivileges.FlsPattern("a.b")), + flsPattern.getParentObjectPatterns() + ); + } + + @Test + public 
void nested_exclusive() throws Exception { + FieldPrivileges.FlsPattern flsPattern = new FieldPrivileges.FlsPattern("~a.b.c_exclusive"); + assertTrue("a.b.c_exclusive should be excluded", flsPattern.isExcluded()); + assertEquals(WildcardMatcher.from("a.b.c_exclusive"), flsPattern.getPattern()); + // Exclusive patterns do not need an explicit inclusion of the parent objects. Thus, we get an empty list here + assertEquals(Collections.emptyList(), flsPattern.getParentObjectPatterns()); + } + + @Test(expected = PrivilegesConfigurationValidationException.class) + public void invalidRegex() throws Exception { + new FieldPrivileges.FlsPattern("/a\\/"); + } + } +} diff --git a/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilterTest.java b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilterTest.java new file mode 100644 index 0000000000..2b28d6330e --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilterTest.java @@ -0,0 +1,373 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import com.google.common.collect.ImmutableSet; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.Assert; +import org.junit.Test; + +import static java.nio.charset.StandardCharsets.UTF_8; + +public class FlsDocumentFilterTest { + + @Test + public void identity() throws Exception { + String sourceDocument = """ + { + "big_integer": 12345678901234567890123456789012345678901234567890, + "string": "x", + "big_float": 12345678901234567890123456789012345678901234567890.123456789, + "object": { + "attribute": "x", + "nested_object": { + "x": "y" + }, + "nested_array": [1,2,3] + }, + "array": [ + 1, + "x", + { + "foo": "bar" + }, + [1,2,3,4] + ] + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.ALLOW_ALL, + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + assertJsonStructurallyEquivalent(sourceDocument, result); + } + + @Test + public void filterSimpleAttribute_exclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": 42, + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "a": 41, + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterSimpleAttribute_inclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": 42, + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "b": 42 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public 
void filterObject_exclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": { + "x": 123, + "y": 456 + }, + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "a": 41, + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterObjectAttribute_exclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": { + "x": 123, + "y": 456 + }, + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b.x"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "a": 41, + "b": { + "y": 456 + }, + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterObjectAttribute_inclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": { + "x": 123, + "y": 456 + }, + "c": 43, + "d": {} + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("b.x", "c"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "b": { + "x": 123 + }, + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterArrayContainingObject_exclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": [ + {"x": 12, "y": 34}, + {"x": 56, "y": 78} + ], + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b.x"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "a": 41, + "b": [ + {"y": 34}, 
+ {"y": 78} + ], + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void filterArrayContainingObject_inclusion() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": [ + {"x": 12, "y": 34}, + {"x": 56, "y": 78} + ], + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("b.y", "c"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "b": [ + {"y": 34}, + {"y": 78} + ], + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void keepMetadata() throws Exception { + String sourceDocument = """ + { + "a": 41, + "b": 42, + "c": 43 + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.of("~b"), + FieldMasking.FieldMaskingRule.ALLOW_ALL, + ImmutableSet.of("b") + ); + + String expectedDocument = """ + { + "a": 41, + "b": 42, + "c": 43 + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void maskSimpleAttribute() throws Exception { + String sourceDocument = """ + { + "a": "x", + "b": "y", + "c": "z" + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + FieldPrivileges.FlsRule.ALLOW_ALL, + FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "b"), + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "a": "x", + "b": "1147ddc9246d856b1ce322f1dc9eeda895b56d545c324510c2eca47a9dcc5d3f", + "c": "z" + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + @Test + public void maskObjectAttribute() throws Exception { + String sourceDocument = """ + { + "a": "x", + "b": { + "b1": "y1", + "b2": "y2" + }, + "c": "z" + } + """; + + byte[] result = FlsDocumentFilter.filter( + sourceDocument.getBytes(UTF_8), + 
FieldPrivileges.FlsRule.ALLOW_ALL, + FieldMasking.FieldMaskingRule.of(FieldMasking.Config.DEFAULT, "b.b1"), + ImmutableSet.of() + ); + + String expectedDocument = """ + { + "a": "x", + "b": { + "b1": "19937da9d0b0fb38c3ce369bed130b647fa547914d675e09a62ba260a6d7811b", + "b2": "y2" + }, + "c": "z" + } + """; + + assertJsonStructurallyEquivalent(expectedDocument, result); + } + + private static void assertJsonStructurallyEquivalent(String expected, byte[] actual) throws Exception { + ObjectMapper objectMapper = new ObjectMapper(); + + JsonNode expectedTree = objectMapper.readTree(expected); + JsonNode actualTree = objectMapper.readTree(actual); + + Assert.assertEquals("JSON is not structurally equivalent", expectedTree, actualTree); + } + +} diff --git a/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java b/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java index c41b5f4cda..ade540f85c 100644 --- a/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java +++ b/src/integrationTest/java/org/opensearch/security/rest/WhoAmITests.java @@ -77,7 +77,7 @@ public class WhoAmITests { protected final String expectedAuthorizedBody = "{\"dn\":null,\"is_admin\":false,\"is_node_certificate_request\":false}"; protected final String expectedUnuauthorizedBody = - "no permissions for [security:whoamiprotected] and User [name=who_am_i_user_no_perm, backend_roles=[], requestedTenant=null]"; + "no permissions for [any of [cluster:admin/opendistro_security/whoamiprotected, security:whoamiprotected]] and User [name=who_am_i_user_no_perm, backend_roles=[], requestedTenant=null]"; public static final String WHOAMI_ENDPOINT = "_plugins/_security/whoami"; public static final String WHOAMI_PROTECTED_ENDPOINT = "_plugins/_security/whoamiprotected"; diff --git a/src/integrationTest/java/org/opensearch/security/util/MockIndexMetadataBuilder.java b/src/integrationTest/java/org/opensearch/security/util/MockIndexMetadataBuilder.java new file mode 
100644 index 0000000000..cb0e4f32c4 --- /dev/null +++ b/src/integrationTest/java/org/opensearch/security/util/MockIndexMetadataBuilder.java @@ -0,0 +1,134 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.util; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.opensearch.Version; +import org.opensearch.cluster.metadata.AliasMetadata; +import org.opensearch.cluster.metadata.DataStream; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.Index; + +/** + * Creates mocks of org.opensearch.cluster.metadata.IndexAbstraction maps. Useful for unit testing code which + * operates on index metadata. + */ +public class MockIndexMetadataBuilder { + + private static final Settings INDEX_SETTINGS = Settings.builder() + .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + + private Metadata.Builder delegate = new Metadata.Builder(); + private Map nameToIndexMetadataBuilderMap = new HashMap<>(); + + public static MockIndexMetadataBuilder indices(String... indices) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String index : indices) { + builder.index(index); + } + + return builder; + } + + public static MockIndexMetadataBuilder dataStreams(String... 
dataStreams) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String dataStream : dataStreams) { + builder.dataStream(dataStream); + } + + return builder; + } + + public Metadata build() { + for (IndexMetadata.Builder indexMetadataBuilder : nameToIndexMetadataBuilderMap.values()) { + this.delegate.put(indexMetadataBuilder); + } + + return this.delegate.build(); + } + + public MockIndexMetadataBuilder index(String indexName) { + return index(indexName, IndexMetadata.State.OPEN); + } + + public MockIndexMetadataBuilder index(String indexName, IndexMetadata.State state) { + getIndexMetadataBuilder(indexName, state); + return this; + } + + public AliasBuilder alias(String alias) { + return new AliasBuilder(alias); + } + + public MockIndexMetadataBuilder dataStream(String dataStream) { + return dataStream(dataStream, 3); + } + + public MockIndexMetadataBuilder dataStream(String dataStream, int generations) { + List backingIndices = new ArrayList<>(); + + for (int i = 1; i <= generations; i++) { + String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, i); + backingIndices.add(new Index(backingIndexName, backingIndexName)); + getIndexMetadataBuilder(backingIndexName, IndexMetadata.State.OPEN); + } + + DataStream dataStreamMetadata = new DataStream(dataStream, new DataStream.TimestampField("@timestamp"), backingIndices); + this.delegate.put(dataStreamMetadata); + + return this; + } + + private IndexMetadata.Builder getIndexMetadataBuilder(String indexName, IndexMetadata.State state) { + IndexMetadata.Builder result = this.nameToIndexMetadataBuilderMap.get(indexName); + + if (result != null) { + return result; + } + + result = new IndexMetadata.Builder(indexName).state(state) + .settings(Settings.builder().put(INDEX_SETTINGS).put(IndexMetadata.SETTING_INDEX_UUID, indexName).build()); + + this.nameToIndexMetadataBuilderMap.put(indexName, result); + + return result; + } + + public class AliasBuilder { + private String 
aliasName; + + private AliasBuilder(String alias) { + this.aliasName = alias; + } + + public MockIndexMetadataBuilder of(String... indices) { + AliasMetadata aliasMetadata = new AliasMetadata.Builder(aliasName).build(); + + for (String index : indices) { + IndexMetadata.Builder indexMetadataBuilder = getIndexMetadataBuilder(index, IndexMetadata.State.OPEN); + indexMetadataBuilder.putAlias(aliasMetadata); + } + + return MockIndexMetadataBuilder.this; + } + } +} diff --git a/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java b/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java index 9edf77f75c..38def260ed 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java +++ b/src/integrationTest/java/org/opensearch/test/framework/TestSecurityConfig.java @@ -42,6 +42,7 @@ import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -57,11 +58,19 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.Strings; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.query.QueryBuilder; import org.opensearch.security.hasher.PasswordHasher; import org.opensearch.security.hasher.PasswordHasherFactory; import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; +import org.opensearch.security.securityconf.impl.v7.ConfigV7; +import org.opensearch.security.securityconf.impl.v7.InternalUserV7; +import org.opensearch.security.securityconf.impl.v7.RoleMappingsV7; 
+import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.test.framework.cluster.OpenSearchClientProvider.UserCredentialsHolder; @@ -160,6 +169,13 @@ public TestSecurityConfig user(User user) { return this; } + public TestSecurityConfig users(User... users) { + for (User user : users) { + this.user(user); + } + return this; + } + public TestSecurityConfig withRestAdminUser(final String name, final String... permissions) { if (!internalUsers.containsKey(name)) { user(new User(name, "REST Admin with permissions: " + Arrays.toString(permissions)).reserved(true)); @@ -517,6 +533,10 @@ public Object getAttribute(String attributeName) { return attributes.get(attributeName); } + public Map getAttributes() { + return this.attributes; + } + @Override public XContentBuilder toXContent(XContentBuilder xContentBuilder, Params params) throws IOException { xContentBuilder.startObject(); @@ -665,6 +685,22 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(name, clusterPermissions, indexPermissions, hidden, reserved, description); } + + public static SecurityDynamicConfiguration toRolesConfiguration( + TestSecurityConfig.Role... roles + ) { + try { + return SecurityDynamicConfiguration.fromJson( + configToJson(CType.ROLES, Stream.of(roles).collect(Collectors.toMap(r -> r.name, r -> r))), + CType.ROLES, + 2, + 0, + 0 + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } } public static class RoleMapping implements ToXContentObject { @@ -778,6 +814,11 @@ public IndexPermission dls(String dlsQuery) { return this; } + public IndexPermission dls(QueryBuilder dlsQuery) { + this.dlsQuery = Strings.toString(MediaTypeRegistry.JSON, dlsQuery); + return this; + } + public IndexPermission fls(String... 
fls) { this.fls = Arrays.asList(fls); return this; @@ -1023,6 +1064,52 @@ public void updateInternalUsersConfiguration(Client client, List users) { updateConfigInIndex(client, CType.INTERNALUSERS, userMap); } + public SecurityDynamicConfiguration getSecurityConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson( + singleEntryConfigToJson(CType.CONFIG, CType.CONFIG.toLCString(), config), + CType.CONFIG, + 2, + 0, + 0 + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration getInternalUserConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.INTERNALUSERS, internalUsers), CType.INTERNALUSERS, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration getRolesConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.ROLES, roles), CType.ROLES, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration getRoleMappingsConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.ROLESMAPPING, rolesMapping), CType.ROLESMAPPING, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public SecurityDynamicConfiguration geActionGroupsConfiguration() { + try { + return SecurityDynamicConfiguration.fromJson(configToJson(CType.ACTIONGROUPS, actionGroups), CType.ACTIONGROUPS, 2, 0, 0); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + static String hashPassword(final String clearTextPassword) { return passwordHasher.hash(clearTextPassword.toCharArray()); } @@ -1095,25 +1182,30 @@ private static String configToJson(CType configType, Map configType, String configurationRoot, ToXContentObject config) + throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + + builder.startObject(); + builder.startObject("_meta"); + 
builder.field("type", configType.toLCString()); + builder.field("config_version", 2); + builder.endObject(); + + builder.field(configurationRoot, config); + + builder.endObject(); + + return builder.toString(); + } + private void writeSingleEntryConfigToIndex(Client client, CType configType, ToXContentObject config) { writeSingleEntryConfigToIndex(client, configType, configType.toLCString(), config); } private void writeSingleEntryConfigToIndex(Client client, CType configType, String configurationRoot, ToXContentObject config) { try { - XContentBuilder builder = XContentFactory.jsonBuilder(); - - builder.startObject(); - builder.startObject("_meta"); - builder.field("type", configType.toLCString()); - builder.field("config_version", 2); - builder.endObject(); - - builder.field(configurationRoot, config); - - builder.endObject(); - - String json = builder.toString(); + String json = singleEntryConfigToJson(configType, configurationRoot, config); log.info("Writing security plugin configuration into index " + configType + ":\n" + json); diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java index 894bb5baa9..5ae8c0b125 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java +++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalCluster.java @@ -31,6 +31,7 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -85,7 +86,9 @@ public class LocalCluster extends ExternalResource implements AutoCloseable, Ope private final List> plugins; private final ClusterManager clusterManager; private final TestSecurityConfig testSecurityConfig; + private Map nodeSpecificOverride; private Settings nodeOverride; + private Integer expectedNodeStartupCount; private final 
String clusterName; private final MinimumSecuritySettingsSupplierFactory minimumOpenSearchSettingsSupplierFactory; private final TestCertificates testCertificates; @@ -100,6 +103,7 @@ private LocalCluster( String clusterName, TestSecurityConfig testSgConfig, boolean sslOnly, + Map nodeSpecificOverride, Settings nodeOverride, ClusterManager clusterManager, List> plugins, @@ -108,13 +112,15 @@ private LocalCluster( Map remotes, List testIndices, boolean loadConfigurationIntoIndex, - String defaultConfigurationInitDirectory + String defaultConfigurationInitDirectory, + Integer expectedNodeStartupCount ) { this.plugins = plugins; this.testCertificates = testCertificates; this.clusterManager = clusterManager; this.testSecurityConfig = testSgConfig; this.sslOnly = sslOnly; + this.nodeSpecificOverride = nodeSpecificOverride; this.nodeOverride = nodeOverride; this.clusterName = clusterName; this.minimumOpenSearchSettingsSupplierFactory = new MinimumSecuritySettingsSupplierFactory(testCertificates); @@ -125,6 +131,7 @@ private LocalCluster( if (StringUtils.isNoneBlank(defaultConfigurationInitDirectory)) { System.setProperty(INIT_CONFIGURATION_DIR, defaultConfigurationInitDirectory); } + this.expectedNodeStartupCount = expectedNodeStartupCount; } public String getSnapshotDirPath() { @@ -232,6 +239,7 @@ private void start() { try { NodeSettingsSupplier nodeSettingsSupplier = minimumOpenSearchSettingsSupplierFactory.minimumOpenSearchSettings( sslOnly, + nodeSpecificOverride, nodeOverride ); localOpenSearchCluster = new LocalOpenSearchCluster( @@ -239,7 +247,8 @@ private void start() { clusterManager, nodeSettingsSupplier, plugins, - testCertificates + testCertificates, + expectedNodeStartupCount ); localOpenSearchCluster.start(); @@ -312,8 +321,10 @@ public CertificateData getAdminCertificate() { public static class Builder { private final Settings.Builder nodeOverrideSettingsBuilder = Settings.builder(); + private final Map nodeSpecificOverrideSettingsBuilder = new 
HashMap<>(); private boolean sslOnly = false; + private Integer expectedNodeStartupCount; private final List> plugins = new ArrayList<>(); private Map remoteClusters = new HashMap<>(); private List clusterDependencies = new ArrayList<>(); @@ -365,6 +376,11 @@ public Builder sslOnly(boolean sslOnly) { return this; } + public Builder extectedNodeStartupCount(int expectedNodeStartupCount) { + this.expectedNodeStartupCount = expectedNodeStartupCount; + return this; + } + public Builder nodeSettings(Map settings) { settings.forEach((key, value) -> { if (value instanceof List) { @@ -378,6 +394,25 @@ public Builder nodeSettings(Map settings) { return this; } + public Builder nodeSpecificSettings(int nodeNumber, Map settings) { + if (!nodeSpecificOverrideSettingsBuilder.containsKey(nodeNumber)) { + Settings.Builder builderCopy = Settings.builder(); + builderCopy.put(nodeOverrideSettingsBuilder.build()); + nodeSpecificOverrideSettingsBuilder.put(nodeNumber, builderCopy); + } + Settings.Builder nodeSettingsBuilder = nodeSpecificOverrideSettingsBuilder.get(nodeNumber); + settings.forEach((key, value) -> { + if (value instanceof List) { + List values = ((List) value).stream().map(String::valueOf).collect(Collectors.toList()); + nodeSettingsBuilder.putList(key, values); + } else { + nodeSettingsBuilder.put(key, String.valueOf(value)); + } + }); + + return this; + } + /** * Adds additional plugins to the cluster */ @@ -412,6 +447,14 @@ public Builder indices(TestIndex... indices) { return this; } + /** + * Specifies test indices that shall be created upon startup of the cluster. + */ + public Builder indices(Collection indices) { + this.testIndices.addAll(indices); + return this; + } + public Builder users(TestSecurityConfig.User... 
users) { for (TestSecurityConfig.User user : users) { testSecurityConfig.user(user); @@ -512,10 +555,15 @@ public LocalCluster build() { } clusterName += "_" + num.incrementAndGet(); Settings settings = nodeOverrideSettingsBuilder.build(); + Map nodeSpecificSettings = new HashMap<>(); + for (Map.Entry entry : nodeSpecificOverrideSettingsBuilder.entrySet()) { + nodeSpecificSettings.put(entry.getKey(), entry.getValue().build()); + } return new LocalCluster( clusterName, testSecurityConfig, sslOnly, + nodeSpecificSettings, settings, clusterManager, plugins, @@ -524,7 +572,8 @@ public LocalCluster build() { remoteClusters, testIndices, loadConfigurationIntoIndex, - defaultConfigurationInitDirectory + defaultConfigurationInitDirectory, + expectedNodeStartupCount ); } catch (Exception e) { log.error("Failed to build LocalCluster", e); diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java index 96da63d9fb..8570c3d398 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java +++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/LocalOpenSearchCluster.java @@ -97,6 +97,7 @@ public class LocalOpenSearchCluster { private final List> additionalPlugins; private final List nodes = new ArrayList<>(); private final TestCertificates testCertificates; + private final Integer expectedNodeStartupCount; private File clusterHomeDir; private List seedHosts; @@ -112,13 +113,15 @@ public LocalOpenSearchCluster( ClusterManager clusterManager, NodeSettingsSupplier nodeSettingsSupplier, List> additionalPlugins, - TestCertificates testCertificates + TestCertificates testCertificates, + Integer expectedNodeStartCount ) { this.clusterName = clusterName; this.clusterManager = clusterManager; this.nodeSettingsSupplier = nodeSettingsSupplier; this.additionalPlugins = additionalPlugins; 
this.testCertificates = testCertificates; + this.expectedNodeStartupCount = expectedNodeStartCount; try { createClusterDirectory(clusterName); } catch (IOException e) { @@ -198,7 +201,12 @@ public void start() throws Exception { log.info("Startup finished. Waiting for GREEN"); - waitForCluster(ClusterHealthStatus.GREEN, TimeValue.timeValueSeconds(10), nodes.size()); + int expectedCount = nodes.size(); + if (expectedNodeStartupCount != null) { + expectedCount = expectedNodeStartupCount; + } + + waitForCluster(ClusterHealthStatus.GREEN, TimeValue.timeValueSeconds(10), expectedCount); log.info("Started: {}", this); } diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java index 4ad5f8420e..34a105ea39 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java +++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/MinimumSecuritySettingsSupplierFactory.java @@ -28,6 +28,8 @@ package org.opensearch.test.framework.cluster; +import java.util.Map; + import org.opensearch.common.settings.Settings; import org.opensearch.security.support.ConfigConstants; import org.opensearch.test.framework.certificate.TestCertificates; @@ -51,6 +53,16 @@ public NodeSettingsSupplier minimumOpenSearchSettings(boolean sslOnly, Settings return i -> minimumOpenSearchSettingsBuilder(i, sslOnly).put(other).build(); } + public NodeSettingsSupplier minimumOpenSearchSettings(boolean sslOnly, Map nodeOverride, Settings other) { + return i -> { + Settings override = nodeOverride.get(i); + if (override != null) { + return minimumOpenSearchSettingsBuilder(i, sslOnly).put(other).put(override).build(); + } + return minimumOpenSearchSettingsBuilder(i, sslOnly).put(other).build(); + }; + } + private Settings.Builder minimumOpenSearchSettingsBuilder(int node, boolean 
sslOnly) { Settings.Builder builder = Settings.builder(); diff --git a/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java b/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java index 139378fd22..d9384dda61 100644 --- a/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java +++ b/src/integrationTest/java/org/opensearch/test/framework/cluster/PortAllocator.java @@ -63,7 +63,7 @@ public SortedSet allocate(String clientName, int numRequested, int minP int startPort = minPort; - while (!isAvailable(startPort)) { + while (!isPortRangeAvailable(startPort, startPort + numRequested)) { startPort += 10; } @@ -72,8 +72,10 @@ public SortedSet allocate(String clientName, int numRequested, int minP for (int currentPort = startPort; foundPorts.size() < numRequested && currentPort < SocketUtils.PORT_RANGE_MAX && (currentPort - startPort) < 10000; currentPort++) { - if (allocate(clientName, currentPort)) { - foundPorts.add(currentPort); + if (isAvailable(currentPort)) { + if (allocate(clientName, currentPort)) { + foundPorts.add(currentPort); + } } } @@ -121,6 +123,15 @@ private boolean isAvailable(int port) { return !isAllocated(port) && !isInUse(port); } + private boolean isPortRangeAvailable(int port, int endPort) { + for (int i = port; i <= endPort; i++) { + if (!isAvailable(i)) { + return false; + } + } + return true; + } + private synchronized boolean isAllocated(int port) { AllocatedPort allocatedPort = this.allocatedPorts.get(port); diff --git a/src/main/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticator.java b/src/main/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticator.java index b3cb7bfe8c..2f01a9225d 100644 --- a/src/main/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticator.java +++ b/src/main/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticator.java @@ -33,6 +33,7 @@ import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.settings.Settings; 
import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.DefaultObjectMapper; import org.opensearch.security.auth.HTTPAuthenticator; import org.opensearch.security.filter.SecurityRequest; import org.opensearch.security.filter.SecurityResponse; @@ -185,7 +186,22 @@ private AuthCredentials extractCredentials0(final SecurityRequest request) { final AuthCredentials ac = new AuthCredentials(subject, roles).markComplete(); for (Entry claim : claims.entrySet()) { - ac.addAttribute("attr.jwt." + claim.getKey(), String.valueOf(claim.getValue())); + String key = "attr.jwt." + claim.getKey(); + Object value = claim.getValue(); + + if (value instanceof Collection) { + try { + // Convert the list to a JSON array string + String jsonValue = DefaultObjectMapper.writeValueAsString(value, false); + ac.addAttribute(key, jsonValue); + } catch (Exception e) { + log.warn("Failed to convert list claim to JSON for key: " + key, e); + // Fallback to string representation + ac.addAttribute(key, String.valueOf(value)); + } + } else { + ac.addAttribute(key, String.valueOf(value)); + } } return ac; diff --git a/src/main/java/com/amazon/dlic/auth/http/saml/HTTPSamlAuthenticator.java b/src/main/java/com/amazon/dlic/auth/http/saml/HTTPSamlAuthenticator.java index ae3d1c9128..20e0b25b5c 100644 --- a/src/main/java/com/amazon/dlic/auth/http/saml/HTTPSamlAuthenticator.java +++ b/src/main/java/com/amazon/dlic/auth/http/saml/HTTPSamlAuthenticator.java @@ -88,6 +88,7 @@ public class HTTPSamlAuthenticator implements HTTPAuthenticator, Destroyable { private static final Pattern PATTERN_PATH_PREFIX = Pattern.compile(REGEX_PATH_PREFIX); private static boolean openSamlInitialized = false; + public static final String SAML_TYPE = "saml"; private String subjectKey; private String rolesKey; @@ -175,7 +176,7 @@ public AuthCredentials extractCredentials(final SecurityRequest request, final T @Override public String getType() { - return "saml"; + return SAML_TYPE; } @Override 
diff --git a/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java b/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java index f93cf0aa13..71bc78580e 100644 --- a/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java +++ b/src/main/java/org/opensearch/security/OpenSearchSecurityPlugin.java @@ -38,17 +38,7 @@ import java.security.PrivilegedAction; import java.security.Provider; import java.security.Security; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; +import java.util.*; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiFunction; import java.util.function.Function; @@ -159,7 +149,6 @@ import org.opensearch.security.configuration.DlsFlsRequestValve; import org.opensearch.security.configuration.DlsFlsValveImpl; import org.opensearch.security.configuration.PrivilegesInterceptorImpl; -import org.opensearch.security.configuration.Salt; import org.opensearch.security.configuration.SecurityFlsDlsIndexSearcherWrapper; import org.opensearch.security.dlic.rest.api.Endpoint; import org.opensearch.security.dlic.rest.api.SecurityRestApiActions; @@ -174,9 +163,12 @@ import org.opensearch.security.http.XFFResolver; import org.opensearch.security.identity.NoopPluginSubject; import org.opensearch.security.identity.SecurityTokenManager; +import org.opensearch.security.privileges.ActionPrivileges; +import org.opensearch.security.privileges.PrivilegesEvaluationException; import org.opensearch.security.privileges.PrivilegesEvaluator; import org.opensearch.security.privileges.PrivilegesInterceptor; import org.opensearch.security.privileges.RestLayerPrivilegesEvaluator; +import org.opensearch.security.privileges.dlsfls.DlsFlsBaseContext; import 
org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resources.ResourceAccessHandler; import org.opensearch.security.resources.ResourceManagementRepository; @@ -189,6 +181,7 @@ import org.opensearch.security.rest.SecurityWhoAmIAction; import org.opensearch.security.rest.TenantInfoAction; import org.opensearch.security.securityconf.DynamicConfigFactory; +import org.opensearch.security.securityconf.impl.CType; import org.opensearch.security.setting.OpensearchDynamicSetting; import org.opensearch.security.setting.TransportPassiveAuthSetting; import org.opensearch.security.ssl.ExternalSecurityKeyStore; @@ -205,8 +198,6 @@ import org.opensearch.security.support.ModuleInfo; import org.opensearch.security.support.ReflectionHelper; import org.opensearch.security.support.SecuritySettings; -import org.opensearch.security.support.SecurityUtils; -import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.transport.DefaultInterClusterRequestEvaluator; import org.opensearch.security.transport.InterClusterRequestEvaluator; import org.opensearch.security.transport.SecurityInterceptor; @@ -276,9 +267,9 @@ public final class OpenSearchSecurityPlugin extends OpenSearchSecuritySSLPlugin private volatile IndexResolverReplacer irr; private final AtomicReference namedXContentRegistry = new AtomicReference<>(NamedXContentRegistry.EMPTY);; private volatile DlsFlsRequestValve dlsFlsValve = null; - private volatile Salt salt; private volatile OpensearchDynamicSetting transportPassiveAuthSetting; private volatile PasswordHasher passwordHasher; + private volatile DlsFlsBaseContext dlsFlsBaseContext; private ResourceManagementRepository rmr; private ResourceAccessHandler resourceAccessHandler; private final Set indicesToListen = new HashSet<>(); @@ -662,7 +653,7 @@ public List getRestHandlers( evaluator, threadPool, Objects.requireNonNull(auditLog), - sks, + sslSettingsManager, Objects.requireNonNull(userService), 
sslCertReloadEnabled, passwordHasher @@ -718,7 +709,8 @@ public void onIndexModule(IndexModule indexModule) { auditLog, ciol, evaluator, - salt + dlsFlsValve::getCurrentConfig, + dlsFlsBaseContext ) ); @@ -747,28 +739,18 @@ public void clear(String reason) { @Override public Weight doCache(Weight weight, QueryCachingPolicy policy) { - @SuppressWarnings("unchecked") - final Map> allowedFlsFields = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - ); - - if (SecurityUtils.evalMap(allowedFlsFields, index().getName()) != null) { - return weight; - } else { - @SuppressWarnings("unchecked") - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER - ); - - if (SecurityUtils.evalMap(maskedFieldsMap, index().getName()) != null) { + try { + if (dlsFlsValve.hasFlsOrFieldMasking(index().getName())) { + // Do not cache return weight; } else { return nodeCache.doCache(weight, policy); } + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating FLS configuration", e); + // We fall back to no caching + return weight; } - } }); @@ -841,17 +823,16 @@ public void onQueryPhase(SearchContext searchContext, long tookInNanos) { return; } - @SuppressWarnings("unchecked") - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER - ); - final String maskedEval = SecurityUtils.evalMap(maskedFieldsMap, indexModule.getIndex().getName()); - if (maskedEval != null) { - final Set mf = maskedFieldsMap.get(maskedEval); - if (mf != null && !mf.isEmpty()) { + try { + if (dlsFlsValve.hasFieldMasking(indexModule.getIndex().getName())) { dlsFlsValve.onQueryPhase(queryResult); } + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating field masking 
config", e); + // It is safe to call the code nevertheless, as this code does not enforce any privileges. + // Rather, it performs some fixes to keep aggregations happy after field masking has been + // applied. If no field masking has been applied, this should be a no-op. + dlsFlsValve.onQueryPhase(queryResult); } } }.toListener()); @@ -1085,10 +1066,9 @@ public Collection createComponents( final ClusterInfoHolder cih = new ClusterInfoHolder(this.cs.getClusterName().value()); this.cs.addListener(cih); - this.salt = Salt.from(settings); final IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(threadPool.getThreadContext()); - irr = new IndexResolverReplacer(resolver, clusterService, cih); + irr = new IndexResolverReplacer(resolver, clusterService::state, cih); final String DEFAULT_INTERCLUSTER_REQUEST_EVALUATOR_CLASS = DefaultInterClusterRequestEvaluator.class.getName(); InterClusterRequestEvaluator interClusterRequestEvaluator = new DefaultInterClusterRequestEvaluator(settings); @@ -1106,18 +1086,9 @@ public Collection createComponents( namedXContentRegistry.set(xContentRegistry); if (SSLConfig.isSslOnlyMode()) { - dlsFlsValve = new DlsFlsRequestValve.NoopDlsFlsRequestValve(); auditLog = new NullAuditLog(); privilegesInterceptor = new PrivilegesInterceptor(resolver, clusterService, localClient, threadPool); } else { - dlsFlsValve = new DlsFlsValveImpl( - settings, - localClient, - clusterService, - resolver, - xContentRegistry, - threadPool.getThreadContext() - ); auditLog = new AuditLogImpl(settings, configPath, localClient, threadPool, resolver, clusterService, environment); privilegesInterceptor = new PrivilegesInterceptorImpl(resolver, clusterService, localClient, threadPool); } @@ -1140,7 +1111,9 @@ public Collection createComponents( evaluator = new PrivilegesEvaluator( clusterService, + clusterService::state, threadPool, + threadPool.getThreadContext(), cr, resolver, auditLog, @@ -1151,6 +1124,23 @@ public Collection createComponents( 
namedXContentRegistry.get() ); + dlsFlsBaseContext = new DlsFlsBaseContext(evaluator, threadPool.getThreadContext(), adminDns); + + if (SSLConfig.isSslOnlyMode()) { + dlsFlsValve = new DlsFlsRequestValve.NoopDlsFlsRequestValve(); + } else { + dlsFlsValve = new DlsFlsValveImpl( + settings, + localClient, + clusterService, + resolver, + xContentRegistry, + threadPool, + dlsFlsBaseContext + ); + cr.subscribeOnChange(configMap -> { ((DlsFlsValveImpl) dlsFlsValve).updateConfiguration(cr.getConfiguration(CType.ROLES)); }); + } + sf = new SecurityFilter(settings, evaluator, adminDns, dlsFlsValve, auditLog, threadPool, cs, compatConfig, irr, xffResolver); final String principalExtractorClass = settings.get(SSLConfigConstants.SECURITY_SSL_TRANSPORT_PRINCIPAL_EXTRACTOR_CLASS, null); @@ -1161,7 +1151,7 @@ public Collection createComponents( principalExtractor = ReflectionHelper.instantiatePrincipalExtractor(principalExtractorClass); } - restLayerEvaluator = new RestLayerPrivilegesEvaluator(clusterService, threadPool); + restLayerEvaluator = new RestLayerPrivilegesEvaluator(evaluator); securityRestHandler = new SecurityRestFilter( backendRegistry, @@ -1179,16 +1169,12 @@ public Collection createComponents( dcf.registerDCFListener(irr); dcf.registerDCFListener(xffResolver); dcf.registerDCFListener(evaluator); - dcf.registerDCFListener(restLayerEvaluator); dcf.registerDCFListener(securityRestHandler); dcf.registerDCFListener(tokenManager); if (!(auditLog instanceof NullAuditLog)) { // Don't register if advanced modules is disabled in which case auditlog is instance of NullAuditLog dcf.registerDCFListener(auditLog); } - if (dlsFlsValve instanceof DlsFlsValveImpl) { - dcf.registerDCFListener(dlsFlsValve); - } cr.setDynamicConfigFactory(dcf); @@ -1234,9 +1220,8 @@ public Collection createComponents( components.add(userService); components.add(passwordHasher); - if (!ExternalSecurityKeyStore.hasExternalSslContext(settings)) { - components.add(sks); - } + 
components.add(sslSettingsManager); + final var allowDefaultInit = settings.getAsBoolean(SECURITY_ALLOW_DEFAULT_INIT_SECURITYINDEX, false); final var useClusterState = useClusterStateToInitSecurityConfig(settings); if (!SSLConfig.isSslOnlyMode() && !isDisabled(settings) && allowDefaultInit && useClusterState) { @@ -1400,7 +1385,7 @@ public List> getSettings() { settings.add(Setting.simpleString(ConfigConstants.SECURITY_CONFIG_INDEX_NAME, Property.NodeScope, Property.Filtered)); settings.add(Setting.groupSetting(ConfigConstants.SECURITY_AUTHCZ_IMPERSONATION_DN + ".", Property.NodeScope)); // not filtered - // here + // here settings.add(Setting.simpleString(ConfigConstants.SECURITY_CERT_OID, Property.NodeScope, Property.Filtered)); @@ -1416,8 +1401,8 @@ public List> getSettings() { );// not filtered here settings.add(Setting.boolSetting(ConfigConstants.SECURITY_NODES_DN_DYNAMIC_CONFIG_ENABLED, false, Property.NodeScope));// not - // filtered - // here + // filtered + // here settings.add( Setting.boolSetting( @@ -1461,8 +1446,8 @@ public List> getSettings() { Setting.boolSetting(ConfigConstants.SECURITY_DFM_EMPTY_OVERRIDES_ALL, false, Property.NodeScope, Property.Filtered) ); settings.add(Setting.groupSetting(ConfigConstants.SECURITY_AUTHCZ_REST_IMPERSONATION_USERS + ".", Property.NodeScope)); // not - // filtered - // here + // filtered + // here settings.add(Setting.simpleString(ConfigConstants.SECURITY_ROLES_MAPPING_RESOLUTION, Property.NodeScope, Property.Filtered)); settings.add( @@ -2072,6 +2057,9 @@ public List> getSettings() { Property.Filtered ) ); + + // Privileges evaluation + settings.add(ActionPrivileges.PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE); } return settings; @@ -2124,50 +2112,24 @@ public Collection> getGuiceServiceClasses() final List> services = new ArrayList<>(1); services.add(GuiceHolder.class); - log.info("Guice service classes loaded"); return services; } @Override public Function> getFieldFilter() { return index -> { - if (threadPool == null) 
{ + if (threadPool == null || dlsFlsValve == null) { return field -> true; } - @SuppressWarnings("unchecked") - final Map> allowedFlsFields = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - ); - - final String eval = SecurityUtils.evalMap(allowedFlsFields, index); - - if (eval == null) { - return field -> true; - } else { - - final Set includesExcludes = allowedFlsFields.get(eval); - final Set includesSet = new HashSet<>(includesExcludes.size()); - final Set excludesSet = new HashSet<>(includesExcludes.size()); - - for (final String incExc : includesExcludes) { - final char firstChar = incExc.charAt(0); - if (firstChar == '!' || firstChar == '~') { - excludesSet.add(incExc.substring(1)); - } else { - includesSet.add(incExc); - } - } - - if (!excludesSet.isEmpty()) { - WildcardMatcher excludeMatcher = WildcardMatcher.from(excludesSet); - return field -> !excludeMatcher.test(handleKeyword(field)); - } else { - WildcardMatcher includeMatcher = WildcardMatcher.from(includesSet); - return field -> includeMatcher.test(handleKeyword(field)); + return field -> { + try { + return dlsFlsValve.isFieldAllowed(index, field); + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating FLS for {}.{}", index, field, e); + return false; } - } + }; }; } @@ -2181,13 +2143,6 @@ public Collection getSystemIndexDescriptors(Settings sett return Collections.singletonList(systemIndexDescriptor); } - private static String handleKeyword(final String field) { - if (field != null && field.endsWith(KEYWORD)) { - return field.substring(0, field.length() - KEYWORD.length()); - } - return field; - } - @Override public Subject getCurrentSubject() { // Not supported @@ -2206,7 +2161,15 @@ public PluginSubject getPluginSubject(Plugin plugin) { @Override public Optional getSecureSettingFactory(Settings settings) { - return Optional.of(new OpenSearchSecureSettingsFactory(threadPool, sks, 
evaluateSslExceptionHandler(), securityRestHandler)); + return Optional.of( + new OpenSearchSecureSettingsFactory( + threadPool, + sslSettingsManager, + evaluateSslExceptionHandler(), + securityRestHandler, + SSLConfig + ) + ); } @SuppressWarnings("removal") diff --git a/src/main/java/org/opensearch/security/auth/BackendRegistry.java b/src/main/java/org/opensearch/security/auth/BackendRegistry.java index 0e39acf59e..0b00bcf943 100644 --- a/src/main/java/org/opensearch/security/auth/BackendRegistry.java +++ b/src/main/java/org/opensearch/security/auth/BackendRegistry.java @@ -75,6 +75,7 @@ import static org.apache.http.HttpStatus.SC_FORBIDDEN; import static org.apache.http.HttpStatus.SC_SERVICE_UNAVAILABLE; import static org.apache.http.HttpStatus.SC_UNAUTHORIZED; +import static com.amazon.dlic.auth.http.saml.HTTPSamlAuthenticator.SAML_TYPE; public class BackendRegistry { @@ -303,7 +304,10 @@ public boolean authenticate(final SecurityRequestChannel request) { if (authDomain.isChallenge()) { final Optional restResponse = httpAuthenticator.reRequestAuthentication(request, null); if (restResponse.isPresent()) { - auditLog.logFailedLogin("", false, null, request); + // saml will always hit this to re-request authentication + if (!authDomain.getHttpAuthenticator().getType().equals(SAML_TYPE)) { + auditLog.logFailedLogin("", false, null, request); + } if (isTraceEnabled) { log.trace("No 'Authorization' header, send 401 and 'WWW-Authenticate Basic'"); } diff --git a/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java b/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java index 4cce5bb61f..c002535b3d 100644 --- a/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java +++ b/src/main/java/org/opensearch/security/compliance/FieldReadCallback.java @@ -32,10 +32,10 @@ import org.opensearch.index.mapper.Uid; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.dlic.rest.support.Utils; +import 
org.opensearch.security.privileges.dlsfls.FieldMasking; import org.opensearch.security.support.HeaderHelper; import org.opensearch.security.support.JsonFlattener; import org.opensearch.security.support.SourceFieldsContext; -import org.opensearch.security.support.WildcardMatcher; //TODO We need to deal with caching!! //Currently we disable caching (and realtime requests) when FLS or DLS is applied @@ -49,7 +49,7 @@ public final class FieldReadCallback { // private final ThreadContext threadContext; // private final ClusterService clusterService; private final Index index; - private final WildcardMatcher maskedFieldsMatcher; + private final FieldMasking.FieldMaskingRule fmRule; private final AuditLog auditLog; private Function, Map> filterFunction; private SourceFieldsContext sfc; @@ -61,7 +61,7 @@ public FieldReadCallback( final IndexService indexService, final ClusterService clusterService, final AuditLog auditLog, - final WildcardMatcher maskedFieldsMatcher, + final FieldMasking.FieldMaskingRule fmRule, ShardId shardId ) { super(); @@ -69,7 +69,7 @@ public FieldReadCallback( // this.clusterService = Objects.requireNonNull(clusterService); this.index = Objects.requireNonNull(indexService).index(); this.auditLog = auditLog; - this.maskedFieldsMatcher = maskedFieldsMatcher; + this.fmRule = fmRule; this.shardId = shardId; try { sfc = (SourceFieldsContext) HeaderHelper.deserializeSafeFromHeader(threadContext, "_opendistro_security_source_field_context"); @@ -88,7 +88,8 @@ public FieldReadCallback( } private boolean recordField(final String fieldName, boolean isStringField) { - return !(isStringField && maskedFieldsMatcher.test(fieldName)) + // We do not record fields in read history if they are masked. 
+ return !(isStringField && fmRule.isMasked(fieldName)) && auditLog.getComplianceConfig().readHistoryEnabledForField(index.getName(), fieldName); } diff --git a/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java b/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java index cc410c0158..761cc989d1 100644 --- a/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java +++ b/src/main/java/org/opensearch/security/configuration/ConfigurationChangeListener.java @@ -29,6 +29,7 @@ /** * Callback function on change particular configuration */ +@FunctionalInterface public interface ConfigurationChangeListener { /** diff --git a/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java b/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java index f1062cede3..35e5053a8b 100644 --- a/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java +++ b/src/main/java/org/opensearch/security/configuration/ConfigurationLoaderSecurity7.java @@ -135,7 +135,8 @@ public void singleFailure(Failure failure) { "Failure {} retrieving configuration for {} (index={})", failure == null ? 
null : failure.getMessage(), Arrays.toString(events), - securityIndex + securityIndex, + failure.getFailure() ); } diff --git a/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java b/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java index 9d64732e2d..2ba1ebcb8c 100644 --- a/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java +++ b/src/main/java/org/opensearch/security/configuration/ConfigurationRepository.java @@ -569,7 +569,7 @@ public synchronized void subscribeOnChange(ConfigurationChangeListener listener) private synchronized void notifyAboutChanges(ConfigurationMap typeToConfig) { for (ConfigurationChangeListener listener : configurationChangedListener) { try { - LOGGER.debug("Notify {} listener about change configuration with type {}", listener); + LOGGER.debug("Notify {} listener about change configuration with type {}", listener, typeToConfig); listener.onChange(typeToConfig); } catch (Exception e) { LOGGER.error("{} listener errored: " + e, listener, e); diff --git a/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java b/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java index 95d07cf0b2..08e06436aa 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFilterLevelActionHandler.java @@ -59,12 +59,13 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.security.privileges.DocumentAllowList; import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.dlsfls.DlsRestriction; +import org.opensearch.security.privileges.dlsfls.DocumentPrivileges; +import org.opensearch.security.privileges.dlsfls.IndexToRuleMap; import org.opensearch.security.queries.QueryBuilderTraverser; import 
org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.EvaluatedDlsFlsConfig; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.ReflectiveAttributeAccessors; -import org.opensearch.security.support.SecurityUtils; public class DlsFilterLevelActionHandler { private static final Logger log = LogManager.getLogger(DlsFilterLevelActionHandler.class); @@ -76,13 +77,12 @@ public class DlsFilterLevelActionHandler { public static boolean handle( PrivilegesEvaluationContext context, - EvaluatedDlsFlsConfig evaluatedDlsFlsConfig, + IndexToRuleMap dlsRestrictionMap, ActionListener listener, Client nodeClient, ClusterService clusterService, IndicesService indicesService, IndexNameExpressionResolver resolver, - DlsQueryParser dlsQueryParser, ThreadContext threadContext ) { @@ -115,13 +115,12 @@ public static boolean handle( return new DlsFilterLevelActionHandler( context, - evaluatedDlsFlsConfig, + dlsRestrictionMap, listener, nodeClient, clusterService, indicesService, resolver, - dlsQueryParser, threadContext ).handle(); } @@ -129,11 +128,10 @@ public static boolean handle( private final String action; private final ActionRequest request; private final ActionListener listener; - private final EvaluatedDlsFlsConfig evaluatedDlsFlsConfig; + private final IndexToRuleMap dlsRestrictionMap; private final Resolved resolved; private final boolean requiresIndexScoping; private final Client nodeClient; - private final DlsQueryParser dlsQueryParser; private final ClusterService clusterService; private final IndicesService indicesService; private final ThreadContext threadContext; @@ -143,24 +141,22 @@ public static boolean handle( DlsFilterLevelActionHandler( PrivilegesEvaluationContext context, - EvaluatedDlsFlsConfig evaluatedDlsFlsConfig, + IndexToRuleMap dlsRestrictionMap, ActionListener listener, Client nodeClient, ClusterService clusterService, IndicesService indicesService, 
IndexNameExpressionResolver resolver, - DlsQueryParser dlsQueryParser, ThreadContext threadContext ) { this.action = context.getAction(); this.request = context.getRequest(); this.listener = listener; - this.evaluatedDlsFlsConfig = evaluatedDlsFlsConfig; + this.dlsRestrictionMap = dlsRestrictionMap; this.resolved = context.getResolvedRequest(); this.nodeClient = nodeClient; this.clusterService = clusterService; this.indicesService = indicesService; - this.dlsQueryParser = dlsQueryParser; this.threadContext = threadContext; this.resolver = resolver; @@ -464,7 +460,7 @@ private boolean modifyQuery() throws IOException { } private boolean modifyQuery(String localClusterAlias) throws IOException { - Map> filterLevelQueries = evaluatedDlsFlsConfig.getDlsQueriesByIndex(); + Map filterLevelQueries = dlsRestrictionMap.getIndexMap(); BoolQueryBuilder dlsQueryBuilder = QueryBuilders.boolQuery().minimumShouldMatch(1); DocumentAllowList documentAllowlist = new DocumentAllowList(); @@ -474,8 +470,6 @@ private boolean modifyQuery(String localClusterAlias) throws IOException { Set indices = resolved.getAllIndicesResolved(clusterService, resolver); for (String index : indices) { - String dlsEval = SecurityUtils.evalMap(filterLevelQueries, index); - String prefixedIndex; if (localClusterAlias != null) { @@ -484,18 +478,9 @@ private boolean modifyQuery(String localClusterAlias) throws IOException { prefixedIndex = index; } - if (dlsEval == null) { - if (requiresIndexScoping) { - // This index has no DLS configured, thus it is unrestricted. - // To allow the index in a complex query, we need to add the query below to let the index pass. 
- dlsQueryBuilder.should(QueryBuilders.termQuery("_index", prefixedIndex)); - } - continue; - } - - Set unparsedDlsQueries = filterLevelQueries.get(dlsEval); + DlsRestriction dlsRestriction = filterLevelQueries.get(index); - if (unparsedDlsQueries == null || unparsedDlsQueries.isEmpty()) { + if (dlsRestriction == null || dlsRestriction.isUnrestricted()) { if (requiresIndexScoping) { // This index has no DLS configured, thus it is unrestricted. // To allow the index in a complex query, we need to add the query below to let the index pass. @@ -504,22 +489,22 @@ private boolean modifyQuery(String localClusterAlias) throws IOException { continue; } - for (String unparsedDlsQuery : unparsedDlsQueries) { + for (DocumentPrivileges.RenderedDlsQuery parsedDlsQuery : dlsRestriction.getQueries()) { queryCount++; - QueryBuilder parsedDlsQuery = dlsQueryParser.parse(unparsedDlsQuery); - if (!requiresIndexScoping) { - dlsQueryBuilder.should(parsedDlsQuery); + dlsQueryBuilder.should(parsedDlsQuery.getQueryBuilder()); } else { // The original request referred to several indices. 
That's why we have to scope each query to the index it is meant for dlsQueryBuilder.should( - QueryBuilders.boolQuery().must(QueryBuilders.termQuery("_index", prefixedIndex)).must(parsedDlsQuery) + QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("_index", prefixedIndex)) + .must(parsedDlsQuery.getQueryBuilder()) ); } Set queryBuilders = QueryBuilderTraverser.findAll( - parsedDlsQuery, + parsedDlsQuery.getQueryBuilder(), (q) -> (q instanceof TermsQueryBuilder) && ((TermsQueryBuilder) q).termsLookup() != null ); diff --git a/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java b/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java index b09745727f..bddf4731bb 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java @@ -17,18 +17,11 @@ //https://github.com/salyh/elasticsearch-security-plugin/blob/4b53974a43b270ae77ebe79d635e2484230c9d01/src/main/java/org/elasticsearch/plugins/security/filter/DlsWriteFilter.java import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; +import java.util.ArrayList; import java.util.Iterator; import java.util.List; -import java.util.ListIterator; -import java.util.Map; -import java.util.Optional; import java.util.Set; -import java.util.function.Function; -import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterators; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; @@ -65,157 +58,73 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.collect.Tuple; import org.opensearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.common.xcontent.XContentHelper; 
-import org.opensearch.common.xcontent.XContentType; -import org.opensearch.common.xcontent.support.XContentMapValues; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.index.shard.ShardId; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexService; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.compliance.ComplianceConfig; import org.opensearch.security.compliance.FieldReadCallback; -import org.opensearch.security.dlic.rest.support.Utils; +import org.opensearch.security.privileges.dlsfls.FieldMasking; +import org.opensearch.security.privileges.dlsfls.FieldPrivileges; +import org.opensearch.security.privileges.dlsfls.FlsStoredFieldVisitor; import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.HeaderHelper; -import org.opensearch.security.support.MapUtils; -import org.opensearch.security.support.SecurityUtils; -import org.opensearch.security.support.WildcardMatcher; class DlsFlsFilterLeafReader extends SequentialStoredFieldsLeafReader { private static final String KEYWORD = ".keyword"; - private static final String[] EMPTY_STRING_ARRAY = new String[0]; - private final Set includesSet; - private final Set excludesSet; private final FieldInfos flsFieldInfos; - private final boolean flsEnabled; - private String[] includes; - private String[] excludes; - private boolean canOptimize = true; - private Function, Map> filterFunction; private final IndexService indexService; private final ThreadContext threadContext; private final ClusterService clusterService; private final AuditLog auditlog; - private final MaskedFieldsMap maskedFieldsMap; private final ShardId shardId; - private final boolean maskFields; - private final Salt salt; - private final String maskingAlgorithmDefault; + private final FieldPrivileges.FlsRule flsRule; + private final FieldMasking.FieldMaskingRule 
fmRule; + private final Set metaFields; private DlsGetEvaluator dge = null; DlsFlsFilterLeafReader( final LeafReader delegate, - final Set includesExcludes, + final FieldPrivileges.FlsRule flsRule, final Query dlsQuery, final IndexService indexService, final ThreadContext threadContext, final ClusterService clusterService, final AuditLog auditlog, - final Set maskedFields, + final FieldMasking.FieldMaskingRule fmRule, final ShardId shardId, - final Salt salt + final Set metaFields ) { super(delegate); - maskFields = (maskedFields != null && maskedFields.size() > 0); - this.indexService = indexService; this.threadContext = threadContext; this.clusterService = clusterService; this.auditlog = auditlog; - this.salt = salt; - this.maskingAlgorithmDefault = clusterService.getSettings().get(ConfigConstants.SECURITY_MASKED_FIELDS_ALGORITHM_DEFAULT); - this.maskedFieldsMap = MaskedFieldsMap.extractMaskedFields(maskFields, maskedFields, salt, maskingAlgorithmDefault); this.shardId = shardId; - flsEnabled = includesExcludes != null && !includesExcludes.isEmpty(); - - if (flsEnabled) { - - final FieldInfos infos = delegate.getFieldInfos(); - this.includesSet = new HashSet<>(includesExcludes.size()); - this.excludesSet = new HashSet<>(includesExcludes.size()); - - for (final String incExc : includesExcludes) { - if (canOptimize && (incExc.indexOf('.') > -1 || incExc.indexOf('*') > -1)) { - canOptimize = false; - } - - final char firstChar = incExc.charAt(0); - - if (firstChar == '!' 
|| firstChar == '~') { - excludesSet.add(incExc.substring(1)); - excludesSet.add(incExc.substring(1) + KEYWORD); - } else { - includesSet.add(incExc); - } - } + this.flsRule = flsRule; + this.fmRule = fmRule; + this.metaFields = metaFields; - int i = 0; - final FieldInfo[] fa = new FieldInfo[infos.size()]; - - if (canOptimize) { - if (!excludesSet.isEmpty()) { - for (final FieldInfo info : infos) { - if (!excludesSet.contains(info.name)) { - fa[i++] = info; - } - } - } else { - for (final String inc : includesSet) { - FieldInfo f; - if ((f = infos.fieldInfo(inc)) != null) { - fa[i++] = f; - } - } - } - } else { - if (!excludesSet.isEmpty()) { - WildcardMatcher matcher = WildcardMatcher.from(excludesSet); - for (final FieldInfo info : infos) { - if (!matcher.test(info.name)) { - fa[i++] = info; - } - } - - this.excludes = excludesSet.toArray(EMPTY_STRING_ARRAY); + try { + if (!flsRule.isAllowAll()) { + FieldInfos originalFieldInfos = delegate.getFieldInfos(); + List restrictedFieldInfos = new ArrayList<>(originalFieldInfos.size()); - } else { - WildcardMatcher matcher = WildcardMatcher.from(includesSet); - for (final FieldInfo info : infos) { - if (matcher.test(info.name)) { - fa[i++] = info; - } + for (FieldInfo fieldInfo : originalFieldInfos) { + if (metaFields.contains(fieldInfo.name) || flsRule.isAllowed(fieldInfo.name)) { + restrictedFieldInfos.add(fieldInfo); } - - this.includes = includesSet.toArray(EMPTY_STRING_ARRAY); } - if (!excludesSet.isEmpty()) { - filterFunction = XContentMapValues.filter(null, excludes); - } else { - filterFunction = XContentMapValues.filter(includes, null); - } + this.flsFieldInfos = new FieldInfos(restrictedFieldInfos.toArray(new FieldInfo[restrictedFieldInfos.size()])); + } else { + this.flsFieldInfos = delegate.getFieldInfos(); } - final FieldInfo[] tmp = new FieldInfo[i]; - System.arraycopy(fa, 0, tmp, 0, i); - this.flsFieldInfos = new FieldInfos(tmp); - - } else { - this.includesSet = null; - this.excludesSet = null; - 
this.flsFieldInfos = null; - } - - try { dge = new DlsGetEvaluator(dlsQuery, in, applyDlsHere()); } catch (IOException e) { throw ExceptionsHelper.convertToOpenSearchException(e); @@ -287,91 +196,53 @@ public boolean hasDeletions() { } } - private static class MaskedFieldsMap { - private final Map maskedFieldsMap; - - private MaskedFieldsMap(Map maskedFieldsMap) { - this.maskedFieldsMap = maskedFieldsMap; - } - - public static MaskedFieldsMap extractMaskedFields( - boolean maskFields, - Set maskedFields, - final Salt salt, - String algorithmDefault - ) { - if (maskFields) { - return new MaskedFieldsMap( - maskedFields.stream() - .map(mf -> new MaskedField(mf, salt, algorithmDefault)) - .collect(ImmutableMap.toImmutableMap(mf -> WildcardMatcher.from(mf.getName()), Function.identity())) - ); - } else { - return new MaskedFieldsMap(Collections.emptyMap()); - } - } - - public Optional getMaskedField(String fieldName) { - return maskedFieldsMap.entrySet().stream().filter(entry -> entry.getKey().test(fieldName)).map(Map.Entry::getValue).findFirst(); - } - - public boolean anyMatch(String fieldName) { - return maskedFieldsMap.keySet().stream().anyMatch(m -> m.test(fieldName)); - } - - public WildcardMatcher getMatcher() { - return WildcardMatcher.from(maskedFieldsMap.keySet()); - } - - } - private static class DlsFlsSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper { - private final Set includes; + private final FieldPrivileges.FlsRule flsRule; private final Query dlsQuery; private final IndexService indexService; private final ThreadContext threadContext; private final ClusterService clusterService; private final AuditLog auditlog; - private final Set maskedFields; + private final FieldMasking.FieldMaskingRule fmRule; private final ShardId shardId; - private final Salt salt; + private final Set metaFields; public DlsFlsSubReaderWrapper( - final Set includes, + final FieldPrivileges.FlsRule flsRule, final Query dlsQuery, final IndexService indexService, 
final ThreadContext threadContext, final ClusterService clusterService, final AuditLog auditlog, - final Set maskedFields, + final FieldMasking.FieldMaskingRule fmRule, ShardId shardId, - final Salt salt + final Set metaFields ) { - this.includes = includes; + this.flsRule = flsRule; this.dlsQuery = dlsQuery; this.indexService = indexService; this.threadContext = threadContext; this.clusterService = clusterService; this.auditlog = auditlog; - this.maskedFields = maskedFields; + this.fmRule = fmRule; this.shardId = shardId; - this.salt = salt; + this.metaFields = metaFields; } @Override public LeafReader wrap(final LeafReader reader) { return new DlsFlsFilterLeafReader( reader, - includes, + flsRule, dlsQuery, indexService, threadContext, clusterService, auditlog, - maskedFields, + fmRule, shardId, - salt + metaFields ); } @@ -379,66 +250,66 @@ public LeafReader wrap(final LeafReader reader) { static class DlsFlsDirectoryReader extends FilterDirectoryReader { - private final Set includes; + private final FieldPrivileges.FlsRule flsRule; private final Query dlsQuery; private final IndexService indexService; private final ThreadContext threadContext; private final ClusterService clusterService; private final AuditLog auditlog; - private final Set maskedFields; + private final FieldMasking.FieldMaskingRule fmRule; private final ShardId shardId; - private final Salt salt; + private final Set metaFields; public DlsFlsDirectoryReader( final DirectoryReader in, - final Set includes, + final FieldPrivileges.FlsRule flsRule, final Query dlsQuery, final IndexService indexService, final ThreadContext threadContext, final ClusterService clusterService, final AuditLog auditlog, - final Set maskedFields, + final FieldMasking.FieldMaskingRule fmRule, ShardId shardId, - final Salt salt + final Set metaFields ) throws IOException { super( in, new DlsFlsSubReaderWrapper( - includes, + flsRule, dlsQuery, indexService, threadContext, clusterService, auditlog, - maskedFields, + fmRule, 
shardId, - salt + metaFields ) ); - this.includes = includes; + this.flsRule = flsRule; this.dlsQuery = dlsQuery; this.indexService = indexService; this.threadContext = threadContext; this.clusterService = clusterService; this.auditlog = auditlog; - this.maskedFields = maskedFields; + this.fmRule = fmRule; this.shardId = shardId; - this.salt = salt; + this.metaFields = metaFields; } @Override protected DirectoryReader doWrapDirectoryReader(final DirectoryReader in) throws IOException { return new DlsFlsDirectoryReader( in, - includes, + flsRule, dlsQuery, indexService, threadContext, clusterService, auditlog, - maskedFields, + fmRule, shardId, - salt + metaFields ); } @@ -509,21 +380,15 @@ private StoredFieldVisitor getDlsFlsVisitor(StoredFieldVisitor visitor) { if (complianceConfig != null && complianceConfig.readHistoryEnabledForIndex(indexService.index().getName())) { visitor = new ComplianceAwareStoredFieldVisitor(visitor); } - if (maskFields) { - visitor = new HashingStoredFieldVisitor(visitor); - } - if (flsEnabled) { - visitor = new FlsStoredFieldVisitor(visitor); + if (!flsRule.isAllowAll() || !fmRule.isAllowAll()) { + visitor = new FlsStoredFieldVisitor(visitor, flsRule, fmRule, metaFields); } return visitor; } private void finishVisitor(StoredFieldVisitor visitor) { if (visitor instanceof FlsStoredFieldVisitor) { - visitor = ((FlsStoredFieldVisitor) visitor).delegate; - } - if (visitor instanceof HashingStoredFieldVisitor) { - visitor = ((HashingStoredFieldVisitor) visitor).delegate; + visitor = ((FlsStoredFieldVisitor) visitor).delegate(); } if (visitor instanceof ComplianceAwareStoredFieldVisitor) { ((ComplianceAwareStoredFieldVisitor) visitor).finished(); @@ -540,23 +405,18 @@ public void document(final int docID, StoredFieldVisitor visitor) throws IOExcep } } - private boolean isFls(final BytesRef termAsFiledName) { - return isFls(termAsFiledName.utf8ToString()); + private boolean isAllowed(BytesRef term) { + return isAllowed(term.utf8ToString()); } - 
private boolean isFls(final String name) { - - if (!flsEnabled) { - return true; - } - - return flsFieldInfos.fieldInfo(name) != null; + private boolean isAllowed(String fieldName) { + return this.metaFields.contains(fieldName) || flsRule.isAllowed(fieldName); } @Override public FieldInfos getFieldInfos() { - if (!flsEnabled) { + if (flsRule.isAllowAll()) { return in.getFieldInfos(); } @@ -571,7 +431,7 @@ private class ComplianceAwareStoredFieldVisitor extends StoredFieldVisitor { indexService, clusterService, auditlog, - maskedFieldsMap.getMatcher(), + fmRule, shardId ); @@ -637,193 +497,11 @@ public void finished() { } - private class FlsStoredFieldVisitor extends StoredFieldVisitor { - - private final StoredFieldVisitor delegate; - - public FlsStoredFieldVisitor(final StoredFieldVisitor delegate) { - super(); - this.delegate = delegate; - } - - @Override - public void binaryField(final FieldInfo fieldInfo, final byte[] value) throws IOException { - - if (fieldInfo.name.equals("_source")) { - Map filteredSource = Utils.byteArrayToMutableJsonMap(value); - - if (!canOptimize) { - filteredSource = filterFunction.apply(filteredSource); - } else { - if (!excludesSet.isEmpty()) { - filteredSource.keySet().removeAll(excludesSet); - } else { - filteredSource.keySet().retainAll(includesSet); - } - } - - delegate.binaryField(fieldInfo, Utils.jsonMapToByteArray(filteredSource)); - } else { - delegate.binaryField(fieldInfo, value); - } - } - - @Override - public Status needsField(final FieldInfo fieldInfo) throws IOException { - return isFls(fieldInfo.name) ? 
delegate.needsField(fieldInfo) : Status.NO; - } - - @Override - public int hashCode() { - return delegate.hashCode(); - } - - @Override - public void intField(final FieldInfo fieldInfo, final int value) throws IOException { - delegate.intField(fieldInfo, value); - } - - @Override - public void longField(final FieldInfo fieldInfo, final long value) throws IOException { - delegate.longField(fieldInfo, value); - } - - @Override - public void floatField(final FieldInfo fieldInfo, final float value) throws IOException { - delegate.floatField(fieldInfo, value); - } - - @Override - public void doubleField(final FieldInfo fieldInfo, final double value) throws IOException { - delegate.doubleField(fieldInfo, value); - } - - @Override - public boolean equals(final Object obj) { - return delegate.equals(obj); - } - - @Override - public String toString() { - return delegate.toString(); - } - } - - private class HashingStoredFieldVisitor extends StoredFieldVisitor { - - private final StoredFieldVisitor delegate; - - public HashingStoredFieldVisitor(final StoredFieldVisitor delegate) { - super(); - this.delegate = delegate; - } - - @Override - public void binaryField(final FieldInfo fieldInfo, final byte[] value) throws IOException { - - if (fieldInfo.name.equals("_source")) { - final BytesReference bytesRef = new BytesArray(value); - final Tuple> bytesRefTuple = XContentHelper.convertToMap( - bytesRef, - false, - XContentType.JSON - ); - Map filteredSource = bytesRefTuple.v2(); - MapUtils.deepTraverseMap(filteredSource, HASH_CB); - final XContentBuilder xBuilder = XContentBuilder.builder(bytesRefTuple.v1().xContent()).map(filteredSource); - delegate.binaryField(fieldInfo, BytesReference.toBytes(BytesReference.bytes(xBuilder))); - } else { - delegate.binaryField(fieldInfo, value); - } - } - - @Override - public Status needsField(final FieldInfo fieldInfo) throws IOException { - return delegate.needsField(fieldInfo); - } - - @Override - public int hashCode() { - return 
delegate.hashCode(); - } - - @Override - public void intField(final FieldInfo fieldInfo, final int value) throws IOException { - delegate.intField(fieldInfo, value); - } - - @Override - public void longField(final FieldInfo fieldInfo, final long value) throws IOException { - delegate.longField(fieldInfo, value); - } - - @Override - public void floatField(final FieldInfo fieldInfo, final float value) throws IOException { - delegate.floatField(fieldInfo, value); - } - - @Override - public void doubleField(final FieldInfo fieldInfo, final double value) throws IOException { - delegate.doubleField(fieldInfo, value); - } - - @Override - public boolean equals(final Object obj) { - return delegate.equals(obj); - } - - @Override - public String toString() { - return delegate.toString(); - } - } - - private final MapUtils.Callback HASH_CB = new HashingCallback(); - - private class HashingCallback implements MapUtils.Callback { - @SuppressWarnings({ "rawtypes", "unchecked" }) - @Override - public void call(String key, Map map, List stack) { - Object v = map.get(key); - - if (v instanceof List) { - final String field = stack.isEmpty() ? key : Joiner.on('.').join(stack) + "." + key; - final MaskedField mf = maskedFieldsMap.getMaskedField(field).orElse(null); - if (mf != null) { - final List listField = (List) v; - for (ListIterator iterator = listField.listIterator(); iterator.hasNext();) { - final Object listFieldItem = iterator.next(); - - if (listFieldItem instanceof String) { - iterator.set(mf.mask(((String) listFieldItem))); - } else if (listFieldItem instanceof byte[]) { - iterator.set(mf.mask(((byte[]) listFieldItem))); - } - } - } - } - - if (v != null && (v instanceof String || v instanceof byte[])) { - - final String field = stack.isEmpty() ? key : Joiner.on('.').join(stack) + "." 
+ key; - final MaskedField mf = maskedFieldsMap.getMaskedField(field).orElse(null); - if (mf != null) { - if (v instanceof String) { - map.replace(key, mf.mask(((String) v))); - } else { - map.replace(key, mf.mask(((byte[]) v))); - } - } - } - } - - } - @Override public Fields getTermVectors(final int docID) throws IOException { final Fields fields = in.getTermVectors(docID); - if (!flsEnabled || fields == null) { + if (flsRule.isAllowAll() || fields == null) { return fields; } @@ -831,13 +509,13 @@ public Fields getTermVectors(final int docID) throws IOException { @Override public Iterator iterator() { - return Iterators.filter(fields.iterator(), input -> isFls(input)); + return Iterators.filter(fields.iterator(), input -> isAllowed(input)); } @Override public Terms terms(final String field) throws IOException { - if (!isFls(field)) { + if (!isAllowed(field)) { return null; } @@ -855,232 +533,223 @@ public int size() { @Override public NumericDocValues getNumericDocValues(final String field) throws IOException { - return isFls(field) ? in.getNumericDocValues(field) : null; + return isAllowed(field) ? in.getNumericDocValues(field) : null; } @Override public BinaryDocValues getBinaryDocValues(final String field) throws IOException { - return isFls(field) ? wrapBinaryDocValues(field, in.getBinaryDocValues(field)) : null; + return isAllowed(field) ? 
wrapBinaryDocValues(field, in.getBinaryDocValues(field)) : null; } private BinaryDocValues wrapBinaryDocValues(final String field, final BinaryDocValues binaryDocValues) { + FieldMasking.FieldMaskingRule.Field fmRuleField = fmRule.get(field); - final MaskedFieldsMap maskedFieldsMap; - - if (binaryDocValues != null && ((maskedFieldsMap = getRuntimeMaskedFieldInfo()) != null)) { - final MaskedField mf = maskedFieldsMap.getMaskedField(handleKeyword(field)).orElse(null); + if (binaryDocValues == null || fmRuleField == null) { + return binaryDocValues; + } - if (mf != null) { - return new BinaryDocValues() { + return new BinaryDocValues() { - @Override - public int nextDoc() throws IOException { - return binaryDocValues.nextDoc(); - } + @Override + public int nextDoc() throws IOException { + return binaryDocValues.nextDoc(); + } - @Override - public int docID() { - return binaryDocValues.docID(); - } + @Override + public int docID() { + return binaryDocValues.docID(); + } - @Override - public long cost() { - return binaryDocValues.cost(); - } + @Override + public long cost() { + return binaryDocValues.cost(); + } - @Override - public int advance(int target) throws IOException { - return binaryDocValues.advance(target); - } + @Override + public int advance(int target) throws IOException { + return binaryDocValues.advance(target); + } - @Override - public boolean advanceExact(int target) throws IOException { - return binaryDocValues.advanceExact(target); - } + @Override + public boolean advanceExact(int target) throws IOException { + return binaryDocValues.advanceExact(target); + } - @Override - public BytesRef binaryValue() throws IOException { - return mf.mask(binaryDocValues.binaryValue()); - } - }; + @Override + public BytesRef binaryValue() throws IOException { + return fmRuleField.apply(binaryDocValues.binaryValue()); } - } - return binaryDocValues; + }; + } @Override public SortedDocValues getSortedDocValues(final String field) throws IOException { - return 
isFls(field) ? wrapSortedDocValues(field, in.getSortedDocValues(field)) : null; + return isAllowed(field) ? wrapSortedDocValues(field, in.getSortedDocValues(field)) : null; } private SortedDocValues wrapSortedDocValues(final String field, final SortedDocValues sortedDocValues) { + FieldMasking.FieldMaskingRule.Field fmRuleField = fmRule.get(field); - final MaskedFieldsMap maskedFieldsMap; - - if (sortedDocValues != null && (maskedFieldsMap = getRuntimeMaskedFieldInfo()) != null) { - final MaskedField mf = maskedFieldsMap.getMaskedField(handleKeyword(field)).orElse(null); + if (sortedDocValues == null || fmRuleField == null) { + return sortedDocValues; + } - if (mf != null) { - return new SortedDocValues() { + return new SortedDocValues() { - @Override - public int lookupTerm(BytesRef key) throws IOException { - return sortedDocValues.lookupTerm(key); - } + @Override + public int lookupTerm(BytesRef key) throws IOException { + return sortedDocValues.lookupTerm(key); + } - @Override - public TermsEnum termsEnum() throws IOException { - return new MaskedTermsEnum(sortedDocValues.termsEnum(), mf); - } + @Override + public TermsEnum termsEnum() throws IOException { + return new MaskedTermsEnum(sortedDocValues.termsEnum(), fmRuleField); + } - @Override - public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { - return new MaskedTermsEnum(sortedDocValues.intersect(automaton), mf); - } + @Override + public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { + return new MaskedTermsEnum(sortedDocValues.intersect(automaton), fmRuleField); + } - @Override - public int nextDoc() throws IOException { - return sortedDocValues.nextDoc(); - } + @Override + public int nextDoc() throws IOException { + return sortedDocValues.nextDoc(); + } - @Override - public int docID() { - return sortedDocValues.docID(); - } + @Override + public int docID() { + return sortedDocValues.docID(); + } - @Override - public long cost() { - return 
sortedDocValues.cost(); - } + @Override + public long cost() { + return sortedDocValues.cost(); + } - @Override - public int advance(int target) throws IOException { - return sortedDocValues.advance(target); - } + @Override + public int advance(int target) throws IOException { + return sortedDocValues.advance(target); + } - @Override - public boolean advanceExact(int target) throws IOException { - return sortedDocValues.advanceExact(target); - } + @Override + public boolean advanceExact(int target) throws IOException { + return sortedDocValues.advanceExact(target); + } - @Override - public int ordValue() throws IOException { - return sortedDocValues.ordValue(); - } + @Override + public int ordValue() throws IOException { + return sortedDocValues.ordValue(); + } - @Override - public BytesRef lookupOrd(int ord) throws IOException { - return mf.mask(sortedDocValues.lookupOrd(ord)); - } + @Override + public BytesRef lookupOrd(int ord) throws IOException { + return fmRuleField.apply(sortedDocValues.lookupOrd(ord)); + } - @Override - public int getValueCount() { - return sortedDocValues.getValueCount(); - } - }; + @Override + public int getValueCount() { + return sortedDocValues.getValueCount(); } - } - return sortedDocValues; + }; + } @Override public SortedNumericDocValues getSortedNumericDocValues(final String field) throws IOException { - return isFls(field) ? in.getSortedNumericDocValues(field) : null; + return isAllowed(field) ? in.getSortedNumericDocValues(field) : null; } @Override public SortedSetDocValues getSortedSetDocValues(final String field) throws IOException { - return isFls(field) ? wrapSortedSetDocValues(field, in.getSortedSetDocValues(field)) : null; + return isAllowed(field) ? 
wrapSortedSetDocValues(field, in.getSortedSetDocValues(field)) : null; } private SortedSetDocValues wrapSortedSetDocValues(final String field, final SortedSetDocValues sortedSetDocValues) { + FieldMasking.FieldMaskingRule.Field fmRuleField = fmRule.get(field); - final MaskedFieldsMap maskedFieldsMap; - - if (sortedSetDocValues != null && ((maskedFieldsMap = getRuntimeMaskedFieldInfo()) != null)) { - MaskedField mf = maskedFieldsMap.getMaskedField(handleKeyword(field)).orElse(null); + if (sortedSetDocValues == null || fmRuleField == null) { + return sortedSetDocValues; + } - if (mf != null) { - return new SortedSetDocValues() { + return new SortedSetDocValues() { - @Override - public long lookupTerm(BytesRef key) throws IOException { - return sortedSetDocValues.lookupTerm(key); - } + @Override + public long lookupTerm(BytesRef key) throws IOException { + return sortedSetDocValues.lookupTerm(key); + } - @Override - public TermsEnum termsEnum() throws IOException { - return new MaskedTermsEnum(sortedSetDocValues.termsEnum(), mf); - } + @Override + public TermsEnum termsEnum() throws IOException { + return new MaskedTermsEnum(sortedSetDocValues.termsEnum(), fmRuleField); + } - @Override - public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { - return new MaskedTermsEnum(sortedSetDocValues.intersect(automaton), mf); - } + @Override + public TermsEnum intersect(CompiledAutomaton automaton) throws IOException { + return new MaskedTermsEnum(sortedSetDocValues.intersect(automaton), fmRuleField); + } - @Override - public int nextDoc() throws IOException { - return sortedSetDocValues.nextDoc(); - } + @Override + public int nextDoc() throws IOException { + return sortedSetDocValues.nextDoc(); + } - @Override - public int docID() { - return sortedSetDocValues.docID(); - } + @Override + public int docID() { + return sortedSetDocValues.docID(); + } - @Override - public long cost() { - return sortedSetDocValues.cost(); - } + @Override + public long cost() { 
+ return sortedSetDocValues.cost(); + } - @Override - public int advance(int target) throws IOException { - return sortedSetDocValues.advance(target); - } + @Override + public int advance(int target) throws IOException { + return sortedSetDocValues.advance(target); + } - @Override - public boolean advanceExact(int target) throws IOException { - return sortedSetDocValues.advanceExact(target); - } + @Override + public boolean advanceExact(int target) throws IOException { + return sortedSetDocValues.advanceExact(target); + } - @Override - public long nextOrd() throws IOException { - return sortedSetDocValues.nextOrd(); - } + @Override + public long nextOrd() throws IOException { + return sortedSetDocValues.nextOrd(); + } - @Override - public int docValueCount() { - return sortedSetDocValues.docValueCount(); - } + @Override + public int docValueCount() { + return sortedSetDocValues.docValueCount(); + } - @Override - public BytesRef lookupOrd(long ord) throws IOException { - return mf.mask(sortedSetDocValues.lookupOrd(ord)); - } + @Override + public BytesRef lookupOrd(long ord) throws IOException { + return fmRuleField.apply(sortedSetDocValues.lookupOrd(ord)); + } - @Override - public long getValueCount() { - return sortedSetDocValues.getValueCount(); - } - }; + @Override + public long getValueCount() { + return sortedSetDocValues.getValueCount(); } - } - return sortedSetDocValues; + }; + } @Override public NumericDocValues getNormValues(final String field) throws IOException { - return isFls(field) ? in.getNormValues(field) : null; + return isAllowed(field) ? in.getNormValues(field) : null; } @Override public PointValues getPointValues(String field) throws IOException { - return isFls(field) ? in.getPointValues(field) : null; + return isAllowed(field) ? in.getPointValues(field) : null; } @Override public Terms terms(String field) throws IOException { - return isFls(field) ? wrapTerms(field, in.terms(field)) : null; + return isAllowed(field) ? 
wrapTerms(field, in.terms(field)) : null; } private Terms wrapTerms(final String field, Terms terms) throws IOException { @@ -1089,8 +758,7 @@ private Terms wrapTerms(final String field, Terms terms) throws IOException { return null; } - MaskedFieldsMap maskedFieldInfo = getRuntimeMaskedFieldInfo(); - if (maskedFieldInfo != null && maskedFieldInfo.anyMatch(handleKeyword(field))) { + if (fmRule.isMasked(field)) { return null; } @@ -1110,7 +778,7 @@ public BytesRef next() throws IOException { // wind forward in the sequence of terms until we reached the end or we find a allowed term(=field name) // so that calling this method never return a term which is not allowed by fls rules for (BytesRef nextBytesRef = in.next(); nextBytesRef != null; nextBytesRef = in.next()) { - if (!isFls((nextBytesRef))) { + if (!isAllowed((nextBytesRef))) { continue; } else { return nextBytesRef; @@ -1127,7 +795,7 @@ public SeekStatus seekCeil(BytesRef text) throws IOException { // So delegateStatus here is either FOUND or NOT_FOUND // check if the current term (=field name) is allowed // If so just return current seek status - if (delegateStatus != SeekStatus.END && isFls((in.term()))) { + if (delegateStatus != SeekStatus.END && isAllowed((in.term()))) { return delegateStatus; } else if (delegateStatus == SeekStatus.END) { // If we hit the end just return END @@ -1145,7 +813,7 @@ public SeekStatus seekCeil(BytesRef text) throws IOException { @Override public boolean seekExact(BytesRef term) throws IOException { - return isFls(term) && in.seekExact(term); + return isAllowed(term) && in.seekExact(term); } @Override @@ -1206,41 +874,15 @@ public boolean hasDeletions() { return dge.hasDeletions(); } - @SuppressWarnings("unchecked") - private MaskedFieldsMap getRuntimeMaskedFieldInfo() { - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadContext, - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER - ); - final String maskedEval = 
SecurityUtils.evalMap(maskedFieldsMap, indexService.index().getName()); - - if (maskedEval != null) { - final Set mf = maskedFieldsMap.get(maskedEval); - if (mf != null && !mf.isEmpty()) { - return MaskedFieldsMap.extractMaskedFields(true, mf, salt, maskingAlgorithmDefault); - } - - } - - return null; - } - - private String handleKeyword(final String field) { - if (field != null && field.endsWith(KEYWORD)) { - return field.substring(0, field.length() - KEYWORD.length()); - } - return field; - } - private static class MaskedTermsEnum extends TermsEnum { private final TermsEnum delegate; - private final MaskedField mf; + private final FieldMasking.FieldMaskingRule.Field fmRuleField; - public MaskedTermsEnum(TermsEnum delegate, MaskedField mf) { + public MaskedTermsEnum(TermsEnum delegate, FieldMasking.FieldMaskingRule.Field fmRuleField) { super(); this.delegate = delegate; - this.mf = mf; + this.fmRuleField = fmRuleField; } @Override @@ -1275,7 +917,7 @@ public void seekExact(BytesRef term, TermState state) throws IOException { @Override public BytesRef term() throws IOException { - return mf.mask(delegate.term()); + return fmRuleField.apply(delegate.term()); } @Override @@ -1336,4 +978,5 @@ private boolean applyDlsHere() { // (a get for example) return !action.startsWith("indices:data/read/search"); } + } diff --git a/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java b/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java index d629cbaff3..2565057afd 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFlsRequestValve.java @@ -31,6 +31,8 @@ import org.opensearch.search.internal.SearchContext; import org.opensearch.search.query.QuerySearchResult; import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import 
org.opensearch.security.privileges.dlsfls.DlsFlsProcessedConfig; import org.opensearch.threadpool.ThreadPool; public interface DlsFlsRequestValve { @@ -41,6 +43,14 @@ public interface DlsFlsRequestValve { void onQueryPhase(QuerySearchResult queryResult); + DlsFlsProcessedConfig getCurrentConfig(); + + boolean hasFlsOrFieldMasking(String index) throws PrivilegesEvaluationException; + + boolean hasFieldMasking(String index) throws PrivilegesEvaluationException; + + boolean isFieldAllowed(String index, String field) throws PrivilegesEvaluationException; + public static class NoopDlsFlsRequestValve implements DlsFlsRequestValve { @Override @@ -57,6 +67,26 @@ public void handleSearchContext(SearchContext context, ThreadPool threadPool, Na public void onQueryPhase(QuerySearchResult queryResult) { } + + @Override + public DlsFlsProcessedConfig getCurrentConfig() { + return null; + } + + @Override + public boolean hasFlsOrFieldMasking(String index) { + return false; + } + + @Override + public boolean hasFieldMasking(String index) { + return false; + } + + @Override + public boolean isFieldAllowed(String index, String field) { + return true; + } } } diff --git a/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java b/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java index 4141a3f8f5..498b908e5d 100644 --- a/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java +++ b/src/main/java/org/opensearch/security/configuration/DlsFlsValveImpl.java @@ -11,15 +11,13 @@ package org.opensearch.security.configuration; -import java.io.Serializable; import java.lang.reflect.Field; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Comparator; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.stream.StreamSupport; @@ -35,12 +33,9 @@ import 
org.opensearch.OpenSearchSecurityException; import org.opensearch.SpecialPermission; import org.opensearch.action.ActionRequest; -import org.opensearch.action.DocWriteRequest; import org.opensearch.action.RealtimeRequest; -import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsRequest; import org.opensearch.action.admin.indices.shrink.ResizeRequest; import org.opensearch.action.bulk.BulkItemRequest; -import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkShardRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.update.UpdateRequest; @@ -72,18 +67,22 @@ import org.opensearch.search.internal.SearchContext; import org.opensearch.search.query.QuerySearchResult; import org.opensearch.security.OpenSearchSecurityPlugin; +import org.opensearch.security.privileges.DocumentAllowList; import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.privileges.dlsfls.DlsFlsBaseContext; +import org.opensearch.security.privileges.dlsfls.DlsFlsLegacyHeaders; +import org.opensearch.security.privileges.dlsfls.DlsFlsProcessedConfig; +import org.opensearch.security.privileges.dlsfls.DlsRestriction; +import org.opensearch.security.privileges.dlsfls.FieldMasking; +import org.opensearch.security.privileges.dlsfls.IndexToRuleMap; import org.opensearch.security.resolver.IndexResolverReplacer; -import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.EvaluatedDlsFlsConfig; -import org.opensearch.security.support.Base64Helper; +import org.opensearch.security.securityconf.DynamicConfigFactory; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.HeaderHelper; -import 
org.opensearch.security.support.SecurityUtils; import org.opensearch.threadpool.ThreadPool; -import org.greenrobot.eventbus.Subscribe; - public class DlsFlsValveImpl implements DlsFlsRequestValve { private static final String MAP_EXECUTION_HINT = "map"; @@ -93,11 +92,12 @@ public class DlsFlsValveImpl implements DlsFlsRequestValve { private final ClusterService clusterService; private final ThreadContext threadContext; private final Mode mode; - private final DlsQueryParser dlsQueryParser; private final IndexNameExpressionResolver resolver; - private final boolean dfmEmptyOverwritesAll; private final NamedXContentRegistry namedXContentRegistry; - private volatile ConfigModel configModel; + private final DlsFlsBaseContext dlsFlsBaseContext; + private final AtomicReference dlsFlsProcessedConfig = new AtomicReference<>(); + private final FieldMasking.Config fieldMaskingConfig; + private final Settings settings; public DlsFlsValveImpl( Settings settings, @@ -105,22 +105,27 @@ public DlsFlsValveImpl( ClusterService clusterService, IndexNameExpressionResolver resolver, NamedXContentRegistry namedXContentRegistry, - ThreadContext threadContext + ThreadPool threadPool, + DlsFlsBaseContext dlsFlsBaseContext ) { super(); this.nodeClient = nodeClient; this.clusterService = clusterService; this.resolver = resolver; - this.threadContext = threadContext; + this.threadContext = threadPool.getThreadContext(); this.mode = Mode.get(settings); - this.dlsQueryParser = new DlsQueryParser(namedXContentRegistry); - this.dfmEmptyOverwritesAll = settings.getAsBoolean(ConfigConstants.SECURITY_DFM_EMPTY_OVERRIDES_ALL, false); this.namedXContentRegistry = namedXContentRegistry; - } + this.fieldMaskingConfig = FieldMasking.Config.fromSettings(settings); + this.dlsFlsBaseContext = dlsFlsBaseContext; + this.settings = settings; + + clusterService.addListener(event -> { + DlsFlsProcessedConfig config = dlsFlsProcessedConfig.get(); - @Subscribe - public void onConfigModelChanged(ConfigModel 
configModel) { - this.configModel = configModel; + if (config != null) { + config.updateClusterStateMetadataAsync(clusterService, threadPool); + } + }); } /** @@ -130,269 +135,295 @@ public void onConfigModelChanged(ConfigModel configModel) { */ @Override public boolean invoke(PrivilegesEvaluationContext context, final ActionListener listener) { - - EvaluatedDlsFlsConfig evaluatedDlsFlsConfig = configModel.getSecurityRoles() - .filter(context.getMappedRoles()) - .getDlsFls(context.getUser(), dfmEmptyOverwritesAll, resolver, clusterService, namedXContentRegistry); - + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); ActionRequest request = context.getRequest(); IndexResolverReplacer.Resolved resolved = context.getResolvedRequest(); - if (log.isDebugEnabled()) { - log.debug( - "DlsFlsValveImpl.invoke()\nrequest: " - + request - + "\nevaluatedDlsFlsConfig: " - + evaluatedDlsFlsConfig - + "\nresolved: " - + resolved - + "\nmode: " - + mode - ); - } - - if (evaluatedDlsFlsConfig == null || evaluatedDlsFlsConfig.isEmpty()) { - return true; - } + try { + boolean hasDlsRestrictions = !config.getDocumentPrivileges().isUnrestricted(context, resolved); + boolean hasFlsRestrictions = !config.getFieldPrivileges().isUnrestricted(context, resolved); + boolean hasFieldMasking = !config.getFieldMasking().isUnrestricted(context, resolved); - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) != null) { - if (log.isDebugEnabled()) { - log.debug("DLS is already done for: " + threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE)); + if (!hasDlsRestrictions && !hasFlsRestrictions && !hasFieldMasking) { + return true; } - return true; - } - - EvaluatedDlsFlsConfig filteredDlsFlsConfig = evaluatedDlsFlsConfig.filter(resolved); + if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) != null) { + if (log.isDebugEnabled()) { + log.debug( + "DLS is already done for: {}", + 
threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) + ); + } - boolean doFilterLevelDls; + return true; + } - if (mode == Mode.FILTER_LEVEL) { - doFilterLevelDls = true; - } else if (mode == Mode.LUCENE_LEVEL) { - doFilterLevelDls = false; - } else { // mode == Mode.ADAPTIVE - Mode modeByHeader = getDlsModeHeader(); + IndexToRuleMap dlsRestrictionMap = null; + boolean doFilterLevelDls; - if (modeByHeader == Mode.FILTER_LEVEL) { + if (mode == Mode.FILTER_LEVEL) { doFilterLevelDls = true; - log.debug("Doing filter-level DLS due to header"); - } else { - doFilterLevelDls = dlsQueryParser.containsTermLookupQuery(filteredDlsFlsConfig.getAllQueries()); - - if (doFilterLevelDls) { - setDlsModeHeader(Mode.FILTER_LEVEL); - log.debug("Doing filter-level DLS because the query contains a TLQ"); + dlsRestrictionMap = config.getDocumentPrivileges() + .getRestrictions(context, resolved.getAllIndicesResolved(clusterService, context.getIndexNameExpressionResolver())); + } else if (mode == Mode.LUCENE_LEVEL) { + doFilterLevelDls = false; + } else { // mode == Mode.ADAPTIVE + Mode modeByHeader = getDlsModeHeader(); + dlsRestrictionMap = config.getDocumentPrivileges() + .getRestrictions(context, resolved.getAllIndicesResolved(clusterService, context.getIndexNameExpressionResolver())); + + if (modeByHeader == Mode.FILTER_LEVEL) { + doFilterLevelDls = true; + log.debug("Doing filter-level DLS due to header"); } else { - log.debug("Doing lucene-level DLS because the query does not contain a TLQ"); + doFilterLevelDls = dlsRestrictionMap.containsAny(DlsRestriction::containsTermLookupQuery); + + if (doFilterLevelDls) { + setDlsModeHeader(Mode.FILTER_LEVEL); + log.debug("Doing filter-level DLS because the query contains a TLQ"); + } else { + log.debug("Doing lucene-level DLS because the query does not contain a TLQ"); + } } } - } - - if (!doFilterLevelDls) { - setDlsHeaders(evaluatedDlsFlsConfig, request); - } - setFlsHeaders(evaluatedDlsFlsConfig, request); - 
- if (filteredDlsFlsConfig.isEmpty()) { - return true; - } + if (DlsFlsLegacyHeaders.possiblyRequired(clusterService)) { + DlsFlsLegacyHeaders.prepare(threadContext, context, config, clusterService.state().metadata(), doFilterLevelDls); + } - if (request instanceof RealtimeRequest) { - ((RealtimeRequest) request).realtime(Boolean.FALSE); - } + if (request instanceof RealtimeRequest) { + ((RealtimeRequest) request).realtime(Boolean.FALSE); + } - if (request instanceof SearchRequest) { + if (request instanceof SearchRequest) { - SearchRequest searchRequest = ((SearchRequest) request); + SearchRequest searchRequest = ((SearchRequest) request); - // When we encounter a terms or sampler aggregation with masked fields activated we forcibly - // need to switch off global ordinals because field masking can break ordering - // CS-SUPPRESS-SINGLE: RegexpSingleline Ignore term inside of url - // https://www.elastic.co/guide/en/elasticsearch/reference/master/eager-global-ordinals.html#_avoiding_global_ordinal_loading - // CS-ENFORCE-SINGLE - if (evaluatedDlsFlsConfig.hasFieldMasking()) { + // When we encounter a terms or sampler aggregation with masked fields activated we forcibly + // need to switch off global ordinals because field masking can break ordering + // CS-SUPPRESS-SINGLE: RegexpSingleline Ignore term inside of url + // https://www.elastic.co/guide/en/elasticsearch/reference/master/eager-global-ordinals.html#_avoiding_global_ordinal_loading + // CS-ENFORCE-SINGLE + if (hasFieldMasking) { - if (searchRequest.source() != null && searchRequest.source().aggregations() != null) { - for (AggregationBuilder aggregationBuilder : searchRequest.source().aggregations().getAggregatorFactories()) { - if (aggregationBuilder instanceof TermsAggregationBuilder) { - ((TermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); - } + if (searchRequest.source() != null && searchRequest.source().aggregations() != null) { + for (AggregationBuilder aggregationBuilder 
: searchRequest.source().aggregations().getAggregatorFactories()) { + if (aggregationBuilder instanceof TermsAggregationBuilder) { + ((TermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + } - if (aggregationBuilder instanceof SignificantTermsAggregationBuilder) { - ((SignificantTermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); - } + if (aggregationBuilder instanceof SignificantTermsAggregationBuilder) { + ((SignificantTermsAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + } - if (aggregationBuilder instanceof DiversifiedAggregationBuilder) { - ((DiversifiedAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + if (aggregationBuilder instanceof DiversifiedAggregationBuilder) { + ((DiversifiedAggregationBuilder) aggregationBuilder).executionHint(MAP_EXECUTION_HINT); + } } } } - } - if (!evaluatedDlsFlsConfig.hasFls() && !evaluatedDlsFlsConfig.hasDls() && searchRequest.source().aggregations() != null) { + if (!hasFlsRestrictions && !hasDlsRestrictions && searchRequest.source().aggregations() != null) { - boolean cacheable = true; + boolean cacheable = true; - for (AggregationBuilder af : searchRequest.source().aggregations().getAggregatorFactories()) { + for (AggregationBuilder af : searchRequest.source().aggregations().getAggregatorFactories()) { - if (!af.getType().equals("cardinality") && !af.getType().equals("count")) { - cacheable = false; - continue; - } + if (!af.getType().equals("cardinality") && !af.getType().equals("count")) { + cacheable = false; + continue; + } - StringBuilder sb = new StringBuilder(); + StringBuilder sb = new StringBuilder(); - if (searchRequest.source() != null) { - sb.append(Strings.toString(MediaTypeRegistry.JSON, searchRequest.source()) + System.lineSeparator()); - } + if (searchRequest.source() != null) { + sb.append(Strings.toString(MediaTypeRegistry.JSON, searchRequest.source()) + System.lineSeparator()); + } - 
sb.append(Strings.toString(MediaTypeRegistry.JSON, af) + System.lineSeparator()); + sb.append(Strings.toString(MediaTypeRegistry.JSON, af) + System.lineSeparator()); - LogManager.getLogger("debuglogger").error(sb.toString()); + LogManager.getLogger("debuglogger").error(sb.toString()); - } + } + + if (!cacheable) { + searchRequest.requestCache(Boolean.FALSE); + } else { + LogManager.getLogger("debuglogger") + .error( + "Shard requestcache enabled for " + + (searchRequest.source() == null + ? "" + : Strings.toString(MediaTypeRegistry.JSON, searchRequest.source())) + ); + } - if (!cacheable) { - searchRequest.requestCache(Boolean.FALSE); } else { - LogManager.getLogger("debuglogger") - .error( - "Shard requestcache enabled for " - + (searchRequest.source() == null - ? "" - : Strings.toString(MediaTypeRegistry.JSON, searchRequest.source())) - ); + searchRequest.requestCache(Boolean.FALSE); } - - } else { - searchRequest.requestCache(Boolean.FALSE); } - } - - if (request instanceof UpdateRequest) { - listener.onFailure(new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated")); - return false; - } - if (request instanceof BulkRequest) { - for (DocWriteRequest inner : ((BulkRequest) request).requests()) { - if (inner instanceof UpdateRequest) { - listener.onFailure( - new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated") - ); - return false; - } + if (request instanceof UpdateRequest) { + listener.onFailure(new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated")); + return false; } - } - if (request instanceof BulkShardRequest) { - for (BulkItemRequest inner : ((BulkShardRequest) request).items()) { - if (inner.request() instanceof UpdateRequest) { - listener.onFailure( - new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated") - ); - return false; + if (request instanceof 
BulkShardRequest) { + for (BulkItemRequest inner : ((BulkShardRequest) request).items()) { + if (inner.request() instanceof UpdateRequest) { + listener.onFailure( + new OpenSearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated") + ); + return false; + } } } - } - if (request instanceof ResizeRequest) { - listener.onFailure(new OpenSearchSecurityException("Resize is not supported when FLS or DLS or Fieldmasking is activated")); - return false; - } - - if (context.getAction().contains("plugins/replication")) { - listener.onFailure( - new OpenSearchSecurityException( - "Cross Cluster Replication is not supported when FLS or DLS or Fieldmasking is activated", - RestStatus.FORBIDDEN - ) - ); - return false; - } + if (request instanceof ResizeRequest) { + listener.onFailure(new OpenSearchSecurityException("Resize is not supported when FLS or DLS or Fieldmasking is activated")); + return false; + } - if (evaluatedDlsFlsConfig.hasDls()) { - if (request instanceof SearchRequest) { + if (context.getAction().contains("plugins/replication")) { + listener.onFailure( + new OpenSearchSecurityException( + "Cross Cluster Replication is not supported when FLS or DLS or Fieldmasking is activated", + RestStatus.FORBIDDEN + ) + ); + return false; + } - final SearchSourceBuilder source = ((SearchRequest) request).source(); - if (source != null) { - AggregatorFactories.Builder aggregations = source.aggregations(); - if (aggregations != null) { - for (AggregationBuilder factory : aggregations.getAggregatorFactories()) { - if (factory instanceof TermsAggregationBuilder && ((TermsAggregationBuilder) factory).minDocCount() == 0) { - listener.onFailure(new OpenSearchException("min_doc_count 0 is not supported when DLS is activated")); - return false; + if (hasDlsRestrictions) { + if (request instanceof SearchRequest) { + + final SearchSourceBuilder source = ((SearchRequest) request).source(); + if (source != null) { + AggregatorFactories.Builder 
aggregations = source.aggregations(); + if (aggregations != null) { + for (AggregationBuilder factory : aggregations.getAggregatorFactories()) { + if (factory instanceof TermsAggregationBuilder && ((TermsAggregationBuilder) factory).minDocCount() == 0) { + listener.onFailure(new OpenSearchException("min_doc_count 0 is not supported when DLS is activated")); + return false; + } } } - } - if (source.profile()) { - listener.onFailure(new OpenSearchSecurityException("Profiling is not supported when DLS is activated")); - return false; - } + if (source.profile()) { + listener.onFailure(new OpenSearchSecurityException("Profiling is not supported when DLS is activated")); + return false; + } + } } } - } - if (doFilterLevelDls && filteredDlsFlsConfig.hasDls()) { - return DlsFilterLevelActionHandler.handle( - context, - evaluatedDlsFlsConfig, - listener, - nodeClient, - clusterService, - OpenSearchSecurityPlugin.GuiceHolder.getIndicesService(), - resolver, - dlsQueryParser, - threadContext - ); - } else { - return true; + if (doFilterLevelDls && hasDlsRestrictions) { + return DlsFilterLevelActionHandler.handle( + context, + dlsRestrictionMap, + listener, + nodeClient, + clusterService, + OpenSearchSecurityPlugin.GuiceHolder.getIndicesService(), + resolver, + threadContext + ); + } else { + return true; + } + + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating DLS/FLS privileges", e); + listener.onFailure(new OpenSearchSecurityException("Error while evaluating DLS/FLS privileges")); + return false; + } catch (RuntimeException e) { + log.error(e); + throw e; } } @Override - public void handleSearchContext(SearchContext context, ThreadPool threadPool, NamedXContentRegistry namedXContentRegistry) { + public void handleSearchContext(SearchContext searchContext, ThreadPool threadPool, NamedXContentRegistry namedXContentRegistry) { try { - @SuppressWarnings("unchecked") - final Map> queries = (Map>) HeaderHelper.deserializeSafeFromHeader( - 
threadPool.getThreadContext(), - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER - ); + String index = searchContext.indexShard().indexSettings().getIndex().getName(); + + if (log.isTraceEnabled()) { + log.trace("handleSearchContext(); index: {}", index); + } - final String dlsEval = SecurityUtils.evalMap(queries, context.indexShard().indexSettings().getIndex().getName()); + if (searchContext.suggest() != null) { + return; + } - if (dlsEval != null) { + if (dlsFlsBaseContext.isDlsDoneOnFilterLevel() || mode == Mode.FILTER_LEVEL) { + // For filter level DLS, the query was already modified to include the DLS restrictions. + // Thus, we can exist here early. + log.trace("handleSearchContext(): DLS is done on the filter level; no further handling necessary"); + return; + } - if (context.suggest() != null) { - return; - } + if (dlsFlsBaseContext.isPrivilegedConfigRequest()) { + // Requests with the header OPENDISTRO_SECURITY_CONF_REQUEST_HEADER set bypass any access controls. + // This follows the logic from + // https://github.com/opensearch-project/security/blob/1c898dcc4a92e8d4aa8b18c3fed761b5f6e52d4f/src/main/java/org/opensearch/security/filter/SecurityFilter.java#L209 + // In the old DLS/FLS implementation, that check in SecurityFilter would also affect this code. + // Now it does not any more, thus we need this additional check here. 
+ return; + } - assert context.parsedQuery() != null; + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return; + } - final Set unparsedDlsQueries = queries.get(dlsEval); + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); - if (unparsedDlsQueries != null && !unparsedDlsQueries.isEmpty()) { - BooleanQuery.Builder queryBuilder = dlsQueryParser.parse( - unparsedDlsQueries, - context.getQueryShardContext(), - (q) -> new ConstantScoreQuery(q) - ); + DlsRestriction dlsRestriction = config.getDocumentPrivileges().getRestriction(privilegesEvaluationContext, index); + + if (log.isTraceEnabled()) { + log.trace("handleSearchContext(); index: {}; dlsRestriction: {}", index, dlsRestriction); + } - queryBuilder.add(context.parsedQuery().query(), Occur.MUST); + DocumentAllowList documentAllowList = DocumentAllowList.get(threadContext); - ParsedQuery dlsQuery = new ParsedQuery(queryBuilder.build()); + if (documentAllowList.isEntryForIndexPresent(index)) { + // The documentAllowList is needed for two cases: + // - DLS rules which use "term lookup queries" and thus need to access indices for which no privileges are present + // - Dashboards multi tenancy which can redirect index accesses to indices for which no normal index privileges are present - if (dlsQuery != null) { - context.parsedQuery(dlsQuery); - context.preProcess(true); - } + if (!dlsRestriction.isUnrestricted() && documentAllowList.isAllowed(index, "*")) { + dlsRestriction = DlsRestriction.NONE; + log.debug("Lifting DLS for {} due to present document allowlist", index); + } + } + + if (!dlsRestriction.isUnrestricted()) { + if (mode == Mode.ADAPTIVE && dlsRestriction.containsTermLookupQuery()) { + // Special case for scroll operations: + // Normally, the check dlsFlsBaseContext.isDlsDoneOnFilterLevel() already aborts early if DLS filter level mode + // has been activated. 
However, this is not the case for scroll operations, as these lose the thread context value + // on which dlsFlsBaseContext.isDlsDoneOnFilterLevel() is based on. Thus, we need to check here again the deeper + // conditions. + log.trace("DlsRestriction: contains TLQ."); + return; } + + assert searchContext.parsedQuery() != null; + + BooleanQuery.Builder queryBuilder = dlsRestriction.toBooleanQueryBuilder( + searchContext.getQueryShardContext(), + (q) -> new ConstantScoreQuery(q) + ); + + queryBuilder.add(searchContext.parsedQuery().query(), Occur.MUST); + + searchContext.parsedQuery(new ParsedQuery(queryBuilder.build())); + searchContext.preProcess(true); } } catch (Exception e) { + log.error("Error in handleSearchContext()", e); throw new RuntimeException("Error evaluating dls for a search query: " + e, e); } } @@ -411,6 +442,45 @@ public void onQueryPhase(QuerySearchResult queryResult) { ); } + @Override + public DlsFlsProcessedConfig getCurrentConfig() { + return dlsFlsProcessedConfig.get(); + } + + @Override + public boolean hasFlsOrFieldMasking(String index) throws PrivilegesEvaluationException { + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return false; + } + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); + return !config.getFieldPrivileges().isUnrestricted(privilegesEvaluationContext, index) + || !config.getFieldMasking().isUnrestricted(privilegesEvaluationContext, index); + } + + @Override + public boolean hasFieldMasking(String index) throws PrivilegesEvaluationException { + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return false; + } + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); + return !config.getFieldMasking().isUnrestricted(privilegesEvaluationContext, index); + } + + 
@Override + public boolean isFieldAllowed(String index, String field) throws PrivilegesEvaluationException { + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); + if (privilegesEvaluationContext == null) { + return true; + } + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfig.get(); + return config.getFieldPrivileges().getRestriction(privilegesEvaluationContext, index).isAllowed(field); + } + private static InternalAggregation aggregateBuckets(InternalAggregation aggregation) { if (aggregation instanceof StringTerms) { StringTerms stringTerms = (StringTerms) aggregation; @@ -441,42 +511,6 @@ private static List mergeBuckets( return buckets; } - private void setDlsHeaders(EvaluatedDlsFlsConfig dlsFls, ActionRequest request) { - if (!dlsFls.getDlsQueriesByIndex().isEmpty()) { - Map> dlsQueries = dlsFls.getDlsQueriesByIndex(); - - if (request instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { - threadContext.addResponseHeader( - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, - Base64Helper.serializeObject((Serializable) dlsQueries) - ); - if (log.isDebugEnabled()) { - log.debug("added response header for DLS info: {}", dlsQueries); - } - } else { - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER) != null) { - Object deserializedDlsQueries = Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ); - if (!dlsQueries.equals(deserializedDlsQueries)) { - throw new OpenSearchSecurityException( - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER + " does not match (SG 900D)" - ); - } - } else { - threadContext.putHeader( - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, - Base64Helper.serializeObject((Serializable) dlsQueries) - ); - if (log.isDebugEnabled()) { - log.debug("attach 
DLS info: {}", dlsQueries); - } - } - } - } - } - private void setDlsModeHeader(Mode mode) { String modeString = mode.name(); @@ -504,95 +538,6 @@ private Mode getDlsModeHeader() { } } - private void setFlsHeaders(EvaluatedDlsFlsConfig dlsFls, ActionRequest request) { - if (!dlsFls.getFieldMaskingByIndex().isEmpty()) { - Map> maskedFieldsMap = dlsFls.getFieldMaskingByIndex(); - - if (request instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { - threadContext.addResponseHeader( - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, - Base64Helper.serializeObject((Serializable) maskedFieldsMap) - ); - if (log.isDebugEnabled()) { - log.debug("added response header for masked fields info: {}", maskedFieldsMap); - } - } else { - - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER) != null) { - if (!maskedFieldsMap.equals( - Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ) - )) { - throw new OpenSearchSecurityException( - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER + " does not match (SG 901D)" - ); - } else { - if (log.isDebugEnabled()) { - log.debug(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER + " already set"); - } - } - } else { - threadContext.putHeader( - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, - Base64Helper.serializeObject((Serializable) maskedFieldsMap) - ); - if (log.isDebugEnabled()) { - log.debug("attach masked fields info: {}", maskedFieldsMap); - } - } - } - } - - if (!dlsFls.getFlsByIndex().isEmpty()) { - Map> flsFields = dlsFls.getFlsByIndex(); - - if (request instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { - threadContext.addResponseHeader( - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, - Base64Helper.serializeObject((Serializable) 
flsFields) - ); - if (log.isDebugEnabled()) { - log.debug("added response header for FLS info: {}", flsFields); - } - } else { - if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER) != null) { - if (!flsFields.equals( - Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ) - )) { - throw new OpenSearchSecurityException( - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - + " does not match (SG 901D) " - + flsFields - + "---" - + Base64Helper.deserializeObject( - threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER), - threadContext.getTransient(ConfigConstants.USE_JDK_SERIALIZATION) - ) - ); - } else { - if (log.isDebugEnabled()) { - log.debug(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER + " already set"); - } - } - } else { - threadContext.putHeader( - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, - Base64Helper.serializeObject((Serializable) flsFields) - ); - if (log.isDebugEnabled()) { - log.debug("attach FLS info: {}", flsFields); - } - } - } - - } - } - private static class BucketMerger implements Consumer { private Comparator comparator; private StringTerms.Bucket bucket = null; @@ -730,4 +675,26 @@ static Mode get(Settings settings) { } } } + + public void updateConfiguration(SecurityDynamicConfiguration rolesConfiguration) { + try { + if (rolesConfiguration != null) { + DlsFlsProcessedConfig oldConfig = this.dlsFlsProcessedConfig.getAndSet( + new DlsFlsProcessedConfig( + DynamicConfigFactory.addStatics(rolesConfiguration.clone()), + clusterService.state().metadata().getIndicesLookup(), + namedXContentRegistry, + settings, + fieldMaskingConfig + ) + ); + + if (oldConfig != null) { + oldConfig.shutdown(); + } + } + } catch (Exception e) { + log.error("Error while updating DLS/FLS configuration with {}", rolesConfiguration, e); + } + } } diff --git 
a/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java b/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java deleted file mode 100644 index 9640abcd8e..0000000000 --- a/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.configuration; - -import java.util.Set; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.function.Function; - -import com.google.common.cache.Cache; -import com.google.common.cache.CacheBuilder; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.PrefixQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitSetProducer; -import org.apache.lucene.search.join.ToChildBlockJoinQuery; - -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.xcontent.DeprecationHandler; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.query.AbstractQueryBuilder; -import org.opensearch.index.query.ParsedQuery; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.TermsQueryBuilder; -import org.opensearch.security.queries.QueryBuilderTraverser; - -public final class 
DlsQueryParser { - - private static final Logger log = LogManager.getLogger(DlsQueryParser.class); - private static final Query NON_NESTED_QUERY; - - static { - // Match all documents but not the nested ones - // Nested document types start with __ - // https://discuss.elastic.co/t/whats-nested-documents-layout-inside-the-lucene/59944/9 - NON_NESTED_QUERY = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), Occur.FILTER) - .add(new PrefixQuery(new Term("_type", "__")), Occur.MUST_NOT) - .build(); - } - - private static Cache parsedQueryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(4, TimeUnit.HOURS) - .build(); - private static Cache queryContainsTlqCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(4, TimeUnit.HOURS) - .build(); - - private final NamedXContentRegistry namedXContentRegistry; - - public DlsQueryParser(NamedXContentRegistry namedXContentRegistry) { - this.namedXContentRegistry = namedXContentRegistry; - } - - public BooleanQuery.Builder parse(Set unparsedDlsQueries, QueryShardContext queryShardContext) { - return parse(unparsedDlsQueries, queryShardContext, null); - } - - public BooleanQuery.Builder parse( - Set unparsedDlsQueries, - QueryShardContext queryShardContext, - Function queryMapFunction - ) { - - if (unparsedDlsQueries == null || unparsedDlsQueries.isEmpty()) { - return null; - } - - boolean hasNestedMapping = queryShardContext.getMapperService().hasNested(); - - BooleanQuery.Builder dlsQueryBuilder = new BooleanQuery.Builder(); - dlsQueryBuilder.setMinimumNumberShouldMatch(1); - - for (String unparsedDlsQuery : unparsedDlsQueries) { - ParsedQuery parsedQuery = queryShardContext.toQuery(parse(unparsedDlsQuery)); - Query dlsQuery = parsedQuery.query(); - - if (queryMapFunction != null) { - dlsQuery = queryMapFunction.apply(dlsQuery); - } - - dlsQueryBuilder.add(dlsQuery, Occur.SHOULD); - - if (hasNestedMapping) { - handleNested(queryShardContext, dlsQueryBuilder, dlsQuery); - } - } 
- - return dlsQueryBuilder; - } - - private static void handleNested( - final QueryShardContext queryShardContext, - final BooleanQuery.Builder dlsQueryBuilder, - final Query parentQuery - ) { - final BitSetProducer parentDocumentsFilter = queryShardContext.bitsetFilter(NON_NESTED_QUERY); - dlsQueryBuilder.add(new ToChildBlockJoinQuery(parentQuery, parentDocumentsFilter), Occur.SHOULD); - } - - public QueryBuilder parse(String unparsedDlsQuery) { - try { - final QueryBuilder qb = parsedQueryCache.get(unparsedDlsQuery, new Callable() { - - @Override - public QueryBuilder call() throws Exception { - final XContentParser parser = JsonXContent.jsonXContent.createParser( - namedXContentRegistry, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - unparsedDlsQuery - ); - return AbstractQueryBuilder.parseInnerQueryBuilder(parser); - } - - }); - - return qb; - } catch (ExecutionException e) { - throw new RuntimeException("Error while parsing " + unparsedDlsQuery, e.getCause()); - } - } - - boolean containsTermLookupQuery(Set unparsedQueries) { - for (String query : unparsedQueries) { - if (containsTermLookupQuery(query)) { - if (log.isDebugEnabled()) { - log.debug("containsTermLookupQuery() returns true due to " + query + "\nqueries: " + unparsedQueries); - } - - return true; - } - } - - if (log.isDebugEnabled()) { - log.debug("containsTermLookupQuery() returns false\nqueries: " + unparsedQueries); - } - - return false; - } - - boolean containsTermLookupQuery(String query) { - try { - return queryContainsTlqCache.get(query, () -> { - QueryBuilder queryBuilder = parse(query); - - return QueryBuilderTraverser.exists( - queryBuilder, - (q) -> (q instanceof TermsQueryBuilder) && ((TermsQueryBuilder) q).termsLookup() != null - ); - }); - } catch (ExecutionException e) { - throw new RuntimeException("Error handling parsing " + query, e.getCause()); - } - } - -} diff --git a/src/main/java/org/opensearch/security/configuration/MaskedField.java 
b/src/main/java/org/opensearch/security/configuration/MaskedField.java deleted file mode 100644 index 579b9f476d..0000000000 --- a/src/main/java/org/opensearch/security/configuration/MaskedField.java +++ /dev/null @@ -1,210 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.configuration; - -import java.nio.charset.StandardCharsets; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import com.google.common.base.Splitter; -import org.apache.commons.lang3.StringUtils; -import org.apache.lucene.util.BytesRef; -import org.bouncycastle.util.encoders.Hex; - -import com.rfksystems.blake2b.Blake2b; - -public class MaskedField { - - private final String name; - private String algo = null; - private List regexReplacements; - private final byte[] defaultSalt; - private final String defaultAlgorithm; - - public MaskedField(final String value, final Salt salt, final String defaultAlgorithm) { - this.defaultSalt = salt.getSalt16(); - this.defaultAlgorithm = defaultAlgorithm; - final List tokens = Splitter.on("::").splitToList(Objects.requireNonNull(value)); - final int tokenCount = tokens.size(); - if (tokenCount == 1) { - name = tokens.get(0); - } else if (tokenCount == 2) { - name = tokens.get(0); - algo = tokens.get(1); - } else if (tokenCount >= 3 && tokenCount % 2 == 1) { - name = tokens.get(0); - regexReplacements = new ArrayList<>((tokenCount - 1) / 2); - for (int i = 1; i < tokenCount - 1; i = i + 2) { - regexReplacements.add(new RegexReplacement(tokens.get(i), tokens.get(i + 1))); - } - } else { - throw new 
IllegalArgumentException("Expected 1 or 2 or >=3 (but then odd count) tokens, got " + tokenCount); - } - } - - public final void isValid() throws Exception { - mask(new byte[] { 1, 2, 3, 4, 5 }); - } - - public byte[] mask(byte[] value) { - if (algo != null) { - return customHash(value, algo); - } else if (regexReplacements != null) { - String cur = new String(value, StandardCharsets.UTF_8); - for (RegexReplacement rr : regexReplacements) { - cur = cur.replaceAll(rr.getRegex(), rr.getReplacement()); - } - return cur.getBytes(StandardCharsets.UTF_8); - } else if (StringUtils.isNotEmpty(defaultAlgorithm)) { - return customHash(value, defaultAlgorithm); - } else { - return blake2bHash(value); - } - } - - public String mask(String value) { - return new String(mask(value.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); - } - - public BytesRef mask(BytesRef value) { - if (value == null) { - return null; - } - final BytesRef copy = BytesRef.deepCopyOf(value); - return new BytesRef(mask(copy.bytes)); - } - - public String getName() { - return name; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((algo == null) ? 0 : algo.hashCode()); - result = prime * result + ((name == null) ? 0 : name.hashCode()); - result = prime * result + ((regexReplacements == null) ? 
0 : regexReplacements.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - MaskedField other = (MaskedField) obj; - if (algo == null) { - if (other.algo != null) return false; - } else if (!algo.equals(other.algo)) return false; - if (name == null) { - if (other.name != null) return false; - } else if (!name.equals(other.name)) return false; - if (regexReplacements == null) { - if (other.regexReplacements != null) return false; - } else if (!regexReplacements.equals(other.regexReplacements)) return false; - return true; - } - - @Override - public String toString() { - return "MaskedField [name=" - + name - + ", algo=" - + algo - + ", regexReplacements=" - + regexReplacements - + ", defaultSalt=" - + Arrays.toString(defaultSalt) - + ", defaultAlgorithm=" - + defaultAlgorithm - + ", isDefault()=" - + isDefault() - + "]"; - } - - private boolean isDefault() { - return regexReplacements == null && algo == null; - } - - private static byte[] customHash(byte[] in, final String algorithm) { - try { - MessageDigest digest = MessageDigest.getInstance(algorithm); - return Hex.encode(digest.digest(in)); - } catch (NoSuchAlgorithmException e) { - throw new IllegalArgumentException(e); - } - } - - private byte[] blake2bHash(byte[] in) { - // Salt is passed incorrectly but order of parameters is retained at present to ensure full backwards compatibility - // Tracking with https://github.com/opensearch-project/security/issues/4274 - final Blake2b hash = new Blake2b(null, 32, null, defaultSalt); - hash.update(in, 0, in.length); - final byte[] out = new byte[hash.getDigestSize()]; - hash.digest(out, 0); - return Hex.encode(out); - } - - private static class RegexReplacement { - private final String regex; - private final String replacement; - - public RegexReplacement(String regex, String replacement) { - super(); - this.regex = 
regex.substring(1).substring(0, regex.length() - 2); - this.replacement = replacement; - } - - public String getRegex() { - return regex; - } - - public String getReplacement() { - return replacement; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((regex == null) ? 0 : regex.hashCode()); - result = prime * result + ((replacement == null) ? 0 : replacement.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - RegexReplacement other = (RegexReplacement) obj; - if (regex == null) { - if (other.regex != null) return false; - } else if (!regex.equals(other.regex)) return false; - if (replacement == null) { - if (other.replacement != null) return false; - } else if (!replacement.equals(other.replacement)) return false; - return true; - } - - @Override - public String toString() { - return "RegexReplacement [regex=" + regex + ", replacement=" + replacement + "]"; - } - - } -} diff --git a/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java b/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java index e4d75c5611..4a0b25bdce 100644 --- a/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java +++ b/src/main/java/org/opensearch/security/configuration/PrivilegesInterceptorImpl.java @@ -46,6 +46,7 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; +import org.opensearch.security.privileges.DocumentAllowList; import org.opensearch.security.privileges.PrivilegesInterceptor; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; import org.opensearch.security.securityconf.DynamicConfigModel; @@ -203,6 +204,11 @@ && isTenantAllowed(request, 
action, user, tenants, requestedTenant)) { // to avoid security issue final String tenantIndexName = toUserIndexName(dashboardsIndexName, requestedTenant); + + // The new DLS/FLS implementation defaults to a "deny all" pattern in case no roles are configured + // for an index. As the PrivilegeInterceptor grants access to indices bypassing index privileges, + // we need to allow-list these indices. + applyDocumentAllowList(tenantIndexName); return newAccessGrantedReplaceResult(replaceIndex(request, dashboardsIndexName, tenantIndexName, action)); } else if (!user.getName().equals(dashboardsServerUsername)) { @@ -218,6 +224,20 @@ && isTenantAllowed(request, action, user, tenants, requestedTenant)) { return CONTINUE_EVALUATION_REPLACE_RESULT; } + private void applyDocumentAllowList(String indexName) { + DocumentAllowList documentAllowList = new DocumentAllowList(); + documentAllowList.add(indexName, "*"); + IndexAbstraction indexAbstraction = clusterService.state().getMetadata().getIndicesLookup().get(indexName); + + if (indexAbstraction instanceof IndexAbstraction.Alias) { + for (IndexMetadata index : ((IndexAbstraction.Alias) indexAbstraction).getIndices()) { + documentAllowList.add(index.getIndex().getName(), "*"); + } + } + + documentAllowList.applyTo(threadPool.getThreadContext()); + } + private String getConcreteIndexName(String name, Map indicesLookup) { for (int i = 1; i < Integer.MAX_VALUE; i++) { String concreteName = name.concat("_" + i); diff --git a/src/main/java/org/opensearch/security/configuration/Salt.java b/src/main/java/org/opensearch/security/configuration/Salt.java index 3799fa846f..e13a430c79 100644 --- a/src/main/java/org/opensearch/security/configuration/Salt.java +++ b/src/main/java/org/opensearch/security/configuration/Salt.java @@ -69,7 +69,7 @@ private Salt(final String saltAsString) { * Returns a new salt array every time it is called. 
* @return salt in bytes */ - byte[] getSalt16() { + public byte[] getSalt16() { return salt16; } diff --git a/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java b/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java index f66ff7a2c0..4f7a412097 100644 --- a/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java +++ b/src/main/java/org/opensearch/security/configuration/SecurityFlsDlsIndexSearcherWrapper.java @@ -15,17 +15,17 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashSet; -import java.util.Map; import java.util.Set; import java.util.function.LongSupplier; +import java.util.function.Supplier; -import com.google.common.collect.Sets; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.search.ConstantScoreQuery; import org.apache.lucene.search.Query; +import org.opensearch.OpenSearchException; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -36,12 +36,18 @@ import org.opensearch.index.shard.ShardUtils; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.compliance.ComplianceIndexingOperationListener; +import org.opensearch.security.privileges.DocumentAllowList; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; import org.opensearch.security.privileges.PrivilegesEvaluator; +import org.opensearch.security.privileges.dlsfls.DlsFlsBaseContext; +import org.opensearch.security.privileges.dlsfls.DlsFlsProcessedConfig; +import org.opensearch.security.privileges.dlsfls.DlsRestriction; +import org.opensearch.security.privileges.dlsfls.FieldMasking; +import 
org.opensearch.security.privileges.dlsfls.FieldPrivileges; import org.opensearch.security.support.ConfigConstants; -import org.opensearch.security.support.HeaderHelper; -import org.opensearch.security.support.SecurityUtils; -public class SecurityFlsDlsIndexSearcherWrapper extends SecurityIndexSearcherWrapper { +public class SecurityFlsDlsIndexSearcherWrapper extends SystemIndexSearcherWrapper { public final Logger log = LogManager.getLogger(this.getClass()); @@ -53,8 +59,8 @@ public class SecurityFlsDlsIndexSearcherWrapper extends SecurityIndexSearcherWra private final IndexService indexService; private final AuditLog auditlog; private final LongSupplier nowInMillis; - private final DlsQueryParser dlsQueryParser; - private final Salt salt; + private final Supplier dlsFlsProcessedConfigSupplier; + private final DlsFlsBaseContext dlsFlsBaseContext; public SecurityFlsDlsIndexSearcherWrapper( final IndexService indexService, @@ -64,7 +70,8 @@ public SecurityFlsDlsIndexSearcherWrapper( final AuditLog auditlog, final ComplianceIndexingOperationListener ciol, final PrivilegesEvaluator evaluator, - final Salt salt + final Supplier dlsFlsProcessedConfigSupplier, + final DlsFlsBaseContext dlsFlsBaseContext ) { super(indexService, settings, adminDNs, evaluator); Set metadataFieldsCopy; @@ -87,7 +94,6 @@ public SecurityFlsDlsIndexSearcherWrapper( this.clusterService = clusterService; this.indexService = indexService; this.auditlog = auditlog; - this.dlsQueryParser = new DlsQueryParser(indexService.xContentRegistry()); final boolean allowNowinDlsQueries = settings.getAsBoolean(ConfigConstants.SECURITY_UNSUPPORTED_ALLOW_NOW_IN_DLS, false); if (allowNowinDlsQueries) { nowInMillis = () -> System.currentTimeMillis(); @@ -95,7 +101,8 @@ public SecurityFlsDlsIndexSearcherWrapper( nowInMillis = () -> { throw new IllegalArgumentException("'now' is not allowed in DLS queries"); }; } log.debug("FLS/DLS {} enabled for index {}", this, indexService.index().getName()); - this.salt = salt; + 
this.dlsFlsProcessedConfigSupplier = dlsFlsProcessedConfigSupplier; + this.dlsFlsBaseContext = dlsFlsBaseContext; } @SuppressWarnings("unchecked") @@ -103,62 +110,97 @@ public SecurityFlsDlsIndexSearcherWrapper( protected DirectoryReader dlsFlsWrap(final DirectoryReader reader, boolean isAdmin) throws IOException { final ShardId shardId = ShardUtils.extractShardId(reader); + PrivilegesEvaluationContext privilegesEvaluationContext = this.dlsFlsBaseContext.getPrivilegesEvaluationContext(); - Set flsFields = null; - Set maskedFields = null; - Query dlsQuery = null; - - if (!isAdmin) { + if (log.isTraceEnabled()) { + log.trace("dlsFlsWrap(); index: {}; privilegeEvaluationContext: {}", index.getName(), privilegesEvaluationContext); + } - final Map> allowedFlsFields = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadContext, - ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER - ); - final Map> queries = (Map>) HeaderHelper.deserializeSafeFromHeader( - threadContext, - ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER - ); - final Map> maskedFieldsMap = (Map>) HeaderHelper.deserializeSafeFromHeader( + if (isAdmin || privilegesEvaluationContext == null) { + return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader( + reader, + FieldPrivileges.FlsRule.ALLOW_ALL, + null, + indexService, threadContext, - ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER + clusterService, + auditlog, + FieldMasking.FieldMaskingRule.ALLOW_ALL, + shardId, + metaFields ); + } + + try { + + DlsFlsProcessedConfig config = this.dlsFlsProcessedConfigSupplier.get(); + DlsRestriction dlsRestriction; + + if (!this.dlsFlsBaseContext.isDlsDoneOnFilterLevel()) { + dlsRestriction = config.getDocumentPrivileges().getRestriction(privilegesEvaluationContext, index.getName()); + } else { + dlsRestriction = DlsRestriction.NONE; + } - final String flsEval = SecurityUtils.evalMap(allowedFlsFields, index.getName()); - final String dlsEval = SecurityUtils.evalMap(queries, index.getName()); - final 
String maskedEval = SecurityUtils.evalMap(maskedFieldsMap, index.getName()); + FieldPrivileges.FlsRule flsRule = config.getFieldPrivileges().getRestriction(privilegesEvaluationContext, index.getName()); + FieldMasking.FieldMaskingRule fmRule = config.getFieldMasking().getRestriction(privilegesEvaluationContext, index.getName()); - if (flsEval != null) { - flsFields = Sets.union(metaFields, allowedFlsFields.get(flsEval)); + Query dlsQuery; + + if (dlsRestriction.isUnrestricted()) { + dlsQuery = null; + } else { + QueryShardContext queryShardContext = this.indexService.newQueryShardContext(shardId.getId(), null, nowInMillis, null); + dlsQuery = new ConstantScoreQuery(dlsRestriction.toBooleanQueryBuilder(queryShardContext, null).build()); } - if (dlsEval != null) { - Set unparsedDlsQueries = queries.get(dlsEval); + DocumentAllowList documentAllowList = DocumentAllowList.get(threadContext); + + if (documentAllowList.isEntryForIndexPresent(index.getName())) { + // The documentAllowList is needed for two cases: + // - DLS rules which use "term lookup queries" and thus need to access indices for which no privileges are present + // - Dashboards multi tenancy which can redirect index accesses to indices for which no normal index privileges are present + + if (!dlsRestriction.isUnrestricted() && documentAllowList.isAllowed(index.getName(), "*")) { + dlsRestriction = DlsRestriction.NONE; + log.debug("Lifting DLS for {} due to present document allowlist", index.getName()); + dlsQuery = null; - if (unparsedDlsQueries != null && !unparsedDlsQueries.isEmpty()) { - QueryShardContext queryShardContext = this.indexService.newQueryShardContext(shardId.getId(), null, nowInMillis, null); - // no need for scoring here, so its possible to wrap this in a - // ConstantScoreQuery - dlsQuery = new ConstantScoreQuery(dlsQueryParser.parse(unparsedDlsQueries, queryShardContext).build()); + } + + if (!flsRule.isAllowAll() || !fmRule.isAllowAll()) { + log.debug("Lifting FLS/FM for {} due to 
present document allowlist", index.getName()); + flsRule = FieldPrivileges.FlsRule.ALLOW_ALL; + fmRule = FieldMasking.FieldMaskingRule.ALLOW_ALL; } } - if (maskedEval != null) { - maskedFields = new HashSet<>(); - maskedFields.addAll(maskedFieldsMap.get(maskedEval)); + if (log.isTraceEnabled()) { + log.trace( + "dlsFlsWrap(); index: {}; dlsRestriction: {}; flsRule: {}; fmRule: {}", + index.getName(), + dlsRestriction, + flsRule, + fmRule + ); } - } - return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader( - reader, - flsFields, - dlsQuery, - indexService, - threadContext, - clusterService, - auditlog, - maskedFields, - shardId, - salt - ); + return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader( + reader, + flsRule, + dlsQuery, + indexService, + threadContext, + clusterService, + auditlog, + fmRule, + shardId, + metaFields + ); + + } catch (PrivilegesEvaluationException e) { + log.error("Error while evaluating DLS/FLS for {}", this.index.getName(), e); + throw new OpenSearchException("Error while evaluating DLS/FLS", e); + } } } diff --git a/src/main/java/org/opensearch/security/configuration/SecurityIndexSearcherWrapper.java b/src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java similarity index 86% rename from src/main/java/org/opensearch/security/configuration/SecurityIndexSearcherWrapper.java rename to src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java index 7a40e5dbd0..b87c92c356 100644 --- a/src/main/java/org/opensearch/security/configuration/SecurityIndexSearcherWrapper.java +++ b/src/main/java/org/opensearch/security/configuration/SystemIndexSearcherWrapper.java @@ -39,9 +39,12 @@ import org.opensearch.core.common.transport.TransportAddress; import org.opensearch.core.index.Index; import org.opensearch.index.IndexService; +import org.opensearch.indices.SystemIndexRegistry; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; import 
org.opensearch.security.privileges.PrivilegesEvaluator; +import org.opensearch.security.privileges.PrivilegesEvaluatorResponse; +import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.HeaderHelper; import org.opensearch.security.support.WildcardMatcher; @@ -49,7 +52,7 @@ import org.greenrobot.eventbus.Subscribe; -public class SecurityIndexSearcherWrapper implements CheckedFunction { +public class SystemIndexSearcherWrapper implements CheckedFunction { protected final Logger log = LogManager.getLogger(this.getClass()); protected final ThreadContext threadContext; @@ -68,7 +71,7 @@ public class SecurityIndexSearcherWrapper implements CheckedFunction mappedRoles = evaluator.mapRoles(user, caller); - final SecurityRoles securityRoles = evaluator.getSecurityRoles(mappedRoles); - return !securityRoles.isPermittedOnSystemIndex(index.getName()); + + String permission = ConfigConstants.SYSTEM_INDEX_PERMISSION; + PrivilegesEvaluationContext context = evaluator.createContext(user, permission); + PrivilegesEvaluatorResponse result = evaluator.getActionPrivileges() + .hasExplicitIndexPrivilege(context, Set.of(permission), IndexResolverReplacer.Resolved.ofIndex(index.getName())); + + return !result.isAllowed(); } return true; } diff --git a/src/main/java/org/opensearch/security/dlic/rest/api/AbstractApiAction.java b/src/main/java/org/opensearch/security/dlic/rest/api/AbstractApiAction.java index 160a7f708d..f27c7f0ac0 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/AbstractApiAction.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/AbstractApiAction.java @@ -646,4 +646,9 @@ public String getName() { return getClass().getSimpleName(); } + @Override + public boolean canTripCircuitBreaker() { + return false; + } + } diff --git 
a/src/main/java/org/opensearch/security/dlic/rest/api/RateLimitersApiAction.java b/src/main/java/org/opensearch/security/dlic/rest/api/RateLimitersApiAction.java index 7ef5c59c1e..387fe75cde 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/RateLimitersApiAction.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/RateLimitersApiAction.java @@ -179,13 +179,14 @@ private void authFailureConfigApiRequestHandlers(RequestHandler.RequestHandlersB // Try to remove the listener by name if (config.dynamic.auth_failure_listeners.getListeners().remove(listenerName) == null) { notFound(channel, "listener not found"); + } else { + saveOrUpdateConfiguration(client, configuration, new OnSucessActionListener<>(channel) { + @Override + public void onResponse(IndexResponse indexResponse) { + ok(channel, authFailureContent(config)); + } + }); } - saveOrUpdateConfiguration(client, configuration, new OnSucessActionListener<>(channel) { - @Override - public void onResponse(IndexResponse indexResponse) { - ok(channel, authFailureContent(config)); - } - }); }).error((status, toXContent) -> response(channel, status, toXContent))) .override(PUT, (channel, request, client) -> loadConfiguration(getConfigType(), false, false).valid(configuration -> { ConfigV7 config = (ConfigV7) configuration.getCEntry(CType.CONFIG.toLCString()); diff --git a/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java b/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java index 10abd83f7b..3c9f58a522 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/RolesApiAction.java @@ -29,14 +29,13 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.rest.RestRequest; import org.opensearch.rest.RestRequest.Method; -import org.opensearch.security.configuration.MaskedField; import org.opensearch.security.configuration.Salt; import 
org.opensearch.security.dlic.rest.validation.EndpointValidator; import org.opensearch.security.dlic.rest.validation.RequestContentValidator; import org.opensearch.security.dlic.rest.validation.RequestContentValidator.DataType; import org.opensearch.security.dlic.rest.validation.ValidationResult; +import org.opensearch.security.privileges.dlsfls.FieldMasking; import org.opensearch.security.securityconf.impl.CType; -import org.opensearch.security.support.ConfigConstants; import org.opensearch.threadpool.ThreadPool; import static org.opensearch.security.dlic.rest.api.RequestHandler.methodNotImplementedHandler; @@ -92,11 +91,7 @@ private ValidationResult validateMaskedFields(final JsonNode content) private Pair validateMaskedFieldSyntax(final JsonNode maskedFieldNode) { try { - new MaskedField( - maskedFieldNode.asText(), - SALT, - validationContext.settings().get(ConfigConstants.SECURITY_MASKED_FIELDS_ALGORITHM_DEFAULT) - ).isValid(); + new FieldMasking.FieldMaskingExpression(maskedFieldNode.asText()); } catch (Exception e) { return Pair.of(maskedFieldNode.asText(), e.getMessage()); } diff --git a/src/main/java/org/opensearch/security/dlic/rest/api/SecurityRestApiActions.java b/src/main/java/org/opensearch/security/dlic/rest/api/SecurityRestApiActions.java index 3963e443d8..c28a1bdc1d 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/SecurityRestApiActions.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/SecurityRestApiActions.java @@ -25,7 +25,7 @@ import org.opensearch.security.configuration.ConfigurationRepository; import org.opensearch.security.hasher.PasswordHasher; import org.opensearch.security.privileges.PrivilegesEvaluator; -import org.opensearch.security.ssl.SecurityKeyStore; +import org.opensearch.security.ssl.SslSettingsManager; import org.opensearch.security.ssl.transport.PrincipalExtractor; import org.opensearch.security.user.UserService; import org.opensearch.threadpool.ThreadPool; @@ -46,7 +46,7 @@ public static Collection 
getHandler( final PrivilegesEvaluator evaluator, final ThreadPool threadPool, final AuditLog auditLog, - final SecurityKeyStore securityKeyStore, + final SslSettingsManager sslSettingsManager, final UserService userService, final boolean certificatesReloadEnabled, final PasswordHasher passwordHasher @@ -97,7 +97,13 @@ public static Collection getHandler( new MultiTenancyConfigApiAction(clusterService, threadPool, securityApiDependencies), new RateLimitersApiAction(clusterService, threadPool, securityApiDependencies), new ConfigUpgradeApiAction(clusterService, threadPool, securityApiDependencies), - new SecuritySSLCertsApiAction(clusterService, threadPool, securityKeyStore, certificatesReloadEnabled, securityApiDependencies), + new SecuritySSLCertsApiAction( + clusterService, + threadPool, + sslSettingsManager, + certificatesReloadEnabled, + securityApiDependencies + ), new CertificatesApiAction(clusterService, threadPool, securityApiDependencies) ); } diff --git a/src/main/java/org/opensearch/security/dlic/rest/api/SecuritySSLCertsApiAction.java b/src/main/java/org/opensearch/security/dlic/rest/api/SecuritySSLCertsApiAction.java index 7f4bff50ab..5233149c66 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/SecuritySSLCertsApiAction.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/SecuritySSLCertsApiAction.java @@ -12,11 +12,10 @@ package org.opensearch.security.dlic.rest.api; import java.io.IOException; -import java.security.cert.X509Certificate; -import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import java.util.stream.Stream; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -31,8 +30,10 @@ import org.opensearch.rest.RestRequest.Method; import org.opensearch.security.dlic.rest.validation.ValidationResult; import org.opensearch.security.securityconf.impl.CType; -import org.opensearch.security.ssl.SecurityKeyStore; -import 
org.opensearch.security.ssl.util.SSLConfigConstants; +import org.opensearch.security.ssl.SslContextHandler; +import org.opensearch.security.ssl.SslSettingsManager; +import org.opensearch.security.ssl.config.CertType; +import org.opensearch.security.ssl.config.Certificate; import org.opensearch.security.support.ConfigConstants; import org.opensearch.threadpool.ThreadPool; @@ -62,23 +63,20 @@ public class SecuritySSLCertsApiAction extends AbstractApiAction { ) ); - private final SecurityKeyStore securityKeyStore; + private final SslSettingsManager sslSettingsManager; private final boolean certificatesReloadEnabled; - private final boolean httpsEnabled; - public SecuritySSLCertsApiAction( final ClusterService clusterService, final ThreadPool threadPool, - final SecurityKeyStore securityKeyStore, + final SslSettingsManager sslSettingsManager, final boolean certificatesReloadEnabled, final SecurityApiDependencies securityApiDependencies ) { super(Endpoint.SSL, clusterService, threadPool, securityApiDependencies); - this.securityKeyStore = securityKeyStore; + this.sslSettingsManager = sslSettingsManager; this.certificatesReloadEnabled = certificatesReloadEnabled; - this.httpsEnabled = securityApiDependencies.settings().getAsBoolean(SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED, true); this.requestHandlersBuilder.configureRequestHandlers(this::securitySSLCertsRequestHandlers); } @@ -108,10 +106,10 @@ private void securitySSLCertsRequestHandlers(RequestHandler.RequestHandlersBuild .verifyAccessForAllMethods() .override( Method.GET, - (channel, request, client) -> withSecurityKeyStore().valid(keyStore -> loadCertificates(channel, keyStore)) + (channel, request, client) -> withSecurityKeyStore().valid(ignore -> loadCertificates(channel)) .error((status, toXContent) -> response(channel, status, toXContent)) ) - .override(Method.PUT, (channel, request, client) -> withSecurityKeyStore().valid(keyStore -> { + .override(Method.PUT, (channel, request, client) -> 
withSecurityKeyStore().valid(ignore -> { if (!certificatesReloadEnabled) { badRequest( channel, @@ -123,7 +121,7 @@ private void securitySSLCertsRequestHandlers(RequestHandler.RequestHandlersBuild ) ); } else { - reloadCertificates(channel, request, keyStore); + reloadCertificates(channel, request); } }).error((status, toXContent) -> response(channel, status, toXContent))); } @@ -138,65 +136,70 @@ boolean accessHandler(final RestRequest request) { } } - ValidationResult withSecurityKeyStore() { - if (securityKeyStore == null) { + ValidationResult withSecurityKeyStore() { + if (sslSettingsManager == null) { return ValidationResult.error(RestStatus.OK, badRequestMessage("keystore is not initialized")); } - return ValidationResult.success(securityKeyStore); + return ValidationResult.success(sslSettingsManager); } - protected void loadCertificates(final RestChannel channel, final SecurityKeyStore keyStore) throws IOException { + protected void loadCertificates(final RestChannel channel) throws IOException { ok( channel, (builder, params) -> builder.startObject() - .field("http_certificates_list", httpsEnabled ? generateCertDetailList(keyStore.getHttpCerts()) : null) - .field("transport_certificates_list", generateCertDetailList(keyStore.getTransportCerts())) + .field( + "http_certificates_list", + generateCertDetailList( + sslSettingsManager.sslContextHandler(CertType.HTTP).map(SslContextHandler::keyMaterialCertificates).orElse(null) + ) + ) + .field( + "transport_certificates_list", + generateCertDetailList( + sslSettingsManager.sslContextHandler(CertType.TRANSPORT) + .map(SslContextHandler::keyMaterialCertificates) + .orElse(null) + ) + ) .endObject() ); } - private List> generateCertDetailList(final X509Certificate[] certs) { + private List> generateCertDetailList(final Stream certs) { if (certs == null) { return null; } - return Arrays.stream(certs).map(cert -> { - final String issuerDn = cert != null && cert.getIssuerX500Principal() != null ? 
cert.getIssuerX500Principal().getName() : ""; - final String subjectDn = cert != null && cert.getSubjectX500Principal() != null ? cert.getSubjectX500Principal().getName() : ""; - - final String san = securityKeyStore.getSubjectAlternativeNames(cert); - - final String notBefore = cert != null && cert.getNotBefore() != null ? cert.getNotBefore().toInstant().toString() : ""; - final String notAfter = cert != null && cert.getNotAfter() != null ? cert.getNotAfter().toInstant().toString() : ""; - return ImmutableMap.of( + return certs.map( + c -> ImmutableMap.of( "issuer_dn", - issuerDn, + c.issuer(), "subject_dn", - subjectDn, + c.subject(), "san", - san, + c.subjectAlternativeNames(), "not_before", - notBefore, + c.notBefore(), "not_after", - notAfter - ); - }).collect(Collectors.toList()); + c.notAfter() + ) + ).collect(Collectors.toList()); } - protected void reloadCertificates(final RestChannel channel, final RestRequest request, final SecurityKeyStore keyStore) - throws IOException { + protected void reloadCertificates(final RestChannel channel, final RestRequest request) throws IOException { final String certType = request.param("certType").toLowerCase().trim(); try { switch (certType) { case "http": - if (!httpsEnabled) { + if (sslSettingsManager.sslConfiguration(CertType.HTTP).isPresent()) { + sslSettingsManager.reloadSslContext(CertType.HTTP); + ok(channel, (builder, params) -> builder.startObject().field("message", "updated http certs").endObject()); + } else { badRequest(channel, "SSL for HTTP is disabled"); - return; } - keyStore.initHttpSSLConfig(); - ok(channel, (builder, params) -> builder.startObject().field("message", "updated http certs").endObject()); break; case "transport": - keyStore.initTransportSSLConfig(); + sslSettingsManager.reloadSslContext(CertType.TRANSPORT); + sslSettingsManager.reloadSslContext(CertType.TRANSPORT_CLIENT); ok(channel, (builder, params) -> builder.startObject().field("message", "updated transport certs").endObject()); 
break; default: diff --git a/src/main/java/org/opensearch/security/dlic/rest/api/ssl/TransportCertificatesInfoNodesAction.java b/src/main/java/org/opensearch/security/dlic/rest/api/ssl/TransportCertificatesInfoNodesAction.java index 681c2c01eb..39edfd570f 100644 --- a/src/main/java/org/opensearch/security/dlic/rest/api/ssl/TransportCertificatesInfoNodesAction.java +++ b/src/main/java/org/opensearch/security/dlic/rest/api/ssl/TransportCertificatesInfoNodesAction.java @@ -12,22 +12,22 @@ package org.opensearch.security.dlic.rest.api.ssl; import java.io.IOException; -import java.security.cert.X509Certificate; import java.util.List; import java.util.Map; - -import com.google.common.collect.ImmutableList; +import java.util.stream.Collectors; +import java.util.stream.Stream; import org.opensearch.action.FailedNodeException; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.nodes.TransportNodesAction; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.common.settings.Settings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.io.stream.StreamOutput; -import org.opensearch.security.ssl.DefaultSecurityKeyStore; -import org.opensearch.security.ssl.util.SSLConfigConstants; +import org.opensearch.security.ssl.SslContextHandler; +import org.opensearch.security.ssl.SslSettingsManager; +import org.opensearch.security.ssl.config.CertType; +import org.opensearch.security.ssl.config.Certificate; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportService; @@ -38,18 +38,15 @@ public class TransportCertificatesInfoNodesAction extends TransportNodesAction< TransportCertificatesInfoNodesAction.NodeRequest, CertificatesNodesResponse.CertificatesNodeResponse> { - private final DefaultSecurityKeyStore securityKeyStore; - - private final boolean httpsEnabled; + 
private final SslSettingsManager sslSettingsManager; @Inject public TransportCertificatesInfoNodesAction( - final Settings settings, final ThreadPool threadPool, final ClusterService clusterService, final TransportService transportService, final ActionFilters actionFilters, - final DefaultSecurityKeyStore securityKeyStore + final SslSettingsManager sslSettingsManager ) { super( CertificatesActionType.NAME, @@ -62,8 +59,7 @@ public TransportCertificatesInfoNodesAction( ThreadPool.Names.GENERIC, CertificatesNodesResponse.CertificatesNodeResponse.class ); - this.httpsEnabled = settings.getAsBoolean(SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED, true); - this.securityKeyStore = securityKeyStore; + this.sslSettingsManager = sslSettingsManager; } @Override @@ -89,12 +85,6 @@ protected CertificatesNodesResponse.CertificatesNodeResponse newNodeResponse(fin protected CertificatesNodesResponse.CertificatesNodeResponse nodeOperation(final NodeRequest request) { final var sslCertRequest = request.sslCertsInfoNodesRequest; - if (securityKeyStore == null) { - return new CertificatesNodesResponse.CertificatesNodeResponse( - clusterService.localNode(), - new IllegalStateException("keystore is not initialized") - ); - } try { return new CertificatesNodesResponse.CertificatesNodeResponse( clusterService.localNode(), @@ -109,23 +99,27 @@ protected CertificatesInfo loadCertificates(final CertificateType certificateTyp var httpCertificates = List.of(); var transportsCertificates = List.of(); if (CertificateType.isHttp(certificateType)) { - httpCertificates = httpsEnabled ? 
certificatesDetails(securityKeyStore.getHttpCerts()) : List.of(); + httpCertificates = sslSettingsManager.sslContextHandler(CertType.HTTP) + .map(SslContextHandler::keyMaterialCertificates) + .map(this::certificatesDetails) + .orElse(List.of()); } if (CertificateType.isTransport(certificateType)) { - transportsCertificates = certificatesDetails(securityKeyStore.getTransportCerts()); + transportsCertificates = sslSettingsManager.sslContextHandler(CertType.TRANSPORT) + .map(SslContextHandler::keyMaterialCertificates) + .map(this::certificatesDetails) + .orElse(List.of()); } return new CertificatesInfo(Map.of(CertificateType.HTTP, httpCertificates, CertificateType.TRANSPORT, transportsCertificates)); } - private List certificatesDetails(final X509Certificate[] certs) { - if (certs == null) { + private List certificatesDetails(final Stream certificateStream) { + if (certificateStream == null) { return null; } - final var certificates = ImmutableList.builder(); - for (final var c : certs) { - certificates.add(CertificateInfo.from(c, securityKeyStore.getSubjectAlternativeNames(c))); - } - return certificates.build(); + return certificateStream.map( + c -> new CertificateInfo(c.subject(), c.subjectAlternativeNames(), c.issuer(), c.notAfter(), c.notBefore()) + ).collect(Collectors.toList()); } public static class NodeRequest extends TransportRequest { diff --git a/src/main/java/org/opensearch/security/filter/SecurityFilter.java b/src/main/java/org/opensearch/security/filter/SecurityFilter.java index f0ab7bb487..3323c9e38a 100644 --- a/src/main/java/org/opensearch/security/filter/SecurityFilter.java +++ b/src/main/java/org/opensearch/security/filter/SecurityFilter.java @@ -461,7 +461,7 @@ public void onFailure(Exception e) { ? 
String.format( "no permissions for %s and associated roles %s", pres.getMissingPrivileges(), - pres.getResolvedSecurityRoles() + context.getMappedRoles() ) : String.format("no permissions for %s and %s", pres.getMissingPrivileges(), user); } diff --git a/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java b/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java index b56f3e951d..c9d10ee2fa 100644 --- a/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java +++ b/src/main/java/org/opensearch/security/filter/SecurityRestFilter.java @@ -27,12 +27,14 @@ package org.opensearch.security.filter; import java.nio.file.Path; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.regex.Pattern; import javax.net.ssl.SSLPeerUnverifiedException; +import com.google.common.collect.ImmutableSet; import org.apache.http.HttpStatus; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -225,17 +227,12 @@ void authorizeRequest(RestHandler original, SecurityRequestChannel request, User if (routeSupportsRestAuthorization) { PrivilegesEvaluatorResponse pres = new PrivilegesEvaluatorResponse(); NamedRoute route = ((NamedRoute) handler.get()); - // if actionNames are present evaluate those first - Set actionNames = route.actionNames(); - if (actionNames != null && !actionNames.isEmpty()) { - pres = evaluator.evaluate(user, actionNames); - } - - // now if pres.allowed is still false check for the NamedRoute name as a permission - if (!pres.isAllowed()) { - String action = route.name(); - pres = evaluator.evaluate(user, Set.of(action)); - } + // Check both route.actionNames() and route.name(). The presence of either is sufficient. + Set actionNames = ImmutableSet.builder() + .addAll(route.actionNames() != null ? 
route.actionNames() : Collections.emptySet()) + .add(route.name()) + .build(); + pres = evaluator.evaluate(user, route.name(), actionNames); if (log.isDebugEnabled()) { log.debug(pres.toString()); diff --git a/src/main/java/org/opensearch/security/http/OnBehalfOfAuthenticator.java b/src/main/java/org/opensearch/security/http/OnBehalfOfAuthenticator.java index 327a25f849..1f5d8900fe 100644 --- a/src/main/java/org/opensearch/security/http/OnBehalfOfAuthenticator.java +++ b/src/main/java/org/opensearch/security/http/OnBehalfOfAuthenticator.java @@ -14,6 +14,7 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.Map.Entry; import java.util.Optional; @@ -31,6 +32,7 @@ import org.opensearch.SpecialPermission; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.DefaultObjectMapper; import org.opensearch.security.auth.HTTPAuthenticator; import org.opensearch.security.authtoken.jwt.EncryptionDecryptionUtil; import org.opensearch.security.filter.SecurityRequest; @@ -204,7 +206,22 @@ private AuthCredentials extractCredentials0(final SecurityRequest request) { final AuthCredentials ac = new AuthCredentials(subject, roles, backendRoles).markComplete(); for (Entry claim : claims.entrySet()) { - ac.addAttribute("attr.jwt." + claim.getKey(), String.valueOf(claim.getValue())); + String key = "attr.jwt." 
+ claim.getKey(); + Object value = claim.getValue(); + + if (value instanceof Collection) { + try { + // Convert the list to a JSON array string + String jsonValue = DefaultObjectMapper.writeValueAsString(value, false); + ac.addAttribute(key, jsonValue); + } catch (Exception e) { + log.warn("Failed to convert list claim to JSON for key: " + key, e); + // Fallback to string representation + ac.addAttribute(key, String.valueOf(value)); + } + } else { + ac.addAttribute(key, String.valueOf(value)); + } } return ac; diff --git a/src/main/java/org/opensearch/security/privileges/ActionPrivileges.java b/src/main/java/org/opensearch/security/privileges/ActionPrivileges.java new file mode 100644 index 0000000000..87ac32d090 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/ActionPrivileges.java @@ -0,0 +1,1141 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.privileges; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.DataStream; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; + +import com.selectivem.collections.CheckTable; +import com.selectivem.collections.CompactMapGroupBuilder; +import com.selectivem.collections.DeduplicatingCompactSubSetBuilder; +import com.selectivem.collections.ImmutableCompactSubSet; + +/** + * This class converts role configuration into pre-computed, optimized data structures for checking privileges. + *

+ * With the exception of the statefulIndex property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role and action group configuration. If the role or + * action group configuration is changed, a new instance needs to be built. + */ +public class ActionPrivileges extends ClusterStateMetadataDependentPrivileges { + + /** + * This setting controls the allowed heap size of the precomputed index privileges (in the inner class StatefulIndexPrivileges). + * If the size of the indices exceed the amount of bytes configured here, it will be truncated. Privileges evaluation will + * continue to work correctly, but it will be slower. + *

+ * This settings defaults to 10 MB. This is a generous limit. Experiments have shown that an example setup with + * 10,000 indices and 1,000 roles requires about 1 MB of heap. 100,000 indices and 100 roles require about 9 MB of heap. + * (Of course, these numbers can vary widely based on the actual role configuration). + */ + public static Setting PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE = Setting.memorySizeSetting( + "plugins.security.privileges_evaluation.precomputed_privileges.max_heap_size", + new ByteSizeValue(10, ByteSizeUnit.MB), + Setting.Property.NodeScope + ); + + private static final Logger log = LogManager.getLogger(ActionPrivileges.class); + + private final ClusterPrivileges cluster; + private final IndexPrivileges index; + private final SecurityDynamicConfiguration roles; + private final FlattenedActionGroups actionGroups; + private final ImmutableSet wellKnownClusterActions; + private final ImmutableSet wellKnownIndexActions; + private final Supplier> indexMetadataSupplier; + private final ByteSizeValue statefulIndexMaxHeapSize; + + private final AtomicReference statefulIndex = new AtomicReference<>(); + + public ActionPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + Supplier> indexMetadataSupplier, + Settings settings, + ImmutableSet wellKnownClusterActions, + ImmutableSet wellKnownIndexActions, + ImmutableSet explicitlyRequiredIndexActions + ) { + this.cluster = new ClusterPrivileges(roles, actionGroups, wellKnownClusterActions); + this.index = new IndexPrivileges(roles, actionGroups, wellKnownIndexActions, explicitlyRequiredIndexActions); + this.roles = roles; + this.actionGroups = actionGroups; + this.wellKnownClusterActions = wellKnownClusterActions; + this.wellKnownIndexActions = wellKnownIndexActions; + this.indexMetadataSupplier = indexMetadataSupplier; + this.statefulIndexMaxHeapSize = PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.get(settings); + } + + public ActionPrivileges( + SecurityDynamicConfiguration 
roles, + FlattenedActionGroups actionGroups, + Supplier> indexMetadataSupplier, + Settings settings + ) { + this( + roles, + actionGroups, + indexMetadataSupplier, + settings, + WellKnownActions.CLUSTER_ACTIONS, + WellKnownActions.INDEX_ACTIONS, + WellKnownActions.EXPLICITLY_REQUIRED_INDEX_ACTIONS + ); + } + + public PrivilegesEvaluatorResponse hasClusterPrivilege(PrivilegesEvaluationContext context, String action) { + return cluster.providesPrivilege(context, action, context.getMappedRoles()); + } + + public PrivilegesEvaluatorResponse hasAnyClusterPrivilege(PrivilegesEvaluationContext context, Set actions) { + return cluster.providesAnyPrivilege(context, actions, context.getMappedRoles()); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action and the + * provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. + */ + public PrivilegesEvaluatorResponse hasExplicitClusterPrivilege(PrivilegesEvaluationContext context, String action) { + return cluster.providesExplicitPrivilege(context, action, context.getMappedRoles()); + } + + /** + * Checks whether this instance provides privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + *

+ * If privileges are only available for a sub-set of indices, isPartiallyOk() will return true + * and the indices for which privileges are available are returned by getAvailableIndices(). This allows the + * do_not_fail_on_forbidden behaviour. + */ + public PrivilegesEvaluatorResponse hasIndexPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices + ) { + PrivilegesEvaluatorResponse response = this.index.providesWildcardPrivilege(context, actions); + if (response != null) { + return response; + } + + if (!resolvedIndices.isLocalAll() && resolvedIndices.getAllIndices().isEmpty()) { + // This is necessary for requests which operate on remote indices. + // Access control for the remote indices will be performed on the remote cluster. + log.debug("No local indices; grant the request"); + return PrivilegesEvaluatorResponse.ok(); + } + + // TODO one might want to consider to create a semantic wrapper for action in order to be better tell apart + // what's the action and what's the index in the generic parameters of CheckTable. + CheckTable checkTable = CheckTable.create( + resolvedIndices.getAllIndicesResolved(context.getClusterStateSupplier(), context.getIndexNameExpressionResolver()), + actions + ); + + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + PrivilegesEvaluatorResponse resultFromStatefulIndex = null; + + Map indexMetadata = this.indexMetadataSupplier.get(); + + if (statefulIndex != null) { + resultFromStatefulIndex = statefulIndex.providesPrivilege(actions, resolvedIndices, context, checkTable, indexMetadata); + + if (resultFromStatefulIndex != null) { + // If we get a result from statefulIndex, we are done. + return resultFromStatefulIndex; + } + + // Otherwise, we need to carry on checking privileges using the non-stateful object. + // Note: statefulIndex.hasPermission() modifies as a side effect the checkTable. 
+ // We can carry on using this as an intermediate result and further complete checkTable below. + } + + return this.index.providesPrivilege(context, actions, resolvedIndices, checkTable, indexMetadata); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + */ + public PrivilegesEvaluatorResponse hasExplicitIndexPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices + ) { + CheckTable checkTable = CheckTable.create(resolvedIndices.getAllIndices(), actions); + return this.index.providesExplicitPrivilege(context, actions, resolvedIndices, checkTable, this.indexMetadataSupplier.get()); + } + + /** + * Updates the stateful index configuration with the given indices. Should be normally only called by + * updateStatefulIndexPrivilegesAsync(). Package visible for testing. + */ + void updateStatefulIndexPrivileges(Map indices, long metadataVersion) { + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + + indices = StatefulIndexPrivileges.relevantOnly(indices); + + if (statefulIndex == null || !statefulIndex.indices.equals(indices)) { + long start = System.currentTimeMillis(); + this.statefulIndex.set( + new StatefulIndexPrivileges(roles, actionGroups, wellKnownIndexActions, indices, metadataVersion, statefulIndexMaxHeapSize) + ); + long duration = System.currentTimeMillis() - start; + log.debug("Updating StatefulIndexPrivileges took {} ms", duration); + } else { + synchronized (this) { + // Even if the indices did not change, update the metadataVersion in statefulIndex to reflect + // that the instance is up-to-date. 
+ if (statefulIndex.metadataVersion < metadataVersion) { + statefulIndex.metadataVersion = metadataVersion; + } + } + } + } + + @Override + protected void updateClusterStateMetadata(Metadata metadata) { + this.updateStatefulIndexPrivileges(metadata.getIndicesLookup(), metadata.version()); + } + + @Override + protected long getCurrentlyUsedMetadataVersion() { + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + return statefulIndex != null ? statefulIndex.metadataVersion : 0; + } + + int getEstimatedStatefulIndexByteSize() { + StatefulIndexPrivileges statefulIndex = this.statefulIndex.get(); + + if (statefulIndex != null) { + return statefulIndex.estimatedByteSize; + } else { + return 0; + } + } + + /** + * Pre-computed, optimized cluster privilege maps. Instances of this class are immutable. + *

+ * The data structures in this class are optimized for answering the question + * "I have action A and roles [x,y,z]. Do I have authorization to execute the action?". + *

+ * The check will be possible in time O(1) for "well-known" actions when the user actually has the privileges. + */ + static class ClusterPrivileges { + + /** + * Maps names of actions to the roles that provide a privilege for the respective action. + * Note that the mapping is not comprehensive, additionally the data structures rolesWithWildcardPermissions + * and rolesToActionMatcher need to be considered for a full view of the privileges. + *

+ * This does not include privileges obtained via "*" action patterns. This is both meant as a + * optimization and to support explicit privileges. + */ + private final ImmutableMap> actionToRoles; + + /** + * This contains all role names that provide wildcard (*) privileges for cluster actions. + * This avoids a blow-up of the actionToRoles object by such roles. + */ + private final ImmutableSet rolesWithWildcardPermissions; + + /** + * This maps role names to a matcher which matches the action names this role provides privileges for. + * This is only used as a last resort if the test with actionToRole and rolesWithWildcardPermissions failed. + * This is only necessary for actions which are not contained in the list of "well-known" actions provided + * during construction. + * + * This does not include privileges obtained via "*" action patterns. This is both meant as a + * optimization and to support explicit privileges. + */ + private final ImmutableMap rolesToActionMatcher; + + private final ImmutableSet wellKnownClusterActions; + + /** + * Creates pre-computed cluster privileges based on the given parameters. + *

+ * This constructor will not throw an exception if it encounters any invalid configuration (that is, + * in particular, unparseable regular expressions). Rather, it will just log an error. This is okay, as it + * just results in fewer available privileges. However, having a proper error reporting mechanism would be + * kind of nice. + */ + ClusterPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + ImmutableSet wellKnownClusterActions + ) { + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + Map> actionToRoles = new HashMap<>(); + ImmutableSet.Builder rolesWithWildcardPermissions = ImmutableSet.builder(); + ImmutableMap.Builder rolesToActionMatcher = ImmutableMap.builder(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + ImmutableSet permissionPatterns = actionGroups.resolve(role.getCluster_permissions()); + + // This list collects all the matchers for action names that will be found for the current role + List wildcardMatchers = new ArrayList<>(); + + for (String permission : permissionPatterns) { + // If we have a permission which does not use any pattern, we just simply add it to the + // "actionToRoles" map. + // Otherwise, we match the pattern against the provided well-known cluster actions and add + // these to the "actionToRoles" map. Additionally, for the case that the well-known cluster + // actions are not complete, we also collect the matcher to be used as a last resort later. + + if (WildcardMatcher.isExact(permission)) { + actionToRoles.computeIfAbsent(permission, k -> roleSetBuilder.createSubSetBuilder()).add(roleName); + } else if (permission.equals("*")) { + // Special case: Roles with a wildcard "*" giving privileges for all actions. 
We will not resolve + // this stuff, but just note separately that this role just gets all the cluster privileges. + rolesWithWildcardPermissions.add(roleName); + } else { + WildcardMatcher wildcardMatcher = WildcardMatcher.from(permission); + Set matchedActions = wildcardMatcher.getMatchAny( + wellKnownClusterActions, + Collectors.toUnmodifiableSet() + ); + + for (String action : matchedActions) { + actionToRoles.computeIfAbsent(action, k -> roleSetBuilder.createSubSetBuilder()).add(roleName); + } + + wildcardMatchers.add(wildcardMatcher); + } + } + + if (!wildcardMatchers.isEmpty()) { + rolesToActionMatcher.put(roleName, WildcardMatcher.from(wildcardMatchers)); + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry.getKey(), e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completedRoleSetBuilder = roleSetBuilder.build(); + + this.actionToRoles = actionToRoles.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().build(completedRoleSetBuilder))); + this.rolesWithWildcardPermissions = rolesWithWildcardPermissions.build(); + this.rolesToActionMatcher = rolesToActionMatcher.build(); + this.wellKnownClusterActions = wellKnownClusterActions; + } + + /** + * Checks whether this instance provides privileges for the combination of the provided action and the + * provided roles. Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. 
+ */ + PrivilegesEvaluatorResponse providesPrivilege(PrivilegesEvaluationContext context, String action, Set roles) { + + // 1: Check roles with wildcards + if (CollectionUtils.containsAny(roles, this.rolesWithWildcardPermissions)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 2: Check well-known actions - this should cover most cases + ImmutableCompactSubSet rolesWithPrivileges = this.actionToRoles.get(action); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(roles)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 3: Only if everything else fails: Check the matchers in case we have a non-well-known action + if (!this.wellKnownClusterActions.contains(action)) { + for (String role : roles) { + WildcardMatcher matcher = this.rolesToActionMatcher.get(role); + + if (matcher != null && matcher.test(action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + + return PrivilegesEvaluatorResponse.insufficient(action); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action and the + * provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. + */ + PrivilegesEvaluatorResponse providesExplicitPrivilege(PrivilegesEvaluationContext context, String action, Set roles) { + + // 1: Check well-known actions - this should cover most cases + ImmutableCompactSubSet rolesWithPrivileges = this.actionToRoles.get(action); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(roles)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 2: Only if everything else fails: Check the matchers in case we have a non-well-known action + if (!this.wellKnownClusterActions.contains(action)) { + for (String role : roles) { + WildcardMatcher matcher = this.rolesToActionMatcher.get(role); + + if (matcher != null && matcher.test(action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + + return PrivilegesEvaluatorResponse.insufficient(action); + } + + /** + * Checks whether this instance provides privileges for the combination of any of the provided actions and the + * provided roles. Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + * Otherwise, allowed will be false and missingPrivileges will contain the name of the given action. 
+ */ + PrivilegesEvaluatorResponse providesAnyPrivilege(PrivilegesEvaluationContext context, Set actions, Set roles) { + // 1: Check roles with wildcards + if (CollectionUtils.containsAny(roles, this.rolesWithWildcardPermissions)) { + return PrivilegesEvaluatorResponse.ok(); + } + + // 2: Check well-known actions - this should cover most cases + for (String action : actions) { + ImmutableCompactSubSet rolesWithPrivileges = this.actionToRoles.get(action); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(roles)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + + // 3: Only if everything else fails: Check the matchers in case we have a non-well-known action + for (String action : actions) { + if (!this.wellKnownClusterActions.contains(action)) { + for (String role : roles) { + WildcardMatcher matcher = this.rolesToActionMatcher.get(role); + + if (matcher != null && matcher.test(action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + } + + if (actions.size() == 1) { + return PrivilegesEvaluatorResponse.insufficient(actions.iterator().next()); + } else { + return PrivilegesEvaluatorResponse.insufficient("any of " + actions); + } + } + } + + /** + * Partially pre-computed, optimized index privilege maps. Instances of this class are immutable. + *

+ * This class is independent of the actual indices present in the cluster. See StatefulIndexPermissions for a class + * that also takes actual indices into account and is thus fully pre-computed. + *

+ * Purposes of this class: + *

+ * 1. Answer the question "given an action and a set of roles, do I have wildcard index privileges" in O(1) + *

+ * 2. Pre-compute the data structures as far as possible in cases that StatefulIndexPermissions cannot check the + * permissions. This is the case when: + *

+ * a) StatefulIndexPermissions does not cover all indices + * b) The requested index does not exist (especially the case for create index actions) + * c) The index patterns use placeholders like "${user.name}" - these can be only resolved when the User object is present. + * d) The action is not among the "well known" actions. + */ + static class IndexPrivileges { + /** + * Maps role names to concrete action names to IndexPattern objects which define the indices the privileges apply to. + */ + private final ImmutableMap> rolesToActionToIndexPattern; + + /** + * Maps role names to action names matchers to IndexPattern objects which define the indices the privileges apply to. + * This is especially for "non-well-known" actions. + */ + private final ImmutableMap> rolesToActionPatternToIndexPattern; + + /** + * Maps action names to the roles which provide wildcard ("*") index privileges for the respective action. + * This allows to answer the question "given an action and a set of roles, do I have wildcard index privileges" + * in O(1) + */ + private final ImmutableMap> actionToRolesWithWildcardIndexPrivileges; + + /** + * A pre-defined set of action names that is used to pre-compute the result of action patterns. + */ + private final ImmutableSet wellKnownIndexActions; + + /** + * A pre-defined set of action names that is included in the rolesToExplicitActionToIndexPattern data structure + */ + private final ImmutableSet explicitlyRequiredIndexActions; + + /** + * Maps role names to concrete action names to IndexPattern objects which define the indices the privileges apply to. + * The action names are only explicitly granted privileges which are listed in explicitlyRequiredIndexActions. + *

+ * Compare https://github.com/opensearch-project/security/pull/2887 + */ + private final ImmutableMap> rolesToExplicitActionToIndexPattern; + + /** + * Creates pre-computed index privileges based on the given parameters. + *

+ * This constructor will not throw an exception if it encounters any invalid configuration (that is, + * in particular, unparseable regular expressions). Rather, it will just log an error. This is okay, as it + * just results in fewer available privileges. However, having a proper error reporting mechanism would be + * kind of nice. + */ + IndexPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + ImmutableSet wellKnownIndexActions, + ImmutableSet explicitlyRequiredIndexActions + ) { + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + + Map> rolesToActionToIndexPattern = new HashMap<>(); + Map> rolesToActionPatternToIndexPattern = new HashMap<>(); + Map> actionToRolesWithWildcardIndexPrivileges = new HashMap<>(); + Map> rolesToExplicitActionToIndexPattern = new HashMap<>(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + for (RoleV7.Index indexPermissions : role.getIndex_permissions()) { + ImmutableSet permissions = actionGroups.resolve(indexPermissions.getAllowed_actions()); + + for (String permission : permissions) { + // If we have a permission which does not use any pattern, we just simply add it to the + // "rolesToActionToIndexPattern" map. + // Otherwise, we match the pattern against the provided well-known index actions and add + // these to the "rolesToActionToIndexPattern" map. Additionally, for the case that the + // well-known index actions are not complete, we also collect the actionMatcher to be used + // as a last resort later. 
+ + if (WildcardMatcher.isExact(permission)) { + rolesToActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(permission, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + + if (explicitlyRequiredIndexActions.contains(permission)) { + rolesToExplicitActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(permission, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + } + + if (indexPermissions.getIndex_patterns().contains("*")) { + actionToRolesWithWildcardIndexPrivileges.computeIfAbsent( + permission, + k -> roleSetBuilder.createSubSetBuilder() + ).add(roleName); + } + } else { + WildcardMatcher actionMatcher = WildcardMatcher.from(permission); + + for (String action : actionMatcher.iterateMatching(wellKnownIndexActions)) { + rolesToActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(action, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + + if (indexPermissions.getIndex_patterns().contains("*")) { + actionToRolesWithWildcardIndexPrivileges.computeIfAbsent( + permission, + k -> roleSetBuilder.createSubSetBuilder() + ).add(roleName); + } + } + + rolesToActionPatternToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(actionMatcher, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + + if (actionMatcher != WildcardMatcher.ANY) { + for (String action : actionMatcher.iterateMatching(explicitlyRequiredIndexActions)) { + rolesToExplicitActionToIndexPattern.computeIfAbsent(roleName, k -> new HashMap<>()) + .computeIfAbsent(action, k -> new IndexPattern.Builder()) + .add(indexPermissions.getIndex_patterns()); + } + } + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry.getKey(), e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed 
completedRoleSetBuilder = roleSetBuilder.build(); + + this.rolesToActionToIndexPattern = rolesToActionToIndexPattern.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue() + .entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry2 -> entry2.getValue().build())) + ) + ); + + this.rolesToActionPatternToIndexPattern = rolesToActionPatternToIndexPattern.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue() + .entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry2 -> entry2.getValue().build())) + ) + ); + + this.actionToRolesWithWildcardIndexPrivileges = actionToRolesWithWildcardIndexPrivileges.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry -> entry.getValue().build(completedRoleSetBuilder))); + + this.rolesToExplicitActionToIndexPattern = rolesToExplicitActionToIndexPattern.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue() + .entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(Map.Entry::getKey, entry2 -> entry2.getValue().build())) + ) + ); + + this.wellKnownIndexActions = wellKnownIndexActions; + this.explicitlyRequiredIndexActions = explicitlyRequiredIndexActions; + } + + /** + * Checks whether this instance provides privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Returns a PrivilegesEvaluatorResponse with allowed=true if privileges are available. + *

+ * If privileges are only available for a sub-set of indices, isPartiallyOk() will return true + * and the indices for which privileges are available are returned by getAvailableIndices(). This allows the + * do_not_fail_on_forbidden behaviour. + *

+ * This method will only verify privileges for the index/action combinations which are un-checked in + * the checkTable instance provided to this method. Checked index/action combinations are considered to be + * "already fulfilled by other means" - usually that comes from the stateful data structure. + * As a side-effect, this method will further mark the available index/action combinations in the provided + * checkTable instance as checked. + */ + PrivilegesEvaluatorResponse providesPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices, + CheckTable checkTable, + Map indexMetadata + ) { + List exceptions = new ArrayList<>(); + + for (String role : context.getMappedRoles()) { + ImmutableMap actionToIndexPattern = this.rolesToActionToIndexPattern.get(role); + + if (actionToIndexPattern != null) { + for (String action : actions) { + IndexPattern indexPattern = actionToIndexPattern.get(action); + + if (indexPattern != null) { + for (String index : checkTable.iterateUncheckedRows(action)) { + try { + if (indexPattern.matches(index, context, indexMetadata) && checkTable.check(index, action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } catch (PrivilegesEvaluationException e) { + // We can ignore these errors, as this max leads to fewer privileges than available + log.error("Error while evaluating index pattern of role {}. Ignoring entry", role, e); + exceptions.add(new PrivilegesEvaluationException("Error while evaluating role " + role, e)); + } + } + } + } + } + } + + // If all actions are well-known, the index.rolesToActionToIndexPattern data structure that was evaluated above, + // would have contained all the actions if privileges are provided. 
If there are non-well-known actions among the + // actions, we also have to evaluate action patterns to check the authorization + + boolean allActionsWellKnown = actions.stream().allMatch(a -> this.wellKnownIndexActions.contains(a)); + + if (!checkTable.isComplete() && !allActionsWellKnown) { + top: for (String role : context.getMappedRoles()) { + ImmutableMap actionPatternToIndexPattern = this.rolesToActionPatternToIndexPattern.get( + role + ); + + if (actionPatternToIndexPattern != null) { + for (String action : actions) { + if (this.wellKnownIndexActions.contains(action)) { + continue; + } + + for (Map.Entry entry : actionPatternToIndexPattern.entrySet()) { + WildcardMatcher actionMatcher = entry.getKey(); + IndexPattern indexPattern = entry.getValue(); + + if (actionMatcher.test(action)) { + for (String index : checkTable.iterateUncheckedRows(action)) { + try { + if (indexPattern.matches(index, context, indexMetadata) && checkTable.check(index, action)) { + break top; + } + } catch (PrivilegesEvaluationException e) { + // We can ignore these errors, as this max leads to fewer privileges than available + log.error("Error while evaluating index pattern of role {}. Ignoring entry", role, e); + exceptions.add(new PrivilegesEvaluationException("Error while evaluating role " + role, e)); + } + } + } + } + } + } + } + } + + if (checkTable.isComplete()) { + return PrivilegesEvaluatorResponse.ok(); + } + + Set availableIndices = checkTable.getCompleteRows(); + + if (!availableIndices.isEmpty()) { + return PrivilegesEvaluatorResponse.partiallyOk(availableIndices, checkTable).evaluationExceptions(exceptions); + } + + return PrivilegesEvaluatorResponse.insufficient(checkTable) + .reason( + resolvedIndices.getAllIndices().size() == 1 + ? 
"Insufficient permissions for the referenced index" + : "None of " + resolvedIndices.getAllIndices().size() + " referenced indices has sufficient permissions" + ) + .evaluationExceptions(exceptions); + } + + /** + * Returns PrivilegesEvaluatorResponse.ok() if the user identified in the context object has privileges for all + * indices (using *) for the given actions. Returns null otherwise. Then, further checks must be done to check + * the user's privileges. + */ + PrivilegesEvaluatorResponse providesWildcardPrivilege(PrivilegesEvaluationContext context, Set actions) { + ImmutableSet effectiveRoles = context.getMappedRoles(); + + for (String action : actions) { + ImmutableCompactSubSet rolesWithWildcardIndexPrivileges = this.actionToRolesWithWildcardIndexPrivileges.get(action); + + if (rolesWithWildcardIndexPrivileges == null || !rolesWithWildcardIndexPrivileges.containsAny(effectiveRoles)) { + return null; + } + } + + return PrivilegesEvaluatorResponse.ok(); + } + + /** + * Checks whether this instance provides explicit privileges for the combination of the provided action, + * the provided indices and the provided roles. + *

+ * Explicit means here that the privilege is not granted via a "*" action privilege wildcard. Other patterns + * are possible. See also: https://github.com/opensearch-project/security/pull/2411 and https://github.com/opensearch-project/security/issues/3038 + */ + PrivilegesEvaluatorResponse providesExplicitPrivilege( + PrivilegesEvaluationContext context, + Set actions, + IndexResolverReplacer.Resolved resolvedIndices, + CheckTable checkTable, + Map indexMetadata + ) { + List exceptions = new ArrayList<>(); + + if (!CollectionUtils.containsAny(actions, this.explicitlyRequiredIndexActions)) { + return PrivilegesEvaluatorResponse.insufficient(CheckTable.create(ImmutableSet.of("_"), actions)); + } + + for (String role : context.getMappedRoles()) { + ImmutableMap actionToIndexPattern = this.rolesToExplicitActionToIndexPattern.get(role); + + if (actionToIndexPattern != null) { + for (String action : actions) { + IndexPattern indexPattern = actionToIndexPattern.get(action); + + if (indexPattern != null) { + for (String index : checkTable.iterateUncheckedRows(action)) { + try { + if (indexPattern.matches(index, context, indexMetadata) && checkTable.check(index, action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } catch (PrivilegesEvaluationException e) { + // We can ignore these errors, as this max leads to fewer privileges than available + log.error("Error while evaluating index pattern of role {}. Ignoring entry", role, e); + exceptions.add(new PrivilegesEvaluationException("Error while evaluating role " + role, e)); + } + } + } + } + } + } + + return PrivilegesEvaluatorResponse.insufficient(checkTable) + .reason("No explicit privileges have been provided for the referenced indices.") + .evaluationExceptions(exceptions); + } + } + + /** + * Fully pre-computed, optimized index privilege maps. + *

+ * The data structures in this class are optimized to answer the question "given an action, an index and a set of + * roles, do I have the respective privilege" in O(1). + *

+ * There are cases where this class will not be able to answer this question. These cases are the following: + * - The requested index does not exist (especially the case for create index actions) + * - The action is not well-known. + * - The indices used for pre-computing the data structures are not complete (possibly due to race conditions) + * - The role definition uses placeholders (like "${user.name}") in index patterns. + * - The role definition grants privileges to all indices (via "*") (these are omitted here for efficiency reasons). + * In such cases, the question needs to be answered by IndexPermissions (see above). + *

+ * This class also takes into account aliases and data streams. If a permission is granted on an alias, it will be + * automatically inherited by the indices it points to. The same holds for the backing indices of a data stream. + */ + static class StatefulIndexPrivileges { + + /** + * Maps concrete action names to concrete index names and then to the roles which provide privileges for the + * combination of action and index. This map can contain besides indices also names of data streams and aliases. + * For aliases and data streams, it will then contain both the actual alias/data stream and the backing indices. + */ + private final Map>> actionToIndexToRoles; + + /** + * The index information that was used to construct this instance. + */ + private final Map indices; + + /** + * The well known index actions that were used to construct this instance. + */ + private final ImmutableSet wellKnownIndexActions; + + private final int estimatedByteSize; + + private long metadataVersion; + + /** + * Creates pre-computed index privileges based on the given parameters. + *

+ * This constructor will not throw an exception if it encounters any invalid configuration (that is, + * in particular, unparseable regular expressions). Rather, it will just log an error. This is okay, as it + * just results in fewer available privileges. + */ + StatefulIndexPrivileges( + SecurityDynamicConfiguration roles, + FlattenedActionGroups actionGroups, + ImmutableSet wellKnownIndexActions, + Map indices, + long metadataVersion, + ByteSizeValue statefulIndexMaxHeapSize + ) { + Map< + String, + CompactMapGroupBuilder.MapBuilder>> actionToIndexToRoles = + new HashMap<>(); + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + CompactMapGroupBuilder> indexMapBuilder = + new CompactMapGroupBuilder<>(indices.keySet(), (k2) -> roleSetBuilder.createSubSetBuilder()); + + // We iterate here through the present RoleV7 instances and nested through their "index_permissions" sections. + // During the loop, the actionToIndexToRoles map is being built. + // For that, action patterns from the role will be matched against the "well-known actions" to build + // a concrete action map and index patterns from the role will be matched against the present indices + // to build a concrete index map. + // + // The complexity of this loop is O(n*m) where n is dependent on the structure of the roles configuration + // and m is the number of matched indices. This formula does not take the loop through matchedActions in + // account, as this is bound by a constant number and thus does not need to be considered in the O() notation. 
+ + top: for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + for (RoleV7.Index indexPermissions : role.getIndex_permissions()) { + ImmutableSet permissions = actionGroups.resolve(indexPermissions.getAllowed_actions()); + + if (indexPermissions.getIndex_patterns().contains("*")) { + // Wildcard index patterns are handled in the static IndexPermissions object. + // This avoids having to build huge data structures - when a very easy shortcut is available. + continue; + } + + WildcardMatcher indexMatcher = IndexPattern.from(indexPermissions.getIndex_patterns()).getStaticPattern(); + + if (indexMatcher == WildcardMatcher.NONE) { + // The pattern is likely blank because there are only templated patterns. + // Index patterns with templates are not handled here, but in the static IndexPermissions object + continue; + } + + for (String permission : permissions) { + WildcardMatcher actionMatcher = WildcardMatcher.from(permission); + Collection matchedActions = actionMatcher.getMatchAny(wellKnownIndexActions, Collectors.toList()); + + for (Map.Entry indicesEntry : indexMatcher.iterateMatching( + indices.entrySet(), + Map.Entry::getKey + )) { + for (String action : matchedActions) { + CompactMapGroupBuilder.MapBuilder< + String, + DeduplicatingCompactSubSetBuilder.SubSetBuilder> indexToRoles = actionToIndexToRoles + .computeIfAbsent(action, k -> indexMapBuilder.createMapBuilder()); + + indexToRoles.get(indicesEntry.getKey()).add(roleName); + + if (indicesEntry.getValue() instanceof IndexAbstraction.Alias) { + // For aliases we additionally add the sub-indices to the privilege map + for (IndexMetadata subIndex : indicesEntry.getValue().getIndices()) { + indexToRoles.get(subIndex.getIndex().getName()).add(roleName); + } + } + + if (roleSetBuilder.getEstimatedByteSize() + indexMapBuilder + .getEstimatedByteSize() > statefulIndexMaxHeapSize.getBytes()) { + 
log.info( + "Size of precomputed index privileges exceeds configured limit ({}). Using capped data structure." + + "This might lead to slightly lower performance during privilege evaluation. Consider raising {}.", + statefulIndexMaxHeapSize, + PRECOMPUTED_PRIVILEGES_MAX_HEAP_SIZE.getKey() + ); + break top; + } + } + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry.getKey(), e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completedRoleSetBuilder = roleSetBuilder.build(); + + this.estimatedByteSize = roleSetBuilder.getEstimatedByteSize() + indexMapBuilder.getEstimatedByteSize(); + log.debug("Estimated size of StatefulIndexPermissions data structure: {}", this.estimatedByteSize); + + this.actionToIndexToRoles = actionToIndexToRoles.entrySet() + .stream() + .collect( + ImmutableMap.toImmutableMap( + Map.Entry::getKey, + entry -> entry.getValue().build(subSetBuilder -> subSetBuilder.build(completedRoleSetBuilder)) + ) + ); + + this.indices = ImmutableMap.copyOf(indices); + this.metadataVersion = metadataVersion; + this.wellKnownIndexActions = wellKnownIndexActions; + } + + /** + * Checks whether the user has privileges based on the given parameters and information in this class. This method + * has two major channels for returning results: + *

+ * 1. The return value is either PrivilegesEvaluatorResponse.ok() or null. If it is null, this method cannot + * completely tell whether the user has full privileges. A further check with IndexPermissions will be necessary. + * If PrivilegesEvaluatorResponse.ok() is returned, then full privileges could be already determined. + *

+ * 2. As a side effect, this method will modify the supplied CheckTable object. This will be the case regardless + * of whether null or PrivilegesEvaluatorResponse.ok() is returned. The interesting case is actually when null + * is returned, because then the remaining logic needs only to check for the unchecked cases. + * + * @param actions the actions the user needs to have privileges for + * @param resolvedIndices the index the user needs to have privileges for + * @param context context information like user, resolved roles, etc. + * @param checkTable An action/index matrix. This method will modify the table as a side effect and check the cells where privileges are present. + * @return PrivilegesEvaluatorResponse.ok() or null. + */ + PrivilegesEvaluatorResponse providesPrivilege( + Set actions, + IndexResolverReplacer.Resolved resolvedIndices, + PrivilegesEvaluationContext context, + CheckTable checkTable, + Map indexMetadata + ) { + ImmutableSet effectiveRoles = context.getMappedRoles(); + + for (String action : actions) { + Map> indexToRoles = actionToIndexToRoles.get(action); + + if (indexToRoles != null) { + for (String index : resolvedIndices.getAllIndices()) { + String lookupIndex = index; + + if (index.startsWith(DataStream.BACKING_INDEX_PREFIX)) { + // If we have a backing index of a data stream, we will not try to test + // the backing index here, as we filter backing indices during initialization. + // Instead, we look up the containing data stream and check whether this has privileges. + lookupIndex = backingIndexToDataStream(index, indexMetadata); + } + + ImmutableCompactSubSet rolesWithPrivileges = indexToRoles.get(lookupIndex); + + if (rolesWithPrivileges != null && rolesWithPrivileges.containsAny(effectiveRoles)) { + if (checkTable.check(index, action)) { + return PrivilegesEvaluatorResponse.ok(); + } + } + } + } + } + + // If we reached this point, we cannot tell whether the user has privileges using this instance. 
+ // Return null to indicate that there is no answer. + // The checkTable object might contain already a partial result. + return null; + } + + /** + * If the given index is the backing index of a data stream, the name of the data stream is returned. + * Otherwise, the name of the index itself is being returned. + */ + static String backingIndexToDataStream(String index, Map indexMetadata) { + IndexAbstraction indexAbstraction = indexMetadata.get(index); + + if (indexAbstraction instanceof IndexAbstraction.Index && indexAbstraction.getParentDataStream() != null) { + return indexAbstraction.getParentDataStream().getName(); + } else { + return index; + } + } + + /** + * Filters the given index abstraction map to only contain entries that are relevant the for stateful class. + * This has the goal to keep the heap footprint of instances of StatefulIndexPrivileges at a reasonable size. + *

+ * This removes the following entries: + *

    + *
  • closed indices - closed indices do not need any fast privilege evaluation + *
  • backing indices of data streams - privileges should be only assigned directly to the data streams. + * the privilege evaluation code is able to recognize that an index is member of a data stream and test + * its privilege via that data stream. If a privilege is directly assigned to a backing index, we use + * the "slowish" code paths. + *
  • Indices which are not matched by includeIndices + *
+ */ + static Map relevantOnly(Map indices) { + // First pass: Check if we need to filter at all + boolean doFilter = false; + + for (IndexAbstraction indexAbstraction : indices.values()) { + if (indexAbstraction instanceof IndexAbstraction.Index) { + if (indexAbstraction.getParentDataStream() != null + || indexAbstraction.getWriteIndex().getState() == IndexMetadata.State.CLOSE) { + doFilter = true; + break; + } + } + } + + if (!doFilter) { + return indices; + } + + // Second pass: Only if we actually need filtering, we will do it + ImmutableMap.Builder builder = ImmutableMap.builder(); + + for (IndexAbstraction indexAbstraction : indices.values()) { + if (indexAbstraction instanceof IndexAbstraction.Index) { + if (indexAbstraction.getParentDataStream() == null + && indexAbstraction.getWriteIndex().getState() != IndexMetadata.State.CLOSE) { + builder.put(indexAbstraction.getName(), indexAbstraction); + } + } else { + builder.put(indexAbstraction.getName(), indexAbstraction); + } + } + + return builder.build(); + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivileges.java b/src/main/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivileges.java new file mode 100644 index 0000000000..282e2e6bb6 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/ClusterStateMetadataDependentPrivileges.java @@ -0,0 +1,98 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges; + +import java.util.concurrent.Future; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.threadpool.ThreadPool; + +/** + * Abstract super class for classes which need metadata updates from the cluster state. This class implements + * asynchronous updates - that means that any subclass needs to be prepared for not having the most up to date + * cluster state. + */ +public abstract class ClusterStateMetadataDependentPrivileges { + + private static final Logger log = LogManager.getLogger(ClusterStateMetadataDependentPrivileges.class); + private Future updateFuture; + + /** + * Updates the stateful index configuration asynchronously with the index metadata from the current cluster state. + * As the update process can take some seconds for clusters with many indices, this method "de-bounces" the updates, + * i.e., a further update will be only initiated after the previous update has finished. This is okay as this class + * can handle the case that it do not have the most recent information. It will fall back to slower methods then. + */ + public synchronized void updateClusterStateMetadataAsync(ClusterService clusterService, ThreadPool threadPool) { + long currentMetadataVersion = clusterService.state().metadata().version(); + + if (currentMetadataVersion <= getCurrentlyUsedMetadataVersion()) { + return; + } + + if (this.updateFuture == null || this.updateFuture.isDone()) { + this.updateFuture = threadPool.generic().submit(() -> { + for (int i = 0;; i++) { + if (i > 5) { + try { + // In case we got many consecutive updates, let's sleep a little to let + // other operations catch up. 
+ Thread.sleep(100); + } catch (InterruptedException e) { + return; + } + } + + Metadata metadata = clusterService.state().metadata(); + + synchronized (ClusterStateMetadataDependentPrivileges.this) { + if (metadata.version() <= ClusterStateMetadataDependentPrivileges.this.getCurrentlyUsedMetadataVersion()) { + return; + } + } + + try { + log.debug("Updating {} with metadata version {}", this, metadata.version()); + updateClusterStateMetadata(metadata); + } catch (Exception e) { + log.error("Error while updating {}", this, e); + } finally { + synchronized (ClusterStateMetadataDependentPrivileges.this) { + if (ClusterStateMetadataDependentPrivileges.this.updateFuture.isCancelled()) { + // This can happen if this instance got obsolete due to a config update + // or if the node is shutting down + return; + } + } + } + } + }); + } + } + + /** + * Stops any concurrent update tasks to let the node gracefully shut down. + */ + public synchronized void shutdown() { + if (this.updateFuture != null && !this.updateFuture.isDone()) { + this.updateFuture.cancel(true); + } + } + + protected abstract void updateClusterStateMetadata(Metadata metadata); + + protected abstract long getCurrentlyUsedMetadataVersion(); + +} diff --git a/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java b/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java index 129233a007..6e41857737 100644 --- a/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java +++ b/src/main/java/org/opensearch/security/privileges/DocumentAllowList.java @@ -14,6 +14,9 @@ import java.util.HashSet; import java.util.Set; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.security.support.ConfigConstants; @@ -25,12 +28,29 @@ */ public class DocumentAllowList { - private final Set entries = new HashSet<>(); + private static final Logger log = 
LogManager.getLogger(DocumentAllowList.class); - public DocumentAllowList() { + public static DocumentAllowList get(ThreadContext threadContext) { + String header = threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_DOC_ALLOWLIST_HEADER); + if (header == null) { + return EMPTY; + } else { + try { + return parse(header); + } catch (Exception e) { + log.error("Error while handling document allow list: {}", header, e); + return EMPTY; + } + } } + private static final DocumentAllowList EMPTY = new DocumentAllowList(); + + private final Set entries = new HashSet<>(); + + public DocumentAllowList() {} + public void add(String index, String id) { this.add(new Entry(index, id)); } @@ -59,6 +79,16 @@ public boolean isAllowed(String index, String id) { return false; } + public boolean isEntryForIndexPresent(String index) { + for (Entry entry : entries) { + if (entry.index.equals(index)) { + return true; + } + } + + return false; + } + public String toString() { if (this.entries.isEmpty()) { return ""; diff --git a/src/main/java/org/opensearch/security/privileges/ExpressionEvaluationException.java b/src/main/java/org/opensearch/security/privileges/ExpressionEvaluationException.java new file mode 100644 index 0000000000..50e933246b --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/ExpressionEvaluationException.java @@ -0,0 +1,21 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +/** + * This exception indicates that an expression - such as a regular expression - could not be properly evaluated during + * privilege evaluation. 
+ */ +public class ExpressionEvaluationException extends Exception { + public ExpressionEvaluationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/IndexPattern.java b/src/main/java/org/opensearch/security/privileges/IndexPattern.java new file mode 100644 index 0000000000..d5d419f72b --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/IndexPattern.java @@ -0,0 +1,257 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import com.google.common.collect.ImmutableList; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.security.support.WildcardMatcher; + +/** + * Aggregates index patterns defined in roles and segments them into patterns using template expressions ("index_${user.name}"), + * patterns using date math and plain patterns. This segmentation is needed because only plain patterns can be used + * to pre-compute privilege maps. The other types of patterns need to be evaluated "live" during the actual request. + */ +public class IndexPattern { + private static final Logger log = LogManager.getLogger(IndexPattern.class); + + /** + * An IndexPattern which does not match any index. 
+ */ + public static final IndexPattern EMPTY = new IndexPattern(WildcardMatcher.NONE, ImmutableList.of(), ImmutableList.of()); + + /** + * Plain index patterns without any dynamic expressions like user attributes and date math. + * This can be not null. If this instance cannot match any static pattern, this will be WildcardMatcher.NONE. + */ + private final WildcardMatcher staticPattern; + + /** + * Index patterns which contain user attributes (like ${user.name}) + */ + private final ImmutableList patternTemplates; + + /** + * Index patterns which contain date math (like ) + */ + private final ImmutableList dateMathExpressions; + private final int hashCode; + + private IndexPattern(WildcardMatcher staticPattern, ImmutableList patternTemplates, ImmutableList dateMathExpressions) { + this.staticPattern = staticPattern; + this.patternTemplates = patternTemplates; + this.dateMathExpressions = dateMathExpressions; + this.hashCode = staticPattern.hashCode() + patternTemplates.hashCode() + dateMathExpressions.hashCode(); + } + + public boolean matches(String index, PrivilegesEvaluationContext context, Map indexMetadata) + throws PrivilegesEvaluationException { + if (staticPattern != WildcardMatcher.NONE && staticPattern.test(index)) { + return true; + } + + if (!patternTemplates.isEmpty()) { + for (String patternTemplate : this.patternTemplates) { + try { + WildcardMatcher matcher = context.getRenderedMatcher(patternTemplate); + + if (matcher.test(index)) { + return true; + } + } catch (ExpressionEvaluationException e) { + throw new PrivilegesEvaluationException("Error while evaluating dynamic index pattern: " + patternTemplate, e); + } + } + } + + if (!dateMathExpressions.isEmpty()) { + IndexNameExpressionResolver indexNameExpressionResolver = context.getIndexNameExpressionResolver(); + + // Note: The use of date math expressions in privileges is a bit odd, as it only provides a very limited + // solution for the potential user case. A different approach might be nice. 
+ + for (String dateMathExpression : this.dateMathExpressions) { + try { + String resolvedExpression = indexNameExpressionResolver.resolveDateMathExpression(dateMathExpression); + + WildcardMatcher matcher = WildcardMatcher.from(resolvedExpression); + + if (matcher.test(index)) { + return true; + } + } catch (Exception e) { + throw new PrivilegesEvaluationException("Error while evaluating date math expression: " + dateMathExpression, e); + } + } + } + + IndexAbstraction indexAbstraction = indexMetadata.get(index); + + if (indexAbstraction instanceof IndexAbstraction.Index) { + // Check for the privilege for aliases or data streams containing this index + + if (indexAbstraction.getParentDataStream() != null) { + if (matches(indexAbstraction.getParentDataStream().getName(), context, indexMetadata)) { + return true; + } + } + + // Retrieve aliases: The use of getWriteIndex() is a bit messy, but it is the only way to access + // alias metadata from here. + for (String alias : indexAbstraction.getWriteIndex().getAliases().keySet()) { + if (matches(alias, context, indexMetadata)) { + return true; + } + } + } + + return false; + } + + @Override + public String toString() { + if (patternTemplates.size() == 0 && dateMathExpressions.size() == 0) { + return staticPattern.toString(); + } else { + StringBuilder result = new StringBuilder(); + + if (staticPattern != WildcardMatcher.NONE) { + result.append(staticPattern); + } + + if (patternTemplates.size() != 0) { + if (result.length() != 0) { + result.append(" "); + } + + result.append(String.join(",", patternTemplates)); + } + + if (dateMathExpressions.size() != 0) { + if (result.length() != 0) { + result.append(" "); + } + + result.append(String.join(",", dateMathExpressions)); + } + + return result.toString(); + } + } + + public WildcardMatcher getStaticPattern() { + return staticPattern; + } + + /** + * Returns true if this object contains patterns which can be matched against indices upfront. 
+ */ + public boolean hasStaticPattern() { + return staticPattern != WildcardMatcher.NONE; + } + + /** + * Returns true if this object contains patterns which must be matched against indices again for each request, + * as they depend on user attributes or on the current time. + */ + public boolean hasDynamicPattern() { + return !patternTemplates.isEmpty() || !dateMathExpressions.isEmpty(); + } + + /** + * Returns a sub-set of this object, which includes only the patterns which must be matched against indices again for each request, + * as they depend on user attributes or on the current time. + */ + public IndexPattern dynamicOnly() { + if (patternTemplates.isEmpty() && dateMathExpressions.isEmpty()) { + return EMPTY; + } else { + return new IndexPattern(WildcardMatcher.NONE, this.patternTemplates, this.dateMathExpressions); + } + } + + /** + * Returns true if this object cannot match against any index name. + */ + public boolean isEmpty() { + return !hasStaticPattern() && !hasDynamicPattern(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof IndexPattern that)) return false; + return Objects.equals(staticPattern, that.staticPattern) + && Objects.equals(patternTemplates, that.patternTemplates) + && Objects.equals(dateMathExpressions, that.dateMathExpressions); + } + + @Override + public int hashCode() { + return hashCode; + } + + static class Builder { + private List constantPatterns = new ArrayList<>(); + private List patternTemplates = new ArrayList<>(); + private List dateMathExpressions = new ArrayList<>(); + + void add(List source) { + for (int i = 0; i < source.size(); i++) { + try { + String indexPattern = source.get(i); + + if (indexPattern.startsWith("<") && indexPattern.endsWith(">")) { + this.dateMathExpressions.add(indexPattern); + } else if (!containsPlaceholder(indexPattern)) { + this.constantPatterns.add(WildcardMatcher.from(indexPattern)); + } else { + this.patternTemplates.add(indexPattern); + 
} + } catch (Exception e) { + // This usually happens when the index pattern defines an unparseable regular expression + log.error("Error while creating index pattern for {}", source, e); + } + } + } + + IndexPattern build() { + return new IndexPattern( + constantPatterns.size() != 0 ? WildcardMatcher.from(constantPatterns) : WildcardMatcher.NONE, + ImmutableList.copyOf(patternTemplates), + ImmutableList.copyOf(dateMathExpressions) + ); + } + } + + static boolean containsPlaceholder(String indexPattern) { + return indexPattern.indexOf("${") != -1; + } + + public static IndexPattern from(List source) { + Builder builder = new Builder(); + builder.add(source); + return builder.build(); + } + + public static IndexPattern from(String... source) { + return from(Arrays.asList(source)); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java b/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java index 57c1c18414..4fd4141b08 100644 --- a/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/PitPrivilegesEvaluator.java @@ -18,17 +18,15 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import com.google.common.collect.ImmutableSet; + import org.opensearch.action.ActionRequest; import org.opensearch.action.admin.indices.segments.PitSegmentsRequest; import org.opensearch.action.search.CreatePitRequest; import org.opensearch.action.search.DeletePitRequest; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.unit.TimeValue; import org.opensearch.security.OpenSearchSecurityPlugin; import org.opensearch.security.resolver.IndexResolverReplacer; -import org.opensearch.security.securityconf.SecurityRoles; -import org.opensearch.security.user.User; /** * This class evaluates privileges for point in time (Delete and List all) 
operations. @@ -39,11 +37,9 @@ public class PitPrivilegesEvaluator { public PrivilegesEvaluatorResponse evaluate( final ActionRequest request, - final ClusterService clusterService, - final User user, - final SecurityRoles securityRoles, + final PrivilegesEvaluationContext context, + final ActionPrivileges actionPrivileges, final String action, - final IndexNameExpressionResolver resolver, final PrivilegesEvaluatorResponse presponse, final IndexResolverReplacer irr ) { @@ -64,7 +60,7 @@ public PrivilegesEvaluatorResponse evaluate( if (pitIds.size() == 1 && "_all".equals(pitIds.get(0))) { return presponse; } else { - return handlePitsAccess(pitIds, clusterService, user, securityRoles, action, resolver, presponse, irr); + return handlePitsAccess(pitIds, context, actionPrivileges, action, presponse, irr); } } @@ -73,11 +69,9 @@ public PrivilegesEvaluatorResponse evaluate( */ private PrivilegesEvaluatorResponse handlePitsAccess( List pitIds, - ClusterService clusterService, - User user, - SecurityRoles securityRoles, + PrivilegesEvaluationContext context, + ActionPrivileges actionPrivileges, final String action, - IndexNameExpressionResolver resolver, PrivilegesEvaluatorResponse presponse, final IndexResolverReplacer irr ) { @@ -87,30 +81,16 @@ private PrivilegesEvaluatorResponse handlePitsAccess( for (String[] indices : pitToIndicesMap.values()) { pitIndices.addAll(Arrays.asList(indices)); } - Set allPermittedIndices = getPermittedIndices(pitIndices, clusterService, user, securityRoles, action, resolver, irr); + String[] indicesArr = new String[pitIndices.size()]; + CreatePitRequest req = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, pitIndices.toArray(indicesArr)); + final IndexResolverReplacer.Resolved pitResolved = irr.resolveRequest(req); + PrivilegesEvaluatorResponse subResponse = actionPrivileges.hasIndexPrivilege(context, ImmutableSet.of(action), pitResolved); // Only if user has access to all PIT's indices, allow operation, otherwise continue 
evaluation in PrivilegesEvaluator. - if (allPermittedIndices.containsAll(pitIndices)) { + if (subResponse.isAllowed()) { presponse.allowed = true; presponse.markComplete(); } - return presponse; - } - /** - * This method returns list of permitted indices for the PIT indices passed - */ - private Set getPermittedIndices( - Set pitIndices, - ClusterService clusterService, - User user, - SecurityRoles securityRoles, - final String action, - IndexNameExpressionResolver resolver, - final IndexResolverReplacer irr - ) { - String[] indicesArr = new String[pitIndices.size()]; - CreatePitRequest req = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, pitIndices.toArray(indicesArr)); - final IndexResolverReplacer.Resolved pitResolved = irr.resolveRequest(req); - return securityRoles.reduce(pitResolved, user, new String[] { action }, resolver, clusterService); + return presponse; } } diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesConfigurationValidationException.java b/src/main/java/org/opensearch/security/privileges/PrivilegesConfigurationValidationException.java new file mode 100644 index 0000000000..007f782155 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesConfigurationValidationException.java @@ -0,0 +1,24 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +/** + * Thrown when the privileges configuration cannot be parsed because it is invalid. 
+ */ +public class PrivilegesConfigurationValidationException extends Exception { + public PrivilegesConfigurationValidationException(String message) { + super(message); + } + + public PrivilegesConfigurationValidationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java index 98ffddb3d3..f7e5d6de7d 100644 --- a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationContext.java @@ -10,19 +10,27 @@ */ package org.opensearch.security.privileges; +import java.util.HashMap; +import java.util.Map; +import java.util.function.Supplier; + import com.google.common.collect.ImmutableSet; import org.opensearch.action.ActionRequest; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; import org.opensearch.tasks.Task; /** * Request-scoped context information for privilege evaluation. - * + *

* This class carries metadata about the request and provides caching facilities for data which might need to be * evaluated several times per request. - * + *

* As this class is request-scoped, it is only used by a single thread. Thus, no thread synchronization mechanisms * are necessary. */ @@ -31,9 +39,19 @@ public class PrivilegesEvaluationContext { private final String action; private final ActionRequest request; private IndexResolverReplacer.Resolved resolvedRequest; + private Map indicesLookup; private final Task task; private ImmutableSet mappedRoles; private final IndexResolverReplacer indexResolverReplacer; + private final IndexNameExpressionResolver indexNameExpressionResolver; + private final Supplier clusterStateSupplier; + + /** + * This caches the ready to use WildcardMatcher instances for the current request. Many index patterns have + * to be executed several times per request (for example first for action privileges, later for DLS). Thus, + * it makes sense to cache and later re-use these. + */ + private final Map renderedPatternTemplateCache = new HashMap<>(); public PrivilegesEvaluationContext( User user, @@ -41,20 +59,49 @@ public PrivilegesEvaluationContext( String action, ActionRequest request, Task task, - IndexResolverReplacer indexResolverReplacer + IndexResolverReplacer indexResolverReplacer, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier clusterStateSupplier ) { this.user = user; this.mappedRoles = mappedRoles; this.action = action; this.request = request; - this.task = task; + this.clusterStateSupplier = clusterStateSupplier; this.indexResolverReplacer = indexResolverReplacer; + this.indexNameExpressionResolver = indexNameExpressionResolver; + this.task = task; } public User getUser() { return user; } + /** + * Interpolates any attribute references (like ${user.name}) in the given string and parses the result + * to a WildcardMatcher. This method catches earlier rendered templates in order to avoid recurring re-rendering + * of templates during a single privilege evaluation pass. + * + * @throws ExpressionEvaluationException if the resulting pattern could not be parsed. 
This is usually the case + * if an invalid regex was supplied. + */ + public WildcardMatcher getRenderedMatcher(String template) throws ExpressionEvaluationException { + WildcardMatcher matcher = this.renderedPatternTemplateCache.get(template); + + if (matcher == null) { + try { + matcher = WildcardMatcher.from(UserAttributes.replaceProperties(template, this)); + } catch (Exception e) { + // This especially happens for invalid regular expressions + throw new ExpressionEvaluationException("Error while evaluating expression in " + template, e); + } + + this.renderedPatternTemplateCache.put(template, matcher); + } + + return matcher; + } + public String getAction() { return action; } @@ -94,4 +141,35 @@ void setMappedRoles(ImmutableSet mappedRoles) { this.mappedRoles = mappedRoles; } + public Supplier getClusterStateSupplier() { + return clusterStateSupplier; + } + + public Map getIndicesLookup() { + if (this.indicesLookup == null) { + this.indicesLookup = clusterStateSupplier.get().metadata().getIndicesLookup(); + } + return this.indicesLookup; + } + + public IndexNameExpressionResolver getIndexNameExpressionResolver() { + return indexNameExpressionResolver; + } + + @Override + public String toString() { + return "PrivilegesEvaluationContext{" + + "user=" + + user + + ", action='" + + action + + '\'' + + ", request=" + + request + + ", resolvedRequest=" + + resolvedRequest + + ", mappedRoles=" + + mappedRoles + + '}'; + } } diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationException.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationException.java new file mode 100644 index 0000000000..b3a0ac569f --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluationException.java @@ -0,0 +1,42 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source 
license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import org.apache.commons.lang3.StringUtils; + +/** + * Signifies that an error was encountered while evaluating the privileges of a user for a particular request. + * + */ +public class PrivilegesEvaluationException extends Exception { + public PrivilegesEvaluationException(String message, Throwable cause) { + super(message, cause); + } + + /** + * Returns a formatted multi-line-string showing cause messages as separate, indented lines. Does not include + * stack traces. + */ + public String getNestedMessages() { + if (this.getCause() == null) { + return this.getMessage(); + } + + StringBuilder result = new StringBuilder(this.getMessage()).append("\n"); + + Throwable cause = this.getCause(); + for (int i = 1; cause != null; cause = cause.getCause(), i++) { + result.append(StringUtils.repeat(' ', i * 3)).append(cause.getMessage()).append("\n"); + } + + return result.toString(); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java index 199442ee03..36666972ec 100644 --- a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluator.java @@ -35,6 +35,8 @@ import java.util.Map; import java.util.Set; import java.util.StringJoiner; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Supplier; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -71,9 +73,11 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.termvectors.MultiTermVectorsAction; import org.opensearch.action.update.UpdateAction; +import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.AliasMetadata; import 
org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; @@ -88,9 +92,14 @@ import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; import org.opensearch.security.securityconf.ConfigModel; +import org.opensearch.security.securityconf.DynamicConfigFactory; import org.opensearch.security.securityconf.DynamicConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.CType; import org.opensearch.security.securityconf.impl.DashboardSignInOption; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; @@ -121,7 +130,7 @@ public class PrivilegesEvaluator { private static final IndicesOptions ALLOW_EMPTY = IndicesOptions.fromOptions(true, true, false, false); protected final Logger log = LogManager.getLogger(this.getClass()); - private final ClusterService clusterService; + private final Supplier clusterStateSupplier; private final IndexNameExpressionResolver resolver; @@ -142,10 +151,14 @@ public class PrivilegesEvaluator { private final PitPrivilegesEvaluator pitPrivilegesEvaluator; private DynamicConfigModel dcm; private final NamedXContentRegistry namedXContentRegistry; + private final Settings settings; + private final AtomicReference actionPrivileges = new AtomicReference<>(); public 
PrivilegesEvaluator( final ClusterService clusterService, - final ThreadPool threadPool, + Supplier clusterStateSupplier, + ThreadPool threadPool, + final ThreadContext threadContext, final ConfigurationRepository configurationRepository, final IndexNameExpressionResolver resolver, AuditLog auditLog, @@ -157,12 +170,13 @@ public PrivilegesEvaluator( ) { super(); - this.clusterService = clusterService; this.resolver = resolver; this.auditLog = auditLog; - this.threadContext = threadPool.getThreadContext(); + this.threadContext = threadContext; this.privilegesInterceptor = privilegesInterceptor; + this.clusterStateSupplier = clusterStateSupplier; + this.settings = settings; this.checkSnapshotRestoreWritePrivileges = settings.getAsBoolean( ConfigConstants.SECURITY_CHECK_SNAPSHOT_RESTORE_WRITE_PRIVILEGES, @@ -177,6 +191,56 @@ public PrivilegesEvaluator( termsAggregationEvaluator = new TermsAggregationEvaluator(); pitPrivilegesEvaluator = new PitPrivilegesEvaluator(); this.namedXContentRegistry = namedXContentRegistry; + + if (configurationRepository != null) { + configurationRepository.subscribeOnChange(configMap -> { + try { + SecurityDynamicConfiguration actionGroupsConfiguration = configurationRepository.getConfiguration( + CType.ACTIONGROUPS + ); + SecurityDynamicConfiguration rolesConfiguration = configurationRepository.getConfiguration(CType.ROLES); + + this.updateConfiguration(actionGroupsConfiguration, rolesConfiguration); + } catch (Exception e) { + log.error("Error while updating ActionPrivileges object with {}", configMap, e); + } + }); + } + + if (clusterService != null) { + clusterService.addListener(event -> { + ActionPrivileges actionPrivileges = PrivilegesEvaluator.this.actionPrivileges.get(); + if (actionPrivileges != null) { + actionPrivileges.updateClusterStateMetadataAsync(clusterService, threadPool); + } + }); + } + + } + + void updateConfiguration( + SecurityDynamicConfiguration actionGroupsConfiguration, + SecurityDynamicConfiguration 
rolesConfiguration + ) { + if (rolesConfiguration != null) { + SecurityDynamicConfiguration actionGroupsWithStatics = actionGroupsConfiguration != null + ? DynamicConfigFactory.addStatics(actionGroupsConfiguration.clone()) + : DynamicConfigFactory.addStatics(SecurityDynamicConfiguration.empty(CType.ACTIONGROUPS)); + FlattenedActionGroups flattenedActionGroups = new FlattenedActionGroups(actionGroupsWithStatics); + ActionPrivileges actionPrivileges = new ActionPrivileges( + DynamicConfigFactory.addStatics(rolesConfiguration.clone()), + flattenedActionGroups, + () -> clusterStateSupplier.get().metadata().getIndicesLookup(), + settings + ); + Metadata metadata = clusterStateSupplier.get().metadata(); + actionPrivileges.updateStatefulIndexPrivileges(metadata.getIndicesLookup(), metadata.version()); + ActionPrivileges oldInstance = this.actionPrivileges.getAndSet(actionPrivileges); + + if (oldInstance != null) { + oldInstance.shutdown(); + } + } } @Subscribe @@ -189,22 +253,17 @@ public void onDynamicConfigModelChanged(DynamicConfigModel dcm) { this.dcm = dcm; } - public SecurityRoles getSecurityRoles(Set roles) { - return configModel.getSecurityRoles().filter(roles); + public ActionPrivileges getActionPrivileges() { + return this.actionPrivileges.get(); } - public boolean hasRestAdminPermissions(final User user, final TransportAddress remoteAddress, final String permissions) { - final Set userRoles = mapRoles(user, remoteAddress); - return hasRestAdminPermissions(userRoles, permissions); - } - - private boolean hasRestAdminPermissions(final Set roles, String permission) { - final SecurityRoles securityRoles = getSecurityRoles(roles); - return securityRoles.hasExplicitClusterPermissionPermission(permission); + public boolean hasRestAdminPermissions(final User user, final TransportAddress remoteAddress, final String permission) { + PrivilegesEvaluationContext context = createContext(user, permission); + return 
this.actionPrivileges.get().hasExplicitClusterPrivilege(context, permission).isAllowed(); } public boolean isInitialized() { - return configModel != null && configModel.getSecurityRoles() != null && dcm != null; + return configModel != null && dcm != null && actionPrivileges.get() != null; } private void setUserInfoInThreadContext(User user) { @@ -221,6 +280,10 @@ private void setUserInfoInThreadContext(User user) { } } + public PrivilegesEvaluationContext createContext(User user, String action) { + return createContext(user, action, null, null, null); + } + public PrivilegesEvaluationContext createContext( User user, String action0, @@ -235,7 +298,7 @@ public PrivilegesEvaluationContext createContext( TransportAddress caller = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS); ImmutableSet mappedRoles = ImmutableSet.copyOf((injectedRoles == null) ? mapRoles(user, caller) : injectedRoles); - return new PrivilegesEvaluationContext(user, mappedRoles, action0, request, task, irr); + return new PrivilegesEvaluationContext(user, mappedRoles, action0, request, task, irr, resolver, clusterStateSupplier); } public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) { @@ -262,7 +325,7 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) action0 = PutMappingAction.NAME; } - final PrivilegesEvaluatorResponse presponse = new PrivilegesEvaluatorResponse(); + PrivilegesEvaluatorResponse presponse = new PrivilegesEvaluatorResponse(); final String injectedRolesValidationString = threadContext.getTransient( ConfigConstants.OPENDISTRO_SECURITY_INJECTED_ROLES_VALIDATION @@ -278,8 +341,6 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) mappedRoles = ImmutableSet.copyOf(injectedRolesValidationSet); context.setMappedRoles(mappedRoles); } - presponse.resolvedSecurityRoles.addAll(mappedRoles); - final SecurityRoles securityRoles = getSecurityRoles(mappedRoles); // Add the 
security roles for this user so that they can be used for DLS parameter substitution. user.addSecurityRoles(mappedRoles); @@ -287,11 +348,16 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) final boolean isDebugEnabled = log.isDebugEnabled(); if (isDebugEnabled) { - log.debug("Evaluate permissions for {} on {}", user, clusterService.localNode().getName()); + log.debug("Evaluate permissions for {}", user); log.debug("Action: {} ({})", action0, request.getClass().getSimpleName()); log.debug("Mapped roles: {}", mappedRoles.toString()); } + ActionPrivileges actionPrivileges = this.actionPrivileges.get(); + if (actionPrivileges == null) { + throw new OpenSearchSecurityException("OpenSearch Security is not initialized: roles configuration is missing"); + } + if (request instanceof BulkRequest && (Strings.isNullOrEmpty(user.getRequestedTenant()))) { // Shortcut for bulk actions. The details are checked on the lower level of the BulkShardRequests (Action // indices:data/write/bulk[s]). @@ -300,18 +366,16 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) // No further access check for the default tenant is necessary, as access will be also checked on the TransportShardBulkAction // level. - if (!securityRoles.impliesClusterPermissionPermission(action0)) { - presponse.missingPrivileges.add(action0); - presponse.allowed = false; + presponse = actionPrivileges.hasClusterPrivilege(context, action0); + + if (!presponse.allowed) { log.info( "No cluster-level perm match for {} [Action [{}]] [RolesChecked {}]. 
No permissions for {}", user, action0, mappedRoles, - presponse.missingPrivileges + presponse.getMissingPrivileges() ); - } else { - presponse.allowed = true; } return presponse; } @@ -328,17 +392,8 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) } // Security index access - if (systemIndexAccessEvaluator.evaluate( - request, - task, - action0, - requestedResolved, - presponse, - securityRoles, - user, - resolver, - clusterService - ).isComplete()) { + if (systemIndexAccessEvaluator.evaluate(request, task, action0, requestedResolved, presponse, context, actionPrivileges, user) + .isComplete()) { return presponse; } @@ -348,7 +403,7 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) } // check access for point in time requests - if (pitPrivilegesEvaluator.evaluate(request, clusterService, user, securityRoles, action0, resolver, presponse, irr).isComplete()) { + if (pitPrivilegesEvaluator.evaluate(request, context, actionPrivileges, action0, presponse, irr).isComplete()) { return presponse; } @@ -362,22 +417,20 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) final boolean serviceAccountUser = user.isServiceAccount(); if (isClusterPerm(action0)) { if (serviceAccountUser) { - presponse.missingPrivileges.add(action0); - presponse.allowed = false; log.info("{} is a service account which doesn't have access to cluster level permission: {}", user, action0); - return presponse; + return PrivilegesEvaluatorResponse.insufficient(action0); } - if (!securityRoles.impliesClusterPermissionPermission(action0)) { - presponse.missingPrivileges.add(action0); - presponse.allowed = false; + presponse = actionPrivileges.hasClusterPrivilege(context, action0); + + if (!presponse.allowed) { log.info( "No cluster-level perm match for {} {} [Action [{}]] [RolesChecked {}]. 
No permissions for {}", user, requestedResolved, action0, mappedRoles, - presponse.missingPrivileges + presponse.getMissingPrivileges() ); return presponse; } else { @@ -428,13 +481,11 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) } // term aggregations - if (termsAggregationEvaluator.evaluate(requestedResolved, request, clusterService, user, securityRoles, resolver, presponse) - .isComplete()) { + if (termsAggregationEvaluator.evaluate(requestedResolved, request, context, actionPrivileges, presponse).isComplete()) { return presponse; } - final Set allIndexPermsRequired = evaluateAdditionalIndexPermissions(request, action0); - final String[] allIndexPermsRequiredA = allIndexPermsRequired.toArray(new String[0]); + ImmutableSet allIndexPermsRequired = evaluateAdditionalIndexPermissions(request, action0); if (isDebugEnabled) { log.debug( @@ -444,9 +495,6 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) ); } - presponse.missingPrivileges.clear(); - presponse.missingPrivileges.addAll(allIndexPermsRequired); - if (isDebugEnabled) { log.debug("Requested resolved index types: {}", requestedResolved); log.debug("Security roles: {}", mappedRoles); @@ -472,91 +520,67 @@ public PrivilegesEvaluatorResponse evaluate(PrivilegesEvaluationContext context) if (!replaceResult.continueEvaluation) { if (replaceResult.accessDenied) { auditLog.logMissingPrivileges(action0, request, task); + return PrivilegesEvaluatorResponse.insufficient(action0); } else { presponse.allowed = true; presponse.createIndexRequestBuilder = replaceResult.createIndexRequestBuilder; + return presponse; } - return presponse; } } - if (dnfofEnabled && DNFOF_MATCHER.test(action0)) { - - if (requestedResolved.getAllIndices().isEmpty()) { - presponse.missingPrivileges.clear(); - presponse.allowed = true; - return presponse; - } + boolean dnfofPossible = dnfofEnabled && DNFOF_MATCHER.test(action0); - Set reduced = 
securityRoles.reduce(requestedResolved, user, allIndexPermsRequiredA, resolver, clusterService); + presponse = actionPrivileges.hasIndexPrivilege(context, allIndexPermsRequired, requestedResolved); - if (reduced.isEmpty()) { - if (dcm.isDnfofForEmptyResultsEnabled() && request instanceof IndicesRequest.Replaceable) { - - ((IndicesRequest.Replaceable) request).indices(new String[0]); - presponse.missingPrivileges.clear(); - presponse.allowed = true; - - if (request instanceof SearchRequest) { - ((SearchRequest) request).indicesOptions(ALLOW_EMPTY); - } else if (request instanceof ClusterSearchShardsRequest) { - ((ClusterSearchShardsRequest) request).indicesOptions(ALLOW_EMPTY); - } else if (request instanceof GetFieldMappingsRequest) { - ((GetFieldMappingsRequest) request).indicesOptions(ALLOW_EMPTY); - } - - return presponse; + if (presponse.isPartiallyOk()) { + if (dnfofPossible) { + if (irr.replace(request, true, presponse.getAvailableIndices())) { + return PrivilegesEvaluatorResponse.ok(); } - presponse.allowed = false; - return presponse; } + } else if (!presponse.isAllowed()) { + if (dnfofPossible && dcm.isDnfofForEmptyResultsEnabled() && request instanceof IndicesRequest.Replaceable) { + ((IndicesRequest.Replaceable) request).indices(new String[0]); + + if (request instanceof SearchRequest) { + ((SearchRequest) request).indicesOptions(ALLOW_EMPTY); + } else if (request instanceof ClusterSearchShardsRequest) { + ((ClusterSearchShardsRequest) request).indicesOptions(ALLOW_EMPTY); + } else if (request instanceof GetFieldMappingsRequest) { + ((GetFieldMappingsRequest) request).indicesOptions(ALLOW_EMPTY); + } - if (irr.replace(request, true, reduced.toArray(new String[0]))) { - presponse.missingPrivileges.clear(); - presponse.allowed = true; - return presponse; + return PrivilegesEvaluatorResponse.ok(); } } - // not bulk, mget, etc request here - boolean permGiven = false; - - if (isDebugEnabled) { - log.debug("Security roles: {}", securityRoles.getRoleNames()); 
- } + if (presponse.isAllowed()) { + if (checkFilteredAliases(requestedResolved, action0, isDebugEnabled)) { + presponse.allowed = false; + return presponse; + } - if (dcm.isMultiRolespanEnabled()) { - permGiven = securityRoles.impliesTypePermGlobal(requestedResolved, user, allIndexPermsRequiredA, resolver, clusterService); + if (isDebugEnabled) { + log.debug("Allowed because we have all indices permissions for {}", action0); + } } else { - permGiven = securityRoles.get(requestedResolved, user, allIndexPermsRequiredA, resolver, clusterService); - - } - - if (!permGiven) { log.info( - "No {}-level perm match for {} {} [Action [{}]] [RolesChecked {}]", + "No {}-level perm match for {} {}: {} [Action [{}]] [RolesChecked {}]", "index", user, requestedResolved, + presponse.getReason(), action0, mappedRoles ); - log.info("No permissions for {}", presponse.missingPrivileges); - } else { - - if (checkFilteredAliases(requestedResolved, action0, isDebugEnabled)) { - presponse.allowed = false; - return presponse; - } - - if (isDebugEnabled) { - log.debug("Allowed because we have all indices permissions for {}", action0); + log.info("Index to privilege matrix:\n{}", presponse.getPrivilegeMatrix()); + if (presponse.hasEvaluationExceptions()) { + log.info("Evaluation errors:\n{}", presponse.getEvaluationExceptionInfo()); } } - presponse.allowed = permGiven; return presponse; - } public Set mapRoles(final User user, final TransportAddress caller) { @@ -604,9 +628,8 @@ public List getSignInOptions() { return dcm.getSignInOptions(); } - private Set evaluateAdditionalIndexPermissions(final ActionRequest request, final String originalAction) { - // --- check inner bulk requests - final Set additionalPermissionsRequired = new HashSet<>(); + private ImmutableSet evaluateAdditionalIndexPermissions(final ActionRequest request, final String originalAction) { + ImmutableSet.Builder additionalPermissionsRequired = ImmutableSet.builder(); if (!isClusterPerm(originalAction)) { 
additionalPermissionsRequired.add(originalAction); @@ -660,15 +683,17 @@ private Set evaluateAdditionalIndexPermissions(final ActionRequest reque additionalPermissionsRequired.addAll(ConfigConstants.SECURITY_SNAPSHOT_RESTORE_NEEDED_WRITE_PRIVILEGES); } - if (additionalPermissionsRequired.size() > 1) { - traceAction("Additional permissions required: {}", additionalPermissionsRequired); + ImmutableSet result = additionalPermissionsRequired.build(); + + if (result.size() > 1) { + traceAction("Additional permissions required: {}", result); } - if (log.isDebugEnabled() && additionalPermissionsRequired.size() > 1) { - log.debug("Additional permissions required: {}", additionalPermissionsRequired); + if (log.isDebugEnabled() && result.size() > 1) { + log.debug("Additional permissions required: {}", result); } - return Collections.unmodifiableSet(additionalPermissionsRequired); + return result; } public static boolean isClusterPerm(String action0) { @@ -702,14 +727,14 @@ private boolean checkFilteredAliases(Resolved requestedResolved, String action, indexMetaDataCollection = new Iterable() { @Override public Iterator iterator() { - return clusterService.state().getMetadata().getIndices().values().iterator(); + return clusterStateSupplier.get().getMetadata().getIndices().values().iterator(); } }; } else { Set indexMetaDataSet = new HashSet<>(requestedResolved.getAllIndices().size()); for (String requestAliasOrIndex : requestedResolved.getAllIndices()) { - IndexMetadata indexMetaData = clusterService.state().getMetadata().getIndices().get(requestAliasOrIndex); + IndexMetadata indexMetaData = clusterStateSupplier.get().getMetadata().getIndices().get(requestAliasOrIndex); if (indexMetaData == null) { if (isDebugEnabled) { log.debug("{} does not exist in cluster metadata", requestAliasOrIndex); diff --git a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java index 
915514264c..d072ec301c 100644 --- a/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java +++ b/src/main/java/org/opensearch/security/privileges/PrivilegesEvaluatorResponse.java @@ -26,33 +26,115 @@ package org.opensearch.security.privileges; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; +import com.google.common.collect.ImmutableSet; + import org.opensearch.action.admin.indices.create.CreateIndexRequestBuilder; +import com.selectivem.collections.CheckTable; + public class PrivilegesEvaluatorResponse { boolean allowed = false; - Set missingPrivileges = new HashSet(); Set missingSecurityRoles = new HashSet<>(); - Set resolvedSecurityRoles = new HashSet<>(); PrivilegesEvaluatorResponseState state = PrivilegesEvaluatorResponseState.PENDING; CreateIndexRequestBuilder createIndexRequestBuilder; - + private Set onlyAllowedForIndices = ImmutableSet.of(); + private CheckTable indexToActionCheckTable; + private String privilegeMatrix; + private String reason; + + /** + * Contains issues that were encountered during privilege evaluation. Can be used for logging. + */ + private List evaluationExceptions = new ArrayList<>(); + + /** + * Returns true if the request can be fully allowed. See also isAllowedForSpecificIndices(). + */ public boolean isAllowed() { return allowed; } + /** + * Returns true if the request can be allowed if the referenced indices are reduced (aka "do not fail on forbidden"). + * See getAvailableIndices() for the indices for which we have privileges. + */ + public boolean isPartiallyOk() { + return !this.onlyAllowedForIndices.isEmpty(); + } + + /** + * In case isPartiallyOk() is true, this returns the indices for which we have privileges. 
+ */ + public Set getAvailableIndices() { + return this.onlyAllowedForIndices; + } + + /** + * In case isAllowed() is false, this returns the privileges (aka action names) for which we do not have sufficient + * privileges. + */ public Set getMissingPrivileges() { - return new HashSet(missingPrivileges); + return this.indexToActionCheckTable != null ? this.indexToActionCheckTable.getIncompleteColumns() : Collections.emptySet(); } - public Set getMissingSecurityRoles() { - return new HashSet<>(missingSecurityRoles); + /** + * Returns a human-readable reason for the missing privilege. Can be used to make the error message more easy + * to understand. + */ + public String getReason() { + return this.reason; + } + + public PrivilegesEvaluatorResponse reason(String reason) { + this.reason = reason; + return this; + } + + /** + * Returns a diagnostic string that contains issues that were encountered during privilege evaluation. Can be used for logging. + */ + public String getEvaluationExceptionInfo() { + StringBuilder result = new StringBuilder("Exceptions encountered during privilege evaluation:\n"); + + for (PrivilegesEvaluationException evaluationException : this.evaluationExceptions) { + result.append(evaluationException.getNestedMessages()).append("\n"); + } + + return result.toString(); + } + + public boolean hasEvaluationExceptions() { + return !evaluationExceptions.isEmpty(); } - public Set getResolvedSecurityRoles() { - return new HashSet<>(resolvedSecurityRoles); + public PrivilegesEvaluatorResponse evaluationExceptions(Collection evaluationExceptions) { + this.evaluationExceptions.addAll(evaluationExceptions); + return this; + } + + /** + * Returns an ASCII string showing a matrix of available/missing privileges. + * Rows represent indices, columns represent actions. 
+ */ + public String getPrivilegeMatrix() { + String result = this.privilegeMatrix; + + if (result == null) { + result = this.indexToActionCheckTable.toTableString("ok", "MISSING"); + this.privilegeMatrix = result; + } + return result; + } + + public Set getMissingSecurityRoles() { + return new HashSet<>(missingSecurityRoles); } public CreateIndexRequestBuilder getCreateIndexRequestBuilder() { @@ -79,11 +161,46 @@ public boolean isPending() { @Override public String toString() { - return "PrivEvalResponse [allowed=" + allowed + ", missingPrivileges=" + missingPrivileges + "]"; + return "PrivEvalResponse [\nallowed=" + + allowed + + ",\nonlyAllowedForIndices=" + + onlyAllowedForIndices + + ",\n" + + (indexToActionCheckTable != null ? indexToActionCheckTable.toTableString("ok", "MISSING") : "") + + "]"; + } + + public static PrivilegesEvaluatorResponse ok() { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.allowed = true; + return response; + } + + public static PrivilegesEvaluatorResponse partiallyOk( + Set availableIndices, + CheckTable indexToActionCheckTable + ) { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.onlyAllowedForIndices = ImmutableSet.copyOf(availableIndices); + response.indexToActionCheckTable = indexToActionCheckTable; + return response; + } + + public static PrivilegesEvaluatorResponse insufficient(String missingPrivilege) { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.indexToActionCheckTable = CheckTable.create(ImmutableSet.of("_"), ImmutableSet.of(missingPrivilege)); + return response; + } + + public static PrivilegesEvaluatorResponse insufficient(CheckTable indexToActionCheckTable) { + PrivilegesEvaluatorResponse response = new PrivilegesEvaluatorResponse(); + response.indexToActionCheckTable = indexToActionCheckTable; + return response; } public static enum PrivilegesEvaluatorResponseState { PENDING, COMPLETE; } + } diff 
--git a/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java b/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java index d1ccb84fc8..b1f994163c 100644 --- a/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluator.java @@ -16,89 +16,38 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.OpenSearchSecurityException; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.core.common.transport.TransportAddress; -import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; -import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.user.User; -import org.opensearch.threadpool.ThreadPool; - -import org.greenrobot.eventbus.Subscribe; public class RestLayerPrivilegesEvaluator { protected final Logger log = LogManager.getLogger(this.getClass()); - private final ClusterService clusterService; - private ThreadContext threadContext; - private ConfigModel configModel; - - public RestLayerPrivilegesEvaluator(final ClusterService clusterService, final ThreadPool threadPool) { - this.clusterService = clusterService; - this.threadContext = threadPool.getThreadContext(); - } - - @Subscribe - public void onConfigModelChanged(final ConfigModel configModel) { - this.configModel = configModel; - } - - SecurityRoles getSecurityRoles(final Set roles) { - return configModel.getSecurityRoles().filter(roles); - } + private final PrivilegesEvaluator privilegesEvaluator; - boolean isInitialized() { - return configModel != null && configModel.getSecurityRoles() != null; + public RestLayerPrivilegesEvaluator(PrivilegesEvaluator privilegesEvaluator) { + this.privilegesEvaluator = privilegesEvaluator; } - 
public PrivilegesEvaluatorResponse evaluate(final User user, final Set actions) { - if (!isInitialized()) { - throw new OpenSearchSecurityException("OpenSearch Security is not initialized."); - } - - final PrivilegesEvaluatorResponse presponse = new PrivilegesEvaluatorResponse(); - - final TransportAddress caller = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS); - - final Set mappedRoles = mapRoles(user, caller); - - presponse.resolvedSecurityRoles.addAll(mappedRoles); - final SecurityRoles securityRoles = getSecurityRoles(mappedRoles); + public PrivilegesEvaluatorResponse evaluate(final User user, final String routeName, final Set actions) { + PrivilegesEvaluationContext context = privilegesEvaluator.createContext(user, routeName); final boolean isDebugEnabled = log.isDebugEnabled(); if (isDebugEnabled) { - log.debug("Evaluate permissions for {} on {}", user, clusterService.localNode().getName()); + log.debug("Evaluate permissions for {}", user); log.debug("Action: {}", actions); - log.debug("Mapped roles: {}", mappedRoles.toString()); + log.debug("Mapped roles: {}", context.getMappedRoles().toString()); } - for (final String action : actions) { - if (!securityRoles.impliesClusterPermissionPermission(action)) { - presponse.missingPrivileges.add(action); - presponse.allowed = false; - log.info( - "No permission match for {} [Action [{}]] [RolesChecked {}]. No permissions for {}", - user, - action, - securityRoles.getRoleNames(), - presponse.missingPrivileges - ); - } else { - if (isDebugEnabled) { - log.debug("Allowed because we have permissions for {}", actions); - } - presponse.allowed = true; + PrivilegesEvaluatorResponse result = privilegesEvaluator.getActionPrivileges().hasAnyClusterPrivilege(context, actions); - // break the loop as we found the matching permission - break; - } + if (!result.allowed) { + log.info( + "No permission match for {} [Action [{}]] [RolesChecked {}]. 
No permissions for {}", + user, + routeName, + context.getMappedRoles(), + result.getMissingPrivileges() + ); } - return presponse; - } - - Set mapRoles(final User user, final TransportAddress caller) { - return this.configModel.mapSecurityRoles(user, caller); + return result; } } diff --git a/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java b/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java index 38825a9bf1..99828f7b17 100644 --- a/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/SystemIndexAccessEvaluator.java @@ -32,20 +32,18 @@ import java.util.Set; import java.util.stream.Collectors; +import com.google.common.collect.ImmutableSet; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionRequest; import org.opensearch.action.RealtimeRequest; import org.opensearch.action.search.SearchRequest; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.indices.SystemIndexRegistry; import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.SecurityRoles; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; @@ -72,6 +70,7 @@ public class SystemIndexAccessEvaluator { private final boolean isSystemIndexEnabled; private final boolean isSystemIndexPermissionEnabled; + private final static ImmutableSet SYSTEM_INDEX_PERMISSION_SET = ImmutableSet.of(ConfigConstants.SYSTEM_INDEX_PERMISSION); public SystemIndexAccessEvaluator(final Settings settings, AuditLog 
auditLog, IndexResolverReplacer irr) { this.securityIndex = settings.get( @@ -128,12 +127,11 @@ public PrivilegesEvaluatorResponse evaluate( final String action, final Resolved requestedResolved, final PrivilegesEvaluatorResponse presponse, - final SecurityRoles securityRoles, - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService clusterService + final PrivilegesEvaluationContext context, + final ActionPrivileges actionPrivileges, + final User user ) { - evaluateSystemIndicesAccess(action, requestedResolved, request, task, presponse, securityRoles, user, resolver, clusterService); + evaluateSystemIndicesAccess(action, requestedResolved, request, task, presponse, context, actionPrivileges, user); if (requestedResolved.isLocalAll() || requestedResolved.getAllIndices().contains(securityIndex) @@ -235,10 +233,9 @@ private boolean isActionAllowed(String action) { * @param request the action request to be used for audit logging * @param task task in which this access check will be performed * @param presponse the pre-response object that will eventually become a response and returned to the requester - * @param securityRoles user's roles which will be used for access evaluation + * @param context conveys information about user and mapped roles, etc. 
+ * @param actionPrivileges the up-to-date ActionPrivileges instance * @param user this user's permissions will be looked up - * @param resolver the index expression resolver - * @param clusterService required to fetch cluster state metadata */ private void evaluateSystemIndicesAccess( final String action, @@ -246,10 +243,9 @@ private void evaluateSystemIndicesAccess( final ActionRequest request, final Task task, final PrivilegesEvaluatorResponse presponse, - SecurityRoles securityRoles, - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService clusterService + final PrivilegesEvaluationContext context, + final ActionPrivileges actionPrivileges, + final User user ) { // Perform access check is system index permissions are enabled boolean containsSystemIndex = requestContainsAnySystemIndices(requestedResolved); @@ -260,7 +256,7 @@ private void evaluateSystemIndicesAccess( if (serviceAccountUser && containsRegularIndex) { auditLog.logSecurityIndexAttempt(request, action, task); if (!containsSystemIndex && log.isInfoEnabled()) { - log.info("{} not permitted for a service account {} on non-system indices.", action, securityRoles); + log.info("{} not permitted for a service account {} on non-system indices.", action, context.getMappedRoles()); } else if (containsSystemIndex && log.isDebugEnabled()) { List regularIndices = requestedResolved.getAllIndices() .stream() @@ -282,7 +278,7 @@ private void evaluateSystemIndicesAccess( log.info( "{} not permitted for a regular user {} on protected system indices {}", action, - securityRoles, + context.getMappedRoles(), String.join(", ", getAllProtectedSystemIndices(requestedResolved)) ); } @@ -290,19 +286,13 @@ private void evaluateSystemIndicesAccess( presponse.markComplete(); return; } else if (containsSystemIndex - && !securityRoles.hasExplicitIndexPermission( - requestedResolved, - user, - new String[] { ConfigConstants.SYSTEM_INDEX_PERMISSION }, - resolver, - clusterService - )) { + && 
!actionPrivileges.hasExplicitIndexPrivilege(context, SYSTEM_INDEX_PERMISSION_SET, requestedResolved).isAllowed()) { auditLog.logSecurityIndexAttempt(request, action, task); if (log.isInfoEnabled()) { log.info( "No {} permission for user roles {} to System Indices {}", action, - securityRoles, + context.getMappedRoles(), String.join(", ", getAllSystemIndices(requestedResolved)) ); } diff --git a/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java b/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java index cc0bf25b5e..a2cd1c16a7 100644 --- a/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java +++ b/src/main/java/org/opensearch/security/privileges/TermsAggregationEvaluator.java @@ -26,8 +26,8 @@ package org.opensearch.security.privileges; -import java.util.Set; - +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Sets; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -38,27 +38,24 @@ import org.opensearch.action.search.MultiSearchAction; import org.opensearch.action.search.SearchAction; import org.opensearch.action.search.SearchRequest; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.TermsQueryBuilder; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.SecurityRoles; -import org.opensearch.security.user.User; public class TermsAggregationEvaluator { protected final Logger log = LogManager.getLogger(this.getClass()); - private static final String[] READ_ACTIONS = new String[] { + private static final 
ImmutableSet READ_ACTIONS = ImmutableSet.of( MultiSearchAction.NAME, MultiGetAction.NAME, GetAction.NAME, SearchAction.NAME, - FieldCapabilitiesAction.NAME }; + FieldCapabilitiesAction.NAME + ); private static final QueryBuilder NONE_QUERY = new MatchNoneQueryBuilder(); @@ -67,10 +64,8 @@ public TermsAggregationEvaluator() {} public PrivilegesEvaluatorResponse evaluate( final Resolved resolved, final ActionRequest request, - ClusterService clusterService, - User user, - SecurityRoles securityRoles, - IndexNameExpressionResolver resolver, + PrivilegesEvaluationContext context, + ActionPrivileges actionPrivileges, PrivilegesEvaluatorResponse presponse ) { try { @@ -89,17 +84,22 @@ public PrivilegesEvaluatorResponse evaluate( && ab.getPipelineAggregations().isEmpty() && ab.getSubAggregations().isEmpty()) { - final Set allPermittedIndices = securityRoles.getAllPermittedIndicesForDashboards( - resolved, - user, + PrivilegesEvaluatorResponse subResponse = actionPrivileges.hasIndexPrivilege( + context, READ_ACTIONS, - resolver, - clusterService + Resolved._LOCAL_ALL ); - if (allPermittedIndices == null || allPermittedIndices.isEmpty()) { + + if (subResponse.isPartiallyOk()) { + sr.source() + .query( + new TermsQueryBuilder( + "_index", + Sets.union(subResponse.getAvailableIndices(), resolved.getRemoteIndices()) + ) + ); + } else if (!subResponse.isAllowed()) { sr.source().query(NONE_QUERY); - } else { - sr.source().query(new TermsQueryBuilder("_index", allPermittedIndices)); } presponse.allowed = true; diff --git a/src/main/java/org/opensearch/security/privileges/UserAttributes.java b/src/main/java/org/opensearch/security/privileges/UserAttributes.java index e138c5f621..a1a949d96c 100644 --- a/src/main/java/org/opensearch/security/privileges/UserAttributes.java +++ b/src/main/java/org/opensearch/security/privileges/UserAttributes.java @@ -15,6 +15,7 @@ import com.google.common.base.Joiner; import com.google.common.collect.Iterables; +import com.google.common.collect.Sets; 
import org.opensearch.security.user.User; @@ -24,6 +25,34 @@ * This code was moved over from ConfigModelV7. */ public class UserAttributes { + public static String replaceProperties(String orig, PrivilegesEvaluationContext context) { + User user = context.getUser(); + + orig = orig.replace("${user.name}", user.getName()).replace("${user_name}", user.getName()); + orig = replaceRoles(orig, user); + orig = replaceSecurityRoles(orig, context); + for (Map.Entry entry : user.getCustomAttributesMap().entrySet()) { + if (entry.getKey() == null || entry.getValue() == null) { + continue; + } + orig = orig.replace("${" + entry.getKey() + "}", entry.getValue()); + orig = orig.replace("${" + entry.getKey().replace('.', '_') + "}", entry.getValue()); + } + return orig; + } + + private static String replaceSecurityRoles(final String orig, PrivilegesEvaluationContext context) { + String retVal = orig; + if (orig.contains("${user.securityRoles}") || orig.contains("${user_securityRoles}")) { + final String commaSeparatedRoles = toQuotedCommaSeparatedString( + Sets.union(context.getUser().getSecurityRoles(), context.getMappedRoles()) + ); + retVal = orig.replace("${user.securityRoles}", commaSeparatedRoles).replace("${user_securityRoles}", commaSeparatedRoles); + } + return retVal; + } + + @Deprecated public static String replaceProperties(String orig, User user) { if (user == null || orig == null) { @@ -52,6 +81,7 @@ private static String replaceRoles(final String orig, final User user) { return retVal; } + @Deprecated private static String replaceSecurityRoles(final String orig, final User user) { String retVal = orig; if (orig.contains("${user.securityRoles}") || orig.contains("${user_securityRoles}")) { diff --git a/src/main/java/org/opensearch/security/privileges/WellKnownActions.java b/src/main/java/org/opensearch/security/privileges/WellKnownActions.java new file mode 100644 index 0000000000..af4f0bb025 --- /dev/null +++ 
b/src/main/java/org/opensearch/security/privileges/WellKnownActions.java @@ -0,0 +1,88 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges; + +import com.google.common.collect.ImmutableSet; + +import org.opensearch.action.admin.cluster.health.ClusterHealthAction; +import org.opensearch.action.admin.cluster.node.stats.NodesStatsAction; +import org.opensearch.action.admin.cluster.state.ClusterStateAction; +import org.opensearch.action.admin.cluster.stats.ClusterStatsAction; +import org.opensearch.action.admin.indices.analyze.AnalyzeAction; +import org.opensearch.action.admin.indices.create.AutoCreateAction; +import org.opensearch.action.admin.indices.mapping.put.AutoPutMappingAction; +import org.opensearch.action.admin.indices.mapping.put.PutMappingAction; +import org.opensearch.action.admin.indices.refresh.RefreshAction; +import org.opensearch.action.admin.indices.refresh.TransportShardRefreshAction; +import org.opensearch.action.bulk.BulkAction; +import org.opensearch.action.bulk.TransportShardBulkAction; +import org.opensearch.action.delete.DeleteAction; +import org.opensearch.action.fieldcaps.FieldCapabilitiesAction; +import org.opensearch.action.get.GetAction; +import org.opensearch.action.get.MultiGetAction; +import org.opensearch.action.index.IndexAction; +import org.opensearch.action.main.MainAction; +import org.opensearch.action.search.ClearScrollAction; +import org.opensearch.action.search.MultiSearchAction; +import org.opensearch.action.search.SearchAction; +import org.opensearch.action.search.SearchScrollAction; +import org.opensearch.action.termvectors.MultiTermVectorsAction; +import org.opensearch.action.termvectors.TermVectorsAction; +import 
org.opensearch.action.update.UpdateAction; +import org.opensearch.index.reindex.DeleteByQueryAction; +import org.opensearch.index.reindex.UpdateByQueryAction; +import org.opensearch.security.support.ConfigConstants; + +/** + * This class lists so-called "well-known actions". These are taken into account when creating the pre-computed + * data structures of the ActionPrivileges class. Thus, a very fast performance evaluation will be possible for + * these actions. The trade-off is that each well-known action increases the heap footprint required by the data + * structures. Thus, it makes sense to limit these actions to these which are really performance critical. + */ +public class WellKnownActions { + public static final ImmutableSet CLUSTER_ACTIONS = ImmutableSet.of( + MultiGetAction.NAME, + BulkAction.NAME, + SearchScrollAction.NAME, + MultiSearchAction.NAME, + MultiTermVectorsAction.NAME, + ClearScrollAction.NAME, + MainAction.NAME, + ClusterStatsAction.NAME, + ClusterStateAction.NAME, + ClusterHealthAction.NAME, + NodesStatsAction.NAME + ); + + public static final ImmutableSet INDEX_ACTIONS = ImmutableSet.of( + IndexAction.NAME, + GetAction.NAME, + TermVectorsAction.NAME, + DeleteAction.NAME, + UpdateAction.NAME, + SearchAction.NAME, + UpdateByQueryAction.NAME, + DeleteByQueryAction.NAME, + TransportShardBulkAction.ACTION_NAME, + PutMappingAction.NAME, + AutoPutMappingAction.NAME, + AnalyzeAction.NAME, + AutoCreateAction.NAME, + RefreshAction.NAME, + TransportShardRefreshAction.NAME, + FieldCapabilitiesAction.NAME + ); + + /** + * Compare https://github.com/opensearch-project/security/pull/2887 + */ + public static final ImmutableSet EXPLICITLY_REQUIRED_INDEX_ACTIONS = ImmutableSet.of(ConfigConstants.SYSTEM_INDEX_PERMISSION); +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/AbstractRuleBasedPrivileges.java b/src/main/java/org/opensearch/security/privileges/dlsfls/AbstractRuleBasedPrivileges.java new file mode 100644 index 
0000000000..43baf8090d --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/AbstractRuleBasedPrivileges.java @@ -0,0 +1,833 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import com.google.common.collect.ImmutableMap; +import org.apache.commons.collections4.CollectionUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.IndexPattern; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.resolver.IndexResolverReplacer; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.WildcardMatcher; + +import com.selectivem.collections.CompactMapGroupBuilder; +import com.selectivem.collections.DeduplicatingCompactSubSetBuilder; + +/** + * Abstract super class which provides common DLS/FLS/FM rule evaluation functionality for the concrete classes + * DocumentPrivileges, FieldPrivileges and FieldMasking. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Following the secure-by-default principle, this class returns full restrictions if there is no role covering the + * requested index. It has two fundamental working modes, based on the value of the plugins.security.dfm_empty_overrides_all + * setting: If the setting is true, roles without a DLS/FLS/FM rule are always considered to grant full access. If the + * setting is false, roles without a DLS/FLS/FM rule are ONLY considered if there are no other roles that restrict access. + * The former is the more logical one, as it follows the rule that a user gaining more roles can only gain more privileges. + * The latter breaks that rule. In that case, a user with more roles can have fewer privileges. + *

+ * Concrete sub-classes of this class must define concrete types for SingleRule and JoinedRule. These should be immutable + * types. Additionally, they must define a function that converts roles to SingleRule objects and pass that function + * to the constructor via the roleToRuleFunction parameter. Finally, the abstract methods unrestricted(), restricted() + * and compile() must be implemented. + * + * @param A single DLS/FLS/FM rule as defined in roles.yml. + * @param A merged DLS/FLS/FM rule that might contain SingleRules from several roles that apply to a user at the same time. + */ +abstract class AbstractRuleBasedPrivileges { + private static final Logger log = LogManager.getLogger(AbstractRuleBasedPrivileges.class); + + /** + * The roles configuration this instance is based on + */ + protected final SecurityDynamicConfiguration roles; + + /** + * Compiled rules that are immutable. + */ + protected final StaticRules staticRules; + + /** + * Compiled rules, that are denormalized based on the current indices. These are updated whenever the indices change. + * As this attribute is volatile, access to this attribute should be limited, e.g., not used in tight loops. + */ + private volatile StatefulRules statefulRules; + + /** + * A function that converts role instances to rules. + */ + private final RoleToRuleFunction roleToRuleFunction; + + /** + * Corresponds to the settings flag plugins.security.dfm_empty_overrides_all. 
+ */ + private final boolean dfmEmptyOverridesAll; + + public AbstractRuleBasedPrivileges( + SecurityDynamicConfiguration roles, + Map indexMetadata, + RoleToRuleFunction roleToRuleFunction, + Settings settings + ) { + this.roles = roles; + this.roleToRuleFunction = roleToRuleFunction; + this.staticRules = new StaticRules<>(roles, roleToRuleFunction); + this.dfmEmptyOverridesAll = settings.getAsBoolean(ConfigConstants.SECURITY_DFM_EMPTY_OVERRIDES_ALL, false); + this.statefulRules = new StatefulRules<>(roles, indexMetadata, roleToRuleFunction); + } + + /** + * Returns true if the user identified in the PrivilegesEvaluationContext does not have any restrictions in any case, + * independently of the indices they are requesting. + */ + public boolean isUniversallyUnrestricted(PrivilegesEvaluationContext context) { + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + return false; + } + + /** + * Returns true if the user identified in the PrivilegesEvaluationContext does not have any restrictions for the + * given resolved indices. + * + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. 
+ */ + public boolean isUnrestricted(PrivilegesEvaluationContext context, IndexResolverReplacer.Resolved resolved) + throws PrivilegesEvaluationException { + if (context.getMappedRoles().isEmpty()) { + return false; + } + + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + if (resolved == null) { + return false; + } + + if (this.hasRestrictedRulesWithIndexWildcard(context)) { + return false; + } + + StatefulRules statefulRules = this.statefulRules; + + // The logic is here a bit tricky: For each index/alias/data stream we assume restrictions until we found an unrestricted role. + // If we found an unrestricted role, we continue with the next index/alias/data stream. If we found a restricted role, we abort + // early and return true. + + for (String index : resolved.getAllIndicesResolved(context.getClusterStateSupplier(), context.getIndexNameExpressionResolver())) { + if (this.dfmEmptyOverridesAll) { + // We assume that we have a restriction unless there are roles without restriction. + // Thus, we only have to check the roles without restriction. + if (!this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } + } else { + // if dfmEmptyOverwritesAll == false, we prefer restricted roles over unrestricted ones. + // Thus, we first check for restricted roles. Only if there are not any restricted roles, + // we check for the presence of unrestricted roles. If there are not any matching roles, + // we also assume full restrictions. 
+ + if (this.hasRestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } else if (!CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles()) + && !this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } + } + } + + return true; + } + + /** + * Returns true if there are roles without a rule which imposes restrictions for the particular index. + * Does consider rules with index wildcards ("*"). + */ + public boolean isUnrestricted(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + if (context.getMappedRoles().isEmpty()) { + return false; + } + + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + if (this.hasRestrictedRulesWithIndexWildcard(context)) { + return false; + } + + if (this.dfmEmptyOverridesAll) { + // We assume that we have a restriction unless there are roles without restriction. + // Thus, we only have to check the roles without restriction. + return this.hasUnrestrictedRulesExplicit(context, statefulRules, index); + } else { + // if dfmEmptyOverwritesAll == false, we prefer restricted roles over unrestricted ones. + // Thus, we first check for restricted roles. Only if there are not any restricted roles, + // we check for the presence of unrestricted roles. If there are not any matching roles, + // we also assume full restrictions. + + if (this.hasRestrictedRulesExplicit(context, statefulRules, index)) { + return false; + } else { + if (CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return true; + } + + return this.hasUnrestrictedRulesExplicit(context, statefulRules, index); + } + } + } + + /** + * Returns true if there are roles without a rule which imposes restrictions for the particular index. 
+ * Does not consider rules with index wildcards ("*") - this is reflected by the "explicit" in the method name. + */ + private boolean hasUnrestrictedRulesExplicit(PrivilegesEvaluationContext context, StatefulRules statefulRules, String index) + throws PrivilegesEvaluationException { + + if (statefulRules != null && statefulRules.covers(index)) { + Set roleWithoutRule = statefulRules.indexToRoleWithoutRule.get(index); + + if (roleWithoutRule != null && CollectionUtils.containsAny(roleWithoutRule, context.getMappedRoles())) { + return true; + } + } else { + if (this.staticRules.hasUnrestrictedPatterns(context, index)) { + return true; + } + } + + if (this.staticRules.hasUnrestrictedPatternTemplates(context, index)) { + return true; + } + + IndexAbstraction indexAbstraction = context.getIndicesLookup().get(index); + if (indexAbstraction != null) { + for (String parent : getParents(indexAbstraction)) { + if (hasUnrestrictedRulesExplicit(context, statefulRules, parent)) { + return true; + } + } + } + + return false; + + } + + /** + * Returns true if there are roles with a rule which imposes restrictions for the particular index. + * Does not consider rules with index wildcards ("*") - this is reflected by the "explicit" in the method name. 
+ */ + private boolean hasRestrictedRulesExplicit(PrivilegesEvaluationContext context, StatefulRules statefulRules, String index) + throws PrivilegesEvaluationException { + + if (statefulRules != null && statefulRules.covers(index)) { + Map roleWithRule = statefulRules.indexToRoleToRule.get(index); + + if (roleWithRule != null && CollectionUtils.containsAny(roleWithRule.keySet(), context.getMappedRoles())) { + return true; + } + } else { + if (this.staticRules.hasRestrictedPatterns(context, index)) { + return true; + } + } + + if (this.staticRules.hasRestrictedPatternTemplates(context, index)) { + return true; + } + + IndexAbstraction indexAbstraction = context.getIndicesLookup().get(index); + if (indexAbstraction != null) { + for (String parent : getParents(indexAbstraction)) { + if (hasRestrictedRulesExplicit(context, statefulRules, parent)) { + return true; + } + } + } + + return false; + } + + /** + * Returns true if the user specified by the given context parameter has roles which apply for the index wildcard ("*") + * and which specify DLS rules. + */ + private boolean hasRestrictedRulesWithIndexWildcard(PrivilegesEvaluationContext context) { + return CollectionUtils.containsAny(this.staticRules.roleWithIndexWildcardToRule.keySet(), context.getMappedRoles()); + } + + /** + * Returns the joined restrictions for the given index. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param index The index to be considered. This can be ONLY a concrete index, not an alias or data stream. + * @return The joined restrictions for the given index. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public JoinedRule getRestriction(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + return getRestriction(context, index, fullyRestricted()); + } + + /** + * Returns the joined restrictions for the given index. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param index The index to be considered. This can be ONLY a concrete index, not an alias or data stream. + * @param noRulesDefault Specifies the restriction that shall be used in case no rules are found for an index. Ideally, + * this is fullRestriction(), as the absence of any role mentioning an index means no privileges. + * For backwards compatibility, this might need to be noRestriction(). + * @return The joined restrictions for the given index. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public JoinedRule getRestriction(PrivilegesEvaluationContext context, String index, JoinedRule noRulesDefault) + throws PrivilegesEvaluationException { + if (context.getMappedRoles().isEmpty()) { + return fullyRestricted(); + } + + if (this.dfmEmptyOverridesAll + && CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles())) { + return unrestricted(); + } + + StatefulRules statefulRules = this.statefulRules; + if (statefulRules != null && !statefulRules.covers(index)) { + statefulRules = null; + } + + if (this.dfmEmptyOverridesAll && this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + // If dfmEmptyOverwritesAll == true, we can abort early in case unrestricted rules are present. These + // will overrule any other rules.
+ return unrestricted(); + } + + // Collect rules into ruleSink + Set ruleSink = new HashSet<>(); + collectRules(context, ruleSink, index, statefulRules); + + IndexAbstraction indexAbstraction = context.getIndicesLookup().get(index); + + if (indexAbstraction != null) { + for (String parent : getParents(indexAbstraction)) { + collectRules(context, ruleSink, parent, statefulRules); + } + } + + if (ruleSink.isEmpty()) { + if (this.dfmEmptyOverridesAll) { + // If we did not find any rules, we assume full restrictions + return noRulesDefault; + } else { + // In case dfmEmptyOverwritesAll == false, we now check for unrestricted rules. If these are present, + // we give full access. Otherwise, we also assume full restrictions + if (CollectionUtils.containsAny(this.staticRules.rolesWithIndexWildcardWithoutRule, context.getMappedRoles()) + || this.hasUnrestrictedRulesExplicit(context, statefulRules, index)) { + return unrestricted(); + } else { + return noRulesDefault; + } + } + } else { + return compile(context, ruleSink); + } + } + + /** + * Returns the joined restrictions for the given indices. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param indices The indices to be considered. This can be ONLY concrete indices, not aliases or data streams. + * @return The joined restrictions for the given indices. The resulting map is guaranteed to contain entries for + * all indices specified in the corresponding parameter. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public IndexToRuleMap getRestrictions(PrivilegesEvaluationContext context, Collection indices) + throws PrivilegesEvaluationException { + return getRestrictions(context, indices, fullyRestricted()); + } + + /** + * Returns the joined restrictions for the given indices. + *

+ * If you only need to know whether there are restrictions for an index or not, prefer to use isUnrestricted(), + * as this might be faster. + * + * @param context The current PrivilegesEvaluationContext + * @param indices The indices to be considered. This can be ONLY concrete indices, not aliases or data streams. + * @param noRulesDefault Specifies the restriction that shall be used in case no rules are found for an index. Ideally, + * this is fullRestriction(), as the absence of any role mentioning an index means no privileges. + * For backwards compatibility, this might need to be noRestriction(). + * @return The joined restrictions for the given indices. The resulting map is guaranteed to contain entries for + * all indices specified in the corresponding parameter. + * @throws PrivilegesEvaluationException If something went wrong during privileges evaluation. In such cases, any + * access should be denied to make sure that no unauthorized information is exposed. + */ + public IndexToRuleMap getRestrictions( + PrivilegesEvaluationContext context, + Collection indices, + JoinedRule noRulesDefault + ) throws PrivilegesEvaluationException { + if (isUniversallyUnrestricted(context)) { + return IndexToRuleMap.unrestricted(); + } + + ImmutableMap.Builder result = ImmutableMap.builderWithExpectedSize(indices.size()); + + int restrictedIndices = 0; + + for (String index : indices) { + JoinedRule restriction = getRestriction(context, index, noRulesDefault); + + if (!restriction.isUnrestricted()) { + restrictedIndices++; + } + + result.put(index, restriction); + } + + if (restrictedIndices == 0) { + return IndexToRuleMap.unrestricted(); + } + + return new IndexToRuleMap<>(result.build()); + } + + /** + * Collects the rules for the given index and adds them to the given ruleSink set.
+ */ + private void collectRules( + PrivilegesEvaluationContext context, + Set ruleSink, + String index, + StatefulRules statefulRules + ) throws PrivilegesEvaluationException { + Map statefulRoleToRule = null; + boolean statefulRulesEffective; + + if (statefulRules != null) { + statefulRoleToRule = statefulRules.indexToRoleToRule.get(index); + statefulRulesEffective = true; + } else { + statefulRulesEffective = false; + } + + for (String role : context.getMappedRoles()) { + { + SingleRule rule = this.staticRules.roleWithIndexWildcardToRule.get(role); + + if (rule != null) { + ruleSink.add(rule); + } + } + + if (statefulRoleToRule != null) { + SingleRule rule = statefulRoleToRule.get(role); + + if (rule != null) { + ruleSink.add(rule); + } + } + + if (!statefulRulesEffective) { + // Only when we have no stateful information, we also check the static index patterns + + Map indexPatternToRule = this.staticRules.rolesToStaticIndexPatternToRule.get(role); + if (indexPatternToRule != null) { + for (Map.Entry entry : indexPatternToRule.entrySet()) { + WildcardMatcher pattern = entry.getKey(); + + if (pattern.test(index)) { + ruleSink.add(entry.getValue()); + } + } + } + } + + Map dynamicIndexPatternToRule = this.staticRules.rolesToDynamicIndexPatternToRule.get(role); + + if (dynamicIndexPatternToRule != null) { + for (Map.Entry entry : dynamicIndexPatternToRule.entrySet()) { + try { + if (entry.getKey().matches(index, context, context.getIndicesLookup())) { + ruleSink.add(entry.getValue()); + } + } catch (PrivilegesEvaluationException e) { + throw new PrivilegesEvaluationException("Error while evaluating index pattern of role " + role, e); + } + } + } + } + } + + /** + * Returns a rule that signifies full access + */ + protected abstract JoinedRule unrestricted(); + + /** + * Returns a rule that signifies that a user cannot access anything. 
+ */ + protected abstract JoinedRule fullyRestricted(); + + /** + * Merges the given collection of single rules into one joined rule. + */ + protected abstract JoinedRule compile(PrivilegesEvaluationContext context, Collection rules) + throws PrivilegesEvaluationException; + + synchronized void updateIndices(Map indexMetadata) { + StatefulRules statefulRules = this.statefulRules; + + if (statefulRules == null || !statefulRules.indexMetadata.keySet().equals(indexMetadata.keySet())) { + this.statefulRules = new StatefulRules<>(roles, indexMetadata, this.roleToRuleFunction); + } + } + + /** + * Returns aliases and/or data streams containing the specified index. + */ + private Collection getParents(IndexAbstraction indexAbstraction) { + if (indexAbstraction instanceof IndexAbstraction.Index) { + IndexAbstraction.Index index = (IndexAbstraction.Index) indexAbstraction; + + if (index.getWriteIndex().getAliases().isEmpty() && index.getParentDataStream() == null) { + return Collections.emptySet(); + } + + List result = new ArrayList<>(index.getWriteIndex().getAliases().size() + 1); + + for (String aliasName : index.getWriteIndex().getAliases().keySet()) { + result.add(aliasName); + } + + if (indexAbstraction.getParentDataStream() != null) { + result.add(indexAbstraction.getParentDataStream().getName()); + } + + return result; + } else { + return Collections.emptySet(); + } + } + + /** + * This is an immutable class that contains compiled rules. It is independent of the current indices. 
+ */ + static class StaticRules { + + protected final Set rolesWithIndexWildcardWithoutRule; + protected final Map roleWithIndexWildcardToRule; + protected final Map> rolesToDynamicIndexPatternToRule; + protected final Map> rolesToDynamicIndexPatternWithoutRule; + + /** + * Only used when no index metadata is available upon construction + */ + protected final Map> rolesToStaticIndexPatternToRule; + + /** + * Only used when no index metadata is available upon construction + */ + protected final Map rolesToStaticIndexPatternWithoutRule; + + protected final RoleToRuleFunction roleToRuleFunction; + + StaticRules(SecurityDynamicConfiguration roles, RoleToRuleFunction roleToRuleFunction) { + this.roleToRuleFunction = roleToRuleFunction; + + Set rolesWithIndexWildcardWithoutRule = new HashSet<>(); + Map roleWithIndexWildcardToRule = new HashMap<>(); + Map> rolesToDynamicIndexPatternToRule = new HashMap<>(); + Map> rolesToDynamicIndexPatternWithoutRule = new HashMap<>(); + Map> rolesToStaticIndexPatternToRule = new HashMap<>(); + Map> rolesToStaticIndexPatternWithoutRule = new HashMap<>(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + for (RoleV7.Index rolePermissions : role.getIndex_permissions()) { + if (rolePermissions.getIndex_patterns().contains("*")) { + SingleRule singleRule = this.roleToRule(rolePermissions); + + if (singleRule == null) { + rolesWithIndexWildcardWithoutRule.add(roleName); + } else { + roleWithIndexWildcardToRule.put(roleName, singleRule); + } + } else { + SingleRule singleRule = this.roleToRule(rolePermissions); + IndexPattern indexPattern = IndexPattern.from(rolePermissions.getIndex_patterns()); + + if (indexPattern.hasStaticPattern()) { + if (singleRule == null) { + rolesToStaticIndexPatternWithoutRule.computeIfAbsent(roleName, k -> new ArrayList<>()) + .add(indexPattern.getStaticPattern()); + } else { + 
rolesToStaticIndexPatternToRule.computeIfAbsent(roleName, k -> new HashMap<>()) + .put(indexPattern.getStaticPattern(), singleRule); + } + } + + if (indexPattern.hasDynamicPattern()) { + if (singleRule == null) { + rolesToDynamicIndexPatternWithoutRule.computeIfAbsent(roleName, k -> new HashSet<>()) + .add(indexPattern.dynamicOnly()); + } else { + rolesToDynamicIndexPatternToRule.computeIfAbsent(roleName, k -> new HashMap<>()) + .put(indexPattern.dynamicOnly(), singleRule); + } + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry, e); + } + } + + this.rolesWithIndexWildcardWithoutRule = rolesWithIndexWildcardWithoutRule; + this.roleWithIndexWildcardToRule = roleWithIndexWildcardToRule; + this.rolesToDynamicIndexPatternToRule = rolesToDynamicIndexPatternToRule; + this.rolesToDynamicIndexPatternWithoutRule = rolesToDynamicIndexPatternWithoutRule; + + this.rolesToStaticIndexPatternToRule = rolesToStaticIndexPatternToRule; + this.rolesToStaticIndexPatternWithoutRule = rolesToStaticIndexPatternWithoutRule.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(entry -> entry.getKey(), entry -> WildcardMatcher.from(entry.getValue()))); + } + + protected SingleRule roleToRule(RoleV7.Index rolePermissions) throws PrivilegesConfigurationValidationException { + return this.roleToRuleFunction.apply(rolePermissions); + } + + /** + * Only to be used if there is no stateful index information + */ + boolean hasUnrestrictedPatterns(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + // We assume that we have a restriction unless there are roles without restriction. Thus, we only have to check the roles + // without restriction.
+ for (String role : context.getMappedRoles()) { + WildcardMatcher pattern = this.rolesToStaticIndexPatternWithoutRule.get(role); + + if (pattern != null && pattern.test(index)) { + return true; + } + } + + // If we found no roles without restriction, we assume a restriction + return false; + } + + boolean hasUnrestrictedPatternTemplates(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + // We assume that we have a restriction unless there are roles without restriction. Thus, we only have to check the roles + // without restriction. + for (String role : context.getMappedRoles()) { + Set dynamicIndexPatternsWithoutRule = this.rolesToDynamicIndexPatternWithoutRule.get(role); + + if (dynamicIndexPatternsWithoutRule != null) { + for (IndexPattern indexPatternTemplate : dynamicIndexPatternsWithoutRule) { + try { + if (indexPatternTemplate.matches(index, context, context.getIndicesLookup())) { + return true; + } + } catch (PrivilegesEvaluationException e) { + log.error("Error while matching index pattern of role {}", role, e); + } + } + } + } + + // If we found no roles without restriction, we assume a restriction + return false; + } + + /** + * Only to be used if there is no stateful index information + */ + boolean hasRestrictedPatterns(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + for (String role : context.getMappedRoles()) { + Map indexPatternToRule = this.rolesToStaticIndexPatternToRule.get(role); + + if (indexPatternToRule != null) { + for (WildcardMatcher indexPattern : indexPatternToRule.keySet()) { + if (indexPattern.test(index)) { + return true; + } + } + } + } + + return false; + } + + boolean hasRestrictedPatternTemplates(PrivilegesEvaluationContext context, String index) throws PrivilegesEvaluationException { + for (String role : context.getMappedRoles()) { + Map dynamicIndexPatternToRule = this.rolesToDynamicIndexPatternToRule.get(role); + + if (dynamicIndexPatternToRule
!= null) { + for (IndexPattern indexPattern : dynamicIndexPatternToRule.keySet()) { + try { + if (indexPattern.matches(index, context, context.getIndicesLookup())) { + return true; + } + } catch (PrivilegesEvaluationException e) { + throw new PrivilegesEvaluationException("Error while evaluating index pattern of role " + role, e); + } + } + } + } + + return false; + } + } + + /** + * This is an immutable class which contains compiled rules based on the set of actually existing indices. Objects + * of this class need to be re-constructed whenever the set of indices changes. + */ + static class StatefulRules { + final Map indexMetadata; + + final ImmutableMap> indexToRoleToRule; + final ImmutableMap> indexToRoleWithoutRule; + + private final RoleToRuleFunction roleToRuleFunction; + + StatefulRules( + SecurityDynamicConfiguration roles, + Map indexMetadata, + RoleToRuleFunction roleToRuleFunction + ) { + this.roleToRuleFunction = roleToRuleFunction; + this.indexMetadata = indexMetadata; + + DeduplicatingCompactSubSetBuilder roleSetBuilder = new DeduplicatingCompactSubSetBuilder<>( + roles.getCEntries().keySet() + ); + CompactMapGroupBuilder roleMapBuilder = new CompactMapGroupBuilder<>(roles.getCEntries().keySet()); + Map> indexToRoleWithoutRule = new HashMap<>(); + Map> indexToRoleToRule = new HashMap<>(); + + for (Map.Entry entry : roles.getCEntries().entrySet()) { + try { + String roleName = entry.getKey(); + RoleV7 role = entry.getValue(); + + roleSetBuilder.next(roleName); + + for (RoleV7.Index indexPermissions : role.getIndex_permissions()) { + if (indexPermissions.getIndex_patterns().contains("*")) { + // Wildcard index patterns are handled in the static IndexPermissions object. + continue; + } + + WildcardMatcher indexMatcher = IndexPattern.from(indexPermissions.getIndex_patterns()).getStaticPattern(); + + if (indexMatcher == WildcardMatcher.NONE) { + // The pattern is likely blank because there are only dynamic patterns. 
+ // Dynamic index patterns are not handled here, but in the static IndexPermissions object + continue; + } + + SingleRule rule = this.roleToRule(indexPermissions); + + if (rule != null) { + for (String index : indexMatcher.iterateMatching(indexMetadata.keySet())) { + indexToRoleToRule.computeIfAbsent(index, k -> roleMapBuilder.createMapBuilder()).put(roleName, rule); + } + } else { + for (String index : indexMatcher.iterateMatching(indexMetadata.keySet())) { + indexToRoleWithoutRule.computeIfAbsent(index, k -> roleSetBuilder.createSubSetBuilder()).add(roleName); + } + } + } + } catch (Exception e) { + log.error("Unexpected exception while processing role: {}\nIgnoring role.", entry, e); + } + } + + DeduplicatingCompactSubSetBuilder.Completed completed = roleSetBuilder.build(); + + this.indexToRoleToRule = indexToRoleToRule.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(entry -> entry.getKey(), entry -> entry.getValue().build())); + this.indexToRoleWithoutRule = indexToRoleWithoutRule.entrySet() + .stream() + .collect(ImmutableMap.toImmutableMap(entry -> entry.getKey(), entry -> entry.getValue().build(completed))); + + } + + protected SingleRule roleToRule(RoleV7.Index rolePermissions) throws PrivilegesConfigurationValidationException { + return this.roleToRuleFunction.apply(rolePermissions); + } + + /** + * Returns true if the given index is known to this instance - then it can be assumed that this instance + * has proper rules for the index in the indexToRoleToRule and the indexToRoleWithoutRule attributes. + *

+ * If this returns false, this instance cannot be relied on to determine the correct rules. + */ + boolean covers(String index) { + return this.indexMetadata.get(index) != null; + } + } + + @FunctionalInterface + static interface RoleToRuleFunction { + SingleRule apply(RoleV7.Index indexPrivileges) throws PrivilegesConfigurationValidationException; + } + + static abstract class Rule { + abstract boolean isUnrestricted(); + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsBaseContext.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsBaseContext.java new file mode 100644 index 0000000000..232e2d7422 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsBaseContext.java @@ -0,0 +1,68 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.configuration.AdminDNs; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluator; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.HeaderHelper; +import org.opensearch.security.user.User; + +/** + * Node global context data for DLS/FLS. The lifecycle of an instance of this class is equal to the lifecycle of a running node. 
+ */ +public class DlsFlsBaseContext { + private final PrivilegesEvaluator privilegesEvaluator; + private final ThreadContext threadContext; + private final AdminDNs adminDNs; + + public DlsFlsBaseContext(PrivilegesEvaluator privilegesEvaluator, ThreadContext threadContext, AdminDNs adminDNs) { + this.privilegesEvaluator = privilegesEvaluator; + this.threadContext = threadContext; + this.adminDNs = adminDNs; + } + + /** + * Returns the PrivilegesEvaluationContext for the current thread. Returns null if the current thread is not + * associated with a user. This indicates a system action. In these cases, no privilege evaluation should be performed. + */ + public PrivilegesEvaluationContext getPrivilegesEvaluationContext() { + User user = threadContext.getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER); + + if (user == null || adminDNs.isAdmin(user)) { + return null; + } + + return this.privilegesEvaluator.createContext(user, null); + } + + public boolean isDlsDoneOnFilterLevel() { + if (threadContext.getHeader(ConfigConstants.OPENDISTRO_SECURITY_FILTER_LEVEL_DLS_DONE) != null) { + return true; + } else { + return false; + } + } + + /** + * Returns true for requests that have raised privileges. This corresponds to the check in SecurityFilter: + * https://github.com/opensearch-project/security/blob/1c898dcc4a92e8d4aa8b18c3fed761b5f6e52d4f/src/main/java/org/opensearch/security/filter/SecurityFilter.java#L209 + *

+ * In earlier versions the check in SecurityFilter would automatically bypass any DLS/FLS logic if it was true, + * because no DLS/FLS thread context headers were written. As these are no longer used and the DLS/FLS components + * do the access control checks by themselves, we now need to do that check at these particular locations. + */ + public boolean isPrivilegedConfigRequest() { + return "true".equals(HeaderHelper.getSafeFromHeader(threadContext, ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER)); + } +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeaders.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeaders.java new file mode 100644 index 0000000000..b4e35d96cc --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsLegacyHeaders.java @@ -0,0 +1,247 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.Sets; + +import org.opensearch.Version; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsRequest; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.support.Base64Helper; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.HeaderHelper; +import org.opensearch.transport.Transport; +import org.opensearch.transport.TransportRequest; + +/** + * Encapsulates functionality to provide transport headers with DLS/FLS information that need to be sent + * to nodes which run on the legacy DLS/FLS implementation. This is only needed for mixed clusters. + * See the attribute LEGACY_HEADERS_UNNECESSARY_AS_OF for the concrete version. + *

+ * As soon as backward compat in mixed clusters is no longer required, this class should be removed. + * + */ +public class DlsFlsLegacyHeaders { + /** + * Defines the first OpenSearch version which does not need the legacy headers + * TODO this needs to be adapted if backported + */ + static final Version LEGACY_HEADERS_UNNECESSARY_AS_OF = Version.V_3_0_0; + + /** + * Returns true if the current cluster still contains nodes which are on an OpenSearch version which + * requires the legacy DLS/FLS transport headers to be set. This still does not necessarily indicate that the + * headers must be set, as this also depends on the concrete message that is being sent. + */ + public static boolean possiblyRequired(ClusterService clusterService) { + return !clusterService.state().nodes().getMinNodeVersion().onOrAfter(LEGACY_HEADERS_UNNECESSARY_AS_OF); + } + + /** + * Creates an DlsFlsLegacyHeaders instance and puts it asa transient into the thread context. This should be only called + * if DlsFlsLegacyHeaders.possiblyRequired() returns true. + *

+ * This method should be called in the DlsFlsRequestValve implementation, i.e., during action filtering. + * Later, when transport messages are sent, performHeaderDecoration() should be called in the SecurityInterceptor + * class. + */ + public static void prepare( + ThreadContext threadContext, + PrivilegesEvaluationContext context, + DlsFlsProcessedConfig config, + Metadata metadata, + boolean doFilterLevelDls + ) throws PrivilegesEvaluationException { + DlsFlsLegacyHeaders preparedHeaders = new DlsFlsLegacyHeaders(context, config, metadata, doFilterLevelDls); + + if (context.getRequest() instanceof ClusterSearchShardsRequest && HeaderHelper.isTrustedClusterRequest(threadContext)) { + // Special case: Another cluster tries to initiate a cross cluster search and will talk directly to + // the shards on our cluster. In this case, we do send the information as response headers. + // The other cluster has code to correctly evaluate these response headers + preparedHeaders.performResponseHeaderDecoration(threadContext); + } else if (threadContext.getTransient(TRANSIENT_HEADER) == null) { + // Normal case: No CCS involved + threadContext.putTransient(TRANSIENT_HEADER, preparedHeaders); + } + } + + public static final String TRANSIENT_HEADER = ConfigConstants.OPENDISTRO_SECURITY_CONFIG_PREFIX + "dls_fls_legacy_headers"; + + private final DlsFlsProcessedConfig config; + + private final String dlsHeader; + private final String flsHeader; + private final String fmHeader; + + public DlsFlsLegacyHeaders( + PrivilegesEvaluationContext context, + DlsFlsProcessedConfig config, + Metadata metadata, + boolean doFilterLevelDls + ) throws PrivilegesEvaluationException { + this.config = config; + this.dlsHeader = !doFilterLevelDls ? 
getDlsHeader(context, config.getDocumentPrivileges(), metadata) : null; + this.flsHeader = getFlsHeader(context, config.getFieldPrivileges(), metadata); + this.fmHeader = getFieldMaskingHeader(context, config.getFieldMasking(), metadata); + } + + /** + * Writes the prepared DLS/FLS headers into the given map IF this method deems that it is necessary. + * To be called when a transport message is sent to another node, i.e. in TransportInterceptor.interceptSender(). + */ + public void performHeaderDecoration(Transport.Connection connection, TransportRequest request, Map headerMap) { + + if (connection.getVersion().onOrAfter(LEGACY_HEADERS_UNNECESSARY_AS_OF)) { + // Target node is new enough -> no headers to be applied + return; + } + + if (request instanceof ActionRequest) { + // The legacy implementation will create the information by itself in DlsFlsValve if an ActionRequest is received + // Thus, if we have an ActionRequest, we do not need to get active either + return; + } + + if (dlsHeader != null) { + headerMap.put(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, dlsHeader); + } + + if (flsHeader != null) { + headerMap.put(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, flsHeader); + } + + if (fmHeader != null) { + headerMap.put(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, fmHeader); + } + } + + /** + * Only necessary for CCS in the case that another cluster checks out our shards with ClusterSearchShardsRequest: + * In this case, we send the necessary information as response headers. The other cluster has code to evaluate + * these response headers. 
+ */ + public void performResponseHeaderDecoration(ThreadContext threadContext) { + if (dlsHeader != null) { + threadContext.addResponseHeader(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER, dlsHeader); + } + + if (flsHeader != null) { + threadContext.addResponseHeader(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER, flsHeader); + } + + if (fmHeader != null) { + threadContext.addResponseHeader(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER, fmHeader); + } + } + + public String getDlsHeader() { + return dlsHeader; + } + + public String getFlsHeader() { + return flsHeader; + } + + public String getFmHeader() { + return fmHeader; + } + + private static String getDlsHeader(PrivilegesEvaluationContext context, DocumentPrivileges documentPrivileges, Metadata metadata) + throws PrivilegesEvaluationException { + IndexToRuleMap dlsRestrictionMap = documentPrivileges.getRestrictions( + context, + metadata.indices().keySet(), + documentPrivileges.unrestricted() + ); + + if (dlsRestrictionMap.isUnrestricted()) { + return null; + } + + Map> dlsQueriesByIndex = new HashMap<>(); + + for (Map.Entry entry : dlsRestrictionMap.getIndexMap().entrySet()) { + // Do not include implicitly unrestricted rules (this is achieved by the != operator, an equals() would also catch explicit + // unrestricted rules) + if (entry.getValue() != documentPrivileges.unrestricted()) { + dlsQueriesByIndex.put( + entry.getKey(), + entry.getValue().getQueries().stream().map(query -> query.getRenderedSource()).collect(Collectors.toSet()) + ); + } + } + + return Base64Helper.serializeObject((Serializable) dlsQueriesByIndex); + } + + private static String getFlsHeader(PrivilegesEvaluationContext context, FieldPrivileges fieldPrivileges, Metadata metadata) + throws PrivilegesEvaluationException { + IndexToRuleMap flsRuleMap = fieldPrivileges.getRestrictions( + context, + metadata.indices().keySet(), + fieldPrivileges.unrestricted() + ); + + if (flsRuleMap.isUnrestricted()) { + return null; 
+ } + + Map> flsFields = new HashMap<>(); + + for (Map.Entry entry : flsRuleMap.getIndexMap().entrySet()) { + // Do not include implicitly unrestricted rules (this is achieved by the != operator, an equals() would also catch explicit + // unrestricted rules) + if (entry.getValue() != fieldPrivileges.unrestricted()) { + flsFields.put(entry.getKey(), Sets.newHashSet(entry.getValue().getSource())); + } + + } + + return Base64Helper.serializeObject((Serializable) flsFields); + } + + private static String getFieldMaskingHeader(PrivilegesEvaluationContext context, FieldMasking fieldMasking, Metadata metadata) + throws PrivilegesEvaluationException { + IndexToRuleMap fmRuleMap = fieldMasking.getRestrictions( + context, + metadata.indices().keySet(), + fieldMasking.unrestricted() + ); + + if (fmRuleMap.isUnrestricted()) { + return null; + } + + Map> maskedFieldsMap = new HashMap<>(); + + for (Map.Entry entry : fmRuleMap.getIndexMap().entrySet()) { + // Do not include implicitly unrestricted rules (this is achieved by the != operator, an equals() would also catch explicit + // unrestricted rules) + if (entry.getValue() != fieldMasking.unrestricted()) { + maskedFieldsMap.put(entry.getKey(), Sets.newHashSet(entry.getValue().getSource())); + } + } + + return Base64Helper.serializeObject((Serializable) maskedFieldsMap); + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsProcessedConfig.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsProcessedConfig.java new file mode 100644 index 0000000000..b217b59df3 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsFlsProcessedConfig.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.Map; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.Metadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.security.privileges.ClusterStateMetadataDependentPrivileges; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; + +/** + * Encapsulates the processed DLS/FLS configuration from roles.yml. + * The current instance is held and managed by DlsFlsValveImpl. + */ +public class DlsFlsProcessedConfig extends ClusterStateMetadataDependentPrivileges { + private static final Logger log = LogManager.getLogger(DlsFlsProcessedConfig.class); + + private final DocumentPrivileges documentPrivileges; + private final FieldPrivileges fieldPrivileges; + private final FieldMasking fieldMasking; + private long metadataVersionEffective = -1; + + public DlsFlsProcessedConfig( + SecurityDynamicConfiguration rolesConfiguration, + Map indexMetadata, + NamedXContentRegistry xContentRegistry, + Settings settings, + FieldMasking.Config fieldMaskingConfig + ) { + this.documentPrivileges = new DocumentPrivileges(rolesConfiguration, indexMetadata, xContentRegistry, settings); + this.fieldPrivileges = new FieldPrivileges(rolesConfiguration, indexMetadata, settings); + this.fieldMasking = new FieldMasking(rolesConfiguration, indexMetadata, fieldMaskingConfig, settings); + } + + public DocumentPrivileges getDocumentPrivileges() { + return this.documentPrivileges; + } + + public FieldPrivileges getFieldPrivileges() { + return this.fieldPrivileges; + } + + public FieldMasking getFieldMasking() { + return this.fieldMasking; + } + + @Override + protected void updateClusterStateMetadata(Metadata metadata) { + long 
start = System.currentTimeMillis(); + Map indexLookup = metadata.getIndicesLookup(); + + this.documentPrivileges.updateIndices(indexLookup); + this.fieldPrivileges.updateIndices(indexLookup); + this.fieldMasking.updateIndices(indexLookup); + + long duration = System.currentTimeMillis() - start; + + log.debug("Updating DlsFlsProcessedConfig took {} ms", duration); + } + + @Override + protected long getCurrentlyUsedMetadataVersion() { + return this.metadataVersionEffective; + } +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DlsRestriction.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsRestriction.java new file mode 100644 index 0000000000..242e0000a4 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DlsRestriction.java @@ -0,0 +1,122 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.Collections; +import java.util.List; +import java.util.function.Function; + +import com.google.common.collect.ImmutableList; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.search.join.ToChildBlockJoinQuery; + +import org.opensearch.index.query.ParsedQuery; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.TermsQueryBuilder; +import org.opensearch.security.queries.QueryBuilderTraverser; + +/** + * Represents the DlsRestriction for a particular index. 
Internally, the DLS restriction is realized by boolean queries, + * which restrict the allowed documents. + */ +public class DlsRestriction extends AbstractRuleBasedPrivileges.Rule { + + public static final DlsRestriction NONE = new DlsRestriction(Collections.emptyList()); + public static final DlsRestriction FULL = new DlsRestriction(ImmutableList.of(DocumentPrivileges.RenderedDlsQuery.MATCH_NONE)); + + private static final Query NON_NESTED_QUERY; + + static { + // Moved from + // https://github.com/opensearch-project/security/blob/main/src/main/java/org/opensearch/security/configuration/DlsQueryParser.java + // Match all documents but not the nested ones + // Nested document types start with __ + // https://discuss.elastic.co/t/whats-nested-documents-layout-inside-the-lucene/59944/9 + NON_NESTED_QUERY = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), BooleanClause.Occur.FILTER) + .add(new PrefixQuery(new Term("_type", "__")), BooleanClause.Occur.MUST_NOT) + .build(); + } + + private final ImmutableList queries; + + DlsRestriction(List queries) { + this.queries = ImmutableList.copyOf(queries); + } + + @Override + public boolean isUnrestricted() { + return this.queries.isEmpty(); + } + + public org.apache.lucene.search.BooleanQuery.Builder toBooleanQueryBuilder( + QueryShardContext queryShardContext, + Function queryMapFunction + ) { + if (this.queries.isEmpty()) { + return null; + } + + boolean hasNestedMapping = queryShardContext.getMapperService().hasNested(); + + org.apache.lucene.search.BooleanQuery.Builder dlsQueryBuilder = new org.apache.lucene.search.BooleanQuery.Builder(); + dlsQueryBuilder.setMinimumNumberShouldMatch(1); + + for (DocumentPrivileges.RenderedDlsQuery query : this.queries) { + ParsedQuery parsedQuery = queryShardContext.toQuery(query.getQueryBuilder()); + org.apache.lucene.search.Query luceneQuery = parsedQuery.query(); + + if (queryMapFunction != null) { + luceneQuery = queryMapFunction.apply(luceneQuery); + } + + 
dlsQueryBuilder.add(luceneQuery, BooleanClause.Occur.SHOULD); + + if (hasNestedMapping) { + final BitSetProducer parentDocumentsFilter = queryShardContext.bitsetFilter(NON_NESTED_QUERY); + dlsQueryBuilder.add(new ToChildBlockJoinQuery(luceneQuery, parentDocumentsFilter), BooleanClause.Occur.SHOULD); + } + } + + return dlsQueryBuilder; + } + + public boolean containsTermLookupQuery() { + for (DocumentPrivileges.RenderedDlsQuery query : this.queries) { + if (QueryBuilderTraverser.exists( + query.getQueryBuilder(), + (q) -> (q instanceof TermsQueryBuilder) && ((TermsQueryBuilder) q).termsLookup() != null + )) { + return true; + } + } + + return false; + } + + @Override + public String toString() { + if (isUnrestricted()) { + return "DLS:"; + } else { + return "DLS:" + queries; + } + } + + public ImmutableList getQueries() { + return queries; + } +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/DocumentPrivileges.java b/src/main/java/org/opensearch/security/privileges/dlsfls/DocumentPrivileges.java new file mode 100644 index 0000000000..2afcdd4b82 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/DocumentPrivileges.java @@ -0,0 +1,210 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import org.apache.logging.log4j.util.Strings; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.index.query.AbstractQueryBuilder; +import org.opensearch.index.query.MatchNoneQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.privileges.UserAttributes; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; + +/** + * This class converts role configuration into pre-computed, optimized data structures for checking DLS privileges. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Instances of this class are managed by DlsFlsProcessedConfig. + */ +public class DocumentPrivileges extends AbstractRuleBasedPrivileges { + + private final NamedXContentRegistry xContentRegistry; + + public DocumentPrivileges( + SecurityDynamicConfiguration roles, + Map indexMetadata, + NamedXContentRegistry xContentRegistry, + Settings settings + ) { + super(roles, indexMetadata, (rolePermissions) -> roleToRule(rolePermissions, xContentRegistry), settings); + this.xContentRegistry = xContentRegistry; + } + + static DlsQuery roleToRule(RoleV7.Index rolePermissions, NamedXContentRegistry xContentRegistry) + throws PrivilegesConfigurationValidationException { + String dlsQueryTemplate = rolePermissions.getDls(); + + if (dlsQueryTemplate != null && !Strings.isBlank(dlsQueryTemplate)) { + return DlsQuery.create(dlsQueryTemplate, xContentRegistry); + } else { + return null; + } + } + + @Override + protected DlsRestriction unrestricted() { + return DlsRestriction.NONE; + } + + @Override + protected DlsRestriction fullyRestricted() { + return DlsRestriction.FULL; + } + + @Override + protected DlsRestriction compile(PrivilegesEvaluationContext context, Collection rules) throws PrivilegesEvaluationException { + List renderedQueries = new ArrayList<>(rules.size()); + + for (DlsQuery query : rules) { + renderedQueries.add(query.evaluate(context)); + } + + return new DlsRestriction(renderedQueries); + } + + /** + * The basic rules of DLS are queries. This class encapsulates single queries. 
+ */ + static abstract class DlsQuery { + final String queryString; + + DlsQuery(String queryString) { + this.queryString = queryString; + } + + abstract RenderedDlsQuery evaluate(PrivilegesEvaluationContext context) throws PrivilegesEvaluationException; + + @Override + public int hashCode() { + return queryString.hashCode(); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (!(obj instanceof DlsQuery)) { + return false; + } + DlsQuery other = (DlsQuery) obj; + return Objects.equals(this.queryString, other.queryString); + } + + protected QueryBuilder parseQuery(String queryString, NamedXContentRegistry xContentRegistry) + throws PrivilegesConfigurationValidationException { + try { + XContentParser parser = JsonXContent.jsonXContent.createParser( + xContentRegistry, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + queryString + ); + return AbstractQueryBuilder.parseInnerQueryBuilder(parser); + } catch (Exception e) { + throw new PrivilegesConfigurationValidationException("Invalid DLS query: " + queryString, e); + } + } + + static DlsQuery create(String queryString, NamedXContentRegistry xContentRegistry) + throws PrivilegesConfigurationValidationException { + if (queryString.contains("${")) { + return new DlsQuery.Dynamic(queryString, xContentRegistry); + } else { + return new DlsQuery.Constant(queryString, xContentRegistry); + } + } + + /** + * Represents a DLS query WITHOUT user attribute references like "${user.name}". These queries are already + * pre-parsed and ready for use. 
+ */ + static class Constant extends DlsQuery { + private final RenderedDlsQuery renderedDlsQuery; + + Constant(String queryString, NamedXContentRegistry xContentRegistry) throws PrivilegesConfigurationValidationException { + super(queryString); + this.renderedDlsQuery = new RenderedDlsQuery(parseQuery(queryString, xContentRegistry), queryString); + } + + @Override + RenderedDlsQuery evaluate(PrivilegesEvaluationContext context) { + return renderedDlsQuery; + } + } + + /** + * Represents a DLS query with user attribute references like "${user.name}". These queries are parsed + * during privilege evaluation time, after user attribute interpolation has been performed. + */ + static class Dynamic extends DlsQuery { + private final NamedXContentRegistry xContentRegistry; + + Dynamic(String queryString, NamedXContentRegistry xContentRegistry) { + super(queryString); + this.xContentRegistry = xContentRegistry; + } + + @Override + RenderedDlsQuery evaluate(PrivilegesEvaluationContext context) throws PrivilegesEvaluationException { + String effectiveQueryString = UserAttributes.replaceProperties(this.queryString, context); + try { + return new RenderedDlsQuery(parseQuery(effectiveQueryString, xContentRegistry), effectiveQueryString); + } catch (Exception e) { + throw new PrivilegesEvaluationException("Invalid DLS query: " + effectiveQueryString, e); + } + } + } + } + + /** + * This is a DLS query where any templates (like ${user.name}) have been interpolated and which has been + * succesfully parsed to a QueryBuilder instance. 
+ */ + public static class RenderedDlsQuery { + public static RenderedDlsQuery MATCH_NONE = new RenderedDlsQuery(new MatchNoneQueryBuilder(), "{\"match_none:\" {}}"); + + private final QueryBuilder queryBuilder; + private final String renderedSource; + + RenderedDlsQuery(QueryBuilder queryBuilder, String renderedSource) { + this.queryBuilder = queryBuilder; + this.renderedSource = renderedSource; + } + + public QueryBuilder getQueryBuilder() { + return queryBuilder; + } + + public String getRenderedSource() { + return renderedSource; + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FieldMasking.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldMasking.java new file mode 100644 index 0000000000..b0b77375be --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldMasking.java @@ -0,0 +1,487 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.privileges.dlsfls; + +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; + +import com.google.common.base.Splitter; +import com.google.common.collect.ImmutableList; +import org.apache.commons.lang3.StringUtils; +import org.apache.lucene.util.BytesRef; +import org.bouncycastle.util.encoders.Hex; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.configuration.Salt; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.ConfigConstants; +import org.opensearch.security.support.WildcardMatcher; + +import com.rfksystems.blake2b.Blake2b; + +/** + * This class converts role configuration into pre-computed, optimized data structures for applying field masking + * to indexed documents. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Instances of this class are managed by DlsFlsProcessedConfig. + */ +public class FieldMasking extends AbstractRuleBasedPrivileges { + + private final FieldMasking.Config fieldMaskingConfig; + + public FieldMasking( + SecurityDynamicConfiguration roles, + Map indexMetadata, + FieldMasking.Config fieldMaskingConfig, + Settings settings + ) { + super(roles, indexMetadata, (rolePermissions) -> roleToRule(rolePermissions, fieldMaskingConfig), settings); + this.fieldMaskingConfig = fieldMaskingConfig; + } + + static FieldMaskingRule.SimpleRule roleToRule(RoleV7.Index rolePermissions, FieldMasking.Config fieldMaskingConfig) + throws PrivilegesConfigurationValidationException { + List fmExpressions = rolePermissions.getMasked_fields(); + + if (fmExpressions != null && !fmExpressions.isEmpty()) { + return new FieldMaskingRule.SimpleRule(rolePermissions, fieldMaskingConfig); + } else { + return null; + } + } + + @Override + protected FieldMaskingRule unrestricted() { + return FieldMaskingRule.ALLOW_ALL; + } + + @Override + protected FieldMaskingRule fullyRestricted() { + return new FieldMaskingRule.SimpleRule( + ImmutableList.of(new FieldMaskingRule.Field(FieldMaskingExpression.MASK_ALL, fieldMaskingConfig)) + ); + } + + @Override + protected FieldMaskingRule compile(PrivilegesEvaluationContext context, Collection rules) + throws PrivilegesEvaluationException { + return new FieldMaskingRule.MultiRole(rules); + } + + public static abstract class FieldMaskingRule extends AbstractRuleBasedPrivileges.Rule { + public static final FieldMaskingRule ALLOW_ALL = new SimpleRule(ImmutableList.of()); + + public static FieldMaskingRule of(FieldMasking.Config fieldMaskingConfig, String... 
rules) + throws PrivilegesConfigurationValidationException { + ImmutableList.Builder patterns = new ImmutableList.Builder<>(); + + for (String rule : rules) { + patterns.add(new Field(new FieldMaskingExpression(rule), fieldMaskingConfig)); + } + + return new SimpleRule(patterns.build()); + } + + public abstract Field get(String field); + + public abstract boolean isAllowAll(); + + public boolean isMasked(String field) { + return get(field) != null; + } + + public boolean isUnrestricted() { + return this.isAllowAll(); + } + + public abstract List getSource(); + + /** + * A rule which was derived directly from exactly one role. + */ + public static class SimpleRule extends FieldMaskingRule { + + final RoleV7.Index sourceIndex; + final ImmutableList expressions; + + SimpleRule(RoleV7.Index sourceIndex, FieldMasking.Config fieldMaskingConfig) throws PrivilegesConfigurationValidationException { + this.sourceIndex = sourceIndex; + this.expressions = parseExpressions(sourceIndex, fieldMaskingConfig); + } + + SimpleRule(ImmutableList expressions) { + this.sourceIndex = null; + this.expressions = expressions; + } + + public Field get(String field) { + return internalGet(stripKeywordSuffix(field)); + } + + private Field internalGet(String field) { + for (Field expression : this.expressions) { + if (expression.getPattern().test(field)) { + return expression; + } + } + + return null; + } + + public boolean isAllowAll() { + return expressions.isEmpty(); + } + + @Override + public String toString() { + if (isAllowAll()) { + return "FM:[]"; + } else { + return "FM:" + expressions; + } + } + + @Override + public List getSource() { + return this.expressions.stream().map(FieldMaskingRule.Field::getSource).collect(Collectors.toList()); + } + + static ImmutableList parseExpressions(RoleV7.Index index, FieldMasking.Config fieldMaskingConfig) + throws PrivilegesConfigurationValidationException { + ImmutableList.Builder result = ImmutableList.builder(); + + for (String source : 
index.getMasked_fields()) { + result.add(new Field(new FieldMaskingExpression(source), fieldMaskingConfig)); + } + + return result.build(); + } + } + + public static class MultiRole extends FieldMaskingRule { + final ImmutableList parts; + final boolean allowAll; + + MultiRole(Collection parts) { + this.parts = ImmutableList.copyOf(parts); + this.allowAll = this.parts.stream().anyMatch(SimpleRule::isAllowAll); + } + + public Field get(String field) { + field = stripKeywordSuffix(field); + + for (SimpleRule part : parts) { + Field masking = part.get(field); + + if (masking != null) { + return masking; + } + } + + return null; + } + + public boolean isAllowAll() { + return allowAll; + } + + @Override + public String toString() { + if (isAllowAll()) { + return "FM:[]"; + } else { + return "FM:" + parts.stream().map((p) -> p.expressions).collect(Collectors.toList()); + } + } + + @Override + public List getSource() { + return this.parts.stream().flatMap(r -> r.getSource().stream()).collect(Collectors.toList()); + } + } + + /** + * Represents a single field that is supposed to be masked. Combines a single expression with the global + * configuration. + */ + public static class Field { + private final FieldMaskingExpression expression; + + private final String hashAlgorithm; + private final Salt salt; + private final byte[] saltBytes; + + Field(FieldMaskingExpression expression, FieldMasking.Config fieldMaskingConfig) { + this.expression = expression; + this.hashAlgorithm = expression.getAlgoName() != null ? expression.getAlgoName() + : StringUtils.isNotEmpty(fieldMaskingConfig.getDefaultHashAlgorithm()) ? 
fieldMaskingConfig.getDefaultHashAlgorithm() + : null; + this.salt = fieldMaskingConfig.getSalt(); + this.saltBytes = this.salt.getSalt16(); + } + + public WildcardMatcher getPattern() { + return expression.getPattern(); + } + + public byte[] apply(byte[] value) { + if (expression.getRegexReplacements() != null) { + return applyRegexReplacements(value, expression.getRegexReplacements()); + } else if (this.hashAlgorithm != null) { + return customHash(value, this.hashAlgorithm); + } else { + return blake2bHash(value); + } + } + + public String apply(String value) { + return new String(apply(value.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8); + } + + public BytesRef apply(BytesRef value) { + if (value == null) { + return null; + } + + return new BytesRef(apply(BytesRef.deepCopyOf(value).bytes)); + } + + @Override + public String toString() { + return expression.toString(); + } + + String getSource() { + return expression.getSource(); + } + + FieldMaskingExpression getExpression() { + return expression; + } + + private static byte[] customHash(byte[] in, String algorithm) { + try { + MessageDigest digest = MessageDigest.getInstance(algorithm); + return Hex.encode(digest.digest(in)); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException(e); + } + } + + private byte[] applyRegexReplacements(byte[] value, List regexReplacements) { + String string = new String(value, StandardCharsets.UTF_8); + for (FieldMaskingExpression.RegexReplacement rr : regexReplacements) { + string = rr.getRegex().matcher(string).replaceAll(rr.getReplacement()); + } + return string.getBytes(StandardCharsets.UTF_8); + } + + private byte[] blake2bHash(byte[] in) { + // Salt is passed incorrectly but order of parameters is retained at present to ensure full backwards compatibility + // Tracking with https://github.com/opensearch-project/security/issues/4274 + final Blake2b hash = new Blake2b(null, 32, null, saltBytes); + hash.update(in, 0, in.length); + final 
byte[] out = new byte[hash.getDigestSize()]; + hash.digest(out, 0); + + return Hex.encode(out); + } + } + + static String stripKeywordSuffix(String field) { + if (field.endsWith(".keyword")) { + return field.substring(0, field.length() - ".keyword".length()); + } else { + return field; + } + } + } + + /** + * Represents a parsed field masking expression from the roles.yml file. + */ + public static class FieldMaskingExpression { + public static final FieldMaskingExpression MASK_ALL = new FieldMaskingExpression(WildcardMatcher.ANY, "*"); + + private final WildcardMatcher pattern; + private final String algoName; + private final List regexReplacements; + private final String source; + + public FieldMaskingExpression(String value) throws PrivilegesConfigurationValidationException { + this.source = value; + + List tokens = Splitter.on("::").splitToList(value); + pattern = WildcardMatcher.from(tokens.get(0)); + + if (tokens.size() == 1) { + algoName = null; + regexReplacements = null; + } else if (tokens.size() == 2) { + regexReplacements = null; + try { + this.algoName = tokens.get(1); + // We try to instantiate the MessageDigest instance already now to make sure that it is valid. + // However, we do not store the instance as MessageDigest instance are NOT thread safe. + // Some MessageDigest implementations allow to be cloned. A possible future optimization would + // be detecting whether the instances can be cloned and then using the clone method for + // construction. 
+ MessageDigest.getInstance(tokens.get(1)); + } catch (NoSuchAlgorithmException e) { + throw new PrivilegesConfigurationValidationException("Invalid algorithm " + tokens.get(1)); + } + } else if (tokens.size() % 2 == 1) { + algoName = null; + regexReplacements = new ArrayList<>((tokens.size() - 1) / 2); + for (int i = 1; i < tokens.size() - 1; i = i + 2) { + regexReplacements.add(new RegexReplacement(tokens.get(i), tokens.get(i + 1))); + } + } else { + throw new PrivilegesConfigurationValidationException( + "A field masking expression must have the form 'field_name', 'field_name::algorithm', 'field_name::regex::replacement' or 'field_name::(regex::replacement)+'" + ); + } + } + + private FieldMaskingExpression(WildcardMatcher pattern, String source) { + this.pattern = pattern; + this.source = source; + this.algoName = null; + this.regexReplacements = null; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof FieldMaskingExpression)) { + return false; + } + FieldMaskingExpression that = (FieldMaskingExpression) o; + return Objects.equals(pattern, that.pattern) + && Objects.equals(algoName, that.algoName) + && Objects.equals(regexReplacements, that.regexReplacements); + } + + @Override + public int hashCode() { + return Objects.hash(pattern, algoName, regexReplacements); + } + + static class RegexReplacement { + private final java.util.regex.Pattern regex; + private final String replacement; + + RegexReplacement(String regex, String replacement) throws PrivilegesConfigurationValidationException { + if (!regex.startsWith("/") || !regex.endsWith("/")) { + throw new PrivilegesConfigurationValidationException("A regular expression needs to be wrapped in /.../"); + } + + try { + this.regex = java.util.regex.Pattern.compile(regex.substring(1).substring(0, regex.length() - 2)); + } catch (PatternSyntaxException e) { + throw new PrivilegesConfigurationValidationException(e.getMessage(), e); + } + + this.replacement 
= replacement; + } + + java.util.regex.Pattern getRegex() { + return regex; + } + + String getReplacement() { + return replacement; + } + + @Override + public String toString() { + return "/" + regex + "/::" + replacement; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof RegexReplacement that)) return false; + return Objects.equals(regex.pattern(), that.regex.pattern()) && Objects.equals(replacement, that.replacement); + } + + @Override + public int hashCode() { + return Objects.hash(regex.pattern(), replacement); + } + } + + @Override + public String toString() { + return source; + } + + String getAlgoName() { + return algoName; + } + + List getRegexReplacements() { + return regexReplacements; + } + + WildcardMatcher getPattern() { + return pattern; + } + + String getSource() { + return source; + } + } + + public static class Config { + public static Config fromSettings(Settings settings) { + return new Config(settings.get(ConfigConstants.SECURITY_MASKED_FIELDS_ALGORITHM_DEFAULT), Salt.from(settings)); + } + + public static final Config DEFAULT = fromSettings(Settings.EMPTY); + + private final String defaultHashAlgorithm; + private final Salt salt; + + Config(String defaultHashAlgorithm, Salt salt) { + this.defaultHashAlgorithm = defaultHashAlgorithm; + this.salt = salt; + } + + public String getDefaultHashAlgorithm() { + return defaultHashAlgorithm; + } + + public Salt getSalt() { + return salt; + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FieldPrivileges.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldPrivileges.java new file mode 100644 index 0000000000..83eab0f13a --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FieldPrivileges.java @@ -0,0 +1,378 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * 
compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.regex.PatternSyntaxException; + +import com.google.common.collect.ImmutableList; + +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.common.settings.Settings; +import org.opensearch.security.privileges.PrivilegesConfigurationValidationException; +import org.opensearch.security.privileges.PrivilegesEvaluationContext; +import org.opensearch.security.privileges.PrivilegesEvaluationException; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; +import org.opensearch.security.support.WildcardMatcher; + +/** + * This class converts role configuration into pre-computed, optimized data structures for checking FLS privileges. + *

+ * With the exception of the statefulRules property, instances of this class are immutable. The life-cycle of an + * instance of this class corresponds to the life-cycle of the role configuration. If the role configuration is changed, + * a new instance needs to be built. + *

+ * Instances of this class are managed by DlsFlsProcessedConfig. + */ +public class FieldPrivileges extends AbstractRuleBasedPrivileges { + public FieldPrivileges(SecurityDynamicConfiguration roles, Map indexMetadata, Settings settings) { + super(roles, indexMetadata, FieldPrivileges::roleToRule, settings); + } + + static FlsRule roleToRule(RoleV7.Index rolePermissions) throws PrivilegesConfigurationValidationException { + List flsPatterns = rolePermissions.getFls(); + + if (flsPatterns != null && !flsPatterns.isEmpty()) { + return FlsRule.from(rolePermissions); + } else { + return null; + } + } + + @Override + protected FlsRule unrestricted() { + return FlsRule.ALLOW_ALL; + } + + @Override + protected FlsRule fullyRestricted() { + return FlsRule.DENY_ALL; + } + + @Override + protected FlsRule compile(PrivilegesEvaluationContext context, Collection rules) throws PrivilegesEvaluationException { + return FlsRule.merge(rules); + } + + /** + * Represents a set of FlsPatterns for a specific index. + */ + public static class FlsRule extends AbstractRuleBasedPrivileges.Rule { + static FlsRule of(String... 
rules) throws PrivilegesConfigurationValidationException { + return from(FlsPattern.parse(Arrays.asList(rules)), ImmutableList.of()); + } + + static FlsRule from(RoleV7.Index role) throws PrivilegesConfigurationValidationException { + return from(FlsPattern.parse(role.getFls()), ImmutableList.of(role)); + } + + static FlsRule from(List flsPatterns, ImmutableList sourceRoles) + throws PrivilegesConfigurationValidationException { + Set flsPatternsIncludingObjectsOnly = new HashSet<>(); + + for (FlsPattern flsPattern : flsPatterns) { + flsPatternsIncludingObjectsOnly.addAll(flsPattern.getParentObjectPatterns()); + } + + // If there are already explicit exclusions on certain object-only inclusions, we can remove these again + flsPatternsIncludingObjectsOnly.removeAll(flsPatterns); + + return new FlsRule(flsPatterns, flsPatternsIncludingObjectsOnly, sourceRoles); + } + + static FlsRule merge(Collection rules) { + if (rules.size() == 1) { + return rules.iterator().next(); + } + + Set patterns = new HashSet<>(); + Set objectOnlyPatterns = new HashSet<>(); + ImmutableList.Builder roles = ImmutableList.builderWithExpectedSize(rules.size()); + + for (FlsRule flsRule : rules) { + patterns.addAll(flsRule.patterns); + objectOnlyPatterns.addAll(flsRule.objectOnlyPatterns); + roles.addAll(flsRule.sourceRole); + } + + objectOnlyPatterns.removeAll(patterns); + + return new FlsRule(patterns, objectOnlyPatterns, roles.build()); + } + + public static final FlsRule ALLOW_ALL = new FlsRule(ImmutableList.of(), ImmutableList.of(), ImmutableList.of()); + public static final FlsRule DENY_ALL = new FlsRule( + ImmutableList.of(FlsPattern.EXCLUDE_ALL), + ImmutableList.of(), + ImmutableList.of() + ); + + final ImmutableList sourceRole; + final ImmutableList patterns; + final ImmutableList effectivePatterns; + final ImmutableList objectOnlyPatterns; + final boolean allowAll; + final boolean excluding; + + FlsRule( + Collection patterns, + Collection flsPatternsIncludingObjectsOnly, + 
ImmutableList sourceRole + ) { + this.sourceRole = sourceRole; + + Set flsPatternsExcluding = new HashSet<>(patterns.size()); + Set flsPatternsIncluding = new HashSet<>(patterns.size()); + + for (FlsPattern flsPattern : patterns) { + if (flsPattern.isExcluded()) { + flsPatternsExcluding.add(flsPattern); + } else { + flsPatternsIncluding.add(flsPattern); + } + } + + int exclusions = flsPatternsExcluding.size(); + int inclusions = flsPatternsIncluding.size(); + + if (exclusions == 0 && inclusions == 0) { + // Empty + this.effectivePatterns = this.patterns = ImmutableList.of(FlsPattern.INCLUDE_ALL); + this.excluding = false; + this.allowAll = true; + } else if (exclusions != 0 && inclusions == 0) { + // Only exclusions + this.effectivePatterns = this.patterns = ImmutableList.copyOf(flsPatternsExcluding); + this.excluding = true; + this.allowAll = false; + } else if (exclusions == 0 && inclusions != 0) { + // Only inclusions + this.effectivePatterns = this.patterns = ImmutableList.copyOf(flsPatternsIncluding); + this.excluding = false; + this.allowAll = flsPatternsIncluding.contains(FlsPattern.INCLUDE_ALL); + } else { + // Mixed inclusions and exclusions + // + // While the docs say that mixing inclusions and exclusions is not supported, the original + // implementation only regarded exclusions and disregarded inclusions if these were mixed. + // We are mirroring this behaviour here. It might make sense to rethink the semantics here, + // though, as there might be semantics which make more sense. From a UX POV, the current behavior + // can be quite confusing. 
+ // + // See: + // https://github.com/opensearch-project/security/blob/e73fc24509363cb1573607c6cf47c98780fc89de/src/main/java/org/opensearch/security/configuration/DlsFlsFilterLeafReader.java#L658-L662 + // https://opensearch.org/docs/latest/security/access-control/field-level-security/ + this.patterns = ImmutableList.copyOf(patterns); + this.effectivePatterns = ImmutableList.copyOf(flsPatternsExcluding); + this.excluding = true; + this.allowAll = false; + } + + this.objectOnlyPatterns = ImmutableList.copyOf(flsPatternsIncludingObjectsOnly); + } + + public boolean isAllowed(String field) { + if (isAllowAll()) { + return true; + } + + field = stripKeywordSuffix(field); + + if (excluding) { + for (FlsPattern pattern : this.effectivePatterns) { + assert pattern.isExcluded(); + if (pattern.getPattern().test(field)) { + return false; + } + } + return true; + } else { + // including + for (FlsPattern pattern : this.effectivePatterns) { + assert !pattern.isExcluded(); + if (pattern.getPattern().test(field)) { + return true; + } + } + return false; + } + } + + public boolean isObjectAllowed(String field) { + if (excluding) { + return isAllowed(field); + } + + for (FlsPattern pattern : this.objectOnlyPatterns) { + if (pattern.getPattern().test(field)) { + return true; + } + } + + return false; + } + + public boolean isAllowAll() { + return allowAll; + } + + @Override + public String toString() { + if (isAllowAll()) { + return "FLS:*"; + } else { + return "FLS:" + patterns; + } + } + + public List getSource() { + return patterns.stream().map(FlsPattern::getSource).collect(ImmutableList.toImmutableList()); + } + + @Override + public boolean isUnrestricted() { + return this.isAllowAll(); + } + + /** + * See https://github.com/opensearch-project/security/pull/2375 + */ + static String stripKeywordSuffix(String field) { + if (field.endsWith(".keyword")) { + return field.substring(0, field.length() - ".keyword".length()); + } else { + return field; + } + } + } + + /** + * 
Represents a single FLS pattern that is matched again a field name. + *

+ * FLS patterns can look like this: + *

    + *
  • field - just a simple field name, included in the visible fields + *
  • field* - a pattern on a field name, included in the visible fields + *
  • ~field - a simple field name, excluded from the visible fields (the prefix ! is also supported for legacy reasons, but it is undocumented) + *
  • field.field - a field inside another field + *
  • Regular expressions enclosed in /.../ (undocumented, does not pair well with nested objects) + *
  • Any combination of above + *
+ */ + public static class FlsPattern { + public static final FlsPattern INCLUDE_ALL = new FlsPattern(WildcardMatcher.ANY, false, "*"); + public static final FlsPattern EXCLUDE_ALL = new FlsPattern(WildcardMatcher.ANY, true, "~*"); + + /** + * True if the attribute is supposed to be excluded (i.e., pattern started with ~), false otherwise. + */ + private final boolean excluded; + + /** + * The compiled pattern (excluding leading ~) + */ + private final WildcardMatcher pattern; + + /** + * The original string + */ + private final String source; + + public FlsPattern(String string) throws PrivilegesConfigurationValidationException { + try { + if (string.startsWith("~") || string.startsWith("!")) { + excluded = true; + pattern = WildcardMatcher.from(string.substring(1)); + } else { + pattern = WildcardMatcher.from(string); + excluded = false; + } + + this.source = string; + } catch (PatternSyntaxException e) { + throw new PrivilegesConfigurationValidationException("Invalid FLS pattern " + string, e); + } + } + + FlsPattern(WildcardMatcher pattern, boolean excluded, String source) { + this.pattern = pattern; + this.excluded = excluded; + this.source = source; + } + + public String getSource() { + return source; + } + + public WildcardMatcher getPattern() { + return pattern; + } + + public boolean isExcluded() { + return excluded; + } + + @Override + public String toString() { + return source; + } + + List getParentObjectPatterns() { + if (excluded || source.indexOf('.') == -1 || (source.startsWith("/") && source.endsWith("/"))) { + return Collections.emptyList(); + } + + List subPatterns = new ArrayList<>(); + + for (int pos = source.indexOf('.'); pos != -1; pos = source.indexOf('.', pos + 1)) { + String subString = source.substring(0, pos); + + subPatterns.add(new FlsPattern(WildcardMatcher.from(subString), false, subString)); + } + + return subPatterns; + } + + @Override + public boolean equals(Object o) { + if (o instanceof FlsPattern that) { + return 
this.source.equals(that.source); + } else { + return false; + } + } + + @Override + public int hashCode() { + return source.hashCode(); + } + + public static List parse(List flsPatternStrings) throws PrivilegesConfigurationValidationException { + List flsPatterns = new ArrayList<>(flsPatternStrings.size()); + + for (String flsPatternSource : flsPatternStrings) { + flsPatterns.add(new FlsPattern(flsPatternSource)); + } + + return flsPatterns; + } + + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilter.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilter.java new file mode 100644 index 0000000000..9f36d2ef5c --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsDocumentFilter.java @@ -0,0 +1,207 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.util.ArrayDeque; +import java.util.Deque; +import java.util.Set; + +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonToken; + +/** + * Implements document transformation for FLS and field masking using a chained streaming parser and generator. + * This provides optimal throughput while keeping the heap footprint low. + *

+ * This class is supposed to operate on _source documents. It will filter these document and remove fields disallowed + * by FLS, and mask fields when required for field masking. + *

+ * While FLS applies to attributes of any type, field masking is only available for string valued attributes. + */ +class FlsDocumentFilter { + private static final JsonFactory JSON_FACTORY = new JsonFactory(); + + static byte[] filter( + byte[] bytes, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) throws IOException { + try (InputStream in = new ByteArrayInputStream(bytes); ByteArrayOutputStream out = new ByteArrayOutputStream()) { + filter(in, out, flsRule, fieldMaskingRule, metaFields); + return out.toByteArray(); + } + } + + static void filter( + InputStream in, + OutputStream out, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) throws IOException { + try (JsonParser parser = JSON_FACTORY.createParser(in); JsonGenerator generator = JSON_FACTORY.createGenerator(out)) { + new FlsDocumentFilter(parser, generator, flsRule, fieldMaskingRule, metaFields).copy(); + } + } + + private final JsonParser parser; + private final JsonGenerator generator; + private final FieldPrivileges.FlsRule flsRule; + private final FieldMasking.FieldMaskingRule fieldMaskingRule; + + /** + * Names of meta fields. Meta fields will be always kept included in the documents, even if the FLS or + * fieldMaskingRule would forbid them. + */ + private final Set metaFields; + + /** + * A stack of field names. The first element will be the name of the attribute in the root object. Does not include + * fullParentName. 
+ */ + private Deque nameStack = new ArrayDeque<>(); + + FlsDocumentFilter( + JsonParser parser, + JsonGenerator generator, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) { + this.parser = parser; + this.generator = generator; + this.flsRule = flsRule; + this.fieldMaskingRule = fieldMaskingRule; + this.metaFields = metaFields; + } + + @SuppressWarnings("incomplete-switch") + private void copy() throws IOException { + // queuedFieldName will contain the unqualified name of a field that was encountered, but not yet written. + // It is necessary to queue the field names because it can depend on the type of the following value whether + // the field/value pair will be written: If the value is object-valued, we will also start writing the object + // if we expect the object to contain allowed values, even if the object itself is not fully allowed. + String queuedFieldName = null; + // fullCurrentName contains the qualified name of the current field. Changes for every FIELD_NAME token. Does + // include names of parent objects concatenated by ".". If the current field is named "c" and the parent + // objects are named "a", "b", this will contain "a.b.c". + String fullCurrentName = null; + // fullParentName contains the qualified name of the object containing the current field. Will be null if the + // current field is at the root object of the document. + String fullParentName = null; + + for (JsonToken token = parser.currentToken() != null ? parser.currentToken() : parser.nextToken(); token != null; token = parser + .nextToken()) { + + if (queuedFieldName != null) { + boolean startOfObjectOrArray = (token == JsonToken.START_OBJECT || token == JsonToken.START_ARRAY); + String fullQueuedFieldName = fullParentName == null ? queuedFieldName : fullParentName + "." 
+ queuedFieldName; + queuedFieldName = null; + + if (metaFields.contains(fullQueuedFieldName) + || flsRule.isAllowed(fullQueuedFieldName) + || (startOfObjectOrArray && flsRule.isObjectAllowed(fullQueuedFieldName))) { + generator.writeFieldName(parser.currentName()); + fullCurrentName = fullQueuedFieldName; + } else { + // If the current field name is disallowed by FLS, we will skip the next token. + // If the next token is an object or array start, all the child tokens will be also skipped + if (startOfObjectOrArray) { + parser.skipChildren(); + } + continue; + } + } + + switch (token) { + case FIELD_NAME: + // We do not immediately write field names, because we need to know the type of the value + // when checking FLS rules + queuedFieldName = parser.currentName(); + break; + + case START_OBJECT: + generator.writeStartObject(); + if (fullParentName != null) { + nameStack.add(fullParentName); + } + fullParentName = fullCurrentName; + break; + + case END_OBJECT: + generator.writeEndObject(); + fullCurrentName = fullParentName; + if (nameStack.isEmpty()) { + fullParentName = null; + } else { + fullParentName = nameStack.removeLast(); + } + break; + + case START_ARRAY: + generator.writeStartArray(); + break; + + case END_ARRAY: + generator.writeEndArray(); + break; + + case VALUE_TRUE: + generator.writeBoolean(Boolean.TRUE); + break; + + case VALUE_FALSE: + generator.writeBoolean(Boolean.FALSE); + break; + + case VALUE_NULL: + generator.writeNull(); + break; + + case VALUE_NUMBER_FLOAT: + generator.writeNumber(parser.getDecimalValue()); + break; + + case VALUE_NUMBER_INT: + generator.writeNumber(parser.getBigIntegerValue()); + break; + + case VALUE_STRING: + FieldMasking.FieldMaskingRule.Field field = fieldMaskingRule.get(fullCurrentName); + + if (field != null) { + generator.writeString(field.apply(parser.getText())); + } else { + generator.writeString(parser.getText()); + } + break; + + case VALUE_EMBEDDED_OBJECT: + 
generator.writeEmbeddedObject(parser.getEmbeddedObject()); + break; + + default: + throw new IllegalStateException("Unexpected token: " + token); + + } + + } + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/FlsStoredFieldVisitor.java b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsStoredFieldVisitor.java new file mode 100644 index 0000000000..e504eed41d --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/FlsStoredFieldVisitor.java @@ -0,0 +1,130 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.io.IOException; +import java.util.Set; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.StoredFieldVisitor; + +import org.opensearch.OpenSearchException; + +/** + * Applies FLS and field masking while reading documents. This does two things: + *

    + *
  • Filter the _source document and remove fields disallowed by FLS, and mask fields when required for field masking
  • + *
  • Filter out other fields disallowed by FLS by using the needsField() method
  • + *
+ */ +public class FlsStoredFieldVisitor extends StoredFieldVisitor { + private static final Logger log = LogManager.getLogger(FlsStoredFieldVisitor.class); + + private final StoredFieldVisitor delegate; + private final FieldPrivileges.FlsRule flsRule; + private final FieldMasking.FieldMaskingRule fieldMaskingRule; + private final Set metaFields; + + public FlsStoredFieldVisitor( + StoredFieldVisitor delegate, + FieldPrivileges.FlsRule flsRule, + FieldMasking.FieldMaskingRule fieldMaskingRule, + Set metaFields + ) { + super(); + this.delegate = delegate; + this.flsRule = flsRule; + this.fieldMaskingRule = fieldMaskingRule; + this.metaFields = metaFields; + + if (log.isDebugEnabled()) { + log.debug("Created FlsStoredFieldVisitor for {}; {}", flsRule, fieldMaskingRule); + } + } + + @Override + public void binaryField(FieldInfo fieldInfo, byte[] value) throws IOException { + if (fieldInfo.name.equals("_source")) { + try { + delegate.binaryField(fieldInfo, FlsDocumentFilter.filter(value, flsRule, fieldMaskingRule, metaFields)); + } catch (IOException e) { + throw new OpenSearchException("Cannot filter source of document", e); + } + } else { + // See https://github.com/opensearch-project/security/pull/4826 + FieldMasking.FieldMaskingRule.Field field = this.fieldMaskingRule.get(fieldInfo.name); + + if (field != null) { + delegate.binaryField(fieldInfo, field.apply(value)); + } else { + delegate.binaryField(fieldInfo, value); + } + } + } + + @Override + public void stringField(FieldInfo fieldInfo, String value) throws IOException { + FieldMasking.FieldMaskingRule.Field field = this.fieldMaskingRule.get(fieldInfo.name); + + if (field != null) { + delegate.stringField(fieldInfo, field.apply(value)); + } else { + delegate.stringField(fieldInfo, value); + } + } + + @Override + public Status needsField(FieldInfo fieldInfo) throws IOException { + return metaFields.contains(fieldInfo.name) || flsRule.isAllowed(fieldInfo.name) ? 
delegate.needsField(fieldInfo) : Status.NO; + } + + @Override + public int hashCode() { + return delegate.hashCode(); + } + + @Override + public void intField(final FieldInfo fieldInfo, final int value) throws IOException { + delegate.intField(fieldInfo, value); + } + + @Override + public void longField(final FieldInfo fieldInfo, final long value) throws IOException { + delegate.longField(fieldInfo, value); + } + + @Override + public void floatField(final FieldInfo fieldInfo, final float value) throws IOException { + delegate.floatField(fieldInfo, value); + } + + @Override + public void doubleField(final FieldInfo fieldInfo, final double value) throws IOException { + delegate.doubleField(fieldInfo, value); + } + + @Override + public boolean equals(final Object obj) { + return delegate.equals(obj); + } + + @Override + public String toString() { + return delegate.toString(); + } + + public StoredFieldVisitor delegate() { + return this.delegate; + } + +} diff --git a/src/main/java/org/opensearch/security/privileges/dlsfls/IndexToRuleMap.java b/src/main/java/org/opensearch/security/privileges/dlsfls/IndexToRuleMap.java new file mode 100644 index 0000000000..2c359af032 --- /dev/null +++ b/src/main/java/org/opensearch/security/privileges/dlsfls/IndexToRuleMap.java @@ -0,0 +1,61 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ +package org.opensearch.security.privileges.dlsfls; + +import java.util.function.Predicate; + +import com.google.common.collect.ImmutableMap; + +/** + * Maps index names to DLS/FLS/FM rules. + *

+ * This only contains index names, not any alias or data stream names. + *

+ * This map should be only used when really necessary, as computing a whole map of indices can be expensive. + * It should be preferred to directly query the privilege status of indices using the getRestriction() methods + * of the sub-classes of AbstractRuleBasedPrivileges. + */ +public class IndexToRuleMap { + private static final IndexToRuleMap UNRESTRICTED = new IndexToRuleMap(ImmutableMap.of()); + + private final ImmutableMap indexMap; + + IndexToRuleMap(ImmutableMap indexMap) { + this.indexMap = indexMap; + } + + public boolean isUnrestricted() { + return this.indexMap.isEmpty() || this.indexMap.values().stream().allMatch(Rule::isUnrestricted); + } + + public ImmutableMap getIndexMap() { + return indexMap; + } + + public boolean containsAny(Predicate predicate) { + if (indexMap.isEmpty()) { + return false; + } + + for (Rule rule : this.indexMap.values()) { + if (predicate.test(rule)) { + return true; + } + } + + return false; + } + + @SuppressWarnings("unchecked") + public static IndexToRuleMap unrestricted() { + return (IndexToRuleMap) UNRESTRICTED; + } +} diff --git a/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java b/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java index 4a4e714348..ebca3e652e 100644 --- a/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java +++ b/src/main/java/org/opensearch/security/resolver/IndexResolverReplacer.java @@ -30,6 +30,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.EnumSet; import java.util.HashSet; import java.util.Iterator; import java.util.List; @@ -37,6 +38,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Supplier; import java.util.regex.PatternSyntaxException; import java.util.stream.Collectors; @@ -104,13 +106,17 @@ public class IndexResolverReplacer { private static final Set NULL_SET = new HashSet<>(Collections.singleton(null)); private 
final Logger log = LogManager.getLogger(this.getClass()); private final IndexNameExpressionResolver resolver; - private final ClusterService clusterService; + private final Supplier clusterStateSupplier; private final ClusterInfoHolder clusterInfoHolder; private volatile boolean respectRequestIndicesOptions = false; - public IndexResolverReplacer(IndexNameExpressionResolver resolver, ClusterService clusterService, ClusterInfoHolder clusterInfoHolder) { + public IndexResolverReplacer( + IndexNameExpressionResolver resolver, + Supplier clusterStateSupplier, + ClusterInfoHolder clusterInfoHolder + ) { this.resolver = resolver; - this.clusterService = clusterService; + this.clusterStateSupplier = clusterStateSupplier; this.clusterInfoHolder = clusterInfoHolder; } @@ -236,10 +242,10 @@ private void resolveIndexPatterns( final RemoteClusterService remoteClusterService = OpenSearchSecurityPlugin.GuiceHolder.getRemoteClusterService(); - if (remoteClusterService.isCrossClusterSearchEnabled() && enableCrossClusterResolution) { + if (remoteClusterService != null && remoteClusterService.isCrossClusterSearchEnabled() && enableCrossClusterResolution) { remoteIndices = new HashSet<>(); final Map remoteClusterIndices = OpenSearchSecurityPlugin.GuiceHolder.getRemoteClusterService() - .groupIndices(indicesOptions, original, idx -> resolver.hasIndexAbstraction(idx, clusterService.state())); + .groupIndices(indicesOptions, original, idx -> resolver.hasIndexAbstraction(idx, clusterStateSupplier.get())); final Set remoteClusters = remoteClusterIndices.keySet() .stream() .filter(k -> !RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY.equals(k)) @@ -292,7 +298,7 @@ private void resolveIndexPatterns( } else { - final ClusterState state = clusterService.state(); + final ClusterState state = clusterStateSupplier.get(); final Set dateResolvedLocalRequestedPatterns = localRequestedPatterns.stream() .map(resolver::resolveDateMathExpression) .collect(Collectors.toSet()); @@ -425,6 +431,10 @@ public 
String[] provide(String[] original, Object request, boolean supportsRepla }, false); } + public boolean replace(final TransportRequest request, boolean retainMode, Collection replacements) { + return replace(request, retainMode, replacements.toArray(new String[replacements.size()])); + } + public Resolved resolveRequest(final Object request) { if (log.isDebugEnabled()) { log.debug("Resolve aliases, indices and types from {}", request.getClass().getSimpleName()); @@ -449,6 +459,11 @@ public final static class Resolved { SearchRequest.DEFAULT_INDICES_OPTIONS ); + private static final IndicesOptions EXACT_INDEX_OPTIONS = new IndicesOptions( + EnumSet.of(IndicesOptions.Option.FORBID_ALIASES_TO_MULTIPLE_INDICES), + EnumSet.noneOf(IndicesOptions.WildcardStates.class) + ); + private final Set aliases; private final Set allIndices; private final Set originalRequested; @@ -485,8 +500,12 @@ public Set getAllIndices() { } public Set getAllIndicesResolved(ClusterService clusterService, IndexNameExpressionResolver resolver) { + return getAllIndicesResolved(clusterService::state, resolver); + } + + public Set getAllIndicesResolved(Supplier clusterStateSupplier, IndexNameExpressionResolver resolver) { if (isLocalAll) { - return new HashSet<>(Arrays.asList(resolver.concreteIndexNames(clusterService.state(), indicesOptions, "*"))); + return new HashSet<>(Arrays.asList(resolver.concreteIndexNames(clusterStateSupplier.get(), indicesOptions, "*"))); } else { return allIndices; } @@ -550,6 +569,11 @@ public boolean equals(Object obj) { } else if (!remoteIndices.equals(other.remoteIndices)) return false; return true; } + + public static Resolved ofIndex(String index) { + ImmutableSet indexSet = ImmutableSet.of(index); + return new Resolved(ImmutableSet.of(), indexSet, indexSet, ImmutableSet.of(), EXACT_INDEX_OPTIONS); + } } private List renamedIndices(final RestoreSnapshotRequest request, final List filteredIndices) { diff --git 
a/src/main/java/org/opensearch/security/securityconf/ConfigModel.java b/src/main/java/org/opensearch/security/securityconf/ConfigModel.java index 33af51257c..7429a2c776 100644 --- a/src/main/java/org/opensearch/security/securityconf/ConfigModel.java +++ b/src/main/java/org/opensearch/security/securityconf/ConfigModel.java @@ -38,7 +38,5 @@ public abstract class ConfigModel { public abstract Set mapSecurityRoles(User user, TransportAddress caller); - public abstract SecurityRoles getSecurityRoles(); - public abstract Set getAllConfiguredTenantNames(); } diff --git a/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java b/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java index f78c173202..a30eda73ba 100644 --- a/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java +++ b/src/main/java/org/opensearch/security/securityconf/ConfigModelV7.java @@ -17,15 +17,12 @@ package org.opensearch.security.securityconf; -import java.util.Arrays; -import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Objects; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.Callable; @@ -34,12 +31,10 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; -import java.util.function.Function; import java.util.regex.Pattern; import java.util.stream.Collectors; import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.ImmutableSet; import com.google.common.collect.ListMultimap; import com.google.common.collect.MultimapBuilder.SetMultimapBuilder; import com.google.common.collect.SetMultimap; @@ -47,35 +42,24 @@ import org.apache.logging.log4j.Logger; import org.opensearch.ExceptionsHelper; -import org.opensearch.action.support.IndicesOptions; -import 
org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.set.Sets; import org.opensearch.core.common.transport.TransportAddress; -import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.security.privileges.UserAttributes; -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; import org.opensearch.security.securityconf.impl.v7.ActionGroupsV7; import org.opensearch.security.securityconf.impl.v7.RoleMappingsV7; import org.opensearch.security.securityconf.impl.v7.RoleV7; -import org.opensearch.security.securityconf.impl.v7.RoleV7.Index; import org.opensearch.security.securityconf.impl.v7.TenantV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.support.WildcardMatcher; import org.opensearch.security.user.User; -import static org.opensearch.cluster.metadata.IndexAbstraction.Type.ALIAS; -import static org.opensearch.cluster.metadata.IndexAbstraction.Type.DATA_STREAM; - public class ConfigModelV7 extends ConfigModel { protected final Logger log = LogManager.getLogger(this.getClass()); private ConfigConstants.RolesMappingResolution rolesMappingResolution; private FlattenedActionGroups actionGroups; - private SecurityRoles securityRoles = null; private TenantHolder tenantHolder; private RoleMappingHolder roleMappingHolder; private SecurityDynamicConfiguration roles; @@ -106,7 +90,6 @@ public ConfigModelV7( } actionGroups = actiongroups != null ? 
new FlattenedActionGroups(actiongroups) : FlattenedActionGroups.EMPTY; - securityRoles = reload(roles); tenantHolder = new TenantHolder(roles, tenants); roleMappingHolder = new RoleMappingHolder(rolemappings, dcm.getHostsResolverMode()); } @@ -115,894 +98,6 @@ public Set getAllConfiguredTenantNames() { return Collections.unmodifiableSet(tenants.getCEntries().keySet()); } - public SecurityRoles getSecurityRoles() { - return securityRoles; - } - - private SecurityRoles reload(SecurityDynamicConfiguration settings) { - - final Set> futures = new HashSet<>(5000); - final ExecutorService execs = Executors.newFixedThreadPool(10); - - for (Entry securityRole : settings.getCEntries().entrySet()) { - - Future future = execs.submit(new Callable() { - - @Override - public SecurityRole call() throws Exception { - SecurityRole.Builder _securityRole = new SecurityRole.Builder(securityRole.getKey()); - - if (securityRole.getValue() == null) { - return null; - } - - final Set permittedClusterActions = actionGroups.resolve(securityRole.getValue().getCluster_permissions()); - _securityRole.addClusterPerms(permittedClusterActions); - - /*for(RoleV7.Tenant tenant: securityRole.getValue().getTenant_permissions()) { - - //if(tenant.equals(user.getName())) { - // continue; - //} - - if(isTenantsRw(tenant)) { - _securityRole.addTenant(new Tenant(tenant.getKey(), true)); - } else { - _securityRole.addTenant(new Tenant(tenant.getKey(), false)); - } - }*/ - - for (final Index permittedAliasesIndex : securityRole.getValue().getIndex_permissions()) { - - final String dls = permittedAliasesIndex.getDls(); - final List fls = permittedAliasesIndex.getFls(); - final List maskedFields = permittedAliasesIndex.getMasked_fields(); - - for (String pat : permittedAliasesIndex.getIndex_patterns()) { - IndexPattern _indexPattern = new IndexPattern(pat); - _indexPattern.setDlsQuery(dls); - _indexPattern.addFlsFields(fls); - _indexPattern.addMaskedFields(maskedFields); - 
_indexPattern.addPerm(actionGroups.resolve(permittedAliasesIndex.getAllowed_actions())); - - /*for(Entry> type: permittedAliasesIndex.getValue().getTypes(-).entrySet()) { - TypePerm typePerm = new TypePerm(type.getKey()); - final List perms = type.getValue(); - typePerm.addPerms(agr.resolvedActions(perms)); - _indexPattern.addTypePerms(typePerm); - }*/ - - _securityRole.addIndexPattern(_indexPattern); - - } - - } - - return _securityRole.build(); - } - }); - - futures.add(future); - } - - execs.shutdown(); - try { - execs.awaitTermination(30, TimeUnit.SECONDS); - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (1) while loading roles"); - return null; - } - - try { - SecurityRoles _securityRoles = new SecurityRoles(futures.size()); - for (Future future : futures) { - _securityRoles.addSecurityRole(future.get()); - } - - return _securityRoles; - } catch (InterruptedException e) { - Thread.currentThread().interrupt(); - log.error("Thread interrupted (2) while loading roles"); - return null; - } catch (ExecutionException e) { - log.error("Error while updating roles: {}", e.getCause(), e.getCause()); - throw ExceptionsHelper.convertToOpenSearchException(e); - } - } - - // beans - - public static class SecurityRoles implements org.opensearch.security.securityconf.SecurityRoles { - - protected final Logger log = LogManager.getLogger(this.getClass()); - - final Set roles; - - private SecurityRoles(int roleCount) { - roles = new HashSet<>(roleCount); - } - - private SecurityRoles addSecurityRole(SecurityRole securityRole) { - if (securityRole != null) { - this.roles.add(securityRole); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((roles == null) ? 
0 : roles.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - SecurityRoles other = (SecurityRoles) obj; - if (roles == null) { - if (other.roles != null) return false; - } else if (!roles.equals(other.roles)) return false; - return true; - } - - @Override - public String toString() { - return "roles=" + roles; - } - - public Set getRoles() { - return Collections.unmodifiableSet(roles); - } - - public Set getRoleNames() { - return getRoles().stream().map(r -> r.getName()).collect(Collectors.toSet()); - } - - public SecurityRoles filter(Set keep) { - final SecurityRoles retVal = new SecurityRoles(roles.size()); - for (SecurityRole sr : roles) { - if (keep.contains(sr.getName())) { - retVal.addSecurityRole(sr); - } - } - return retVal; - } - - @Override - public EvaluatedDlsFlsConfig getDlsFls( - User user, - boolean dfmEmptyOverwritesAll, - IndexNameExpressionResolver resolver, - ClusterService cs, - NamedXContentRegistry namedXContentRegistry - ) { - - if (!containsDlsFlsConfig()) { - if (log.isDebugEnabled()) { - log.debug("No fls or dls found for {} in {} security roles", user, roles.size()); - } - - return EvaluatedDlsFlsConfig.EMPTY; - } - - Map> dlsQueriesByIndex = new HashMap>(); - Map> flsFields = new HashMap>(); - Map> maskedFieldsMap = new HashMap>(); - - // we capture all concrete indices that do not have any - // DLS/FLS/Masked Fields restrictions. If the dfm_empty_overwrites_all - // switch is enabled, this trumps any restrictions on those indices - // that may be imposed by other roles. 
- Set noDlsConcreteIndices = new HashSet<>(); - Set noFlsConcreteIndices = new HashSet<>(); - Set noMaskedFieldConcreteIndices = new HashSet<>(); - - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - final Set concreteIndices = ip.concreteIndexNames(user, resolver, cs); - String dls = ip.getDlsQuery(user); - - if (dls != null && dls.length() > 0) { - - for (String concreteIndex : concreteIndices) { - dlsQueriesByIndex.computeIfAbsent(concreteIndex, (key) -> new HashSet()).add(dls); - } - } else if (dfmEmptyOverwritesAll) { - noDlsConcreteIndices.addAll(concreteIndices); - } - - Set fls = ip.getFls(); - - if (fls != null && fls.size() > 0) { - - for (String concreteIndex : concreteIndices) { - if (flsFields.containsKey(concreteIndex)) { - flsFields.get(concreteIndex).addAll(Sets.newHashSet(fls)); - } else { - flsFields.put(concreteIndex, new HashSet()); - flsFields.get(concreteIndex).addAll(Sets.newHashSet(fls)); - } - } - } else if (dfmEmptyOverwritesAll) { - noFlsConcreteIndices.addAll(concreteIndices); - } - - Set maskedFields = ip.getMaskedFields(); - - if (maskedFields != null && maskedFields.size() > 0) { - - for (String concreteIndex : concreteIndices) { - if (maskedFieldsMap.containsKey(concreteIndex)) { - maskedFieldsMap.get(concreteIndex).addAll(Sets.newHashSet(maskedFields)); - } else { - maskedFieldsMap.put(concreteIndex, new HashSet()); - maskedFieldsMap.get(concreteIndex).addAll(Sets.newHashSet(maskedFields)); - } - } - } else if (dfmEmptyOverwritesAll) { - noMaskedFieldConcreteIndices.addAll(concreteIndices); - } - } - } - if (dfmEmptyOverwritesAll) { - if (log.isDebugEnabled()) { - log.debug( - "Index patterns with no dls queries attached: {} - They will be removed from {}", - noDlsConcreteIndices, - dlsQueriesByIndex.keySet() - ); - log.debug( - "Index patterns with no fls fields attached: {} - They will be removed from {}", - noFlsConcreteIndices, - flsFields.keySet() - ); - log.debug( - "Index patterns with no 
masked fields attached: {} - They will be removed from {}", - noMaskedFieldConcreteIndices, - maskedFieldsMap.keySet() - ); - } - // removing the indices that do not have D/M/F restrictions - // from the keySet will also modify the underlying map - dlsQueriesByIndex.keySet().removeAll(noDlsConcreteIndices); - flsFields.keySet().removeAll(noFlsConcreteIndices); - maskedFieldsMap.keySet().removeAll(noMaskedFieldConcreteIndices); - } - - return new EvaluatedDlsFlsConfig(dlsQueriesByIndex, flsFields, maskedFieldsMap); - } - - // opensearchDashboards special only, terms eval - public Set getAllPermittedIndicesForDashboards( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set retVal = new HashSet<>(); - for (SecurityRole sr : roles) { - retVal.addAll(sr.getAllResolvedPermittedIndices(Resolved._LOCAL_ALL, user, actions, resolver, cs, Function.identity())); - retVal.addAll(resolved.getRemoteIndices()); - } - return Collections.unmodifiableSet(retVal); - } - - // dnfof only - public Set reduce(Resolved resolved, User user, String[] actions, IndexNameExpressionResolver resolver, ClusterService cs) { - Set retVal = new HashSet<>(); - for (SecurityRole sr : roles) { - retVal.addAll(sr.getAllResolvedPermittedIndices(resolved, user, actions, resolver, cs, Function.identity())); - } - if (log.isDebugEnabled()) { - log.debug("Reduced requested resolved indices {} to permitted indices {}.", resolved, retVal.toString()); - } - return Collections.unmodifiableSet(retVal); - } - - // return true on success - public boolean get(Resolved resolved, User user, String[] actions, IndexNameExpressionResolver resolver, ClusterService cs) { - for (SecurityRole sr : roles) { - if (ConfigModelV7.impliesTypePerm(sr.getIpatterns(), resolved, user, actions, resolver, cs)) { - return true; - } - } - return false; - } - - @Override - public boolean impliesClusterPermissionPermission(String action) { - return 
roles.stream().filter(r -> r.impliesClusterPermission(action)).count() > 0; - } - - @Override - public boolean hasExplicitClusterPermissionPermission(String action) { - return roles.stream().map(r -> matchExplicitly(r.clusterPerms)).filter(m -> m.test(action)).count() > 0; - } - - private static WildcardMatcher matchExplicitly(final WildcardMatcher matcher) { - return matcher == WildcardMatcher.ANY ? WildcardMatcher.NONE : matcher; - } - - @Override - public boolean hasExplicitIndexPermission( - final Resolved resolved, - final User user, - final String[] actions, - final IndexNameExpressionResolver resolver, - final ClusterService cs - ) { - - final Set indicesForRequest = new HashSet<>(resolved.getAllIndicesResolved(cs, resolver)); - if (indicesForRequest.isEmpty()) { - // If no indices could be found on the request there is no way to check for the explicit permissions - return false; - } - - final Set explicitlyAllowedIndices = roles.stream() - .map(role -> role.getAllResolvedPermittedIndices(resolved, user, actions, resolver, cs, SecurityRoles::matchExplicitly)) - .flatMap(Collection::stream) - .collect(Collectors.toSet()); - - if (log.isDebugEnabled()) { - log.debug( - "ExplicitIndexPermission check indices for request {}, explicitly allowed indices {}", - indicesForRequest.toString(), - explicitlyAllowedIndices.toString() - ); - } - - indicesForRequest.removeAll(explicitlyAllowedIndices); - return indicesForRequest.isEmpty(); - } - - // rolespan - public boolean impliesTypePermGlobal( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set ipatterns = new HashSet(); - roles.stream().forEach(p -> ipatterns.addAll(p.getIpatterns())); - return ConfigModelV7.impliesTypePerm(ipatterns, resolved, user, actions, resolver, cs); - } - - private boolean containsDlsFlsConfig() { - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - if (ip.hasDlsQuery() || 
ip.hasFlsFields() || ip.hasMaskedFields()) { - return true; - } - } - } - - return false; - } - - @Override - public boolean isPermittedOnSystemIndex(String indexName) { - boolean isPatternMatched = false; - boolean isPermitted = false; - for (SecurityRole role : roles) { - for (IndexPattern ip : role.getIpatterns()) { - WildcardMatcher wildcardMatcher = WildcardMatcher.from(ip.indexPattern); - if (wildcardMatcher.test(indexName)) { - isPatternMatched = true; - } - if (ip.perms.contains(ConfigConstants.SYSTEM_INDEX_PERMISSION)) { - isPermitted = true; - } - } - } - return isPatternMatched && isPermitted; - } - } - - public static class SecurityRole { - private final String name; - private final Set ipatterns; - private final WildcardMatcher clusterPerms; - - public static final class Builder { - private final String name; - private final Set clusterPerms = new HashSet<>(); - private final Set ipatterns = new HashSet<>(); - - public Builder(String name) { - this.name = Objects.requireNonNull(name); - } - - public Builder addIndexPattern(IndexPattern indexPattern) { - this.ipatterns.add(indexPattern); - return this; - } - - public Builder addClusterPerms(Collection clusterPerms) { - if (clusterPerms != null) { - this.clusterPerms.addAll(clusterPerms); - } - return this; - } - - public SecurityRole build() { - return new SecurityRole(name, ipatterns, WildcardMatcher.from(clusterPerms)); - } - } - - private SecurityRole(String name, Set ipatterns, WildcardMatcher clusterPerms) { - this.name = Objects.requireNonNull(name); - this.ipatterns = ipatterns; - this.clusterPerms = clusterPerms; - } - - private boolean impliesClusterPermission(String action) { - return clusterPerms.test(action); - } - - // get indices which are permitted for the given types and actions - // dnfof + opensearchDashboards special only - private Set getAllResolvedPermittedIndices( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs, - 
Function matcherModification - ) { - - final Set retVal = new HashSet<>(); - for (IndexPattern p : ipatterns) { - // what if we cannot resolve one (for create purposes) - final boolean patternMatch = matcherModification.apply(p.getPerms()).matchAll(actions); - - // final Set tperms = p.getTypePerms(); - // for (TypePerm tp : tperms) { - // if (WildcardMatcher.matchAny(tp.typePattern, resolved.getTypes(-).toArray(new String[0]))) { - // patternMatch = WildcardMatcher.matchAll(tp.perms.toArray(new String[0]), actions); - // } - // } - if (patternMatch) { - // resolved but can contain patterns for nonexistent indices - final WildcardMatcher permitted = WildcardMatcher.from(p.attemptResolveIndexNames(user, resolver, cs)); // maybe they do - // not exist - final Set res = new HashSet<>(); - if (!resolved.isLocalAll() && !resolved.getAllIndices().contains("*") && !resolved.getAllIndices().contains("_all")) { - // resolved but can contain patterns for nonexistent indices - resolved.getAllIndices().stream().filter(permitted).forEach(res::add); - } else { - // we want all indices so just return what's permitted - - // #557 - // final String[] allIndices = resolver.concreteIndexNames(cs.state(), IndicesOptions.lenientExpandOpen(), "*"); - final String[] allIndices = cs.state().metadata().getConcreteAllOpenIndices(); - Arrays.stream(allIndices).filter(permitted).forEach(res::add); - } - retVal.addAll(res); - } - } - - // all that we want and all thats permitted of them - return Collections.unmodifiableSet(retVal); - } - - /*private SecurityRole addTenant(Tenant tenant) { - if (tenant != null) { - this.tenants.add(tenant); - } - return this; - }*/ - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((clusterPerms == null) ? 0 : clusterPerms.hashCode()); - result = prime * result + ((ipatterns == null) ? 0 : ipatterns.hashCode()); - result = prime * result + ((name == null) ? 
0 : name.hashCode()); - // result = prime * result + ((tenants == null) ? 0 : tenants.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - SecurityRole other = (SecurityRole) obj; - if (clusterPerms == null) { - if (other.clusterPerms != null) return false; - } else if (!clusterPerms.equals(other.clusterPerms)) return false; - if (ipatterns == null) { - if (other.ipatterns != null) return false; - } else if (!ipatterns.equals(other.ipatterns)) return false; - if (name == null) { - if (other.name != null) return false; - } else if (!name.equals(other.name)) return false; - // if (tenants == null) { - // if (other.tenants != null) - // return false; - // } else if (!tenants.equals(other.tenants)) - // return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " " - + name - + System.lineSeparator() - + " ipatterns=" - + ipatterns - + System.lineSeparator() - + " clusterPerms=" - + clusterPerms; - } - - // public Set getTenants(User user) { - // //TODO filter out user tenants - // return Collections.unmodifiableSet(tenants); - // } - - public Set getIpatterns() { - return Collections.unmodifiableSet(ipatterns); - } - - public String getName() { - return name; - } - - } - - // sg roles - public static class IndexPattern { - private final String indexPattern; - private String dlsQuery; - private final Set fls = new HashSet<>(); - private final Set maskedFields = new HashSet<>(); - private final Set perms = new HashSet<>(); - - public IndexPattern(String indexPattern) { - super(); - this.indexPattern = Objects.requireNonNull(indexPattern); - } - - public IndexPattern addFlsFields(List flsFields) { - if (flsFields != null) { - this.fls.addAll(flsFields); - } - return this; - } - - public IndexPattern addMaskedFields(List maskedFields) { - if (maskedFields != null) { - 
this.maskedFields.addAll(maskedFields); - } - return this; - } - - public IndexPattern addPerm(Set perms) { - if (perms != null) { - this.perms.addAll(perms); - } - return this; - } - - public IndexPattern setDlsQuery(String dlsQuery) { - if (dlsQuery != null) { - this.dlsQuery = dlsQuery; - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((dlsQuery == null) ? 0 : dlsQuery.hashCode()); - result = prime * result + ((fls == null) ? 0 : fls.hashCode()); - result = prime * result + ((maskedFields == null) ? 0 : maskedFields.hashCode()); - result = prime * result + ((indexPattern == null) ? 0 : indexPattern.hashCode()); - result = prime * result + ((perms == null) ? 0 : perms.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - IndexPattern other = (IndexPattern) obj; - if (dlsQuery == null) { - if (other.dlsQuery != null) return false; - } else if (!dlsQuery.equals(other.dlsQuery)) return false; - if (fls == null) { - if (other.fls != null) return false; - } else if (!fls.equals(other.fls)) return false; - if (maskedFields == null) { - if (other.maskedFields != null) return false; - } else if (!maskedFields.equals(other.maskedFields)) return false; - if (indexPattern == null) { - if (other.indexPattern != null) return false; - } else if (!indexPattern.equals(other.indexPattern)) return false; - if (perms == null) { - if (other.perms != null) return false; - } else if (!perms.equals(other.perms)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " indexPattern=" - + indexPattern - + System.lineSeparator() - + " dlsQuery=" - + dlsQuery - + System.lineSeparator() - + " fls=" - + fls - + System.lineSeparator() - + " perms=" - + perms; - } - - public String 
getUnresolvedIndexPattern(User user) { - return UserAttributes.replaceProperties(indexPattern, user); - } - - /** Finds the indices accessible to the user and resolves them to concrete names */ - public Set concreteIndexNames(final User user, final IndexNameExpressionResolver resolver, final ClusterService cs) { - return getResolvedIndexPattern(user, resolver, cs, false); - } - - /** Finds the indices accessible to the user and attempts to resolve them to names, also includes any unresolved names */ - public Set attemptResolveIndexNames(final User user, final IndexNameExpressionResolver resolver, final ClusterService cs) { - return getResolvedIndexPattern(user, resolver, cs, true); - } - - public Set getResolvedIndexPattern( - final User user, - final IndexNameExpressionResolver resolver, - final ClusterService cs, - final boolean appendUnresolved - ) { - final String unresolved = getUnresolvedIndexPattern(user); - final ImmutableSet.Builder resolvedIndices = new ImmutableSet.Builder<>(); - - final WildcardMatcher matcher = WildcardMatcher.from(unresolved); - boolean includeDataStreams = true; - if (!(matcher instanceof WildcardMatcher.Exact)) { - final String[] aliasesAndDataStreamsForPermittedPattern = cs.state() - .getMetadata() - .getIndicesLookup() - .entrySet() - .stream() - .filter(e -> (e.getValue().getType() == ALIAS) || (e.getValue().getType() == DATA_STREAM)) - .filter(e -> matcher.test(e.getKey())) - .map(e -> e.getKey()) - .toArray(String[]::new); - if (aliasesAndDataStreamsForPermittedPattern.length > 0) { - final String[] resolvedAliasesAndDataStreamIndices = resolver.concreteIndexNames( - cs.state(), - IndicesOptions.lenientExpandOpen(), - includeDataStreams, - aliasesAndDataStreamsForPermittedPattern - ); - resolvedIndices.addAll(Arrays.asList(resolvedAliasesAndDataStreamIndices)); - } - } - - if (!(unresolved == null || unresolved.isBlank())) { - final String[] resolvedIndicesFromPattern = resolver.concreteIndexNames( - cs.state(), - 
IndicesOptions.lenientExpandOpen(), - includeDataStreams, - unresolved - ); - resolvedIndices.addAll(Arrays.asList(resolvedIndicesFromPattern)); - } - - if (appendUnresolved || resolvedIndices.build().isEmpty()) { - resolvedIndices.add(unresolved); - } - return resolvedIndices.build(); - } - - public String getDlsQuery(User user) { - return UserAttributes.replaceProperties(dlsQuery, user); - } - - public boolean hasDlsQuery() { - return dlsQuery != null && !dlsQuery.isEmpty(); - } - - public Set getFls() { - return Collections.unmodifiableSet(fls); - } - - public boolean hasFlsFields() { - return fls != null && !fls.isEmpty(); - } - - public Set getMaskedFields() { - return Collections.unmodifiableSet(maskedFields); - } - - public boolean hasMaskedFields() { - return maskedFields != null && !maskedFields.isEmpty(); - } - - public WildcardMatcher getPerms() { - return WildcardMatcher.from(perms); - } - - } - - /*public static class TypePerm { - private final String typePattern; - private final Set perms = new HashSet<>(); - - private TypePerm(String typePattern) { - super(); - this.typePattern = Objects.requireNonNull(typePattern); - /*if(IGNORED_TYPES.contains(typePattern)) { - throw new RuntimeException("typepattern '"+typePattern+"' not allowed"); - } - } - - private TypePerm addPerms(Collection perms) { - if (perms != null) { - this.perms.addAll(perms); - } - return this; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + ((perms == null) ? 0 : perms.hashCode()); - result = prime * result + ((typePattern == null) ? 
0 : typePattern.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) - return true; - if (obj == null) - return false; - if (getClass() != obj.getClass()) - return false; - TypePerm other = (TypePerm) obj; - if (perms == null) { - if (other.perms != null) - return false; - } else if (!perms.equals(other.perms)) - return false; - if (typePattern == null) { - if (other.typePattern != null) - return false; - } else if (!typePattern.equals(other.typePattern)) - return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() + " typePattern=" + typePattern + System.lineSeparator() + " perms=" + perms; - } - - public String getTypePattern() { - return typePattern; - } - - public Set getPerms() { - return Collections.unmodifiableSet(perms); - } - - }*/ - - public static class Tenant { - private final String tenant; - private final boolean readWrite; - - private Tenant(String tenant, boolean readWrite) { - super(); - this.tenant = tenant; - this.readWrite = readWrite; - } - - public String getTenant() { - return tenant; - } - - public boolean isReadWrite() { - return readWrite; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + (readWrite ? 1231 : 1237); - result = prime * result + ((tenant == null) ? 
0 : tenant.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - Tenant other = (Tenant) obj; - if (readWrite != other.readWrite) return false; - if (tenant == null) { - if (other.tenant != null) return false; - } else if (!tenant.equals(other.tenant)) return false; - return true; - } - - @Override - public String toString() { - return System.lineSeparator() - + " tenant=" - + tenant - + System.lineSeparator() - + " readWrite=" - + readWrite; - } - } - - private static final class IndexMatcherAndPermissions { - private WildcardMatcher matcher; - private WildcardMatcher perms; - - public IndexMatcherAndPermissions(Set patterns, Set perms) { - this.matcher = WildcardMatcher.from(patterns); - this.perms = WildcardMatcher.from(perms); - } - - public boolean matches(String index, String action) { - return matcher.test(index) && perms.test(action); - } - } - - private static boolean impliesTypePerm( - Set ipatterns, - Resolved resolved, - User user, - String[] requestedActions, - IndexNameExpressionResolver resolver, - ClusterService cs - ) { - Set resolvedRequestedIndices = resolved.getAllIndices(); - IndexMatcherAndPermissions[] indexMatcherAndPermissions; - if (resolved.isLocalAll()) { - indexMatcherAndPermissions = ipatterns.stream() - .filter(indexPattern -> "*".equals(indexPattern.getUnresolvedIndexPattern(user))) - .map(p -> new IndexMatcherAndPermissions(p.attemptResolveIndexNames(user, resolver, cs), p.perms)) - .toArray(IndexMatcherAndPermissions[]::new); - } else { - indexMatcherAndPermissions = ipatterns.stream() - .map(p -> new IndexMatcherAndPermissions(p.attemptResolveIndexNames(user, resolver, cs), p.perms)) - .toArray(IndexMatcherAndPermissions[]::new); - } - return resolvedRequestedIndices.stream() - .allMatch( - index -> Arrays.stream(requestedActions) - .allMatch(action -> 
Arrays.stream(indexMatcherAndPermissions).anyMatch(ipap -> ipap.matches(index, action))) - ); - } - private class TenantHolder { private SetMultimap> tenantsMM = null; diff --git a/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java b/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java index 17e08cfcfd..10402f7b56 100644 --- a/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java +++ b/src/main/java/org/opensearch/security/securityconf/DynamicConfigFactory.java @@ -103,7 +103,7 @@ private void loadStaticConfig() throws IOException { staticTenants = SecurityDynamicConfiguration.fromNode(staticTenantsJsonNode, CType.TENANTS, 2, 0, 0); } - public final static SecurityDynamicConfiguration addStatics(SecurityDynamicConfiguration original) { + public final static SecurityDynamicConfiguration addStatics(SecurityDynamicConfiguration original) { if (original.getCType() == CType.ACTIONGROUPS && !staticActionGroups.getCEntries().isEmpty()) { original.add(staticActionGroups.deepClone()); } diff --git a/src/main/java/org/opensearch/security/securityconf/EvaluatedDlsFlsConfig.java b/src/main/java/org/opensearch/security/securityconf/EvaluatedDlsFlsConfig.java deleted file mode 100644 index aa22e8729f..0000000000 --- a/src/main/java/org/opensearch/security/securityconf/EvaluatedDlsFlsConfig.java +++ /dev/null @@ -1,141 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.security.securityconf; - -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.support.WildcardMatcher; - -public class EvaluatedDlsFlsConfig { - public static EvaluatedDlsFlsConfig EMPTY = new EvaluatedDlsFlsConfig( - Collections.emptyMap(), - Collections.emptyMap(), - Collections.emptyMap() - ); - - private final Map> dlsQueriesByIndex; - private final Map> flsByIndex; - private final Map> fieldMaskingByIndex; - - public EvaluatedDlsFlsConfig( - Map> dlsQueriesByIndex, - Map> flsByIndex, - Map> fieldMaskingByIndex - ) { - this.dlsQueriesByIndex = Collections.unmodifiableMap(dlsQueriesByIndex); - this.flsByIndex = Collections.unmodifiableMap(flsByIndex); - this.fieldMaskingByIndex = Collections.unmodifiableMap(fieldMaskingByIndex); - } - - public Map> getDlsQueriesByIndex() { - return dlsQueriesByIndex; - } - - public Map> getFlsByIndex() { - return flsByIndex; - } - - public Map> getFieldMaskingByIndex() { - return fieldMaskingByIndex; - } - - public Set getAllQueries() { - int mapSize = dlsQueriesByIndex.size(); - - if (mapSize == 0) { - return Collections.emptySet(); - } else if (mapSize == 1) { - return dlsQueriesByIndex.values().iterator().next(); - } else { - Set result = new HashSet<>(); - - for (Set queries : dlsQueriesByIndex.values()) { - result.addAll(queries); - } - - return result; - } - } - - public boolean hasFls() { - return !flsByIndex.isEmpty(); - } - - public boolean hasFieldMasking() { - return !fieldMaskingByIndex.isEmpty(); - } - - public boolean hasDls() { - return !dlsQueriesByIndex.isEmpty(); - } - - public boolean isEmpty() { - return fieldMaskingByIndex.isEmpty() && flsByIndex.isEmpty() && dlsQueriesByIndex.isEmpty(); - } - - public EvaluatedDlsFlsConfig filter(Resolved indices) { - if (indices.isAllIndicesEmpty()) { - return 
EMPTY; - } else if (this.isEmpty() || indices.isLocalAll()) { - return this; - } else { - Set allIndices = indices.getAllIndices(); - - return new EvaluatedDlsFlsConfig( - filter(dlsQueriesByIndex, allIndices), - filter(flsByIndex, allIndices), - filter(fieldMaskingByIndex, allIndices) - ); - } - } - - public EvaluatedDlsFlsConfig withoutDls() { - if (!hasDls()) { - return this; - } else { - return new EvaluatedDlsFlsConfig(Collections.emptyMap(), flsByIndex, fieldMaskingByIndex); - } - } - - private Map> filter(Map> map, Set allIndices) { - if (allIndices.isEmpty() || map.isEmpty()) { - return map; - } - - HashMap> result = new HashMap<>(map.size()); - - for (Map.Entry> entry : map.entrySet()) { - if (WildcardMatcher.from(entry.getKey(), false).matchAny(allIndices)) { - result.put(entry.getKey(), entry.getValue()); - } - } - - return result; - } - - @Override - public String toString() { - return "EvaluatedDlsFlsConfig [dlsQueriesByIndex=" - + dlsQueriesByIndex - + ", flsByIndex=" - + flsByIndex - + ", fieldMaskingByIndex=" - + fieldMaskingByIndex - + "]"; - } - -} diff --git a/src/main/java/org/opensearch/security/securityconf/SecurityRoles.java b/src/main/java/org/opensearch/security/securityconf/SecurityRoles.java deleted file mode 100644 index fb25e1a21f..0000000000 --- a/src/main/java/org/opensearch/security/securityconf/SecurityRoles.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright 2015-2017 floragunn GmbH - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * - */ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.securityconf; - -import java.util.Set; - -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.core.xcontent.NamedXContentRegistry; -import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.user.User; - -public interface SecurityRoles { - - boolean impliesClusterPermissionPermission(String action0); - - boolean hasExplicitClusterPermissionPermission(String action); - - /** - * Determines if the actions are explicitly granted for indices - * @return if all indices in the request have an explicit grant for all actions - */ - boolean hasExplicitIndexPermission( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ); - - Set getRoleNames(); - - Set reduce( - Resolved requestedResolved, - User user, - String[] strings, - IndexNameExpressionResolver resolver, - ClusterService clusterService - ); - - boolean impliesTypePermGlobal( - Resolved requestedResolved, - User user, - String[] allIndexPermsRequiredA, - IndexNameExpressionResolver resolver, - ClusterService clusterService - ); - - boolean get( - Resolved requestedResolved, - User user, - String[] allIndexPermsRequiredA, - IndexNameExpressionResolver resolver, - ClusterService clusterService - ); - - EvaluatedDlsFlsConfig getDlsFls( - User user, - boolean dfmEmptyOverwritesAll, - IndexNameExpressionResolver resolver, - ClusterService clusterService, - 
NamedXContentRegistry namedXContentRegistry - ); - - Set getAllPermittedIndicesForDashboards( - Resolved resolved, - User user, - String[] actions, - IndexNameExpressionResolver resolver, - ClusterService cs - ); - - SecurityRoles filter(Set roles); - - boolean isPermittedOnSystemIndex(String indexName); -} diff --git a/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java b/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java index fb4d4afd02..790f39d12b 100644 --- a/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java +++ b/src/main/java/org/opensearch/security/securityconf/impl/SecurityDynamicConfiguration.java @@ -155,6 +155,20 @@ public static SecurityDynamicConfiguration fromNode(JsonNode json, CType< ); } + /** + * For testing only + */ + public static SecurityDynamicConfiguration fromYaml(String yaml, CType ctype) throws JsonProcessingException { + Class implementationClass = ctype.getConfigClass(); + SecurityDynamicConfiguration result = DefaultObjectMapper.YAML_MAPPER.readValue( + yaml, + DefaultObjectMapper.getTypeFactory().constructParametricType(SecurityDynamicConfiguration.class, implementationClass) + ); + result.ctype = ctype; + result.version = 2; + return result; + } + // for Jackson private SecurityDynamicConfiguration() { super(); @@ -311,6 +325,18 @@ public Class getImplementingClass() { } @SuppressWarnings("unchecked") + @JsonIgnore + public SecurityDynamicConfiguration clone() { + SecurityDynamicConfiguration result = new SecurityDynamicConfiguration(); + result.version = this.version; + result.ctype = this.ctype; + result.primaryTerm = this.primaryTerm; + result.seqNo = this.seqNo; + result._meta = this._meta; + result.centries.putAll(this.centries); + return result; + } + @JsonIgnore public SecurityDynamicConfiguration deepClone() { try { diff --git a/src/main/java/org/opensearch/security/ssl/DefaultSecurityKeyStore.java 
b/src/main/java/org/opensearch/security/ssl/DefaultSecurityKeyStore.java index 8dbd2f139a..61dac199ae 100644 --- a/src/main/java/org/opensearch/security/ssl/DefaultSecurityKeyStore.java +++ b/src/main/java/org/opensearch/security/ssl/DefaultSecurityKeyStore.java @@ -133,7 +133,9 @@ private void printJCEWarnings() { public final SslProvider sslTransportServerProvider; public final SslProvider sslTransportClientProvider; private final boolean httpSSLEnabled; + private final boolean httpSSLEnforceCertReloadDnVerification; private final boolean transportSSLEnabled; + private final boolean transportSSLEnforceCertReloadDnVerification; private ArrayList enabledHttpCiphersJDKProvider; private ArrayList enabledHttpCiphersOpenSSLProvider; @@ -166,10 +168,18 @@ public DefaultSecurityKeyStore(final Settings settings, final Path configPath) { SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED, SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED_DEFAULT ); + httpSSLEnforceCertReloadDnVerification = settings.getAsBoolean( + SSLConfigConstants.SECURITY_SSL_HTTP_ENFORCE_CERT_RELOAD_DN_VERIFICATION, + true + ); transportSSLEnabled = settings.getAsBoolean( SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED, SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED_DEFAULT ); + transportSSLEnforceCertReloadDnVerification = settings.getAsBoolean( + SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENFORCE_CERT_RELOAD_DN_VERIFICATION, + true + ); final boolean useOpenSSLForHttpIfAvailable = OpenSearchSecuritySSLPlugin.OPENSSL_SUPPORTED && settings.getAsBoolean(SSLConfigConstants.SECURITY_SSL_HTTP_ENABLE_OPENSSL_IF_AVAILABLE, true); final boolean useOpenSSLForTransportIfAvailable = OpenSearchSecuritySSLPlugin.OPENSSL_SUPPORTED @@ -422,7 +432,7 @@ public void initTransportSSLConfig() { certFromTruststore = new CertFromTruststore(truststoreProps, truststoreAlias); } - validateNewCerts(transportCerts, certFromKeystore.getCerts()); + validateNewCerts(transportCerts, certFromKeystore.getCerts(), 
transportSSLEnforceCertReloadDnVerification); transportServerSslContext = buildSSLServerContext( certFromKeystore.getServerKey(), certFromKeystore.getServerCert(), @@ -473,7 +483,7 @@ public void initTransportSSLConfig() { certFromFile = new CertFromFile(certProps); } - validateNewCerts(transportCerts, certFromFile.getCerts()); + validateNewCerts(transportCerts, certFromFile.getCerts(), transportSSLEnforceCertReloadDnVerification); transportServerSslContext = buildSSLServerContext( certFromFile.getServerPemKey(), certFromFile.getServerPemCert(), @@ -571,7 +581,7 @@ public void initHttpSSLConfig() { certFromTruststore = new CertFromTruststore(truststoreProps, truststoreAlias); } - validateNewCerts(httpCerts, certFromKeystore.getCerts()); + validateNewCerts(httpCerts, certFromKeystore.getCerts(), httpSSLEnforceCertReloadDnVerification); httpSslContext = buildSSLServerContext( certFromKeystore.getServerKey(), certFromKeystore.getServerCert(), @@ -602,7 +612,7 @@ public void initHttpSSLConfig() { ); CertFromFile certFromFile = new CertFromFile(certFileProps); - validateNewCerts(httpCerts, certFromFile.getCerts()); + validateNewCerts(httpCerts, certFromFile.getCerts(), httpSSLEnforceCertReloadDnVerification); httpSslContext = buildSSLServerContext( certFromFile.getServerPemKey(), certFromFile.getServerPemCert(), @@ -633,11 +643,16 @@ public void initHttpSSLConfig() { * If the current and new certificates are same, skip remaining checks. * For new X509 cert to be valid Issuer, Subject DN must be the same and * new certificates should expire after current ones. 
- * @param currentX509Certs Array of current x509 certificates - * @param newX509Certs Array of x509 certificates which will replace our current cert + * @param currentX509Certs Array of current x509 certificates + * @param newX509Certs Array of x509 certificates which will replace our current cert + * @param verifyValidDNs Whether to verify that new certs have valid IssuerDN, SubjectDN and SAN * @throws Exception if certificate is invalid */ - private void validateNewCerts(final X509Certificate[] currentX509Certs, final X509Certificate[] newX509Certs) throws Exception { + private void validateNewCerts( + final X509Certificate[] currentX509Certs, + final X509Certificate[] newX509Certs, + final boolean verifyValidDNs + ) throws Exception { // First time we init certs ignore validity check if (currentX509Certs == null) { @@ -654,7 +669,7 @@ private void validateNewCerts(final X509Certificate[] currentX509Certs, final X5 } // Check if new X509 certs have valid IssuerDN, SubjectDN or SAN - if (!hasValidDNs(currentX509Certs, newX509Certs)) { + if (verifyValidDNs && !hasValidDNs(currentX509Certs, newX509Certs)) { throw new Exception("New Certs do not have valid Issuer DN, Subject DN or SAN."); } } diff --git a/src/main/java/org/opensearch/security/ssl/OpenSearchSecureSettingsFactory.java b/src/main/java/org/opensearch/security/ssl/OpenSearchSecureSettingsFactory.java index 5351eea57e..43f6cc4f29 100644 --- a/src/main/java/org/opensearch/security/ssl/OpenSearchSecureSettingsFactory.java +++ b/src/main/java/org/opensearch/security/ssl/OpenSearchSecureSettingsFactory.java @@ -25,8 +25,10 @@ import org.opensearch.plugins.SecureTransportSettingsProvider; import org.opensearch.plugins.TransportExceptionHandler; import org.opensearch.security.filter.SecurityRestFilter; +import org.opensearch.security.ssl.config.CertType; import org.opensearch.security.ssl.http.netty.Netty4ConditionalDecompressor; import org.opensearch.security.ssl.http.netty.Netty4HttpRequestHeaderVerifier; 
+import org.opensearch.security.ssl.transport.SSLConfig; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportAdapterProvider; @@ -35,20 +37,23 @@ public class OpenSearchSecureSettingsFactory implements SecureSettingsFactory { private final ThreadPool threadPool; - private final SecurityKeyStore sks; + private final SslSettingsManager sslSettingsManager; private final SslExceptionHandler sslExceptionHandler; private final SecurityRestFilter restFilter; + private final SSLConfig sslConfig; public OpenSearchSecureSettingsFactory( ThreadPool threadPool, - SecurityKeyStore sks, + SslSettingsManager sslSettingsManager, SslExceptionHandler sslExceptionHandler, - SecurityRestFilter restFilter + SecurityRestFilter restFilter, + SSLConfig sslConfig ) { this.threadPool = threadPool; - this.sks = sks; + this.sslSettingsManager = sslSettingsManager; this.sslExceptionHandler = sslExceptionHandler; this.restFilter = restFilter; + this.sslConfig = sslConfig; } @Override @@ -64,14 +69,24 @@ public void onError(Throwable t) { }); } + @Override + public Optional parameters(Settings settings) { + return Optional.of(new SecureTransportParameters() { + @Override + public boolean dualModeEnabled() { + return sslConfig.isDualModeEnabled(); + } + }); + } + @Override public Optional buildSecureServerTransportEngine(Settings settings, Transport transport) throws SSLException { - return Optional.of(sks.createServerTransportSSLEngine()); + return sslSettingsManager.sslContextHandler(CertType.TRANSPORT).map(SslContextHandler::createSSLEngine); } @Override public Optional buildSecureClientTransportEngine(Settings settings, String hostname, int port) throws SSLException { - return Optional.of(sks.createClientTransportSSLEngine(hostname, port)); + return sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).map(c -> c.createSSLEngine(hostname, port)); } }); } @@ -128,7 +143,7 @@ public void onError(Throwable t) 
{ @Override public Optional buildSecureHttpServerEngine(Settings settings, HttpServerTransport transport) throws SSLException { - return Optional.of(sks.createHTTPSSLEngine()); + return sslSettingsManager.sslContextHandler(CertType.HTTP).map(SslContextHandler::createSSLEngine); } }); } diff --git a/src/main/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPlugin.java b/src/main/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPlugin.java index e6a1b47888..c12424f028 100644 --- a/src/main/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPlugin.java +++ b/src/main/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPlugin.java @@ -126,7 +126,7 @@ public class OpenSearchSecuritySSLPlugin extends Plugin implements SystemIndexPl protected final Settings settings; protected volatile SecurityRestFilter securityRestHandler; protected final SharedGroupFactory sharedGroupFactory; - protected final SecurityKeyStore sks; + protected final SslSettingsManager sslSettingsManager; protected PrincipalExtractor principalExtractor; protected final Path configPath; private final static SslExceptionHandler NOOP_SSL_EXCEPTION_HANDLER = new SslExceptionHandler() { @@ -144,7 +144,7 @@ protected OpenSearchSecuritySSLPlugin(final Settings settings, final Path config this.httpSSLEnabled = false; this.transportSSLEnabled = false; this.extendedKeyUsageEnabled = false; - this.sks = null; + this.sslSettingsManager = null; this.configPath = null; SSLConfig = new SSLConfig(false, false); @@ -246,11 +246,7 @@ public Object run() { log.error("SSL not activated for http and/or transport."); } - if (ExternalSecurityKeyStore.hasExternalSslContext(settings)) { - this.sks = new ExternalSecurityKeyStore(settings); - } else { - this.sks = new DefaultSecurityKeyStore(settings, configPath); - } + this.sslSettingsManager = new SslSettingsManager(new Environment(settings, configPath)); } @Override @@ -311,7 +307,7 @@ public List getRestHandlers( final List handlers = new ArrayList(1); if 
(!client) { - handlers.add(new SecuritySSLInfoAction(settings, configPath, restController, sks, Objects.requireNonNull(principalExtractor))); + handlers.add(new SecuritySSLInfoAction(settings, configPath, sslSettingsManager, Objects.requireNonNull(principalExtractor))); } return handlers; @@ -638,6 +634,23 @@ public List> getSettings() { Setting.longSetting(SSLConfigConstants.SECURITY_SSL_HTTP_CRL_VALIDATION_DATE, -1, -1, Property.NodeScope, Property.Filtered) ); + settings.add( + Setting.boolSetting( + SSLConfigConstants.SECURITY_SSL_HTTP_ENFORCE_CERT_RELOAD_DN_VERIFICATION, + true, + Property.NodeScope, + Property.Filtered + ) + ); + settings.add( + Setting.boolSetting( + SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENFORCE_CERT_RELOAD_DN_VERIFICATION, + true, + Property.NodeScope, + Property.Filtered + ) + ); + return settings; } @@ -674,7 +687,9 @@ public List getSettingsFilter() { @Override public Optional getSecureSettingFactory(Settings settings) { - return Optional.of(new OpenSearchSecureSettingsFactory(threadPool, sks, NOOP_SSL_EXCEPTION_HANDLER, securityRestHandler)); + return Optional.of( + new OpenSearchSecureSettingsFactory(threadPool, sslSettingsManager, NOOP_SSL_EXCEPTION_HANDLER, securityRestHandler, SSLConfig) + ); } protected Settings migrateSettings(Settings settings) { diff --git a/src/main/java/org/opensearch/security/ssl/SecureSSLSettings.java b/src/main/java/org/opensearch/security/ssl/SecureSSLSettings.java index 171bb18bb5..5aad07fbdd 100644 --- a/src/main/java/org/opensearch/security/ssl/SecureSSLSettings.java +++ b/src/main/java/org/opensearch/security/ssl/SecureSSLSettings.java @@ -36,7 +36,7 @@ public final class SecureSSLSettings { private static final Logger LOG = LogManager.getLogger(SecureSSLSettings.class); - private static final String SECURE_SUFFIX = "_secure"; + public static final String SECURE_SUFFIX = "_secure"; private static final String PREFIX = "plugins.security.ssl"; private static final String HTTP_PREFIX = PREFIX + 
".http"; private static final String TRANSPORT_PREFIX = PREFIX + ".transport"; diff --git a/src/main/java/org/opensearch/security/ssl/SslConfiguration.java b/src/main/java/org/opensearch/security/ssl/SslConfiguration.java new file mode 100644 index 0000000000..2332867bd8 --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/SslConfiguration.java @@ -0,0 +1,148 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.security.ssl; + +import java.nio.file.Path; +import java.security.AccessController; +import java.security.PrivilegedActionException; +import java.security.PrivilegedExceptionAction; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.OpenSearchException; +import org.opensearch.security.ssl.config.Certificate; +import org.opensearch.security.ssl.config.KeyStoreConfiguration; +import org.opensearch.security.ssl.config.SslParameters; +import org.opensearch.security.ssl.config.TrustStoreConfiguration; + +import io.netty.handler.codec.http2.Http2SecurityUtil; +import io.netty.handler.ssl.ApplicationProtocolConfig; +import io.netty.handler.ssl.ApplicationProtocolNames; +import io.netty.handler.ssl.SslContext; +import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.SupportedCipherSuiteFilter; + +public class SslConfiguration { + + private final static Logger LOGGER = LogManager.getLogger(SslConfiguration.class); + + private final SslParameters sslParameters; + + private final TrustStoreConfiguration trustStoreConfiguration; + + private final 
KeyStoreConfiguration keyStoreConfiguration; + + public SslConfiguration( + final SslParameters sslParameters, + final TrustStoreConfiguration trustStoreConfiguration, + final KeyStoreConfiguration keyStoreConfiguration + ) { + this.sslParameters = sslParameters; + this.trustStoreConfiguration = trustStoreConfiguration; + this.keyStoreConfiguration = keyStoreConfiguration; + } + + public List dependentFiles() { + return Stream.concat(keyStoreConfiguration.files().stream(), Stream.of(trustStoreConfiguration.file())) + .collect(Collectors.toList()); + } + + public List certificates() { + return Stream.concat(trustStoreConfiguration.loadCertificates().stream(), keyStoreConfiguration.loadCertificates().stream()) + .collect(Collectors.toList()); + } + + public SslParameters sslParameters() { + return sslParameters; + } + + @SuppressWarnings("removal") + SslContext buildServerSslContext(final boolean validateCertificates) { + try { + return AccessController.doPrivileged( + (PrivilegedExceptionAction) () -> SslContextBuilder.forServer( + keyStoreConfiguration.createKeyManagerFactory(validateCertificates) + ) + .sslProvider(sslParameters.provider()) + .clientAuth(sslParameters.clientAuth()) + .protocols(sslParameters.allowedProtocols().toArray(new String[0])) + // TODO we always add all HTTP 2 ciphers, while maybe it is better to set them differently + .ciphers( + Stream.concat( + Http2SecurityUtil.CIPHERS.stream(), + StreamSupport.stream(sslParameters.allowedCiphers().spliterator(), false) + ).collect(Collectors.toSet()), + SupportedCipherSuiteFilter.INSTANCE + ) + .sessionCacheSize(0) + .sessionTimeout(0) + .applicationProtocolConfig( + new ApplicationProtocolConfig( + ApplicationProtocolConfig.Protocol.ALPN, + // NO_ADVERTISE is currently the only mode supported by both OpenSsl and JDK providers. + ApplicationProtocolConfig.SelectorFailureBehavior.NO_ADVERTISE, + // ACCEPT is currently the only mode supported by both OpenSsl and JDK providers. 
+ ApplicationProtocolConfig.SelectedListenerFailureBehavior.ACCEPT, + ApplicationProtocolNames.HTTP_2, + ApplicationProtocolNames.HTTP_1_1 + ) + ) + .trustManager(trustStoreConfiguration.createTrustManagerFactory(validateCertificates)) + .build() + ); + } catch (PrivilegedActionException e) { + throw new OpenSearchException("Filed to build server SSL context", e); + } + } + + @SuppressWarnings("removal") + SslContext buildClientSslContext(final boolean validateCertificates) { + try { + return AccessController.doPrivileged( + (PrivilegedExceptionAction) () -> SslContextBuilder.forClient() + .sslProvider(sslParameters.provider()) + .protocols(sslParameters.allowedProtocols()) + .ciphers(sslParameters.allowedCiphers()) + .applicationProtocolConfig(ApplicationProtocolConfig.DISABLED) + .sessionCacheSize(0) + .sessionTimeout(0) + .sslProvider(sslParameters.provider()) + .keyManager(keyStoreConfiguration.createKeyManagerFactory(validateCertificates)) + .trustManager(trustStoreConfiguration.createTrustManagerFactory(validateCertificates)) + .build() + ); + } catch (PrivilegedActionException e) { + throw new OpenSearchException("Filed to build client SSL context", e); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SslConfiguration that = (SslConfiguration) o; + return Objects.equals(sslParameters, that.sslParameters) + && Objects.equals(trustStoreConfiguration, that.trustStoreConfiguration) + && Objects.equals(keyStoreConfiguration, that.keyStoreConfiguration); + } + + @Override + public int hashCode() { + return Objects.hash(sslParameters, trustStoreConfiguration, keyStoreConfiguration); + } +} diff --git a/src/main/java/org/opensearch/security/ssl/SslContextHandler.java b/src/main/java/org/opensearch/security/ssl/SslContextHandler.java new file mode 100644 index 0000000000..fae9cb27ba --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/SslContextHandler.java 
@@ -0,0 +1,206 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.security.ssl; + +import java.nio.charset.StandardCharsets; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.net.ssl.SSLEngine; + +import org.opensearch.security.ssl.config.Certificate; +import org.opensearch.transport.NettyAllocator; + +import io.netty.handler.ssl.SslContext; + +import static java.util.function.Predicate.not; + +public class SslContextHandler { + + private SslContext sslContext; + + private final SslConfiguration sslConfiguration; + + private final List loadedCertificates; + + public SslContextHandler(final SslConfiguration sslConfiguration) { + this(sslConfiguration, false); + } + + public SslContextHandler(final SslConfiguration sslConfiguration, final boolean client) { + this.sslContext = client ? 
sslConfiguration.buildClientSslContext(true) : sslConfiguration.buildServerSslContext(true); + this.sslConfiguration = sslConfiguration; + this.loadedCertificates = sslConfiguration.certificates(); + } + + public SSLEngine createSSLEngine() { + return sslContext.newEngine(NettyAllocator.getAllocator()); + } + + public SSLEngine createSSLEngine(final String hostname, final int port) { + return sslContext.newEngine(NettyAllocator.getAllocator(), hostname, port); + } + + public SslConfiguration sslConfiguration() { + return sslConfiguration; + } + + SslContext sslContext() { + return sslContext; + } + + public Stream authorityCertificates() { + return authorityCertificates(loadedCertificates); + } + + Stream authorityCertificates(final List certificates) { + return certificates.stream().filter(not(Certificate::hasKey)); + } + + public Stream keyMaterialCertificates() { + return keyMaterialCertificates(loadedCertificates); + } + + Stream keyMaterialCertificates(final List certificates) { + return certificates.stream().filter(Certificate::hasKey); + } + + void reloadSslContext() throws CertificateException { + final var newCertificates = sslConfiguration.certificates(); + + boolean hasChanges = false; + + final var loadedAuthorityCertificates = authorityCertificates().collect(Collectors.toList()); + final var loadedKeyMaterialCertificates = keyMaterialCertificates().collect(Collectors.toList()); + final var newAuthorityCertificates = authorityCertificates(newCertificates).collect(Collectors.toList()); + final var newKeyMaterialCertificates = keyMaterialCertificates(newCertificates).collect(Collectors.toList()); + + if (notSameCertificates(loadedAuthorityCertificates, newAuthorityCertificates)) { + hasChanges = true; + validateDates(newAuthorityCertificates); + } + + if (notSameCertificates(loadedKeyMaterialCertificates, newKeyMaterialCertificates)) { + hasChanges = true; + validateNewKeyMaterialCertificates( + loadedKeyMaterialCertificates, + newKeyMaterialCertificates, 
+ sslConfiguration.sslParameters().shouldValidateNewCertDNs() + ); + } + if (hasChanges) { + invalidateSessions(); + if (sslContext.isClient()) { + sslContext = sslConfiguration.buildClientSslContext(false); + } else { + sslContext = sslConfiguration.buildServerSslContext(false); + } + loadedCertificates.clear(); + loadedCertificates.addAll(newCertificates); + } + } + + private boolean notSameCertificates(final List loadedCertificates, final List newCertificates) { + final Set currentCertSignatureSet = loadedCertificates.stream() + .map(Certificate::x509Certificate) + .map(X509Certificate::getSignature) + .map(s -> new String(s, StandardCharsets.UTF_8)) + .collect(Collectors.toSet()); + final Set newCertSignatureSet = newCertificates.stream() + .map(Certificate::x509Certificate) + .map(X509Certificate::getSignature) + .map(s -> new String(s, StandardCharsets.UTF_8)) + .collect(Collectors.toSet()); + return !currentCertSignatureSet.equals(newCertSignatureSet); + } + + private void validateDates(final List newCertificates) throws CertificateException { + for (final var certificate : newCertificates) { + certificate.x509Certificate().checkValidity(); + } + } + + private void validateSubjectDns(final List loadedCertificates, final List newCertificates) + throws CertificateException { + final List currentSubjectDNs = loadedCertificates.stream().map(Certificate::subject).sorted().collect(Collectors.toList()); + final List newSubjectDNs = newCertificates.stream().map(Certificate::subject).sorted().collect(Collectors.toList()); + if (!currentSubjectDNs.equals(newSubjectDNs)) { + throw new CertificateException( + "New certificates do not have valid Subject DNs. 
Current Subject DNs " + + currentSubjectDNs + + " new Subject DNs " + + newSubjectDNs + ); + } + } + + private void validateIssuerDns(final List loadedCertificates, final List newCertificates) + throws CertificateException { + final List currentIssuerDNs = loadedCertificates.stream().map(Certificate::issuer).sorted().collect(Collectors.toList()); + final List newIssuerDNs = newCertificates.stream().map(Certificate::issuer).sorted().collect(Collectors.toList()); + if (!currentIssuerDNs.equals(newIssuerDNs)) { + throw new CertificateException( + "New certificates do not have valid Issuer DNs. Current Issuer DNs: " + + currentIssuerDNs + + " new Issuer DNs: " + + newIssuerDNs + ); + } + } + + private void validateSans(final List loadedCertificates, final List newCertificates) + throws CertificateException { + final List currentSans = loadedCertificates.stream() + .map(Certificate::subjectAlternativeNames) + .sorted() + .collect(Collectors.toList()); + final List newSans = newCertificates.stream() + .map(Certificate::subjectAlternativeNames) + .sorted() + .collect(Collectors.toList()); + if (!currentSans.equals(newSans)) { + throw new CertificateException( + "New certificates do not have valid SANs. 
Current SANs: " + currentSans + " new SANs: " + newSans + ); + } + } + + private void validateNewKeyMaterialCertificates( + final List loadedCertificates, + final List newCertificates, + boolean shouldValidateNewCertDNs + ) throws CertificateException { + validateDates(newCertificates); + if (shouldValidateNewCertDNs) { + validateSubjectDns(loadedCertificates, newCertificates); + validateIssuerDns(loadedCertificates, newCertificates); + validateSans(loadedCertificates, newCertificates); + } + } + + private void invalidateSessions() { + final var sessionContext = sslContext.sessionContext(); + if (sessionContext != null) { + for (final var sessionId : Collections.list(sessionContext.getIds())) { + final var session = sessionContext.getSession(sessionId); + if (session != null) { + session.invalidate(); + } + } + } + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/SslSettingsManager.java b/src/main/java/org/opensearch/security/ssl/SslSettingsManager.java new file mode 100644 index 0000000000..381c510894 --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/SslSettingsManager.java @@ -0,0 +1,384 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl; + +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.util.Locale; +import java.util.Map; +import java.util.Optional; +import javax.crypto.Cipher; + +import com.google.common.collect.ImmutableMap; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.security.ssl.config.CertType; +import org.opensearch.security.ssl.config.SslCertificatesLoader; +import org.opensearch.security.ssl.config.SslParameters; + +import io.netty.handler.ssl.ClientAuth; +import io.netty.handler.ssl.OpenSsl; +import io.netty.util.internal.PlatformDependent; + +import static org.opensearch.security.ssl.util.SSLConfigConstants.CLIENT_AUTH_MODE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.EXTENDED_KEY_USAGE_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_CERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_KEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_TRUSTED_CAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED_DEFAULT; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_ENABLE_OPENSSL_IF_AVAILABLE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_PEMCERT_FILEPATH; +import static 
org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_TRUSTSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_TRUSTSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED_DEFAULT; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLE_OPENSSL_IF_AVAILABLE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED_DEFAULT; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMCERT_FILEPATH; +import static 
org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_TRUSTSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_SERVER_EXTENDED_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_FILEPATH; + +public class SslSettingsManager { + + private final static Logger LOGGER = LogManager.getLogger(SslSettingsManager.class); + + private final Map sslSettingsContexts; + + public SslSettingsManager(final Environment environment) { + this.sslSettingsContexts = buildSslContexts(environment); + } + + public Optional sslConfiguration(final CertType certType) { + return Optional.ofNullable(sslSettingsContexts.get(certType)).map(SslContextHandler::sslConfiguration); + } + + public Optional sslContextHandler(final CertType sslConfigPrefix) { + return Optional.ofNullable(sslSettingsContexts.get(sslConfigPrefix)); + } + + private Map buildSslContexts(final Environment environment) { + final var contexts = new ImmutableMap.Builder(); + final var configurations = loadConfigurations(environment); + Optional.ofNullable(configurations.get(CertType.HTTP)) + .ifPresentOrElse( + sslConfiguration -> contexts.put(CertType.HTTP, new SslContextHandler(sslConfiguration)), + () -> LOGGER.warn("SSL Configuration for HTTP Layer hasn't been set") + ); + Optional.ofNullable(configurations.get(CertType.TRANSPORT)).ifPresentOrElse(sslConfiguration -> { + contexts.put(CertType.TRANSPORT, new SslContextHandler(sslConfiguration)); + final var transportClientConfiguration = 
Optional.ofNullable(configurations.get(CertType.TRANSPORT_CLIENT)) + .orElse(sslConfiguration); + contexts.put(CertType.TRANSPORT_CLIENT, new SslContextHandler(transportClientConfiguration, true)); + }, () -> LOGGER.warn("SSL Configuration for Transport Layer hasn't been set")); + return contexts.build(); + } + + public synchronized void reloadSslContext(final CertType certType) { + sslContextHandler(certType).ifPresentOrElse(sscContextHandler -> { + LOGGER.info("Reloading {} SSL context", certType.name()); + try { + sscContextHandler.reloadSslContext(); + } catch (CertificateException e) { + throw new OpenSearchException(e); + } + LOGGER.info("{} SSL context reloaded", certType.name()); + }, () -> LOGGER.error("Missing SSL Context for {}", certType.name())); + } + + private Map loadConfigurations(final Environment environment) { + final var settings = environment.settings(); + final var httpSettings = settings.getByPrefix(CertType.HTTP.sslConfigPrefix()); + final var transpotSettings = settings.getByPrefix(CertType.TRANSPORT.sslConfigPrefix()); + if (httpSettings.isEmpty() && transpotSettings.isEmpty()) { + throw new OpenSearchException("No SSL configuration found"); + } + jceWarnings(); + openSslWarnings(settings); + + final var httpEnabled = httpSettings.getAsBoolean(ENABLED, SECURITY_SSL_HTTP_ENABLED_DEFAULT); + final var transportEnabled = transpotSettings.getAsBoolean(ENABLED, SECURITY_SSL_TRANSPORT_ENABLED_DEFAULT); + + final var configurationBuilder = ImmutableMap.builder(); + if (httpEnabled && !clientNode(settings)) { + validateHttpSettings(httpSettings); + final var httpSslParameters = SslParameters.loader(httpSettings).load(true); + final var httpTrustAndKeyStore = new SslCertificatesLoader(CertType.HTTP.sslConfigPrefix()).loadConfiguration(environment); + configurationBuilder.put( + CertType.HTTP, + new SslConfiguration(httpSslParameters, httpTrustAndKeyStore.v1(), httpTrustAndKeyStore.v2()) + ); + LOGGER.info("TLS HTTP Provider : {}", 
httpSslParameters.provider()); + LOGGER.info("Enabled TLS protocols for HTTP layer : {}", httpSslParameters.allowedProtocols()); + } + final var transportSslParameters = SslParameters.loader(transpotSettings).load(false); + if (transportEnabled) { + if (hasExtendedKeyUsageEnabled(transpotSettings)) { + validateTransportSettings(transpotSettings); + final var transportServerTrustAndKeyStore = new SslCertificatesLoader( + CertType.TRANSPORT.sslConfigPrefix(), + SSL_TRANSPORT_SERVER_EXTENDED_PREFIX + ).loadConfiguration(environment); + configurationBuilder.put( + CertType.TRANSPORT, + new SslConfiguration(transportSslParameters, transportServerTrustAndKeyStore.v1(), transportServerTrustAndKeyStore.v2()) + ); + final var transportClientTrustAndKeyStore = new SslCertificatesLoader( + CertType.TRANSPORT.sslConfigPrefix(), + SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX + ).loadConfiguration(environment); + configurationBuilder.put( + CertType.TRANSPORT_CLIENT, + new SslConfiguration(transportSslParameters, transportClientTrustAndKeyStore.v1(), transportClientTrustAndKeyStore.v2()) + ); + } else { + validateTransportSettings(transpotSettings); + final var transportTrustAndKeyStore = new SslCertificatesLoader(CertType.TRANSPORT.sslConfigPrefix()).loadConfiguration( + environment + ); + configurationBuilder.put( + CertType.TRANSPORT, + new SslConfiguration(transportSslParameters, transportTrustAndKeyStore.v1(), transportTrustAndKeyStore.v2()) + ); + } + LOGGER.info("TLS Transport Client Provider : {}", transportSslParameters.provider()); + LOGGER.info("TLS Transport Server Provider : {}", transportSslParameters.provider()); + LOGGER.info("Enabled TLS protocols for Transport layer : {}", transportSslParameters.allowedProtocols()); + } + return configurationBuilder.build(); + } + + private boolean clientNode(final Settings settings) { + return !"node".equals(settings.get(OpenSearchSecuritySSLPlugin.CLIENT_TYPE)); + } + + private void validateHttpSettings(final Settings httpSettings) { 
+ if (httpSettings == null) return; + if (!httpSettings.getAsBoolean(ENABLED, SECURITY_SSL_HTTP_ENABLED_DEFAULT)) return; + + final var clientAuth = ClientAuth.valueOf(httpSettings.get(CLIENT_AUTH_MODE, ClientAuth.OPTIONAL.name()).toUpperCase(Locale.ROOT)); + + if (hasPemStoreSettings(httpSettings)) { + if (!httpSettings.hasValue(PEM_CERT_FILEPATH) || !httpSettings.hasValue(PEM_KEY_FILEPATH)) { + throw new OpenSearchException( + "Wrong HTTP SSL configuration. " + + String.join(", ", SECURITY_SSL_HTTP_PEMCERT_FILEPATH, SECURITY_SSL_HTTP_PEMKEY_FILEPATH) + + " must be set" + ); + } + if (clientAuth == ClientAuth.REQUIRE && !httpSettings.hasValue(PEM_TRUSTED_CAS_FILEPATH)) { + throw new OpenSearchException( + "Wrong HTTP SSL configuration. " + SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH + " must be set if client auth is required" + ); + } + } else if (hasKeyOrTrustStoreSettings(httpSettings)) { + if (!httpSettings.hasValue(KEYSTORE_FILEPATH)) { + throw new OpenSearchException("Wrong HTTP SSL configuration. " + SECURITY_SSL_HTTP_KEYSTORE_FILEPATH + " must be set"); + } + if (clientAuth == ClientAuth.REQUIRE && !httpSettings.hasValue(TRUSTSTORE_FILEPATH)) { + throw new OpenSearchException( + "Wrong HTTP SSL configuration. " + SECURITY_SSL_HTTP_TRUSTSTORE_FILEPATH + " must be set if client auth is required" + ); + } + } else { + throw new OpenSearchException( + "Wrong HTTP SSL configuration. One of Keystore and Truststore files or X.509 PEM certificates and " + + "PKCS#8 keys groups should be set to configure HTTP layer" + ); + } + } + + private void validateTransportSettings(final Settings transportSettings) { + if (!hasExtendedKeyUsageEnabled(transportSettings)) { + if (hasPemStoreSettings(transportSettings)) { + if (!transportSettings.hasValue(PEM_CERT_FILEPATH) + || !transportSettings.hasValue(PEM_KEY_FILEPATH) + || !transportSettings.hasValue(PEM_TRUSTED_CAS_FILEPATH)) { + throw new OpenSearchException( + "Wrong Transport SSL configuration. 
" + + String.join( + ",", + SECURITY_SSL_TRANSPORT_PEMCERT_FILEPATH, + SECURITY_SSL_TRANSPORT_PEMKEY_FILEPATH, + SECURITY_SSL_TRANSPORT_PEMTRUSTEDCAS_FILEPATH + ) + + " must be set" + ); + } + + } else if (hasKeyOrTrustStoreSettings(transportSettings)) { + verifyKeyAndTrustStoreSettings(transportSettings); + } else { + throw new OpenSearchException( + "Wrong Transport SSL configuration. One of Keystore and Truststore files or X.509 PEM certificates and " + + "PKCS#8 keys groups should be set to configure Transport layer properly" + ); + } + } else { + final var serverTransportSettings = transportSettings.getByPrefix(SSL_TRANSPORT_SERVER_EXTENDED_PREFIX); + final var clientTransportSettings = transportSettings.getByPrefix(SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX); + if (hasKeyOrTrustStoreSettings(transportSettings)) { + verifyKeyAndTrustStoreSettings(transportSettings); + if (!serverTransportSettings.hasValue(KEYSTORE_ALIAS) + || !serverTransportSettings.hasValue(TRUSTSTORE_ALIAS) + || !clientTransportSettings.hasValue(KEYSTORE_ALIAS) + || !clientTransportSettings.hasValue(TRUSTSTORE_ALIAS)) { + throw new OpenSearchException( + "Wrong Transport/Transport Client SSL configuration. 
" + + String.join( + ",", + SECURITY_SSL_TRANSPORT_SERVER_KEYSTORE_ALIAS, + SECURITY_SSL_TRANSPORT_SERVER_TRUSTSTORE_ALIAS, + SECURITY_SSL_TRANSPORT_CLIENT_KEYSTORE_ALIAS, + SECURITY_SSL_TRANSPORT_CLIENT_TRUSTSTORE_ALIAS + ) + + " must be set if " + + SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED + + " is set" + ); + } + } else if (!hasKeyOrTrustStoreSettings(transportSettings)) { + if (!serverTransportSettings.hasValue(PEM_CERT_FILEPATH) + || !serverTransportSettings.hasValue(PEM_KEY_FILEPATH) + || !serverTransportSettings.hasValue(PEM_TRUSTED_CAS_FILEPATH) + || !clientTransportSettings.hasValue(PEM_CERT_FILEPATH) + || !clientTransportSettings.hasValue(PEM_KEY_FILEPATH) + || !clientTransportSettings.hasValue(PEM_TRUSTED_CAS_FILEPATH)) { + throw new OpenSearchException( + "Wrong Transport/Transport Client SSL configuration. " + + String.join( + ",", + SECURITY_SSL_TRANSPORT_SERVER_PEMCERT_FILEPATH, + SECURITY_SSL_TRANSPORT_SERVER_PEMKEY_FILEPATH, + SECURITY_SSL_TRANSPORT_SERVER_PEMTRUSTEDCAS_FILEPATH, + SECURITY_SSL_TRANSPORT_CLIENT_PEMCERT_FILEPATH, + SECURITY_SSL_TRANSPORT_CLIENT_PEMKEY_FILEPATH, + SECURITY_SSL_TRANSPORT_CLIENT_PEMTRUSTEDCAS_FILEPATH + ) + + " must be set if " + + SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED + + " is set" + ); + } + } else { + throw new OpenSearchException( + "Wrong Transport/Transport Client SSL configuration. One of Keystore and Truststore files or X.509 PEM certificates and " + + "PKCS#8 keys groups should be set to configure HTTP layer" + ); + } + } + } + + private void verifyKeyAndTrustStoreSettings(final Settings settings) { + if (!settings.hasValue(KEYSTORE_FILEPATH) || !settings.hasValue(TRUSTSTORE_FILEPATH)) { + throw new OpenSearchException( + "Wrong Transport/Tran SSL configuration. 
One of Keystore and Truststore files or X.509 PEM certificates and " + + "PKCS#8 keys groups should be set to configure Transport layer properly" + ); + } + } + + private boolean hasExtendedKeyUsageEnabled(final Settings settings) { + return settings.getAsBoolean(EXTENDED_KEY_USAGE_ENABLED, SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED_DEFAULT); + } + + private boolean hasKeyOrTrustStoreSettings(final Settings settings) { + return settings.hasValue(KEYSTORE_FILEPATH) || settings.hasValue(TRUSTSTORE_FILEPATH); + } + + private boolean hasPemStoreSettings(final Settings settings) { + return settings.hasValue(PEM_KEY_FILEPATH) || settings.hasValue(PEM_CERT_FILEPATH) || settings.hasValue(PEM_TRUSTED_CAS_FILEPATH); + } + + void jceWarnings() { + try { + final int aesMaxKeyLength = Cipher.getMaxAllowedKeyLength("AES"); + + if (aesMaxKeyLength < 256) { + // CS-SUPPRESS-SINGLE: RegexpSingleline Java Cryptography Extension is unrelated to OpenSearch extensions + LOGGER.info( + "AES-256 not supported, max key length for AES is {} bit." + + " (This is not an issue, it just limits possible encryption strength. " + + "To enable AES 256, " + + "install 'Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files')", + aesMaxKeyLength + ); + // CS-ENFORCE-SINGLE + } + } catch (final NoSuchAlgorithmException e) { + LOGGER.error("AES encryption not supported (SG 1). ", e); + } + } + + void openSslWarnings(final Settings settings) { + if (!OpenSearchSecuritySSLPlugin.OPENSSL_SUPPORTED + && OpenSsl.isAvailable() + && (settings.getAsBoolean(SECURITY_SSL_HTTP_ENABLE_OPENSSL_IF_AVAILABLE, true) + || settings.getAsBoolean(SECURITY_SSL_TRANSPORT_ENABLE_OPENSSL_IF_AVAILABLE, true))) { + if (PlatformDependent.javaVersion() < 12) { + LOGGER.warn( + "Support for OpenSSL with Java 11 or prior versions require using Netty allocator. 
Set " + + "'opensearch.unsafe.use_netty_default_allocator' system property to true" + ); + } else { + LOGGER.warn("Support for OpenSSL with Java 12+ has been removed from OpenSearch Security. Using JDK SSL instead."); + } + } + if (OpenSearchSecuritySSLPlugin.OPENSSL_SUPPORTED && OpenSsl.isAvailable()) { + LOGGER.info("OpenSSL {} ({}) available", OpenSsl.versionString(), OpenSsl.version()); + + if (OpenSsl.version() < 0x10002000L) { + LOGGER.warn( + "Outdated OpenSSL version detected. You should update to 1.0.2k or later. Currently installed: {}", + OpenSsl.versionString() + ); + } + + if (!OpenSsl.supportsHostnameValidation()) { + LOGGER.warn( + "Your OpenSSL version {} does not support hostname verification. You should update to 1.0.2k or later.", + OpenSsl.versionString() + ); + } + + LOGGER.debug("OpenSSL available ciphers {}", OpenSsl.availableOpenSslCipherSuites()); + } else { + LOGGER.warn( + "OpenSSL not available (this is not an error, we simply fallback to built-in JDK SSL) because of {}", + OpenSsl.unavailabilityCause() + ); + } + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/config/CertType.java b/src/main/java/org/opensearch/security/ssl/config/CertType.java new file mode 100644 index 0000000000..09a8dcfae9 --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/CertType.java @@ -0,0 +1,33 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_HTTP_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_CLIENT_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_PREFIX; + +public enum CertType { + HTTP(SSL_HTTP_PREFIX), + TRANSPORT(SSL_TRANSPORT_PREFIX), + TRANSPORT_CLIENT(SSL_TRANSPORT_CLIENT_PREFIX); + + private final String sslConfigPrefix; + + private CertType(String sslConfigPrefix) { + this.sslConfigPrefix = sslConfigPrefix; + } + + public String sslConfigPrefix() { + return sslConfigPrefix; + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/config/Certificate.java b/src/main/java/org/opensearch/security/ssl/config/Certificate.java new file mode 100644 index 0000000000..534148db57 --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/Certificate.java @@ -0,0 +1,188 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.lang.reflect.Method; +import java.security.cert.CertificateParsingException; +import java.security.cert.X509Certificate; +import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; +import java.util.Set; +import java.util.TreeSet; + +import com.google.common.collect.ImmutableList; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.bouncycastle.asn1.ASN1InputStream; +import org.bouncycastle.asn1.ASN1Object; +import org.bouncycastle.asn1.ASN1ObjectIdentifier; +import org.bouncycastle.asn1.ASN1Primitive; +import org.bouncycastle.asn1.ASN1Sequence; +import org.bouncycastle.asn1.ASN1String; +import org.bouncycastle.asn1.ASN1TaggedObject; + +public class Certificate { + + private final static Logger LOGGER = LogManager.getLogger(Certificate.class); + + private final X509Certificate certificate; + + private final String format; + + private final String alias; + + private final boolean hasKey; + + public Certificate(final X509Certificate certificate, final boolean hasKey) { + this(certificate, "pem", null, hasKey); + } + + public Certificate(final X509Certificate certificate, final String format, final String alias, final boolean hasKey) { + this.certificate = certificate; + this.format = format; + this.alias = alias; + this.hasKey = hasKey; + } + + public X509Certificate x509Certificate() { + return certificate; + } + + public String format() { + return format; + } + + public String alias() { + return alias; + } + + public boolean hasKey() { + return hasKey; + } + + public String subjectAlternativeNames() { + return loadSubjectAlternativeNames(); + } + + @Deprecated(since = "since JDK 21", forRemoval = true) + public String loadSubjectAlternativeNames() { + String san = ""; + try { + Collection> altNames = certificate != null && certificate.getSubjectAlternativeNames() != null + ? 
certificate.getSubjectAlternativeNames() + : null; + if (altNames != null) { + Comparator> comparator = Comparator.comparing((List altName) -> (Integer) altName.get(0)) + .thenComparing((List altName) -> (String) altName.get(1)); + + Set> sans = new TreeSet<>(comparator); + for (List altName : altNames) { + Integer type = (Integer) altName.get(0); + // otherName requires parsing to string + if (type == 0) { + List otherName = parseOtherName(altName); + if (otherName != null) { + sans.add(Arrays.asList(type, otherName)); + } + } else { + sans.add(altName); + } + } + san = sans.toString(); + } + } catch (CertificateParsingException e) { + LOGGER.error("Issue parsing SubjectAlternativeName:", e); + } + + return san; + } + + @Deprecated(since = "since JDK 21", forRemoval = true) + private List parseOtherName(List altName) { + if (altName.size() < 2) { + LOGGER.warn("Couldn't parse subject alternative names"); + return null; + } + try (final ASN1InputStream in = new ASN1InputStream((byte[]) altName.get(1))) { + final ASN1Primitive asn1Primitive = in.readObject(); + final ASN1Sequence sequence = ASN1Sequence.getInstance(asn1Primitive); + final ASN1ObjectIdentifier asn1ObjectIdentifier = ASN1ObjectIdentifier.getInstance(sequence.getObjectAt(0)); + final ASN1TaggedObject asn1TaggedObject = ASN1TaggedObject.getInstance(sequence.getObjectAt(1)); + Method getObjectMethod = getObjectMethod(); + ASN1Object maybeTaggedAsn1Primitive = (ASN1Primitive) getObjectMethod.invoke(asn1TaggedObject); + if (maybeTaggedAsn1Primitive instanceof ASN1TaggedObject) { + maybeTaggedAsn1Primitive = (ASN1Primitive) getObjectMethod.invoke(maybeTaggedAsn1Primitive); + } + if (maybeTaggedAsn1Primitive instanceof ASN1String) { + return ImmutableList.of(asn1ObjectIdentifier.getId(), maybeTaggedAsn1Primitive.toString()); + } else { + LOGGER.warn("Couldn't parse subject alternative names"); + return null; + } + } catch (final Exception ioe) { // catch all exception here since BC throws diff exceptions + 
throw new RuntimeException("Couldn't parse subject alternative names", ioe); + } + } + + static Method getObjectMethod() throws ClassNotFoundException, NoSuchMethodException { + Class asn1TaggedObjectClass = Class.forName("org.bouncycastle.asn1.ASN1TaggedObject"); + try { + return asn1TaggedObjectClass.getMethod("getBaseObject"); + } catch (NoSuchMethodException ex) { + return asn1TaggedObjectClass.getMethod("getObject"); + } + } + + public String serialNumber() { + return certificate.getSerialNumber().toString(); + } + + public String subject() { + return certificate.getSubjectX500Principal() != null ? certificate.getSubjectX500Principal().getName() : null; + } + + public String issuer() { + return certificate.getIssuerX500Principal() != null ? certificate.getIssuerX500Principal().getName() : null; + } + + public String notAfter() { + return certificate.getNotAfter() != null ? certificate.getNotAfter().toInstant().toString() : null; + } + + public String notBefore() { + return certificate.getNotBefore() != null ? 
certificate.getNotBefore().toInstant().toString() : null; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Certificate that = (Certificate) o; + return hasKey == that.hasKey + && Objects.equals(certificate, that.certificate) + && Objects.equals(format, that.format) + && Objects.equals(alias, that.alias); + } + + @Override + public int hashCode() { + return Objects.hash(certificate, format, alias, hasKey); + } + + @Override + public String toString() { + return "Certificate{" + "format='" + format + '\'' + ", alias='" + alias + '\'' + ", hasKey=" + hasKey + '}'; + } +} diff --git a/src/main/java/org/opensearch/security/ssl/config/KeyStoreConfiguration.java b/src/main/java/org/opensearch/security/ssl/config/KeyStoreConfiguration.java new file mode 100644 index 0000000000..b1675f093a --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/KeyStoreConfiguration.java @@ -0,0 +1,201 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.cert.X509Certificate; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import javax.net.ssl.KeyManagerFactory; + +import com.google.common.collect.ImmutableList; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.collect.Tuple; + +public interface KeyStoreConfiguration { + + List files(); + + List loadCertificates(); + + default KeyManagerFactory createKeyManagerFactory(boolean validateCertificates) { + final var keyStore = createKeyStore(); + if (validateCertificates) { + KeyStoreUtils.validateKeyStoreCertificates(keyStore.v1()); + } + return buildKeyManagerFactory(keyStore.v1(), keyStore.v2()); + } + + default KeyManagerFactory buildKeyManagerFactory(final KeyStore keyStore, final char[] password) { + try { + final var keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); + keyManagerFactory.init(keyStore, password); + return keyManagerFactory; + } catch (GeneralSecurityException e) { + throw new OpenSearchException("Failed to create KeyManagerFactory", e); + } + } + + Tuple createKeyStore(); + + final class JdkKeyStoreConfiguration implements KeyStoreConfiguration { + private final Path path; + + private final String type; + + private final String alias; + + private final char[] keyStorePassword; + + private final char[] keyPassword; + + public JdkKeyStoreConfiguration( + final Path path, + final String type, + final String alias, + final char[] keyStorePassword, + final char[] keyPassword + ) { + this.path = path; + this.type = type; + this.alias = alias; + this.keyStorePassword = keyStorePassword; + this.keyPassword = keyPassword; + } + + private void loadCertificateChain(final String alias, final KeyStore keyStore, 
final ImmutableList.Builder listBuilder) + throws KeyStoreException { + final var cc = keyStore.getCertificateChain(alias); + var first = true; + for (final var c : cc) { + if (c instanceof X509Certificate) { + listBuilder.add(new Certificate((X509Certificate) c, type, alias, first)); + first = false; + } + } + } + + @Override + public List loadCertificates() { + final var keyStore = KeyStoreUtils.loadKeyStore(path, type, keyStorePassword); + final var listBuilder = ImmutableList.builder(); + + try { + if (alias != null) { + if (keyStore.isKeyEntry(alias)) { + loadCertificateChain(alias, keyStore, listBuilder); + } + } else { + for (final var a : Collections.list(keyStore.aliases())) { + if (keyStore.isKeyEntry(a)) { + loadCertificateChain(a, keyStore, listBuilder); + } + } + } + final var list = listBuilder.build(); + if (list.isEmpty()) { + throw new OpenSearchException("The file " + path + " does not contain any certificates"); + } + return listBuilder.build(); + } catch (GeneralSecurityException e) { + throw new OpenSearchException("Couldn't load certificates from file " + path, e); + } + } + + @Override + public List files() { + return List.of(path); + } + + @Override + public Tuple createKeyStore() { + final var keyStore = KeyStoreUtils.newKeyStore(path, type, alias, keyStorePassword, keyPassword); + return Tuple.tuple(keyStore, keyPassword); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JdkKeyStoreConfiguration that = (JdkKeyStoreConfiguration) o; + return Objects.equals(path, that.path) + && Objects.equals(type, that.type) + && Objects.equals(alias, that.alias) + && Objects.deepEquals(keyStorePassword, that.keyStorePassword) + && Objects.deepEquals(keyPassword, that.keyPassword); + } + + @Override + public int hashCode() { + return Objects.hash(path, type, alias, Arrays.hashCode(keyStorePassword), Arrays.hashCode(keyPassword)); + } + } + + final class 
PemKeyStoreConfiguration implements KeyStoreConfiguration { + + private final Path certificateChainPath; + + private final Path keyPath; + + private final char[] keyPassword; + + public PemKeyStoreConfiguration(final Path certificateChainPath, final Path keyPath, final char[] keyPassword) { + this.certificateChainPath = certificateChainPath; + this.keyPath = keyPath; + this.keyPassword = keyPassword; + } + + @Override + public List loadCertificates() { + final var certificates = KeyStoreUtils.x509Certificates(certificateChainPath); + final var listBuilder = ImmutableList.builder(); + listBuilder.add(new Certificate(certificates[0], true)); + for (int i = 1; i < certificates.length; i++) { + listBuilder.add(new Certificate(certificates[i], false)); + } + return listBuilder.build(); + } + + @Override + public List files() { + return List.of(certificateChainPath, keyPath); + } + + @Override + public Tuple createKeyStore() { + final var keyStore = KeyStoreUtils.newKeyStoreFromPem(certificateChainPath, keyPath, keyPassword); + return Tuple.tuple(keyStore, keyPassword); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PemKeyStoreConfiguration that = (PemKeyStoreConfiguration) o; + return Objects.equals(certificateChainPath, that.certificateChainPath) + && Objects.equals(keyPath, that.keyPath) + && Objects.deepEquals(keyPassword, that.keyPassword); + } + + @Override + public int hashCode() { + return Objects.hash(certificateChainPath, keyPath, Arrays.hashCode(keyPassword)); + } + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/config/KeyStoreUtils.java b/src/main/java/org/opensearch/security/ssl/config/KeyStoreUtils.java new file mode 100644 index 0000000000..7c063bd312 --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/KeyStoreUtils.java @@ -0,0 +1,218 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require 
contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.security.ssl.config; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.InvalidAlgorithmParameterException; +import java.security.KeyException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.security.spec.InvalidKeySpecException; +import java.util.List; +import javax.crypto.NoSuchPaddingException; +import javax.net.ssl.SSLEngine; +import javax.net.ssl.SSLSessionContext; + +import org.opensearch.OpenSearchException; + +import io.netty.buffer.ByteBufAllocator; +import io.netty.handler.ssl.ApplicationProtocolNegotiator; +import io.netty.handler.ssl.SslContext; + +final class KeyStoreUtils { + + private final static class SecuritySslContext extends SslContext { + + private SecuritySslContext() {} + + @Override + public boolean isClient() { + throw new UnsupportedOperationException("Method isClient is not supported"); + } + + @Override + public List cipherSuites() { + throw new UnsupportedOperationException("Method cipherSuites is not supported"); + } + + @Override + public ApplicationProtocolNegotiator applicationProtocolNegotiator() { + throw new UnsupportedOperationException("Method applicationProtocolNegotiator is not supported"); + } + + @Override + public SSLEngine newEngine(ByteBufAllocator alloc) { + throw new UnsupportedOperationException("Method newEngine is not supported"); + } + + @Override + public SSLEngine newEngine(ByteBufAllocator alloc, String peerHost, int peerPort) { + throw new UnsupportedOperationException("Method 
newEngine is not supported"); + } + + @Override + public SSLSessionContext sessionContext() { + throw new UnsupportedOperationException("Method sessionContext is not supported"); + } + + public static X509Certificate[] toX509Certificates(final File file) { + try { + return SslContext.toX509Certificates(file); + } catch (CertificateException e) { + throw new OpenSearchException("Couldn't read SSL certificates from " + file, e); + } + } + + protected static PrivateKey toPrivateKey(File keyFile, String keyPassword) throws InvalidAlgorithmParameterException, + NoSuchPaddingException, NoSuchAlgorithmException, InvalidKeySpecException, IOException, KeyException { + return SslContext.toPrivateKey(keyFile, keyPassword); + } + + } + + public static X509Certificate[] x509Certificates(final Path file) { + final var certificates = SecuritySslContext.toX509Certificates(file.toFile()); + if (certificates == null || certificates.length == 0) { + throw new OpenSearchException("Couldn't read SSL certificates from " + file); + } + return certificates; + } + + public static KeyStore loadTrustStore(final Path path, final String type, final String alias, final char[] password) { + try { + var keyStore = loadKeyStore(path, type, password); + if (alias != null) { + if (!keyStore.isCertificateEntry(alias)) { + throw new OpenSearchException("Alias " + alias + " does not contain a certificate entry"); + } + final var aliasCertificate = (X509Certificate) keyStore.getCertificate(alias); + if (aliasCertificate == null) { + throw new OpenSearchException("Couldn't find SSL certificate for alias " + alias); + } + keyStore = newKeyStore(); + keyStore.setCertificateEntry(alias, aliasCertificate); + } + return keyStore; + } catch (Exception e) { + throw new OpenSearchException("Failed to load trust store from " + path, e); + } + } + + public static KeyStore newTrustStoreFromPem(final Path pemFile) { + try { + final var certs = x509Certificates(pemFile); + final var keyStore = newKeyStore(); + for 
(int i = 0; i < certs.length; i++) { + final var c = certs[i]; + keyStore.setCertificateEntry("os-sec-plugin-pem-cert-" + i, c); + } + return keyStore; + } catch (final Exception e) { + throw new OpenSearchException("Failed to load SSL certificates from " + pemFile, e); + } + } + + private static KeyStore newKeyStore() throws KeyStoreException, CertificateException, IOException, NoSuchAlgorithmException { + final var keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, null); + return keyStore; + } + + public static void validateKeyStoreCertificates(final KeyStore keyStore) { + try { + final var aliases = keyStore.aliases(); + while (aliases.hasMoreElements()) { + final var a = aliases.nextElement(); + if (keyStore.isCertificateEntry(a)) { + final var c = (X509Certificate) keyStore.getCertificate(a); + if (c == null) { + throw new CertificateException("Alias " + a + " does not contain a certificate entry"); + } + c.checkValidity(); + } else if (keyStore.isKeyEntry(a)) { + final var cc = keyStore.getCertificateChain(a); + if (cc == null) { + throw new CertificateException("Alias " + a + " does not contain a certificate chain"); + } + for (final var c : cc) { + ((X509Certificate) c).checkValidity(); + } + } + } + } catch (KeyStoreException e) { + throw new OpenSearchException("Couldn't load keys store", e); + } catch (CertificateException e) { + throw new OpenSearchException("Invalid certificates", e); + } + } + + public static KeyStore loadKeyStore(final Path path, final String type, final char[] password) { + try { + final var keyStore = KeyStore.getInstance(type); + try (final var in = Files.newInputStream(path)) { + keyStore.load(in, password); + return keyStore; + } catch (IOException e) { + throw new RuntimeException(e); + } + } catch (Exception e) { + throw new OpenSearchException("Failed to load keystore from " + path, e); + } + } + + public static KeyStore newKeyStore( + final Path path, + final String type, + final String alias, 
+ final char[] password, + final char[] keyPassword + ) { + try { + var keyStore = loadKeyStore(path, type, password); + if (alias != null) { + if (!keyStore.isKeyEntry(alias)) { + throw new CertificateException("Couldn't find SSL key for alias " + alias); + } + final var certificateChain = keyStore.getCertificateChain(alias); + if (certificateChain == null) { + throw new CertificateException("Couldn't find certificate chain for alias " + alias); + } + final var key = keyStore.getKey(alias, keyPassword); + keyStore = newKeyStore(); + keyStore.setKeyEntry(alias, key, keyPassword, certificateChain); + } + return keyStore; + } catch (final Exception e) { + throw new OpenSearchException("Failed to load key store from " + path, e); + } + } + + public static KeyStore newKeyStoreFromPem(final Path certificateChainPath, final Path keyPath, final char[] keyPassword) { + try { + final var certificateChain = x509Certificates(certificateChainPath); + final var keyStore = newKeyStore(); + final var key = SecuritySslContext.toPrivateKey(keyPath.toFile(), keyPassword != null ? new String(keyPassword) : null); + keyStore.setKeyEntry("key", key, keyPassword, certificateChain); + return keyStore; + } catch (Exception e) { + throw new OpenSearchException("Failed read key from " + keyPath, e); + } + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/config/SslCertificatesLoader.java b/src/main/java/org/opensearch/security/ssl/config/SslCertificatesLoader.java new file mode 100644 index 0000000000..a3f0c39eed --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/SslCertificatesLoader.java @@ -0,0 +1,171 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.nio.file.Files; +import java.nio.file.LinkOption; +import java.nio.file.Path; +import java.security.KeyStore; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.collect.Tuple; +import org.opensearch.common.settings.SecureSetting; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; + +import static org.opensearch.security.ssl.SecureSSLSettings.SECURE_SUFFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.DEFAULT_STORE_PASSWORD; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_KEY_PASSWORD; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_PASSWORD; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_TYPE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_CERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_KEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_KEY_PASSWORD; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_TRUSTED_CAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_PASSWORD; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_TYPE; + +public class SslCertificatesLoader { + + private final static Logger LOGGER = LogManager.getLogger(SslCertificatesLoader.class); + + private final String sslConfigSuffix; + + private final String 
fullSslConfigSuffix; + + public SslCertificatesLoader(final String sslConfigSuffix) { + this(sslConfigSuffix, null); + } + + public SslCertificatesLoader(final String sslConfigSuffix, final String extendedSslConfigSuffix) { + this.sslConfigSuffix = sslConfigSuffix; + this.fullSslConfigSuffix = extendedSslConfigSuffix != null ? sslConfigSuffix + extendedSslConfigSuffix : sslConfigSuffix; + } + + public Tuple loadConfiguration(final Environment environment) { + final var settings = environment.settings(); + final var sslConfigSettings = settings.getByPrefix(fullSslConfigSuffix); + if (settings.hasValue(sslConfigSuffix + KEYSTORE_FILEPATH)) { + return Tuple.tuple( + environment.settings().hasValue(sslConfigSuffix + TRUSTSTORE_FILEPATH) + ? buildJdkTrustStoreConfiguration( + sslConfigSettings, + environment, + resolvePassword(sslConfigSuffix + TRUSTSTORE_PASSWORD, settings, DEFAULT_STORE_PASSWORD) + ) + : TrustStoreConfiguration.EMPTY_CONFIGURATION, + buildJdkKeyStoreConfiguration( + sslConfigSettings, + environment, + resolvePassword(sslConfigSuffix + KEYSTORE_PASSWORD, settings, DEFAULT_STORE_PASSWORD), + resolvePassword(fullSslConfigSuffix + KEYSTORE_KEY_PASSWORD, settings, DEFAULT_STORE_PASSWORD) + ) + ); + } else { + return Tuple.tuple( + sslConfigSettings.hasValue(PEM_TRUSTED_CAS_FILEPATH) + ? 
new TrustStoreConfiguration.PemTrustStoreConfiguration( + resolvePath(sslConfigSettings.get(PEM_TRUSTED_CAS_FILEPATH), environment) + ) + : TrustStoreConfiguration.EMPTY_CONFIGURATION, + buildPemKeyStoreConfiguration( + sslConfigSettings, + environment, + resolvePassword(fullSslConfigSuffix + PEM_KEY_PASSWORD, settings, null) + ) + ); + } + } + + private char[] resolvePassword(final String legacyPasswordSettings, final Settings settings, final String defaultPassword) { + final var securePasswordSetting = String.format("%s%s", legacyPasswordSettings, SECURE_SUFFIX); + final var securePassword = SecureSetting.secureString(securePasswordSetting, null).get(settings); + final var legacyPassword = settings.get(legacyPasswordSettings, defaultPassword); + if (!securePassword.isEmpty() && legacyPassword != null && !legacyPassword.equals(defaultPassword)) { + throw new OpenSearchException("One of " + legacyPasswordSettings + " or " + securePasswordSetting + " must be set not both"); + } + if (!securePassword.isEmpty()) { + return securePassword.getChars(); + } else { + if (legacyPassword != null) { + LOGGER.warn( + "Setting [{}] has a secure counterpart [{}] which should be used instead - allowing for legacy SSL setups", + legacyPasswordSettings, + securePasswordSetting + ); + return legacyPassword.toCharArray(); + } + } + return null; + } + + private KeyStoreConfiguration.JdkKeyStoreConfiguration buildJdkKeyStoreConfiguration( + final Settings settings, + final Environment environment, + final char[] keyStorePassword, + final char[] keyPassword + ) { + return new KeyStoreConfiguration.JdkKeyStoreConfiguration( + resolvePath(environment.settings().get(sslConfigSuffix + KEYSTORE_FILEPATH), environment), + environment.settings().get(sslConfigSuffix + KEYSTORE_TYPE, KeyStore.getDefaultType()), + settings.get(KEYSTORE_ALIAS, null), + keyStorePassword, + keyPassword + ); + } + + private TrustStoreConfiguration.JdkTrustStoreConfiguration buildJdkTrustStoreConfiguration( + final 
Settings settings, + final Environment environment, + final char[] trustStorePassword + ) { + return new TrustStoreConfiguration.JdkTrustStoreConfiguration( + resolvePath(environment.settings().get(sslConfigSuffix + TRUSTSTORE_FILEPATH), environment), + environment.settings().get(sslConfigSuffix + TRUSTSTORE_TYPE, KeyStore.getDefaultType()), + settings.get(TRUSTSTORE_ALIAS, null), + trustStorePassword + ); + } + + private KeyStoreConfiguration.PemKeyStoreConfiguration buildPemKeyStoreConfiguration( + final Settings settings, + final Environment environment, + final char[] pemKeyPassword + ) { + return new KeyStoreConfiguration.PemKeyStoreConfiguration( + resolvePath(settings.get(PEM_CERT_FILEPATH), environment), + resolvePath(settings.get(PEM_KEY_FILEPATH), environment), + pemKeyPassword + ); + } + + private Path resolvePath(final String filePath, final Environment environment) { + final var path = environment.configDir().resolve(Path.of(filePath)); + if (Files.isDirectory(path, LinkOption.NOFOLLOW_LINKS)) { + throw new OpenSearchException(filePath + " - is a directory"); + } + if (!Files.isReadable(path)) { + throw new OpenSearchException( + "Unable to read the file " + filePath + ". Please make sure this files exists and is readable regarding to permissions" + ); + } + return path; + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/config/SslParameters.java b/src/main/java/org/opensearch/security/ssl/config/SslParameters.java new file mode 100644 index 0000000000..a31b14723b --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/SslParameters.java @@ -0,0 +1,216 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.security.NoSuchAlgorithmException; +import java.util.List; +import java.util.Locale; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.net.ssl.SSLContext; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchSecurityException; +import org.opensearch.common.settings.Settings; + +import io.netty.handler.ssl.ClientAuth; +import io.netty.handler.ssl.OpenSsl; +import io.netty.handler.ssl.SslProvider; + +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_OPENSSL_HTTP_PROTOCOLS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_OPENSSL_HTTP_PROTOCOLS_PRIOR_OPENSSL_1_1_1_BETA_9; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_OPENSSL_TRANSPORT_PROTOCOLS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_OPENSSL_TRANSPORT_PROTOCOLS_PRIOR_OPENSSL_1_1_1_BETA_9; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_SSL_CIPHERS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_SSL_PROTOCOLS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.CLIENT_AUTH_MODE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENABLED_CIPHERS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENABLED_PROTOCOLS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENABLE_OPENSSL_IF_AVAILABLE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENFORCE_CERT_RELOAD_DN_VERIFICATION; +import static org.opensearch.security.ssl.util.SSLConfigConstants.OPENSSL_1_1_1_BETA_9; +import static org.opensearch.security.ssl.util.SSLConfigConstants.OPENSSL_AVAILABLE; + +public class SslParameters { + + private final SslProvider provider; + + private 
final ClientAuth clientAuth; + + private final List protocols; + + private final List ciphers; + + private final boolean validateCertDNsOnReload; + + private SslParameters( + SslProvider provider, + final ClientAuth clientAuth, + List protocols, + List ciphers, + boolean validateCertDNsOnReload + ) { + this.provider = provider; + this.ciphers = ciphers; + this.protocols = protocols; + this.clientAuth = clientAuth; + this.validateCertDNsOnReload = validateCertDNsOnReload; + } + + public ClientAuth clientAuth() { + return clientAuth; + } + + public SslProvider provider() { + return provider; + } + + public List allowedCiphers() { + return ciphers; + } + + public List allowedProtocols() { + return protocols; + } + + public boolean shouldValidateNewCertDNs() { + return validateCertDNsOnReload; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + SslParameters that = (SslParameters) o; + return provider == that.provider && Objects.equals(ciphers, that.ciphers) && Objects.equals(protocols, that.protocols); + } + + @Override + public int hashCode() { + return Objects.hash(provider, ciphers, protocols); + } + + public static Loader loader(final Settings sslConfigSettings) { + return new Loader(sslConfigSettings); + } + + public static final class Loader { + + private final static Logger LOGGER = LogManager.getLogger(SslParameters.class); + + private final Settings sslConfigSettings; + + public Loader(final Settings sslConfigSettings) { + this.sslConfigSettings = sslConfigSettings; + } + + private SslProvider provider(final Settings settings) { + final var useOpenSslIfAvailable = settings.getAsBoolean(ENABLE_OPENSSL_IF_AVAILABLE, true); + if (OPENSSL_AVAILABLE && useOpenSslIfAvailable) { + return SslProvider.OPENSSL; + } else { + return SslProvider.JDK; + } + } + + private boolean validateCertDNsOnReload(final Settings settings) { + return 
settings.getAsBoolean(ENFORCE_CERT_RELOAD_DN_VERIFICATION, true); + } + + private List protocols(final SslProvider provider, final Settings settings, boolean http) { + final var allowedProtocols = settings.getAsList(ENABLED_PROTOCOLS, List.of(ALLOWED_SSL_PROTOCOLS)); + if (provider == SslProvider.OPENSSL) { + final String[] supportedProtocols; + if (OpenSsl.version() > OPENSSL_1_1_1_BETA_9) { + supportedProtocols = http ? ALLOWED_OPENSSL_HTTP_PROTOCOLS : ALLOWED_OPENSSL_TRANSPORT_PROTOCOLS; + } else { + supportedProtocols = http + ? ALLOWED_OPENSSL_HTTP_PROTOCOLS_PRIOR_OPENSSL_1_1_1_BETA_9 + : ALLOWED_OPENSSL_TRANSPORT_PROTOCOLS_PRIOR_OPENSSL_1_1_1_BETA_9; + } + return openSslProtocols(allowedProtocols, supportedProtocols); + } else { + return jdkProtocols(allowedProtocols); + } + } + + private List openSslProtocols(final List allowedSslProtocols, final String... supportedProtocols) { + LOGGER.debug("OpenSSL supports the following {} protocols {}", supportedProtocols.length, supportedProtocols); + return Stream.of(supportedProtocols).filter(allowedSslProtocols::contains).collect(Collectors.toList()); + } + + private List jdkProtocols(final List allowedSslProtocols) { + try { + final var supportedProtocols = SSLContext.getDefault().getDefaultSSLParameters().getProtocols(); + LOGGER.debug("JVM supports the following {} protocols {}", supportedProtocols.length, supportedProtocols); + return Stream.of(supportedProtocols).filter(allowedSslProtocols::contains).collect(Collectors.toList()); + } catch (final NoSuchAlgorithmException e) { + throw new OpenSearchException("Unable to determine supported protocols", e); + } + } + + private List ciphers(final SslProvider provider, final Settings settings) { + final var allowed = settings.getAsList(ENABLED_CIPHERS, List.of(ALLOWED_SSL_CIPHERS)); + final Stream allowedCiphers; + if (provider == SslProvider.OPENSSL) { + LOGGER.debug( + "OpenSSL {} supports the following ciphers (java-style) {}", + OpenSsl.versionString(), + 
OpenSsl.availableJavaCipherSuites() + ); + LOGGER.debug( + "OpenSSL {} supports the following ciphers (openssl-style) {}", + OpenSsl.versionString(), + OpenSsl.availableOpenSslCipherSuites() + ); + allowedCiphers = allowed.stream().filter(OpenSsl::isCipherSuiteAvailable); + } else { + try { + final var supportedCiphers = SSLContext.getDefault().getDefaultSSLParameters().getCipherSuites(); + LOGGER.debug("JVM supports the following {} ciphers {}", supportedCiphers.length, supportedCiphers); + allowedCiphers = Stream.of(supportedCiphers).filter(allowed::contains); + } catch (final NoSuchAlgorithmException e) { + throw new OpenSearchException("Unable to determine ciphers protocols", e); + } + } + return allowedCiphers.sorted(String::compareTo).collect(Collectors.toList()); + } + + public SslParameters load(final boolean http) { + final var clientAuth = http + ? ClientAuth.valueOf(sslConfigSettings.get(CLIENT_AUTH_MODE, ClientAuth.OPTIONAL.name()).toUpperCase(Locale.ROOT)) + : ClientAuth.REQUIRE; + + final var provider = provider(sslConfigSettings); + final var sslParameters = new SslParameters( + provider, + clientAuth, + protocols(provider, sslConfigSettings, http), + ciphers(provider, sslConfigSettings), + validateCertDNsOnReload(sslConfigSettings) + ); + if (sslParameters.allowedProtocols().isEmpty()) { + throw new OpenSearchSecurityException("No ssl protocols for " + (http ? "HTTP" : "Transport") + " layer"); + } + if (sslParameters.allowedCiphers().isEmpty()) { + throw new OpenSearchSecurityException("No valid cipher suites for " + (http ? 
"HTTP" : "Transport") + " layer"); + } + return sslParameters; + } + + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/config/TrustStoreConfiguration.java b/src/main/java/org/opensearch/security/ssl/config/TrustStoreConfiguration.java new file mode 100644 index 0000000000..4965aa3216 --- /dev/null +++ b/src/main/java/org/opensearch/security/ssl/config/TrustStoreConfiguration.java @@ -0,0 +1,185 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.security.ssl.config; + +import java.nio.file.Path; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.security.cert.X509Certificate; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.net.ssl.TrustManagerFactory; + +import com.google.common.collect.ImmutableList; + +import org.opensearch.OpenSearchException; + +public interface TrustStoreConfiguration { + + TrustStoreConfiguration EMPTY_CONFIGURATION = new TrustStoreConfiguration() { + @Override + public Path file() { + return null; + } + + @Override + public List loadCertificates() { + return List.of(); + } + + @Override + public KeyStore createTrustStore() { + return null; + } + + @Override + public TrustManagerFactory createTrustManagerFactory(boolean validateCertificates) { + return null; + } + }; + + Path file(); + + List loadCertificates(); + + default TrustManagerFactory createTrustManagerFactory(boolean validateCertificates) { + final var trustStore = createTrustStore(); + if (validateCertificates) { + KeyStoreUtils.validateKeyStoreCertificates(trustStore); + } + return 
buildTrustManagerFactory(trustStore); + } + + default TrustManagerFactory buildTrustManagerFactory(final KeyStore keyStore) { + try { + final var trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(keyStore); + return trustManagerFactory; + } catch (GeneralSecurityException e) { + throw new OpenSearchException("Couldn't initialize TrustManagerFactory", e); + } + } + + KeyStore createTrustStore(); + + final class JdkTrustStoreConfiguration implements TrustStoreConfiguration { + + private final Path path; + + private final String type; + + private final String alias; + + private final char[] password; + + public JdkTrustStoreConfiguration(final Path path, final String type, final String alias, final char[] password) { + this.path = path; + this.type = type; + this.alias = alias; + this.password = password; + } + + @Override + public List loadCertificates() { + final var keyStore = KeyStoreUtils.loadKeyStore(path, type, password); + final var listBuilder = ImmutableList.builder(); + try { + if (alias != null) { + listBuilder.add(new Certificate((X509Certificate) keyStore.getCertificate(alias), type, alias, false)); + } else { + for (final var a : Collections.list(keyStore.aliases())) { + if (!keyStore.isCertificateEntry(a)) continue; + final var c = keyStore.getCertificate(a); + if (c instanceof X509Certificate) { + listBuilder.add(new Certificate((X509Certificate) c, type, a, false)); + } + } + } + final var list = listBuilder.build(); + if (list.isEmpty()) { + throw new OpenSearchException("The file " + path + " does not contain any certificates"); + } + return listBuilder.build(); + } catch (GeneralSecurityException e) { + throw new OpenSearchException("Couldn't load certificates from file " + path, e); + } + } + + @Override + public Path file() { + return path; + } + + @Override + public KeyStore createTrustStore() { + return KeyStoreUtils.loadTrustStore(path, type, alias, password); + } + + 
@Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + JdkTrustStoreConfiguration that = (JdkTrustStoreConfiguration) o; + return Objects.equals(path, that.path) + && Objects.equals(type, that.type) + && Objects.equals(alias, that.alias) + && Objects.deepEquals(password, that.password); + } + + @Override + public int hashCode() { + return Objects.hash(path, type, alias, Arrays.hashCode(password)); + } + } + + final class PemTrustStoreConfiguration implements TrustStoreConfiguration { + + private final Path path; + + public PemTrustStoreConfiguration(final Path path) { + this.path = path; + } + + @Override + public List loadCertificates() { + return Stream.of(KeyStoreUtils.x509Certificates(path)).map(c -> new Certificate(c, false)).collect(Collectors.toList()); + } + + @Override + public Path file() { + return path; + } + + @Override + public KeyStore createTrustStore() { + return KeyStoreUtils.newTrustStoreFromPem(path); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PemTrustStoreConfiguration that = (PemTrustStoreConfiguration) o; + return Objects.equals(path, that.path); + } + + @Override + public int hashCode() { + return Objects.hashCode(path); + } + } + +} diff --git a/src/main/java/org/opensearch/security/ssl/rest/SecuritySSLInfoAction.java b/src/main/java/org/opensearch/security/ssl/rest/SecuritySSLInfoAction.java index b9f9e949ec..203a0c7965 100644 --- a/src/main/java/org/opensearch/security/ssl/rest/SecuritySSLInfoAction.java +++ b/src/main/java/org/opensearch/security/ssl/rest/SecuritySSLInfoAction.java @@ -35,11 +35,13 @@ import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.BytesRestResponse; import org.opensearch.rest.RestChannel; -import org.opensearch.rest.RestController; import org.opensearch.rest.RestRequest; import 
org.opensearch.rest.RestRequest.Method; import org.opensearch.security.filter.SecurityRequestFactory; -import org.opensearch.security.ssl.SecurityKeyStore; +import org.opensearch.security.ssl.SslConfiguration; +import org.opensearch.security.ssl.SslSettingsManager; +import org.opensearch.security.ssl.config.CertType; +import org.opensearch.security.ssl.config.SslParameters; import org.opensearch.security.ssl.transport.PrincipalExtractor; import org.opensearch.security.ssl.util.SSLRequestHelper; import org.opensearch.security.ssl.util.SSLRequestHelper.SSLInfo; @@ -50,7 +52,7 @@ public class SecuritySSLInfoAction extends BaseRestHandler { private static final List routes = Collections.singletonList(new Route(Method.GET, "/_opendistro/_security/sslinfo")); private final Logger log = LogManager.getLogger(this.getClass()); - private final SecurityKeyStore sks; + private final SslSettingsManager sslSettingsManager; final PrincipalExtractor principalExtractor; private final Path configPath; private final Settings settings; @@ -58,13 +60,12 @@ public class SecuritySSLInfoAction extends BaseRestHandler { public SecuritySSLInfoAction( final Settings settings, final Path configPath, - final RestController controller, - final SecurityKeyStore sks, + final SslSettingsManager sslSettingsManager, final PrincipalExtractor principalExtractor ) { super(); this.settings = settings; - this.sks = sks; + this.sslSettingsManager = sslSettingsManager; this.principalExtractor = principalExtractor; this.configPath = configPath; } @@ -103,13 +104,15 @@ public void accept(RestChannel channel) throws Exception { if (showDn == Boolean.TRUE) { builder.field( "peer_certificates_list", - certs == null ? null : Arrays.stream(certs).map(c -> c.getSubjectDN().getName()).collect(Collectors.toList()) + certs == null + ? null + : Arrays.stream(certs).map(c -> c.getSubjectX500Principal().getName()).collect(Collectors.toList()) ); builder.field( "local_certificates_list", localCerts == null ? 
null - : Arrays.stream(localCerts).map(c -> c.getSubjectDN().getName()).collect(Collectors.toList()) + : Arrays.stream(localCerts).map(c -> c.getSubjectX500Principal().getName()).collect(Collectors.toList()) ); } @@ -122,9 +125,27 @@ public void accept(RestChannel channel) throws Exception { builder.field("ssl_openssl_non_available_cause", openSslUnavailCause == null ? "" : openSslUnavailCause.toString()); builder.field("ssl_openssl_supports_key_manager_factory", OpenSsl.supportsKeyManagerFactory()); builder.field("ssl_openssl_supports_hostname_validation", OpenSsl.supportsHostnameValidation()); - builder.field("ssl_provider_http", sks.getHTTPProviderName()); - builder.field("ssl_provider_transport_server", sks.getTransportServerProviderName()); - builder.field("ssl_provider_transport_client", sks.getTransportClientProviderName()); + builder.field( + "ssl_provider_http", + sslSettingsManager.sslConfiguration(CertType.HTTP) + .map(SslConfiguration::sslParameters) + .map(SslParameters::provider) + .orElse(null) + ); + builder.field( + "ssl_provider_transport_server", + sslSettingsManager.sslConfiguration(CertType.TRANSPORT) + .map(SslConfiguration::sslParameters) + .map(SslParameters::provider) + .orElse(null) + ); + builder.field( + "ssl_provider_transport_client", + sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT) + .map(SslConfiguration::sslParameters) + .map(SslParameters::provider) + .orElse(null) + ); builder.endObject(); response = new BytesRestResponse(RestStatus.OK, builder); diff --git a/src/main/java/org/opensearch/security/ssl/util/ExceptionUtils.java b/src/main/java/org/opensearch/security/ssl/util/ExceptionUtils.java index 83982239f0..4683075f1d 100644 --- a/src/main/java/org/opensearch/security/ssl/util/ExceptionUtils.java +++ b/src/main/java/org/opensearch/security/ssl/util/ExceptionUtils.java @@ -76,7 +76,13 @@ public static OpenSearchException createJwkCreationException(Throwable cause) { return new OpenSearchException("An error 
occurred during the creation of Jwk: {}", cause, cause.getMessage()); } - public static OpenSearchException createTransportClientNoLongerSupportedException() { - return new OpenSearchException("Transport client authentication no longer supported."); + public static OpenSearchException clusterWrongNodeCertConfigException(String sslPrincipal) { + return new OpenSearchException( + "Node presenting certificate with SSL Principal {" + + sslPrincipal + + "} could" + + " not securely connect to the cluster. Please ensure the principal is correct and present in the" + + " nodes_dn list." + ); } } diff --git a/src/main/java/org/opensearch/security/ssl/util/SSLConfigConstants.java b/src/main/java/org/opensearch/security/ssl/util/SSLConfigConstants.java index a3b9348496..0a67e1a520 100644 --- a/src/main/java/org/opensearch/security/ssl/util/SSLConfigConstants.java +++ b/src/main/java/org/opensearch/security/ssl/util/SSLConfigConstants.java @@ -22,9 +22,55 @@ import java.util.List; import org.opensearch.common.settings.Settings; +import org.opensearch.security.ssl.OpenSearchSecuritySSLPlugin; + +import io.netty.handler.ssl.OpenSsl; public final class SSLConfigConstants { + public static final String SSL_PREFIX = "plugins.security.ssl."; + + public static final String HTTP_SETTINGS = "http"; + + public static final String TRANSPORT_SETTINGS = "transport"; + + public static final String SSL_HTTP_PREFIX = SSL_PREFIX + HTTP_SETTINGS + "."; + + public static final String SSL_TRANSPORT_PREFIX = SSL_PREFIX + TRANSPORT_SETTINGS + "."; + + public static final String SSL_TRANSPORT_SERVER_EXTENDED_PREFIX = "server."; + + public static final String SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX = "client."; + + public static final String SSL_TRANSPORT_CLIENT_PREFIX = SSL_PREFIX + TRANSPORT_SETTINGS + SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX; + + public static final String ENABLED = "enabled"; + + public static final String CLIENT_AUTH_MODE = "clientauth_mode"; + + public static final String 
ENFORCE_CERT_RELOAD_DN_VERIFICATION = "enforce_cert_reload_dn_verification"; + + public static final String KEYSTORE_TYPE = "keystore_type"; + public static final String KEYSTORE_ALIAS = "keystore_alias"; + public static final String KEYSTORE_FILEPATH = "keystore_filepath"; + public static final String KEYSTORE_PASSWORD = "keystore_password"; + public static final String KEYSTORE_KEY_PASSWORD = "keystore_keypassword"; + + public static final String TRUSTSTORE_ALIAS = "truststore_alias"; + public static final String TRUSTSTORE_FILEPATH = "truststore_filepath"; + public static final String TRUSTSTORE_TYPE = "truststore_type"; + public static final String TRUSTSTORE_PASSWORD = "truststore_password"; + + public static final String PEM_KEY_FILEPATH = "pemkey_filepath"; + public static final String PEM_CERT_FILEPATH = "pemcert_filepath"; + public static final String PEM_TRUSTED_CAS_FILEPATH = "pemtrustedcas_filepath"; + public static final String EXTENDED_KEY_USAGE_ENABLED = "extended_key_usage_enabled"; + + public static final String ENABLE_OPENSSL_IF_AVAILABLE = "enable_openssl_if_available"; + public static final String ENABLED_PROTOCOLS = "enabled_protocols"; + public static final String ENABLED_CIPHERS = "enabled_ciphers"; + public static final String PEM_KEY_PASSWORD = "pemkey_password"; + public static final String SECURITY_SSL_HTTP_ENABLE_OPENSSL_IF_AVAILABLE = "plugins.security.ssl.http.enable_openssl_if_available"; public static final String SECURITY_SSL_HTTP_ENABLED = "plugins.security.ssl.http.enabled"; public static final boolean SECURITY_SSL_HTTP_ENABLED_DEFAULT = false; @@ -38,6 +84,8 @@ public final class SSLConfigConstants { public static final String SECURITY_SSL_HTTP_TRUSTSTORE_ALIAS = "plugins.security.ssl.http.truststore_alias"; public static final String SECURITY_SSL_HTTP_TRUSTSTORE_FILEPATH = "plugins.security.ssl.http.truststore_filepath"; public static final String SECURITY_SSL_HTTP_TRUSTSTORE_TYPE = "plugins.security.ssl.http.truststore_type"; + 
public static final String SECURITY_SSL_HTTP_ENFORCE_CERT_RELOAD_DN_VERIFICATION = "plugins.security.ssl.http." + + ENFORCE_CERT_RELOAD_DN_VERIFICATION; public static final String SECURITY_SSL_TRANSPORT_ENABLE_OPENSSL_IF_AVAILABLE = "plugins.security.ssl.transport.enable_openssl_if_available"; public static final String SECURITY_SSL_TRANSPORT_ENABLED = "plugins.security.ssl.transport.enabled"; @@ -47,6 +95,8 @@ public final class SSLConfigConstants { public static final String SECURITY_SSL_TRANSPORT_ENFORCE_HOSTNAME_VERIFICATION_RESOLVE_HOST_NAME = "plugins.security.ssl.transport.resolve_hostname"; + public static final String SECURITY_SSL_TRANSPORT_ENFORCE_CERT_RELOAD_DN_VERIFICATION = "plugins.security.ssl.transport." + + ENFORCE_CERT_RELOAD_DN_VERIFICATION; public static final String SECURITY_SSL_TRANSPORT_KEYSTORE_ALIAS = "plugins.security.ssl.transport.keystore_alias"; public static final String SECURITY_SSL_TRANSPORT_SERVER_KEYSTORE_ALIAS = "plugins.security.ssl.transport.server.keystore_alias"; public static final String SECURITY_SSL_TRANSPORT_CLIENT_KEYSTORE_ALIAS = "plugins.security.ssl.transport.client.keystore_alias"; @@ -99,7 +149,19 @@ public final class SSLConfigConstants { public static final String JDK_TLS_REJECT_CLIENT_INITIATED_RENEGOTIATION = "jdk.tls.rejectClientInitiatedRenegotiation"; - private static final String[] _SECURE_SSL_PROTOCOLS = { "TLSv1.3", "TLSv1.2", "TLSv1.1" }; + public static final Long OPENSSL_1_1_1_BETA_9 = 0x10101009L; + + public static final String[] ALLOWED_SSL_PROTOCOLS = { "TLSv1.3", "TLSv1.2", "TLSv1.1" }; + + public static final String[] ALLOWED_OPENSSL_HTTP_PROTOCOLS = ALLOWED_SSL_PROTOCOLS; + + public static final String[] ALLOWED_OPENSSL_HTTP_PROTOCOLS_PRIOR_OPENSSL_1_1_1_BETA_9 = { "TLSv1.2", "TLSv1.1", "TLSv1" }; + + public static final String[] ALLOWED_OPENSSL_TRANSPORT_PROTOCOLS = ALLOWED_SSL_PROTOCOLS; + + public static final String[] ALLOWED_OPENSSL_TRANSPORT_PROTOCOLS_PRIOR_OPENSSL_1_1_1_BETA_9 = { "TLSv1.2", 
"TLSv1.1" }; + + public static final boolean OPENSSL_AVAILABLE = OpenSearchSecuritySSLPlugin.OPENSSL_SUPPORTED && OpenSsl.isAvailable(); public static String[] getSecureSSLProtocols(Settings settings, boolean http) { List configuredProtocols = null; @@ -116,11 +178,11 @@ public static String[] getSecureSSLProtocols(Settings settings, boolean http) { return configuredProtocols.toArray(new String[0]); } - return _SECURE_SSL_PROTOCOLS.clone(); + return ALLOWED_SSL_PROTOCOLS.clone(); } // @formatter:off - private static final String[] _SECURE_SSL_CIPHERS = { + public static final String[] ALLOWED_SSL_CIPHERS = { // TLS__WITH_ // Example (including unsafe ones) @@ -249,7 +311,7 @@ public static List getSecureSSLCiphers(Settings settings, boolean http) return configuredCiphers; } - return Collections.unmodifiableList(Arrays.asList(_SECURE_SSL_CIPHERS)); + return Collections.unmodifiableList(Arrays.asList(ALLOWED_SSL_CIPHERS)); } private SSLConfigConstants() { diff --git a/src/main/java/org/opensearch/security/support/MapUtils.java b/src/main/java/org/opensearch/security/support/MapUtils.java deleted file mode 100644 index f530917824..0000000000 --- a/src/main/java/org/opensearch/security/support/MapUtils.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright 2015-2018 _floragunn_ GmbH - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.support; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -public class MapUtils { - - public static void deepTraverseMap(final Map map, final Callback cb) { - deepTraverseMap(map, cb, null); - } - - private static void deepTraverseMap(final Map map, final Callback cb, final List stack) { - final List localStack; - if (stack == null) { - localStack = new ArrayList(30); - } else { - localStack = stack; - } - for (Map.Entry entry : map.entrySet()) { - if (entry.getValue() != null && entry.getValue() instanceof Map) { - @SuppressWarnings("unchecked") - final Map inner = (Map) entry.getValue(); - localStack.add(entry.getKey()); - deepTraverseMap(inner, cb, localStack); - if (!localStack.isEmpty()) { - localStack.remove(localStack.size() - 1); - } - } else { - cb.call(entry.getKey(), map, Collections.unmodifiableList(localStack)); - } - } - } - - public static interface Callback { - public void call(String key, Map map, List stack); - } -} diff --git a/src/main/java/org/opensearch/security/support/SecurityUtils.java b/src/main/java/org/opensearch/security/support/SecurityUtils.java index 1df5f23637..5686f0076e 100644 --- a/src/main/java/org/opensearch/security/support/SecurityUtils.java +++ b/src/main/java/org/opensearch/security/support/SecurityUtils.java @@ -29,8 +29,6 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Locale; -import java.util.Map; -import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -68,25 +66,6 @@ private static Locale forEN() { return Locale.getDefault(); } - public static 
String evalMap(final Map> map, final String index) { - - if (map == null) { - return null; - } - - // TODO: check what to do with _all - /*if (map.get(index) != null) { - return index; - } else if (map.get("*") != null) { - return "*"; - } - if (map.get("_all") != null) { - return "_all"; - }*/ - - return map.keySet().stream().filter(key -> WildcardMatcher.from(key).test(index)).findAny().orElse(null); - } - public static String replaceEnvVars(String in, Settings settings) { if (in == null || in.isEmpty()) { return in; @@ -96,16 +75,16 @@ public static String replaceEnvVars(String in, Settings settings) { return in; } - return replaceEnvVarsBC(replaceEnvVarsNonBC(replaceEnvVarsBase64(in))); + return replaceEnvVarsBC(replaceEnvVarsNonBC(replaceEnvVarsBase64(in, settings), settings), settings); } - private static String replaceEnvVarsNonBC(String in) { + private static String replaceEnvVarsNonBC(String in, Settings settings) { // ${env.MY_ENV_VAR} // ${env.MY_ENV_VAR:-default} Matcher matcher = ENV_PATTERN.matcher(in); StringBuffer sb = new StringBuffer(); while (matcher.find()) { - final String replacement = resolveEnvVar(matcher.group(1), matcher.group(2), false); + final String replacement = resolveEnvVar(matcher.group(1), matcher.group(2), false, settings); if (replacement != null) { matcher.appendReplacement(sb, Matcher.quoteReplacement(replacement)); } @@ -114,13 +93,13 @@ private static String replaceEnvVarsNonBC(String in) { return sb.toString(); } - private static String replaceEnvVarsBC(String in) { + private static String replaceEnvVarsBC(String in, Settings settings) { // ${envbc.MY_ENV_VAR} // ${envbc.MY_ENV_VAR:-default} Matcher matcher = ENVBC_PATTERN.matcher(in); StringBuffer sb = new StringBuffer(); while (matcher.find()) { - final String replacement = resolveEnvVar(matcher.group(1), matcher.group(2), true); + final String replacement = resolveEnvVar(matcher.group(1), matcher.group(2), true, settings); if (replacement != null) { 
matcher.appendReplacement(sb, Matcher.quoteReplacement(replacement)); } @@ -129,13 +108,13 @@ private static String replaceEnvVarsBC(String in) { return sb.toString(); } - private static String replaceEnvVarsBase64(String in) { + private static String replaceEnvVarsBase64(String in, Settings settings) { // ${envbc.MY_ENV_VAR} // ${envbc.MY_ENV_VAR:-default} Matcher matcher = ENVBASE64_PATTERN.matcher(in); StringBuffer sb = new StringBuffer(); while (matcher.find()) { - final String replacement = resolveEnvVar(matcher.group(1), matcher.group(2), false); + final String replacement = resolveEnvVar(matcher.group(1), matcher.group(2), false, settings); if (replacement != null) { matcher.appendReplacement( sb, @@ -149,16 +128,16 @@ private static String replaceEnvVarsBase64(String in) { // ${env.MY_ENV_VAR} // ${env.MY_ENV_VAR:-default} - private static String resolveEnvVar(String envVarName, String mode, boolean bc) { + private static String resolveEnvVar(String envVarName, String mode, boolean bc, Settings settings) { final String envVarValue = System.getenv(envVarName); if (envVarValue == null || envVarValue.isEmpty()) { if (mode != null && mode.startsWith(":-") && mode.length() > 2) { - return bc ? Hasher.hash(mode.substring(2).toCharArray()) : mode.substring(2); + return bc ? Hasher.hash(mode.substring(2).toCharArray(), settings) : mode.substring(2); } else { return null; } } else { - return bc ? Hasher.hash(envVarValue.toCharArray()) : envVarValue; + return bc ? 
Hasher.hash(envVarValue.toCharArray(), settings) : envVarValue; } } } diff --git a/src/main/java/org/opensearch/security/support/WildcardMatcher.java b/src/main/java/org/opensearch/security/support/WildcardMatcher.java index d811a73730..537e2d473c 100644 --- a/src/main/java/org/opensearch/security/support/WildcardMatcher.java +++ b/src/main/java/org/opensearch/security/support/WildcardMatcher.java @@ -28,6 +28,7 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.Optional; @@ -282,6 +283,55 @@ public Optional findFirst(final String candidate) { return Optional.ofNullable(test(candidate) ? this : null); } + public Iterable iterateMatching(Iterable candidates) { + return iterateMatching(candidates, Function.identity()); + } + + public Iterable iterateMatching(Iterable candidates, Function toStringFunction) { + return new Iterable() { + + @Override + public Iterator iterator() { + Iterator delegate = candidates.iterator(); + + return new Iterator() { + private E next; + + @Override + public boolean hasNext() { + if (next == null) { + init(); + } + + return next != null; + } + + @Override + public E next() { + if (next == null) { + init(); + } + + E result = next; + next = null; + return result; + } + + private void init() { + while (delegate.hasNext()) { + E candidate = delegate.next(); + + if (test(toStringFunction.apply(candidate))) { + next = candidate; + break; + } + } + } + }; + } + }; + } + public static List matchers(Collection patterns) { return patterns.stream().map(p -> WildcardMatcher.from(p, true)).collect(Collectors.toList()); } @@ -294,6 +344,10 @@ public static List getAllMatchingPatterns(final Collection p.matchAny(candidates)).map(Objects::toString).collect(Collectors.toList()); } + public static boolean isExact(String pattern) { + return pattern == null || !(pattern.contains("*") || pattern.contains("?") || (pattern.startsWith("/") && 
pattern.endsWith("/"))); + } + // // --- Implementation specializations --- // diff --git a/src/main/java/org/opensearch/security/tools/SecurityAdmin.java b/src/main/java/org/opensearch/security/tools/SecurityAdmin.java index 6080224c36..df9b004d15 100644 --- a/src/main/java/org/opensearch/security/tools/SecurityAdmin.java +++ b/src/main/java/org/opensearch/security/tools/SecurityAdmin.java @@ -204,7 +204,7 @@ public static int execute(final String[] args) throws Exception { .longOpt("truststore-type") .hasArg() .argName("type") - .desc("JKS or PKCS12, if not given we use the file extension to dectect the type") + .desc("JKS or PKCS12, if not given we use the file extension to detect the type") .build() ); options.addOption( @@ -212,7 +212,7 @@ public static int execute(final String[] args) throws Exception { .longOpt("keystore-type") .hasArg() .argName("type") - .desc("JKS or PKCS12, if not given we use the file extension to dectect the type") + .desc("JKS or PKCS12, if not given we use the file extension to detect the type") .build() ); // CS-ENFORCE-SINGLE diff --git a/src/main/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurer.java b/src/main/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurer.java index d97fe2d1bc..572773095a 100644 --- a/src/main/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurer.java +++ b/src/main/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurer.java @@ -106,15 +106,30 @@ public void configureSecuritySettings() throws IOException { /** * Checks if security plugin is already configured. If so, the script execution will exit. 
*/ + @SuppressWarnings("unchecked") void checkIfSecurityPluginIsAlreadyConfigured() { - // Check if the configuration file contains the 'plugins.security' string + // Check if the configuration file contains security settings if (installer.OPENSEARCH_CONF_FILE != null && new File(installer.OPENSEARCH_CONF_FILE).exists()) { try (BufferedReader br = new BufferedReader(new FileReader(installer.OPENSEARCH_CONF_FILE, StandardCharsets.UTF_8))) { - String line; - while ((line = br.readLine()) != null) { - if (line.toLowerCase().contains("plugins.security")) { - System.out.println(installer.OPENSEARCH_CONF_FILE + " seems to be already configured for Security. Quit."); - System.exit(installer.skip_updates); + Yaml yaml = new Yaml(); + Map yamlData = yaml.load(br); + if (yamlData != null) { + // Check for flat keys + for (String key : yamlData.keySet()) { + if (key.startsWith("plugins.security")) { + System.out.println(installer.OPENSEARCH_CONF_FILE + " seems to be already configured for Security. Quit."); + System.exit(installer.skip_updates); + } + } + // Check for nested keys + if (yamlData.containsKey("plugins")) { + Map plugins = (Map) yamlData.get("plugins"); + for (String key : plugins.keySet()) { + if (key.startsWith("security")) { + System.out.println(installer.OPENSEARCH_CONF_FILE + " seems to be already configured for Security. 
Quit."); + System.exit(installer.skip_updates); + } + } } } } catch (IOException e) { diff --git a/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java b/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java index 9741014fda..7be544c9cd 100644 --- a/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java +++ b/src/main/java/org/opensearch/security/transport/SecurityInterceptor.java @@ -57,6 +57,7 @@ import org.opensearch.security.auditlog.AuditLog.Origin; import org.opensearch.security.auth.BackendRegistry; import org.opensearch.security.configuration.ClusterInfoHolder; +import org.opensearch.security.privileges.dlsfls.DlsFlsLegacyHeaders; import org.opensearch.security.ssl.SslExceptionHandler; import org.opensearch.security.ssl.transport.PrincipalExtractor; import org.opensearch.security.ssl.transport.SSLConfig; @@ -150,6 +151,7 @@ public void sendRequestDecorate( final String origCCSTransientDls = getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_CCS); final String origCCSTransientFls = getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_CCS); final String origCCSTransientMf = getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_CCS); + final DlsFlsLegacyHeaders dlsFlsLegacyHeaders = getThreadContext().getTransient(DlsFlsLegacyHeaders.TRANSIENT_HEADER); final boolean isDebugEnabled = log.isDebugEnabled(); @@ -183,6 +185,10 @@ public void sendRequestDecorate( ) ); + if (dlsFlsLegacyHeaders != null) { + dlsFlsLegacyHeaders.performHeaderDecoration(connection, request, headerMap); + } + if (OpenSearchSecurityPlugin.GuiceHolder.getRemoteClusterService().isCrossClusterSearchEnabled() && clusterInfoHolder.isInitialized() && (action.equals(ClusterSearchShardsAction.NAME) || action.equals(SearchAction.NAME)) diff --git a/src/main/java/org/opensearch/security/transport/SecurityRequestHandler.java 
b/src/main/java/org/opensearch/security/transport/SecurityRequestHandler.java index 5845c63672..18c0c21282 100644 --- a/src/main/java/org/opensearch/security/transport/SecurityRequestHandler.java +++ b/src/main/java/org/opensearch/security/transport/SecurityRequestHandler.java @@ -290,7 +290,7 @@ protected void messageReceivedDecorate( || HeaderHelper.isTrustedClusterRequest(getThreadContext()) || HeaderHelper.isExtensionRequest(getThreadContext()))) { // CS-ENFORCE-SINGLE - final OpenSearchException exception = ExceptionUtils.createTransportClientNoLongerSupportedException(); + final OpenSearchException exception = ExceptionUtils.clusterWrongNodeCertConfigException(principal); log.error(exception.toString()); transportChannel.sendResponse(exception); return; diff --git a/src/main/java/org/opensearch/security/util/KeyUtils.java b/src/main/java/org/opensearch/security/util/KeyUtils.java index 920cf198be..bb2abea795 100644 --- a/src/main/java/org/opensearch/security/util/KeyUtils.java +++ b/src/main/java/org/opensearch/security/util/KeyUtils.java @@ -52,8 +52,8 @@ public JwtParserBuilder run() { } else { try { PublicKey key = null; - - final String minimalKeyFormat = signingKey.replace("-----BEGIN PUBLIC KEY-----\n", "") + final String minimalKeyFormat = signingKey.replaceAll("\\r|\\n", "") + .replace("-----BEGIN PUBLIC KEY-----", "") .replace("-----END PUBLIC KEY-----", "") .trim(); final byte[] decoded = Base64.getDecoder().decode(minimalKeyFormat); diff --git a/src/test/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticatorTest.java b/src/test/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticatorTest.java index 4214e8ed06..96ee50bf6a 100644 --- a/src/test/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticatorTest.java +++ b/src/test/java/com/amazon/dlic/auth/http/jwt/HTTPJwtAuthenticatorTest.java @@ -18,9 +18,11 @@ import java.security.PrivateKey; import java.security.PublicKey; import java.security.SecureRandom; +import java.util.ArrayList; import 
java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.List; import java.util.Map; import javax.crypto.SecretKey; @@ -40,7 +42,9 @@ import io.jsonwebtoken.security.Keys; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -120,6 +124,67 @@ public void testInvalid() throws Exception { Assert.assertNull(credentials); } + @Test + public void testJwtAttributeParsing() throws Exception { + Map expectedAttributes = new HashMap<>(); + expectedAttributes.put("attr.jwt.sub", "Leonard McCoy"); + expectedAttributes.put("attr.jwt.list", "[\"a\",\"b\",\"c\"]"); + + String jwsToken = Jwts.builder() + .setSubject("Leonard McCoy") + .claim("list", List.of("a", "b", "c")) + .signWith(Keys.hmacShaKeyFor(secretKeyBytes), SignatureAlgorithm.HS512) + .compact(); + + Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKeyBytes)).build(); + + HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null); + Map headers = new HashMap(); + headers.put("Authorization", "Bearer " + jwsToken); + + AuthCredentials credentials = jwtAuth.extractCredentials( + new FakeRestRequest(headers, new HashMap()).asSecurityRequest(), + null + ); + + assertNotNull(credentials); + assertThat(credentials.getUsername(), is("Leonard McCoy")); + assertThat(credentials.getAttributes(), equalTo(expectedAttributes)); + } + + @Test + public void testJwtAttributeParsingMixedDataType() throws Exception { + Map expectedAttributes = new HashMap<>(); + expectedAttributes.put("attr.jwt.sub", "Leonard McCoy"); + expectedAttributes.put("attr.jwt.list", "[\"a\",1,null,2.0]"); + + List elements = new ArrayList<>(); + elements.add("a"); + elements.add(1); + elements.add(null); + elements.add(2.0); + String jwsToken = 
Jwts.builder() + .setSubject("Leonard McCoy") + .claim("list", elements) + .signWith(Keys.hmacShaKeyFor(secretKeyBytes), SignatureAlgorithm.HS512) + .compact(); + + Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKeyBytes)).build(); + + HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null); + Map headers = new HashMap(); + headers.put("Authorization", "Bearer " + jwsToken); + + AuthCredentials credentials = jwtAuth.extractCredentials( + new FakeRestRequest(headers, new HashMap()).asSecurityRequest(), + null + ); + + assertNotNull(credentials); + assertThat(credentials.getUsername(), is("Leonard McCoy")); + assertThat(credentials.getAttributes(), equalTo(expectedAttributes)); + } + /** Here is the original encoded jwt token generation with cxf library: * * String base64EncodedSecret = Base64.getEncoder().encodeToString(someSecret.getBytes(StandardCharsets.UTF_8)); @@ -389,7 +454,6 @@ public void testNbf() throws Exception { @Test public void testRS256() throws Exception { - KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA"); keyGen.initialize(2048); KeyPair pair = keyGen.generateKeyPair(); @@ -397,27 +461,61 @@ public void testRS256() throws Exception { PublicKey pub = pair.getPublic(); String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(priv, SignatureAlgorithm.RS256).compact(); - Settings settings = Settings.builder() - .put( - "signing_key", - "-----BEGIN PUBLIC KEY-----\n" + BaseEncoding.base64().encode(pub.getEncoded()) + "-----END PUBLIC KEY-----" - ) - .build(); + String signingKey = "-----BEGIN PUBLIC KEY-----\n" + BaseEncoding.base64().encode(pub.getEncoded()) + "-----END PUBLIC KEY-----"; + AuthCredentials creds = testJwtAuthenticationWithSigningKey(signingKey, jwsToken); - HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null); - Map headers = new HashMap(); - headers.put("Authorization", "Bearer " + jwsToken); + Assert.assertNotNull(creds); + 
assertThat(creds.getUsername(), is("Leonard McCoy")); + assertThat(creds.getBackendRoles().size(), is(0)); + } - AuthCredentials creds = jwtAuth.extractCredentials( - new FakeRestRequest(headers, new HashMap()).asSecurityRequest(), - null - ); + private static String formatKeyWithNewlines(String keyAsString) { + StringBuilder result = new StringBuilder(); + int lineLength = 64; + int length = keyAsString.length(); + + for (int i = 0; i < length; i += lineLength) { + if (i + lineLength < length) { + result.append(keyAsString, i, i + lineLength); + } else { + result.append(keyAsString.substring(i)); + } + result.append("\n"); + } + + return result.toString().trim(); + } + + @Test + public void testRS256WithNewlines() throws Exception { + KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA"); + keyGen.initialize(2048); + KeyPair pair = keyGen.generateKeyPair(); + PrivateKey priv = pair.getPrivate(); + PublicKey pub = pair.getPublic(); + + String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(priv, SignatureAlgorithm.RS256).compact(); + + String signingKey = "-----BEGIN PUBLIC KEY-----\n" + + formatKeyWithNewlines(BaseEncoding.base64().encode(pub.getEncoded())) + + "\n-----END PUBLIC KEY-----"; + AuthCredentials creds = testJwtAuthenticationWithSigningKey(signingKey, jwsToken); Assert.assertNotNull(creds); assertThat(creds.getUsername(), is("Leonard McCoy")); assertThat(creds.getBackendRoles().size(), is(0)); } + private AuthCredentials testJwtAuthenticationWithSigningKey(String signingKey, String jwsToken) throws NoSuchAlgorithmException { + Settings settings = Settings.builder().put("signing_key", signingKey).build(); + + HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null); + Map headers = new HashMap(); + headers.put("Authorization", "Bearer " + jwsToken); + + return jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap()).asSecurityRequest(), null); + } + @Test public void testES512() throws Exception { @@ 
-427,17 +525,10 @@ public void testES512() throws Exception { PrivateKey priv = pair.getPrivate(); PublicKey pub = pair.getPublic(); - Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(pub.getEncoded())).build(); + String signingKey = BaseEncoding.base64().encode(pub.getEncoded()); String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(priv, SignatureAlgorithm.ES512).compact(); - HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null); - Map headers = new HashMap(); - headers.put("Authorization", jwsToken); - - AuthCredentials creds = jwtAuth.extractCredentials( - new FakeRestRequest(headers, new HashMap()).asSecurityRequest(), - null - ); + AuthCredentials creds = testJwtAuthenticationWithSigningKey(signingKey, jwsToken); Assert.assertNotNull(creds); assertThat(creds.getUsername(), is("Leonard McCoy")); diff --git a/src/test/java/org/opensearch/security/IntegrationTests.java b/src/test/java/org/opensearch/security/IntegrationTests.java index 1f3cd2e3c8..6eeed4ef02 100644 --- a/src/test/java/org/opensearch/security/IntegrationTests.java +++ b/src/test/java/org/opensearch/security/IntegrationTests.java @@ -400,7 +400,7 @@ public void testRegexExcludes() throws Exception { @Test public void testMultiRoleSpan() throws Exception { - setup(); + setup(Settings.EMPTY, new DynamicSecurityConfig().setConfig("config_multirolespan.yml"), Settings.EMPTY); final RestHelper rh = nonSslRestHelper(); try (Client tc = getClient()) { @@ -411,24 +411,8 @@ public void testMultiRoleSpan() throws Exception { } HttpResponse res = rh.executeGetRequest("/mindex_1,mindex_2/_search", encodeBasicHeader("mindex12", "nagilum")); - assertThat(res.getStatusCode(), is(HttpStatus.SC_FORBIDDEN)); - Assert.assertFalse(res.getBody().contains("\"content\":1")); - Assert.assertFalse(res.getBody().contains("\"content\":2")); - - try (Client tc = getClient()) { - tc.index( - new IndexRequest(".opendistro_security").id("config") - 
.setRefreshPolicy(RefreshPolicy.IMMEDIATE) - .source("config", FileHelper.readYamlContent("config_multirolespan.yml")) - ).actionGet(); - - ConfigUpdateResponse cur = tc.execute(ConfigUpdateAction.INSTANCE, new ConfigUpdateRequest(new String[] { "config" })) - .actionGet(); - assertThat(cur.getNodes().size(), is(clusterInfo.numNodes)); - } - - res = rh.executeGetRequest("/mindex_1,mindex_2/_search", encodeBasicHeader("mindex12", "nagilum")); assertThat(res.getStatusCode(), is(HttpStatus.SC_OK)); + Assert.assertEquals(HttpStatus.SC_OK, res.getStatusCode()); Assert.assertTrue(res.getBody().contains("\"content\":1")); Assert.assertTrue(res.getBody().contains("\"content\":2")); diff --git a/src/test/java/org/opensearch/security/SystemIntegratorsTests.java b/src/test/java/org/opensearch/security/SystemIntegratorsTests.java index ec4f4f8ddb..896f477ca6 100644 --- a/src/test/java/org/opensearch/security/SystemIntegratorsTests.java +++ b/src/test/java/org/opensearch/security/SystemIntegratorsTests.java @@ -289,7 +289,7 @@ public void testInjectedAdminUser() throws Exception { Assert.assertTrue(resc.getBody().contains("\"_id\" : \"config\"")); Assert.assertTrue(resc.getBody().contains("\"_id\" : \"roles\"")); Assert.assertTrue(resc.getBody().contains("\"_id\" : \"internalusers\"")); - Assert.assertTrue(resc.getBody().contains("\"total\" : 5")); + Assert.assertTrue(resc.getBody().contains("\"total\" : 1")); resc = rh.executeGetRequest( ".opendistro_security/_search?pretty", diff --git a/src/test/java/org/opensearch/security/UtilTests.java b/src/test/java/org/opensearch/security/UtilTests.java index 2445b560df..fcfd26576f 100644 --- a/src/test/java/org/opensearch/security/UtilTests.java +++ b/src/test/java/org/opensearch/security/UtilTests.java @@ -155,6 +155,44 @@ public void testEnvReplace() { assertTrue(checked); } + @Test + public void testEnvReplacePBKDF2() { + Settings settings = Settings.builder().put(ConfigConstants.SECURITY_PASSWORD_HASHING_ALGORITHM, 
ConfigConstants.PBKDF2).build(); + final PasswordHasher passwordHasherPBKDF2 = PasswordHasherFactory.createPasswordHasher(settings); + assertThat(SecurityUtils.replaceEnvVars("abv${env.MYENV}xyz", settings), is("abv${env.MYENV}xyz")); + assertThat(SecurityUtils.replaceEnvVars("abv${envbc.MYENV}xyz", settings), is("abv${envbc.MYENV}xyz")); + assertThat(SecurityUtils.replaceEnvVars("abv${env.MYENV:-tTt}xyz", settings), is("abvtTtxyz")); + assertTrue(passwordHasherPBKDF2.check("tTt".toCharArray(), SecurityUtils.replaceEnvVars("${envbc.MYENV:-tTt}", settings))); + assertThat(SecurityUtils.replaceEnvVars("abv${env.MYENV:-tTt}xyz${env.MYENV:-xxx}", settings), is("abvtTtxyzxxx")); + assertTrue(SecurityUtils.replaceEnvVars("abv${env.MYENV:-tTt}xyz${envbc.MYENV:-xxx}", settings).startsWith("abvtTtxyz$3$")); + assertThat(SecurityUtils.replaceEnvVars("abv${env.MYENV:tTt}xyz", settings), is("abv${env.MYENV:tTt}xyz")); + assertThat(SecurityUtils.replaceEnvVars("abv${env.MYENV-tTt}xyz", settings), is("abv${env.MYENV-tTt}xyz")); + + Map env = System.getenv(); + assertTrue(env.size() > 0); + + boolean checked = false; + + for (String k : env.keySet()) { + String val = System.getenv().get(k); + if (val == null || val.isEmpty()) { + continue; + } + assertThat(SecurityUtils.replaceEnvVars("abv${env." + k + "}xyz", settings), is("abv" + val + "xyz")); + assertThat(SecurityUtils.replaceEnvVars("abv${" + k + "}xyz", settings), is("abv${" + k + "}xyz")); + assertThat(SecurityUtils.replaceEnvVars("abv${env." + k + ":-k182765ggh}xyz", settings), is("abv" + val + "xyz")); + assertThat( + SecurityUtils.replaceEnvVars("abv${env." + k + "}xyzabv${env." + k + "}xyz", settings), + is("abv" + val + "xyzabv" + val + "xyz") + ); + assertThat(SecurityUtils.replaceEnvVars("abv${env." + k + ":-k182765ggh}xyz", settings), is("abv" + val + "xyz")); + assertTrue(passwordHasherPBKDF2.check(val.toCharArray(), SecurityUtils.replaceEnvVars("${envbc." 
+ k + "}", settings))); + checked = true; + } + + assertTrue(checked); + } + @Test public void testNoEnvReplace() { Settings settings = Settings.builder().put(ConfigConstants.SECURITY_DISABLE_ENVVAR_REPLACEMENT, true).build(); diff --git a/src/test/java/org/opensearch/security/ccstest/CrossClusterSearchTests.java b/src/test/java/org/opensearch/security/ccstest/CrossClusterSearchTests.java index 32ab78dbdf..d6a427e581 100644 --- a/src/test/java/org/opensearch/security/ccstest/CrossClusterSearchTests.java +++ b/src/test/java/org/opensearch/security/ccstest/CrossClusterSearchTests.java @@ -1355,7 +1355,14 @@ public void testCcsWithDiffCertsWithNoNodesDnUpdate() throws Exception { String uri = "cross_cluster_two:twitter/_search?pretty"; HttpResponse ccs = rh1.executeGetRequest(uri, encodeBasicHeader("twitter", "nagilum")); assertThat(ccs.getStatusCode(), equalTo(HttpStatus.SC_INTERNAL_SERVER_ERROR)); - assertThat(ccs.getBody(), containsString("Transport client authentication no longer supported")); + assertThat( + ccs.getBody(), + containsString( + "Node presenting certificate with SSL Principal " + + "{CN=node-0.example.com,OU=SSL,O=Test,L=Test,C=DE} could not securely connect to the cluster. Please" + + " ensure the principal is correct and present in the nodes_dn list." 
+ ) + ); } @Test diff --git a/src/test/java/org/opensearch/security/http/OnBehalfOfAuthenticatorTest.java b/src/test/java/org/opensearch/security/http/OnBehalfOfAuthenticatorTest.java index 310acc4772..0220bd37af 100644 --- a/src/test/java/org/opensearch/security/http/OnBehalfOfAuthenticatorTest.java +++ b/src/test/java/org/opensearch/security/http/OnBehalfOfAuthenticatorTest.java @@ -267,7 +267,7 @@ public void testBearer() throws Exception { Map expectedAttributes = new HashMap<>(); expectedAttributes.put("attr.jwt.iss", "cluster_0"); expectedAttributes.put("attr.jwt.sub", "Leonard McCoy"); - expectedAttributes.put("attr.jwt.aud", "[ext_0]"); + expectedAttributes.put("attr.jwt.aud", "[\"ext_0\"]"); String jwsToken = Jwts.builder() .setIssuer(clusterName) diff --git a/src/test/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java b/src/test/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java deleted file mode 100644 index fba61d00ec..0000000000 --- a/src/test/java/org/opensearch/security/privileges/PrivilegesEvaluatorTest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.security.privileges; - -import org.apache.hc.core5.http.Header; -import org.apache.http.HttpStatus; -import org.junit.Before; -import org.junit.Test; - -import org.opensearch.common.settings.Settings; -import org.opensearch.security.test.DynamicSecurityConfig; -import org.opensearch.security.test.SingleClusterTest; -import org.opensearch.security.test.helper.rest.RestHelper; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.is; - -public class PrivilegesEvaluatorTest extends SingleClusterTest { - private static final Header NegativeLookaheadUserHeader = encodeBasicHeader("negative_lookahead_user", "negative_lookahead_user"); - private static final Header NegatedRegexUserHeader = encodeBasicHeader("negated_regex_user", "negated_regex_user"); - - @Before - public void setupSettingsIndexPattern() throws Exception { - Settings settings = Settings.builder().build(); - setup( - Settings.EMPTY, - new DynamicSecurityConfig().setSecurityRoles("roles_index_patterns.yml") - .setSecurityInternalUsers("internal_users_index_patterns.yml") - .setSecurityRolesMapping("roles_mapping_index_patterns.yml"), - settings, - true - ); - } - - @Test - public void testNegativeLookaheadPattern() throws Exception { - - RestHelper rh = nonSslRestHelper(); - RestHelper.HttpResponse response = rh.executeGetRequest("*/_search", NegativeLookaheadUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_FORBIDDEN)); - response = rh.executeGetRequest("r*/_search", NegativeLookaheadUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_OK)); - } - - @Test - public void testRegexPattern() throws Exception { - RestHelper rh = nonSslRestHelper(); - RestHelper.HttpResponse response = rh.executeGetRequest("*/_search", NegatedRegexUserHeader); - assertThat(response.getStatusCode(), is(HttpStatus.SC_FORBIDDEN)); - response = rh.executeGetRequest("r*/_search", NegatedRegexUserHeader); - 
assertThat(response.getStatusCode(), is(HttpStatus.SC_OK)); - } -} diff --git a/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java b/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java index c374a10c24..da35226d62 100644 --- a/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java +++ b/src/test/java/org/opensearch/security/privileges/RestLayerPrivilegesEvaluatorTest.java @@ -11,28 +11,33 @@ package org.opensearch.security.privileges; -import java.util.Collections; import java.util.Set; +import java.util.TreeMap; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.config.Configurator; -import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.opensearch.OpenSearchSecurityException; +import org.opensearch.cluster.ClusterState; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.metadata.Metadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.security.auditlog.NullAuditLog; import org.opensearch.security.securityconf.ConfigModel; -import org.opensearch.security.securityconf.SecurityRoles; +import org.opensearch.security.securityconf.DynamicConfigModel; +import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.user.User; -import org.opensearch.threadpool.ThreadPool; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; @@ -41,12 +46,7 @@ import static 
org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoInteractions; import static org.mockito.Mockito.when; import static org.mockito.Mockito.withSettings; @@ -56,11 +56,9 @@ public class RestLayerPrivilegesEvaluatorTest { @Mock(strictness = Mock.Strictness.LENIENT) private ClusterService clusterService; @Mock - private ThreadPool threadPool; - @Mock private ConfigModel configModel; - - private RestLayerPrivilegesEvaluator privilegesEvaluator; + @Mock + private DynamicConfigModel dynamicConfigModel; private static final User TEST_USER = new User("test_user"); @@ -71,16 +69,14 @@ private void setLoggingLevel(final Level level) { @Before public void setUp() { - when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY)); - when(clusterService.localNode()).thenReturn(mock(DiscoveryNode.class, withSettings().strictness(Strictness.LENIENT))); - privilegesEvaluator = new RestLayerPrivilegesEvaluator( - clusterService, - threadPool - ); - privilegesEvaluator.onConfigModelChanged(configModel); // Defaults to the mocked config model - verify(threadPool).getThreadContext(); // Called during construction of RestLayerPrivilegesEvaluator + when(configModel.mapSecurityRoles(TEST_USER, null)).thenReturn(Set.of("test_role")); setLoggingLevel(Level.DEBUG); // Enable debug logging scenarios for verification + ClusterState clusterState = mock(ClusterState.class); + when(clusterService.state()).thenReturn(clusterState); + Metadata metadata = mock(Metadata.class); + when(clusterState.metadata()).thenReturn(metadata); + when(metadata.getIndicesLookup()).thenReturn(new TreeMap<>()); } @After @@ -89,96 +85,89 @@ public void after() { } 
@Test - public void testEvaluate_Initialized_Success() { + public void testEvaluate_Initialized_Success() throws Exception { String action = "action"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(false); + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - any", CType.ROLES); + + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); assertThat(response.isAllowed(), equalTo(false)); assertThat(response.getMissingPrivileges(), equalTo(Set.of(action))); - assertThat(response.getResolvedSecurityRoles(), Matchers.empty()); - verify(configModel, times(3)).getSecurityRoles(); } @Test public void testEvaluate_NotInitialized_NullModel_ExceptionThrown() { - // Null out the config model - privilegesEvaluator.onConfigModelChanged(null); - final OpenSearchSecurityException exception = assertThrows( - OpenSearchSecurityException.class, - () -> privilegesEvaluator.evaluate(TEST_USER, null) - ); - assertThat(exception.getMessage(), equalTo("OpenSearch Security is not initialized.")); - verify(configModel, never()).getSecurityRoles(); - } - - @Test - public void testEvaluate_NotInitialized_NoSecurityRoles_ExceptionThrown() { + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(null); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); 
final OpenSearchSecurityException exception = assertThrows( OpenSearchSecurityException.class, - () -> privilegesEvaluator.evaluate(TEST_USER, null) + () -> restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", null) ); assertThat(exception.getMessage(), equalTo("OpenSearch Security is not initialized.")); - verify(configModel).getSecurityRoles(); } @Test - public void testMapRoles_ReturnsMappedRoles() { - final User user = mock(User.class); - final Set mappedRoles = Collections.singleton("role1"); - when(configModel.mapSecurityRoles(any(), any())).thenReturn(mappedRoles); - - final Set result = privilegesEvaluator.mapRoles(user, null); - - assertThat(result, equalTo(mappedRoles)); - verifyNoInteractions(user); - verify(configModel).mapSecurityRoles(user, null); - } - - @Test - public void testEvaluate_Successful_NewPermission() { + public void testEvaluate_Successful_NewPermission() throws Exception { String action = "hw:greet"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(true); - - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); - + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - hw:greet", CType.ROLES); + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); assertThat(response.allowed, equalTo(true)); - verify(securityRoles).impliesClusterPermissionPermission(action); } @Test - public void testEvaluate_Successful_LegacyPermission() 
{ + public void testEvaluate_Successful_LegacyPermission() throws Exception { String action = "cluster:admin/opensearch/hw/greet"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(true); - - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); - + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - cluster:admin/opensearch/hw/greet", CType.ROLES); + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); + PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); assertThat(response.allowed, equalTo(true)); - verify(securityRoles).impliesClusterPermissionPermission(action); - verify(configModel, times(3)).getSecurityRoles(); } @Test - public void testEvaluate_Unsuccessful() { + public void testEvaluate_Unsuccessful() throws Exception { String action = "action"; - SecurityRoles securityRoles = mock(SecurityRoles.class); - when(configModel.getSecurityRoles()).thenReturn(securityRoles); - when(configModel.getSecurityRoles().filter(Collections.emptySet())).thenReturn(securityRoles); - when(securityRoles.impliesClusterPermissionPermission(action)).thenReturn(false); + SecurityDynamicConfiguration roles = SecurityDynamicConfiguration.fromYaml("test_role:\n" + // + " cluster_permissions:\n" + // + " - other_action", CType.ROLES); + PrivilegesEvaluator privilegesEvaluator = createPrivilegesEvaluator(roles); + RestLayerPrivilegesEvaluator restPrivilegesEvaluator = new RestLayerPrivilegesEvaluator(privilegesEvaluator); + 
PrivilegesEvaluatorResponse response = restPrivilegesEvaluator.evaluate(TEST_USER, "route_name", Set.of(action)); + assertThat(response.allowed, equalTo(false)); + } - PrivilegesEvaluatorResponse response = privilegesEvaluator.evaluate(TEST_USER, Set.of(action)); + PrivilegesEvaluator createPrivilegesEvaluator(SecurityDynamicConfiguration roles) { + PrivilegesEvaluator privilegesEvaluator = new PrivilegesEvaluator( + clusterService, + () -> clusterService.state(), + null, + new ThreadContext(Settings.EMPTY), + null, + new IndexNameExpressionResolver(new ThreadContext(Settings.EMPTY)), + new NullAuditLog(), + Settings.EMPTY, + null, + null, + null, + null + ); + privilegesEvaluator.onConfigModelChanged(configModel); // Defaults to the mocked config model + privilegesEvaluator.onDynamicConfigModelChanged(dynamicConfigModel); - assertThat(response.allowed, equalTo(false)); - verify(securityRoles).impliesClusterPermissionPermission(action); + if (roles != null) { + privilegesEvaluator.updateConfiguration(SecurityDynamicConfiguration.empty(CType.ACTIONGROUPS), roles); + } + return privilegesEvaluator; } } diff --git a/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java b/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java index fa8a991db9..878033fd5c 100644 --- a/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java +++ b/src/test/java/org/opensearch/security/privileges/SystemIndexAccessEvaluatorTest.java @@ -11,13 +11,13 @@ package org.opensearch.security.privileges; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; +import java.util.Arrays; import java.util.List; import java.util.Set; +import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.fasterxml.jackson.core.JsonProcessingException; import org.apache.logging.log4j.Logger; import 
org.junit.After; import org.junit.Test; @@ -27,6 +27,7 @@ import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; +import org.opensearch.cluster.metadata.IndexAbstraction; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -34,10 +35,13 @@ import org.opensearch.security.auditlog.AuditLog; import org.opensearch.security.resolver.IndexResolverReplacer; import org.opensearch.security.resolver.IndexResolverReplacer.Resolved; -import org.opensearch.security.securityconf.ConfigModelV7; -import org.opensearch.security.securityconf.SecurityRoles; +import org.opensearch.security.securityconf.FlattenedActionGroups; +import org.opensearch.security.securityconf.impl.CType; +import org.opensearch.security.securityconf.impl.SecurityDynamicConfiguration; +import org.opensearch.security.securityconf.impl.v7.RoleV7; import org.opensearch.security.support.ConfigConstants; import org.opensearch.security.user.User; +import org.opensearch.security.util.MockIndexMetadataBuilder; import org.opensearch.tasks.Task; import org.mockito.Mock; @@ -46,9 +50,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; import static org.opensearch.security.support.ConfigConstants.SYSTEM_INDEX_PERMISSION; -import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoInteractions; @@ -81,12 +83,12 @@ public class SystemIndexAccessEvaluatorTest { private static final String TEST_INDEX = ".test"; private static final String SECURITY_INDEX = ConfigConstants.OPENDISTRO_SECURITY_DEFAULT_CONFIG_INDEX; - @Mock - SecurityRoles securityRoles; + ImmutableMap indexMetadata 
= MockIndexMetadataBuilder.indices(TEST_INDEX, TEST_SYSTEM_INDEX, SECURITY_INDEX) + .build(); User user; - IndexNameExpressionResolver indexNameExpressionResolver; + ActionPrivileges actionPrivileges; private ThreadContext createThreadContext() { return new ThreadContext(Settings.EMPTY); @@ -105,29 +107,29 @@ public void setup( ThreadContext threadContext = createThreadContext(); indexNameExpressionResolver = createIndexNameExpressionResolver(threadContext); - // create a security role - ConfigModelV7.IndexPattern ip = spy(new ConfigModelV7.IndexPattern(index)); - ConfigModelV7.SecurityRole.Builder _securityRole = new ConfigModelV7.SecurityRole.Builder("role_a"); - ip.addPerm(createIndexPatternWithSystemIndexPermission ? Set.of("*", SYSTEM_INDEX_PERMISSION) : Set.of("*")); - _securityRole.addIndexPattern(ip); - _securityRole.addClusterPerms(List.of("*")); - ConfigModelV7.SecurityRole secRole = _securityRole.build(); - try { - // create an instance of Security Role - Constructor constructor = ConfigModelV7.SecurityRoles.class.getDeclaredConstructor(int.class); - constructor.setAccessible(true); - securityRoles = constructor.newInstance(1); - - // add security role to Security Roles - Method addSecurityRoleMethod = ConfigModelV7.SecurityRoles.class.getDeclaredMethod( - "addSecurityRole", - ConfigModelV7.SecurityRole.class + SecurityDynamicConfiguration rolesConfig = SecurityDynamicConfiguration.fromMap( + ImmutableMap.of( + "role_a", + ImmutableMap.of( + "index_permissions", + Arrays.asList( + ImmutableMap.of( + "index_patterns", + Arrays.asList(index), + "allowed_actions", + createIndexPatternWithSystemIndexPermission ? 
Set.of("*", SYSTEM_INDEX_PERMISSION) : Set.of("*") + ) + ), + "cluster_permissions", + Arrays.asList("*") + ) + ), + CType.ROLES ); - addSecurityRoleMethod.setAccessible(true); - addSecurityRoleMethod.invoke(securityRoles, secRole); - } catch (NoSuchMethodException | InvocationTargetException | InstantiationException | IllegalAccessException e) { + this.actionPrivileges = new ActionPrivileges(rolesConfig, FlattenedActionGroups.EMPTY, () -> indexMetadata, Settings.EMPTY); + } catch (JsonProcessingException e) { throw new RuntimeException(e); } @@ -150,8 +152,19 @@ public void setup( when(log.isDebugEnabled()).thenReturn(true); when(log.isInfoEnabled()).thenReturn(true); + } - doReturn(ImmutableSet.of(index)).when(ip).getResolvedIndexPattern(user, indexNameExpressionResolver, cs, true); + PrivilegesEvaluationContext ctx(String action) { + return new PrivilegesEvaluationContext( + user, + ImmutableSet.of("role_a"), + action, + request, + null, + null, + indexNameExpressionResolver, + null + ); } @After @@ -171,10 +184,9 @@ public void testUnprotectedActionOnRegularIndex_systemIndexDisabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -193,10 +205,9 @@ public void testUnprotectedActionOnRegularIndex_systemIndexPermissionDisabled() UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -214,10 +225,9 @@ public void testUnprotectedActionOnRegularIndex_systemIndexPermissionEnabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); 
@@ -235,10 +245,9 @@ public void testUnprotectedActionOnSystemIndex_systemIndexDisabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -256,10 +265,9 @@ public void testUnprotectedActionOnSystemIndex_systemIndexPermissionDisabled() { UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verifyNoInteractions(presponse); assertThat(response, is(presponse)); @@ -277,17 +285,21 @@ public void testUnprotectedActionOnSystemIndex_systemIndexPermissionEnabled_With UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); verify(presponse).markComplete(); assertThat(response, is(presponse)); verify(auditLog).logSecurityIndexAttempt(request, UNPROTECTED_ACTION, null); verify(log).isInfoEnabled(); - verify(log).info("No {} permission for user roles {} to System Indices {}", UNPROTECTED_ACTION, securityRoles, TEST_SYSTEM_INDEX); + verify(log).info( + "No {} permission for user roles {} to System Indices {}", + UNPROTECTED_ACTION, + user.getSecurityRoles(), + TEST_SYSTEM_INDEX + ); } @Test @@ -302,10 +314,9 @@ public void testUnprotectedActionOnSystemIndex_systemIndexPermissionEnabled_With UNPROTECTED_ACTION, resolved, presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs + ctx(UNPROTECTED_ACTION), + actionPrivileges, + user ); assertThat(response, is(presponse)); // unprotected action is not allowed on a system index @@ -321,29 +332,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexDisabled() { final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, 
user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verifyNoInteractions(presponse); } @@ -357,29 +348,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionDisable final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verify(searchRequest).requestCache(Boolean.FALSE); verify(realtimeRequest).realtime(Boolean.FALSE); @@ -398,29 +369,9 @@ public void 
testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionEnabled final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verify(searchRequest).requestCache(Boolean.FALSE); verify(realtimeRequest).realtime(Boolean.FALSE); @@ -437,7 +388,7 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionEnabled verify(log, times(3)).info( "No {} permission for user roles {} to System Indices {}", UNPROTECTED_ACTION, - securityRoles, + user.getSecurityRoles(), TEST_SYSTEM_INDEX ); verify(log).debug("Disable search request cache for this request"); @@ -453,29 +404,9 @@ public void testDisableCacheOrRealtimeOnSystemIndex_systemIndexPermissionEnabled final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); - evaluator.evaluate( - searchRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - securityRoles, - user, - indexNameExpressionResolver, - cs - ); - evaluator.evaluate( - realtimeRequest, - null, - UNPROTECTED_ACTION, - resolved, - presponse, - 
securityRoles, - user, - indexNameExpressionResolver, - cs - ); + evaluator.evaluate(request, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(searchRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); + evaluator.evaluate(realtimeRequest, null, UNPROTECTED_ACTION, resolved, presponse, ctx(UNPROTECTED_ACTION), actionPrivileges, user); verify(searchRequest).requestCache(Boolean.FALSE); verify(realtimeRequest).realtime(Boolean.FALSE); @@ -491,7 +422,7 @@ public void testProtectedActionLocalAll_systemIndexDisabled() { final Resolved resolved = Resolved._LOCAL_ALL; // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -505,7 +436,7 @@ public void testProtectedActionLocalAll_systemIndexPermissionDisabled() { final Resolved resolved = Resolved._LOCAL_ALL; // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -519,7 +450,7 @@ public void testProtectedActionLocalAll_systemIndexPermissionEnabled() { final Resolved resolved = Resolved._LOCAL_ALL; // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); 
verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -533,7 +464,7 @@ public void testProtectedActionOnRegularIndex_systemIndexDisabled() { final Resolved resolved = createResolved(TEST_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -544,7 +475,7 @@ public void testProtectedActionOnRegularIndex_systemIndexPermissionDisabled() { final Resolved resolved = createResolved(TEST_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -555,7 +486,7 @@ public void testProtectedActionOnRegularIndex_systemIndexPermissionEnabled() { final Resolved resolved = createResolved(TEST_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -566,7 +497,7 @@ public void testProtectedActionOnSystemIndex_systemIndexDisabled() { final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -577,7 +508,7 @@ public void 
testProtectedActionOnSystemIndex_systemIndexPermissionDisabled() { final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -591,13 +522,18 @@ public void testProtectedActionOnSystemIndex_systemIndexPermissionEnabled_withou final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); verify(presponse).markComplete(); verify(log).isInfoEnabled(); - verify(log).info("No {} permission for user roles {} to System Indices {}", PROTECTED_ACTION, securityRoles, TEST_SYSTEM_INDEX); + verify(log).info( + "No {} permission for user roles {} to System Indices {}", + PROTECTED_ACTION, + user.getSecurityRoles(), + TEST_SYSTEM_INDEX + ); } @Test @@ -607,7 +543,7 @@ public void testProtectedActionOnSystemIndex_systemIndexPermissionEnabled_withSy final Resolved resolved = createResolved(TEST_SYSTEM_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); assertThat(presponse.allowed, is(false)); } @@ -618,7 +554,7 @@ public void testProtectedActionOnProtectedSystemIndex_systemIndexDisabled() { final Resolved resolved = 
createResolved(SECURITY_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -633,7 +569,7 @@ public void testProtectedActionOnProtectedSystemIndex_systemIndexPermissionDisab final Resolved resolved = createResolved(SECURITY_INDEX); // Action - evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, PROTECTED_ACTION, resolved, presponse, ctx(PROTECTED_ACTION), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, PROTECTED_ACTION, task); assertThat(presponse.allowed, is(false)); @@ -668,14 +604,19 @@ private void testSecurityIndexAccess(String action) { final Resolved resolved = createResolved(SECURITY_INDEX); // Action - evaluator.evaluate(request, task, action, resolved, presponse, securityRoles, user, indexNameExpressionResolver, cs); + evaluator.evaluate(request, task, action, resolved, presponse, ctx(action), actionPrivileges, user); verify(auditLog).logSecurityIndexAttempt(request, action, task); assertThat(presponse.allowed, is(false)); verify(presponse).markComplete(); verify(log).isInfoEnabled(); - verify(log).info("{} not permitted for a regular user {} on protected system indices {}", action, securityRoles, SECURITY_INDEX); + verify(log).info( + "{} not permitted for a regular user {} on protected system indices {}", + action, + user.getSecurityRoles(), + SECURITY_INDEX + ); } private Resolved createResolved(final String... 
indexes) { diff --git a/src/test/java/org/opensearch/security/securityconf/impl/v7/IndexPatternTests.java b/src/test/java/org/opensearch/security/securityconf/impl/v7/IndexPatternTests.java deleted file mode 100644 index 513e5bb2cc..0000000000 --- a/src/test/java/org/opensearch/security/securityconf/impl/v7/IndexPatternTests.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.security.securityconf.impl.v7; - -import java.util.Arrays; -import java.util.Set; -import java.util.TreeMap; - -import com.google.common.collect.ImmutableSet; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - -import org.opensearch.action.support.IndicesOptions; -import org.opensearch.cluster.ClusterState; -import org.opensearch.cluster.metadata.IndexAbstraction; -import org.opensearch.cluster.metadata.IndexAbstraction.Type; -import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.cluster.metadata.Metadata; -import org.opensearch.cluster.service.ClusterService; -import org.opensearch.security.securityconf.ConfigModelV7.IndexPattern; -import org.opensearch.security.user.User; - -import org.mockito.Mock; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.quality.Strictness; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.contains; -import static org.junit.Assert.assertThrows; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static 
org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; -import static org.mockito.Mockito.when; -import static org.mockito.Mockito.withSettings; - -@RunWith(MockitoJUnitRunner.class) -public class IndexPatternTests { - - @Mock - private User user; - @Mock - private IndexNameExpressionResolver resolver; - @Mock - private ClusterService clusterService; - - private IndexPattern ip; - - @Before - public void before() { - ip = spy(new IndexPattern("defaultPattern")); - } - - @After - public void after() { - verifyNoMoreInteractions(user, resolver, clusterService); - } - - @Test - public void testCtor() { - assertThrows(NullPointerException.class, () -> new IndexPattern(null)); - } - - /** Ensure that concreteIndexNames sends correct parameters are sent to getResolvedIndexPattern */ - @Test - public void testConcreteIndexNamesOverload() { - doReturn(ImmutableSet.of("darn")).when(ip).getResolvedIndexPattern(user, resolver, clusterService, false); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("darn")); - - verify(ip).getResolvedIndexPattern(user, resolver, clusterService, false); - verify(ip).concreteIndexNames(user, resolver, clusterService); - verifyNoMoreInteractions(ip); - } - - /** Ensure that attemptResolveIndexNames sends correct parameters are sent to getResolvedIndexPattern */ - @Test - public void testAttemptResolveIndexNamesOverload() { - doReturn(ImmutableSet.of("yarn")).when(ip).getResolvedIndexPattern(user, resolver, clusterService, true); - - final Set results = ip.attemptResolveIndexNames(user, resolver, clusterService); - - assertThat(results, contains("yarn")); - - verify(ip).getResolvedIndexPattern(user, resolver, clusterService, true); - verify(ip).attemptResolveIndexNames(user, resolver, clusterService); - verifyNoMoreInteractions(ip); - } - - /** Verify concreteIndexNames when there are no matches */ - @Test - 
public void testExactNameWithNoMatches() { - doReturn("index-17").when(ip).getUnresolvedIndexPattern(user); - when(clusterService.state()).thenReturn(mock(ClusterState.class)); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17"))).thenReturn( - new String[] {} - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("index-17")); - - verify(clusterService).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17")); - } - - /** Verify concreteIndexNames on exact name matches */ - @Test - public void testExactName() { - doReturn("index-17").when(ip).getUnresolvedIndexPattern(user); - when(clusterService.state()).thenReturn(mock(ClusterState.class)); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17"))).thenReturn( - new String[] { "resolved-index-17" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-17")); - - verify(clusterService).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-17")); - } - - /** Verify concreteIndexNames on multiple matches */ - @Test - public void testMultipleConcreteIndices() { - doReturn("index-1*").when(ip).getUnresolvedIndexPattern(user); - doReturn(createClusterState()).when(clusterService).state(); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*"))).thenReturn( - new String[] { "resolved-index-17", "resolved-index-18" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-17", "resolved-index-18")); - - verify(clusterService, 
times(2)).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*")); - } - - /** Verify concreteIndexNames when there is an alias */ - @Test - public void testMultipleConcreteIndicesWithOneAlias() { - doReturn("index-1*").when(ip).getUnresolvedIndexPattern(user); - - doReturn( - createClusterState( - new IndexShorthand("index-100", Type.ALIAS), // Name and type match - new IndexShorthand("19", Type.ALIAS) // Type matches/wrong name - ) - ).when(clusterService).state(); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100"))).thenReturn( - new String[] { "resolved-index-100" } - ); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*"))).thenReturn( - new String[] { "resolved-index-17", "resolved-index-18" } - ); - - final Set results = ip.concreteIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-100", "resolved-index-17", "resolved-index-18")); - - verify(clusterService, times(3)).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100")); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*")); - } - - /** Verify attemptResolveIndexNames with multiple aliases */ - @Test - public void testMultipleConcreteAliasedAndUnresolved() { - doReturn("index-1*").when(ip).getUnresolvedIndexPattern(user); - doReturn( - createClusterState( - new IndexShorthand("index-100", Type.ALIAS), // Name and type match - new IndexShorthand("index-101", Type.ALIAS), // Name and type match - new IndexShorthand("19", Type.ALIAS) // Type matches/wrong name - ) - ).when(clusterService).state(); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), 
eq(true), eq("index-100"), eq("index-101"))) - .thenReturn(new String[] { "resolved-index-100", "resolved-index-101" }); - when(resolver.concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*"))).thenReturn( - new String[] { "resolved-index-17", "resolved-index-18" } - ); - - final Set results = ip.attemptResolveIndexNames(user, resolver, clusterService); - - assertThat(results, contains("resolved-index-100", "resolved-index-101", "resolved-index-17", "resolved-index-18", "index-1*")); - - verify(clusterService, times(3)).state(); - verify(ip).getUnresolvedIndexPattern(user); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-100"), eq("index-101")); - verify(resolver).concreteIndexNames(any(), eq(IndicesOptions.lenientExpandOpen()), eq(true), eq("index-1*")); - } - - private ClusterState createClusterState(final IndexShorthand... indices) { - final TreeMap indexMap = new TreeMap(); - Arrays.stream(indices).forEach(indexShorthand -> { - final IndexAbstraction indexAbstraction = mock(IndexAbstraction.class); - when(indexAbstraction.getType()).thenReturn(indexShorthand.type); - indexMap.put(indexShorthand.name, indexAbstraction); - }); - - final Metadata mockMetadata = mock(Metadata.class, withSettings().strictness(Strictness.LENIENT)); - when(mockMetadata.getIndicesLookup()).thenReturn(indexMap); - - final ClusterState mockClusterState = mock(ClusterState.class, withSettings().strictness(Strictness.LENIENT)); - when(mockClusterState.getMetadata()).thenReturn(mockMetadata); - - return mockClusterState; - } - - private class IndexShorthand { - public final String name; - public final Type type; - - public IndexShorthand(final String name, final Type type) { - this.name = name; - this.type = type; - } - } -} diff --git a/src/test/java/org/opensearch/security/ssl/CertificatesRule.java b/src/test/java/org/opensearch/security/ssl/CertificatesRule.java new file mode 100644 index 
0000000000..b5a397d7c8 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/CertificatesRule.java @@ -0,0 +1,327 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.security.ssl; + +import java.io.IOException; +import java.math.BigInteger; +import java.nio.file.Path; +import java.security.KeyPair; +import java.security.KeyPairGenerator; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.PublicKey; +import java.security.cert.CertificateException; +import java.security.cert.X509Certificate; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.Date; +import java.util.List; + +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.rules.ExternalResource; +import org.junit.rules.TemporaryFolder; +import org.bouncycastle.asn1.ASN1Encodable; +import org.bouncycastle.asn1.DERSequence; +import org.bouncycastle.asn1.x500.X500Name; +import org.bouncycastle.asn1.x500.style.RFC4519Style; +import org.bouncycastle.asn1.x509.BasicConstraints; +import org.bouncycastle.asn1.x509.ExtendedKeyUsage; +import org.bouncycastle.asn1.x509.Extension; +import org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.asn1.x509.KeyPurposeId; +import org.bouncycastle.asn1.x509.KeyUsage; +import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo; +import org.bouncycastle.cert.CertIOException; +import org.bouncycastle.cert.X509CertificateHolder; +import org.bouncycastle.cert.X509v3CertificateBuilder; +import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter; +import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils; +import org.bouncycastle.jce.provider.BouncyCastleProvider; +import 
org.bouncycastle.operator.OperatorCreationException; +import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder; + +import org.opensearch.common.collect.Tuple; + +public class CertificatesRule extends ExternalResource { + + private final static BouncyCastleProvider BOUNCY_CASTLE_PROVIDER = new BouncyCastleProvider(); + + private final TemporaryFolder temporaryFolder = new TemporaryFolder(); + + final static String DEFAULT_SUBJECT_NAME = "CN=some_access,OU=client,O=client,L=test,C=de"; + + private Path configRootFolder; + + private final String privateKeyPassword = RandomStringUtils.randomAlphabetic(10); + + private X509CertificateHolder caCertificateHolder; + + private X509CertificateHolder accessCertificateHolder; + + private PrivateKey accessCertificatePrivateKey; + + @Override + protected void before() throws Throwable { + super.before(); + temporaryFolder.create(); + configRootFolder = temporaryFolder.newFolder("esHome").toPath(); + final var keyPair = generateKeyPair(); + caCertificateHolder = generateCaCertificate(keyPair); + final var keyAndCertificate = generateAccessCertificate(keyPair); + accessCertificatePrivateKey = keyAndCertificate.v1(); + accessCertificateHolder = keyAndCertificate.v2(); + } + + @Override + protected void after() { + super.after(); + temporaryFolder.delete(); + } + + public Path configRootFolder() { + return configRootFolder; + } + + public String privateKeyPassword() { + return privateKeyPassword; + } + + public X509CertificateHolder caCertificateHolder() { + return caCertificateHolder; + } + + public X509CertificateHolder accessCertificateHolder() { + return accessCertificateHolder; + } + + public X509Certificate x509CaCertificate() throws CertificateException { + return toX509Certificate(caCertificateHolder); + } + + public X509Certificate x509AccessCertificate() throws CertificateException { + return toX509Certificate(accessCertificateHolder); + } + + public PrivateKey accessCertificatePrivateKey() { + return 
accessCertificatePrivateKey; + } + + public KeyPair generateKeyPair() throws NoSuchAlgorithmException { + KeyPairGenerator generator = KeyPairGenerator.getInstance("RSA", BOUNCY_CASTLE_PROVIDER); + generator.initialize(4096); + return generator.generateKeyPair(); + } + + public X509CertificateHolder generateCaCertificate(final KeyPair parentKeyPair) throws IOException, NoSuchAlgorithmException, + OperatorCreationException { + final var startAndEndDate = generateStartAndEndDate(); + return generateCaCertificate(parentKeyPair, generateSerialNumber(), startAndEndDate.v1(), startAndEndDate.v2()); + } + + public X509CertificateHolder generateCaCertificate(final KeyPair parentKeyPair, final Instant startDate, final Instant endDate) + throws IOException, NoSuchAlgorithmException, OperatorCreationException { + return generateCaCertificate(parentKeyPair, generateSerialNumber(), startDate, endDate); + } + + public X509CertificateHolder generateCaCertificate( + final KeyPair parentKeyPair, + final BigInteger serialNumber, + final Instant startDate, + final Instant endDate + ) throws IOException, NoSuchAlgorithmException, OperatorCreationException { + // CS-SUPPRESS-SINGLE: RegexpSingleline Extension should only be used sparingly to keep implementations as generic as possible + return createCertificateBuilder( + DEFAULT_SUBJECT_NAME, + DEFAULT_SUBJECT_NAME, + parentKeyPair.getPublic(), + parentKeyPair.getPublic(), + serialNumber, + startDate, + endDate + ).addExtension(Extension.basicConstraints, true, new BasicConstraints(true)) + .addExtension(Extension.keyUsage, true, new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyCertSign | KeyUsage.cRLSign)) + .build(new JcaContentSignerBuilder("SHA256withRSA").setProvider(BOUNCY_CASTLE_PROVIDER).build(parentKeyPair.getPrivate())); + // CS-ENFORCE-SINGLE + } + + public Tuple generateAccessCertificate(final KeyPair parentKeyPair) throws NoSuchAlgorithmException, + IOException, OperatorCreationException { + final var startAndEndDate = 
generateStartAndEndDate(); + return generateAccessCertificate( + DEFAULT_SUBJECT_NAME, + DEFAULT_SUBJECT_NAME, + parentKeyPair, + generateSerialNumber(), + startAndEndDate.v1(), + startAndEndDate.v2(), + defaultSubjectAlternativeNames() + ); + } + + public Tuple generateAccessCertificate(final KeyPair parentKeyPair, final BigInteger serialNumber) + throws NoSuchAlgorithmException, IOException, OperatorCreationException { + final var startAdnEndDate = generateStartAndEndDate(); + return generateAccessCertificate( + DEFAULT_SUBJECT_NAME, + DEFAULT_SUBJECT_NAME, + parentKeyPair, + serialNumber, + startAdnEndDate.v1(), + startAdnEndDate.v2(), + defaultSubjectAlternativeNames() + ); + } + + public Tuple generateAccessCertificate( + final KeyPair parentKeyPair, + final Instant startDate, + final Instant endDate + ) throws NoSuchAlgorithmException, IOException, OperatorCreationException { + return generateAccessCertificate( + DEFAULT_SUBJECT_NAME, + DEFAULT_SUBJECT_NAME, + parentKeyPair, + generateSerialNumber(), + startDate, + endDate, + defaultSubjectAlternativeNames() + ); + } + + public Tuple generateAccessCertificate( + final KeyPair parentKeyPair, + final Instant startDate, + final Instant endDate, + List sans + ) throws NoSuchAlgorithmException, IOException, OperatorCreationException { + return generateAccessCertificate( + DEFAULT_SUBJECT_NAME, + DEFAULT_SUBJECT_NAME, + parentKeyPair, + generateSerialNumber(), + startDate, + endDate, + sans + ); + } + + public Tuple generateAccessCertificate( + final KeyPair parentKeyPair, + final String subject, + final String issuer + ) throws NoSuchAlgorithmException, IOException, OperatorCreationException { + final var startAndEndDate = generateStartAndEndDate(); + return generateAccessCertificate( + subject, + issuer, + parentKeyPair, + generateSerialNumber(), + startAndEndDate.v1(), + startAndEndDate.v2(), + defaultSubjectAlternativeNames() + ); + } + + public Tuple generateAccessCertificate(final KeyPair parentKeyPair, final 
List sans) + throws NoSuchAlgorithmException, IOException, OperatorCreationException { + final var startAndEndDate = generateStartAndEndDate(); + return generateAccessCertificate( + DEFAULT_SUBJECT_NAME, + DEFAULT_SUBJECT_NAME, + parentKeyPair, + generateSerialNumber(), + startAndEndDate.v1(), + startAndEndDate.v2(), + sans + ); + } + + public Tuple generateAccessCertificate( + final String subject, + final String issuer, + final KeyPair parentKeyPair, + final BigInteger serialNumber, + final Instant startDate, + final Instant endDate, + final List sans + ) throws NoSuchAlgorithmException, IOException, OperatorCreationException { + final var keyPair = generateKeyPair(); + // CS-SUPPRESS-SINGLE: RegexpSingleline Extension should only be used sparingly to keep implementations as generic as possible + final var certificate = createCertificateBuilder( + subject, + issuer, + keyPair.getPublic(), + parentKeyPair.getPublic(), + serialNumber, + startDate, + endDate + ).addExtension(Extension.basicConstraints, true, new BasicConstraints(false)) + .addExtension( + Extension.keyUsage, + true, + new KeyUsage(KeyUsage.digitalSignature | KeyUsage.nonRepudiation | KeyUsage.keyEncipherment) + ) + .addExtension(Extension.extendedKeyUsage, true, new ExtendedKeyUsage(KeyPurposeId.id_kp_clientAuth)) + .addExtension(Extension.subjectAlternativeName, false, new DERSequence(sans.toArray(sans.toArray(new ASN1Encodable[0])))) + .build(new JcaContentSignerBuilder("SHA256withRSA").setProvider(BOUNCY_CASTLE_PROVIDER).build(parentKeyPair.getPrivate())); + // CS-ENFORCE-SINGLE + return Tuple.tuple(keyPair.getPrivate(), certificate); + } + + private List defaultSubjectAlternativeNames() { + return List.of( + new GeneralName(GeneralName.registeredID, "1.2.3.4.5.5"), + new GeneralName(GeneralName.dNSName, "localhost"), + new GeneralName(GeneralName.iPAddress, "127.0.0.1") + ); + } + + public X509Certificate toX509Certificate(final X509CertificateHolder x509CertificateHolder) throws 
CertificateException { + return new JcaX509CertificateConverter().getCertificate(x509CertificateHolder); + } + + private X509v3CertificateBuilder createCertificateBuilder( + final String subject, + final String issuer, + final PublicKey certificatePublicKey, + final PublicKey parentPublicKey, + final BigInteger serialNumber, + final Instant startDate, + final Instant endDate + ) throws NoSuchAlgorithmException, CertIOException { + // CS-SUPPRESS-SINGLE: RegexpSingleline Extension should only be used sparingly to keep implementations as generic as possible + final var subjectName = new X500Name(RFC4519Style.INSTANCE, subject); + final var issuerName = new X500Name(RFC4519Style.INSTANCE, issuer); + final var extUtils = new JcaX509ExtensionUtils(); + return new X509v3CertificateBuilder( + issuerName, + serialNumber, + Date.from(startDate), + Date.from(endDate), + subjectName, + SubjectPublicKeyInfo.getInstance(certificatePublicKey.getEncoded()) + ).addExtension(Extension.authorityKeyIdentifier, false, extUtils.createAuthorityKeyIdentifier(parentPublicKey)) + .addExtension(Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(certificatePublicKey)); + // CS-ENFORCE-SINGLE + } + + Tuple generateStartAndEndDate() { + final var startDate = Instant.now().minusMillis(24 * 3600 * 1000); + final var endDate = Instant.from(startDate).plus(10, ChronoUnit.DAYS); + return Tuple.tuple(startDate, endDate); + } + + public BigInteger generateSerialNumber() { + return BigInteger.valueOf(Instant.now().plusMillis(100).getEpochSecond()); + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/CertificatesUtils.java b/src/test/java/org/opensearch/security/ssl/CertificatesUtils.java new file mode 100644 index 0000000000..7b6ee9fc74 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/CertificatesUtils.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be 
licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.security.ssl; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.PrivateKey; +import java.security.SecureRandom; + +import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; +import org.bouncycastle.openssl.PKCS8Generator; +import org.bouncycastle.openssl.jcajce.JcaPEMWriter; +import org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8EncryptorBuilder; +import org.bouncycastle.util.io.pem.PemObject; + +public class CertificatesUtils { + + public static void writePemContent(final Path path, final Object pemContent) throws IOException { + try (JcaPEMWriter writer = new JcaPEMWriter(Files.newBufferedWriter(path))) { + writer.writeObject(pemContent); + } + } + + public static PemObject privateKeyToPemObject(final PrivateKey privateKey, final String password) throws Exception { + return new PKCS8Generator( + PrivateKeyInfo.getInstance(privateKey.getEncoded()), + new JceOpenSSLPKCS8EncryptorBuilder(PKCS8Generator.PBE_SHA1_3DES).setRandom(new SecureRandom()) + .setPassword(password.toCharArray()) + .build() + ).generate(); + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPluginTest.java b/src/test/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPluginTest.java index aefb12c0db..e7e5abaeda 100644 --- a/src/test/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPluginTest.java +++ b/src/test/java/org/opensearch/security/ssl/OpenSearchSecuritySSLPluginTest.java @@ -9,6 +9,7 @@ package org.opensearch.security.ssl; import java.io.IOException; +import java.nio.file.Path; import java.util.Collection; import java.util.List; import java.util.Map; @@ -26,6 +27,7 @@ import org.opensearch.common.network.NetworkModule; import org.opensearch.common.settings.ClusterSettings; import 
org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; import org.opensearch.http.HttpServerTransport; import org.opensearch.http.netty4.ssl.SecureNetty4HttpServerTransport; import org.opensearch.plugins.SecureHttpTransportSettingsProvider; @@ -55,17 +57,17 @@ public class OpenSearchSecuritySSLPluginTest extends AbstractSecurityUnitTest { private SecureTransportSettingsProvider secureTransportSettingsProvider; private ClusterSettings clusterSettings; + private Path osPathHome; + @Before public void setUp() { + osPathHome = FileHelper.getAbsoluteFilePathFromClassPath("ssl/kirk-keystore.jks").getParent().getParent(); settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), osPathHome) .put( SSLConfigConstants.SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, FileHelper.getAbsoluteFilePathFromClassPath("ssl/kirk-keystore.jks") ) - .put( - SSLConfigConstants.SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH, - FileHelper.getAbsoluteFilePathFromClassPath("ssl/root-ca.pem") - ) .put( SSLConfigConstants.SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, FileHelper.getAbsoluteFilePathFromClassPath("ssl/truststore.jks") @@ -116,7 +118,7 @@ public Optional buildSecureHttpServerEngine(Settings settings, HttpSe @Test public void testRegisterSecureHttpTransport() throws IOException { - try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(settings, null, false)) { + try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(settings, osPathHome, false)) { final Map> transports = plugin.getSecureHttpTransports( settings, MOCK_POOL, @@ -140,7 +142,7 @@ public void testRegisterSecureHttpTransport() throws IOException { @Test public void testRegisterSecureTransport() throws IOException { - try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(settings, null, false)) { + try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(settings, osPathHome, false)) { final Map> transports = 
plugin.getSecureTransports( settings, MOCK_POOL, @@ -165,7 +167,7 @@ public void testRegisterSecureTransportWithDeprecatedSecuirtyPluginSettings() th .put(SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENFORCE_HOSTNAME_VERIFICATION, false) .build(); - try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(deprecated, null, false)) { + try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(deprecated, osPathHome, false)) { final Map> transports = plugin.getSecureTransports( deprecated, MOCK_POOL, @@ -190,7 +192,7 @@ public void testRegisterSecureTransportWithNetworkModuleSettings() throws IOExce .put(NetworkModule.TRANSPORT_SSL_ENFORCE_HOSTNAME_VERIFICATION_KEY, false) .build(); - try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(migrated, null, false)) { + try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(migrated, osPathHome, false)) { final Map> transports = plugin.getSecureTransports( migrated, MOCK_POOL, @@ -229,7 +231,7 @@ public void testRegisterSecureTransportWithDuplicateSettings() throws IOExceptio .put(NetworkModule.TRANSPORT_SSL_ENFORCE_HOSTNAME_VERIFICATION_KEY, false) .build(); - try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(migrated, null, false)) { + try (OpenSearchSecuritySSLPlugin plugin = new OpenSearchSecuritySSLPlugin(migrated, osPathHome, false)) { final Map> transports = plugin.getSecureTransports( migrated, MOCK_POOL, diff --git a/src/test/java/org/opensearch/security/ssl/SSLTest.java b/src/test/java/org/opensearch/security/ssl/SSLTest.java index a6013c7823..20887fccdf 100644 --- a/src/test/java/org/opensearch/security/ssl/SSLTest.java +++ b/src/test/java/org/opensearch/security/ssl/SSLTest.java @@ -569,7 +569,7 @@ public void testHttpsAndNodeSSLFailedCipher() throws Exception { Assert.fail(); } catch (Exception e1) { Throwable e = ExceptionUtils.getRootCause(e1); - Assert.assertTrue(e.toString(), e.toString().contains("no valid 
cipher")); + Assert.assertTrue(e.toString(), e.toString().contains("No valid cipher")); } } diff --git a/src/test/java/org/opensearch/security/ssl/SecuritySSLReloadCertsActionTests.java b/src/test/java/org/opensearch/security/ssl/SecuritySSLReloadCertsActionTests.java index 244967cf76..86d1e45133 100644 --- a/src/test/java/org/opensearch/security/ssl/SecuritySSLReloadCertsActionTests.java +++ b/src/test/java/org/opensearch/security/ssl/SecuritySSLReloadCertsActionTests.java @@ -14,7 +14,6 @@ import java.io.IOException; import java.util.List; import java.util.Map; -import java.util.Objects; import com.fasterxml.jackson.databind.JsonNode; import org.junit.After; @@ -44,10 +43,8 @@ public class SecuritySSLReloadCertsActionTests extends SingleClusterTest { private final String RELOAD_HTTP_CERTS_ENDPOINT = "_opendistro/_security/api/ssl/http/reloadcerts"; @Rule public TemporaryFolder testFolder = new TemporaryFolder(); - private final String HTTP_CERTIFICATES_LIST_KEY = "http_certificates_list"; - private final String TRANSPORT_CERTIFICATES_LIST_KEY = "transport_certificates_list"; - private final List> NODE_CERT_DETAILS = List.of( + private final List> INITIAL_NODE_CERT_DETAILS = List.of( Map.of( "issuer_dn", "CN=Example Com Inc. Signing CA,OU=Example Com Inc. Signing CA,O=Example Com Inc.,DC=example,DC=com", @@ -77,6 +74,21 @@ public class SecuritySSLReloadCertsActionTests extends SingleClusterTest { ) ); + private final List> NEW_CA_NODE_CERT_DETAILS = List.of( + Map.of( + "issuer_dn", + "CN=Example Com Inc. Secondary Signing CA,OU=Example Com Inc. 
Secondary Signing CA,O=Example Com Inc.,DC=example,DC=com", + "subject_dn", + "CN=node-1.example.com,OU=SSL,O=Test,L=Test,C=DE", + "san", + "[[2, localhost], [2, node-1.example.com], [7, 127.0.0.1], [8, 1.2.3.4.5.5]]", + "not_before", + "2024-09-17T00:15:48Z", + "not_after", + "2034-09-15T00:15:48Z" + ) + ); + private String pemCertFilePath; private String pemKeyFilePath; private final String defaultCertFilePath = "ssl/reload/node.crt.pem"; @@ -116,7 +128,7 @@ public void testReloadTransportSSLCertsPass() throws Exception { updateFiles(newCertFilePath, pemCertFilePath); updateFiles(newKeyFilePath, pemKeyFilePath); - assertReloadCertificateSuccess(rh, "transport", getUpdatedCertDetailsExpectedResponse("transport")); + assertReloadCertificateSuccess(rh, "transport", getCertDetailsExpectedResponse(INITIAL_NODE_CERT_DETAILS, NEW_NODE_CERT_DETAILS)); } @Test @@ -133,7 +145,7 @@ public void testReloadHttpSSLCertsPass() throws Exception { updateFiles(newCertFilePath, pemCertFilePath); updateFiles(newKeyFilePath, pemKeyFilePath); - assertReloadCertificateSuccess(rh, "http", getUpdatedCertDetailsExpectedResponse("http")); + assertReloadCertificateSuccess(rh, "http", getCertDetailsExpectedResponse(NEW_NODE_CERT_DETAILS, INITIAL_NODE_CERT_DETAILS)); } @Test @@ -147,9 +159,12 @@ public void testSSLReloadFail_InvalidDNAndDate() throws Exception { RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_TRANSPORT_CERTS_ENDPOINT, null); assertThat(reloadCertsResponse.getStatusCode(), is(500)); assertThat( - "OpenSearchSecurityException[Error while initializing transport SSL layer from PEM: java.lang.Exception: " - + "New Certs do not have valid Issuer DN, Subject DN or SAN.]; nested: Exception[New Certs do not have valid Issuer DN, Subject DN or SAN.];", - is(DefaultObjectMapper.readTree(reloadCertsResponse.getBody()).get("error").get("root_cause").get(0).get("reason").asText()) + 
DefaultObjectMapper.readTree(reloadCertsResponse.getBody()).get("error").get("root_cause").get(0).get("reason").asText(), + is( + "java.security.cert.CertificateException: " + + "New certificates do not have valid Subject DNs. Current Subject DNs [CN=node-1.example.com,OU=SSL,O=Test,L=Test,C=DE] " + + "new Subject DNs [CN=node-2.example.com,OU=SSL,O=Test,L=Test,C=DE]" + ) ); } @@ -186,6 +201,152 @@ public void testReloadHttpSSLSameCertsPass() throws Exception { assertReloadCertificateSuccess(rh, "http", getInitCertDetailsExpectedResponse()); } + @Test + public void testReloadHttpCertDifferentTrustChain_skipDnValidationPass() throws Exception { + updateFiles(defaultCertFilePath, pemCertFilePath); + updateFiles(defaultKeyFilePath, pemKeyFilePath); + initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, false, true); + + RestHelper rh = getRestHelperAdminUser(); + // Change http certs to one signed by a different CA than the previous one + updateFiles("ssl/reload/node-new-ca.crt.pem", pemCertFilePath); + updateFiles("ssl/reload/node-new-ca.key.pem", pemKeyFilePath); + + RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_HTTP_CERTS_ENDPOINT, null); + + assertThat(reloadCertsResponse.getStatusCode(), is(200)); + final var expectedJsonResponse = DefaultObjectMapper.objectMapper.createObjectNode(); + expectedJsonResponse.put("message", "updated http certs"); + assertThat(reloadCertsResponse.getBody(), is(expectedJsonResponse.toString())); + + String certDetailsResponse = rh.executeSimpleRequest(GET_CERT_DETAILS_ENDPOINT); + assertThat( + DefaultObjectMapper.readTree(certDetailsResponse), + is(getCertDetailsExpectedResponse(NEW_CA_NODE_CERT_DETAILS, INITIAL_NODE_CERT_DETAILS)) + ); + } + + @Test + public void testReloadHttpCertDifferentTrustChain_noSkipDnValidationFail() throws Exception { + updateFiles(defaultCertFilePath, pemCertFilePath); + updateFiles(defaultKeyFilePath, pemKeyFilePath); + 
initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, true, true); + + RestHelper rh = getRestHelperAdminUser(); + // Change http certs to one signed by a different CA than the previous one + updateFiles("ssl/reload/node-new-ca.crt.pem", pemCertFilePath); + updateFiles("ssl/reload/node-new-ca.key.pem", pemKeyFilePath); + + RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_HTTP_CERTS_ENDPOINT, null); + + assertThat(reloadCertsResponse.getStatusCode(), is(500)); + assertThat( + DefaultObjectMapper.readTree(reloadCertsResponse.getBody()).get("error").get("root_cause").get(0).get("reason").asText(), + is( + "java.security.cert.CertificateException: New certificates do not have valid Issuer DNs. " + + "Current Issuer DNs: [CN=Example Com Inc. Signing CA,OU=Example Com Inc. Signing CA,O=Example Com Inc.,DC=example,DC=com] " + + "new Issuer DNs: [CN=Example Com Inc. Secondary Signing CA,OU=Example Com Inc. Secondary Signing CA,O=Example Com Inc.,DC=example,DC=com]" + ) + ); + } + + @Test + public void testReloadHttpCertDifferentTrustChain_defaultSettingValidationFail() throws Exception { + updateFiles(defaultCertFilePath, pemCertFilePath); + updateFiles(defaultKeyFilePath, pemKeyFilePath); + initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, null, null); + + RestHelper rh = getRestHelperAdminUser(); + // Change http certs to one signed by a different CA than the previous one + updateFiles("ssl/reload/node-new-ca.crt.pem", pemCertFilePath); + updateFiles("ssl/reload/node-new-ca.key.pem", pemKeyFilePath); + + RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_HTTP_CERTS_ENDPOINT, null); + + assertThat(reloadCertsResponse.getStatusCode(), is(500)); + assertThat( + DefaultObjectMapper.readTree(reloadCertsResponse.getBody()).get("error").get("root_cause").get(0).get("reason").asText(), + is( + "java.security.cert.CertificateException: New certificates do not 
have valid Issuer DNs. " + + "Current Issuer DNs: [CN=Example Com Inc. Signing CA,OU=Example Com Inc. Signing CA,O=Example Com Inc.,DC=example,DC=com] " + + "new Issuer DNs: [CN=Example Com Inc. Secondary Signing CA,OU=Example Com Inc. Secondary Signing CA,O=Example Com Inc.,DC=example,DC=com]" + ) + ); + } + + @Test + public void testReloadTransportCertDifferentTrustChain_skipDnValidationPass() throws Exception { + updateFiles(defaultCertFilePath, pemCertFilePath); + updateFiles(defaultKeyFilePath, pemKeyFilePath); + initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, true, false); + + RestHelper rh = getRestHelperAdminUser(); + // Change transport certs to one signed by a different CA than the previous one + updateFiles("ssl/reload/node-new-ca.crt.pem", pemCertFilePath); + updateFiles("ssl/reload/node-new-ca.key.pem", pemKeyFilePath); + + RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_TRANSPORT_CERTS_ENDPOINT, null); + + assertThat(reloadCertsResponse.getStatusCode(), is(200)); + final var expectedJsonResponse = DefaultObjectMapper.objectMapper.createObjectNode(); + expectedJsonResponse.put("message", "updated transport certs"); + assertThat(reloadCertsResponse.getBody(), is(expectedJsonResponse.toString())); + + String certDetailsResponse = rh.executeSimpleRequest(GET_CERT_DETAILS_ENDPOINT); + assertThat( + DefaultObjectMapper.readTree(certDetailsResponse), + is(getCertDetailsExpectedResponse(INITIAL_NODE_CERT_DETAILS, NEW_CA_NODE_CERT_DETAILS)) + ); + } + + @Test + public void testReloadTransportCertDifferentTrustChain_noSkipDnValidationFail() throws Exception { + updateFiles(defaultCertFilePath, pemCertFilePath); + updateFiles(defaultKeyFilePath, pemKeyFilePath); + initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, true, true); + + RestHelper rh = getRestHelperAdminUser(); + // Change transport certs to one signed by a different CA than the previous one + 
updateFiles("ssl/reload/node-new-ca.crt.pem", pemCertFilePath); + updateFiles("ssl/reload/node-new-ca.key.pem", pemKeyFilePath); + + RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_TRANSPORT_CERTS_ENDPOINT, null); + + assertThat(reloadCertsResponse.getStatusCode(), is(500)); + assertThat( + DefaultObjectMapper.readTree(reloadCertsResponse.getBody()).get("error").get("root_cause").get(0).get("reason").asText(), + is( + "java.security.cert.CertificateException: New certificates do not have valid Issuer DNs. " + + "Current Issuer DNs: [CN=Example Com Inc. Signing CA,OU=Example Com Inc. Signing CA,O=Example Com Inc.,DC=example,DC=com] " + + "new Issuer DNs: [CN=Example Com Inc. Secondary Signing CA,OU=Example Com Inc. Secondary Signing CA,O=Example Com Inc.,DC=example,DC=com]" + ) + ); + } + + @Test + public void testReloadTransportCertDifferentTrustChain_defaultSettingValidationFail() throws Exception { + updateFiles(defaultCertFilePath, pemCertFilePath); + updateFiles(defaultKeyFilePath, pemKeyFilePath); + initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, null, null); + + RestHelper rh = getRestHelperAdminUser(); + // Change transport certs to one signed by a different CA than the previous one + updateFiles("ssl/reload/node-new-ca.crt.pem", pemCertFilePath); + updateFiles("ssl/reload/node-new-ca.key.pem", pemKeyFilePath); + + RestHelper.HttpResponse reloadCertsResponse = rh.executePutRequest(RELOAD_TRANSPORT_CERTS_ENDPOINT, null); + + assertThat(reloadCertsResponse.getStatusCode(), is(500)); + assertThat( + DefaultObjectMapper.readTree(reloadCertsResponse.getBody()).get("error").get("root_cause").get(0).get("reason").asText(), + is( + "java.security.cert.CertificateException: New certificates do not have valid Issuer DNs. " + + "Current Issuer DNs: [CN=Example Com Inc. Signing CA,OU=Example Com Inc. Signing CA,O=Example Com Inc.,DC=example,DC=com] " + + "new Issuer DNs: [CN=Example Com Inc. 
Secondary Signing CA,OU=Example Com Inc. Secondary Signing CA,O=Example Com Inc.,DC=example,DC=com]" + ) + ); + } + /** * * @param rh RestHelper to perform rest actions on the cluster @@ -211,20 +372,18 @@ private void updateFiles(String srcFile, String dstFile) { FileHelper.copyFileContents(FileHelper.getAbsoluteFilePathFromClassPath(srcFile).toString(), dstFile); } - private JsonNode getUpdatedCertDetailsExpectedResponse(String updateChannel) { - String updateKey = (Objects.equals(updateChannel, "http")) ? HTTP_CERTIFICATES_LIST_KEY : TRANSPORT_CERTIFICATES_LIST_KEY; - String oldKey = (Objects.equals(updateChannel, "http")) ? TRANSPORT_CERTIFICATES_LIST_KEY : HTTP_CERTIFICATES_LIST_KEY; + private JsonNode getCertDetailsExpectedResponse( + List> httpCertDetails, + List> transportCertDetails + ) { final var updatedCertDetailsResponse = DefaultObjectMapper.objectMapper.createObjectNode(); - updatedCertDetailsResponse.set(updateKey, buildCertsInfoNode(NEW_NODE_CERT_DETAILS)); - updatedCertDetailsResponse.set(oldKey, buildCertsInfoNode(NODE_CERT_DETAILS)); + updatedCertDetailsResponse.set("http_certificates_list", buildCertsInfoNode(httpCertDetails)); + updatedCertDetailsResponse.set("transport_certificates_list", buildCertsInfoNode(transportCertDetails)); return updatedCertDetailsResponse; } private JsonNode getInitCertDetailsExpectedResponse() { - final var initCertDetailsResponse = DefaultObjectMapper.objectMapper.createObjectNode(); - initCertDetailsResponse.set(HTTP_CERTIFICATES_LIST_KEY, buildCertsInfoNode(NODE_CERT_DETAILS)); - initCertDetailsResponse.set(TRANSPORT_CERTIFICATES_LIST_KEY, buildCertsInfoNode(NODE_CERT_DETAILS)); - return initCertDetailsResponse; + return getCertDetailsExpectedResponse(INITIAL_NODE_CERT_DETAILS, INITIAL_NODE_CERT_DETAILS); } private JsonNode buildCertsInfoNode(final List> certsInfo) { @@ -270,25 +429,29 @@ private RestHelper getRestHelperNonAdminUser() { private void initClusterWithTestCerts() throws Exception { 
updateFiles(defaultCertFilePath, pemCertFilePath); updateFiles(defaultKeyFilePath, pemKeyFilePath); - initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true); + initTestCluster(pemCertFilePath, pemKeyFilePath, pemCertFilePath, pemKeyFilePath, true, true, true); } /** * Helper method to initialize test cluster for SSL Certificate Reload Tests - * @param transportPemCertFilePath Absolute Path to transport pem cert file - * @param transportPemKeyFilePath Absolute Path to transport pem key file - * @param httpPemCertFilePath Absolute Path to transport pem cert file - * @param httpPemKeyFilePath Absolute Path to transport pem key file - * @param sslCertReload Sets the ssl cert reload flag + * @param transportPemCertFilePath Absolute Path to transport pem cert file + * @param transportPemKeyFilePath Absolute Path to transport pem key file + * @param httpPemCertFilePath Absolute Path to transport pem cert file + * @param httpPemKeyFilePath Absolute Path to transport pem key file + * @param sslCertReload Sets the ssl cert reload flag + * @param httpEnforceReloadDnVerification Sets the http enforce reload dn verification flag if non-null + * @param transportEnforceReloadDnVerification Sets the transport enforce reload dn verification flag if non-null */ private void initTestCluster( final String transportPemCertFilePath, final String transportPemKeyFilePath, final String httpPemCertFilePath, final String httpPemKeyFilePath, - final boolean sslCertReload + final boolean sslCertReload, + final Boolean httpEnforceReloadDnVerification, + final Boolean transportEnforceReloadDnVerification ) throws Exception { - final Settings settings = Settings.builder() + final Settings.Builder settingsBuilder = Settings.builder() .putList(ConfigConstants.SECURITY_AUTHCZ_ADMIN_DN, "CN=kirk,OU=client,O=client,L=Test,C=DE") .putList(ConfigConstants.SECURITY_NODES_DN, "CN=node-1.example.com,OU=SSL,O=Test,L=Test,C=DE") 
.put(SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED, true) @@ -307,8 +470,17 @@ private void initTestCluster( SSLConfigConstants.SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH, FileHelper.getAbsoluteFilePathFromClassPath("ssl/reload/root-ca.pem") ) - .put(ConfigConstants.SECURITY_SSL_CERT_RELOAD_ENABLED, sslCertReload) - .build(); + .put(ConfigConstants.SECURITY_SSL_CERT_RELOAD_ENABLED, sslCertReload); + + if (httpEnforceReloadDnVerification != null) settingsBuilder.put( + SSLConfigConstants.SECURITY_SSL_HTTP_ENFORCE_CERT_RELOAD_DN_VERIFICATION, + httpEnforceReloadDnVerification + ); + + if (transportEnforceReloadDnVerification != null) settingsBuilder.put( + SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENFORCE_CERT_RELOAD_DN_VERIFICATION, + transportEnforceReloadDnVerification + ); final Settings initTransportClientSettings = Settings.builder() .put( @@ -322,7 +494,7 @@ private void initTestCluster( ) .build(); - setup(initTransportClientSettings, new DynamicSecurityConfig(), settings, true, clusterConfiguration); + setup(initTransportClientSettings, new DynamicSecurityConfig(), settingsBuilder.build(), true, clusterConfiguration); } } diff --git a/src/test/java/org/opensearch/security/ssl/SslContextHandlerTest.java b/src/test/java/org/opensearch/security/ssl/SslContextHandlerTest.java new file mode 100644 index 0000000000..916b7b09a7 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/SslContextHandlerTest.java @@ -0,0 +1,293 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl; + +import java.nio.file.Path; +import java.security.PrivateKey; +import java.security.cert.CertificateException; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.bouncycastle.asn1.ASN1Encodable; +import org.bouncycastle.asn1.ASN1Sequence; +import org.bouncycastle.asn1.x509.Extension; +import org.bouncycastle.asn1.x509.GeneralName; +import org.bouncycastle.cert.X509CertificateHolder; + +import org.opensearch.common.settings.Settings; +import org.opensearch.security.ssl.config.KeyStoreConfiguration; +import org.opensearch.security.ssl.config.SslParameters; +import org.opensearch.security.ssl.config.TrustStoreConfiguration; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.ssl.CertificatesUtils.privateKeyToPemObject; +import static org.opensearch.security.ssl.CertificatesUtils.writePemContent; +import static org.junit.Assert.assertThrows; + +public class SslContextHandlerTest { + + @ClassRule + public static CertificatesRule certificatesRule = new CertificatesRule(); + + Path caCertificatePath; + + Path accessCertificatePath; + + Path accessCertificatePrivateKeyPath; + + @Before + public void setUp() throws Exception { + caCertificatePath = certificatesRule.configRootFolder().resolve("ca_certificate.pem"); + accessCertificatePath = certificatesRule.configRootFolder().resolve("access_certificate.pem"); + accessCertificatePrivateKeyPath = certificatesRule.configRootFolder().resolve("access_certificate_pk.pem"); + writeCertificates( + certificatesRule.caCertificateHolder(), + certificatesRule.accessCertificateHolder(), + certificatesRule.accessCertificatePrivateKey() + ); + } + + void writeCertificates( + final 
X509CertificateHolder caCertificate, + final X509CertificateHolder accessCertificate, + final PrivateKey accessPrivateKey + ) throws Exception { + writePemContent(caCertificatePath, caCertificate); + writePemContent(accessCertificatePath, accessCertificate); + writePemContent(accessCertificatePrivateKeyPath, privateKeyToPemObject(accessPrivateKey, certificatesRule.privateKeyPassword())); + } + + @Test + public void doesNothingIfCertificatesAreSame() throws Exception { + final var sslContextHandler = sslContextHandler(); + + final var sslContextBefore = sslContextHandler.sslContext(); + sslContextHandler.reloadSslContext(); + + assertThat("SSL Context is the same", sslContextBefore.equals(sslContextHandler.sslContext())); + } + + @Test + public void failsIfAuthorityCertificateHasInvalidDates() throws Exception { + final var sslContextHandler = sslContextHandler(); + final var keyPair = certificatesRule.generateKeyPair(); + + final var caCertificate = certificatesRule.caCertificateHolder(); + + var newCaCertificate = certificatesRule.generateCaCertificate( + keyPair, + caCertificate.getNotAfter().toInstant(), + caCertificate.getNotAfter().toInstant().minus(10, ChronoUnit.DAYS) + ); + + writeCertificates(newCaCertificate, certificatesRule.accessCertificateHolder(), certificatesRule.accessCertificatePrivateKey()); + + assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + + newCaCertificate = certificatesRule.generateCaCertificate( + keyPair, + caCertificate.getNotBefore().toInstant().plus(10, ChronoUnit.DAYS), + caCertificate.getNotAfter().toInstant().plus(20, ChronoUnit.DAYS) + ); + writeCertificates(newCaCertificate, certificatesRule.accessCertificateHolder(), certificatesRule.accessCertificatePrivateKey()); + + assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + } + + @Test + public void failsIfKeyMaterialCertificateHasInvalidDates() throws Exception { + final var sslContextHandler = sslContextHandler(); + + 
final var accessCertificate = certificatesRule.x509AccessCertificate(); + final var keyPair = certificatesRule.generateKeyPair(); + final var newCaCertificate = certificatesRule.generateCaCertificate(keyPair); + var newAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + accessCertificate.getNotBefore().toInstant(), + accessCertificate.getNotAfter().toInstant().minus(10, ChronoUnit.DAYS) + ); + + writeCertificates(newCaCertificate, newAccessCertificate.v2(), newAccessCertificate.v1()); + + assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + + newAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + accessCertificate.getNotBefore().toInstant().plus(10, ChronoUnit.DAYS), + accessCertificate.getNotAfter().toInstant().plus(20, ChronoUnit.DAYS) + ); + writeCertificates(newCaCertificate, newAccessCertificate.v2(), newAccessCertificate.v1()); + + assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + } + + @Test + public void failsIfKeyMaterialCertificateHasNotValidSubjectDNs() throws Exception { + final var sslContextHandler = sslContextHandler(); + + final var keyPair = certificatesRule.generateKeyPair(); + final var newCaCertificate = certificatesRule.generateCaCertificate(keyPair); + final var currentAccessCertificate = certificatesRule.x509AccessCertificate(); + final var wrongSubjectAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + "CN=ddddd,O=client,L=test,C=de", + currentAccessCertificate.getIssuerX500Principal().getName() + ); + + writeCertificates(newCaCertificate, wrongSubjectAccessCertificate.v2(), wrongSubjectAccessCertificate.v1()); + + final var e = assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + assertThat( + e.getMessage(), + is( + "New certificates do not have valid Subject DNs. 
" + + "Current Subject DNs [CN=some_access,OU=client,O=client,L=test,C=de] " + + "new Subject DNs [CN=ddddd,O=client,L=test,C=de]" + ) + ); + } + + @Test + public void failsIfKeyMaterialCertificateHasNotValidIssuerDNs() throws Exception { + final var sslContextHandler = sslContextHandler(); + + final var keyPair = certificatesRule.generateKeyPair(); + final var newCaCertificate = certificatesRule.generateCaCertificate(keyPair); + final var currentAccessCertificate = certificatesRule.x509AccessCertificate(); + final var wrongSubjectAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + currentAccessCertificate.getSubjectX500Principal().getName(), + "CN=ddddd,O=client,L=test,C=de" + ); + + writeCertificates(newCaCertificate, wrongSubjectAccessCertificate.v2(), wrongSubjectAccessCertificate.v1()); + + final var e = assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + assertThat( + e.getMessage(), + is( + "New certificates do not have valid Issuer DNs. " + + "Current Issuer DNs: [CN=some_access,OU=client,O=client,L=test,C=de] " + + "new Issuer DNs: [CN=ddddd,O=client,L=test,C=de]" + ) + ); + } + + @Test + public void failsIfKeyMaterialCertificateHasNotValidSans() throws Exception { + final var sslContextHandler = sslContextHandler(); + + final var keyPair = certificatesRule.generateKeyPair(); + final var newCaCertificate = certificatesRule.generateCaCertificate(keyPair); + final var wrongSubjectAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + List.of(new GeneralName(GeneralName.iPAddress, "127.0.0.3")) + ); + + writeCertificates(newCaCertificate, wrongSubjectAccessCertificate.v2(), wrongSubjectAccessCertificate.v1()); + + final var e = assertThrows(CertificateException.class, sslContextHandler::reloadSslContext); + assertThat( + e.getMessage(), + is( + "New certificates do not have valid SANs. 
" + + "Current SANs: [[[2, localhost], [7, 127.0.0.1], [8, 1.2.3.4.5.5]]] " + + "new SANs: [[[7, 127.0.0.3]]]" + ) + ); + } + + @Test + public void reloadSslContext() throws Exception { + final var sslContextHandler = sslContextHandler(); + + final var sslContextBefore = sslContextHandler.sslContext(); + + final var keyPair = certificatesRule.generateKeyPair(); + final var newCaCertificate = certificatesRule.generateCaCertificate(keyPair); + final var currentAccessCertificate = certificatesRule.x509AccessCertificate(); + final var newAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + currentAccessCertificate.getNotBefore().toInstant(), + currentAccessCertificate.getNotAfter().toInstant().plus(10, ChronoUnit.MINUTES) + ); + + writeCertificates(newCaCertificate, newAccessCertificate.v2(), newAccessCertificate.v1()); + + sslContextHandler.reloadSslContext(); + + assertThat("Context reloaded", is(not(sslContextBefore.equals(sslContextHandler.sslContext())))); + } + + @Test + public void reloadSslContextForShuffledSameSans() throws Exception { + final var sslContextHandler = sslContextHandler(); + + final var sslContextBefore = sslContextHandler.sslContext(); + + final var keyPair = certificatesRule.generateKeyPair(); + final var newCaCertificate = certificatesRule.generateCaCertificate(keyPair); + final var currentAccessCertificate = certificatesRule.accessCertificateHolder(); + + // CS-SUPPRESS-SINGLE: RegexpSingleline Extension should only be used sparingly to keep implementations as generic as possible + final var newAccessCertificate = certificatesRule.generateAccessCertificate( + keyPair, + currentAccessCertificate.getNotBefore().toInstant(), + currentAccessCertificate.getNotAfter().toInstant().plus(10, ChronoUnit.MINUTES), + shuffledSans(currentAccessCertificate.getExtension(Extension.subjectAlternativeName)) + ); + // CS-ENFORCE-SINGLE + + writeCertificates(newCaCertificate, newAccessCertificate.v2(), newAccessCertificate.v1()); + + 
sslContextHandler.reloadSslContext(); + + assertThat("Context reloaded", is(not(sslContextBefore.equals(sslContextHandler.sslContext())))); + } + + // CS-SUPPRESS-SINGLE: RegexpSingleline Extension should only be used sparingly to keep implementations as generic as possible + List shuffledSans(Extension currentSans) { + final var san1Sequence = ASN1Sequence.getInstance(currentSans.getParsedValue().toASN1Primitive()); + + final var shuffledSans = new ArrayList(); + final var objects = san1Sequence.getObjects(); + while (objects.hasMoreElements()) { + shuffledSans.add(GeneralName.getInstance(objects.nextElement())); + } + + for (int i = 0; i < 5; i++) + Collections.shuffle(shuffledSans); + return shuffledSans; + } + // CS-ENFORCE-SINGLE + + SslContextHandler sslContextHandler() { + final var sslParameters = SslParameters.loader(Settings.EMPTY).load(false); + final var trustStoreConfiguration = new TrustStoreConfiguration.PemTrustStoreConfiguration(caCertificatePath); + final var keyStoreConfiguration = new KeyStoreConfiguration.PemKeyStoreConfiguration( + accessCertificatePath, + accessCertificatePrivateKeyPath, + certificatesRule.privateKeyPassword().toCharArray() + ); + + SslConfiguration sslConfiguration = new SslConfiguration(sslParameters, trustStoreConfiguration, keyStoreConfiguration); + return new SslContextHandler(sslConfiguration, false); + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/SslSettingsManagerTest.java b/src/test/java/org/opensearch/security/ssl/SslSettingsManagerTest.java new file mode 100644 index 0000000000..1aa2c47eb3 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/SslSettingsManagerTest.java @@ -0,0 +1,464 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl; + +import java.nio.file.Path; +import java.util.List; +import java.util.Locale; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.Test; + +import org.opensearch.OpenSearchException; +import org.opensearch.common.settings.MockSecureSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.env.TestEnvironment; +import org.opensearch.security.ssl.config.CertType; + +import io.netty.handler.ssl.ClientAuth; +import io.netty.handler.ssl.SslContext; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.ssl.CertificatesUtils.privateKeyToPemObject; +import static org.opensearch.security.ssl.CertificatesUtils.writePemContent; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_CLIENTAUTH_MODE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED; +import static 
org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_HTTP_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_SERVER_EXTENDED_PREFIX; +import static org.opensearch.security.support.ConfigConstants.SECURITY_SSL_ONLY; +import static org.junit.Assert.assertThrows; + +public class SslSettingsManagerTest extends RandomizedTest { + + @ClassRule + public static CertificatesRule certificatesRule = new CertificatesRule(); + + @BeforeClass + public static void setUp() throws Exception { + writeCertificates("ca_http_certificate.pem", "access_http_certificate.pem", "access_http_certificate_pk.pem"); + writeCertificates("ca_transport_certificate.pem", "access_transport_certificate.pem", "access_transport_certificate_pk.pem"); + } + + static void 
writeCertificates(final String trustedFileName, final String accessFileName, final String accessPkFileName) + throws Exception { + writePemContent(path(trustedFileName), certificatesRule.caCertificateHolder()); + writePemContent(path(accessFileName), certificatesRule.accessCertificateHolder()); + writePemContent( + path(accessPkFileName), + privateKeyToPemObject(certificatesRule.accessCertificatePrivateKey(), certificatesRule.privateKeyPassword()) + ); + } + + static Path path(final String fileName) { + return certificatesRule.configRootFolder().resolve(fileName); + } + + @Test + public void failsIfNoSslSet() throws Exception { + final var settings = defaultSettingsBuilder().build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(settings))); + } + + @Test + public void transportFailsIfNoConfigDefine() throws Exception { + final var noTransportSettings = defaultSettingsBuilder().put(SECURITY_SSL_HTTP_ENABLED, true).build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(noTransportSettings))); + } + + @Test + public void transportFailsIfConfigEnabledButNotDefined() throws Exception { + final var noTransportSettingsButItEnabled = defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true).build(); + assertThrows( + OpenSearchException.class, + () -> new SslSettingsManager(TestEnvironment.newEnvironment(noTransportSettingsButItEnabled)) + ); + } + + @Test + public void transportFailsIfJdkTrustStoreHasNotBeenSet() throws Exception { + final var noTransportSettingsButItEnabled = defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, certificatesRule.configRootFolder().toString()) + .build(); + assertThrows( + OpenSearchException.class, + () -> new SslSettingsManager(TestEnvironment.newEnvironment(noTransportSettingsButItEnabled)) + ); + } + + @Test + public void 
transportFailsIfExtendedKeyUsageEnabledForJdkKeyStoreButNotConfigured() throws Exception { + final var noTransportSettingsButItEnabled = defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, certificatesRule.configRootFolder().toString()) + .put(SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, certificatesRule.configRootFolder().toString()) + .put(SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED, true) + .build(); + assertThrows( + OpenSearchException.class, + () -> new SslSettingsManager(TestEnvironment.newEnvironment(noTransportSettingsButItEnabled)) + ); + } + + @Test + public void transportFailsIfExtendedKeyUsageEnabledForPemKeyStoreButNotConfigured() throws Exception { + final var noTransportSettingsButItEnabled = defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_PEMCERT_FILEPATH, certificatesRule.configRootFolder().toString()) + .put(SECURITY_SSL_TRANSPORT_PEMKEY_FILEPATH, certificatesRule.configRootFolder().toString()) + .put(SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED, true) + .build(); + assertThrows( + OpenSearchException.class, + () -> new SslSettingsManager(TestEnvironment.newEnvironment(noTransportSettingsButItEnabled)) + ); + } + + @Test + public void transportFailsIfConfigDisabled() throws Exception { + Settings settings = defaultSettingsBuilder().put(SECURITY_SSL_HTTP_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_ENABLED, false) + .build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(settings))); + } + + @Test + public void httpConfigFailsIfBothPemAndJDKSettingsWereSet() throws Exception { + final var keyStoreSettings = randomFrom(List.of(SECURITY_SSL_HTTP_KEYSTORE_FILEPATH)); + final var pemKeyStoreSettings = randomFrom( + List.of(SECURITY_SSL_HTTP_PEMKEY_FILEPATH, SECURITY_SSL_HTTP_PEMCERT_FILEPATH, SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH) + ); + final var settings = 
defaultSettingsBuilder().put(SECURITY_SSL_HTTP_ENABLED, true) + .put(keyStoreSettings, "aaa") + .put(pemKeyStoreSettings, "bbb") + .build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(settings))); + } + + @Test + public void httpConfigFailsIfHttpEnabledButButNotDefined() throws Exception { + final var settings = defaultSettingsBuilder().put(SECURITY_SSL_HTTP_ENABLED, true).build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(settings))); + } + + @Test + public void httpConfigFailsIfClientAuthRequiredAndJdkTrustStoreNotSet() throws Exception { + final var settings = defaultSettingsBuilder().put(SECURITY_SSL_HTTP_ENABLED, true) + .put(SECURITY_SSL_HTTP_CLIENTAUTH_MODE, ClientAuth.REQUIRE.name().toLowerCase(Locale.ROOT)) + .put(SECURITY_SSL_HTTP_KEYSTORE_FILEPATH, certificatesRule.configRootFolder().toString()) + .build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(settings))); + } + + @Test + public void httpConfigFailsIfClientAuthRequiredAndPemTrustedCasNotSet() throws Exception { + final var settings = defaultSettingsBuilder().put(SECURITY_SSL_HTTP_ENABLED, true) + .put(SECURITY_SSL_HTTP_CLIENTAUTH_MODE, ClientAuth.REQUIRE.name().toLowerCase(Locale.ROOT)) + .put(SECURITY_SSL_HTTP_PEMKEY_FILEPATH, "aaa") + .put(SECURITY_SSL_HTTP_PEMCERT_FILEPATH, "bbb") + .build(); + assertThrows(OpenSearchException.class, () -> new SslSettingsManager(TestEnvironment.newEnvironment(settings))); + } + + @Test + public void loadConfigurationAndBuildHSslContextForSslOnlyMode() throws Exception { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + securitySettings.setString(SSL_HTTP_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + final var settingsBuilder = 
defaultSettingsBuilder().setSecureSettings(securitySettings); + withTransportSslSettings( + settingsBuilder, + "ca_transport_certificate.pem", + "access_transport_certificate.pem", + "access_transport_certificate_pk.pem" + ); + withHttpSslSettings(settingsBuilder); + final var transportEnabled = randomBoolean(); + final var sslSettingsManager = new SslSettingsManager( + TestEnvironment.newEnvironment( + settingsBuilder.put(SECURITY_SSL_TRANSPORT_ENABLED, transportEnabled).put(SECURITY_SSL_ONLY, true).build() + ) + ); + + assertThat("Loaded HTTP configuration", sslSettingsManager.sslConfiguration(CertType.HTTP).isPresent()); + if (transportEnabled) { + assertThat("Loaded Transport configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT).isPresent()); + assertThat("Loaded Transport Client configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).isPresent()); + } else { + assertThat("Didn't load Transport configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT).isEmpty()); + assertThat( + "Didn't load Transport Client configuration", + sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).isEmpty() + ); + } + + assertThat("Built HTTP SSL Context", sslSettingsManager.sslContextHandler(CertType.HTTP).isPresent()); + if (transportEnabled) { + assertThat("Built Transport SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT).isPresent()); + assertThat("Built Client SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).isPresent()); + } else { + assertThat("Didn't build Transport SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT).isEmpty()); + assertThat("Didn't build Client SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).isEmpty()); + } + + assertThat( + "Built Server SSL context for HTTP", + sslSettingsManager.sslContextHandler(CertType.HTTP).map(SslContextHandler::sslContext).map(SslContext::isServer).orElse(false) + ); + 
} + + @Test + public void loadConfigurationAndBuildSslContextForClientNode() throws Exception { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + securitySettings.setString(SSL_HTTP_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + final var settingsBuilder = defaultSettingsBuilder().setSecureSettings(securitySettings); + withTransportSslSettings( + settingsBuilder, + "ca_transport_certificate.pem", + "access_transport_certificate.pem", + "access_transport_certificate_pk.pem" + ); + withHttpSslSettings(settingsBuilder); + final var sslSettingsManager = new SslSettingsManager( + TestEnvironment.newEnvironment( + settingsBuilder.put("client.type", "client").put(SECURITY_SSL_HTTP_ENABLED, randomBoolean()).build() + ) + ); + + assertThat("Didn't load HTTP configuration", sslSettingsManager.sslConfiguration(CertType.HTTP).isEmpty()); + assertThat("Loaded Transport configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT).isPresent()); + assertThat("Loaded Transport Client configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).isPresent()); + + assertThat("Didn't build HTTP SSL Context", sslSettingsManager.sslContextHandler(CertType.HTTP).isEmpty()); + assertThat("Built Transport SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT).isPresent()); + assertThat("Built Client SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).isPresent()); + + assertThat( + "Built Server SSL context for Transport", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT) + .map(SslContextHandler::sslContext) + .map(SslContext::isServer) + .orElse(false) + ); + assertThat( + "Built Client SSL context for Transport Client", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT) + .map(SslContextHandler::sslContext) + .map(SslContext::isClient) + 
.orElse(false) + + ); + } + + @Test + public void loadConfigurationAndBuildSslContexts() throws Exception { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + securitySettings.setString(SSL_HTTP_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + final var settingsBuilder = defaultSettingsBuilder().setSecureSettings(securitySettings); + withTransportSslSettings( + settingsBuilder, + "ca_transport_certificate.pem", + "access_transport_certificate.pem", + "access_transport_certificate_pk.pem" + ); + withHttpSslSettings(settingsBuilder); + final var sslSettingsManager = new SslSettingsManager(TestEnvironment.newEnvironment(settingsBuilder.build())); + assertThat("Loaded HTTP configuration", sslSettingsManager.sslConfiguration(CertType.HTTP).isPresent()); + assertThat("Loaded Transport configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT).isPresent()); + assertThat("Loaded Transport Client configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).isPresent()); + + assertThat("Built HTTP SSL Context", sslSettingsManager.sslContextHandler(CertType.HTTP).isPresent()); + assertThat("Built Transport SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT).isPresent()); + assertThat("Built Transport Client SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).isPresent()); + + assertThat( + "Built Server SSL context for HTTP", + sslSettingsManager.sslContextHandler(CertType.HTTP).map(SslContextHandler::sslContext).map(SslContext::isServer).orElse(false) + ); + assertThat( + "Built Server SSL context for Transport", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT) + .map(SslContextHandler::sslContext) + .map(SslContext::isServer) + .orElse(false) + ); + assertThat( + "Built Client SSL context for Transport Client", + 
sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT) + .map(SslContextHandler::sslContext) + .map(SslContext::isClient) + .orElse(false) + + ); + } + + @Test + public void loadConfigurationAndBuildTransportSslContext() throws Exception { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + final var settingsBuilder = defaultSettingsBuilder().setSecureSettings(securitySettings); + withTransportSslSettings( + settingsBuilder, + "ca_transport_certificate.pem", + "access_transport_certificate.pem", + "access_transport_certificate_pk.pem" + ); + final var sslSettingsManager = new SslSettingsManager(TestEnvironment.newEnvironment(settingsBuilder.build())); + + assertThat("Didn't load HTTP configuration", sslSettingsManager.sslConfiguration(CertType.HTTP).isEmpty()); + assertThat("Loaded Transport configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT).isPresent()); + assertThat("Loaded Transport Client configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).isPresent()); + assertThat( + "SSL configuration for Transport and Transport Client is the same", + sslSettingsManager.sslConfiguration(CertType.TRANSPORT) + .flatMap(t -> sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).map(tc -> tc.equals(t))) + .orElse(false) + ); + + assertThat("Built HTTP SSL Context", sslSettingsManager.sslContextHandler(CertType.HTTP).isEmpty()); + assertThat("Built Transport SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT).isPresent()); + assertThat("Built Transport Client SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).isPresent()); + + assertThat( + "Built Server SSL context for Transport", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT) + .map(SslContextHandler::sslContext) + .map(SslContext::isServer) + .orElse(false) + + ); + assertThat( + 
"Built Client SSL context for Transport Client", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT) + .map(SslContextHandler::sslContext) + .map(SslContext::isClient) + .orElse(false) + + ); + } + + @Test + public void loadConfigurationAndBuildExtendedTransportSslContexts() throws Exception { + writeCertificates( + "ca_server_transport_certificate.pem", + "access_server_transport_certificate.pem", + "access_server_transport_certificate_pk.pem" + ); + writeCertificates( + "ca_client_transport_certificate.pem", + "access_client_transport_certificate.pem", + "access_client_transport_certificate_pk.pem" + ); + + final var securitySettings = new MockSecureSettings(); + securitySettings.setString( + SSL_TRANSPORT_PREFIX + SSL_TRANSPORT_SERVER_EXTENDED_PREFIX + "pemkey_password_secure", + certificatesRule.privateKeyPassword() + ); + securitySettings.setString( + SSL_TRANSPORT_PREFIX + SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX + "pemkey_password_secure", + certificatesRule.privateKeyPassword() + ); + final var sslSettingsManager = new SslSettingsManager( + TestEnvironment.newEnvironment( + defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_SERVER_PEMTRUSTEDCAS_FILEPATH, path("ca_server_transport_certificate.pem")) + .put(SECURITY_SSL_TRANSPORT_SERVER_PEMCERT_FILEPATH, path("access_server_transport_certificate.pem")) + .put(SECURITY_SSL_TRANSPORT_SERVER_PEMKEY_FILEPATH, path("access_server_transport_certificate_pk.pem")) + .put(SECURITY_SSL_TRANSPORT_CLIENT_PEMTRUSTEDCAS_FILEPATH, path("ca_client_transport_certificate.pem")) + .put(SECURITY_SSL_TRANSPORT_CLIENT_PEMCERT_FILEPATH, path("access_client_transport_certificate.pem")) + .put(SECURITY_SSL_TRANSPORT_CLIENT_PEMKEY_FILEPATH, path("access_client_transport_certificate_pk.pem")) + .setSecureSettings(securitySettings) + .build() + ) + ); + + assertThat("Didn't load HTTP configuration", 
sslSettingsManager.sslConfiguration(CertType.HTTP).isEmpty()); + assertThat("Loaded Transport configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT).isPresent()); + assertThat("Loaded Transport Client configuration", sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).isPresent()); + assertThat( + "SSL configuration for Transport and Transport Client is not the same", + sslSettingsManager.sslConfiguration(CertType.TRANSPORT) + .flatMap(t -> sslSettingsManager.sslConfiguration(CertType.TRANSPORT_CLIENT).map(tc -> !tc.equals(t))) + .orElse(true) + ); + assertThat("Built HTTP SSL Context", sslSettingsManager.sslContextHandler(CertType.HTTP).isEmpty()); + assertThat("Built Transport SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT).isPresent()); + assertThat("Built Transport Client SSL Context", sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT).isPresent()); + + assertThat( + "Built Server SSL context for Transport", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT) + .map(SslContextHandler::sslContext) + .map(SslContext::isServer) + .orElse(false) + + ); + assertThat( + "Built Client SSL context for Transport Client", + sslSettingsManager.sslContextHandler(CertType.TRANSPORT_CLIENT) + .map(SslContextHandler::sslContext) + .map(SslContext::isClient) + .orElse(false) + + ); + } + + private void withTransportSslSettings( + final Settings.Builder settingsBuilder, + final String caFileName, + final String accessFileName, + final String accessPkFileName + ) { + settingsBuilder.put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_PEMTRUSTEDCAS_FILEPATH, path(caFileName)) + .put(SECURITY_SSL_TRANSPORT_PEMCERT_FILEPATH, path(accessFileName)) + .put(SECURITY_SSL_TRANSPORT_PEMKEY_FILEPATH, path(accessPkFileName)); + } + + private void withHttpSslSettings(final Settings.Builder settingsBuilder) { + settingsBuilder.put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_HTTP_ENABLED, 
true) + .put(SECURITY_SSL_HTTP_PEMTRUSTEDCAS_FILEPATH, path("ca_http_certificate.pem")) + .put(SECURITY_SSL_HTTP_PEMCERT_FILEPATH, path("access_http_certificate.pem")) + .put(SECURITY_SSL_HTTP_PEMKEY_FILEPATH, path("access_http_certificate_pk.pem")); + } + + Settings.Builder defaultSettingsBuilder() { + return Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), certificatesRule.configRootFolder().toString()) + .put("client.type", "node"); + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/config/CertificateTest.java b/src/test/java/org/opensearch/security/ssl/config/CertificateTest.java new file mode 100644 index 0000000000..5fe2185d44 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/config/CertificateTest.java @@ -0,0 +1,38 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.lang.reflect.Method; + +import org.junit.Test; + +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.junit.Assert.fail; + +public class CertificateTest { + + @Test + public void testGetObjectMethod() { + try { + final Method method = Certificate.getObjectMethod(); + assertThat("Method should not be null", method, notNullValue()); + assertThat( + "One of the expected methods should be available", + method.getName().equals("getBaseObject") || method.getName().equals("getObject") + ); + } catch (ClassNotFoundException | NoSuchMethodException e) { + fail("Exception should not be thrown: " + e.getMessage()); + } + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/config/JdkSslCertificatesLoaderTest.java b/src/test/java/org/opensearch/security/ssl/config/JdkSslCertificatesLoaderTest.java new file mode 100644 index 0000000000..174f6c0fd5 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/config/JdkSslCertificatesLoaderTest.java @@ -0,0 +1,318 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.security.KeyStore; +import java.security.PrivateKey; +import java.security.cert.X509Certificate; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import org.junit.Test; + +import org.opensearch.common.collect.Tuple; +import org.opensearch.common.settings.MockSecureSettings; +import org.opensearch.env.TestEnvironment; + +import static java.util.Objects.isNull; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.ssl.util.SSLConfigConstants.DEFAULT_STORE_PASSWORD; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.KEYSTORE_TYPE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_TRUSTSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_KEYSTORE_TYPE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_KEYSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_TRUSTSTORE_ALIAS; +import static 
org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_TRUSTSTORE_TYPE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_HTTP_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_SERVER_EXTENDED_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_ALIAS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.TRUSTSTORE_TYPE; + +public class JdkSslCertificatesLoaderTest extends SslCertificatesLoaderTest { + + static final Function resolveKeyStoreType = s -> isNull(s) ? KeyStore.getDefaultType() : s; + + static final String SERVER_TRUSTSTORE_ALIAS = "server-truststore-alias"; + + static final String SERVER_KEYSTORE_ALIAS = "server-keystore-alias"; + + static final String CLIENT_TRUSTSTORE_ALIAS = "client-truststore-alias"; + + static final String CLIENT_KEYSTORE_ALIAS = "client-keystore-alias"; + + @Test + public void loadHttpSslConfigurationFromKeyAndTrustStoreFiles() throws Exception { + testJdkBasedSslConfiguration(SSL_HTTP_PREFIX, randomBoolean()); + } + + @Test + public void loadTransportJdkBasedSslConfiguration() throws Exception { + testJdkBasedSslConfiguration(SSL_TRANSPORT_PREFIX, true); + } + + @Test + public void loadTransportJdkBasedSslExtendedConfiguration() throws Exception { + final var clientKeyPair = certificatesRule.generateKeyPair(); + + final var serverCaCertificate = certificatesRule.x509CaCertificate(); + final var clientCaCertificate = certificatesRule.toX509Certificate(certificatesRule.generateCaCertificate(clientKeyPair)); + + final var 
serverAccessCertificateKey = certificatesRule.accessCertificatePrivateKey(); + final var serverAccessCertificate = certificatesRule.x509AccessCertificate(); + + final var clientAccessCertificateAndKey = certificatesRule.generateAccessCertificate(clientKeyPair); + + final var clientAccessCertificateKey = clientAccessCertificateAndKey.v1(); + final var clientAccessCertificate = certificatesRule.toX509Certificate(clientAccessCertificateAndKey.v2()); + + final var trustStoreType = randomKeyStoreType(); + final var keyStoreType = randomKeyStoreType(); + + final var useSecurePassword = randomBoolean(); + final var trustStorePassword = randomKeyStorePassword(useSecurePassword); + final var keyStorePassword = randomKeyStorePassword(useSecurePassword); + + final var trustStorePath = createTrustStore( + trustStoreType, + trustStorePassword, + Map.of(SERVER_TRUSTSTORE_ALIAS, serverCaCertificate, CLIENT_TRUSTSTORE_ALIAS, clientCaCertificate) + ); + final var keyStorePath = createKeyStore( + keyStoreType, + keyStorePassword, + Map.of( + SERVER_KEYSTORE_ALIAS, + Tuple.tuple(serverAccessCertificateKey, serverAccessCertificate), + CLIENT_KEYSTORE_ALIAS, + Tuple.tuple(clientAccessCertificateKey, clientAccessCertificate) + ) + ); + + final var settingsBuilder = defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_TRUSTSTORE_TYPE, trustStoreType) + .put(SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, trustStorePath) + .put(SECURITY_SSL_TRANSPORT_SERVER_TRUSTSTORE_ALIAS, SERVER_TRUSTSTORE_ALIAS) + .put(SECURITY_SSL_TRANSPORT_CLIENT_TRUSTSTORE_ALIAS, CLIENT_TRUSTSTORE_ALIAS) + .put(SECURITY_SSL_TRANSPORT_KEYSTORE_TYPE, keyStoreType) + .put(SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, keyStorePath) + .put(SECURITY_SSL_TRANSPORT_SERVER_KEYSTORE_ALIAS, SERVER_KEYSTORE_ALIAS) + .put(SECURITY_SSL_TRANSPORT_CLIENT_KEYSTORE_ALIAS, CLIENT_KEYSTORE_ALIAS); + + if (useSecurePassword) { + final 
var securitySettings = new MockSecureSettings(); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "keystore_password_secure", keyStorePassword); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "truststore_password_secure", trustStorePassword); + + securitySettings.setString( + SSL_TRANSPORT_PREFIX + SSL_TRANSPORT_SERVER_EXTENDED_PREFIX + "keystore_keypassword_secure", + certificatesRule.privateKeyPassword() + ); + securitySettings.setString( + SSL_TRANSPORT_PREFIX + SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX + "keystore_keypassword_secure", + certificatesRule.privateKeyPassword() + ); + settingsBuilder.setSecureSettings(securitySettings); + } else { + settingsBuilder.put(SSL_TRANSPORT_PREFIX + "keystore_password", keyStorePassword); + settingsBuilder.put(SSL_TRANSPORT_PREFIX + "truststore_password", trustStorePassword); + + settingsBuilder.put( + SSL_TRANSPORT_PREFIX + SSL_TRANSPORT_SERVER_EXTENDED_PREFIX + "keystore_keypassword", + certificatesRule.privateKeyPassword() + ); + settingsBuilder.put( + SSL_TRANSPORT_PREFIX + SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX + "keystore_keypassword", + certificatesRule.privateKeyPassword() + ); + } + final var settings = settingsBuilder.build(); + + final var serverConfiguration = new SslCertificatesLoader(SSL_TRANSPORT_PREFIX, SSL_TRANSPORT_SERVER_EXTENDED_PREFIX) + .loadConfiguration(TestEnvironment.newEnvironment(settings)); + assertTrustStoreConfiguration( + serverConfiguration.v1(), + trustStorePath, + new Certificate(serverCaCertificate, resolveKeyStoreType.apply(trustStoreType), SERVER_TRUSTSTORE_ALIAS, false) + ); + assertKeyStoreConfiguration( + serverConfiguration.v2(), + List.of(keyStorePath), + new Certificate(serverAccessCertificate, resolveKeyStoreType.apply(keyStoreType), SERVER_KEYSTORE_ALIAS, true) + ); + + final var clientConfiguration = new SslCertificatesLoader(SSL_TRANSPORT_PREFIX, SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX) + .loadConfiguration(TestEnvironment.newEnvironment(settings)); + 
assertTrustStoreConfiguration( + clientConfiguration.v1(), + trustStorePath, + new Certificate(clientCaCertificate, resolveKeyStoreType.apply(trustStoreType), CLIENT_TRUSTSTORE_ALIAS, false) + ); + assertKeyStoreConfiguration( + clientConfiguration.v2(), + List.of(keyStorePath), + new Certificate(clientAccessCertificate, resolveKeyStoreType.apply(keyStoreType), CLIENT_KEYSTORE_ALIAS, true) + ); + } + + private void testJdkBasedSslConfiguration(final String sslConfigPrefix, final boolean useAuthorityCertificate) throws Exception { + final var useSecurePassword = randomBoolean(); + + final var keyPair = certificatesRule.generateKeyPair(); + final var trustStoreCertificates = Map.of( + "default-truststore-alias", + certificatesRule.x509CaCertificate(), + "another-truststore-alias", + certificatesRule.toX509Certificate(certificatesRule.generateCaCertificate(keyPair)) + ); + + final var keysAndCertificate = certificatesRule.generateAccessCertificate(keyPair); + final var keyStoreCertificates = Map.of( + "default-keystore-alias", + Tuple.tuple(certificatesRule.accessCertificatePrivateKey(), certificatesRule.x509AccessCertificate()), + "another-keystore-alias", + Tuple.tuple(keysAndCertificate.v1(), certificatesRule.toX509Certificate(keysAndCertificate.v2())) + ); + + final var trustStoreAlias = randomFrom(new String[] { "default-truststore-alias", "another-truststore-alias", null }); + final var keyStoreAlias = (String) null;// randomFrom(new String[] { "default-keystore-alias", "another-keystore-alias", null }); + + final var keyStorePassword = randomKeyStorePassword(useSecurePassword); + final var trustStorePassword = randomKeyStorePassword(useSecurePassword); + + final var keyStoreType = randomKeyStoreType(); + final var keyStorePath = createKeyStore(keyStoreType, keyStorePassword, keyStoreCertificates); + + final var trustStoreType = randomKeyStoreType(); + final var trustStorePath = createTrustStore(trustStoreType, trustStorePassword, trustStoreCertificates); + + 
final var settingsBuilder = defaultSettingsBuilder().put(sslConfigPrefix + ENABLED, true) + .put(sslConfigPrefix + KEYSTORE_FILEPATH, keyStorePath) + .put(sslConfigPrefix + KEYSTORE_ALIAS, keyStoreAlias) + .put(sslConfigPrefix + KEYSTORE_TYPE, keyStoreType); + if (useAuthorityCertificate) { + settingsBuilder.put(sslConfigPrefix + TRUSTSTORE_FILEPATH, trustStorePath) + .put(sslConfigPrefix + TRUSTSTORE_ALIAS, trustStoreAlias) + .put(sslConfigPrefix + TRUSTSTORE_TYPE, trustStoreType); + } + if (useSecurePassword) { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(sslConfigPrefix + "keystore_password_secure", keyStorePassword); + securitySettings.setString(sslConfigPrefix + "keystore_keypassword_secure", certificatesRule.privateKeyPassword()); + if (useAuthorityCertificate) { + securitySettings.setString(sslConfigPrefix + "truststore_password_secure", trustStorePassword); + } + settingsBuilder.setSecureSettings(securitySettings); + } else { + settingsBuilder.put(sslConfigPrefix + "keystore_password", keyStorePassword); + settingsBuilder.put(sslConfigPrefix + "keystore_keypassword", certificatesRule.privateKeyPassword()); + if (useAuthorityCertificate) { + settingsBuilder.put(sslConfigPrefix + "truststore_password", trustStorePassword); + } + } + + final var configuration = new SslCertificatesLoader(sslConfigPrefix).loadConfiguration( + TestEnvironment.newEnvironment(settingsBuilder.build()) + ); + + if (useAuthorityCertificate) { + final var expectedTrustStoreCertificates = isNull(trustStoreAlias) + ? 
trustStoreCertificates.entrySet() + .stream() + .map(e -> new Certificate(e.getValue(), resolveKeyStoreType.apply(trustStoreType), e.getKey(), false)) + .toArray(Certificate[]::new) + : trustStoreCertificates.entrySet() + .stream() + .filter(e -> e.getKey().equals(trustStoreAlias)) + .map(e -> new Certificate(e.getValue(), resolveKeyStoreType.apply(trustStoreType), e.getKey(), false)) + .toArray(Certificate[]::new); + assertTrustStoreConfiguration(configuration.v1(), trustStorePath, expectedTrustStoreCertificates); + } else { + assertThat(configuration.v1(), is(TrustStoreConfiguration.EMPTY_CONFIGURATION)); + } + + final var expectedKeyStoreCertificates = isNull(keyStoreAlias) + ? keyStoreCertificates.entrySet() + .stream() + .map(e -> new Certificate(e.getValue().v2(), resolveKeyStoreType.apply(keyStoreType), e.getKey(), true)) + .toArray(Certificate[]::new) + : keyStoreCertificates.entrySet() + .stream() + .filter(e -> e.getKey().equals(keyStoreAlias)) + .map(e -> new Certificate(e.getValue().v2(), resolveKeyStoreType.apply(keyStoreType), e.getKey(), true)) + .toArray(Certificate[]::new); + assertKeyStoreConfiguration(configuration.v2(), List.of(keyStorePath), expectedKeyStoreCertificates); + } + + String randomKeyStoreType() { + return randomFrom(new String[] { "jks", "pkcs12", null }); + } + + String randomKeyStorePassword(final boolean useSecurePassword) { + return useSecurePassword ? randomAsciiAlphanumOfLength(10) : randomFrom(new String[] { randomAsciiAlphanumOfLength(10), null }); + } + + Path createTrustStore(final String type, final String password, Map certificates) throws Exception { + final var keyStore = keyStore(type); + for (final var alias : certificates.keySet()) { + keyStore.setCertificateEntry(alias, certificates.get(alias)); + } + final var trustStorePath = path(String.format("truststore.%s", isNull(type) ? 
"jsk" : type)); + storeKeyStore(keyStore, trustStorePath, password); + return trustStorePath; + } + + Path createKeyStore(final String type, final String password, final Map> keysAndCertificates) + throws Exception { + final var keyStore = keyStore(type); + final var keyStorePath = path(String.format("keystore.%s", isNull(type) ? "jsk" : type)); + for (final var alias : keysAndCertificates.keySet()) { + final var keyAndCertificate = keysAndCertificates.get(alias); + keyStore.setKeyEntry( + alias, + keyAndCertificate.v1(), + certificatesRule.privateKeyPassword().toCharArray(), + new X509Certificate[] { keyAndCertificate.v2() } + ); + } + storeKeyStore(keyStore, keyStorePath, password); + return keyStorePath; + } + + KeyStore keyStore(final String type) throws Exception { + final var keyStore = KeyStore.getInstance(isNull(type) ? KeyStore.getDefaultType() : type); + keyStore.load(null, null); + return keyStore; + } + + void storeKeyStore(final KeyStore keyStore, final Path path, final String password) throws Exception { + try (final var out = Files.newOutputStream(path)) { + keyStore.store(out, isNull(password) ? DEFAULT_STORE_PASSWORD.toCharArray() : password.toCharArray()); + } + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/config/PemSslCertificatesLoaderTest.java b/src/test/java/org/opensearch/security/ssl/config/PemSslCertificatesLoaderTest.java new file mode 100644 index 0000000000..d03bf9c59d --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/config/PemSslCertificatesLoaderTest.java @@ -0,0 +1,174 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.security.SecureRandom; +import java.util.List; + +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.BeforeClass; +import org.junit.Test; +import org.bouncycastle.asn1.pkcs.PrivateKeyInfo; +import org.bouncycastle.openssl.PKCS8Generator; +import org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8EncryptorBuilder; + +import org.opensearch.common.settings.MockSecureSettings; +import org.opensearch.env.TestEnvironment; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.ssl.CertificatesUtils.privateKeyToPemObject; +import static org.opensearch.security.ssl.CertificatesUtils.writePemContent; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_CERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_KEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.PEM_TRUSTED_CAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMKEY_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_CLIENT_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMCERT_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMKEY_FILEPATH; +import static 
org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_SERVER_PEMTRUSTEDCAS_FILEPATH; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_HTTP_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_SERVER_EXTENDED_PREFIX; + +public class PemSslCertificatesLoaderTest extends SslCertificatesLoaderTest { + + final static String PEM_CA_CERTIFICATE_FILE_NAME = "ca_certificate.pem"; + + final static String PEM_KEY_CERTIFICATE_FILE_NAME = "key_certificate.pem"; + + final static String PEM_CERTIFICATE_PRIVATE_KEY_FILE_NAME = "private_key.pem"; + + @BeforeClass + public static void setup() throws Exception { + writePemContent(path(PEM_CA_CERTIFICATE_FILE_NAME), certificatesRule.caCertificateHolder()); + writePemContent(path(PEM_KEY_CERTIFICATE_FILE_NAME), certificatesRule.accessCertificateHolder()); + writePemContent( + path(PEM_CERTIFICATE_PRIVATE_KEY_FILE_NAME), + new PKCS8Generator( + PrivateKeyInfo.getInstance(certificatesRule.accessCertificatePrivateKey().getEncoded()), + new JceOpenSSLPKCS8EncryptorBuilder(PKCS8Generator.PBE_SHA1_3DES).setRandom(new SecureRandom()) + .setPassword(certificatesRule.privateKeyPassword().toCharArray()) + .build() + ).generate() + ); + } + + @Test + public void loadHttpSslConfigurationFromPemFiles() throws Exception { + testLoadPemBasedConfiguration(SSL_HTTP_PREFIX, randomBoolean()); + } + + @Test + public void loadTransportSslConfigurationFromPemFiles() throws Exception { + testLoadPemBasedConfiguration(SSL_HTTP_PREFIX, false); + } + + void testLoadPemBasedConfiguration(final String sslConfigPrefix, final boolean useAuthorityCertificate) throws Exception { + final var settingsBuilder = defaultSettingsBuilder().put(sslConfigPrefix + ENABLED, true) + .put(sslConfigPrefix + 
PEM_CERT_FILEPATH, path(PEM_KEY_CERTIFICATE_FILE_NAME)) + .put(sslConfigPrefix + PEM_KEY_FILEPATH, path(PEM_CERTIFICATE_PRIVATE_KEY_FILE_NAME)); + if (useAuthorityCertificate) { + settingsBuilder.put(sslConfigPrefix + PEM_TRUSTED_CAS_FILEPATH, path(PEM_CA_CERTIFICATE_FILE_NAME)); + } + if (randomBoolean()) { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(sslConfigPrefix + "pemkey_password_secure", certificatesRule.privateKeyPassword()); + settingsBuilder.setSecureSettings(securitySettings); + } else { + settingsBuilder.put(sslConfigPrefix + "pemkey_password", certificatesRule.privateKeyPassword()); + } + + final var settings = settingsBuilder.build(); + final var configuration = new SslCertificatesLoader(SSL_HTTP_PREFIX).loadConfiguration(TestEnvironment.newEnvironment(settings)); + if (useAuthorityCertificate) { + assertTrustStoreConfiguration( + configuration.v1(), + path(PEM_CA_CERTIFICATE_FILE_NAME), + new Certificate(certificatesRule.x509CaCertificate(), false) + ); + } else { + assertThat(configuration.v1(), is(TrustStoreConfiguration.EMPTY_CONFIGURATION)); + } + assertKeyStoreConfiguration( + configuration.v2(), + List.of(path(PEM_KEY_CERTIFICATE_FILE_NAME), path(PEM_CERTIFICATE_PRIVATE_KEY_FILE_NAME)), + new Certificate(certificatesRule.x509AccessCertificate(), true) + ); + } + + @Test + public void loadExtendedTransportSslConfigurationFromPemFiles() throws Exception { + final var keyPair = certificatesRule.generateKeyPair(); + final var clientCaCertificate = certificatesRule.generateCaCertificate(keyPair); + final var keyAndCertificate = certificatesRule.generateAccessCertificate(keyPair); + final var clientCaCertificatePath = "client_ca_certificate.pem"; + final var clientKeyCertificatePath = "client_key_certificate.pem"; + final var clientPrivateKeyCertificatePath = "client_private_key_certificate.pem"; + final var clientPrivateKeyPassword = RandomStringUtils.randomAlphabetic(10); + + 
writePemContent(path(clientCaCertificatePath), clientCaCertificate); + writePemContent(path(clientKeyCertificatePath), keyAndCertificate.v2()); + writePemContent(path(clientPrivateKeyCertificatePath), privateKeyToPemObject(keyAndCertificate.v1(), clientPrivateKeyPassword)); + + final var settingsBuilder = defaultSettingsBuilder().put(SECURITY_SSL_TRANSPORT_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_EXTENDED_KEY_USAGE_ENABLED, true) + .put(SECURITY_SSL_TRANSPORT_SERVER_PEMTRUSTEDCAS_FILEPATH, path(PEM_CA_CERTIFICATE_FILE_NAME)) + .put(SECURITY_SSL_TRANSPORT_SERVER_PEMCERT_FILEPATH, path(PEM_KEY_CERTIFICATE_FILE_NAME)) + .put(SECURITY_SSL_TRANSPORT_SERVER_PEMKEY_FILEPATH, path(PEM_CERTIFICATE_PRIVATE_KEY_FILE_NAME)) + + .put(SECURITY_SSL_TRANSPORT_CLIENT_PEMTRUSTEDCAS_FILEPATH, path(clientCaCertificatePath)) + .put(SECURITY_SSL_TRANSPORT_CLIENT_PEMCERT_FILEPATH, path(clientKeyCertificatePath)) + .put(SECURITY_SSL_TRANSPORT_CLIENT_PEMKEY_FILEPATH, path(clientPrivateKeyCertificatePath)); + if (randomBoolean()) { + final var securitySettings = new MockSecureSettings(); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "server.pemkey_password_secure", certificatesRule.privateKeyPassword()); + securitySettings.setString(SSL_TRANSPORT_PREFIX + "client.pemkey_password_secure", clientPrivateKeyPassword); + settingsBuilder.setSecureSettings(securitySettings); + } else { + settingsBuilder.put(SSL_TRANSPORT_PREFIX + "server.pemkey_password", certificatesRule.privateKeyPassword()); + settingsBuilder.put(SSL_TRANSPORT_PREFIX + "client.pemkey_password", clientPrivateKeyPassword); + } + final var settings = settingsBuilder.build(); + + final var transportServerConfiguration = new SslCertificatesLoader(SSL_TRANSPORT_PREFIX, SSL_TRANSPORT_SERVER_EXTENDED_PREFIX) + .loadConfiguration(TestEnvironment.newEnvironment(settings)); + assertTrustStoreConfiguration( + transportServerConfiguration.v1(), + path(PEM_CA_CERTIFICATE_FILE_NAME), + new 
Certificate(certificatesRule.x509CaCertificate(), false) + ); + assertKeyStoreConfiguration( + transportServerConfiguration.v2(), + List.of(path(PEM_KEY_CERTIFICATE_FILE_NAME), path(PEM_CERTIFICATE_PRIVATE_KEY_FILE_NAME)), + new Certificate(certificatesRule.x509AccessCertificate(), true) + ); + final var transportClientConfiguration = new SslCertificatesLoader(SSL_TRANSPORT_PREFIX, SSL_TRANSPORT_CLIENT_EXTENDED_PREFIX) + .loadConfiguration(TestEnvironment.newEnvironment(settings)); + assertTrustStoreConfiguration( + transportClientConfiguration.v1(), + path(clientCaCertificatePath), + new Certificate(certificatesRule.toX509Certificate(clientCaCertificate), false) + ); + assertKeyStoreConfiguration( + transportClientConfiguration.v2(), + List.of(path(clientKeyCertificatePath), path(clientPrivateKeyCertificatePath)), + new Certificate(certificatesRule.toX509Certificate(keyAndCertificate.v2()), true) + ); + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/config/SslCertificatesLoaderTest.java b/src/test/java/org/opensearch/security/ssl/config/SslCertificatesLoaderTest.java new file mode 100644 index 0000000000..0dfc02b386 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/config/SslCertificatesLoaderTest.java @@ -0,0 +1,66 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.nio.file.Path; +import java.util.List; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import org.junit.ClassRule; + +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.security.ssl.CertificatesRule; + +import static java.util.Objects.nonNull; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.notNullValue; + +public abstract class SslCertificatesLoaderTest extends RandomizedTest { + + @ClassRule + public static CertificatesRule certificatesRule = new CertificatesRule(); + + static Path path(final String fileName) { + return certificatesRule.configRootFolder().resolve(fileName); + } + + Settings.Builder defaultSettingsBuilder() throws Exception { + return Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), certificatesRule.caCertificateHolder().toString()); + } + + void assertTrustStoreConfiguration( + final TrustStoreConfiguration trustStoreConfiguration, + final Path expectedFile, + final Certificate... expectedCertificates + ) { + assertThat("Truststore configuration created", nonNull(trustStoreConfiguration)); + assertThat(trustStoreConfiguration.file(), is(expectedFile)); + assertThat(trustStoreConfiguration.loadCertificates(), containsInAnyOrder(expectedCertificates)); + assertThat(trustStoreConfiguration.createTrustManagerFactory(true), is(notNullValue())); + } + + void assertKeyStoreConfiguration( + final KeyStoreConfiguration keyStoreConfiguration, + final List expectedFiles, + final Certificate... 
expectedCertificates + ) { + assertThat("Keystore configuration created", nonNull(keyStoreConfiguration)); + assertThat(keyStoreConfiguration.files(), contains(expectedFiles.toArray(new Path[0]))); + assertThat(keyStoreConfiguration.loadCertificates(), containsInAnyOrder(expectedCertificates)); + assertThat(keyStoreConfiguration.createKeyManagerFactory(true), is(notNullValue())); + } + +} diff --git a/src/test/java/org/opensearch/security/ssl/config/SslParametersTest.java b/src/test/java/org/opensearch/security/ssl/config/SslParametersTest.java new file mode 100644 index 0000000000..d95c336e15 --- /dev/null +++ b/src/test/java/org/opensearch/security/ssl/config/SslParametersTest.java @@ -0,0 +1,90 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.security.ssl.config; + +import java.util.List; +import java.util.Locale; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.net.ssl.SSLContext; + +import org.junit.Test; + +import org.opensearch.common.settings.Settings; + +import io.netty.handler.ssl.ClientAuth; +import io.netty.handler.ssl.SslProvider; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.opensearch.security.ssl.util.SSLConfigConstants.ALLOWED_SSL_CIPHERS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_CLIENTAUTH_MODE; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED_CIPHERS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_HTTP_ENABLED_PROTOCOLS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED_CIPHERS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SECURITY_SSL_TRANSPORT_ENABLED_PROTOCOLS; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_HTTP_PREFIX; +import static org.opensearch.security.ssl.util.SSLConfigConstants.SSL_TRANSPORT_PREFIX; + +public class SslParametersTest { + + @Test + public void testDefaultSslParameters() throws Exception { + final var settings = Settings.EMPTY; + final var httpSslParameters = SslParameters.loader(settings).load(true); + final var transportSslParameters = SslParameters.loader(settings).load(false); + + final var defaultCiphers = List.of(ALLOWED_SSL_CIPHERS); + final var finalDefaultCiphers = Stream.of(SSLContext.getDefault().getDefaultSSLParameters().getCipherSuites()) + .filter(defaultCiphers::contains) + .sorted(String::compareTo) + .collect(Collectors.toList()); + + assertThat(httpSslParameters.provider(), is(SslProvider.JDK)); + assertThat(transportSslParameters.provider(), is(SslProvider.JDK)); + + 
assertThat(httpSslParameters.allowedProtocols(), is(List.of("TLSv1.3", "TLSv1.2"))); + assertThat(httpSslParameters.allowedCiphers(), is(finalDefaultCiphers)); + + assertThat(transportSslParameters.allowedProtocols(), is(List.of("TLSv1.3", "TLSv1.2"))); + assertThat(transportSslParameters.allowedCiphers(), is(finalDefaultCiphers)); + + assertThat(httpSslParameters.clientAuth(), is(ClientAuth.OPTIONAL)); + assertThat(transportSslParameters.clientAuth(), is(ClientAuth.REQUIRE)); + } + + @Test + public void testCustomSSlParameters() { + final var settings = Settings.builder() + .put(SECURITY_SSL_HTTP_CLIENTAUTH_MODE, ClientAuth.REQUIRE.name().toLowerCase(Locale.ROOT)) + .putList(SECURITY_SSL_HTTP_ENABLED_PROTOCOLS, List.of("TLSv1.2", "TLSv1")) + .putList(SECURITY_SSL_HTTP_ENABLED_CIPHERS, List.of("TLS_AES_256_GCM_SHA384")) + .putList(SECURITY_SSL_TRANSPORT_ENABLED_PROTOCOLS, List.of("TLSv1.3", "TLSv1.2")) + .putList(SECURITY_SSL_TRANSPORT_ENABLED_CIPHERS, List.of("TLS_AES_128_GCM_SHA256", "TLS_AES_256_GCM_SHA384")) + .build(); + final var httpSslParameters = SslParameters.loader(settings.getByPrefix(SSL_HTTP_PREFIX)).load(true); + final var transportSslParameters = SslParameters.loader(settings.getByPrefix(SSL_TRANSPORT_PREFIX)).load(false); + + assertThat(httpSslParameters.provider(), is(SslProvider.JDK)); + assertThat(transportSslParameters.provider(), is(SslProvider.JDK)); + + assertThat(httpSslParameters.allowedProtocols(), is(List.of("TLSv1.2"))); + assertThat(httpSslParameters.allowedCiphers(), is(List.of("TLS_AES_256_GCM_SHA384"))); + + assertThat(transportSslParameters.allowedProtocols(), is(List.of("TLSv1.3", "TLSv1.2"))); + assertThat(transportSslParameters.allowedCiphers(), is(List.of("TLS_AES_128_GCM_SHA256", "TLS_AES_256_GCM_SHA384"))); + + assertThat(httpSslParameters.clientAuth(), is(ClientAuth.REQUIRE)); + assertThat(transportSslParameters.clientAuth(), is(ClientAuth.REQUIRE)); + } + +} diff --git 
a/src/test/java/org/opensearch/security/system_indices/AbstractSystemIndicesTests.java b/src/test/java/org/opensearch/security/system_indices/AbstractSystemIndicesTests.java index deb6f6f5e3..7a7b9cc722 100644 --- a/src/test/java/org/opensearch/security/system_indices/AbstractSystemIndicesTests.java +++ b/src/test/java/org/opensearch/security/system_indices/AbstractSystemIndicesTests.java @@ -174,7 +174,6 @@ void validateSearchResponse(RestHelper.HttpResponse response, int expectedHits) assertThat(searchResponse.status(), is(RestStatus.OK)); assertThat(searchResponse.getHits().getHits().length, is(expectedHits)); assertThat(searchResponse.getFailedShards(), is(0)); - assertThat(searchResponse.getSuccessfulShards(), is(5)); } String permissionExceptionMessage(String action, String username) { diff --git a/src/test/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurerTests.java b/src/test/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurerTests.java index 160d361aef..afb0e44f1e 100644 --- a/src/test/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurerTests.java +++ b/src/test/java/org/opensearch/security/tools/democonfig/SecuritySettingsConfigurerTests.java @@ -363,6 +363,24 @@ public void testCreateSecurityAdminDemoScript_invalidPath() { } } + @Test + public void testReadNonFlatYamlAlreadyConfigured() throws IOException { + installer.OPENSEARCH_CONF_FILE = Paths.get("src/test/resources/opensearch-config-non-flat.yaml").toFile().getAbsolutePath(); + String expectedMessage = installer.OPENSEARCH_CONF_FILE + " seems to be already configured for Security. 
Quit."; + try { + System.setSecurityManager(new NoExitSecurityManager()); + securitySettingsConfigurer.checkIfSecurityPluginIsAlreadyConfigured(); + } catch (SecurityException e) { + assertThat(e.getMessage(), equalTo("System.exit(-1) blocked to allow print statement testing.")); + } finally { + System.setSecurityManager(null); + } + verifyStdOutContainsString(expectedMessage); + + // reset the file pointer + installer.OPENSEARCH_CONF_FILE = installer.OPENSEARCH_CONF_DIR + "opensearch.yml"; + } + @SuppressWarnings("unchecked") public static void setEnv(String key, String value) throws NoSuchFieldException, IllegalAccessException { Class[] classes = Collections.class.getDeclaredClasses(); diff --git a/src/test/java/org/opensearch/security/util/MockIndexMetadataBuilder.java b/src/test/java/org/opensearch/security/util/MockIndexMetadataBuilder.java new file mode 100644 index 0000000000..e8af0f1384 --- /dev/null +++ b/src/test/java/org/opensearch/security/util/MockIndexMetadataBuilder.java @@ -0,0 +1,194 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ +package org.opensearch.security.util; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import com.google.common.collect.ImmutableMap; + +import org.opensearch.Version; +import org.opensearch.cluster.metadata.AliasMetadata; +import org.opensearch.cluster.metadata.DataStream; +import org.opensearch.cluster.metadata.IndexAbstraction; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.Index; + +/** + * Creates mocks of org.opensearch.cluster.metadata.IndexAbstraction maps. Useful for unit testing code which + * operates on index metadata. + * + * TODO: This is the evil twin of the same class in the integrationTest module. Possibly tests depending on this + * should be moved to the integrationTest module? + */ +public class MockIndexMetadataBuilder { + + private final static Settings INDEX_SETTINGS = Settings.builder() + .put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + + private Map nameToIndexAbstractionMap = new HashMap<>(); + private Map nameToIndexMetadataMap = new HashMap<>(); + private Map> indicesToAliases = new HashMap<>(); + private Map> aliasesToIndices = new HashMap<>(); + + public static MockIndexMetadataBuilder indices(String... indices) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String index : indices) { + builder.index(index); + } + + return builder; + } + + public static MockIndexMetadataBuilder dataStreams(String... 
dataStreams) { + MockIndexMetadataBuilder builder = new MockIndexMetadataBuilder(); + + for (String dataStream : dataStreams) { + builder.dataStream(dataStream); + } + + return builder; + } + + public ImmutableMap build() { + Map aliasMetadataMap = new HashMap<>(); + + for (Map.Entry> aliasEntry : this.aliasesToIndices.entrySet()) { + String alias = aliasEntry.getKey(); + AliasMetadata aliasMetadata = AliasMetadata.builder(alias).build(); + aliasMetadataMap.put(alias, aliasMetadata); + } + + for (Map.Entry> indexEntry : this.indicesToAliases.entrySet()) { + String index = indexEntry.getKey(); + Set aliases = indexEntry.getValue(); + + IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(index).settings(INDEX_SETTINGS); + + for (String alias : aliases) { + indexMetadataBuilder.putAlias(aliasMetadataMap.get(alias)); + } + + IndexMetadata indexMetadata = indexMetadataBuilder.build(); + nameToIndexMetadataMap.put(index, indexMetadata); + nameToIndexAbstractionMap.put(index, new IndexAbstraction.Index(indexMetadata)); + } + + for (Map.Entry> aliasEntry : this.aliasesToIndices.entrySet()) { + String alias = aliasEntry.getKey(); + Set indices = aliasEntry.getValue(); + AliasMetadata aliasMetadata = aliasMetadataMap.get(alias); + + String firstIndex = indices.iterator().next(); + indices.remove(firstIndex); + + IndexMetadata firstIndexMetadata = nameToIndexMetadataMap.get(firstIndex); + IndexAbstraction.Alias indexAbstraction = new IndexAbstraction.Alias(aliasMetadata, firstIndexMetadata); + + for (String index : indices) { + indexAbstraction.getIndices().add(nameToIndexMetadataMap.get(index)); + } + + nameToIndexAbstractionMap.put(alias, indexAbstraction); + } + + return ImmutableMap.copyOf(this.nameToIndexAbstractionMap); + } + + public MockIndexMetadataBuilder index(String index) { + if (!this.indicesToAliases.containsKey(index)) { + this.indicesToAliases.put(index, new HashSet<>()); + } + return this; + } + + public AliasBuilder alias(String alias) { + 
return new AliasBuilder(alias); + } + + public MockIndexMetadataBuilder dataStream(String dataStream) { + return dataStream(dataStream, 3); + } + + public MockIndexMetadataBuilder dataStream(String dataStream, int generations) { + List backingIndices = new ArrayList<>(); + + for (int i = 1; i <= generations; i++) { + String backingIndexName = DataStream.getDefaultBackingIndexName(dataStream, i); + backingIndices.add(new Index(backingIndexName, backingIndexName)); + } + + DataStream dataStreamMetadata = new DataStream(dataStream, new DataStream.TimestampField("@timestamp"), backingIndices); + IndexAbstraction.DataStream dataStreamIndexAbstraction = new IndexAbstraction.DataStream( + dataStreamMetadata, + backingIndices.stream().map(i -> getIndexMetadata(i.getName())).collect(Collectors.toList()) + ); + this.nameToIndexAbstractionMap.put(dataStream, dataStreamIndexAbstraction); + + for (Index backingIndex : backingIndices) { + this.nameToIndexAbstractionMap.put( + backingIndex.getName(), + new IndexAbstraction.Index(getIndexMetadata(backingIndex.getName()), dataStreamIndexAbstraction) + ); + } + + return this; + } + + private IndexMetadata getIndexMetadata(String index) { + IndexMetadata result = this.nameToIndexMetadataMap.get(index); + + if (result == null) { + result = IndexMetadata.builder(index) + .settings(Settings.builder().put(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(1) + .build(); + this.nameToIndexMetadataMap.put(index, result); + } + + return result; + } + + public class AliasBuilder { + private String alias; + + private AliasBuilder(String alias) { + this.alias = alias; + } + + public MockIndexMetadataBuilder of(String firstIndex, String... 
moreIndices) { + MockIndexMetadataBuilder.this.indicesToAliases.computeIfAbsent(firstIndex, (k) -> new HashSet<>()).add(this.alias); + + Set indices = new HashSet<>(); + indices.add(firstIndex); + + for (String index : moreIndices) { + MockIndexMetadataBuilder.this.indicesToAliases.computeIfAbsent(index, (k) -> new HashSet<>()).add(this.alias); + indices.add(index); + } + + MockIndexMetadataBuilder.this.aliasesToIndices.put(this.alias, indices); + + return MockIndexMetadataBuilder.this; + } + } +} diff --git a/src/test/resources/opensearch-config-non-flat.yaml b/src/test/resources/opensearch-config-non-flat.yaml new file mode 100644 index 0000000000..1333feb4a8 --- /dev/null +++ b/src/test/resources/opensearch-config-non-flat.yaml @@ -0,0 +1,14 @@ +plugins: + security: + ssl: + transport: + pemcert_filepath: esnode.pem + pemkey_filepath: esnode-key.pem + pemtrustedcas_filepath: root-ca.pem + enforce_hostname_verification: false + http: + enabled: true + pemcert_filepath: esnode.pem + pemkey_filepath: esnode-key.pem + pemtrustedcas_filepath: root-ca.pem + allow_unsafe_democertificates: true diff --git a/src/test/resources/ssl/reload/README.txt b/src/test/resources/ssl/reload/README.txt new file mode 100644 index 0000000000..a149c42284 --- /dev/null +++ b/src/test/resources/ssl/reload/README.txt @@ -0,0 +1,29 @@ +Commands to generate node-new-ca.crt.pem, node-new-ca.key.pem, secondary-root-ca.pem, secondary-signing-ca.pem: + +# generate new secondary root CA +openssl genrsa -out secondary-root-ca-key.pem 2048 +openssl req -new -x509 -sha256 -days 3650 -key secondary-root-ca-key.pem -subj "/DC=com/DC=example/O=Example Com Inc./OU=Example Com Inc. Secondary Root CA/CN=Example Com Inc. 
Secondary Root CA" -addext "basicConstraints = critical,CA:TRUE" -addext "keyUsage = critical, digitalSignature, keyCertSign, cRLSign" -addext "subjectKeyIdentifier = hash" -addext "authorityKeyIdentifier = keyid:always,issuer:always" -out secondary-root-ca.pem + +# generate new secondary signing CA, signed by the new secondary root CA + +openssl genrsa -out secondary-signing-ca-key-temp.pem 2048 +openssl pkcs8 -inform PEM -outform PEM -in secondary-signing-ca-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out secondary-signing-ca-key.pem +openssl req -new -key secondary-signing-ca-key.pem -subj "/DC=com/DC=example/O=Example Com Inc./OU=Example Com Inc. Secondary Signing CA/CN=Example Com Inc. Secondary Signing CA" -out secondary-signing-ca-key.csr +printf "basicConstraints = critical,CA:TRUE" > secondary-signing-ca_ext.conf +printf "basicConstraints = critical,CA:TRUE\nkeyUsage = critical, digitalSignature, keyCertSign, cRLSign\nsubjectKeyIdentifier = hash\nauthorityKeyIdentifier = keyid:always,issuer:always" > secondary-signing-ca_ext.conf +openssl x509 -req -in secondary-signing-ca-key.csr -out secondary-signing-ca.pem -CA secondary-root-ca.pem -CAkey secondary-root-ca-key.pem -CAcreateserial -days 3650 -extfile secondary-signing-ca_ext.conf + +# generate a new node cert, signed by the new secondary signing key CA +openssl genrsa -out node-new-ca-key-temp.pem 2048 +openssl pkcs8 -inform PEM -outform PEM -in node-new-ca-key-temp.pem -topk8 -nocrypt -v1 PBE-SHA1-3DES -out node-new-ca.key.pem +openssl req -new -key node-new-ca.key.pem -subj "/C=DE/L=Test/O=Test/OU=SSL/CN=node-1.example.com" -out node-new-ca.csr +printf "subjectAltName = RID:1.2.3.4.5.5, DNS:node-1.example.com, DNS:localhost, IP:127.0.0.1" > node-new-ca_ext.conf +openssl x509 -req -in node-new-ca.csr -out node-new-ca.pem -CA secondary-signing-ca.pem -CAkey secondary-signing-ca-key.pem -CAcreateserial -days 3650 -extfile node-new-ca_ext.conf + +cat node-new-ca.pem > node-new-ca.crt.pem +cat 
secondary-signing-ca.pem >> node-new-ca.crt.pem +cat secondary-root-ca.pem >> node-new-ca.crt.pem + +# for tests to pass, the new secondary-signing-ca.pem and secondary-root-ca.pem keys should also be added to the truststore.jks file, e.g.: +keytool -import -alias secondary-root-ca -file secondary-root-ca.pem -storetype JKS -keystore truststore.jks +keytool -import -alias secondary-signing-ca -file secondary-signing-ca.pem -storetype JKS -keystore truststore.jks diff --git a/src/test/resources/ssl/reload/node-new-ca.crt.pem b/src/test/resources/ssl/reload/node-new-ca.crt.pem new file mode 100644 index 0000000000..2bf9284f09 --- /dev/null +++ b/src/test/resources/ssl/reload/node-new-ca.crt.pem @@ -0,0 +1,82 @@ +-----BEGIN CERTIFICATE----- +MIIEBzCCAu+gAwIBAgIUUN4lYU0yobNFo1xcluReeadmlaUwDQYJKoZIhvcNAQEL +BQAwgakxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMS4wLAYDVQQLDCVFeGFtcGxl +IENvbSBJbmMuIFNlY29uZGFyeSBTaWduaW5nIENBMS4wLAYDVQQDDCVFeGFtcGxl +IENvbSBJbmMuIFNlY29uZGFyeSBTaWduaW5nIENBMB4XDTI0MDkxNzAwMTU0OFoX +DTM0MDkxNTAwMTU0OFowVjELMAkGA1UEBhMCREUxDTALBgNVBAcMBFRlc3QxDTAL +BgNVBAoMBFRlc3QxDDAKBgNVBAsMA1NTTDEbMBkGA1UEAwwSbm9kZS0xLmV4YW1w +bGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtJG372aVfTYZ +tQ6udEQzC9RNy8+SqBZEproPBdYupOZ2l0tKgGykoAI0iX/p3gzQlYBSmSVduKLZ +n5E/nQCb+Rqbi1uoZrojEQxq538RXWmI9X72MyKFnqcgjZW9qCBn0ok5J0fSp7kS +55I6IzJhrJFqJKdn/i1dTReyg0tjSa/dR2yHbFj97gdXAnnte7xa87ounKZoFtme +rhhfVfbnkxQfSFecg0AltBiuhB9TxovRTo1TOVPpAUhBFBaj4ILSyGJdG9qQ11OM +L+QUd6TjQB0qFSVaf/BGu/0Umz1lp1OrrQkouaTQfuQ+3tOY3hwCM4PdL03YbBYX +r/H7EmhJ2QIDAQABo3kwdzA1BgNVHREELjAsiAUqAwQFBYISbm9kZS0xLmV4YW1w +bGUuY29tgglsb2NhbGhvc3SHBH8AAAEwHQYDVR0OBBYEFCBH9UyAoNd2nq/4PHuP +6XqCvcMnMB8GA1UdIwQYMBaAFFLGvw6mimoIfjgzjHjhUfVNAeAIMA0GCSqGSIb3 +DQEBCwUAA4IBAQAOhTfnE+uTD0PQy+/HT2uQKRMsn+f6CeiHTgTWkA7+XXECXyBI +B8cGnXEqNRg7gInrnYpsNv19Q5v4fghMG+5mTO0iDhSCL3ttXVy3J7yvb9IWgc12 
+34YC7BeTe8DB+vATTnxEibOqXX8YhB/n9pB/xoqs7XUTVTP56QYcMZZvjzdIJhp9 +kpydel7TIDqJmG7HPkjVn0caxdsGFaBF5XmI4o73xlJVEZrN5OMy9yao6kXrNiqD +GPRg6y3KTtrGXNImTs9+iJhLfBtT3i8/UU7T8vC9yfU6JDC6CWDRIwHNtIBY2Yp+ +cLMxoh/SZHFqLPguzH2RWmwa7mgEOet1RYVe +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIFCDCCA/CgAwIBAgIUfUpmQ/BPCGTsPLW7rrPbkEU1RcwwDQYJKoZIhvcNAQEL +BQAwgaMxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSswKQYDVQQLDCJFeGFtcGxl +IENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMSswKQYDVQQDDCJFeGFtcGxlIENv +bSBJbmMuIFNlY29uZGFyeSBSb290IENBMB4XDTI0MDkxNzAwMTU0OFoXDTM0MDkx +NTAwMTU0OFowgakxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZ +FgdleGFtcGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMS4wLAYDVQQLDCVF +eGFtcGxlIENvbSBJbmMuIFNlY29uZGFyeSBTaWduaW5nIENBMS4wLAYDVQQDDCVF +eGFtcGxlIENvbSBJbmMuIFNlY29uZGFyeSBTaWduaW5nIENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAjUd34V1uf+OwGXIBce+4O/UX35yCxY0LHt48 +wNIGkEs3StbTG5/qjkeAIFr2EUpRX5c0n5sIWdJX1cV/drWrhUzy6Ya1jvQiTA+i +k4YVVkFsz9QajgP+UPS06ZLkFldBofd/Su4GW5YEBlOBfxbsr8+E+73M/8sU1/wD +QLwPZGrkN7Cc37qi0Sf3blCNsjwfZPrGm+J/4hxdlJKuimo3Ctfwtlv/cIJZv7aG +RPksgsiirrk//nrW24wCQjqernuRayT0+2KL7OIn7UH2XL4nUUKU4cHYJOeiTNz+ +ds/uP1FG5WAvQ7CEyh6z2aXxGhZ89ZquATFy2paLpqfWgARiiwIDAQABo4IBKjCC +ASYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFFLG +vw6mimoIfjgzjHjhUfVNAeAIMIHjBgNVHSMEgdswgdiAFLmdh4hHmYd6TqjvfdgK +1x/HzhmeoYGppIGmMIGjMRMwEQYKCZImiZPyLGQBGRYDY29tMRcwFQYKCZImiZPy +LGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBsZSBDb20gSW5jLjErMCkGA1UE +CwwiRXhhbXBsZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQTErMCkGA1UEAwwi +RXhhbXBsZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQYIULRaaYWRGTgsg6K21 +3Aw9UyZJF+8wDQYJKoZIhvcNAQELBQADggEBAJo5QHvLgfH3VVJlNwFrGdNH1dCh +/mqPpqhjHCG8OUl2H8+dFsu/WfY7k/tcrMHCJHSVMbBiPxKM1MlR2aSIrGW7SNVZ +mrk0QfBHvhKdXOnUcPjp6CL7BAwgrKT9h0/v5ky/GutAL0L7N1Enntw+WWdI0SAn +JIaCzEN4s3VniDSyULZ7J3E4z7wmeLhzHf1ugyEoPOehP1RZzVJDLExZ30dXDUlG 
+qUQaUkBAjclD4i5vybF+CGGhCzIi0UTb+VmHNfi3yqYwltYGJzELvYw3ce/cVkSm +B4Qqx0niiFQfguX6MduWB067IXDLKu51ovnA+h72FKd7iZSrKg+qCEy0eYA= +-----END CERTIFICATE----- +-----BEGIN CERTIFICATE----- +MIIFAjCCA+qgAwIBAgIULRaaYWRGTgsg6K213Aw9UyZJF+8wDQYJKoZIhvcNAQEL +BQAwgaMxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSswKQYDVQQLDCJFeGFtcGxl +IENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMSswKQYDVQQDDCJFeGFtcGxlIENv +bSBJbmMuIFNlY29uZGFyeSBSb290IENBMB4XDTI0MDkxNzAwMTU0OFoXDTM0MDkx +NTAwMTU0OFowgaMxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZ +FgdleGFtcGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSswKQYDVQQLDCJF +eGFtcGxlIENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMSswKQYDVQQDDCJFeGFt +cGxlIENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAqRYm2F+Ejfg3Vqmld+dXzIThERJuzyqpq7FUNegfAebM +e7aelHiAMSecYroxj990HdCp1yDhryRxzdGHkPK7VHzH695th8N1su7wO37cspjX +ZxMexiZuwV1t/N8khi20MItqa6sYY4gkBLoGiT5DdJNTJHv3Ammx+PmYHIRF0S1P +P1j2nd+Kxaj1Il4sInUo7BqbmO794QdICgJQ5XFeXmEV+4uhoPSHEoOfAlWUTKA9 +a9rugrY0k3JlUTF0tIPLEWOcMxEcQj6uYFehxakwiOnZwgkJMCSbhsoEBzq+i7Eb +2Wob7d2Gn0De3Z+ZruVIJzY0MpHWrDUyny/Qi17nDwIDAQABo4IBKjCCASYwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFLmdh4hHmYd6 +TqjvfdgK1x/HzhmeMIHjBgNVHSMEgdswgdiAFLmdh4hHmYd6TqjvfdgK1x/Hzhme +oYGppIGmMIGjMRMwEQYKCZImiZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYH +ZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBsZSBDb20gSW5jLjErMCkGA1UECwwiRXhh +bXBsZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQTErMCkGA1UEAwwiRXhhbXBs +ZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQYIULRaaYWRGTgsg6K213Aw9UyZJ +F+8wDQYJKoZIhvcNAQELBQADggEBAI6PJGGXH2fIlrZQFZXkuLqjMrR8K+/60cq0 +4qqjTg8p+vQyB66BJSh4BiUM2sh7SwGKpehNB8QQXEZoyzpWY+Cdcm4ty4F430xS +uz/uW0NObhnJnyURlHf1szHTr91/1yX7eCtpUA1X9cjtXYS/uR911BCotdgmp3N9 +lHp+DjMx3j/xsGAuC1B2vmuLaMXA8SeYziDx+9KUHidMM7v/JsDZwc8XKCK+i12s +yIAv7Tuk5drq3x7ZCA3k9Xja/YqpaPNSP6iVsdM57NLPfZA9ilNuSMD49No6q9wW +dJ7sJEGDdICEBTuL9bCnwv/PZQ8ohJMJ+7Ike8f6tz8TsH3C+fg= +-----END CERTIFICATE----- diff --git 
a/src/test/resources/ssl/reload/node-new-ca.key.pem b/src/test/resources/ssl/reload/node-new-ca.key.pem new file mode 100644 index 0000000000..1fa12aa5e5 --- /dev/null +++ b/src/test/resources/ssl/reload/node-new-ca.key.pem @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC0kbfvZpV9Nhm1 +Dq50RDML1E3Lz5KoFkSmug8F1i6k5naXS0qAbKSgAjSJf+neDNCVgFKZJV24otmf +kT+dAJv5GpuLW6hmuiMRDGrnfxFdaYj1fvYzIoWepyCNlb2oIGfSiTknR9KnuRLn +kjojMmGskWokp2f+LV1NF7KDS2NJr91HbIdsWP3uB1cCee17vFrzui6cpmgW2Z6u +GF9V9ueTFB9IV5yDQCW0GK6EH1PGi9FOjVM5U+kBSEEUFqPggtLIYl0b2pDXU4wv +5BR3pONAHSoVJVp/8Ea7/RSbPWWnU6utCSi5pNB+5D7e05jeHAIzg90vTdhsFhev +8fsSaEnZAgMBAAECggEABVlpxwxVcmOnaE86iNQ6ZOfRtC9+iz85omzRpB0fvZ/c +NIg0+U/+ooTeNJKXBY6AoWUvTT0npSAh7VG6vjZ16G/K2tqIxx5NiqRBCIGhrJBD +T+6GcaZcqgIOe1NLzo7DNJ//EvRUP8bCUhzpXwCPlzKpn9Nbx9JlOLLyhWQ22Uhx +NQVIOd3qvhAvU/LF03fMPAqxeHXD0KOQNNxNPqwTWW1rRi6bzvMud0icQhDjTVBf +gUhowZFdDnt3NIiMmh997Fnjbx2J5BTJ/tWnyG4pVO8d4JX5RDcZOx+MFBO9ypQ/ +FqhGu+J3xjMzSP+Y7kKHI60KBMCRnz2hEUP2IN+xxwKBgQDkiBtpJkRshaP27Bbw +xpuSvsKRAYdMDSlfReKQHEs3hJ0w3wFT0ofgMS49PyzHu0TyQ39jNAJ3YkdpRpIC +nI9fcDV4xS6G5kz72U4yamm1RF7TdMoU1WWMqmtm7i9Cdjrd7z/WGnBUH2pmenJ7 +IDoeALme+GxNwyakaSKaemOlGwKBgQDKRc/uln6AIxtlWBqgo8XGLyzReK0EYdnN +jwp7CcOuza//q9/P1fh/NA4rj0uiPD0SYX92DmaGRuuF/FwQ7OnAw9x81JlQeBbW +iSH3IzFSXP6kuY+SpUkRuWLXuNByIbsICOWN761PyIDV7TJAG/e6G/SLHUoOA6+G +aOQopG+gGwKBgDdmIzbvNuET2HaQLtN5YddF9QaP10uBWUkmOND0eutfc3eYZ8r5 +G0Umxu3D9cgJRqJv6F6VChAEvAjyOYz2hO7+1YeMTUYYaAsZV5JzJ2Lwywf5pM8+ +F9rsqRKPpNc4r/aC+/eb+yT5ZKKpBj2Ax3XkeRrnX+HN7/0lG2VVS/iHAoGAFAvN +KqkRimNwUJ/lq6vvas+8ElpyUy/bZQrbEAyMryNFYQJIoRFkmj6vdNOzvDVaHBs7 +hZixwnb+2n2DJk5EcE046cosE5SDNunKSvLa7X234t1dBDyLPE1yJUz0o4sCPS6c +iW+KbpDBa/Ig+8eJypEAsFTLxQ3KdHiqu/hn86cCgYBygyypD9R11k8X46ayXFcV +C+bpC4GsRliRvA0smFIAC75urzngLYyyP2ueonusE0cNYARlcbV3FcLDLuvEGoJL +enlYi4k9F+nCZaQ2ylWl2H6ud0/kCnIfjv+4Knf15Wz58VkH09AvbmqJNyd6kB0e +19JMEgOOugnWgfUNsBipfQ== +-----END PRIVATE KEY----- diff --git 
a/src/test/resources/ssl/reload/secondary-root-ca.pem b/src/test/resources/ssl/reload/secondary-root-ca.pem new file mode 100644 index 0000000000..81d8309898 --- /dev/null +++ b/src/test/resources/ssl/reload/secondary-root-ca.pem @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIFAjCCA+qgAwIBAgIULRaaYWRGTgsg6K213Aw9UyZJF+8wDQYJKoZIhvcNAQEL +BQAwgaMxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSswKQYDVQQLDCJFeGFtcGxl +IENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMSswKQYDVQQDDCJFeGFtcGxlIENv +bSBJbmMuIFNlY29uZGFyeSBSb290IENBMB4XDTI0MDkxNzAwMTU0OFoXDTM0MDkx +NTAwMTU0OFowgaMxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZ +FgdleGFtcGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSswKQYDVQQLDCJF +eGFtcGxlIENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMSswKQYDVQQDDCJFeGFt +cGxlIENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAqRYm2F+Ejfg3Vqmld+dXzIThERJuzyqpq7FUNegfAebM +e7aelHiAMSecYroxj990HdCp1yDhryRxzdGHkPK7VHzH695th8N1su7wO37cspjX +ZxMexiZuwV1t/N8khi20MItqa6sYY4gkBLoGiT5DdJNTJHv3Ammx+PmYHIRF0S1P +P1j2nd+Kxaj1Il4sInUo7BqbmO794QdICgJQ5XFeXmEV+4uhoPSHEoOfAlWUTKA9 +a9rugrY0k3JlUTF0tIPLEWOcMxEcQj6uYFehxakwiOnZwgkJMCSbhsoEBzq+i7Eb +2Wob7d2Gn0De3Z+ZruVIJzY0MpHWrDUyny/Qi17nDwIDAQABo4IBKjCCASYwDwYD +VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFLmdh4hHmYd6 +TqjvfdgK1x/HzhmeMIHjBgNVHSMEgdswgdiAFLmdh4hHmYd6TqjvfdgK1x/Hzhme +oYGppIGmMIGjMRMwEQYKCZImiZPyLGQBGRYDY29tMRcwFQYKCZImiZPyLGQBGRYH +ZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBsZSBDb20gSW5jLjErMCkGA1UECwwiRXhh +bXBsZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQTErMCkGA1UEAwwiRXhhbXBs +ZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQYIULRaaYWRGTgsg6K213Aw9UyZJ +F+8wDQYJKoZIhvcNAQELBQADggEBAI6PJGGXH2fIlrZQFZXkuLqjMrR8K+/60cq0 +4qqjTg8p+vQyB66BJSh4BiUM2sh7SwGKpehNB8QQXEZoyzpWY+Cdcm4ty4F430xS +uz/uW0NObhnJnyURlHf1szHTr91/1yX7eCtpUA1X9cjtXYS/uR911BCotdgmp3N9 +lHp+DjMx3j/xsGAuC1B2vmuLaMXA8SeYziDx+9KUHidMM7v/JsDZwc8XKCK+i12s +yIAv7Tuk5drq3x7ZCA3k9Xja/YqpaPNSP6iVsdM57NLPfZA9ilNuSMD49No6q9wW 
+dJ7sJEGDdICEBTuL9bCnwv/PZQ8ohJMJ+7Ike8f6tz8TsH3C+fg= +-----END CERTIFICATE----- diff --git a/src/test/resources/ssl/reload/secondary-signing-ca.pem b/src/test/resources/ssl/reload/secondary-signing-ca.pem new file mode 100644 index 0000000000..53f989d0b3 --- /dev/null +++ b/src/test/resources/ssl/reload/secondary-signing-ca.pem @@ -0,0 +1,29 @@ +-----BEGIN CERTIFICATE----- +MIIFCDCCA/CgAwIBAgIUfUpmQ/BPCGTsPLW7rrPbkEU1RcwwDQYJKoZIhvcNAQEL +BQAwgaMxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZFgdleGFt +cGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMSswKQYDVQQLDCJFeGFtcGxl +IENvbSBJbmMuIFNlY29uZGFyeSBSb290IENBMSswKQYDVQQDDCJFeGFtcGxlIENv +bSBJbmMuIFNlY29uZGFyeSBSb290IENBMB4XDTI0MDkxNzAwMTU0OFoXDTM0MDkx +NTAwMTU0OFowgakxEzARBgoJkiaJk/IsZAEZFgNjb20xFzAVBgoJkiaJk/IsZAEZ +FgdleGFtcGxlMRkwFwYDVQQKDBBFeGFtcGxlIENvbSBJbmMuMS4wLAYDVQQLDCVF +eGFtcGxlIENvbSBJbmMuIFNlY29uZGFyeSBTaWduaW5nIENBMS4wLAYDVQQDDCVF +eGFtcGxlIENvbSBJbmMuIFNlY29uZGFyeSBTaWduaW5nIENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAjUd34V1uf+OwGXIBce+4O/UX35yCxY0LHt48 +wNIGkEs3StbTG5/qjkeAIFr2EUpRX5c0n5sIWdJX1cV/drWrhUzy6Ya1jvQiTA+i +k4YVVkFsz9QajgP+UPS06ZLkFldBofd/Su4GW5YEBlOBfxbsr8+E+73M/8sU1/wD +QLwPZGrkN7Cc37qi0Sf3blCNsjwfZPrGm+J/4hxdlJKuimo3Ctfwtlv/cIJZv7aG +RPksgsiirrk//nrW24wCQjqernuRayT0+2KL7OIn7UH2XL4nUUKU4cHYJOeiTNz+ +ds/uP1FG5WAvQ7CEyh6z2aXxGhZ89ZquATFy2paLpqfWgARiiwIDAQABo4IBKjCC +ASYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFFLG +vw6mimoIfjgzjHjhUfVNAeAIMIHjBgNVHSMEgdswgdiAFLmdh4hHmYd6TqjvfdgK +1x/HzhmeoYGppIGmMIGjMRMwEQYKCZImiZPyLGQBGRYDY29tMRcwFQYKCZImiZPy +LGQBGRYHZXhhbXBsZTEZMBcGA1UECgwQRXhhbXBsZSBDb20gSW5jLjErMCkGA1UE +CwwiRXhhbXBsZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQTErMCkGA1UEAwwi +RXhhbXBsZSBDb20gSW5jLiBTZWNvbmRhcnkgUm9vdCBDQYIULRaaYWRGTgsg6K21 +3Aw9UyZJF+8wDQYJKoZIhvcNAQELBQADggEBAJo5QHvLgfH3VVJlNwFrGdNH1dCh +/mqPpqhjHCG8OUl2H8+dFsu/WfY7k/tcrMHCJHSVMbBiPxKM1MlR2aSIrGW7SNVZ +mrk0QfBHvhKdXOnUcPjp6CL7BAwgrKT9h0/v5ky/GutAL0L7N1Enntw+WWdI0SAn 
+JIaCzEN4s3VniDSyULZ7J3E4z7wmeLhzHf1ugyEoPOehP1RZzVJDLExZ30dXDUlG +qUQaUkBAjclD4i5vybF+CGGhCzIi0UTb+VmHNfi3yqYwltYGJzELvYw3ce/cVkSm +B4Qqx0niiFQfguX6MduWB067IXDLKu51ovnA+h72FKd7iZSrKg+qCEy0eYA= +-----END CERTIFICATE----- diff --git a/src/test/resources/ssl/reload/truststore.jks b/src/test/resources/ssl/reload/truststore.jks index c750f9807a..217c4d09b4 100644 Binary files a/src/test/resources/ssl/reload/truststore.jks and b/src/test/resources/ssl/reload/truststore.jks differ