diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml
index 7628899..17c53cf 100644
--- a/.github/workflows/gh-pages.yml
+++ b/.github/workflows/gh-pages.yml
@@ -5,6 +5,8 @@ on:
branches: [ main]
pull_request:
branches: [ main ]
+ schedule:
+ - cron: "23 9 */14 * *"
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
@@ -30,10 +32,10 @@ jobs:
uses: gradle/gradle-build-action@842c587ad8aa4c68eeba24c396e15af4c2e9f30a # v2.9.0
with:
gradle-home-cache-cleanup: true
- - name: Run functional tests
- run: |
- ./gradlew --quiet extractImplementations > implementations.json
- cat implementations.json >> $GITHUB_STEP_SUMMARY
+ - name: Extract implementation info
+ run: ./gradlew --quiet extractImplementations > implementations.json
+ - name: Add results to step summary
+ run: cat implementations.json >> $GITHUB_STEP_SUMMARY
- name: Upload Implementations
uses: actions/upload-artifact@v3
with:
@@ -41,8 +43,36 @@ jobs:
path: implementations.json
retention-days: 1
+ run_functional:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - name: Set up JDK
+ uses: actions/setup-java@0ab4596768b603586c0de567f2430c30f5b0d2b0 # v3.13.0
+ with:
+ java-version: '17'
+ distribution: 'adopt'
+ - name: Setup Gradle
+ uses: gradle/gradle-build-action@842c587ad8aa4c68eeba24c396e15af4c2e9f30a # v2.9.0
+ with:
+ gradle-home-cache-cleanup: true
+ - name: Run functional tests
+ run: ./gradlew --quiet runFunctionalTests
+ - name: Add results to step summary
+ run: |
+ echo "# Overall comparison" >> $GITHUB_STEP_SUMMARY
+ cat build/reports/creek/functional-summary.md >> $GITHUB_STEP_SUMMARY
+ echo "# Specific Draft & Implementation results" >> $GITHUB_STEP_SUMMARY
+ cat build/reports/creek/per-draft.md >> $GITHUB_STEP_SUMMARY
+ - name: Upload Implementations
+ uses: actions/upload-artifact@v3
+ with:
+ name: functional-summary
+ path: build/reports/creek/*
+ retention-days: 1
+
build_pages:
- needs: get_impls
+ needs: [get_impls, run_functional]
runs-on: ubuntu-latest
env:
BUNDLE_GEMFILE: ${{ github.workspace }}/docs/Gemfile
@@ -65,7 +95,15 @@ jobs:
with:
name: implementations
- name: Inject Implementations JSON into site
- run: sed -i $'/IMPLEMENTATIONS_JSON/ { r implementations.json\nd }' "docs/_docs/1. implementations.md"
+ run: sed -i $'/IMPLEMENTATIONS_JSON/ { r implementations.json\nd }' docs/_docs/*
+ - name: Download Functional JSON
+ uses: actions/download-artifact@v3
+ with:
+ name: functional-summary
+ - name: Inject Functional JSON into site
+ run: |
+ sed -i $'/FUNCTIONAL_SUMMARY_JSON/ { r functional-summary.json\nd }' docs/_docs/*
+ cat per-draft.md >> "docs/_docs/2. functional.md"
- name: Build with Jekyll
# Outputs to the './docs/_site' directory by default
run: (cd docs && bundle exec jekyll build --baseurl "${{ steps.pages.outputs.base_path }}")
@@ -97,4 +135,4 @@ jobs:
id: deployment
uses: actions/deploy-pages@9dbe3824824f8a1377b8e298bafde1a50ede43e5 # v2.0.4
- name: Ping Google Search
- run: curl "https://www.google.com/ping?sitemap=https://www.creekservice.org/${{ github.event.repository.name }}/sitemap.xml"
+ run: curl "https://www.google.com/ping?sitemap=https://www.creekservice.org/${{ github.event.repository.name }}/sitemap.xml"
\ No newline at end of file
diff --git a/.github/workflows/run-func-test.yml b/.github/workflows/run-func-test.yml
deleted file mode 100644
index 6f73188..0000000
--- a/.github/workflows/run-func-test.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# This workflow run the functional test
-
-name: Func Test
-
-on:
- workflow_dispatch:
- push:
- branches: [ main ]
-
-concurrency:
- group: ${{ github.ref }}-func-test
- cancel-in-progress: true
-
-permissions:
- contents: read
-
-jobs:
- build:
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
- - uses: gradle/wrapper-validation-action@56b90f209b02bf6d1deae490e9ef18b21a389cd4 # v1.1.0
- - name: Set up JDK
- uses: actions/setup-java@0ab4596768b603586c0de567f2430c30f5b0d2b0 # v3.13.0
- with:
- java-version: '17'
- distribution: 'adopt'
- - name: Setup Gradle
- uses: gradle/gradle-build-action@842c587ad8aa4c68eeba24c396e15af4c2e9f30a # v2.9.0
- with:
- gradle-home-cache-cleanup: true
- - name: Run functional tests
- run: ./gradlew --quiet runFunctionalTests >> $GITHUB_STEP_SUMMARY
\ No newline at end of file
diff --git a/README.md b/README.md
index 9a18c43..d987ec6 100644
--- a/README.md
+++ b/README.md
@@ -5,38 +5,58 @@
Feature and performance comparison of different JVM-based implementations of JSON schema validators.
-The initial purpose of this code was to determine which JSON validation library Creek should make use of.
-However, the repo is shared here to hopefully help others faced with a similar decision.
-
-## Schema validator implementations
-
-This repo tests the following implementations of JSON schema validation:
-
-| Implementation under test | Written In | Supported JSON schema specifications | License |
-|--------------------------------------|------------|------------------------------------------|----------------------------------------|
-| [Vert.x Json Schema][1] | Java | 2020-12, 2019-09 draft-07, -04 | Apache License 2.0 |
-| [jsonschemafriend][2] | Java | 2020-12, 2019-09 draft-07, -06, -04, -03 | Apache License 2.0 |
-| [networknt/json-schema-validator][3] | Java | 2020-12, 2019-09 draft-07, -06, -04 | Apache License 2.0 |
-| [Snow][4] | Java | 2019-09 draft-07, -06 | GNU Affero General Public License v3.0 |
-| [everit-org/json-schema][5] | Java | draft-07, -06, -04 | Apache License 2.0 |
-| [Justify][6] | Java | draft-07, -06, -04 | Apache License 2.0 |
-| [worldturner/medeia-validator][7] | Kotlin | draft-07, -06, -04 | Apache License 2.0 |
-| [erosb/json-sKema][8] | Kotlin | 2020-12 | MIT |
+The results of this comparison can be found [here](https://www.creekservice.org/json-schema-validation-comparison/).
## Note to maintainers
-If you are the maintainer of one of the above implementations, please feel free to raise a PR if you feel your
-implementation is poorly represented due to issues with the code in this repo. See the [Contributing](#contributing) section below.
+If you are the maintainer of one of the implementations under test, and you feel your implementation is poorly represented,
+or you maintain a JVM-based implementation not yet covered in this comparison, then please feel free to raise a PR.
+See the [Contributing](#contributing) section below.
## Feature comparison
-Run the feature comparison locally with `./gradlew runFunctionalTests`, or view previous runs on [GitHub][functionalTestRuns].
+Run the feature comparison locally with `./gradlew runFunctionalTests`,
+or view the [latest results](https://www.creekservice.org/json-schema-validation-comparison/functional),
+or browse previous runs in the [GitHub Pages workflow runs][GitHubPagesWfRuns].
Runs each implementation through the standard [JSON Schema Test Suite][JSON-Schema-Test-Suite].
The suite contains both positive and negative test cases, i.e. JSON that should both pass and fail validation,
and covers all schema specifications, i.e. draft-03 through to the latest.
-Running the testing will output one table for each implementation and supported schema specification combination,
+Running the functional tests will create result files in the `build/reports/creek` directory.
+
+### functional-summary.md
+
+This report contains a summary of pass/fail rates of required/optional test cases for each implementation,
+per supported JSON schema version.
+
+For example:
+
+| Impl | Overall | DRAFT_03 | DRAFT_04 | DRAFT_06 | DRAFT_07 | DRAFT_2019_09 | DRAFT_2020_12 |
+|--------------|----------------------------------------------------------------------------------------|----------------------------------------------------------------------------------|------------------------------------------------------------------------------------|------------------------------------------------------------------------------------|------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|--------------------------------------------------------------------------------------|
+| SchemaFriend | score: 98.0 pass: r:5051 (98.8%) o:2332 (95.7%) fail: r:60 (1.2%) o:106 (4.3%) | score: 98.4 pass: r:435 (100.0%) o:104 (93.7%) fail: r:0 (0.0%) o:7 (6.3%) | score: 98.5 pass: r:590 (99.8%) o:234 (94.4%) fail: r:1 (0.2%) o:14 (5.6%) | score: 98.6 pass: r:791 (99.6%) o:294 (95.5%) fail: r:3 (0.4%) o:14 (4.5%) | score: 98.8 pass: r:875 (99.7%) o:510 (96.0%) fail: r:3 (0.3%) o:21 (4.0%) | score: 98.0 pass: r:1178 (98.6%) o:591 (96.1%) fail: r:17 (1.4%) o:24 (3.9%) | score: 96.7 pass: r:1182 (97.0%) o:599 (95.8%) fail: r:36 (3.0%) o:26 (4.2%) |
+| Snow | score: 97.6 pass: r:2823 (98.5%) o:1381 (95.0%) fail: r:44 (1.5%) o:73 (5.0%) | | | score: 98.0 pass: r:783 (98.6%) o:296 (96.1%) fail: r:11 (1.4%) o:12 (3.9%) | score: 98.1 pass: r:869 (99.0%) o:508 (95.7%) fail: r:9 (1.0%) o:23 (4.3%) | score: 96.9 pass: r:1171 (98.0%) o:577 (93.8%) fail: r:24 (2.0%) o:38 (6.2%) | |
+| Medeia | score: 96.3 pass: r:2250 (99.4%) o:946 (87.0%) fail: r:13 (0.6%) o:141 (13.0%) | | score: 95.7 pass: r:587 (99.3%) o:210 (84.7%) fail: r:4 (0.7%) o:38 (15.3%) | score: 96.4 pass: r:789 (99.4%) o:270 (87.7%) fail: r:5 (0.6%) o:38 (12.3%) | score: 96.6 pass: r:874 (99.5%) o:466 (87.8%) fail: r:4 (0.5%) o:65 (12.2%) | | |
+| Justify | score: 95.4 pass: r:2146 (94.8%) o:1055 (97.1%) fail: r:117 (5.2%) o:32 (2.9%) | | score: 95.4 pass: r:560 (94.8%) o:241 (97.2%) fail: r:31 (5.2%) o:7 (2.8%) | score: 95.7 pass: r:755 (95.1%) o:301 (97.7%) fail: r:39 (4.9%) o:7 (2.3%) | score: 95.1 pass: r:831 (94.6%) o:513 (96.6%) fail: r:47 (5.4%) o:18 (3.4%) | | |
+| Everit | score: 95.0 pass: r:2204 (97.4%) o:953 (87.7%) fail: r:59 (2.6%) o:134 (12.3%) | | score: 95.8 pass: r:581 (98.3%) o:219 (88.3%) fail: r:10 (1.7%) o:29 (11.7%) | score: 95.5 pass: r:770 (97.0%) o:280 (90.9%) fail: r:24 (3.0%) o:28 (9.1%) | score: 94.2 pass: r:853 (97.2%) o:454 (85.5%) fail: r:25 (2.8%) o:77 (14.5%) | | |
+| Vert.x | score: 93.7 pass: r:3756 (96.8%) o:1710 (84.7%) fail: r:126 (3.2%) o:309 (15.3%) | | score: 96.2 pass: r:580 (98.1%) o:224 (90.3%) fail: r:11 (1.9%) o:24 (9.7%) | | score: 94.0 pass: r:860 (97.9%) o:436 (82.1%) fail: r:18 (2.1%) o:95 (17.9%) | score: 94.1 pass: r:1162 (97.2%) o:522 (84.9%) fail: r:33 (2.8%) o:93 (15.1%) | score: 92.2 pass: r:1154 (94.7%) o:528 (84.5%) fail: r:64 (5.3%) o:97 (15.5%) |
+| sKema | score: 93.5 pass: r:1192 (97.9%) o:503 (80.5%) fail: r:26 (2.1%) o:122 (19.5%) | | | | | | score: 93.5 pass: r:1192 (97.9%) o:503 (80.5%) fail: r:26 (2.1%) o:122 (19.5%) |
+| NetworkNt | score: 93.1 pass: r:4451 (95.2%) o:2023 (86.9%) fail: r:225 (4.8%) o:304 (13.1%) | | score: 96.8 pass: r:581 (98.3%) o:229 (92.3%) fail: r:10 (1.7%) o:19 (7.7%) | score: 95.2 pass: r:773 (97.4%) o:273 (88.6%) fail: r:21 (2.6%) o:35 (11.4%) | score: 93.9 pass: r:853 (97.2%) o:447 (84.2%) fail: r:25 (2.8%) o:84 (15.8%) | score: 92.1 pass: r:1122 (93.9%) o:533 (86.7%) fail: r:73 (6.1%) o:82 (13.3%) | score: 90.7 pass: r:1122 (92.1%) o:541 (86.6%) fail: r:96 (7.9%) o:84 (13.4%) |
+
+Each populated cell details the **r**equired and **o**ptional passed and failed test case counts and percentages by Schema specification version, and overall.
+Underneath there is a 'score' for each implementation, out of 100.
+The score weights test results of _required_ features at triple _optional_ features, meaning 75% of the score is reserved for _required_ features,
+whereas _optional_ features only account for a maximum 25% of the score.
+
+### functional-summary.json
+
+As above, but stored in JSON notation.
+
+This is used to drive the [results micro-site](https://www.creekservice.org/json-schema-validation-comparison/).
+
+### per-draft.md
+
+This report contains one table for each implementation and supported schema specification combination,
showing the number of test cases that pass and fail in each test file.
For example,
@@ -109,42 +129,6 @@ Medeia: DRAFT_07:
| uniqueItems.json | 69 | 0 | 69 |
| unknownKeyword.json | 3 | 0 | 3 |
-Followed by a table containing a summary of pass/fail rates of required/optional test cases for each implementation,
-per supported JSON schema version.
-
-For example:
-
-| Impl | Overall | DRAFT_03 | DRAFT_04 | DRAFT_06 | DRAFT_07 | DRAFT_2019_09 | DRAFT_2020_12 |
-|--------------|-----------------------------------------|-----------------------------------|-------------------------------------|-------------------------------------|-------------------------------------|--------------------------------------|---------------------------------------|
-| NetworkNt | pass: r:4429 o:1980 / fail: r:221 o:302 | | pass: r:579 o:224 / fail: r:10 o:19 | pass: r:768 o:268 / fail: r:20 o:35 | pass: r:848 o:438 / fail: r:24 o:84 | pass: r:1118 o:521 / fail: r:73 o:81 | pass: r:1116 o:529 / fail: r:94 o:83 |
-| | r:95.2% o:86.8% / r:4.8% f:13.2% | | r:98.3% o:92.2% / r:1.7% f:7.8% | r:97.5% o:88.4% / r:2.5% f:11.6% | r:97.2% o:83.9% / r:2.8% f:16.1% | r:93.9% o:86.5% / r:6.1% f:13.5% | r:92.2% o:86.4% / r:7.8% f:13.6% |
-| | score: 93.1 | | score: 96.8 | score: 95.2 | score: 93.9 | score: 92.0 | score: 90.8 |
-| Skema | pass: r:1184 o:490 / fail: r:26 o:122 | | | | | | pass: r:1184 o:490 / fail: r:26 o:122 |
-| | r:97.9% o:80.1% / r:2.1% f:19.9% | | | | | | r:97.9% o:80.1% / r:2.1% f:19.9% |
-| | score: 93.4 | | | | | | score: 93.4 |
-| Medeia | pass: r:2237 o:928 / fail: r:12 o:140 | | pass: r:585 o:205 / fail: r:4 o:38 | pass: r:784 o:265 / fail: r:4 o:38 | pass: r:868 o:458 / fail: r:4 o:64 | | |
-| | r:99.5% o:86.9% / r:0.5% f:13.1% | | r:99.3% o:84.4% / r:0.7% f:15.6% | r:99.5% o:87.5% / r:0.5% f:12.5% | r:99.5% o:87.7% / r:0.5% f:12.3% | | |
-| | score: 96.3 | | score: 95.6 | score: 96.5 | score: 96.6 | | |
-| Snow | pass: r:2810 o:1354 / fail: r:41 o:73 | | | pass: r:778 o:291 / fail: r:10 o:12 | pass: r:864 o:499 / fail: r:8 o:23 | pass: r:1168 o:564 / fail: r:23 o:38 | |
-| | r:98.6% o:94.9% / r:1.4% f:5.1% | | | r:98.7% o:96.0% / r:1.3% f:4.0% | r:99.1% o:95.6% / r:0.9% f:4.4% | r:98.1% o:93.7% / r:1.9% f:6.3% | |
-| | score: 97.6 | | | score: 98.1 | score: 98.2 | score: 97.0 | |
-| Everit | pass: r:2192 o:934 / fail: r:57 o:134 | | pass: r:579 o:214 / fail: r:10 o:29 | pass: r:765 o:275 / fail: r:23 o:28 | pass: r:848 o:445 / fail: r:24 o:77 | | |
-| | r:97.5% o:87.5% / r:2.5% f:12.5% | | r:98.3% o:88.1% / r:1.7% f:11.9% | r:97.1% o:90.8% / r:2.9% f:9.2% | r:97.2% o:85.2% / r:2.8% f:14.8% | | |
-| | score: 95.0 | | score: 95.7 | score: 95.5 | score: 94.2 | | |
-| SchemaFriend | pass: r:5049 o:2311 / fail: r:34 o:82 | pass: r:433 o:104 / fail: r:0 o:7 | pass: r:588 o:233 / fail: r:1 o:10 | pass: r:785 o:293 / fail: r:3 o:10 | pass: r:869 o:505 / fail: r:3 o:17 | pass: r:1187 o:584 / fail: r:4 o:18 | pass: r:1187 o:592 / fail: r:23 o:20 |
-| | r:99.3% o:96.6% / r:0.7% f:3.4% | r:100.0% o:93.7% / r:0.0% f:6.3% | r:99.8% o:95.9% / r:0.2% f:4.1% | r:99.6% o:96.7% / r:0.4% f:3.3% | r:99.7% o:96.7% / r:0.3% f:3.3% | r:99.7% o:97.0% / r:0.3% f:3.0% | r:98.1% o:96.7% / r:1.9% f:3.3% |
-| | score: 98.6 | score: 98.4 | score: 98.8 | score: 98.9 | score: 98.9 | score: 99.0 | score: 97.8 |
-| Vertx | pass: r:3741 o:1672 / fail: r:121 o:307 | | pass: r:578 o:219 / fail: r:11 o:24 | | pass: r:855 o:427 / fail: r:17 o:95 | pass: r:1159 o:510 / fail: r:32 o:92 | pass: r:1149 o:516 / fail: r:61 o:96 |
-| | r:96.9% o:84.5% / r:3.1% f:15.5% | | r:98.1% o:90.1% / r:1.9% f:9.9% | | r:98.1% o:81.8% / r:1.9% f:18.2% | r:97.3% o:84.7% / r:2.7% f:15.3% | r:95.0% o:84.3% / r:5.0% f:15.7% |
-| | score: 93.8 | | score: 96.1 | | score: 94.0 | score: 94.2 | score: 92.3 |
-| Justify | pass: r:2133 o:1036 / fail: r:116 o:32 | | pass: r:557 o:236 / fail: r:32 o:7 | pass: r:750 o:296 / fail: r:38 o:7 | pass: r:826 o:504 / fail: r:46 o:18 | | |
-| | r:94.8% o:97.0% / r:5.2% f:3.0% | | r:94.6% o:97.1% / r:5.4% f:2.9% | r:95.2% o:97.7% / r:4.8% f:2.3% | r:94.7% o:96.6% / r:5.3% f:3.4% | | |
-| | score: 95.4 | | score: 95.2 | score: 95.8 | score: 95.2 | | |
-
-Each populated cell details the **r**equired and **o**ptional passed and failed test case counts and percentages by Schema specification version, and overall.
-Underneath there is a 'score' for each implementation, out of 100.
-The score weights test results of _required_ features at triple _optional_ features, meaning 75% of the score is reserved for _required_ features,
-whereas _optional_ features only account for a maximum 25% of the score.
### Feature comparison conclusions
@@ -171,7 +155,7 @@ There are also a couple of notes to call out for different implementations aroun
## Performance comparison
-Run the performance comparison locally with `./gradlew runBenchmarks`, or view previous runs on [GitHub][performanceBenchmarkRuns].
+Run the performance comparison locally with `./gradlew runBenchmarks`, or view previous runs in the [GitHub Pages workflow runs][GitHubPagesWfRuns].
How fast is the implementation at validating JSON? To find out, two different performance suites were run using
the [Java Microbenchmark Harness][jhm]:
@@ -322,8 +306,8 @@ Adding a new validator implementation is relatively straight forward and very we
Ensure tests pass!
5. Register your new Implementation type in [Implementations.java](src/main/java/org/creekservice/kafka/test/perf/implementations/Implementations.java).
This will ensure the new implementation is included in the docs and included in the functional test
-6. Manually add appropriate benchmark methods to [JsonSerdeBenchmark.java](src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java)
- and [JsonValidateBenchmark.java](src/main/java/org/creekservice/kafka/test/perf/JsonValidateBenchmark.java).
+6. Manually add appropriate benchmark methods to [JsonSerdeBenchmark.java](src/main/java/org/creekservice/kafka/test/perf/performance/JsonSerdeBenchmark.java)
+ and [JsonValidateBenchmark.java](src/main/java/org/creekservice/kafka/test/perf/performance/JsonValidateBenchmark.java).
This is currently manual as JMH library does provide a way to generate these automatically.
There should be one test per supported draft version. See the other methods in these classes for examples.
7. Run `./gradlew` to format your code, perform static analysis and run the tests.
@@ -342,5 +326,4 @@ Adding a new validator implementation is relatively straight forward and very we
[JSON-Schema-Test-Suite]: https://github.com/json-schema-org/JSON-Schema-Test-Suite
[jhm]: https://github.com/openjdk/jmh
[confluent]: https://www.confluent.io/
-[functionalTestRuns]: https://github.com/creek-service/json-schema-validation-comparison/actions/workflows/run-func-test.yml
-[performanceBenchmarkRuns]: https://github.com/creek-service/json-schema-validation-comparison/actions/workflows/run-perf-test.yml
+[GitHubPagesWfRuns]: https://github.com/creek-service/json-schema-validation-comparison/actions/workflows/gh-pages.yml
diff --git a/build.gradle.kts b/build.gradle.kts
index b55c27a..4e1d0d6 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -117,14 +117,14 @@ val pullTask = tasks.register("pull-json-schema-test-suite") {
val runFunctionalTests = tasks.register("runFunctionalTests") {
classpath = sourceSets.main.get().runtimeClasspath
- mainClass.set("org.creekservice.kafka.test.perf.testsuite.JsonTestSuiteMain")
+ mainClass.set("org.creekservice.kafka.test.perf.FunctionalMain")
args = listOf(jsonSchemaTestSuiteDir.get().asFile.absolutePath)
dependsOn(pullTask)
}
tasks.register("runBenchmarks") {
classpath = sourceSets.main.get().runtimeClasspath
- mainClass.set("org.creekservice.kafka.test.perf.BenchmarkRunner")
+ mainClass.set("org.creekservice.kafka.test.perf.PerformanceMain")
args(listOf(
// Output results in csv format
"-rf", "csv",
@@ -136,7 +136,7 @@ tasks.register("runBenchmarks") {
val runBenchmarkSmokeTest = tasks.register("runBenchmarkSmokeTest") {
classpath = sourceSets.main.get().runtimeClasspath
- mainClass.set("org.creekservice.kafka.test.perf.BenchmarkRunner")
+ mainClass.set("org.creekservice.kafka.test.perf.PerformanceMain")
args(listOf(
// No warmup:
"-wi", "0",
diff --git a/docs/_docs/0. home.md b/docs/_docs/0. home.md
new file mode 100644
index 0000000..b587eb2
--- /dev/null
+++ b/docs/_docs/0. home.md
@@ -0,0 +1,38 @@
+---
+title: Comparison of JVM based JSON Schema Validation Implementations
+permalink: /
+layout: single
+header:
+ image: /assets/images/json.png
+excerpt: |
+ Trying to decide which JSON validation library to use in the JVM?
+ This post compares the features and performance characteristics of each of the available implementations to help you make an informed decision.
+toc: false
+classes: wide
+---
+
+This micro-site reports the results of a feature and performance comparison of different JVM-based implementations of a JSON schema validator.
+
+**Note:** The full list of JVM-based & non-JVM based implementations of a JSON schema validator
+can be found on the main JSON Schema site's [implementations page ][JSON-Schema-Implementations]{:target="_blank"}
+{: .notice--warning}
+
+**Note:** The source code for the feature testing and performance benchmarking can be found
+[ on GitHub][GitHub-Project]{: .btn .btn--success}{:target="_blank"}
+{: .notice--warning}
+
+The initial purpose of this comparison was to provide information to drive the decision on which JSON validation library Creek should make use of.
+However, the code and the results are shared to help others who are faced with a similar decision.
+
+The results shown in the tables and charts in this micro-site update automatically as new versions of implementations are released
+and as the standard [JSON Schema test suite ][JSON-Schema-Test-Suite]{:target="_blank"} is updated by the community.
+It will _not_ update as new JSON schema specifications are released without a back-end code change.
+
+Read on to view which implementations are under test and the results of their functional and performance comparison.
+
+**Note:** The author of this repository is not affiliated with any of the implementations covered by this test suite.
+{: .notice--warning}
+
+[JSON-Schema-Implementations]: https://json-schema.org/implementations
+[GitHub-Project]: https://github.com/creek-service/json-schema-validation-comparison
+[JSON-Schema-Test-Suite]: https://github.com/json-schema-org/JSON-Schema-Test-Suite
diff --git a/docs/_docs/1. implementations.md b/docs/_docs/1. implementations.md
index d7ffc4a..df5abbd 100644
--- a/docs/_docs/1. implementations.md
+++ b/docs/_docs/1. implementations.md
@@ -14,6 +14,12 @@ See below for an up-to-date list of the JVM based JSON Validator implementations
+## Note to maintainers
+
+If you are the maintainer of one of the implementations listed below, and you feel your implementation is poorly represented,
+or you maintain a JVM-based implementation not yet covered in this comparison, then please feel free to raise a PR
+against the underlying [ GitHub Repo](https://github.com/creek-service/json-schema-validation-comparison){: .btn .btn--success}{:target="_blank"}.
+
[//]: # (Table scripts: https://github.com/fiduswriter/Simple-DataTables)
@@ -43,6 +49,4 @@ See below for an up-to-date list of the JVM based JSON Validator implementations
])
}
});
-
-
\ No newline at end of file
diff --git a/docs/_docs/2. functional.md b/docs/_docs/2. functional.md
new file mode 100644
index 0000000..53d5b0d
--- /dev/null
+++ b/docs/_docs/2. functional.md
@@ -0,0 +1,139 @@
+---
+title: Results of functionality comparison of JVM based JSON Schema Validation Implementations
+permalink: /functional
+layout: single
+header:
+ image: /assets/images/json.png
+toc: true
+classes: wide
+---
+
+## Test setup
+
+Each validator implementation is run through a suite of functional tests.
+The tests cover almost every aspect of the different drafts of the JSON Schema specification.
+The tests cover both positive cases, where the validation should succeed, and negative cases, where validation should fail.
+The tests cover both _required_ features and those deemed _optional_ by the specifications.
+
+The tests are curated by the JSON community and are available [ on GitHub][JSON-Schema-Test-Suite]{: .btn .btn--success}{:target="_blank"}.
+
+## Results
+
+For each schema specification an implementation supports, the number of test cases that pass and fail is tracked,
+split into those covering _required_ vs _optional_ features.
+
+### Summary of results
+
+The chart below uses the **score** from the **overall** column in the [Summary results table](#summary-results-table) below to visually
+present the feature completeness of each validator implementation.
+
+
+
+
+
+#### Summary results table
+
+
+
+
+
+**Note:** >>> scroll to the right for more columns on the table.
+{: .notice--warning}
+
+In the table above the columns cover the different JSON schema draft versions, plus an **overall** aggregate result,
+and each row is the results for a specific validator implementation.
+
+Unpopulated cells indicate the implementation does not support that specific schema draft.
+
+Populated cells detail the number (and percentage) of **r**equired and **o**ptional test cases that **pass** and **fail**.
+
+Each populated cell also contains a **score**, indicating the functional completeness of the implementation, out of 100.
+The **score** weights test results of _required_ features at triple _optional_ features, meaning 75% of the score is reserved for _required_ features,
+whereas _optional_ features only account for a maximum 25% of the score.
+
+[//]: # (Chart scripts: https://www.chartjs.org/docs/latest/)
+
+
+[//]: # (Table scripts: https://github.com/fiduswriter/Simple-DataTables)
+
+
+
+
+
+[JSON-Schema-Test-Suite]: https://github.com/json-schema-org/JSON-Schema-Test-Suite
+
+### Detailed results
+
+Below is a more detailed set of results for each specification draft an implementation supports.
+Each table details the number of test cases that pass and fail for each test file in the JSON schema test suite.
+
+[//]: # (Do not add content below this line, or delete the line following this comment, as the build appends data to this file)
diff --git a/docs/_docs/3. performance.md b/docs/_docs/3. performance.md
new file mode 100644
index 0000000..22c864e
--- /dev/null
+++ b/docs/_docs/3. performance.md
@@ -0,0 +1,11 @@
+---
+title: Results of performance comparison of JVM based JSON Schema Validation Implementations
+permalink: /performance
+layout: single
+header:
+ image: /assets/images/json.png
+toc: true
+classes: wide
+---
+
+WIP [https://github.com/creek-service/json-schema-validation-comparison/issues/53](https://github.com/creek-service/json-schema-validation-comparison/issues/53).
\ No newline at end of file
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java b/src/main/java/org/creekservice/kafka/test/perf/FunctionalMain.java
similarity index 67%
rename from src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java
rename to src/main/java/org/creekservice/kafka/test/perf/FunctionalMain.java
index df1fdb1..8956ee3 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/FunctionalMain.java
@@ -14,25 +14,32 @@
* limitations under the License.
*/
-package org.creekservice.kafka.test.perf.testsuite;
+package org.creekservice.kafka.test.perf;
import static java.util.stream.Collectors.toMap;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import org.creekservice.kafka.test.perf.implementations.Implementation;
import org.creekservice.kafka.test.perf.implementations.Implementations;
+import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite;
import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite.Result;
import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite.TestPredicate;
+import org.creekservice.kafka.test.perf.testsuite.TestSuiteLoader;
import org.creekservice.kafka.test.perf.testsuite.output.PerDraftSummary;
import org.creekservice.kafka.test.perf.testsuite.output.Summary;
import org.creekservice.kafka.test.perf.util.Logging;
/** Entry point for the functional tests. */
-public final class JsonTestSuiteMain {
+public final class FunctionalMain {
static {
Logging.disable();
@@ -41,7 +48,7 @@ public final class JsonTestSuiteMain {
// Increase locally to allow for meaningful profiling:
private static final int ITERATIONS = 1;
- private JsonTestSuiteMain() {}
+ private FunctionalMain() {}
@SuppressFBWarnings("PATH_TRAVERSAL_IN")
public static void main(final String... args) {
@@ -74,10 +81,27 @@ public static void main(final String... args) {
}
private static void outputResults(final Map results) {
- System.out.println("# Overall comparison");
- System.out.println(new Summary(results));
- System.out.println();
- System.out.println("# Specific Draft & Implementation results");
- System.out.println(new PerDraftSummary(results));
+ final Path reportRoot = Paths.get("build/reports/creek/");
+
+ final Summary summary = new Summary(results);
+ writeOutput(summary.toMarkdown(), reportRoot.resolve("functional-summary.md"));
+ writeOutput(summary.toJson(), reportRoot.resolve("functional-summary.json"));
+
+ final PerDraftSummary perDraftSummary = new PerDraftSummary(results);
+ writeOutput(perDraftSummary.toMarkdown(), reportRoot.resolve("per-draft.md"));
+
+ System.out.println("Results written to " + reportRoot.toAbsolutePath());
+ }
+
+ private static void writeOutput(final String content, final Path path) {
+ try {
+ final Path parent = path.getParent();
+ if (parent != null) {
+ Files.createDirectories(parent);
+ }
+ Files.write(path, content.getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
}
}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/ImplementationsMain.java b/src/main/java/org/creekservice/kafka/test/perf/ImplementationsMain.java
index 5047a9a..28e5084 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/ImplementationsMain.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/ImplementationsMain.java
@@ -16,6 +16,7 @@
package org.creekservice.kafka.test.perf;
+import org.creekservice.kafka.test.perf.implementations.Implementations;
import org.creekservice.kafka.test.perf.util.ImplsJsonFormatter;
/** Main entry point for getting information about the implementations under test */
@@ -24,6 +25,6 @@ public final class ImplementationsMain {
private ImplementationsMain() {}
public static void main(final String[] args) {
- System.out.println(ImplsJsonFormatter.implDetailsAsJson());
+ System.out.println(ImplsJsonFormatter.implDetailsAsJson(Implementations.all()));
}
}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/BenchmarkRunner.java b/src/main/java/org/creekservice/kafka/test/perf/PerformanceMain.java
similarity index 86%
rename from src/main/java/org/creekservice/kafka/test/perf/BenchmarkRunner.java
rename to src/main/java/org/creekservice/kafka/test/perf/PerformanceMain.java
index 85780db..138b27a 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/BenchmarkRunner.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/PerformanceMain.java
@@ -16,9 +16,10 @@
package org.creekservice.kafka.test.perf;
-public final class BenchmarkRunner {
+/** Entry point for running the performance benchmarks. */
+public final class PerformanceMain {
- private BenchmarkRunner() {}
+ private PerformanceMain() {}
public static void main(final String[] args) throws Exception {
org.openjdk.jmh.Main.main(args);
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/ConfluentImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/ConfluentImplementation.java
index 040941e..d760cfe 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/ConfluentImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/ConfluentImplementation.java
@@ -26,6 +26,7 @@
import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
import io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializer;
import io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializerConfig;
+import java.awt.Color;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -47,7 +48,8 @@ public class ConfluentImplementation implements Implementation {
Language.Java,
Licence.Apache_v2_0,
Set.of(DRAFT_04, DRAFT_06, DRAFT_07),
- "https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/serdes-json.html");
+ "https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/serdes-json.html",
+ new Color(255, 255, 255));
private static final String TOPIC_NAME = "t";
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/Implementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/Implementation.java
index e4997e4..f79f673 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/Implementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/Implementation.java
@@ -20,6 +20,7 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonValue;
+import java.awt.Color;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Set;
@@ -90,6 +91,7 @@ class MetaData {
private final Licence licence;
private final Set<SchemaSpec> supported;
private final URL url;
+ private final Color color;
/**
* Construct metadata about a specific validator implementation.
@@ -100,6 +102,9 @@ class MetaData {
* @param licence the licence the validator library is released under.
* @param supported the set of supported JSON schema draft specifications.
* @param url the url to the validator libraries implementation or documentation.
+ * @param color the RGB color to use for this implementation in charts.
+ * Alpha is ignored.
*/
public MetaData(
final String longName,
@@ -107,12 +112,14 @@ public MetaData(
final Language language,
final Licence licence,
final Set<SchemaSpec> supported,
- final String url) {
+ final String url,
+ final Color color) {
this.longName = requireNonNull(longName, "longName").trim();
this.shortName = requireNonNull(shortName, "shortName").trim();
this.language = requireNonNull(language, "language");
this.licence = requireNonNull(licence, "licence");
this.supported = Set.copyOf(requireNonNull(supported, "supported"));
+ this.color = requireNonNull(color, "color");
try {
this.url = new URL(requireNonNull(url, "url"));
} catch (MalformedURLException e) {
@@ -128,6 +135,21 @@ public MetaData(
}
}
+ /**
+ * Temp constructor to avoid issues for anyone currently adding new implementation.
+ *
+ * <p>Will be removed soon.
+ */
+ public MetaData(
+ final String longName,
+ final String shortName,
+ final Language language,
+ final Licence licence,
+ final Set<SchemaSpec> supported,
+ final String url) {
+ this(longName, shortName, language, licence, supported, url, new Color(235, 54, 172));
+ }
+
@JsonProperty("longName")
public String longName() {
return longName;
@@ -158,6 +180,11 @@ public Set<SchemaSpec> supported() {
return new TreeSet<>(supported);
}
+ @JsonProperty("color")
+ public String color() {
+ return "rgb(" + color.getRed() + "," + color.getGreen() + "," + color.getBlue() + ")";
+ }
+
// Final, empty finalize method stops spotbugs CT_CONSTRUCTOR_THROW
// Can be moved to base type after https://github.com/spotbugs/spotbugs/issues/2665
@Override
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/JacksonImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/JacksonImplementation.java
index 78815f4..0d8eeef 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/JacksonImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/JacksonImplementation.java
@@ -19,6 +19,7 @@
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
+import java.awt.Color;
import java.io.IOException;
import java.util.Set;
import org.creekservice.kafka.test.perf.model.TestModel;
@@ -35,7 +36,8 @@ public class JacksonImplementation implements Implementation {
Language.Java,
Licence.Apache_v2_0,
Set.of(SchemaSpec.DRAFT_07),
- "https://github.com/FasterXML/jackson-core");
+ "https://github.com/FasterXML/jackson-core",
+ new Color(255, 255, 255));
private ObjectMapper mapper = JsonMapper.builder().build();
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/JustifyImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/JustifyImplementation.java
index 0253fb2..46e4789 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/JustifyImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/JustifyImplementation.java
@@ -24,6 +24,7 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
import jakarta.json.JsonReader;
+import java.awt.Color;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -53,7 +54,8 @@ public class JustifyImplementation implements Implementation {
Language.Java,
Licence.Apache_v2_0,
SUPPORTED.keySet(),
- "https://github.com/leadpony/justify");
+ "https://github.com/leadpony/justify",
+ new Color(153, 102, 255));
private ProblemHandler handler =
problems -> {
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/MedeiaImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/MedeiaImplementation.java
index bd07c3b..9d39965 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/MedeiaImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/MedeiaImplementation.java
@@ -32,6 +32,7 @@
import com.worldturner.medeia.api.ValidationOptions;
import com.worldturner.medeia.api.jackson.MedeiaJacksonApi;
import com.worldturner.medeia.schema.validation.SchemaValidator;
+import java.awt.Color;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
@@ -39,10 +40,10 @@
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
-import org.creekservice.kafka.test.perf.TestSchemas;
import org.creekservice.kafka.test.perf.model.TestModel;
import org.creekservice.kafka.test.perf.testsuite.AdditionalSchemas;
import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.util.TestSchemas;
@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
public class MedeiaImplementation implements Implementation {
@@ -60,7 +61,8 @@ public class MedeiaImplementation implements Implementation {
Language.Kotlin,
Licence.Apache_v2_0,
SUPPORTED.keySet(),
- "https://github.com/worldturner/medeia-validator");
+ "https://github.com/worldturner/medeia-validator",
+ new Color(201, 203, 207));
private static final ValidationOptions VALIDATOR_OPTIONS =
new ValidationOptions().withValidateSchema(false);
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/NetworkNtImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/NetworkNtImplementation.java
index 6b1f26b..4298619 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/NetworkNtImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/NetworkNtImplementation.java
@@ -32,6 +32,7 @@
import com.networknt.schema.JsonSchemaFactory;
import com.networknt.schema.SpecVersion;
import com.networknt.schema.ValidationMessage;
+import java.awt.Color;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Map;
@@ -58,7 +59,8 @@ public class NetworkNtImplementation implements Implementation {
Language.Java,
Licence.Apache_v2_0,
SUPPORTED.keySet(),
- "https://github.com/networknt/json-schema-validator");
+ "https://github.com/networknt/json-schema-validator",
+ new Color(255, 205, 86));
private ObjectMapper mapper = JsonMapper.builder().build();
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/SchemaFriendImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/SchemaFriendImplementation.java
index a33b6fb..6376385 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/SchemaFriendImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/SchemaFriendImplementation.java
@@ -27,6 +27,7 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.json.JsonMapper;
+import java.awt.Color;
import java.io.IOException;
import java.net.URI;
import java.util.Map;
@@ -51,7 +52,8 @@ public class SchemaFriendImplementation implements Implementation {
Language.Java,
Licence.Apache_v2_0,
Set.of(DRAFT_2020_12, DRAFT_2019_09, DRAFT_07, DRAFT_06, DRAFT_04, DRAFT_03),
- "https://github.com/jimblackler/jsonschemafriend");
+ "https://github.com/jimblackler/jsonschemafriend",
+ new Color(255, 159, 64));
private ObjectMapper mapper = JsonMapper.builder().build();
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/SkemaImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/SkemaImplementation.java
index a0dcd9e..8fb5cd1 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/SkemaImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/SkemaImplementation.java
@@ -28,6 +28,7 @@
import com.github.erosb.jsonsKema.SchemaLoaderConfig;
import com.github.erosb.jsonsKema.ValidationFailure;
import com.github.erosb.jsonsKema.Validator;
+import java.awt.Color;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.Set;
@@ -51,7 +52,8 @@ public class SkemaImplementation implements Implementation {
Language.Kotlin,
Licence.MIT,
Set.of(DRAFT_2020_12),
- "https://github.com/erosb/json-sKema");
+ "https://github.com/erosb/json-sKema",
+ new Color(0, 13, 38));
private ObjectMapper mapper = JsonMapper.builder().build();
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/SnowImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/SnowImplementation.java
index 1df8c25..9728bf2 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/SnowImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/SnowImplementation.java
@@ -30,6 +30,7 @@
import com.qindesign.json.schema.Specification;
import com.qindesign.json.schema.Validator;
import com.qindesign.json.schema.net.URI;
+import java.awt.Color;
import java.io.ByteArrayInputStream;
import java.net.URL;
import java.nio.file.Path;
@@ -56,7 +57,8 @@ public class SnowImplementation implements Implementation {
Language.Java,
Licence.GNU_Affero_General_Public_v3_0,
SUPPORTED.keySet(),
- "https://github.com/ssilverman/snowy-json");
+ "https://github.com/ssilverman/snowy-json",
+ new Color(75, 192, 192));
private ObjectMapper mapper = JsonMapper.builder().build();
diff --git a/src/main/java/org/creekservice/kafka/test/perf/implementations/VertxImplementation.java b/src/main/java/org/creekservice/kafka/test/perf/implementations/VertxImplementation.java
index 1be9607..3fc996e 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/implementations/VertxImplementation.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/implementations/VertxImplementation.java
@@ -28,6 +28,7 @@
import io.vertx.json.schema.OutputFormat;
import io.vertx.json.schema.OutputUnit;
import io.vertx.json.schema.Validator;
+import java.awt.Color;
import java.io.IOException;
import java.util.Map;
import org.creekservice.kafka.test.perf.model.TestModel;
@@ -51,7 +52,8 @@ public class VertxImplementation implements Implementation {
Language.Java,
Licence.Apache_v2_0,
SUPPORTED.keySet(),
- "https://github.com/eclipse-vertx/vertx-json-schema");
+ "https://github.com/eclipse-vertx/vertx-json-schema",
+ new Color(255, 99, 132));
private ObjectMapper mapper = JsonMapper.builder().build();
diff --git a/src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java b/src/main/java/org/creekservice/kafka/test/perf/performance/JsonSerdeBenchmark.java
similarity index 98%
rename from src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java
rename to src/main/java/org/creekservice/kafka/test/perf/performance/JsonSerdeBenchmark.java
index 96ac38a..35cc842 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/performance/JsonSerdeBenchmark.java
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.creekservice.kafka.test.perf;
+package org.creekservice.kafka.test.perf.performance;
import static java.util.concurrent.TimeUnit.MICROSECONDS;
@@ -36,6 +36,7 @@
import org.creekservice.kafka.test.perf.testsuite.AdditionalSchemas;
import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
import org.creekservice.kafka.test.perf.util.Logging;
+import org.creekservice.kafka.test.perf.util.TestSchemas;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
diff --git a/src/main/java/org/creekservice/kafka/test/perf/JsonValidateBenchmark.java b/src/main/java/org/creekservice/kafka/test/perf/performance/JsonValidateBenchmark.java
similarity index 99%
rename from src/main/java/org/creekservice/kafka/test/perf/JsonValidateBenchmark.java
rename to src/main/java/org/creekservice/kafka/test/perf/performance/JsonValidateBenchmark.java
index 0f1a939..8071f50 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/JsonValidateBenchmark.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/performance/JsonValidateBenchmark.java
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.creekservice.kafka.test.perf;
+package org.creekservice.kafka.test.perf.performance;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java
index 5250bed..b2ce6aa 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java
@@ -56,10 +56,15 @@ public PerDraftSummary(final Map res
TreeMap::new));
}
- @Override
- public String toString() {
+ public String toMarkdown() {
return results.entrySet().stream()
- .map(e -> "## " + e.getKey() + lineSeparator() + e.getValue())
+ .map(
+ e ->
+ "#### "
+ + e.getKey()
+ + lineSeparator()
+ + lineSeparator()
+ + e.getValue().toMarkdown())
.collect(Collectors.joining(lineSeparator()));
}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java
index a7b52c2..91fd6be 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java
@@ -20,7 +20,9 @@
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toMap;
-import java.text.NumberFormat;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.math.BigDecimal;
+import java.math.RoundingMode;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
@@ -38,7 +40,7 @@ public final class Summary {
/** How much weight to put in required features vs optional. */
private static final int REQUIRED_WEIGHT = 3;
- private static final String COL_IMPL = "Impl";
+ private static final String COL_IMPL = "Implementations";
private static final String COL_OVERALL = "Overall";
private final Table table;
@@ -52,15 +54,18 @@ public Summary(final Map results) {
this.table = createTable(results);
}
- @Override
- public String toString() {
- return table.toString()
+ public String toMarkdown() {
+ return table.toMarkdown()
+ lineSeparator()
+ lineSeparator()
+ String.format(
"Time: %d.%03ds", +duration.toSecondsPart(), duration.toMillisPart());
}
+ public String toJson() {
+ return table.toJson();
+ }
+
private static Table createTable(
final Map<Implementation, JsonSchemaTestSuite.Result> results) {
final Map<String, Map<String, Counts>> counts = buildCounts(results);
@@ -137,40 +142,19 @@ private static void populateRow(
final Map<String, Counts> specCounts,
final List<String> specColumns) {
row.put(COL_IMPL, impl);
- specColumns.forEach(col -> row.put(col, formatCell(specCounts.get(col))));
- }
-
- private static String formatCell(final Counts counts) {
- if (counts.totalTotal() == 0) {
- return "";
- }
- return "pass: r:"
- + counts.reqPassed
- + " o:"
- + counts.optPassed
- + " / fail: r:"
- + counts.reqFail()
- + " o:"
- + counts.optFail()
- + lineSeparator()
- + "r:"
- + counts.reqPassPct()
- + " o:"
- + counts.optPassPct()
- + " / r:"
- + counts.reqFailPct()
- + " f:"
- + counts.optFailPct()
- + lineSeparator()
- + "score: "
- + counts.formattedScore();
+ specColumns.forEach(col -> row.put(col, specCounts.get(col)));
}
private static class Counts {
+ @JsonProperty("requiredPass")
private int reqPassed;
+
private int reqTotal;
+
+ @JsonProperty("optionalPass")
private int optPassed;
+
private int optTotal;
void add(final JsonSchemaTestSuite.TestResult result) {
@@ -192,35 +176,39 @@ int totalTotal() {
return reqTotal + optTotal;
}
+ @JsonProperty("requiredFail")
int reqFail() {
return reqTotal - reqPassed;
}
+ @JsonProperty("optionalFail")
int optFail() {
return optTotal - optPassed;
}
- String reqPassPct() {
+ @JsonProperty("requiredPassPct")
+ BigDecimal reqPassPct() {
return percentage(reqPassed, reqTotal);
}
- String optPassPct() {
+ @JsonProperty("optionalPassPct")
+ BigDecimal optPassPct() {
return percentage(optPassed, optTotal);
}
- String reqFailPct() {
+ @JsonProperty("requiredFailPct")
+ BigDecimal reqFailPct() {
return percentage(reqFail(), reqTotal);
}
- String optFailPct() {
+ @JsonProperty("optionalFailPct")
+ BigDecimal optFailPct() {
return percentage(optFail(), optTotal);
}
- String formattedScore() {
- final NumberFormat nf = NumberFormat.getNumberInstance();
- nf.setMinimumFractionDigits(1);
- nf.setMaximumFractionDigits(1);
- return nf.format(score());
+ @JsonProperty("score")
+ BigDecimal formattedScore() {
+ return BigDecimal.valueOf(score()).setScale(1, RoundingMode.HALF_EVEN);
}
double score() {
@@ -229,6 +217,35 @@ String formattedScore() {
return 100 * ((reqPct * REQUIRED_WEIGHT) + optPct) / (REQUIRED_WEIGHT + 1);
}
+ @Override
+ public String toString() {
+ if (totalTotal() == 0) {
+ return "";
+ }
+ return "score: "
+ + formattedScore()
+ + " pass: r:"
+ + reqPassed
+ + " ("
+ + reqPassPct()
+ + "%)"
+ + " o:"
+ + optPassed
+ + " ("
+ + optPassPct()
+ + "%)"
+ + " fail: r:"
+ + reqFail()
+ + " ("
+ + reqFailPct()
+ + "%)"
+ + " o:"
+ + optFail()
+ + " ("
+ + optFailPct()
+ + "%)";
+ }
+
static Counts combine(final Counts c0, final Counts c1) {
final Counts counts = new Counts();
counts.reqPassed = c0.reqPassed + c1.reqPassed;
@@ -238,11 +255,12 @@ static Counts combine(final Counts c0, final Counts c1) {
return counts;
}
- private String percentage(final int value, final int total) {
- final NumberFormat nf = NumberFormat.getPercentInstance();
- nf.setMinimumFractionDigits(1);
- nf.setMaximumFractionDigits(1);
- return total == 0 ? nf.format(0) : nf.format(((double) value / total));
+ private BigDecimal percentage(final int value, final int total) {
+ return total == 0
+ ? BigDecimal.ZERO
+ : BigDecimal.valueOf(value)
+ .multiply(BigDecimal.valueOf(100))
+ .divide(BigDecimal.valueOf(total), 1, RoundingMode.HALF_EVEN);
}
}
}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/util/ImplsJsonFormatter.java b/src/main/java/org/creekservice/kafka/test/perf/util/ImplsJsonFormatter.java
index ff44359..7ebf5eb 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/util/ImplsJsonFormatter.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/util/ImplsJsonFormatter.java
@@ -22,15 +22,10 @@
import java.util.List;
import java.util.stream.Collectors;
import org.creekservice.kafka.test.perf.implementations.Implementation;
-import org.creekservice.kafka.test.perf.implementations.Implementations;
public final class ImplsJsonFormatter {
- public static String implDetailsAsJson() {
- return implDetailsAsJson(Implementations.all());
- }
-
- static String implDetailsAsJson(final List<Implementation> impls) {
+ public static String implDetailsAsJson(final List<Implementation> impls) {
final ObjectMapper mapper = JsonMapper.builder().build();
final List metadata =
diff --git a/src/main/java/org/creekservice/kafka/test/perf/util/Table.java b/src/main/java/org/creekservice/kafka/test/perf/util/Table.java
index 5e5fbd5..0ba90eb 100644
--- a/src/main/java/org/creekservice/kafka/test/perf/util/Table.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/util/Table.java
@@ -19,6 +19,11 @@
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.joining;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonValue;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -32,24 +37,24 @@
public class Table {
- private final List<String> headers;
+ @JsonProperty("headings")
+ private final List<String> headings;
+
+ @JsonProperty("rows")
private final List<Row> rows = new ArrayList<>();
- private final Map<String, Integer> widths = new LinkedHashMap<>();
- public Table(final List<String> headers) {
- this.headers = List.copyOf(requireNonNull(headers, "headers"));
+ public Table(final List<String> headings) {
+ this.headings = List.copyOf(requireNonNull(headings, "headers"));
}
public Row addRow() {
- final Row row = new Row(headers);
+ final Row row = new Row(headings);
rows.add(row);
- widths.clear();
return row;
}
- @Override
- public String toString() {
- ensureWidths();
+ public String toMarkdown() {
+ final Map<String, Integer> widths = calcWidths();
final String format =
widths.values().stream()
@@ -61,7 +66,7 @@ public String toString() {
.map(width -> "-".repeat(Math.max(3, width + 2)))
.collect(joining("|", "|", "|" + System.lineSeparator()));
- final String columnHeaders = String.format(format, headers.toArray());
+ final String columnHeaders = String.format(format, headings.toArray());
final String formattedRows =
rows.stream().map(row -> formattedRows(format, row)).collect(joining());
@@ -90,12 +95,10 @@ private static String formattedRows(final String format, final Row row) {
}
@SuppressWarnings("DataFlowIssue")
- private void ensureWidths() {
- if (!widths.isEmpty()) {
- return;
- }
+ private Map<String, Integer> calcWidths() {
+ final Map<String, Integer> widths = new LinkedHashMap<>();
- headers.forEach(h -> widths.put(h, h.length()));
+ headings.forEach(h -> widths.put(h, h.length()));
rows.forEach(
row ->
@@ -105,6 +108,8 @@ private void ensureWidths() {
header,
(ignored, existing) ->
Math.max(existing, width(value)))));
+
+ return widths;
}
private static int width(final Object value) {
@@ -117,7 +122,15 @@ private static int width(final Object value) {
public void map(final Consumer<Row> c) {
rows.forEach(c);
- widths.clear();
+ }
+
+ public String toJson() {
+ try {
+ final ObjectMapper mapper = JsonMapper.builder().build();
+ return mapper.writeValueAsString(this);
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
}
public static final class Row {
@@ -156,5 +169,10 @@ private void validateHeader(final String header) {
throw new IllegalArgumentException("Not a valid header: " + header);
}
}
+
+ @JsonValue
+ private Collection