From e527471d1d22ee890b02fc2a6d5a852564d82a50 Mon Sep 17 00:00:00 2001
From: Big Andy <8012398+big-andy-coates@users.noreply.github.com>
Date: Wed, 27 Sep 2023 16:12:05 +0100
Subject: [PATCH] Functional and performance comparison of JSON serde and
validation libraries.
---
.github/workflows/build.yml | 7 +-
.github/workflows/dependabot-auto-merge.yml | 3 +-
README.md | 263 ++++++++++++++-
build.gradle.kts | 131 ++++++--
.../kotlin/creek-common-convention.gradle.kts | 21 +-
config/checkstyle/suppressions.xml | 1 +
config/spotbugs/suppressions.xml | 6 +
img/Feature comparison score.svg | 1 +
.../test/perf/BenchmarkRunner.java} | 19 +-
.../kafka/test/perf/JsonSerdeBenchmark.java | 145 ++++++++
.../test/perf/JsonValidateBenchmark.java | 234 +++++++++++++
.../kafka/test/perf/TestSchemas.java | 31 ++
.../kafka/test/perf/model/ModelState.java | 50 +++
.../kafka/test/perf/model/PolyBase.java | 25 ++
.../kafka/test/perf/model/PolyTypeA.java | 55 +++
.../kafka/test/perf/model/PolyTypeB.java | 54 +++
.../kafka/test/perf/model/TestModel.java | 93 ++++++
.../kafka/test/perf/serde/ConfluentSerde.java | 80 +++++
.../kafka/test/perf/serde/Deserializer.java | 23 ++
.../kafka/test/perf/serde/EveritSerde.java | 155 +++++++++
.../perf/serde/JacksonIntermediateSerde.java | 63 ++++
.../kafka/test/perf/serde/JacksonSerde.java | 57 ++++
.../kafka/test/perf/serde/JustifySerde.java | 163 +++++++++
.../kafka/test/perf/serde/MedeiaSerde.java | 159 +++++++++
.../kafka/test/perf/serde/NetworkNtSerde.java | 160 +++++++++
.../test/perf/serde/SchemaFriendSerde.java | 158 +++++++++
.../kafka/test/perf/serde/SerdeImpl.java | 54 +++
.../kafka/test/perf/serde/Serializer.java | 34 ++
.../kafka/test/perf/serde/SkemaSerde.java | 128 +++++++
.../kafka/test/perf/serde/SnowSerde.java | 172 ++++++++++
.../kafka/test/perf/serde/VertxSerde.java | 147 ++++++++
.../perf/testsuite/JsonSchemaTestSuite.java | 314 ++++++++++++++++++
.../perf/testsuite/JsonTestSuiteMain.java | 98 ++++++
.../kafka/test/perf/testsuite/SchemaSpec.java | 121 +++++++
.../test/perf/testsuite/SpecTestSuites.java | 40 +++
.../kafka/test/perf/testsuite/TestCase.java | 65 ++++
.../kafka/test/perf/testsuite/TestSuite.java | 85 +++++
.../test/perf/testsuite/TestSuiteLoader.java | 151 +++++++++
.../test/perf/testsuite/TestSuiteMapper.java | 32 ++
.../test/perf/testsuite/ValidatorFactory.java | 83 +++++
.../testsuite/output/PerDraftSummary.java | 153 +++++++++
.../test/perf/testsuite/output/Summary.java | 213 ++++++++++++
.../kafka/test/perf/util/Executable.java | 21 ++
.../kafka/test/perf/util/Logging.java | 44 +++
.../kafka/test/perf/util/Table.java | 188 +++++++++++
.../test/perf/validator/EveritValidator.java | 26 ++
.../test/perf/validator/JustifyValidator.java | 26 ++
.../test/perf/validator/MedeiaValidator.java | 26 ++
.../perf/validator/NetworkNtValidator.java | 26 ++
.../validator/SchemaFriendValidator.java} | 16 +-
.../test/perf/validator/SkemaValidator.java | 26 ++
.../test/perf/validator/SnowValidator.java | 26 ++
.../test/perf/validator/ValidatorState.java | 61 ++++
.../test/perf/validator/VertxValidator.java | 26 ++
src/main/resources/schema-draft-2020-12.json | 92 +++++
src/main/resources/schema-draft-7.json | 92 +++++
.../test/perf/serde/ConfluentSerdeTest.java | 19 ++
.../test/perf/serde/EveritSerdeTest.java} | 7 +-
.../test/perf/serde/JacksonSerdeTest.java | 19 ++
.../test/perf/serde/JustifySerdeTest.java | 19 ++
.../test/perf/serde/MedeiaSerdeTest.java | 19 ++
.../test/perf/serde/NetworkNtSerdeTest.java | 19 ++
.../perf/serde/SchemaFriendSerdeTest.java | 19 ++
.../kafka/test/perf/serde/SerdeTest.java | 90 +++++
.../kafka/test/perf/serde/SkemaSerdeTest.java | 19 ++
.../kafka/test/perf/serde/SnowSerdeTest.java | 19 ++
.../kafka/test/perf/serde/VertxSerdeTest.java | 19 ++
src/test/resources/junit-platform.properties | 25 --
68 files changed, 4953 insertions(+), 83 deletions(-)
create mode 100644 img/Feature comparison score.svg
rename src/main/java/org/creekservice/{internal/example/ExampleImpl.java => kafka/test/perf/BenchmarkRunner.java} (58%)
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/JsonValidateBenchmark.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/TestSchemas.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/model/ModelState.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/model/PolyBase.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeA.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeB.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/model/TestModel.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/ConfluentSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/Deserializer.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/EveritSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/JacksonIntermediateSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/JacksonSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/JustifySerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/MedeiaSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/NetworkNtSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/SchemaFriendSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/SerdeImpl.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/Serializer.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/SkemaSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/SnowSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/serde/VertxSerde.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonSchemaTestSuite.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/SchemaSpec.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/SpecTestSuites.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/TestCase.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuite.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteLoader.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteMapper.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/ValidatorFactory.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/util/Executable.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/util/Logging.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/util/Table.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/EveritValidator.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/JustifyValidator.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/MedeiaValidator.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/NetworkNtValidator.java
rename src/{test/java/org/creekservice/internal/example/ExampleImplTest.java => main/java/org/creekservice/kafka/test/perf/validator/SchemaFriendValidator.java} (61%)
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/SkemaValidator.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/SnowValidator.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/ValidatorState.java
create mode 100644 src/main/java/org/creekservice/kafka/test/perf/validator/VertxValidator.java
create mode 100644 src/main/resources/schema-draft-2020-12.json
create mode 100644 src/main/resources/schema-draft-7.json
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/ConfluentSerdeTest.java
rename src/{main/java/org/creekservice/api/example/Example.java => test/java/org/creekservice/kafka/test/perf/serde/EveritSerdeTest.java} (77%)
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/JacksonSerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/JustifySerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/MedeiaSerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/NetworkNtSerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/SchemaFriendSerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/SerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/SkemaSerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/SnowSerdeTest.java
create mode 100644 src/test/java/org/creekservice/kafka/test/perf/serde/VertxSerdeTest.java
delete mode 100644 src/test/resources/junit-platform.properties
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 39744d7..01c0767 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -9,6 +9,8 @@ on:
tags: [ "v*.*.*" ]
pull_request:
branches: [ main ]
+ schedule:
+ - cron: "39 5 1,15 * *"
workflow_dispatch:
inputs:
publish_artifacts:
@@ -36,7 +38,10 @@ jobs:
with:
java-version: '17'
distribution: 'adopt'
- cache: gradle
+ - name: Setup Gradle
+ uses: gradle/gradle-build-action@a4cf152f482c7ca97ef56ead29bf08bcd953284c # v2.7.0
+ with:
+ gradle-home-cache-cleanup: true
- name: Build
env:
COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }}
diff --git a/.github/workflows/dependabot-auto-merge.yml b/.github/workflows/dependabot-auto-merge.yml
index 119f465..440bde5 100644
--- a/.github/workflows/dependabot-auto-merge.yml
+++ b/.github/workflows/dependabot-auto-merge.yml
@@ -3,8 +3,7 @@
name: Dependabot
on:
pull_request:
- branches:
- - main
+ branches: [ main ]
permissions:
contents: read
diff --git a/README.md b/README.md
index 51ac75b..7637c5a 100644
--- a/README.md
+++ b/README.md
@@ -5,19 +5,274 @@
Feature and performance comparison of different JVM-based implementations of JSON schema validators.
+## Schema validator implementations
+
+This repo tests the following implementations of JSON schema validation:
+
+| Implementation under test | Written In | Supported JSON schema specifications | License |
+|--------------------------------------|------------|------------------------------------------|----------------------------------------|
+| [Vert.x Json Schema][1] | Java | 2020-12, 2019-09 draft-07, -04 | Apache License 2.0 |
+| [jsonschemafriend][2] | Java | 2020-12, 2019-09 draft-07, -06, -04, -03 | Apache License 2.0 |
+| [networknt/json-schema-validator][3] | Java | 2020-12, 2019-09 draft-07, -06, -04 | Apache License 2.0 |
+| [Snow][4] | Java | 2019-09 draft-07, -06 | GNU Affero General Public License v3.0 |
+| [everit-org/json-schema][5] | Java | draft-07, -06, -04 | Apache License 2.0 |
+| [Justify][6] | Java | draft-07, -06, -04 | Apache License 2.0 |
+| [worldturner/medeia-validator][7] | Kotlin | draft-07, -06, -04 | Apache License 2.0 |
+| [erosb/json-sKema][8] | Kotlin | 2020-12 | MIT |
+
## Feature comparison
-To run the comparison...
+To run the comparison: `./gradlew runFunctionalTests`
+
+Runs each implementation through the standard [JSON Schema Test Suite][JSON-Schema-Test-Suite].
+The suite contains both positive and negative test cases, i.e. JSON that should both pass and fail validation,
+and covers all schema specifications, i.e. draft-03 through to the latest.
+
+Running the testing will output one table for each implementation and supported schema specification combination,
+showing the number of test cases that pass and fail in each test file.
+
+For example,
+
+Medeia: DRAFT_07:
+
+| suite | pass | fail | total |
+|--------------------------------------------|------|------|-------|
+| additionalItems.json | 18 | 0 | 18 |
+| additionalProperties.json | 16 | 0 | 16 |
+| allOf.json | 30 | 0 | 30 |
+| anyOf.json | 18 | 0 | 18 |
+| boolean_schema.json | 18 | 0 | 18 |
+| const.json | 50 | 0 | 50 |
+| contains.json | 21 | 0 | 21 |
+| default.json | 7 | 0 | 7 |
+| definitions.json | 2 | 0 | 2 |
+| dependencies.json | 33 | 3 | 36 |
+| enum.json | 33 | 0 | 33 |
+| exclusiveMaximum.json | 4 | 0 | 4 |
+| exclusiveMinimum.json | 4 | 0 | 4 |
+| format.json | 102 | 0 | 102 |
+| id.json | 7 | 0 | 7 |
+| if-then-else.json | 26 | 0 | 26 |
+| infinite-loop-detection.json | 2 | 0 | 2 |
+| items.json | 28 | 0 | 28 |
+| maxItems.json | 6 | 0 | 6 |
+| maxLength.json | 7 | 0 | 7 |
+| maxProperties.json | 10 | 0 | 10 |
+| maximum.json | 8 | 0 | 8 |
+| minItems.json | 6 | 0 | 6 |
+| minLength.json | 7 | 0 | 7 |
+| minProperties.json | 8 | 0 | 8 |
+| minimum.json | 11 | 0 | 11 |
+| multipleOf.json | 10 | 0 | 10 |
+| not.json | 12 | 0 | 12 |
+| oneOf.json | 27 | 0 | 27 |
+| optional/bignum.json | 9 | 0 | 9 |
+| optional/content.json | 10 | 0 | 10 |
+| optional/cross-draft.json | 1 | 1 | 2 |
+| optional/ecmascript-regex.json | 55 | 19 | 74 |
+| optional/float-overflow.json | 1 | 0 | 1 |
+| optional/format/date-time.json | 23 | 2 | 25 |
+| optional/format/date.json | 47 | 0 | 47 |
+| optional/format/email.json | 11 | 4 | 15 |
+| optional/format/hostname.json | 18 | 0 | 18 |
+| optional/format/idn-email.json | 8 | 2 | 10 |
+| optional/format/idn-hostname.json | 38 | 13 | 51 |
+| optional/format/ipv4.json | 14 | 1 | 15 |
+| optional/format/ipv6.json | 29 | 11 | 40 |
+| optional/format/iri-reference.json | 13 | 0 | 13 |
+| optional/format/iri.json | 14 | 1 | 15 |
+| optional/format/json-pointer.json | 38 | 0 | 38 |
+| optional/format/regex.json | 8 | 0 | 8 |
+| optional/format/relative-json-pointer.json | 15 | 3 | 18 |
+| optional/format/time.json | 39 | 6 | 45 |
+| optional/format/unknown.json | 7 | 0 | 7 |
+| optional/format/uri-reference.json | 13 | 0 | 13 |
+| optional/format/uri-template.json | 9 | 1 | 10 |
+| optional/format/uri.json | 26 | 0 | 26 |
+| optional/non-bmp-regex.json | 12 | 0 | 12 |
+| pattern.json | 9 | 0 | 9 |
+| patternProperties.json | 23 | 0 | 23 |
+| properties.json | 27 | 1 | 28 |
+| propertyNames.json | 13 | 0 | 13 |
+| ref.json | 76 | 0 | 76 |
+| refRemote.json | 21 | 0 | 21 |
+| required.json | 16 | 0 | 16 |
+| type.json | 80 | 0 | 80 |
+| uniqueItems.json | 69 | 0 | 69 |
+| unknownKeyword.json | 3 | 0 | 3 |
+
+Followed by a table containing a summary of pass/fail rates of required/optional test cases for each implementation,
+per supported JSON schema version.
+
+For example:
-Results...
+| Impl | Overall | DRAFT_03 | DRAFT_04 | DRAFT_06 | DRAFT_07 | DRAFT_2019_09 | DRAFT_2020_12 |
+|--------------|-----------------------------------------|-----------------------------------|-------------------------------------|-------------------------------------|-------------------------------------|--------------------------------------|---------------------------------------|
+| NetworkNt | pass: r:4429 o:1980 / fail: r:221 o:302 | | pass: r:579 o:224 / fail: r:10 o:19 | pass: r:768 o:268 / fail: r:20 o:35 | pass: r:848 o:438 / fail: r:24 o:84 | pass: r:1118 o:521 / fail: r:73 o:81 | pass: r:1116 o:529 / fail: r:94 o:83 |
+| | r:95.2% o:86.8% / r:4.8% f:13.2% | | r:98.3% o:92.2% / r:1.7% f:7.8% | r:97.5% o:88.4% / r:2.5% f:11.6% | r:97.2% o:83.9% / r:2.8% f:16.1% | r:93.9% o:86.5% / r:6.1% f:13.5% | r:92.2% o:86.4% / r:7.8% f:13.6% |
+| | score: 93.1 | | score: 96.8 | score: 95.2 | score: 93.9 | score: 92.0 | score: 90.8 |
+| Skema | pass: r:1184 o:490 / fail: r:26 o:122 | | | | | | pass: r:1184 o:490 / fail: r:26 o:122 |
+| | r:97.9% o:80.1% / r:2.1% f:19.9% | | | | | | r:97.9% o:80.1% / r:2.1% f:19.9% |
+| | score: 93.4 | | | | | | score: 93.4 |
+| Medeia | pass: r:2237 o:928 / fail: r:12 o:140 | | pass: r:585 o:205 / fail: r:4 o:38 | pass: r:784 o:265 / fail: r:4 o:38 | pass: r:868 o:458 / fail: r:4 o:64 | | |
+| | r:99.5% o:86.9% / r:0.5% f:13.1% | | r:99.3% o:84.4% / r:0.7% f:15.6% | r:99.5% o:87.5% / r:0.5% f:12.5% | r:99.5% o:87.7% / r:0.5% f:12.3% | | |
+| | score: 96.3 | | score: 95.6 | score: 96.5 | score: 96.6 | | |
+| Snow | pass: r:2810 o:1354 / fail: r:41 o:73 | | | pass: r:778 o:291 / fail: r:10 o:12 | pass: r:864 o:499 / fail: r:8 o:23 | pass: r:1168 o:564 / fail: r:23 o:38 | |
+| | r:98.6% o:94.9% / r:1.4% f:5.1% | | | r:98.7% o:96.0% / r:1.3% f:4.0% | r:99.1% o:95.6% / r:0.9% f:4.4% | r:98.1% o:93.7% / r:1.9% f:6.3% | |
+| | score: 97.6 | | | score: 98.1 | score: 98.2 | score: 97.0 | |
+| Everit | pass: r:2192 o:934 / fail: r:57 o:134 | | pass: r:579 o:214 / fail: r:10 o:29 | pass: r:765 o:275 / fail: r:23 o:28 | pass: r:848 o:445 / fail: r:24 o:77 | | |
+| | r:97.5% o:87.5% / r:2.5% f:12.5% | | r:98.3% o:88.1% / r:1.7% f:11.9% | r:97.1% o:90.8% / r:2.9% f:9.2% | r:97.2% o:85.2% / r:2.8% f:14.8% | | |
+| | score: 95.0 | | score: 95.7 | score: 95.5 | score: 94.2 | | |
+| SchemaFriend | pass: r:5049 o:2311 / fail: r:34 o:82 | pass: r:433 o:104 / fail: r:0 o:7 | pass: r:588 o:233 / fail: r:1 o:10 | pass: r:785 o:293 / fail: r:3 o:10 | pass: r:869 o:505 / fail: r:3 o:17 | pass: r:1187 o:584 / fail: r:4 o:18 | pass: r:1187 o:592 / fail: r:23 o:20 |
+| | r:99.3% o:96.6% / r:0.7% f:3.4% | r:100.0% o:93.7% / r:0.0% f:6.3% | r:99.8% o:95.9% / r:0.2% f:4.1% | r:99.6% o:96.7% / r:0.4% f:3.3% | r:99.7% o:96.7% / r:0.3% f:3.3% | r:99.7% o:97.0% / r:0.3% f:3.0% | r:98.1% o:96.7% / r:1.9% f:3.3% |
+| | score: 98.6 | score: 98.4 | score: 98.8 | score: 98.9 | score: 98.9 | score: 99.0 | score: 97.8 |
+| Vertx | pass: r:3741 o:1672 / fail: r:121 o:307 | | pass: r:578 o:219 / fail: r:11 o:24 | | pass: r:855 o:427 / fail: r:17 o:95 | pass: r:1159 o:510 / fail: r:32 o:92 | pass: r:1149 o:516 / fail: r:61 o:96 |
+| | r:96.9% o:84.5% / r:3.1% f:15.5% | | r:98.1% o:90.1% / r:1.9% f:9.9% | | r:98.1% o:81.8% / r:1.9% f:18.2% | r:97.3% o:84.7% / r:2.7% f:15.3% | r:95.0% o:84.3% / r:5.0% f:15.7% |
+| | score: 93.8 | | score: 96.1 | | score: 94.0 | score: 94.2 | score: 92.3 |
+| Justify | pass: r:2133 o:1036 / fail: r:116 o:32 | | pass: r:557 o:236 / fail: r:32 o:7 | pass: r:750 o:296 / fail: r:38 o:7 | pass: r:826 o:504 / fail: r:46 o:18 | | |
+| | r:94.8% o:97.0% / r:5.2% f:3.0% | | r:94.6% o:97.1% / r:5.4% f:2.9% | r:95.2% o:97.7% / r:4.8% f:2.3% | r:94.7% o:96.6% / r:5.3% f:3.4% | | |
+| | score: 95.4 | | score: 95.2 | score: 95.8 | score: 95.2 | | |
+
+Each populated cell details the **r**equired and **o**ptional passed and failed test case counts and percentages by Schema specification version, and overall.
+Underneath there is a 'score' for each implementation, out of 100.
+The score weights test results of _required_ features at triple the weight of _optional_ features, meaning 75% of the score is reserved for _required_ features,
+whereas _optional_ features only account for a maximum 25% of the score.
+
+### Feature comparison conclusions
+
+`SchemaFriend` comes out as the clear winner of the functional test, with support for all Schema specifications, at the time of writing, _and_ the highest overall score.
+
+Ignoring which implementations support which drafts for a moment, a rough ranking on functionality would be:
+
+![Feature comparison scores](img/Feature comparison score.svg)
+
+Obviously, your own requirements around which specification drafts you want, or need, to use may exclude some of these.
+
+There are also a couple of notes to call out for different implementations around features outside of those covered by the standard tests.
+
+| Implementation | Notes |
+|--------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| [Vert.x Json Schema][1] | Brings in Netty as a dependency, which seems unnecessary. There doesn't seem to be a way to disable loading schemas from remote locations or injecting referenced schemas. |
+| [jsonschemafriend][2] | |
+| [networknt/json-schema-validator][3] | |
+| [Snow][4] | This is intended as a reference implementation. |
+| [everit-org/json-schema][5]          | Deprecated. Replaced by [erosb/json-sKema][8]                                                                                                                                   |
+| [Justify][6] | |
+| [worldturner/medeia-validator][7] | No sign of active development :( |
+| [erosb/json-sKema][8] | Replaces [everit-org/json-schema][5]. Looks to still be in initial development... |
## Performance comparison
-To run the comparison...
+To run the comparison: `./gradlew runBenchmarks`
+
+How fast is the implementation at validating JSON? To find out, two different performance suites were run using
+the [Java Microbenchmark Harness][jhm]:
-Results...
+1. Performance test running the standard [JSON Schema Test Suite][JSON-Schema-Test-Suite].
+2. Performance test serializing and deserializing Java Pojos to JSON and back.
+
+The first of these benchmarks covers a wide range of JSON schema functionality, while the second focuses on a more
+real-world example, using a small common subset of functionality, in the context of using schema validated JSON
+as a serialization format. Combined, these should give a good comparison of performance.
+
+### JSON schema test suite benchmark
+
+The `JsonValidateBenchmark` benchmark measures the average time taken to run through all _positive_ test cases in the standard
+[JSON Schema Test Suite][JSON-Schema-Test-Suite], by schema specification.
+
+The benchmark excludes negative test cases and the cost of parsing the schema and building the validator logic,
+leaving the benchmark focused on measuring the cost of validation.
+
+The benchmark excludes _negative_ test cases, i.e. test cases with data that should _not_ pass validation, for two
+reasons:
+
+1. In most use-cases, and specifically the Kafka SerDe use-case we're investigating for, validation fails should be very rare.
+2. The cost of error handling varied between different implementations, generally correlated to the richness of the error messages.
+ Including negative cases would penalise implementations for useful error messages.
+
+The benchmark excludes the cost of parsing the schema and building the necessary validator logic as in most use-cases, and
+specifically the Kafka SerDe use-case we're investigating for, schemas don't tend to evolve or change often, meaning the
+cost of validation is much more important than the cost of building the validation logic.
+
+Example output:
+
+```
+Benchmark Mode Cnt Score Error Units
+JsonValidateBenchmark.measureDraft_2019_09_NetworkNt avgt 20 6.017 ± 0.216 ms/op
+JsonValidateBenchmark.measureDraft_2019_09_SchemaFriend avgt 20 1.482 ± 0.005 ms/op
+JsonValidateBenchmark.measureDraft_2019_09_Snow avgt 20 316.178 ± 28.242 ms/op
+JsonValidateBenchmark.measureDraft_2019_09_Vertx avgt 20 3.818 ± 0.028 ms/op
+JsonValidateBenchmark.measureDraft_2020_12_NetworkNt avgt 20 7.305 ± 0.073 ms/op
+JsonValidateBenchmark.measureDraft_2020_12_SchemaFriend avgt 20 1.654 ± 0.005 ms/op
+JsonValidateBenchmark.measureDraft_2020_12_Skema avgt 20 2.812 ± 0.015 ms/op
+JsonValidateBenchmark.measureDraft_2020_12_Vertx avgt 20 3.669 ± 0.019 ms/op
+JsonValidateBenchmark.measureDraft_3_SchemaFriend avgt 20 0.235 ± 0.005 ms/op
+JsonValidateBenchmark.measureDraft_4_Everit avgt 20 0.328 ± 0.006 ms/op
+JsonValidateBenchmark.measureDraft_4_Justify avgt 20 0.634 ± 0.009 ms/op
+JsonValidateBenchmark.measureDraft_4_Medeia avgt 20 0.346 ± 0.006 ms/op
+JsonValidateBenchmark.measureDraft_4_NetworkNt avgt 20 1.086 ± 0.004 ms/op
+JsonValidateBenchmark.measureDraft_4_SchemaFriend avgt 20 0.480 ± 0.017 ms/op
+JsonValidateBenchmark.measureDraft_4_Vertx avgt 20 1.362 ± 0.006 ms/op
+JsonValidateBenchmark.measureDraft_6_Everit avgt 20 0.400 ± 0.003 ms/op
+JsonValidateBenchmark.measureDraft_6_Justify avgt 20 0.816 ± 0.008 ms/op
+JsonValidateBenchmark.measureDraft_6_Medeia avgt 20 0.416 ± 0.007 ms/op
+JsonValidateBenchmark.measureDraft_6_NetworkNt avgt 20 1.771 ± 0.044 ms/op
+JsonValidateBenchmark.measureDraft_6_SchemaFriend avgt 20 0.700 ± 0.018 ms/op
+JsonValidateBenchmark.measureDraft_6_Snow avgt 20 78.241 ± 6.515 ms/op
+JsonValidateBenchmark.measureDraft_7_Everit avgt 20 0.508 ± 0.005 ms/op
+JsonValidateBenchmark.measureDraft_7_Justify avgt 20 1.044 ± 0.019 ms/op
+JsonValidateBenchmark.measureDraft_7_Medeia avgt 20 0.666 ± 0.007 ms/op
+JsonValidateBenchmark.measureDraft_7_NetworkNt avgt 20 2.573 ± 0.032 ms/op
+JsonValidateBenchmark.measureDraft_7_SchemaFriend avgt 20 0.918 ± 0.012 ms/op
+JsonValidateBenchmark.measureDraft_7_Snow avgt 20 76.627 ± 6.336 ms/op
+JsonValidateBenchmark.measureDraft_7_Vertx avgt 20 2.141 ± 0.072 ms/op
+```
+Note: results from running on 2021 Macbook Pro, M1 Max: 2.06 - 3.22 GHz, in High Power mode, JDK 17.0.6
+
+### Schema validated JSON (de)serialization benchmark
+
+The `JsonSerdeBenchmark` benchmark measures the average time taken to serialize a simple Java object, including polymorphism, to JSON and back,
+validating the intermediate JSON data on both legs of the journey.
+
+This is a more real-world test, keeping to the basics of what's possible with JSON schemas, as that's what most use-cases use.
+
+Example results:
+
+```
+Benchmark Mode Cnt Score Error Units
+JsonSerdeBenchmark.measureConfluentRoundTrip avgt 20 107.620 ± 0.546 us/op
+JsonSerdeBenchmark.measureEveritRoundTrip avgt 20 99.747 ± 1.894 us/op
+JsonSerdeBenchmark.measureJacksonIntermediateRoundTrip avgt 20 4.032 ± 0.162 us/op
+JsonSerdeBenchmark.measureJacksonRoundTrip avgt 20 4.114 ± 0.204 us/op
+JsonSerdeBenchmark.measureJustifyRoundTrip avgt 20 72.263 ± 0.811 us/op
+JsonSerdeBenchmark.measureMedeiaRoundTrip avgt 20 30.055 ± 0.351 us/op
+JsonSerdeBenchmark.measureNetworkNtRoundTrip avgt 20 1195.955 ± 33.623 us/op
+JsonSerdeBenchmark.measureSchemaFriendRoundTrip avgt 20 142.186 ± 4.105 us/op
+JsonSerdeBenchmark.measureSkemaRoundTrip avgt 20 166.841 ± 0.303 us/op
+JsonSerdeBenchmark.measureSnowRoundTrip avgt 20 603.705 ± 4.627 us/op
+JsonSerdeBenchmark.measureVertxRoundTrip avgt 20 514.517 ± 1.337 us/op
+```
+Note: results from running on 2021 Macbook Pro, M1 Max: 2.06 - 3.22 GHz, in High Power mode, JDK 17.0.6
+
+### Performance comparison conclusions
+
+Coming soon...
## Overall comparison
+Coming soon...
+
## Conclusions
+Coming soon...
+
+[1]: https://github.com/eclipse-vertx/vertx-json-schema
+[2]: https://github.com/jimblackler/jsonschemafriend
+[3]: https://github.com/networknt/json-schema-validator
+[4]: https://github.com/ssilverman/snowy-json
+[5]: https://github.com/everit-org/json-schema
+[6]: https://github.com/leadpony/justify
+[7]: https://github.com/worldturner/medeia-validator
+[8]: https://github.com/erosb/json-sKema
+[JSON-Schema-Test-Suite]: https://github.com/json-schema-org/JSON-Schema-Test-Suite
+[jhm]: https://github.com/openjdk/jmh
\ No newline at end of file
diff --git a/build.gradle.kts b/build.gradle.kts
index 7ed15e5..57233d9 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -16,44 +16,137 @@
plugins {
java
- jacoco
`creek-common-convention`
- `creek-module-convention`
- `creek-coverage-convention`
- `creek-publishing-convention`
- `creek-sonatype-publishing-convention`
- id("pl.allegro.tech.build.axion-release") version "1.15.4" // https://plugins.gradle.org/plugin/pl.allegro.tech.build.axion-release
+ id("org.ajoberstar.grgit.service") version "5.0.0"
}
-project.version = scmVersion.version
+repositories {
+ maven {
+ url = uri("https://packages.confluent.io/maven/")
+ group = "io.confluent"
+ }
-allprojects {
- tasks.jar {
- onlyIf { sourceSets.main.get().allSource.files.isNotEmpty() }
+ maven {
+ url = uri("https://jitpack.io")
}
}
val creekVersion = "0.4.2-SNAPSHOT"
-val guavaVersion = "32.1.2-jre" // https://mvnrepository.com/artifact/com.google.guava/guava
-val log4jVersion = "2.20.0" // https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core
-val junitVersion = "5.10.0" // https://mvnrepository.com/artifact/org.junit.jupiter/junit-jupiter-api
-val junitPioneerVersion = "2.0.1" // https://mvnrepository.com/artifact/org.junit-pioneer/junit-pioneer
-val mockitoVersion = "5.5.0" // https://mvnrepository.com/artifact/org.mockito/mockito-junit-jupiter
-val hamcrestVersion = "2.2" // https://mvnrepository.com/artifact/org.hamcrest/hamcrest-core
+val log4jVersion = "2.20.0"
+val junitVersion = "5.10.0"
+val junitPioneerVersion = "2.0.1"
+val mockitoVersion = "5.5.0"
+val hamcrestVersion = "2.2"
+val jmhVersion = "1.36"
+val confluentVersion = "7.3.3"
+val vertxVersion = "4.4.1"
dependencies {
+ implementation("org.openjdk.jmh:jmh-core:$jmhVersion")
+ annotationProcessor("org.openjdk.jmh:jmh-generator-annprocess:$jmhVersion")
+ implementation("com.fasterxml.jackson.core:jackson-databind")
+
+ implementation("org.json:json:20230227")
+
+ implementation("com.worldturner.medeia:medeia-validator-jackson:1.1.0")
+
+ implementation("com.github.erosb:everit-json-schema:1.14.2")
+
+ implementation("com.github.erosb:json-sKema:0.6.0")
+
+ implementation("io.confluent:kafka-streams-json-schema-serde:$confluentVersion")
+ implementation("io.confluent:kafka-schema-registry-client:$confluentVersion")
+
+ implementation("io.vertx:vertx-json-schema:$vertxVersion")
+ compileOnly("io.vertx:vertx-codegen:$vertxVersion")
+
+ implementation("net.jimblackler.jsonschemafriend:core:0.11.4")
+
+ implementation("com.networknt:json-schema-validator:1.0.80"){
+ exclude(group = "org.apache.commons", module = "commons-lang3")
+ }
+
+ implementation("com.qindesign:snowy-json:0.16.0")
+ runtimeOnly("org.glassfish:jakarta.json:2.0.0:module")
+
+ implementation("org.leadpony.justify:justify:3.1.0")
+
+ implementation("org.apache.logging.log4j:log4j-core:$log4jVersion");
+ runtimeOnly("org.apache.logging.log4j:log4j-slf4j2-impl:$log4jVersion")
+
testImplementation("org.creekservice:creek-test-hamcrest:$creekVersion")
- testImplementation("org.creekservice:creek-test-util:$creekVersion")
- testImplementation("org.creekservice:creek-test-conformity:$creekVersion")
+ implementation("org.creekservice:creek-test-util:$creekVersion")
testImplementation("org.junit.jupiter:junit-jupiter-api:$junitVersion")
testImplementation("org.junit.jupiter:junit-jupiter-params:$junitVersion")
testImplementation("org.junit-pioneer:junit-pioneer:$junitPioneerVersion")
testImplementation("org.mockito:mockito-junit-jupiter:$mockitoVersion")
testImplementation("org.hamcrest:hamcrest-core:$hamcrestVersion")
- testImplementation("com.google.guava:guava-testlib:$guavaVersion")
testImplementation("org.apache.logging.log4j:log4j-core:$log4jVersion")
testImplementation("org.apache.logging.log4j:log4j-slf4j2-impl:$log4jVersion")
testRuntimeOnly("org.junit.jupiter:junit-jupiter-engine:$junitVersion")
}
+
+tasks.withType<JavaCompile> {
+ options.compilerArgs.add("-Xlint:all,-serial,-requires-automatic,-requires-transitive-automatic,-module,-processing")
+}
+
+val jsonSchemaTestSuiteDir = layout.buildDirectory.dir("json-schema-test-suite")
+
+val cloneTask = tasks.register("clone-json-schema-test-suite") {
+ outputs.dir(jsonSchemaTestSuiteDir)
+
+ onlyIf { !jsonSchemaTestSuiteDir.get().asFile.exists() }
+
+ doLast {
+ org.ajoberstar.grgit.Grgit.clone {
+ dir = jsonSchemaTestSuiteDir.get().asFile
+ uri = "git@github.com:json-schema-org/JSON-Schema-Test-Suite.git"
+ }
+ }
+}
+
+val pullTask = tasks.register("pull-json-schema-test-suite") {
+ dependsOn(cloneTask)
+
+ doLast {
+ println("pulling.........")
+ org.ajoberstar.grgit.Grgit.open {
+ dir = jsonSchemaTestSuiteDir.get().asFile
+ }.pull()
+ }
+}
+
+val runFunctionalTests = tasks.register<JavaExec>("runFunctionalTests") {
+ classpath = sourceSets.main.get().runtimeClasspath
+ mainClass.set("org.creekservice.kafka.test.perf.testsuite.JsonTestSuiteMain")
+ args = listOf(jsonSchemaTestSuiteDir.get().asFile.absolutePath);
+ dependsOn(pullTask)
+}
+
+tasks.register<JavaExec>("runBenchmarks") {
+ classpath = sourceSets.main.get().runtimeClasspath
+ mainClass.set("org.creekservice.kafka.test.perf.BenchmarkRunner")
+ dependsOn(pullTask)
+}
+
+val benchmarkSmokeTest = tasks.register<JavaExec>("runBenchmarkSmokeTest") {
+ classpath = sourceSets.main.get().runtimeClasspath
+ mainClass.set("org.creekservice.kafka.test.perf.BenchmarkRunner")
+ args(listOf("-wi", "0", "-i", "1", "-t", "1", "-r", "1s"))
+ dependsOn(pullTask)
+}
+
+tasks.test {
+ dependsOn(pullTask, runFunctionalTests, benchmarkSmokeTest)
+}
+
+// Below is required until the following is fixed in IntelliJ:
+// https://youtrack.jetbrains.com/issue/IDEA-316081/Gradle-8-toolchain-error-Toolchain-from-executable-property-does-not-match-toolchain-from-javaLauncher-property-when-different
+gradle.taskGraph.whenReady {
+    allTasks.filterIsInstance<JavaExec>().forEach {
+ it.setExecutable(it.javaLauncher.get().executablePath.asFile.absolutePath)
+ }
+}
+
defaultTasks("format", "static", "check")
diff --git a/buildSrc/src/main/kotlin/creek-common-convention.gradle.kts b/buildSrc/src/main/kotlin/creek-common-convention.gradle.kts
index b171956..7ec9e39 100644
--- a/buildSrc/src/main/kotlin/creek-common-convention.gradle.kts
+++ b/buildSrc/src/main/kotlin/creek-common-convention.gradle.kts
@@ -19,7 +19,9 @@
*
*
Apply to all java modules, usually excluding the root project in multi-module sets.
*
- * Version: 1.7
+ * Version: 1.9
+ * - 1.9: Add `allDeps` task.
+ * - 1.8: Tweak test config to reduce build time.
* - 1.7: Switch to setting Java version via toolchain
* - 1.6: Remove GitHub packages for snapshots
* - 1.5: Add filters to exclude generated sources
@@ -70,8 +72,8 @@ tasks.withType {
tasks.test {
useJUnitPlatform()
- setForkEvery(1)
- maxParallelForks = 4
+ setForkEvery(5)
+ maxParallelForks = Runtime.getRuntime().availableProcessors()
testLogging {
showStandardStreams = true
exceptionFormat = org.gradle.api.tasks.testing.logging.TestExceptionFormat.FULL
@@ -117,17 +119,26 @@ if (rootProject.name != project.name) {
}
}
-tasks.register("format") {
+val format = tasks.register("format") {
group = "creek"
description = "Format the code"
dependsOn("spotlessCheck", "spotlessApply")
}
-tasks.register("static") {
+val static = tasks.register("static") {
group = "creek"
description = "Run static code analysis"
dependsOn("checkstyleMain", "checkstyleTest", "spotbugsMain", "spotbugsTest")
+
+ shouldRunAfter(format)
}
+tasks.test {
+ shouldRunAfter(static)
+}
+
+// See: https://solidsoft.wordpress.com/2014/11/13/gradle-tricks-display-dependencies-for-all-subprojects-in-multi-project-build/
+tasks.register<DependencyReportTask>("allDeps") {}
+
diff --git a/config/checkstyle/suppressions.xml b/config/checkstyle/suppressions.xml
index aa7e04e..e32b792 100644
--- a/config/checkstyle/suppressions.xml
+++ b/config/checkstyle/suppressions.xml
@@ -26,4 +26,5 @@
+
diff --git a/config/spotbugs/suppressions.xml b/config/spotbugs/suppressions.xml
index 116ee98..ecc0a4f 100644
--- a/config/spotbugs/suppressions.xml
+++ b/config/spotbugs/suppressions.xml
@@ -8,4 +8,10 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/img/Feature comparison score.svg b/img/Feature comparison score.svg
new file mode 100644
index 0000000..3034d46
--- /dev/null
+++ b/img/Feature comparison score.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/src/main/java/org/creekservice/internal/example/ExampleImpl.java b/src/main/java/org/creekservice/kafka/test/perf/BenchmarkRunner.java
similarity index 58%
rename from src/main/java/org/creekservice/internal/example/ExampleImpl.java
rename to src/main/java/org/creekservice/kafka/test/perf/BenchmarkRunner.java
index 1aec54f..85780db 100644
--- a/src/main/java/org/creekservice/internal/example/ExampleImpl.java
+++ b/src/main/java/org/creekservice/kafka/test/perf/BenchmarkRunner.java
@@ -1,5 +1,5 @@
/*
- * Copyright 2021-2023 Creek Contributors (https://github.com/creek-service)
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,20 +14,13 @@
* limitations under the License.
*/
-package org.creekservice.internal.example;
+package org.creekservice.kafka.test.perf;
-import org.creekservice.api.example.Example;
+public final class BenchmarkRunner {
-/** An example class */
-public final class ExampleImpl implements Example {
- private ExampleImpl() {}
+ private BenchmarkRunner() {}
- /**
- * Example method.
- *
- * @return {@code true}.
- */
- public static boolean getTrue() {
- return true;
+ public static void main(final String[] args) throws Exception {
+ org.openjdk.jmh.Main.main(args);
}
}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java b/src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java
new file mode 100644
index 0000000..5dc803b
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/JsonSerdeBenchmark.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf;
+
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
+
+import org.creekservice.kafka.test.perf.model.ModelState;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.serde.ConfluentSerde;
+import org.creekservice.kafka.test.perf.serde.EveritSerde;
+import org.creekservice.kafka.test.perf.serde.JacksonIntermediateSerde;
+import org.creekservice.kafka.test.perf.serde.JacksonSerde;
+import org.creekservice.kafka.test.perf.serde.JustifySerde;
+import org.creekservice.kafka.test.perf.serde.MedeiaSerde;
+import org.creekservice.kafka.test.perf.serde.NetworkNtSerde;
+import org.creekservice.kafka.test.perf.serde.SchemaFriendSerde;
+import org.creekservice.kafka.test.perf.serde.SerdeImpl;
+import org.creekservice.kafka.test.perf.serde.SkemaSerde;
+import org.creekservice.kafka.test.perf.serde.SnowSerde;
+import org.creekservice.kafka.test.perf.serde.VertxSerde;
+import org.creekservice.kafka.test.perf.util.Logging;
+import org.openjdk.jmh.annotations.Benchmark;
+import org.openjdk.jmh.annotations.BenchmarkMode;
+import org.openjdk.jmh.annotations.Fork;
+import org.openjdk.jmh.annotations.Mode;
+import org.openjdk.jmh.annotations.OutputTimeUnit;
+import org.openjdk.jmh.annotations.Threads;
+
+/**
+ * Benchmark results for JSON Serde.
+ *
+ * <p>The benchmark serializes and deserializes roughly 1K of fairly simple JSON. Testing the
+ * performance of different JSON parsers/generators and schema validators.
+ *
+ * <p>The JSON / Model / Schema is deliberately simplistic, as Kafka/Creek use-cases tend to only
+ * use the basic JSON schema features: primitives, enums, arrays, polymorphic types and length
+ * assertions. This can be extended in the future if needed.
+ *
+ * <p>Most recent results (On 2021 Macbook, M1 Max: 2.06 - 3.22 GHz, in High Power mode, JDK
+ * 17.0.6):
+ *
+ */
+@BenchmarkMode(Mode.AverageTime)
+@OutputTimeUnit(MILLISECONDS)
+@Threads(4)
+@Fork(4) // Note: to debug, set fork to 0.
+// @Warmup(iterations = 0, time = 10)
+// @Measurement(iterations = 1, time = 10)
+@SuppressWarnings({"FieldMayBeFinal", "MethodName"}) // not final to avoid folding.
+public class JsonValidateBenchmark {
+
+ static {
+ Logging.disable();
+ }
+
+ @Benchmark
+ public Result measureDraft_4_Medeia(final MedeiaValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_04);
+ }
+
+ @Benchmark
+ public Result measureDraft_6_Medeia(final MedeiaValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_06);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_Medeia(final MedeiaValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+
+ @Benchmark
+ public Result measureDraft_4_Everit(final EveritValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_04);
+ }
+
+ @Benchmark
+ public Result measureDraft_6_Everit(final EveritValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_06);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_Everit(final EveritValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+
+ @Benchmark
+ public Result measureDraft_2020_12_Skema(final SkemaValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2020_12);
+ }
+
+ @Benchmark
+ public Result measureDraft_4_Vertx(final VertxValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_04);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_Vertx(final VertxValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+
+ @Benchmark
+ public Result measureDraft_2019_09_Vertx(final VertxValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2019_09);
+ }
+
+ @Benchmark
+ public Result measureDraft_2020_12_Vertx(final VertxValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2020_12);
+ }
+
+ @Benchmark
+ public Result measureDraft_3_SchemaFriend(final SchemaFriendValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_03);
+ }
+
+ @Benchmark
+ public Result measureDraft_4_SchemaFriend(final SchemaFriendValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_04);
+ }
+
+ @Benchmark
+ public Result measureDraft_6_SchemaFriend(final SchemaFriendValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_06);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_SchemaFriend(final SchemaFriendValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+
+ @Benchmark
+ public Result measureDraft_2019_09_SchemaFriend(final SchemaFriendValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2019_09);
+ }
+
+ @Benchmark
+ public Result measureDraft_2020_12_SchemaFriend(final SchemaFriendValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2020_12);
+ }
+
+ @Benchmark
+ public Result measureDraft_4_NetworkNt(final NetworkNtValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_04);
+ }
+
+ @Benchmark
+ public Result measureDraft_6_NetworkNt(final NetworkNtValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_06);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_NetworkNt(final NetworkNtValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+
+ @Benchmark
+ public Result measureDraft_2019_09_NetworkNt(final NetworkNtValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2019_09);
+ }
+
+ @Benchmark
+ public Result measureDraft_2020_12_NetworkNt(final NetworkNtValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2020_12);
+ }
+
+ @Benchmark
+ public Result measureDraft_6_Snow(final SnowValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_06);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_Snow(final SnowValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+
+ @Benchmark
+ public Result measureDraft_2019_09_Snow(final SnowValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_2019_09);
+ }
+
+ @Benchmark
+ public Result measureDraft_4_Justify(final JustifyValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_04);
+ }
+
+ @Benchmark
+ public Result measureDraft_6_Justify(final JustifyValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_06);
+ }
+
+ @Benchmark
+ public Result measureDraft_7_Justify(final JustifyValidator validator) {
+ return validator.validate(SchemaSpec.DRAFT_07);
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/TestSchemas.java b/src/main/java/org/creekservice/kafka/test/perf/TestSchemas.java
new file mode 100644
index 0000000..a484cf0
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/TestSchemas.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf;
+
+import java.nio.file.Path;
+import org.creekservice.api.test.util.TestPaths;
+
+public final class TestSchemas {
+ private static final Path RESOURCE_ROOT =
+ TestPaths.moduleRoot("json-schema-validation-comparison").resolve("src/main/resources");
+ public static final String DRAFT_2020_SCHEMA =
+ TestPaths.readString(RESOURCE_ROOT.resolve("schema-draft-2020-12.json"));
+ public static final String DRAFT_7_SCHEMA =
+ TestPaths.readString(RESOURCE_ROOT.resolve("schema-draft-7.json"));
+
+ private TestSchemas() {}
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/model/ModelState.java b/src/main/java/org/creekservice/kafka/test/perf/model/ModelState.java
new file mode 100644
index 0000000..6a58093
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/model/ModelState.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.model;
+
+import java.math.BigDecimal;
+import java.util.List;
+import java.util.UUID;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.State;
+
+@State(Scope.Benchmark)
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class ModelState {
+
+ public static final TestModel TEST_MODEL =
+ new TestModel(
+ "some name",
+ new BigDecimal("0145.000001"),
+ TestModel.AnEnum.THAT,
+ List.of(
+ "long", "long", "list", "of", "data", "so", "that", "we've", "got",
+ "some", "time", "spent", "parsing", "all", "this", "json", "data",
+ "long", "long", "list", "of", "data", "so", "that", "we've", "got",
+ "some", "time", "spent", "parsing", "all", "this", "json", "data"),
+ List.of(
+ new PolyTypeA(UUID.randomUUID()),
+ new PolyTypeA(UUID.randomUUID()),
+ new PolyTypeB(12.34000005d),
+ new PolyTypeB(0.0000000002d),
+ new PolyTypeA(UUID.randomUUID()),
+ new PolyTypeA(UUID.randomUUID()),
+ new PolyTypeB(13.34000005d),
+ new PolyTypeB(1.0000000002d)));
+
+ public TestModel model = TEST_MODEL;
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/model/PolyBase.java b/src/main/java/org/creekservice/kafka/test/perf/model/PolyBase.java
new file mode 100644
index 0000000..1d367eb
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/model/PolyBase.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.model;
+
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+
+@SuppressWarnings("unused")
+@JsonTypeInfo(use = JsonTypeInfo.Id.NAME)
+@JsonSubTypes({@JsonSubTypes.Type(PolyTypeA.class), @JsonSubTypes.Type(PolyTypeB.class)})
+public interface PolyBase {}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeA.java b/src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeA.java
new file mode 100644
index 0000000..20e405e
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeA.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.model;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import java.util.Objects;
+import java.util.UUID;
+
+@JsonTypeName("poly-a")
+@SuppressWarnings("unused")
+public final class PolyTypeA implements PolyBase {
+ private final UUID id;
+
+ @JsonCreator
+ public PolyTypeA(@JsonProperty(value = "id", required = true) final UUID id) {
+ this.id = id;
+ }
+
+ public UUID getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ final PolyTypeA polyTypeA = (PolyTypeA) o;
+ return Objects.equals(id, polyTypeA.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(id);
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeB.java b/src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeB.java
new file mode 100644
index 0000000..0f4d9dd
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/model/PolyTypeB.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.model;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.annotation.JsonTypeName;
+import java.util.Objects;
+
+@JsonTypeName("poly-b")
+@SuppressWarnings("unused")
+public final class PolyTypeB implements PolyBase {
+ private final double num;
+
+ @JsonCreator
+ public PolyTypeB(@JsonProperty(value = "num", required = true) final double num) {
+ this.num = num;
+ }
+
+ public double getNum() {
+ return num;
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ final PolyTypeB polyTypeB = (PolyTypeB) o;
+ return Double.compare(polyTypeB.num, num) == 0;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(num);
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/model/TestModel.java b/src/main/java/org/creekservice/kafka/test/perf/model/TestModel.java
new file mode 100644
index 0000000..080ab76
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/model/TestModel.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.model;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import java.math.BigDecimal;
+import java.util.List;
+import java.util.Objects;
+
+public final class TestModel {
+
+ public enum AnEnum {
+ THIS,
+ THAT,
+ OTHER
+ }
+
+ private final String name;
+ private final BigDecimal decimal;
+ private final AnEnum anEnum;
+ private final List<String> list;
+ private final List<PolyBase> polyTypes;
+
+ @JsonCreator
+ public TestModel(
+ @JsonProperty(value = "name", required = true) final String name,
+ @JsonProperty(value = "decimal", required = true) final BigDecimal decimal,
+ @JsonProperty(value = "anEnum", required = true) final AnEnum anEnum,
+ @JsonProperty(value = "list", required = true) final List<String> list,
+ @JsonProperty(value = "polymorphicTypes", required = true)
+ final List<PolyBase> polyTypes) {
+ this.name = name;
+ this.decimal = decimal;
+ this.anEnum = anEnum;
+ this.list = List.copyOf(list);
+ this.polyTypes = List.copyOf(polyTypes);
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public BigDecimal getDecimal() {
+ return decimal;
+ }
+
+ public AnEnum getAnEnum() {
+ return anEnum;
+ }
+
+ public List<String> getList() {
+ return List.copyOf(list);
+ }
+
+ public List<PolyBase> getPolymorphicTypes() {
+ return List.copyOf(polyTypes);
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ final TestModel testModel = (TestModel) o;
+ return Objects.equals(name, testModel.name)
+ && Objects.equals(decimal, testModel.decimal)
+ && anEnum == testModel.anEnum
+ && Objects.equals(list, testModel.list)
+ && Objects.equals(polyTypes, testModel.polyTypes);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(name, decimal, anEnum, list, polyTypes);
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/ConfluentSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/ConfluentSerde.java
new file mode 100644
index 0000000..3cc7138
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/ConfluentSerde.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import io.confluent.kafka.schemaregistry.ParsedSchema;
+import io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient;
+import io.confluent.kafka.schemaregistry.json.JsonSchemaProvider;
+import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
+import io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializer;
+import io.confluent.kafka.serializers.json.KafkaJsonSchemaSerializerConfig;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import org.apache.kafka.common.serialization.Deserializer;
+import org.apache.kafka.common.serialization.Serializer;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+
+@SuppressWarnings("resource")
+public class ConfluentSerde extends SerdeImpl {
+
+ private static final String TOPIC_NAME = "t";
+ private final Serializer<TestModel> serializer;
+ private final Deserializer<TestModel> deserializer;
+ private final Serializer<TestModel> nonValidatingSerializer;
+ public MockSchemaRegistryClient srClient = new MockSchemaRegistryClient();
+
+ public ConfluentSerde() {
+ try {
+ final Optional<ParsedSchema> parsedSchema =
+ new JsonSchemaProvider().parseSchema(TestSchemas.DRAFT_7_SCHEMA, List.of());
+ final int schemaId =
+ srClient.register(TOPIC_NAME + "-value", parsedSchema.orElseThrow());
+
+ final Map<String, Object> validating = new HashMap<>();
+ validating.put(KafkaJsonSchemaSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "ignored");
+ validating.put(KafkaJsonSchemaSerializerConfig.FAIL_INVALID_SCHEMA, true);
+ validating.put(KafkaJsonSchemaSerializerConfig.AUTO_REGISTER_SCHEMAS, false);
+ validating.put(KafkaJsonSchemaSerializerConfig.USE_SCHEMA_ID, schemaId);
+ validating.put(KafkaJsonSchemaSerializerConfig.ID_COMPATIBILITY_STRICT, false);
+
+ serializer = new KafkaJsonSchemaSerializer<>(srClient, validating);
+ deserializer = new KafkaJsonSchemaDeserializer<>(srClient, validating, TestModel.class);
+
+ final Map<String, Object> nonValidating = new HashMap<>(validating);
+ nonValidating.put(KafkaJsonSchemaSerializerConfig.FAIL_INVALID_SCHEMA, false);
+ nonValidatingSerializer = new KafkaJsonSchemaSerializer<>(srClient, nonValidating);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public org.creekservice.kafka.test.perf.serde.Serializer serializer() {
+ return (model, validate) ->
+ validate
+ ? serializer.serialize(TOPIC_NAME, model)
+ : nonValidatingSerializer.serialize(TOPIC_NAME, model);
+ }
+
+ @Override
+ public org.creekservice.kafka.test.perf.serde.Deserializer deserializer() {
+ return data -> deserializer.deserialize(TOPIC_NAME, data);
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/Deserializer.java b/src/main/java/org/creekservice/kafka/test/perf/serde/Deserializer.java
new file mode 100644
index 0000000..fe621a1
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/Deserializer.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import org.creekservice.kafka.test.perf.model.TestModel;
+
+public interface Deserializer {
+ TestModel deserialize(byte[] data);
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/EveritSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/EveritSerde.java
new file mode 100644
index 0000000..0f26948
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/EveritSerde.java
@@ -0,0 +1,155 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_04;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_06;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_07;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import java.io.ByteArrayInputStream;
+import java.util.Collection;
+import java.util.EnumSet;
+import java.util.Map;
+import java.util.Set;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+import org.everit.json.schema.Schema;
+import org.everit.json.schema.loader.SchemaLoader;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+/**
+ * EveritSerde impl.
+ *
+ *
Unfortunately, the validator library requires the JSON to be parsed by org.JSON. As org.JSON
+ * isn't really designed to convert between Java POJOs and JSON, e.g. it doesn't natively support
+ * polymorphic types, using the library requires using Jackson and converting to org.JSON: an
+ * additional cost.
+ */
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class EveritSerde extends SerdeImpl {
+
+ private ObjectMapper mapper = JsonMapper.builder().build();
+ private Schema schema = SchemaLoader.load(new JSONObject(TestSchemas.DRAFT_7_SCHEMA));
+
+ @Override
+ public Serializer serializer() {
+ return (model, validate) -> {
+ try {
+ final Map<String, Object> jsonNode =
+ mapper.convertValue(model, new TypeReference<>() {});
+ if (validate) {
+ final JSONObject jsonObject = new JSONObject(jsonNode);
+ schema.validate(jsonObject);
+ }
+ return mapper.writeValueAsBytes(jsonNode);
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+
+ @Override
+ public Deserializer deserializer() {
+ return bytes -> {
+ try {
+ final Map<String, Object> jsonNode =
+ mapper.readValue(bytes, new TypeReference<>() {});
+ final JSONObject jsonObject = new JSONObject(jsonNode);
+ schema.validate(jsonObject);
+ return mapper.convertValue(jsonNode, TestModel.class);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+
+ private static final Set<SchemaSpec> SUPPORTED = EnumSet.of(DRAFT_04, DRAFT_06, DRAFT_07);
+
+ @Override
+ public ValidatorFactory validator() {
+ return new ValidatorFactory() {
+ @Override
+ public Set<SchemaSpec> supports() {
+ return SUPPORTED;
+ }
+
+ @Override
+ public JsonValidator prepare(
+ final String schema,
+ final SchemaSpec spec,
+ final AdditionalSchemas additionalSchemas) {
+ final Object schemaObject = parse(schema);
+
+ final Schema parsedSchema =
+ schemaLoader(spec)
+ .schemaClient(
+ url ->
+ new ByteArrayInputStream(
+ additionalSchemas
+ .load(url)
+ .getBytes(UTF_8)))
+ .schemaJson(schemaObject)
+ .build()
+ .load()
+ .build();
+
+ return json -> {
+ final Object jsonObject = parse(json);
+ parsedSchema.validate(jsonObject);
+ };
+ }
+
+ private Object parse(final String json) {
+ try {
+ final Object o = mapper.readValue(json, new TypeReference<>() {});
+ if (o instanceof Map) {
+ return new JSONObject((Map<?, ?>) o);
+ }
+ if (o instanceof Collection) {
+ return new JSONArray((Collection<?>) o);
+ }
+ return o;
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private SchemaLoader.SchemaLoaderBuilder schemaLoader(final SchemaSpec spec) {
+ final SchemaLoader.SchemaLoaderBuilder builder = SchemaLoader.builder();
+
+ switch (spec) {
+ case DRAFT_07:
+ return builder.draftV7Support();
+ case DRAFT_06:
+ return builder.draftV6Support();
+ case DRAFT_04:
+ return builder; // DRAFT 4 is the default.
+ default:
+ throw new RuntimeException("Unsupported draft: " + spec);
+ }
+ }
+ };
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/JacksonIntermediateSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/JacksonIntermediateSerde.java
new file mode 100644
index 0000000..e11f5b0
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/JacksonIntermediateSerde.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import java.io.IOException;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+/**
+ * Similar to {@link JacksonSerde}, but going via an intermediate Object state.
+ *
+ * <p>This intermediate state is required by some validation libraries. This impl allows the impact
+ * of the intermediate state to be measured.
+ */
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class JacksonIntermediateSerde extends SerdeImpl {
+
+    private ObjectMapper mapper = JsonMapper.builder().build();
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                // Deliberately round-trip via an intermediate object form, mirroring what
+                // some validation libraries require, so that its cost can be measured:
+                final Object intermediate = mapper.convertValue(model, Object.class);
+                return mapper.writeValueAsBytes(intermediate);
+            } catch (JsonProcessingException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                final Object intermediate = mapper.readValue(bytes, Object.class);
+                return mapper.convertValue(intermediate, TestModel.class);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    // No validation support: this serde exists only to measure intermediate-state cost.
+    @Override
+    public ValidatorFactory validator() {
+        return super.validator();
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/JacksonSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/JacksonSerde.java
new file mode 100644
index 0000000..9137d53
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/JacksonSerde.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import java.io.IOException;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+/** Serde based on plain Jackson databind, with no schema validation. */
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class JacksonSerde extends SerdeImpl {
+
+    private ObjectMapper mapper = JsonMapper.builder().build();
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                return mapper.writeValueAsBytes(model);
+            } catch (JsonProcessingException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                return mapper.readValue(bytes, TestModel.class);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    // validator() is intentionally not overridden: the no-op default from SerdeImpl applies.
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/JustifySerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/JustifySerde.java
new file mode 100644
index 0000000..720a2ae
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/JustifySerde.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_04;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_06;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_07;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import jakarta.json.JsonReader;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+import java.util.Set;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+import org.leadpony.justify.api.JsonSchema;
+import org.leadpony.justify.api.JsonSchemaResolver;
+import org.leadpony.justify.api.JsonValidationService;
+import org.leadpony.justify.api.ProblemHandler;
+import org.leadpony.justify.api.SpecVersion;
+
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class JustifySerde extends SerdeImpl {
+
+    private JsonValidationService service;
+    private JsonSchema schema;
+    private ProblemHandler handler;
+    // NOTE(review): public, unlike the private mappers in sibling serdes — confirm this is needed.
+    public ObjectMapper mapper = JsonMapper.builder().build();
+
+    public JustifySerde() {
+        service = JsonValidationService.newInstance();
+        schema =
+                service.readSchema(
+                        new ByteArrayInputStream(
+                                TestSchemas.DRAFT_7_SCHEMA.getBytes(StandardCharsets.UTF_8)));
+        // Fail fast on any validation problem:
+        handler =
+                problems -> {
+                    throw new RuntimeException(problems.toString());
+                };
+    }
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                final byte[] bytes = mapper.writeValueAsBytes(model);
+
+                if (validate) {
+                    // Double parse seems unavoidable, even if using json-b:
+                    try (JsonReader reader =
+                            service.createReader(
+                                    new ByteArrayInputStream(bytes), schema, handler)) {
+                        reader.readValue();
+                    }
+                }
+
+                return bytes;
+            } catch (JsonProcessingException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                try (JsonReader reader =
+                        service.createReader(new ByteArrayInputStream(bytes), schema, handler)) {
+                    reader.readValue();
+                }
+
+                // Double parse seems unavoidable, even if using json-b:
+                return mapper.readValue(bytes, TestModel.class);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    private static final Map<SchemaSpec, SpecVersion> SUPPORTED =
+            Map.of(
+                    DRAFT_04, SpecVersion.DRAFT_04,
+                    DRAFT_06, SpecVersion.DRAFT_06,
+                    DRAFT_07, SpecVersion.DRAFT_07);
+
+    @Override
+    public ValidatorFactory validator() {
+        return new ValidatorFactory() {
+            @Override
+            public Set<SchemaSpec> supports() {
+                return SUPPORTED.keySet();
+            }
+
+            @Override
+            public JsonValidator prepare(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+
+                final JsonSchema parsedSchema = parseSchema(schema, spec, additionalSchemas);
+
+                return json -> {
+                    try (JsonReader reader =
+                            service.createReader(
+                                    new ByteArrayInputStream(json.getBytes(StandardCharsets.UTF_8)),
+                                    parsedSchema,
+                                    handler)) {
+                        reader.readValue();
+                    }
+                };
+            }
+
+            /** Parse schema text, resolving remote refs recursively via {@code additionalSchemas}. */
+            private JsonSchema parseSchema(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+                final JsonSchemaResolver resolver =
+                        uri -> {
+                            final String s = additionalSchemas.load(uri);
+                            return parseSchema(s, spec, additionalSchemas);
+                        };
+
+                return service.createSchemaReaderFactoryBuilder()
+                        .withDefaultSpecVersion(schemaVersion(spec))
+                        .withSchemaResolver(resolver)
+                        .withSchemaValidation(false)
+                        .build()
+                        .createSchemaReader(
+                                new ByteArrayInputStream(schema.getBytes(StandardCharsets.UTF_8)))
+                        .read();
+            }
+
+            private SpecVersion schemaVersion(final SchemaSpec spec) {
+                final SpecVersion ver = SUPPORTED.get(spec);
+                if (ver == null) {
+                    throw new IllegalArgumentException("Unsupported: " + spec);
+                }
+                return ver;
+            }
+        };
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/MedeiaSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/MedeiaSerde.java
new file mode 100644
index 0000000..51afa8c
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/MedeiaSerde.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_04;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_06;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_07;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.worldturner.medeia.api.JsonSchemaVersion;
+import com.worldturner.medeia.api.MetaSchemaSource;
+import com.worldturner.medeia.api.SchemaSource;
+import com.worldturner.medeia.api.StringSchemaSource;
+import com.worldturner.medeia.api.ValidationOptions;
+import com.worldturner.medeia.api.jackson.MedeiaJacksonApi;
+import com.worldturner.medeia.schema.validation.SchemaValidator;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class MedeiaSerde extends SerdeImpl {
+
+    private static final ValidationOptions VALIDATOR_OPTIONS =
+            new ValidationOptions().withValidateSchema(false);
+
+    private ObjectMapper mapper = JsonMapper.builder().build();
+    private MedeiaJacksonApi api = new MedeiaJacksonApi();
+    private SchemaValidator schemaValidator =
+            api.loadSchemas(
+                    List.of(new StringSchemaSource(TestSchemas.DRAFT_7_SCHEMA)), VALIDATOR_OPTIONS);
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                final ByteArrayOutputStream out = new ByteArrayOutputStream();
+                JsonGenerator generator = mapper.getFactory().createGenerator(out);
+                if (validate) {
+                    // Medeia validates while writing, by decorating the generator:
+                    generator = api.decorateJsonGenerator(schemaValidator, generator);
+                }
+                mapper.writeValue(generator, model);
+                return out.toByteArray();
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                final JsonParser parser = mapper.createParser(bytes);
+                final JsonParser validatingParser = api.decorateJsonParser(schemaValidator, parser);
+                return mapper.reader().readValue(validatingParser, TestModel.class);
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    private static final Map<SchemaSpec, JsonSchemaVersion> SUPPORTED =
+            Map.of(
+                    DRAFT_04, JsonSchemaVersion.DRAFT04,
+                    DRAFT_06, JsonSchemaVersion.DRAFT06,
+                    DRAFT_07, JsonSchemaVersion.DRAFT07);
+
+    @Override
+    public ValidatorFactory validator() {
+        return new ValidatorFactory() {
+
+            @Override
+            public Set<SchemaSpec> supports() {
+                return SUPPORTED.keySet();
+            }
+
+            @Override
+            public JsonValidator prepare(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+
+                final JsonSchemaVersion version = schemaVersion(spec);
+
+                // Doesn't seem to be a way to reactively 'load' schema on demand:
+                // Only to provide them all, which means they ALL get parsed... slow!
+                final List<SchemaSource> schemas =
+                        new ArrayList<>(additionalSchema(additionalSchemas, version));
+                schemas.add(MetaSchemaSource.Companion.forVersion(version));
+                schemas.add(0, new StringSchemaSource(schema, version));
+
+                final SchemaValidator v = api.loadSchemas(schemas, VALIDATOR_OPTIONS);
+
+                return json -> {
+                    try {
+                        final JsonParser parser =
+                                mapper.createParser(json.getBytes(StandardCharsets.UTF_8));
+                        final JsonParser validatingParser = api.decorateJsonParser(v, parser);
+                        mapper.reader().readValue(validatingParser, JsonNode.class);
+                    } catch (IOException e) {
+                        throw new RuntimeException(e);
+                    }
+                };
+            }
+
+            /** Convert the remote schemas Medeia can handle into eagerly-loaded sources. */
+            private List<SchemaSource> additionalSchema(
+                    final AdditionalSchemas additionalSchemas, final JsonSchemaVersion version) {
+                return additionalSchemas.remotes().entrySet().stream()
+                        .filter(
+                                e ->
+                                        !e.getKey().getPath().startsWith("/draft")
+                                                || e.getKey().getPath().startsWith("/draft7"))
+                        .filter(
+                                e ->
+                                        !e.getKey()
+                                                .getPath()
+                                                .endsWith("nested-absolute-ref-to-string.json"))
+                        .map(e -> new StringSchemaSource(e.getValue(), version, e.getKey()))
+                        .collect(Collectors.toList());
+            }
+
+            private JsonSchemaVersion schemaVersion(final SchemaSpec spec) {
+                final JsonSchemaVersion ver = SUPPORTED.get(spec);
+                if (ver == null) {
+                    throw new IllegalArgumentException("Unsupported: " + spec);
+                }
+                return ver;
+            }
+        };
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/NetworkNtSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/NetworkNtSerde.java
new file mode 100644
index 0000000..1455b9c
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/NetworkNtSerde.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_04;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_06;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_07;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_2019_09;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_2020_12;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.networknt.schema.JsonMetaSchema;
+import com.networknt.schema.JsonSchema;
+import com.networknt.schema.JsonSchemaFactory;
+import com.networknt.schema.SpecVersion;
+import com.networknt.schema.ValidationMessage;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class NetworkNtSerde extends SerdeImpl {
+
+    private JsonSchema schema;
+    private ObjectMapper mapper = JsonMapper.builder().build();
+
+    public NetworkNtSerde() {
+        schema =
+                JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7)
+                        .getSchema(TestSchemas.DRAFT_7_SCHEMA);
+    }
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                // NetworkNT validates JsonNode trees, so convert before writing:
+                final JsonNode node = mapper.convertValue(model, JsonNode.class);
+
+                if (validate) {
+                    final Set<ValidationMessage> errors = schema.validate(node);
+                    if (!errors.isEmpty()) {
+                        throw new RuntimeException(errors.toString());
+                    }
+                }
+
+                return mapper.writeValueAsBytes(node);
+            } catch (JsonProcessingException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                final JsonNode node = mapper.readValue(bytes, JsonNode.class);
+
+                final Set<ValidationMessage> errors = schema.validate(node);
+                if (!errors.isEmpty()) {
+                    throw new RuntimeException(errors.toString());
+                }
+
+                return mapper.convertValue(node, TestModel.class);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    private static final Map<SchemaSpec, SpecVersion.VersionFlag> SUPPORTED =
+            Map.of(
+                    DRAFT_04, SpecVersion.VersionFlag.V4,
+                    DRAFT_06, SpecVersion.VersionFlag.V6,
+                    DRAFT_07, SpecVersion.VersionFlag.V7,
+                    DRAFT_2019_09, SpecVersion.VersionFlag.V201909,
+                    DRAFT_2020_12, SpecVersion.VersionFlag.V202012);
+
+    @Override
+    public ValidatorFactory validator() {
+        return new ValidatorFactory() {
+            @Override
+            public Set<SchemaSpec> supports() {
+                return SUPPORTED.keySet();
+            }
+
+            @Override
+            public JsonValidator prepare(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+
+                final JsonSchema parsedSchema = getParsedSchema(schema, spec, additionalSchemas);
+
+                return json -> {
+                    try {
+                        final JsonNode node = mapper.readValue(json, JsonNode.class);
+
+                        final Set<ValidationMessage> errors = parsedSchema.validate(node);
+                        if (!errors.isEmpty()) {
+                            throw new RuntimeException(errors.toString());
+                        }
+                    } catch (JsonProcessingException e) {
+                        throw new RuntimeException(e);
+                    }
+                };
+            }
+
+            /** Parse the schema, resolving remote refs via the pre-loaded additional schemas. */
+            private JsonSchema getParsedSchema(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+                final JsonMetaSchema metaSchema =
+                        JsonSchemaFactory.checkVersion(schemaVersion(spec)).getInstance();
+
+                return JsonSchemaFactory.builder()
+                        .defaultMetaSchemaURI(metaSchema.getUri())
+                        .addMetaSchema(metaSchema)
+                        .uriFetcher(
+                                uri ->
+                                        new ByteArrayInputStream(
+                                                additionalSchemas.load(uri).getBytes(UTF_8)),
+                                Set.of("http", "https"))
+                        .build()
+                        .getSchema(schema);
+            }
+
+            private SpecVersion.VersionFlag schemaVersion(final SchemaSpec spec) {
+                final SpecVersion.VersionFlag ver = SUPPORTED.get(spec);
+                if (ver == null) {
+                    throw new IllegalArgumentException("Unsupported: " + spec);
+                }
+                return ver;
+            }
+        };
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/SchemaFriendSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/SchemaFriendSerde.java
new file mode 100644
index 0000000..55602af
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/SchemaFriendSerde.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_03;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_04;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_06;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_07;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_2019_09;
+import static org.creekservice.kafka.test.perf.testsuite.SchemaSpec.DRAFT_2020_12;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import java.io.IOException;
+import java.net.URI;
+import java.util.EnumSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import net.jimblackler.jsonschemafriend.Loader;
+import net.jimblackler.jsonschemafriend.Schema;
+import net.jimblackler.jsonschemafriend.SchemaStore;
+import net.jimblackler.jsonschemafriend.ValidationException;
+import net.jimblackler.jsonschemafriend.Validator;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class SchemaFriendSerde extends SerdeImpl {
+
+    private ObjectMapper mapper = JsonMapper.builder().build();
+    private Validator validator;
+    private Schema schema;
+
+    public SchemaFriendSerde() {
+        this.schema =
+                parseSchema(
+                        TestSchemas.DRAFT_7_SCHEMA,
+                        DRAFT_07,
+                        uri -> {
+                            // The benchmark schema has no remote refs:
+                            throw new UnsupportedOperationException();
+                        });
+        this.validator = new Validator(true);
+    }
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                // SchemaFriend validates plain Java object graphs, so convert first:
+                final Map<String, Object> map =
+                        mapper.convertValue(model, new TypeReference<>() {});
+
+                if (validate) {
+                    validator.validate(schema, map);
+                }
+
+                return mapper.writeValueAsBytes(map);
+            } catch (JsonProcessingException | ValidationException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                final Map<String, Object> map = mapper.readValue(bytes, new TypeReference<>() {});
+
+                validator.validate(schema, map);
+
+                return mapper.convertValue(map, TestModel.class);
+            } catch (ValidationException | IOException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    private static final Set<SchemaSpec> SUPPORTED =
+            EnumSet.of(DRAFT_2020_12, DRAFT_2019_09, DRAFT_07, DRAFT_06, DRAFT_04, DRAFT_03);
+
+    @Override
+    public ValidatorFactory validator() {
+        return new ValidatorFactory() {
+            @Override
+            public Set<SchemaSpec> supports() {
+                return SUPPORTED;
+            }
+
+            @Override
+            public JsonValidator prepare(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+                final Schema parsedSchema = parseSchema(schema, spec, additionalSchemas::load);
+                return json -> {
+                    try {
+                        final Object o = mapper.readValue(json, Object.class);
+                        validator.validate(parsedSchema, o, URI.create(""));
+                    } catch (RuntimeException e) {
+                        throw e;
+                    } catch (Exception e) {
+                        throw new RuntimeException(e);
+                    }
+                };
+            }
+        };
+    }
+
+    /**
+     * Parse schema text, defaulting {@code $schema} to the requested spec if absent, and resolving
+     * remote refs via {@code additionalSchemas}.
+     */
+    @SuppressWarnings("unchecked")
+    private Schema parseSchema(
+            final String schema,
+            final SchemaSpec spec,
+            final Function<URI, String> additionalSchemas) {
+        try {
+            final Loader loader =
+                    (uri, cacheSchema) -> {
+                        try {
+                            return additionalSchemas.apply(uri);
+                        } catch (final UnsupportedOperationException e) {
+                            throw new IOException(e);
+                        }
+                    };
+
+            final SchemaStore schemaStore = new SchemaStore(url -> url, true, loader);
+
+            final Object parsed = mapper.readValue(schema, Object.class);
+            if (parsed instanceof Map) {
+                final Map<String, Object> schemaMap = (Map<String, Object>) parsed;
+                if (!schemaMap.containsKey("$schema")) {
+                    schemaMap.put("$schema", spec.uri().toString());
+                }
+            }
+
+            return schemaStore.loadSchema(parsed, null);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/SerdeImpl.java b/src/main/java/org/creekservice/kafka/test/perf/serde/SerdeImpl.java
new file mode 100644
index 0000000..7a6b2cd
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/SerdeImpl.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import java.util.Set;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+import org.openjdk.jmh.annotations.Scope;
+import org.openjdk.jmh.annotations.State;
+
+/** Base class for a serde implementation under benchmark. */
+@State(Scope.Thread)
+public abstract class SerdeImpl {
+
+    /** @return short implementation name, i.e. the class name minus any {@code Serde} suffix. */
+    public String name() {
+        final String name = getClass().getSimpleName();
+        final int idx = name.lastIndexOf("Serde");
+        return idx < 0 ? name : name.substring(0, idx);
+    }
+
+    public abstract Serializer serializer();
+
+    public abstract Deserializer deserializer();
+
+    /**
+     * @return factory for standalone validators; the default supports no specs and never validates.
+     */
+    public ValidatorFactory validator() {
+        return new ValidatorFactory() {
+            @Override
+            public Set<SchemaSpec> supports() {
+                return Set.of();
+            }
+
+            @Override
+            public JsonValidator prepare(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+                return json -> {};
+            }
+        };
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/Serializer.java b/src/main/java/org/creekservice/kafka/test/perf/serde/Serializer.java
new file mode 100644
index 0000000..7df7eeb
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/Serializer.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import org.creekservice.kafka.test.perf.model.TestModel;
+
+/** A Kafka-like serializer interface. */
+@FunctionalInterface
+public interface Serializer {
+
+    /**
+     * Serialize the model to bytes.
+     *
+     * @param model the model to serialize
+     * @param validate flag indicating if JSON should be validated against the schema. Setting this
+     *     to {@code false} allows the serializer to generate invalid JSON, which allows testing of
+     *     negative path in the deserializer.
+     * @return the serialized bytes.
+     */
+    byte[] serialize(TestModel model, boolean validate);
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/SkemaSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/SkemaSerde.java
new file mode 100644
index 0000000..7a6f544
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/SkemaSerde.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.github.erosb.jsonsKema.JsonParser;
+import com.github.erosb.jsonsKema.JsonValue;
+import com.github.erosb.jsonsKema.SchemaLoader;
+import com.github.erosb.jsonsKema.SchemaLoaderConfig;
+import com.github.erosb.jsonsKema.ValidationFailure;
+import com.github.erosb.jsonsKema.Validator;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.EnumSet;
+import java.util.Set;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+/**
+ * SkemaSerde impl.
+ *
+ *
Unfortunately, the validator library requires the JSON to be parsed using its own parser. This
+ * requires an additional parse step on serialization and deserialization: an additional cost.
+ */
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class SkemaSerde extends SerdeImpl {
+
+ private ObjectMapper mapper = JsonMapper.builder().build();
+ private Validator validator;
+
+ public SkemaSerde() {
+ this.validator =
+ Validator.forSchema(
+ new SchemaLoader(new JsonParser(TestSchemas.DRAFT_2020_SCHEMA).parse())
+ .load());
+ }
+
+ @Override
+ public Serializer serializer() {
+ return (model, validate) -> {
+ try {
+ final String json = mapper.writeValueAsString(model);
+ if (validate) {
+ final JsonValue instance = new JsonParser(json).parse();
+ final ValidationFailure failure = validator.validate(instance);
+ if (failure != null) {
+ throw new RuntimeException(failure.getMessage());
+ }
+ }
+ return json.getBytes(UTF_8);
+ } catch (JsonProcessingException e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+
+ @Override
+ public Deserializer deserializer() {
+ return bytes -> {
+ try {
+ final JsonValue instance = new JsonParser(new String(bytes, UTF_8)).parse();
+ final ValidationFailure failure = validator.validate(instance);
+ if (failure != null) {
+ throw new RuntimeException(failure.getMessage());
+ }
+ return mapper.readValue(bytes, TestModel.class);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+
+ @Override
+ public ValidatorFactory validator() {
+ return new ValidatorFactory() {
+ @Override
+ public Set supports() {
+ return EnumSet.of(SchemaSpec.DRAFT_2020_12);
+ }
+
+ @Override
+ public JsonValidator prepare(
+ final String schema,
+ final SchemaSpec spec,
+ final AdditionalSchemas additionalSchemas) {
+ final JsonValue schemaJson = new JsonParser(schema).parse();
+ final SchemaLoader schemaLoader =
+ new SchemaLoader(
+ schemaJson,
+ new SchemaLoaderConfig(
+ uri ->
+ new ByteArrayInputStream(
+ additionalSchemas
+ .load(uri)
+ .getBytes(UTF_8))));
+ final Validator validator = Validator.forSchema(schemaLoader.load());
+
+ return json -> {
+ final JsonValue instance = new JsonParser(json).parse();
+ final ValidationFailure failure = validator.validate(instance);
+ if (failure != null) {
+ throw new RuntimeException(failure.getMessage());
+ }
+ };
+ }
+ };
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/SnowSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/SnowSerde.java
new file mode 100644
index 0000000..b582772
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/SnowSerde.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.google.gson.JsonElement;
+import com.qindesign.json.schema.Error;
+import com.qindesign.json.schema.JSON;
+import com.qindesign.json.schema.JSONPath;
+import com.qindesign.json.schema.MalformedSchemaException;
+import com.qindesign.json.schema.Option;
+import com.qindesign.json.schema.Options;
+import com.qindesign.json.schema.Specification;
+import com.qindesign.json.schema.Validator;
+import com.qindesign.json.schema.net.URI;
+import java.io.ByteArrayInputStream;
+import java.net.URL;
+import java.nio.file.Path;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class SnowSerde extends SerdeImpl {
+
+ private Validator validator;
+ private ObjectMapper mapper = JsonMapper.builder().build();
+
+ public SnowSerde() {
+ this.validator =
+ createValidator(TestSchemas.DRAFT_7_SCHEMA, SchemaSpec.DRAFT_07, Optional.empty());
+ }
+
+ @Override
+ public Serializer serializer() {
+ return (model, validate) -> {
+ try {
+ final byte[] bytes = mapper.writeValueAsBytes(model);
+
+ if (validate) {
+ validate(bytes);
+ }
+
+ return bytes;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+
+ @Override
+ public Deserializer deserializer() {
+ return bytes -> {
+ try {
+ validate(bytes);
+
+ return mapper.readValue(bytes, TestModel.class);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+
+ private void validate(final byte[] bytes) throws MalformedSchemaException {
+ final JsonElement json = parse(bytes);
+
+ final Map>> errors = new HashMap<>();
+ if (!validator.validate(json, new HashMap<>(), errors)) {
+ throw new RuntimeException(errors.toString());
+ }
+ }
+
+ private static JsonElement parse(final byte[] bytes) {
+ return JSON.parse(new ByteArrayInputStream(bytes));
+ }
+
+ @SuppressWarnings({"CollectionContainsUrl", "OptionalUsedAsFieldOrParameterType"})
+ private Validator createValidator(
+ final String schema, final SchemaSpec spec, final Optional remotesDir) {
+ try {
+ final Map knownURLs =
+ remotesDir.isPresent()
+ ? Map.of(
+ URI.parseUnchecked("http://localhost:1234"),
+ remotesDir.get().toUri().toURL())
+ : Map.of();
+
+ final Options opts = new Options();
+ opts.set(Option.FORMAT, true);
+ opts.set(Option.CONTENT, true);
+ opts.set(Option.DEFAULT_SPECIFICATION, schemaVersion(spec));
+
+ return new Validator(
+ parse(schema.getBytes(UTF_8)),
+ URI.parseUnchecked("https://something.com/"),
+ Map.of(),
+ knownURLs,
+ opts);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static final Map SUPPORTED =
+ Map.of(
+ SchemaSpec.DRAFT_06, Specification.DRAFT_06,
+ SchemaSpec.DRAFT_07, Specification.DRAFT_07,
+ SchemaSpec.DRAFT_2019_09, Specification.DRAFT_2019_09);
+
+ private Specification schemaVersion(final SchemaSpec spec) {
+ final Specification ver = SUPPORTED.get(spec);
+ if (ver == null) {
+ throw new IllegalArgumentException("Unsupported: " + spec);
+ }
+ return ver;
+ }
+
+ @Override
+ public ValidatorFactory validator() {
+ return new ValidatorFactory() {
+
+ @Override
+ public Set supports() {
+ return SUPPORTED.keySet();
+ }
+
+ @Override
+ public JsonValidator prepare(
+ final String schema,
+ final SchemaSpec spec,
+ final AdditionalSchemas additionalSchemas) {
+
+ final Validator validator =
+ createValidator(schema, spec, Optional.of(additionalSchemas.remotesDir()));
+
+ return json -> {
+ try {
+ final JsonElement toValidate = parse(json.getBytes(UTF_8));
+ final Map>> errors = new HashMap<>();
+ if (!validator.validate(toValidate, new HashMap<>(), errors)) {
+ throw new RuntimeException(errors.toString());
+ }
+ } catch (MalformedSchemaException e) {
+ throw new RuntimeException(e);
+ }
+ };
+ }
+ };
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/serde/VertxSerde.java b/src/main/java/org/creekservice/kafka/test/perf/serde/VertxSerde.java
new file mode 100644
index 0000000..6b87f1d
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/serde/VertxSerde.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.serde;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import io.vertx.core.json.Json;
+import io.vertx.core.json.JsonObject;
+import io.vertx.json.schema.Draft;
+import io.vertx.json.schema.JsonSchema;
+import io.vertx.json.schema.JsonSchemaOptions;
+import io.vertx.json.schema.OutputFormat;
+import io.vertx.json.schema.OutputUnit;
+import io.vertx.json.schema.Validator;
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+import org.creekservice.kafka.test.perf.TestSchemas;
+import org.creekservice.kafka.test.perf.model.TestModel;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory;
+
+@SuppressWarnings("FieldMayBeFinal") // not final to avoid folding.
+public class VertxSerde extends SerdeImpl {
+
+    private ObjectMapper mapper = JsonMapper.builder().build();
+    private Validator validator;
+
+    public VertxSerde() {
+        this.validator =
+                Validator.create(
+                        JsonSchema.of(new JsonObject(TestSchemas.DRAFT_7_SCHEMA)),
+                        new JsonSchemaOptions()
+                                .setDraft(Draft.DRAFT7)
+                                .setBaseUri("https://something.io")
+                                .setOutputFormat(OutputFormat.Basic));
+    }
+
+    @Override
+    public Serializer serializer() {
+        return (model, validate) -> {
+            try {
+                final Map<String, Object> map =
+                        mapper.convertValue(model, new TypeReference<>() {});
+
+                if (validate) {
+                    final OutputUnit result = validator.validate(new JsonObject(map));
+                    if (!result.getValid()) {
+                        throw new RuntimeException(result.toString());
+                    }
+                }
+
+                return mapper.writeValueAsBytes(map);
+            } catch (JsonProcessingException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    @Override
+    public Deserializer deserializer() {
+        return bytes -> {
+            try {
+                final Map<String, Object> map = mapper.readValue(bytes, new TypeReference<>() {});
+
+                final OutputUnit result = validator.validate(new JsonObject(map));
+                if (!result.getValid()) {
+                    throw new RuntimeException(result.toString());
+                }
+
+                return mapper.convertValue(map, TestModel.class);
+            } catch (IOException e) {
+                throw new RuntimeException(e);
+            }
+        };
+    }
+
+    private static final Map<SchemaSpec, Draft> SUPPORTED =
+            Map.of(
+                    SchemaSpec.DRAFT_04, Draft.DRAFT4,
+                    SchemaSpec.DRAFT_07, Draft.DRAFT7,
+                    SchemaSpec.DRAFT_2019_09, Draft.DRAFT201909,
+                    SchemaSpec.DRAFT_2020_12, Draft.DRAFT202012);
+
+    @Override
+    public ValidatorFactory validator() {
+        return new ValidatorFactory() {
+            @Override
+            public Set<SchemaSpec> supports() {
+                return SUPPORTED.keySet();
+            }
+
+            @Override
+            public JsonValidator prepare(
+                    final String schema,
+                    final SchemaSpec spec,
+                    final AdditionalSchemas additionalSchemas) {
+                final Object decodedSchema = Json.decodeValue(schema);
+
+                final JsonSchema parsedSchema =
+                        decodedSchema instanceof JsonObject
+                                ? JsonSchema.of((JsonObject) decodedSchema)
+                                : JsonSchema.of((boolean) decodedSchema);
+
+                // Note: doesn't seem to be a way to provide additional schemas
+                final Validator validator =
+                        Validator.create(
+                                parsedSchema,
+                                new JsonSchemaOptions()
+                                        .setDraft(schemaVersion(spec))
+                                        .setBaseUri("https://something.com")
+                                        .setOutputFormat(OutputFormat.Basic));
+
+                return json -> {
+                    final OutputUnit result = validator.validate(Json.decodeValue(json));
+                    if (!result.getValid()) {
+                        throw new RuntimeException(result.toString());
+                    }
+                };
+            }
+
+            private Draft schemaVersion(final SchemaSpec spec) {
+                final Draft ver = SUPPORTED.get(spec);
+                if (ver == null) {
+                    throw new IllegalArgumentException("Unsupported: " + spec);
+                }
+                return ver;
+            }
+        };
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonSchemaTestSuite.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonSchemaTestSuite.java
new file mode 100644
index 0000000..d573c37
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonSchemaTestSuite.java
@@ -0,0 +1,314 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.util.Objects.requireNonNull;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Path;
+import java.time.Duration;
+import java.time.Instant;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import org.creekservice.kafka.test.perf.testsuite.ValidatorFactory.JsonValidator;
+import org.creekservice.kafka.test.perf.util.Executable;
+
+public final class JsonSchemaTestSuite {
+
+    private final List<SpecTestSuites> tests;
+    private final ValidatorFactory.AdditionalSchemas additionalSchemas;
+
+    public JsonSchemaTestSuite(
+            final Collection<SpecTestSuites> tests,
+            final Map<URI, String> remotes,
+            final Path remotesDir) {
+        this.tests = List.copyOf(requireNonNull(tests, "tests"));
+        this.additionalSchemas = new Additional(remotes, remotesDir);
+    }
+
+    public interface TestPredicate {
+        default boolean test(SchemaSpec spec) {
+            return true;
+        }
+
+        default boolean test(TestSuite testSuite) {
+            return true;
+        }
+
+        default boolean test(TestCase testCase) {
+            return true;
+        }
+
+        TestPredicate ALL = new TestPredicate() {};
+    }
+
+    public interface Runner {
+        Result run(Predicate<SchemaSpec> spec);
+    }
+
+    public Runner prepare(
+            final ValidatorFactory validatorFactory, final TestPredicate testPredicate) {
+        final Map<SchemaSpec, Executable<SpecResult>> prepared =
+                tests.stream()
+                        .filter(suites -> testPredicate.test(suites.spec()))
+                        .filter(suites -> validatorFactory.supports().contains(suites.spec()))
+                        .collect(
+                                Collectors.toMap(
+                                        SpecTestSuites::spec,
+                                        suites ->
+                                                prepareSpecSuites(
+                                                        suites, validatorFactory, testPredicate)));
+
+        return specPredicate -> {
+            final Instant start = Instant.now();
+
+            final List<SpecResult> results =
+                    prepared.entrySet().stream()
+                            .filter(e -> specPredicate.test(e.getKey()))
+                            .map(Map.Entry::getValue)
+                            .map(Executable::exec)
+                            .collect(Collectors.toList());
+
+            return new Result(Duration.between(start, Instant.now()), results);
+        };
+    }
+
+    private Executable<SpecResult> prepareSpecSuites(
+            final SpecTestSuites specSuites,
+            final ValidatorFactory validatorFactory,
+            final TestPredicate testPredicate) {
+        final List<Executable<List<TestResult>>> prepared =
+                specSuites.testSuites().stream()
+                        .filter(testPredicate::test)
+                        .map(
+                                suite ->
+                                        prepareSuite(
+                                                specSuites.spec(),
+                                                suite,
+                                                validatorFactory,
+                                                testPredicate))
+                        .collect(Collectors.toList());
+
+        return () -> {
+            final List<TestResult> results =
+                    prepared.stream()
+                            .map(Executable::exec)
+                            .flatMap(List::stream)
+                            .collect(Collectors.toList());
+
+            return new SpecResult(specSuites.spec(), results);
+        };
+    }
+
+    private Executable<List<TestResult>> prepareSuite(
+            final SchemaSpec spec,
+            final TestSuite suite,
+            final ValidatorFactory validatorFactory,
+            final TestPredicate testPredicate) {
+
+        final JsonValidator validator = prepareValidator(spec, suite, validatorFactory);
+
+        return () ->
+                suite.tests().stream()
+                        .filter(testPredicate::test)
+                        .map(test -> runTest(validator, test, suite))
+                        .collect(Collectors.toList());
+    }
+
+    private JsonValidator prepareValidator(
+            final SchemaSpec spec, final TestSuite suite, final ValidatorFactory validatorFactory) {
+        try {
+            return validatorFactory.prepare(suite.schema(), spec, additionalSchemas);
+        } catch (final Throwable e) {
+            return json -> {
+                throw e;
+            };
+        }
+    }
+
+    private TestResult runTest(
+            final JsonValidator validator, final TestCase test, final TestSuite suite) {
+        try {
+            validator.validate(test.getData());
+            return test.valid()
+                    ? TestResult.pass(test, suite)
+                    : TestResult.fail(test, suite, "Passed when it should have failed");
+        } catch (final Exception e) {
+            return test.valid()
+                    ? TestResult.fail(test, suite, e.getMessage() + " ")
+                    : TestResult.pass(test, suite);
+        } catch (final Throwable t) {
+            return TestResult.error(test, suite, t);
+        }
+    }
+
+    @SuppressWarnings("OptionalUsedAsFieldOrParameterType")
+    public static final class TestResult {
+        private final TestCase test;
+        private final TestSuite suite;
+        private final Optional<Throwable> error;
+        private final Optional<String> failure;
+
+        public static TestResult pass(final TestCase test, final TestSuite suite) {
+            return new TestResult(test, suite, Optional.empty(), Optional.empty());
+        }
+
+        public static TestResult fail(
+                final TestCase test, final TestSuite suite, final String failure) {
+            if (failure.isEmpty()) {
+                throw new IllegalArgumentException("failure message must be supplied");
+            }
+            return new TestResult(test, suite, Optional.empty(), Optional.of(failure));
+        }
+
+        public static TestResult error(
+                final TestCase test, final TestSuite suite, final Throwable e) {
+            return new TestResult(test, suite, Optional.of(e), Optional.empty());
+        }
+
+        private TestResult(
+                final TestCase test,
+                final TestSuite suite,
+                final Optional<Throwable> error,
+                final Optional<String> failure) {
+            this.test = requireNonNull(test, "test");
+            this.suite = requireNonNull(suite, "suite");
+            this.error = requireNonNull(error, "error");
+            this.failure = requireNonNull(failure, "failure");
+        }
+
+        public boolean error() {
+            return error.isPresent();
+        }
+
+        public boolean failed() {
+            return failure.isPresent();
+        }
+
+        public boolean optional() {
+            return suite.optional();
+        }
+
+        public TestSuite suite() {
+            return suite;
+        }
+
+        public TestCase test() {
+            return test;
+        }
+    }
+
+    private static final class SpecResult {
+        private final SchemaSpec spec;
+        private final List<TestResult> results;
+
+        SpecResult(final SchemaSpec spec, final List<TestResult> results) {
+            this.spec = requireNonNull(spec, "spec");
+            this.results = List.copyOf(requireNonNull(results, "results"));
+        }
+
+        public void visit(final Visitor visitor) {
+            results.forEach(r -> visitor.accept(spec, r));
+        }
+
+        public interface Visitor {
+
+            void accept(SchemaSpec spec, TestResult result);
+        }
+    }
+
+    public static final class Result {
+
+        private final Duration duration;
+        private final List<SpecResult> results;
+
+        public Result(final Duration duration, final List<SpecResult> results) {
+            this.duration = requireNonNull(duration, "duration");
+            this.results = List.copyOf(requireNonNull(results, "results"));
+        }
+
+        public void visit(final Visitor visitor) {
+            results.forEach(r -> r.visit(visitor::accept));
+        }
+
+        public Duration duration() {
+            return duration;
+        }
+
+        public interface Visitor {
+
+            void accept(SchemaSpec spec, TestResult result);
+        }
+    }
+
+    private static final class Additional implements ValidatorFactory.AdditionalSchemas {
+        private final Map<URI, String> remotes;
+        private final Path remotesDir;
+
+        Additional(final Map<URI, String> remotes, final Path remotesDir) {
+            this.remotes = Map.copyOf(remotes);
+            this.remotesDir = requireNonNull(remotesDir, "remotesDir");
+        }
+
+        @Override
+        public String load(final URI uri) {
+            if (!uri.getScheme().startsWith("http")) {
+                throw new UnsupportedOperationException("Unsupported schema in: " + uri);
+            }
+            final URI normalised = normalize(uri);
+
+            final String remote = remotes.get(normalised);
+            if (remote != null) {
+                return remote;
+            }
+
+            return SchemaSpec.contentFromUri(uri)
+                    .orElseThrow(
+                            () ->
+                                    new UnsupportedOperationException(
+                                            "Loading of remote content disabled: " + uri));
+        }
+
+        @Override
+        public Map<URI, String> remotes() {
+            return Map.copyOf(remotes);
+        }
+
+        @Override
+        public Path remotesDir() {
+            return remotesDir;
+        }
+
+        private static URI normalize(final URI uri) {
+            try {
+                return new URI(
+                        uri.getScheme(),
+                        uri.getAuthority(),
+                        uri.getPath(),
+                        uri.getRawQuery(),
+                        null);
+            } catch (URISyntaxException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java
new file mode 100644
index 0000000..c5e8bd8
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/JsonTestSuiteMain.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.util.stream.Collectors.toMap;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import org.creekservice.kafka.test.perf.serde.EveritSerde;
+import org.creekservice.kafka.test.perf.serde.JustifySerde;
+import org.creekservice.kafka.test.perf.serde.MedeiaSerde;
+import org.creekservice.kafka.test.perf.serde.NetworkNtSerde;
+import org.creekservice.kafka.test.perf.serde.SchemaFriendSerde;
+import org.creekservice.kafka.test.perf.serde.SerdeImpl;
+import org.creekservice.kafka.test.perf.serde.SkemaSerde;
+import org.creekservice.kafka.test.perf.serde.SnowSerde;
+import org.creekservice.kafka.test.perf.serde.VertxSerde;
+import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite.Result;
+import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite.TestPredicate;
+import org.creekservice.kafka.test.perf.testsuite.output.PerDraftSummary;
+import org.creekservice.kafka.test.perf.testsuite.output.Summary;
+
+public final class JsonTestSuiteMain {
+
+    private static final List<SerdeImpl> IMPLS =
+            List.of(
+                    new EveritSerde(),
+                    new JustifySerde(),
+                    new MedeiaSerde(),
+                    new NetworkNtSerde(),
+                    new SchemaFriendSerde(),
+                    new SkemaSerde(),
+                    new SnowSerde(),
+                    new VertxSerde());
+
+    // Increase locally to allow for meaningful profiling:
+    private static final int ITERATIONS = 1;
+
+    private JsonTestSuiteMain() {}
+
+    @SuppressFBWarnings("PATH_TRAVERSAL_IN")
+    public static void main(final String... args) {
+        if (args.length != 1) {
+            throw new IllegalArgumentException(
+                    "Invoke with exactly one argument: the path to the root directory containing"
+                            + " the JSON test suite from"
+                            + " https://github.com/json-schema-org/JSON-Schema-Test-Suite.");
+        }
+
+        final JsonSchemaTestSuite testSuite =
+                new TestSuiteLoader(path -> true).load(Paths.get(args[0]));
+
+        final Map<SerdeImpl, JsonSchemaTestSuite.Runner> prepared =
+                IMPLS.stream()
+                        .collect(
+                                toMap(
+                                        Function.identity(),
+                                        impl ->
+                                                testSuite.prepare(
+                                                        impl.validator(), TestPredicate.ALL)));
+
+        final Map<SerdeImpl, Result> results = new HashMap<>();
+        for (int i = 0; i < ITERATIONS; i++) {
+            for (final Map.Entry<SerdeImpl, JsonSchemaTestSuite.Runner> e : prepared.entrySet()) {
+                results.put(e.getKey(), e.getValue().run(spec -> true));
+            }
+        }
+
+        outputResults(results);
+    }
+
+    private static void outputResults(final Map<SerdeImpl, Result> results) {
+
+        System.out.println(new PerDraftSummary(results));
+
+        System.out.println();
+
+        System.out.println(new Summary(results));
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/SchemaSpec.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/SchemaSpec.java
new file mode 100644
index 0000000..dcaedff
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/SchemaSpec.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.toMap;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.net.URI;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Scanner;
+import java.util.Set;
+import java.util.function.Function;
+
+public enum SchemaSpec {
+    DRAFT_03("draft3", "http://json-schema.org/draft-03/schema#", Set.of()),
+    DRAFT_04("draft4", "http://json-schema.org/draft-04/schema#", Set.of()),
+    DRAFT_06("draft6", "http://json-schema.org/draft-06/schema#", Set.of()),
+    DRAFT_07("draft7", "http://json-schema.org/draft-07/schema#", Set.of()),
+    DRAFT_2019_09(
+            "draft2019-09",
+            "https://json-schema.org/draft/2019-09/schema",
+            Set.of(
+                    "https://json-schema.org/draft/2019-09/meta/validation",
+                    "https://json-schema.org/draft/2019-09/meta/core",
+                    "https://json-schema.org/draft/2019-09/meta/applicator",
+                    "https://json-schema.org/draft/2019-09/meta/meta-data",
+                    "https://json-schema.org/draft/2019-09/meta/format",
+                    "https://json-schema.org/draft/2019-09/meta/content")),
+    DRAFT_2020_12(
+            "draft2020-12",
+            "https://json-schema.org/draft/2020-12/schema",
+            Set.of(
+                    "https://json-schema.org/draft/2020-12/meta/validation",
+                    "https://json-schema.org/draft/2020-12/meta/core",
+                    "https://json-schema.org/draft/2020-12/meta/applicator",
+                    "https://json-schema.org/draft/2020-12/meta/meta-data",
+                    "https://json-schema.org/draft/2020-12/meta/content",
+                    "https://json-schema.org/draft/2020-12/meta/format-annotation",
+                    "https://json-schema.org/draft/2020-12/meta/unevaluated"));
+
+    private final String dirName;
+    private final URI uri;
+    private final String content;
+    private final Map<URI, String> additional;
+
+    SchemaSpec(final String dirName, final String uri, final Set<String> additional) {
+        this.dirName = requireNonNull(dirName, "dirName");
+        this.uri = URI.create(uri);
+        this.content = loadContent(this.uri);
+        this.additional =
+                additional.stream()
+                        .map(URI::create)
+                        .collect(toMap(Function.identity(), SchemaSpec::loadContent));
+    }
+
+    public String dirName() {
+        return dirName;
+    }
+
+    public URI uri() {
+        return uri;
+    }
+
+    public static Optional<SchemaSpec> fromDir(final String dirName) {
+        return Arrays.stream(values()).filter(spec -> spec.dirName.equals(dirName)).findAny();
+    }
+
+    public static Optional<String> contentFromUri(final URI uri) {
+        return Arrays.stream(values())
+                .map(spec -> spec.getContentFromUri(uri))
+                .flatMap(Optional::stream)
+                .findAny();
+    }
+
+    private Optional<String> getContentFromUri(final URI uri) {
+        final URI normalized = normalize(uri);
+        if (normalize(this.uri).equals(normalized)) {
+            return Optional.of(content);
+        }
+        final String content = additional.get(normalized);
+        return content == null ? Optional.empty() : Optional.of(content);
+    }
+
+    private static URI normalize(final URI uri) {
+        final String uriString = uri.toString();
+        if (uriString.endsWith("#")) {
+            return URI.create(uriString.substring(0, uriString.length() - "#".length()));
+        }
+        return uri;
+    }
+
+    @SuppressFBWarnings("URLCONNECTION_SSRF_FD")
+    private static String loadContent(final URI uri) {
+        try (Scanner scanner = new Scanner(uri.toURL().openStream(), StandardCharsets.UTF_8)) {
+            scanner.useDelimiter("\\A");
+            return scanner.hasNext() ? scanner.next() : "";
+        } catch (IOException e) {
+            throw new UncheckedIOException(e);
+        }
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/SpecTestSuites.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/SpecTestSuites.java
new file mode 100644
index 0000000..653649b
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/SpecTestSuites.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.util.Objects.requireNonNull;
+
+import java.util.Collection;
+import java.util.List;
+
+public final class SpecTestSuites {
+    private final SchemaSpec spec;
+    private final List<TestSuite> suites;
+
+    public SpecTestSuites(final SchemaSpec spec, final Collection<TestSuite> suites) {
+        this.spec = requireNonNull(spec, "spec");
+        this.suites = List.copyOf(requireNonNull(suites, "suites"));
+    }
+
+    public SchemaSpec spec() {
+        return spec;
+    }
+
+    public List<TestSuite> testSuites() {
+        return suites;
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestCase.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestCase.java
new file mode 100644
index 0000000..8110380
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestCase.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.util.Objects.requireNonNull;
+
+import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonNode;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.Optional;
+
+@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
+public final class TestCase {
+
+    private final String description;
+    private final String data;
+    private final boolean valid;
+    private final String comment;
+    private final Path suiteFilePath;
+
+    public TestCase(
+            @JsonProperty(value = "description", required = true) final String description,
+            @JsonProperty(value = "data", required = true) final JsonNode data,
+            @JsonProperty(value = "valid", required = true) final boolean valid,
+            @JsonProperty(value = "comment") final Optional<String> comment,
+            @JacksonInject("suiteFilePath") final Path suiteFilePath) {
+        try {
+            this.description = requireNonNull(description, "description");
+            this.data = TestSuiteMapper.MAPPER.writeValueAsString(requireNonNull(data, "data"));
+            this.valid = valid;
+            this.comment = requireNonNull(comment, "comment").orElse("");
+            this.suiteFilePath = requireNonNull(suiteFilePath, "suiteFilePath");
+        } catch (final IOException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public String description() {
+        return description;
+    }
+
+    public String getData() {
+        return data;
+    }
+
+    public boolean valid() {
+        return valid;
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuite.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuite.java
new file mode 100644
index 0000000..abb6bc7
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuite.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.util.Objects.requireNonNull;
+
+import com.fasterxml.jackson.annotation.JacksonInject;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonNode;
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.util.List;
+import java.util.Optional;
+
+@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
+public final class TestSuite {
+
+ private final String description;
+ private final String schema;
+ private final List tests;
+ private final String comment;
+ private final Path suiteFilePath;
+ private final boolean optional;
+
+ @SuppressFBWarnings(value = "NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE", justification = "False +")
+ public TestSuite(
+ @JsonProperty(value = "description", required = true) final String description,
+ @JsonProperty(value = "schema", required = true) final JsonNode schema,
+ @JsonProperty(value = "tests", required = true) final List tests,
+ @JsonProperty(value = "comment") final Optional comment,
+ @JacksonInject("suiteFilePath") final Path suiteFilePath) {
+ try {
+ this.description = requireNonNull(description, "description");
+ this.schema =
+ TestSuiteMapper.MAPPER.writeValueAsString(requireNonNull(schema, "schema"));
+ this.tests = List.copyOf(requireNonNull(tests, "tests"));
+ this.comment = requireNonNull(comment, "comment").orElse("");
+ this.suiteFilePath = requireNonNull(suiteFilePath, "suiteFilePath");
+ this.optional =
+ suiteFilePath.getParent() != null
+ && suiteFilePath.getParent().toString().contains("/optional");
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public String description() {
+ return description;
+ }
+
+ public String comment() {
+ return comment;
+ }
+
+ public String schema() {
+ return schema;
+ }
+
+ public List tests() {
+ return List.copyOf(tests);
+ }
+
+ public Path filePath() {
+ return suiteFilePath;
+ }
+
+ public boolean optional() {
+ return optional;
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteLoader.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteLoader.java
new file mode 100644
index 0000000..d0aec30
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteLoader.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.toList;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.InjectableValues;
+import java.io.IOException;
+import java.net.URI;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+public final class TestSuiteLoader {
+
+ private static final Path OPTIONAL = Paths.get("optional");
+ private static final Path FORMAT = Paths.get("format");
+
+ private final Predicate super Path> userPredicate;
+
+ /**
+ * @param predicate predicate to control which test files to load.
+ * @param funcTest {@code true} if running functional vs perf test.
+ */
+ public TestSuiteLoader(final Predicate super Path> predicate) {
+ this.userPredicate = requireNonNull(predicate);
+ }
+
+ public JsonSchemaTestSuite load(final Path rootDir) {
+ if (!Files.exists(rootDir)) {
+ throw new RuntimeException(
+ "rootDir does not exist: "
+ + rootDir
+ + System.lineSeparator()
+ + "Do you need to run the clone-json-schema-test-suite Gradle task to"
+ + " clone the test repo?");
+ }
+
+ if (!Files.exists(rootDir.resolve("test-schema.json"))) {
+ throw new RuntimeException("rootDir does not contain test suites: " + rootDir);
+ }
+
+ final Path remotesDir = rootDir.resolve("remotes");
+ final Map remotes = loadRemotes(remotesDir);
+
+ try (Stream specs = Files.list(rootDir.resolve("tests"))) {
+ final List suites =
+ specs.filter(
+ testDir ->
+ SchemaSpec.fromDir(testDir.getFileName().toString())
+ .isPresent())
+ .map(
+ testDir ->
+ new SpecTestSuites(
+ SchemaSpec.fromDir(
+ testDir.getFileName()
+ .toString())
+ .orElseThrow(),
+ loadSuiteFromSpecDir(testDir)))
+ .sorted(Comparator.comparing(s -> s.spec().name()))
+ .collect(toList());
+
+ return new JsonSchemaTestSuite(suites, remotes, remotesDir);
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static Map loadRemotes(final Path remotes) {
+
+ final Function createKey =
+ path -> URI.create("http://localhost:1234/" + remotes.relativize(path));
+
+ final Function readContent =
+ path -> {
+ try {
+ return Files.readString(path, UTF_8);
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to read file: " + path, e);
+ }
+ };
+
+ try (Stream walk = Files.walk(remotes)) {
+ return walk.filter(Files::isRegularFile)
+ .filter(path -> path.toString().endsWith(".json"))
+ .collect(Collectors.toMap(createKey, readContent));
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public List loadSuiteFromSpecDir(final Path testDir) {
+ final List suites = new ArrayList<>();
+
+ try (Stream s = Files.list(testDir)) {
+ s.filter(Files::isRegularFile)
+ .filter(path -> path.toString().endsWith(".json"))
+ .filter(userPredicate)
+ .map(TestSuiteLoader::loadSuites)
+ .forEach(suites::addAll);
+ } catch (final IOException e) {
+ throw new RuntimeException(e);
+ }
+
+ if (!testDir.endsWith(OPTIONAL) && Files.isDirectory(testDir.resolve(OPTIONAL))) {
+ suites.addAll(loadSuiteFromSpecDir(testDir.resolve(OPTIONAL)));
+ }
+
+ if (!testDir.endsWith(FORMAT) && Files.isDirectory(testDir.resolve(FORMAT))) {
+ suites.addAll(loadSuiteFromSpecDir(testDir.resolve(FORMAT)));
+ }
+
+ return suites;
+ }
+
+ private static List loadSuites(final Path suiteFile) {
+ try {
+ return TestSuiteMapper.MAPPER
+ .readerFor(new TypeReference>() {})
+ .with(new InjectableValues.Std().addValue("suiteFilePath", suiteFile))
+ .readValue(suiteFile.toFile());
+ } catch (final Exception e) {
+ throw new RuntimeException("Failed to parse test suite: " + suiteFile, e);
+ }
+ }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteMapper.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteMapper.java
new file mode 100644
index 0000000..03cf984
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/TestSuiteMapper.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import com.fasterxml.jackson.datatype.jdk8.Jdk8Module;
+
+/** Shared Jackson mapper used to (de)serialise the JSON-Schema-Test-Suite files. */
+public final class TestSuiteMapper {
+
+    // Single shared instance: building a mapper is expensive and JsonMapper is thread-safe.
+    // - FAIL_ON_UNKNOWN_PROPERTIES disabled: suite files contain fields the model ignores.
+    // - Jdk8Module: supports Optional-typed constructor parameters, e.g. the 'comment' field.
+    public static final JsonMapper MAPPER =
+            JsonMapper.builder()
+                    .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
+                    .addModule(new Jdk8Module())
+                    .build();
+
+    // Static utility holder: not instantiable.
+    private TestSuiteMapper() {}
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/ValidatorFactory.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/ValidatorFactory.java
new file mode 100644
index 0000000..2e90d53
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/ValidatorFactory.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite;
+
+import java.net.URI;
+import java.nio.file.Path;
+import java.util.Map;
+import java.util.Set;
+
+/** API implemented per validator library, allowing each to be driven by the test suite. */
+public interface ValidatorFactory {
+
+    /**
+     * @return the set of supported specs.
+     */
+    Set<SchemaSpec> supports();
+
+    /**
+     * @param schema the schema to validate with
+     * @param spec the spec of the schema
+     * @param additionalSchemas accessor to meta-schemas and JSON-Schema-Test-Suite 'remote'
+     *     schemas.
+     * @return a validator ready to check documents against {@code schema}.
+     * @throws RuntimeException on failure
+     */
+    JsonValidator prepare(String schema, SchemaSpec spec, AdditionalSchemas additionalSchemas);
+
+    interface JsonValidator {
+        /**
+         * @param json the JSON to validate
+         * @throws RuntimeException if the document fails validation.
+         */
+        void validate(String json);
+    }
+
+    /**
+     * Interface for impls to use to load meta-schemas and JSON-Schema-Test-Suite 'remote' schemas,
+     * without IO operations, (Which would mess with performance results).
+     */
+    interface AdditionalSchemas {
+
+        /**
+         * Load a remote schema.
+         *
+         * @param uri the schema id to load.
+         * @return the schema content
+         * @throws RuntimeException on unknown schema.
+         */
+        default String load(String uri) {
+            return load(URI.create(uri));
+        }
+
+        /**
+         * Load a remote schema.
+         *
+         * @param uri the schema id to load.
+         * @return the schema content
+         * @throws RuntimeException on unknown schema.
+         */
+        String load(URI uri);
+
+        /**
+         * @return content of JSON-Schema-Test-Suite 'remote' schemas, keyed by schema id.
+         */
+        Map<URI, String> remotes();
+
+        /**
+         * @return location where remotes are being loaded from.
+         */
+        Path remotesDir();
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java
new file mode 100644
index 0000000..5637ac9
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/PerDraftSummary.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite.output;
+
+import static java.lang.System.lineSeparator;
+import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.toMap;
+
+import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
+import java.nio.file.Path;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.function.BinaryOperator;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.creekservice.api.test.util.TestPaths;
+import org.creekservice.kafka.test.perf.serde.SerdeImpl;
+import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.util.Table;
+import org.jetbrains.annotations.NotNull;
+
+/** Per-draft, per-implementation breakdown of test-suite results, rendered as tables. */
+public final class PerDraftSummary {
+
+    public static final Path ROOT_DIR =
+            TestPaths.moduleRoot("json-schema-validation-comparison")
+                    .resolve("build/json-schema-test-suite/tests");
+
+    /** One result table per (spec, impl) pair, sorted by spec then impl name. */
+    private final Map<Key, Table> results;
+
+    /**
+     * @param results the per-implementation test-suite results to summarise.
+     */
+    public PerDraftSummary(final Map<SerdeImpl, JsonSchemaTestSuite.Result> results) {
+        this.results =
+                results.entrySet().stream()
+                        .flatMap(e -> buildResults(e.getKey(), e.getValue()))
+                        .collect(
+                                toMap(
+                                        Map.Entry::getKey,
+                                        e -> e.getValue().build(),
+                                        throwOnDuplicate(),
+                                        TreeMap::new));
+    }
+
+    @Override
+    public String toString() {
+        return results.entrySet().stream()
+                .map(e -> e.getKey() + lineSeparator() + e.getValue())
+                .collect(Collectors.joining(lineSeparator()));
+    }
+
+    /** Bucket one implementation's results by (spec, impl) key. */
+    private Stream<Map.Entry<Key, Builder>> buildResults(
+            final SerdeImpl impl, final JsonSchemaTestSuite.Result results) {
+        final Map<Key, Builder> output = new TreeMap<>();
+        results.visit(
+                (spec, result) -> {
+                    output.computeIfAbsent(new Key(spec, impl.name()), k -> new Builder())
+                            .add(result, spec);
+                });
+        return output.entrySet().stream();
+    }
+
+    /** Merge function for a collector whose keys should never collide. */
+    private static BinaryOperator<Table> throwOnDuplicate() {
+        return (m1, m2) -> {
+            throw new IllegalStateException("Duplicate!");
+        };
+    }
+
+    @SuppressFBWarnings("EQ_COMPARETO_USE_OBJECT_EQUALS")
+    private static final class Key implements Comparable<Key> {
+
+        private static final Comparator<Key> COMPARATOR =
+                Comparator.comparing(Key::spec).thenComparing(Key::impl);
+
+        private final SchemaSpec spec;
+        private final String impl;
+
+        private Key(final SchemaSpec spec, final String impl) {
+            this.spec = requireNonNull(spec, "spec");
+            this.impl = requireNonNull(impl, "impl");
+        }
+
+        SchemaSpec spec() {
+            return spec;
+        }
+
+        String impl() {
+            return impl;
+        }
+
+        @Override
+        public int compareTo(@NotNull final Key o) {
+            return COMPARATOR.compare(this, o);
+        }
+
+        @Override
+        public String toString() {
+            return impl + ": " + spec;
+        }
+    }
+
+    /** Simple pass/fail tally. A test passes only if it neither failed nor errored. */
+    private static class Counts {
+        private int pass;
+        private int fail;
+
+        void add(final JsonSchemaTestSuite.TestResult result) {
+            final boolean passed = !(result.error() || result.failed());
+            if (passed) {
+                pass++;
+            } else {
+                fail++;
+            }
+        }
+    }
+
+    /** Accumulates per-suite counts and renders them as a table. */
+    private static class Builder {
+
+        private final Map<Path, Counts> bySuite = new TreeMap<>();
+
+        void add(final JsonSchemaTestSuite.TestResult result, final SchemaSpec spec) {
+            final Path suitePath =
+                    ROOT_DIR.resolve(spec.dirName()).relativize(result.suite().filePath());
+            bySuite.computeIfAbsent(suitePath, k -> new Counts()).add(result);
+        }
+
+        public Table build() {
+            final Table table = new Table(List.of("suite", "pass", "fail", "total"));
+            bySuite.forEach(
+                    (suite, counts) -> {
+                        final Table.Row row = table.addRow();
+                        row.put("suite", suite);
+                        row.put("pass", counts.pass);
+                        row.put("fail", counts.fail);
+                        row.put("total", counts.pass + counts.fail);
+                    });
+            return table;
+        }
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java
new file mode 100644
index 0000000..5c1fda2
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/testsuite/output/Summary.java
@@ -0,0 +1,213 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.testsuite.output;
+
+import static java.lang.System.lineSeparator;
+import static java.util.stream.Collectors.toList;
+import static java.util.stream.Collectors.toMap;
+
+import java.text.NumberFormat;
+import java.time.Duration;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.creekservice.kafka.test.perf.serde.SerdeImpl;
+import org.creekservice.kafka.test.perf.testsuite.JsonSchemaTestSuite;
+import org.creekservice.kafka.test.perf.testsuite.SchemaSpec;
+import org.creekservice.kafka.test.perf.util.Table;
+
+/** Summarises pass/fail counts per implementation and spec, weighting required over optional. */
+public final class Summary {
+
+    /** How much weight to put in required features vs optional. */
+    private static final int REQUIRED_WEIGHT = 3;
+
+    private static final String COL_IMPL = "Impl";
+    private static final String COL_OVERALL = "Overall";
+
+    private final Table table;
+    /** Total time across all implementations' runs. */
+    private final Duration duration;
+
+    public Summary(final Map<SerdeImpl, JsonSchemaTestSuite.Result> results) {
+        this.duration =
+                results.values().stream()
+                        .map(JsonSchemaTestSuite.Result::duration)
+                        .reduce(Duration.ZERO, Duration::plus);
+        this.table = createTable(results);
+    }
+
+    @Override
+    public String toString() {
+        return table.toString()
+                + lineSeparator()
+                + lineSeparator()
+                // toSeconds(), not toSecondsPart(): runs can exceed a minute, and
+                // toSecondsPart() would silently drop the minutes component.
+                + String.format("Time: %d.%03ds", duration.toSeconds(), duration.toMillisPart());
+    }
+
+    private static Table createTable(final Map<SerdeImpl, JsonSchemaTestSuite.Result> results) {
+        final Map<String, Map<SchemaSpec, Counts>> counts =
+                results.entrySet().stream()
+                        .collect(toMap(e -> e.getKey().name(), e -> resultCounts(e.getValue())));
+
+        // Only show specs at least one implementation was tested against:
+        final List<SchemaSpec> specs =
+                Arrays.stream(SchemaSpec.values())
+                        .filter(
+                                spec ->
+                                        counts.values().stream()
+                                                .map(map -> map.get(spec))
+                                                .anyMatch(c -> c.totalTotal() > 0))
+                        .collect(toList());
+
+        final List<String> headers = specs.stream().map(SchemaSpec::name).collect(toList());
+
+        headers.add(0, COL_IMPL);
+        headers.add(1, COL_OVERALL);
+
+        final Table table = new Table(headers);
+
+        counts.forEach((impl, cs) -> populateRow(table.addRow(), impl, cs, specs));
+
+        return table;
+    }
+
+    /** Tally pass/fail counts per spec for one implementation's results. */
+    private static Map<SchemaSpec, Counts> resultCounts(final JsonSchemaTestSuite.Result result) {
+        final Map<SchemaSpec, Counts> counts = new HashMap<>();
+        Arrays.stream(SchemaSpec.values()).forEach(s -> counts.put(s, new Counts()));
+
+        result.visit((spec, r) -> counts.get(spec).add(r));
+
+        return counts;
+    }
+
+    private static void populateRow(
+            final Table.Row row,
+            final String impl,
+            final Map<SchemaSpec, Counts> specCounts,
+            final List<SchemaSpec> specs) {
+        row.put(COL_IMPL, impl);
+
+        specs.forEach(spec -> row.put(spec.name(), formatCell(specCounts.get(spec))));
+
+        final Counts overall = specCounts.values().stream().reduce(new Counts(), Counts::combine);
+        row.put(COL_OVERALL, formatCell(overall));
+    }
+
+    /** Render one cell: raw counts, percentages, then the weighted score. */
+    private static String formatCell(final Counts counts) {
+        if (counts.totalTotal() == 0) {
+            return "";
+        }
+        return "pass: r:"
+                + counts.reqPassed
+                + " o:"
+                + counts.optPassed
+                + " / fail: r:"
+                + counts.reqFail()
+                + " o:"
+                + counts.optFail()
+                + lineSeparator()
+                + "r:"
+                + counts.reqPassPct()
+                + " o:"
+                + counts.optPassPct()
+                + " / r:"
+                + counts.reqFailPct()
+                // 'o:' to match the other optional-column labels; was mislabelled ' f:'.
+                + " o:"
+                + counts.optFailPct()
+                + lineSeparator()
+                + "score: "
+                + counts.score();
+    }
+
+    /** Pass/fail tallies, split by required vs optional test cases. */
+    private static class Counts {
+
+        private int reqPassed;
+        private int reqTotal;
+        private int optPassed;
+        private int optTotal;
+
+        void add(final JsonSchemaTestSuite.TestResult result) {
+            final boolean passed = !(result.error() || result.failed());
+            if (result.optional()) {
+                optTotal++;
+                if (passed) {
+                    optPassed++;
+                }
+            } else {
+                reqTotal++;
+                if (passed) {
+                    reqPassed++;
+                }
+            }
+        }
+
+        int totalTotal() {
+            return reqTotal + optTotal;
+        }
+
+        int reqFail() {
+            return reqTotal - reqPassed;
+        }
+
+        int optFail() {
+            return optTotal - optPassed;
+        }
+
+        String reqPassPct() {
+            return percentage(reqPassed, reqTotal);
+        }
+
+        String optPassPct() {
+            return percentage(optPassed, optTotal);
+        }
+
+        String reqFailPct() {
+            return percentage(reqFail(), reqTotal);
+        }
+
+        String optFailPct() {
+            return percentage(optFail(), optTotal);
+        }
+
+        /** Weighted 0-100 score: required passes count {@code REQUIRED_WEIGHT}x optional. */
+        String score() {
+            final double reqPct = reqTotal == 0 ? 0 : ((double) reqPassed / reqTotal);
+            final double optPct = optTotal == 0 ? 0 : ((double) optPassed / optTotal);
+            final double score =
+                    100 * ((reqPct * REQUIRED_WEIGHT) + optPct) / (REQUIRED_WEIGHT + 1);
+            final NumberFormat nf = NumberFormat.getNumberInstance();
+            nf.setMinimumFractionDigits(1);
+            nf.setMaximumFractionDigits(1);
+            return nf.format(score);
+        }
+
+        static Counts combine(final Counts c0, final Counts c1) {
+            final Counts counts = new Counts();
+            counts.reqPassed = c0.reqPassed + c1.reqPassed;
+            counts.reqTotal = c0.reqTotal + c1.reqTotal;
+            counts.optPassed = c0.optPassed + c1.optPassed;
+            counts.optTotal = c0.optTotal + c1.optTotal;
+            return counts;
+        }
+
+        // Static: uses no instance state.
+        private static String percentage(final int value, final int total) {
+            final NumberFormat nf = NumberFormat.getPercentInstance();
+            nf.setMinimumFractionDigits(1);
+            nf.setMaximumFractionDigits(1);
+            return total == 0 ? nf.format(0) : nf.format(((double) value / total));
+        }
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/util/Executable.java b/src/main/java/org/creekservice/kafka/test/perf/util/Executable.java
new file mode 100644
index 0000000..364e7fd
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/util/Executable.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.util;
+
+/**
+ * A deferred operation producing a result.
+ *
+ * @param <T> the result type.
+ */
+@FunctionalInterface
+public interface Executable<T> {
+    /**
+     * @return the result of executing the operation.
+     */
+    T exec();
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/util/Logging.java b/src/main/java/org/creekservice/kafka/test/perf/util/Logging.java
new file mode 100644
index 0000000..03a0908
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/util/Logging.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.util;
+
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.LoggerContext;
+
+/** Utility to silence logging, so log output does not skew benchmark results. */
+public final class Logging {
+
+    // Static utility holder: not instantiable.
+    private Logging() {}
+
+    /** Turn off both Log4J and java.util.logging output. */
+    public static void disable() {
+        disableLog4J();
+        disableJavaUtil();
+    }
+
+    /** Set the Log4J root logger to OFF via the current logger-context configuration. */
+    private static void disableLog4J() {
+        final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+        final org.apache.logging.log4j.core.config.Configuration config = ctx.getConfiguration();
+        config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME).setLevel(Level.OFF);
+        // Propagate the changed config to existing loggers:
+        ctx.updateLoggers(config);
+    }
+
+    /** Set the java.util.logging root logger (named "") to OFF. */
+    private static void disableJavaUtil() {
+        java.util.logging.LogManager.getLogManager()
+                .getLogger("")
+                .setLevel(java.util.logging.Level.OFF);
+    }
+}
diff --git a/src/main/java/org/creekservice/kafka/test/perf/util/Table.java b/src/main/java/org/creekservice/kafka/test/perf/util/Table.java
new file mode 100644
index 0000000..b423296
--- /dev/null
+++ b/src/main/java/org/creekservice/kafka/test/perf/util/Table.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2023 Creek Contributors (https://github.com/creek-service)
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.creekservice.kafka.test.perf.util;
+
+import static java.util.Objects.requireNonNull;
+import static java.util.stream.Collectors.joining;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.BiConsumer;
+import java.util.function.BiFunction;
+import java.util.function.Consumer;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+
+public class Table {
+
+    private final List<String> headers;
+    private final List<Row> rows = new ArrayList<>();
+    /** Lazily computed column widths, keyed by header; cleared whenever rows change. */
+    private final Map<String, Integer> widths = new LinkedHashMap<>();
+
+    /**
+     * @param headers the column headers, in display order.
+     */
+    public Table(final List<String> headers) {
+        this.headers = List.copyOf(requireNonNull(headers, "headers"));
+    }
+
+    /**
+     * @return an immutable copy of the column headers, in display order.
+     */
+    public List<String> headers() {
+        return List.copyOf(headers);
+    }
+
+    /**
+     * Add a new, empty row to the table.
+     *
+     * <p>Invalidates any cached column widths.
+     *
+     * @return the new row, for population by the caller.
+     */
+    public Row addRow() {
+        final Row row = new Row(headers);
+        rows.add(row);
+        widths.clear();
+        return row;
+    }
+
+    /** Render the table as ASCII art: divider, headers, divider, rows, divider. */
+    @Override
+    public String toString() {
+        ensureWidths();
+
+        final String rowFormat =
+                widths.values().stream()
+                        .map(w -> "%-" + w + "s")
+                        .collect(joining(" | ", "| ", " |" + System.lineSeparator()));
+
+        final String divider =
+                widths.values().stream()
+                        .map(w -> "-".repeat(w + 2))
+                        .collect(joining("|", "|", "|" + System.lineSeparator()));
+
+        final StringBuilder out = new StringBuilder(divider);
+        out.append(String.format(rowFormat, headers.toArray()));
+        out.append(divider);
+        rows.forEach(row -> out.append(formattedRows(rowFormat, row)));
+        out.append(divider);
+        return out.toString();
+    }
+
+ private static String formattedRows(final String format, final Row row) {
+ final List> its =
+ row.values().stream()
+ .map(Object::toString)
+ .map(s -> Arrays.asList(s.split(System.lineSeparator())).iterator())
+ .collect(Collectors.toList());
+
+ final StringBuilder all = new StringBuilder();
+ while (its.stream().anyMatch(Iterator::hasNext)) {
+
+ final Object[] values =
+ its.stream().map(it -> it.hasNext() ? it.next() : "").toArray(String[]::new);
+
+ final String line = String.format(format, values);
+ all.append(line);
+ }
+
+ return all.toString();
+ }
+
+    /**
+     * Lazily (re)compute the display width of each column.
+     *
+     * <p>A column is as wide as its widest content: its header or any cell value. No-op when
+     * widths are already cached; mutating operations clear the cache.
+     */
+    @SuppressWarnings("DataFlowIssue")
+    private void ensureWidths() {
+        if (!widths.isEmpty()) {
+            return;
+        }
+
+        // Seed with header widths, so a column is never narrower than its title:
+        headers.forEach(h -> widths.put(h, h.length()));
+
+        rows.forEach(
+                row ->
+                        row.forEach(
+                                (header, value) ->
+                                        widths.compute(
+                                                header,
+                                                (ignored, existing) ->
+                                                        Math.max(existing, width(value)))));
+    }
+
+    /** @return the length of the longest line within the value's string form. */
+    private static int width(final Object value) {
+        int max = 0;
+        for (final String line : value.toString().split(System.lineSeparator())) {
+            max = Math.max(max, line.length());
+        }
+        return max;
+    }
+
+    /**
+     * Remove all rows matching {@code p}, invalidating cached column widths.
+     *
+     * @param p predicate returning {@code true} for rows to remove.
+     */
+    public void removeIf(final Predicate<? super Row> p) {
+        rows.removeIf(p);
+        widths.clear();
+    }
+
+    /**
+     * Sort rows by {@code c}. Column widths depend only on content, not order, so the width
+     * cache is deliberately kept.
+     *
+     * @param c the row ordering.
+     */
+    public void sort(final Comparator<? super Row> c) {
+        rows.sort(c);
+    }
+
+    /**
+     * Mutate each row in place via {@code c}, invalidating cached column widths.
+     *
+     * <p>Note: despite the name, this does not produce a new table; the consumer updates rows
+     * directly.
+     *
+     * @param c consumer that may update each row's cell values.
+     */
+    public void map(final Consumer<? super Row> c) {
+        rows.forEach(c);
+        widths.clear();
+    }
+
+ public static final class Row {
+
+        private final List<String> headers;
+        /** Cell values keyed by header; LinkedHashMap preserves column order. */
+        private final Map<String, Object> values = new LinkedHashMap<>();
+
+        /** Seed every column with an empty-string value so formatting never sees null. */
+        private Row(final List<String> headers) {
+            this.headers = List.copyOf(headers);
+            headers.forEach(header -> values.put(header, ""));
+        }
+
+        /**
+         * Set the value of the {@code header} column in this row.
+         *
+         * @param header the column to set; must be one of the table's headers.
+         * @param value the cell value; rendered via {@code toString()} when formatting.
+         */
+        public void put(final String header, final Object value) {
+            validateHeader(header);
+            values.put(header, requireNonNull(value, "value"));
+        }
+
+        /**
+         * Update the value of the {@code header} column using {@code updater}.
+         *
+         * @param header the column to update; must be one of the table's headers.
+         * @param updater invoked with the header and the current value; must not return null.
+         */
+        public void compute(
+                final String header,
+                final BiFunction<? super String, ? super Object, ?> updater) {
+            values.compute(
+                    header,
+                    (h, existing) -> {
+                        // Null means the column was never seeded, i.e. the header is unknown:
+                        if (existing == null) {
+                            validateHeader(h);
+                        }
+                        return requireNonNull(updater.apply(h, existing), "updater returned null");
+                    });
+        }
+
+ public Collection