Skip to content

Commit

Permalink
Update tutorial (#90)
Browse files Browse the repository at this point in the history
...to:

1. The latest gh-pages workflow
1. Correctly expose its aggregate descriptor
1. Include details of the ks-aggregate-api-demo
1. Remove reference to debugging Kafka brokers, as it's not currently working. (see creek-service/creek-system-test#245).
  • Loading branch information
big-andy-coates authored Mar 19, 2023
1 parent c04f738 commit 777a0b3
Show file tree
Hide file tree
Showing 13 changed files with 91 additions and 64 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/gh-pages.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@ on:
permissions:
contents: read

# Allow one concurrent deployment
# Allow one concurrent deployment, per branch
concurrency:
group: "${{ github.repository }}-pages"
cancel-in-progress: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true

jobs:
build_pages:
Expand Down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,9 @@
[![build](https://github.com/creek-service/basic-kafka-streams-demo/actions/workflows/build.yml/badge.svg)](https://github.com/creek-service/basic-kafka-streams-demo/actions/workflows/build.yml)
[![CodeQL](https://github.com/creek-service/basic-kafka-streams-demo/actions/workflows/codeql.yml/badge.svg)](https://github.com/creek-service/basic-kafka-streams-demo/actions/workflows/codeql.yml)

# Basic Kafka Streams Demo
# Basic Kafka Streams Tutorial

Repo containing the completed [Basic Kafka Streams demo](https://www.creekservice.org/basic-kafka-streams-demo)
Repo containing the completed [Basic Kafka Streams tutorial](https://www.creekservice.org/basic-kafka-streams-demo)
and associated [docs](docs/README.md).

This repository is also a template repository to enable later tutorials, that build on this one.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.creekservice.api.platform.metadata.AggregateDescriptor;
import org.creekservice.api.platform.metadata.ComponentInput;
import org.creekservice.api.platform.metadata.ComponentOutput;
import org.creekservice.api.platform.metadata.OwnedResource;

public final class BasicKafkaStreamsDemoAggregateDescriptor implements AggregateDescriptor {

Expand All @@ -41,12 +42,12 @@ public Collection<ComponentOutput> outputs() {
}

// Uncomment if needed
// private static <T extends ComponentInput> T register(final T input) {
// private static <T extends ComponentInput & OwnedResource> T register(final T input) {
// INPUTS.add(input);
// return input;
// }

private static <T extends ComponentOutput> T register(final T output) {
private static <T extends ComponentOutput & OwnedResource> T register(final T output) {
OUTPUTS.add(output);
return output;
}
Expand Down
6 changes: 6 additions & 0 deletions api/src/main/java/module-info.java
Original file line number Diff line number Diff line change
@@ -1,8 +1,14 @@
import io.github.creek.service.basic.kafka.streams.demo.api.BasicKafkaStreamsDemoAggregateDescriptor;
import org.creekservice.api.platform.metadata.ComponentDescriptor;

// Module descriptor for the aggregate's public API module.
module basic.kafka.streams.demo.api {
// Transitive: consumers of this API also need the Creek Kafka metadata types
// (topic descriptors etc.) that appear in its public signatures.
requires transitive creek.kafka.metadata;

// Public API package, usable by any module.
exports io.github.creek.service.basic.kafka.streams.demo.api;
// Internal helpers are exposed only to the aggregate's own service modules.
exports io.github.creek.service.basic.kafka.streams.demo.internal to
basic.kafka.streams.demo.services,
basic.kafka.streams.demo.service;

// Register the aggregate descriptor with the ServiceLoader so Creek
// tooling (e.g. system tests) can discover it at runtime.
provides ComponentDescriptor with
BasicKafkaStreamsDemoAggregateDescriptor;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
#
# Copyright 2022-2023 Creek Contributors (https://github.com/creek-service)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

io.github.creek.service.basic.kafka.streams.demo.api.BasicKafkaStreamsDemoAggregateDescriptor
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
package io.github.creek.service.basic.kafka.streams.demo.api;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;

import java.util.List;
import org.creekservice.api.platform.metadata.ComponentDescriptor;
import org.creekservice.api.platform.metadata.ComponentDescriptors;
import org.junit.jupiter.api.Test;

// Verifies the aggregate descriptor is discoverable via Creek's
// ServiceLoader-based component discovery, i.e. that the
// `provides ComponentDescriptor with ...` entry in module-info (and the
// matching META-INF/services file) is correctly wired up.
class BasicKafkaStreamsDemoAggregateDescriptorTest {

    @Test
    void shouldLoadDescriptor() {
        // Discovery returns every registered descriptor; ours must be among them.
        assertThat(
                ComponentDescriptors.load(),
                hasItem(instanceOf(BasicKafkaStreamsDemoAggregateDescriptor.class)));
    }
}
2 changes: 1 addition & 1 deletion docs/_config.dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ analytics:

comments:
disqus:
shortname : "mmistakes-dev"
shortname : "creek-service-dev"

sass:
style: expanded
27 changes: 22 additions & 5 deletions docs/_demo/04-service-descriptor.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,8 @@ A _service descriptor_ defines the external resources a service uses and the api
The types of resources a descriptor can reference depends on the installed [Creek extensions][creekExts].

**ProTip:** Service descriptors are accessible by other services within the aggregate, but not by those outside.
Services from other aggregates should only use the aggregate's public API defined in an [aggregate descriptor][aggDescriptor].
Services from other aggregates should only use the aggregate's public API defined in its aggregate descriptor.
More information on aggregate APIs and descriptors can be found in the [Kafka Streams: aggregate API tutorial](/ks-aggregate-api-demo/).
{: .notice--info}

This demo will use the [Kafka Streams extension][ksExt], and the `handle-occurrence-service`'s descriptor will define a
Expand All @@ -22,14 +23,17 @@ which the service will produce to.

**Note:** To keep this tutorial self-contained, the service's input topic is _owned_ by the service.
It would be more common for an upstream service or aggregate to own the topic and for the topic's
definition to be imported from there. This will be covered in a later tutorial.
definition to be imported from there.
The [Kafka Streams: aggregate API tutorial](/ks-aggregate-api-demo/) covers how to define an aggregate descriptor to allow
interacting with parts of an architecture that don't use Creek.
{: .notice--warning}

[todo]: http:// update note above with link to the tutorial on linking aggregates together.

## Define the topic resources

The aggregate template provided a shell service descriptor in the repository named `HandleOccurrenceServiceDescriptor.java`.
The aggregate template used to bootstrap the repository provided a shell service descriptor in the repository named
`HandleOccurrenceServiceDescriptor.java`.
Add the following to the class to define the service's input and output topics:

{% highlight java %}
Expand All @@ -39,6 +43,8 @@ Add the following to the class to define the service's input and output topics:

{% include_snippet class-name %}

...

{% include_snippet topic-resources %}

...
Expand All @@ -50,7 +56,19 @@ The two class constants define the input and output topics the services use.
These constants will be used later when building the Kafka Streams topology.

Each topic definition includes the topic name, the types stored in the topic's records' key and value,
and the topic config, which in this case is just the number of partitions.
and the topic config.

In this instance, the topic config defines the number of partitions and, for one topic, the retention time for
records in the topic. If no retention time was set, the cluster default would be used.

**ProTip:** Defaulting to the cluster's default topic retention time can be useful as it allows different clusters
to define different defaults. For example, development, QA and Staging environments can have much shorter times
than production.
{: .notice--info}

**ProTip:** The `TopicConfigBuilder` class, which defines the `withPartitions` and `withRetentionTime` methods
used above, is part of the Git repository. It can be customised as your use-case requires.
{: .notice--info}

The `register` method wrapping each resource descriptor ensures they are registered with the outer service descriptor.

Expand All @@ -60,5 +78,4 @@ to discover the service metadata required to run the service, pipe in inputs and

[creekExts]: https://www.creekservice.org/extensions/
[ksExt]: https://github.com/creek-service/creek-kafka
[aggDescriptor]: https://www.creekservice.org/docs/descriptors/#aggregate-descriptor
[todo]: switch about links to proper creekservice.org links once each repo publishes docs.
2 changes: 1 addition & 1 deletion docs/_demo/06-system-testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -120,7 +120,7 @@ Very briefly, the system tests work by discovering the `handle-occurrence-servic
The system tests inspect the service descriptor.

As the descriptor defines Kafka based resources, the system tests, with the help of the installed [Creek Kafka system-test extension][kafkaTestExt],
knows to start a Kafka broker and create any unowned topics.
knows to start a Kafka broker and create any unowned topics, like the `twitter.tweet.text` topic.

The service descriptor also defines the name of the service's Docker container, allowing the system tests to start the service.

Expand Down
5 changes: 3 additions & 2 deletions docs/_demo/07-code coverage.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,9 +22,10 @@ The human-readable coverage report is saved to `build/reports/jacoco/coverage/ht
With just the system tests, the test coverage is pretty good for our new service.
The only thing really missing is coverage of the `io.github.creek.service.basic.kafka.streams.demo.api` package,
and that is because we've not yet looked at utilising the aggregate descriptor defined in that package.
The aggregate descriptor defines the API for the aggregate, one abstraction level up from services, and will be covered in a later tutorial.

[todo]: which tutorial?
**Note:** The aggregate descriptor defines the API of an aggregate, one abstraction level up from services,
and will be covered in the final quick-start tutorial: [Kafka Streams: aggregate API tutorial](/ks-aggregate-api-demo/)
{: .notice--info}

In the next step, we will add unit tests. However, with good system test coverage we recommend that unit testing
is limited to testing edge cases that are hard, or impossible, to test using the system tests. Use system testing
Expand Down
38 changes: 0 additions & 38 deletions docs/_demo/09-debugging.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,44 +53,6 @@ for the expected output, allowing more time to debug the code. Learn more on thi
in the [system test plugin][systemTestOptions] documentation.
{: .notice--info}

## Debugging a 3rd-party service

Debugging your service code is way cool. But that's not the end of it. With Creek system tests you can also debug
any services started by test extensions too. For example, if any service under test references Kafka resources,
the Creek Kafka test extension will start a Kafka broker, and Creek makes debugging the Kafka broker easy:

With [AttachMe][attachMe] plugin installed, the Kafka broker can be debugged with the following steps:

1. Open the Kafka broker code in your IDE, making sure the code matches the version of the broker.
2. Create and run an `AttachMe` run configuration.
{% include figure image_path="/assets/images/creek-create-attachme-run-config.png" alt="AttacheMe run configuration" %}
3. Name the new configuration, but leave the default port at the default `7857`.
{% include figure image_path="/assets/images/creek-attachme-run-config.png" alt="AttacheMe run configuration details" %}
4. Place the required breakpoints in the Kafka broker code.
5. Run the system tests, specifying `kafka-broker` as the name of the service to debug:

```
./gradlew systemTest \
--debug-service="kafka-broker" \
--verification-timeout-seconds=9999
```

Alternatively, if your test suite starts multiple Kafka brokers because the services use multiple clusters, you can
debug a specific broker by using the _instance name_, rather than the _service name_:

```
./gradlew systemTest \
--debug-service-instance="kafka-broker-0" \
--verification-timeout-seconds=9999
```

When the system tests start the Kafka broker's Docker container, the service will attach to the debugger.
This will cause a new debug window to open and for the breakpoint to be hit.

Pretty cool, right?

[todo]: test the above and maybe add some images!

[attachMe]: https://plugins.jetbrains.com/plugin/13263-attachme
[sysTestRequirements]: https://github.com/creek-service/creek-system-test#configuring-a-service-for-debugging
[pluginRequirements]: https://github.com/creek-service/creek-system-test-gradle-plugin#dependency-management
Expand Down
9 changes: 5 additions & 4 deletions docs/_demo/11-further-reading.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,14 +6,15 @@ layout: single
---

This tutorial has given a high-level view of a lot of the core features and concepts in Creek.
The next tutorial in _quick start_ series [covers linking services together]({{ site.url | append: "/ks-connected-services-demo/" }}).
A
[planned tutorial <i class="fas fa-external-link-alt"></i>](https://github.com/creek-service/creek-kafka/issues/259){:target="_blank"}
will cover linking different aggregates.
The [next tutorial]({{ site.url | append: "/ks-connected-services-demo/" }}) in the quick-start series covers
adding a second service and linking services together.
The [third, and final, tutorial]({{ site.url | append: "/ks-aggregate-api-demo/" }}) in the series covers defining an
aggregate's API, and how to use Creek to interact with parts of a system that predate or don't use Creek.

Additional tutorials will be added over time. These can be found on the [tutorials page]({{ site.url | append: "/tutorials/" }}).

The payloads used in this tutorial were simple types like `Integer` and `String`.
Obviously, this massively limits Creek's utility and is why Creek is still in alpha release.
Work to extend this to more complex types using, schema validated, JSON serialization, will be
[starting soon <i class="fas fa-external-link-alt"></i>](https://github.com/creek-service/creek-kafka/issues/25){:target="_blank"}.
{: .notice--info}
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import static io.github.creek.service.basic.kafka.streams.demo.internal.TopicDescriptors.inputTopic;
import static io.github.creek.service.basic.kafka.streams.demo.internal.TopicDescriptors.outputTopic;
// end-snippet
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
Expand Down Expand Up @@ -49,19 +50,21 @@ public final class HandleOccurrenceServiceDescriptor implements ServiceDescripto
register(
inputTopic(
"twitter.tweet.text", // Topic name
Long.class, // Topic key type (Tweet id)
String.class, // Topic value type (Tweet text)
Long.class, // Topic key: Tweet id
String.class, // Topic value: Tweet text
withPartitions(5))); // Topic config

// Define the output topic, again conceptually owned by this service:
public static final OwnedKafkaTopicOutput<String, Integer> TweetHandleUsageStream =
register(outputTopic(
"twitter.handle.usage",
String.class, // (Twitter handle)
Integer.class, // (Usage count)
withPartitions(6)));
String.class, // Twitter handle
Integer.class, // Usage count
withPartitions(6)
.withRetentionTime(Duration.ofHours(12))
));
// end-snippet
// formatting:on
// formatting:on

public HandleOccurrenceServiceDescriptor() {}

Expand Down

0 comments on commit 777a0b3

Please sign in to comment.