diff --git a/docs/index.html b/docs/index.html index f3f9e258..8f9392c4 100644 --- a/docs/index.html +++ b/docs/index.html @@ -190,7 +190,7 @@

ClientId("producer-example") ) // producer: Resource[IO, ProducerApi[IO, org.apache.avro.generic.GenericRecord, org.apache.avro.generic.GenericRecord]] = Allocate( -// resource = cats.effect.kernel.Resource$$$Lambda$10750/0x0000000102f43040@2d283654 +// resource = cats.effect.kernel.Resource$$$Lambda$10734/0x0000000102d1b040@3f667b9b // )

And we'll define some customer records to be written:

import org.apache.kafka.clients.producer.ProducerRecord
@@ -221,16 +221,16 @@ 

Writing typed records with an Avro4s producer

Turning a generic producer into a typed producer is simple. We first ensure that com.sksamuel.avro4s.RecordFormat instances for our data are in scope:

implicit val CustomerRecordFormat = com.sksamuel.avro4s.RecordFormat[Customer]
-// CustomerRecordFormat: com.sksamuel.avro4s.RecordFormat[Customer] = com.sksamuel.avro4s.RecordFormat$$anon$1@732d290c
+// CustomerRecordFormat: com.sksamuel.avro4s.RecordFormat[Customer] = com.sksamuel.avro4s.RecordFormat$$anon$1@1c808a0f
 implicit val CustomerIdRecordFormat = com.sksamuel.avro4s.RecordFormat[CustomerId]
-// CustomerIdRecordFormat: com.sksamuel.avro4s.RecordFormat[CustomerId] = com.sksamuel.avro4s.RecordFormat$$anon$1@22102415
+// CustomerIdRecordFormat: com.sksamuel.avro4s.RecordFormat[CustomerId] = com.sksamuel.avro4s.RecordFormat$$anon$1@e72307a

And with those implicits in scope, we can create our producer:

val avro4sProducer = producer.map(_.toAvro4s[CustomerId, Customer])
 // avro4sProducer: Resource[IO, ProducerApi[[A]IO[A], CustomerId, Customer]] = Bind(
 //   source = Allocate(
-//     resource = cats.effect.kernel.Resource$$$Lambda$10750/0x0000000102f43040@2d283654
+//     resource = cats.effect.kernel.Resource$$$Lambda$10734/0x0000000102d1b040@3f667b9b
 //   ),
-//   fs = cats.effect.kernel.Resource$$Lambda$10878/0x000000010308a040@3781bf7e
+//   fs = cats.effect.kernel.Resource$$Lambda$10862/0x0000000102e63040@64d23496
 // )

We can now write our typed customer records successfully!

import cats.effect.unsafe.implicits.global
@@ -259,11 +259,11 @@ 

// consumer: Resource[IO, ConsumerApi[IO, CustomerId, Customer]] = Bind( // source = Bind( // source = Allocate( -// resource = cats.effect.kernel.Resource$$$Lambda$10750/0x0000000102f43040@6bc2333f +// resource = cats.effect.kernel.Resource$$$Lambda$10734/0x0000000102d1b040@26c8d91d // ), -// fs = com.banno.kafka.consumer.ConsumerApi$Avro$$$Lambda$10881/0x000000010308c040@3510b8b0 +// fs = com.banno.kafka.consumer.ConsumerApi$Avro$$$Lambda$10865/0x0000000102e65040@9bace14 // ), -// fs = cats.effect.kernel.Resource$$Lambda$10878/0x000000010308a040@3ed47a0 +// fs = cats.effect.kernel.Resource$$Lambda$10862/0x0000000102e63040@29767acf // )

With our Kafka consumer in hand, we'll assign our topic partition to the consumer, with no offsets, so that it starts reading from the first record, and then read a stream of records from our Kafka topic:

import org.apache.kafka.common.TopicPartition