-
Notifications
You must be signed in to change notification settings - Fork 15
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Fix for exceeding Pubsub max message lengths
- Loading branch information
Showing
5 changed files
with
141 additions
and
37 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
101 changes: 101 additions & 0 deletions
101
...rc/main/scala/com/snowplowanalytics/snowplow/storage/bigquery/streamloader/Producer.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
/* | ||
* Copyright (c) 2018-2023 Snowplow Analytics Ltd. All rights reserved. | ||
* | ||
* This program is licensed to you under the Apache License Version 2.0, | ||
* and you may not use this file except in compliance with the Apache License Version 2.0. | ||
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0. | ||
* | ||
* Unless required by applicable law or agreed to in writing, | ||
* software distributed under the Apache License Version 2.0 is distributed on an | ||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under. | ||
*/ | ||
package com.snowplowanalytics.snowplow.storage.bigquery.streamloader | ||
|
||
import java.time.Instant | ||
import java.nio.charset.StandardCharsets | ||
|
||
import cats.implicits._ | ||
|
||
import cats.effect.{Async, Resource} | ||
|
||
import com.permutive.pubsub.producer.{Model, PubsubProducer} | ||
import com.permutive.pubsub.producer.encoder.MessageEncoder | ||
import com.permutive.pubsub.producer.grpc.{GooglePubsubProducer, PubsubProducerConfig} | ||
|
||
import com.snowplowanalytics.snowplow.badrows.{BadRow, Failure, Payload => BadRowPayload} | ||
|
||
import org.typelevel.log4cats.Logger | ||
|
||
import scala.concurrent.duration._ | ||
|
||
/** Minimal sink abstraction: publish a single value of type `A` within effect `F`.
  *
  * Implemented as a single-abstract-method trait so instances can be created
  * from lambda literals (see the factory methods in the companion object).
  */
trait Producer[F[_], A] {

  /** Publish `data`, yielding a unit effect that completes when the value has been handed off. */
  def produce(data: A): F[Unit]
}
|
||
object Producer {

  /** Upper bound (in bytes) we allow for an encoded payload.
    * Deliberately below PubSub's hard 10MB per-message limit to leave headroom
    * for message attributes and envelope overhead.
    */
  val MaxPayloadLength = 9000000 // Stay under Pubsub Maximum of 10MB

  /** Construct a size-aware producer for `A`.
    *
    * Payloads whose encoded size reaches [[MaxPayloadLength]] are NOT sent to
    * the main topic (PubSub would reject them); instead a `SizeViolation` bad
    * row describing the oversized message is routed to `oversizeBadRowProducer`.
    *
    * @param projectId              GCP project id of the destination topic
    * @param topic                  destination PubSub topic name
    * @param batchSize              max number of messages per publish batch
    * @param delay                  max time to wait before flushing a partial batch
    * @param oversizeBadRowProducer sink for payloads that exceed the size limit
    */
  def mkProducer[F[_]: Async: Logger, A: MessageEncoder](
    projectId: String,
    topic: String,
    batchSize: Long,
    delay: FiniteDuration,
    oversizeBadRowProducer: Producer[F, BadRow.SizeViolation]
  ): Resource[F, Producer[F, A]] =
    mkProducer[F, A](projectId, topic, batchSize, delay).map { p => (data: A) =>
      val dataSize = getSize(data)
      if (dataSize >= MaxPayloadLength) {
        val badRow = createSizeViolationBadRow(data, dataSize)
        oversizeBadRowProducer.produce(badRow)
      } else {
        p.produce(data).void
      }
    }

  /** Construct the producer used for size-violation bad rows.
    *
    * Note: the underlying producer is summoned for `BadRow` (so it relies on a
    * `MessageEncoder[BadRow]` being in scope), and `BadRow.SizeViolation` is
    * published through it as a plain `BadRow`. No size check is applied here;
    * [[createSizeViolationBadRow]] already truncates the embedded payload.
    */
  def mkOversizeBadRowProducer[F[_]: Async: Logger](
    projectId: String,
    topic: String,
    batchSize: Long,
    delay: FiniteDuration
  ): Resource[F, Producer[F, BadRow.SizeViolation]] =
    mkProducer[F, BadRow](projectId, topic, batchSize, delay).map { p => (data: BadRow.SizeViolation) =>
      p.produce(data).void
    }

  /** Construct a raw PubSub producer resource for any `A` with a `MessageEncoder`. */
  private def mkProducer[F[_]: Async: Logger, A: MessageEncoder](
    projectId: String,
    topic: String,
    batchSize: Long,
    delay: FiniteDuration
  ): Resource[F, PubsubProducer[F, A]] =
    GooglePubsubProducer.of[F, A](
      Model.ProjectId(projectId),
      Model.Topic(topic),
      config = PubsubProducerConfig[F](
        batchSize = batchSize,
        delayThreshold = delay,
        // No interpolation needed, so a plain string literal is used.
        onFailedTerminate = e => Logger[F].error(e)("Error in PubSub producer")
      )
    )

  /** Build a `SizeViolation` bad row describing an oversized payload.
    *
    * The offending payload is re-encoded to a UTF-8 string and truncated to a
    * tenth of [[MaxPayloadLength]] so the bad row itself stays well under the
    * PubSub message limit.
    *
    * @param data           the payload that exceeded the limit
    * @param actualDataSize its encoded size in bytes
    */
  private def createSizeViolationBadRow[A: MessageEncoder](data: A, actualDataSize: Int): BadRow.SizeViolation = {
    // Fixed garbled message (was "Pubsub message exceedsMessageEncoder allowed size").
    val msg = "Pubsub message exceeds allowed size"
    val payload = MessageEncoder[A]
      .encode(data)
      .map(bytes => new String(bytes, StandardCharsets.UTF_8))
      .getOrElse("Pubsub message can't be converted to string")
    BadRow
      .SizeViolation(
        // `processor` is expected to be in scope from elsewhere in this file/package.
        processor,
        Failure.SizeViolation(Instant.now(), MaxPayloadLength, actualDataSize, msg),
        BadRowPayload.RawPayload(payload.take(MaxPayloadLength / 10))
      )
  }

  /** Encoded size of `a` in bytes; treated as maximally oversized if encoding fails,
    * which forces the value down the bad-row path rather than a doomed publish.
    */
  private def getSize[A: MessageEncoder](a: A): Int =
    MessageEncoder[A].encode(a).map(_.length).getOrElse(Int.MaxValue)
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters