From 4a5f4af14604f30dffe4c7584bec3eb7f80a455f Mon Sep 17 00:00:00 2001
From: Anton Lindgren <anton.lindgren@yolean.com>
Date: Tue, 13 Jun 2023 17:08:38 +0200
Subject: [PATCH] Reduce log verbosity and allow log level filtering via env
 KAFKA_KEYVALUE_LOG_LEVEL

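Remove the debug logs that dumped full put/get payloads ({ res, json } and
{ key, value }), downgrade the per-event and per-key debug logs to trace, and
make the stdout stream level configurable via KAFKA_KEYVALUE_LOG_LEVEL,
defaulting to 'debug' when unset (e.g. KAFKA_KEYVALUE_LOG_LEVEL=info silences
debug and trace output).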
---
 src/KafkaKeyValue.ts | 14 ++++++--------
 src/logger.ts        |  4 +---
 2 files changed, 7 insertions(+), 11 deletions(-)

diff --git a/src/KafkaKeyValue.ts b/src/KafkaKeyValue.ts
index 15f862d..4af1c7d 100644
--- a/src/KafkaKeyValue.ts
+++ b/src/KafkaKeyValue.ts
@@ -95,7 +95,6 @@ async function produceViaPixy(fetchImpl: IFetchImpl, logger, pixyHost: string, t
   }
 
   const json = await res.json() as PixyPostTopicKeySyncResponse;
-  logger.debug({ res, json }, 'KafkaCache put returned');
 
   return json.offset;
 }
@@ -209,9 +208,9 @@ export default class KafkaKeyValue {
       } = requestBody;
 
       const expectedTopic = this.topic;
-      this.logger.debug({ topic, expectedTopic }, 'Matching update event against expected topic');
+      this.logger.trace({ topic, expectedTopic }, 'Matching update event against expected topic');
       if (topic !== expectedTopic) {
-        this.logger.debug({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
+        this.logger.trace({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
         return;
       }
 
@@ -232,14 +231,14 @@ export default class KafkaKeyValue {
         });
 
         const updatesBeingPropagated = updatesToPropagate.map(async key => {
-          this.logger.debug({ key }, 'Received update event for key');
+          this.logger.trace({ key }, 'Received update event for key');
           const value = await this.get(key);
           this.updateHandlers.forEach(fn => fn(key, value));
         });
 
         await Promise.all(updatesBeingPropagated);
       } else {
-        this.logger.debug({ topic }, 'No update handlers registered, update event has no effect');
+        this.logger.trace({ topic }, 'No update handlers registered, update event has no effect');
       }
 
       // NOTE: Letting all handlers complete before updating the metric
@@ -325,7 +324,6 @@ export default class KafkaKeyValue {
     const value = parseResponse(this.logger, res, this.config.gzip || false);
 
     parseTiming();
-    this.logger.debug({ key, value }, 'KafkaCache get value returned')
 
     this.updateLastSeenOffsetsFromHeader(res);
 
@@ -334,7 +332,7 @@ export default class KafkaKeyValue {
 
   async streamValues(onValue: (value: any) => void): Promise<void> {
     if (this.config.gzip) throw new Error('Unsuported method for gzipped topics!');
-    this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache started');
+    this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache started');
 
     const streamTiming = this.metrics.kafka_key_value_stream_latency_seconds.startTimer({ cache_name: this.getCacheName() });
     const res = await this.fetchImpl(`${this.getCacheHost()}/cache/v1/values`);
@@ -344,7 +342,7 @@ export default class KafkaKeyValue {
     await streamResponseBody(this.logger, res.body, onValue);
 
     streamTiming();
-    this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');
+    this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');
 
     this.updateLastSeenOffsetsFromHeader(res);
   }
diff --git a/src/logger.ts b/src/logger.ts
index b378db2..90dd3fd 100644
--- a/src/logger.ts
+++ b/src/logger.ts
@@ -20,9 +20,7 @@ const globalOptions: LoggerOptions = {
   name: "no-logger-name-given",
   streams: [
     {
-      level: 'debug',
-      // TODO TypeScript didnt allow me to do this when building :(
-      // level: process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug',
+      level: process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug',
       stream: process.stdout
     }
   ],
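
With the new level option, a typo in KAFKA_KEYVALUE_LOG_LEVEL would reach bunyan unchecked. A minimal sketch of guarding the value, assuming the documented bunyan level names; LogLevelName and resolveLogLevel are illustrative names, not existing exports:

// Sketch: resolve KAFKA_KEYVALUE_LOG_LEVEL to a known bunyan level name and
// fall back to 'debug' for unset or unknown values.
// LogLevelName and resolveLogLevel are illustrative names, not bunyan exports.
type LogLevelName = 'trace' | 'debug' | 'info' | 'warn' | 'error' | 'fatal';

const LOG_LEVELS: readonly LogLevelName[] = ['trace', 'debug', 'info', 'warn', 'error', 'fatal'];

function resolveLogLevel(raw: string | undefined): LogLevelName {
  // find() narrows the match to the LogLevelName union, so the result fits a typed level option
  return LOG_LEVELS.find(level => level === raw) ?? 'debug';
}

// Possible usage in src/logger.ts:
//   level: resolveLogLevel(process.env.KAFKA_KEYVALUE_LOG_LEVEL),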