Merge pull request #33 from Yolean/adjust-logging-and-allow-filter-env
Adjust logging and allow for log level filtering via env KAFKA_KEYVAL…
atamon authored Jun 29, 2023
2 parents 3817224 + 4a5f4af commit 48a78b3
Showing 2 changed files with 7 additions and 11 deletions.
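
The practical effect is that the per-event and per-key messages drop from debug to trace, while the stream level in src/logger.ts becomes configurable at runtime. A minimal usage sketch of that filtering, assuming the bunyan-style logger the logger.ts hunk suggests (the logger name and the explicit cast below are illustrative, not part of this commit):

import * as bunyan from 'bunyan';

// Mirrors the stream config introduced in src/logger.ts below: the level falls back to
// 'debug' unless KAFKA_KEYVALUE_LOG_LEVEL overrides it. The cast to bunyan.LogLevel is an
// illustrative way around the typing issue the removed TODO comment mentions.
const logger = bunyan.createLogger({
  name: 'kafka-keyvalue-example',
  streams: [
    {
      level: (process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug') as bunyan.LogLevel,
      stream: process.stdout
    }
  ]
});

logger.debug({ topic: 'example' }, 'Visible at the default level');
logger.trace({ key: 'example-key' }, 'Only visible when KAFKA_KEYVALUE_LOG_LEVEL=trace');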
src/KafkaKeyValue.ts: 6 additions & 8 deletions
@@ -95,7 +95,6 @@ async function produceViaPixy(fetchImpl: IFetchImpl, logger, pixyHost: string, t
   }

   const json = await res.json() as PixyPostTopicKeySyncResponse;
-  logger.debug({ res, json }, 'KafkaCache put returned');

   return json.offset;
 }
@@ -209,9 +208,9 @@ export default class KafkaKeyValue {
     } = requestBody;

     const expectedTopic = this.topic;
-    this.logger.debug({ topic, expectedTopic }, 'Matching update event against expected topic');
+    this.logger.trace({ topic, expectedTopic }, 'Matching update event against expected topic');
     if (topic !== expectedTopic) {
-      this.logger.debug({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
+      this.logger.trace({ topic, expectedTopic }, 'Update event ignored due to topic mismatch. Business as usual.');
       return;
     }

@@ -232,14 +231,14 @@
       });

       const updatesBeingPropagated = updatesToPropagate.map(async key => {
-        this.logger.debug({ key }, 'Received update event for key');
+        this.logger.trace({ key }, 'Received update event for key');
         const value = await this.get(key);
         this.updateHandlers.forEach(fn => fn(key, value));
       });

       await Promise.all(updatesBeingPropagated);
     } else {
-      this.logger.debug({ topic }, 'No update handlers registered, update event has no effect');
+      this.logger.trace({ topic }, 'No update handlers registered, update event has no effect');
     }

     // NOTE: Letting all handlers complete before updating the metric
@@ -325,7 +324,6 @@ export default class KafkaKeyValue {
     const value = parseResponse(this.logger, res, this.config.gzip || false);

     parseTiming();
-    this.logger.debug({ key, value }, 'KafkaCache get value returned')

     this.updateLastSeenOffsetsFromHeader(res);

@@ -334,7 +332,7 @@

   async streamValues(onValue: (value: any) => void): Promise<void> {
     if (this.config.gzip) throw new Error('Unsuported method for gzipped topics!');
-    this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache started');
+    this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache started');

     const streamTiming = this.metrics.kafka_key_value_stream_latency_seconds.startTimer({ cache_name: this.getCacheName() });
     const res = await this.fetchImpl(`${this.getCacheHost()}/cache/v1/values`);
@@ -344,7 +342,7 @@
     await streamResponseBody(this.logger, res.body, onValue);

     streamTiming();
-    this.logger.debug({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');
+    this.logger.trace({ cache_name: this.getCacheName() }, 'Streaming values for cache finished');

     this.updateLastSeenOffsetsFromHeader(res);
   }
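
Of the methods touched above, streamValues has its full signature visible in the diff, so a small usage sketch of that path follows (how the KafkaKeyValue instance is constructed is not shown here, so it is only declared):

import KafkaKeyValue from './KafkaKeyValue';

// Assumed to exist; constructor options are outside this diff.
declare const kv: KafkaKeyValue;

// streamValues(onValue) invokes the callback once per cached value and, per the hunk above,
// still throws for gzipped topics. After this commit the 'Streaming values for cache
// started/finished' messages around it are trace-level, so they only appear when
// KAFKA_KEYVALUE_LOG_LEVEL=trace.
async function collectValues(): Promise<any[]> {
  const collected: any[] = [];
  await kv.streamValues(value => collected.push(value));
  return collected;
}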
src/logger.ts: 1 addition & 3 deletions
@@ -20,9 +20,7 @@ const globalOptions: LoggerOptions = {
   name: "no-logger-name-given",
   streams: [
     {
-      level: 'debug',
-      // TODO TypeScript didnt allow me to do this when building :(
-      // level: process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug',
+      level: process.env.KAFKA_KEYVALUE_LOG_LEVEL || 'debug',
       stream: process.stdout
     }
   ],
