Skip to content

Commit

Permalink
fix(plugin-server): don't send app debug logs to CH (#18816)
Browse files Browse the repository at this point in the history
  • Loading branch information
xvello authored Nov 23, 2023
1 parent d6dd8d6 commit d61ae7b
Show file tree
Hide file tree
Showing 6 changed files with 36 additions and 29 deletions.
1 change: 1 addition & 0 deletions plugin-server/bin/ci_functional_tests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ export CONVERSION_BUFFER_ENABLED=true
export BUFFER_CONVERSION_SECONDS=2 # Make sure we don't have to wait for the default 60 seconds
export KAFKA_MAX_MESSAGE_BATCH_SIZE=0
export APP_METRICS_GATHERED_FOR_ALL=true
export PLUGINS_DEFAULT_LOG_LEVEL=0 # All logs, as debug logs are used in synchronization barriers
export NODE_ENV=production-functional-tests

# Not important at all, but I like to see nice red/green for tests
Expand Down
3 changes: 2 additions & 1 deletion plugin-server/src/config/config.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { LogLevel, PluginsServerConfig, stringToPluginServerMode, ValueMatcher } from '../types'
import { LogLevel, PluginLogLevel, PluginsServerConfig, stringToPluginServerMode, ValueMatcher } from '../types'
import { isDevEnv, isTestEnv, stringToBoolean } from '../utils/env-utils'
import { KAFKAJS_LOG_LEVEL_MAPPING } from './constants'
import {
Expand Down Expand Up @@ -72,6 +72,7 @@ export function getDefaultConfig(): PluginsServerConfig {
TASKS_PER_WORKER: 10,
INGESTION_CONCURRENCY: 10,
INGESTION_BATCH_SIZE: 500,
PLUGINS_DEFAULT_LOG_LEVEL: isTestEnv() ? PluginLogLevel.Full : PluginLogLevel.Log,
LOG_LEVEL: isTestEnv() ? LogLevel.Warn : LogLevel.Info,
SENTRY_DSN: null,
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: 0,
Expand Down
8 changes: 5 additions & 3 deletions plugin-server/src/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -146,6 +146,7 @@ export interface PluginsServerConfig {
APP_METRICS_FLUSH_MAX_QUEUE_SIZE: number
BASE_DIR: string // base path for resolving local plugins
PLUGINS_RELOAD_PUBSUB_CHANNEL: string // Redis channel for reload events'
PLUGINS_DEFAULT_LOG_LEVEL: PluginLogLevel
LOG_LEVEL: LogLevel
SENTRY_DSN: string | null
SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: number // Rate of tracing in plugin server (between 0 and 1)
Expand Down Expand Up @@ -451,9 +452,10 @@ export enum PluginLogEntryType {

/**
 * Minimum severity of plugin log entries that get persisted to ClickHouse.
 * Each level keeps its own entries and everything more severe; `Full` keeps
 * everything including debug logs, `Critical` keeps only errors.
 * Kept as a numeric enum for backward compatibility with values stored in
 * plugin configs — do not renumber.
 */
export enum PluginLogLevel {
    Full = 0, // all logs
    Log = 1, // all except debug
    Info = 2, // all except log and debug
    Warn = 3, // all except log, debug and info
    Critical = 4, // only error type and system source
}

export interface PluginLogEntry {
Expand Down
11 changes: 6 additions & 5 deletions plugin-server/src/utils/db/db.ts
Original file line number Diff line number Diff line change
Expand Up @@ -158,8 +158,8 @@ export class DB {
/** How many unique group types to allow per team */
MAX_GROUP_TYPES_PER_TEAM = 5

/** Whether to write to clickhouse_person_unique_id topic */
writeToPersonUniqueId?: boolean
/** Default log level for plugins that don't specify it */
pluginsDefaultLogLevel: PluginLogLevel

/** How many seconds to keep person info in Redis cache */
PERSONS_AND_GROUPS_CACHE_TTL: number
Expand All @@ -170,13 +170,15 @@ export class DB {
kafkaProducer: KafkaProducerWrapper,
clickhouse: ClickHouse,
statsd: StatsD | undefined,
pluginsDefaultLogLevel: PluginLogLevel,
personAndGroupsCacheTtl = 1
) {
this.postgres = postgres
this.redisPool = redisPool
this.kafkaProducer = kafkaProducer
this.clickhouse = clickhouse
this.statsd = statsd
this.pluginsDefaultLogLevel = pluginsDefaultLogLevel
this.PERSONS_AND_GROUPS_CACHE_TTL = personAndGroupsCacheTtl
}

Expand Down Expand Up @@ -1076,10 +1078,9 @@ export class DB {

public async queuePluginLogEntry(entry: LogEntryPayload): Promise<void> {
const { pluginConfig, source, message, type, timestamp, instanceId } = entry
const configuredLogLevel = pluginConfig.plugin?.log_level || this.pluginsDefaultLogLevel

const logLevel = pluginConfig.plugin?.log_level

if (!shouldStoreLog(logLevel || PluginLogLevel.Full, source, type)) {
if (!shouldStoreLog(configuredLogLevel, type)) {
return
}

Expand Down
10 changes: 9 additions & 1 deletion plugin-server/src/utils/db/hub.ts
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,15 @@ export async function createHub(

const promiseManager = new PromiseManager(serverConfig, statsd)

const db = new DB(postgres, redisPool, kafkaProducer, clickhouse, statsd, serverConfig.PERSON_INFO_CACHE_TTL)
const db = new DB(
postgres,
redisPool,
kafkaProducer,
clickhouse,
statsd,
serverConfig.PLUGINS_DEFAULT_LOG_LEVEL,
serverConfig.PERSON_INFO_CACHE_TTL
)
const teamManager = new TeamManager(postgres, serverConfig, statsd)
const organizationManager = new OrganizationManager(postgres, teamManager)
const pluginsApiKeyManager = new PluginsApiKeyManager(db)
Expand Down
32 changes: 13 additions & 19 deletions plugin-server/src/utils/db/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,9 @@ import { Counter } from 'prom-client'

import { defaultConfig } from '../../config/config'
import { KAFKA_PERSON } from '../../config/kafka-topics'
import { BasePerson, Person, RawPerson, TimestampFormat } from '../../types'
import { BasePerson, Person, PluginLogEntryType, PluginLogLevel, RawPerson, TimestampFormat } from '../../types'
import { status } from '../../utils/status'
import { castTimestampOrNow } from '../../utils/utils'
import { PluginLogEntrySource, PluginLogEntryType, PluginLogLevel } from './../../types'

export function unparsePersonPartial(person: Partial<Person>): Partial<RawPerson> {
return { ...(person as BasePerson), ...(person.created_at ? { created_at: person.created_at.toISO() } : {}) }
Expand Down Expand Up @@ -127,24 +126,19 @@ export function getFinalPostgresQuery(queryString: string, values: any[]): strin
return queryString.replace(/\$([0-9]+)/g, (m, v) => JSON.stringify(values[parseInt(v) - 1]))
}

export function shouldStoreLog(
pluginLogLevel: PluginLogLevel,
source: PluginLogEntrySource,
type: PluginLogEntryType
): boolean {
if (source === PluginLogEntrySource.System) {
return true
export function shouldStoreLog(pluginLogLevel: PluginLogLevel, type: PluginLogEntryType): boolean {
switch (pluginLogLevel) {
case PluginLogLevel.Full:
return true
case PluginLogLevel.Log:
return type !== PluginLogEntryType.Debug
case PluginLogLevel.Info:
return type !== PluginLogEntryType.Log && type !== PluginLogEntryType.Debug
case PluginLogLevel.Warn:
return type === PluginLogEntryType.Warn || type === PluginLogEntryType.Error
case PluginLogLevel.Critical:
return type === PluginLogEntryType.Error
}

if (pluginLogLevel === PluginLogLevel.Critical) {
return type === PluginLogEntryType.Error
} else if (pluginLogLevel === PluginLogLevel.Warn) {
return type !== PluginLogEntryType.Log && type !== PluginLogEntryType.Info
} else if (pluginLogLevel === PluginLogLevel.Debug) {
return type !== PluginLogEntryType.Log
}

return true
}

// keep in sync with posthog/posthog/api/utils.py::safe_clickhouse_string
Expand Down

0 comments on commit d61ae7b

Please sign in to comment.