diff --git a/plugin-server/bin/ci_functional_tests.sh b/plugin-server/bin/ci_functional_tests.sh index 9eff572c71251..9014a4a249a57 100755 --- a/plugin-server/bin/ci_functional_tests.sh +++ b/plugin-server/bin/ci_functional_tests.sh @@ -16,6 +16,7 @@ export CONVERSION_BUFFER_ENABLED=true export BUFFER_CONVERSION_SECONDS=2 # Make sure we don't have to wait for the default 60 seconds export KAFKA_MAX_MESSAGE_BATCH_SIZE=0 export APP_METRICS_GATHERED_FOR_ALL=true +export PLUGINS_DEFAULT_LOG_LEVEL=0 # All logs, as debug logs are used in synchronization barriers export NODE_ENV=production-functional-tests # Not important at all, but I like to see nice red/green for tests diff --git a/plugin-server/src/config/config.ts b/plugin-server/src/config/config.ts index 5d08afb6428fe..031e108eb691c 100644 --- a/plugin-server/src/config/config.ts +++ b/plugin-server/src/config/config.ts @@ -1,4 +1,4 @@ -import { LogLevel, PluginsServerConfig, stringToPluginServerMode, ValueMatcher } from '../types' +import { LogLevel, PluginLogLevel, PluginsServerConfig, stringToPluginServerMode, ValueMatcher } from '../types' import { isDevEnv, isTestEnv, stringToBoolean } from '../utils/env-utils' import { KAFKAJS_LOG_LEVEL_MAPPING } from './constants' import { @@ -72,6 +72,7 @@ export function getDefaultConfig(): PluginsServerConfig { TASKS_PER_WORKER: 10, INGESTION_CONCURRENCY: 10, INGESTION_BATCH_SIZE: 500, + PLUGINS_DEFAULT_LOG_LEVEL: isTestEnv() ? PluginLogLevel.Full : PluginLogLevel.Log, LOG_LEVEL: isTestEnv() ? 
LogLevel.Warn : LogLevel.Info, SENTRY_DSN: null, SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: 0, diff --git a/plugin-server/src/types.ts b/plugin-server/src/types.ts index 700bf4f6cef89..c4a973118a887 100644 --- a/plugin-server/src/types.ts +++ b/plugin-server/src/types.ts @@ -146,6 +146,7 @@ export interface PluginsServerConfig { APP_METRICS_FLUSH_MAX_QUEUE_SIZE: number BASE_DIR: string // base path for resolving local plugins PLUGINS_RELOAD_PUBSUB_CHANNEL: string // Redis channel for reload events' + PLUGINS_DEFAULT_LOG_LEVEL: PluginLogLevel LOG_LEVEL: LogLevel SENTRY_DSN: string | null SENTRY_PLUGIN_SERVER_TRACING_SAMPLE_RATE: number // Rate of tracing in plugin server (between 0 and 1) @@ -451,9 +452,10 @@ export enum PluginLogEntryType { export enum PluginLogLevel { Full = 0, // all logs - Debug = 1, // all except log - Warn = 2, // all except log and info - Critical = 3, // only error type and system source + Log = 1, // all except debug + Info = 2, // all except log and debug + Warn = 3, // all except log, debug and info + Critical = 4, // only error type and system source } export interface PluginLogEntry { diff --git a/plugin-server/src/utils/db/db.ts b/plugin-server/src/utils/db/db.ts index cf64f86b600a2..2cb417c7f5a6f 100644 --- a/plugin-server/src/utils/db/db.ts +++ b/plugin-server/src/utils/db/db.ts @@ -158,8 +158,8 @@ export class DB { /** How many unique group types to allow per team */ MAX_GROUP_TYPES_PER_TEAM = 5 - /** Whether to write to clickhouse_person_unique_id topic */ - writeToPersonUniqueId?: boolean + /** Default log level for plugins that don't specify it */ + pluginsDefaultLogLevel: PluginLogLevel /** How many seconds to keep person info in Redis cache */ PERSONS_AND_GROUPS_CACHE_TTL: number @@ -170,6 +170,7 @@ export class DB { kafkaProducer: KafkaProducerWrapper, clickhouse: ClickHouse, statsd: StatsD | undefined, + pluginsDefaultLogLevel: PluginLogLevel, personAndGroupsCacheTtl = 1 ) { this.postgres = postgres @@ -177,6 +178,7 @@ export
class DB { this.kafkaProducer = kafkaProducer this.clickhouse = clickhouse this.statsd = statsd + this.pluginsDefaultLogLevel = pluginsDefaultLogLevel this.PERSONS_AND_GROUPS_CACHE_TTL = personAndGroupsCacheTtl } @@ -1076,10 +1078,9 @@ export class DB { public async queuePluginLogEntry(entry: LogEntryPayload): Promise { const { pluginConfig, source, message, type, timestamp, instanceId } = entry + const configuredLogLevel = pluginConfig.plugin?.log_level || this.pluginsDefaultLogLevel - const logLevel = pluginConfig.plugin?.log_level - - if (!shouldStoreLog(logLevel || PluginLogLevel.Full, source, type)) { + if (!shouldStoreLog(configuredLogLevel, type)) { return } diff --git a/plugin-server/src/utils/db/hub.ts b/plugin-server/src/utils/db/hub.ts index 2d5c780b7336e..5a462590d7744 100644 --- a/plugin-server/src/utils/db/hub.ts +++ b/plugin-server/src/utils/db/hub.ts @@ -137,7 +137,15 @@ export async function createHub( const promiseManager = new PromiseManager(serverConfig, statsd) - const db = new DB(postgres, redisPool, kafkaProducer, clickhouse, statsd, serverConfig.PERSON_INFO_CACHE_TTL) + const db = new DB( + postgres, + redisPool, + kafkaProducer, + clickhouse, + statsd, + serverConfig.PLUGINS_DEFAULT_LOG_LEVEL, + serverConfig.PERSON_INFO_CACHE_TTL + ) const teamManager = new TeamManager(postgres, serverConfig, statsd) const organizationManager = new OrganizationManager(postgres, teamManager) const pluginsApiKeyManager = new PluginsApiKeyManager(db) diff --git a/plugin-server/src/utils/db/utils.ts b/plugin-server/src/utils/db/utils.ts index a4f940defdefb..de37e8c3f5f67 100644 --- a/plugin-server/src/utils/db/utils.ts +++ b/plugin-server/src/utils/db/utils.ts @@ -6,10 +6,9 @@ import { Counter } from 'prom-client' import { defaultConfig } from '../../config/config' import { KAFKA_PERSON } from '../../config/kafka-topics' -import { BasePerson, Person, RawPerson, TimestampFormat } from '../../types' +import { BasePerson, Person, PluginLogEntryType, 
PluginLogLevel, RawPerson, TimestampFormat } from '../../types' import { status } from '../../utils/status' import { castTimestampOrNow } from '../../utils/utils' -import { PluginLogEntrySource, PluginLogEntryType, PluginLogLevel } from './../../types' export function unparsePersonPartial(person: Partial): Partial { return { ...(person as BasePerson), ...(person.created_at ? { created_at: person.created_at.toISO() } : {}) } @@ -127,24 +126,19 @@ export function getFinalPostgresQuery(queryString: string, values: any[]): strin return queryString.replace(/\$([0-9]+)/g, (m, v) => JSON.stringify(values[parseInt(v) - 1])) } -export function shouldStoreLog( - pluginLogLevel: PluginLogLevel, - source: PluginLogEntrySource, - type: PluginLogEntryType -): boolean { - if (source === PluginLogEntrySource.System) { - return true +export function shouldStoreLog(pluginLogLevel: PluginLogLevel, type: PluginLogEntryType): boolean { + switch (pluginLogLevel) { + case PluginLogLevel.Full: + return true + case PluginLogLevel.Log: + return type !== PluginLogEntryType.Debug + case PluginLogLevel.Info: + return type !== PluginLogEntryType.Log && type !== PluginLogEntryType.Debug + case PluginLogLevel.Warn: + return type === PluginLogEntryType.Warn || type === PluginLogEntryType.Error + case PluginLogLevel.Critical: + return type === PluginLogEntryType.Error } - - if (pluginLogLevel === PluginLogLevel.Critical) { - return type === PluginLogEntryType.Error - } else if (pluginLogLevel === PluginLogLevel.Warn) { - return type !== PluginLogEntryType.Log && type !== PluginLogEntryType.Info - } else if (pluginLogLevel === PluginLogLevel.Debug) { - return type !== PluginLogEntryType.Log - } - - return true } // keep in sync with posthog/posthog/api/utils.py::safe_clickhouse_string