From 4222bbf406b2ef7ea6b8b464c68dacb7a40c72a1 Mon Sep 17 00:00:00 2001 From: Nikolay Pelov Date: Tue, 7 Feb 2023 08:50:22 +0200 Subject: [PATCH] Improve logging after the switch to github.com/go-kit/kit/log (#278) Signed-off-by: Nikolay Pelov --- Makefile | 2 +- main.go | 104 +++++++++++++++++++++++++++---------------------------- 2 files changed, 53 insertions(+), 53 deletions(-) diff --git a/Makefile b/Makefile index 7ac7a9e..e057c05 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ OS_TYPE ?= $(shell uname -s | tr '[:upper:]' '[:lower:]') ARCH_TYPE ?= $(subst x86_64,amd64,$(patsubst i%86,386,$(ARCH))) GOOS ?= $(shell go env GOOS) GOARCH ?= $(shell go env GOARCH) -VERSION ?= 0.4.1 +VERSION ?= 0.4.2 MAJOR_VERSION ?= 21 MINOR_VERSION ?= 8 ORACLE_VERSION ?= $(MAJOR_VERSION).$(MINOR_VERSION) diff --git a/main.go b/main.go index 2dd55a8..306727e 100644 --- a/main.go +++ b/main.go @@ -99,7 +99,7 @@ func getEnv(key, fallback string) string { func atoi(stringValue string, logger log.Logger) int { intValue, err := strconv.Atoi(stringValue) if err != nil { - level.Error(logger).Log("error while converting to int:", err) + level.Error(logger).Log("msg", "error while converting to int", "err", err) panic(err) } return intValue @@ -115,17 +115,17 @@ func maskDsn(dsn string) string { } func connect(dsn string, logger log.Logger) *sql.DB { - level.Debug(logger).Log("Launching connection: ", maskDsn(dsn)) + level.Debug(logger).Log("msg", "Launching connection", "dsn", maskDsn(dsn)) db, err := sql.Open("oci8", dsn) if err != nil { - level.Error(logger).Log("Error while connecting to", dsn) + level.Error(logger).Log("msg", "Error while connecting to", "dsn", dsn) panic(err) } - level.Debug(logger).Log("set max idle connections to ", *maxIdleConns) + level.Debug(logger).Log("msg", "set max idle connections to", "value", *maxIdleConns) db.SetMaxIdleConns(*maxIdleConns) - level.Debug(logger).Log("set max open connections to ", *maxOpenConns) + level.Debug(logger).Log("msg", "set 
max open connections to", "value", *maxOpenConns) db.SetMaxOpenConns(*maxOpenConns) - level.Debug(logger).Log("Successfully connected to: ", maskDsn(dsn)) + level.Debug(logger).Log("msg", "Successfully connected to", "dsn", maskDsn(dsn)) return db } @@ -150,7 +150,7 @@ func NewExporter(dsn string, logger log.Logger) *Exporter { Namespace: namespace, Subsystem: exporter, Name: "scrape_errors_total", - Help: "Total number of times an error occured scraping a Oracle database.", + Help: "Total number of times an error occurred scraping an Oracle database.", }, []string{"collector"}), error: prometheus.NewGauge(prometheus.GaugeOpts{ Namespace: namespace, @@ -254,17 +254,17 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) { if err = e.db.Ping(); err != nil { if strings.Contains(err.Error(), "sql: database is closed") { - level.Info(e.logger).Log("Reconnecting to DB") + level.Info(e.logger).Log("msg", "Reconnecting to DB") e.db = connect(e.dsn, e.logger) } } if err = e.db.Ping(); err != nil { - level.Error(e.logger).Log("Error pinging oracle:", err) + level.Error(e.logger).Log("msg", "Error pinging oracle", "err", err) //e.db.Close() e.up.Set(0) return } else { - level.Debug(e.logger).Log("Successfully pinged Oracle database: ", maskDsn(e.dsn)) + level.Debug(e.logger).Log("msg", "Successfully pinged Oracle database", "dsn", maskDsn(e.dsn)) e.up.Set(1) } @@ -281,23 +281,23 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) { go func() { defer wg.Done() - level.Debug(e.logger).Log("About to scrape metric: ") - level.Debug(e.logger).Log("- Metric MetricsDesc: ", metric.MetricsDesc) - level.Debug(e.logger).Log("- Metric Context: ", metric.Context) - level.Debug(e.logger).Log("- Metric MetricsType: ", metric.MetricsType) - level.Debug(e.logger).Log("- Metric MetricsBuckets: ", metric.MetricsBuckets, "(Ignored unless Histogram type)") - level.Debug(e.logger).Log("- Metric Labels: ", metric.Labels) - level.Debug(e.logger).Log("- Metric FieldToAppend: ", 
metric.FieldToAppend) - level.Debug(e.logger).Log("- Metric IgnoreZeroResult: ", metric.IgnoreZeroResult) - level.Debug(e.logger).Log("- Metric Request: ", metric.Request) + level.Debug(e.logger).Log("msg", "About to scrape metric") + level.Debug(e.logger).Log("metricsDesc", metric.MetricsDesc) + level.Debug(e.logger).Log("context", metric.Context) + level.Debug(e.logger).Log("metricsType", metric.MetricsType) + level.Debug(e.logger).Log("metricsBuckets", metric.MetricsBuckets) // , "(Ignored unless Histogram type)" + level.Debug(e.logger).Log("labels", metric.Labels) + level.Debug(e.logger).Log("fieldToAppend", metric.FieldToAppend) + level.Debug(e.logger).Log("ignoreZeroResult", metric.IgnoreZeroResult) + level.Debug(e.logger).Log("request", metric.Request) if len(metric.Request) == 0 { - level.Error(e.logger).Log("Error scraping for ", metric.MetricsDesc, ". Did you forget to define request in your toml file?") + level.Error(e.logger).Log("msg", "Error scraping. Did you forget to define request in your toml file?", "metricsDesc", metric.MetricsDesc) return } if len(metric.MetricsDesc) == 0 { - level.Error(e.logger).Log("Error scraping for query", metric.Request, ". Did you forget to define metricsdesc in your toml file?") + level.Error(e.logger).Log("msg", "Error scraping for query. Did you forget to define metricsdesc in your toml file?", "request", metric.Request) return } @@ -305,7 +305,7 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) { if metricType == "histogram" { _, ok := metric.MetricsBuckets[column] if !ok { - level.Error(e.logger).Log("Unable to find MetricsBuckets configuration key for metric. 
(metric=" + column + ")") + level.Error(e.logger).Log("msg", "Unable to find MetricsBuckets configuration key for metric", "metric", column) return } } @@ -313,10 +313,10 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) { scrapeStart := time.Now() if err = ScrapeMetric(e.db, ch, metric, e.logger); err != nil { - level.Error(e.logger).Log("Error scraping for", metric.Context, "_", metric.MetricsDesc, time.Since(scrapeStart), ":", err) + level.Error(e.logger).Log("msg", "Error scraping for", "context", metric.Context, "metricsDesc", metric.MetricsDesc, "since", time.Since(scrapeStart), "err", err) e.scrapeErrors.WithLabelValues(metric.Context).Inc() } else { - level.Debug(e.logger).Log("Successfully scraped metric: ", metric.Context, metric.MetricsDesc, time.Since(scrapeStart)) + level.Debug(e.logger).Log("msg", "Successfully scraped metric", "context", metric.Context, "metricsDesc", metric.MetricsDesc, "since", time.Since(scrapeStart)) } }() } @@ -343,7 +343,7 @@ func GetMetricType(metricType string, metricsType map[string]string) prometheus. 
// interface method to call ScrapeGenericValues using Metric struct values func ScrapeMetric(db *sql.DB, ch chan<- prometheus.Metric, metricDefinition Metric, logger log.Logger) error { - level.Debug(logger).Log("Calling function ScrapeGenericValues()") + level.Debug(logger).Log("msg", "Calling function ScrapeGenericValues()") return ScrapeGenericValues(db, ch, metricDefinition.Context, metricDefinition.Labels, metricDefinition.MetricsDesc, metricDefinition.MetricsType, metricDefinition.MetricsBuckets, metricDefinition.FieldToAppend, metricDefinition.IgnoreZeroResult, @@ -365,11 +365,11 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string value, err := strconv.ParseFloat(strings.TrimSpace(row[metric]), 64) // If not a float, skip current metric if err != nil { - level.Error(logger).Log("Unable to convert current value to float (metric=" + metric + - ",metricHelp=" + metricHelp + ",value=<" + row[metric] + ">)") + level.Error(logger).Log("msg", "Unable to convert current value to float", "metric", metric, + "metricHelp", metricHelp, "value", row[metric]) continue } - level.Debug(logger).Log("Query result looks like: ", value) + level.Debug(logger).Log("msg", "Query result looks like", "value", value) // If metric do not use a field content in metric's name if strings.Compare(fieldToAppend, "") == 0 { desc := prometheus.NewDesc( @@ -380,22 +380,22 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string if metricsType[strings.ToLower(metric)] == "histogram" { count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64) if err != nil { - level.Error(logger).Log("Unable to convert count value to int (metric=" + metric + - ",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)") + level.Error(logger).Log("msg", "Unable to convert count value to int", "metric", metric, + "metricHelp", metricHelp, "value", row["count"]) continue } buckets := make(map[float64]uint64) for field, le := range 
metricsBuckets[metric] { lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64) if err != nil { - level.Error(logger).Log("Unable to convert bucket limit value to float (metric=" + metric + - ",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)") + level.Error(logger).Log("msg", "Unable to convert bucket limit value to float", "metric", metric, + "metricHelp", metricHelp, "bucketlimit", le) continue } counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64) if err != nil { - level.Error(logger).Log("Unable to convert ", field, " value to int (metric="+metric+ - ",metricHelp="+metricHelp+",value=<"+row[field]+">)") + level.Error(logger).Log("msg", "Unable to convert value to int", "field", field, "metric", metric, + "metricHelp", metricHelp, "value", row[field]) continue } buckets[lelimit] = counter @@ -414,22 +414,22 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string if metricsType[strings.ToLower(metric)] == "histogram" { count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64) if err != nil { - level.Error(logger).Log("Unable to convert count value to int (metric=" + metric + - ",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)") + level.Error(logger).Log("msg", "Unable to convert count value to int", "metric", metric, + "metricHelp", metricHelp, "value", row["count"]) continue } buckets := make(map[float64]uint64) for field, le := range metricsBuckets[metric] { lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64) if err != nil { - level.Error(logger).Log("Unable to convert bucket limit value to float (metric=" + metric + - ",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)") + level.Error(logger).Log("msg", "Unable to convert bucket limit value to float", "metric", metric, + "metricHelp", metricHelp, "bucketlimit", le) continue } counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64) if err != nil { - level.Error(logger).Log("Unable to 
convert ", field, " value to int (metric="+metric+ - ",metricHelp="+metricHelp+",value=<"+row[field]+">)") + level.Error(logger).Log("msg", "Unable to convert value to int", "field", field, "metric", metric, + "metricHelp", metricHelp, "value", row[field]) continue } buckets[lelimit] = counter @@ -443,9 +443,9 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string } return nil } - level.Debug(logger).Log("Calling function GeneratePrometheusMetrics()") + level.Debug(logger).Log("msg", "Calling function GeneratePrometheusMetrics()") err := GeneratePrometheusMetrics(db, genericParser, request, logger) - level.Debug(logger).Log("ScrapeGenericValues() - metricsCount: ", metricsCount) + level.Debug(logger).Log("msg", "ScrapeGenericValues()", "metricsCount", metricsCount) if err != nil { return err } @@ -462,7 +462,7 @@ func GeneratePrometheusMetrics(db *sql.DB, parse func(row map[string]string) err // Add a timeout timeout, err := strconv.Atoi(*queryTimeout) if err != nil { - level.Error(logger).Log("error while converting timeout option value: ", err) + level.Error(logger).Log("msg", "error while converting timeout option", "err", err) panic(err) } ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second) @@ -538,15 +538,15 @@ func checkIfMetricsChanged(logger log.Logger) bool { if len(_customMetrics) == 0 { continue } - level.Debug(logger).Log("Checking modifications in following metrics definition file:", _customMetrics) + level.Debug(logger).Log("msg", "Checking modifications in following metrics definition", "file", _customMetrics) h := sha256.New() if err := hashFile(h, _customMetrics); err != nil { - level.Error(logger).Log("Unable to get file hash", err) + level.Error(logger).Log("msg", "Unable to get file hash", "err", err) return false } // If any of files has been changed reload metrics if !bytes.Equal(hashMap[i], h.Sum(nil)) { - level.Info(logger).Log(_customMetrics, "has been changed. 
Reloading metrics...") + level.Info(logger).Log("msg", "Metrics file has been changed. Reloading...", "file", _customMetrics) hashMap[i] = h.Sum(nil) return true } @@ -560,25 +560,25 @@ func reloadMetrics(logger log.Logger) { // Load default metrics if _, err := toml.DecodeFile(*defaultFileMetrics, &metricsToScrap); err != nil { - level.Error(logger).Log(err) + level.Error(logger).Log("msg", err) panic(errors.New("Error while loading " + *defaultFileMetrics)) } else { - level.Info(logger).Log("Successfully loaded default metrics from: " + *defaultFileMetrics) + level.Info(logger).Log("msg", "Successfully loaded default metrics", "file", *defaultFileMetrics) } // If custom metrics, load it if strings.Compare(*customMetrics, "") != 0 { for _, _customMetrics := range strings.Split(*customMetrics, ",") { if _, err := toml.DecodeFile(_customMetrics, &additionalMetrics); err != nil { - level.Error(logger).Log(err) + level.Error(logger).Log("msg", err) panic(errors.New("Error while loading " + _customMetrics)) } else { - level.Info(logger).Log("Successfully loaded custom metrics from: " + _customMetrics) + level.Info(logger).Log("msg", "Successfully loaded custom metrics", "file", _customMetrics) } metricsToScrap.Metric = append(metricsToScrap.Metric, additionalMetrics.Metric...) } } else { - level.Info(logger).Log("No custom metrics defined.") + level.Info(logger).Log("msg", "No custom metrics defined") } } @@ -593,7 +593,7 @@ func main() { kingpin.Parse() logger := promlog.New(promlogConfig) - level.Info(logger).Log("Starting oracledb_exporter " + Version) + level.Info(logger).Log("msg", "Starting oracledb_exporter", "version", Version) dsn := os.Getenv("DATA_SOURCE_NAME") // Load default and custom metrics