Skip to content

Commit

Permalink
Improve logging after the switch to github.com/go-kit/kit/log (#278)
Browse files Browse the repository at this point in the history
Signed-off-by: Nikolay Pelov <[email protected]>
  • Loading branch information
pelov authored Feb 7, 2023
1 parent 44ce18c commit 4222bbf
Show file tree
Hide file tree
Showing 2 changed files with 53 additions and 53 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ OS_TYPE ?= $(shell uname -s | tr '[:upper:]' '[:lower:]')
ARCH_TYPE ?= $(subst x86_64,amd64,$(patsubst i%86,386,$(ARCH)))
GOOS ?= $(shell go env GOOS)
GOARCH ?= $(shell go env GOARCH)
VERSION ?= 0.4.1
VERSION ?= 0.4.2
MAJOR_VERSION ?= 21
MINOR_VERSION ?= 8
ORACLE_VERSION ?= $(MAJOR_VERSION).$(MINOR_VERSION)
Expand Down
104 changes: 52 additions & 52 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ func getEnv(key, fallback string) string {
func atoi(stringValue string, logger log.Logger) int {
intValue, err := strconv.Atoi(stringValue)
if err != nil {
level.Error(logger).Log("error while converting to int:", err)
level.Error(logger).Log("msg", "error while converting to int", "err", err)
panic(err)
}
return intValue
Expand All @@ -115,17 +115,17 @@ func maskDsn(dsn string) string {
}

func connect(dsn string, logger log.Logger) *sql.DB {
level.Debug(logger).Log("Launching connection: ", maskDsn(dsn))
level.Debug(logger).Log("msg", "Launching connection", "dsn", maskDsn(dsn))
db, err := sql.Open("oci8", dsn)
if err != nil {
level.Error(logger).Log("Error while connecting to", dsn)
level.Error(logger).Log("msg", "Error while connecting to", "dsn", dsn)
panic(err)
}
level.Debug(logger).Log("set max idle connections to ", *maxIdleConns)
level.Debug(logger).Log("msg", "set max idle connections to", "value", *maxIdleConns)
db.SetMaxIdleConns(*maxIdleConns)
level.Debug(logger).Log("set max open connections to ", *maxOpenConns)
level.Debug(logger).Log("msg", "set max open connections to", "value", *maxOpenConns)
db.SetMaxOpenConns(*maxOpenConns)
level.Debug(logger).Log("Successfully connected to: ", maskDsn(dsn))
level.Debug(logger).Log("msg", "Successfully connected to", "dsn", maskDsn(dsn))
return db
}

Expand All @@ -150,7 +150,7 @@ func NewExporter(dsn string, logger log.Logger) *Exporter {
Namespace: namespace,
Subsystem: exporter,
Name: "scrape_errors_total",
Help: "Total number of times an error occured scraping a Oracle database.",
Help: "Total number of times an error occurred scraping an Oracle database.",
}, []string{"collector"}),
error: prometheus.NewGauge(prometheus.GaugeOpts{
Namespace: namespace,
Expand Down Expand Up @@ -254,17 +254,17 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {

if err = e.db.Ping(); err != nil {
if strings.Contains(err.Error(), "sql: database is closed") {
level.Info(e.logger).Log("Reconnecting to DB")
level.Info(e.logger).Log("msg", "Reconnecting to DB")
e.db = connect(e.dsn, e.logger)
}
}
if err = e.db.Ping(); err != nil {
level.Error(e.logger).Log("Error pinging oracle:", err)
level.Error(e.logger).Log("msg", "Error pinging oracle", "err", err)
//e.db.Close()
e.up.Set(0)
return
} else {
level.Debug(e.logger).Log("Successfully pinged Oracle database: ", maskDsn(e.dsn))
level.Debug(e.logger).Log("msg", "Successfully pinged Oracle database", "dsn", maskDsn(e.dsn))
e.up.Set(1)
}

Expand All @@ -281,42 +281,42 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
go func() {
defer wg.Done()

level.Debug(e.logger).Log("About to scrape metric: ")
level.Debug(e.logger).Log("- Metric MetricsDesc: ", metric.MetricsDesc)
level.Debug(e.logger).Log("- Metric Context: ", metric.Context)
level.Debug(e.logger).Log("- Metric MetricsType: ", metric.MetricsType)
level.Debug(e.logger).Log("- Metric MetricsBuckets: ", metric.MetricsBuckets, "(Ignored unless Histogram type)")
level.Debug(e.logger).Log("- Metric Labels: ", metric.Labels)
level.Debug(e.logger).Log("- Metric FieldToAppend: ", metric.FieldToAppend)
level.Debug(e.logger).Log("- Metric IgnoreZeroResult: ", metric.IgnoreZeroResult)
level.Debug(e.logger).Log("- Metric Request: ", metric.Request)
level.Debug(e.logger).Log("msg", "About to scrape metric")
level.Debug(e.logger).Log("metricsDesc", metric.MetricsDesc)
level.Debug(e.logger).Log("context", metric.Context)
level.Debug(e.logger).Log("metricsType", metric.MetricsType)
level.Debug(e.logger).Log("metricsBuckets", metric.MetricsBuckets) // , "(Ignored unless Histogram type)"
level.Debug(e.logger).Log("labels", metric.Labels)
level.Debug(e.logger).Log("fieldToAppend", metric.FieldToAppend)
level.Debug(e.logger).Log("ignoreZeroResult", metric.IgnoreZeroResult)
level.Debug(e.logger).Log("request", metric.Request)

if len(metric.Request) == 0 {
level.Error(e.logger).Log("Error scraping for ", metric.MetricsDesc, ". Did you forget to define request in your toml file?")
level.Error(e.logger).Log("msg", "Error scraping. Did you forget to define request in your toml file?", "metricsDesc", metric.MetricsDesc)
return
}

if len(metric.MetricsDesc) == 0 {
level.Error(e.logger).Log("Error scraping for query", metric.Request, ". Did you forget to define metricsdesc in your toml file?")
level.Error(e.logger).Log("msg", "Error scraping for query. Did you forget to define metricsdesc in your toml file?", "request", metric.Request)
return
}

for column, metricType := range metric.MetricsType {
if metricType == "histogram" {
_, ok := metric.MetricsBuckets[column]
if !ok {
level.Error(e.logger).Log("Unable to find MetricsBuckets configuration key for metric. (metric=" + column + ")")
level.Error(e.logger).Log("msg", "Unable to find MetricsBuckets configuration key for metric", "metric", column)
return
}
}
}

scrapeStart := time.Now()
if err = ScrapeMetric(e.db, ch, metric, e.logger); err != nil {
level.Error(e.logger).Log("Error scraping for", metric.Context, "_", metric.MetricsDesc, time.Since(scrapeStart), ":", err)
level.Error(e.logger).Log("msg", "Error scraping for", "context", metric.Context, "metricsDesc", metric.MetricsDesc, "since", time.Since(scrapeStart), "err", err)
e.scrapeErrors.WithLabelValues(metric.Context).Inc()
} else {
level.Debug(e.logger).Log("Successfully scraped metric: ", metric.Context, metric.MetricsDesc, time.Since(scrapeStart))
level.Debug(e.logger).Log("msg", "Successfully scraped metric", "context", metric.Context, "metricsDesc", metric.MetricsDesc, "since", time.Since(scrapeStart))
}
}()
}
Expand All @@ -343,7 +343,7 @@ func GetMetricType(metricType string, metricsType map[string]string) prometheus.

// interface method to call ScrapeGenericValues using Metric struct values
func ScrapeMetric(db *sql.DB, ch chan<- prometheus.Metric, metricDefinition Metric, logger log.Logger) error {
level.Debug(logger).Log("Calling function ScrapeGenericValues()")
level.Debug(logger).Log("msg", "Calling function ScrapeGenericValues()")
return ScrapeGenericValues(db, ch, metricDefinition.Context, metricDefinition.Labels,
metricDefinition.MetricsDesc, metricDefinition.MetricsType, metricDefinition.MetricsBuckets,
metricDefinition.FieldToAppend, metricDefinition.IgnoreZeroResult,
Expand All @@ -365,11 +365,11 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
value, err := strconv.ParseFloat(strings.TrimSpace(row[metric]), 64)
// If not a float, skip current metric
if err != nil {
level.Error(logger).Log("Unable to convert current value to float (metric=" + metric +
",metricHelp=" + metricHelp + ",value=<" + row[metric] + ">)")
level.Error(logger).Log("msg", "Unable to convert current value to float", "metric", metric,
"metricHelp", metricHelp, "value", row[metric])
continue
}
level.Debug(logger).Log("Query result looks like: ", value)
level.Debug(logger).Log("msg", "Query result looks like", "value", value)
// If metric do not use a field content in metric's name
if strings.Compare(fieldToAppend, "") == 0 {
desc := prometheus.NewDesc(
Expand All @@ -380,22 +380,22 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
if metricsType[strings.ToLower(metric)] == "histogram" {
count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
if err != nil {
level.Error(logger).Log("Unable to convert count value to int (metric=" + metric +
",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)")
level.Error(logger).Log("msg", "Unable to convert count value to int", "metric", metric,
"metricHelp", metricHelp, "value", row["count"])
continue
}
buckets := make(map[float64]uint64)
for field, le := range metricsBuckets[metric] {
lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
if err != nil {
level.Error(logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)")
level.Error(logger).Log("msg", "Unable to convert bucket limit value to float", "metric", metric,
"metricHelp", metricHelp, "bucketlimit", le)
continue
}
counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
if err != nil {
level.Error(logger).Log("Unable to convert ", field, " value to int (metric="+metric+
",metricHelp="+metricHelp+",value=<"+row[field]+">)")
level.Error(logger).Log("msg", "Unable to convert value to int", "field", field, "metric", metric,
"metricHelp", metricHelp, "value", row[field])
continue
}
buckets[lelimit] = counter
Expand All @@ -414,22 +414,22 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
if metricsType[strings.ToLower(metric)] == "histogram" {
count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
if err != nil {
level.Error(logger).Log("Unable to convert count value to int (metric=" + metric +
",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)")
level.Error(logger).Log("msg", "Unable to convert count value to int", "metric", metric,
"metricHelp", metricHelp, "value", row["count"])
continue
}
buckets := make(map[float64]uint64)
for field, le := range metricsBuckets[metric] {
lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
if err != nil {
level.Error(logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)")
level.Error(logger).Log("msg", "Unable to convert bucket limit value to float", "metric", metric,
"metricHelp", metricHelp, "bucketlimit", le)
continue
}
counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
if err != nil {
level.Error(logger).Log("Unable to convert ", field, " value to int (metric="+metric+
",metricHelp="+metricHelp+",value=<"+row[field]+">)")
level.Error(logger).Log("msg", "Unable to convert value to int", "field", field, "metric", metric,
"metricHelp", metricHelp, "value", row[field])
continue
}
buckets[lelimit] = counter
Expand All @@ -443,9 +443,9 @@ func ScrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric, context string
}
return nil
}
level.Debug(logger).Log("Calling function GeneratePrometheusMetrics()")
level.Debug(logger).Log("msg", "Calling function GeneratePrometheusMetrics()")
err := GeneratePrometheusMetrics(db, genericParser, request, logger)
level.Debug(logger).Log("ScrapeGenericValues() - metricsCount: ", metricsCount)
level.Debug(logger).Log("msg", "ScrapeGenericValues()", "metricsCount", metricsCount)
if err != nil {
return err
}
Expand All @@ -462,7 +462,7 @@ func GeneratePrometheusMetrics(db *sql.DB, parse func(row map[string]string) err
// Add a timeout
timeout, err := strconv.Atoi(*queryTimeout)
if err != nil {
level.Error(logger).Log("error while converting timeout option value: ", err)
level.Error(logger).Log("msg", "error while converting timeout option", "err", err)
panic(err)
}
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(timeout)*time.Second)
Expand Down Expand Up @@ -538,15 +538,15 @@ func checkIfMetricsChanged(logger log.Logger) bool {
if len(_customMetrics) == 0 {
continue
}
level.Debug(logger).Log("Checking modifications in following metrics definition file:", _customMetrics)
level.Debug(logger).Log("msg", "Checking modifications in following metrics definition", "file", _customMetrics)
h := sha256.New()
if err := hashFile(h, _customMetrics); err != nil {
level.Error(logger).Log("Unable to get file hash", err)
level.Error(logger).Log("msg", "Unable to get file hash", "err", err)
return false
}
// If any of files has been changed reload metrics
if !bytes.Equal(hashMap[i], h.Sum(nil)) {
level.Info(logger).Log(_customMetrics, "has been changed. Reloading metrics...")
level.Info(logger).Log("msg", "Metrics file has been changed. Reloading...", "file", _customMetrics)
hashMap[i] = h.Sum(nil)
return true
}
Expand All @@ -560,25 +560,25 @@ func reloadMetrics(logger log.Logger) {

// Load default metrics
if _, err := toml.DecodeFile(*defaultFileMetrics, &metricsToScrap); err != nil {
level.Error(logger).Log(err)
level.Error(logger).Log("msg", "Error while loading default metrics file", "err", err)
panic(errors.New("Error while loading " + *defaultFileMetrics))
} else {
level.Info(logger).Log("Successfully loaded default metrics from: " + *defaultFileMetrics)
level.Info(logger).Log("msg", "Successfully loaded default metrics", "file", *defaultFileMetrics)
}

// If custom metrics, load it
if strings.Compare(*customMetrics, "") != 0 {
for _, _customMetrics := range strings.Split(*customMetrics, ",") {
if _, err := toml.DecodeFile(_customMetrics, &additionalMetrics); err != nil {
level.Error(logger).Log(err)
level.Error(logger).Log("msg", "Error while loading custom metrics file", "err", err)
panic(errors.New("Error while loading " + _customMetrics))
} else {
level.Info(logger).Log("Successfully loaded custom metrics from: " + _customMetrics)
level.Info(logger).Log("msg", "Successfully loaded custom metrics", "file", _customMetrics)
}
metricsToScrap.Metric = append(metricsToScrap.Metric, additionalMetrics.Metric...)
}
} else {
level.Info(logger).Log("No custom metrics defined.")
level.Info(logger).Log("msg", "No custom metrics defined")
}
}

Expand All @@ -593,7 +593,7 @@ func main() {
kingpin.Parse()
logger := promlog.New(promlogConfig)

level.Info(logger).Log("Starting oracledb_exporter " + Version)
level.Info(logger).Log("msg", "Starting oracledb_exporter", "version", Version)
dsn := os.Getenv("DATA_SOURCE_NAME")

// Load default and custom metrics
Expand Down

0 comments on commit 4222bbf

Please sign in to comment.