Skip to content
This repository has been archived by the owner on Feb 13, 2025. It is now read-only.

Commit

Permalink
migrate deprecated promlog to promslog
Browse files Browse the repository at this point in the history
  • Loading branch information
Marina Frank committed Jan 7, 2025
1 parent 04e4ba9 commit 78d4104
Show file tree
Hide file tree
Showing 8 changed files with 83 additions and 75 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/pull-request.yml
Original file line number Diff line number Diff line change
Expand Up @@ -122,13 +122,13 @@ jobs:
mv dist/*.tar.gz application.tar.gz
- name: Upload build output
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4.5.0
with:
name: application
path: "./application.tar.gz"

- name: Upload test coverage
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4.5.0
with:
name: test-coverage
path: "./test-coverage.out"
Expand All @@ -151,7 +151,7 @@ jobs:
run: echo "version=${{ needs.build.outputs.version }}-rc.pr-${{ env.PULL_REQUEST_NUMBER }}-${{ github.run_number }}" >> $GITHUB_OUTPUT

- name: Download artifact
uses: actions/[email protected].7
uses: actions/[email protected].8
with:
name: application
path: "."
Expand Down
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,5 @@ dist/
*.pub
*.key
oci8.pc
*.skip
*.skip
test-coverage.out
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ GOFLAGS := -ldflags "$(LDFLAGS) -s -w"
BUILD_ARGS = --build-arg VERSION=$(VERSION)
LEGACY_TABLESPACE = --build-arg LEGACY_TABLESPACE=.legacy-tablespace
OUTDIR = ./dist
LINTER_VERSION ?= v1.55.2
LINTER_VERSION ?= v1.62.2
LINTER_IMAGE ?= docker.io/golangci/golangci-lint:$(LINTER_VERSION)

ifeq ($(shell command -v podman 2> /dev/null),)
Expand Down
106 changes: 58 additions & 48 deletions collector/collector.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"fmt"
"hash"
"io"
"log/slog"
"net/url"
"os"
"strconv"
Expand All @@ -17,8 +18,6 @@ import (
"time"

"github.com/BurntSushi/toml"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"sigs.k8s.io/yaml"
)
Expand All @@ -36,7 +35,7 @@ type Exporter struct {
scrapeResults []prometheus.Metric
up prometheus.Gauge
db *sql.DB
logger log.Logger
logger *slog.Logger
}

// Config is the configuration of the exporter
Expand Down Expand Up @@ -85,6 +84,14 @@ var (
exporterName = "exporter"
)

// getMapKeys collects the keys of m into a new slice. Iteration order is
// whatever Go's map iteration yields, i.e. unspecified; callers that need a
// stable order must sort the result themselves.
func getMapKeys(m map[string]string) []string {
	names := make([]string, len(m))
	i := 0
	for name := range m {
		names[i] = name
		i++
	}
	return names
}

func maskDsn(dsn string) string {
parts := strings.Split(dsn, "@")
if len(parts) > 1 {
Expand All @@ -95,7 +102,7 @@ func maskDsn(dsn string) string {
}

// NewExporter creates a new Exporter instance
func NewExporter(logger log.Logger, cfg *Config) (*Exporter, error) {
func NewExporter(logger *slog.Logger, cfg *Config) (*Exporter, error) {
e := &Exporter{
mu: &sync.Mutex{},
dsn: cfg.DSN,
Expand Down Expand Up @@ -254,21 +261,21 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {

if err = e.db.Ping(); err != nil {
if strings.Contains(err.Error(), "sql: database is closed") {
level.Info(e.logger).Log("Reconnecting to DB")
e.logger.Info("Reconnecting to DB")
err = e.connect()
if err != nil {
level.Error(e.logger).Log("error reconnecting to DB", err.Error())
e.logger.Error("failed reconnect Oracle DB", "err", err.Error())
}
}
}

if err = e.db.Ping(); err != nil {
level.Error(e.logger).Log("error pinging oracle:", err.Error())
e.logger.Error("failed ping Oracle DB", "err", err.Error())
e.up.Set(0)
return
}

level.Debug(e.logger).Log("Successfully pinged Oracle database: ", maskDsn(e.dsn))
e.logger.Debug("Succesful ping Oracle DB", "connstring", maskDsn(e.dsn))
e.up.Set(1)

if e.checkIfMetricsChanged() {
Expand All @@ -284,31 +291,28 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
f := func() {
defer wg.Done()

level.Debug(e.logger).Log("About to scrape metric: ")
level.Debug(e.logger).Log("- Metric MetricsDesc: ", fmt.Sprintf("%+v", metric.MetricsDesc))
level.Debug(e.logger).Log("- Metric Context: ", metric.Context)
level.Debug(e.logger).Log("- Metric MetricsType: ", fmt.Sprintf("%+v", metric.MetricsType))
level.Debug(e.logger).Log("- Metric MetricsBuckets: ", fmt.Sprintf("%+v", metric.MetricsBuckets), "(Ignored unless Histogram type)")
level.Debug(e.logger).Log("- Metric Labels: ", fmt.Sprintf("%+v", metric.Labels))
level.Debug(e.logger).Log("- Metric FieldToAppend: ", metric.FieldToAppend)
level.Debug(e.logger).Log("- Metric IgnoreZeroResult: ", fmt.Sprintf("%+v", metric.IgnoreZeroResult))
level.Debug(e.logger).Log("- Metric Request: ", metric.Request)
metricNames := strings.Join(getMapKeys(metric.MetricsDesc), ",")
e.logger.Debug("Scraping metrics",
"subsystem", metric.Context, "metricsArray", metricNames,
"metricstype", metric.MetricsType, "metricbuckets", metric.MetricsBuckets,
"labels", fmt.Sprintf("%+v", metric.Labels), "fieldToAppend", metric.FieldToAppend,
"ignorezeroresult", metric.IgnoreZeroResult, "query", metric.Request)

if len(metric.Request) == 0 {
level.Error(e.logger).Log("Error scraping for ", metric.MetricsDesc, ". Did you forget to define request in your metrics config file?")
e.logger.Error("metrics request is empty. Did you forget to define request in your metrics config file?", "subsystem", metric.Context, "metricsArray", metricNames)
return
}

if len(metric.MetricsDesc) == 0 {
level.Error(e.logger).Log("Error scraping for query", metric.Request, ". Did you forget to define metricsdesc in your metrics config file?")
e.logger.Error("Metric help is empty. Did you forget to define metricsdesc in your metrics config file?", "subsystem", metric.Context, "query", metric.Request)
return
}

for column, metricType := range metric.MetricsType {
if metricType == "histogram" {

Check failure on line 312 in collector/collector.go

View workflow job for this annotation

GitHub Actions / build

string `histogram` has 3 occurrences, make it a constant (goconst)
_, ok := metric.MetricsBuckets[column]
if !ok {
level.Error(e.logger).Log("Unable to find MetricsBuckets configuration key for metric. (metric=" + column + ")")
e.logger.Error("Unable to find MetricsBuckets configuration key for metric. (metric=" + column + ")")
return
}
}
Expand All @@ -321,10 +325,10 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
err = err1
}
errmutex.Unlock()
level.Error(e.logger).Log("scrapeMetricContext", metric.Context, "ScrapeDuration", time.Since(scrapeStart), "msg", err1.Error())
e.logger.Error("scrape metric failed", "subsystem", metric.Context, "metricsArray", metricNames, "duration", time.Since(scrapeStart), "err", err1.Error())
e.scrapeErrors.WithLabelValues(metric.Context).Inc()
} else {
level.Debug(e.logger).Log("successfully scraped metric: ", metric.Context, metric.MetricsDesc, time.Since(scrapeStart))
e.logger.Debug("successfully scraped metric", "subsystem", metric.Context, "metricsArray", metricNames, "duration", time.Since(scrapeStart))
}
}
go f()
Expand All @@ -335,20 +339,21 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
func (e *Exporter) connect() error {
_, err := url.Parse(e.dsn)
if err != nil {
level.Error(e.logger).Log("malformed DSN: ", maskDsn(e.dsn))
e.logger.Error("malformed DSN", "value", maskDsn(e.dsn))
return err
}
level.Debug(e.logger).Log("launching connection: ", maskDsn(e.dsn))
e.logger.Debug("launching connection: ", "connstring", maskDsn(e.dsn))
db, err := sql.Open("oracle", e.dsn)
if err != nil {
level.Error(e.logger).Log("error while connecting to", e.dsn)
e.logger.Error("failed to connect", "connstring", maskDsn(e.dsn))
return err
}
level.Debug(e.logger).Log("set max idle connections to ", e.config.MaxIdleConns)
e.logger.Debug("config", "DATABASE_MAXIDLECONNS", e.config.MaxIdleConns)
e.logger.Debug("config", "DATABASE_MAXOPENCONNS", e.config.MaxOpenConns)
e.logger.Debug("config", "QUERY_TIMEOUT", e.config.QueryTimeout)
db.SetMaxIdleConns(e.config.MaxIdleConns)
level.Debug(e.logger).Log("set max open connections to ", e.config.MaxOpenConns)
db.SetMaxOpenConns(e.config.MaxOpenConns)
level.Debug(e.logger).Log("successfully connected to: ", maskDsn(e.dsn))
e.logger.Debug("successfully connected", "connstring", maskDsn(e.dsn))
e.db = db
return nil
}
Expand All @@ -358,15 +363,15 @@ func (e *Exporter) checkIfMetricsChanged() bool {
if len(_customMetrics) == 0 {
continue
}
level.Debug(e.logger).Log("checking modifications in following metrics definition file:", _customMetrics)
e.logger.Debug("checking metrics definition file has changed", "file", _customMetrics)
h := sha256.New()
if err := hashFile(h, _customMetrics); err != nil {
level.Error(e.logger).Log("unable to get file hash", err.Error())
e.logger.Error("unable to get file hash", "file", _customMetrics, "err", err.Error())
return false
}
// If any of files has been changed reload metrics
if !bytes.Equal(hashMap[i], h.Sum(nil)) {
level.Info(e.logger).Log(_customMetrics, "has been changed. Reloading metrics...")
e.logger.Info("metrics definition has been changed. Reloading metrics...", "file", _customMetrics)
hashMap[i] = h.Sum(nil)
return true
}
Expand Down Expand Up @@ -406,12 +411,13 @@ func (e *Exporter) reloadMetrics() {
panic(err)
}
}
level.Info(e.logger).Log("event", "Successfully loaded custom metrics from "+_customMetrics)
level.Debug(e.logger).Log("custom metrics parsed content", fmt.Sprintf("%+v", additionalMetrics))
e.logger.Info("successfully loaded custom metrics from", "file", _customMetrics)
e.logger.Debug("custom metrics parsed content", "value", fmt.Sprintf("%+v", additionalMetrics))

e.metricsToScrape.Metric = append(e.metricsToScrape.Metric, additionalMetrics.Metric...)
}
} else {
level.Debug(e.logger).Log("No custom metrics defined.")
e.logger.Debug("No custom metrics defined.")
}
}

Expand All @@ -435,7 +441,7 @@ func loadTomlMetricsConfig(_customMetrics string, metrics *Metrics) error {

// ScrapeMetric is an interface method to call scrapeGenericValues using Metric struct values
func (e *Exporter) ScrapeMetric(db *sql.DB, ch chan<- prometheus.Metric, metricDefinition Metric) error {
level.Debug(e.logger).Log("calling function ScrapeGenericValues()")
e.logger.Debug("calling function ScrapeGenericValues()")
return e.scrapeGenericValues(db, ch, metricDefinition.Context, metricDefinition.Labels,
metricDefinition.MetricsDesc, metricDefinition.MetricsType, metricDefinition.MetricsBuckets,
metricDefinition.FieldToAppend, metricDefinition.IgnoreZeroResult,
Expand All @@ -457,10 +463,10 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
value, err := strconv.ParseFloat(strings.TrimSpace(row[metric]), 64)
// If not a float, skip current metric
if err != nil {
level.Error(e.logger).Log("msg", "Unable to convert current value to float", "metric", metric, "metricHelp", metricHelp, "value", row[metric])
e.logger.Error("convert to float", "metric", metric, "msg", fmt.Sprintf("Skipping due to error %s", err.Error()), "value", row[metric])
continue
}
level.Debug(e.logger).Log("Query result looks like: ", value)
e.logger.Debug("Query result looks like: ", "value", value)
// If metric do not use a field content in metric's name
if strings.Compare(fieldToAppend, "") == 0 {
desc := prometheus.NewDesc(
Expand All @@ -471,21 +477,21 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
if metricsType[strings.ToLower(metric)] == "histogram" {
count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
if err != nil {
level.Error(e.logger).Log("Unable to convert count value to int (metric=" + metric +
e.logger.Error("Unable to convert count value to int (metric=" + metric +
",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)")
continue
}
buckets := make(map[float64]uint64)
for field, le := range metricsBuckets[metric] {
lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
if err != nil {
level.Error(e.logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
e.logger.Error("Unable to convert bucket limit value to float (metric=" + metric +
",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)")
continue
}
counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
if err != nil {
level.Error(e.logger).Log("Unable to convert ", field, " value to int (metric="+metric+
e.logger.Error("Unable to convert ", field, " value to int (metric="+metric+
",metricHelp="+metricHelp+",value=<"+row[field]+">)")
continue
}
Expand All @@ -505,21 +511,21 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
if metricsType[strings.ToLower(metric)] == "histogram" {
count, err := strconv.ParseUint(strings.TrimSpace(row["count"]), 10, 64)
if err != nil {
level.Error(e.logger).Log("Unable to convert count value to int (metric=" + metric +
e.logger.Error("Unable to convert count value to int (metric=" + metric +
",metricHelp=" + metricHelp + ",value=<" + row["count"] + ">)")
continue
}
buckets := make(map[float64]uint64)
for field, le := range metricsBuckets[metric] {
lelimit, err := strconv.ParseFloat(strings.TrimSpace(le), 64)
if err != nil {
level.Error(e.logger).Log("Unable to convert bucket limit value to float (metric=" + metric +
e.logger.Error("Unable to convert bucket limit value to float (metric=" + metric +
",metricHelp=" + metricHelp + ",bucketlimit=<" + le + ">)")
continue
}
counter, err := strconv.ParseUint(strings.TrimSpace(row[field]), 10, 64)
if err != nil {
level.Error(e.logger).Log("Unable to convert ", field, " value to int (metric="+metric+
e.logger.Error("Unable to convert ", field, " value to int (metric="+metric+
",metricHelp="+metricHelp+",value=<"+row[field]+">)")
continue
}
Expand All @@ -534,14 +540,15 @@ func (e *Exporter) scrapeGenericValues(db *sql.DB, ch chan<- prometheus.Metric,
}
return nil
}
level.Debug(e.logger).Log("Calling function GeneratePrometheusMetrics()")
e.logger.Debug("Calling function GeneratePrometheusMetrics()")
err := e.generatePrometheusMetrics(db, genericParser, request)
level.Debug(e.logger).Log("ScrapeGenericValues() - metricsCount: ", metricsCount)
metricNames := strings.Join(getMapKeys(metricsDesc), ",")
e.logger.Debug("scrapeGenericValues", "subsystem", context, "metricsArray", metricNames, "metricsCount", metricsCount)
if err != nil {
return err
}
if !ignoreZeroResult && metricsCount == 0 {
return errors.New("No metrics found while parsing")
return errors.New("no metrics found while parsing")
}
return err
}
Expand All @@ -557,6 +564,9 @@ func (e *Exporter) generatePrometheusMetrics(db *sql.DB, parse func(row map[stri
return errors.New("oracle query timed out")
}

if ctx.Err() != nil {
e.logger.Error("ctxError", "err", ctx.Err())
}
if err != nil {
return err
}
Expand Down Expand Up @@ -622,9 +632,9 @@ func cleanName(s string) string {
}

func (e *Exporter) logError(s string) {

Check failure on line 634 in collector/collector.go

View workflow job for this annotation

GitHub Actions / build

func `(*Exporter).logError` is unused (unused)
_ = level.Error(e.logger).Log(s)
e.logger.Error(s)
}

func (e *Exporter) logDebug(s string) {

Check failure on line 638 in collector/collector.go

View workflow job for this annotation

GitHub Actions / build

func `(*Exporter).logDebug` is unused (unused)
_ = level.Debug(e.logger).Log(s)
e.logger.Debug(s)
}
7 changes: 3 additions & 4 deletions collector/collector_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,21 +7,20 @@ import (
"testing"

"github.com/go-kit/log"
"github.com/prometheus/common/promlog"
"github.com/prometheus/common/promslog"
_ "github.com/sijms/go-ora/v2"
"github.com/stretchr/testify/assert"
)

func TestMalformedDSNMasksUserPassword(t *testing.T) {
buf := bytes.Buffer{}
w := log.NewSyncWriter(&buf)
testLogger := log.NewLogfmtLogger(w)
e := &Exporter{
mu: &sync.Mutex{},
dsn: "\tuser:pass@sdfoijwef/sdfle",
logger: promlog.NewWithLogger(testLogger, &promlog.Config{}),
logger: promslog.New(&promslog.Config{Writer: w}),
}
err := e.connect()
assert.NotNil(t, err)
assert.Contains(t, buf.String(), "malformedDSN:=***@")
assert.Contains(t, buf.String(), "malformed DSN\" value=***@")
}
7 changes: 3 additions & 4 deletions collector/default_metrics.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (
"strings"

"github.com/BurntSushi/toml"
"github.com/go-kit/log/level"
)

// needs the const if imported, cannot os.ReadFile in this case
Expand Down Expand Up @@ -85,12 +84,12 @@ func (e *Exporter) DefaultMetrics() Metrics {
if err == nil {
return metricsToScrape
}
level.Error(e.logger).Log("defaultMetricsFile", e.config.DefaultMetricsFile, "msg", err)
level.Warn(e.logger).Log("msg", "proceeding to run with default metrics")
e.logger.Error("defaultMetricsFile", "file", e.config.DefaultMetricsFile, "msg", err.Error())
e.logger.Warn("proceeding to run with default metrics")
}

if _, err := toml.Decode(defaultMetricsConst, &metricsToScrape); err != nil {
level.Error(e.logger).Log("msg", err.Error())
e.logger.Error(err.Error())
panic(errors.New("Error while loading " + defaultMetricsConst))
}
return metricsToScrape
Expand Down
Loading

0 comments on commit 78d4104

Please sign in to comment.