From de35be477ea4931f7f3a2de1f15ecffd96c1da01 Mon Sep 17 00:00:00 2001
From: Tobias Gesellchen
Date: Sat, 21 Oct 2023 11:18:00 +0200
Subject: [PATCH] scrape

---
 couchdb-exporter.go      |  2 ++
 couchdb-exporter_test.go | 24 ++++++++++++++++--------
 lib/collector.go         | 22 ++++++++++++++++++++++
 3 files changed, 40 insertions(+), 8 deletions(-)

diff --git a/couchdb-exporter.go b/couchdb-exporter.go
index f495d93d..7213f567 100644
--- a/couchdb-exporter.go
+++ b/couchdb-exporter.go
@@ -214,12 +214,14 @@ func main() {
 		databases = strings.Split(exporterConfig.databases, ",")
 	}
 
+	scrapeInterval, _ := time.ParseDuration("0s")
 	exporter := lib.NewExporter(
 		exporterConfig.couchdbURI,
 		lib.BasicAuth{
 			Username: exporterConfig.couchdbUsername,
 			Password: exporterConfig.couchdbPassword},
 		lib.CollectorConfig{
+			ScrapeInterval:       scrapeInterval,
 			Databases:            databases,
 			CollectViews:         exporterConfig.databaseViews,
 			CollectSchedulerJobs: exporterConfig.schedulerJobs,
diff --git a/couchdb-exporter_test.go b/couchdb-exporter_test.go
index f89e7c1d..7cf8999a 100644
--- a/couchdb-exporter_test.go
+++ b/couchdb-exporter_test.go
@@ -124,8 +124,10 @@ func performCouchdbStatsTest(t *testing.T, couchdbVersion string, expectedMetric
 	basicAuth := lib.BasicAuth{Username: "username", Password: "password"}
 	handler := http.HandlerFunc(BasicAuthHandler(basicAuth, couchdbResponse(t, couchdbVersion)))
 	server := httptest.NewServer(handler)
+	scrapeInterval, _ := time.ParseDuration("0s")
 
 	e := lib.NewExporter(server.URL, basicAuth, lib.CollectorConfig{
+		ScrapeInterval:       scrapeInterval,
 		Databases:            []string{"example", "another-example"},
 		CollectViews:         true,
 		CollectSchedulerJobs: true,
@@ -206,10 +208,12 @@ func TestCouchdbStatsV1Integration(t *testing.T) {
 		}
 	}
 
+	scrapeInterval, _ := time.ParseDuration("0s")
 	t.Run("node_up", func(t *testing.T) {
 		e := lib.NewExporter(dbUrl, basicAuth, lib.CollectorConfig{
-			Databases:    []string{},
-			CollectViews: true,
+			ScrapeInterval: scrapeInterval,
+			Databases:      []string{},
+			CollectViews:   true,
 		}, true)
 
 		ch := make(chan prometheus.Metric)
@@ -232,8 +236,9 @@ func TestCouchdbStatsV1Integration(t *testing.T) {
 
 	t.Run("_all_dbs", func(t *testing.T) {
 		e := lib.NewExporter(dbUrl, basicAuth, lib.CollectorConfig{
-			Databases:    []string{"_all_dbs"},
-			CollectViews: true,
+			ScrapeInterval: scrapeInterval,
+			Databases:      []string{"_all_dbs"},
+			CollectViews:   true,
 		}, true)
 
 		ch := make(chan prometheus.Metric)
@@ -316,10 +321,12 @@ func TestCouchdbStatsV2Integration(t *testing.T) {
 		}
 	}
 
+	scrapeInterval, _ := time.ParseDuration("0s")
 	t.Run("node_up", func(t *testing.T) {
 		e := lib.NewExporter(dbUrl, basicAuth, lib.CollectorConfig{
-			Databases:    []string{},
-			CollectViews: true,
+			ScrapeInterval: scrapeInterval,
+			Databases:      []string{},
+			CollectViews:   true,
 		}, true)
 
 		ch := make(chan prometheus.Metric)
@@ -342,8 +349,9 @@ func TestCouchdbStatsV2Integration(t *testing.T) {
 
 	t.Run("_all_dbs", func(t *testing.T) {
 		e := lib.NewExporter(dbUrl, basicAuth, lib.CollectorConfig{
-			Databases:    []string{"_all_dbs"},
-			CollectViews: true,
+			ScrapeInterval: scrapeInterval,
+			Databases:      []string{"_all_dbs"},
+			CollectViews:   true,
 		}, true)
 
 		ch := make(chan prometheus.Metric)
diff --git a/lib/collector.go b/lib/collector.go
index 2e64366d..60ad626b 100644
--- a/lib/collector.go
+++ b/lib/collector.go
@@ -2,6 +2,7 @@ package lib
 
 import (
 	"fmt"
+	"time"
 
 	"github.com/prometheus/client_golang/prometheus"
 	"k8s.io/klog/v2"
@@ -71,6 +72,7 @@ var (
 )
 
 type CollectorConfig struct {
+	ScrapeInterval    time.Duration
 	Databases         []string
 	ObservedDatabases []string
 	CollectViews      bool
@@ -327,6 +329,26 @@ func (e *Exporter) collect(ch chan<- prometheus.Metric) error {
 			return err
 		}
 	}
+	return nil
+}
+
+func (e *Exporter) collect(ch chan<- prometheus.Metric) error {
+	sendStatus := func() {
+		ch <- e.up
+	}
+	defer sendStatus()
+
+	if e.collectorConfig.ScrapeInterval.Seconds() == 0 {
+		// sync
+		// old behaviour: scrape the CouchDB when the exporter is being scraped by Prometheus
+		err := e.scrape()
+		if err != nil {
+			return err
+		}
+	} else {
+		// async, continuously
+		// new behaviour: scrape the CouchDB at an interval, deliver most recent metrics when the exporter is being scraped by Prometheus
+	}
 
 	e.databasesTotal.Collect(ch)
 	e.nodeUp.Collect(ch)
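
The async branch above is deliberately left empty: this patch only introduces the ScrapeInterval plumbing and the sync/async dispatch in collect. A minimal sketch of how the interval-driven scraping could be wired up, assuming the scraping body of the old collect has been extracted into the scrape() method that the new collect already calls, and that scrape() is safe to run from a background goroutine; startScrapeLoop and its stop channel are hypothetical names, not part of this patch:

	// startScrapeLoop scrapes CouchDB in the background at the configured
	// interval, so collect can serve the most recent metrics instead of
	// querying CouchDB on every Prometheus request. Call it once at startup,
	// and only when ScrapeInterval > 0: time.NewTicker panics on
	// non-positive durations.
	func (e *Exporter) startScrapeLoop(stop <-chan struct{}) {
		ticker := time.NewTicker(e.collectorConfig.ScrapeInterval)
		go func() {
			defer ticker.Stop()
			for {
				select {
				case <-ticker.C:
					if err := e.scrape(); err != nil {
						klog.Error(err)
					}
				case <-stop:
					return
				}
			}
		}()
	}

Both time and klog are already imported in lib/collector.go by this patch, so a sketch along these lines would need no further imports there.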