Skip to content

Commit 4d7577b

Browse files
arvenil authored and SuperQ committed
PMM-2612: Scrape MySQL database only once and fix scrape counter. (#292)
Scrape MySQL database only once and fix scrape counter. Signed-off-by: Kamil Dziedzic <[email protected]>
1 parent 0193b58 commit 4d7577b

File tree

3 files changed

+68
-71
lines changed

3 files changed

+68
-71
lines changed

collector/exporter.go

Lines changed: 58 additions & 65 deletions
Original file line numberDiff line numberDiff line change
@@ -52,16 +52,13 @@ var (
5252

5353
// Exporter collects MySQL metrics. It implements prometheus.Collector.
5454
type Exporter struct {
55-
dsn string
56-
scrapers []Scraper
57-
error prometheus.Gauge
58-
totalScrapes prometheus.Counter
59-
scrapeErrors *prometheus.CounterVec
60-
mysqldUp prometheus.Gauge
55+
dsn string
56+
scrapers []Scraper
57+
metrics Metrics
6158
}
6259

6360
// New returns a new MySQL exporter for the provided DSN.
64-
func New(dsn string, scrapers []Scraper) *Exporter {
61+
func New(dsn string, metrics Metrics, scrapers []Scraper) *Exporter {
6562
// Setup extra params for the DSN, default to having a lock timeout.
6663
dsnParams := []string{fmt.Sprintf(timeoutParam, *exporterLockTimeout)}
6764

@@ -79,79 +76,37 @@ func New(dsn string, scrapers []Scraper) *Exporter {
7976
return &Exporter{
8077
dsn: dsn,
8178
scrapers: scrapers,
82-
totalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
83-
Namespace: namespace,
84-
Subsystem: exporter,
85-
Name: "scrapes_total",
86-
Help: "Total number of times MySQL was scraped for metrics.",
87-
}),
88-
scrapeErrors: prometheus.NewCounterVec(prometheus.CounterOpts{
89-
Namespace: namespace,
90-
Subsystem: exporter,
91-
Name: "scrape_errors_total",
92-
Help: "Total number of times an error occurred scraping a MySQL.",
93-
}, []string{"collector"}),
94-
error: prometheus.NewGauge(prometheus.GaugeOpts{
95-
Namespace: namespace,
96-
Subsystem: exporter,
97-
Name: "last_scrape_error",
98-
Help: "Whether the last scrape of metrics from MySQL resulted in an error (1 for error, 0 for success).",
99-
}),
100-
mysqldUp: prometheus.NewGauge(prometheus.GaugeOpts{
101-
Namespace: namespace,
102-
Name: "up",
103-
Help: "Whether the MySQL server is up.",
104-
}),
79+
metrics: metrics,
10580
}
10681
}
10782

10883
// Describe implements prometheus.Collector.
10984
func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
110-
// We cannot know in advance what metrics the exporter will generate
111-
// from MySQL. So we use the poor man's describe method: Run a collect
112-
// and send the descriptors of all the collected metrics. The problem
113-
// here is that we need to connect to the MySQL DB. If it is currently
114-
// unavailable, the descriptors will be incomplete. Since this is a
115-
// stand-alone exporter and not used as a library within other code
116-
// implementing additional metrics, the worst that can happen is that we
117-
// don't detect inconsistent metrics created by this exporter
118-
// itself. Also, a change in the monitored MySQL instance may change the
119-
// exported metrics during the runtime of the exporter.
120-
121-
metricCh := make(chan prometheus.Metric)
122-
doneCh := make(chan struct{})
123-
124-
go func() {
125-
for m := range metricCh {
126-
ch <- m.Desc()
127-
}
128-
close(doneCh)
129-
}()
130-
131-
e.Collect(metricCh)
132-
close(metricCh)
133-
<-doneCh
85+
ch <- e.metrics.TotalScrapes.Desc()
86+
ch <- e.metrics.Error.Desc()
87+
e.metrics.ScrapeErrors.Describe(ch)
88+
ch <- e.metrics.MySQLUp.Desc()
13489
}
13590

13691
// Collect implements prometheus.Collector.
13792
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
13893
e.scrape(ch)
13994

140-
ch <- e.totalScrapes
141-
ch <- e.error
142-
e.scrapeErrors.Collect(ch)
143-
ch <- e.mysqldUp
95+
ch <- e.metrics.TotalScrapes
96+
ch <- e.metrics.Error
97+
e.metrics.ScrapeErrors.Collect(ch)
98+
ch <- e.metrics.MySQLUp
14499
}
145100

146101
func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
147-
e.totalScrapes.Inc()
102+
e.metrics.TotalScrapes.Inc()
148103
var err error
149104

150105
scrapeTime := time.Now()
151106
db, err := sql.Open("mysql", e.dsn)
152107
if err != nil {
153108
log.Errorln("Error opening connection to database:", err)
154-
e.error.Set(1)
109+
e.metrics.Error.Set(1)
155110
return
156111
}
157112
defer db.Close()
@@ -165,13 +120,13 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
165120
isUpRows, err := db.Query(upQuery)
166121
if err != nil {
167122
log.Errorln("Error pinging mysqld:", err)
168-
e.mysqldUp.Set(0)
169-
e.error.Set(1)
123+
e.metrics.MySQLUp.Set(0)
124+
e.metrics.Error.Set(1)
170125
return
171126
}
172127
isUpRows.Close()
173128

174-
e.mysqldUp.Set(1)
129+
e.metrics.MySQLUp.Set(1)
175130

176131
ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(scrapeTime).Seconds(), "connection")
177132

@@ -185,10 +140,48 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) {
185140
scrapeTime := time.Now()
186141
if err := scraper.Scrape(db, ch); err != nil {
187142
log.Errorln("Error scraping for "+label+":", err)
188-
e.scrapeErrors.WithLabelValues(label).Inc()
189-
e.error.Set(1)
143+
e.metrics.ScrapeErrors.WithLabelValues(label).Inc()
144+
e.metrics.Error.Set(1)
190145
}
191146
ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(scrapeTime).Seconds(), label)
192147
}(scraper)
193148
}
194149
}
150+
151+
// Metrics represents exporter metrics which values can be carried between http requests.
152+
type Metrics struct {
153+
TotalScrapes prometheus.Counter
154+
ScrapeErrors *prometheus.CounterVec
155+
Error prometheus.Gauge
156+
MySQLUp prometheus.Gauge
157+
}
158+
159+
// NewMetrics creates new Metrics instance.
160+
func NewMetrics() Metrics {
161+
subsystem := exporter
162+
return Metrics{
163+
TotalScrapes: prometheus.NewCounter(prometheus.CounterOpts{
164+
Namespace: namespace,
165+
Subsystem: subsystem,
166+
Name: "scrapes_total",
167+
Help: "Total number of times MySQL was scraped for metrics.",
168+
}),
169+
ScrapeErrors: prometheus.NewCounterVec(prometheus.CounterOpts{
170+
Namespace: namespace,
171+
Subsystem: subsystem,
172+
Name: "scrape_errors_total",
173+
Help: "Total number of times an error occurred scraping a MySQL.",
174+
}, []string{"collector"}),
175+
Error: prometheus.NewGauge(prometheus.GaugeOpts{
176+
Namespace: namespace,
177+
Subsystem: subsystem,
178+
Name: "last_scrape_error",
179+
Help: "Whether the last scrape of metrics from MySQL resulted in an error (1 for error, 0 for success).",
180+
}),
181+
MySQLUp: prometheus.NewGauge(prometheus.GaugeOpts{
182+
Namespace: namespace,
183+
Name: "up",
184+
Help: "Whether the MySQL server is up.",
185+
}),
186+
}
187+
}

collector/exporter_test.go

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,9 +15,12 @@ func TestExporter(t *testing.T) {
1515
t.Skip("-short is passed, skipping test")
1616
}
1717

18-
exporter := New(dsn, []Scraper{
19-
ScrapeGlobalStatus{},
20-
})
18+
exporter := New(
19+
dsn,
20+
NewMetrics(),
21+
[]Scraper{
22+
ScrapeGlobalStatus{},
23+
})
2124

2225
convey.Convey("Metrics describing", t, func() {
2326
ch := make(chan *prometheus.Desc)

mysqld_exporter.go

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ func init() {
9494
prometheus.MustRegister(version.NewCollector("mysqld_exporter"))
9595
}
9696

97-
func newHandler(scrapers []collector.Scraper) http.HandlerFunc {
97+
func newHandler(metrics collector.Metrics, scrapers []collector.Scraper) http.HandlerFunc {
9898
return func(w http.ResponseWriter, r *http.Request) {
9999
filteredScrapers := scrapers
100100
params := r.URL.Query()["collect[]"]
@@ -116,7 +116,7 @@ func newHandler(scrapers []collector.Scraper) http.HandlerFunc {
116116
}
117117

118118
registry := prometheus.NewRegistry()
119-
registry.MustRegister(collector.New(dsn, filteredScrapers))
119+
registry.MustRegister(collector.New(dsn, metrics, filteredScrapers))
120120

121121
gatherers := prometheus.Gatherers{
122122
prometheus.DefaultGatherer,
@@ -182,7 +182,8 @@ func main() {
182182
enabledScrapers = append(enabledScrapers, scraper)
183183
}
184184
}
185-
http.HandleFunc(*metricPath, prometheus.InstrumentHandlerFunc("metrics", newHandler(enabledScrapers)))
185+
handlerFunc := newHandler(collector.NewMetrics(), enabledScrapers)
186+
http.HandleFunc(*metricPath, prometheus.InstrumentHandlerFunc("metrics", handlerFunc))
186187
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
187188
w.Write(landingPage)
188189
})

0 commit comments

Comments (0)