Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add minimal MySQL version to Scraper interface #328

Merged
merged 2 commits into from
Oct 29, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions collector/binlog.go
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,11 @@ func (ScrapeBinlogSize) Help() string {
return "Collect the current size of all registered binlog files"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeBinlogSize) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeBinlogSize) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
var logBin uint8
Expand Down Expand Up @@ -113,3 +118,6 @@ func (ScrapeBinlogSize) Scrape(ctx context.Context, db *sql.DB, ch chan<- promet

return nil
}

// Compile-time check that ScrapeBinlogSize satisfies the Scraper interface.
var _ Scraper = ScrapeBinlogSize{}
8 changes: 8 additions & 0 deletions collector/engine_innodb.go
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,11 @@ func (ScrapeEngineInnodbStatus) Help() string {
return "Collect from SHOW ENGINE INNODB STATUS"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeEngineInnodbStatus) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeEngineInnodbStatus) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
rows, err := db.QueryContext(ctx, engineInnodbStatusQuery)
Expand Down Expand Up @@ -92,3 +97,6 @@ func (ScrapeEngineInnodbStatus) Scrape(ctx context.Context, db *sql.DB, ch chan<

return nil
}

// Compile-time check that ScrapeEngineInnodbStatus satisfies the Scraper interface.
var _ Scraper = ScrapeEngineInnodbStatus{}
8 changes: 8 additions & 0 deletions collector/engine_tokudb.go
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,11 @@ func (ScrapeEngineTokudbStatus) Help() string {
return "Collect from SHOW ENGINE TOKUDB STATUS"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeEngineTokudbStatus) Version() float64 {
	const minVersion = 5.6
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeEngineTokudbStatus) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
tokudbRows, err := db.QueryContext(ctx, engineTokudbStatusQuery)
Expand Down Expand Up @@ -87,3 +92,6 @@ func sanitizeTokudbMetric(metricName string) string {
}
return metricName
}

// Compile-time check that ScrapeEngineTokudbStatus satisfies the Scraper interface.
var _ Scraper = ScrapeEngineTokudbStatus{}
30 changes: 28 additions & 2 deletions collector/exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ import (
"context"
"database/sql"
"fmt"
"regexp"
"strconv"
"strings"
"sync"
"time"
Expand All @@ -33,14 +35,20 @@ const (
exporter = "exporter"
)

// SQL Queries.
// SQL queries and parameters.
const (
versionQuery = `SELECT @@version`

// System variable params formatting.
// See: https://github.com/go-sql-driver/mysql#system-variables
sessionSettingsParam = `log_slow_filter=%27tmp_table_on_disk,filesort_on_disk%27`
timeoutParam = `lock_wait_timeout=%d`
)

var (
versionRE = regexp.MustCompile(`^\d+\.\d+`)
)

// Tunable flags.
var (
exporterLockTimeout = kingpin.Flag(
Expand Down Expand Up @@ -145,9 +153,14 @@ func (e *Exporter) scrape(ctx context.Context, ch chan<- prometheus.Metric) {

ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(scrapeTime).Seconds(), "connection")

wg := &sync.WaitGroup{}
version := getMySQLVersion(db)
var wg sync.WaitGroup
defer wg.Wait()
for _, scraper := range e.scrapers {
if version < scraper.Version() {
continue
}

wg.Add(1)
go func(scraper Scraper) {
defer wg.Done()
Expand All @@ -163,6 +176,19 @@ func (e *Exporter) scrape(ctx context.Context, ch chan<- prometheus.Metric) {
}
}

// getMySQLVersion queries the server for @@version and parses the leading
// "major.minor" prefix into a float64 (e.g. "5.7.23-log" becomes 5.7).
// When the query fails or the prefix cannot be parsed, it returns 999 so
// that every version-gated scraper is considered available.
func getMySQLVersion(db *sql.DB) float64 {
	var versionStr string
	if err := db.QueryRow(versionQuery).Scan(&versionStr); err != nil {
		// Could not determine the version: use a value bigger than any real one.
		return 999
	}
	parsed, err := strconv.ParseFloat(versionRE.FindString(versionStr), 64)
	if err != nil || parsed == 0 {
		// Unparseable version string: same catch-all fallback as above.
		return 999
	}
	return parsed
}

// Metrics represents exporter metrics which values can be carried between http requests.
type Metrics struct {
TotalScrapes prometheus.Counter
Expand Down
15 changes: 15 additions & 0 deletions collector/exporter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ package collector

import (
"context"
"database/sql"
SuperQ marked this conversation as resolved.
Show resolved Hide resolved
"testing"

"github.com/prometheus/client_golang/prometheus"
Expand Down Expand Up @@ -63,3 +64,17 @@ func TestExporter(t *testing.T) {
}
})
}

// TestGetMySQLVersion checks that the version reported by a live server
// parses into a plausible numeric range. Skipped when -short is passed.
func TestGetMySQLVersion(t *testing.T) {
	if testing.Short() {
		t.Skip("-short is passed, skipping test")
	}

	convey.Convey("Version parsing", t, func() {
		db, openErr := sql.Open("mysql", dsn)
		convey.So(openErr, convey.ShouldBeNil)
		defer db.Close()

		version := getMySQLVersion(db)
		convey.So(version, convey.ShouldBeBetweenOrEqual, 5.5, 10.3)
	})
}
8 changes: 8 additions & 0 deletions collector/global_status.go
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,11 @@ func (ScrapeGlobalStatus) Help() string {
return "Collect from SHOW GLOBAL STATUS"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeGlobalStatus) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeGlobalStatus) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
globalStatusRows, err := db.QueryContext(ctx, globalStatusQuery)
Expand Down Expand Up @@ -207,3 +212,6 @@ func (ScrapeGlobalStatus) Scrape(ctx context.Context, db *sql.DB, ch chan<- prom

return nil
}

// Compile-time check that ScrapeGlobalStatus satisfies the Scraper interface.
var _ Scraper = ScrapeGlobalStatus{}
8 changes: 8 additions & 0 deletions collector/global_variables.go
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,11 @@ func (ScrapeGlobalVariables) Help() string {
return "Collect from SHOW GLOBAL VARIABLES"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeGlobalVariables) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeGlobalVariables) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
globalVariablesRows, err := db.QueryContext(ctx, globalVariablesQuery)
Expand Down Expand Up @@ -227,3 +232,6 @@ func validPrometheusName(s string) string {
s = strings.ToLower(s)
return s
}

// Compile-time check that ScrapeGlobalVariables satisfies the Scraper interface.
var _ Scraper = ScrapeGlobalVariables{}
8 changes: 8 additions & 0 deletions collector/heartbeat.go
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,11 @@ func (ScrapeHeartbeat) Help() string {
return "Collect from heartbeat"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeHeartbeat) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeHeartbeat) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
query := fmt.Sprintf(heartbeatQuery, *collectHeartbeatDatabase, *collectHeartbeatTable)
Expand Down Expand Up @@ -126,3 +131,6 @@ func (ScrapeHeartbeat) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometh

return nil
}

// Compile-time check that ScrapeHeartbeat satisfies the Scraper interface.
var _ Scraper = ScrapeHeartbeat{}
8 changes: 8 additions & 0 deletions collector/info_schema_auto_increment.go
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,11 @@ func (ScrapeAutoIncrementColumns) Help() string {
return "Collect auto_increment columns and max values from information_schema"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeAutoIncrementColumns) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeAutoIncrementColumns) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
autoIncrementRows, err := db.QueryContext(ctx, infoSchemaAutoIncrementQuery)
Expand Down Expand Up @@ -93,3 +98,6 @@ func (ScrapeAutoIncrementColumns) Scrape(ctx context.Context, db *sql.DB, ch cha
}
return nil
}

// Compile-time check that ScrapeAutoIncrementColumns satisfies the Scraper interface.
var _ Scraper = ScrapeAutoIncrementColumns{}
8 changes: 8 additions & 0 deletions collector/info_schema_clientstats.go
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,11 @@ func (ScrapeClientStat) Help() string {
return "If running with userstat=1, set to true to collect client statistics"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeClientStat) Version() float64 {
	const minVersion = 5.5
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeClientStat) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
var varName, varVal string
Expand Down Expand Up @@ -213,3 +218,6 @@ func (ScrapeClientStat) Scrape(ctx context.Context, db *sql.DB, ch chan<- promet
}
return nil
}

// Compile-time check that ScrapeClientStat satisfies the Scraper interface.
var _ Scraper = ScrapeClientStat{}
14 changes: 10 additions & 4 deletions collector/info_schema_innodb_cmp.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ import (
)

const innodbCmpQuery = `
SELECT
page_size, compress_ops, compress_ops_ok, compress_time, uncompress_ops, uncompress_time
SELECT
page_size, compress_ops, compress_ops_ok, compress_time, uncompress_ops, uncompress_time
FROM information_schema.innodb_cmp
`

Expand Down Expand Up @@ -70,6 +70,11 @@ func (ScrapeInnodbCmp) Help() string {
return "Collect metrics from information_schema.innodb_cmp"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeInnodbCmp) Version() float64 {
	const minVersion = 5.5
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeInnodbCmp) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
informationSchemaInnodbCmpRows, err := db.QueryContext(ctx, innodbCmpQuery)
Expand All @@ -84,7 +89,6 @@ func (ScrapeInnodbCmp) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometh
)

for informationSchemaInnodbCmpRows.Next() {

if err := informationSchemaInnodbCmpRows.Scan(
&page_size, &compress_ops, &compress_ops_ok, &compress_time, &uncompress_ops, &uncompress_time,
); err != nil {
Expand All @@ -96,8 +100,10 @@ func (ScrapeInnodbCmp) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometh
ch <- prometheus.MustNewConstMetric(infoSchemaInnodbCmpCompressTime, prometheus.CounterValue, compress_time, page_size)
ch <- prometheus.MustNewConstMetric(infoSchemaInnodbCmpUncompressOps, prometheus.CounterValue, uncompress_ops, page_size)
ch <- prometheus.MustNewConstMetric(infoSchemaInnodbCmpUncompressTime, prometheus.CounterValue, uncompress_time, page_size)

}

return nil
}

// Compile-time check that ScrapeInnodbCmp satisfies the Scraper interface.
var _ Scraper = ScrapeInnodbCmp{}
11 changes: 9 additions & 2 deletions collector/info_schema_innodb_cmpmem.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ import (

const innodbCmpMemQuery = `
SELECT
page_size, buffer_pool_instance, pages_used, pages_free, relocation_ops, relocation_time
page_size, buffer_pool_instance, pages_used, pages_free, relocation_ops, relocation_time
FROM information_schema.innodb_cmpmem
`

Expand Down Expand Up @@ -65,6 +65,11 @@ func (ScrapeInnodbCmpMem) Help() string {
return "Collect metrics from information_schema.innodb_cmpmem"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeInnodbCmpMem) Version() float64 {
	const minVersion = 5.5
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeInnodbCmpMem) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
informationSchemaInnodbCmpMemRows, err := db.QueryContext(ctx, innodbCmpMemQuery)
Expand All @@ -89,7 +94,9 @@ func (ScrapeInnodbCmpMem) Scrape(ctx context.Context, db *sql.DB, ch chan<- prom
ch <- prometheus.MustNewConstMetric(infoSchemaInnodbCmpMemPagesFree, prometheus.CounterValue, pages_free, page_size, buffer_pool)
ch <- prometheus.MustNewConstMetric(infoSchemaInnodbCmpMemRelocationOps, prometheus.CounterValue, relocation_ops, page_size, buffer_pool)
ch <- prometheus.MustNewConstMetric(infoSchemaInnodbCmpMemRelocationTime, prometheus.CounterValue, (relocation_time / 1000), page_size, buffer_pool)

}
return nil
}

// Compile-time check that ScrapeInnodbCmpMem satisfies the Scraper interface.
var _ Scraper = ScrapeInnodbCmpMem{}
8 changes: 8 additions & 0 deletions collector/info_schema_innodb_metrics.go
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,11 @@ func (ScrapeInnodbMetrics) Help() string {
return "Collect metrics from information_schema.innodb_metrics"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeInnodbMetrics) Version() float64 {
	const minVersion = 5.6
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeInnodbMetrics) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
innodbMetricsRows, err := db.QueryContext(ctx, infoSchemaInnodbMetricsQuery)
Expand Down Expand Up @@ -164,3 +169,6 @@ func (ScrapeInnodbMetrics) Scrape(ctx context.Context, db *sql.DB, ch chan<- pro
}
return nil
}

// Compile-time check that ScrapeInnodbMetrics satisfies the Scraper interface.
var _ Scraper = ScrapeInnodbMetrics{}
8 changes: 8 additions & 0 deletions collector/info_schema_innodb_sys_tablespaces.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,11 @@ func (ScrapeInfoSchemaInnodbTablespaces) Help() string {
return "Collect metrics from information_schema.innodb_sys_tablespaces"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeInfoSchemaInnodbTablespaces) Version() float64 {
	const minVersion = 5.7
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeInfoSchemaInnodbTablespaces) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
tablespacesRows, err := db.QueryContext(ctx, innodbTablespacesQuery)
Expand Down Expand Up @@ -113,3 +118,6 @@ func (ScrapeInfoSchemaInnodbTablespaces) Scrape(ctx context.Context, db *sql.DB,

return nil
}

// Compile-time check that ScrapeInfoSchemaInnodbTablespaces satisfies the Scraper interface.
var _ Scraper = ScrapeInfoSchemaInnodbTablespaces{}
8 changes: 8 additions & 0 deletions collector/info_schema_processlist.go
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,11 @@ func (ScrapeProcesslist) Help() string {
return "Collect current thread state counts from the information_schema.processlist"
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeProcesslist) Version() float64 {
	const minVersion = 5.1
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeProcesslist) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
processQuery := fmt.Sprintf(
Expand Down Expand Up @@ -261,3 +266,6 @@ func deriveThreadState(command string, state string) string {
}
return "other"
}

// Compile-time check that ScrapeProcesslist satisfies the Scraper interface.
var _ Scraper = ScrapeProcesslist{}
8 changes: 8 additions & 0 deletions collector/info_schema_query_response_time.go
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,11 @@ func (ScrapeQueryResponseTime) Help() string {
return "Collect query response time distribution if query_response_time_stats is ON."
}

// Version returns the minimal MySQL version from which this scraper is available.
func (ScrapeQueryResponseTime) Version() float64 {
	const minVersion = 5.5
	return minVersion
}

// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeQueryResponseTime) Scrape(ctx context.Context, db *sql.DB, ch chan<- prometheus.Metric) error {
var queryStats uint8
Expand All @@ -135,3 +140,6 @@ func (ScrapeQueryResponseTime) Scrape(ctx context.Context, db *sql.DB, ch chan<-
}
return nil
}

// Compile-time check that ScrapeQueryResponseTime satisfies the Scraper interface.
var _ Scraper = ScrapeQueryResponseTime{}
Loading