Commit de048e6f authored by Martín Ferrari's avatar Martín Ferrari

Merge tag 'upstream/0.11.0+ds' into debian/sid

parents c44b5384 4c8c39e9
---
# CircleCI 2.0 pipeline for prometheus/mysqld_exporter.
# Indentation reconstructed: the diff renderer had stripped all leading
# whitespace, which is structural in YAML.
version: 2
jobs:
  # Compile check: build the binary, then remove it (artifacts come from `build`).
  test:
    docker:
      - image: circleci/golang:1.10
    working_directory: /go/src/github.com/prometheus/mysqld_exporter
    steps:
      - checkout
      - run: make promu
      - run: make
      - run: rm -v mysqld_exporter

  # Spell-check the source tree.
  codespell:
    docker:
      - image: circleci/python
    steps:
      - checkout
      - run: sudo pip install codespell
      - run: codespell --skip=".git,./vendor,ttar"

  # Cross-compile for all platforms; share .build/ with the docker jobs below.
  build:
    machine: true
    working_directory: /home/circleci/.go_workspace/src/github.com/prometheus/mysqld_exporter
    steps:
      - checkout
      - run: make promu
      - run: promu crossbuild -v
      - persist_to_workspace:
          root: .
          paths:
            - .build

  # Push the `master` image to Docker Hub and Quay.
  docker_hub_master:
    docker:
      - image: circleci/golang:1.10
    working_directory: /go/src/github.com/prometheus/mysqld_exporter
    environment:
      DOCKER_IMAGE_NAME: prom/mysqld-exporter
      QUAY_IMAGE_NAME: quay.io/prometheus/mysqld-exporter
    steps:
      - checkout
      - setup_remote_docker
      - attach_workspace:
          at: .
      - run: ln -s .build/linux-amd64/mysqld_exporter mysqld_exporter
      - run: make docker DOCKER_IMAGE_NAME=$DOCKER_IMAGE_NAME
      - run: make docker DOCKER_IMAGE_NAME=$QUAY_IMAGE_NAME
      - run: docker images
      - run: docker login -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
      - run: docker login -u $QUAY_LOGIN -p $QUAY_PASSWORD quay.io
      - run: docker push $DOCKER_IMAGE_NAME
      - run: docker push $QUAY_IMAGE_NAME

  # On a version tag: publish release tarballs and tagged images;
  # only vX.Y.Z (no pre-release suffix) is additionally tagged `latest`.
  docker_hub_release_tags:
    docker:
      - image: circleci/golang:1.10
    working_directory: /go/src/github.com/prometheus/mysqld_exporter
    environment:
      DOCKER_IMAGE_NAME: prom/mysqld-exporter
      QUAY_IMAGE_NAME: quay.io/prometheus/mysqld-exporter
    steps:
      - checkout
      - setup_remote_docker
      - run: mkdir -v -p ${HOME}/bin
      - run: curl -L 'https://github.com/aktau/github-release/releases/download/v0.7.2/linux-amd64-github-release.tar.bz2' | tar xvjf - --strip-components 3 -C ${HOME}/bin
      - run: echo 'export PATH=${HOME}/bin:${PATH}' >> ${BASH_ENV}
      - attach_workspace:
          at: .
      - run: make promu
      - run: promu crossbuild tarballs
      - run: promu checksum .tarballs
      - run: promu release .tarballs
      - store_artifacts:
          path: .tarballs
          destination: releases
      - run: ln -s .build/linux-amd64/mysqld_exporter mysqld_exporter
      - run: make docker DOCKER_IMAGE_NAME=$DOCKER_IMAGE_NAME DOCKER_IMAGE_TAG=$CIRCLE_TAG
      - run: make docker DOCKER_IMAGE_NAME=$QUAY_IMAGE_NAME DOCKER_IMAGE_TAG=$CIRCLE_TAG
      - run: docker login -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
      - run: docker login -u $QUAY_LOGIN -p $QUAY_PASSWORD quay.io
      - run: |
          if [[ "$CIRCLE_TAG" =~ ^v[0-9]+(\.[0-9]+){2}$ ]]; then
            docker tag "$DOCKER_IMAGE_NAME:$CIRCLE_TAG" "$DOCKER_IMAGE_NAME:latest"
            docker tag "$QUAY_IMAGE_NAME:$CIRCLE_TAG" "$QUAY_IMAGE_NAME:latest"
          fi
      - run: docker push $DOCKER_IMAGE_NAME
      - run: docker push $QUAY_IMAGE_NAME

workflows:
  version: 2
  mysqld_exporter:
    jobs:
      # `tags: only: /.*/` is required so tag pipelines run these jobs too.
      - test:
          filters:
            tags:
              only: /.*/
      - build:
          filters:
            tags:
              only: /.*/
      - codespell:
          filters:
            tags:
              only: /.*/
      - docker_hub_master:
          requires:
            - test
            - build
          filters:
            branches:
              only: master
      - docker_hub_release_tags:
          requires:
            - test
            - build
          filters:
            tags:
              only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
            branches:
              ignore: /.*/
......@@ -3,9 +3,8 @@ sudo: required
language: go
go:
- 1.8.x
- 1.9.x
- master
- 1.10.x
env:
- MYSQL_IMAGE=mysql/mysql-server:5.5
......
## v0.11.0 / 2018-06-29
### BREAKING CHANGES:
* Flags now use the Kingpin library, and require double-dashes. #222
This also changes the behavior of boolean flags.
* Enable: `--collector.global_status`
* Disable: `--no-collector.global_status`
### Changes:
* [CHANGE] Limit number and lifetime of connections #208
* [ENHANCEMENT] Move session params to DSN #259
* [ENHANCEMENT] Use native DB.Ping() instead of self-written implementation #210
* [FEATURE] Add collector duration metrics #197
* [FEATURE] Add 'collect[]' URL parameter to filter enabled collectors #235
* [FEATURE] Set a `lock_wait_timeout` on the MySQL connection #252
* [FEATURE] Set `last_scrape_error` when an error occurs #237
* [FEATURE] Collect metrics from `performance_schema.replication_group_member_stats` #271
* [FEATURE] Add innodb compression statistic #275
* [FEATURE] Add metrics for the output of `SHOW SLAVE HOSTS` #279
* [FEATURE] Support custom CA truststore and client SSL keypair. #255
* [BUGFIX] Fix perfEventsStatementsQuery #213
* [BUGFIX] Fix `file_instances` metric collector #205
* [BUGFIX] Fix prefix removal in `perf_schema_file_instances` #257
* [BUGFIX] Fix 32bit compile issue #273
* [BUGFIX] Ignore boolean keys in my.cnf. #283
## v0.10.0 / 2017-04-25
BREAKING CHANGES:
......
......@@ -11,55 +11,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE(review): this span is a *merged diff view* of the project Makefile —
# pre-merge and post-merge lines are interleaved with no +/- markers, and the
# mandatory leading tabs on recipe lines were stripped by the renderer.
# Do not use this text as a Makefile verbatim: `all:` is defined twice
# (vet vs. format/build/test-short) and `DOCKER_IMAGE_NAME` twice; with GNU
# make the later definition would win. Annotations below describe intent only.

# Go toolchain and promu (Prometheus build tool) locations.
GO := go
FIRST_GOPATH := $(firstword $(subst :, ,$(shell $(GO) env GOPATH)))
PROMU := $(FIRST_GOPATH)/bin/promu
# Package list excluding vendored code (old style; Makefile.common uses ./...).
pkgs = $(shell $(GO) list ./... | grep -v /vendor/)
# Old default target (pre-merge line).
all: vet
PREFIX ?= $(shell pwd)
BIN_DIR ?= $(shell pwd)
DOCKER_IMAGE_NAME ?= mysqld-exporter
# Image tag derived from the current git branch, with '/' made tag-safe.
DOCKER_IMAGE_TAG ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))
# Post-merge style: shared rules come from Makefile.common.
include Makefile.common
# Suppress a known staticcheck finding in the main source file.
STATICCHECK_IGNORE = \
github.com/prometheus/mysqld_exporter/mysqld_exporter.go:SA1019
# New default target (post-merge line; duplicates `all:` above in this view).
all: format build test-short
DOCKER_IMAGE_NAME ?= mysqld-exporter
# Check gofmt compliance; fail if any diff is produced.
style:
@echo ">> checking code style"
@! gofmt -d $(shell find . -path ./vendor -prune -o -name '*.go' -print) | grep '^'
# Smoke-test the built Docker image on the exporter's default port.
test-docker:
@echo ">> testing docker image"
./test_image.sh "$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG)" 9104
test-short:
@echo ">> running short tests"
@$(GO) test -short -race $(pkgs)
test:
@echo ">> running tests"
@$(GO) test -race $(pkgs)
format:
@echo ">> formatting code"
@$(GO) fmt $(pkgs)
vet:
@echo ">> vetting code"
@$(GO) vet $(pkgs)
build: promu
@echo ">> building binaries"
@$(PROMU) build --prefix $(PREFIX)
tarball: promu
@echo ">> building release tarball"
@$(PROMU) tarball --prefix $(PREFIX) $(BIN_DIR)
docker:
@echo ">> building docker image"
@docker build -t "$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG)" .
# Install promu for the host OS/arch (normalizes uname output to Go names).
promu:
@GOOS=$(shell uname -s | tr A-Z a-z) \
GOARCH=$(subst x86_64,amd64,$(patsubst i%86,386,$(shell uname -m))) \
$(GO) get -u github.com/prometheus/promu
.PHONY: all style format build test vet tarball docker promu
.PHONY: test-docker
# Copyright 2018 The Prometheus Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A common Makefile that includes rules to be reused in different prometheus projects.
# !!! Open PRs only against the prometheus/prometheus/Makefile.common repository!

# Example usage :
# Create the main Makefile in the root project directory.
# include Makefile.common
# customTarget:
#	@echo ">> Running customTarget"
#

# Ensure GOBIN is not set during build so that promu is installed to the correct path
unexport GOBIN

GO ?= go
GOFMT ?= $(GO)fmt
# First GOPATH entry; tool binaries ($(PROMU) etc.) are installed under its bin/.
FIRST_GOPATH := $(firstword $(subst :, ,$(shell $(GO) env GOPATH)))
PROMU := $(FIRST_GOPATH)/bin/promu
STATICCHECK := $(FIRST_GOPATH)/bin/staticcheck
GOVENDOR := $(FIRST_GOPATH)/bin/govendor
pkgs = ./...

PREFIX ?= $(shell pwd)
BIN_DIR ?= $(shell pwd)
# Docker tag derived from the current git branch, with '/' made tag-safe.
DOCKER_IMAGE_TAG ?= $(subst /,-,$(shell git rev-parse --abbrev-ref HEAD))

all: style staticcheck unused build test

# Fail if gofmt would change anything (the grep inverts the empty-diff case).
style:
	@echo ">> checking code style"
	! $(GOFMT) -d $$(find . -path ./vendor -prune -o -name '*.go' -print) | grep '^'

# Every non-vendored .go file must carry a license/generated marker in its
# first three lines.
check_license:
	@echo ">> checking license header"
	@licRes=$$(for file in $$(find . -type f -iname '*.go' ! -path './vendor/*') ; do \
	       awk 'NR<=3' $$file | grep -Eq "(Copyright|generated|GENERATED)" || echo $$file; \
	 done); \
	if [ -n "$${licRes}" ]; then \
	       echo "license header checking failed:"; echo "$${licRes}"; \
	       exit 1; \
	fi

test-short:
	@echo ">> running short tests"
	$(GO) test -short $(pkgs)

test:
	@echo ">> running all tests"
	$(GO) test -race $(pkgs)

format:
	@echo ">> formatting code"
	$(GO) fmt $(pkgs)

vet:
	@echo ">> vetting code"
	$(GO) vet $(pkgs)

# STATICCHECK_IGNORE is supplied by the including project's Makefile.
staticcheck: $(STATICCHECK)
	@echo ">> running staticcheck"
	$(STATICCHECK) -ignore "$(STATICCHECK_IGNORE)" $(pkgs)

# Fail the build when govendor reports unused vendored packages.
unused: $(GOVENDOR)
	@echo ">> running check for unused packages"
	@$(GOVENDOR) list +unused | grep . && exit 1 || echo 'No unused packages'

build: promu
	@echo ">> building binaries"
	$(PROMU) build --prefix $(PREFIX)

tarball: promu
	@echo ">> building release tarball"
	$(PROMU) tarball --prefix $(PREFIX) $(BIN_DIR)

docker:
	docker build -t "$(DOCKER_IMAGE_NAME):$(DOCKER_IMAGE_TAG)" .

# Tool bootstrap targets: clear GOOS/GOARCH so the binaries match the host.
promu:
	GOOS= GOARCH= $(GO) get -u github.com/prometheus/promu

$(FIRST_GOPATH)/bin/staticcheck:
	GOOS= GOARCH= $(GO) get -u honnef.co/go/tools/cmd/staticcheck

$(FIRST_GOPATH)/bin/govendor:
	GOOS= GOARCH= $(GO) get -u github.com/kardianos/govendor

.PHONY: all style check_license format build test vet assets tarball docker promu staticcheck $(FIRST_GOPATH)/bin/staticcheck govendor $(FIRST_GOPATH)/bin/govendor
......@@ -28,7 +28,7 @@ NOTE: It is recommended to set a max connection limit for the user to avoid over
Running using an environment variable:
export DATA_SOURCE_NAME='login:password@(hostname:port)/'
export DATA_SOURCE_NAME='user:password@(hostname:3306)/'
./mysqld_exporter <flags>
Running using ~/.my.cnf:
......@@ -58,6 +58,8 @@ collect.global_variables | 5.1 | Collect
collect.info_schema.clientstats | 5.5 | If running with userstat=1, set to true to collect client statistics.
collect.info_schema.innodb_metrics | 5.6 | Collect metrics from information_schema.innodb_metrics.
collect.info_schema.innodb_tablespaces | 5.7 | Collect metrics from information_schema.innodb_sys_tablespaces.
collect.info_schema.innodb_cmp | 5.5 | Collect InnoDB compressed tables metrics from information_schema.innodb_cmp.
collect.info_schema.innodb_cmpmem | 5.5 | Collect InnoDB buffer pool compression metrics from information_schema.innodb_cmpmem.
collect.info_schema.processlist | 5.1 | Collect thread state counts from information_schema.processlist.
collect.info_schema.processlist.min_time | 5.1 | Minimum time a thread must be in each state to be counted. (default: 0)
collect.info_schema.query_response_time | 5.5 | Collect query response time distribution if query_response_time_stats is ON.
......@@ -75,7 +77,9 @@ collect.perf_schema.file_instances | 5.5 | Collect
collect.perf_schema.indexiowaits | 5.6 | Collect metrics from performance_schema.table_io_waits_summary_by_index_usage.
collect.perf_schema.tableiowaits | 5.6 | Collect metrics from performance_schema.table_io_waits_summary_by_table.
collect.perf_schema.tablelocks | 5.6 | Collect metrics from performance_schema.table_lock_waits_summary_by_table.
collect.perf_schema.replication_group_member_stats | 5.7 | Collect metrics from performance_schema.replication_group_member_stats.
collect.slave_status | 5.1 | Collect from SHOW SLAVE STATUS (Enabled by default)
collect.slave_hosts | 5.1 | Collect from SHOW SLAVE HOSTS
collect.heartbeat | 5.1 | Collect from [heartbeat](#heartbeat).
collect.heartbeat.database | 5.1 | Database from where to collect heartbeat data. (default: heartbeat)
collect.heartbeat.table | 5.1 | Table from where to collect heartbeat data. (default: heartbeat)
......@@ -98,6 +102,24 @@ The MySQL server's [data source name](http://en.wikipedia.org/wiki/Data_source_n
must be set via the `DATA_SOURCE_NAME` environment variable.
The format of this variable is described at https://github.com/go-sql-driver/mysql#dsn-data-source-name.
## Customizing Configuration for a SSL Connection
If the MySQL server supports SSL, you may need to specify a CA truststore to verify the server's chain-of-trust. You may also need to specify a SSL keypair for the client side of the SSL connection. To configure the mysqld exporter to use a custom CA certificate, add the following to the mysql cnf file:
```
ssl-ca=/path/to/ca/file
```
To specify the client SSL keypair, add the following to the cnf.
```
ssl-key=/path/to/ssl/client/key
ssl-cert=/path/to/ssl/client/cert
```
Customizing the SSL configuration is only supported in the mysql cnf file and is not supported if you set the mysql server's data source name in the environment variable DATA_SOURCE_NAME.
## Using Docker
You can deploy this exporter using the [prom/mysqld-exporter](https://registry.hub.docker.com/u/prom/mysqld-exporter/) Docker image.
......@@ -105,10 +127,14 @@ You can deploy this exporter using the [prom/mysqld-exporter](https://registry.h
For example:
```bash
docker network create my-mysql-network
docker pull prom/mysqld-exporter
docker run -d -p 9104:9104 --link=my_mysql_container:bdd \
-e DATA_SOURCE_NAME="user:password@(bdd:3306)/database" prom/mysqld-exporter
docker run -d \
-p 9104:9104 \
--network my-mysql-network \
-e DATA_SOURCE_NAME="user:password@(my-mysql-network:3306)/" \
prom/mysqld-exporter
```
## heartbeat
......
---
# Legacy CircleCI 1.0 configuration (circle.yml), superseded by
# .circleci/config.yml in this merge. Indentation reconstructed: the diff
# renderer had stripped all leading whitespace, which is structural in YAML.
machine:
  environment:
    DOCKER_IMAGE_NAME: prom/mysqld-exporter
    QUAY_IMAGE_NAME: quay.io/prometheus/mysqld-exporter
    DOCKER_TEST_IMAGE_NAME: quay.io/prometheus/golang-builder:1.8-base
    REPO_PATH: github.com/prometheus/mysqld_exporter
  pre:
    # Pin the CircleCI-patched docker client and fetch github-release.
    - sudo curl -L -o /usr/bin/docker 'https://s3-external-1.amazonaws.com/circle-downloads/docker-1.9.1-circleci'
    - sudo chmod 0755 /usr/bin/docker
    - sudo curl -L 'https://github.com/aktau/github-release/releases/download/v0.6.2/linux-amd64-github-release.tar.bz2' | tar xvjf - --strip-components 3 -C $HOME/bin
  services:
    - docker

dependencies:
  pre:
    - make promu
    - docker info
  override:
    - promu crossbuild
    - ln -s .build/linux-amd64/mysqld_exporter mysqld_exporter
    # Tag builds get images tagged with the release tag; otherwise the default tag.
    - |
      if [ -n "$CIRCLE_TAG" ]; then
        make docker DOCKER_IMAGE_NAME=$DOCKER_IMAGE_NAME DOCKER_IMAGE_TAG=$CIRCLE_TAG
        make docker DOCKER_IMAGE_NAME=$QUAY_IMAGE_NAME DOCKER_IMAGE_TAG=$CIRCLE_TAG
      else
        make docker DOCKER_IMAGE_NAME=$DOCKER_IMAGE_NAME
        make docker DOCKER_IMAGE_NAME=$QUAY_IMAGE_NAME
      fi
  post:
    - mkdir $CIRCLE_ARTIFACTS/binaries/ && cp -a .build/* $CIRCLE_ARTIFACTS/binaries/
    - docker images

test:
  override:
    - docker run --rm -t -v "$(pwd):/app" "${DOCKER_TEST_IMAGE_NAME}" -i "${REPO_PATH}" -T

deployment:
  # Push `master` images to Docker Hub and Quay.
  hub_branch:
    branch: master
    owner: prometheus
    commands:
      - docker login -e $DOCKER_EMAIL -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
      - docker login -e $QUAY_EMAIL -u $QUAY_LOGIN -p $QUAY_PASSWORD quay.io
      - docker push $DOCKER_IMAGE_NAME
      - docker push $QUAY_IMAGE_NAME
  # On version tags: publish tarballs and tagged images;
  # plain vX.Y.Z (no suffix) also gets `latest`.
  hub_tag:
    tag: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
    owner: prometheus
    commands:
      - promu crossbuild tarballs
      - promu checksum .tarballs
      - promu release .tarballs
      - mkdir $CIRCLE_ARTIFACTS/releases/ && cp -a .tarballs/* $CIRCLE_ARTIFACTS/releases/
      - docker login -e $DOCKER_EMAIL -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
      - docker login -e $QUAY_EMAIL -u $QUAY_LOGIN -p $QUAY_PASSWORD quay.io
      - |
        if [[ "$CIRCLE_TAG" =~ ^v[0-9]+(\.[0-9]+){2}$ ]]; then
          docker tag "$DOCKER_IMAGE_NAME:$CIRCLE_TAG" "$DOCKER_IMAGE_NAME:latest"
          docker tag "$QUAY_IMAGE_NAME:$CIRCLE_TAG" "$QUAY_IMAGE_NAME:latest"
        fi
      - docker push $DOCKER_IMAGE_NAME
      - docker push $QUAY_IMAGE_NAME
......@@ -38,7 +38,20 @@ var (
)
// ScrapeBinlogSize collects from `SHOW BINARY LOGS`.
func ScrapeBinlogSize(db *sql.DB, ch chan<- prometheus.Metric) error {
type ScrapeBinlogSize struct{}
// Name of the Scraper. Should be unique.
func (ScrapeBinlogSize) Name() string {
return "binlog_size"
}
// Help describes the role of the Scraper.
func (ScrapeBinlogSize) Help() string {
return "Collect the current size of all registered binlog files"
}
// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeBinlogSize) Scrape(db *sql.DB, ch chan<- prometheus.Metric) error {
var logBin uint8
err := db.QueryRow(logbinQuery).Scan(&logBin)
if err != nil {
......
......@@ -27,7 +27,7 @@ func TestScrapeBinlogSize(t *testing.T) {
ch := make(chan prometheus.Metric)
go func() {
if err = ScrapeBinlogSize(db, ch); err != nil {
if err = (ScrapeBinlogSize{}).Scrape(db, ch); err != nil {
t.Errorf("error calling function on test: %s", err)
}
close(ch)
......@@ -47,6 +47,6 @@ func TestScrapeBinlogSize(t *testing.T) {
// Ensure all SQL queries were executed
if err := mock.ExpectationsWereMet(); err != nil {
t.Errorf("there were unfulfilled expections: %s", err)
t.Errorf("there were unfulfilled exceptions: %s", err)
}
}
......@@ -19,7 +19,20 @@ const (
)
// ScrapeEngineInnodbStatus scrapes from `SHOW ENGINE INNODB STATUS`.
func ScrapeEngineInnodbStatus(db *sql.DB, ch chan<- prometheus.Metric) error {
type ScrapeEngineInnodbStatus struct{}
// Name of the Scraper. Should be unique.
func (ScrapeEngineInnodbStatus) Name() string {
return "engine_innodb_status"
}
// Help describes the role of the Scraper.
func (ScrapeEngineInnodbStatus) Help() string {
return "Collect from SHOW ENGINE INNODB STATUS"
}
// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeEngineInnodbStatus) Scrape(db *sql.DB, ch chan<- prometheus.Metric) error {
rows, err := db.Query(engineInnodbStatusQuery)
if err != nil {
return err
......
......@@ -140,7 +140,7 @@ END OF INNODB MONITOR OUTPUT
ch := make(chan prometheus.Metric)
go func() {
if err = ScrapeEngineInnodbStatus(db, ch); err != nil {
if err = (ScrapeEngineInnodbStatus{}).Scrape(db, ch); err != nil {
t.Errorf("error calling function on test: %s", err)
}
close(ch)
......@@ -160,6 +160,6 @@ END OF INNODB MONITOR OUTPUT
// Ensure all SQL queries were executed
if err := mock.ExpectationsWereMet(); err != nil {
t.Errorf("there were unfulfilled expections: %s", err)
t.Errorf("there were unfulfilled exceptions: %s", err)
}
}
......@@ -16,26 +16,21 @@ const (
engineTokudbStatusQuery = `SHOW ENGINE TOKUDB STATUS`
)
func sanitizeTokudbMetric(metricName string) string {
replacements := map[string]string{
">": "",
",": "",
":": "",
"(": "",
")": "",
" ": "_",
"-": "_",
"+": "and",
"/": "and",
}
for r := range replacements {
metricName = strings.Replace(metricName, r, replacements[r], -1)
}
return metricName
// ScrapeEngineTokudbStatus scrapes from `SHOW ENGINE TOKUDB STATUS`.
type ScrapeEngineTokudbStatus struct{}
// Name of the Scraper. Should be unique.
func (ScrapeEngineTokudbStatus) Name() string {
return "engine_tokudb_status"
}
// ScrapeEngineTokudbStatus scrapes from `SHOW ENGINE TOKUDB STATUS`.
func ScrapeEngineTokudbStatus(db *sql.DB, ch chan<- prometheus.Metric) error {
// Help describes the role of the Scraper.
func (ScrapeEngineTokudbStatus) Help() string {
return "Collect from SHOW ENGINE TOKUDB STATUS"
}
// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeEngineTokudbStatus) Scrape(db *sql.DB, ch chan<- prometheus.Metric) error {
tokudbRows, err := db.Query(engineTokudbStatusQuery)
if err != nil {
return err
......@@ -60,3 +55,21 @@ func ScrapeEngineTokudbStatus(db *sql.DB, ch chan<- prometheus.Metric) error {
}
return nil
}
func sanitizeTokudbMetric(metricName string) string {
replacements := map[string]string{
">": "",
",": "",
":": "",
"(": "",
")": "",
" ": "_",
"-": "_",
"+": "and",
"/": "and",
}
for r := range replacements {
metricName = strings.Replace(metricName, r, replacements[r], -1)
}
return metricName
}
......@@ -44,7 +44,7 @@ func TestScrapeEngineTokudbStatus(t *testing.T) {
ch := make(chan prometheus.Metric)
go func() {
if err = ScrapeEngineTokudbStatus(db, ch); err != nil {
if err = (ScrapeEngineTokudbStatus{}).Scrape(db, ch); err != nil {
t.Errorf("error calling function on test: %s", err)
}
close(ch)
......@@ -64,6 +64,6 @@ func TestScrapeEngineTokudbStatus(t *testing.T) {
// Ensure all SQL queries were executed
if err := mock.ExpectationsWereMet(); err != nil {
t.Errorf("there were unfulfilled expections: %s", err)
t.Errorf("there were unfulfilled exceptions: %s", err)
}
}
This diff is collapsed.
......@@ -15,9 +15,12 @@ func TestExporter(t *testing.T) {
t.Skip("-short is passed, skipping test")
}
exporter := New(dsn, Collect{
GlobalStatus: true,
})
exporter := New(
dsn,
NewMetrics(),
[]Scraper{
ScrapeGlobalStatus{},
})
convey.Convey("Metrics describing", t, func() {
ch := make(chan *prometheus.Desc)
......
......@@ -11,15 +11,16 @@ import (
)
const (
// Scrape query
// Scrape query.
globalStatusQuery = `SHOW GLOBAL STATUS`
// Subsytem.
// Subsystem.
globalStatus = "global_status"
)
// Regexp to match various groups of status vars.
var globalStatusRE = regexp.MustCompile(`^(com|handler|connection_errors|innodb_buffer_pool_pages|innodb_rows|performance_schema)_(.*)$`)
// Metric descriptors.
var (
globalCommandsDesc = prometheus.NewDesc(
prometheus.BuildFQName(namespace, globalStatus, "commands_total"),
......@@ -59,7 +60,20 @@ var (
)
// ScrapeGlobalStatus collects from `SHOW GLOBAL STATUS`.
func ScrapeGlobalStatus(db *sql.DB, ch chan<- prometheus.Metric) error {
type ScrapeGlobalStatus struct{}
// Name of the Scraper. Should be unique.
func (ScrapeGlobalStatus) Name() string {
return globalStatus
}
// Help describes the role of the Scraper.
func (ScrapeGlobalStatus) Help() string {
return "Collect from SHOW GLOBAL STATUS"
}
// Scrape collects data from database connection and sends it over channel as prometheus metric.
func (ScrapeGlobalStatus) Scrape(db *sql.DB, ch chan<- prometheus.Metric) error {
globalStatusRows, err := db.Query(globalStatusQuery)
if err != nil {
return err
......
......@@ -38,7 +38,7 @@ func TestScrapeGlobalStatus(t *testing.T) {
ch := make(chan prometheus.Metric)
go func() {
if err = ScrapeGlobalStatus(db, ch); err != nil {
if err = (ScrapeGlobalStatus{}).Scrape(db, ch); err != nil {
t.Errorf("error calling function on test: %s", err)
}
close(ch)
......@@ -68,6 +68,6 @@ func TestScrapeGlobalStatus(t *testing.T) {
// Ensure all SQL queries were executed
if err := mock.ExpectationsWereMet(); err != nil {
t.Errorf("there were unfulfilled expections: %s", err)
t.Errorf("there were unfulfilled exceptions: %s", err)
}
}
......@@ -19,7 +19,20 @@ const (
)
// ScrapeGlobalVariables collects from `SHOW GLOBAL VARIABLES`.
func ScrapeGlobalVariables(db *sql.DB, ch chan<- prometheus.Metric) error {
type ScrapeGlobalVariables struct{}
// Name of the Scraper. Should be unique.
func (ScrapeGlobalVariables) Name() string {
return globalVariables
}
// Help describes the role of the Scraper.
func (ScrapeGlobalVariables) Help() string {