Skip to content

Commit

Permalink
Add maxwait_us as a computed property
Browse files Browse the repository at this point in the history
  • Loading branch information
twooster committed Nov 26, 2019
1 parent 7a6e288 commit e7add50
Show file tree
Hide file tree
Showing 2 changed files with 44 additions and 15 deletions.
36 changes: 25 additions & 11 deletions prometheus_pgbouncer_exporter/collector.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,14 +78,14 @@ def collect(self):
results = self._filterMetricsByIncludeDatabases(results, self.config.getIncludeDatabases())
results = self._filterMetricsByExcludeDatabases(results, self.config.getExcludeDatabases())
metrics += self._exportMetrics(results, "pgbouncer_pools_", [
{"type": "gauge", "column": "cl_active", "metric": "client_active_connections", "help": "Client connections that are linked to server connection and can process queries"},
{"type": "gauge", "column": "cl_waiting", "metric": "client_waiting_connections", "help": "Client connections have sent queries but have not yet got a server connection"},
{"type": "gauge", "column": "sv_active", "metric": "server_active_connections", "help": "Server connections that linked to client"},
{"type": "gauge", "column": "sv_idle", "metric": "server_idle_connections", "help": "Server connections that unused and immediately usable for client queries"},
{"type": "gauge", "column": "sv_used", "metric": "server_used_connections", "help": "Server connections that have been idle more than server_check_delay, so they needs server_check_query to run on it before it can be used"},
{"type": "gauge", "column": "sv_tested", "metric": "server_testing_connections", "help": "Server connections that are currently running either server_reset_query or server_check_query"},
{"type": "gauge", "column": "sv_login", "metric": "server_login_connections", "help": "Server connections currently in logging in process"},
{"type": "gauge", "column": "maxwait", "metric": "client_maxwait_seconds", "help": "How long the first (oldest) client in queue has waited, in seconds"},
{"type": "gauge", "column": "cl_active", "metric": "client_active_connections", "help": "Client connections that are linked to server connection and can process queries"},
{"type": "gauge", "column": "cl_waiting", "metric": "client_waiting_connections", "help": "Client connections have sent queries but have not yet got a server connection"},
{"type": "gauge", "column": "sv_active", "metric": "server_active_connections", "help": "Server connections that linked to client"},
{"type": "gauge", "column": "sv_idle", "metric": "server_idle_connections", "help": "Server connections that unused and immediately usable for client queries"},
{"type": "gauge", "column": "sv_used", "metric": "server_used_connections", "help": "Server connections that have been idle more than server_check_delay, so they needs server_check_query to run on it before it can be used"},
{"type": "gauge", "column": "sv_tested", "metric": "server_testing_connections", "help": "Server connections that are currently running either server_reset_query or server_check_query"},
{"type": "gauge", "column": "sv_login", "metric": "server_login_connections", "help": "Server connections currently in logging in process"},
{"type": "gauge", "compute": compute_maxwait_seconds, "metric": "client_maxwait_seconds", "help": "How long the first (oldest) client in queue has waited, in seconds"},
], {"database": "database", "user": "user"}, self.config.getExtraLabels())
else:
success = False
Expand Down Expand Up @@ -127,8 +127,14 @@ def _exportMetrics(self, results, metricPrefix, metricMappings, labelMappings, e

for result in results:
for mapping in metricMappings:
# Ensure the column exists
if not mapping["column"] in result:
if "compute" in mapping:
try:
value = mapping["compute"](result)
except KeyError:
continue
elif mapping["column"] in result:
value = result[mapping["column"]]
else:
continue

labels = {labelName: result[columnName] for columnName, labelName in labelMappings.items()}
Expand All @@ -137,7 +143,7 @@ def _exportMetrics(self, results, metricPrefix, metricMappings, labelMappings, e
metrics.append({
"type": mapping["type"],
"name": metricPrefix + mapping['metric'],
"value": result[mapping["column"]],
"value": value,
"labels": labels,
"help": mapping["help"]
})
Expand Down Expand Up @@ -182,3 +188,11 @@ def _createConnection(self):
conn.set_session(autocommit=True)

return conn


def compute_maxwait_seconds(result):
    """Derive the value for the ``client_maxwait_seconds`` gauge.

    Combines the whole-second ``maxwait`` column with the microsecond
    ``maxwait_us`` column from a ``SHOW POOLS`` row, so the exported
    gauge carries sub-second precision.

    Raises ``KeyError`` when either column is missing from *result*
    (older pgbouncer versions do not report ``maxwait_us``); the
    caller treats that as "skip this metric".
    """
    whole_seconds = result["maxwait"]
    fractional_seconds = result["maxwait_us"] / 1000000.0
    return whole_seconds + fractional_seconds
23 changes: 19 additions & 4 deletions tests/test_collector.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@ def fetchMetricsSuccessFromPgBouncer17Mock(conn, query):
]
elif query == "SHOW POOLS":
return [
{"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8 },
{"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1 }
{"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8, "maxwait_us": 100000 },
{"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1, "maxwait_us": 200000 }
]
elif query == "SHOW DATABASES":
return [
Expand All @@ -37,8 +37,8 @@ def fetchMetricsSuccessFromPgBouncer18Mock(conn, query):
]
elif query == "SHOW POOLS":
return [
{"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8 },
{"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1 }
{"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8, "maxwait_us": 100000 },
{"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1, "maxwait_us": 200000 }
]
elif query == "SHOW DATABASES":
return [
Expand Down Expand Up @@ -113,6 +113,21 @@ def testShouldExportPgbouncerDownMetricOnMetricsPartiallyScraped(self):
self.assertEqual(metrics[0]["value"], 0)
self.assertEqual(metrics[0]["labels"], {})

def testShouldComputeMaxwait(self):
    """Pools maxwait gauge should merge maxwait and maxwait_us into seconds."""
    config = PgbouncerConfig({})
    collector = PgbouncerMetricsCollector(config)
    collector._createConnection = MagicMock(return_value=False)
    collector._fetchMetrics = MagicMock(side_effect=fetchMetricsSuccessFromPgBouncer17Mock)

    metrics = getMetricsByName(collector.collect(), "pgbouncer_pools_client_maxwait_seconds")
    self.assertEqual(len(metrics), 2)

    # One (value, labels) pair per mocked SHOW POOLS row, in emission order.
    expected = [
        (8.1, {"database": "test", "user": "marco"}),
        (1.2, {"database": "prod", "user": "marco"}),
    ]
    for metric, (value, labels) in zip(metrics, expected):
        self.assertEqual(metric["type"], "gauge")
        self.assertAlmostEqual(metric["value"], value)
        self.assertEqual(metric["labels"], labels)

def testShouldExportDatabasesMetrics(self):
config = PgbouncerConfig({})
collector = PgbouncerMetricsCollector(config)
Expand Down

0 comments on commit e7add50

Please sign in to comment.