diff --git a/prometheus_pgbouncer_exporter/collector.py b/prometheus_pgbouncer_exporter/collector.py
index d9f1cb8..81672d8 100644
--- a/prometheus_pgbouncer_exporter/collector.py
+++ b/prometheus_pgbouncer_exporter/collector.py
@@ -78,14 +78,14 @@ def collect(self):
             results = self._filterMetricsByIncludeDatabases(results, self.config.getIncludeDatabases())
             results = self._filterMetricsByExcludeDatabases(results, self.config.getExcludeDatabases())
             metrics += self._exportMetrics(results, "pgbouncer_pools_", [
-                {"type": "gauge", "column": "cl_active", "metric": "client_active_connections", "help": "Client connections that are linked to server connection and can process queries"},
-                {"type": "gauge", "column": "cl_waiting", "metric": "client_waiting_connections", "help": "Client connections have sent queries but have not yet got a server connection"},
-                {"type": "gauge", "column": "sv_active", "metric": "server_active_connections", "help": "Server connections that linked to client"},
-                {"type": "gauge", "column": "sv_idle", "metric": "server_idle_connections", "help": "Server connections that unused and immediately usable for client queries"},
-                {"type": "gauge", "column": "sv_used", "metric": "server_used_connections", "help": "Server connections that have been idle more than server_check_delay, so they needs server_check_query to run on it before it can be used"},
-                {"type": "gauge", "column": "sv_tested", "metric": "server_testing_connections", "help": "Server connections that are currently running either server_reset_query or server_check_query"},
-                {"type": "gauge", "column": "sv_login", "metric": "server_login_connections", "help": "Server connections currently in logging in process"},
-                {"type": "gauge", "column": "maxwait", "metric": "client_maxwait_seconds", "help": "How long the first (oldest) client in queue has waited, in seconds"},
+                {"type": "gauge", "column": "cl_active", "metric": "client_active_connections", "help": "Client connections that are linked to server connection and can process queries"},
+                {"type": "gauge", "column": "cl_waiting", "metric": "client_waiting_connections", "help": "Client connections have sent queries but have not yet got a server connection"},
+                {"type": "gauge", "column": "sv_active", "metric": "server_active_connections", "help": "Server connections that linked to client"},
+                {"type": "gauge", "column": "sv_idle", "metric": "server_idle_connections", "help": "Server connections that unused and immediately usable for client queries"},
+                {"type": "gauge", "column": "sv_used", "metric": "server_used_connections", "help": "Server connections that have been idle more than server_check_delay, so they needs server_check_query to run on it before it can be used"},
+                {"type": "gauge", "column": "sv_tested", "metric": "server_testing_connections", "help": "Server connections that are currently running either server_reset_query or server_check_query"},
+                {"type": "gauge", "column": "sv_login", "metric": "server_login_connections", "help": "Server connections currently in logging in process"},
+                {"type": "gauge", "compute": compute_maxwait_seconds, "metric": "client_maxwait_seconds", "help": "How long the first (oldest) client in queue has waited, in seconds"},
             ], {"database": "database", "user": "user"}, self.config.getExtraLabels())
         else:
             success = False
@@ -127,8 +127,14 @@ def _exportMetrics(self, results, metricPrefix, metricMappings, labelMappings, e

         for result in results:
             for mapping in metricMappings:
-                # Ensure the column exists
-                if not mapping["column"] in result:
+                if "compute" in mapping:
+                    try:
+                        value = mapping["compute"](result)
+                    except KeyError:
+                        continue
+                elif mapping["column"] in result:
+                    value = result[mapping["column"]]
+                else:
                     continue

                 labels = {labelName: result[columnName] for columnName, labelName in labelMappings.items()}
@@ -137,7 +143,7 @@ def _exportMetrics(self, results, metricPrefix, metricMappings, labelMappings, e
                 metrics.append({
                     "type": mapping["type"],
                     "name": metricPrefix + mapping['metric'],
-                    "value": result[mapping["column"]],
+                    "value": value,
                     "labels": labels,
                     "help": mapping["help"]
                 })
@@ -182,3 +188,11 @@ def _createConnection(self):
         conn.set_session(autocommit=True)

         return conn
+
+
+def compute_maxwait_seconds(result):
+    """
+    Compute the maximum wait by summing `maxwait` with `maxwait_us` (microseconds)
+    so that we have higher precision on the maxwait seconds gauge
+    """
+    return result["maxwait"] + result["maxwait_us"] / 1000000.0
diff --git a/tests/test_collector.py b/tests/test_collector.py
index 4d5be62..9dc7adc 100644
--- a/tests/test_collector.py
+++ b/tests/test_collector.py
@@ -18,8 +18,8 @@ def fetchMetricsSuccessFromPgBouncer17Mock(conn, query):
         ]
     elif query == "SHOW POOLS":
         return [
-            {"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8 },
-            {"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1 }
+            {"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8, "maxwait_us": 100000 },
+            {"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1, "maxwait_us": 200000 }
         ]
     elif query == "SHOW DATABASES":
         return [
@@ -37,8 +37,8 @@ def fetchMetricsSuccessFromPgBouncer18Mock(conn, query):
         ]
     elif query == "SHOW POOLS":
         return [
-            {"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8 },
-            {"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1 }
+            {"database": "test", "user": "marco", "cl_active": 1, "cl_waiting": 2, "sv_active": 3, "sv_idle": 4, "sv_used": 5, "sv_tested": 6, "sv_login": 7, "maxwait": 8, "maxwait_us": 100000 },
+            {"database": "prod", "user": "marco", "cl_active": 8, "cl_waiting": 7, "sv_active": 6, "sv_idle": 5, "sv_used": 4, "sv_tested": 3, "sv_login": 2, "maxwait": 1, "maxwait_us": 200000 }
         ]
     elif query == "SHOW DATABASES":
         return [
@@ -113,6 +113,21 @@ def testShouldExportPgbouncerDownMetricOnMetricsPartiallyScraped(self):
         self.assertEqual(metrics[0]["value"], 0)
         self.assertEqual(metrics[0]["labels"], {})

+    def testShouldComputeMaxwait(self):
+        config = PgbouncerConfig({})
+        collector = PgbouncerMetricsCollector(config)
+        collector._createConnection = MagicMock(return_value=False)
+        collector._fetchMetrics = MagicMock(side_effect=fetchMetricsSuccessFromPgBouncer17Mock)
+
+        metrics = getMetricsByName(collector.collect(), "pgbouncer_pools_client_maxwait_seconds")
+        self.assertEqual(len(metrics), 2)
+        self.assertEqual(metrics[0]["type"], "gauge")
+        self.assertAlmostEqual(metrics[0]["value"], 8.1)
+        self.assertEqual(metrics[0]["labels"], {"database":"test", "user":"marco"})
+        self.assertEqual(metrics[1]["type"], "gauge")
+        self.assertAlmostEqual(metrics[1]["value"], 1.2)
+        self.assertEqual(metrics[1]["labels"], {"database":"prod", "user":"marco"})
+
     def testShouldExportDatabasesMetrics(self):
         config = PgbouncerConfig({})
         collector = PgbouncerMetricsCollector(config)