@@ -232,8 +232,9 @@ var (
 		61: newBackendMetric("http_total_time_average_seconds", "Avg. HTTP total time for last 1024 successful connections.", prometheus.GaugeValue, nil),
 	}
 
-	haproxyInfo = prometheus.NewDesc(prometheus.BuildFQName(namespace, "version", "info"), "HAProxy version info.", []string{"release_date", "version"}, nil)
-	haproxyUp   = prometheus.NewDesc(prometheus.BuildFQName(namespace, "", "up"), "Was the last scrape of HAProxy successful.", nil, nil)
+	haproxyInfo    = prometheus.NewDesc(prometheus.BuildFQName(namespace, "version", "info"), "HAProxy version info.", []string{"release_date", "version"}, nil)
+	haproxyUp      = prometheus.NewDesc(prometheus.BuildFQName(namespace, "", "up"), "Was the last scrape of HAProxy successful.", nil, nil)
+	haproxyIdlePct = prometheus.NewDesc(prometheus.BuildFQName(namespace, "idle", "percent"), "Time spent waiting for events instead of processing them.", nil, nil)
 )
 
 // Exporter collects HAProxy stats from the given URI and exports them using
@@ -317,6 +318,7 @@ func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
 	}
 	ch <- haproxyInfo
 	ch <- haproxyUp
+	ch <- haproxyIdlePct
 	ch <- e.totalScrapes.Desc()
 	ch <- e.csvParseFailures.Desc()
 }
@@ -397,6 +399,9 @@ func (e *Exporter) scrape(ch chan<- prometheus.Metric) (up float64) {
 			level.Debug(e.logger).Log("msg", "Failed parsing show info", "err", err)
 		} else {
 			ch <- prometheus.MustNewConstMetric(haproxyInfo, prometheus.GaugeValue, 1, info.ReleaseDate, info.Version)
+			if info.IdlePct != -1 {
+				ch <- prometheus.MustNewConstMetric(haproxyIdlePct, prometheus.GaugeValue, info.IdlePct)
+			}
 		}
 	}
 
@@ -435,10 +440,13 @@ loop:
 type versionInfo struct {
 	ReleaseDate string
 	Version     string
+	IdlePct     float64
 }
 
 func (e *Exporter) parseInfo(i io.Reader) (versionInfo, error) {
 	var version, releaseDate string
+	// idlePct value of -1 is used to indicate it's unset
+	var idlePct float64 = -1
 	s := bufio.NewScanner(i)
 	for s.Scan() {
 		line := s.Text()
@@ -452,9 +460,14 @@ func (e *Exporter) parseInfo(i io.Reader) (versionInfo, error) {
 			releaseDate = field[1]
 		case "Version":
 			version = field[1]
+		case "Idle_pct":
+			i, err := strconv.ParseFloat(field[1], 64)
+			if err == nil && i >= 0 && i <= 100 {
+				idlePct = i
+			}
 		}
 	}
-	return versionInfo{ReleaseDate: releaseDate, Version: version}, s.Err()
+	return versionInfo{ReleaseDate: releaseDate, Version: version, IdlePct: idlePct}, s.Err()
 }
 
 func (e *Exporter) parseRow(csvRow []string, ch chan<- prometheus.Metric) {
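For reference, a minimal standalone sketch of the Idle_pct handling this diff adds: -1 marks the value as unset, and only values in the 0-100 range are kept. The helper parseIdlePct and the sample "show info" lines below are illustrative only and are not part of the exporter.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strconv"
	"strings"
)

// parseIdlePct mirrors the switch case added to parseInfo above, outside of
// the Exporter type, so it can be run on its own.
func parseIdlePct(r io.Reader) float64 {
	idlePct := -1.0 // -1 indicates Idle_pct was absent or invalid
	s := bufio.NewScanner(r)
	for s.Scan() {
		field := strings.SplitN(s.Text(), ": ", 2)
		if len(field) != 2 || field[0] != "Idle_pct" {
			continue
		}
		if v, err := strconv.ParseFloat(field[1], 64); err == nil && v >= 0 && v <= 100 {
			idlePct = v
		}
	}
	return idlePct
}

func main() {
	fmt.Println(parseIdlePct(strings.NewReader("Version: 2.4.22\nIdle_pct: 78\n"))) // 78
	fmt.Println(parseIdlePct(strings.NewReader("Version: 2.4.22\n")))               // -1
}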