
Commit c746829

Add hub_template_active_watches metric
Gauge that records the current number of active watches.

ref: stolostron/backlog#25866

Signed-off-by: Dale Haiducek <[email protected]>
1 parent 821792b commit c746829

3 files changed: +109 -4 lines changed


controllers/propagator/metric.go

Lines changed: 13 additions & 4 deletions
@@ -8,11 +8,20 @@ import (
 	"sigs.k8s.io/controller-runtime/pkg/metrics"
 )

-var roothandlerMeasure = prometheus.NewHistogram(prometheus.HistogramOpts{
-	Name: "ocm_handle_root_policy_duration_seconds",
-	Help: "Time the handleRootPolicy function takes to complete.",
-})
+var (
+	hubTemplateActiveWatchesMetric = prometheus.NewGauge(
+		prometheus.GaugeOpts{
+			Name: "hub_templates_active_watches",
+			Help: "The number of active watch API requests for Hub policy templates",
+		},
+	)
+	roothandlerMeasure = prometheus.NewHistogram(prometheus.HistogramOpts{
+		Name: "ocm_handle_root_policy_duration_seconds_bucket",
+		Help: "Time the handleRootPolicy function takes to complete.",
+	})
+)

 func init() {
 	metrics.Registry.MustRegister(roothandlerMeasure)
+	metrics.Registry.MustRegister(hubTemplateActiveWatchesMetric)
 }
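
Once registered in the controller-runtime metrics registry, the new gauge is served from the manager's metrics endpoint in the standard Prometheus text exposition format. A scrape would include lines roughly like the following (the sample value of 2 is only illustrative):

# HELP hub_templates_active_watches The number of active watch API requests for Hub policy templates
# TYPE hub_templates_active_watches gauge
hub_templates_active_watches 2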

controllers/propagator/policy_controller.go

Lines changed: 8 additions & 0 deletions
@@ -98,6 +98,14 @@ type PolicyReconciler struct {
 func (r *PolicyReconciler) Reconcile(ctx context.Context, request ctrl.Request) (ctrl.Result, error) {
 	log := log.WithValues("Request.Namespace", request.Namespace, "Request.Name", request.Name)

+	// Set the hub template watch metric after reconcile
+	defer func() {
+		hubTempWatches := r.DynamicWatcher.GetWatchCount()
+		log.V(3).Info("Setting hub template watch metric", "value", hubTempWatches)
+
+		hubTemplateActiveWatchesMetric.Set(float64(hubTempWatches))
+	}()
+
 	log.Info("Reconciling the policy")

 	// Fetch the Policy instance
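
The metric is set inside a deferred closure rather than at the end of Reconcile so it runs on every return path, including early error returns. A minimal standalone sketch of that pattern (not code from this repository; the gauge and reconcile function below are stand-ins):

package main

import (
	"errors"
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil"
)

// Stand-in gauge with the same shape as hubTemplateActiveWatchesMetric.
var activeWatches = prometheus.NewGauge(prometheus.GaugeOpts{
	Name: "hub_templates_active_watches",
	Help: "The number of active watch API requests for Hub policy templates",
})

// reconcile mimics the pattern in the diff: the deferred closure runs on every
// return path, so the gauge reflects the final watch count even when the
// function bails out early with an error.
func reconcile(fail bool, watches int) error {
	defer func() {
		activeWatches.Set(float64(watches))
	}()

	if fail {
		return errors.New("reconcile failed") // gauge is still set by the defer
	}

	return nil
}

func main() {
	_ = reconcile(true, 2)
	fmt.Println(testutil.ToFloat64(activeWatches)) // prints 2
}
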
Lines changed: 88 additions & 0 deletions

@@ -0,0 +1,88 @@
+// Copyright (c) 2021 Red Hat, Inc.
+// Copyright Contributors to the Open Cluster Management project
+
+package e2e
+
+import (
+	"context"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
+
+	"open-cluster-management.io/governance-policy-propagator/test/utils"
+)
+
+var _ = Describe("Test policy template metrics", Ordered, func() {
+	const (
+		policyName = "case9-test-policy"
+		policyYaml = "../resources/case9_templates/case9-test-policy.yaml"
+	)
+
+	Describe("Create policy, placement and referenced resource in ns:"+testNamespace, func() {
+		It("should be created in user ns", func() {
+			By("Creating " + policyYaml)
+			utils.Kubectl("apply",
+				"-f", policyYaml,
+				"-n", testNamespace)
+			plc := utils.GetWithTimeout(
+				clientHubDynamic, gvrPolicy, policyName, testNamespace, true, defaultTimeoutSeconds,
+			)
+			Expect(plc).NotTo(BeNil())
+		})
+
+		It("should resolve templates and propagate to cluster ns managed1", func() {
+			By("Patching test-policy-plr with decision of cluster managed1")
+			plr := utils.GetWithTimeout(
+				clientHubDynamic, gvrPlacementRule, policyName+"-plr", testNamespace,
+				true, defaultTimeoutSeconds,
+			)
+			plr.Object["status"] = utils.GeneratePlrStatus("managed1")
+			_, err := clientHubDynamic.Resource(gvrPlacementRule).Namespace(testNamespace).UpdateStatus(
+				context.TODO(), plr, metav1.UpdateOptions{},
+			)
+			Expect(err).To(BeNil())
+			plc := utils.GetWithTimeout(
+				clientHubDynamic, gvrPolicy, testNamespace+"."+policyName, "managed1",
+				true, defaultTimeoutSeconds,
+			)
+			Expect(plc).ToNot(BeNil())
+
+			yamlPlc := utils.ParseYaml(case9ReplicatedPolicyYamlM1)
+			Eventually(func() interface{} {
+				replicatedPlc := utils.GetWithTimeout(
+					clientHubDynamic,
+					gvrPolicy,
+					testNamespace+"."+policyName,
+					"managed1",
+					true,
+					defaultTimeoutSeconds,
+				)
+
+				return replicatedPlc.Object["spec"]
+			}, defaultTimeoutSeconds, 1).Should(utils.SemanticEqual(yamlPlc.Object["spec"]))
+		})
+
+		It("should correctly report root policy hub template watches when propagated", func() {
+			By("Checking metric endpoint for root policy hub template watches")
+			Eventually(func() interface{} {
+				return utils.GetMetrics("hub_templates_active_watches", "\"[0-9]\"")
+			}, defaultTimeoutSeconds, 1).Should(Equal([]string{"2"}))
+		})
+
+		It("should clean up", func() {
+			utils.Kubectl("delete",
+				"-f", policyYaml,
+				"-n", testNamespace)
+			opt := metav1.ListOptions{}
+			utils.ListWithTimeout(clientHubDynamic, gvrPolicy, opt, 0, false, defaultTimeoutSeconds)
+		})
+
+		It("should report root policy 0 hub template watches after clean up", func() {
+			By("Checking metric endpoint for root policy hub template watches")
+			Eventually(func() interface{} {
+				return utils.GetMetrics("hub_templates_active_watches", "\"[0-9]\"")
+			}, defaultTimeoutSeconds, 1).Should(Equal([]string{"0"}))
+		})
+	})
+})
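
Both metric assertions go through utils.GetMetrics, which the test uses to read the propagator's metrics endpoint and pull out matching sample values; its implementation is not part of this commit. A hypothetical helper along the same lines (the function name, endpoint argument, and regex handling here are assumptions for illustration, not the repository's actual API):

package metricscheck

import (
	"fmt"
	"io"
	"net/http"
	"regexp"
)

// getMetricValues fetches a Prometheus text-format endpoint and returns the
// sample values of every series whose name matches `name` and whose value
// matches `valuePattern` (e.g. "[0-9]").
func getMetricValues(endpoint, name, valuePattern string) ([]string, error) {
	resp, err := http.Get(endpoint)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	// Matches lines such as: hub_templates_active_watches 2
	re := regexp.MustCompile(fmt.Sprintf(
		`(?m)^%s(?:\{[^}]*\})? (%s)$`, regexp.QuoteMeta(name), valuePattern,
	))

	values := []string{}
	for _, match := range re.FindAllStringSubmatch(string(body), -1) {
		values = append(values, match[1])
	}

	return values, nil
}

With something like this, the Eventually blocks above amount to polling the endpoint until the returned slice equals []string{"2"} while the policy is propagated, and []string{"0"} after cleanup.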
