Commit d504bfe

[qos] Headroom pool watermark test (#2614)

Add tests for the headroom pool watermark feature

How did you verify/test it?
Ran the test on Th2 on master and it passed

Signed-off-by: Neetha John <[email protected]>
1 parent 264820c

5 files changed: +113 -3

tests/qos/files/qos.yml (+4)

```diff
@@ -615,6 +615,8 @@ qos_params:
       pkts_num_trig_egr_drp: 9887
       pkts_num_fill_egr_min: 8
       cell_size: 208
+    hdrm_pool_wm_multiplier: 4
+    cell_size: 208
   th2:
     40000_300m:
       pkts_num_leak_out: 0
@@ -811,3 +813,5 @@ qos_params:
       pkts_num_trig_egr_drp: 10692
       pkts_num_fill_egr_min: 16
       cell_size: 208
+    hdrm_pool_wm_multiplier: 4
+    cell_size: 208
```
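
The two new keys sit at the hwsku level of `qos_params`, as siblings of the per speed/cable-length subsections, which is why the new test reads them from `dutQosConfig["param"]` directly rather than from `dutQosConfig["param"][portSpeedCableLength]`. A minimal sketch of that lookup, assuming PyYAML; the `th` hwsku key is illustrative, not quoted from the hunk:

```python
# Sketch only: where the new hwsku-level keys live relative to per-speed params.
import yaml

with open("tests/qos/files/qos.yml") as f:
    qos_params = yaml.safe_load(f)["qos_params"]

hwsku_params = qos_params["th"]  # hwsku-level section (illustrative key)

# Added by this commit at the hwsku level:
wm_multiplier = hwsku_params["hdrm_pool_wm_multiplier"]  # 4
cell_size = hwsku_params["cell_size"]                    # 208
```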

tests/qos/qos_sai_base.py (+1 -3)

```diff
@@ -142,9 +142,7 @@ def __getBufferProfile(self, request, duthost, table, port, priorityGroup):
         pytest_assert("xon" in bufferProfile.keys() and "xoff" in bufferProfile.keys(),
             "Could not find xon and/or xoff values for profile '{0}'".format(bufferProfileName))
 
-        disableTest = request.config.getoption("--disable_test")
-        if not disableTest:
-            self.__updateVoidRoidParams(duthost, bufferProfile)
+        self.__updateVoidRoidParams(duthost, bufferProfile)
 
         return bufferProfile
```

tests/qos/test_qos_sai.py (+53)

```diff
@@ -212,6 +212,59 @@ def testQosSaiHeadroomPoolSize(self, ptfhost, dutTestParams, dutConfig, dutQosCo
 
         self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)
 
+    def testQosSaiHeadroomPoolWatermark(self, duthosts, rand_one_dut_hostname, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, sharedHeadroomPoolSize, resetWatermark):
+        """
+            Test QoS SAI Headroom pool watermark
+
+            Args:
+                duthosts (AnsibleHost): Dut hosts
+                rand_one_dut_hostname (AnsibleHost): select one of the duts in multi dut testbed
+                ptfhost (AnsibleHost): Packet Test Framework (PTF)
+                dutTestParams (Fixture, dict): DUT host test params
+                dutConfig (Fixture, dict): Map of DUT config containing dut interfaces, test port IDs, test port IPs,
+                    and test ports
+                dutQosConfig (Fixture, dict): Map containing DUT host QoS configuration
+                ingressLosslessProfile (Fixture): Map of ingress lossless buffer profile attributes
+                resetWatermark (Fixture): reset watermarks
+
+            Returns:
+                None
+
+            Raises:
+                RunAnsibleModuleFail if ptf test fails
+        """
+        duthost = duthosts[rand_one_dut_hostname]
+        cmd_output = duthost.shell("show headroom-pool watermark", module_ignore_errors=True)
+        if dutTestParams["hwsku"] not in self.SUPPORTED_HEADROOM_SKUS or cmd_output['rc'] != 0:
+            pytest.skip("Headroom pool watermark is not supported")
+
+        portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        qosConfig = dutQosConfig["param"][portSpeedCableLength]
+        testPortIps = dutConfig["testPortIps"]
+
+        testParams = dict()
+        testParams.update(dutTestParams["basicParams"])
+        testParams.update({
+            "testbed_type": dutTestParams["topo"],
+            "dscps": qosConfig["hdrm_pool_size"]["dscps"],
+            "ecn": qosConfig["hdrm_pool_size"]["ecn"],
+            "pgs": qosConfig["hdrm_pool_size"]["pgs"],
+            "src_port_ids": qosConfig["hdrm_pool_size"]["src_port_ids"],
+            "src_port_ips": [testPortIps[port] for port in qosConfig["hdrm_pool_size"]["src_port_ids"]],
+            "dst_port_id": qosConfig["hdrm_pool_size"]["dst_port_id"],
+            "dst_port_ip": testPortIps[qosConfig["hdrm_pool_size"]["dst_port_id"]],
+            "pgs_num": qosConfig["hdrm_pool_size"]["pgs_num"],
+            "pkts_num_leak_out": qosConfig["pkts_num_leak_out"],
+            "pkts_num_trig_pfc": qosConfig["hdrm_pool_size"]["pkts_num_trig_pfc"],
+            "pkts_num_hdrm_full": qosConfig["hdrm_pool_size"]["pkts_num_hdrm_full"],
+            "pkts_num_hdrm_partial": qosConfig["hdrm_pool_size"]["pkts_num_hdrm_partial"],
+            "hdrm_pool_wm_multiplier": dutQosConfig["param"]["hdrm_pool_wm_multiplier"],
+            "cell_size": dutQosConfig["param"]["cell_size"],
+            "buf_pool_roid": ingressLosslessProfile["bufferPoolRoid"],
+            "max_headroom": sharedHeadroomPoolSize
+        })
+        self.runPtfTest(ptfhost, testCase="sai_qos_tests.HdrmPoolSizeTest", testParams=testParams)
+
     @pytest.mark.parametrize("bufPool", ["wm_buf_pool_lossless", "wm_buf_pool_lossy"])
     def testQosSaiBufferPoolWatermark(self, request, bufPool, ptfhost, dutTestParams, dutConfig, dutQosConfig, ingressLosslessProfile, egressLossyProfile, resetWatermark):
         """
```

tests/saitests/sai_qos_tests.py (+44)

```diff
@@ -26,6 +26,7 @@
                     sai_thrift_read_port_watermarks,
                     sai_thrift_read_pg_counters,
                     sai_thrift_read_buffer_pool_watermark,
+                    sai_thrift_read_headroom_pool_watermark,
                     sai_thrift_port_tx_disable,
                     sai_thrift_port_tx_enable)
 from switch_sai_thrift.ttypes import (sai_thrift_attribute_value_t,
@@ -876,6 +877,18 @@ def setUp(self):
             print >> sys.stderr, ("pkts num: leak_out: %d, trig_pfc: %d, hdrm_full: %d, hdrm_partial: %d, pkt_size %d" % (self.pkts_num_leak_out, self.pkts_num_trig_pfc, self.pkts_num_hdrm_full, self.pkts_num_hdrm_partial, self.pkt_size))
         elif self.pkts_num_trig_pfc_shp:
             print >> sys.stderr, ("pkts num: leak_out: {}, trig_pfc: {}, hdrm_full: {}, hdrm_partial: {}, pkt_size {}".format(self.pkts_num_leak_out, self.pkts_num_trig_pfc_shp, self.pkts_num_hdrm_full, self.pkts_num_hdrm_partial, self.pkt_size))
+
+        # used only for headroom pool watermark
+        if all(key in self.test_params for key in ['hdrm_pool_wm_multiplier', 'buf_pool_roid', 'cell_size', 'max_headroom']):
+            self.cell_size = int(self.test_params['cell_size'])
+            self.wm_multiplier = self.test_params['hdrm_pool_wm_multiplier']
+            print >> sys.stderr, "Wm multiplier: %d buf_pool_roid: %s" % (self.wm_multiplier, self.test_params['buf_pool_roid'])
+            self.buf_pool_roid = int(self.test_params['buf_pool_roid'], 0)
+            print >> sys.stderr, "buf_pool_roid: 0x%lx" % (self.buf_pool_roid)
+            self.max_headroom = int(self.test_params['max_headroom'])
+        else:
+            self.wm_multiplier = None
+
         sys.stderr.flush()
 
         self.dst_port_mac = self.dataplane.get_mac(0, self.dst_port_id)
@@ -997,6 +1010,15 @@ def runTest(self):
             print >> sys.stderr, "PFC triggered"
             sys.stderr.flush()
 
+            upper_bound = 2
+            if self.wm_multiplier:
+                hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                print >> sys.stderr, "Actual headroom pool watermark value to start: %d" % hdrm_pool_wm
+                assert (hdrm_pool_wm <= (upper_bound * self.cell_size * self.wm_multiplier))
+
+            expected_wm = 0
+            wm_pkt_num = 0
+            upper_bound_wm = 0
             # send packets to all pgs to fill the headroom pool
             for i in range(0, self.pgs_num):
                 # Prepare TCP packet data
@@ -1021,6 +1043,18 @@ def runTest(self):
                 assert(recv_counters[INGRESS_DROP] == recv_counters_bases[sidx_dscp_pg_tuples[i][0]][INGRESS_DROP])
                 assert(recv_counters[INGRESS_PORT_BUFFER_DROP] == recv_counters_bases[sidx_dscp_pg_tuples[i][0]][INGRESS_PORT_BUFFER_DROP])
 
+                if self.wm_multiplier:
+                    wm_pkt_num += (self.pkts_num_hdrm_full if i != self.pgs_num - 1 else self.pkts_num_hdrm_partial)
+                    hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                    expected_wm = wm_pkt_num * self.cell_size * self.wm_multiplier
+                    upper_bound_wm = expected_wm + (upper_bound * self.cell_size * self.wm_multiplier)
+                    if upper_bound_wm > self.max_headroom:
+                        upper_bound_wm = self.max_headroom
+
+                    print >> sys.stderr, "pkts sent: %d, lower bound: %d, actual headroom pool watermark: %d, upper_bound: %d" % (wm_pkt_num, expected_wm, hdrm_pool_wm, upper_bound_wm)
+                    assert(expected_wm <= hdrm_pool_wm)
+                    assert(hdrm_pool_wm <= upper_bound_wm)
+
             print >> sys.stderr, "all but the last pg hdrms filled"
             sys.stderr.flush()
 
@@ -1041,6 +1075,16 @@ def runTest(self):
             assert(xmit_counters[EGRESS_PORT_BUFFER_DROP] == xmit_counters_base[EGRESS_PORT_BUFFER_DROP])
 
             print >> sys.stderr, "pg hdrm filled"
+            if self.wm_multiplier:
+                # assert hdrm pool wm still remains the same
+                hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                assert(expected_wm <= hdrm_pool_wm)
+                assert(hdrm_pool_wm <= upper_bound_wm)
+                # at this point the headroom pool should be full; send a few more packets to continue causing drops
+                print >> sys.stderr, "overflow headroom pool"
+                send_packet(self, self.src_port_ids[sidx_dscp_pg_tuples[i][0]], pkt, 10)
+                hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(self.client, self.buf_pool_roid)
+                assert(hdrm_pool_wm <= self.max_headroom)
             sys.stderr.flush()
 
         finally:
```
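
The watermark assertions above bracket each hardware reading: after each PG's headroom is filled, the pool watermark must be at least the bytes accounted for by the packets sent so far, and at most that plus a slack of `upper_bound` packets' worth of cells, capped by the shared headroom pool size. A worked example with the qos.yml values and a hypothetical packet count:

```python
# Worked example of the bound arithmetic; packet count and pool size are hypothetical.
cell_size = 208     # bytes per cell (qos.yml)
wm_multiplier = 4   # cells each packet is expected to occupy (qos.yml)
upper_bound = 2     # slack allowance, in packets

wm_pkt_num = 1000   # hypothetical packets pushed into headroom so far
expected_wm = wm_pkt_num * cell_size * wm_multiplier  # 832000 bytes (lower bound)
upper_bound_wm = expected_wm + upper_bound * cell_size * wm_multiplier  # 833664 bytes

max_headroom = 2 * 1024 * 1024                      # hypothetical pool size, bytes
upper_bound_wm = min(upper_bound_wm, max_headroom)  # the cap applied in runTest()

# A reading hdrm_pool_wm passes iff expected_wm <= hdrm_pool_wm <= upper_bound_wm.
```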

tests/saitests/switch.py (+11)

```diff
@@ -758,6 +758,17 @@ def sai_thrift_read_buffer_pool_watermark(client, buffer_pool_id):
         return None
     return wm_vals[0]
 
+def sai_thrift_read_headroom_pool_watermark(client, buffer_pool_id):
+    buffer_pool_wm_ids = [
+        SAI_BUFFER_POOL_STAT_XOFF_ROOM_WATERMARK_BYTES
+    ]
+
+    wm_vals = client.sai_thrift_get_buffer_pool_stats(buffer_pool_id, buffer_pool_wm_ids)
+    if not wm_vals:
+        print >> sys.stderr, "sai_thrift_read_headroom_pool_watermark returns empty list"
+        return None
+    return wm_vals[0]
+
 def sai_thrift_create_vlan_member(client, vlan_id, port_id, tagging_mode):
     vlan_member_attr_list = []
     attribute_value = sai_thrift_attribute_value_t(s32=vlan_id)
```
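
`SAI_BUFFER_POOL_STAT_XOFF_ROOM_WATERMARK_BYTES` is the SAI buffer pool counter tracking the high-water mark of XOFF room (headroom) usage in bytes, so the helper returns the headroom pool watermark directly. A usage sketch in the same Python 2 style as the file, assuming an established sai_thrift `client` and `sys` imported as in switch.py; the ROID is a placeholder:

```python
# Sketch only: reading the headroom pool watermark through the new helper.
buf_pool_roid = int("0x18000000000622", 0)  # placeholder ingress lossless pool ROID
hdrm_pool_wm = sai_thrift_read_headroom_pool_watermark(client, buf_pool_roid)
if hdrm_pool_wm is None:
    print >> sys.stderr, "could not read headroom pool watermark"
else:
    print >> sys.stderr, "headroom pool watermark: %d bytes" % hdrm_pool_wm
```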
