
Commit 1dbb607

JibinBao authored and OriTrabelsi committed
Update qos sai due to no lossless pg for some platforms (sonic-net#16440)
1. For SPC4 and above there is only a lossy buffer, so the buffer previously reserved for lossless traffic is taken over by the lossy buffer. If the packet size is too small, the number of packets needed to occupy the shared buffer grows sharply, which can exhaust the descriptors, so update testQosSaiPgSharedWatermark, testQosSaiQSharedWatermark, and testQosSaiLossyQueue accordingly (see the sketch after this list).
2. Remove the test config of scheduler.block_data_plane; otherwise it may raise a YANG validation error during config reload.
3. When there is no lossless buffer, return a dump (placeholder) lossless PG buffer profile and dynamically skip the tests that depend on lossless buffers.
4. Skip the releaseAllPorts fixture for Mellanox devices: after the QoS tests finish, the teardown does a config reload that restores the port configuration, so the fixture is not needed before the tests run. This also saves about 2 minutes.
5. Related PRs:
   - [Mellanox] Update buffer calculations for Mellanox-SN5600-C224O8 SKU sonic-buildimage#20992
   - [Mellanox] Add x86_64-nvidia_sn5610n-r0 new platform and SKUs sonic-buildimage#21056
   - [Mellanox] Update Mellanox-SN5600-C256S1 buffer calculations sonic-buildimage#20991
   - [Mellanox] Update Mellanox-SN5600-C256S1, Mellanox-SN5600-C224O8 buffers and DSCP mapping sonic-buildimage#21427
   - [Mellanox] Update DSCP mapping for SN5600, SN5610 SKUs sonic-buildimage#21762
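
For context on point 1, a minimal back-of-the-envelope sketch of the packet-count arithmetic; the cell size and pool size below are assumed values for illustration only and are not taken from any SKU:

import math

CELL_SIZE = 144                        # assumed buffer cell size in bytes
SHARED_POOL_BYTES = 48 * 1024 * 1024   # assumed shared-pool size in bytes

def pkts_to_fill_pool(packet_size_bytes):
    # Each packet occupies a whole number of cells, so larger packets fill the pool with fewer packets.
    cells_per_pkt = math.ceil(packet_size_bytes / CELL_SIZE)
    return SHARED_POOL_BYTES // (cells_per_pkt * CELL_SIZE)

print(pkts_to_fill_pool(800))    # many small packets -> more descriptors consumed
print(pkts_to_fill_pool(1200))   # fewer, larger packets fill the same pool

With the lossless pool folded into the lossy pool on SPC4+, the pool to fill grows, so raising packet_size from 800 to 1200 in special_qos_config.yml keeps the packet (and descriptor) count manageable.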
1 parent 56cf8fb commit 1dbb607

File tree

6 files changed: +92 −17 lines

tests/qos/files/mellanox/qos_param_generator.py

+5

@@ -236,6 +236,11 @@ def calculate_parameters(self):
         self.qos_params_mlnx['wm_pg_shared_lossy'].update(wm_shared_lossy)
         wm_shared_lossy["pkts_num_margin"] = 8
         self.qos_params_mlnx['wm_q_shared_lossy'].update(wm_shared_lossy)
+        if 'lossy_dscp' in self.egressLossyProfile:
+            lossy_queue['dscp'] = self.egressLossyProfile['lossy_dscp']
+            self.qos_params_mlnx['wm_pg_shared_lossy']['dscp'] = self.egressLossyProfile['lossy_dscp']
+            self.qos_params_mlnx['wm_q_shared_lossy']['dscp'] = self.egressLossyProfile['lossy_dscp']
+            self.qos_params_mlnx['wm_q_shared_lossy']['queue'] = self.egressLossyProfile['lossy_queue']

         wm_buf_pool_lossless = self.qos_params_mlnx['wm_buf_pool_lossless']
         wm_buf_pool_lossless['pkts_num_trig_pfc'] = pkts_num_trig_pfc

tests/qos/files/mellanox/special_qos_config.yml

+4 −4

@@ -27,12 +27,12 @@ qos_params:
     xon_4:
         packet_size: 800
     lossy_queue_1:
-        packet_size: 800
+        packet_size: 1200
     wm_pg_shared_lossless:
         packet_size: 800
         pkts_num_margin: 7
     wm_pg_shared_lossy:
-        packet_size: 800
-        pkts_num_margin: 5
+        packet_size: 1200
+        pkts_num_margin: 8
     wm_q_shared_lossy:
-        packet_size: 800
+        packet_size: 1200

tests/qos/qos_sai_base.py

+46 −8

@@ -350,8 +350,24 @@ def __getBufferProfile(self, request, dut_asic, os_version, table, port, priorit
             else:
                 bufferProfileName = out.translate({ord(i): None for i in '[]'})
         else:
-            bufferProfileName = bufkeystr + dut_asic.run_redis_cmd(
-                argv=["redis-cli", "-n", db, "HGET", keystr, "profile"])[0]
+            profile_content = dut_asic.run_redis_cmd(argv=["redis-cli", "-n", db, "HGET", keystr, "profile"])
+            if profile_content:
+                bufferProfileName = bufkeystr + profile_content[0]
+            else:
+                logger.info("No lossless buffer. To stay compatible with the existing cases, return a dump bufferProfile")
+                dump_buffer_profile = {
+                    "profileName": f"{bufkeystr}pg_lossless_0_0m_profile",
+                    "pool": "ingress_lossless_pool",
+                    "xon": "0",
+                    "xoff": "0",
+                    "size": "0",
+                    "dynamic_th": "0",
+                    "pg_q_alpha": "0",
+                    "port_alpha": "0",
+                    "pool_size": "0",
+                    "static_th": "0"
+                }
+                return dump_buffer_profile

         result = dut_asic.run_redis_cmd(
             argv=["redis-cli", "-n", db, "HGETALL", bufferProfileName]

@@ -1765,10 +1781,13 @@ def releaseAllPorts(
         Raises:
             RunAnsibleModuleFail if ptf test fails
         """
-        self.runPtfTest(
-            ptfhost, testCase="sai_qos_tests.ReleaseAllPorts",
-            testParams=dutTestParams["basicParams"]
-        )
+        if isMellanoxDevice(duthosts[0]):
+            logger.info("skip releaseAllPorts fixture for mellanox device")
+        else:
+            self.runPtfTest(
+                ptfhost, testCase="sai_qos_tests.ReleaseAllPorts",
+                testParams=dutTestParams["basicParams"]
+            )

     def __loadSwssConfig(self, duthost):
         """

@@ -2111,12 +2130,25 @@ def egressLossyProfile(

         srcport = dutConfig["dutInterfaces"][dutConfig["testPorts"]["src_port_id"]]

+        is_lossy_queue_only = False
+
         if srcport in dualtor_ports_for_duts:
             queues = "0-1"
         else:
-            queues = "0-2"
+            if isMellanoxDevice(duthost):
+                cable_len = dut_asic.shell(f"redis-cli -n 4 hget 'CABLE_LENGTH|AZURE' {srcport}")['stdout']
+                if cable_len == '0m':
+                    is_lossy_queue_only = True
+                    logger.info(f"{srcport} has only lossy queue")
+            if is_lossy_queue_only:
+                is_lossy_queue_only = True
+                queue_table_postfix_list = ['1-3', '4', '5']
+                queue_to_dscp_map = {'1-3': '1', '4': '11', '5': '31'}
+                queues = random.choice(queue_table_postfix_list)
+            else:
+                queues = "0-2"

-        yield self.__getBufferProfile(
+        egress_lossy_profile = self.__getBufferProfile(
             request,
             dut_asic,
             duthost.os_version,

@@ -2125,6 +2157,12 @@ def egressLossyProfile(
             srcport,
             queues
         )
+        if is_lossy_queue_only:
+            egress_lossy_profile['lossy_dscp'] = queue_to_dscp_map[queues]
+            egress_lossy_profile['lossy_queue'] = '1' if queues == '1-3' else queues
+            logger.info(f"queues:{queues}, egressLossyProfile: {egress_lossy_profile}")
+
+        yield egress_lossy_profile

     @pytest.fixture(scope='class')
     def losslessSchedProfile(
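
The new cable-length check in the egressLossyProfile fixture boils down to a single CONFIG_DB lookup. The sketch below assumes it runs directly on the DUT (the fixture itself goes through dut_asic.shell), and the port name is only an example, not from the commit:

import random
import subprocess

def port_is_lossy_only(port="Ethernet0"):   # example port name
    # A port whose CABLE_LENGTH|AZURE entry is "0m" has no lossless PGs, so only lossy queues apply.
    out = subprocess.run(["redis-cli", "-n", "4", "hget", "CABLE_LENGTH|AZURE", port],
                         capture_output=True, text=True).stdout.strip()
    return out == "0m"

if port_is_lossy_only():
    queues = random.choice(['1-3', '4', '5'])            # lossy queue groups used by the fixture
    dscp = {'1-3': '1', '4': '11', '5': '31'}[queues]    # matching DSCP values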

tests/qos/test_qos_sai.py

+19 −1

@@ -175,6 +175,11 @@ def check_skip_xon_hysteresis_test(xonHysteresisKey, dutQosConfig,
                        " Pls see qos.yaml for the port idx's that are needed.")


+def skip_test_on_no_lossless_pg(portSpeedCableLength):
+    if portSpeedCableLength == "0_0m":
+        pytest.skip("skip the test due to no buffer lossless pg")
+
+
 class TestQosSai(QosSaiBase):
     """TestQosSai derives from QosSaiBase and contains collection of QoS SAI test cases.

@@ -411,6 +416,7 @@ def testQosSaiPfcXoffLimit(
                         "Additional DSCPs are not supported on non-dual ToR ports")

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if dutTestParams['hwsku'] in self.BREAKOUT_SKUS and 'backend' not in dutTestParams['topo']:
             qosConfig = dutQosConfig["param"][portSpeedCableLength]["breakout"]
         else:

@@ -508,6 +514,7 @@ def testPfcStormWithSharedHeadroomOccupancy(
             pytest.skip("Shared Headroom has to be enabled for this test")

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if xonProfile in list(dutQosConfig["param"][portSpeedCableLength].keys()):
             qosConfig = dutQosConfig["param"][portSpeedCableLength]
         else:

@@ -681,6 +688,7 @@ def testQosSaiPfcXonLimit(
                         "Additional DSCPs are not supported on non-dual ToR ports")

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if xonProfile in list(dutQosConfig["param"][portSpeedCableLength].keys()):
             qosConfig = dutQosConfig["param"][portSpeedCableLength]
         else:

@@ -858,6 +866,7 @@ def testQosSaiHeadroomPoolSize(
         """

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         qosConfig = dutQosConfig["param"][portSpeedCableLength]
         testPortIps = dutConfig["testPortIps"]


@@ -1616,6 +1625,7 @@ def testQosSaiDwrr(
             RunAnsibleModuleFail if ptf test fails
         """
         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         qosConfig = dutQosConfig["param"]
         if "wrr" in qosConfig[portSpeedCableLength]:
             qosConfigWrr = qosConfig[portSpeedCableLength]["wrr"]

@@ -1695,6 +1705,9 @@ def testQosSaiPgSharedWatermark(
         """

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        if pgProfile == "wm_pg_shared_lossless":
+            skip_test_on_no_lossless_pg(portSpeedCableLength)
+
         if pgProfile in list(dutQosConfig["param"][portSpeedCableLength].keys()):
             qosConfig = dutQosConfig["param"][portSpeedCableLength]
         else:

@@ -1798,6 +1811,7 @@ def testQosSaiPgHeadroomWatermark(
             RunAnsibleModuleFail if ptf test fails
         """
         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if dutTestParams['hwsku'] in self.BREAKOUT_SKUS and 'backend' not in dutTestParams['topo']:
             qosConfig = dutQosConfig["param"][portSpeedCableLength]["breakout"]
         else:

@@ -1910,6 +1924,8 @@ def testQosSaiQSharedWatermark(
             RunAnsibleModuleFail if ptf test fails
         """
         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        if queueProfile == "wm_q_shared_lossless":
+            skip_test_on_no_lossless_pg(portSpeedCableLength)

         if queueProfile == "wm_q_shared_lossless":
             if dutTestParams["basicParams"]["sonic_asic_type"] == 'cisco-8000':

@@ -1972,7 +1988,7 @@ def testQosSaiQSharedWatermark(

     def testQosSaiDscpToPgMapping(
             self, get_src_dst_asic_and_duts, duthost, request, ptfhost, dutTestParams, dutConfig, dut_qos_maps, # noqa F811
-            change_lag_lacp_timer):
+            change_lag_lacp_timer, dutQosConfig):
         """
         Test QoS SAI DSCP to PG mapping ptf test


@@ -1990,6 +2006,8 @@ def testQosSaiDscpToPgMapping(
             RunAnsibleModuleFail if ptf test fails
         """
         disableTest = request.config.getoption("--disable_test")
+        portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if dutTestParams["basicParams"]["sonic_asic_type"] == 'cisco-8000' or \
                 ('platform_asic' in dutTestParams["basicParams"] and
                  dutTestParams["basicParams"]["platform_asic"] in ["broadcom-dnx", "mellanox"]):

tests/saitests/py3/sai_base_test.py

+17 −3

@@ -44,6 +44,7 @@
                                "5": "scheduler.0",
                                "6": "scheduler.0",
                                "7": ""}
+BLOCK_DATA_PLANE_SCHEDULER_NAME = 'scheduler.block_data_plane'


 class ThriftInterface(BaseTest):

@@ -238,16 +239,15 @@ def disable_mellanox_egress_data_plane(self, ptf_port_list):
             dut_port = self.get_dut_port(ptf_port)
             dut_port_list.append(dut_port)
         self.original_dut_port_queue_scheduler_map = self.get_queue_scheduler_name(dut_port_list)
-        block_data_plane_scheduler_name = 'scheduler.block_data_plane'
         cmd_set_block_data_plane_scheduler = \
-            f'sonic-db-cli CONFIG_DB hset "SCHEDULER|{block_data_plane_scheduler_name}" "type" DWRR "weight" 15 "pir" 1'
+            f'sonic-db-cli CONFIG_DB hset "SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}" "type" DWRR "weight" 15 "pir" 1'

         self.exec_cmd_on_dut(self.server, self.test_params['dut_username'], self.test_params['dut_password'],
                              cmd_set_block_data_plane_scheduler)
         for dut_port in dut_port_list:
             for q in DATA_PLANE_QUEUE_LIST:
                 cmd_block_q = \
-                    f" sonic-db-cli CONFIG_DB hset 'QUEUE|{dut_port}|{q}' scheduler {block_data_plane_scheduler_name}"
+                    f" sonic-db-cli CONFIG_DB hset 'QUEUE|{dut_port}|{q}' scheduler {BLOCK_DATA_PLANE_SCHEDULER_NAME}"
                 self.exec_cmd_on_dut(
                     self.server, self.test_params['dut_username'], self.test_params['dut_password'], cmd_block_q)


@@ -276,6 +276,20 @@ def enable_mellanox_egress_data_plane(self, ptf_port_list):
             self.exec_cmd_on_dut(
                 self.server, self.test_params['dut_username'],
                 self.test_params['dut_password'], cmd_recover_q_scheduler_config)
+        self.remove_block_data_plan_scheduler()
+
+    def remove_block_data_plan_scheduler(self):
+        get_block_data_plane_scheduler_name = \
+            f"sonic-db-cli CONFIG_DB keys 'SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}'"
+        scheduler_name, _, _ = self.exec_cmd_on_dut(self.server,
+                                                    self.test_params['dut_username'],
+                                                    self.test_params['dut_password'],
+                                                    get_block_data_plane_scheduler_name)
+        if isinstance(scheduler_name, list) and BLOCK_DATA_PLANE_SCHEDULER_NAME in scheduler_name[0]:
+            cmd_del_block_data_plane_scheduler = \
+                f'sonic-db-cli CONFIG_DB del "SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}"'
+            self.exec_cmd_on_dut(self.server, self.test_params['dut_username'], self.test_params['dut_password'],
+                                 cmd_del_block_data_plane_scheduler)


 class ThriftInterfaceDataPlane(ThriftInterface):
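
As a standalone illustration of the new cleanup path in sai_base_test.py, this sketch issues the same sonic-db-cli commands directly on the DUT (the test class routes them through exec_cmd_on_dut over SSH):

import subprocess

BLOCK_DATA_PLANE_SCHEDULER_NAME = "scheduler.block_data_plane"

def remove_block_data_plane_scheduler():
    # Delete the temporary scheduler once the queues are restored, so a later "config reload"
    # does not hit a YANG validation error on a SCHEDULER entry that no QUEUE references.
    key = f"SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}"
    keys = subprocess.run(["sonic-db-cli", "CONFIG_DB", "keys", key],
                          capture_output=True, text=True).stdout
    if BLOCK_DATA_PLANE_SCHEDULER_NAME in keys:
        subprocess.run(["sonic-db-cli", "CONFIG_DB", "del", key], check=True)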

tests/saitests/py3/sai_qos_tests.py

+1 −1

@@ -5209,7 +5209,7 @@ def runTest(self):
                 assert (q_wm_res[queue] <= (margin + 1) * cell_size)
             elif pkts_num_fill_min:
                 assert (q_wm_res[queue] == 0)
-            elif 'cisco-8000' in asic_type or "SN5600" in hwsku or "SN5400" in hwsku:
+            elif 'cisco-8000' in asic_type or "SN56" in hwsku or "SN5400" in hwsku:
                 assert (q_wm_res[queue] <= (margin + 1) * cell_size)
             else:
                 if platform_asic and platform_asic == "broadcom-dnx":
