
Commit 2f184e9

Update qos sai due to no lossless pg for some platforms (#225)
### Description of PR

1. For SPC4 and above there is only a lossy buffer, so the memory that would otherwise be reserved for lossless traffic is taken over by the lossy buffer. If the packet size is too small, the number of packets needed to occupy the shared buffer grows sharply, which can exhaust the packet descriptors. Update testQosSaiPgSharedWatermark, testQosSaiQSharedWatermark, and testQosSaiLossyQueue accordingly (see the sketch below for the arithmetic).
2. Remove the test config of scheduler.block_data_plane; otherwise it may raise a YANG validation error during config reload.
3. When there is no lossless buffer, return a dummy lossless PG buffer profile and dynamically skip the tests that depend on a lossless buffer.
4. Skip the releaseAllPorts fixture for Mellanox devices: after the QoS tests finish, teardown performs a config reload that restores the port configuration anyway, so the fixture is not needed before running the tests. This also saves about 2 minutes.
5. Related PRs:
   - sonic-net/sonic-buildimage#20992
   - sonic-net/sonic-buildimage#21056
   - sonic-net/sonic-buildimage#20991
   - sonic-net/sonic-buildimage#21427
   - sonic-net/sonic-buildimage#21762

### Type of change

- [ ] Bug fix
- [ ] Testbed and Framework(new/improvement)
- [ ] New Test case
- [ ] Skipped for non-supported platforms
- [ ] Add ownership [here](https://msazure.visualstudio.com/AzureWiki/_wiki/wikis/AzureWiki.wiki/744287/TSG-for-ownership-modification) (Microsoft required only)
- [ ] Test case improvement

### Back port request

- [ ] 202012
- [ ] 202205
- [ ] 202305
- [ ] 202311
- [ ] 202405
- [ ] 202411

### Approach

#### What is the motivation for this PR?

Update the QoS SAI tests for platforms that have no lossless PG buffer.

#### How did you do it?

Updated the lossy-buffer cases and skip the tests related to the lossless PG buffer.

#### How did you verify/test it?

Ran the QoS SAI tests on platforms without a lossless PG buffer.

#### Any platform specific information?

SN5600 and SN5610.

#### Supported testbed topology if it's a new test case?

### Documentation
1 parent ca6248b commit 2f184e9
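To make item 1 of the description concrete, here is a rough sketch of the descriptor pressure. All constants are illustrative assumptions rather than Spectrum-4 specifications; the point is only that the smaller the packet, the more packets (and hence descriptors) it takes to fill the same shared-buffer space.

```python
# Back-of-the-envelope only: CELL_SIZE and SHARED_BUFFER are assumed values,
# not real Spectrum-4 parameters. Each in-flight packet consumes a descriptor.
CELL_SIZE = 144                    # bytes per buffer cell (assumed)
SHARED_BUFFER = 4 * 1024 * 1024    # lossy shared buffer to occupy, in bytes (assumed)

def pkts_to_fill(packet_size):
    cells_per_pkt = -(-packet_size // CELL_SIZE)      # ceiling division
    return SHARED_BUFFER // (cells_per_pkt * CELL_SIZE)

for size in (800, 1200):
    print(f"packet_size={size}: ~{pkts_to_fill(size)} packets to fill the shared buffer")
# With the lossless pool folded into the lossy pool, the buffer to fill grows,
# so raising packet_size from 800 to 1200 keeps the packet count bounded.
```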

File tree: 6 files changed (+93, -18 lines)

tests/qos/files/mellanox/qos_param_generator.py (+5)

```diff
@@ -236,6 +236,11 @@ def calculate_parameters(self):
         self.qos_params_mlnx['wm_pg_shared_lossy'].update(wm_shared_lossy)
         wm_shared_lossy["pkts_num_margin"] = 8
         self.qos_params_mlnx['wm_q_shared_lossy'].update(wm_shared_lossy)
+        if 'lossy_dscp' in self.egressLossyProfile:
+            lossy_queue['dscp'] = self.egressLossyProfile['lossy_dscp']
+            self.qos_params_mlnx['wm_pg_shared_lossy']['dscp'] = self.egressLossyProfile['lossy_dscp']
+            self.qos_params_mlnx['wm_q_shared_lossy']['dscp'] = self.egressLossyProfile['lossy_dscp']
+            self.qos_params_mlnx['wm_q_shared_lossy']['queue'] = self.egressLossyProfile['lossy_queue']

         wm_buf_pool_lossless = self.qos_params_mlnx['wm_buf_pool_lossless']
         wm_buf_pool_lossless['pkts_num_trig_pfc'] = pkts_num_trig_pfc
```
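For orientation, a simplified, self-contained sketch (not the real QosParamMellanox class) of how the lossy_dscp/lossy_queue hints from the egressLossyProfile fixture might land in the parameter map:

```python
# Hypothetical stand-in for the fixture output built in qos_sai_base.py;
# the real dict also carries the buffer-profile fields read from CONFIG_DB.
egress_lossy_profile = {"lossy_dscp": "11", "lossy_queue": "4"}

qos_params_mlnx = {
    "lossy_queue_1": {"packet_size": 1200},
    "wm_pg_shared_lossy": {"packet_size": 1200, "pkts_num_margin": 8},
    "wm_q_shared_lossy": {"packet_size": 1200},
}

# Mirrors the added branch above: override DSCP/queue only when the fixture
# detected a lossy-only port and attached the hints.
if "lossy_dscp" in egress_lossy_profile:
    qos_params_mlnx["lossy_queue_1"]["dscp"] = egress_lossy_profile["lossy_dscp"]
    qos_params_mlnx["wm_pg_shared_lossy"]["dscp"] = egress_lossy_profile["lossy_dscp"]
    qos_params_mlnx["wm_q_shared_lossy"]["dscp"] = egress_lossy_profile["lossy_dscp"]
    qos_params_mlnx["wm_q_shared_lossy"]["queue"] = egress_lossy_profile["lossy_queue"]

print(qos_params_mlnx["wm_q_shared_lossy"])
# {'packet_size': 1200, 'dscp': '11', 'queue': '4'}
```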

tests/qos/files/mellanox/special_qos_config.yml (+4, -4)

```diff
@@ -27,12 +27,12 @@ qos_params:
     xon_4:
       packet_size: 800
     lossy_queue_1:
-      packet_size: 800
+      packet_size: 1200
     wm_pg_shared_lossless:
       packet_size: 800
       pkts_num_margin: 7
     wm_pg_shared_lossy:
-      packet_size: 800
-      pkts_num_margin: 5
+      packet_size: 1200
+      pkts_num_margin: 8
     wm_q_shared_lossy:
-      packet_size: 800
+      packet_size: 1200
```
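These values override the defaults from the generic QoS parameter files. A minimal sketch of such an override, assuming plain nested-dict merge semantics (the real harness may merge differently):

```python
import copy

def deep_merge(base, override):
    """Overlay `override` onto `base`, recursing into nested dicts."""
    merged = copy.deepcopy(base)
    for key, value in override.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = deep_merge(merged[key], value)
        else:
            merged[key] = value
    return merged

defaults = {"wm_pg_shared_lossy": {"packet_size": 800, "pkts_num_margin": 5}}
special = {"wm_pg_shared_lossy": {"packet_size": 1200, "pkts_num_margin": 8}}
print(deep_merge(defaults, special))
# {'wm_pg_shared_lossy': {'packet_size': 1200, 'pkts_num_margin': 8}}
```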

tests/qos/qos_sai_base.py (+46, -8)

```diff
@@ -350,8 +350,24 @@ def __getBufferProfile(self, request, dut_asic, os_version, table, port, priority):
             else:
                 bufferProfileName = out.translate({ord(i): None for i in '[]'})
         else:
-            bufferProfileName = bufkeystr + dut_asic.run_redis_cmd(
-                argv=["redis-cli", "-n", db, "HGET", keystr, "profile"])[0]
+            profile_content = dut_asic.run_redis_cmd(argv=["redis-cli", "-n", db, "HGET", keystr, "profile"])
+            if profile_content:
+                bufferProfileName = bufkeystr + profile_content[0]
+            else:
+                logger.info("No lossless buffer profile; returning a dummy profile to stay compatible with existing cases")
+                dump_buffer_profile = {
+                    "profileName": f"{bufkeystr}pg_lossless_0_0m_profile",
+                    "pool": "ingress_lossless_pool",
+                    "xon": "0",
+                    "xoff": "0",
+                    "size": "0",
+                    "dynamic_th": "0",
+                    "pg_q_alpha": "0",
+                    "port_alpha": "0",
+                    "pool_size": "0",
+                    "static_th": "0"
+                }
+                return dump_buffer_profile

         result = dut_asic.run_redis_cmd(
             argv=["redis-cli", "-n", db, "HGETALL", bufferProfileName]
@@ -1751,10 +1767,13 @@ def releaseAllPorts(
         Raises:
             RunAnsibleModuleFail if ptf test fails
         """
-        self.runPtfTest(
-            ptfhost, testCase="sai_qos_tests.ReleaseAllPorts",
-            testParams=dutTestParams["basicParams"]
-        )
+        if isMellanoxDevice(duthosts[0]):
+            logger.info("Skip the releaseAllPorts fixture for Mellanox devices")
+        else:
+            self.runPtfTest(
+                ptfhost, testCase="sai_qos_tests.ReleaseAllPorts",
+                testParams=dutTestParams["basicParams"]
+            )

     def __loadSwssConfig(self, duthost):
         """
@@ -2097,12 +2116,24 @@ def egressLossyProfile(

         srcport = dutConfig["dutInterfaces"][dutConfig["testPorts"]["src_port_id"]]

+        is_lossy_queue_only = False
+
         if srcport in dualtor_ports_for_duts:
             queues = "0-1"
         else:
-            queues = "0-2"
+            if isMellanoxDevice(duthost):
+                cable_len = dut_asic.shell(f"redis-cli -n 4 hget 'CABLE_LENGTH|AZURE' {srcport}")['stdout']
+                if cable_len == '0m':
+                    is_lossy_queue_only = True
+                    logger.info(f"{srcport} has only lossy queues")
+            if is_lossy_queue_only:
+                queue_table_postfix_list = ['1-3', '4', '5']
+                queue_to_dscp_map = {'1-3': '1', '4': '11', '5': '31'}
+                queues = random.choice(queue_table_postfix_list)
+            else:
+                queues = "0-2"

-        yield self.__getBufferProfile(
+        egress_lossy_profile = self.__getBufferProfile(
             request,
             dut_asic,
             duthost.os_version,
@@ -2111,6 +2142,12 @@ def egressLossyProfile(
             srcport,
             queues
         )
+        if is_lossy_queue_only:
+            egress_lossy_profile['lossy_dscp'] = queue_to_dscp_map[queues]
+            egress_lossy_profile['lossy_queue'] = '1' if queues == '1-3' else queues
+        logger.info(f"queues: {queues}, egressLossyProfile: {egress_lossy_profile}")
+
+        yield egress_lossy_profile

     @pytest.fixture(scope='class')
     def losslessSchedProfile(
```
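The key change in `__getBufferProfile` is the HGET-then-fallback pattern. A standalone sketch with a stubbed Redis call (the real code goes through `dut_asic.run_redis_cmd`):

```python
def hget_profile(keystr):
    # Stub for dut_asic.run_redis_cmd(argv=["redis-cli", "-n", db, "HGET",
    # keystr, "profile"]); returns [] when the BUFFER_PG key has no lossless
    # profile, as on SPC4+ lossy-only platforms.
    return []

def get_buffer_profile(keystr, bufkeystr="BUFFER_PROFILE_TABLE:"):
    profile_content = hget_profile(keystr)
    if profile_content:
        return bufkeystr + profile_content[0]
    # No lossless PG: synthesize a zeroed profile so existing callers that
    # expect a dict keep working, and tests can skip on the "0_0m" marker.
    return {
        "profileName": f"{bufkeystr}pg_lossless_0_0m_profile",
        "pool": "ingress_lossless_pool",
        "xon": "0", "xoff": "0", "size": "0", "dynamic_th": "0",
    }

print(get_buffer_profile("BUFFER_PG|Ethernet0|3-4"))
```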

tests/qos/test_qos_sai.py (+20, -2)

```diff
@@ -167,6 +167,11 @@ def check_skip_xon_hysteresis_test(xonHysteresisKey, dutQosConfig,
                     " Pls see qos.yaml for the port idx's that are needed.")


+def skip_test_on_no_lossless_pg(portSpeedCableLength):
+    if portSpeedCableLength == "0_0m":
+        pytest.skip("Skip the test since the platform has no lossless buffer PG")
+
+
 class TestQosSai(QosSaiBase):
     """TestQosSai derives from QosSaiBase and contains collection of QoS SAI test cases.

@@ -402,6 +407,7 @@ def testQosSaiPfcXoffLimit(
                 "Additional DSCPs are not supported on non-dual ToR ports")

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if dutTestParams['hwsku'] in self.BREAKOUT_SKUS and 'backend' not in dutTestParams['topo']:
             qosConfig = dutQosConfig["param"][portSpeedCableLength]["breakout"]
         else:
@@ -499,6 +505,7 @@ def testPfcStormWithSharedHeadroomOccupancy(
             pytest.skip("Shared Headroom has to be enabled for this test")

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if xonProfile in list(dutQosConfig["param"][portSpeedCableLength].keys()):
             qosConfig = dutQosConfig["param"][portSpeedCableLength]
         else:
@@ -672,6 +679,7 @@ def testQosSaiPfcXonLimit(
                 "Additional DSCPs are not supported on non-dual ToR ports")

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if xonProfile in list(dutQosConfig["param"][portSpeedCableLength].keys()):
             qosConfig = dutQosConfig["param"][portSpeedCableLength]
         else:
@@ -849,6 +857,7 @@ def testQosSaiHeadroomPoolSize(
         """

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         qosConfig = dutQosConfig["param"][portSpeedCableLength]
         testPortIps = dutConfig["testPortIps"]

@@ -1607,6 +1616,7 @@ def testQosSaiDwrr(
             RunAnsibleModuleFail if ptf test fails
         """
         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         qosConfig = dutQosConfig["param"]
         if "wrr" in qosConfig[portSpeedCableLength]:
             qosConfigWrr = qosConfig[portSpeedCableLength]["wrr"]
@@ -1686,6 +1696,9 @@ def testQosSaiPgSharedWatermark(
         """

         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        if pgProfile == "wm_pg_shared_lossless":
+            skip_test_on_no_lossless_pg(portSpeedCableLength)
+
         if pgProfile in list(dutQosConfig["param"][portSpeedCableLength].keys()):
             qosConfig = dutQosConfig["param"][portSpeedCableLength]
         else:
@@ -1789,6 +1802,7 @@ def testQosSaiPgHeadroomWatermark(
             RunAnsibleModuleFail if ptf test fails
         """
         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if dutTestParams['hwsku'] in self.BREAKOUT_SKUS and 'backend' not in dutTestParams['topo']:
             qosConfig = dutQosConfig["param"][portSpeedCableLength]["breakout"]
         else:
@@ -1901,6 +1915,8 @@ def testQosSaiQSharedWatermark(
             RunAnsibleModuleFail if ptf test fails
         """
         portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        if queueProfile == "wm_q_shared_lossless":
+            skip_test_on_no_lossless_pg(portSpeedCableLength)

         if queueProfile == "wm_q_shared_lossless":
             if dutTestParams["basicParams"]["sonic_asic_type"] == 'cisco-8000':
@@ -1962,8 +1978,8 @@ def testQosSaiQSharedWatermark(
         )

     def testQosSaiDscpToPgMapping(
-            self, get_src_dst_asic_and_duts, duthost, request, ptfhost, dutTestParams, dutConfig, dut_qos_maps  # noqa F811
-    ):
+            self, get_src_dst_asic_and_duts, duthost, request, ptfhost, dutTestParams, dutConfig, dut_qos_maps,  # noqa F811
+            dutQosConfig):
         """
         Test QoS SAI DSCP to PG mapping ptf test

@@ -1981,6 +1997,8 @@ def testQosSaiDscpToPgMapping(
             RunAnsibleModuleFail if ptf test fails
         """
         disableTest = request.config.getoption("--disable_test")
+        portSpeedCableLength = dutQosConfig["portSpeedCableLength"]
+        skip_test_on_no_lossless_pg(portSpeedCableLength)
         if dutTestParams["basicParams"]["sonic_asic_type"] == 'cisco-8000' or \
                 ('platform_asic' in dutTestParams["basicParams"] and
                  dutTestParams["basicParams"]["platform_asic"] in ["broadcom-dnx", "mellanox"]):
```

tests/saitests/py3/sai_base_test.py (+17, -3)

```diff
@@ -44,6 +44,7 @@
     "5": "scheduler.0",
     "6": "scheduler.0",
     "7": ""}
+BLOCK_DATA_PLANE_SCHEDULER_NAME = 'scheduler.block_data_plane'


 class ThriftInterface(BaseTest):
@@ -238,16 +239,15 @@ def disable_mellanox_egress_data_plane(self, ptf_port_list):
             dut_port = self.get_dut_port(ptf_port)
             dut_port_list.append(dut_port)
         self.original_dut_port_queue_scheduler_map = self.get_queue_scheduler_name(dut_port_list)
-        block_data_plane_scheduler_name = 'scheduler.block_data_plane'
         cmd_set_block_data_plane_scheduler = \
-            f'sonic-db-cli CONFIG_DB hset "SCHEDULER|{block_data_plane_scheduler_name}" "type" DWRR "weight" 15 "pir" 1'
+            f'sonic-db-cli CONFIG_DB hset "SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}" "type" DWRR "weight" 15 "pir" 1'

         self.exec_cmd_on_dut(self.server, self.test_params['dut_username'], self.test_params['dut_password'],
                              cmd_set_block_data_plane_scheduler)
         for dut_port in dut_port_list:
             for q in DATA_PLANE_QUEUE_LIST:
                 cmd_block_q = \
-                    f" sonic-db-cli CONFIG_DB hset 'QUEUE|{dut_port}|{q}' scheduler {block_data_plane_scheduler_name}"
+                    f" sonic-db-cli CONFIG_DB hset 'QUEUE|{dut_port}|{q}' scheduler {BLOCK_DATA_PLANE_SCHEDULER_NAME}"
                 self.exec_cmd_on_dut(
                     self.server, self.test_params['dut_username'], self.test_params['dut_password'], cmd_block_q)

@@ -276,6 +276,20 @@ def enable_mellanox_egress_data_plane(self, ptf_port_list):
             self.exec_cmd_on_dut(
                 self.server, self.test_params['dut_username'],
                 self.test_params['dut_password'], cmd_recover_q_scheduler_config)
+        self.remove_block_data_plane_scheduler()
+
+    def remove_block_data_plane_scheduler(self):
+        get_block_data_plane_scheduler_name = \
+            f"sonic-db-cli CONFIG_DB keys 'SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}'"
+        scheduler_name, _, _ = self.exec_cmd_on_dut(self.server,
+                                                    self.test_params['dut_username'],
+                                                    self.test_params['dut_password'],
+                                                    get_block_data_plane_scheduler_name)
+        if isinstance(scheduler_name, list) and scheduler_name and BLOCK_DATA_PLANE_SCHEDULER_NAME in scheduler_name[0]:
+            cmd_del_block_data_plane_scheduler = \
+                f'sonic-db-cli CONFIG_DB del "SCHEDULER|{BLOCK_DATA_PLANE_SCHEDULER_NAME}"'
+            self.exec_cmd_on_dut(self.server, self.test_params['dut_username'], self.test_params['dut_password'],
+                                 cmd_del_block_data_plane_scheduler)


 class ThriftInterfaceDataPlane(ThriftInterface):
```
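The new teardown helper is a check-then-delete against CONFIG_DB, which keeps repeated teardowns safe and leaves no stale scheduler entry for YANG validation to trip over on config reload. A standalone sketch with a stubbed executor (`exec_cmd_on_dut` is the harness's SSH wrapper):

```python
SCHEDULER = "scheduler.block_data_plane"
fake_config_db = {f"SCHEDULER|{SCHEDULER}": {"type": "DWRR", "weight": "15", "pir": "1"}}

def run_cmd(cmd):
    # Stub for exec_cmd_on_dut(); returns (stdout_lines, stderr, return_code).
    if cmd.startswith("sonic-db-cli CONFIG_DB keys"):
        return [k for k in fake_config_db if SCHEDULER in k], "", 0
    if cmd.startswith("sonic-db-cli CONFIG_DB del"):
        fake_config_db.pop(f"SCHEDULER|{SCHEDULER}", None)
    return [], "", 0

def remove_block_data_plane_scheduler():
    keys, _, _ = run_cmd(f"sonic-db-cli CONFIG_DB keys 'SCHEDULER|{SCHEDULER}'")
    # Delete only when the key exists, so the helper is idempotent.
    if isinstance(keys, list) and keys and SCHEDULER in keys[0]:
        run_cmd(f'sonic-db-cli CONFIG_DB del "SCHEDULER|{SCHEDULER}"')

remove_block_data_plane_scheduler()
remove_block_data_plane_scheduler()   # second call is a no-op
print(fake_config_db)                 # {}
```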

tests/saitests/py3/sai_qos_tests.py (+1, -1)

```diff
@@ -5194,7 +5194,7 @@ def runTest(self):
                 assert (q_wm_res[queue] <= (margin + 1) * cell_size)
             elif pkts_num_fill_min:
                 assert (q_wm_res[queue] == 0)
-            elif 'cisco-8000' in asic_type or "SN5600" in hwsku or "SN5400" in hwsku:
+            elif 'cisco-8000' in asic_type or "SN56" in hwsku or "SN5400" in hwsku:
                 assert (q_wm_res[queue] <= (margin + 1) * cell_size)
             else:
                 if platform_asic and platform_asic == "broadcom-dnx":
```
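Relaxing "SN5600" to the "SN56" prefix lets the same branch cover the whole lossy-only family named in the PR description (SN5600 and SN5610). A trivial illustration with hypothetical hwsku strings:

```python
# Hypothetical hwsku strings; the substring test is what the diff relies on.
for hwsku in ("Mellanox-SN5600-V256", "Mellanox-SN5610-C256"):
    assert "SN56" in hwsku
```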
