Skip to content

Commit 561300f

Browse files
committed
Merge branch 'master' into dev-reset-local-users-password
2 parents 284de91 + e3991d5 commit 561300f

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

58 files changed

+3144
-223
lines changed

azure-pipelines.yml

+31-7
Original file line numberDiff line numberDiff line change
@@ -43,14 +43,15 @@ stages:
4343
vmImage: ubuntu-20.04
4444

4545
container:
46-
image: sonicdev-microsoft.azurecr.io:443/sonic-slave-bullseye:$(BUILD_BRANCH)
46+
image: sonicdev-microsoft.azurecr.io:443/sonic-slave-bookworm:$(BUILD_BRANCH)
4747

4848
steps:
4949
- script: |
5050
set -ex
5151
sudo apt-get update
5252
sudo apt-get install -y python3-pip
5353
sudo pip3 install requests==2.31.0
54+
sudo apt-get install -y python3-protobuf
5455
displayName: "Install dependencies"
5556
5657
- script: |
@@ -84,15 +85,15 @@ stages:
8485
sudo dpkg -i libyang_1.0.73_amd64.deb
8586
sudo dpkg -i libyang-cpp_1.0.73_amd64.deb
8687
sudo dpkg -i python3-yang_1.0.73_amd64.deb
87-
workingDirectory: $(Pipeline.Workspace)/target/debs/bullseye/
88+
workingDirectory: $(Pipeline.Workspace)/target/debs/bookworm/
8889
displayName: 'Install Debian dependencies'
8990
9091
- task: DownloadPipelineArtifact@2
9192
inputs:
9293
source: specific
9394
project: build
9495
pipeline: 9
95-
artifact: sonic-swss-common
96+
artifact: sonic-swss-common-bookworm
9697
runVersion: 'latestFromBranch'
9798
runBranch: 'refs/heads/$(sourceBranch)'
9899
displayName: "Download sonic swss common deb packages"
@@ -104,6 +105,27 @@ stages:
104105
workingDirectory: $(Pipeline.Workspace)/
105106
displayName: 'Install swss-common dependencies'
106107
108+
109+
- task: DownloadPipelineArtifact@2
110+
inputs:
111+
source: specific
112+
project: build
113+
pipeline: sonic-net.sonic-dash-api
114+
artifact: sonic-dash-api
115+
runVersion: 'latestFromBranch'
116+
runBranch: 'refs/heads/$(BUILD_BRANCH)'
117+
path: $(Build.ArtifactStagingDirectory)/download
118+
patterns: |
119+
libdashapi*.deb
120+
displayName: "Download dash api"
121+
122+
- script: |
123+
set -xe
124+
sudo apt-get update
125+
sudo dpkg -i $(Build.ArtifactStagingDirectory)/download/libdashapi_*.deb
126+
workingDirectory: $(Pipeline.Workspace)/
127+
displayName: 'Install libdashapi libraries'
128+
107129
- script: |
108130
set -xe
109131
sudo pip3 install swsssdk-2.0.1-py3-none-any.whl
@@ -112,20 +134,22 @@ stages:
112134
sudo pip3 install sonic_yang_models-1.0-py3-none-any.whl
113135
sudo pip3 install sonic_config_engine-1.0-py3-none-any.whl
114136
sudo pip3 install sonic_platform_common-1.0-py3-none-any.whl
115-
workingDirectory: $(Pipeline.Workspace)/target/python-wheels/bullseye/
137+
workingDirectory: $(Pipeline.Workspace)/target/python-wheels/bookworm/
116138
displayName: 'Install Python dependencies'
117139
118140
- script: |
119141
set -ex
120142
# Install .NET CORE
121143
curl -sSL https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
122-
sudo apt-add-repository https://packages.microsoft.com/debian/11/prod
144+
sudo apt-add-repository https://packages.microsoft.com/debian/12/prod
123145
sudo apt-get update
124146
sudo apt-get install -y dotnet-sdk-8.0
125147
displayName: "Install .NET CORE"
126148
127149
- script: |
128-
python3 setup.py test
150+
pip3 install ".[testing]"
151+
pip3 uninstall --yes sonic-utilities
152+
pytest
129153
displayName: 'Test Python 3'
130154
131155
- task: PublishTestResults@2
@@ -145,7 +169,7 @@ stages:
145169

146170
- script: |
147171
set -e
148-
python3 setup.py bdist_wheel
172+
python3 -m build -n
149173
displayName: 'Build Python 3 wheel'
150174
151175
- publish: '$(System.DefaultWorkingDirectory)/dist/'

config/main.py

+17-14
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,8 @@
3434
from sonic_yang_cfg_generator import SonicYangCfgDbGenerator
3535
from utilities_common import util_base
3636
from swsscommon import swsscommon
37-
from swsscommon.swsscommon import SonicV2Connector, ConfigDBConnector, ConfigDBPipeConnector
37+
from swsscommon.swsscommon import SonicV2Connector, ConfigDBConnector, ConfigDBPipeConnector, \
38+
isInterfaceNameValid, IFACE_NAME_MAX_LEN
3839
from utilities_common.db import Db
3940
from utilities_common.intf_filter import parse_interface_in_filter
4041
from utilities_common import bgp_util
@@ -106,7 +107,6 @@
106107

107108
CFG_PORTCHANNEL_PREFIX = "PortChannel"
108109
CFG_PORTCHANNEL_PREFIX_LEN = 11
109-
CFG_PORTCHANNEL_NAME_TOTAL_LEN_MAX = 15
110110
CFG_PORTCHANNEL_MAX_VAL = 9999
111111
CFG_PORTCHANNEL_NO="<0-9999>"
112112

@@ -439,7 +439,7 @@ def is_portchannel_name_valid(portchannel_name):
439439
if (portchannel_name[CFG_PORTCHANNEL_PREFIX_LEN:].isdigit() is False or
440440
int(portchannel_name[CFG_PORTCHANNEL_PREFIX_LEN:]) > CFG_PORTCHANNEL_MAX_VAL) :
441441
return False
442-
if len(portchannel_name) > CFG_PORTCHANNEL_NAME_TOTAL_LEN_MAX:
442+
if not isInterfaceNameValid(portchannel_name):
443443
return False
444444
return True
445445

@@ -1830,7 +1830,7 @@ def reload(db, filename, yes, load_sysinfo, no_service_restart, force, file_form
18301830
if multi_asic.is_multi_asic():
18311831
# Multi-ASIC has not been 100% fully validated. Thus pass here.
18321832
pass
1833-
else:
1833+
elif "golden" in filename.lower():
18341834
config_file_yang_validation(filename)
18351835

18361836
#Stop services before config push
@@ -2206,8 +2206,8 @@ def generate_sysinfo(cur_config, config_input, ns=None):
22062206
if not platform:
22072207
platform = device_info.get_platform()
22082208

2209-
device_metadata['localhost']['mac'] = mac
2210-
device_metadata['localhost']['platform'] = platform
2209+
device_metadata['localhost']['mac'] = mac.rstrip('\n')
2210+
device_metadata['localhost']['platform'] = platform.rstrip('\n')
22112211

22122212
return
22132213

@@ -2484,8 +2484,9 @@ def add_portchannel(ctx, portchannel_name, min_links, fallback, fast_rate):
24842484
db = ValidatedConfigDBConnector(ctx.obj['db'])
24852485
if ADHOC_VALIDATION:
24862486
if is_portchannel_name_valid(portchannel_name) != True:
2487-
ctx.fail("{} is invalid!, name should have prefix '{}' and suffix '{}'"
2488-
.format(portchannel_name, CFG_PORTCHANNEL_PREFIX, CFG_PORTCHANNEL_NO))
2487+
ctx.fail("{} is invalid!, name should have prefix '{}' and suffix '{}' "
2488+
"and its length should not exceed {} characters"
2489+
.format(portchannel_name, CFG_PORTCHANNEL_PREFIX, CFG_PORTCHANNEL_NO, IFACE_NAME_MAX_LEN))
24892490
if is_portchannel_present_in_db(db, portchannel_name):
24902491
ctx.fail("{} already exists!".format(portchannel_name)) # TODO: MISSING CONSTRAINT IN YANG MODEL
24912492

@@ -6881,8 +6882,8 @@ def add_vrf(ctx, vrf_name):
68816882
config_db = ValidatedConfigDBConnector(ctx.obj['config_db'])
68826883
if not vrf_name.startswith("Vrf") and not (vrf_name == 'mgmt') and not (vrf_name == 'management'):
68836884
ctx.fail("'vrf_name' must begin with 'Vrf' or named 'mgmt'/'management' in case of ManagementVRF.")
6884-
if len(vrf_name) > 15:
6885-
ctx.fail("'vrf_name' is too long!")
6885+
if not isInterfaceNameValid(vrf_name):
6886+
ctx.fail("'vrf_name' length should not exceed {} characters".format(IFACE_NAME_MAX_LEN))
68866887
if is_vrf_exists(config_db, vrf_name):
68876888
ctx.fail("VRF {} already exists!".format(vrf_name))
68886889
elif (vrf_name == 'mgmt' or vrf_name == 'management'):
@@ -6901,8 +6902,8 @@ def del_vrf(ctx, vrf_name):
69016902
config_db = ValidatedConfigDBConnector(ctx.obj['config_db'])
69026903
if not vrf_name.startswith("Vrf") and not (vrf_name == 'mgmt') and not (vrf_name == 'management'):
69036904
ctx.fail("'vrf_name' must begin with 'Vrf' or named 'mgmt'/'management' in case of ManagementVRF.")
6904-
if len(vrf_name) > 15:
6905-
ctx.fail("'vrf_name' is too long!")
6905+
if not isInterfaceNameValid(vrf_name):
6906+
ctx.fail("'vrf_name' length should not exceed {} characters".format((IFACE_NAME_MAX_LEN)))
69066907
syslog_table = config_db.get_table("SYSLOG_SERVER")
69076908
syslog_vrf_dev = "mgmt" if vrf_name == "management" else vrf_name
69086909
for syslog_entry, syslog_data in syslog_table.items():
@@ -7932,8 +7933,8 @@ def add_loopback(ctx, loopback_name):
79327933
config_db = ValidatedConfigDBConnector(ctx.obj['db'])
79337934
if ADHOC_VALIDATION:
79347935
if is_loopback_name_valid(loopback_name) is False:
7935-
ctx.fail("{} is invalid, name should have prefix '{}' and suffix '{}' "
7936-
.format(loopback_name, CFG_LOOPBACK_PREFIX, CFG_LOOPBACK_NO))
7936+
ctx.fail("{} is invalid, name should have prefix '{}' and suffix '{}' and should not exceed {} characters"
7937+
.format(loopback_name, CFG_LOOPBACK_PREFIX, CFG_LOOPBACK_NO, IFACE_NAME_MAX_LEN))
79377938

79387939
lo_intfs = [k for k, v in config_db.get_table('LOOPBACK_INTERFACE').items() if type(k) != tuple]
79397940
if loopback_name in lo_intfs:
@@ -8680,6 +8681,8 @@ def add_subinterface(ctx, subinterface_name, vid):
86808681

86818682
if interface_alias is None:
86828683
ctx.fail("{} invalid subinterface".format(interface_alias))
8684+
if not isInterfaceNameValid(interface_alias):
8685+
ctx.fail("Subinterface name length should not exceed {} characters".format(IFACE_NAME_MAX_LEN))
86838686

86848687
if interface_alias.startswith("Po") is True:
86858688
intf_table_name = CFG_PORTCHANNEL_PREFIX

config/vxlan.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33

44
from jsonpatch import JsonPatchConflict
55
from .validated_config_db_connector import ValidatedConfigDBConnector
6+
from swsscommon.swsscommon import isInterfaceNameValid, IFACE_NAME_MAX_LEN
67

78
ADHOC_VALIDATION = True
89
#
@@ -24,6 +25,8 @@ def add_vxlan(db, vxlan_name, src_ip):
2425
if ADHOC_VALIDATION:
2526
if not clicommon.is_ipaddress(src_ip):
2627
ctx.fail("{} invalid src ip address".format(src_ip))
28+
if not isInterfaceNameValid(vxlan_name):
29+
ctx.fail("'vxlan_name' length should not exceed {} characters".format(IFACE_NAME_MAX_LEN))
2730

2831
vxlan_keys = db.cfgdb.get_keys('VXLAN_TUNNEL')
2932
if not vxlan_keys:
@@ -317,4 +320,3 @@ def del_vxlan_map_range(db, vxlan_name, vlan_start, vlan_end, vni_start):
317320
config_db.set_entry('VXLAN_TUNNEL_MAP', mapname, None)
318321
except JsonPatchConflict as e:
319322
ctx.fail("Invalid ConfigDB. Error: {}".format(e))
320-

doc/Command-Reference.md

+31
Original file line numberDiff line numberDiff line change
@@ -5137,6 +5137,37 @@ This command is to display the link-training status of the selected interfaces.
51375137
Ethernet8 trained on up up
51385138
```
51395139

5140+
**show interfaces errors**
5141+
5142+
The `show interfaces errors` command provides detailed statistics and error counters for MAC-level operations on an interface. It displays the status of various operational parameters, error counts, and timestamps for when these errors occurred.
5143+
5144+
- Usage:
5145+
```
5146+
show interfaces errors [<interface_name>]
5147+
```
5148+
5149+
- Example:
5150+
```
5151+
admin@sonic:~$ show interfaces errors Ethernet4
5152+
Port Errors Count Last timestamp(UTC)
5153+
---------------------------------- ----- -------------------
5154+
oper_error_status 5442 2024-11-02 04:00:05
5155+
mac_local_fault 2 2024-11-02 04:00:05
5156+
fec_sync_loss 2 2024-11-02 04:00:05
5157+
fec_alignment_loss 2 2024-11-02 04:00:05
5158+
high_ser_error 2 2024-11-02 04:00:05
5159+
high_ber_error 2 2024-11-02 04:00:05
5160+
data_unit_crc_error 2 2024-11-02 04:00:05
5161+
data_unit_misalignment_error 2 2024-11-02 04:00:05
5162+
signal_local_error 2 2024-11-02 04:00:05
5163+
mac_remote_fault 2 2024-11-02 04:00:50
5164+
crc_rate 2 2024-11-02 04:00:50
5165+
data_unit_size 2 2024-11-02 04:00:50
5166+
code_group_error 0 Never
5167+
no_rx_reachability 0 Never
5168+
```
5169+
5170+
51405171
**show interfaces mpls**
51415172

51425173
This command is used to display the configured MPLS state for the list of configured interfaces.

dump/dash_util.py

+90
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
import base64
2+
import uuid
3+
import socket
4+
import ipaddress
5+
from google.protobuf.message import Message
6+
from dash_api.types_pb2 import Guid, IpAddress, IpPrefix
7+
from google.protobuf.json_format import MessageToDict
8+
9+
10+
def format_ip(node):
11+
return str(ipaddress.IPv4Address(socket.ntohl(node)))
12+
13+
14+
def format_mac(node):
15+
b64 = base64.b64decode(node)
16+
return ':'.join(b64.hex()[i:i + 2] for i in range(0, 12, 2))
17+
18+
19+
def format_guid_dict(node):
20+
b64 = base64.b64decode(node['value'])
21+
return str(uuid.UUID(bytes=b64))
22+
23+
24+
def format_ip_address_dict(node):
25+
if 'ipv4' in node:
26+
return format_ip(node['ipv4'])
27+
28+
29+
def format_ip_prefix(node):
30+
ip = format_ip_address_dict(node['ip'])
31+
mask = format_ip_address_dict(node['mask'])
32+
network = ipaddress.IPv4Network(f'{ip}/{mask}', strict=False)
33+
return str(network)
34+
35+
36+
def get_decoded_value(pb, pb_data):
37+
pb.ParseFromString(pb_data[b'pb'])
38+
json_string = MessageToDict(pb, preserving_proto_field_name=True)
39+
json_string = find_known_types_sec(pb, json_string)
40+
return json_string
41+
42+
43+
decode_types = [IpAddress, Guid, IpPrefix]
44+
decode_types = [cls.__module__ + '.' + cls.__name__ for cls in decode_types]
45+
decode_fn = {'IpAddress': format_ip_address_dict,
46+
'Guid': format_guid_dict,
47+
'mac_address': format_mac,
48+
'IpPrefix': format_ip_prefix}
49+
50+
51+
def find_known_types_sec(pb2_obj, pb2_dict):
52+
53+
def process_msg_field(obj, proto_dict, field_name):
54+
class_name = type(obj).__name__
55+
obj_type = f"{type(obj).__module__}.{type(obj).__name__}"
56+
if obj_type in decode_types:
57+
proto_dict[field_name] = decode_fn[class_name](proto_dict[field_name])
58+
else:
59+
find_index(obj, proto_dict[field_name])
60+
61+
def process_rep_field(obj, proto_dict, field_name):
62+
final_list = []
63+
requires_change = False
64+
for ind, value in enumerate(obj):
65+
if isinstance(value, Message):
66+
obj_type = f"{type(value).__module__}.{type(value).__name__}"
67+
if obj_type in decode_types:
68+
requires_change = True
69+
class_name = type(value).__name__
70+
final_list.append(decode_fn[class_name](proto_dict[field_name][ind]))
71+
else:
72+
find_index(value, pb2_dict[field_name][ind])
73+
if requires_change:
74+
proto_dict[field_name] = final_list
75+
76+
def find_index(proto_obj, proto_dict=pb2_dict):
77+
for field_descriptor, value in proto_obj.ListFields():
78+
field_name = field_descriptor.name
79+
field_type = field_descriptor.type
80+
if field_type == field_descriptor.TYPE_MESSAGE:
81+
obj = getattr(proto_obj, field_name)
82+
if field_descriptor.label == field_descriptor.LABEL_REPEATED:
83+
process_rep_field(obj, proto_dict, field_name)
84+
else:
85+
process_msg_field(obj, proto_dict, field_name)
86+
elif field_name in decode_fn:
87+
proto_dict[field_name] = decode_fn[field_name](proto_dict[field_name])
88+
89+
find_index(pb2_obj)
90+
return pb2_dict

dump/main.py

+13-4
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def state(ctx, module, identifier, db, table, key_map, verbose, namespace):
9393
vidtorid = extract_rid(collected_info, namespace, ctx.obj.conn_pool)
9494

9595
if not key_map:
96-
collected_info = populate_fv(collected_info, module, namespace, ctx.obj.conn_pool)
96+
collected_info = populate_fv(collected_info, module, namespace, ctx.obj.conn_pool, obj.return_pb2_obj())
9797

9898
for id in vidtorid.keys():
9999
collected_info[id]["ASIC_DB"]["vidtorid"] = vidtorid[id]
@@ -145,7 +145,7 @@ def filter_out_dbs(db_list, collected_info):
145145
return collected_info
146146

147147

148-
def populate_fv(info, module, namespace, conn_pool):
148+
def populate_fv(info, module, namespace, conn_pool, dash_object):
149149
all_dbs = set()
150150
for id in info.keys():
151151
for db_name in info[id].keys():
@@ -157,7 +157,9 @@ def populate_fv(info, module, namespace, conn_pool):
157157
db_cfg_file.connect(plugins.dump_modules[module].CONFIG_FILE, namespace)
158158
else:
159159
conn_pool.get(db_name, namespace)
160-
160+
if dash_object:
161+
conn_pool.get_dash_conn(namespace)
162+
redis_conn = conn_pool.cache.get(namespace, {}).get("DASH_"+CONN, None)
161163
db_conn = conn_pool.cache.get(namespace, {}).get(CONN, None)
162164

163165
final_info = {}
@@ -170,10 +172,17 @@ def populate_fv(info, module, namespace, conn_pool):
170172
for key in info[id][db_name]["keys"]:
171173
if db_name == "CONFIG_FILE":
172174
fv = db_cfg_file.get(db_name, key)
175+
elif dash_object and db_name == "APPL_DB":
176+
try:
177+
from dump.dash_util import get_decoded_value
178+
pb_data = redis_conn.hgetall(key)
179+
fv = get_decoded_value(dash_object, pb_data)
180+
except ModuleNotFoundError:
181+
print("Issue in importing dash module!")
182+
return final_info
173183
else:
174184
fv = db_conn.get_all(db_name, key)
175185
final_info[id][db_name]["keys"].append({key: fv})
176-
177186
return final_info
178187

179188

0 commit comments

Comments
 (0)