Skip to content

Commit 9cd6c8d

Browse files
authored
Debug dump utility dash objects update (sonic-net#3387)
#### What I did Added support for the following DASH objects for the dump utility: `dash_acl_group` `dash_acl_out` `dash_acl_rule` `dash_appliance` `dash_prefix_tag` `dash_eni` `dash_qos` `dash_route` `dash_route_rule` `dash_vnet_mapping` `dash_vnet` This PR also adds Match infra update in order to consider Dash objects, so new match requests can be created with the fields present in the dash objects to obtain the fields/keys. #### How I did it Added `protobuf` library and `libdashapi` to `sonic-utilities`, this is required as the dash objects are stored in the APPL_DB in protobuf format, and we need the .proto files which are available in the `libdashapi` package. We also use `redis` package instead of the `SonicV2Connector` since `SonicV2Connector` `get_all` function from the connector considers null terminated strings so the complete protobuf data is not obtained using `get_all` function #### How to verify it `dump state all <dash_object>` Examples: ``` admin@sonic:~$ dump state dash_vnet Vnet1 -t +-------------------+-----------+--------------------------------------------------------------------------------------------+ | dash_vnet_table | DB_NAME | DUMP | +===================+===========+============================================================================================+ | Vnet1 | APPL_DB | +-----------------------+----------------------------------------------------+ | | | | | Keys | field-value pairs | | | | | +=======================+====================================================+ | | | | | DASH_VNET_TABLE:Vnet1 | +---------+--------------------------------------+ | | | | | | | | field | value | | | | | | | | |---------+--------------------------------------| | | | | | | | | vni | 50 | | | | | | | | | guid | 5526cce8-26ab-4193-b946-ccc0e8f930b0 | | | | | | | | +---------+--------------------------------------+ | | | | | +-----------------------+----------------------------------------------------+ | 
+-------------------+-----------+--------------------------------------------------------------------------------------------+ | Vnet1 | ASIC_DB | +------------------------------------------------------+---------------------------------+ | | | | | Keys | field-value pairs | | | | | +======================================================+=================================+ | | | | | ASIC_STATE:SAI_OBJECT_TYPE_VNET:oid:0x7a000000000021 | +-------------------+---------+ | | | | | | | | field | value | | | | | | | | |-------------------+---------| | | | | | | | | SAI_VNET_ATTR_VNI | 100 | | | | | | | | +-------------------+---------+ | | | | | +------------------------------------------------------+---------------------------------+ | | | | +----------------------+--------------------+ | | | | | vid | rid | | | | | +======================+====================+ | | | | | oid:0x7a000000000021 | oid:0xffff70009130 | | | | | +----------------------+--------------------+ | +-------------------+-----------+--------------------------------------------------------------------------------------------+ ``` ``` admin@sonic:~$ dump state dash_acl_rule all { "group1:rule1": { "APPL_DB": { "keys": [ { "DASH_ACL_RULE_TABLE:group1:rule1": { "action": "ACTION_PERMIT", "terminating": true, "src_addr": [ "0.0.0.0/0" ], "dst_addr": [ "0.0.0.0/0" ], "src_port": [ { "value": 80 } ], "dst_port": [ { "value": 5005 } ] } } ], "tables_not_found": [] } } } ```
1 parent f705c45 commit 9cd6c8d

35 files changed

+2468
-10
lines changed

azure-pipelines.yml

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,7 @@ stages:
8484
sudo dpkg -i libyang_1.0.73_amd64.deb
8585
sudo dpkg -i libyang-cpp_1.0.73_amd64.deb
8686
sudo dpkg -i python3-yang_1.0.73_amd64.deb
87+
sudo dpkg -i libprotobuf*.deb
8788
workingDirectory: $(Pipeline.Workspace)/target/debs/bullseye/
8889
displayName: 'Install Debian dependencies'
8990
@@ -104,6 +105,27 @@ stages:
104105
workingDirectory: $(Pipeline.Workspace)/
105106
displayName: 'Install swss-common dependencies'
106107
108+
109+
- task: DownloadPipelineArtifact@2
110+
inputs:
111+
source: specific
112+
project: build
113+
pipeline: sonic-net.sonic-dash-api
114+
artifact: sonic-dash-api
115+
runVersion: 'latestFromBranch'
116+
runBranch: 'refs/heads/$(BUILD_BRANCH)'
117+
path: $(Build.ArtifactStagingDirectory)/download
118+
patterns: |
119+
libdashapi*.deb
120+
displayName: "Download dash api"
121+
122+
- script: |
123+
set -xe
124+
sudo apt-get update
125+
sudo dpkg -i $(Build.ArtifactStagingDirectory)/download/libdashapi_*.deb
126+
workingDirectory: $(Pipeline.Workspace)/
127+
displayName: 'Install libdashapi libraries'
128+
107129
- script: |
108130
set -xe
109131
sudo pip3 install swsssdk-2.0.1-py3-none-any.whl

dump/dash_util.py

Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
import base64
2+
import uuid
3+
import socket
4+
import ipaddress
5+
from google.protobuf.message import Message
6+
from dash_api.types_pb2 import Guid, IpAddress, IpPrefix
7+
from google.protobuf.json_format import MessageToDict
8+
9+
10+
def format_ip(node):
    """Render a network-byte-order uint32 (the protobuf ``ipv4`` field
    value as produced by MessageToDict) in dotted-quad notation."""
    host_order = socket.ntohl(node)
    return str(ipaddress.IPv4Address(host_order))
12+
13+
14+
def format_mac(node):
    """Decode a base64-encoded 6-byte MAC address (protobuf ``bytes``
    field as rendered by MessageToDict) into colon-separated hex."""
    raw = base64.b64decode(node)
    # Only the first 6 octets form the MAC, matching the 12 hex digits.
    return ':'.join(f'{octet:02x}' for octet in raw[:6])
17+
18+
19+
def format_guid_dict(node):
    """Render a dash ``Guid`` message dict ``{'value': <base64>}`` as the
    canonical 36-character UUID string."""
    raw_bytes = base64.b64decode(node['value'])
    return str(uuid.UUID(bytes=raw_bytes))
22+
23+
24+
def format_ip_address_dict(node):
    """Translate a dash ``IpAddress`` message dict into printable form.

    Only the IPv4 variant is handled; any other variant (presumably
    'ipv6' -- not covered here) falls through and yields ``None``.
    """
    if 'ipv4' not in node:
        return None
    return format_ip(node['ipv4'])
27+
28+
29+
def format_ip_prefix(node):
    """Render a dash ``IpPrefix`` message dict (``ip`` and ``mask``
    IpAddress sub-messages) in CIDR notation, e.g. ``'10.0.0.0/24'``."""
    addr = format_ip_address_dict(node['ip'])
    netmask = format_ip_address_dict(node['mask'])
    # strict=False tolerates host bits set below the mask.
    network = ipaddress.IPv4Network(f'{addr}/{netmask}', strict=False)
    return str(network)
34+
35+
36+
def get_decoded_value(pb, pb_data):
    """Deserialize a raw APPL_DB hash into a JSON-like dict.

    pb      -- an empty protobuf message instance of the table's type
    pb_data -- redis ``hgetall`` result; the serialized message lives
               under the binary key ``b'pb'``

    Well-known dash types in the result are post-processed into
    human-readable strings by ``find_known_types_sec``.
    """
    pb.ParseFromString(pb_data[b'pb'])
    as_dict = MessageToDict(pb, preserving_proto_field_name=True)
    return find_known_types_sec(pb, as_dict)
41+
42+
43+
# Message types whose MessageToDict rendering gets replaced by a
# human-readable string; stored as fully-qualified names so lookups
# cannot collide with same-named classes from other modules.
decode_types = [IpAddress, Guid, IpPrefix]
decode_types = [cls.__module__ + '.' + cls.__name__ for cls in decode_types]
# Maps a message class name -- or the scalar field name 'mac_address' --
# to the formatter that prettifies its MessageToDict value.
decode_fn = {'IpAddress': format_ip_address_dict,
             'Guid': format_guid_dict,
             'mac_address': format_mac,
             'IpPrefix': format_ip_prefix}
49+
50+
51+
def find_known_types_sec(pb2_obj, pb2_dict):
    """Walk a protobuf message alongside its MessageToDict rendering and
    replace every well-known dash type (IpAddress, Guid, IpPrefix) and
    every ``mac_address`` scalar with a human-readable string, in place.

    pb2_obj  -- the parsed protobuf message
    pb2_dict -- its MessageToDict rendering (mutated and returned)
    """

    def process_msg_field(obj, proto_dict, field_name):
        # Singular message field: decode it if it is a known type,
        # otherwise recurse into the matching sub-dict.
        class_name = type(obj).__name__
        obj_type = f"{type(obj).__module__}.{type(obj).__name__}"
        if obj_type in decode_types:
            proto_dict[field_name] = decode_fn[class_name](proto_dict[field_name])
        else:
            find_index(obj, proto_dict[field_name])

    def process_rep_field(obj, proto_dict, field_name):
        # Repeated message field: build a decoded copy and swap it in
        # only if at least one element was a known type.
        final_list = []
        requires_change = False
        for ind, value in enumerate(obj):
            if isinstance(value, Message):
                obj_type = f"{type(value).__module__}.{type(value).__name__}"
                if obj_type in decode_types:
                    requires_change = True
                    class_name = type(value).__name__
                    final_list.append(decode_fn[class_name](proto_dict[field_name][ind]))
                else:
                    # BUGFIX: recurse into the dict mirroring *this*
                    # nesting level (proto_dict), not the top-level
                    # pb2_dict -- the old code broke for repeated
                    # messages nested more than one level deep.
                    find_index(value, proto_dict[field_name][ind])
        if requires_change:
            proto_dict[field_name] = final_list

    def find_index(proto_obj, proto_dict=pb2_dict):
        # ListFields() yields only the fields that are actually set.
        for field_descriptor, value in proto_obj.ListFields():
            field_name = field_descriptor.name
            field_type = field_descriptor.type
            if field_type == field_descriptor.TYPE_MESSAGE:
                obj = getattr(proto_obj, field_name)
                if field_descriptor.label == field_descriptor.LABEL_REPEATED:
                    process_rep_field(obj, proto_dict, field_name)
                else:
                    process_msg_field(obj, proto_dict, field_name)
            elif field_name in decode_fn:
                # Scalar field with a dedicated formatter (mac_address).
                proto_dict[field_name] = decode_fn[field_name](proto_dict[field_name])

    find_index(pb2_obj)
    return pb2_dict

dump/main.py

Lines changed: 13 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ def state(ctx, module, identifier, db, table, key_map, verbose, namespace):
9393
vidtorid = extract_rid(collected_info, namespace, ctx.obj.conn_pool)
9494

9595
if not key_map:
96-
collected_info = populate_fv(collected_info, module, namespace, ctx.obj.conn_pool)
96+
collected_info = populate_fv(collected_info, module, namespace, ctx.obj.conn_pool, obj.return_pb2_obj())
9797

9898
for id in vidtorid.keys():
9999
collected_info[id]["ASIC_DB"]["vidtorid"] = vidtorid[id]
@@ -145,7 +145,7 @@ def filter_out_dbs(db_list, collected_info):
145145
return collected_info
146146

147147

148-
def populate_fv(info, module, namespace, conn_pool):
148+
def populate_fv(info, module, namespace, conn_pool, dash_object):
149149
all_dbs = set()
150150
for id in info.keys():
151151
for db_name in info[id].keys():
@@ -157,7 +157,9 @@ def populate_fv(info, module, namespace, conn_pool):
157157
db_cfg_file.connect(plugins.dump_modules[module].CONFIG_FILE, namespace)
158158
else:
159159
conn_pool.get(db_name, namespace)
160-
160+
if dash_object:
161+
conn_pool.get_dash_conn(namespace)
162+
redis_conn = conn_pool.cache.get(namespace, {}).get("DASH_"+CONN, None)
161163
db_conn = conn_pool.cache.get(namespace, {}).get(CONN, None)
162164

163165
final_info = {}
@@ -170,10 +172,17 @@ def populate_fv(info, module, namespace, conn_pool):
170172
for key in info[id][db_name]["keys"]:
171173
if db_name == "CONFIG_FILE":
172174
fv = db_cfg_file.get(db_name, key)
175+
elif dash_object and db_name == "APPL_DB":
176+
try:
177+
from dump.dash_util import get_decoded_value
178+
pb_data = redis_conn.hgetall(key)
179+
fv = get_decoded_value(dash_object, pb_data)
180+
except ModuleNotFoundError:
181+
print("Issue in importing dash module!")
182+
return final_info
173183
else:
174184
fv = db_conn.get_all(db_name, key)
175185
final_info[id][db_name]["keys"].append({key: fv})
176-
177186
return final_info
178187

179188

dump/match_infra.py

Lines changed: 84 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,8 @@
66
from swsscommon.swsscommon import SonicV2Connector, SonicDBConfig
77
from sonic_py_common import multi_asic
88
from utilities_common.constants import DEFAULT_NAMESPACE
9+
import redis
10+
911

1012
# Constants
1113
CONN = "conn"
@@ -60,6 +62,7 @@ def __init__(self, **kwargs):
6062
self.just_keys = kwargs["just_keys"] if "just_keys" in kwargs else True
6163
self.ns = kwargs["ns"] if "ns" in kwargs else ""
6264
self.match_entire_list = kwargs["match_entire_list"] if "match_entire_list" in kwargs else False
65+
self.PbObj = kwargs["pb"] if "pb" in kwargs else None
6366
err = self.__static_checks()
6467
verbose_print(str(err))
6568
if err:
@@ -194,6 +197,51 @@ def hgetall(self, db, key):
194197
return self.conn.get_all(db, key)
195198

196199

200+
class RedisPySource(SourceAdapter):
    """ Concrete Adaptor Class for connecting to APPL_DB using Redis library

    Used for DASH tables whose values are protobuf-encoded: the raw
    redis client returns binary values intact, unlike
    SonicV2Connector.get_all (see sonic-swss-common issue 886).
    """

    def __init__(self, conn_pool, pb_obj):
        self.conn = None      # raw redis handle, set by connect()
        self.pool = conn_pool
        self.pb_obj = pb_obj  # empty protobuf message of the table's type

    def get_decoded_value(self, pb_obj, key_val):
        """Decode a raw hgetall() result into a dict, or None when the
        dash decoding helpers are unavailable on this platform."""
        try:
            from dump.dash_util import get_decoded_value
        except ModuleNotFoundError as e:
            verbose_print("RedisPySource: decoded value cannot be obtained \
                          since dash related library import issues\n" + str(e))
            return None
        return get_decoded_value(pb_obj, key_val)

    def connect(self, db, ns):
        """Obtain a raw redis connection for namespace *ns*; returns
        True on success, False on failure."""
        try:
            self.conn = self.pool.get_dash_conn(ns)
        except Exception as e:
            verbose_print("RedisPySource: Connection Failed\n" + str(e))
            return False
        return True

    def get_separator(self):
        return ":"

    def getKeys(self, db, table, key_pattern):
        # redis-py returns binary keys; decode to str for the callers.
        bin_keys = self.conn.keys(table + self.get_separator() + key_pattern)
        return [key1.decode() for key1 in bin_keys]

    def get(self, db, key):
        key_val = self.conn.hgetall(key)
        return self.get_decoded_value(self.pb_obj, key_val)

    def hget(self, db, key, field):
        key_val = self.conn.hgetall(key)
        decoded_dict = self.get_decoded_value(self.pb_obj, key_val)
        # BUGFIX: decoding can fail (missing dash_api) and yield None;
        # the old code then raised AttributeError on .get().
        if decoded_dict is None:
            return None
        return decoded_dict.get(field)

    def hgetall(self, db, key):
        key_val = self.conn.hgetall(key)
        return self.get_decoded_value(self.pb_obj, key_val)
244+
197245
class JsonSource(SourceAdapter):
198246
""" Concrete Adaptor Class for connecting to JSON Data Sources """
199247

@@ -249,26 +297,53 @@ def initialize_connector(self, ns):
249297
SonicDBConfig.load_sonic_db_config()
250298
return SonicV2Connector(namespace=ns, use_unix_socket_path=True)
251299

300+
def initialize_redis_conn(self, ns):
    """Return a raw redis connection to APPL_DB for namespace *ns*.

    APPL_DB is the only database that stores protobuf (binary) values,
    which SonicV2Connector's get_all cannot return intact, so the
    redis library is used directly.
    Relevant issue: sonic-net/sonic-swss-common#886
    """
    sock_path = SonicDBConfig.getDbSock("APPL_DB", ns)
    db_id = SonicDBConfig.getDbId("APPL_DB", ns)
    return redis.Redis(unix_socket_path=sock_path, db=db_id)
310+
252311
def get(self, db_name, ns, update=False):
    """Return the cached SonicV2Connector for namespace *ns*, creating
    it and/or connecting it to *db_name* on demand."""
    ns_cache = self.cache.setdefault(ns, {})
    if CONN not in ns_cache:
        ns_cache[CONN] = self.initialize_connector(ns)
    if CONN_TO not in ns_cache:
        ns_cache[CONN_TO] = set()
    if update or db_name not in ns_cache[CONN_TO]:
        ns_cache[CONN].connect(db_name)
        ns_cache[CONN_TO].add(db_name)
    return ns_cache[CONN]
262323

324+
def get_dash_conn(self, ns):
    """Return (and cache) the raw redis connection to APPL_DB used for
    protobuf-encoded DASH tables in namespace *ns*."""
    ns_cache = self.cache.setdefault(ns, {})
    dash_key = "DASH_" + CONN
    if dash_key not in ns_cache:
        ns_cache[dash_key] = self.initialize_redis_conn(ns)
    return ns_cache[dash_key]
331+
263332
def clear(self, namespace=None):
    """Drop every cached connection, or only the entry for *namespace*
    when a truthy one is given (a falsy namespace clears everything)."""
    if not namespace:
        self.cache.clear()
    else:
        # pop() is a no-op for namespaces that were never cached.
        self.cache.pop(namespace, None)
268337

269-
def fill(self, ns, conn, connected_to):
338+
def fill(self, ns, conn, connected_to, dash_object=False):
270339
""" Update internal cache """
271-
self.cache[ns] = {CONN: conn, CONN_TO: set(connected_to)}
340+
if ns not in self.cache:
341+
self.cache[ns] = {}
342+
if dash_object:
343+
self.cache[ns]["DASH_"+CONN] = conn
344+
return
345+
self.cache[ns][CONN] = conn
346+
self.cache[ns][CONN_TO] = set(connected_to)
272347

273348

274349
class MatchEngine:
@@ -293,10 +368,16 @@ def get_redis_source_adapter(self):
293368
def get_json_source_adapter(self):
294369
return JsonSource()
295370

371+
def get_redis_py_adapter(self, pb_obj):
    """Factory for the raw-redis adapter used by DASH (protobuf) tables."""
    return RedisPySource(self.conn_pool, pb_obj)
373+
296374
def __get_source_adapter(self, req):
297375
src = None
298376
d_src = ""
299-
if req.db:
377+
if req.PbObj:
378+
d_src = req.db
379+
src = self.get_redis_py_adapter(req.PbObj)
380+
elif req.db:
300381
d_src = req.db
301382
src = self.get_redis_source_adapter()
302383
else:

dump/plugins/__init__.py

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,22 @@
11
import os
2-
import sys
32
import pkgutil
43
import importlib
54
from .executor import Executor
5+
from sonic_py_common.syslogger import SysLogger
66

77
dump_modules = {}
88
pkg_dir = os.path.dirname(__file__)
99

10+
log = SysLogger()
1011
# import child classes automatically
1112
for (module_loader, name, ispkg) in pkgutil.iter_modules([pkg_dir]):
12-
importlib.import_module('.' + name, __package__)
13+
try:
14+
importlib.import_module('.' + name, __package__)
15+
except ModuleNotFoundError as e:
16+
if e.name != "dash_api":
17+
# dash_api is only used in a specific platform
18+
log.log_debug("dump utility - dash_api package not found for platform")
19+
raise
1320

1421
# Classes inheriting Executor
1522
dump_modules = {cls.__name__.lower(): cls for cls in Executor.__subclasses__()}

dump/plugins/dash_acl_group.py

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
from dump.helper import create_template_dict
2+
from dump.match_infra import MatchRequest
3+
from swsscommon.swsscommon import SonicDBConfig
4+
from dash_api.acl_group_pb2 import AclGroup
5+
from .executor import Executor
6+
7+
8+
APPL_DB_SEPARATOR = SonicDBConfig.getSeparator("APPL_DB")
9+
10+
11+
class Dash_Acl_Group(Executor):
    """
    Debug Dump Plugin for DASH ACL Group
    """
    ARG_NAME = "dash_acl_group_name"

    def __init__(self, match_engine=None):
        super().__init__(match_engine)
        # Marks this plugin as a DASH object so the dump core decodes
        # its APPL_DB entries from protobuf.
        self.is_dash_object = True

    def get_all_args(self, ns=""):
        """Return every ACL-group identifier present in APPL_DB."""
        req = MatchRequest(db="APPL_DB", table="DASH_ACL_GROUP_TABLE", key_pattern="*", ns=ns)
        matched = self.match_engine.fetch(req)
        # Strip the table prefix; keep only the trailing identifier.
        return [key.split(APPL_DB_SEPARATOR)[-1] for key in matched["keys"]]

    def execute(self, params):
        """Collect APPL_DB state for the requested ACL group."""
        self.ret_temp = create_template_dict(dbs=["APPL_DB"])
        group_name = params[self.ARG_NAME]
        self.ns = params["namespace"]
        self.init_dash_acl_group_table_appl_info(group_name)
        return self.ret_temp

    def init_dash_acl_group_table_appl_info(self, dash_acl_group_table_name):
        # Fetch the matching APPL_DB keys and record them (or the error)
        # in the result template.
        req = MatchRequest(db="APPL_DB", table="DASH_ACL_GROUP_TABLE",
                           key_pattern=dash_acl_group_table_name, ns=self.ns)
        matched = self.match_engine.fetch(req)
        self.add_to_ret_template(req.table, req.db, matched["keys"], matched["error"])

    def return_pb2_obj(self):
        """Empty protobuf message used to decode this table's values."""
        return AclGroup()

0 commit comments

Comments
 (0)