Skip to content

Commit c05845d

Browse files
Add trap flow counter support (#1868)
Add flowcnt commands * counterpoll flowcnt-trap enable/disable * counterpoll flowcnt-trap interval * show flowcnt-trap stats
1 parent ef82f00 commit c05845d

File tree

14 files changed

+641
-4
lines changed

14 files changed

+641
-4
lines changed

clear/main.py

+8
Original file line numberDiff line numberDiff line change
@@ -482,6 +482,14 @@ def statistics(db):
482482
def remap_keys(dict):
483483
return [{'key': k, 'value': v} for k, v in dict.items()]
484484

485+
# ("sonic-clear flowcnt-trap")
486+
@cli.command()
487+
def flowcnt_trap():
488+
""" Clear trap flow counters """
489+
command = "flow_counters_stat -c -t trap"
490+
run_command(command)
491+
492+
485493
# Load plugins and register them
486494
helper = util_base.UtilHelper()
487495
helper.load_and_register_plugins(plugins, cli)

config/main.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -5948,7 +5948,7 @@ def rate():
59485948

59495949
@rate.command()
59505950
@click.argument('interval', metavar='<interval>', type=click.IntRange(min=1, max=1000), required=True)
5951-
@click.argument('rates_type', type=click.Choice(['all', 'port', 'rif']), default='all')
5951+
@click.argument('rates_type', type=click.Choice(['all', 'port', 'rif', 'flowcnt-trap']), default='all')
59525952
def smoothing_interval(interval, rates_type):
59535953
"""Set rates smoothing interval """
59545954
counters_db = swsssdk.SonicV2Connector()
@@ -5962,6 +5962,9 @@ def smoothing_interval(interval, rates_type):
59625962
if rates_type in ['rif', 'all']:
59635963
counters_db.set('COUNTERS_DB', 'RATES:RIF', 'RIF_SMOOTH_INTERVAL', interval)
59645964
counters_db.set('COUNTERS_DB', 'RATES:RIF', 'RIF_ALPHA', alpha)
5965+
if rates_type in ['flowcnt-trap', 'all']:
5966+
counters_db.set('COUNTERS_DB', 'RATES:TRAP', 'TRAP_SMOOTH_INTERVAL', interval)
5967+
counters_db.set('COUNTERS_DB', 'RATES:TRAP', 'TRAP_ALPHA', alpha)
59655968

59665969

59675970
# Load plugins and register them

counterpoll/main.py

+38-2
Original file line numberDiff line numberDiff line change
@@ -54,12 +54,12 @@ def disable():
5454
# Port counter commands
5555
@cli.group()
def port():
    """ Port counter commands """
5858

5959
@port.command()
6060
@click.argument('poll_interval', type=click.IntRange(100, 30000))
6161
def interval(poll_interval):
62-
""" Set queue counter query interval """
62+
""" Set port counter query interval """
6363
configdb = ConfigDBConnector()
6464
configdb.connect()
6565
port_info = {}
@@ -314,6 +314,39 @@ def disable():
314314
tunnel_info['FLEX_COUNTER_STATUS'] = DISABLE
315315
configdb.mod_entry("FLEX_COUNTER_TABLE", "TUNNEL", tunnel_info)
316316

317+
# Trap flow counter commands
@cli.group()
@click.pass_context
def flowcnt_trap(ctx):
    """ Trap flow counter commands """
    # Share one ConfigDB connection with all sub-commands via the click context.
    ctx.obj = ConfigDBConnector()
    ctx.obj.connect()


@flowcnt_trap.command()
@click.argument('poll_interval', type=click.IntRange(1000, 30000))
@click.pass_context
def interval(ctx, poll_interval):
    """ Set trap flow counter query interval """
    # Pass the field/value pair directly instead of building an empty dict first.
    ctx.obj.mod_entry("FLEX_COUNTER_TABLE", "FLOW_CNT_TRAP", {'POLL_INTERVAL': poll_interval})


@flowcnt_trap.command()
@click.pass_context
def enable(ctx):
    """ Enable trap flow counter query """
    ctx.obj.mod_entry("FLEX_COUNTER_TABLE", "FLOW_CNT_TRAP", {'FLEX_COUNTER_STATUS': 'enable'})


@flowcnt_trap.command()
@click.pass_context
def disable(ctx):
    """ Disable trap flow counter query """
    ctx.obj.mod_entry("FLEX_COUNTER_TABLE", "FLOW_CNT_TRAP", {'FLEX_COUNTER_STATUS': 'disable'})
349+
317350
@cli.command()
318351
def show():
319352
""" Show the counter configuration """
@@ -329,6 +362,7 @@ def show():
329362
buffer_pool_wm_info = configdb.get_entry('FLEX_COUNTER_TABLE', BUFFER_POOL_WATERMARK)
330363
acl_info = configdb.get_entry('FLEX_COUNTER_TABLE', ACL)
331364
tunnel_info = configdb.get_entry('FLEX_COUNTER_TABLE', 'TUNNEL')
365+
trap_info = configdb.get_entry('FLEX_COUNTER_TABLE', 'FLOW_CNT_TRAP')
332366

333367
header = ("Type", "Interval (in ms)", "Status")
334368
data = []
@@ -352,6 +386,8 @@ def show():
352386
data.append([ACL, pg_drop_info.get("POLL_INTERVAL", DEFLT_10_SEC), acl_info.get("FLEX_COUNTER_STATUS", DISABLE)])
353387
if tunnel_info:
354388
data.append(["TUNNEL_STAT", rif_info.get("POLL_INTERVAL", DEFLT_10_SEC), rif_info.get("FLEX_COUNTER_STATUS", DISABLE)])
389+
if trap_info:
390+
data.append(["FLOW_CNT_TRAP_STAT", trap_info.get("POLL_INTERVAL", DEFLT_10_SEC), trap_info.get("FLEX_COUNTER_STATUS", DISABLE)])
355391

356392
click.echo(tabulate(data, headers=header, tablefmt="simple", missingval=""))
357393

scripts/flow_counters_stat

+283
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,283 @@
1+
#!/usr/bin/env python3
2+
3+
import argparse
4+
import os
5+
import _pickle as pickle
6+
import sys
7+
8+
from natsort import natsorted
9+
from tabulate import tabulate
10+
11+
# mock the redis for unit test purposes #
12+
try:
13+
if os.environ["UTILITIES_UNIT_TESTING"] == "2":
14+
modules_path = os.path.join(os.path.dirname(__file__), "..")
15+
tests_path = os.path.join(modules_path, "tests")
16+
sys.path.insert(0, modules_path)
17+
sys.path.insert(0, tests_path)
18+
import mock_tables.dbconnector
19+
if os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] == "multi_asic":
20+
import mock_tables.mock_multi_asic
21+
mock_tables.dbconnector.load_namespace_config()
22+
23+
except KeyError:
24+
pass
25+
26+
import utilities_common.multi_asic as multi_asic_util
27+
from utilities_common.netstat import format_number_with_comma, table_as_json, ns_diff, format_prate
28+
29+
# Flow counter meta data, new type of flow counters can extend this dictionary to reuse existing logic
30+
flow_counter_meta = {
31+
'trap': {
32+
'headers': ['Trap Name', 'Packets', 'Bytes', 'PPS'],
33+
'name_map': 'COUNTERS_TRAP_NAME_MAP',
34+
}
35+
}
36+
flow_counters_fields = ['SAI_COUNTER_STAT_PACKETS', 'SAI_COUNTER_STAT_BYTES']
37+
38+
# Only do diff for 'Packets' and 'Bytes'
39+
diff_column_positions = set([0, 1])
40+
41+
FLOW_COUNTER_TABLE_PREFIX = "COUNTERS:"
42+
RATES_TABLE_PREFIX = 'RATES:'
43+
PPS_FIELD = 'RX_PPS'
44+
STATUS_NA = 'N/A'
45+
46+
47+
class FlowCounterStats(object):
    """Query, cache and display flow counter statistics.

    Statistics are read from COUNTERS_DB, once per ASIC namespace. "clear"
    does not reset anything in the ASIC: it snapshots the current values to a
    per-user file under /tmp, and subsequent "show" calls display the diff
    between the live values and that snapshot.
    """

    def __init__(self, args):
        # self.db is populated per namespace by the run_on_multi_asic decorator.
        self.db = None
        self.multi_asic = multi_asic_util.MultiAsic(namespace_option=args.namespace)
        self.args = args
        meta_data = flow_counter_meta[args.type]
        self.name_map = meta_data['name_map']
        self.headers = meta_data['headers']
        # Per-user cache file so one user's "clear" does not affect another.
        # (Original used os.path.join with a single argument, which is a no-op.)
        self.data_file = '/tmp/{}-stats-{}'.format(args.type, os.getuid())
        if self.args.delete and os.path.exists(self.data_file):
            os.remove(self.data_file)
        self.data = {}

    def show(self):
        """Show flow counter statistic, diffed against the saved snapshot if any.
        """
        self._collect_and_diff()
        headers, table = self._prepare_show_data()
        self._print_data(headers, table)

    def _collect_and_diff(self):
        """Collect statistic from db and diff from old data if any.

        _diff mutates the cached (old) data in place when a counter was
        re-created or went backwards, so the cache is re-saved in that case.
        """
        self._collect()
        old_data = self._load()
        need_update_cache = self._diff(old_data, self.data)
        if need_update_cache:
            self._save(old_data)

    def _adjust_headers(self, headers):
        """Adjust table headers based on platforms.

        Args:
            headers (list): Original headers

        Returns:
            headers (list): Headers with 'ASIC ID' column if it is a multi ASIC platform
        """
        return ['ASIC ID'] + headers if self.multi_asic.is_multi_asic else headers

    def _prepare_show_data(self):
        """Prepare headers and table data for output.

        Returns:
            headers (list): Table headers
            table (list): Table data
        """
        table = []
        headers = self._adjust_headers(self.headers)

        for ns, stats in natsorted(self.data.items()):
            # When a namespace filter was given, only render that namespace.
            if self.args.namespace is not None and self.args.namespace != ns:
                continue
            for name, values in natsorted(stats.items()):
                row = [ns] if self.multi_asic.is_multi_asic else []
                # values layout: [packets, bytes, rx_pps, counter_oid]
                row.extend([name, format_number_with_comma(values[0]),
                            format_number_with_comma(values[1]), format_prate(values[2])])
                table.append(row)

        return headers, table

    def _print_data(self, headers, table):
        """Print statistic data based on output format (JSON or plain table).

        Args:
            headers (list): Table headers
            table (list): Table data
        """
        if self.args.json:
            print(table_as_json(table, headers))
        else:
            print(tabulate(table, headers, tablefmt='simple', stralign='right'))

    def clear(self):
        """Clear flow counter statistic. This function does not clear data from ASIC. Instead, it saves flow counter statistic to a file. When user
           issue show command after clear, it does a diff between new data and saved data.
        """
        self._collect()
        self._save(self.data)
        print('Flow Counters were successfully cleared')

    @multi_asic_util.run_on_multi_asic
    def _collect(self):
        """Collect flow counter statistic from DB. This function is called once per ASIC namespace.
        """
        self.data.update(self._get_stats_from_db())

    def _get_stats_from_db(self):
        """Get flow counter statistic from DB for the current namespace.

        Returns:
            dict: A dictionary. E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
        """
        ns = self.multi_asic.current_namespace
        name_map = self.db.get_all(self.db.COUNTERS_DB, self.name_map)
        data = {ns: {}}
        if not name_map:
            return data

        for name, counter_oid in name_map.items():
            values = self._get_stats_value(counter_oid)

            # PPS lives in the RATES table, keyed by the same counter OID.
            full_table_id = RATES_TABLE_PREFIX + counter_oid
            counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, PPS_FIELD)
            values.append(STATUS_NA if counter_data is None else counter_data)
            # Keep the OID last so _diff can detect counter re-creation.
            values.append(counter_oid)
            data[ns][name] = values
        return data

    def _get_stats_value(self, counter_oid):
        """Get statistic value from COUNTERS_DB COUNTERS table.

        Args:
            counter_oid (string): OID of a generic counter

        Returns:
            values (list): A list of statistics value (strings; 'N/A' when absent)
        """
        values = []
        full_table_id = FLOW_COUNTER_TABLE_PREFIX + counter_oid
        for field in flow_counters_fields:
            counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, field)
            values.append(STATUS_NA if counter_data is None else counter_data)
        return values

    def _save(self, data):
        """Save flow counter statistic to the per-user cache file.
        """
        try:
            if os.path.exists(self.data_file):
                os.remove(self.data_file)

            with open(self.data_file, 'wb') as f:
                pickle.dump(data, f)
        except IOError as e:
            print('Failed to save statistic - {}'.format(repr(e)))

    def _load(self):
        """Load flow counter statistic from the per-user cache file.

        NOTE(review): unpickling a predictable /tmp path deserializes whatever
        is in the file; consider a safer location/format - confirm threat model.

        Returns:
            dict: A dictionary. E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
        """
        if not os.path.exists(self.data_file):
            return None

        try:
            with open(self.data_file, 'rb') as f:
                data = pickle.load(f)
        except (IOError, pickle.UnpicklingError) as e:
            # Also catch UnpicklingError so a corrupt cache file degrades to
            # "no snapshot" instead of crashing the command.
            print('Failed to load statistic - {}'.format(repr(e)))
            return None

        return data

    @staticmethod
    def _to_int(value):
        """Parse a counter value; return None for non-numeric values such as 'N/A'.
        """
        try:
            return int(value)
        except (TypeError, ValueError):
            return None

    def _diff(self, old_data, new_data):
        """Do a diff between new data and old data.

        Args:
            old_data (dict): E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}
            new_data (dict): E.g: {<namespace>: {<trap_name>: [<value_in_pkts>, <value_in_bytes>, <rx_pps>, <counter_oid>]}}

        Returns:
            bool: True if cache need to be updated
        """
        if not old_data:
            return False

        need_update_cache = False
        for ns, stats in new_data.items():
            if ns not in old_data:
                continue
            old_stats = old_data[ns]
            for name, values in stats.items():
                if name not in old_stats:
                    continue

                old_values = old_stats[name]
                if values[-1] != old_values[-1]:
                    # Counter OID not equal means the trap was removed and added again. Removing a trap would cause
                    # the stats value restart from 0. To avoid get minus value here, it should not do diff in case
                    # counter OID is changed.
                    old_values[-1] = values[-1]
                    for i in diff_column_positions:
                        old_values[i] = 0
                        values[i] = ns_diff(values[i], old_values[i])
                    need_update_cache = True
                    continue

                has_negative_diff = False
                for i in diff_column_positions:
                    # DB values are strings: compare numerically, because a
                    # lexicographic '<' mis-orders e.g. '9' vs '100'. Non-numeric
                    # values ('N/A') are never treated as a negative diff.
                    new_val = self._to_int(values[i])
                    old_val = self._to_int(old_values[i])
                    # If any diff has negative value, set all counter values to 0 and update cache
                    if new_val is not None and old_val is not None and new_val < old_val:
                        has_negative_diff = True
                        break

                if has_negative_diff:
                    for i in diff_column_positions:
                        old_values[i] = 0
                        values[i] = ns_diff(values[i], old_values[i])
                    need_update_cache = True
                    continue

                for i in diff_column_positions:
                    values[i] = ns_diff(values[i], old_values[i])

        return need_update_cache
256+
257+
258+
def main():
    """Parse command line options and show or clear flow counter statistics."""
    arg_parser = argparse.ArgumentParser(description='Display the flow counters',
                                         formatter_class=argparse.RawTextHelpFormatter,
                                         epilog="""
Examples:
  flow_counters_stat -c -t trap
  flow_counters_stat -t trap
  flow_counters_stat -d -t trap
""")
    arg_parser.add_argument('-c', '--clear', action='store_true', help='Copy & clear stats')
    arg_parser.add_argument('-d', '--delete', action='store_true', help='Delete saved stats')
    arg_parser.add_argument('-j', '--json', action='store_true', help='Display in JSON format')
    arg_parser.add_argument('-n', '--namespace', default=None, help='Display flow counters for specific namespace')
    arg_parser.add_argument('-t', '--type', required=True, choices=['trap'], help='Flow counters type')

    options = arg_parser.parse_args()

    stats = FlowCounterStats(options)
    # Dispatch to the selected action; "show" is the default behaviour.
    action = stats.clear if options.clear else stats.show
    action()


if __name__ == '__main__':
    main()

setup.py

+1
Original file line numberDiff line numberDiff line change
@@ -101,6 +101,7 @@
101101
'scripts/fast-reboot-dump.py',
102102
'scripts/fdbclear',
103103
'scripts/fdbshow',
104+
'scripts/flow_counters_stat',
104105
'scripts/gearboxutil',
105106
'scripts/generate_dump',
106107
'scripts/generate_shutdown_order.py',

0 commit comments

Comments
 (0)