
Commit 10f31ea

Revert "Replace pickle by json (#2636)" (#2746)
This reverts commit 54e2635, due to #14089.

Signed-off-by: Mai Bui <[email protected]>

Parent: 05fa751

File tree: 8 files changed (+236, -235 lines)


scripts/dropstat (+7, -7)

@@ -11,7 +11,7 @@
 # - Refactor calls to COUNTERS_DB to reduce redundancy
 # - Cache DB queries to reduce # of expensive queries

-import json
+import _pickle as pickle
 import argparse
 import os
 import socket
@@ -117,10 +117,10 @@ class DropStat(object):
         """

         try:
-            json.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
-                      open(self.port_drop_stats_file, 'w+'))
-            json.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
-                      open(self.switch_drop_stats_file, 'w+'))
+            pickle.dump(self.get_counts_table(self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP), COUNTERS_PORT_NAME_MAP),
+                        open(self.port_drop_stats_file, 'wb+'))
+            pickle.dump(self.get_counts(self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP), self.get_switch_id()),
+                        open(self.switch_drop_stats_file, 'wb+'))
         except IOError as e:
             print(e)
             sys.exit(e.errno)
@@ -135,7 +135,7 @@ class DropStat(object):

         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.port_drop_stats_file):
-            port_drop_ckpt = json.load(open(self.port_drop_stats_file, 'r'))
+            port_drop_ckpt = pickle.load(open(self.port_drop_stats_file, 'rb'))

         counters = self.gather_counters(std_port_rx_counters + std_port_tx_counters, DEBUG_COUNTER_PORT_STAT_MAP, group, counter_type)
         headers = std_port_description_header + self.gather_headers(counters, DEBUG_COUNTER_PORT_STAT_MAP)
@@ -162,7 +162,7 @@ class DropStat(object):

         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.switch_drop_stats_file):
-            switch_drop_ckpt = json.load(open(self.switch_drop_stats_file, 'r'))
+            switch_drop_ckpt = pickle.load(open(self.switch_drop_stats_file, 'rb'))

         counters = self.gather_counters([], DEBUG_COUNTER_SWITCH_STAT_MAP, group, counter_type)
         headers = std_switch_description_header + self.gather_headers(counters, DEBUG_COUNTER_SWITCH_STAT_MAP)
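Across all of the touched scripts the revert applies the same substitution: the clear-checkpoint files go back to being pickle blobs opened in binary mode instead of JSON text files. A minimal sketch of the save/restore pattern dropstat uses, with a hypothetical cache path standing in for self.port_drop_stats_file and a plain dict in place of the real counter tables:

import _pickle as pickle
import os

# Hypothetical cache location; the real script derives a per-user path
# for self.port_drop_stats_file.
CACHE_FILE = '/tmp/port-stats'

def save_checkpoint(counters):
    # pickle needs a binary file handle, hence 'wb+' where the json version used 'w+'
    with open(CACHE_FILE, 'wb+') as f:
        pickle.dump(counters, f)

def load_checkpoint():
    # Return the previous snapshot if one exists, otherwise an empty baseline
    if os.path.isfile(CACHE_FILE):
        with open(CACHE_FILE, 'rb') as f:
            return pickle.load(f)
    return {}

if __name__ == '__main__':
    save_checkpoint({'Ethernet0': {'SAI_PORT_STAT_IF_IN_ERRORS': '7'}})
    print(load_checkpoint())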

scripts/flow_counters_stat (+5, -5)

@@ -2,7 +2,7 @@

 import argparse
 import os
-import json
+import _pickle as pickle
 import sys

 from natsort import natsorted
@@ -185,8 +185,8 @@ class FlowCounterStats(object):
             if os.path.exists(self.data_file):
                 os.remove(self.data_file)

-            with open(self.data_file, 'w') as f:
-                json.dump(data, f)
+            with open(self.data_file, 'wb') as f:
+                pickle.dump(data, f)
         except IOError as e:
             print('Failed to save statistic - {}'.format(repr(e)))

@@ -200,8 +200,8 @@ class FlowCounterStats(object):
             return None

         try:
-            with open(self.data_file, 'r') as f:
-                data = json.load(f)
+            with open(self.data_file, 'rb') as f:
+                data = pickle.load(f)
         except IOError as e:
             print('Failed to load statistic - {}'.format(repr(e)))
             return None
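flow_counters_stat makes the same json-to-pickle swap, but it wraps the file in a context manager, so the handle is closed even if serialization fails (dropstat, by contrast, passes a bare open() to pickle.dump and relies on garbage collection to close it). Only the open mode has to change: 'w'/'r' text mode for json, 'wb'/'rb' binary mode for pickle. A self-contained sketch of that save/load pair, with a hypothetical data_file path and the script's error handling kept:

import _pickle as pickle
import os

class StatsCache:
    """Sketch of the FlowCounterStats save/load pair; the path is hypothetical."""

    def __init__(self, data_file='/tmp/flow-counter-stats'):
        self.data_file = data_file

    def save(self, data):
        try:
            if os.path.exists(self.data_file):
                os.remove(self.data_file)
            # binary mode for pickle, where the json version used text mode
            with open(self.data_file, 'wb') as f:
                pickle.dump(data, f)
        except IOError as e:
            print('Failed to save statistic - {}'.format(repr(e)))

    def load(self):
        if not os.path.exists(self.data_file):
            return None
        try:
            with open(self.data_file, 'rb') as f:
                return pickle.load(f)
        except IOError as e:
            print('Failed to load statistic - {}'.format(repr(e)))
            return None

cache = StatsCache()
cache.save({'Ethernet0': [100, 200]})
print(cache.load())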

scripts/intfstat (+32, -32)

@@ -6,7 +6,7 @@
 #
 #####################################################################

-import json
+import _pickle as pickle
 import argparse
 import datetime
 import sys
@@ -28,7 +28,7 @@ from collections import namedtuple, OrderedDict
 from natsort import natsorted
 from tabulate import tabulate
 from utilities_common.netstat import ns_diff, table_as_json, STATUS_NA, format_brate, format_prate
-from utilities_common.cli import json_serial, UserCache
+from utilities_common.cli import UserCache
 from swsscommon.swsscommon import SonicV2Connector

 nstat_fields = (
@@ -96,7 +96,7 @@ class Intfstat(object):
            counter_data = self.db.get(self.db.COUNTERS_DB, full_table_id, counter_name)
            if counter_data:
                fields[pos] = str(counter_data)
-        cntr = NStats._make(fields)._asdict()
+        cntr = NStats._make(fields)
         return cntr

     def get_rates(table_id):
@@ -153,14 +153,14 @@ class Intfstat(object):
            rates = ratestat_dict.get(key, RateStats._make([STATUS_NA] * len(rates_key_list)))

            table.append((key,
-                         data['rx_p_ok'],
+                         data.rx_p_ok,
                          format_brate(rates.rx_bps),
                          format_prate(rates.rx_pps),
-                         data['rx_p_err'],
-                         data['tx_p_ok'],
+                         data.rx_p_err,
+                         data.tx_p_ok,
                          format_brate(rates.tx_bps),
                          format_prate(rates.tx_pps),
-                         data['tx_p_err']))
+                         data.tx_p_err))

         if use_json:
             print(table_as_json(table, header))
@@ -186,24 +186,24 @@ class Intfstat(object):

            if old_cntr is not None:
                table.append((key,
-                             ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
+                             ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
                              format_brate(rates.rx_bps),
                              format_prate(rates.rx_pps),
-                             ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
-                             ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
+                             ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
+                             ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
                              format_brate(rates.tx_bps),
                              format_prate(rates.tx_pps),
-                             ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err'])))
+                             ns_diff(cntr.tx_p_err, old_cntr.tx_p_err)))
            else:
                table.append((key,
-                             cntr['rx_p_ok'],
+                             cntr.rx_p_ok,
                              format_brate(rates.rx_bps),
                              format_prate(rates.rx_pps),
-                             cntr['rx_p_err'],
-                             cntr['tx_p_ok'],
+                             cntr.rx_p_err,
+                             cntr.tx_p_ok,
                              format_brate(rates.tx_bps),
                              format_prate(rates.tx_pps),
-                             cntr['tx_p_err']))
+                             cntr.tx_p_err))

         if use_json:
             print(table_as_json(table, header))
@@ -229,17 +229,17 @@ class Intfstat(object):

         if cnstat_old_dict and cnstat_old_dict.get(rif):
             old_cntr = cnstat_old_dict.get(rif)
-            body = body % (ns_diff(cntr['rx_p_ok'], old_cntr['rx_p_ok']),
-                           ns_diff(cntr['rx_b_ok'], old_cntr['rx_b_ok']),
-                           ns_diff(cntr['rx_p_err'], old_cntr['rx_p_err']),
-                           ns_diff(cntr['rx_b_err'], old_cntr['rx_b_err']),
-                           ns_diff(cntr['tx_p_ok'], old_cntr['tx_p_ok']),
-                           ns_diff(cntr['tx_b_ok'], old_cntr['tx_b_ok']),
-                           ns_diff(cntr['tx_p_err'], old_cntr['tx_p_err']),
-                           ns_diff(cntr['tx_b_err'], old_cntr['tx_b_err']))
+            body = body % (ns_diff(cntr.rx_p_ok, old_cntr.rx_p_ok),
+                           ns_diff(cntr.rx_b_ok, old_cntr.rx_b_ok),
+                           ns_diff(cntr.rx_p_err, old_cntr.rx_p_err),
+                           ns_diff(cntr.rx_b_err, old_cntr.rx_b_err),
+                           ns_diff(cntr.tx_p_ok, old_cntr.tx_p_ok),
+                           ns_diff(cntr.tx_b_ok, old_cntr.tx_b_ok),
+                           ns_diff(cntr.tx_p_err, old_cntr.tx_p_err),
+                           ns_diff(cntr.tx_b_err, old_cntr.tx_b_err))
         else:
-            body = body % (cntr['rx_p_ok'], cntr['rx_b_ok'], cntr['rx_p_err'], cntr['rx_b_err'],
-                           cntr['tx_p_ok'], cntr['tx_b_ok'], cntr['tx_p_err'], cntr['tx_b_err'])
+            body = body % (cntr.rx_p_ok, cntr.rx_b_ok, cntr.rx_p_err, cntr.rx_b_err,
+                           cntr.tx_p_ok, cntr.tx_b_ok, cntr.tx_p_err, cntr.tx_b_err)

         print(header)
         print(body)
@@ -305,20 +305,20 @@ def main():
            if tag_name is not None:
                if os.path.isfile(cnstat_fqn_general_file):
                    try:
-                        general_data = json.load(open(cnstat_fqn_general_file, 'r'))
+                        general_data = pickle.load(open(cnstat_fqn_general_file, 'rb'))
                        for key, val in cnstat_dict.items():
                            general_data[key] = val
-                        json.dump(general_data, open(cnstat_fqn_general_file, 'w'))
+                        pickle.dump(general_data, open(cnstat_fqn_general_file, 'wb'))
                    except IOError as e:
                        sys.exit(e.errno)
                # Add the information also to tag specific file
                if os.path.isfile(cnstat_fqn_file):
-                    data = json.load(open(cnstat_fqn_file, 'r'))
+                    data = pickle.load(open(cnstat_fqn_file, 'rb'))
                    for key, val in cnstat_dict.items():
                        data[key] = val
-                    json.dump(data, open(cnstat_fqn_file, 'w'))
+                    pickle.dump(data, open(cnstat_fqn_file, 'wb'))
                else:
-                    json.dump(cnstat_dict, open(cnstat_fqn_file, 'w'), default=json_serial)
+                    pickle.dump(cnstat_dict, open(cnstat_fqn_file, 'wb'))
        except IOError as e:
            sys.exit(e.errno)
    else:
@@ -330,9 +330,9 @@ def main():
        try:
            cnstat_cached_dict = {}
            if os.path.isfile(cnstat_fqn_file):
-                cnstat_cached_dict = json.load(open(cnstat_fqn_file, 'r'))
+                cnstat_cached_dict = pickle.load(open(cnstat_fqn_file, 'rb'))
            else:
-                cnstat_cached_dict = json.load(open(cnstat_fqn_general_file, 'r'))
+                cnstat_cached_dict = pickle.load(open(cnstat_fqn_general_file, 'rb'))

            print("Last cached time was " + str(cnstat_cached_dict.get('time')))
            if interface_name:
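Beyond swapping the serializer, intfstat also reverts how counters are represented in memory: with json the namedtuple was flattened via ._asdict() and every consumer indexed it as a dict, because a JSON round trip cannot preserve the namedtuple type; with pickle the NStats object survives a reload intact, so attribute access (cntr.rx_p_ok) is restored throughout. A small illustration, using a trimmed, hypothetical stand-in for the script's NStats tuple:

import json
import _pickle as pickle
from collections import namedtuple

# Trimmed, hypothetical stand-in for the script's counter tuple
NStats = namedtuple('NStats', 'rx_p_ok, rx_p_err, tx_p_ok, tx_p_err')
cntr = NStats('100', '0', '250', '1')

# json writes a namedtuple as a plain JSON array, so field names are lost on reload
restored_json = json.loads(json.dumps(cntr))
print(type(restored_json).__name__, restored_json)   # list ['100', '0', '250', '1']

# pickle restores the namedtuple itself, so attribute access keeps working
restored_pkl = pickle.loads(pickle.dumps(cntr))
print(restored_pkl.rx_p_ok, restored_pkl.tx_p_err)    # 100 1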

scripts/pfcstat (+31, -31)

@@ -6,7 +6,7 @@
 #
 #####################################################################

-import json
+import _pickle as pickle
 import argparse
 import datetime
 import os.path
@@ -37,7 +37,7 @@ except KeyError:
 from utilities_common.netstat import ns_diff, STATUS_NA, format_number_with_comma
 from utilities_common import multi_asic as multi_asic_util
 from utilities_common import constants
-from utilities_common.cli import json_serial, UserCache
+from utilities_common.cli import UserCache


 PStats = namedtuple("PStats", "pfc0, pfc1, pfc2, pfc3, pfc4, pfc5, pfc6, pfc7")
@@ -101,7 +101,7 @@ class Pfcstat(object):
                fields[pos] = STATUS_NA
            else:
                fields[pos] = str(int(counter_data))
-        cntr = PStats._make(fields)._asdict()
+        cntr = PStats._make(fields)
         return cntr

     # Get the info from database
@@ -144,14 +144,14 @@ class Pfcstat(object):
            if key == 'time':
                continue
            table.append((key,
-                         format_number_with_comma(data['pfc0']),
-                         format_number_with_comma(data['pfc1']),
-                         format_number_with_comma(data['pfc2']),
-                         format_number_with_comma(data['pfc3']),
-                         format_number_with_comma(data['pfc4']),
-                         format_number_with_comma(data['pfc5']),
-                         format_number_with_comma(data['pfc6']),
-                         format_number_with_comma(data['pfc7'])))
+                         format_number_with_comma(data.pfc0),
+                         format_number_with_comma(data.pfc1),
+                         format_number_with_comma(data.pfc2),
+                         format_number_with_comma(data.pfc3),
+                         format_number_with_comma(data.pfc4),
+                         format_number_with_comma(data.pfc5),
+                         format_number_with_comma(data.pfc6),
+                         format_number_with_comma(data.pfc7)))

         if rx:
             print(tabulate(table, header_Rx, tablefmt='simple', stralign='right'))
@@ -173,24 +173,24 @@ class Pfcstat(object):

            if old_cntr is not None:
                table.append((key,
-                             ns_diff(cntr['pfc0'], old_cntr['pfc0']),
-                             ns_diff(cntr['pfc1'], old_cntr['pfc1']),
-                             ns_diff(cntr['pfc2'], old_cntr['pfc2']),
-                             ns_diff(cntr['pfc3'], old_cntr['pfc3']),
-                             ns_diff(cntr['pfc4'], old_cntr['pfc4']),
-                             ns_diff(cntr['pfc5'], old_cntr['pfc5']),
-                             ns_diff(cntr['pfc6'], old_cntr['pfc6']),
-                             ns_diff(cntr['pfc7'], old_cntr['pfc7'])))
+                             ns_diff(cntr.pfc0, old_cntr.pfc0),
+                             ns_diff(cntr.pfc1, old_cntr.pfc1),
+                             ns_diff(cntr.pfc2, old_cntr.pfc2),
+                             ns_diff(cntr.pfc3, old_cntr.pfc3),
+                             ns_diff(cntr.pfc4, old_cntr.pfc4),
+                             ns_diff(cntr.pfc5, old_cntr.pfc5),
+                             ns_diff(cntr.pfc6, old_cntr.pfc6),
+                             ns_diff(cntr.pfc7, old_cntr.pfc7)))
            else:
                table.append((key,
-                             format_number_with_comma(cntr['pfc0']),
-                             format_number_with_comma(cntr['pfc1']),
-                             format_number_with_comma(cntr['pfc2']),
-                             format_number_with_comma(cntr['pfc3']),
-                             format_number_with_comma(cntr['pfc4']),
-                             format_number_with_comma(cntr['pfc5']),
-                             format_number_with_comma(cntr['pfc6']),
-                             format_number_with_comma(cntr['pfc7'])))
+                             format_number_with_comma(cntr.pfc0),
+                             format_number_with_comma(cntr.pfc1),
+                             format_number_with_comma(cntr.pfc2),
+                             format_number_with_comma(cntr.pfc3),
+                             format_number_with_comma(cntr.pfc4),
+                             format_number_with_comma(cntr.pfc5),
+                             format_number_with_comma(cntr.pfc6),
+                             format_number_with_comma(cntr.pfc7)))

         if rx:
             print(tabulate(table, header_Rx, tablefmt='simple', stralign='right'))
@@ -256,8 +256,8 @@ Examples:

     if save_fresh_stats:
         try:
-            json.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'w'), default=json_serial)
-            json.dump(cnstat_dict_tx, open(cnstat_fqn_file_tx, 'w'), default=json_serial)
+            pickle.dump(cnstat_dict_rx, open(cnstat_fqn_file_rx, 'wb'))
+            pickle.dump(cnstat_dict_tx, open(cnstat_fqn_file_tx, 'wb'))
         except IOError as e:
             print(e.errno, e)
             sys.exit(e.errno)
@@ -271,7 +271,7 @@ Examples:
     """
     if os.path.isfile(cnstat_fqn_file_rx):
         try:
-            cnstat_cached_dict = json.load(open(cnstat_fqn_file_rx, 'r'))
+            cnstat_cached_dict = pickle.load(open(cnstat_fqn_file_rx, 'rb'))
             print("Last cached time was " + str(cnstat_cached_dict.get('time')))
             pfcstat.cnstat_diff_print(cnstat_dict_rx, cnstat_cached_dict, True)
         except IOError as e:
@@ -286,7 +286,7 @@ Examples:
     """
     if os.path.isfile(cnstat_fqn_file_tx):
         try:
-            cnstat_cached_dict = json.load(open(cnstat_fqn_file_tx, 'r'))
+            cnstat_cached_dict = pickle.load(open(cnstat_fqn_file_tx, 'rb'))
             print("Last cached time was " + str(cnstat_cached_dict.get('time')))
             pfcstat.cnstat_diff_print(cnstat_dict_tx, cnstat_cached_dict, False)
         except IOError as e:
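pfcstat and intfstat also drop the default=json_serial argument from the dump call: the cached dict carries a 'time' entry (hence the "Last cached time was ..." message), and json cannot encode values such as datetime objects without a helper like json_serial, while pickle serializes them natively. The clear/show workflow itself is unchanged: snapshot the counters at clear time, then subtract the snapshot from the live values on the next run. A minimal sketch of that workflow under those assumptions, with a hypothetical cache path, a flattened per-priority dict in place of the script's per-port PStats tuples, and a local helper standing in for ns_diff:

import _pickle as pickle
import datetime

CACHE_FILE = '/tmp/pfcstat-rx'          # hypothetical path

def save_snapshot(counters):
    # datetime pickles without a custom encoder; json.dump would need a default= helper
    snapshot = dict(counters, time=datetime.datetime.now())
    with open(CACHE_FILE, 'wb') as f:
        pickle.dump(snapshot, f)

def diff_against_snapshot(current):
    with open(CACHE_FILE, 'rb') as f:
        old = pickle.load(f)
    print('Last cached time was', old.get('time'))

    def delta(new, prev):
        # simplified stand-in for utilities_common.netstat.ns_diff
        return int(new) - int(prev)

    return {key: delta(val, old[key]) for key, val in current.items()}

if __name__ == '__main__':
    save_snapshot({'pfc0': '10', 'pfc1': '4'})
    print(diff_against_snapshot({'pfc0': '25', 'pfc1': '4'}))   # {'pfc0': 15, 'pfc1': 0}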

scripts/pg-drop (+4, -4)

@@ -5,7 +5,7 @@
 # pg-drop is a tool for show/clear ingress pg dropped packet stats.
 #
 #####################################################################
-import json
+import _pickle as pickle
 import argparse
 import os
 import sys
@@ -144,7 +144,7 @@ class PgDropStat(object):
         port_drop_ckpt = {}
         # Grab the latest clear checkpoint, if it exists
         if os.path.isfile(self.port_drop_stats_file):
-            port_drop_ckpt = json.load(open(self.port_drop_stats_file, 'r'))
+            port_drop_ckpt = pickle.load(open(self.port_drop_stats_file, 'rb'))

         # Header list contains the port name followed by the PGs. Fields is used to populate the pg values
         fields = ["0"]* (len(self.header_list) - 1)
@@ -216,10 +216,10 @@ class PgDropStat(object):

         counter_pg_drop_array = [ "SAI_INGRESS_PRIORITY_GROUP_STAT_DROPPED_PACKETS"]
         try:
-            json.dump(self.get_counts_table(
+            pickle.dump(self.get_counts_table(
                 counter_pg_drop_array,
                 COUNTERS_PG_NAME_MAP),
-                open(self.port_drop_stats_file, 'w+'))
+                open(self.port_drop_stats_file, 'wb+'))
         except IOError as e:
             print(e)
             sys.exit(e.errno)
