@@ -4,7 +4,6 @@
         weighted_shuffle::WeightedShuffle,
     },
     indexmap::IndexMap,
-    // log::error,
     rand::Rng,
     solana_bloom::bloom::{Bloom, ConcurrentBloom},
     solana_native_token::LAMPORTS_PER_SOL,
@@ -15,7 +14,7 @@
 const NUM_PUSH_ACTIVE_SET_ENTRIES: usize = 25;
 const ALPHA_MIN: u64 = SCALE;
 const ALPHA_MAX: u64 = 2 * SCALE;
-const DEFAULT_ALPHA: u64 = 2 * SCALE;
+const DEFAULT_ALPHA: u64 = ALPHA_MAX;
 
 #[derive(Clone, Copy, Debug, Eq, PartialEq)]
 pub enum WeightingMode {
@@ -53,6 +52,17 @@ impl Default for PushActiveSet
     }
 }
 
+#[cfg(test)]
+impl PushActiveSet {
+    fn new_static() -> Self {
+        Self {
+            entries: Default::default(),
+            alpha: DEFAULT_ALPHA,
+            mode: WeightingMode::Static,
+        }
+    }
+}
+
 // Keys are gossip nodes to push messages to.
 // Values are which origins the node has pruned.
 #[derive(Default)]
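
Note: the test-only constructor above exists so the pre-existing expectations can keep a fixed weighting while `PushActiveSet::default()` exercises the new dynamic mode. A minimal standalone sketch of that pattern follows; the `Dynamic` variant, the `SCALE` value, and the struct body are assumptions for illustration, not the real definitions.

```rust
const SCALE: u64 = 1_000; // assumed fixed-point base, for illustration only
const ALPHA_MAX: u64 = 2 * SCALE;
const DEFAULT_ALPHA: u64 = ALPHA_MAX;

#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum WeightingMode {
    Static,
    Dynamic, // assumed second variant; the hunk above only shows `Static`
}

// Simplified stand-in: the real PushActiveSet also carries its `entries` array.
pub struct PushActiveSet {
    alpha: u64,
    mode: WeightingMode,
}

impl Default for PushActiveSet {
    fn default() -> Self {
        // Production default exercises the new dynamic weighting.
        Self { alpha: DEFAULT_ALPHA, mode: WeightingMode::Dynamic }
    }
}

#[cfg(test)]
impl PushActiveSet {
    // Test-only constructor pinning the legacy static weighting,
    // so hard-coded expected node orderings in old tests stay valid.
    fn new_static() -> Self {
        Self { alpha: DEFAULT_ALPHA, mode: WeightingMode::Static }
    }
}
```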
@@ -148,7 +158,6 @@ impl PushActiveSet
         let f_scaled = ((num_unstaked * SCALE as usize) + nodes.len() / 2) / nodes.len();
         let alpha_target = ALPHA_MIN + (f_scaled as u64).min(SCALE);
         self.alpha = filter_alpha(self.alpha, alpha_target, ALPHA_MIN, ALPHA_MAX);
-        println!("alpha_target: {}, alpha: {}", alpha_target, self.alpha);
 
         for (k, entry) in self.entries.iter_mut().enumerate() {
             let weights: Vec<u64> = buckets
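
The alpha update in the hunk above rounds the unstaked fraction of the cluster into fixed point and then filters the current value toward that target, clamped to `[ALPHA_MIN, ALPHA_MAX]`. A rough worked example of just that arithmetic, assuming `SCALE = 1_000` and a simple halfway-step `filter_alpha` (both are illustrative stand-ins, not the real definitions):

```rust
const SCALE: u64 = 1_000; // assumed fixed-point base for illustration
const ALPHA_MIN: u64 = SCALE;      // 1.0 in fixed point
const ALPHA_MAX: u64 = 2 * SCALE;  // 2.0 in fixed point

// Hypothetical smoothing: step halfway toward the target, then clamp.
fn filter_alpha(current: u64, target: u64, min: u64, max: u64) -> u64 {
    ((current + target) / 2).clamp(min, max)
}

fn main() {
    let num_nodes = 117usize;
    let num_unstaked = 40usize;
    // Same rounding as the code above: unstaked fraction in fixed point.
    let f_scaled = ((num_unstaked * SCALE as usize) + num_nodes / 2) / num_nodes;
    let alpha_target = ALPHA_MIN + (f_scaled as u64).min(SCALE);
    let alpha = filter_alpha(ALPHA_MAX, alpha_target, ALPHA_MIN, ALPHA_MAX);
    // 40/117 ≈ 0.342 -> f_scaled = 342, alpha_target = 1342,
    // alpha = (2000 + 1342) / 2 = 1671 under the assumed smoothing.
    println!("f_scaled={} alpha_target={} alpha={}", f_scaled, alpha_target, alpha);
}
```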
@@ -297,15 +306,15 @@ mod tests {
     }
 
     #[test]
-    fn test_push_active_set() {
+    fn test_push_active_set_static_weighting() {
         const CLUSTER_SIZE: usize = 117;
         let mut rng = ChaChaRng::from_seed([189u8; 32]);
         let pubkey = Pubkey::new_unique();
         let nodes: Vec<_> = repeat_with(Pubkey::new_unique).take(20).collect();
         let stakes = repeat_with(|| rng.gen_range(1..MAX_STAKE));
         let mut stakes: HashMap<_, _> = nodes.iter().copied().zip(stakes).collect();
         stakes.insert(pubkey, rng.gen_range(1..MAX_STAKE));
-        let mut active_set = PushActiveSet::default();
+        let mut active_set = PushActiveSet::new_static();
         assert!(active_set.entries.iter().all(|entry| entry.0.is_empty()));
         active_set.rotate(&mut rng, 5, CLUSTER_SIZE, &nodes, &stakes);
         assert!(active_set.entries.iter().all(|entry| entry.0.len() == 5));
@@ -317,28 +326,9 @@ mod tests {
         }
         let other = &nodes[5];
         let origin = &nodes[17];
-
-        // Debug: print expected nodes
-        // let expected_indices = [13, 5, 18, 16, 0];
-        // println!("Expected nodes:");
-        // for (i, &k) in expected_indices.iter().enumerate() {
-        //     println!(" [{}] nodes[{}] = {:?}", i, k, nodes[k]);
-        // }
-
-        // // Debug: print actual nodes returned by get_nodes
-        // let actual_nodes: Vec<_> = active_set
-        //     .get_nodes(&pubkey, origin, |_| false, &stakes)
-        //     .collect();
-        // println!("Actual nodes returned by get_nodes:");
-        // for (i, node) in actual_nodes.iter().enumerate() {
-        //     println!(" [{}] = {:?}", i, node);
-        // }
-
         assert!(active_set
             .get_nodes(&pubkey, origin, |_| false, &stakes)
             .eq([13, 5, 18, 16, 0].into_iter().map(|k| &nodes[k])));
-
-        ///////////////////////////
         assert!(active_set
             .get_nodes(&pubkey, other, |_| false, &stakes)
             .eq([13, 18, 16, 0].into_iter().map(|k| &nodes[k])));
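
The assertions above (and the pruning ones later in this test) rely on one property of the entry map: each destination node tracks which origins it has pruned, and `get_nodes` skips a destination only for those origins. A simplified stand-in that mimics that behavior with a `HashSet` instead of the real per-node `ConcurrentBloom` filter (names and layout here are illustrative, not the actual entry type):

```rust
use std::collections::{HashMap, HashSet};

// Stand-in for one active-set entry: destination node -> origins it has pruned.
#[derive(Default)]
struct Entry(HashMap<&'static str, HashSet<&'static str>>);

impl Entry {
    fn prune(&mut self, node: &'static str, origin: &'static str) {
        self.0.entry(node).or_default().insert(origin);
    }

    // Yield nodes from `order` that have not pruned this origin.
    fn get_nodes<'a>(
        &'a self,
        order: &'a [&'static str],
        origin: &'static str,
    ) -> impl Iterator<Item = &'static str> + 'a {
        order
            .iter()
            .copied()
            .filter(move |node| self.0.get(node).map_or(true, |pruned| !pruned.contains(origin)))
    }
}

fn main() {
    let order = ["n13", "n5", "n18", "n16", "n0"];
    let mut entry = Entry::default();
    entry.prune("n5", "origin_a");
    // "n5" is skipped for the pruned origin but still returned for other origins.
    assert!(entry.get_nodes(&order, "origin_a").eq(["n13", "n18", "n16", "n0"]));
    assert!(entry.get_nodes(&order, "origin_b").eq(order));
}
```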
@@ -371,6 +361,63 @@ mod tests {
             .eq([16, 7, 11].into_iter().map(|k| &nodes[k])));
     }
 
+    #[test]
+    fn test_push_active_set_dynamic_weighting() {
+        const CLUSTER_SIZE: usize = 117;
+        let mut rng = ChaChaRng::from_seed([14u8; 32]);
+        let pubkey = Pubkey::new_unique();
+        let nodes: Vec<_> = repeat_with(Pubkey::new_unique).take(20).collect();
+        let stakes = repeat_with(|| rng.gen_range(1..MAX_STAKE));
+        let mut stakes: HashMap<_, _> = nodes.iter().copied().zip(stakes).collect();
+        stakes.insert(pubkey, rng.gen_range(1..MAX_STAKE));
+        let mut active_set = PushActiveSet::default();
+        assert!(active_set.entries.iter().all(|entry| entry.0.is_empty()));
+        active_set.rotate(&mut rng, 5, CLUSTER_SIZE, &nodes, &stakes);
+        assert!(active_set.entries.iter().all(|entry| entry.0.len() == 5));
+        // Assert that for all entries, each filter already prunes the key.
+        for entry in &active_set.entries {
+            for (node, filter) in entry.0.iter() {
+                assert!(filter.contains(node));
+            }
+        }
+        let other = &nodes[6];
+        let origin = &nodes[17];
+        assert!(active_set
+            .get_nodes(&pubkey, origin, |_| false, &stakes)
+            .eq([7, 6, 2, 4, 12].into_iter().map(|k| &nodes[k])));
+        assert!(active_set
+            .get_nodes(&pubkey, other, |_| false, &stakes)
+            .eq([7, 2, 4, 12].into_iter().map(|k| &nodes[k])));
+
+        active_set.prune(&pubkey, &nodes[6], &[*origin], &stakes);
+        active_set.prune(&pubkey, &nodes[11], &[*origin], &stakes);
+        active_set.prune(&pubkey, &nodes[4], &[*origin], &stakes);
+        assert!(active_set
+            .get_nodes(&pubkey, origin, |_| false, &stakes)
+            .eq([7, 2, 12].into_iter().map(|k| &nodes[k])));
+        assert!(active_set
+            .get_nodes(&pubkey, other, |_| false, &stakes)
+            .eq([7, 2, 4, 12].into_iter().map(|k| &nodes[k])));
+        active_set.rotate(&mut rng, 7, CLUSTER_SIZE, &nodes, &stakes);
+        assert!(active_set.entries.iter().all(|entry| entry.0.len() == 7));
+        assert!(active_set
+            .get_nodes(&pubkey, origin, |_| false, &stakes)
+            .eq([2, 12, 16, 9, 14].into_iter().map(|k| &nodes[k])));
+        assert!(active_set
+            .get_nodes(&pubkey, other, |_| false, &stakes)
+            .eq([2, 4, 12, 16, 9, 14].into_iter().map(|k| &nodes[k])));
+        let origins = [*origin, *other];
+        active_set.prune(&pubkey, &nodes[2], &origins, &stakes);
+        active_set.prune(&pubkey, &nodes[12], &origins, &stakes);
+        active_set.prune(&pubkey, &nodes[9], &origins, &stakes);
+        assert!(active_set
+            .get_nodes(&pubkey, origin, |_| false, &stakes)
+            .eq([16, 14].into_iter().map(|k| &nodes[k])));
+        assert!(active_set
+            .get_nodes(&pubkey, other, |_| false, &stakes)
+            .eq([4, 16, 14].into_iter().map(|k| &nodes[k])));
+    }
+
     #[test]
     fn test_push_active_set_entry() {
         const NUM_BLOOM_FILTER_ITEMS: usize = 100;
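
Both tests above pin their RNG with a fixed seed, which is what makes the hard-coded index lists (e.g. `[7, 6, 2, 4, 12]`) stable across runs: the stakes drawn from `gen_range` and the weighted shuffle inside `rotate` become fully deterministic. A tiny sketch of that setup; the `1..1_000` stake range is illustrative, the tests use `MAX_STAKE`.

```rust
use {
    rand::{Rng, SeedableRng},
    rand_chacha::ChaChaRng,
};

fn main() {
    // Fixed seed, as in the tests: every run draws the same sequence.
    let mut rng = ChaChaRng::from_seed([14u8; 32]);
    let stakes: Vec<u64> = (0..5).map(|_| rng.gen_range(1..1_000)).collect();
    // With the seed fixed, these values (and any shuffle keyed off this RNG)
    // are identical on every run, so expected node orderings can be hard-coded.
    println!("{:?}", stakes);
}
```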