@@ -7,6 +7,11 @@ use opentelemetry::{

use crate::metrics::{aggregation::Aggregation, internal::Measure};

+use super::meter::{
+    INSTRUMENT_NAME_EMPTY, INSTRUMENT_NAME_FIRST_ALPHABETIC, INSTRUMENT_NAME_INVALID_CHAR,
+    INSTRUMENT_NAME_LENGTH, INSTRUMENT_UNIT_INVALID_CHAR, INSTRUMENT_UNIT_LENGTH,
+};
+
use super::Temporality;

/// The identifier of a group of instruments that all perform the same function.
@@ -207,27 +212,53 @@ impl StreamBuilder {
    ///
    /// A Result containing the new Stream instance or an error if the build failed.
    pub fn build(self) -> Result<Stream, Box<dyn Error>> {
-        // TODO: Add same validation as already done while
-        // creating instruments. It is better to move validation logic
-        // to a common helper and call it from both places.
-        // The current implementations does a basic validation
-        // only to close the overall API design.
+        // TODO: Avoid copying the validation logic from meter.rs;
+        // instead, move it to a common place and run it once.
+        // It is a bug that validation is done in meter.rs,
+        // as it prevents users from fixing instrumentation
+        // mistakes using views.

-        // if name is provided, it must not be empty
+        // Validate name if provided
        if let Some(name) = &self.name {
            if name.is_empty() {
-                return Err("Stream name must not be empty".into());
+                return Err(INSTRUMENT_NAME_EMPTY.into());
+            }
+
+            if name.len() > super::meter::INSTRUMENT_NAME_MAX_LENGTH {
+                return Err(INSTRUMENT_NAME_LENGTH.into());
+            }
+
+            if name.starts_with(|c: char| !c.is_ascii_alphabetic()) {
+                return Err(INSTRUMENT_NAME_FIRST_ALPHABETIC.into());
+            }
+
+            if name.contains(|c: char| {
+                !c.is_ascii_alphanumeric()
+                    && !super::meter::INSTRUMENT_NAME_ALLOWED_NON_ALPHANUMERIC_CHARS.contains(&c)
+            }) {
+                return Err(INSTRUMENT_NAME_INVALID_CHAR.into());
            }
        }

-        // if cardinality limit is provided, it must be greater than 0
+        // Validate unit if provided
+        if let Some(unit) = &self.unit {
+            if unit.len() > super::meter::INSTRUMENT_UNIT_NAME_MAX_LENGTH {
+                return Err(INSTRUMENT_UNIT_LENGTH.into());
+            }
+
+            if unit.contains(|c: char| !c.is_ascii()) {
+                return Err(INSTRUMENT_UNIT_INVALID_CHAR.into());
+            }
+        }
+
+        // Validate cardinality limit
        if let Some(limit) = self.cardinality_limit {
            if limit == 0 {
                return Err("Cardinality limit must be greater than 0".into());
            }
        }

-        // If the aggregation is set to ExplicitBucketHistogram, validate the bucket boundaries.
+        // Validate bucket boundaries if using ExplicitBucketHistogram
        if let Some(Aggregation::ExplicitBucketHistogram { boundaries, .. }) = &self.aggregation {
            validate_bucket_boundaries(boundaries)?;
        }
@@ -356,3 +387,279 @@ impl<T: Copy + Send + Sync + 'static> AsyncInstrument<T> for Observable<T> {
        }
    }
}
+
+#[cfg(test)]
+mod tests {
+    use super::StreamBuilder;
+    use crate::metrics::meter::{
+        INSTRUMENT_NAME_EMPTY, INSTRUMENT_NAME_FIRST_ALPHABETIC, INSTRUMENT_NAME_INVALID_CHAR,
+        INSTRUMENT_NAME_LENGTH, INSTRUMENT_UNIT_INVALID_CHAR, INSTRUMENT_UNIT_LENGTH,
+    };
+
+    #[test]
+    fn stream_name_validation() {
+        // (name, expected error)
+        let stream_name_test_cases = vec![
+            ("validateName", ""),
+            ("_startWithNoneAlphabet", INSTRUMENT_NAME_FIRST_ALPHABETIC),
+            ("utf8char锈", INSTRUMENT_NAME_INVALID_CHAR),
+            ("a".repeat(255).leak(), ""),
+            ("a".repeat(256).leak(), INSTRUMENT_NAME_LENGTH),
+            ("invalid name", INSTRUMENT_NAME_INVALID_CHAR),
+            ("allow/slash", ""),
+            ("allow_under_score", ""),
+            ("allow.dots.ok", ""),
+            ("", INSTRUMENT_NAME_EMPTY),
+            ("\\allow\\slash /sec", INSTRUMENT_NAME_FIRST_ALPHABETIC),
+            ("\\allow\\$$slash /sec", INSTRUMENT_NAME_FIRST_ALPHABETIC),
+            ("Total $ Count", INSTRUMENT_NAME_INVALID_CHAR),
+            (
+                "\\test\\UsagePercent(Total) > 80%",
+                INSTRUMENT_NAME_FIRST_ALPHABETIC,
+            ),
+            ("/not / allowed", INSTRUMENT_NAME_FIRST_ALPHABETIC),
+        ];
+
+        for (name, expected_error) in stream_name_test_cases {
+            let builder = StreamBuilder::new().with_name(name);
+            let result = builder.build();
+
+            if expected_error.is_empty() {
+                assert!(
+                    result.is_ok(),
+                    "Expected successful build for name '{}', but got error: {:?}",
+                    name,
+                    result.err()
+                );
+            } else {
+                let err = result.err().unwrap();
+                let err_str = err.to_string();
+                assert!(
+                    err_str == expected_error,
+                    "For name '{}', expected error '{}', but got '{}'",
+                    name,
+                    expected_error,
+                    err_str
+                );
+            }
+        }
+    }
+
+    #[test]
+    fn stream_unit_validation() {
+        // (unit, expected error)
+        let stream_unit_test_cases = vec![
+            (
+                "0123456789012345678901234567890123456789012345678901234567890123",
+                INSTRUMENT_UNIT_LENGTH,
+            ),
+            ("utf8char锈", INSTRUMENT_UNIT_INVALID_CHAR),
+            ("kb", ""),
+            ("Kb/sec", ""),
+            ("%", ""),
+            ("", ""),
+        ];
+
+        for (unit, expected_error) in stream_unit_test_cases {
+            // Use a valid name to isolate unit validation
+            let builder = StreamBuilder::new().with_name("valid_name").with_unit(unit);
+
+            let result = builder.build();
+
+            if expected_error.is_empty() {
+                assert!(
+                    result.is_ok(),
+                    "Expected successful build for unit '{}', but got error: {:?}",
+                    unit,
+                    result.err()
+                );
+            } else {
+                let err = result.err().unwrap();
+                let err_str = err.to_string();
+                assert!(
+                    err_str == expected_error,
+                    "For unit '{}', expected error '{}', but got '{}'",
+                    unit,
+                    expected_error,
+                    err_str
+                );
+            }
+        }
+    }
+
+    #[test]
+    fn stream_cardinality_limit_validation() {
+        // Test zero cardinality limit (invalid)
+        let builder = StreamBuilder::new()
+            .with_name("valid_name")
+            .with_cardinality_limit(0);
+
+        let result = builder.build();
+        assert!(result.is_err(), "Expected error for zero cardinality limit");
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            "Cardinality limit must be greater than 0",
+            "Expected cardinality limit validation error message"
+        );
+
+        // Test valid cardinality limits
+        let valid_limits = vec![1, 10, 100, 1000];
+        for limit in valid_limits {
+            let builder = StreamBuilder::new()
+                .with_name("valid_name")
+                .with_cardinality_limit(limit);
+
+            let result = builder.build();
+            assert!(
+                result.is_ok(),
+                "Expected successful build for cardinality limit {}, but got error: {:?}",
+                limit,
+                result.err()
+            );
+        }
+    }
+
+    #[test]
+    fn stream_valid_build() {
+        // Test with valid configuration
+        let stream = StreamBuilder::new()
+            .with_name("valid_name")
+            .with_description("Valid description")
+            .with_unit("ms")
+            .with_cardinality_limit(100)
+            .build();
+
+        assert!(
+            stream.is_ok(),
+            "Expected valid Stream to be built successfully"
+        );
+    }
+
+    #[cfg(feature = "spec_unstable_metrics_views")]
+    #[test]
+    fn stream_histogram_bucket_validation() {
+        use super::Aggregation;
+
+        // Test with valid bucket boundaries
+        let valid_boundaries = vec![1.0, 2.0, 5.0, 10.0, 20.0, 50.0, 100.0];
+        let builder = StreamBuilder::new()
+            .with_name("valid_histogram")
+            .with_aggregation(Aggregation::ExplicitBucketHistogram {
+                boundaries: valid_boundaries.clone(),
+                record_min_max: true,
+            });
+
+        let result = builder.build();
+        assert!(
+            result.is_ok(),
+            "Expected successful build with valid bucket boundaries"
+        );
+
+        // Test with invalid bucket boundaries (NaN and Infinity)
+
+        // Test with NaN
+        let invalid_nan_boundaries = vec![1.0, 2.0, f64::NAN, 10.0];
+
+        let builder = StreamBuilder::new()
+            .with_name("invalid_histogram_nan")
+            .with_aggregation(Aggregation::ExplicitBucketHistogram {
+                boundaries: invalid_nan_boundaries,
+                record_min_max: true,
+            });
+
+        let result = builder.build();
+        assert!(
+            result.is_err(),
+            "Expected error for NaN in bucket boundaries"
+        );
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            "Bucket boundaries must not contain NaN, Infinity, or -Infinity",
+            "Expected correct validation error for NaN"
+        );
+
+        // Test with infinity
+        let invalid_inf_boundaries = vec![1.0, 5.0, f64::INFINITY, 100.0];
+
+        let builder = StreamBuilder::new()
+            .with_name("invalid_histogram_inf")
+            .with_aggregation(Aggregation::ExplicitBucketHistogram {
+                boundaries: invalid_inf_boundaries,
+                record_min_max: true,
+            });
+
+        let result = builder.build();
+        assert!(
+            result.is_err(),
+            "Expected error for Infinity in bucket boundaries"
+        );
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            "Bucket boundaries must not contain NaN, Infinity, or -Infinity",
+            "Expected correct validation error for Infinity"
+        );
+
+        // Test with negative infinity
+        let invalid_neg_inf_boundaries = vec![f64::NEG_INFINITY, 5.0, 10.0, 100.0];
+
+        let builder = StreamBuilder::new()
+            .with_name("invalid_histogram_neg_inf")
+            .with_aggregation(Aggregation::ExplicitBucketHistogram {
+                boundaries: invalid_neg_inf_boundaries,
+                record_min_max: true,
+            });
+
+        let result = builder.build();
+        assert!(
+            result.is_err(),
+            "Expected error for negative Infinity in bucket boundaries"
+        );
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            "Bucket boundaries must not contain NaN, Infinity, or -Infinity",
+            "Expected correct validation error for negative Infinity"
+        );
+
+        // Test with unsorted bucket boundaries
+        let unsorted_boundaries = vec![1.0, 5.0, 2.0, 10.0]; // 2.0 comes after 5.0, which is incorrect
+
+        let builder = StreamBuilder::new()
+            .with_name("unsorted_histogram")
+            .with_aggregation(Aggregation::ExplicitBucketHistogram {
+                boundaries: unsorted_boundaries,
+                record_min_max: true,
+            });
+
+        let result = builder.build();
+        assert!(
+            result.is_err(),
+            "Expected error for unsorted bucket boundaries"
+        );
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            "Bucket boundaries must be sorted and not contain any duplicates",
+            "Expected correct validation error for unsorted boundaries"
+        );
+
+        // Test with duplicate bucket boundaries
+        let duplicate_boundaries = vec![1.0, 2.0, 5.0, 5.0, 10.0]; // 5.0 appears twice
+
+        let builder = StreamBuilder::new()
+            .with_name("duplicate_histogram")
+            .with_aggregation(Aggregation::ExplicitBucketHistogram {
+                boundaries: duplicate_boundaries,
+                record_min_max: true,
+            });
+
+        let result = builder.build();
+        assert!(
+            result.is_err(),
+            "Expected error for duplicate bucket boundaries"
+        );
+        assert_eq!(
+            result.err().unwrap().to_string(),
+            "Bucket boundaries must be sorted and not contain any duplicates",
+            "Expected correct validation error for duplicate boundaries"
+        );
+    }
+}
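
The TODO in `build()` leaves the name checks duplicated between meter.rs and `StreamBuilder::build`. A minimal sketch of the shared helper it asks for is shown below; the function name, location, and `pub(crate)` visibility are illustrative only, and it simply reuses the constants this diff already imports from `super::meter`:

```rust
// Hypothetical shared helper; name and location are illustrative, not part of this PR.
// It applies the same rules as the checks above, using the constants from super::meter.
pub(crate) fn validate_instrument_name(name: &str) -> Result<(), &'static str> {
    if name.is_empty() {
        return Err(INSTRUMENT_NAME_EMPTY);
    }
    if name.len() > super::meter::INSTRUMENT_NAME_MAX_LENGTH {
        return Err(INSTRUMENT_NAME_LENGTH);
    }
    if name.starts_with(|c: char| !c.is_ascii_alphabetic()) {
        return Err(INSTRUMENT_NAME_FIRST_ALPHABETIC);
    }
    if name.contains(|c: char| {
        !c.is_ascii_alphanumeric()
            && !super::meter::INSTRUMENT_NAME_ALLOWED_NON_ALPHANUMERIC_CHARS.contains(&c)
    }) {
        return Err(INSTRUMENT_NAME_INVALID_CHAR);
    }
    Ok(())
}
```

Both call sites could then delegate to it; in `StreamBuilder::build` the `&'static str` error converts into `Box<dyn Error>` directly via `?`, so any future relaxation of the rules in meter.rs would automatically apply to views as well.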
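For context, a small usage sketch of the behavior the new tests exercise, using only the builder methods shown in this diff. The `opentelemetry_sdk::metrics::StreamBuilder` import path and a public `StreamBuilder::new()` are assumptions based on recent releases and may differ in other versions:

```rust
// Assumed import path; adjust to the crate version in use.
use opentelemetry_sdk::metrics::StreamBuilder;

fn main() {
    // Names must start with an ASCII letter, so this now fails at build time
    // instead of being accepted silently.
    let invalid = StreamBuilder::new().with_name("_requests").build();
    assert!(invalid.is_err());
    println!("rejected: {}", invalid.err().unwrap());

    // A well-formed stream builds successfully; dots, slashes, and underscores
    // are allowed in names, and the cardinality limit must be greater than zero.
    let valid = StreamBuilder::new()
        .with_name("http.server.request_duration")
        .with_unit("ms")
        .with_cardinality_limit(2000)
        .build();
    assert!(valid.is_ok());
}
```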