@@ -11,6 +11,7 @@ use rustpython_parser::ast::{
 };
 use rustpython_parser::lexer;
 use rustpython_parser::lexer::Tok;
+use rustpython_parser::mode::Mode;
 use rustpython_parser::token::StringKind;
 use smallvec::{smallvec, SmallVec};
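In short: every tokenizer call site below now threads an explicit lexing mode alongside the start location. A minimal sketch of the signature change, assuming stand-in `contents: &str` and `location: Location` bindings like those in the hunks that follow:

// Old signature (per the removed lines): source slice plus start location.
let old = lexer::make_tokenizer_located(contents, location);

// New signature (per the added lines): an explicit Mode as well, telling the
// lexer it is tokenizing module-level code.
let new = lexer::make_tokenizer_located(contents, Mode::Module, location);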
@@ -655,7 +656,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
 
 /// Returns `true` if a [`Range`] includes at least one comment.
 pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
-    for tok in lexer::make_tokenizer(locator.slice(&range)) {
+    for tok in lexer::make_tokenizer_located(locator.slice(&range), Mode::Module, range.location) {
         match tok {
             Ok((_, tok, _)) => {
                 if matches!(tok, Tok::Comment(..)) {
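The lexer yields `Result` items, so `has_comments_in` matches `Ok`/`Err` explicitly; every other call site in this diff instead discards lex errors via `.flatten()`. A small sketch of that second pattern, again with stand-in `contents` and `location` bindings:

// `.flatten()` over an iterator of Result<(Location, Tok, Location), _>
// drops the Err items, leaving only successfully lexed (start, tok, end) triples.
let n_comments = lexer::make_tokenizer_located(contents, Mode::Module, location)
    .flatten()
    .filter(|(_, tok, _)| matches!(tok, Tok::Comment(..)))
    .count();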
@@ -870,7 +871,8 @@ pub fn match_parens(start: Location, locator: &Locator) -> Option<Range> {
     let mut fix_start = None;
     let mut fix_end = None;
     let mut count: usize = 0;
-    for (start, tok, end) in lexer::make_tokenizer_located(contents, start).flatten() {
+    for (start, tok, end) in lexer::make_tokenizer_located(contents, Mode::Module, start).flatten()
+    {
         if matches!(tok, Tok::Lpar) {
             if count == 0 {
                 fix_start = Some(start);
@@ -902,7 +904,9 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
         | StmtKind::AsyncFunctionDef { .. }
     ) {
         let contents = locator.slice(&Range::from_located(stmt));
-        for (start, tok, end) in lexer::make_tokenizer_located(contents, stmt.location).flatten() {
+        for (start, tok, end) in
+            lexer::make_tokenizer_located(contents, Mode::Module, stmt.location).flatten()
+        {
             if matches!(tok, Tok::Name { .. }) {
                 return Range::new(start, end);
             }
@@ -933,7 +937,7 @@ pub fn find_names<'a, T, U>(
     locator: &'a Locator,
 ) -> impl Iterator<Item = Range> + 'a {
     let contents = locator.slice(&Range::from_located(located));
-    lexer::make_tokenizer_located(contents, located.location)
+    lexer::make_tokenizer_located(contents, Mode::Module, located.location)
         .flatten()
         .filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
         .map(|(start, _, end)| Range {
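`find_names` stays lazy, returning `impl Iterator<Item = Range>`. A hypothetical call site (the `stmt` and `locator` bindings are assumed, not part of the diff):

// Collect the source range of every identifier token within `stmt`'s span.
let name_ranges: Vec<Range> = find_names(stmt, locator).collect();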
@@ -951,7 +955,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
         (Some(_), Some(type_)) => {
             let type_end_location = type_.end_location.unwrap();
             let contents = locator.slice(&Range::new(type_end_location, body[0].location));
-            let range = lexer::make_tokenizer_located(contents, type_end_location)
+            let range = lexer::make_tokenizer_located(contents, Mode::Module, type_end_location)
                 .flatten()
                 .tuple_windows()
                 .find(|(tok, next_tok)| {
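`tuple_windows()` is the itertools adaptor that yields overlapping pairs of items, letting `excepthandler_name_range` inspect adjacent tokens. The predicate body is truncated in this hunk, so the token kinds below are an assumption about its shape:

use itertools::Itertools;

// Scan overlapping token pairs: (t0, t1), (t1, t2), ...
// Assumed predicate: the `as` keyword followed by the handler's name token.
let name = lexer::make_tokenizer_located(contents, Mode::Module, type_end_location)
    .flatten()
    .tuple_windows()
    .find(|(tok, next_tok)| {
        matches!(tok.1, Tok::As) && matches!(next_tok.1, Tok::Name { .. })
    });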
@@ -978,7 +982,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
         location: handler.location,
         end_location: end,
     });
-    let range = lexer::make_tokenizer_located(contents, handler.location)
+    let range = lexer::make_tokenizer_located(contents, Mode::Module, handler.location)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Except { .. }))
         .map(|(location, _, end_location)| Range {
@@ -992,7 +996,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
 /// Find f-strings that don't contain any formatted values in a `JoinedStr`.
 pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
     let contents = locator.slice(&Range::from_located(expr));
-    lexer::make_tokenizer_located(contents, expr.location)
+    lexer::make_tokenizer_located(contents, Mode::Module, expr.location)
         .flatten()
         .filter_map(|(location, tok, end_location)| match tok {
             Tok::String {
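The `Tok::String { ... }` arm is cut off here; given the `StringKind` import added at the top of the file, the filter presumably keys on the string token's kind. A hedged sketch of that shape (the matched fields are an assumption, and the element type is simplified to a single `Range`):

// Keep only string tokens lexed as f-strings, mapping each to its source range.
let f_strings: Vec<Range> = lexer::make_tokenizer_located(contents, Mode::Module, expr.location)
    .flatten()
    .filter_map(|(location, tok, end_location)| match tok {
        Tok::String {
            kind: StringKind::FString,
            ..
        } => Some(Range::new(location, end_location)),
        _ => None,
    })
    .collect();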
@@ -1046,7 +1050,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
             .expect("Expected orelse to be non-empty")
             .location,
     });
-    let range = lexer::make_tokenizer_located(contents, body_end)
+    let range = lexer::make_tokenizer_located(contents, Mode::Module, body_end)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Else))
         .map(|(location, _, end_location)| Range {
@@ -1062,7 +1066,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
 /// Return the `Range` of the first `Tok::Colon` token in a `Range`.
 pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
     let contents = locator.slice(&range);
-    let range = lexer::make_tokenizer_located(contents, range.location)
+    let range = lexer::make_tokenizer_located(contents, Mode::Module, range.location)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Colon))
         .map(|(location, _, end_location)| Range {
@@ -1092,7 +1096,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
         _ => return None,
     };
     let contents = locator.slice(&Range::new(start, end));
-    let range = lexer::make_tokenizer_located(contents, start)
+    let range = lexer::make_tokenizer_located(contents, Mode::Module, start)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))
         .map(|(location, _, end_location)| Range {
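The last three hunks (`else_range`, `first_colon_range`, `elif_else_range`) all instantiate the same find-first-token pattern. A hypothetical helper, not part of this diff, makes the shared shape explicit:

// Hypothetical generalization: lex `contents` starting at `start`, then
// return the source range of the first token satisfying `pred`.
fn first_matching_token_range(
    contents: &str,
    start: Location,
    pred: impl Fn(&Tok) -> bool,
) -> Option<Range> {
    lexer::make_tokenizer_located(contents, Mode::Module, start)
        .flatten()
        .find(|(_, kind, _)| pred(kind))
        .map(|(location, _, end_location)| Range {
            location,
            end_location,
        })
}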