
Commit 2f9de33

Upgrade RustPython to match new flattened exports (#3141)
1 parent ba61bb6 commit 2f9de33
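
The upgrade tracks two changes in RustPython's flattened exports: items like `Mode`, `Tok`, and `StringKind` are now re-exported at the crate root (replacing the `mode`, `token`, and nested `lexer` paths), and the lexer entry points were renamed (`lexer::make_tokenizer` → `lexer::lex`, `lexer::make_tokenizer_located` → `lexer::lex_located`). A minimal sketch of the new call style, pieced together from the call sites in this diff (the `count_comments` helper itself is hypothetical, not part of the commit):

// New flattened imports: Mode and Tok now live at the crate root.
use rustpython_parser::{lexer, Mode, Tok};

// Hypothetical helper: count comment tokens in a source snippet using the
// renamed `lexer::lex` (formerly `lexer::make_tokenizer`). Each item is a
// LexResult, i.e. Result<(Location, Tok, Location), LexicalError>, so
// `flatten()` skips over lexical errors.
fn count_comments(contents: &str) -> usize {
    lexer::lex(contents, Mode::Module)
        .flatten()
        .filter(|(_, tok, _)| matches!(tok, Tok::Comment(..)))
        .count()
}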


49 files changed: +195 −235 lines

Cargo.lock

Lines changed: 4 additions & 4 deletions
Some generated files are not rendered by default.

Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -13,8 +13,8 @@ libcst = { git = "https://github.com/charliermarsh/LibCST", rev = "f2f0b7a487a87
 once_cell = { version = "1.16.0" }
 regex = { version = "1.6.0" }
 rustc-hash = { version = "1.1.0" }
-rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "6d71f758170d504817cc47720762c41d9031506d" }
-rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "6d71f758170d504817cc47720762c41d9031506d" }
+rustpython-common = { git = "https://github.com/RustPython/RustPython.git", rev = "edf5995a1e4c366976304ca05432dd27c913054e" }
+rustpython-parser = { features = ["lalrpop"], git = "https://github.com/RustPython/RustPython.git", rev = "edf5995a1e4c366976304ca05432dd27c913054e" }
 schemars = { version = "0.8.11" }
 serde = { version = "1.0.147", features = ["derive"] }
 serde_json = { version = "1.0.87" }

crates/ruff/resources/test/fixtures/flake8_return/RET503.py

Lines changed: 4 additions & 0 deletions
@@ -289,3 +289,7 @@ def x(y):
             return 1
         case 1:
             print()  # error
+
+
+def foo(baz: str) -> str:
+    return baz

crates/ruff/src/ast/helpers.rs

Lines changed: 12 additions & 17 deletions
@@ -9,10 +9,7 @@ use rustpython_parser::ast::{
     Arguments, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprKind, Keyword, KeywordData,
     Located, Location, MatchCase, Pattern, PatternKind, Stmt, StmtKind,
 };
-use rustpython_parser::lexer;
-use rustpython_parser::lexer::Tok;
-use rustpython_parser::mode::Mode;
-use rustpython_parser::token::StringKind;
+use rustpython_parser::{lexer, Mode, StringKind, Tok};
 use smallvec::{smallvec, SmallVec};
 
 use crate::ast::types::{Binding, BindingKind, CallPath, Range};
@@ -656,7 +653,7 @@ pub fn has_comments<T>(located: &Located<T>, locator: &Locator) -> bool {
 
 /// Returns `true` if a [`Range`] includes at least one comment.
 pub fn has_comments_in(range: Range, locator: &Locator) -> bool {
-    for tok in lexer::make_tokenizer_located(locator.slice(&range), Mode::Module, range.location) {
+    for tok in lexer::lex_located(locator.slice(&range), Mode::Module, range.location) {
         match tok {
             Ok((_, tok, _)) => {
                 if matches!(tok, Tok::Comment(..)) {
@@ -871,8 +868,7 @@ pub fn match_parens(start: Location, locator: &Locator) -> Option<Range> {
     let mut fix_start = None;
     let mut fix_end = None;
     let mut count: usize = 0;
-    for (start, tok, end) in lexer::make_tokenizer_located(contents, Mode::Module, start).flatten()
-    {
+    for (start, tok, end) in lexer::lex_located(contents, Mode::Module, start).flatten() {
         if matches!(tok, Tok::Lpar) {
             if count == 0 {
                 fix_start = Some(start);
@@ -904,8 +900,7 @@ pub fn identifier_range(stmt: &Stmt, locator: &Locator) -> Range {
         | StmtKind::AsyncFunctionDef { .. }
     ) {
         let contents = locator.slice(&Range::from_located(stmt));
-        for (start, tok, end) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt.location).flatten()
+        for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt.location).flatten()
         {
             if matches!(tok, Tok::Name { .. }) {
                 return Range::new(start, end);
@@ -937,7 +932,7 @@ pub fn find_names<'a, T, U>(
     locator: &'a Locator,
 ) -> impl Iterator<Item = Range> + 'a {
     let contents = locator.slice(&Range::from_located(located));
-    lexer::make_tokenizer_located(contents, Mode::Module, located.location)
+    lexer::lex_located(contents, Mode::Module, located.location)
         .flatten()
         .filter(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
         .map(|(start, _, end)| Range {
@@ -955,7 +950,7 @@ pub fn excepthandler_name_range(handler: &Excepthandler, locator: &Locator) -> O
         (Some(_), Some(type_)) => {
             let type_end_location = type_.end_location.unwrap();
             let contents = locator.slice(&Range::new(type_end_location, body[0].location));
-            let range = lexer::make_tokenizer_located(contents, Mode::Module, type_end_location)
+            let range = lexer::lex_located(contents, Mode::Module, type_end_location)
                 .flatten()
                 .tuple_windows()
                 .find(|(tok, next_tok)| {
@@ -982,7 +977,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
         location: handler.location,
         end_location: end,
     });
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, handler.location)
+    let range = lexer::lex_located(contents, Mode::Module, handler.location)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Except { .. }))
         .map(|(location, _, end_location)| Range {
@@ -996,7 +991,7 @@ pub fn except_range(handler: &Excepthandler, locator: &Locator) -> Range {
 /// Find f-strings that don't contain any formatted values in a `JoinedStr`.
 pub fn find_useless_f_strings(expr: &Expr, locator: &Locator) -> Vec<(Range, Range)> {
     let contents = locator.slice(&Range::from_located(expr));
-    lexer::make_tokenizer_located(contents, Mode::Module, expr.location)
+    lexer::lex_located(contents, Mode::Module, expr.location)
         .flatten()
         .filter_map(|(location, tok, end_location)| match tok {
             Tok::String {
@@ -1050,7 +1045,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
             .expect("Expected orelse to be non-empty")
             .location,
     });
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, body_end)
+    let range = lexer::lex_located(contents, Mode::Module, body_end)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Else))
         .map(|(location, _, end_location)| Range {
@@ -1066,7 +1061,7 @@ pub fn else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
 /// Return the `Range` of the first `Tok::Colon` token in a `Range`.
 pub fn first_colon_range(range: Range, locator: &Locator) -> Option<Range> {
     let contents = locator.slice(&range);
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, range.location)
+    let range = lexer::lex_located(contents, Mode::Module, range.location)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Colon))
         .map(|(location, _, end_location)| Range {
@@ -1096,7 +1091,7 @@ pub fn elif_else_range(stmt: &Stmt, locator: &Locator) -> Option<Range> {
         _ => return None,
     };
     let contents = locator.slice(&Range::new(start, end));
-    let range = lexer::make_tokenizer_located(contents, Mode::Module, start)
+    let range = lexer::lex_located(contents, Mode::Module, start)
         .flatten()
         .find(|(_, kind, _)| matches!(kind, Tok::Elif | Tok::Else))
         .map(|(location, _, end_location)| Range {
@@ -1212,8 +1207,8 @@ pub fn is_logger_candidate(func: &Expr) -> bool {
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
+    use rustpython_parser as parser;
     use rustpython_parser::ast::Location;
-    use rustpython_parser::parser;
 
     use crate::ast::helpers::{
         elif_else_range, else_range, first_colon_range, identifier_range, match_trailing_content,
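
The located variant keeps the same shape: `lex_located` takes the source slice, a `Mode`, and the `Location` at which the slice starts, and yields tokens with absolute spans. A short sketch in the style of the helpers above (`find_first_name` is a hypothetical example, not part of this diff):

use rustpython_parser::ast::Location;
use rustpython_parser::{lexer, Mode, Tok};

// Hypothetical helper: locate the first NAME token in a slice that begins at
// `start` in the original source. `lex_located` (formerly
// `make_tokenizer_located`) offsets every token's span by `start`.
fn find_first_name(contents: &str, start: Location) -> Option<(Location, Location)> {
    lexer::lex_located(contents, Mode::Module, start)
        .flatten()
        .find(|(_, tok, _)| matches!(tok, Tok::Name { .. }))
        .map(|(begin, _, end)| (begin, end))
}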

crates/ruff/src/ast/operations.rs

Lines changed: 2 additions & 6 deletions
@@ -1,9 +1,7 @@
 use bitflags::bitflags;
 use rustc_hash::FxHashMap;
 use rustpython_parser::ast::{Cmpop, Constant, Expr, ExprKind, Located, Stmt, StmtKind};
-use rustpython_parser::lexer;
-use rustpython_parser::lexer::Tok;
-use rustpython_parser::mode::Mode;
+use rustpython_parser::{lexer, Mode, Tok};
 
 use crate::ast::helpers::any_over_expr;
 use crate::ast::types::{BindingKind, Scope};
@@ -285,9 +283,7 @@ pub type LocatedCmpop<U = ()> = Located<Cmpop, U>;
 /// `CPython` doesn't either. This method iterates over the token stream and
 /// re-identifies [`Cmpop`] nodes, annotating them with valid ranges.
 pub fn locate_cmpops(contents: &str) -> Vec<LocatedCmpop> {
-    let mut tok_iter = lexer::make_tokenizer(contents, Mode::Module)
-        .flatten()
-        .peekable();
+    let mut tok_iter = lexer::lex(contents, Mode::Module).flatten().peekable();
     let mut ops: Vec<LocatedCmpop> = vec![];
     let mut count: usize = 0;
     loop {

crates/ruff/src/autofix/helpers.rs

Lines changed: 5 additions & 13 deletions
@@ -4,9 +4,7 @@ use libcst_native::{
     Codegen, CodegenState, ImportNames, ParenthesizableWhitespace, SmallStatement, Statement,
 };
 use rustpython_parser::ast::{ExcepthandlerKind, Expr, Keyword, Location, Stmt, StmtKind};
-use rustpython_parser::lexer;
-use rustpython_parser::lexer::Tok;
-use rustpython_parser::mode::Mode;
+use rustpython_parser::{lexer, Mode, Tok};
 
 use crate::ast::helpers;
 use crate::ast::helpers::to_absolute;
@@ -372,9 +370,7 @@ pub fn remove_argument(
     if n_arguments == 1 {
         // Case 1: there is only one argument.
         let mut count: usize = 0;
-        for (start, tok, end) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt_at).flatten()
-        {
+        for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
             if matches!(tok, Tok::Lpar) {
                 if count == 0 {
                     fix_start = Some(if remove_parentheses {
@@ -406,9 +402,7 @@ pub fn remove_argument(
     {
         // Case 2: argument or keyword is _not_ the last node.
         let mut seen_comma = false;
-        for (start, tok, end) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt_at).flatten()
-        {
+        for (start, tok, end) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
             if seen_comma {
                 if matches!(tok, Tok::NonLogicalNewline) {
                     // Also delete any non-logical newlines after the comma.
@@ -431,9 +425,7 @@ pub fn remove_argument(
     } else {
         // Case 3: argument or keyword is the last node, so we have to find the last
         // comma in the stmt.
-        for (start, tok, _) in
-            lexer::make_tokenizer_located(contents, Mode::Module, stmt_at).flatten()
-        {
+        for (start, tok, _) in lexer::lex_located(contents, Mode::Module, stmt_at).flatten() {
             if start == expr_at {
                 fix_end = Some(expr_end);
                 break;
@@ -455,8 +447,8 @@ pub fn remove_argument(
 #[cfg(test)]
 mod tests {
     use anyhow::Result;
+    use rustpython_parser as parser;
     use rustpython_parser::ast::Location;
-    use rustpython_parser::parser;
 
     use crate::autofix::helpers::{next_stmt_break, trailing_semicolon};
     use crate::source_code::Locator;

crates/ruff/src/checkers/ast.rs

Lines changed: 5 additions & 6 deletions
@@ -6,19 +6,18 @@ use std::path::Path;
 use itertools::Itertools;
 use log::error;
 use nohash_hasher::IntMap;
+use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
+use ruff_python::typing::TYPING_EXTENSIONS;
 use rustc_hash::{FxHashMap, FxHashSet};
 use rustpython_common::cformat::{CFormatError, CFormatErrorType};
+use rustpython_parser as parser;
 use rustpython_parser::ast::{
     Arg, Arguments, Comprehension, Constant, Excepthandler, ExcepthandlerKind, Expr, ExprContext,
     ExprKind, KeywordData, Located, Location, Operator, Pattern, PatternKind, Stmt, StmtKind,
     Suite,
 };
-use rustpython_parser::parser;
 use smallvec::smallvec;
 
-use ruff_python::builtins::{BUILTINS, MAGIC_GLOBALS};
-use ruff_python::typing::TYPING_EXTENSIONS;
-
 use crate::ast::helpers::{
     binding_range, collect_call_path, extract_handler_names, from_relative_import, to_module_path,
 };
@@ -2060,8 +2059,8 @@ where
                 value,
                 ..
             } => {
-                // If we're in a class or module scope, then the annotation needs to be available
-                // at runtime.
+                // If we're in a class or module scope, then the annotation needs to be
+                // available at runtime.
                 // See: https://docs.python.org/3/reference/simple_stmts.html#annotated-assignment-statements
                 if !self.annotations_future_enabled
                     && matches!(

crates/ruff/src/checkers/logical_lines.rs

Lines changed: 6 additions & 7 deletions
@@ -152,9 +152,8 @@ pub fn check_logical_lines(
 
 #[cfg(test)]
 mod tests {
-    use rustpython_parser::lexer;
     use rustpython_parser::lexer::LexResult;
-    use rustpython_parser::mode::Mode;
+    use rustpython_parser::{lexer, Mode};
 
     use crate::checkers::logical_lines::iter_logical_lines;
     use crate::source_code::Locator;
@@ -165,7 +164,7 @@ mod tests {
 x = 1
 y = 2
 z = x + 1"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
             .into_iter()
@@ -186,7 +185,7 @@ x = [
 ]
 y = 2
 z = x + 1"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
             .into_iter()
@@ -200,7 +199,7 @@ z = x + 1"#;
         assert_eq!(actual, expected);
 
         let contents = "x = 'abc'";
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
             .into_iter()
@@ -213,7 +212,7 @@ z = x + 1"#;
 def f():
     x = 1
 f()"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
             .into_iter()
@@ -228,7 +227,7 @@ def f():
     # Comment goes here.
     x = 1
 f()"#;
-        let lxr: Vec<LexResult> = lexer::make_tokenizer(contents, Mode::Module).collect();
+        let lxr: Vec<LexResult> = lexer::lex(contents, Mode::Module).collect();
         let locator = Locator::new(contents);
         let actual: Vec<String> = iter_logical_lines(&lxr, &locator)
             .into_iter()

crates/ruff/src/checkers/tokens.rs

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
 //! Lint rules based on token traversal.
 
-use rustpython_parser::lexer::{LexResult, Tok};
+use rustpython_parser::lexer::LexResult;
+use rustpython_parser::Tok;
 
 use crate::lex::docstring_detection::StateMachine;
 use crate::registry::{Diagnostic, Rule};
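
One more pattern recurs in the test modules above: `use rustpython_parser::parser;` becomes `use rustpython_parser as parser;`, since the parsing functions are now exported from the crate root rather than a `parser` submodule, so call sites like `parser::parse_program(...)` read the same as before. A minimal sketch, assuming `parse_program(source, source_path)` keeps the signature the unchanged test call sites rely on:

use rustpython_parser as parser;

fn main() -> anyhow::Result<()> {
    // `parse_program` is resolved through the crate-root alias; the
    // (source, source_path) signature is an assumption based on the test
    // call sites this commit leaves unchanged.
    let program = parser::parse_program("x = 1\n", "<embedded>")?;
    println!("parsed {} top-level statements", program.len());
    Ok(())
}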
