Commit 69ee5e4 — "Use Jupyter mode for lexing" (1 parent: e8eddd2)

File tree: 28 files changed (+261 additions, −72 deletions)

crates/ruff/src/autofix/edits.rs

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -81,9 +81,15 @@ pub(crate) fn remove_argument(
8181
args: &[Expr],
8282
keywords: &[Keyword],
8383
remove_parentheses: bool,
84+
is_jupyter_notebook: bool,
8485
) -> Result<Edit> {
8586
// TODO(sbrugman): Preserve trailing comments.
8687
let contents = locator.after(call_at);
88+
let mode = if is_jupyter_notebook {
89+
Mode::Jupyter
90+
} else {
91+
Mode::Module
92+
};
8793

8894
let mut fix_start = None;
8995
let mut fix_end = None;
@@ -96,7 +102,7 @@ pub(crate) fn remove_argument(
96102
if n_arguments == 1 {
97103
// Case 1: there is only one argument.
98104
let mut count = 0u32;
99-
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
105+
for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
100106
if tok.is_lpar() {
101107
if count == 0 {
102108
fix_start = Some(if remove_parentheses {
@@ -128,7 +134,7 @@ pub(crate) fn remove_argument(
128134
{
129135
// Case 2: argument or keyword is _not_ the last node.
130136
let mut seen_comma = false;
131-
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
137+
for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
132138
if seen_comma {
133139
if tok.is_non_logical_newline() {
134140
// Also delete any non-logical newlines after the comma.
@@ -151,7 +157,7 @@ pub(crate) fn remove_argument(
151157
} else {
152158
// Case 3: argument or keyword is the last node, so we have to find the last
153159
// comma in the stmt.
154-
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, call_at).flatten() {
160+
for (tok, range) in lexer::lex_starts_at(contents, mode, call_at).flatten() {
155161
if range.start() == expr_range.start() {
156162
fix_end = Some(expr_range.end());
157163
break;

crates/ruff/src/checkers/ast/mod.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ use ruff_python_semantic::{
5252
ModuleKind, ScopeId, ScopeKind, SemanticModel, SemanticModelFlags, StarImport, SubmoduleImport,
5353
};
5454
use ruff_python_stdlib::builtins::{BUILTINS, MAGIC_GLOBALS};
55-
use ruff_python_stdlib::path::is_python_stub_file;
55+
use ruff_python_stdlib::path::{is_jupyter_notebook, is_python_stub_file};
5656
use ruff_source_file::Locator;
5757

5858
use crate::checkers::ast::deferred::Deferred;
@@ -76,6 +76,8 @@ pub(crate) struct Checker<'a> {
7676
module_path: Option<&'a [String]>,
7777
/// Whether the current file is a stub (`.pyi`) file.
7878
is_stub: bool,
79+
/// Whether the current file is a Jupyter notebook (`.ipynb`) file.
80+
pub(crate) is_jupyter_notebook: bool,
7981
/// The [`flags::Noqa`] for the current analysis (i.e., whether to respect suppression
8082
/// comments).
8183
noqa: flags::Noqa,
@@ -126,6 +128,7 @@ impl<'a> Checker<'a> {
126128
package,
127129
module_path: module.path(),
128130
is_stub: is_python_stub_file(path),
131+
is_jupyter_notebook: is_jupyter_notebook(path),
129132
locator,
130133
stylist,
131134
indexer,

crates/ruff/src/checkers/imports.rs

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,13 @@ pub(crate) fn check_imports(
104104
for block in &blocks {
105105
if !block.imports.is_empty() {
106106
if let Some(diagnostic) = isort::rules::organize_imports(
107-
block, locator, stylist, indexer, settings, package,
107+
block,
108+
locator,
109+
stylist,
110+
indexer,
111+
settings,
112+
package,
113+
source_kind.map_or(false, SourceKind::is_jupyter),
108114
) {
109115
diagnostics.push(diagnostic);
110116
}

crates/ruff/src/importer/insertion.rs

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -137,16 +137,22 @@ impl<'a> Insertion<'a> {
137137
mut location: TextSize,
138138
locator: &Locator<'a>,
139139
stylist: &Stylist,
140+
is_jupyter_notebook: bool,
140141
) -> Insertion<'a> {
141142
enum Awaiting {
142143
Colon(u32),
143144
Newline,
144145
Indent,
145146
}
146147

148+
let mode = if is_jupyter_notebook {
149+
Mode::Jupyter
150+
} else {
151+
Mode::Module
152+
};
153+
147154
let mut state = Awaiting::Colon(0);
148-
for (tok, range) in
149-
lexer::lex_starts_at(locator.after(location), Mode::Module, location).flatten()
155+
for (tok, range) in lexer::lex_starts_at(locator.after(location), mode, location).flatten()
150156
{
151157
match state {
152158
// Iterate until we find the colon indicating the start of the block body.
@@ -427,7 +433,7 @@ x = 1
427433
let tokens: Vec<LexResult> = ruff_python_parser::tokenize(contents, Mode::Module);
428434
let locator = Locator::new(contents);
429435
let stylist = Stylist::from_tokens(&tokens, &locator);
430-
Insertion::start_of_block(offset, &locator, &stylist)
436+
Insertion::start_of_block(offset, &locator, &stylist, false)
431437
}
432438

433439
let contents = "if True: pass";

crates/ruff/src/importer/mod.rs

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -121,6 +121,7 @@ impl<'a> Importer<'a> {
121121
import: &StmtImports,
122122
at: TextSize,
123123
semantic: &SemanticModel,
124+
is_jupyter_notebook: bool,
124125
) -> Result<TypingImportEdit> {
125126
// Generate the modified import statement.
126127
let content = autofix::codemods::retain_imports(
@@ -140,7 +141,7 @@ impl<'a> Importer<'a> {
140141
// Add the import to a `TYPE_CHECKING` block.
141142
let add_import_edit = if let Some(block) = self.preceding_type_checking_block(at) {
142143
// Add the import to the `TYPE_CHECKING` block.
143-
self.add_to_type_checking_block(&content, block.start())
144+
self.add_to_type_checking_block(&content, block.start(), is_jupyter_notebook)
144145
} else {
145146
// Add the import to a new `TYPE_CHECKING` block.
146147
self.add_type_checking_block(
@@ -353,8 +354,14 @@ impl<'a> Importer<'a> {
353354
}
354355

355356
/// Add an import statement to an existing `TYPE_CHECKING` block.
356-
fn add_to_type_checking_block(&self, content: &str, at: TextSize) -> Edit {
357-
Insertion::start_of_block(at, self.locator, self.stylist).into_edit(content)
357+
fn add_to_type_checking_block(
358+
&self,
359+
content: &str,
360+
at: TextSize,
361+
is_jupyter_notebook: bool,
362+
) -> Edit {
363+
Insertion::start_of_block(at, self.locator, self.stylist, is_jupyter_notebook)
364+
.into_edit(content)
358365
}
359366

360367
/// Return the import statement that precedes the given position, if any.

crates/ruff/src/rules/flake8_annotations/fixes.rs

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,14 +10,20 @@ pub(crate) fn add_return_annotation(
1010
locator: &Locator,
1111
stmt: &Stmt,
1212
annotation: &str,
13+
is_jupyter_notebook: bool,
1314
) -> Result<Edit> {
1415
let contents = &locator.contents()[stmt.range()];
16+
let mode = if is_jupyter_notebook {
17+
Mode::Jupyter
18+
} else {
19+
Mode::Module
20+
};
1521

1622
// Find the colon (following the `def` keyword).
1723
let mut seen_lpar = false;
1824
let mut seen_rpar = false;
1925
let mut count = 0u32;
20-
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
26+
for (tok, range) in lexer::lex_starts_at(contents, mode, stmt.start()).flatten() {
2127
if seen_lpar && seen_rpar {
2228
if matches!(tok, Tok::Colon) {
2329
return Ok(Edit::insertion(format!(" -> {annotation}"), range.start()));

crates/ruff/src/rules/flake8_annotations/rules/definition.rs

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -706,8 +706,13 @@ pub(crate) fn definition(
706706
);
707707
if checker.patch(diagnostic.kind.rule()) {
708708
diagnostic.try_set_fix(|| {
709-
fixes::add_return_annotation(checker.locator(), stmt, "None")
710-
.map(Fix::suggested)
709+
fixes::add_return_annotation(
710+
checker.locator(),
711+
stmt,
712+
"None",
713+
checker.is_jupyter_notebook,
714+
)
715+
.map(Fix::suggested)
711716
});
712717
}
713718
diagnostics.push(diagnostic);
@@ -724,8 +729,13 @@ pub(crate) fn definition(
724729
if checker.patch(diagnostic.kind.rule()) {
725730
if let Some(return_type) = simple_magic_return_type(name) {
726731
diagnostic.try_set_fix(|| {
727-
fixes::add_return_annotation(checker.locator(), stmt, return_type)
728-
.map(Fix::suggested)
732+
fixes::add_return_annotation(
733+
checker.locator(),
734+
stmt,
735+
return_type,
736+
checker.is_jupyter_notebook,
737+
)
738+
.map(Fix::suggested)
729739
});
730740
}
731741
}

crates/ruff/src/rules/flake8_pytest_style/rules/fixture.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -351,6 +351,7 @@ fn check_fixture_decorator(checker: &mut Checker, func_name: &str, decorator: &D
351351
args,
352352
keywords,
353353
false,
354+
checker.is_jupyter_notebook,
354355
)
355356
.map(Fix::suggested)
356357
});

crates/ruff/src/rules/flake8_pytest_style/rules/parametrize.rs

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -95,18 +95,19 @@ fn elts_to_csv(elts: &[Expr], generator: Generator) -> Option<String> {
9595
/// ```
9696
///
9797
/// This method assumes that the first argument is a string.
98-
fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Locator) -> TextRange {
98+
fn get_parametrize_name_range(
99+
decorator: &Decorator,
100+
expr: &Expr,
101+
locator: &Locator,
102+
mode: Mode,
103+
) -> TextRange {
99104
let mut locations = Vec::new();
100105
let mut implicit_concat = None;
101106

102107
// The parenthesis are not part of the AST, so we need to tokenize the
103108
// decorator to find them.
104-
for (tok, range) in lexer::lex_starts_at(
105-
locator.slice(decorator.range()),
106-
Mode::Module,
107-
decorator.start(),
108-
)
109-
.flatten()
109+
for (tok, range) in
110+
lexer::lex_starts_at(locator.slice(decorator.range()), mode, decorator.start()).flatten()
110111
{
111112
match tok {
112113
Tok::Lpar => locations.push(range.start()),
@@ -131,6 +132,11 @@ fn get_parametrize_name_range(decorator: &Decorator, expr: &Expr, locator: &Loca
131132
/// PT006
132133
fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
133134
let names_type = checker.settings.flake8_pytest_style.parametrize_names_type;
135+
let mode = if checker.is_jupyter_notebook {
136+
Mode::Jupyter
137+
} else {
138+
Mode::Module
139+
};
134140

135141
match expr {
136142
Expr::Constant(ast::ExprConstant {
@@ -142,7 +148,7 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
142148
match names_type {
143149
types::ParametrizeNameType::Tuple => {
144150
let name_range =
145-
get_parametrize_name_range(decorator, expr, checker.locator());
151+
get_parametrize_name_range(decorator, expr, checker.locator(), mode);
146152
let mut diagnostic = Diagnostic::new(
147153
PytestParametrizeNamesWrongType {
148154
expected: names_type,
@@ -173,7 +179,7 @@ fn check_names(checker: &mut Checker, decorator: &Decorator, expr: &Expr) {
173179
}
174180
types::ParametrizeNameType::List => {
175181
let name_range =
176-
get_parametrize_name_range(decorator, expr, checker.locator());
182+
get_parametrize_name_range(decorator, expr, checker.locator(), mode);
177183
let mut diagnostic = Diagnostic::new(
178184
PytestParametrizeNamesWrongType {
179185
expected: names_type,

crates/ruff/src/rules/flake8_raise/rules/unnecessary_paren_on_raise_exception.rs

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
6666
return;
6767
}
6868

69-
let range = match_parens(func.end(), checker.locator())
69+
let range = match_parens(func.end(), checker.locator(), checker.is_jupyter_notebook)
7070
.expect("Expected call to include parentheses");
7171
let mut diagnostic = Diagnostic::new(UnnecessaryParenOnRaiseException, range);
7272
if checker.patch(diagnostic.kind.rule()) {
@@ -78,14 +78,24 @@ pub(crate) fn unnecessary_paren_on_raise_exception(checker: &mut Checker, expr:
7878
}
7979

8080
/// Return the range of the first parenthesis pair after a given [`TextSize`].
81-
fn match_parens(start: TextSize, locator: &Locator) -> Option<TextRange> {
81+
fn match_parens(
82+
start: TextSize,
83+
locator: &Locator,
84+
is_jupyter_notebook: bool,
85+
) -> Option<TextRange> {
8286
let contents = &locator.contents()[usize::from(start)..];
8387

8488
let mut fix_start = None;
8589
let mut fix_end = None;
8690
let mut count = 0u32;
8791

88-
for (tok, range) in lexer::lex_starts_at(contents, Mode::Module, start).flatten() {
92+
let mode = if is_jupyter_notebook {
93+
Mode::Jupyter
94+
} else {
95+
Mode::Module
96+
};
97+
98+
for (tok, range) in lexer::lex_starts_at(contents, mode, start).flatten() {
8999
match tok {
90100
Tok::Lpar => {
91101
if count == 0 {

crates/ruff/src/rules/flake8_simplify/rules/ast_if.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -374,6 +374,7 @@ pub(crate) fn nested_if_statements(checker: &mut Checker, stmt_if: &StmtIf, pare
374374
let colon = first_colon_range(
375375
TextRange::new(test.end(), first_stmt.start()),
376376
checker.locator().contents(),
377+
checker.is_jupyter_notebook,
377378
);
378379

379380
// Check if the parent is already emitting a larger diagnostic including this if statement

crates/ruff/src/rules/flake8_simplify/rules/ast_with.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -119,6 +119,7 @@ pub(crate) fn multiple_with_statements(
119119
body.first().expect("Expected body to be non-empty").start(),
120120
),
121121
checker.locator().contents(),
122+
checker.is_jupyter_notebook,
122123
);
123124

124125
let mut diagnostic = Diagnostic::new(

crates/ruff/src/rules/flake8_type_checking/rules/typing_only_runtime_import.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -447,6 +447,7 @@ fn fix_imports(checker: &Checker, stmt_id: NodeId, imports: &[Import]) -> Result
447447
},
448448
at,
449449
checker.semantic(),
450+
checker.is_jupyter_notebook,
450451
)?;
451452

452453
Ok(

crates/ruff/src/rules/isort/annotate.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ pub(crate) fn annotate_imports<'a>(
1313
comments: Vec<Comment<'a>>,
1414
locator: &Locator,
1515
split_on_trailing_comma: bool,
16+
is_jupyter_notebook: bool,
1617
) -> Vec<AnnotatedImport<'a>> {
1718
let mut comments_iter = comments.into_iter().peekable();
1819

@@ -119,7 +120,7 @@ pub(crate) fn annotate_imports<'a>(
119120
names: aliases,
120121
level: level.map(|level| level.to_u32()),
121122
trailing_comma: if split_on_trailing_comma {
122-
trailing_comma(import, locator)
123+
trailing_comma(import, locator, is_jupyter_notebook)
123124
} else {
124125
TrailingComma::default()
125126
},

crates/ruff/src/rules/isort/comments.rs

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,9 +22,18 @@ impl Comment<'_> {
2222
}
2323

2424
/// Collect all comments in an import block.
25-
pub(crate) fn collect_comments<'a>(range: TextRange, locator: &'a Locator) -> Vec<Comment<'a>> {
25+
pub(crate) fn collect_comments<'a>(
26+
range: TextRange,
27+
locator: &'a Locator,
28+
is_jupyter_notebook: bool,
29+
) -> Vec<Comment<'a>> {
2630
let contents = locator.slice(range);
27-
lexer::lex_starts_at(contents, Mode::Module, range.start())
31+
let mode = if is_jupyter_notebook {
32+
Mode::Jupyter
33+
} else {
34+
Mode::Module
35+
};
36+
lexer::lex_starts_at(contents, mode, range.start())
2837
.flatten()
2938
.filter_map(|(tok, range)| {
3039
if let Tok::Comment(value) = tok {

crates/ruff/src/rules/isort/helpers.rs

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,20 @@ use crate::rules::isort::types::TrailingComma;
88

99
/// Return `true` if a `Stmt::ImportFrom` statement ends with a magic
1010
/// trailing comma.
11-
pub(super) fn trailing_comma(stmt: &Stmt, locator: &Locator) -> TrailingComma {
11+
pub(super) fn trailing_comma(
12+
stmt: &Stmt,
13+
locator: &Locator,
14+
is_jupyter_notebook: bool,
15+
) -> TrailingComma {
1216
let contents = locator.slice(stmt.range());
1317
let mut count = 0u32;
1418
let mut trailing_comma = TrailingComma::Absent;
15-
for (tok, _) in lexer::lex_starts_at(contents, Mode::Module, stmt.start()).flatten() {
19+
let mode = if is_jupyter_notebook {
20+
Mode::Jupyter
21+
} else {
22+
Mode::Module
23+
};
24+
for (tok, _) in lexer::lex_starts_at(contents, mode, stmt.start()).flatten() {
1625
if matches!(tok, Tok::Lpar) {
1726
count = count.saturating_add(1);
1827
}

Commit comments: 0